Compare commits
110 Commits
26f5a2ce3b
...
v0.0.1
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
dafd7adb00 | ||
|
|
44eecfa5cd | ||
|
|
ff76751629 | ||
|
|
413839452c | ||
|
|
c33e899be7 | ||
|
|
180514a039 | ||
|
|
60fced56ed | ||
|
|
515c942623 | ||
|
|
3ccd4b6548 | ||
|
|
dad608e3a2 | ||
|
|
7479dd6daf | ||
|
|
e4dff0cad1 | ||
|
|
717367252c | ||
|
|
a06808a2a2 | ||
|
|
6b750df1c4 | ||
|
|
ea56bcf2d7 | ||
|
|
826466aa55 | ||
|
|
6a5dba4eba | ||
|
|
8ad0016a8e | ||
|
|
3c226de62f | ||
|
|
c8c62a98bb | ||
|
|
2ae2871822 | ||
|
|
a950feaef1 | ||
|
|
695969d759 | ||
|
|
a72b0954db | ||
|
|
4572230c9c | ||
|
|
752d7ec0e7 | ||
|
|
9ab38dfc59 | ||
|
|
907bcd5017 | ||
|
|
83caf4be5b | ||
|
|
1533bea2a6 | ||
|
|
94d1e81852 | ||
|
|
8e27f45a2b | ||
|
|
a86f56f588 | ||
|
|
651cf9de6e | ||
|
|
63d8078688 | ||
|
|
ee69dbedfc | ||
|
|
313d871948 | ||
|
|
f4d2693561 | ||
|
|
2051572ee2 | ||
|
|
cc433b4215 | ||
|
|
31b60c4e24 | ||
|
|
017a0c218e | ||
|
|
4ff01681d4 | ||
|
|
f2744e3094 | ||
|
|
ea5b5a685d | ||
|
|
045d9ea890 | ||
|
|
9613bddc60 | ||
|
|
2b111c603c | ||
|
|
82124c3145 | ||
|
|
17ef48e392 | ||
| 4085f42160 | |||
|
|
0fcbe83cc2 | ||
|
|
5a0a915cc6 | ||
| f01487ccb4 | |||
|
|
033cfcf5fc | ||
|
|
6d650cdf34 | ||
|
|
6f5b5b8655 | ||
|
|
653ef958ed | ||
|
|
48b17f83a3 | ||
|
|
9d08e74913 | ||
|
|
f42e6279e6 | ||
|
|
d025919f8d | ||
|
|
db6143f9da | ||
|
|
4821ddebba | ||
|
|
65001e0ed0 | ||
|
|
1881aca0e4 | ||
|
|
4842507ff3 | ||
|
|
708aae720c | ||
|
|
ebe97bd386 | ||
|
|
01295c84d8 | ||
|
|
eb0cc8c141 | ||
|
|
b06b3f52a8 | ||
| ecd76bda97 | |||
|
|
4bc48afbf8 | ||
|
|
038b663b8c | ||
|
|
329e4b0b16 | ||
|
|
7c949274c5 | ||
|
|
6b9988f43a | ||
|
|
0edbdea2eb | ||
|
|
b61c32729b | ||
|
|
9fbda7715c | ||
|
|
4d5a4842b9 | ||
|
|
321b8808cc | ||
|
|
c6da858c2f | ||
|
|
c6b2f7c331 | ||
|
|
0cea8af6bc | ||
|
|
1d6ae00b1c | ||
|
|
e8842e3bdc | ||
|
|
4d33592015 | ||
|
|
a0944a1c72 | ||
|
|
fa3bc592d1 | ||
|
|
950f16be7a | ||
|
|
a634bf9f9d | ||
|
|
2bcbff3ee6 | ||
|
|
fc412f7251 | ||
|
|
82117deaab | ||
|
|
247fdb01c0 | ||
|
|
b393d262cb | ||
|
|
ff3a046f5a | ||
|
|
88df324b4b | ||
|
|
c1cf8ae260 | ||
|
|
229463a2e8 | ||
|
|
15f20d22ad | ||
|
|
672544660f | ||
| 966db8545b | |||
|
|
c346babe33 | ||
| 8c2215ba58 | |||
|
|
c316e80d7f | ||
|
|
796be06a09 |
@@ -2,15 +2,17 @@ name: CI
|
|||||||
|
|
||||||
on:
|
on:
|
||||||
push:
|
push:
|
||||||
branches: [main]
|
branches: [main, 'feature/**', 'fix/**', 'feat/**']
|
||||||
tags-ignore:
|
tags-ignore:
|
||||||
- 'v*'
|
- 'v*'
|
||||||
pull_request:
|
pull_request:
|
||||||
branches: [main]
|
branches: [main]
|
||||||
|
delete:
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
build:
|
build:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
|
if: github.event_name != 'delete'
|
||||||
container:
|
container:
|
||||||
image: maven:3.9-eclipse-temurin-17
|
image: maven:3.9-eclipse-temurin-17
|
||||||
steps:
|
steps:
|
||||||
@@ -60,7 +62,7 @@ jobs:
|
|||||||
docker:
|
docker:
|
||||||
needs: build
|
needs: build
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
if: github.ref == 'refs/heads/main'
|
if: github.event_name == 'push'
|
||||||
container:
|
container:
|
||||||
image: docker:27
|
image: docker:27
|
||||||
steps:
|
steps:
|
||||||
@@ -74,15 +76,36 @@ jobs:
|
|||||||
run: echo "$REGISTRY_TOKEN" | docker login gitea.siegeln.net -u cameleer --password-stdin
|
run: echo "$REGISTRY_TOKEN" | docker login gitea.siegeln.net -u cameleer --password-stdin
|
||||||
env:
|
env:
|
||||||
REGISTRY_TOKEN: ${{ secrets.REGISTRY_TOKEN }}
|
REGISTRY_TOKEN: ${{ secrets.REGISTRY_TOKEN }}
|
||||||
|
- name: Compute branch slug
|
||||||
|
run: |
|
||||||
|
sanitize_branch() {
|
||||||
|
echo "$1" | sed -E 's#^(feature|fix|feat|hotfix)/##' \
|
||||||
|
| tr '[:upper:]' '[:lower:]' \
|
||||||
|
| sed 's/[^a-z0-9-]/-/g' \
|
||||||
|
| sed 's/--*/-/g; s/^-//; s/-$//' \
|
||||||
|
| cut -c1-20 \
|
||||||
|
| sed 's/-$//'
|
||||||
|
}
|
||||||
|
if [ "$GITHUB_REF_NAME" = "main" ]; then
|
||||||
|
echo "BRANCH_SLUG=main" >> "$GITHUB_ENV"
|
||||||
|
echo "IMAGE_TAGS=latest" >> "$GITHUB_ENV"
|
||||||
|
else
|
||||||
|
SLUG=$(sanitize_branch "$GITHUB_REF_NAME")
|
||||||
|
echo "BRANCH_SLUG=$SLUG" >> "$GITHUB_ENV"
|
||||||
|
echo "IMAGE_TAGS=branch-$SLUG" >> "$GITHUB_ENV"
|
||||||
|
fi
|
||||||
- name: Set up QEMU for cross-platform builds
|
- name: Set up QEMU for cross-platform builds
|
||||||
run: docker run --rm --privileged tonistiigi/binfmt --install all
|
run: docker run --rm --privileged tonistiigi/binfmt --install all
|
||||||
- name: Build and push server
|
- name: Build and push server
|
||||||
run: |
|
run: |
|
||||||
docker buildx create --use --name cibuilder
|
docker buildx create --use --name cibuilder
|
||||||
|
TAGS="-t gitea.siegeln.net/cameleer/cameleer3-server:${{ github.sha }}"
|
||||||
|
for TAG in $IMAGE_TAGS; do
|
||||||
|
TAGS="$TAGS -t gitea.siegeln.net/cameleer/cameleer3-server:$TAG"
|
||||||
|
done
|
||||||
docker buildx build --platform linux/amd64 \
|
docker buildx build --platform linux/amd64 \
|
||||||
--build-arg REGISTRY_TOKEN="$REGISTRY_TOKEN" \
|
--build-arg REGISTRY_TOKEN="$REGISTRY_TOKEN" \
|
||||||
-t gitea.siegeln.net/cameleer/cameleer3-server:${{ github.sha }} \
|
$TAGS \
|
||||||
-t gitea.siegeln.net/cameleer/cameleer3-server:latest \
|
|
||||||
--cache-from type=registry,ref=gitea.siegeln.net/cameleer/cameleer3-server:buildcache \
|
--cache-from type=registry,ref=gitea.siegeln.net/cameleer/cameleer3-server:buildcache \
|
||||||
--cache-to type=registry,ref=gitea.siegeln.net/cameleer/cameleer3-server:buildcache,mode=max \
|
--cache-to type=registry,ref=gitea.siegeln.net/cameleer/cameleer3-server:buildcache,mode=max \
|
||||||
--provenance=false \
|
--provenance=false \
|
||||||
@@ -91,10 +114,14 @@ jobs:
|
|||||||
REGISTRY_TOKEN: ${{ secrets.REGISTRY_TOKEN }}
|
REGISTRY_TOKEN: ${{ secrets.REGISTRY_TOKEN }}
|
||||||
- name: Build and push UI
|
- name: Build and push UI
|
||||||
run: |
|
run: |
|
||||||
|
TAGS="-t gitea.siegeln.net/cameleer/cameleer3-server-ui:${{ github.sha }}"
|
||||||
|
for TAG in $IMAGE_TAGS; do
|
||||||
|
TAGS="$TAGS -t gitea.siegeln.net/cameleer/cameleer3-server-ui:$TAG"
|
||||||
|
done
|
||||||
docker buildx build --platform linux/amd64 \
|
docker buildx build --platform linux/amd64 \
|
||||||
-f ui/Dockerfile \
|
-f ui/Dockerfile \
|
||||||
-t gitea.siegeln.net/cameleer/cameleer3-server-ui:${{ github.sha }} \
|
--build-arg REGISTRY_TOKEN="$REGISTRY_TOKEN" \
|
||||||
-t gitea.siegeln.net/cameleer/cameleer3-server-ui:latest \
|
$TAGS \
|
||||||
--cache-from type=registry,ref=gitea.siegeln.net/cameleer/cameleer3-server-ui:buildcache \
|
--cache-from type=registry,ref=gitea.siegeln.net/cameleer/cameleer3-server-ui:buildcache \
|
||||||
--cache-to type=registry,ref=gitea.siegeln.net/cameleer/cameleer3-server-ui:buildcache,mode=max \
|
--cache-to type=registry,ref=gitea.siegeln.net/cameleer/cameleer3-server-ui:buildcache,mode=max \
|
||||||
--provenance=false \
|
--provenance=false \
|
||||||
@@ -110,13 +137,28 @@ jobs:
|
|||||||
API="https://gitea.siegeln.net/api/v1"
|
API="https://gitea.siegeln.net/api/v1"
|
||||||
AUTH="Authorization: token ${REGISTRY_TOKEN}"
|
AUTH="Authorization: token ${REGISTRY_TOKEN}"
|
||||||
CURRENT_SHA="${{ github.sha }}"
|
CURRENT_SHA="${{ github.sha }}"
|
||||||
|
# Build list of tags to keep
|
||||||
|
KEEP_TAGS="latest buildcache $CURRENT_SHA"
|
||||||
|
if [ "$BRANCH_SLUG" != "main" ]; then
|
||||||
|
KEEP_TAGS="$KEEP_TAGS branch-$BRANCH_SLUG"
|
||||||
|
fi
|
||||||
for PKG in cameleer3-server cameleer3-server-ui; do
|
for PKG in cameleer3-server cameleer3-server-ui; do
|
||||||
curl -sf -H "$AUTH" "$API/packages/cameleer/container/$PKG" | \
|
curl -sf -H "$AUTH" "$API/packages/cameleer/container/$PKG" | \
|
||||||
jq -r '.[] | "\(.id) \(.version)"' | \
|
jq -r '.[] | "\(.id) \(.version)"' | \
|
||||||
while read id version; do
|
while read id version; do
|
||||||
if [ "$version" != "latest" ] && [ "$version" != "$CURRENT_SHA" ]; then
|
SHOULD_KEEP=false
|
||||||
echo "Deleting old image tag: $PKG:$version"
|
for KEEP in $KEEP_TAGS; do
|
||||||
curl -sf -X DELETE -H "$AUTH" "$API/packages/cameleer/container/$PKG/$version"
|
if [ "$version" = "$KEEP" ]; then
|
||||||
|
SHOULD_KEEP=true
|
||||||
|
break
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
if [ "$SHOULD_KEEP" = "false" ]; then
|
||||||
|
# Only clean up images for this branch
|
||||||
|
if [ "$BRANCH_SLUG" = "main" ] || echo "$version" | grep -q "branch-$BRANCH_SLUG"; then
|
||||||
|
echo "Deleting old image tag: $PKG:$version"
|
||||||
|
curl -sf -X DELETE -H "$AUTH" "$API/packages/cameleer/container/$PKG/$version"
|
||||||
|
fi
|
||||||
fi
|
fi
|
||||||
done
|
done
|
||||||
done
|
done
|
||||||
@@ -129,7 +171,7 @@ jobs:
|
|||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
if: github.ref == 'refs/heads/main'
|
if: github.ref == 'refs/heads/main'
|
||||||
container:
|
container:
|
||||||
image: bitnami/kubectl:latest
|
image: alpine/k8s:1.32.3
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout
|
- name: Checkout
|
||||||
run: |
|
run: |
|
||||||
@@ -181,14 +223,6 @@ jobs:
|
|||||||
--from-literal=AUTHENTIK_SECRET_KEY="${AUTHENTIK_SECRET_KEY}" \
|
--from-literal=AUTHENTIK_SECRET_KEY="${AUTHENTIK_SECRET_KEY}" \
|
||||||
--dry-run=client -o yaml | kubectl apply -f -
|
--dry-run=client -o yaml | kubectl apply -f -
|
||||||
|
|
||||||
kubectl create secret generic cameleer-oidc \
|
|
||||||
--namespace=cameleer \
|
|
||||||
--from-literal=CAMELEER_OIDC_ENABLED="${CAMELEER_OIDC_ENABLED:-false}" \
|
|
||||||
--from-literal=CAMELEER_OIDC_ISSUER="${CAMELEER_OIDC_ISSUER}" \
|
|
||||||
--from-literal=CAMELEER_OIDC_CLIENT_ID="${CAMELEER_OIDC_CLIENT_ID}" \
|
|
||||||
--from-literal=CAMELEER_OIDC_CLIENT_SECRET="${CAMELEER_OIDC_CLIENT_SECRET}" \
|
|
||||||
--dry-run=client -o yaml | kubectl apply -f -
|
|
||||||
|
|
||||||
kubectl apply -f deploy/postgres.yaml
|
kubectl apply -f deploy/postgres.yaml
|
||||||
kubectl -n cameleer rollout status statefulset/postgres --timeout=120s
|
kubectl -n cameleer rollout status statefulset/postgres --timeout=120s
|
||||||
|
|
||||||
@@ -198,12 +232,11 @@ jobs:
|
|||||||
kubectl apply -f deploy/authentik.yaml
|
kubectl apply -f deploy/authentik.yaml
|
||||||
kubectl -n cameleer rollout status deployment/authentik-server --timeout=180s
|
kubectl -n cameleer rollout status deployment/authentik-server --timeout=180s
|
||||||
|
|
||||||
kubectl apply -f deploy/server.yaml
|
kubectl apply -k deploy/overlays/main
|
||||||
kubectl -n cameleer set image deployment/cameleer3-server \
|
kubectl -n cameleer set image deployment/cameleer3-server \
|
||||||
server=gitea.siegeln.net/cameleer/cameleer3-server:${{ github.sha }}
|
server=gitea.siegeln.net/cameleer/cameleer3-server:${{ github.sha }}
|
||||||
kubectl -n cameleer rollout status deployment/cameleer3-server --timeout=120s
|
kubectl -n cameleer rollout status deployment/cameleer3-server --timeout=120s
|
||||||
|
|
||||||
kubectl apply -f deploy/ui.yaml
|
|
||||||
kubectl -n cameleer set image deployment/cameleer3-ui \
|
kubectl -n cameleer set image deployment/cameleer3-ui \
|
||||||
ui=gitea.siegeln.net/cameleer/cameleer3-server-ui:${{ github.sha }}
|
ui=gitea.siegeln.net/cameleer/cameleer3-server-ui:${{ github.sha }}
|
||||||
kubectl -n cameleer rollout status deployment/cameleer3-ui --timeout=120s
|
kubectl -n cameleer rollout status deployment/cameleer3-ui --timeout=120s
|
||||||
@@ -221,7 +254,141 @@ jobs:
|
|||||||
AUTHENTIK_PG_USER: ${{ secrets.AUTHENTIK_PG_USER }}
|
AUTHENTIK_PG_USER: ${{ secrets.AUTHENTIK_PG_USER }}
|
||||||
AUTHENTIK_PG_PASSWORD: ${{ secrets.AUTHENTIK_PG_PASSWORD }}
|
AUTHENTIK_PG_PASSWORD: ${{ secrets.AUTHENTIK_PG_PASSWORD }}
|
||||||
AUTHENTIK_SECRET_KEY: ${{ secrets.AUTHENTIK_SECRET_KEY }}
|
AUTHENTIK_SECRET_KEY: ${{ secrets.AUTHENTIK_SECRET_KEY }}
|
||||||
CAMELEER_OIDC_ENABLED: ${{ secrets.CAMELEER_OIDC_ENABLED }}
|
|
||||||
CAMELEER_OIDC_ISSUER: ${{ secrets.CAMELEER_OIDC_ISSUER }}
|
deploy-feature:
|
||||||
CAMELEER_OIDC_CLIENT_ID: ${{ secrets.CAMELEER_OIDC_CLIENT_ID }}
|
needs: docker
|
||||||
CAMELEER_OIDC_CLIENT_SECRET: ${{ secrets.CAMELEER_OIDC_CLIENT_SECRET }}
|
runs-on: ubuntu-latest
|
||||||
|
if: github.ref != 'refs/heads/main' && github.event_name == 'push'
|
||||||
|
container:
|
||||||
|
image: alpine/k8s:1.32.3
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
run: |
|
||||||
|
git clone --depth=1 --branch=${GITHUB_REF_NAME} https://cameleer:${REGISTRY_TOKEN}@gitea.siegeln.net/${GITHUB_REPOSITORY}.git .
|
||||||
|
env:
|
||||||
|
REGISTRY_TOKEN: ${{ secrets.REGISTRY_TOKEN }}
|
||||||
|
- name: Configure kubectl
|
||||||
|
run: |
|
||||||
|
mkdir -p ~/.kube
|
||||||
|
echo "$KUBECONFIG_B64" | base64 -d > ~/.kube/config
|
||||||
|
env:
|
||||||
|
KUBECONFIG_B64: ${{ secrets.KUBECONFIG_BASE64 }}
|
||||||
|
- name: Compute branch variables
|
||||||
|
run: |
|
||||||
|
sanitize_branch() {
|
||||||
|
echo "$1" | sed -E 's#^(feature|fix|feat|hotfix)/##' \
|
||||||
|
| tr '[:upper:]' '[:lower:]' \
|
||||||
|
| sed 's/[^a-z0-9-]/-/g' \
|
||||||
|
| sed 's/--*/-/g; s/^-//; s/-$//' \
|
||||||
|
| cut -c1-20 \
|
||||||
|
| sed 's/-$//'
|
||||||
|
}
|
||||||
|
SLUG=$(sanitize_branch "$GITHUB_REF_NAME")
|
||||||
|
NS="cam-${SLUG}"
|
||||||
|
SCHEMA="cam_$(echo $SLUG | tr '-' '_')"
|
||||||
|
echo "BRANCH_SLUG=$SLUG" >> "$GITHUB_ENV"
|
||||||
|
echo "BRANCH_NS=$NS" >> "$GITHUB_ENV"
|
||||||
|
echo "BRANCH_SCHEMA=$SCHEMA" >> "$GITHUB_ENV"
|
||||||
|
- name: Create namespace
|
||||||
|
run: kubectl create namespace "$BRANCH_NS" --dry-run=client -o yaml | kubectl apply -f -
|
||||||
|
- name: Copy secrets from cameleer namespace
|
||||||
|
run: |
|
||||||
|
for SECRET in gitea-registry postgres-credentials opensearch-credentials cameleer-auth; do
|
||||||
|
kubectl get secret "$SECRET" -n cameleer -o json \
|
||||||
|
| jq 'del(.metadata.namespace, .metadata.resourceVersion, .metadata.uid, .metadata.creationTimestamp, .metadata.managedFields)' \
|
||||||
|
| kubectl apply -n "$BRANCH_NS" -f -
|
||||||
|
done
|
||||||
|
- name: Substitute placeholders and deploy
|
||||||
|
run: |
|
||||||
|
# Work on a copy preserving the directory structure so ../../base resolves
|
||||||
|
mkdir -p /tmp/feature-deploy/deploy/overlays
|
||||||
|
cp -r deploy/base /tmp/feature-deploy/deploy/base
|
||||||
|
cp -r deploy/overlays/feature /tmp/feature-deploy/deploy/overlays/feature
|
||||||
|
# Substitute all BRANCH_* placeholders
|
||||||
|
for f in /tmp/feature-deploy/deploy/overlays/feature/*.yaml; do
|
||||||
|
sed -i \
|
||||||
|
-e "s|BRANCH_NAMESPACE|${BRANCH_NS}|g" \
|
||||||
|
-e "s|BRANCH_SCHEMA|${BRANCH_SCHEMA}|g" \
|
||||||
|
-e "s|BRANCH_SLUG|${BRANCH_SLUG}|g" \
|
||||||
|
-e "s|BRANCH_SHA|${{ github.sha }}|g" \
|
||||||
|
"$f"
|
||||||
|
done
|
||||||
|
kubectl apply -k /tmp/feature-deploy/deploy/overlays/feature
|
||||||
|
- name: Wait for init-job
|
||||||
|
run: |
|
||||||
|
kubectl -n "$BRANCH_NS" wait --for=condition=complete job/init-schema --timeout=60s || \
|
||||||
|
echo "Warning: init-schema job did not complete in time"
|
||||||
|
- name: Wait for server rollout
|
||||||
|
run: kubectl -n "$BRANCH_NS" rollout status deployment/cameleer3-server --timeout=120s
|
||||||
|
- name: Wait for UI rollout
|
||||||
|
run: kubectl -n "$BRANCH_NS" rollout status deployment/cameleer3-ui --timeout=60s
|
||||||
|
- name: Print deployment URLs
|
||||||
|
run: |
|
||||||
|
echo "===================================="
|
||||||
|
echo "Feature branch deployed!"
|
||||||
|
echo "API: http://${BRANCH_SLUG}-api.cameleer.siegeln.net"
|
||||||
|
echo "UI: http://${BRANCH_SLUG}.cameleer.siegeln.net"
|
||||||
|
echo "===================================="
|
||||||
|
env:
|
||||||
|
REGISTRY_TOKEN: ${{ secrets.REGISTRY_TOKEN }}
|
||||||
|
|
||||||
|
cleanup-branch:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
if: github.event_name == 'delete' && github.event.ref_type == 'branch'
|
||||||
|
container:
|
||||||
|
image: alpine/k8s:1.32.3
|
||||||
|
steps:
|
||||||
|
- name: Configure kubectl
|
||||||
|
run: |
|
||||||
|
mkdir -p ~/.kube
|
||||||
|
echo "$KUBECONFIG_B64" | base64 -d > ~/.kube/config
|
||||||
|
env:
|
||||||
|
KUBECONFIG_B64: ${{ secrets.KUBECONFIG_BASE64 }}
|
||||||
|
- name: Compute branch variables
|
||||||
|
run: |
|
||||||
|
sanitize_branch() {
|
||||||
|
echo "$1" | sed -E 's#^(feature|fix|feat|hotfix)/##' \
|
||||||
|
| tr '[:upper:]' '[:lower:]' \
|
||||||
|
| sed 's/[^a-z0-9-]/-/g' \
|
||||||
|
| sed 's/--*/-/g; s/^-//; s/-$//' \
|
||||||
|
| cut -c1-20 \
|
||||||
|
| sed 's/-$//'
|
||||||
|
}
|
||||||
|
SLUG=$(sanitize_branch "${{ github.event.ref }}")
|
||||||
|
NS="cam-${SLUG}"
|
||||||
|
SCHEMA="cam_$(echo $SLUG | tr '-' '_')"
|
||||||
|
echo "BRANCH_SLUG=$SLUG" >> "$GITHUB_ENV"
|
||||||
|
echo "BRANCH_NS=$NS" >> "$GITHUB_ENV"
|
||||||
|
echo "BRANCH_SCHEMA=$SCHEMA" >> "$GITHUB_ENV"
|
||||||
|
- name: Delete namespace
|
||||||
|
run: kubectl delete namespace "$BRANCH_NS" --ignore-not-found
|
||||||
|
- name: Drop PostgreSQL schema
|
||||||
|
run: |
|
||||||
|
kubectl run cleanup-schema-${BRANCH_SLUG} \
|
||||||
|
--namespace=cameleer \
|
||||||
|
--image=postgres:16 \
|
||||||
|
--restart=Never \
|
||||||
|
--env="PGPASSWORD=$(kubectl get secret postgres-credentials -n cameleer -o jsonpath='{.data.POSTGRES_PASSWORD}' | base64 -d)" \
|
||||||
|
--command -- sh -c "psql -h postgres -U $(kubectl get secret postgres-credentials -n cameleer -o jsonpath='{.data.POSTGRES_USER}' | base64 -d) -d cameleer3 -c 'DROP SCHEMA IF EXISTS ${BRANCH_SCHEMA} CASCADE'"
|
||||||
|
kubectl wait --for=condition=Ready pod/cleanup-schema-${BRANCH_SLUG} -n cameleer --timeout=30s || true
|
||||||
|
kubectl wait --for=jsonpath='{.status.phase}'=Succeeded pod/cleanup-schema-${BRANCH_SLUG} -n cameleer --timeout=60s || true
|
||||||
|
kubectl delete pod cleanup-schema-${BRANCH_SLUG} -n cameleer --ignore-not-found
|
||||||
|
- name: Delete OpenSearch indices
|
||||||
|
run: |
|
||||||
|
kubectl run cleanup-indices-${BRANCH_SLUG} \
|
||||||
|
--namespace=cameleer \
|
||||||
|
--image=curlimages/curl:latest \
|
||||||
|
--restart=Never \
|
||||||
|
--command -- curl -sf -X DELETE "http://opensearch:9200/cam-${BRANCH_SLUG}-*"
|
||||||
|
kubectl wait --for=jsonpath='{.status.phase}'=Succeeded pod/cleanup-indices-${BRANCH_SLUG} -n cameleer --timeout=60s || true
|
||||||
|
kubectl delete pod cleanup-indices-${BRANCH_SLUG} -n cameleer --ignore-not-found
|
||||||
|
- name: Cleanup Docker images
|
||||||
|
run: |
|
||||||
|
API="https://gitea.siegeln.net/api/v1"
|
||||||
|
AUTH="Authorization: token ${REGISTRY_TOKEN}"
|
||||||
|
for PKG in cameleer3-server cameleer3-server-ui; do
|
||||||
|
# Delete branch-specific tag
|
||||||
|
curl -sf -X DELETE -H "$AUTH" "$API/packages/cameleer/container/$PKG/branch-${BRANCH_SLUG}" || true
|
||||||
|
done
|
||||||
|
env:
|
||||||
|
REGISTRY_TOKEN: ${{ secrets.REGISTRY_TOKEN }}
|
||||||
|
|||||||
21
CLAUDE.md
21
CLAUDE.md
@@ -38,20 +38,25 @@ java -jar cameleer3-server-app/target/cameleer3-server-app-1.0-SNAPSHOT.jar
|
|||||||
- Jackson `JavaTimeModule` for `Instant` deserialization
|
- Jackson `JavaTimeModule` for `Instant` deserialization
|
||||||
- Communication: receives HTTP POST data from agents, serves SSE event streams for config push/commands
|
- Communication: receives HTTP POST data from agents, serves SSE event streams for config push/commands
|
||||||
- Maintains agent instance registry with states: LIVE → STALE → DEAD
|
- Maintains agent instance registry with states: LIVE → STALE → DEAD
|
||||||
- Storage: ClickHouse for structured data, text index for full-text search
|
- Storage: PostgreSQL (TimescaleDB) for structured data, OpenSearch for full-text search
|
||||||
- Security: JWT auth with RBAC (AGENT/VIEWER/OPERATOR/ADMIN roles), Ed25519 config signing, bootstrap token for registration
|
- Security: JWT auth with RBAC (AGENT/VIEWER/OPERATOR/ADMIN roles), Ed25519 config signing, bootstrap token for registration
|
||||||
- OIDC: Optional external identity provider support (token exchange pattern). Configured via `CAMELEER_OIDC_*` env vars
|
- OIDC: Optional external identity provider support (token exchange pattern). Configured via admin API, stored in database (`server_config` table)
|
||||||
- User persistence: ClickHouse `users` table, admin CRUD at `/api/v1/admin/users`
|
- User persistence: PostgreSQL `users` table, admin CRUD at `/api/v1/admin/users`
|
||||||
|
|
||||||
## CI/CD & Deployment
|
## CI/CD & Deployment
|
||||||
|
|
||||||
- CI workflow: `.gitea/workflows/ci.yml` — build → docker → deploy on push to main
|
- CI workflow: `.gitea/workflows/ci.yml` — build → docker → deploy on push to main or feature branches
|
||||||
- Build step skips integration tests (`-DskipITs`) — Testcontainers needs Docker daemon
|
- Build step skips integration tests (`-DskipITs`) — Testcontainers needs Docker daemon
|
||||||
- Docker: multi-stage build (`Dockerfile`), `$BUILDPLATFORM` for native Maven on ARM64 runner, amd64 runtime
|
- Docker: multi-stage build (`Dockerfile`), `$BUILDPLATFORM` for native Maven on ARM64 runner, amd64 runtime
|
||||||
- `REGISTRY_TOKEN` build arg required for `cameleer3-common` dependency resolution
|
- `REGISTRY_TOKEN` build arg required for `cameleer3-common` dependency resolution
|
||||||
- Registry: `gitea.siegeln.net/cameleer/cameleer3-server` (container images)
|
- Registry: `gitea.siegeln.net/cameleer/cameleer3-server` (container images)
|
||||||
- K8s manifests in `deploy/` — ClickHouse StatefulSet + server Deployment + NodePort Service (30081)
|
- K8s manifests in `deploy/` — Kustomize base + overlays (main/feature), shared infra (PostgreSQL, OpenSearch, Authentik) as top-level manifests
|
||||||
- Deployment target: k3s at 192.168.50.86, namespace `cameleer`
|
- Deployment target: k3s at 192.168.50.86, namespace `cameleer` (main), `cam-<slug>` (feature branches)
|
||||||
- Secrets managed in CI deploy step (idempotent `--dry-run=client | kubectl apply`): `cameleer-auth`, `clickhouse-credentials`, `CAMELEER_JWT_SECRET`
|
- Feature branches: isolated namespace, PG schema, OpenSearch index prefix; Traefik Ingress at `<slug>-api.cameleer.siegeln.net`
|
||||||
- K8s probes: server uses `/api/v1/health`, ClickHouse uses `/ping`
|
- Secrets managed in CI deploy step (idempotent `--dry-run=client | kubectl apply`): `cameleer-auth`, `postgres-credentials`, `opensearch-credentials`
|
||||||
|
- K8s probes: server uses `/api/v1/health`, PostgreSQL uses `pg_isready`, OpenSearch uses `/_cluster/health`
|
||||||
- Docker build uses buildx registry cache + `--provenance=false` for Gitea compatibility
|
- Docker build uses buildx registry cache + `--provenance=false` for Gitea compatibility
|
||||||
|
|
||||||
|
## Disabled Skills
|
||||||
|
|
||||||
|
- Do NOT use any `gsd:*` skills in this project. This includes all `/gsd:` prefixed commands.
|
||||||
|
|||||||
40
HOWTO.md
40
HOWTO.md
@@ -21,20 +21,20 @@ mvn clean verify # compile + run all tests (needs Docker for integrati
|
|||||||
|
|
||||||
## Infrastructure Setup
|
## Infrastructure Setup
|
||||||
|
|
||||||
Start ClickHouse:
|
Start PostgreSQL and OpenSearch:
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
docker compose up -d
|
docker compose up -d
|
||||||
```
|
```
|
||||||
|
|
||||||
This starts ClickHouse 25.3 and automatically runs the schema init scripts (`clickhouse/init/01-schema.sql`, `clickhouse/init/02-search-columns.sql`, `clickhouse/init/03-users.sql`).
|
This starts TimescaleDB (PostgreSQL 16) and OpenSearch 2.19. The database schema is applied automatically via Flyway migrations on server startup.
|
||||||
|
|
||||||
| Service | Port | Purpose |
|
| Service | Port | Purpose |
|
||||||
|------------|------|------------------|
|
|------------|------|----------------------|
|
||||||
| ClickHouse | 8123 | HTTP API (JDBC) |
|
| PostgreSQL | 5432 | JDBC (Spring JDBC) |
|
||||||
| ClickHouse | 9000 | Native protocol |
|
| OpenSearch | 9200 | REST API (full-text) |
|
||||||
|
|
||||||
ClickHouse credentials: `cameleer` / `cameleer_dev`, database `cameleer3`.
|
PostgreSQL credentials: `cameleer` / `cameleer_dev`, database `cameleer3`.
|
||||||
|
|
||||||
## Run the Server
|
## Run the Server
|
||||||
|
|
||||||
@@ -109,7 +109,7 @@ The env-var local user gets `ADMIN` role. Agents get `AGENT` role at registratio
|
|||||||
|
|
||||||
### OIDC Login (Optional)
|
### OIDC Login (Optional)
|
||||||
|
|
||||||
OIDC configuration is stored in ClickHouse and managed via the admin API or UI. The SPA checks if OIDC is available:
|
OIDC configuration is stored in PostgreSQL and managed via the admin API or UI. The SPA checks if OIDC is available:
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
# 1. SPA checks if OIDC is available (returns 404 if not configured)
|
# 1. SPA checks if OIDC is available (returns 404 if not configured)
|
||||||
@@ -340,9 +340,8 @@ Key settings in `cameleer3-server-app/src/main/resources/application.yml`:
|
|||||||
|---------|---------|-------------|
|
|---------|---------|-------------|
|
||||||
| `server.port` | 8081 | Server port |
|
| `server.port` | 8081 | Server port |
|
||||||
| `ingestion.buffer-capacity` | 50000 | Max items in write buffer |
|
| `ingestion.buffer-capacity` | 50000 | Max items in write buffer |
|
||||||
| `ingestion.batch-size` | 5000 | Items per ClickHouse batch insert |
|
| `ingestion.batch-size` | 5000 | Items per batch insert |
|
||||||
| `ingestion.flush-interval-ms` | 1000 | Buffer flush interval (ms) |
|
| `ingestion.flush-interval-ms` | 1000 | Buffer flush interval (ms) |
|
||||||
| `ingestion.data-ttl-days` | 30 | ClickHouse TTL for auto-deletion |
|
|
||||||
| `agent-registry.heartbeat-interval-seconds` | 30 | Expected heartbeat interval |
|
| `agent-registry.heartbeat-interval-seconds` | 30 | Expected heartbeat interval |
|
||||||
| `agent-registry.stale-threshold-seconds` | 90 | Time before agent marked STALE |
|
| `agent-registry.stale-threshold-seconds` | 90 | Time before agent marked STALE |
|
||||||
| `agent-registry.dead-threshold-seconds` | 300 | Time after STALE before DEAD |
|
| `agent-registry.dead-threshold-seconds` | 300 | Time after STALE before DEAD |
|
||||||
@@ -386,7 +385,7 @@ npm run generate-api # Requires backend running on :8081
|
|||||||
|
|
||||||
## Running Tests
|
## Running Tests
|
||||||
|
|
||||||
Integration tests use Testcontainers (starts ClickHouse automatically — requires Docker):
|
Integration tests use Testcontainers (starts PostgreSQL and OpenSearch automatically — requires Docker):
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
# All tests
|
# All tests
|
||||||
@@ -399,14 +398,13 @@ mvn test -pl cameleer3-server-core
|
|||||||
mvn test -pl cameleer3-server-app -Dtest=ExecutionControllerIT
|
mvn test -pl cameleer3-server-app -Dtest=ExecutionControllerIT
|
||||||
```
|
```
|
||||||
|
|
||||||
## Verify ClickHouse Data
|
## Verify Database Data
|
||||||
|
|
||||||
After posting data and waiting for the flush interval (1s default):
|
After posting data and waiting for the flush interval (1s default):
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
docker exec -it cameleer3-server-clickhouse-1 clickhouse-client \
|
docker exec -it cameleer3-server-postgres-1 psql -U cameleer -d cameleer3 \
|
||||||
--user cameleer --password cameleer_dev -d cameleer3 \
|
-c "SELECT count(*) FROM route_executions"
|
||||||
-q "SELECT count() FROM route_executions"
|
|
||||||
```
|
```
|
||||||
|
|
||||||
## Kubernetes Deployment
|
## Kubernetes Deployment
|
||||||
@@ -417,7 +415,8 @@ The full stack is deployed to k3s via CI/CD on push to `main`. K8s manifests are
|
|||||||
|
|
||||||
```
|
```
|
||||||
cameleer namespace:
|
cameleer namespace:
|
||||||
ClickHouse (StatefulSet, 2Gi PVC) ← clickhouse:8123 (ClusterIP)
|
PostgreSQL (StatefulSet, 10Gi PVC) ← postgres:5432 (ClusterIP)
|
||||||
|
OpenSearch (StatefulSet, 10Gi PVC) ← opensearch:9200 (ClusterIP)
|
||||||
cameleer3-server (Deployment) ← NodePort 30081
|
cameleer3-server (Deployment) ← NodePort 30081
|
||||||
cameleer3-ui (Deployment, Nginx) ← NodePort 30090
|
cameleer3-ui (Deployment, Nginx) ← NodePort 30090
|
||||||
Authentik Server (Deployment) ← NodePort 30950
|
Authentik Server (Deployment) ← NodePort 30950
|
||||||
@@ -439,7 +438,7 @@ cameleer namespace:
|
|||||||
|
|
||||||
Push to `main` triggers: **build** (UI npm + Maven, unit tests) → **docker** (buildx amd64 for server + UI, push to Gitea registry) → **deploy** (kubectl apply + rolling update).
|
Push to `main` triggers: **build** (UI npm + Maven, unit tests) → **docker** (buildx amd64 for server + UI, push to Gitea registry) → **deploy** (kubectl apply + rolling update).
|
||||||
|
|
||||||
Required Gitea org secrets: `REGISTRY_TOKEN`, `KUBECONFIG_BASE64`, `CAMELEER_AUTH_TOKEN`, `CAMELEER_JWT_SECRET`, `CLICKHOUSE_USER`, `CLICKHOUSE_PASSWORD`, `CAMELEER_UI_USER` (optional), `CAMELEER_UI_PASSWORD` (optional), `AUTHENTIK_PG_PASSWORD`, `AUTHENTIK_SECRET_KEY`, `CAMELEER_OIDC_ENABLED`, `CAMELEER_OIDC_ISSUER`, `CAMELEER_OIDC_CLIENT_ID`, `CAMELEER_OIDC_CLIENT_SECRET`.
|
Required Gitea org secrets: `REGISTRY_TOKEN`, `KUBECONFIG_BASE64`, `CAMELEER_AUTH_TOKEN`, `CAMELEER_JWT_SECRET`, `POSTGRES_USER`, `POSTGRES_PASSWORD`, `POSTGRES_DB`, `OPENSEARCH_USER`, `OPENSEARCH_PASSWORD`, `CAMELEER_UI_USER` (optional), `CAMELEER_UI_PASSWORD` (optional), `AUTHENTIK_PG_USER`, `AUTHENTIK_PG_PASSWORD`, `AUTHENTIK_SECRET_KEY`, `CAMELEER_OIDC_ENABLED`, `CAMELEER_OIDC_ISSUER`, `CAMELEER_OIDC_CLIENT_ID`, `CAMELEER_OIDC_CLIENT_SECRET`.
|
||||||
|
|
||||||
### Manual K8s Commands
|
### Manual K8s Commands
|
||||||
|
|
||||||
@@ -450,8 +449,11 @@ kubectl -n cameleer get pods
|
|||||||
# View server logs
|
# View server logs
|
||||||
kubectl -n cameleer logs -f deploy/cameleer3-server
|
kubectl -n cameleer logs -f deploy/cameleer3-server
|
||||||
|
|
||||||
# View ClickHouse logs
|
# View PostgreSQL logs
|
||||||
kubectl -n cameleer logs -f statefulset/clickhouse
|
kubectl -n cameleer logs -f statefulset/postgres
|
||||||
|
|
||||||
|
# View OpenSearch logs
|
||||||
|
kubectl -n cameleer logs -f statefulset/opensearch
|
||||||
|
|
||||||
# Restart server
|
# Restart server
|
||||||
kubectl -n cameleer rollout restart deployment/cameleer3-server
|
kubectl -n cameleer rollout restart deployment/cameleer3-server
|
||||||
|
|||||||
@@ -174,7 +174,7 @@
|
|||||||
<artifactId>maven-failsafe-plugin</artifactId>
|
<artifactId>maven-failsafe-plugin</artifactId>
|
||||||
<configuration>
|
<configuration>
|
||||||
<forkCount>1</forkCount>
|
<forkCount>1</forkCount>
|
||||||
<reuseForks>false</reuseForks>
|
<reuseForks>true</reuseForks>
|
||||||
</configuration>
|
</configuration>
|
||||||
<executions>
|
<executions>
|
||||||
<execution>
|
<execution>
|
||||||
|
|||||||
@@ -1,17 +1,23 @@
|
|||||||
package com.cameleer3.server.app.agent;
|
package com.cameleer3.server.app.agent;
|
||||||
|
|
||||||
|
import com.cameleer3.server.core.agent.AgentEventService;
|
||||||
|
import com.cameleer3.server.core.agent.AgentInfo;
|
||||||
import com.cameleer3.server.core.agent.AgentRegistryService;
|
import com.cameleer3.server.core.agent.AgentRegistryService;
|
||||||
|
import com.cameleer3.server.core.agent.AgentState;
|
||||||
import org.slf4j.Logger;
|
import org.slf4j.Logger;
|
||||||
import org.slf4j.LoggerFactory;
|
import org.slf4j.LoggerFactory;
|
||||||
import org.springframework.scheduling.annotation.Scheduled;
|
import org.springframework.scheduling.annotation.Scheduled;
|
||||||
import org.springframework.stereotype.Component;
|
import org.springframework.stereotype.Component;
|
||||||
|
|
||||||
|
import java.util.HashMap;
|
||||||
|
import java.util.Map;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Periodic task that checks agent lifecycle and expires old commands.
|
* Periodic task that checks agent lifecycle and expires old commands.
|
||||||
* <p>
|
* <p>
|
||||||
* Runs on a configurable fixed delay (default 10 seconds). Transitions
|
* Runs on a configurable fixed delay (default 10 seconds). Transitions
|
||||||
* agents LIVE -> STALE -> DEAD based on heartbeat timing, and removes
|
* agents LIVE -> STALE -> DEAD based on heartbeat timing, and removes
|
||||||
* expired pending commands.
|
* expired pending commands. Records lifecycle events for state transitions.
|
||||||
*/
|
*/
|
||||||
@Component
|
@Component
|
||||||
public class AgentLifecycleMonitor {
|
public class AgentLifecycleMonitor {
|
||||||
@@ -19,18 +25,46 @@ public class AgentLifecycleMonitor {
|
|||||||
private static final Logger log = LoggerFactory.getLogger(AgentLifecycleMonitor.class);
|
private static final Logger log = LoggerFactory.getLogger(AgentLifecycleMonitor.class);
|
||||||
|
|
||||||
private final AgentRegistryService registryService;
|
private final AgentRegistryService registryService;
|
||||||
|
private final AgentEventService agentEventService;
|
||||||
|
|
||||||
public AgentLifecycleMonitor(AgentRegistryService registryService) {
|
public AgentLifecycleMonitor(AgentRegistryService registryService,
|
||||||
|
AgentEventService agentEventService) {
|
||||||
this.registryService = registryService;
|
this.registryService = registryService;
|
||||||
|
this.agentEventService = agentEventService;
|
||||||
}
|
}
|
||||||
|
|
||||||
@Scheduled(fixedDelayString = "${agent-registry.lifecycle-check-interval-ms:10000}")
|
@Scheduled(fixedDelayString = "${agent-registry.lifecycle-check-interval-ms:10000}")
|
||||||
public void checkLifecycle() {
|
public void checkLifecycle() {
|
||||||
try {
|
try {
|
||||||
|
// Snapshot states before lifecycle check
|
||||||
|
Map<String, AgentState> statesBefore = new HashMap<>();
|
||||||
|
for (AgentInfo agent : registryService.findAll()) {
|
||||||
|
statesBefore.put(agent.id(), agent.state());
|
||||||
|
}
|
||||||
|
|
||||||
registryService.checkLifecycle();
|
registryService.checkLifecycle();
|
||||||
registryService.expireOldCommands();
|
registryService.expireOldCommands();
|
||||||
|
|
||||||
|
// Detect transitions and record events
|
||||||
|
for (AgentInfo agent : registryService.findAll()) {
|
||||||
|
AgentState before = statesBefore.get(agent.id());
|
||||||
|
if (before != null && before != agent.state()) {
|
||||||
|
String eventType = mapTransitionEvent(before, agent.state());
|
||||||
|
if (eventType != null) {
|
||||||
|
agentEventService.recordEvent(agent.id(), agent.application(), eventType,
|
||||||
|
agent.name() + " " + before + " -> " + agent.state());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
} catch (Exception e) {
|
} catch (Exception e) {
|
||||||
log.error("Error during agent lifecycle check", e);
|
log.error("Error during agent lifecycle check", e);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
private String mapTransitionEvent(AgentState from, AgentState to) {
|
||||||
|
if (from == AgentState.LIVE && to == AgentState.STALE) return "WENT_STALE";
|
||||||
|
if (from == AgentState.STALE && to == AgentState.DEAD) return "WENT_DEAD";
|
||||||
|
if (from == AgentState.STALE && to == AgentState.LIVE) return "RECOVERED";
|
||||||
|
return null;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,11 +1,13 @@
|
|||||||
package com.cameleer3.server.app.config;
|
package com.cameleer3.server.app.config;
|
||||||
|
|
||||||
|
import com.cameleer3.server.core.agent.AgentEventRepository;
|
||||||
|
import com.cameleer3.server.core.agent.AgentEventService;
|
||||||
import com.cameleer3.server.core.agent.AgentRegistryService;
|
import com.cameleer3.server.core.agent.AgentRegistryService;
|
||||||
import org.springframework.context.annotation.Bean;
|
import org.springframework.context.annotation.Bean;
|
||||||
import org.springframework.context.annotation.Configuration;
|
import org.springframework.context.annotation.Configuration;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Creates the {@link AgentRegistryService} bean.
|
* Creates the {@link AgentRegistryService} and {@link AgentEventService} beans.
|
||||||
* <p>
|
* <p>
|
||||||
* Follows the established pattern: core module plain class, app module bean config.
|
* Follows the established pattern: core module plain class, app module bean config.
|
||||||
*/
|
*/
|
||||||
@@ -20,4 +22,9 @@ public class AgentRegistryBeanConfig {
|
|||||||
config.getCommandExpiryMs()
|
config.getCommandExpiryMs()
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Bean
|
||||||
|
public AgentEventService agentEventService(AgentEventRepository repository) {
|
||||||
|
return new AgentEventService(repository);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -31,7 +31,10 @@ public class OpenApiConfig {
|
|||||||
"ExecutionSummary", "ExecutionDetail", "ExecutionStats",
|
"ExecutionSummary", "ExecutionDetail", "ExecutionStats",
|
||||||
"StatsTimeseries", "TimeseriesBucket",
|
"StatsTimeseries", "TimeseriesBucket",
|
||||||
"SearchResultExecutionSummary", "UserInfo",
|
"SearchResultExecutionSummary", "UserInfo",
|
||||||
"ProcessorNode"
|
"ProcessorNode",
|
||||||
|
"AppCatalogEntry", "RouteSummary", "AgentSummary",
|
||||||
|
"RouteMetrics", "AgentEventResponse", "AgentInstanceResponse",
|
||||||
|
"ProcessorMetrics", "AgentMetricsResponse", "MetricBucket"
|
||||||
);
|
);
|
||||||
|
|
||||||
@Bean
|
@Bean
|
||||||
|
|||||||
@@ -1,5 +1,7 @@
|
|||||||
package com.cameleer3.server.app.config;
|
package com.cameleer3.server.app.config;
|
||||||
|
|
||||||
|
import com.cameleer3.server.core.admin.AuditRepository;
|
||||||
|
import com.cameleer3.server.core.admin.AuditService;
|
||||||
import com.cameleer3.server.core.detail.DetailService;
|
import com.cameleer3.server.core.detail.DetailService;
|
||||||
import com.cameleer3.server.core.indexing.SearchIndexer;
|
import com.cameleer3.server.core.indexing.SearchIndexer;
|
||||||
import com.cameleer3.server.core.ingestion.IngestionService;
|
import com.cameleer3.server.core.ingestion.IngestionService;
|
||||||
@@ -25,6 +27,11 @@ public class StorageBeanConfig {
|
|||||||
return new SearchIndexer(executionStore, searchIndex, debounceMs, queueSize);
|
return new SearchIndexer(executionStore, searchIndex, debounceMs, queueSize);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Bean
|
||||||
|
public AuditService auditService(AuditRepository auditRepository) {
|
||||||
|
return new AuditService(auditRepository);
|
||||||
|
}
|
||||||
|
|
||||||
@Bean
|
@Bean
|
||||||
public IngestionService ingestionService(ExecutionStore executionStore,
|
public IngestionService ingestionService(ExecutionStore executionStore,
|
||||||
DiagramStore diagramStore,
|
DiagramStore diagramStore,
|
||||||
|
|||||||
@@ -92,7 +92,7 @@ public class AgentCommandController {
|
|||||||
|
|
||||||
List<AgentInfo> agents = registryService.findAll().stream()
|
List<AgentInfo> agents = registryService.findAll().stream()
|
||||||
.filter(a -> a.state() == AgentState.LIVE)
|
.filter(a -> a.state() == AgentState.LIVE)
|
||||||
.filter(a -> group.equals(a.group()))
|
.filter(a -> group.equals(a.application()))
|
||||||
.toList();
|
.toList();
|
||||||
|
|
||||||
List<String> commandIds = new ArrayList<>();
|
List<String> commandIds = new ArrayList<>();
|
||||||
|
|||||||
@@ -0,0 +1,49 @@
|
|||||||
|
package com.cameleer3.server.app.controller;
|
||||||
|
|
||||||
|
import com.cameleer3.server.app.dto.AgentEventResponse;
|
||||||
|
import com.cameleer3.server.core.agent.AgentEventService;
|
||||||
|
import io.swagger.v3.oas.annotations.Operation;
|
||||||
|
import io.swagger.v3.oas.annotations.responses.ApiResponse;
|
||||||
|
import io.swagger.v3.oas.annotations.tags.Tag;
|
||||||
|
import org.springframework.http.ResponseEntity;
|
||||||
|
import org.springframework.web.bind.annotation.GetMapping;
|
||||||
|
import org.springframework.web.bind.annotation.RequestMapping;
|
||||||
|
import org.springframework.web.bind.annotation.RequestParam;
|
||||||
|
import org.springframework.web.bind.annotation.RestController;
|
||||||
|
|
||||||
|
import java.time.Instant;
|
||||||
|
import java.util.List;
|
||||||
|
|
||||||
|
@RestController
|
||||||
|
@RequestMapping("/api/v1/agents/events-log")
|
||||||
|
@Tag(name = "Agent Events", description = "Agent lifecycle event log")
|
||||||
|
public class AgentEventsController {
|
||||||
|
|
||||||
|
private final AgentEventService agentEventService;
|
||||||
|
|
||||||
|
public AgentEventsController(AgentEventService agentEventService) {
|
||||||
|
this.agentEventService = agentEventService;
|
||||||
|
}
|
||||||
|
|
||||||
|
@GetMapping
|
||||||
|
@Operation(summary = "Query agent events",
|
||||||
|
description = "Returns agent lifecycle events, optionally filtered by app and/or agent ID")
|
||||||
|
@ApiResponse(responseCode = "200", description = "Events returned")
|
||||||
|
public ResponseEntity<List<AgentEventResponse>> getEvents(
|
||||||
|
@RequestParam(required = false) String appId,
|
||||||
|
@RequestParam(required = false) String agentId,
|
||||||
|
@RequestParam(required = false) String from,
|
||||||
|
@RequestParam(required = false) String to,
|
||||||
|
@RequestParam(defaultValue = "50") int limit) {
|
||||||
|
|
||||||
|
Instant fromInstant = from != null ? Instant.parse(from) : null;
|
||||||
|
Instant toInstant = to != null ? Instant.parse(to) : null;
|
||||||
|
|
||||||
|
var events = agentEventService.queryEvents(appId, agentId, fromInstant, toInstant, limit)
|
||||||
|
.stream()
|
||||||
|
.map(AgentEventResponse::from)
|
||||||
|
.toList();
|
||||||
|
|
||||||
|
return ResponseEntity.ok(events);
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -0,0 +1,66 @@
|
|||||||
|
package com.cameleer3.server.app.controller;
|
||||||
|
|
||||||
|
import com.cameleer3.server.app.dto.AgentMetricsResponse;
|
||||||
|
import com.cameleer3.server.app.dto.MetricBucket;
|
||||||
|
import org.springframework.jdbc.core.JdbcTemplate;
|
||||||
|
import org.springframework.web.bind.annotation.*;
|
||||||
|
|
||||||
|
import java.sql.Timestamp;
|
||||||
|
import java.time.Instant;
|
||||||
|
import java.time.temporal.ChronoUnit;
|
||||||
|
import java.util.*;
|
||||||
|
|
||||||
|
@RestController
|
||||||
|
@RequestMapping("/api/v1/agents/{agentId}/metrics")
|
||||||
|
public class AgentMetricsController {
|
||||||
|
|
||||||
|
private final JdbcTemplate jdbc;
|
||||||
|
|
||||||
|
public AgentMetricsController(JdbcTemplate jdbc) {
|
||||||
|
this.jdbc = jdbc;
|
||||||
|
}
|
||||||
|
|
||||||
|
@GetMapping
|
||||||
|
public AgentMetricsResponse getMetrics(
|
||||||
|
@PathVariable String agentId,
|
||||||
|
@RequestParam String names,
|
||||||
|
@RequestParam(required = false) Instant from,
|
||||||
|
@RequestParam(required = false) Instant to,
|
||||||
|
@RequestParam(defaultValue = "60") int buckets) {
|
||||||
|
|
||||||
|
if (from == null) from = Instant.now().minus(1, ChronoUnit.HOURS);
|
||||||
|
if (to == null) to = Instant.now();
|
||||||
|
|
||||||
|
List<String> metricNames = Arrays.asList(names.split(","));
|
||||||
|
long intervalMs = (to.toEpochMilli() - from.toEpochMilli()) / Math.max(buckets, 1);
|
||||||
|
String intervalStr = intervalMs + " milliseconds";
|
||||||
|
|
||||||
|
Map<String, List<MetricBucket>> result = new LinkedHashMap<>();
|
||||||
|
for (String name : metricNames) {
|
||||||
|
result.put(name.trim(), new ArrayList<>());
|
||||||
|
}
|
||||||
|
|
||||||
|
String sql = """
|
||||||
|
SELECT time_bucket(CAST(? AS interval), collected_at) AS bucket,
|
||||||
|
metric_name,
|
||||||
|
AVG(metric_value) AS avg_value
|
||||||
|
FROM agent_metrics
|
||||||
|
WHERE agent_id = ?
|
||||||
|
AND collected_at >= ? AND collected_at < ?
|
||||||
|
AND metric_name = ANY(?)
|
||||||
|
GROUP BY bucket, metric_name
|
||||||
|
ORDER BY bucket
|
||||||
|
""";
|
||||||
|
|
||||||
|
String[] namesArray = metricNames.stream().map(String::trim).toArray(String[]::new);
|
||||||
|
jdbc.query(sql, rs -> {
|
||||||
|
String metricName = rs.getString("metric_name");
|
||||||
|
Instant bucket = rs.getTimestamp("bucket").toInstant();
|
||||||
|
double value = rs.getDouble("avg_value");
|
||||||
|
result.computeIfAbsent(metricName, k -> new ArrayList<>())
|
||||||
|
.add(new MetricBucket(bucket, value));
|
||||||
|
}, intervalStr, agentId, Timestamp.from(from), Timestamp.from(to), namesArray);
|
||||||
|
|
||||||
|
return new AgentMetricsResponse(result);
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -8,6 +8,7 @@ import com.cameleer3.server.app.dto.AgentRegistrationRequest;
|
|||||||
import com.cameleer3.server.app.dto.AgentRegistrationResponse;
|
import com.cameleer3.server.app.dto.AgentRegistrationResponse;
|
||||||
import com.cameleer3.server.app.dto.ErrorResponse;
|
import com.cameleer3.server.app.dto.ErrorResponse;
|
||||||
import com.cameleer3.server.app.security.BootstrapTokenValidator;
|
import com.cameleer3.server.app.security.BootstrapTokenValidator;
|
||||||
|
import com.cameleer3.server.core.agent.AgentEventService;
|
||||||
import com.cameleer3.server.core.agent.AgentInfo;
|
import com.cameleer3.server.core.agent.AgentInfo;
|
||||||
import com.cameleer3.server.core.agent.AgentRegistryService;
|
import com.cameleer3.server.core.agent.AgentRegistryService;
|
||||||
import com.cameleer3.server.core.agent.AgentState;
|
import com.cameleer3.server.core.agent.AgentState;
|
||||||
@@ -23,6 +24,7 @@ import jakarta.servlet.http.HttpServletRequest;
|
|||||||
import org.slf4j.Logger;
|
import org.slf4j.Logger;
|
||||||
import org.slf4j.LoggerFactory;
|
import org.slf4j.LoggerFactory;
|
||||||
import org.springframework.http.ResponseEntity;
|
import org.springframework.http.ResponseEntity;
|
||||||
|
import org.springframework.jdbc.core.JdbcTemplate;
|
||||||
import org.springframework.web.bind.annotation.GetMapping;
|
import org.springframework.web.bind.annotation.GetMapping;
|
||||||
import org.springframework.web.bind.annotation.PathVariable;
|
import org.springframework.web.bind.annotation.PathVariable;
|
||||||
import org.springframework.web.bind.annotation.PostMapping;
|
import org.springframework.web.bind.annotation.PostMapping;
|
||||||
@@ -31,8 +33,13 @@ import org.springframework.web.bind.annotation.RequestMapping;
|
|||||||
import org.springframework.web.bind.annotation.RequestParam;
|
import org.springframework.web.bind.annotation.RequestParam;
|
||||||
import org.springframework.web.bind.annotation.RestController;
|
import org.springframework.web.bind.annotation.RestController;
|
||||||
|
|
||||||
|
import java.sql.Timestamp;
|
||||||
|
import java.time.Instant;
|
||||||
|
import java.time.temporal.ChronoUnit;
|
||||||
import java.util.Collections;
|
import java.util.Collections;
|
||||||
|
import java.util.HashMap;
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
|
import java.util.Map;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Agent registration, heartbeat, listing, and token refresh endpoints.
|
* Agent registration, heartbeat, listing, and token refresh endpoints.
|
||||||
@@ -50,17 +57,23 @@ public class AgentRegistrationController {
|
|||||||
private final BootstrapTokenValidator bootstrapTokenValidator;
|
private final BootstrapTokenValidator bootstrapTokenValidator;
|
||||||
private final JwtService jwtService;
|
private final JwtService jwtService;
|
||||||
private final Ed25519SigningService ed25519SigningService;
|
private final Ed25519SigningService ed25519SigningService;
|
||||||
|
private final AgentEventService agentEventService;
|
||||||
|
private final JdbcTemplate jdbc;
|
||||||
|
|
||||||
public AgentRegistrationController(AgentRegistryService registryService,
|
public AgentRegistrationController(AgentRegistryService registryService,
|
||||||
AgentRegistryConfig config,
|
AgentRegistryConfig config,
|
||||||
BootstrapTokenValidator bootstrapTokenValidator,
|
BootstrapTokenValidator bootstrapTokenValidator,
|
||||||
JwtService jwtService,
|
JwtService jwtService,
|
||||||
Ed25519SigningService ed25519SigningService) {
|
Ed25519SigningService ed25519SigningService,
|
||||||
|
AgentEventService agentEventService,
|
||||||
|
JdbcTemplate jdbc) {
|
||||||
this.registryService = registryService;
|
this.registryService = registryService;
|
||||||
this.config = config;
|
this.config = config;
|
||||||
this.bootstrapTokenValidator = bootstrapTokenValidator;
|
this.bootstrapTokenValidator = bootstrapTokenValidator;
|
||||||
this.jwtService = jwtService;
|
this.jwtService = jwtService;
|
||||||
this.ed25519SigningService = ed25519SigningService;
|
this.ed25519SigningService = ed25519SigningService;
|
||||||
|
this.agentEventService = agentEventService;
|
||||||
|
this.jdbc = jdbc;
|
||||||
}
|
}
|
||||||
|
|
||||||
@PostMapping("/register")
|
@PostMapping("/register")
|
||||||
@@ -89,18 +102,21 @@ public class AgentRegistrationController {
|
|||||||
return ResponseEntity.badRequest().build();
|
return ResponseEntity.badRequest().build();
|
||||||
}
|
}
|
||||||
|
|
||||||
String group = request.group() != null ? request.group() : "default";
|
String application = request.application() != null ? request.application() : "default";
|
||||||
List<String> routeIds = request.routeIds() != null ? request.routeIds() : List.of();
|
List<String> routeIds = request.routeIds() != null ? request.routeIds() : List.of();
|
||||||
var capabilities = request.capabilities() != null ? request.capabilities() : Collections.<String, Object>emptyMap();
|
var capabilities = request.capabilities() != null ? request.capabilities() : Collections.<String, Object>emptyMap();
|
||||||
|
|
||||||
AgentInfo agent = registryService.register(
|
AgentInfo agent = registryService.register(
|
||||||
request.agentId(), request.name(), group, request.version(), routeIds, capabilities);
|
request.agentId(), request.name(), application, request.version(), routeIds, capabilities);
|
||||||
log.info("Agent registered: {} (name={}, group={})", request.agentId(), request.name(), group);
|
log.info("Agent registered: {} (name={}, application={})", request.agentId(), request.name(), application);
|
||||||
|
|
||||||
|
agentEventService.recordEvent(request.agentId(), application, "REGISTERED",
|
||||||
|
"Agent registered: " + request.name());
|
||||||
|
|
||||||
// Issue JWT tokens with AGENT role
|
// Issue JWT tokens with AGENT role
|
||||||
List<String> roles = List.of("AGENT");
|
List<String> roles = List.of("AGENT");
|
||||||
String accessToken = jwtService.createAccessToken(request.agentId(), group, roles);
|
String accessToken = jwtService.createAccessToken(request.agentId(), application, roles);
|
||||||
String refreshToken = jwtService.createRefreshToken(request.agentId(), group, roles);
|
String refreshToken = jwtService.createRefreshToken(request.agentId(), application, roles);
|
||||||
|
|
||||||
return ResponseEntity.ok(new AgentRegistrationResponse(
|
return ResponseEntity.ok(new AgentRegistrationResponse(
|
||||||
agent.id(),
|
agent.id(),
|
||||||
@@ -150,9 +166,10 @@ public class AgentRegistrationController {
|
|||||||
// Preserve roles from refresh token
|
// Preserve roles from refresh token
|
||||||
List<String> roles = result.roles().isEmpty()
|
List<String> roles = result.roles().isEmpty()
|
||||||
? List.of("AGENT") : result.roles();
|
? List.of("AGENT") : result.roles();
|
||||||
String newAccessToken = jwtService.createAccessToken(agentId, agent.group(), roles);
|
String newAccessToken = jwtService.createAccessToken(agentId, agent.application(), roles);
|
||||||
|
String newRefreshToken = jwtService.createRefreshToken(agentId, agent.application(), roles);
|
||||||
|
|
||||||
return ResponseEntity.ok(new AgentRefreshResponse(newAccessToken));
|
return ResponseEntity.ok(new AgentRefreshResponse(newAccessToken, newRefreshToken));
|
||||||
}
|
}
|
||||||
|
|
||||||
@PostMapping("/{id}/heartbeat")
|
@PostMapping("/{id}/heartbeat")
|
||||||
@@ -170,13 +187,13 @@ public class AgentRegistrationController {
|
|||||||
|
|
||||||
@GetMapping
|
@GetMapping
|
||||||
@Operation(summary = "List all agents",
|
@Operation(summary = "List all agents",
|
||||||
description = "Returns all registered agents, optionally filtered by status and/or group")
|
description = "Returns all registered agents with runtime metrics, optionally filtered by status and/or application")
|
||||||
@ApiResponse(responseCode = "200", description = "Agent list returned")
|
@ApiResponse(responseCode = "200", description = "Agent list returned")
|
||||||
@ApiResponse(responseCode = "400", description = "Invalid status filter",
|
@ApiResponse(responseCode = "400", description = "Invalid status filter",
|
||||||
content = @Content(schema = @Schema(implementation = ErrorResponse.class)))
|
content = @Content(schema = @Schema(implementation = ErrorResponse.class)))
|
||||||
public ResponseEntity<List<AgentInstanceResponse>> listAgents(
|
public ResponseEntity<List<AgentInstanceResponse>> listAgents(
|
||||||
@RequestParam(required = false) String status,
|
@RequestParam(required = false) String status,
|
||||||
@RequestParam(required = false) String group) {
|
@RequestParam(required = false) String application) {
|
||||||
List<AgentInfo> agents;
|
List<AgentInfo> agents;
|
||||||
|
|
||||||
if (status != null) {
|
if (status != null) {
|
||||||
@@ -190,16 +207,59 @@ public class AgentRegistrationController {
|
|||||||
agents = registryService.findAll();
|
agents = registryService.findAll();
|
||||||
}
|
}
|
||||||
|
|
||||||
// Apply group filter if specified
|
// Apply application filter if specified
|
||||||
if (group != null && !group.isBlank()) {
|
if (application != null && !application.isBlank()) {
|
||||||
agents = agents.stream()
|
agents = agents.stream()
|
||||||
.filter(a -> group.equals(a.group()))
|
.filter(a -> application.equals(a.application()))
|
||||||
.toList();
|
.toList();
|
||||||
}
|
}
|
||||||
|
|
||||||
List<AgentInstanceResponse> response = agents.stream()
|
// Enrich with runtime metrics from continuous aggregates
|
||||||
.map(AgentInstanceResponse::from)
|
Map<String, double[]> agentMetrics = queryAgentMetrics();
|
||||||
|
final List<AgentInfo> finalAgents = agents;
|
||||||
|
|
||||||
|
List<AgentInstanceResponse> response = finalAgents.stream()
|
||||||
|
.map(a -> {
|
||||||
|
AgentInstanceResponse dto = AgentInstanceResponse.from(a);
|
||||||
|
double[] m = agentMetrics.get(a.application());
|
||||||
|
if (m != null) {
|
||||||
|
long appAgentCount = finalAgents.stream()
|
||||||
|
.filter(ag -> ag.application().equals(a.application())).count();
|
||||||
|
double agentTps = appAgentCount > 0 ? m[0] / appAgentCount : 0;
|
||||||
|
double errorRate = m[1];
|
||||||
|
int activeRoutes = (int) m[2];
|
||||||
|
return dto.withMetrics(agentTps, errorRate, activeRoutes);
|
||||||
|
}
|
||||||
|
return dto;
|
||||||
|
})
|
||||||
.toList();
|
.toList();
|
||||||
return ResponseEntity.ok(response);
|
return ResponseEntity.ok(response);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
private Map<String, double[]> queryAgentMetrics() {
|
||||||
|
Map<String, double[]> result = new HashMap<>();
|
||||||
|
Instant now = Instant.now();
|
||||||
|
Instant from1m = now.minus(1, ChronoUnit.MINUTES);
|
||||||
|
try {
|
||||||
|
jdbc.query(
|
||||||
|
"SELECT application_name, " +
|
||||||
|
"SUM(total_count) AS total, " +
|
||||||
|
"SUM(failed_count) AS failed, " +
|
||||||
|
"COUNT(DISTINCT route_id) AS active_routes " +
|
||||||
|
"FROM stats_1m_route WHERE bucket >= ? AND bucket < ? " +
|
||||||
|
"GROUP BY application_name",
|
||||||
|
rs -> {
|
||||||
|
long total = rs.getLong("total");
|
||||||
|
long failed = rs.getLong("failed");
|
||||||
|
double tps = total / 60.0;
|
||||||
|
double errorRate = total > 0 ? (double) failed / total : 0.0;
|
||||||
|
int activeRoutes = rs.getInt("active_routes");
|
||||||
|
result.put(rs.getString("application_name"), new double[]{tps, errorRate, activeRoutes});
|
||||||
|
},
|
||||||
|
Timestamp.from(from1m), Timestamp.from(now));
|
||||||
|
} catch (Exception e) {
|
||||||
|
log.debug("Could not query agent metrics: {}", e.getMessage());
|
||||||
|
}
|
||||||
|
return result;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -0,0 +1,68 @@
|
|||||||
|
package com.cameleer3.server.app.controller;
|
||||||
|
|
||||||
|
import com.cameleer3.server.app.dto.AuditLogPageResponse;
|
||||||
|
import com.cameleer3.server.core.admin.AuditCategory;
|
||||||
|
import com.cameleer3.server.core.admin.AuditRepository;
|
||||||
|
import com.cameleer3.server.core.admin.AuditRepository.AuditPage;
|
||||||
|
import com.cameleer3.server.core.admin.AuditRepository.AuditQuery;
|
||||||
|
import io.swagger.v3.oas.annotations.Operation;
|
||||||
|
import io.swagger.v3.oas.annotations.tags.Tag;
|
||||||
|
import org.springframework.format.annotation.DateTimeFormat;
|
||||||
|
import org.springframework.http.ResponseEntity;
|
||||||
|
import org.springframework.security.access.prepost.PreAuthorize;
|
||||||
|
import org.springframework.web.bind.annotation.GetMapping;
|
||||||
|
import org.springframework.web.bind.annotation.RequestMapping;
|
||||||
|
import org.springframework.web.bind.annotation.RequestParam;
|
||||||
|
import org.springframework.web.bind.annotation.RestController;
|
||||||
|
|
||||||
|
import java.time.Instant;
|
||||||
|
import java.time.LocalDate;
|
||||||
|
import java.time.ZoneOffset;
|
||||||
|
|
||||||
|
@RestController
|
||||||
|
@RequestMapping("/api/v1/admin/audit")
|
||||||
|
@PreAuthorize("hasRole('ADMIN')")
|
||||||
|
@Tag(name = "Audit Log", description = "Audit log viewer (ADMIN only)")
|
||||||
|
public class AuditLogController {
|
||||||
|
|
||||||
|
private final AuditRepository auditRepository;
|
||||||
|
|
||||||
|
public AuditLogController(AuditRepository auditRepository) {
|
||||||
|
this.auditRepository = auditRepository;
|
||||||
|
}
|
||||||
|
|
||||||
|
@GetMapping
|
||||||
|
@Operation(summary = "Search audit log entries with pagination")
|
||||||
|
public ResponseEntity<AuditLogPageResponse> getAuditLog(
|
||||||
|
@RequestParam(required = false) String username,
|
||||||
|
@RequestParam(required = false) String category,
|
||||||
|
@RequestParam(required = false) String search,
|
||||||
|
@RequestParam(required = false) @DateTimeFormat(iso = DateTimeFormat.ISO.DATE) LocalDate from,
|
||||||
|
@RequestParam(required = false) @DateTimeFormat(iso = DateTimeFormat.ISO.DATE) LocalDate to,
|
||||||
|
@RequestParam(defaultValue = "timestamp") String sort,
|
||||||
|
@RequestParam(defaultValue = "desc") String order,
|
||||||
|
@RequestParam(defaultValue = "0") int page,
|
||||||
|
@RequestParam(defaultValue = "25") int size) {
|
||||||
|
|
||||||
|
size = Math.min(size, 100);
|
||||||
|
|
||||||
|
Instant fromInstant = from != null ? from.atStartOfDay(ZoneOffset.UTC).toInstant() : null;
|
||||||
|
Instant toInstant = to != null ? to.plusDays(1).atStartOfDay(ZoneOffset.UTC).toInstant() : null;
|
||||||
|
|
||||||
|
AuditCategory cat = null;
|
||||||
|
if (category != null && !category.isEmpty()) {
|
||||||
|
try {
|
||||||
|
cat = AuditCategory.valueOf(category.toUpperCase());
|
||||||
|
} catch (IllegalArgumentException ignored) {
|
||||||
|
// invalid category is treated as no filter
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
AuditQuery query = new AuditQuery(username, cat, search, fromInstant, toInstant, sort, order, page, size);
|
||||||
|
AuditPage result = auditRepository.find(query);
|
||||||
|
|
||||||
|
int totalPages = Math.max(1, (int) Math.ceil((double) result.totalCount() / size));
|
||||||
|
return ResponseEntity.ok(new AuditLogPageResponse(
|
||||||
|
result.items(), result.totalCount(), page, size, totalPages));
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -0,0 +1,130 @@
|
|||||||
|
package com.cameleer3.server.app.controller;
|
||||||
|
|
||||||
|
import com.cameleer3.server.app.dto.ActiveQueryResponse;
|
||||||
|
import com.cameleer3.server.app.dto.ConnectionPoolResponse;
|
||||||
|
import com.cameleer3.server.app.dto.DatabaseStatusResponse;
|
||||||
|
import com.cameleer3.server.app.dto.TableSizeResponse;
|
||||||
|
import com.cameleer3.server.core.admin.AuditCategory;
|
||||||
|
import com.cameleer3.server.core.admin.AuditResult;
|
||||||
|
import com.cameleer3.server.core.admin.AuditService;
|
||||||
|
import com.zaxxer.hikari.HikariDataSource;
|
||||||
|
import com.zaxxer.hikari.HikariPoolMXBean;
|
||||||
|
import io.swagger.v3.oas.annotations.Operation;
|
||||||
|
import io.swagger.v3.oas.annotations.tags.Tag;
|
||||||
|
import jakarta.servlet.http.HttpServletRequest;
|
||||||
|
import org.springframework.http.HttpStatus;
|
||||||
|
import org.springframework.http.ResponseEntity;
|
||||||
|
import org.springframework.jdbc.core.JdbcTemplate;
|
||||||
|
import org.springframework.security.access.prepost.PreAuthorize;
|
||||||
|
import org.springframework.web.bind.annotation.GetMapping;
|
||||||
|
import org.springframework.web.bind.annotation.PathVariable;
|
||||||
|
import org.springframework.web.bind.annotation.PostMapping;
|
||||||
|
import org.springframework.web.bind.annotation.RequestMapping;
|
||||||
|
import org.springframework.web.bind.annotation.RestController;
|
||||||
|
import org.springframework.web.server.ResponseStatusException;
|
||||||
|
|
||||||
|
import javax.sql.DataSource;
|
||||||
|
import java.util.List;
|
||||||
|
|
||||||
|
@RestController
|
||||||
|
@RequestMapping("/api/v1/admin/database")
|
||||||
|
@PreAuthorize("hasRole('ADMIN')")
|
||||||
|
@Tag(name = "Database Admin", description = "Database monitoring and management (ADMIN only)")
|
||||||
|
public class DatabaseAdminController {
|
||||||
|
|
||||||
|
private final JdbcTemplate jdbc;
|
||||||
|
private final DataSource dataSource;
|
||||||
|
private final AuditService auditService;
|
||||||
|
|
||||||
|
public DatabaseAdminController(JdbcTemplate jdbc, DataSource dataSource, AuditService auditService) {
|
||||||
|
this.jdbc = jdbc;
|
||||||
|
this.dataSource = dataSource;
|
||||||
|
this.auditService = auditService;
|
||||||
|
}
|
||||||
|
|
||||||
|
@GetMapping("/status")
|
||||||
|
@Operation(summary = "Get database connection status and version")
|
||||||
|
public ResponseEntity<DatabaseStatusResponse> getStatus() {
|
||||||
|
try {
|
||||||
|
String version = jdbc.queryForObject("SELECT version()", String.class);
|
||||||
|
boolean timescaleDb = Boolean.TRUE.equals(
|
||||||
|
jdbc.queryForObject("SELECT EXISTS(SELECT 1 FROM pg_extension WHERE extname = 'timescaledb')", Boolean.class));
|
||||||
|
String schema = jdbc.queryForObject("SELECT current_schema()", String.class);
|
||||||
|
String host = extractHost(dataSource);
|
||||||
|
return ResponseEntity.ok(new DatabaseStatusResponse(true, version, host, schema, timescaleDb));
|
||||||
|
} catch (Exception e) {
|
||||||
|
return ResponseEntity.ok(new DatabaseStatusResponse(false, null, null, null, false));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@GetMapping("/pool")
|
||||||
|
@Operation(summary = "Get HikariCP connection pool stats")
|
||||||
|
public ResponseEntity<ConnectionPoolResponse> getPool() {
|
||||||
|
HikariDataSource hds = (HikariDataSource) dataSource;
|
||||||
|
HikariPoolMXBean pool = hds.getHikariPoolMXBean();
|
||||||
|
return ResponseEntity.ok(new ConnectionPoolResponse(
|
||||||
|
pool.getActiveConnections(), pool.getIdleConnections(),
|
||||||
|
pool.getThreadsAwaitingConnection(), hds.getConnectionTimeout(),
|
||||||
|
hds.getMaximumPoolSize()));
|
||||||
|
}
|
||||||
|
|
||||||
|
@GetMapping("/tables")
|
||||||
|
@Operation(summary = "Get table sizes and row counts")
|
||||||
|
public ResponseEntity<List<TableSizeResponse>> getTables() {
|
||||||
|
var tables = jdbc.query("""
|
||||||
|
SELECT relname AS table_name,
|
||||||
|
n_live_tup AS row_count,
|
||||||
|
pg_size_pretty(pg_total_relation_size(relid)) AS data_size,
|
||||||
|
pg_total_relation_size(relid) AS data_size_bytes,
|
||||||
|
pg_size_pretty(pg_indexes_size(relid)) AS index_size,
|
||||||
|
pg_indexes_size(relid) AS index_size_bytes
|
||||||
|
FROM pg_stat_user_tables
|
||||||
|
WHERE schemaname = current_schema()
|
||||||
|
ORDER BY pg_total_relation_size(relid) DESC
|
||||||
|
""", (rs, row) -> new TableSizeResponse(
|
||||||
|
rs.getString("table_name"), rs.getLong("row_count"),
|
||||||
|
rs.getString("data_size"), rs.getString("index_size"),
|
||||||
|
rs.getLong("data_size_bytes"), rs.getLong("index_size_bytes")));
|
||||||
|
return ResponseEntity.ok(tables);
|
||||||
|
}
|
||||||
|
|
||||||
|
@GetMapping("/queries")
|
||||||
|
@Operation(summary = "Get active queries")
|
||||||
|
public ResponseEntity<List<ActiveQueryResponse>> getQueries() {
|
||||||
|
var queries = jdbc.query("""
|
||||||
|
SELECT pid, EXTRACT(EPOCH FROM (now() - query_start)) AS duration_seconds,
|
||||||
|
state, query
|
||||||
|
FROM pg_stat_activity
|
||||||
|
WHERE state != 'idle' AND pid != pg_backend_pid() AND datname = current_database()
|
||||||
|
ORDER BY query_start ASC
|
||||||
|
""", (rs, row) -> new ActiveQueryResponse(
|
||||||
|
rs.getInt("pid"), rs.getDouble("duration_seconds"),
|
||||||
|
rs.getString("state"), rs.getString("query")));
|
||||||
|
return ResponseEntity.ok(queries);
|
||||||
|
}
|
||||||
|
|
||||||
|
@PostMapping("/queries/{pid}/kill")
|
||||||
|
@Operation(summary = "Terminate a query by PID")
|
||||||
|
public ResponseEntity<Void> killQuery(@PathVariable int pid, HttpServletRequest request) {
|
||||||
|
var exists = jdbc.queryForObject(
|
||||||
|
"SELECT EXISTS(SELECT 1 FROM pg_stat_activity WHERE pid = ? AND pid != pg_backend_pid())",
|
||||||
|
Boolean.class, pid);
|
||||||
|
if (!Boolean.TRUE.equals(exists)) {
|
||||||
|
throw new ResponseStatusException(HttpStatus.NOT_FOUND, "No active query with PID " + pid);
|
||||||
|
}
|
||||||
|
jdbc.queryForObject("SELECT pg_terminate_backend(?)", Boolean.class, pid);
|
||||||
|
auditService.log("kill_query", AuditCategory.INFRA, "PID " + pid, null, AuditResult.SUCCESS, request);
|
||||||
|
return ResponseEntity.ok().build();
|
||||||
|
}
|
||||||
|
|
||||||
|
private String extractHost(DataSource ds) {
|
||||||
|
try {
|
||||||
|
if (ds instanceof HikariDataSource hds) {
|
||||||
|
return hds.getJdbcUrl();
|
||||||
|
}
|
||||||
|
return "unknown";
|
||||||
|
} catch (Exception e) {
|
||||||
|
return "unknown";
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -90,14 +90,14 @@ public class DiagramRenderController {
|
|||||||
}
|
}
|
||||||
|
|
||||||
@GetMapping
|
@GetMapping
|
||||||
@Operation(summary = "Find diagram by application group and route ID",
|
@Operation(summary = "Find diagram by application and route ID",
|
||||||
description = "Resolves group to agent IDs and finds the latest diagram for the route")
|
description = "Resolves application to agent IDs and finds the latest diagram for the route")
|
||||||
@ApiResponse(responseCode = "200", description = "Diagram layout returned")
|
@ApiResponse(responseCode = "200", description = "Diagram layout returned")
|
||||||
@ApiResponse(responseCode = "404", description = "No diagram found for the given group and route")
|
@ApiResponse(responseCode = "404", description = "No diagram found for the given application and route")
|
||||||
public ResponseEntity<DiagramLayout> findByGroupAndRoute(
|
public ResponseEntity<DiagramLayout> findByApplicationAndRoute(
|
||||||
@RequestParam String group,
|
@RequestParam String application,
|
||||||
@RequestParam String routeId) {
|
@RequestParam String routeId) {
|
||||||
List<String> agentIds = registryService.findByGroup(group).stream()
|
List<String> agentIds = registryService.findByApplication(application).stream()
|
||||||
.map(AgentInfo::id)
|
.map(AgentInfo::id)
|
||||||
.toList();
|
.toList();
|
||||||
|
|
||||||
|
|||||||
@@ -53,11 +53,11 @@ public class ExecutionController {
|
|||||||
@ApiResponse(responseCode = "202", description = "Data accepted for processing")
|
@ApiResponse(responseCode = "202", description = "Data accepted for processing")
|
||||||
public ResponseEntity<Void> ingestExecutions(@RequestBody String body) throws JsonProcessingException {
|
public ResponseEntity<Void> ingestExecutions(@RequestBody String body) throws JsonProcessingException {
|
||||||
String agentId = extractAgentId();
|
String agentId = extractAgentId();
|
||||||
String groupName = resolveGroupName(agentId);
|
String applicationName = resolveApplicationName(agentId);
|
||||||
List<RouteExecution> executions = parsePayload(body);
|
List<RouteExecution> executions = parsePayload(body);
|
||||||
|
|
||||||
for (RouteExecution execution : executions) {
|
for (RouteExecution execution : executions) {
|
||||||
ingestionService.ingestExecution(agentId, groupName, execution);
|
ingestionService.ingestExecution(agentId, applicationName, execution);
|
||||||
}
|
}
|
||||||
|
|
||||||
return ResponseEntity.accepted().build();
|
return ResponseEntity.accepted().build();
|
||||||
@@ -68,9 +68,9 @@ public class ExecutionController {
|
|||||||
return auth != null ? auth.getName() : "";
|
return auth != null ? auth.getName() : "";
|
||||||
}
|
}
|
||||||
|
|
||||||
private String resolveGroupName(String agentId) {
|
private String resolveApplicationName(String agentId) {
|
||||||
AgentInfo agent = registryService.findById(agentId);
|
AgentInfo agent = registryService.findById(agentId);
|
||||||
return agent != null ? agent.group() : "";
|
return agent != null ? agent.application() : "";
|
||||||
}
|
}
|
||||||
|
|
||||||
private List<RouteExecution> parsePayload(String body) throws JsonProcessingException {
|
private List<RouteExecution> parsePayload(String body) throws JsonProcessingException {
|
||||||
|
|||||||
@@ -0,0 +1,167 @@
|
|||||||
|
package com.cameleer3.server.app.controller;
|
||||||
|
|
||||||
|
import com.cameleer3.server.core.admin.AuditCategory;
|
||||||
|
import com.cameleer3.server.core.admin.AuditResult;
|
||||||
|
import com.cameleer3.server.core.admin.AuditService;
|
||||||
|
import com.cameleer3.server.core.rbac.GroupDetail;
|
||||||
|
import com.cameleer3.server.core.rbac.GroupRepository;
|
||||||
|
import com.cameleer3.server.core.rbac.GroupSummary;
|
||||||
|
import com.cameleer3.server.core.rbac.RbacService;
|
||||||
|
import io.swagger.v3.oas.annotations.Operation;
|
||||||
|
import io.swagger.v3.oas.annotations.responses.ApiResponse;
|
||||||
|
import io.swagger.v3.oas.annotations.tags.Tag;
|
||||||
|
import jakarta.servlet.http.HttpServletRequest;
|
||||||
|
import org.springframework.http.ResponseEntity;
|
||||||
|
import org.springframework.security.access.prepost.PreAuthorize;
|
||||||
|
import org.springframework.web.bind.annotation.DeleteMapping;
|
||||||
|
import org.springframework.web.bind.annotation.GetMapping;
|
||||||
|
import org.springframework.web.bind.annotation.PathVariable;
|
||||||
|
import org.springframework.web.bind.annotation.PostMapping;
|
||||||
|
import org.springframework.web.bind.annotation.PutMapping;
|
||||||
|
import org.springframework.web.bind.annotation.RequestBody;
|
||||||
|
import org.springframework.web.bind.annotation.RequestMapping;
|
||||||
|
import org.springframework.web.bind.annotation.RestController;
|
||||||
|
|
||||||
|
import java.util.ArrayList;
|
||||||
|
import java.util.List;
|
||||||
|
import java.util.Map;
|
||||||
|
import java.util.Optional;
|
||||||
|
import java.util.UUID;
|
||||||
|
|
||||||
|
/**
 * Admin REST endpoints for RBAC group management: CRUD on groups, parent/child
 * hierarchy maintenance (with cycle protection on update), and assigning or
 * removing roles. Every mutation is recorded through {@link AuditService}
 * under the {@code RBAC} audit category.
 * Protected by {@code ROLE_ADMIN}.
 */
@RestController
@RequestMapping("/api/v1/admin/groups")
@Tag(name = "Group Admin", description = "Group management (ADMIN only)")
@PreAuthorize("hasRole('ADMIN')")
public class GroupAdminController {

    private final GroupRepository groupRepository;
    // NOTE(review): rbacService is injected but not referenced by any method
    // visible in this class — confirm whether it is still needed.
    private final RbacService rbacService;
    private final AuditService auditService;

    public GroupAdminController(GroupRepository groupRepository, RbacService rbacService,
                                AuditService auditService) {
        this.groupRepository = groupRepository;
        this.rbacService = rbacService;
        this.auditService = auditService;
    }

    /**
     * Lists all groups, resolving each summary to its full detail view.
     *
     * @return 200 with the detail view of every group
     */
    @GetMapping
    @Operation(summary = "List all groups with hierarchy and effective roles")
    @ApiResponse(responseCode = "200", description = "Group list returned")
    public ResponseEntity<List<GroupDetail>> listGroups() {
        List<GroupSummary> summaries = groupRepository.findAll();
        List<GroupDetail> details = new ArrayList<>();
        // One findById per summary (N+1 against the repository); a group deleted
        // between findAll() and findById() is silently skipped via ifPresent.
        for (GroupSummary summary : summaries) {
            groupRepository.findById(summary.id()).ifPresent(details::add);
        }
        return ResponseEntity.ok(details);
    }

    /**
     * Fetches a single group by ID.
     *
     * @param id the group UUID
     * @return 200 with the group detail, or 404 if it does not exist
     */
    @GetMapping("/{id}")
    @Operation(summary = "Get group by ID with effective roles")
    @ApiResponse(responseCode = "200", description = "Group found")
    @ApiResponse(responseCode = "404", description = "Group not found")
    public ResponseEntity<GroupDetail> getGroup(@PathVariable UUID id) {
        return groupRepository.findById(id)
                .map(ResponseEntity::ok)
                .orElse(ResponseEntity.notFound().build());
    }

    /**
     * Creates a group (optionally under a parent) and audits the creation.
     *
     * @return 200 with {@code {"id": <generated UUID>}}
     */
    @PostMapping
    @Operation(summary = "Create a new group")
    @ApiResponse(responseCode = "200", description = "Group created")
    public ResponseEntity<Map<String, UUID>> createGroup(@RequestBody CreateGroupRequest request,
                                                         HttpServletRequest httpRequest) {
        UUID id = groupRepository.create(request.name(), request.parentGroupId());
        auditService.log("create_group", AuditCategory.RBAC, id.toString(),
                Map.of("name", request.name()), AuditResult.SUCCESS, httpRequest);
        return ResponseEntity.ok(Map.of("id", id));
    }

    /**
     * Renames a group and/or moves it under a new parent. Rejects any re-parent
     * that would make the hierarchy cyclic.
     *
     * @return 200 on success, 404 if the group does not exist,
     *         409 if the proposed parent would create a cycle
     */
    @PutMapping("/{id}")
    @Operation(summary = "Update group name or parent")
    @ApiResponse(responseCode = "200", description = "Group updated")
    @ApiResponse(responseCode = "404", description = "Group not found")
    @ApiResponse(responseCode = "409", description = "Cycle detected in group hierarchy")
    public ResponseEntity<Void> updateGroup(@PathVariable UUID id,
                                            @RequestBody UpdateGroupRequest request,
                                            HttpServletRequest httpRequest) {
        Optional<GroupDetail> existing = groupRepository.findById(id);
        if (existing.isEmpty()) {
            return ResponseEntity.notFound().build();
        }

        // Cycle detection: walk ancestor chain of proposed parent and check if it includes 'id'
        if (request.parentGroupId() != null) {
            List<GroupSummary> ancestors = groupRepository.findAncestorChain(request.parentGroupId());
            for (GroupSummary ancestor : ancestors) {
                if (ancestor.id().equals(id)) {
                    return ResponseEntity.status(409).build();
                }
            }
            // Also check that the proposed parent itself is not the group being updated
            // (assumes findAncestorChain may not include its argument — TODO confirm).
            if (request.parentGroupId().equals(id)) {
                return ResponseEntity.status(409).build();
            }
        }

        groupRepository.update(id, request.name(), request.parentGroupId());
        auditService.log("update_group", AuditCategory.RBAC, id.toString(),
                null, AuditResult.SUCCESS, httpRequest);
        return ResponseEntity.ok().build();
    }

    /**
     * Deletes a group and audits the deletion.
     *
     * @return 204 on success, 404 if the group does not exist
     */
    @DeleteMapping("/{id}")
    @Operation(summary = "Delete group")
    @ApiResponse(responseCode = "204", description = "Group deleted")
    @ApiResponse(responseCode = "404", description = "Group not found")
    public ResponseEntity<Void> deleteGroup(@PathVariable UUID id,
                                            HttpServletRequest httpRequest) {
        if (groupRepository.findById(id).isEmpty()) {
            return ResponseEntity.notFound().build();
        }
        groupRepository.delete(id);
        auditService.log("delete_group", AuditCategory.RBAC, id.toString(),
                null, AuditResult.SUCCESS, httpRequest);
        return ResponseEntity.noContent().build();
    }

    /**
     * Assigns a role to a group. The role ID itself is not validated here;
     * the repository is assumed to reject unknown roles — TODO confirm.
     *
     * @return 200 on success, 404 if the group does not exist
     */
    @PostMapping("/{id}/roles/{roleId}")
    @Operation(summary = "Assign a role to a group")
    @ApiResponse(responseCode = "200", description = "Role assigned to group")
    @ApiResponse(responseCode = "404", description = "Group not found")
    public ResponseEntity<Void> assignRoleToGroup(@PathVariable UUID id,
                                                  @PathVariable UUID roleId,
                                                  HttpServletRequest httpRequest) {
        if (groupRepository.findById(id).isEmpty()) {
            return ResponseEntity.notFound().build();
        }
        groupRepository.addRole(id, roleId);
        auditService.log("assign_role_to_group", AuditCategory.RBAC, id.toString(),
                Map.of("roleId", roleId), AuditResult.SUCCESS, httpRequest);
        return ResponseEntity.ok().build();
    }

    /**
     * Removes a role from a group.
     *
     * @return 204 on success, 404 if the group does not exist
     */
    @DeleteMapping("/{id}/roles/{roleId}")
    @Operation(summary = "Remove a role from a group")
    @ApiResponse(responseCode = "204", description = "Role removed from group")
    @ApiResponse(responseCode = "404", description = "Group not found")
    public ResponseEntity<Void> removeRoleFromGroup(@PathVariable UUID id,
                                                    @PathVariable UUID roleId,
                                                    HttpServletRequest httpRequest) {
        if (groupRepository.findById(id).isEmpty()) {
            return ResponseEntity.notFound().build();
        }
        groupRepository.removeRole(id, roleId);
        auditService.log("remove_role_from_group", AuditCategory.RBAC, id.toString(),
                Map.of("roleId", roleId), AuditResult.SUCCESS, httpRequest);
        return ResponseEntity.noContent().build();
    }

    /** Request body for group creation; {@code parentGroupId} may be null for a root group. */
    public record CreateGroupRequest(String name, UUID parentGroupId) {}
    /** Request body for group update; {@code parentGroupId} may be null to detach from a parent. */
    public record UpdateGroupRequest(String name, UUID parentGroupId) {}
}
|
||||||
@@ -5,8 +5,12 @@ import com.cameleer3.server.app.dto.OidcAdminConfigRequest;
|
|||||||
import com.cameleer3.server.app.dto.OidcAdminConfigResponse;
|
import com.cameleer3.server.app.dto.OidcAdminConfigResponse;
|
||||||
import com.cameleer3.server.app.dto.OidcTestResult;
|
import com.cameleer3.server.app.dto.OidcTestResult;
|
||||||
import com.cameleer3.server.app.security.OidcTokenExchanger;
|
import com.cameleer3.server.app.security.OidcTokenExchanger;
|
||||||
|
import com.cameleer3.server.core.admin.AuditCategory;
|
||||||
|
import com.cameleer3.server.core.admin.AuditResult;
|
||||||
|
import com.cameleer3.server.core.admin.AuditService;
|
||||||
import com.cameleer3.server.core.security.OidcConfig;
|
import com.cameleer3.server.core.security.OidcConfig;
|
||||||
import com.cameleer3.server.core.security.OidcConfigRepository;
|
import com.cameleer3.server.core.security.OidcConfigRepository;
|
||||||
|
import jakarta.servlet.http.HttpServletRequest;
|
||||||
import io.swagger.v3.oas.annotations.Operation;
|
import io.swagger.v3.oas.annotations.Operation;
|
||||||
import io.swagger.v3.oas.annotations.media.Content;
|
import io.swagger.v3.oas.annotations.media.Content;
|
||||||
import io.swagger.v3.oas.annotations.media.Schema;
|
import io.swagger.v3.oas.annotations.media.Schema;
|
||||||
@@ -16,6 +20,7 @@ import org.slf4j.Logger;
|
|||||||
import org.slf4j.LoggerFactory;
|
import org.slf4j.LoggerFactory;
|
||||||
import org.springframework.http.HttpStatus;
|
import org.springframework.http.HttpStatus;
|
||||||
import org.springframework.http.ResponseEntity;
|
import org.springframework.http.ResponseEntity;
|
||||||
|
import org.springframework.security.access.prepost.PreAuthorize;
|
||||||
import org.springframework.web.bind.annotation.DeleteMapping;
|
import org.springframework.web.bind.annotation.DeleteMapping;
|
||||||
import org.springframework.web.bind.annotation.GetMapping;
|
import org.springframework.web.bind.annotation.GetMapping;
|
||||||
import org.springframework.web.bind.annotation.PostMapping;
|
import org.springframework.web.bind.annotation.PostMapping;
|
||||||
@@ -26,6 +31,7 @@ import org.springframework.web.bind.annotation.RestController;
|
|||||||
import org.springframework.web.server.ResponseStatusException;
|
import org.springframework.web.server.ResponseStatusException;
|
||||||
|
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
|
import java.util.Map;
|
||||||
import java.util.Optional;
|
import java.util.Optional;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -35,17 +41,21 @@ import java.util.Optional;
|
|||||||
@RestController
|
@RestController
|
||||||
@RequestMapping("/api/v1/admin/oidc")
|
@RequestMapping("/api/v1/admin/oidc")
|
||||||
@Tag(name = "OIDC Config Admin", description = "OIDC provider configuration (ADMIN only)")
|
@Tag(name = "OIDC Config Admin", description = "OIDC provider configuration (ADMIN only)")
|
||||||
|
@PreAuthorize("hasRole('ADMIN')")
|
||||||
public class OidcConfigAdminController {
|
public class OidcConfigAdminController {
|
||||||
|
|
||||||
private static final Logger log = LoggerFactory.getLogger(OidcConfigAdminController.class);
|
private static final Logger log = LoggerFactory.getLogger(OidcConfigAdminController.class);
|
||||||
|
|
||||||
private final OidcConfigRepository configRepository;
|
private final OidcConfigRepository configRepository;
|
||||||
private final OidcTokenExchanger tokenExchanger;
|
private final OidcTokenExchanger tokenExchanger;
|
||||||
|
private final AuditService auditService;
|
||||||
|
|
||||||
public OidcConfigAdminController(OidcConfigRepository configRepository,
|
public OidcConfigAdminController(OidcConfigRepository configRepository,
|
||||||
OidcTokenExchanger tokenExchanger) {
|
OidcTokenExchanger tokenExchanger,
|
||||||
|
AuditService auditService) {
|
||||||
this.configRepository = configRepository;
|
this.configRepository = configRepository;
|
||||||
this.tokenExchanger = tokenExchanger;
|
this.tokenExchanger = tokenExchanger;
|
||||||
|
this.auditService = auditService;
|
||||||
}
|
}
|
||||||
|
|
||||||
@GetMapping
|
@GetMapping
|
||||||
@@ -64,7 +74,8 @@ public class OidcConfigAdminController {
|
|||||||
@ApiResponse(responseCode = "200", description = "Configuration saved")
|
@ApiResponse(responseCode = "200", description = "Configuration saved")
|
||||||
@ApiResponse(responseCode = "400", description = "Invalid configuration",
|
@ApiResponse(responseCode = "400", description = "Invalid configuration",
|
||||||
content = @Content(schema = @Schema(implementation = ErrorResponse.class)))
|
content = @Content(schema = @Schema(implementation = ErrorResponse.class)))
|
||||||
public ResponseEntity<OidcAdminConfigResponse> saveConfig(@RequestBody OidcAdminConfigRequest request) {
|
public ResponseEntity<OidcAdminConfigResponse> saveConfig(@RequestBody OidcAdminConfigRequest request,
|
||||||
|
HttpServletRequest httpRequest) {
|
||||||
// Resolve client_secret: if masked or empty, preserve existing
|
// Resolve client_secret: if masked or empty, preserve existing
|
||||||
String clientSecret = request.clientSecret();
|
String clientSecret = request.clientSecret();
|
||||||
if (clientSecret == null || clientSecret.isBlank() || clientSecret.equals("********")) {
|
if (clientSecret == null || clientSecret.isBlank() || clientSecret.equals("********")) {
|
||||||
@@ -95,6 +106,7 @@ public class OidcConfigAdminController {
|
|||||||
configRepository.save(config);
|
configRepository.save(config);
|
||||||
tokenExchanger.invalidateCache();
|
tokenExchanger.invalidateCache();
|
||||||
|
|
||||||
|
auditService.log("update_oidc", AuditCategory.CONFIG, "oidc", Map.of(), AuditResult.SUCCESS, httpRequest);
|
||||||
log.info("OIDC configuration updated: enabled={}, issuer={}", config.enabled(), config.issuerUri());
|
log.info("OIDC configuration updated: enabled={}, issuer={}", config.enabled(), config.issuerUri());
|
||||||
return ResponseEntity.ok(OidcAdminConfigResponse.from(config));
|
return ResponseEntity.ok(OidcAdminConfigResponse.from(config));
|
||||||
}
|
}
|
||||||
@@ -104,7 +116,7 @@ public class OidcConfigAdminController {
|
|||||||
@ApiResponse(responseCode = "200", description = "Provider reachable")
|
@ApiResponse(responseCode = "200", description = "Provider reachable")
|
||||||
@ApiResponse(responseCode = "400", description = "Provider unreachable or misconfigured",
|
@ApiResponse(responseCode = "400", description = "Provider unreachable or misconfigured",
|
||||||
content = @Content(schema = @Schema(implementation = ErrorResponse.class)))
|
content = @Content(schema = @Schema(implementation = ErrorResponse.class)))
|
||||||
public ResponseEntity<OidcTestResult> testConnection() {
|
public ResponseEntity<OidcTestResult> testConnection(HttpServletRequest httpRequest) {
|
||||||
Optional<OidcConfig> config = configRepository.find();
|
Optional<OidcConfig> config = configRepository.find();
|
||||||
if (config.isEmpty() || !config.get().enabled()) {
|
if (config.isEmpty() || !config.get().enabled()) {
|
||||||
throw new ResponseStatusException(HttpStatus.BAD_REQUEST,
|
throw new ResponseStatusException(HttpStatus.BAD_REQUEST,
|
||||||
@@ -114,6 +126,7 @@ public class OidcConfigAdminController {
|
|||||||
try {
|
try {
|
||||||
tokenExchanger.invalidateCache();
|
tokenExchanger.invalidateCache();
|
||||||
String authEndpoint = tokenExchanger.getAuthorizationEndpoint();
|
String authEndpoint = tokenExchanger.getAuthorizationEndpoint();
|
||||||
|
auditService.log("test_oidc", AuditCategory.CONFIG, "oidc", null, AuditResult.SUCCESS, httpRequest);
|
||||||
return ResponseEntity.ok(new OidcTestResult("ok", authEndpoint));
|
return ResponseEntity.ok(new OidcTestResult("ok", authEndpoint));
|
||||||
} catch (Exception e) {
|
} catch (Exception e) {
|
||||||
log.warn("OIDC connectivity test failed: {}", e.getMessage());
|
log.warn("OIDC connectivity test failed: {}", e.getMessage());
|
||||||
@@ -125,9 +138,10 @@ public class OidcConfigAdminController {
|
|||||||
@DeleteMapping
|
@DeleteMapping
|
||||||
@Operation(summary = "Delete OIDC configuration")
|
@Operation(summary = "Delete OIDC configuration")
|
||||||
@ApiResponse(responseCode = "204", description = "Configuration deleted")
|
@ApiResponse(responseCode = "204", description = "Configuration deleted")
|
||||||
public ResponseEntity<Void> deleteConfig() {
|
public ResponseEntity<Void> deleteConfig(HttpServletRequest httpRequest) {
|
||||||
configRepository.delete();
|
configRepository.delete();
|
||||||
tokenExchanger.invalidateCache();
|
tokenExchanger.invalidateCache();
|
||||||
|
auditService.log("delete_oidc", AuditCategory.CONFIG, "oidc", null, AuditResult.SUCCESS, httpRequest);
|
||||||
log.info("OIDC configuration deleted");
|
log.info("OIDC configuration deleted");
|
||||||
return ResponseEntity.noContent().build();
|
return ResponseEntity.noContent().build();
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -0,0 +1,257 @@
|
|||||||
|
package com.cameleer3.server.app.controller;
|
||||||
|
|
||||||
|
import com.cameleer3.server.app.dto.IndexInfoResponse;
|
||||||
|
import com.cameleer3.server.app.dto.IndicesPageResponse;
|
||||||
|
import com.cameleer3.server.app.dto.OpenSearchStatusResponse;
|
||||||
|
import com.cameleer3.server.app.dto.PerformanceResponse;
|
||||||
|
import com.cameleer3.server.app.dto.PipelineStatsResponse;
|
||||||
|
import com.cameleer3.server.core.admin.AuditCategory;
|
||||||
|
import com.cameleer3.server.core.admin.AuditResult;
|
||||||
|
import com.cameleer3.server.core.admin.AuditService;
|
||||||
|
import com.cameleer3.server.core.indexing.SearchIndexerStats;
|
||||||
|
import com.fasterxml.jackson.databind.JsonNode;
|
||||||
|
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||||
|
import io.swagger.v3.oas.annotations.Operation;
|
||||||
|
import io.swagger.v3.oas.annotations.tags.Tag;
|
||||||
|
import jakarta.servlet.http.HttpServletRequest;
|
||||||
|
import org.opensearch.client.Request;
|
||||||
|
import org.opensearch.client.Response;
|
||||||
|
import org.opensearch.client.RestClient;
|
||||||
|
import org.opensearch.client.opensearch.OpenSearchClient;
|
||||||
|
import org.opensearch.client.opensearch.cluster.HealthResponse;
|
||||||
|
import org.springframework.beans.factory.annotation.Value;
|
||||||
|
import org.springframework.http.HttpStatus;
|
||||||
|
import org.springframework.http.ResponseEntity;
|
||||||
|
import org.springframework.security.access.prepost.PreAuthorize;
|
||||||
|
import org.springframework.web.bind.annotation.DeleteMapping;
|
||||||
|
import org.springframework.web.bind.annotation.GetMapping;
|
||||||
|
import org.springframework.web.bind.annotation.PathVariable;
|
||||||
|
import org.springframework.web.bind.annotation.RequestMapping;
|
||||||
|
import org.springframework.web.bind.annotation.RequestParam;
|
||||||
|
import org.springframework.web.bind.annotation.RestController;
|
||||||
|
import org.springframework.web.server.ResponseStatusException;
|
||||||
|
|
||||||
|
import java.io.InputStream;
|
||||||
|
import java.util.ArrayList;
|
||||||
|
import java.util.Comparator;
|
||||||
|
import java.util.List;
|
||||||
|
|
||||||
|
@RestController
|
||||||
|
@RequestMapping("/api/v1/admin/opensearch")
|
||||||
|
@PreAuthorize("hasRole('ADMIN')")
|
||||||
|
@Tag(name = "OpenSearch Admin", description = "OpenSearch monitoring and management (ADMIN only)")
|
||||||
|
public class OpenSearchAdminController {
|
||||||
|
|
||||||
|
private final OpenSearchClient client;
|
||||||
|
private final RestClient restClient;
|
||||||
|
private final SearchIndexerStats indexerStats;
|
||||||
|
private final AuditService auditService;
|
||||||
|
private final ObjectMapper objectMapper;
|
||||||
|
private final String opensearchUrl;
|
||||||
|
private final String indexPrefix;
|
||||||
|
|
||||||
|
/**
 * Wires the typed OpenSearch client (cluster/index APIs), the low-level REST
 * client (for _cat and _nodes endpoints the typed client does not cover),
 * indexer pipeline counters, audit logging, and the configured cluster URL
 * and application index prefix.
 *
 * @param opensearchUrl cluster base URL; defaults to http://localhost:9200
 * @param indexPrefix   only indices with this prefix are exposed/deletable;
 *                      defaults to "executions-"
 */
public OpenSearchAdminController(OpenSearchClient client, RestClient restClient,
SearchIndexerStats indexerStats, AuditService auditService,
ObjectMapper objectMapper,
@Value("${opensearch.url:http://localhost:9200}") String opensearchUrl,
@Value("${opensearch.index-prefix:executions-}") String indexPrefix) {
this.client = client;
this.restClient = restClient;
this.indexerStats = indexerStats;
this.auditService = auditService;
this.objectMapper = objectMapper;
this.opensearchUrl = opensearchUrl;
this.indexPrefix = indexPrefix;
}
|
||||||
|
|
||||||
|
@GetMapping("/status")
|
||||||
|
@Operation(summary = "Get OpenSearch cluster status and version")
|
||||||
|
public ResponseEntity<OpenSearchStatusResponse> getStatus() {
|
||||||
|
try {
|
||||||
|
HealthResponse health = client.cluster().health();
|
||||||
|
String version = client.info().version().number();
|
||||||
|
return ResponseEntity.ok(new OpenSearchStatusResponse(
|
||||||
|
true,
|
||||||
|
health.status().name(),
|
||||||
|
version,
|
||||||
|
health.numberOfNodes(),
|
||||||
|
opensearchUrl));
|
||||||
|
} catch (Exception e) {
|
||||||
|
return ResponseEntity.ok(new OpenSearchStatusResponse(
|
||||||
|
false, "UNREACHABLE", null, 0, opensearchUrl));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@GetMapping("/pipeline")
|
||||||
|
@Operation(summary = "Get indexing pipeline statistics")
|
||||||
|
public ResponseEntity<PipelineStatsResponse> getPipeline() {
|
||||||
|
return ResponseEntity.ok(new PipelineStatsResponse(
|
||||||
|
indexerStats.getQueueDepth(),
|
||||||
|
indexerStats.getMaxQueueSize(),
|
||||||
|
indexerStats.getFailedCount(),
|
||||||
|
indexerStats.getIndexedCount(),
|
||||||
|
indexerStats.getDebounceMs(),
|
||||||
|
indexerStats.getIndexingRate(),
|
||||||
|
indexerStats.getLastIndexedAt()));
|
||||||
|
}
|
||||||
|
|
||||||
|
@GetMapping("/indices")
|
||||||
|
@Operation(summary = "Get OpenSearch indices with pagination")
|
||||||
|
public ResponseEntity<IndicesPageResponse> getIndices(
|
||||||
|
@RequestParam(defaultValue = "0") int page,
|
||||||
|
@RequestParam(defaultValue = "20") int size,
|
||||||
|
@RequestParam(defaultValue = "") String search) {
|
||||||
|
try {
|
||||||
|
Response response = restClient.performRequest(
|
||||||
|
new Request("GET", "/_cat/indices?format=json&h=index,health,docs.count,store.size,pri,rep&bytes=b"));
|
||||||
|
JsonNode indices;
|
||||||
|
try (InputStream is = response.getEntity().getContent()) {
|
||||||
|
indices = objectMapper.readTree(is);
|
||||||
|
}
|
||||||
|
|
||||||
|
List<IndexInfoResponse> allIndices = new ArrayList<>();
|
||||||
|
for (JsonNode idx : indices) {
|
||||||
|
String name = idx.path("index").asText("");
|
||||||
|
if (!name.startsWith(indexPrefix)) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
if (!search.isEmpty() && !name.contains(search)) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
allIndices.add(new IndexInfoResponse(
|
||||||
|
name,
|
||||||
|
parseLong(idx.path("docs.count").asText("0")),
|
||||||
|
humanSize(parseLong(idx.path("store.size").asText("0"))),
|
||||||
|
parseLong(idx.path("store.size").asText("0")),
|
||||||
|
idx.path("health").asText("unknown"),
|
||||||
|
parseInt(idx.path("pri").asText("0")),
|
||||||
|
parseInt(idx.path("rep").asText("0"))));
|
||||||
|
}
|
||||||
|
|
||||||
|
allIndices.sort(Comparator.comparing(IndexInfoResponse::name));
|
||||||
|
|
||||||
|
long totalDocs = allIndices.stream().mapToLong(IndexInfoResponse::docCount).sum();
|
||||||
|
long totalBytes = allIndices.stream().mapToLong(IndexInfoResponse::sizeBytes).sum();
|
||||||
|
int totalIndices = allIndices.size();
|
||||||
|
int totalPages = Math.max(1, (int) Math.ceil((double) totalIndices / size));
|
||||||
|
|
||||||
|
int fromIndex = Math.min(page * size, totalIndices);
|
||||||
|
int toIndex = Math.min(fromIndex + size, totalIndices);
|
||||||
|
List<IndexInfoResponse> pageItems = allIndices.subList(fromIndex, toIndex);
|
||||||
|
|
||||||
|
return ResponseEntity.ok(new IndicesPageResponse(
|
||||||
|
pageItems, totalIndices, totalDocs,
|
||||||
|
humanSize(totalBytes), page, size, totalPages));
|
||||||
|
} catch (Exception e) {
|
||||||
|
return ResponseEntity.ok(new IndicesPageResponse(
|
||||||
|
List.of(), 0, 0, "0 B", page, size, 0));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@DeleteMapping("/indices/{name}")
|
||||||
|
@Operation(summary = "Delete an OpenSearch index")
|
||||||
|
public ResponseEntity<Void> deleteIndex(@PathVariable String name, HttpServletRequest request) {
|
||||||
|
try {
|
||||||
|
if (!name.startsWith(indexPrefix)) {
|
||||||
|
throw new ResponseStatusException(HttpStatus.FORBIDDEN, "Cannot delete index outside application scope");
|
||||||
|
}
|
||||||
|
boolean exists = client.indices().exists(r -> r.index(name)).value();
|
||||||
|
if (!exists) {
|
||||||
|
throw new ResponseStatusException(HttpStatus.NOT_FOUND, "Index not found: " + name);
|
||||||
|
}
|
||||||
|
client.indices().delete(r -> r.index(name));
|
||||||
|
auditService.log("delete_index", AuditCategory.INFRA, name, null, AuditResult.SUCCESS, request);
|
||||||
|
return ResponseEntity.ok().build();
|
||||||
|
} catch (ResponseStatusException e) {
|
||||||
|
throw e;
|
||||||
|
} catch (Exception e) {
|
||||||
|
throw new ResponseStatusException(HttpStatus.INTERNAL_SERVER_ERROR, "Failed to delete index: " + e.getMessage());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@GetMapping("/performance")
@Operation(summary = "Get OpenSearch performance metrics")
// Aggregates cluster-wide figures from GET /_nodes/stats/jvm,indices:
// heap usage, query/request cache hit rates, and average search/indexing
// latency. Per-node totals are summed before any rate is derived.
public ResponseEntity<PerformanceResponse> getPerformance() {
    try {
        // Low-level REST client call: node stats are consumed as raw JSON.
        Response response = restClient.performRequest(
                new Request("GET", "/_nodes/stats/jvm,indices"));
        JsonNode root;
        // try-with-resources so the entity stream is always closed.
        try (InputStream is = response.getEntity().getContent()) {
            root = objectMapper.readTree(is);
        }

        JsonNode nodes = root.path("nodes");
        // Cluster-wide accumulators, summed over every node in the response.
        long heapUsed = 0, heapMax = 0;
        long queryCacheHits = 0, queryCacheMisses = 0;
        long requestCacheHits = 0, requestCacheMisses = 0;
        long searchQueryTotal = 0, searchQueryTimeMs = 0;
        long indexTotal = 0, indexTimeMs = 0;

        // "nodes" is an object keyed by node id; iterate its entries.
        var it = nodes.fields();
        while (it.hasNext()) {
            var entry = it.next();
            JsonNode node = entry.getValue();

            JsonNode jvm = node.path("jvm").path("mem");
            // path()/asLong(0) make every missing field contribute 0 instead of failing.
            heapUsed += jvm.path("heap_used_in_bytes").asLong(0);
            heapMax += jvm.path("heap_max_in_bytes").asLong(0);

            JsonNode indicesNode = node.path("indices");
            JsonNode queryCache = indicesNode.path("query_cache");
            queryCacheHits += queryCache.path("hit_count").asLong(0);
            queryCacheMisses += queryCache.path("miss_count").asLong(0);

            JsonNode requestCache = indicesNode.path("request_cache");
            requestCacheHits += requestCache.path("hit_count").asLong(0);
            requestCacheMisses += requestCache.path("miss_count").asLong(0);

            JsonNode searchNode = indicesNode.path("search");
            searchQueryTotal += searchNode.path("query_total").asLong(0);
            searchQueryTimeMs += searchNode.path("query_time_in_millis").asLong(0);

            JsonNode indexing = indicesNode.path("indexing");
            indexTotal += indexing.path("index_total").asLong(0);
            indexTimeMs += indexing.path("index_time_in_millis").asLong(0);
        }

        // Derived rates; each denominator is guarded so an idle or empty
        // cluster yields 0.0 rather than dividing by zero.
        double queryCacheHitRate = (queryCacheHits + queryCacheMisses) > 0
                ? (double) queryCacheHits / (queryCacheHits + queryCacheMisses) : 0.0;
        double requestCacheHitRate = (requestCacheHits + requestCacheMisses) > 0
                ? (double) requestCacheHits / (requestCacheHits + requestCacheMisses) : 0.0;
        // Latencies are lifetime averages (total time / total ops), in milliseconds.
        double searchLatency = searchQueryTotal > 0
                ? (double) searchQueryTimeMs / searchQueryTotal : 0.0;
        double indexingLatency = indexTotal > 0
                ? (double) indexTimeMs / indexTotal : 0.0;

        return ResponseEntity.ok(new PerformanceResponse(
                queryCacheHitRate, requestCacheHitRate,
                searchLatency, indexingLatency,
                heapUsed, heapMax));
    } catch (Exception e) {
        // Best-effort endpoint: on any failure (cluster unreachable, parse
        // error) return all-zero metrics instead of an error response.
        // NOTE(review): the exception is silently dropped — consider logging
        // it so cluster outages are not invisible on the dashboard.
        return ResponseEntity.ok(new PerformanceResponse(0, 0, 0, 0, 0, 0));
    }
}
|
||||||
|
|
||||||
|
private static long parseLong(String s) {
|
||||||
|
try {
|
||||||
|
return Long.parseLong(s);
|
||||||
|
} catch (NumberFormatException e) {
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private static int parseInt(String s) {
|
||||||
|
try {
|
||||||
|
return Integer.parseInt(s);
|
||||||
|
} catch (NumberFormatException e) {
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private static String humanSize(long bytes) {
|
||||||
|
if (bytes < 1024) return bytes + " B";
|
||||||
|
if (bytes < 1024 * 1024) return String.format("%.1f KB", bytes / 1024.0);
|
||||||
|
if (bytes < 1024 * 1024 * 1024) return String.format("%.1f MB", bytes / (1024.0 * 1024));
|
||||||
|
return String.format("%.1f GB", bytes / (1024.0 * 1024 * 1024));
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -0,0 +1,36 @@
|
|||||||
|
package com.cameleer3.server.app.controller;
|
||||||
|
|
||||||
|
import com.cameleer3.server.core.rbac.RbacService;
|
||||||
|
import com.cameleer3.server.core.rbac.RbacStats;
|
||||||
|
import io.swagger.v3.oas.annotations.Operation;
|
||||||
|
import io.swagger.v3.oas.annotations.responses.ApiResponse;
|
||||||
|
import io.swagger.v3.oas.annotations.tags.Tag;
|
||||||
|
import org.springframework.http.ResponseEntity;
|
||||||
|
import org.springframework.security.access.prepost.PreAuthorize;
|
||||||
|
import org.springframework.web.bind.annotation.GetMapping;
|
||||||
|
import org.springframework.web.bind.annotation.RequestMapping;
|
||||||
|
import org.springframework.web.bind.annotation.RestController;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Admin endpoint for RBAC statistics.
|
||||||
|
* Protected by {@code ROLE_ADMIN}.
|
||||||
|
*/
|
||||||
|
@RestController
|
||||||
|
@RequestMapping("/api/v1/admin/rbac")
|
||||||
|
@Tag(name = "RBAC Stats", description = "RBAC statistics (ADMIN only)")
|
||||||
|
@PreAuthorize("hasRole('ADMIN')")
|
||||||
|
public class RbacStatsController {
|
||||||
|
|
||||||
|
private final RbacService rbacService;
|
||||||
|
|
||||||
|
public RbacStatsController(RbacService rbacService) {
|
||||||
|
this.rbacService = rbacService;
|
||||||
|
}
|
||||||
|
|
||||||
|
@GetMapping("/stats")
|
||||||
|
@Operation(summary = "Get RBAC statistics for the dashboard")
|
||||||
|
@ApiResponse(responseCode = "200", description = "RBAC stats returned")
|
||||||
|
public ResponseEntity<RbacStats> getStats() {
|
||||||
|
return ResponseEntity.ok(rbacService.getStats());
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -0,0 +1,125 @@
|
|||||||
|
package com.cameleer3.server.app.controller;
|
||||||
|
|
||||||
|
import com.cameleer3.server.core.admin.AuditCategory;
|
||||||
|
import com.cameleer3.server.core.admin.AuditResult;
|
||||||
|
import com.cameleer3.server.core.admin.AuditService;
|
||||||
|
import com.cameleer3.server.core.rbac.RbacService;
|
||||||
|
import com.cameleer3.server.core.rbac.RoleDetail;
|
||||||
|
import com.cameleer3.server.core.rbac.RoleRepository;
|
||||||
|
import com.cameleer3.server.core.rbac.SystemRole;
|
||||||
|
import io.swagger.v3.oas.annotations.Operation;
|
||||||
|
import io.swagger.v3.oas.annotations.responses.ApiResponse;
|
||||||
|
import io.swagger.v3.oas.annotations.tags.Tag;
|
||||||
|
import jakarta.servlet.http.HttpServletRequest;
|
||||||
|
import org.springframework.http.ResponseEntity;
|
||||||
|
import org.springframework.security.access.prepost.PreAuthorize;
|
||||||
|
import org.springframework.web.bind.annotation.DeleteMapping;
|
||||||
|
import org.springframework.web.bind.annotation.GetMapping;
|
||||||
|
import org.springframework.web.bind.annotation.PathVariable;
|
||||||
|
import org.springframework.web.bind.annotation.PostMapping;
|
||||||
|
import org.springframework.web.bind.annotation.PutMapping;
|
||||||
|
import org.springframework.web.bind.annotation.RequestBody;
|
||||||
|
import org.springframework.web.bind.annotation.RequestMapping;
|
||||||
|
import org.springframework.web.bind.annotation.RestController;
|
||||||
|
|
||||||
|
import java.util.List;
|
||||||
|
import java.util.Map;
|
||||||
|
import java.util.UUID;
|
||||||
|
|
||||||
|
/**
 * Admin endpoints for role management (list, read, create, update, delete).
 * Protected by {@code ROLE_ADMIN}.
 *
 * System roles (see {@link SystemRole#isSystem}) are immutable: update and
 * delete attempts are rejected with 403 and recorded as FAILURE audit events.
 */
@RestController
@RequestMapping("/api/v1/admin/roles")
@Tag(name = "Role Admin", description = "Role management (ADMIN only)")
@PreAuthorize("hasRole('ADMIN')")
public class RoleAdminController {

    // Persistence for role rows; system roles are recognised via SystemRole.isSystem().
    private final RoleRepository roleRepository;
    // NOTE(review): rbacService is injected but not referenced by any method
    // in this class — confirm whether it can be removed.
    private final RbacService rbacService;
    // Every mutating call is audited, including rejected attempts on system roles.
    private final AuditService auditService;

    public RoleAdminController(RoleRepository roleRepository, RbacService rbacService,
                               AuditService auditService) {
        this.roleRepository = roleRepository;
        this.rbacService = rbacService;
        this.auditService = auditService;
    }

    /** Lists every role, system and custom alike. Read-only, not audited. */
    @GetMapping
    @Operation(summary = "List all roles (system and custom)")
    @ApiResponse(responseCode = "200", description = "Role list returned")
    public ResponseEntity<List<RoleDetail>> listRoles() {
        return ResponseEntity.ok(roleRepository.findAll());
    }

    /** Fetches a single role by id, or 404 when no such role exists. */
    @GetMapping("/{id}")
    @Operation(summary = "Get role by ID with effective principals")
    @ApiResponse(responseCode = "200", description = "Role found")
    @ApiResponse(responseCode = "404", description = "Role not found")
    public ResponseEntity<RoleDetail> getRole(@PathVariable UUID id) {
        return roleRepository.findById(id)
                .map(ResponseEntity::ok)
                .orElse(ResponseEntity.notFound().build());
    }

    /**
     * Creates a custom role and returns its generated id as {"id": <uuid>}.
     * Missing description defaults to "", missing scope to "custom".
     */
    @PostMapping
    @Operation(summary = "Create a custom role")
    @ApiResponse(responseCode = "200", description = "Role created")
    public ResponseEntity<Map<String, UUID>> createRole(@RequestBody CreateRoleRequest request,
                                                        HttpServletRequest httpRequest) {
        String desc = request.description() != null ? request.description() : "";
        String sc = request.scope() != null ? request.scope() : "custom";
        // NOTE(review): request.name() is passed through unvalidated — confirm
        // the repository rejects null/blank names, or add a 400 check here.
        UUID id = roleRepository.create(request.name(), desc, sc);
        auditService.log("create_role", AuditCategory.RBAC, id.toString(),
                Map.of("name", request.name()), AuditResult.SUCCESS, httpRequest);
        return ResponseEntity.ok(Map.of("id", id));
    }

    /**
     * Updates a custom role. System roles return 403 (and a FAILURE audit
     * entry) before the existence check, so probing system ids never 404s.
     */
    @PutMapping("/{id}")
    @Operation(summary = "Update a custom role")
    @ApiResponse(responseCode = "200", description = "Role updated")
    @ApiResponse(responseCode = "403", description = "Cannot modify system role")
    @ApiResponse(responseCode = "404", description = "Role not found")
    public ResponseEntity<Void> updateRole(@PathVariable UUID id,
                                           @RequestBody UpdateRoleRequest request,
                                           HttpServletRequest httpRequest) {
        if (SystemRole.isSystem(id)) {
            auditService.log("update_role", AuditCategory.RBAC, id.toString(),
                    Map.of("reason", "system_role_protected"), AuditResult.FAILURE, httpRequest);
            return ResponseEntity.status(403).build();
        }
        if (roleRepository.findById(id).isEmpty()) {
            return ResponseEntity.notFound().build();
        }
        // NOTE(review): name/description/scope may be null here — confirm the
        // repository treats nulls as "leave unchanged" rather than overwriting.
        roleRepository.update(id, request.name(), request.description(), request.scope());
        auditService.log("update_role", AuditCategory.RBAC, id.toString(),
                null, AuditResult.SUCCESS, httpRequest);
        return ResponseEntity.ok().build();
    }

    /**
     * Deletes a custom role (204 on success). Mirrors updateRole's guard
     * ordering: system-role check first (403 + FAILURE audit), then 404.
     */
    @DeleteMapping("/{id}")
    @Operation(summary = "Delete a custom role")
    @ApiResponse(responseCode = "204", description = "Role deleted")
    @ApiResponse(responseCode = "403", description = "Cannot delete system role")
    @ApiResponse(responseCode = "404", description = "Role not found")
    public ResponseEntity<Void> deleteRole(@PathVariable UUID id,
                                           HttpServletRequest httpRequest) {
        if (SystemRole.isSystem(id)) {
            auditService.log("delete_role", AuditCategory.RBAC, id.toString(),
                    Map.of("reason", "system_role_protected"), AuditResult.FAILURE, httpRequest);
            return ResponseEntity.status(403).build();
        }
        if (roleRepository.findById(id).isEmpty()) {
            return ResponseEntity.notFound().build();
        }
        roleRepository.delete(id);
        auditService.log("delete_role", AuditCategory.RBAC, id.toString(),
                null, AuditResult.SUCCESS, httpRequest);
        return ResponseEntity.noContent().build();
    }

    /** Request body for {@link #createRole}; description and scope are optional. */
    public record CreateRoleRequest(String name, String description, String scope) {}
    /** Request body for {@link #updateRole}. */
    public record UpdateRoleRequest(String name, String description, String scope) {}
}
|
||||||
@@ -0,0 +1,151 @@
|
|||||||
|
package com.cameleer3.server.app.controller;
|
||||||
|
|
||||||
|
import com.cameleer3.server.app.dto.AgentSummary;
|
||||||
|
import com.cameleer3.server.app.dto.AppCatalogEntry;
|
||||||
|
import com.cameleer3.server.app.dto.RouteSummary;
|
||||||
|
import com.cameleer3.server.core.agent.AgentInfo;
|
||||||
|
import com.cameleer3.server.core.agent.AgentRegistryService;
|
||||||
|
import com.cameleer3.server.core.agent.AgentState;
|
||||||
|
import com.cameleer3.server.core.storage.StatsStore;
|
||||||
|
import io.swagger.v3.oas.annotations.Operation;
|
||||||
|
import io.swagger.v3.oas.annotations.responses.ApiResponse;
|
||||||
|
import io.swagger.v3.oas.annotations.tags.Tag;
|
||||||
|
import org.springframework.http.ResponseEntity;
|
||||||
|
import org.springframework.jdbc.core.JdbcTemplate;
|
||||||
|
import org.springframework.web.bind.annotation.GetMapping;
|
||||||
|
import org.springframework.web.bind.annotation.RequestMapping;
|
||||||
|
import org.springframework.web.bind.annotation.RestController;
|
||||||
|
|
||||||
|
import java.sql.Timestamp;
|
||||||
|
import java.time.Instant;
|
||||||
|
import java.time.temporal.ChronoUnit;
|
||||||
|
import java.util.ArrayList;
|
||||||
|
import java.util.LinkedHashMap;
|
||||||
|
import java.util.LinkedHashSet;
|
||||||
|
import java.util.List;
|
||||||
|
import java.util.Map;
|
||||||
|
import java.util.Set;
|
||||||
|
import java.util.stream.Collectors;
|
||||||
|
|
||||||
|
@RestController
|
||||||
|
@RequestMapping("/api/v1/routes")
|
||||||
|
@Tag(name = "Route Catalog", description = "Route catalog and discovery")
|
||||||
|
public class RouteCatalogController {
|
||||||
|
|
||||||
|
private final AgentRegistryService registryService;
|
||||||
|
private final JdbcTemplate jdbc;
|
||||||
|
|
||||||
|
public RouteCatalogController(AgentRegistryService registryService, JdbcTemplate jdbc) {
|
||||||
|
this.registryService = registryService;
|
||||||
|
this.jdbc = jdbc;
|
||||||
|
}
|
||||||
|
|
||||||
|
@GetMapping("/catalog")
|
||||||
|
@Operation(summary = "Get route catalog",
|
||||||
|
description = "Returns all applications with their routes, agents, and health status")
|
||||||
|
@ApiResponse(responseCode = "200", description = "Catalog returned")
|
||||||
|
public ResponseEntity<List<AppCatalogEntry>> getCatalog() {
|
||||||
|
List<AgentInfo> allAgents = registryService.findAll();
|
||||||
|
|
||||||
|
// Group agents by application name
|
||||||
|
Map<String, List<AgentInfo>> agentsByApp = allAgents.stream()
|
||||||
|
.collect(Collectors.groupingBy(AgentInfo::application, LinkedHashMap::new, Collectors.toList()));
|
||||||
|
|
||||||
|
// Collect all distinct routes per app
|
||||||
|
Map<String, Set<String>> routesByApp = new LinkedHashMap<>();
|
||||||
|
for (var entry : agentsByApp.entrySet()) {
|
||||||
|
Set<String> routes = new LinkedHashSet<>();
|
||||||
|
for (AgentInfo agent : entry.getValue()) {
|
||||||
|
if (agent.routeIds() != null) {
|
||||||
|
routes.addAll(agent.routeIds());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
routesByApp.put(entry.getKey(), routes);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Query route-level stats for the last 24 hours
|
||||||
|
Instant now = Instant.now();
|
||||||
|
Instant from24h = now.minus(24, ChronoUnit.HOURS);
|
||||||
|
Instant from1m = now.minus(1, ChronoUnit.MINUTES);
|
||||||
|
|
||||||
|
// Route exchange counts from continuous aggregate
|
||||||
|
Map<String, Long> routeExchangeCounts = new LinkedHashMap<>();
|
||||||
|
Map<String, Instant> routeLastSeen = new LinkedHashMap<>();
|
||||||
|
try {
|
||||||
|
jdbc.query(
|
||||||
|
"SELECT application_name, route_id, SUM(total_count) AS cnt, MAX(bucket) AS last_seen " +
|
||||||
|
"FROM stats_1m_route WHERE bucket >= ? AND bucket < ? " +
|
||||||
|
"GROUP BY application_name, route_id",
|
||||||
|
rs -> {
|
||||||
|
String key = rs.getString("application_name") + "/" + rs.getString("route_id");
|
||||||
|
routeExchangeCounts.put(key, rs.getLong("cnt"));
|
||||||
|
Timestamp ts = rs.getTimestamp("last_seen");
|
||||||
|
if (ts != null) routeLastSeen.put(key, ts.toInstant());
|
||||||
|
},
|
||||||
|
Timestamp.from(from24h), Timestamp.from(now));
|
||||||
|
} catch (Exception e) {
|
||||||
|
// Continuous aggregate may not exist yet
|
||||||
|
}
|
||||||
|
|
||||||
|
// Per-agent TPS from the last minute
|
||||||
|
Map<String, Double> agentTps = new LinkedHashMap<>();
|
||||||
|
try {
|
||||||
|
jdbc.query(
|
||||||
|
"SELECT application_name, SUM(total_count) AS cnt " +
|
||||||
|
"FROM stats_1m_route WHERE bucket >= ? AND bucket < ? " +
|
||||||
|
"GROUP BY application_name",
|
||||||
|
rs -> {
|
||||||
|
// This gives per-app TPS; we'll distribute among agents below
|
||||||
|
},
|
||||||
|
Timestamp.from(from1m), Timestamp.from(now));
|
||||||
|
} catch (Exception e) {
|
||||||
|
// Continuous aggregate may not exist yet
|
||||||
|
}
|
||||||
|
|
||||||
|
// Build catalog entries
|
||||||
|
List<AppCatalogEntry> catalog = new ArrayList<>();
|
||||||
|
for (var entry : agentsByApp.entrySet()) {
|
||||||
|
String appId = entry.getKey();
|
||||||
|
List<AgentInfo> agents = entry.getValue();
|
||||||
|
|
||||||
|
// Routes
|
||||||
|
Set<String> routeIds = routesByApp.getOrDefault(appId, Set.of());
|
||||||
|
List<RouteSummary> routeSummaries = routeIds.stream()
|
||||||
|
.map(routeId -> {
|
||||||
|
String key = appId + "/" + routeId;
|
||||||
|
long count = routeExchangeCounts.getOrDefault(key, 0L);
|
||||||
|
Instant lastSeen = routeLastSeen.get(key);
|
||||||
|
return new RouteSummary(routeId, count, lastSeen);
|
||||||
|
})
|
||||||
|
.toList();
|
||||||
|
|
||||||
|
// Agent summaries
|
||||||
|
List<AgentSummary> agentSummaries = agents.stream()
|
||||||
|
.map(a -> new AgentSummary(a.id(), a.name(), a.state().name().toLowerCase(), 0.0))
|
||||||
|
.toList();
|
||||||
|
|
||||||
|
// Health = worst state among agents
|
||||||
|
String health = computeWorstHealth(agents);
|
||||||
|
|
||||||
|
// Total exchange count for the app
|
||||||
|
long totalExchanges = routeSummaries.stream().mapToLong(RouteSummary::exchangeCount).sum();
|
||||||
|
|
||||||
|
catalog.add(new AppCatalogEntry(appId, routeSummaries, agentSummaries,
|
||||||
|
agents.size(), health, totalExchanges));
|
||||||
|
}
|
||||||
|
|
||||||
|
return ResponseEntity.ok(catalog);
|
||||||
|
}
|
||||||
|
|
||||||
|
private String computeWorstHealth(List<AgentInfo> agents) {
|
||||||
|
boolean hasDead = false;
|
||||||
|
boolean hasStale = false;
|
||||||
|
for (AgentInfo a : agents) {
|
||||||
|
if (a.state() == AgentState.DEAD) hasDead = true;
|
||||||
|
if (a.state() == AgentState.STALE) hasStale = true;
|
||||||
|
}
|
||||||
|
if (hasDead) return "dead";
|
||||||
|
if (hasStale) return "stale";
|
||||||
|
return "live";
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -0,0 +1,164 @@
|
|||||||
|
package com.cameleer3.server.app.controller;
|
||||||
|
|
||||||
|
import com.cameleer3.server.app.dto.ProcessorMetrics;
|
||||||
|
import com.cameleer3.server.app.dto.RouteMetrics;
|
||||||
|
import io.swagger.v3.oas.annotations.Operation;
|
||||||
|
import io.swagger.v3.oas.annotations.responses.ApiResponse;
|
||||||
|
import io.swagger.v3.oas.annotations.tags.Tag;
|
||||||
|
import org.springframework.http.ResponseEntity;
|
||||||
|
import org.springframework.jdbc.core.JdbcTemplate;
|
||||||
|
import org.springframework.web.bind.annotation.GetMapping;
|
||||||
|
import org.springframework.web.bind.annotation.RequestMapping;
|
||||||
|
import org.springframework.web.bind.annotation.RequestParam;
|
||||||
|
import org.springframework.web.bind.annotation.RestController;
|
||||||
|
|
||||||
|
import java.sql.Timestamp;
|
||||||
|
import java.time.Duration;
|
||||||
|
import java.time.Instant;
|
||||||
|
import java.time.temporal.ChronoUnit;
|
||||||
|
import java.util.ArrayList;
|
||||||
|
import java.util.List;
|
||||||
|
|
||||||
|
@RestController
|
||||||
|
@RequestMapping("/api/v1/routes")
|
||||||
|
@Tag(name = "Route Metrics", description = "Route performance metrics")
|
||||||
|
public class RouteMetricsController {
|
||||||
|
|
||||||
|
private final JdbcTemplate jdbc;
|
||||||
|
|
||||||
|
public RouteMetricsController(JdbcTemplate jdbc) {
|
||||||
|
this.jdbc = jdbc;
|
||||||
|
}
|
||||||
|
|
||||||
|
@GetMapping("/metrics")
|
||||||
|
@Operation(summary = "Get route metrics",
|
||||||
|
description = "Returns aggregated performance metrics per route for the given time window")
|
||||||
|
@ApiResponse(responseCode = "200", description = "Metrics returned")
|
||||||
|
public ResponseEntity<List<RouteMetrics>> getMetrics(
|
||||||
|
@RequestParam(required = false) String from,
|
||||||
|
@RequestParam(required = false) String to,
|
||||||
|
@RequestParam(required = false) String appId) {
|
||||||
|
|
||||||
|
Instant toInstant = to != null ? Instant.parse(to) : Instant.now();
|
||||||
|
Instant fromInstant = from != null ? Instant.parse(from) : toInstant.minus(24, ChronoUnit.HOURS);
|
||||||
|
long windowSeconds = Duration.between(fromInstant, toInstant).toSeconds();
|
||||||
|
|
||||||
|
var sql = new StringBuilder(
|
||||||
|
"SELECT application_name, route_id, " +
|
||||||
|
"SUM(total_count) AS total, " +
|
||||||
|
"SUM(failed_count) AS failed, " +
|
||||||
|
"CASE WHEN SUM(total_count) > 0 THEN SUM(duration_sum) / SUM(total_count) ELSE 0 END AS avg_dur, " +
|
||||||
|
"COALESCE(MAX(p99_duration), 0) AS p99_dur " +
|
||||||
|
"FROM stats_1m_route WHERE bucket >= ? AND bucket < ?");
|
||||||
|
var params = new ArrayList<Object>();
|
||||||
|
params.add(Timestamp.from(fromInstant));
|
||||||
|
params.add(Timestamp.from(toInstant));
|
||||||
|
|
||||||
|
if (appId != null) {
|
||||||
|
sql.append(" AND application_name = ?");
|
||||||
|
params.add(appId);
|
||||||
|
}
|
||||||
|
sql.append(" GROUP BY application_name, route_id ORDER BY application_name, route_id");
|
||||||
|
|
||||||
|
// Key struct for sparkline lookup
|
||||||
|
record RouteKey(String appId, String routeId) {}
|
||||||
|
List<RouteKey> routeKeys = new ArrayList<>();
|
||||||
|
|
||||||
|
List<RouteMetrics> metrics = jdbc.query(sql.toString(), (rs, rowNum) -> {
|
||||||
|
String applicationName = rs.getString("application_name");
|
||||||
|
String routeId = rs.getString("route_id");
|
||||||
|
long total = rs.getLong("total");
|
||||||
|
long failed = rs.getLong("failed");
|
||||||
|
double avgDur = rs.getDouble("avg_dur");
|
||||||
|
double p99Dur = rs.getDouble("p99_dur");
|
||||||
|
|
||||||
|
double successRate = total > 0 ? (double) (total - failed) / total : 1.0;
|
||||||
|
double errorRate = total > 0 ? (double) failed / total : 0.0;
|
||||||
|
double tps = windowSeconds > 0 ? (double) total / windowSeconds : 0.0;
|
||||||
|
|
||||||
|
routeKeys.add(new RouteKey(applicationName, routeId));
|
||||||
|
return new RouteMetrics(routeId, applicationName, total, successRate,
|
||||||
|
avgDur, p99Dur, errorRate, tps, List.of());
|
||||||
|
}, params.toArray());
|
||||||
|
|
||||||
|
// Fetch sparklines (12 buckets over the time window)
|
||||||
|
if (!metrics.isEmpty()) {
|
||||||
|
int sparkBuckets = 12;
|
||||||
|
long bucketSeconds = Math.max(windowSeconds / sparkBuckets, 60);
|
||||||
|
|
||||||
|
for (int i = 0; i < metrics.size(); i++) {
|
||||||
|
RouteMetrics m = metrics.get(i);
|
||||||
|
try {
|
||||||
|
List<Double> sparkline = jdbc.query(
|
||||||
|
"SELECT time_bucket(? * INTERVAL '1 second', bucket) AS period, " +
|
||||||
|
"COALESCE(SUM(total_count), 0) AS cnt " +
|
||||||
|
"FROM stats_1m_route WHERE bucket >= ? AND bucket < ? " +
|
||||||
|
"AND application_name = ? AND route_id = ? " +
|
||||||
|
"GROUP BY period ORDER BY period",
|
||||||
|
(rs, rowNum) -> rs.getDouble("cnt"),
|
||||||
|
bucketSeconds, Timestamp.from(fromInstant), Timestamp.from(toInstant),
|
||||||
|
m.appId(), m.routeId());
|
||||||
|
metrics.set(i, new RouteMetrics(m.routeId(), m.appId(), m.exchangeCount(),
|
||||||
|
m.successRate(), m.avgDurationMs(), m.p99DurationMs(),
|
||||||
|
m.errorRate(), m.throughputPerSec(), sparkline));
|
||||||
|
} catch (Exception e) {
|
||||||
|
// Leave sparkline empty on error
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return ResponseEntity.ok(metrics);
|
||||||
|
}
|
||||||
|
|
||||||
|
@GetMapping("/metrics/processors")
|
||||||
|
@Operation(summary = "Get processor metrics",
|
||||||
|
description = "Returns aggregated performance metrics per processor for the given route and time window")
|
||||||
|
@ApiResponse(responseCode = "200", description = "Metrics returned")
|
||||||
|
public ResponseEntity<List<ProcessorMetrics>> getProcessorMetrics(
|
||||||
|
@RequestParam String routeId,
|
||||||
|
@RequestParam(required = false) String appId,
|
||||||
|
@RequestParam(required = false) Instant from,
|
||||||
|
@RequestParam(required = false) Instant to) {
|
||||||
|
|
||||||
|
Instant toInstant = to != null ? to : Instant.now();
|
||||||
|
Instant fromInstant = from != null ? from : toInstant.minus(24, ChronoUnit.HOURS);
|
||||||
|
|
||||||
|
var sql = new StringBuilder(
|
||||||
|
"SELECT processor_id, processor_type, route_id, application_name, " +
|
||||||
|
"SUM(total_count) AS total_count, " +
|
||||||
|
"SUM(failed_count) AS failed_count, " +
|
||||||
|
"CASE WHEN SUM(total_count) > 0 THEN SUM(duration_sum)::double precision / SUM(total_count) ELSE 0 END AS avg_duration_ms, " +
|
||||||
|
"MAX(p99_duration) AS p99_duration_ms " +
|
||||||
|
"FROM stats_1m_processor_detail " +
|
||||||
|
"WHERE bucket >= ? AND bucket < ? AND route_id = ?");
|
||||||
|
var params = new ArrayList<Object>();
|
||||||
|
params.add(Timestamp.from(fromInstant));
|
||||||
|
params.add(Timestamp.from(toInstant));
|
||||||
|
params.add(routeId);
|
||||||
|
|
||||||
|
if (appId != null) {
|
||||||
|
sql.append(" AND application_name = ?");
|
||||||
|
params.add(appId);
|
||||||
|
}
|
||||||
|
sql.append(" GROUP BY processor_id, processor_type, route_id, application_name");
|
||||||
|
sql.append(" ORDER BY SUM(total_count) DESC");
|
||||||
|
|
||||||
|
List<ProcessorMetrics> metrics = jdbc.query(sql.toString(), (rs, rowNum) -> {
|
||||||
|
long totalCount = rs.getLong("total_count");
|
||||||
|
long failedCount = rs.getLong("failed_count");
|
||||||
|
double errorRate = failedCount > 0 ? (double) failedCount / totalCount : 0.0;
|
||||||
|
return new ProcessorMetrics(
|
||||||
|
rs.getString("processor_id"),
|
||||||
|
rs.getString("processor_type"),
|
||||||
|
rs.getString("route_id"),
|
||||||
|
rs.getString("application_name"),
|
||||||
|
totalCount,
|
||||||
|
failedCount,
|
||||||
|
rs.getDouble("avg_duration_ms"),
|
||||||
|
rs.getDouble("p99_duration_ms"),
|
||||||
|
errorRate);
|
||||||
|
}, params.toArray());
|
||||||
|
|
||||||
|
return ResponseEntity.ok(metrics);
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -51,13 +51,13 @@ public class SearchController {
|
|||||||
@RequestParam(required = false) String routeId,
|
@RequestParam(required = false) String routeId,
|
||||||
@RequestParam(required = false) String agentId,
|
@RequestParam(required = false) String agentId,
|
||||||
@RequestParam(required = false) String processorType,
|
@RequestParam(required = false) String processorType,
|
||||||
@RequestParam(required = false) String group,
|
@RequestParam(required = false) String application,
|
||||||
@RequestParam(defaultValue = "0") int offset,
|
@RequestParam(defaultValue = "0") int offset,
|
||||||
@RequestParam(defaultValue = "50") int limit,
|
@RequestParam(defaultValue = "50") int limit,
|
||||||
@RequestParam(required = false) String sortField,
|
@RequestParam(required = false) String sortField,
|
||||||
@RequestParam(required = false) String sortDir) {
|
@RequestParam(required = false) String sortDir) {
|
||||||
|
|
||||||
List<String> agentIds = resolveGroupToAgentIds(group);
|
List<String> agentIds = resolveApplicationToAgentIds(application);
|
||||||
|
|
||||||
SearchRequest request = new SearchRequest(
|
SearchRequest request = new SearchRequest(
|
||||||
status, timeFrom, timeTo,
|
status, timeFrom, timeTo,
|
||||||
@@ -65,7 +65,7 @@ public class SearchController {
|
|||||||
correlationId,
|
correlationId,
|
||||||
text, null, null, null,
|
text, null, null, null,
|
||||||
routeId, agentId, processorType,
|
routeId, agentId, processorType,
|
||||||
group, agentIds,
|
application, agentIds,
|
||||||
offset, limit,
|
offset, limit,
|
||||||
sortField, sortDir
|
sortField, sortDir
|
||||||
);
|
);
|
||||||
@@ -77,11 +77,11 @@ public class SearchController {
|
|||||||
@Operation(summary = "Advanced search with all filters")
|
@Operation(summary = "Advanced search with all filters")
|
||||||
public ResponseEntity<SearchResult<ExecutionSummary>> searchPost(
|
public ResponseEntity<SearchResult<ExecutionSummary>> searchPost(
|
||||||
@RequestBody SearchRequest request) {
|
@RequestBody SearchRequest request) {
|
||||||
// Resolve group to agentIds if group is specified but agentIds is not
|
// Resolve application to agentIds if application is specified but agentIds is not
|
||||||
SearchRequest resolved = request;
|
SearchRequest resolved = request;
|
||||||
if (request.group() != null && !request.group().isBlank()
|
if (request.application() != null && !request.application().isBlank()
|
||||||
&& (request.agentIds() == null || request.agentIds().isEmpty())) {
|
&& (request.agentIds() == null || request.agentIds().isEmpty())) {
|
||||||
resolved = request.withAgentIds(resolveGroupToAgentIds(request.group()));
|
resolved = request.withAgentIds(resolveApplicationToAgentIds(request.application()));
|
||||||
}
|
}
|
||||||
return ResponseEntity.ok(searchService.search(resolved));
|
return ResponseEntity.ok(searchService.search(resolved));
|
||||||
}
|
}
|
||||||
@@ -92,12 +92,15 @@ public class SearchController {
|
|||||||
@RequestParam Instant from,
|
@RequestParam Instant from,
|
||||||
@RequestParam(required = false) Instant to,
|
@RequestParam(required = false) Instant to,
|
||||||
@RequestParam(required = false) String routeId,
|
@RequestParam(required = false) String routeId,
|
||||||
@RequestParam(required = false) String group) {
|
@RequestParam(required = false) String application) {
|
||||||
Instant end = to != null ? to : Instant.now();
|
Instant end = to != null ? to : Instant.now();
|
||||||
List<String> agentIds = resolveGroupToAgentIds(group);
|
if (routeId == null && application == null) {
|
||||||
if (routeId == null && agentIds == null) {
|
|
||||||
return ResponseEntity.ok(searchService.stats(from, end));
|
return ResponseEntity.ok(searchService.stats(from, end));
|
||||||
}
|
}
|
||||||
|
if (routeId == null) {
|
||||||
|
return ResponseEntity.ok(searchService.statsForApp(from, end, application));
|
||||||
|
}
|
||||||
|
List<String> agentIds = resolveApplicationToAgentIds(application);
|
||||||
return ResponseEntity.ok(searchService.stats(from, end, routeId, agentIds));
|
return ResponseEntity.ok(searchService.stats(from, end, routeId, agentIds));
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -108,9 +111,15 @@ public class SearchController {
|
|||||||
@RequestParam(required = false) Instant to,
|
@RequestParam(required = false) Instant to,
|
||||||
@RequestParam(defaultValue = "24") int buckets,
|
@RequestParam(defaultValue = "24") int buckets,
|
||||||
@RequestParam(required = false) String routeId,
|
@RequestParam(required = false) String routeId,
|
||||||
@RequestParam(required = false) String group) {
|
@RequestParam(required = false) String application) {
|
||||||
Instant end = to != null ? to : Instant.now();
|
Instant end = to != null ? to : Instant.now();
|
||||||
List<String> agentIds = resolveGroupToAgentIds(group);
|
if (routeId == null && application == null) {
|
||||||
|
return ResponseEntity.ok(searchService.timeseries(from, end, buckets));
|
||||||
|
}
|
||||||
|
if (routeId == null) {
|
||||||
|
return ResponseEntity.ok(searchService.timeseriesForApp(from, end, buckets, application));
|
||||||
|
}
|
||||||
|
List<String> agentIds = resolveApplicationToAgentIds(application);
|
||||||
if (routeId == null && agentIds == null) {
|
if (routeId == null && agentIds == null) {
|
||||||
return ResponseEntity.ok(searchService.timeseries(from, end, buckets));
|
return ResponseEntity.ok(searchService.timeseries(from, end, buckets));
|
||||||
}
|
}
|
||||||
@@ -118,14 +127,14 @@ public class SearchController {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Resolve an application group name to agent IDs.
|
* Resolve an application name to agent IDs.
|
||||||
* Returns null if group is null/blank (no filtering).
|
* Returns null if application is null/blank (no filtering).
|
||||||
*/
|
*/
|
||||||
private List<String> resolveGroupToAgentIds(String group) {
|
private List<String> resolveApplicationToAgentIds(String application) {
|
||||||
if (group == null || group.isBlank()) {
|
if (application == null || application.isBlank()) {
|
||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
return registryService.findByGroup(group).stream()
|
return registryService.findByApplication(application).stream()
|
||||||
.map(AgentInfo::id)
|
.map(AgentInfo::id)
|
||||||
.toList();
|
.toList();
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -0,0 +1,62 @@
|
|||||||
|
package com.cameleer3.server.app.controller;
|
||||||
|
|
||||||
|
import com.cameleer3.server.app.dto.ThresholdConfigRequest;
|
||||||
|
import com.cameleer3.server.core.admin.AuditCategory;
|
||||||
|
import com.cameleer3.server.core.admin.AuditResult;
|
||||||
|
import com.cameleer3.server.core.admin.AuditService;
|
||||||
|
import com.cameleer3.server.core.admin.ThresholdConfig;
|
||||||
|
import com.cameleer3.server.core.admin.ThresholdRepository;
|
||||||
|
import io.swagger.v3.oas.annotations.Operation;
|
||||||
|
import io.swagger.v3.oas.annotations.tags.Tag;
|
||||||
|
import jakarta.servlet.http.HttpServletRequest;
|
||||||
|
import jakarta.validation.Valid;
|
||||||
|
import org.springframework.http.HttpStatus;
|
||||||
|
import org.springframework.http.ResponseEntity;
|
||||||
|
import org.springframework.security.access.prepost.PreAuthorize;
|
||||||
|
import org.springframework.web.bind.annotation.GetMapping;
|
||||||
|
import org.springframework.web.bind.annotation.PutMapping;
|
||||||
|
import org.springframework.web.bind.annotation.RequestBody;
|
||||||
|
import org.springframework.web.bind.annotation.RequestMapping;
|
||||||
|
import org.springframework.web.bind.annotation.RestController;
|
||||||
|
import org.springframework.web.server.ResponseStatusException;
|
||||||
|
|
||||||
|
import java.util.List;
|
||||||
|
import java.util.Map;
|
||||||
|
|
||||||
|
@RestController
|
||||||
|
@RequestMapping("/api/v1/admin/thresholds")
|
||||||
|
@PreAuthorize("hasRole('ADMIN')")
|
||||||
|
@Tag(name = "Threshold Admin", description = "Monitoring threshold configuration (ADMIN only)")
|
||||||
|
public class ThresholdAdminController {
|
||||||
|
|
||||||
|
private final ThresholdRepository thresholdRepository;
|
||||||
|
private final AuditService auditService;
|
||||||
|
|
||||||
|
public ThresholdAdminController(ThresholdRepository thresholdRepository, AuditService auditService) {
|
||||||
|
this.thresholdRepository = thresholdRepository;
|
||||||
|
this.auditService = auditService;
|
||||||
|
}
|
||||||
|
|
||||||
|
@GetMapping
|
||||||
|
@Operation(summary = "Get current threshold configuration")
|
||||||
|
public ResponseEntity<ThresholdConfig> getThresholds() {
|
||||||
|
ThresholdConfig config = thresholdRepository.find().orElse(ThresholdConfig.defaults());
|
||||||
|
return ResponseEntity.ok(config);
|
||||||
|
}
|
||||||
|
|
||||||
|
@PutMapping
|
||||||
|
@Operation(summary = "Update threshold configuration")
|
||||||
|
public ResponseEntity<ThresholdConfig> updateThresholds(@Valid @RequestBody ThresholdConfigRequest request,
|
||||||
|
HttpServletRequest httpRequest) {
|
||||||
|
List<String> errors = request.validate();
|
||||||
|
if (!errors.isEmpty()) {
|
||||||
|
throw new ResponseStatusException(HttpStatus.BAD_REQUEST, String.join("; ", errors));
|
||||||
|
}
|
||||||
|
|
||||||
|
ThresholdConfig config = request.toConfig();
|
||||||
|
thresholdRepository.save(config, null);
|
||||||
|
auditService.log("update_thresholds", AuditCategory.CONFIG, "thresholds",
|
||||||
|
Map.of("config", config), AuditResult.SUCCESS, httpRequest);
|
||||||
|
return ResponseEntity.ok(config);
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,73 +1,191 @@
|
|||||||
package com.cameleer3.server.app.controller;
|
package com.cameleer3.server.app.controller;
|
||||||
|
|
||||||
|
import com.cameleer3.server.app.dto.SetPasswordRequest;
|
||||||
|
import com.cameleer3.server.core.admin.AuditCategory;
|
||||||
|
import com.cameleer3.server.core.admin.AuditResult;
|
||||||
|
import com.cameleer3.server.core.admin.AuditService;
|
||||||
|
import com.cameleer3.server.core.rbac.RbacService;
|
||||||
|
import com.cameleer3.server.core.rbac.SystemRole;
|
||||||
|
import com.cameleer3.server.core.rbac.UserDetail;
|
||||||
import com.cameleer3.server.core.security.UserInfo;
|
import com.cameleer3.server.core.security.UserInfo;
|
||||||
import com.cameleer3.server.core.security.UserRepository;
|
import com.cameleer3.server.core.security.UserRepository;
|
||||||
import io.swagger.v3.oas.annotations.Operation;
|
import io.swagger.v3.oas.annotations.Operation;
|
||||||
import io.swagger.v3.oas.annotations.responses.ApiResponse;
|
import io.swagger.v3.oas.annotations.responses.ApiResponse;
|
||||||
import io.swagger.v3.oas.annotations.tags.Tag;
|
import io.swagger.v3.oas.annotations.tags.Tag;
|
||||||
|
import jakarta.servlet.http.HttpServletRequest;
|
||||||
|
import jakarta.validation.Valid;
|
||||||
import org.springframework.http.ResponseEntity;
|
import org.springframework.http.ResponseEntity;
|
||||||
|
import org.springframework.security.access.prepost.PreAuthorize;
|
||||||
import org.springframework.web.bind.annotation.DeleteMapping;
|
import org.springframework.web.bind.annotation.DeleteMapping;
|
||||||
import org.springframework.web.bind.annotation.GetMapping;
|
import org.springframework.web.bind.annotation.GetMapping;
|
||||||
import org.springframework.web.bind.annotation.PathVariable;
|
import org.springframework.web.bind.annotation.PathVariable;
|
||||||
|
import org.springframework.web.bind.annotation.PostMapping;
|
||||||
import org.springframework.web.bind.annotation.PutMapping;
|
import org.springframework.web.bind.annotation.PutMapping;
|
||||||
import org.springframework.web.bind.annotation.RequestBody;
|
import org.springframework.web.bind.annotation.RequestBody;
|
||||||
import org.springframework.web.bind.annotation.RequestMapping;
|
import org.springframework.web.bind.annotation.RequestMapping;
|
||||||
import org.springframework.web.bind.annotation.RestController;
|
import org.springframework.web.bind.annotation.RestController;
|
||||||
|
|
||||||
|
import org.springframework.security.crypto.bcrypt.BCryptPasswordEncoder;
|
||||||
|
|
||||||
|
import java.time.Instant;
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
|
import java.util.Map;
|
||||||
|
import java.util.UUID;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Admin endpoints for user management.
|
* Admin endpoints for user management.
|
||||||
* Protected by {@code ROLE_ADMIN} via SecurityConfig URL patterns.
|
* Protected by {@code ROLE_ADMIN}.
|
||||||
*/
|
*/
|
||||||
@RestController
|
@RestController
|
||||||
@RequestMapping("/api/v1/admin/users")
|
@RequestMapping("/api/v1/admin/users")
|
||||||
@Tag(name = "User Admin", description = "User management (ADMIN only)")
|
@Tag(name = "User Admin", description = "User management (ADMIN only)")
|
||||||
|
@PreAuthorize("hasRole('ADMIN')")
|
||||||
public class UserAdminController {
|
public class UserAdminController {
|
||||||
|
|
||||||
private final UserRepository userRepository;
|
private static final BCryptPasswordEncoder passwordEncoder = new BCryptPasswordEncoder();
|
||||||
|
|
||||||
public UserAdminController(UserRepository userRepository) {
|
private final RbacService rbacService;
|
||||||
|
private final UserRepository userRepository;
|
||||||
|
private final AuditService auditService;
|
||||||
|
|
||||||
|
public UserAdminController(RbacService rbacService, UserRepository userRepository,
|
||||||
|
AuditService auditService) {
|
||||||
|
this.rbacService = rbacService;
|
||||||
this.userRepository = userRepository;
|
this.userRepository = userRepository;
|
||||||
|
this.auditService = auditService;
|
||||||
}
|
}
|
||||||
|
|
||||||
@GetMapping
|
@GetMapping
|
||||||
@Operation(summary = "List all users")
|
@Operation(summary = "List all users with RBAC detail")
|
||||||
@ApiResponse(responseCode = "200", description = "User list returned")
|
@ApiResponse(responseCode = "200", description = "User list returned")
|
||||||
public ResponseEntity<List<UserInfo>> listUsers() {
|
public ResponseEntity<List<UserDetail>> listUsers() {
|
||||||
return ResponseEntity.ok(userRepository.findAll());
|
return ResponseEntity.ok(rbacService.listUsers());
|
||||||
}
|
}
|
||||||
|
|
||||||
@GetMapping("/{userId}")
|
@GetMapping("/{userId}")
|
||||||
@Operation(summary = "Get user by ID")
|
@Operation(summary = "Get user by ID with RBAC detail")
|
||||||
@ApiResponse(responseCode = "200", description = "User found")
|
@ApiResponse(responseCode = "200", description = "User found")
|
||||||
@ApiResponse(responseCode = "404", description = "User not found")
|
@ApiResponse(responseCode = "404", description = "User not found")
|
||||||
public ResponseEntity<UserInfo> getUser(@PathVariable String userId) {
|
public ResponseEntity<UserDetail> getUser(@PathVariable String userId) {
|
||||||
return userRepository.findById(userId)
|
UserDetail detail = rbacService.getUser(userId);
|
||||||
.map(ResponseEntity::ok)
|
if (detail == null) {
|
||||||
.orElse(ResponseEntity.notFound().build());
|
|
||||||
}
|
|
||||||
|
|
||||||
@PutMapping("/{userId}/roles")
|
|
||||||
@Operation(summary = "Update user roles")
|
|
||||||
@ApiResponse(responseCode = "200", description = "Roles updated")
|
|
||||||
@ApiResponse(responseCode = "404", description = "User not found")
|
|
||||||
public ResponseEntity<Void> updateRoles(@PathVariable String userId,
|
|
||||||
@RequestBody RolesRequest request) {
|
|
||||||
if (userRepository.findById(userId).isEmpty()) {
|
|
||||||
return ResponseEntity.notFound().build();
|
return ResponseEntity.notFound().build();
|
||||||
}
|
}
|
||||||
userRepository.updateRoles(userId, request.roles());
|
return ResponseEntity.ok(detail);
|
||||||
|
}
|
||||||
|
|
||||||
|
@PostMapping
|
||||||
|
@Operation(summary = "Create a local user")
|
||||||
|
@ApiResponse(responseCode = "200", description = "User created")
|
||||||
|
public ResponseEntity<UserDetail> createUser(@RequestBody CreateUserRequest request,
|
||||||
|
HttpServletRequest httpRequest) {
|
||||||
|
String userId = "user:" + request.username();
|
||||||
|
UserInfo user = new UserInfo(userId, "local",
|
||||||
|
request.email() != null ? request.email() : "",
|
||||||
|
request.displayName() != null ? request.displayName() : request.username(),
|
||||||
|
Instant.now());
|
||||||
|
userRepository.upsert(user);
|
||||||
|
if (request.password() != null && !request.password().isBlank()) {
|
||||||
|
userRepository.setPassword(userId, passwordEncoder.encode(request.password()));
|
||||||
|
}
|
||||||
|
rbacService.assignRoleToUser(userId, SystemRole.VIEWER_ID);
|
||||||
|
auditService.log("create_user", AuditCategory.USER_MGMT, userId,
|
||||||
|
Map.of("username", request.username()), AuditResult.SUCCESS, httpRequest);
|
||||||
|
return ResponseEntity.ok(rbacService.getUser(userId));
|
||||||
|
}
|
||||||
|
|
||||||
|
@PutMapping("/{userId}")
|
||||||
|
@Operation(summary = "Update user display name or email")
|
||||||
|
@ApiResponse(responseCode = "200", description = "User updated")
|
||||||
|
@ApiResponse(responseCode = "404", description = "User not found")
|
||||||
|
public ResponseEntity<Void> updateUser(@PathVariable String userId,
|
||||||
|
@RequestBody UpdateUserRequest request,
|
||||||
|
HttpServletRequest httpRequest) {
|
||||||
|
var existing = userRepository.findById(userId);
|
||||||
|
if (existing.isEmpty()) return ResponseEntity.notFound().build();
|
||||||
|
var user = existing.get();
|
||||||
|
var updated = new UserInfo(user.userId(), user.provider(),
|
||||||
|
request.email() != null ? request.email() : user.email(),
|
||||||
|
request.displayName() != null ? request.displayName() : user.displayName(),
|
||||||
|
user.createdAt());
|
||||||
|
userRepository.upsert(updated);
|
||||||
|
auditService.log("update_user", AuditCategory.USER_MGMT, userId,
|
||||||
|
null, AuditResult.SUCCESS, httpRequest);
|
||||||
return ResponseEntity.ok().build();
|
return ResponseEntity.ok().build();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@PostMapping("/{userId}/roles/{roleId}")
|
||||||
|
@Operation(summary = "Assign a role to a user")
|
||||||
|
@ApiResponse(responseCode = "200", description = "Role assigned")
|
||||||
|
@ApiResponse(responseCode = "404", description = "User or role not found")
|
||||||
|
public ResponseEntity<Void> assignRoleToUser(@PathVariable String userId,
|
||||||
|
@PathVariable UUID roleId,
|
||||||
|
HttpServletRequest httpRequest) {
|
||||||
|
rbacService.assignRoleToUser(userId, roleId);
|
||||||
|
auditService.log("assign_role_to_user", AuditCategory.USER_MGMT, userId,
|
||||||
|
Map.of("roleId", roleId), AuditResult.SUCCESS, httpRequest);
|
||||||
|
return ResponseEntity.ok().build();
|
||||||
|
}
|
||||||
|
|
||||||
|
@DeleteMapping("/{userId}/roles/{roleId}")
|
||||||
|
@Operation(summary = "Remove a role from a user")
|
||||||
|
@ApiResponse(responseCode = "204", description = "Role removed")
|
||||||
|
public ResponseEntity<Void> removeRoleFromUser(@PathVariable String userId,
|
||||||
|
@PathVariable UUID roleId,
|
||||||
|
HttpServletRequest httpRequest) {
|
||||||
|
rbacService.removeRoleFromUser(userId, roleId);
|
||||||
|
auditService.log("remove_role_from_user", AuditCategory.USER_MGMT, userId,
|
||||||
|
Map.of("roleId", roleId), AuditResult.SUCCESS, httpRequest);
|
||||||
|
return ResponseEntity.noContent().build();
|
||||||
|
}
|
||||||
|
|
||||||
|
@PostMapping("/{userId}/groups/{groupId}")
|
||||||
|
@Operation(summary = "Add a user to a group")
|
||||||
|
@ApiResponse(responseCode = "200", description = "User added to group")
|
||||||
|
public ResponseEntity<Void> addUserToGroup(@PathVariable String userId,
|
||||||
|
@PathVariable UUID groupId,
|
||||||
|
HttpServletRequest httpRequest) {
|
||||||
|
rbacService.addUserToGroup(userId, groupId);
|
||||||
|
auditService.log("add_user_to_group", AuditCategory.USER_MGMT, userId,
|
||||||
|
Map.of("groupId", groupId), AuditResult.SUCCESS, httpRequest);
|
||||||
|
return ResponseEntity.ok().build();
|
||||||
|
}
|
||||||
|
|
||||||
|
@DeleteMapping("/{userId}/groups/{groupId}")
|
||||||
|
@Operation(summary = "Remove a user from a group")
|
||||||
|
@ApiResponse(responseCode = "204", description = "User removed from group")
|
||||||
|
public ResponseEntity<Void> removeUserFromGroup(@PathVariable String userId,
|
||||||
|
@PathVariable UUID groupId,
|
||||||
|
HttpServletRequest httpRequest) {
|
||||||
|
rbacService.removeUserFromGroup(userId, groupId);
|
||||||
|
auditService.log("remove_user_from_group", AuditCategory.USER_MGMT, userId,
|
||||||
|
Map.of("groupId", groupId), AuditResult.SUCCESS, httpRequest);
|
||||||
|
return ResponseEntity.noContent().build();
|
||||||
|
}
|
||||||
|
|
||||||
@DeleteMapping("/{userId}")
|
@DeleteMapping("/{userId}")
|
||||||
@Operation(summary = "Delete user")
|
@Operation(summary = "Delete user")
|
||||||
@ApiResponse(responseCode = "204", description = "User deleted")
|
@ApiResponse(responseCode = "204", description = "User deleted")
|
||||||
public ResponseEntity<Void> deleteUser(@PathVariable String userId) {
|
public ResponseEntity<Void> deleteUser(@PathVariable String userId,
|
||||||
|
HttpServletRequest httpRequest) {
|
||||||
userRepository.delete(userId);
|
userRepository.delete(userId);
|
||||||
|
auditService.log("delete_user", AuditCategory.USER_MGMT, userId,
|
||||||
|
null, AuditResult.SUCCESS, httpRequest);
|
||||||
return ResponseEntity.noContent().build();
|
return ResponseEntity.noContent().build();
|
||||||
}
|
}
|
||||||
|
|
||||||
public record RolesRequest(List<String> roles) {}
|
@PostMapping("/{userId}/password")
|
||||||
|
@Operation(summary = "Reset user password")
|
||||||
|
@ApiResponse(responseCode = "204", description = "Password reset")
|
||||||
|
public ResponseEntity<Void> resetPassword(
|
||||||
|
@PathVariable String userId,
|
||||||
|
@Valid @RequestBody SetPasswordRequest request,
|
||||||
|
HttpServletRequest httpRequest) {
|
||||||
|
userRepository.setPassword(userId, passwordEncoder.encode(request.password()));
|
||||||
|
auditService.log("reset_password", AuditCategory.USER_MGMT, userId, null, AuditResult.SUCCESS, httpRequest);
|
||||||
|
return ResponseEntity.noContent().build();
|
||||||
|
}
|
||||||
|
|
||||||
|
public record CreateUserRequest(String username, String displayName, String email, String password) {}
|
||||||
|
public record UpdateUserRequest(String displayName, String email) {}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -0,0 +1,11 @@
|
|||||||
|
package com.cameleer3.server.app.dto;
|
||||||
|
|
||||||
|
import io.swagger.v3.oas.annotations.media.Schema;
|
||||||
|
|
||||||
|
@Schema(description = "Currently running database query")
|
||||||
|
public record ActiveQueryResponse(
|
||||||
|
@Schema(description = "Backend process ID") int pid,
|
||||||
|
@Schema(description = "Query duration in seconds") double durationSeconds,
|
||||||
|
@Schema(description = "Backend state (active, idle, etc.)") String state,
|
||||||
|
@Schema(description = "SQL query text") String query
|
||||||
|
) {}
|
||||||
@@ -0,0 +1,24 @@
|
|||||||
|
package com.cameleer3.server.app.dto;
|
||||||
|
|
||||||
|
import com.cameleer3.server.core.agent.AgentEventRecord;
|
||||||
|
import io.swagger.v3.oas.annotations.media.Schema;
|
||||||
|
import jakarta.validation.constraints.NotNull;
|
||||||
|
|
||||||
|
import java.time.Instant;
|
||||||
|
|
||||||
|
@Schema(description = "Agent lifecycle event")
|
||||||
|
public record AgentEventResponse(
|
||||||
|
@NotNull long id,
|
||||||
|
@NotNull String agentId,
|
||||||
|
@NotNull String appId,
|
||||||
|
@NotNull String eventType,
|
||||||
|
String detail,
|
||||||
|
@NotNull Instant timestamp
|
||||||
|
) {
|
||||||
|
public static AgentEventResponse from(AgentEventRecord record) {
|
||||||
|
return new AgentEventResponse(
|
||||||
|
record.id(), record.agentId(), record.appId(),
|
||||||
|
record.eventType(), record.detail(), record.timestamp()
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -4,24 +4,46 @@ import com.cameleer3.server.core.agent.AgentInfo;
|
|||||||
import io.swagger.v3.oas.annotations.media.Schema;
|
import io.swagger.v3.oas.annotations.media.Schema;
|
||||||
import jakarta.validation.constraints.NotNull;
|
import jakarta.validation.constraints.NotNull;
|
||||||
|
|
||||||
|
import java.time.Duration;
|
||||||
import java.time.Instant;
|
import java.time.Instant;
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
|
import java.util.Map;
|
||||||
|
|
||||||
@Schema(description = "Agent instance summary")
|
@Schema(description = "Agent instance summary with runtime metrics")
|
||||||
public record AgentInstanceResponse(
|
public record AgentInstanceResponse(
|
||||||
@NotNull String id,
|
@NotNull String id,
|
||||||
@NotNull String name,
|
@NotNull String name,
|
||||||
@NotNull String group,
|
@NotNull String application,
|
||||||
@NotNull String status,
|
@NotNull String status,
|
||||||
@NotNull List<String> routeIds,
|
@NotNull List<String> routeIds,
|
||||||
@NotNull Instant registeredAt,
|
@NotNull Instant registeredAt,
|
||||||
@NotNull Instant lastHeartbeat
|
@NotNull Instant lastHeartbeat,
|
||||||
|
String version,
|
||||||
|
Map<String, Object> capabilities,
|
||||||
|
double tps,
|
||||||
|
double errorRate,
|
||||||
|
int activeRoutes,
|
||||||
|
int totalRoutes,
|
||||||
|
long uptimeSeconds
|
||||||
) {
|
) {
|
||||||
public static AgentInstanceResponse from(AgentInfo info) {
|
public static AgentInstanceResponse from(AgentInfo info) {
|
||||||
|
long uptime = Duration.between(info.registeredAt(), Instant.now()).toSeconds();
|
||||||
return new AgentInstanceResponse(
|
return new AgentInstanceResponse(
|
||||||
info.id(), info.name(), info.group(),
|
info.id(), info.name(), info.application(),
|
||||||
info.state().name(), info.routeIds(),
|
info.state().name(), info.routeIds(),
|
||||||
info.registeredAt(), info.lastHeartbeat()
|
info.registeredAt(), info.lastHeartbeat(),
|
||||||
|
info.version(), info.capabilities(),
|
||||||
|
0.0, 0.0,
|
||||||
|
0, info.routeIds() != null ? info.routeIds().size() : 0,
|
||||||
|
uptime
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
public AgentInstanceResponse withMetrics(double tps, double errorRate, int activeRoutes) {
|
||||||
|
return new AgentInstanceResponse(
|
||||||
|
id, name, application, status, routeIds, registeredAt, lastHeartbeat,
|
||||||
|
version, capabilities,
|
||||||
|
tps, errorRate, activeRoutes, totalRoutes, uptimeSeconds
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -0,0 +1,9 @@
|
|||||||
|
package com.cameleer3.server.app.dto;
|
||||||
|
|
||||||
|
import java.util.List;
|
||||||
|
import java.util.Map;
|
||||||
|
import jakarta.validation.constraints.NotNull;
|
||||||
|
|
||||||
|
public record AgentMetricsResponse(
|
||||||
|
@NotNull Map<String, List<MetricBucket>> metrics
|
||||||
|
) {}
|
||||||
@@ -3,5 +3,5 @@ package com.cameleer3.server.app.dto;
|
|||||||
import io.swagger.v3.oas.annotations.media.Schema;
|
import io.swagger.v3.oas.annotations.media.Schema;
|
||||||
import jakarta.validation.constraints.NotNull;
|
import jakarta.validation.constraints.NotNull;
|
||||||
|
|
||||||
@Schema(description = "Refreshed access token")
|
@Schema(description = "Refreshed access and refresh tokens")
|
||||||
public record AgentRefreshResponse(@NotNull String accessToken) {}
|
public record AgentRefreshResponse(@NotNull String accessToken, @NotNull String refreshToken) {}
|
||||||
|
|||||||
@@ -10,7 +10,7 @@ import java.util.Map;
|
|||||||
public record AgentRegistrationRequest(
|
public record AgentRegistrationRequest(
|
||||||
@NotNull String agentId,
|
@NotNull String agentId,
|
||||||
@NotNull String name,
|
@NotNull String name,
|
||||||
@Schema(defaultValue = "default") String group,
|
@Schema(defaultValue = "default") String application,
|
||||||
String version,
|
String version,
|
||||||
List<String> routeIds,
|
List<String> routeIds,
|
||||||
Map<String, Object> capabilities
|
Map<String, Object> capabilities
|
||||||
|
|||||||
@@ -0,0 +1,12 @@
|
|||||||
|
package com.cameleer3.server.app.dto;
|
||||||
|
|
||||||
|
import io.swagger.v3.oas.annotations.media.Schema;
|
||||||
|
import jakarta.validation.constraints.NotNull;
|
||||||
|
|
||||||
|
@Schema(description = "Summary of an agent instance for sidebar display")
|
||||||
|
public record AgentSummary(
|
||||||
|
@NotNull String id,
|
||||||
|
@NotNull String name,
|
||||||
|
@NotNull String status,
|
||||||
|
@NotNull double tps
|
||||||
|
) {}
|
||||||
@@ -0,0 +1,16 @@
|
|||||||
|
package com.cameleer3.server.app.dto;
|
||||||
|
|
||||||
|
import io.swagger.v3.oas.annotations.media.Schema;
|
||||||
|
import jakarta.validation.constraints.NotNull;
|
||||||
|
|
||||||
|
import java.util.List;
|
||||||
|
|
||||||
|
@Schema(description = "Application catalog entry with routes and agents")
|
||||||
|
public record AppCatalogEntry(
|
||||||
|
@NotNull String appId,
|
||||||
|
@NotNull List<RouteSummary> routes,
|
||||||
|
@NotNull List<AgentSummary> agents,
|
||||||
|
@NotNull int agentCount,
|
||||||
|
@NotNull String health,
|
||||||
|
@NotNull long exchangeCount
|
||||||
|
) {}
|
||||||
@@ -0,0 +1,15 @@
|
|||||||
|
package com.cameleer3.server.app.dto;
|
||||||
|
|
||||||
|
import com.cameleer3.server.core.admin.AuditRecord;
|
||||||
|
import io.swagger.v3.oas.annotations.media.Schema;
|
||||||
|
|
||||||
|
import java.util.List;
|
||||||
|
|
||||||
|
@Schema(description = "Paginated audit log entries")
|
||||||
|
public record AuditLogPageResponse(
|
||||||
|
@Schema(description = "Audit log entries") List<AuditRecord> items,
|
||||||
|
@Schema(description = "Total number of matching entries") long totalCount,
|
||||||
|
@Schema(description = "Current page number (0-based)") int page,
|
||||||
|
@Schema(description = "Page size") int pageSize,
|
||||||
|
@Schema(description = "Total number of pages") int totalPages
|
||||||
|
) {}
|
||||||
@@ -0,0 +1,12 @@
|
|||||||
|
package com.cameleer3.server.app.dto;
|
||||||
|
|
||||||
|
import io.swagger.v3.oas.annotations.media.Schema;
|
||||||
|
|
||||||
|
@Schema(description = "HikariCP connection pool statistics")
|
||||||
|
public record ConnectionPoolResponse(
|
||||||
|
@Schema(description = "Number of currently active connections") int activeConnections,
|
||||||
|
@Schema(description = "Number of idle connections") int idleConnections,
|
||||||
|
@Schema(description = "Number of threads waiting for a connection") int pendingThreads,
|
||||||
|
@Schema(description = "Maximum wait time in milliseconds") long maxWaitMs,
|
||||||
|
@Schema(description = "Maximum pool size") int maxPoolSize
|
||||||
|
) {}
|
||||||
@@ -0,0 +1,12 @@
|
|||||||
|
package com.cameleer3.server.app.dto;
|
||||||
|
|
||||||
|
import io.swagger.v3.oas.annotations.media.Schema;
|
||||||
|
|
||||||
|
@Schema(description = "Database connection and version status")
|
||||||
|
public record DatabaseStatusResponse(
|
||||||
|
@Schema(description = "Whether the database is reachable") boolean connected,
|
||||||
|
@Schema(description = "PostgreSQL version string") String version,
|
||||||
|
@Schema(description = "Database host") String host,
|
||||||
|
@Schema(description = "Current schema search path") String schema,
|
||||||
|
@Schema(description = "Whether TimescaleDB extension is available") boolean timescaleDb
|
||||||
|
) {}
|
||||||
@@ -0,0 +1,14 @@
|
|||||||
|
package com.cameleer3.server.app.dto;
|
||||||
|
|
||||||
|
import io.swagger.v3.oas.annotations.media.Schema;
|
||||||
|
|
||||||
|
@Schema(description = "OpenSearch index information")
|
||||||
|
public record IndexInfoResponse(
|
||||||
|
@Schema(description = "Index name") String name,
|
||||||
|
@Schema(description = "Document count") long docCount,
|
||||||
|
@Schema(description = "Human-readable index size") String size,
|
||||||
|
@Schema(description = "Index size in bytes") long sizeBytes,
|
||||||
|
@Schema(description = "Index health status") String health,
|
||||||
|
@Schema(description = "Number of primary shards") int primaryShards,
|
||||||
|
@Schema(description = "Number of replica shards") int replicaShards
|
||||||
|
) {}
|
||||||
@@ -0,0 +1,16 @@
|
|||||||
|
package com.cameleer3.server.app.dto;
|
||||||
|
|
||||||
|
import io.swagger.v3.oas.annotations.media.Schema;
|
||||||
|
|
||||||
|
import java.util.List;
|
||||||
|
|
||||||
|
@Schema(description = "Paginated list of OpenSearch indices")
|
||||||
|
public record IndicesPageResponse(
|
||||||
|
@Schema(description = "Index list for current page") List<IndexInfoResponse> indices,
|
||||||
|
@Schema(description = "Total number of indices") long totalIndices,
|
||||||
|
@Schema(description = "Total document count across all indices") long totalDocs,
|
||||||
|
@Schema(description = "Human-readable total size") String totalSize,
|
||||||
|
@Schema(description = "Current page number (0-based)") int page,
|
||||||
|
@Schema(description = "Page size") int pageSize,
|
||||||
|
@Schema(description = "Total number of pages") int totalPages
|
||||||
|
) {}
|
||||||
@@ -0,0 +1,9 @@
|
|||||||
|
package com.cameleer3.server.app.dto;
|
||||||
|
|
||||||
|
import java.time.Instant;
|
||||||
|
import jakarta.validation.constraints.NotNull;
|
||||||
|
|
||||||
|
public record MetricBucket(
|
||||||
|
@NotNull Instant time,
|
||||||
|
double value
|
||||||
|
) {}
|
||||||
@@ -0,0 +1,12 @@
|
|||||||
|
package com.cameleer3.server.app.dto;
|
||||||
|
|
||||||
|
import io.swagger.v3.oas.annotations.media.Schema;
|
||||||
|
|
||||||
|
@Schema(description = "OpenSearch cluster status")
|
||||||
|
public record OpenSearchStatusResponse(
|
||||||
|
@Schema(description = "Whether the cluster is reachable") boolean reachable,
|
||||||
|
@Schema(description = "Cluster health status (GREEN, YELLOW, RED)") String clusterHealth,
|
||||||
|
@Schema(description = "OpenSearch version") String version,
|
||||||
|
@Schema(description = "Number of nodes in the cluster") int nodeCount,
|
||||||
|
@Schema(description = "OpenSearch host") String host
|
||||||
|
) {}
|
||||||
@@ -0,0 +1,13 @@
|
|||||||
|
package com.cameleer3.server.app.dto;
|
||||||
|
|
||||||
|
import io.swagger.v3.oas.annotations.media.Schema;
|
||||||
|
|
||||||
|
@Schema(description = "OpenSearch performance metrics")
|
||||||
|
public record PerformanceResponse(
|
||||||
|
@Schema(description = "Query cache hit rate (0.0-1.0)") double queryCacheHitRate,
|
||||||
|
@Schema(description = "Request cache hit rate (0.0-1.0)") double requestCacheHitRate,
|
||||||
|
@Schema(description = "Average search latency in milliseconds") double searchLatencyMs,
|
||||||
|
@Schema(description = "Average indexing latency in milliseconds") double indexingLatencyMs,
|
||||||
|
@Schema(description = "JVM heap used in bytes") long jvmHeapUsedBytes,
|
||||||
|
@Schema(description = "JVM heap max in bytes") long jvmHeapMaxBytes
|
||||||
|
) {}
|
||||||
@@ -0,0 +1,16 @@
|
|||||||
|
package com.cameleer3.server.app.dto;
|
||||||
|
|
||||||
|
import io.swagger.v3.oas.annotations.media.Schema;
|
||||||
|
|
||||||
|
import java.time.Instant;
|
||||||
|
|
||||||
|
@Schema(description = "Search indexing pipeline statistics")
|
||||||
|
public record PipelineStatsResponse(
|
||||||
|
@Schema(description = "Current queue depth") int queueDepth,
|
||||||
|
@Schema(description = "Maximum queue size") int maxQueueSize,
|
||||||
|
@Schema(description = "Number of failed indexing operations") long failedCount,
|
||||||
|
@Schema(description = "Number of successfully indexed documents") long indexedCount,
|
||||||
|
@Schema(description = "Debounce interval in milliseconds") long debounceMs,
|
||||||
|
@Schema(description = "Current indexing rate (docs/sec)") double indexingRate,
|
||||||
|
@Schema(description = "Timestamp of last indexed document") Instant lastIndexedAt
|
||||||
|
) {}
|
||||||
@@ -0,0 +1,15 @@
|
|||||||
|
package com.cameleer3.server.app.dto;
|
||||||
|
|
||||||
|
import jakarta.validation.constraints.NotNull;
|
||||||
|
|
||||||
|
public record ProcessorMetrics(
|
||||||
|
@NotNull String processorId,
|
||||||
|
@NotNull String processorType,
|
||||||
|
@NotNull String routeId,
|
||||||
|
@NotNull String appId,
|
||||||
|
long totalCount,
|
||||||
|
long failedCount,
|
||||||
|
double avgDurationMs,
|
||||||
|
double p99DurationMs,
|
||||||
|
double errorRate
|
||||||
|
) {}
|
||||||
@@ -0,0 +1,19 @@
|
|||||||
|
package com.cameleer3.server.app.dto;
|
||||||
|
|
||||||
|
import io.swagger.v3.oas.annotations.media.Schema;
|
||||||
|
import jakarta.validation.constraints.NotNull;
|
||||||
|
|
||||||
|
import java.util.List;
|
||||||
|
|
||||||
|
@Schema(description = "Aggregated route performance metrics")
|
||||||
|
public record RouteMetrics(
|
||||||
|
@NotNull String routeId,
|
||||||
|
@NotNull String appId,
|
||||||
|
@NotNull long exchangeCount,
|
||||||
|
@NotNull double successRate,
|
||||||
|
@NotNull double avgDurationMs,
|
||||||
|
@NotNull double p99DurationMs,
|
||||||
|
@NotNull double errorRate,
|
||||||
|
@NotNull double throughputPerSec,
|
||||||
|
@NotNull List<Double> sparkline
|
||||||
|
) {}
|
||||||
@@ -0,0 +1,13 @@
|
|||||||
|
package com.cameleer3.server.app.dto;
|
||||||
|
|
||||||
|
import io.swagger.v3.oas.annotations.media.Schema;
|
||||||
|
import jakarta.validation.constraints.NotNull;
|
||||||
|
|
||||||
|
import java.time.Instant;
|
||||||
|
|
||||||
|
@Schema(description = "Summary of a route within an application")
|
||||||
|
public record RouteSummary(
|
||||||
|
@NotNull String routeId,
|
||||||
|
@NotNull long exchangeCount,
|
||||||
|
Instant lastSeen
|
||||||
|
) {}
|
||||||
@@ -0,0 +1,7 @@
|
|||||||
|
package com.cameleer3.server.app.dto;
|
||||||
|
|
||||||
|
import jakarta.validation.constraints.NotBlank;
|
||||||
|
|
||||||
|
public record SetPasswordRequest(
|
||||||
|
@NotBlank String password
|
||||||
|
) {}
|
||||||
@@ -0,0 +1,13 @@
|
|||||||
|
package com.cameleer3.server.app.dto;
|
||||||
|
|
||||||
|
import io.swagger.v3.oas.annotations.media.Schema;
|
||||||
|
|
||||||
|
/**
 * Size and row-count information for a single database table.
 *
 * <p>Carries both human-readable size strings (for display) and raw byte
 * counts (for sorting/aggregation).</p>
 *
 * @param tableName      table name
 * @param rowCount       approximate row count
 * @param dataSize       human-readable data size
 * @param indexSize      human-readable index size
 * @param dataSizeBytes  data size in bytes
 * @param indexSizeBytes index size in bytes
 */
@Schema(description = "Table size and row count information")
public record TableSizeResponse(
@Schema(description = "Table name") String tableName,
@Schema(description = "Approximate row count") long rowCount,
@Schema(description = "Human-readable data size") String dataSize,
@Schema(description = "Human-readable index size") String indexSize,
@Schema(description = "Data size in bytes") long dataSizeBytes,
@Schema(description = "Index size in bytes") long indexSizeBytes
) {}
|
||||||
@@ -0,0 +1,144 @@
|
|||||||
|
package com.cameleer3.server.app.dto;
|
||||||
|
|
||||||
|
import com.cameleer3.server.core.admin.ThresholdConfig;
|
||||||
|
import io.swagger.v3.oas.annotations.media.Schema;
|
||||||
|
import jakarta.validation.Valid;
|
||||||
|
import jakarta.validation.constraints.Max;
|
||||||
|
import jakarta.validation.constraints.Min;
|
||||||
|
import jakarta.validation.constraints.NotBlank;
|
||||||
|
import jakarta.validation.constraints.NotNull;
|
||||||
|
import jakarta.validation.constraints.Positive;
|
||||||
|
|
||||||
|
import java.util.ArrayList;
|
||||||
|
import java.util.List;
|
||||||
|
import java.util.Map;
|
||||||
|
|
||||||
|
@Schema(description = "Threshold configuration for admin monitoring")
|
||||||
|
public record ThresholdConfigRequest(
|
||||||
|
@Valid @NotNull DatabaseThresholdsRequest database,
|
||||||
|
@Valid @NotNull OpenSearchThresholdsRequest opensearch
|
||||||
|
) {
|
||||||
|
|
||||||
|
@Schema(description = "Database monitoring thresholds")
|
||||||
|
public record DatabaseThresholdsRequest(
|
||||||
|
@Min(0) @Max(100)
|
||||||
|
@Schema(description = "Connection pool usage warning threshold (percentage)")
|
||||||
|
int connectionPoolWarning,
|
||||||
|
|
||||||
|
@Min(0) @Max(100)
|
||||||
|
@Schema(description = "Connection pool usage critical threshold (percentage)")
|
||||||
|
int connectionPoolCritical,
|
||||||
|
|
||||||
|
@Positive
|
||||||
|
@Schema(description = "Query duration warning threshold (seconds)")
|
||||||
|
double queryDurationWarning,
|
||||||
|
|
||||||
|
@Positive
|
||||||
|
@Schema(description = "Query duration critical threshold (seconds)")
|
||||||
|
double queryDurationCritical
|
||||||
|
) {}
|
||||||
|
|
||||||
|
@Schema(description = "OpenSearch monitoring thresholds")
|
||||||
|
public record OpenSearchThresholdsRequest(
|
||||||
|
@NotBlank
|
||||||
|
@Schema(description = "Cluster health warning threshold (GREEN, YELLOW, RED)")
|
||||||
|
String clusterHealthWarning,
|
||||||
|
|
||||||
|
@NotBlank
|
||||||
|
@Schema(description = "Cluster health critical threshold (GREEN, YELLOW, RED)")
|
||||||
|
String clusterHealthCritical,
|
||||||
|
|
||||||
|
@Min(0)
|
||||||
|
@Schema(description = "Queue depth warning threshold")
|
||||||
|
int queueDepthWarning,
|
||||||
|
|
||||||
|
@Min(0)
|
||||||
|
@Schema(description = "Queue depth critical threshold")
|
||||||
|
int queueDepthCritical,
|
||||||
|
|
||||||
|
@Min(0) @Max(100)
|
||||||
|
@Schema(description = "JVM heap usage warning threshold (percentage)")
|
||||||
|
int jvmHeapWarning,
|
||||||
|
|
||||||
|
@Min(0) @Max(100)
|
||||||
|
@Schema(description = "JVM heap usage critical threshold (percentage)")
|
||||||
|
int jvmHeapCritical,
|
||||||
|
|
||||||
|
@Min(0)
|
||||||
|
@Schema(description = "Failed document count warning threshold")
|
||||||
|
int failedDocsWarning,
|
||||||
|
|
||||||
|
@Min(0)
|
||||||
|
@Schema(description = "Failed document count critical threshold")
|
||||||
|
int failedDocsCritical
|
||||||
|
) {}
|
||||||
|
|
||||||
|
/** Convert to core domain model */
|
||||||
|
public ThresholdConfig toConfig() {
|
||||||
|
return new ThresholdConfig(
|
||||||
|
new ThresholdConfig.DatabaseThresholds(
|
||||||
|
database.connectionPoolWarning(),
|
||||||
|
database.connectionPoolCritical(),
|
||||||
|
database.queryDurationWarning(),
|
||||||
|
database.queryDurationCritical()
|
||||||
|
),
|
||||||
|
new ThresholdConfig.OpenSearchThresholds(
|
||||||
|
opensearch.clusterHealthWarning(),
|
||||||
|
opensearch.clusterHealthCritical(),
|
||||||
|
opensearch.queueDepthWarning(),
|
||||||
|
opensearch.queueDepthCritical(),
|
||||||
|
opensearch.jvmHeapWarning(),
|
||||||
|
opensearch.jvmHeapCritical(),
|
||||||
|
opensearch.failedDocsWarning(),
|
||||||
|
opensearch.failedDocsCritical()
|
||||||
|
)
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
/** Validate semantic constraints beyond annotation-level validation */
|
||||||
|
public List<String> validate() {
|
||||||
|
List<String> errors = new ArrayList<>();
|
||||||
|
|
||||||
|
if (database != null) {
|
||||||
|
if (database.connectionPoolWarning() > database.connectionPoolCritical()) {
|
||||||
|
errors.add("database.connectionPoolWarning must be <= connectionPoolCritical");
|
||||||
|
}
|
||||||
|
if (database.queryDurationWarning() > database.queryDurationCritical()) {
|
||||||
|
errors.add("database.queryDurationWarning must be <= queryDurationCritical");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (opensearch != null) {
|
||||||
|
if (opensearch.queueDepthWarning() > opensearch.queueDepthCritical()) {
|
||||||
|
errors.add("opensearch.queueDepthWarning must be <= queueDepthCritical");
|
||||||
|
}
|
||||||
|
if (opensearch.jvmHeapWarning() > opensearch.jvmHeapCritical()) {
|
||||||
|
errors.add("opensearch.jvmHeapWarning must be <= jvmHeapCritical");
|
||||||
|
}
|
||||||
|
if (opensearch.failedDocsWarning() > opensearch.failedDocsCritical()) {
|
||||||
|
errors.add("opensearch.failedDocsWarning must be <= failedDocsCritical");
|
||||||
|
}
|
||||||
|
// Validate health severity ordering: GREEN < YELLOW < RED
|
||||||
|
int warningSeverity = healthSeverity(opensearch.clusterHealthWarning());
|
||||||
|
int criticalSeverity = healthSeverity(opensearch.clusterHealthCritical());
|
||||||
|
if (warningSeverity < 0) {
|
||||||
|
errors.add("opensearch.clusterHealthWarning must be GREEN, YELLOW, or RED");
|
||||||
|
}
|
||||||
|
if (criticalSeverity < 0) {
|
||||||
|
errors.add("opensearch.clusterHealthCritical must be GREEN, YELLOW, or RED");
|
||||||
|
}
|
||||||
|
if (warningSeverity >= 0 && criticalSeverity >= 0 && warningSeverity > criticalSeverity) {
|
||||||
|
errors.add("opensearch.clusterHealthWarning severity must be <= clusterHealthCritical (GREEN < YELLOW < RED)");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return errors;
|
||||||
|
}
|
||||||
|
|
||||||
|
private static final Map<String, Integer> HEALTH_SEVERITY =
|
||||||
|
Map.of("GREEN", 0, "YELLOW", 1, "RED", 2);
|
||||||
|
|
||||||
|
private static int healthSeverity(String health) {
|
||||||
|
return HEALTH_SEVERITY.getOrDefault(health != null ? health.toUpperCase() : "", -1);
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -0,0 +1,253 @@
|
|||||||
|
package com.cameleer3.server.app.rbac;
|
||||||
|
|
||||||
|
import com.cameleer3.server.core.rbac.*;
|
||||||
|
import com.cameleer3.server.core.security.UserInfo;
|
||||||
|
import com.cameleer3.server.core.security.UserRepository;
|
||||||
|
import org.springframework.jdbc.core.JdbcTemplate;
|
||||||
|
import org.springframework.stereotype.Service;
|
||||||
|
|
||||||
|
import java.util.*;
|
||||||
|
|
||||||
|
@Service
public class RbacServiceImpl implements RbacService {

    private final JdbcTemplate jdbc;
    private final UserRepository userRepository;
    private final GroupRepository groupRepository;
    private final RoleRepository roleRepository;

    public RbacServiceImpl(JdbcTemplate jdbc, UserRepository userRepository,
                           GroupRepository groupRepository, RoleRepository roleRepository) {
        this.jdbc = jdbc;
        this.userRepository = userRepository;
        this.groupRepository = groupRepository;
        this.roleRepository = roleRepository;
    }

    /**
     * Lists every known user with full direct/effective role and group detail.
     *
     * <p>NOTE(review): buildUserDetail issues several queries per user (N+1
     * pattern); acceptable for small user counts — confirm scale expectations.</p>
     */
    @Override
    public List<UserDetail> listUsers() {
        return userRepository.findAll().stream()
                .map(this::buildUserDetail)
                .toList();
    }

    /**
     * Fetches one user with full role/group detail.
     *
     * @return the detail, or null when no user with that id exists
     */
    @Override
    public UserDetail getUser(String userId) {
        UserInfo user = userRepository.findById(userId).orElse(null);
        if (user == null) return null;
        return buildUserDetail(user);
    }

    /** Assembles direct and effective role/group views for a user into a UserDetail. */
    private UserDetail buildUserDetail(UserInfo user) {
        List<RoleSummary> directRoles = getDirectRolesForUser(user.userId());
        List<GroupSummary> directGroups = getDirectGroupsForUser(user.userId());
        List<RoleSummary> effectiveRoles = getEffectiveRolesForUser(user.userId());
        List<GroupSummary> effectiveGroups = getEffectiveGroupsForUser(user.userId());
        return new UserDetail(user.userId(), user.provider(), user.email(),
                user.displayName(), user.createdAt(),
                directRoles, directGroups, effectiveRoles, effectiveGroups);
    }

    /** Grants a role directly to a user; idempotent via ON CONFLICT DO NOTHING. */
    @Override
    public void assignRoleToUser(String userId, UUID roleId) {
        jdbc.update("INSERT INTO user_roles (user_id, role_id) VALUES (?, ?) ON CONFLICT DO NOTHING",
                userId, roleId);
    }

    /** Revokes a direct role assignment from a user. */
    @Override
    public void removeRoleFromUser(String userId, UUID roleId) {
        jdbc.update("DELETE FROM user_roles WHERE user_id = ? AND role_id = ?", userId, roleId);
    }

    /** Adds a user to a group; idempotent via ON CONFLICT DO NOTHING. */
    @Override
    public void addUserToGroup(String userId, UUID groupId) {
        jdbc.update("INSERT INTO user_groups (user_id, group_id) VALUES (?, ?) ON CONFLICT DO NOTHING",
                userId, groupId);
    }

    /** Removes a user's direct membership in a group. */
    @Override
    public void removeUserFromGroup(String userId, UUID groupId) {
        jdbc.update("DELETE FROM user_groups WHERE user_id = ? AND group_id = ?", userId, groupId);
    }

    /**
     * Computes the effective roles for a user: direct assignments plus roles
     * attached to any effective group (direct groups and their ancestors).
     *
     * <p>Direct roles win on conflict: putIfAbsent keeps the first entry for a
     * role id, and direct entries are inserted before group-derived ones.
     * LinkedHashMap preserves that insertion order in the result.</p>
     *
     * <p>NOTE(review): one query per effective group (N+1) — fine for shallow
     * hierarchies; confirm expected group-tree size.</p>
     */
    @Override
    public List<RoleSummary> getEffectiveRolesForUser(String userId) {
        List<RoleSummary> direct = getDirectRolesForUser(userId);

        List<GroupSummary> effectiveGroups = getEffectiveGroupsForUser(userId);
        Map<UUID, RoleSummary> roleMap = new LinkedHashMap<>();
        for (RoleSummary r : direct) {
            roleMap.put(r.id(), r);
        }
        for (GroupSummary group : effectiveGroups) {
            // Roles granted via this group; the source group's name is recorded
            // as the fourth RoleSummary component for provenance display.
            List<RoleSummary> groupRoles = jdbc.query("""
                SELECT r.id, r.name, r.system FROM group_roles gr
                JOIN roles r ON r.id = gr.role_id WHERE gr.group_id = ?
                """, (rs, rowNum) -> new RoleSummary(
                    rs.getObject("id", UUID.class),
                    rs.getString("name"),
                    rs.getBoolean("system"),
                    group.name()
            ), group.id());
            for (RoleSummary r : groupRoles) {
                roleMap.putIfAbsent(r.id(), r);
            }
        }
        return new ArrayList<>(roleMap.values());
    }

    /**
     * Computes the effective groups for a user: each direct group plus its
     * ancestor chain, deduplicated across overlapping chains.
     */
    @Override
    public List<GroupSummary> getEffectiveGroupsForUser(String userId) {
        List<GroupSummary> directGroups = getDirectGroupsForUser(userId);
        Set<UUID> visited = new LinkedHashSet<>();
        List<GroupSummary> all = new ArrayList<>();
        for (GroupSummary g : directGroups) {
            collectAncestors(g.id(), visited, all);
        }
        return all;
    }

    /**
     * Walks up the parent chain from groupId, appending each group to result.
     * The visited set both deduplicates across chains and guards against
     * parent-link cycles in the data.
     */
    private void collectAncestors(UUID groupId, Set<UUID> visited, List<GroupSummary> result) {
        if (!visited.add(groupId)) return;
        // Fetch the group row together with its parent id in one query;
        // the Object[] pair is {GroupSummary, parent UUID or null}.
        var rows = jdbc.query("SELECT id, name, parent_group_id FROM groups WHERE id = ?",
                (rs, rowNum) -> new Object[]{
                        new GroupSummary(rs.getObject("id", UUID.class), rs.getString("name")),
                        rs.getObject("parent_group_id", UUID.class)
                }, groupId);
        if (rows.isEmpty()) return;
        result.add((GroupSummary) rows.get(0)[0]);
        UUID parentId = (UUID) rows.get(0)[1];
        if (parentId != null) {
            collectAncestors(parentId, visited, result);
        }
    }

    /**
     * Computes the effective roles for a group: its direct role grants plus
     * roles inherited from ancestor groups. Direct grants are tagged "direct";
     * inherited ones carry the ancestor group's name. Direct grants win on
     * duplicate role ids (putIfAbsent after direct insertion).
     */
    @Override
    public List<RoleSummary> getEffectiveRolesForGroup(UUID groupId) {
        List<RoleSummary> direct = jdbc.query("""
            SELECT r.id, r.name, r.system FROM group_roles gr
            JOIN roles r ON r.id = gr.role_id WHERE gr.group_id = ?
            """, (rs, rowNum) -> new RoleSummary(rs.getObject("id", UUID.class),
                rs.getString("name"), rs.getBoolean("system"), "direct"), groupId);

        Map<UUID, RoleSummary> roleMap = new LinkedHashMap<>();
        for (RoleSummary r : direct) roleMap.put(r.id(), r);

        // findAncestorChain may include the group itself; skip it since its
        // direct roles were already collected above.
        List<GroupSummary> ancestors = groupRepository.findAncestorChain(groupId);
        for (GroupSummary ancestor : ancestors) {
            if (ancestor.id().equals(groupId)) continue;
            List<RoleSummary> parentRoles = jdbc.query("""
                SELECT r.id, r.name, r.system FROM group_roles gr
                JOIN roles r ON r.id = gr.role_id WHERE gr.group_id = ?
                """, (rs, rowNum) -> new RoleSummary(rs.getObject("id", UUID.class),
                    rs.getString("name"), rs.getBoolean("system"),
                    ancestor.name()), ancestor.id());
            for (RoleSummary r : parentRoles) roleMap.putIfAbsent(r.id(), r);
        }
        return new ArrayList<>(roleMap.values());
    }

    /**
     * Computes every user who effectively holds a role: users assigned
     * directly, plus members of any group that has the role — including
     * members of descendant groups (role grants inherit downward).
     * Deduplicated by user id, direct assignees listed first.
     */
    @Override
    public List<UserSummary> getEffectivePrincipalsForRole(UUID roleId) {
        Set<String> seen = new LinkedHashSet<>();
        List<UserSummary> result = new ArrayList<>();

        List<UserSummary> direct = jdbc.query("""
            SELECT u.user_id, u.display_name, u.provider FROM user_roles ur
            JOIN users u ON u.user_id = ur.user_id WHERE ur.role_id = ?
            """, (rs, rowNum) -> new UserSummary(rs.getString("user_id"),
                rs.getString("display_name"), rs.getString("provider")), roleId);
        for (UserSummary u : direct) {
            if (seen.add(u.userId())) result.add(u);
        }

        List<UUID> groupsWithRole = jdbc.query(
                "SELECT group_id FROM group_roles WHERE role_id = ?",
                (rs, rowNum) -> rs.getObject("group_id", UUID.class), roleId);

        // Expand to all descendant groups of each granted group.
        Set<UUID> allGroups = new LinkedHashSet<>(groupsWithRole);
        for (UUID gid : groupsWithRole) {
            collectDescendants(gid, allGroups);
        }
        for (UUID gid : allGroups) {
            List<UserSummary> members = jdbc.query("""
                SELECT u.user_id, u.display_name, u.provider FROM user_groups ug
                JOIN users u ON u.user_id = ug.user_id WHERE ug.group_id = ?
                """, (rs, rowNum) -> new UserSummary(rs.getString("user_id"),
                    rs.getString("display_name"), rs.getString("provider")), gid);
            for (UserSummary u : members) {
                if (seen.add(u.userId())) result.add(u);
            }
        }
        return result;
    }

    /**
     * Recursively adds all descendant group ids of groupId to result.
     * The result set doubles as the visited guard (recursion only on newly
     * added ids), which also protects against cyclic parent links.
     */
    private void collectDescendants(UUID groupId, Set<UUID> result) {
        List<UUID> children = jdbc.query(
                "SELECT id FROM groups WHERE parent_group_id = ?",
                (rs, rowNum) -> rs.getObject("id", UUID.class), groupId);
        for (UUID child : children) {
            if (result.add(child)) {
                collectDescendants(child, result);
            }
        }
    }

    /** Returns the names of the user's effective roles flagged as system roles. */
    @Override
    public List<String> getSystemRoleNames(String userId) {
        return getEffectiveRolesForUser(userId).stream()
                .filter(RoleSummary::system)
                .map(RoleSummary::name)
                .toList();
    }

    /**
     * Aggregates RBAC counts: users, users with at least one direct role,
     * groups, roles, and the maximum group-hierarchy depth.
     *
     * <p>NOTE(review): queryForObject(..., Integer.class) is declared nullable;
     * COUNT(*) always yields a row in practice, but the unboxing to int would
     * NPE if it ever returned null — confirm acceptable.</p>
     */
    @Override
    public RbacStats getStats() {
        int userCount = jdbc.queryForObject("SELECT COUNT(*) FROM users", Integer.class);
        int activeUserCount = jdbc.queryForObject(
                "SELECT COUNT(DISTINCT user_id) FROM user_roles", Integer.class);
        int groupCount = jdbc.queryForObject("SELECT COUNT(*) FROM groups", Integer.class);
        int roleCount = jdbc.queryForObject("SELECT COUNT(*) FROM roles", Integer.class);
        int maxDepth = computeMaxGroupDepth();
        return new RbacStats(userCount, activeUserCount, groupCount, maxDepth, roleCount);
    }

    /** Maximum depth across all root groups (groups with no parent); 0 if none. */
    private int computeMaxGroupDepth() {
        List<UUID> roots = jdbc.query(
                "SELECT id FROM groups WHERE parent_group_id IS NULL",
                (rs, rowNum) -> rs.getObject("id", UUID.class));
        int max = 0;
        for (UUID root : roots) {
            max = Math.max(max, measureDepth(root, 1));
        }
        return max;
    }

    /**
     * Depth of the subtree rooted at groupId, counting this node as
     * currentDepth. One query per node; cycles cannot be reached from a
     * NULL-parent root, so no visited guard is needed here.
     */
    private int measureDepth(UUID groupId, int currentDepth) {
        List<UUID> children = jdbc.query(
                "SELECT id FROM groups WHERE parent_group_id = ?",
                (rs, rowNum) -> rs.getObject("id", UUID.class), groupId);
        if (children.isEmpty()) return currentDepth;
        int max = currentDepth;
        for (UUID child : children) {
            max = Math.max(max, measureDepth(child, currentDepth + 1));
        }
        return max;
    }

    /** Roles assigned directly to the user; provenance tagged "direct". */
    private List<RoleSummary> getDirectRolesForUser(String userId) {
        return jdbc.query("""
            SELECT r.id, r.name, r.system FROM user_roles ur
            JOIN roles r ON r.id = ur.role_id WHERE ur.user_id = ?
            """, (rs, rowNum) -> new RoleSummary(rs.getObject("id", UUID.class),
                rs.getString("name"), rs.getBoolean("system"), "direct"), userId);
    }

    /** Groups the user is a direct member of. */
    private List<GroupSummary> getDirectGroupsForUser(String userId) {
        return jdbc.query("""
            SELECT g.id, g.name FROM user_groups ug
            JOIN groups g ON g.id = ug.group_id WHERE ug.user_id = ?
            """, (rs, rowNum) -> new GroupSummary(rs.getObject("id", UUID.class),
                rs.getString("name")), userId);
    }
}
|
||||||
@@ -17,6 +17,7 @@ import org.opensearch.client.opensearch.core.search.Hit;
|
|||||||
import org.opensearch.client.opensearch.indices.*;
|
import org.opensearch.client.opensearch.indices.*;
|
||||||
import org.slf4j.Logger;
|
import org.slf4j.Logger;
|
||||||
import org.slf4j.LoggerFactory;
|
import org.slf4j.LoggerFactory;
|
||||||
|
import org.springframework.beans.factory.annotation.Value;
|
||||||
import org.springframework.stereotype.Repository;
|
import org.springframework.stereotype.Repository;
|
||||||
|
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
@@ -30,29 +31,36 @@ import java.util.stream.Collectors;
|
|||||||
public class OpenSearchIndex implements SearchIndex {
|
public class OpenSearchIndex implements SearchIndex {
|
||||||
|
|
||||||
private static final Logger log = LoggerFactory.getLogger(OpenSearchIndex.class);
|
private static final Logger log = LoggerFactory.getLogger(OpenSearchIndex.class);
|
||||||
private static final String INDEX_PREFIX = "executions-";
|
|
||||||
private static final DateTimeFormatter DAY_FMT = DateTimeFormatter.ofPattern("yyyy-MM-dd")
|
private static final DateTimeFormatter DAY_FMT = DateTimeFormatter.ofPattern("yyyy-MM-dd")
|
||||||
.withZone(ZoneOffset.UTC);
|
.withZone(ZoneOffset.UTC);
|
||||||
|
|
||||||
private final OpenSearchClient client;
|
private final OpenSearchClient client;
|
||||||
|
private final String indexPrefix;
|
||||||
|
|
||||||
public OpenSearchIndex(OpenSearchClient client) {
|
public OpenSearchIndex(OpenSearchClient client,
|
||||||
|
@Value("${opensearch.index-prefix:executions-}") String indexPrefix) {
|
||||||
this.client = client;
|
this.client = client;
|
||||||
|
this.indexPrefix = indexPrefix;
|
||||||
}
|
}
|
||||||
|
|
||||||
@PostConstruct
|
@PostConstruct
|
||||||
void ensureIndexTemplate() {
|
void ensureIndexTemplate() {
|
||||||
|
String templateName = indexPrefix + "template";
|
||||||
|
String indexPattern = indexPrefix + "*";
|
||||||
try {
|
try {
|
||||||
boolean exists = client.indices().existsIndexTemplate(
|
boolean exists = client.indices().existsIndexTemplate(
|
||||||
ExistsIndexTemplateRequest.of(b -> b.name("executions-template"))).value();
|
ExistsIndexTemplateRequest.of(b -> b.name(templateName))).value();
|
||||||
if (!exists) {
|
if (!exists) {
|
||||||
client.indices().putIndexTemplate(PutIndexTemplateRequest.of(b -> b
|
client.indices().putIndexTemplate(PutIndexTemplateRequest.of(b -> b
|
||||||
.name("executions-template")
|
.name(templateName)
|
||||||
.indexPatterns(List.of("executions-*"))
|
.indexPatterns(List.of(indexPattern))
|
||||||
.template(t -> t
|
.template(t -> t
|
||||||
.settings(s -> s
|
.settings(s -> s
|
||||||
.numberOfShards("3")
|
.numberOfShards("3")
|
||||||
.numberOfReplicas("1")))));
|
.numberOfReplicas("1"))
|
||||||
|
.mappings(m -> m
|
||||||
|
.properties("processors", p -> p
|
||||||
|
.nested(n -> n))))));
|
||||||
log.info("OpenSearch index template created");
|
log.info("OpenSearch index template created");
|
||||||
}
|
}
|
||||||
} catch (IOException e) {
|
} catch (IOException e) {
|
||||||
@@ -62,7 +70,7 @@ public class OpenSearchIndex implements SearchIndex {
|
|||||||
|
|
||||||
@Override
|
@Override
|
||||||
public void index(ExecutionDocument doc) {
|
public void index(ExecutionDocument doc) {
|
||||||
String indexName = INDEX_PREFIX + DAY_FMT.format(doc.startTime());
|
String indexName = indexPrefix + DAY_FMT.format(doc.startTime());
|
||||||
try {
|
try {
|
||||||
client.index(IndexRequest.of(b -> b
|
client.index(IndexRequest.of(b -> b
|
||||||
.index(indexName)
|
.index(indexName)
|
||||||
@@ -95,7 +103,7 @@ public class OpenSearchIndex implements SearchIndex {
|
|||||||
public long count(SearchRequest request) {
|
public long count(SearchRequest request) {
|
||||||
try {
|
try {
|
||||||
var countReq = CountRequest.of(b -> b
|
var countReq = CountRequest.of(b -> b
|
||||||
.index(INDEX_PREFIX + "*")
|
.index(indexPrefix + "*")
|
||||||
.query(buildQuery(request)));
|
.query(buildQuery(request)));
|
||||||
return client.count(countReq).count();
|
return client.count(countReq).count();
|
||||||
} catch (IOException e) {
|
} catch (IOException e) {
|
||||||
@@ -108,7 +116,7 @@ public class OpenSearchIndex implements SearchIndex {
|
|||||||
public void delete(String executionId) {
|
public void delete(String executionId) {
|
||||||
try {
|
try {
|
||||||
client.deleteByQuery(DeleteByQueryRequest.of(b -> b
|
client.deleteByQuery(DeleteByQueryRequest.of(b -> b
|
||||||
.index(List.of(INDEX_PREFIX + "*"))
|
.index(List.of(indexPrefix + "*"))
|
||||||
.query(Query.of(q -> q.term(t -> t
|
.query(Query.of(q -> q.term(t -> t
|
||||||
.field("execution_id")
|
.field("execution_id")
|
||||||
.value(FieldValue.of(executionId)))))));
|
.value(FieldValue.of(executionId)))))));
|
||||||
@@ -120,8 +128,9 @@ public class OpenSearchIndex implements SearchIndex {
|
|||||||
private org.opensearch.client.opensearch.core.SearchRequest buildSearchRequest(
|
private org.opensearch.client.opensearch.core.SearchRequest buildSearchRequest(
|
||||||
SearchRequest request, int size) {
|
SearchRequest request, int size) {
|
||||||
return org.opensearch.client.opensearch.core.SearchRequest.of(b -> {
|
return org.opensearch.client.opensearch.core.SearchRequest.of(b -> {
|
||||||
b.index(INDEX_PREFIX + "*")
|
b.index(indexPrefix + "*")
|
||||||
.query(buildQuery(request))
|
.query(buildQuery(request))
|
||||||
|
.trackTotalHits(th -> th.enabled(true))
|
||||||
.size(size)
|
.size(size)
|
||||||
.from(request.offset())
|
.from(request.offset())
|
||||||
.sort(s -> s.field(f -> f
|
.sort(s -> s.field(f -> f
|
||||||
@@ -148,27 +157,32 @@ public class OpenSearchIndex implements SearchIndex {
|
|||||||
})));
|
})));
|
||||||
}
|
}
|
||||||
|
|
||||||
// Keyword filters
|
// Keyword filters (use .keyword sub-field for exact matching on dynamically mapped text fields)
|
||||||
if (request.status() != null)
|
if (request.status() != null)
|
||||||
filter.add(termQuery("status", request.status()));
|
filter.add(termQuery("status.keyword", request.status()));
|
||||||
if (request.routeId() != null)
|
if (request.routeId() != null)
|
||||||
filter.add(termQuery("route_id", request.routeId()));
|
filter.add(termQuery("route_id.keyword", request.routeId()));
|
||||||
if (request.agentId() != null)
|
if (request.agentId() != null)
|
||||||
filter.add(termQuery("agent_id", request.agentId()));
|
filter.add(termQuery("agent_id.keyword", request.agentId()));
|
||||||
if (request.correlationId() != null)
|
if (request.correlationId() != null)
|
||||||
filter.add(termQuery("correlation_id", request.correlationId()));
|
filter.add(termQuery("correlation_id.keyword", request.correlationId()));
|
||||||
|
|
||||||
// Full-text search across all fields + nested processor fields
|
// Full-text search across all fields + nested processor fields
|
||||||
if (request.text() != null && !request.text().isBlank()) {
|
if (request.text() != null && !request.text().isBlank()) {
|
||||||
String text = request.text();
|
String text = request.text();
|
||||||
|
String wildcard = "*" + text.toLowerCase() + "*";
|
||||||
List<Query> textQueries = new ArrayList<>();
|
List<Query> textQueries = new ArrayList<>();
|
||||||
|
|
||||||
// Search top-level text fields
|
// Search top-level text fields (analyzed match + wildcard for substring)
|
||||||
textQueries.add(Query.of(q -> q.multiMatch(m -> m
|
textQueries.add(Query.of(q -> q.multiMatch(m -> m
|
||||||
.query(text)
|
.query(text)
|
||||||
.fields("error_message", "error_stacktrace"))));
|
.fields("error_message", "error_stacktrace"))));
|
||||||
|
textQueries.add(Query.of(q -> q.wildcard(w -> w
|
||||||
|
.field("error_message").value(wildcard).caseInsensitive(true))));
|
||||||
|
textQueries.add(Query.of(q -> q.wildcard(w -> w
|
||||||
|
.field("error_stacktrace").value(wildcard).caseInsensitive(true))));
|
||||||
|
|
||||||
// Search nested processor fields
|
// Search nested processor fields (analyzed match + wildcard)
|
||||||
textQueries.add(Query.of(q -> q.nested(n -> n
|
textQueries.add(Query.of(q -> q.nested(n -> n
|
||||||
.path("processors")
|
.path("processors")
|
||||||
.query(nq -> nq.multiMatch(m -> m
|
.query(nq -> nq.multiMatch(m -> m
|
||||||
@@ -176,6 +190,14 @@ public class OpenSearchIndex implements SearchIndex {
|
|||||||
.fields("processors.input_body", "processors.output_body",
|
.fields("processors.input_body", "processors.output_body",
|
||||||
"processors.input_headers", "processors.output_headers",
|
"processors.input_headers", "processors.output_headers",
|
||||||
"processors.error_message", "processors.error_stacktrace"))))));
|
"processors.error_message", "processors.error_stacktrace"))))));
|
||||||
|
textQueries.add(Query.of(q -> q.nested(n -> n
|
||||||
|
.path("processors")
|
||||||
|
.query(nq -> nq.bool(nb -> nb.should(
|
||||||
|
wildcardQuery("processors.input_body", wildcard),
|
||||||
|
wildcardQuery("processors.output_body", wildcard),
|
||||||
|
wildcardQuery("processors.input_headers", wildcard),
|
||||||
|
wildcardQuery("processors.output_headers", wildcard)
|
||||||
|
).minimumShouldMatch("1"))))));
|
||||||
|
|
||||||
// Also try keyword fields for exact matches
|
// Also try keyword fields for exact matches
|
||||||
textQueries.add(Query.of(q -> q.multiMatch(m -> m
|
textQueries.add(Query.of(q -> q.multiMatch(m -> m
|
||||||
@@ -185,32 +207,51 @@ public class OpenSearchIndex implements SearchIndex {
|
|||||||
must.add(Query.of(q -> q.bool(b -> b.should(textQueries).minimumShouldMatch("1"))));
|
must.add(Query.of(q -> q.bool(b -> b.should(textQueries).minimumShouldMatch("1"))));
|
||||||
}
|
}
|
||||||
|
|
||||||
// Scoped text searches
|
// Scoped text searches (multiMatch + wildcard fallback for substring matching)
|
||||||
if (request.textInBody() != null && !request.textInBody().isBlank()) {
|
if (request.textInBody() != null && !request.textInBody().isBlank()) {
|
||||||
|
String bodyText = request.textInBody();
|
||||||
|
String bodyWildcard = "*" + bodyText.toLowerCase() + "*";
|
||||||
must.add(Query.of(q -> q.nested(n -> n
|
must.add(Query.of(q -> q.nested(n -> n
|
||||||
.path("processors")
|
.path("processors")
|
||||||
.query(nq -> nq.multiMatch(m -> m
|
.query(nq -> nq.bool(nb -> nb.should(
|
||||||
.query(request.textInBody())
|
Query.of(mq -> mq.multiMatch(m -> m
|
||||||
.fields("processors.input_body", "processors.output_body"))))));
|
.query(bodyText)
|
||||||
|
.fields("processors.input_body", "processors.output_body"))),
|
||||||
|
wildcardQuery("processors.input_body", bodyWildcard),
|
||||||
|
wildcardQuery("processors.output_body", bodyWildcard)
|
||||||
|
).minimumShouldMatch("1"))))));
|
||||||
}
|
}
|
||||||
if (request.textInHeaders() != null && !request.textInHeaders().isBlank()) {
|
if (request.textInHeaders() != null && !request.textInHeaders().isBlank()) {
|
||||||
|
String headerText = request.textInHeaders();
|
||||||
|
String headerWildcard = "*" + headerText.toLowerCase() + "*";
|
||||||
must.add(Query.of(q -> q.nested(n -> n
|
must.add(Query.of(q -> q.nested(n -> n
|
||||||
.path("processors")
|
.path("processors")
|
||||||
.query(nq -> nq.multiMatch(m -> m
|
.query(nq -> nq.bool(nb -> nb.should(
|
||||||
.query(request.textInHeaders())
|
Query.of(mq -> mq.multiMatch(m -> m
|
||||||
.fields("processors.input_headers", "processors.output_headers"))))));
|
.query(headerText)
|
||||||
|
.fields("processors.input_headers", "processors.output_headers"))),
|
||||||
|
wildcardQuery("processors.input_headers", headerWildcard),
|
||||||
|
wildcardQuery("processors.output_headers", headerWildcard)
|
||||||
|
).minimumShouldMatch("1"))))));
|
||||||
}
|
}
|
||||||
if (request.textInErrors() != null && !request.textInErrors().isBlank()) {
|
if (request.textInErrors() != null && !request.textInErrors().isBlank()) {
|
||||||
String errText = request.textInErrors();
|
String errText = request.textInErrors();
|
||||||
|
String errWildcard = "*" + errText.toLowerCase() + "*";
|
||||||
must.add(Query.of(q -> q.bool(b -> b.should(
|
must.add(Query.of(q -> q.bool(b -> b.should(
|
||||||
Query.of(sq -> sq.multiMatch(m -> m
|
Query.of(sq -> sq.multiMatch(m -> m
|
||||||
.query(errText)
|
.query(errText)
|
||||||
.fields("error_message", "error_stacktrace"))),
|
.fields("error_message", "error_stacktrace"))),
|
||||||
|
wildcardQuery("error_message", errWildcard),
|
||||||
|
wildcardQuery("error_stacktrace", errWildcard),
|
||||||
Query.of(sq -> sq.nested(n -> n
|
Query.of(sq -> sq.nested(n -> n
|
||||||
.path("processors")
|
.path("processors")
|
||||||
.query(nq -> nq.multiMatch(m -> m
|
.query(nq -> nq.bool(nb -> nb.should(
|
||||||
.query(errText)
|
Query.of(nmq -> nmq.multiMatch(m -> m
|
||||||
.fields("processors.error_message", "processors.error_stacktrace")))))
|
.query(errText)
|
||||||
|
.fields("processors.error_message", "processors.error_stacktrace"))),
|
||||||
|
wildcardQuery("processors.error_message", errWildcard),
|
||||||
|
wildcardQuery("processors.error_stacktrace", errWildcard)
|
||||||
|
).minimumShouldMatch("1")))))
|
||||||
).minimumShouldMatch("1"))));
|
).minimumShouldMatch("1"))));
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -238,12 +279,16 @@ public class OpenSearchIndex implements SearchIndex {
|
|||||||
return Query.of(q -> q.term(t -> t.field(field).value(FieldValue.of(value))));
|
return Query.of(q -> q.term(t -> t.field(field).value(FieldValue.of(value))));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
private Query wildcardQuery(String field, String pattern) {
|
||||||
|
return Query.of(q -> q.wildcard(w -> w.field(field).value(pattern).caseInsensitive(true)));
|
||||||
|
}
|
||||||
|
|
||||||
private Map<String, Object> toMap(ExecutionDocument doc) {
|
private Map<String, Object> toMap(ExecutionDocument doc) {
|
||||||
Map<String, Object> map = new LinkedHashMap<>();
|
Map<String, Object> map = new LinkedHashMap<>();
|
||||||
map.put("execution_id", doc.executionId());
|
map.put("execution_id", doc.executionId());
|
||||||
map.put("route_id", doc.routeId());
|
map.put("route_id", doc.routeId());
|
||||||
map.put("agent_id", doc.agentId());
|
map.put("agent_id", doc.agentId());
|
||||||
map.put("group_name", doc.groupName());
|
map.put("application_name", doc.applicationName());
|
||||||
map.put("status", doc.status());
|
map.put("status", doc.status());
|
||||||
map.put("correlation_id", doc.correlationId());
|
map.put("correlation_id", doc.correlationId());
|
||||||
map.put("exchange_id", doc.exchangeId());
|
map.put("exchange_id", doc.exchangeId());
|
||||||
@@ -278,6 +323,7 @@ public class OpenSearchIndex implements SearchIndex {
|
|||||||
(String) src.get("execution_id"),
|
(String) src.get("execution_id"),
|
||||||
(String) src.get("route_id"),
|
(String) src.get("route_id"),
|
||||||
(String) src.get("agent_id"),
|
(String) src.get("agent_id"),
|
||||||
|
(String) src.get("application_name"),
|
||||||
(String) src.get("status"),
|
(String) src.get("status"),
|
||||||
src.get("start_time") != null ? Instant.parse((String) src.get("start_time")) : null,
|
src.get("start_time") != null ? Instant.parse((String) src.get("start_time")) : null,
|
||||||
src.get("end_time") != null ? Instant.parse((String) src.get("end_time")) : null,
|
src.get("end_time") != null ? Instant.parse((String) src.get("end_time")) : null,
|
||||||
|
|||||||
@@ -60,13 +60,13 @@ public class JwtServiceImpl implements JwtService {
|
|||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public String createAccessToken(String subject, String group, List<String> roles) {
|
public String createAccessToken(String subject, String application, List<String> roles) {
|
||||||
return createToken(subject, group, roles, "access", properties.getAccessTokenExpiryMs());
|
return createToken(subject, application, roles, "access", properties.getAccessTokenExpiryMs());
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public String createRefreshToken(String subject, String group, List<String> roles) {
|
public String createRefreshToken(String subject, String application, List<String> roles) {
|
||||||
return createToken(subject, group, roles, "refresh", properties.getRefreshTokenExpiryMs());
|
return createToken(subject, application, roles, "refresh", properties.getRefreshTokenExpiryMs());
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
@@ -84,12 +84,12 @@ public class JwtServiceImpl implements JwtService {
|
|||||||
return validateAccessToken(token).subject();
|
return validateAccessToken(token).subject();
|
||||||
}
|
}
|
||||||
|
|
||||||
private String createToken(String subject, String group, List<String> roles,
|
private String createToken(String subject, String application, List<String> roles,
|
||||||
String type, long expiryMs) {
|
String type, long expiryMs) {
|
||||||
Instant now = Instant.now();
|
Instant now = Instant.now();
|
||||||
JWTClaimsSet claims = new JWTClaimsSet.Builder()
|
JWTClaimsSet claims = new JWTClaimsSet.Builder()
|
||||||
.subject(subject)
|
.subject(subject)
|
||||||
.claim("group", group)
|
.claim("group", application)
|
||||||
.claim("type", type)
|
.claim("type", type)
|
||||||
.claim("roles", roles)
|
.claim("roles", roles)
|
||||||
.issueTime(Date.from(now))
|
.issueTime(Date.from(now))
|
||||||
@@ -132,7 +132,7 @@ public class JwtServiceImpl implements JwtService {
|
|||||||
throw new InvalidTokenException("Token has no subject");
|
throw new InvalidTokenException("Token has no subject");
|
||||||
}
|
}
|
||||||
|
|
||||||
String group = claims.getStringClaim("group");
|
String application = claims.getStringClaim("group");
|
||||||
|
|
||||||
// Extract roles — may be absent in legacy tokens
|
// Extract roles — may be absent in legacy tokens
|
||||||
List<String> roles;
|
List<String> roles;
|
||||||
@@ -145,7 +145,7 @@ public class JwtServiceImpl implements JwtService {
|
|||||||
roles = List.of();
|
roles = List.of();
|
||||||
}
|
}
|
||||||
|
|
||||||
return new JwtValidationResult(subject, group, roles);
|
return new JwtValidationResult(subject, application, roles);
|
||||||
} catch (ParseException e) {
|
} catch (ParseException e) {
|
||||||
throw new InvalidTokenException("Failed to parse JWT", e);
|
throw new InvalidTokenException("Failed to parse JWT", e);
|
||||||
} catch (JOSEException e) {
|
} catch (JOSEException e) {
|
||||||
|
|||||||
@@ -3,11 +3,17 @@ package com.cameleer3.server.app.security;
|
|||||||
import com.cameleer3.server.app.dto.AuthTokenResponse;
|
import com.cameleer3.server.app.dto.AuthTokenResponse;
|
||||||
import com.cameleer3.server.app.dto.ErrorResponse;
|
import com.cameleer3.server.app.dto.ErrorResponse;
|
||||||
import com.cameleer3.server.app.dto.OidcPublicConfigResponse;
|
import com.cameleer3.server.app.dto.OidcPublicConfigResponse;
|
||||||
|
import com.cameleer3.server.core.admin.AuditCategory;
|
||||||
|
import com.cameleer3.server.core.admin.AuditResult;
|
||||||
|
import com.cameleer3.server.core.admin.AuditService;
|
||||||
|
import com.cameleer3.server.core.rbac.RbacService;
|
||||||
|
import com.cameleer3.server.core.rbac.SystemRole;
|
||||||
import com.cameleer3.server.core.security.JwtService;
|
import com.cameleer3.server.core.security.JwtService;
|
||||||
import com.cameleer3.server.core.security.OidcConfig;
|
import com.cameleer3.server.core.security.OidcConfig;
|
||||||
import com.cameleer3.server.core.security.OidcConfigRepository;
|
import com.cameleer3.server.core.security.OidcConfigRepository;
|
||||||
import com.cameleer3.server.core.security.UserInfo;
|
import com.cameleer3.server.core.security.UserInfo;
|
||||||
import com.cameleer3.server.core.security.UserRepository;
|
import com.cameleer3.server.core.security.UserRepository;
|
||||||
|
import jakarta.servlet.http.HttpServletRequest;
|
||||||
import io.swagger.v3.oas.annotations.Operation;
|
import io.swagger.v3.oas.annotations.Operation;
|
||||||
import io.swagger.v3.oas.annotations.media.Content;
|
import io.swagger.v3.oas.annotations.media.Content;
|
||||||
import io.swagger.v3.oas.annotations.media.Schema;
|
import io.swagger.v3.oas.annotations.media.Schema;
|
||||||
@@ -27,12 +33,14 @@ import org.springframework.web.server.ResponseStatusException;
|
|||||||
import java.net.URI;
|
import java.net.URI;
|
||||||
import java.time.Instant;
|
import java.time.Instant;
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
|
import java.util.Map;
|
||||||
import java.util.Optional;
|
import java.util.Optional;
|
||||||
|
import java.util.UUID;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* OIDC authentication endpoints for the UI.
|
* OIDC authentication endpoints for the UI.
|
||||||
* <p>
|
* <p>
|
||||||
* Always registered — returns 404 when OIDC is not configured or disabled.
|
* Always registered -- returns 404 when OIDC is not configured or disabled.
|
||||||
* Configuration is read from the database (managed via admin UI).
|
* Configuration is read from the database (managed via admin UI).
|
||||||
*/
|
*/
|
||||||
@RestController
|
@RestController
|
||||||
@@ -46,15 +54,21 @@ public class OidcAuthController {
|
|||||||
private final OidcConfigRepository configRepository;
|
private final OidcConfigRepository configRepository;
|
||||||
private final JwtService jwtService;
|
private final JwtService jwtService;
|
||||||
private final UserRepository userRepository;
|
private final UserRepository userRepository;
|
||||||
|
private final AuditService auditService;
|
||||||
|
private final RbacService rbacService;
|
||||||
|
|
||||||
public OidcAuthController(OidcTokenExchanger tokenExchanger,
|
public OidcAuthController(OidcTokenExchanger tokenExchanger,
|
||||||
OidcConfigRepository configRepository,
|
OidcConfigRepository configRepository,
|
||||||
JwtService jwtService,
|
JwtService jwtService,
|
||||||
UserRepository userRepository) {
|
UserRepository userRepository,
|
||||||
|
AuditService auditService,
|
||||||
|
RbacService rbacService) {
|
||||||
this.tokenExchanger = tokenExchanger;
|
this.tokenExchanger = tokenExchanger;
|
||||||
this.configRepository = configRepository;
|
this.configRepository = configRepository;
|
||||||
this.jwtService = jwtService;
|
this.jwtService = jwtService;
|
||||||
this.userRepository = userRepository;
|
this.userRepository = userRepository;
|
||||||
|
this.auditService = auditService;
|
||||||
|
this.rbacService = rbacService;
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -100,7 +114,8 @@ public class OidcAuthController {
|
|||||||
@ApiResponse(responseCode = "403", description = "Account not provisioned",
|
@ApiResponse(responseCode = "403", description = "Account not provisioned",
|
||||||
content = @Content(schema = @Schema(implementation = ErrorResponse.class)))
|
content = @Content(schema = @Schema(implementation = ErrorResponse.class)))
|
||||||
@ApiResponse(responseCode = "404", description = "OIDC not configured or disabled")
|
@ApiResponse(responseCode = "404", description = "OIDC not configured or disabled")
|
||||||
public ResponseEntity<AuthTokenResponse> callback(@RequestBody CallbackRequest request) {
|
public ResponseEntity<AuthTokenResponse> callback(@RequestBody CallbackRequest request,
|
||||||
|
HttpServletRequest httpRequest) {
|
||||||
Optional<OidcConfig> config = configRepository.find();
|
Optional<OidcConfig> config = configRepository.find();
|
||||||
if (config.isEmpty() || !config.get().enabled()) {
|
if (config.isEmpty() || !config.get().enabled()) {
|
||||||
return ResponseEntity.notFound().build();
|
return ResponseEntity.notFound().build();
|
||||||
@@ -121,17 +136,24 @@ public class OidcAuthController {
|
|||||||
"Account not provisioned. Contact your administrator.");
|
"Account not provisioned. Contact your administrator.");
|
||||||
}
|
}
|
||||||
|
|
||||||
// Resolve roles: DB override > OIDC claim > default
|
// Upsert user (without roles -- roles are in user_roles table)
|
||||||
List<String> roles = resolveRoles(existingUser, oidcUser.roles(), config.get());
|
|
||||||
|
|
||||||
userRepository.upsert(new UserInfo(
|
userRepository.upsert(new UserInfo(
|
||||||
userId, provider, oidcUser.email(), oidcUser.name(), roles, Instant.now()));
|
userId, provider, oidcUser.email(), oidcUser.name(), Instant.now()));
|
||||||
|
|
||||||
|
// Assign roles if new user
|
||||||
|
if (existingUser.isEmpty()) {
|
||||||
|
assignRolesForNewUser(userId, oidcUser.roles(), config.get());
|
||||||
|
}
|
||||||
|
|
||||||
|
List<String> roles = rbacService.getSystemRoleNames(userId);
|
||||||
|
|
||||||
String accessToken = jwtService.createAccessToken(userId, "user", roles);
|
String accessToken = jwtService.createAccessToken(userId, "user", roles);
|
||||||
String refreshToken = jwtService.createRefreshToken(userId, "user", roles);
|
String refreshToken = jwtService.createRefreshToken(userId, "user", roles);
|
||||||
|
|
||||||
String displayName = oidcUser.name() != null && !oidcUser.name().isBlank()
|
String displayName = oidcUser.name() != null && !oidcUser.name().isBlank()
|
||||||
? oidcUser.name() : oidcUser.email();
|
? oidcUser.name() : oidcUser.email();
|
||||||
|
auditService.log(userId, "login_oidc", AuditCategory.AUTH, null,
|
||||||
|
Map.of("provider", config.get().issuerUri()), AuditResult.SUCCESS, httpRequest);
|
||||||
return ResponseEntity.ok(new AuthTokenResponse(accessToken, refreshToken, displayName, oidcUser.idToken()));
|
return ResponseEntity.ok(new AuthTokenResponse(accessToken, refreshToken, displayName, oidcUser.idToken()));
|
||||||
} catch (ResponseStatusException e) {
|
} catch (ResponseStatusException e) {
|
||||||
throw e;
|
throw e;
|
||||||
@@ -142,14 +164,14 @@ public class OidcAuthController {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
private List<String> resolveRoles(Optional<UserInfo> existing, List<String> oidcRoles, OidcConfig config) {
|
private void assignRolesForNewUser(String userId, List<String> oidcRoles, OidcConfig config) {
|
||||||
if (existing.isPresent() && !existing.get().roles().isEmpty()) {
|
List<String> roleNames = !oidcRoles.isEmpty() ? oidcRoles : config.defaultRoles();
|
||||||
return existing.get().roles();
|
for (String roleName : roleNames) {
|
||||||
|
UUID roleId = SystemRole.BY_NAME.get(roleName.toUpperCase());
|
||||||
|
if (roleId != null) {
|
||||||
|
rbacService.assignRoleToUser(userId, roleId);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
if (!oidcRoles.isEmpty()) {
|
|
||||||
return oidcRoles;
|
|
||||||
}
|
|
||||||
return config.defaultRoles();
|
|
||||||
}
|
}
|
||||||
|
|
||||||
public record CallbackRequest(String code, String redirectUri) {}
|
public record CallbackRequest(String code, String redirectUri) {}
|
||||||
|
|||||||
@@ -1,29 +1,20 @@
|
|||||||
package com.cameleer3.server.app.security;
|
package com.cameleer3.server.app.security;
|
||||||
|
|
||||||
import com.cameleer3.server.core.security.OidcConfig;
|
|
||||||
import com.cameleer3.server.core.security.OidcConfigRepository;
|
|
||||||
import org.slf4j.Logger;
|
|
||||||
import org.slf4j.LoggerFactory;
|
|
||||||
import org.springframework.beans.factory.InitializingBean;
|
import org.springframework.beans.factory.InitializingBean;
|
||||||
import org.springframework.boot.context.properties.EnableConfigurationProperties;
|
import org.springframework.boot.context.properties.EnableConfigurationProperties;
|
||||||
import org.springframework.context.annotation.Bean;
|
import org.springframework.context.annotation.Bean;
|
||||||
import org.springframework.context.annotation.Configuration;
|
import org.springframework.context.annotation.Configuration;
|
||||||
|
|
||||||
import java.util.List;
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Configuration class that creates security service beans and validates
|
* Configuration class that creates security service beans and validates
|
||||||
* that required security properties are set.
|
* that required security properties are set.
|
||||||
* <p>
|
* <p>
|
||||||
* Fails fast on startup if {@code CAMELEER_AUTH_TOKEN} is not set.
|
* Fails fast on startup if {@code CAMELEER_AUTH_TOKEN} is not set.
|
||||||
* Seeds OIDC config from env vars into the database if DB is empty.
|
|
||||||
*/
|
*/
|
||||||
@Configuration
|
@Configuration
|
||||||
@EnableConfigurationProperties(SecurityProperties.class)
|
@EnableConfigurationProperties(SecurityProperties.class)
|
||||||
public class SecurityBeanConfig {
|
public class SecurityBeanConfig {
|
||||||
|
|
||||||
private static final Logger log = LoggerFactory.getLogger(SecurityBeanConfig.class);
|
|
||||||
|
|
||||||
@Bean
|
@Bean
|
||||||
public JwtServiceImpl jwtService(SecurityProperties properties) {
|
public JwtServiceImpl jwtService(SecurityProperties properties) {
|
||||||
return new JwtServiceImpl(properties);
|
return new JwtServiceImpl(properties);
|
||||||
@@ -50,36 +41,4 @@ public class SecurityBeanConfig {
|
|||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* Seeds OIDC config from env vars into the database if the DB has no config yet.
|
|
||||||
* This allows initial provisioning via env vars, after which the admin UI takes over.
|
|
||||||
*/
|
|
||||||
@Bean
|
|
||||||
public InitializingBean oidcConfigSeeder(SecurityProperties properties,
|
|
||||||
OidcConfigRepository configRepository) {
|
|
||||||
return () -> {
|
|
||||||
if (configRepository.find().isPresent()) {
|
|
||||||
log.debug("OIDC config already present in database, skipping env var seed");
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
SecurityProperties.Oidc envOidc = properties.getOidc();
|
|
||||||
if (envOidc.isEnabled()
|
|
||||||
&& envOidc.getIssuerUri() != null && !envOidc.getIssuerUri().isBlank()
|
|
||||||
&& envOidc.getClientId() != null && !envOidc.getClientId().isBlank()) {
|
|
||||||
OidcConfig config = new OidcConfig(
|
|
||||||
true,
|
|
||||||
envOidc.getIssuerUri(),
|
|
||||||
envOidc.getClientId(),
|
|
||||||
envOidc.getClientSecret() != null ? envOidc.getClientSecret() : "",
|
|
||||||
envOidc.getRolesClaim(),
|
|
||||||
envOidc.getDefaultRoles(),
|
|
||||||
true,
|
|
||||||
"name"
|
|
||||||
);
|
|
||||||
configRepository.save(config);
|
|
||||||
log.info("OIDC config seeded from environment variables: issuer={}", envOidc.getIssuerUri());
|
|
||||||
}
|
|
||||||
};
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -6,6 +6,7 @@ import org.springframework.context.annotation.Bean;
|
|||||||
import org.springframework.context.annotation.Configuration;
|
import org.springframework.context.annotation.Configuration;
|
||||||
import org.springframework.http.HttpMethod;
|
import org.springframework.http.HttpMethod;
|
||||||
import org.springframework.http.HttpStatus;
|
import org.springframework.http.HttpStatus;
|
||||||
|
import org.springframework.security.config.annotation.method.configuration.EnableMethodSecurity;
|
||||||
import org.springframework.security.config.annotation.web.builders.HttpSecurity;
|
import org.springframework.security.config.annotation.web.builders.HttpSecurity;
|
||||||
import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity;
|
import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity;
|
||||||
import org.springframework.security.config.annotation.web.configurers.AbstractHttpConfigurer;
|
import org.springframework.security.config.annotation.web.configurers.AbstractHttpConfigurer;
|
||||||
@@ -27,6 +28,7 @@ import java.util.List;
|
|||||||
*/
|
*/
|
||||||
@Configuration
|
@Configuration
|
||||||
@EnableWebSecurity
|
@EnableWebSecurity
|
||||||
|
@EnableMethodSecurity
|
||||||
public class SecurityConfig {
|
public class SecurityConfig {
|
||||||
|
|
||||||
@Bean
|
@Bean
|
||||||
@@ -78,7 +80,10 @@ public class SecurityConfig {
|
|||||||
// Read-only data endpoints — viewer+
|
// Read-only data endpoints — viewer+
|
||||||
.requestMatchers(HttpMethod.GET, "/api/v1/executions/**").hasAnyRole("VIEWER", "OPERATOR", "ADMIN")
|
.requestMatchers(HttpMethod.GET, "/api/v1/executions/**").hasAnyRole("VIEWER", "OPERATOR", "ADMIN")
|
||||||
.requestMatchers(HttpMethod.GET, "/api/v1/diagrams/**").hasAnyRole("VIEWER", "OPERATOR", "ADMIN")
|
.requestMatchers(HttpMethod.GET, "/api/v1/diagrams/**").hasAnyRole("VIEWER", "OPERATOR", "ADMIN")
|
||||||
|
.requestMatchers(HttpMethod.GET, "/api/v1/agents/*/metrics").hasAnyRole("VIEWER", "OPERATOR", "ADMIN")
|
||||||
.requestMatchers(HttpMethod.GET, "/api/v1/agents").hasAnyRole("VIEWER", "OPERATOR", "ADMIN")
|
.requestMatchers(HttpMethod.GET, "/api/v1/agents").hasAnyRole("VIEWER", "OPERATOR", "ADMIN")
|
||||||
|
.requestMatchers(HttpMethod.GET, "/api/v1/agents/events-log").hasAnyRole("VIEWER", "OPERATOR", "ADMIN")
|
||||||
|
.requestMatchers(HttpMethod.GET, "/api/v1/routes/**").hasAnyRole("VIEWER", "OPERATOR", "ADMIN")
|
||||||
.requestMatchers(HttpMethod.GET, "/api/v1/stats/**").hasAnyRole("VIEWER", "OPERATOR", "ADMIN")
|
.requestMatchers(HttpMethod.GET, "/api/v1/stats/**").hasAnyRole("VIEWER", "OPERATOR", "ADMIN")
|
||||||
|
|
||||||
// Admin endpoints
|
// Admin endpoints
|
||||||
|
|||||||
@@ -2,8 +2,6 @@ package com.cameleer3.server.app.security;
|
|||||||
|
|
||||||
import org.springframework.boot.context.properties.ConfigurationProperties;
|
import org.springframework.boot.context.properties.ConfigurationProperties;
|
||||||
|
|
||||||
import java.util.List;
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Configuration properties for security settings.
|
* Configuration properties for security settings.
|
||||||
* Bound from the {@code security.*} namespace in application.yml.
|
* Bound from the {@code security.*} namespace in application.yml.
|
||||||
@@ -19,29 +17,6 @@ public class SecurityProperties {
|
|||||||
private String uiPassword;
|
private String uiPassword;
|
||||||
private String uiOrigin;
|
private String uiOrigin;
|
||||||
private String jwtSecret;
|
private String jwtSecret;
|
||||||
private Oidc oidc = new Oidc();
|
|
||||||
|
|
||||||
public static class Oidc {
|
|
||||||
private boolean enabled = false;
|
|
||||||
private String issuerUri;
|
|
||||||
private String clientId;
|
|
||||||
private String clientSecret;
|
|
||||||
private String rolesClaim = "realm_access.roles";
|
|
||||||
private List<String> defaultRoles = List.of("VIEWER");
|
|
||||||
|
|
||||||
public boolean isEnabled() { return enabled; }
|
|
||||||
public void setEnabled(boolean enabled) { this.enabled = enabled; }
|
|
||||||
public String getIssuerUri() { return issuerUri; }
|
|
||||||
public void setIssuerUri(String issuerUri) { this.issuerUri = issuerUri; }
|
|
||||||
public String getClientId() { return clientId; }
|
|
||||||
public void setClientId(String clientId) { this.clientId = clientId; }
|
|
||||||
public String getClientSecret() { return clientSecret; }
|
|
||||||
public void setClientSecret(String clientSecret) { this.clientSecret = clientSecret; }
|
|
||||||
public String getRolesClaim() { return rolesClaim; }
|
|
||||||
public void setRolesClaim(String rolesClaim) { this.rolesClaim = rolesClaim; }
|
|
||||||
public List<String> getDefaultRoles() { return defaultRoles; }
|
|
||||||
public void setDefaultRoles(List<String> defaultRoles) { this.defaultRoles = defaultRoles; }
|
|
||||||
}
|
|
||||||
|
|
||||||
public long getAccessTokenExpiryMs() { return accessTokenExpiryMs; }
|
public long getAccessTokenExpiryMs() { return accessTokenExpiryMs; }
|
||||||
public void setAccessTokenExpiryMs(long accessTokenExpiryMs) { this.accessTokenExpiryMs = accessTokenExpiryMs; }
|
public void setAccessTokenExpiryMs(long accessTokenExpiryMs) { this.accessTokenExpiryMs = accessTokenExpiryMs; }
|
||||||
@@ -59,6 +34,4 @@ public class SecurityProperties {
|
|||||||
public void setUiOrigin(String uiOrigin) { this.uiOrigin = uiOrigin; }
|
public void setUiOrigin(String uiOrigin) { this.uiOrigin = uiOrigin; }
|
||||||
public String getJwtSecret() { return jwtSecret; }
|
public String getJwtSecret() { return jwtSecret; }
|
||||||
public void setJwtSecret(String jwtSecret) { this.jwtSecret = jwtSecret; }
|
public void setJwtSecret(String jwtSecret) { this.jwtSecret = jwtSecret; }
|
||||||
public Oidc getOidc() { return oidc; }
|
|
||||||
public void setOidc(Oidc oidc) { this.oidc = oidc; }
|
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -2,7 +2,13 @@ package com.cameleer3.server.app.security;
|
|||||||
|
|
||||||
import com.cameleer3.server.app.dto.AuthTokenResponse;
|
import com.cameleer3.server.app.dto.AuthTokenResponse;
|
||||||
import com.cameleer3.server.app.dto.ErrorResponse;
|
import com.cameleer3.server.app.dto.ErrorResponse;
|
||||||
|
import com.cameleer3.server.core.admin.AuditCategory;
|
||||||
|
import com.cameleer3.server.core.admin.AuditResult;
|
||||||
|
import com.cameleer3.server.core.admin.AuditService;
|
||||||
|
import com.cameleer3.server.core.rbac.RbacService;
|
||||||
|
import com.cameleer3.server.core.rbac.SystemRole;
|
||||||
import com.cameleer3.server.core.security.JwtService;
|
import com.cameleer3.server.core.security.JwtService;
|
||||||
|
import jakarta.servlet.http.HttpServletRequest;
|
||||||
import com.cameleer3.server.core.security.JwtService.JwtValidationResult;
|
import com.cameleer3.server.core.security.JwtService.JwtValidationResult;
|
||||||
import com.cameleer3.server.core.security.UserInfo;
|
import com.cameleer3.server.core.security.UserInfo;
|
||||||
import com.cameleer3.server.core.security.UserRepository;
|
import com.cameleer3.server.core.security.UserRepository;
|
||||||
@@ -15,6 +21,7 @@ import org.slf4j.Logger;
|
|||||||
import org.slf4j.LoggerFactory;
|
import org.slf4j.LoggerFactory;
|
||||||
import org.springframework.http.HttpStatus;
|
import org.springframework.http.HttpStatus;
|
||||||
import org.springframework.http.ResponseEntity;
|
import org.springframework.http.ResponseEntity;
|
||||||
|
import org.springframework.security.crypto.bcrypt.BCryptPasswordEncoder;
|
||||||
import org.springframework.web.bind.annotation.PostMapping;
|
import org.springframework.web.bind.annotation.PostMapping;
|
||||||
import org.springframework.web.bind.annotation.RequestBody;
|
import org.springframework.web.bind.annotation.RequestBody;
|
||||||
import org.springframework.web.bind.annotation.RequestMapping;
|
import org.springframework.web.bind.annotation.RequestMapping;
|
||||||
@@ -23,6 +30,8 @@ import org.springframework.web.server.ResponseStatusException;
|
|||||||
|
|
||||||
import java.time.Instant;
|
import java.time.Instant;
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
|
import java.util.Map;
|
||||||
|
import java.util.Optional;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Authentication endpoints for the UI (local credentials).
|
* Authentication endpoints for the UI (local credentials).
|
||||||
@@ -37,16 +46,22 @@ import java.util.List;
|
|||||||
public class UiAuthController {
|
public class UiAuthController {
|
||||||
|
|
||||||
private static final Logger log = LoggerFactory.getLogger(UiAuthController.class);
|
private static final Logger log = LoggerFactory.getLogger(UiAuthController.class);
|
||||||
|
private static final BCryptPasswordEncoder passwordEncoder = new BCryptPasswordEncoder();
|
||||||
|
|
||||||
private final JwtService jwtService;
|
private final JwtService jwtService;
|
||||||
private final SecurityProperties properties;
|
private final SecurityProperties properties;
|
||||||
private final UserRepository userRepository;
|
private final UserRepository userRepository;
|
||||||
|
private final AuditService auditService;
|
||||||
|
private final RbacService rbacService;
|
||||||
|
|
||||||
public UiAuthController(JwtService jwtService, SecurityProperties properties,
|
public UiAuthController(JwtService jwtService, SecurityProperties properties,
|
||||||
UserRepository userRepository) {
|
UserRepository userRepository, AuditService auditService,
|
||||||
|
RbacService rbacService) {
|
||||||
this.jwtService = jwtService;
|
this.jwtService = jwtService;
|
||||||
this.properties = properties;
|
this.properties = properties;
|
||||||
this.userRepository = userRepository;
|
this.userRepository = userRepository;
|
||||||
|
this.auditService = auditService;
|
||||||
|
this.rbacService = rbacService;
|
||||||
}
|
}
|
||||||
|
|
||||||
@PostMapping("/login")
|
@PostMapping("/login")
|
||||||
@@ -54,36 +69,51 @@ public class UiAuthController {
|
|||||||
@ApiResponse(responseCode = "200", description = "Login successful")
|
@ApiResponse(responseCode = "200", description = "Login successful")
|
||||||
@ApiResponse(responseCode = "401", description = "Invalid credentials",
|
@ApiResponse(responseCode = "401", description = "Invalid credentials",
|
||||||
content = @Content(schema = @Schema(implementation = ErrorResponse.class)))
|
content = @Content(schema = @Schema(implementation = ErrorResponse.class)))
|
||||||
public ResponseEntity<AuthTokenResponse> login(@RequestBody LoginRequest request) {
|
public ResponseEntity<AuthTokenResponse> login(@RequestBody LoginRequest request,
|
||||||
|
HttpServletRequest httpRequest) {
|
||||||
String configuredUser = properties.getUiUser();
|
String configuredUser = properties.getUiUser();
|
||||||
String configuredPassword = properties.getUiPassword();
|
String configuredPassword = properties.getUiPassword();
|
||||||
|
|
||||||
if (configuredUser == null || configuredUser.isBlank()
|
|
||||||
|| configuredPassword == null || configuredPassword.isBlank()) {
|
|
||||||
log.warn("UI authentication attempted but CAMELEER_UI_USER / CAMELEER_UI_PASSWORD not configured");
|
|
||||||
throw new ResponseStatusException(HttpStatus.UNAUTHORIZED, "UI authentication not configured");
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!configuredUser.equals(request.username())
|
|
||||||
|| !configuredPassword.equals(request.password())) {
|
|
||||||
log.debug("UI login failed for user: {}", request.username());
|
|
||||||
throw new ResponseStatusException(HttpStatus.UNAUTHORIZED, "Invalid credentials");
|
|
||||||
}
|
|
||||||
|
|
||||||
String subject = "user:" + request.username();
|
String subject = "user:" + request.username();
|
||||||
List<String> roles = List.of("ADMIN");
|
|
||||||
|
|
||||||
// Upsert local user into store
|
// Try env-var admin first
|
||||||
try {
|
boolean envMatch = configuredUser != null && !configuredUser.isBlank()
|
||||||
userRepository.upsert(new UserInfo(
|
&& configuredPassword != null && !configuredPassword.isBlank()
|
||||||
subject, "local", "", request.username(), roles, Instant.now()));
|
&& configuredUser.equals(request.username())
|
||||||
} catch (Exception e) {
|
&& configuredPassword.equals(request.password());
|
||||||
log.warn("Failed to upsert local user to store (login continues): {}", e.getMessage());
|
|
||||||
|
if (!envMatch) {
|
||||||
|
// Try per-user password
|
||||||
|
Optional<String> hash = userRepository.getPasswordHash(subject);
|
||||||
|
if (hash.isEmpty() || !passwordEncoder.matches(request.password(), hash.get())) {
|
||||||
|
log.debug("UI login failed for user: {}", request.username());
|
||||||
|
auditService.log(request.username(), "login_failed", AuditCategory.AUTH, null,
|
||||||
|
Map.of("reason", "Invalid credentials"), AuditResult.FAILURE, httpRequest);
|
||||||
|
throw new ResponseStatusException(HttpStatus.UNAUTHORIZED, "Invalid credentials");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (envMatch) {
|
||||||
|
// Env-var admin: upsert and ensure ADMIN role + Admins group
|
||||||
|
try {
|
||||||
|
userRepository.upsert(new UserInfo(
|
||||||
|
subject, "local", "", request.username(), Instant.now()));
|
||||||
|
rbacService.assignRoleToUser(subject, SystemRole.ADMIN_ID);
|
||||||
|
rbacService.addUserToGroup(subject, SystemRole.ADMINS_GROUP_ID);
|
||||||
|
} catch (Exception e) {
|
||||||
|
log.warn("Failed to upsert local admin to store (login continues): {}", e.getMessage());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// Per-user logins: user already exists in DB (created by admin)
|
||||||
|
|
||||||
|
List<String> roles = rbacService.getSystemRoleNames(subject);
|
||||||
|
if (roles.isEmpty()) {
|
||||||
|
roles = List.of("VIEWER");
|
||||||
}
|
}
|
||||||
|
|
||||||
String accessToken = jwtService.createAccessToken(subject, "user", roles);
|
String accessToken = jwtService.createAccessToken(subject, "user", roles);
|
||||||
String refreshToken = jwtService.createRefreshToken(subject, "user", roles);
|
String refreshToken = jwtService.createRefreshToken(subject, "user", roles);
|
||||||
|
|
||||||
|
auditService.log(request.username(), "login", AuditCategory.AUTH, null, null, AuditResult.SUCCESS, httpRequest);
|
||||||
log.info("UI user logged in: {}", request.username());
|
log.info("UI user logged in: {}", request.username());
|
||||||
return ResponseEntity.ok(new AuthTokenResponse(accessToken, refreshToken, request.username(), null));
|
return ResponseEntity.ok(new AuthTokenResponse(accessToken, refreshToken, request.username(), null));
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -0,0 +1,62 @@
|
|||||||
|
package com.cameleer3.server.app.storage;
|
||||||
|
|
||||||
|
import com.cameleer3.server.core.agent.AgentEventRecord;
|
||||||
|
import com.cameleer3.server.core.agent.AgentEventRepository;
|
||||||
|
import org.springframework.jdbc.core.JdbcTemplate;
|
||||||
|
import org.springframework.stereotype.Repository;
|
||||||
|
|
||||||
|
import java.sql.Timestamp;
|
||||||
|
import java.time.Instant;
|
||||||
|
import java.util.ArrayList;
|
||||||
|
import java.util.List;
|
||||||
|
|
||||||
|
@Repository
|
||||||
|
public class PostgresAgentEventRepository implements AgentEventRepository {
|
||||||
|
|
||||||
|
private final JdbcTemplate jdbc;
|
||||||
|
|
||||||
|
public PostgresAgentEventRepository(JdbcTemplate jdbc) {
|
||||||
|
this.jdbc = jdbc;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void insert(String agentId, String appId, String eventType, String detail) {
|
||||||
|
jdbc.update(
|
||||||
|
"INSERT INTO agent_events (agent_id, app_id, event_type, detail) VALUES (?, ?, ?, ?)",
|
||||||
|
agentId, appId, eventType, detail);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public List<AgentEventRecord> query(String appId, String agentId, Instant from, Instant to, int limit) {
|
||||||
|
var sql = new StringBuilder("SELECT id, agent_id, app_id, event_type, detail, timestamp FROM agent_events WHERE 1=1");
|
||||||
|
var params = new ArrayList<Object>();
|
||||||
|
|
||||||
|
if (appId != null) {
|
||||||
|
sql.append(" AND app_id = ?");
|
||||||
|
params.add(appId);
|
||||||
|
}
|
||||||
|
if (agentId != null) {
|
||||||
|
sql.append(" AND agent_id = ?");
|
||||||
|
params.add(agentId);
|
||||||
|
}
|
||||||
|
if (from != null) {
|
||||||
|
sql.append(" AND timestamp >= ?");
|
||||||
|
params.add(Timestamp.from(from));
|
||||||
|
}
|
||||||
|
if (to != null) {
|
||||||
|
sql.append(" AND timestamp < ?");
|
||||||
|
params.add(Timestamp.from(to));
|
||||||
|
}
|
||||||
|
sql.append(" ORDER BY timestamp DESC LIMIT ?");
|
||||||
|
params.add(limit);
|
||||||
|
|
||||||
|
return jdbc.query(sql.toString(), (rs, rowNum) -> new AgentEventRecord(
|
||||||
|
rs.getLong("id"),
|
||||||
|
rs.getString("agent_id"),
|
||||||
|
rs.getString("app_id"),
|
||||||
|
rs.getString("event_type"),
|
||||||
|
rs.getString("detail"),
|
||||||
|
rs.getTimestamp("timestamp").toInstant()
|
||||||
|
), params.toArray());
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -0,0 +1,131 @@
|
|||||||
|
package com.cameleer3.server.app.storage;
|
||||||
|
|
||||||
|
import com.cameleer3.server.core.admin.AuditCategory;
|
||||||
|
import com.cameleer3.server.core.admin.AuditRecord;
|
||||||
|
import com.cameleer3.server.core.admin.AuditRepository;
|
||||||
|
import com.cameleer3.server.core.admin.AuditResult;
|
||||||
|
import com.fasterxml.jackson.core.JsonProcessingException;
|
||||||
|
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||||
|
import org.springframework.jdbc.core.JdbcTemplate;
|
||||||
|
import org.springframework.stereotype.Repository;
|
||||||
|
|
||||||
|
import java.sql.ResultSet;
|
||||||
|
import java.sql.SQLException;
|
||||||
|
import java.sql.Timestamp;
|
||||||
|
import java.time.Instant;
|
||||||
|
import java.util.ArrayList;
|
||||||
|
import java.util.List;
|
||||||
|
import java.util.Map;
|
||||||
|
import java.util.Set;
|
||||||
|
|
||||||
|
@Repository
|
||||||
|
public class PostgresAuditRepository implements AuditRepository {
|
||||||
|
|
||||||
|
private static final Set<String> ALLOWED_SORT_COLUMNS =
|
||||||
|
Set.of("timestamp", "username", "action", "category");
|
||||||
|
private static final int MAX_PAGE_SIZE = 100;
|
||||||
|
|
||||||
|
private final JdbcTemplate jdbc;
|
||||||
|
private final ObjectMapper objectMapper;
|
||||||
|
|
||||||
|
public PostgresAuditRepository(JdbcTemplate jdbc, ObjectMapper objectMapper) {
|
||||||
|
this.jdbc = jdbc;
|
||||||
|
this.objectMapper = objectMapper;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void insert(AuditRecord record) {
|
||||||
|
String detailJson = null;
|
||||||
|
if (record.detail() != null) {
|
||||||
|
try {
|
||||||
|
detailJson = objectMapper.writeValueAsString(record.detail());
|
||||||
|
} catch (JsonProcessingException e) {
|
||||||
|
throw new RuntimeException("Failed to serialize audit detail", e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
jdbc.update("""
|
||||||
|
INSERT INTO audit_log (username, action, category, target, detail, result, ip_address, user_agent)
|
||||||
|
VALUES (?, ?, ?, ?, ?::jsonb, ?, ?, ?)
|
||||||
|
""",
|
||||||
|
record.username(), record.action(),
|
||||||
|
record.category() != null ? record.category().name() : null,
|
||||||
|
record.target(), detailJson,
|
||||||
|
record.result() != null ? record.result().name() : null,
|
||||||
|
record.ipAddress(), record.userAgent());
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public AuditPage find(AuditQuery query) {
|
||||||
|
int pageSize = Math.min(query.size() > 0 ? query.size() : 20, MAX_PAGE_SIZE);
|
||||||
|
int offset = query.page() * pageSize;
|
||||||
|
|
||||||
|
StringBuilder where = new StringBuilder("WHERE timestamp >= ? AND timestamp <= ?");
|
||||||
|
List<Object> params = new ArrayList<>();
|
||||||
|
params.add(Timestamp.from(query.from()));
|
||||||
|
params.add(Timestamp.from(query.to()));
|
||||||
|
|
||||||
|
if (query.username() != null && !query.username().isBlank()) {
|
||||||
|
where.append(" AND username = ?");
|
||||||
|
params.add(query.username());
|
||||||
|
}
|
||||||
|
if (query.category() != null) {
|
||||||
|
where.append(" AND category = ?");
|
||||||
|
params.add(query.category().name());
|
||||||
|
}
|
||||||
|
if (query.search() != null && !query.search().isBlank()) {
|
||||||
|
where.append(" AND (action ILIKE ? OR target ILIKE ?)");
|
||||||
|
String like = "%" + query.search() + "%";
|
||||||
|
params.add(like);
|
||||||
|
params.add(like);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Count query
|
||||||
|
String countSql = "SELECT COUNT(*) FROM audit_log " + where;
|
||||||
|
Long totalCount = jdbc.queryForObject(countSql, Long.class, params.toArray());
|
||||||
|
|
||||||
|
// Sort column validation
|
||||||
|
String sortCol = ALLOWED_SORT_COLUMNS.contains(query.sort()) ? query.sort() : "timestamp";
|
||||||
|
String order = "asc".equalsIgnoreCase(query.order()) ? "ASC" : "DESC";
|
||||||
|
|
||||||
|
String dataSql = "SELECT * FROM audit_log " + where
|
||||||
|
+ " ORDER BY " + sortCol + " " + order
|
||||||
|
+ " LIMIT ? OFFSET ?";
|
||||||
|
List<Object> dataParams = new ArrayList<>(params);
|
||||||
|
dataParams.add(pageSize);
|
||||||
|
dataParams.add(offset);
|
||||||
|
|
||||||
|
List<AuditRecord> items = jdbc.query(dataSql, (rs, rowNum) -> mapRecord(rs), dataParams.toArray());
|
||||||
|
return new AuditPage(items, totalCount != null ? totalCount : 0);
|
||||||
|
}
|
||||||
|
|
||||||
|
@SuppressWarnings("unchecked")
|
||||||
|
private AuditRecord mapRecord(ResultSet rs) throws SQLException {
|
||||||
|
Map<String, Object> detail = null;
|
||||||
|
String detailStr = rs.getString("detail");
|
||||||
|
if (detailStr != null) {
|
||||||
|
try {
|
||||||
|
detail = objectMapper.readValue(detailStr, Map.class);
|
||||||
|
} catch (JsonProcessingException e) {
|
||||||
|
// leave detail as null if unparseable
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Timestamp ts = rs.getTimestamp("timestamp");
|
||||||
|
String categoryStr = rs.getString("category");
|
||||||
|
String resultStr = rs.getString("result");
|
||||||
|
|
||||||
|
return new AuditRecord(
|
||||||
|
rs.getLong("id"),
|
||||||
|
ts != null ? ts.toInstant() : null,
|
||||||
|
rs.getString("username"),
|
||||||
|
rs.getString("action"),
|
||||||
|
categoryStr != null ? AuditCategory.valueOf(categoryStr) : null,
|
||||||
|
rs.getString("target"),
|
||||||
|
detail,
|
||||||
|
resultStr != null ? AuditResult.valueOf(resultStr) : null,
|
||||||
|
rs.getString("ip_address"),
|
||||||
|
rs.getString("user_agent")
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -24,7 +24,7 @@ public class PostgresExecutionStore implements ExecutionStore {
|
|||||||
@Override
|
@Override
|
||||||
public void upsert(ExecutionRecord execution) {
|
public void upsert(ExecutionRecord execution) {
|
||||||
jdbc.update("""
|
jdbc.update("""
|
||||||
INSERT INTO executions (execution_id, route_id, agent_id, group_name,
|
INSERT INTO executions (execution_id, route_id, agent_id, application_name,
|
||||||
status, correlation_id, exchange_id, start_time, end_time,
|
status, correlation_id, exchange_id, start_time, end_time,
|
||||||
duration_ms, error_message, error_stacktrace, diagram_content_hash,
|
duration_ms, error_message, error_stacktrace, diagram_content_hash,
|
||||||
created_at, updated_at)
|
created_at, updated_at)
|
||||||
@@ -45,7 +45,7 @@ public class PostgresExecutionStore implements ExecutionStore {
|
|||||||
updated_at = now()
|
updated_at = now()
|
||||||
""",
|
""",
|
||||||
execution.executionId(), execution.routeId(), execution.agentId(),
|
execution.executionId(), execution.routeId(), execution.agentId(),
|
||||||
execution.groupName(), execution.status(), execution.correlationId(),
|
execution.applicationName(), execution.status(), execution.correlationId(),
|
||||||
execution.exchangeId(),
|
execution.exchangeId(),
|
||||||
Timestamp.from(execution.startTime()),
|
Timestamp.from(execution.startTime()),
|
||||||
execution.endTime() != null ? Timestamp.from(execution.endTime()) : null,
|
execution.endTime() != null ? Timestamp.from(execution.endTime()) : null,
|
||||||
@@ -55,11 +55,11 @@ public class PostgresExecutionStore implements ExecutionStore {
|
|||||||
|
|
||||||
@Override
|
@Override
|
||||||
public void upsertProcessors(String executionId, Instant startTime,
|
public void upsertProcessors(String executionId, Instant startTime,
|
||||||
String groupName, String routeId,
|
String applicationName, String routeId,
|
||||||
List<ProcessorRecord> processors) {
|
List<ProcessorRecord> processors) {
|
||||||
jdbc.batchUpdate("""
|
jdbc.batchUpdate("""
|
||||||
INSERT INTO processor_executions (execution_id, processor_id, processor_type,
|
INSERT INTO processor_executions (execution_id, processor_id, processor_type,
|
||||||
diagram_node_id, group_name, route_id, depth, parent_processor_id,
|
diagram_node_id, application_name, route_id, depth, parent_processor_id,
|
||||||
status, start_time, end_time, duration_ms, error_message, error_stacktrace,
|
status, start_time, end_time, duration_ms, error_message, error_stacktrace,
|
||||||
input_body, output_body, input_headers, output_headers)
|
input_body, output_body, input_headers, output_headers)
|
||||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?::jsonb, ?::jsonb)
|
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?::jsonb, ?::jsonb)
|
||||||
@@ -76,7 +76,7 @@ public class PostgresExecutionStore implements ExecutionStore {
|
|||||||
""",
|
""",
|
||||||
processors.stream().map(p -> new Object[]{
|
processors.stream().map(p -> new Object[]{
|
||||||
p.executionId(), p.processorId(), p.processorType(),
|
p.executionId(), p.processorId(), p.processorType(),
|
||||||
p.diagramNodeId(), p.groupName(), p.routeId(),
|
p.diagramNodeId(), p.applicationName(), p.routeId(),
|
||||||
p.depth(), p.parentProcessorId(), p.status(),
|
p.depth(), p.parentProcessorId(), p.status(),
|
||||||
Timestamp.from(p.startTime()),
|
Timestamp.from(p.startTime()),
|
||||||
p.endTime() != null ? Timestamp.from(p.endTime()) : null,
|
p.endTime() != null ? Timestamp.from(p.endTime()) : null,
|
||||||
@@ -103,7 +103,7 @@ public class PostgresExecutionStore implements ExecutionStore {
|
|||||||
private static final RowMapper<ExecutionRecord> EXECUTION_MAPPER = (rs, rowNum) ->
|
private static final RowMapper<ExecutionRecord> EXECUTION_MAPPER = (rs, rowNum) ->
|
||||||
new ExecutionRecord(
|
new ExecutionRecord(
|
||||||
rs.getString("execution_id"), rs.getString("route_id"),
|
rs.getString("execution_id"), rs.getString("route_id"),
|
||||||
rs.getString("agent_id"), rs.getString("group_name"),
|
rs.getString("agent_id"), rs.getString("application_name"),
|
||||||
rs.getString("status"), rs.getString("correlation_id"),
|
rs.getString("status"), rs.getString("correlation_id"),
|
||||||
rs.getString("exchange_id"),
|
rs.getString("exchange_id"),
|
||||||
toInstant(rs, "start_time"), toInstant(rs, "end_time"),
|
toInstant(rs, "start_time"), toInstant(rs, "end_time"),
|
||||||
@@ -115,7 +115,7 @@ public class PostgresExecutionStore implements ExecutionStore {
|
|||||||
new ProcessorRecord(
|
new ProcessorRecord(
|
||||||
rs.getString("execution_id"), rs.getString("processor_id"),
|
rs.getString("execution_id"), rs.getString("processor_id"),
|
||||||
rs.getString("processor_type"), rs.getString("diagram_node_id"),
|
rs.getString("processor_type"), rs.getString("diagram_node_id"),
|
||||||
rs.getString("group_name"), rs.getString("route_id"),
|
rs.getString("application_name"), rs.getString("route_id"),
|
||||||
rs.getInt("depth"), rs.getString("parent_processor_id"),
|
rs.getInt("depth"), rs.getString("parent_processor_id"),
|
||||||
rs.getString("status"),
|
rs.getString("status"),
|
||||||
toInstant(rs, "start_time"), toInstant(rs, "end_time"),
|
toInstant(rs, "start_time"), toInstant(rs, "end_time"),
|
||||||
|
|||||||
@@ -0,0 +1,113 @@
|
|||||||
|
package com.cameleer3.server.app.storage;
|
||||||
|
|
||||||
|
import com.cameleer3.server.core.rbac.*;
|
||||||
|
import org.springframework.jdbc.core.JdbcTemplate;
|
||||||
|
import org.springframework.stereotype.Repository;
|
||||||
|
|
||||||
|
import java.util.*;
|
||||||
|
|
||||||
|
@Repository
|
||||||
|
public class PostgresGroupRepository implements GroupRepository {
|
||||||
|
|
||||||
|
private final JdbcTemplate jdbc;
|
||||||
|
|
||||||
|
public PostgresGroupRepository(JdbcTemplate jdbc) {
|
||||||
|
this.jdbc = jdbc;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public List<GroupSummary> findAll() {
|
||||||
|
return jdbc.query("SELECT id, name FROM groups ORDER BY name",
|
||||||
|
(rs, rowNum) -> new GroupSummary(rs.getObject("id", UUID.class), rs.getString("name")));
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public Optional<GroupDetail> findById(UUID id) {
|
||||||
|
var rows = jdbc.query(
|
||||||
|
"SELECT id, name, parent_group_id, created_at FROM groups WHERE id = ?",
|
||||||
|
(rs, rowNum) -> new GroupDetail(
|
||||||
|
rs.getObject("id", UUID.class),
|
||||||
|
rs.getString("name"),
|
||||||
|
rs.getObject("parent_group_id", UUID.class),
|
||||||
|
rs.getTimestamp("created_at").toInstant(),
|
||||||
|
List.of(), List.of(), List.of(), List.of()
|
||||||
|
), id);
|
||||||
|
if (rows.isEmpty()) return Optional.empty();
|
||||||
|
var g = rows.get(0);
|
||||||
|
|
||||||
|
List<RoleSummary> directRoles = jdbc.query("""
|
||||||
|
SELECT r.id, r.name, r.system FROM group_roles gr
|
||||||
|
JOIN roles r ON r.id = gr.role_id WHERE gr.group_id = ?
|
||||||
|
""", (rs, rowNum) -> new RoleSummary(rs.getObject("id", UUID.class),
|
||||||
|
rs.getString("name"), rs.getBoolean("system"), "direct"), id);
|
||||||
|
|
||||||
|
List<UserSummary> members = jdbc.query("""
|
||||||
|
SELECT u.user_id, u.display_name, u.provider FROM user_groups ug
|
||||||
|
JOIN users u ON u.user_id = ug.user_id WHERE ug.group_id = ?
|
||||||
|
""", (rs, rowNum) -> new UserSummary(rs.getString("user_id"),
|
||||||
|
rs.getString("display_name"), rs.getString("provider")), id);
|
||||||
|
|
||||||
|
List<GroupSummary> children = findChildGroups(id);
|
||||||
|
|
||||||
|
return Optional.of(new GroupDetail(g.id(), g.name(), g.parentGroupId(),
|
||||||
|
g.createdAt(), directRoles, List.of(), members, children));
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public UUID create(String name, UUID parentGroupId) {
|
||||||
|
UUID id = UUID.randomUUID();
|
||||||
|
jdbc.update("INSERT INTO groups (id, name, parent_group_id) VALUES (?, ?, ?)",
|
||||||
|
id, name, parentGroupId);
|
||||||
|
return id;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void update(UUID id, String name, UUID parentGroupId) {
|
||||||
|
jdbc.update("UPDATE groups SET name = COALESCE(?, name), parent_group_id = ? WHERE id = ?",
|
||||||
|
name, parentGroupId, id);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void delete(UUID id) {
|
||||||
|
jdbc.update("DELETE FROM groups WHERE id = ?", id);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void addRole(UUID groupId, UUID roleId) {
|
||||||
|
jdbc.update("INSERT INTO group_roles (group_id, role_id) VALUES (?, ?) ON CONFLICT DO NOTHING",
|
||||||
|
groupId, roleId);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void removeRole(UUID groupId, UUID roleId) {
|
||||||
|
jdbc.update("DELETE FROM group_roles WHERE group_id = ? AND role_id = ?", groupId, roleId);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public List<GroupSummary> findChildGroups(UUID parentId) {
|
||||||
|
return jdbc.query("SELECT id, name FROM groups WHERE parent_group_id = ? ORDER BY name",
|
||||||
|
(rs, rowNum) -> new GroupSummary(rs.getObject("id", UUID.class), rs.getString("name")),
|
||||||
|
parentId);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public List<GroupSummary> findAncestorChain(UUID groupId) {
|
||||||
|
List<GroupSummary> chain = new ArrayList<>();
|
||||||
|
UUID current = groupId;
|
||||||
|
Set<UUID> visited = new HashSet<>();
|
||||||
|
while (current != null && visited.add(current)) {
|
||||||
|
UUID id = current;
|
||||||
|
var rows = jdbc.query(
|
||||||
|
"SELECT id, name, parent_group_id FROM groups WHERE id = ?",
|
||||||
|
(rs, rowNum) -> new Object[]{
|
||||||
|
new GroupSummary(rs.getObject("id", UUID.class), rs.getString("name")),
|
||||||
|
rs.getObject("parent_group_id", UUID.class)
|
||||||
|
}, id);
|
||||||
|
if (rows.isEmpty()) break;
|
||||||
|
chain.add((GroupSummary) rows.get(0)[0]);
|
||||||
|
current = (UUID) rows.get(0)[1];
|
||||||
|
}
|
||||||
|
Collections.reverse(chain);
|
||||||
|
return chain;
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -2,10 +2,11 @@ package com.cameleer3.server.app.storage;
|
|||||||
|
|
||||||
import com.cameleer3.server.core.security.OidcConfig;
|
import com.cameleer3.server.core.security.OidcConfig;
|
||||||
import com.cameleer3.server.core.security.OidcConfigRepository;
|
import com.cameleer3.server.core.security.OidcConfigRepository;
|
||||||
|
import com.fasterxml.jackson.core.JsonProcessingException;
|
||||||
|
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||||
import org.springframework.jdbc.core.JdbcTemplate;
|
import org.springframework.jdbc.core.JdbcTemplate;
|
||||||
import org.springframework.stereotype.Repository;
|
import org.springframework.stereotype.Repository;
|
||||||
|
|
||||||
import java.sql.Array;
|
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
import java.util.Optional;
|
import java.util.Optional;
|
||||||
|
|
||||||
@@ -13,47 +14,49 @@ import java.util.Optional;
|
|||||||
public class PostgresOidcConfigRepository implements OidcConfigRepository {
|
public class PostgresOidcConfigRepository implements OidcConfigRepository {
|
||||||
|
|
||||||
private final JdbcTemplate jdbc;
|
private final JdbcTemplate jdbc;
|
||||||
|
private final ObjectMapper objectMapper;
|
||||||
|
|
||||||
public PostgresOidcConfigRepository(JdbcTemplate jdbc) {
|
public PostgresOidcConfigRepository(JdbcTemplate jdbc, ObjectMapper objectMapper) {
|
||||||
this.jdbc = jdbc;
|
this.jdbc = jdbc;
|
||||||
|
this.objectMapper = objectMapper;
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public Optional<OidcConfig> find() {
|
public Optional<OidcConfig> find() {
|
||||||
var results = jdbc.query(
|
List<OidcConfig> results = jdbc.query(
|
||||||
"SELECT * FROM oidc_config WHERE config_id = 'default'",
|
"SELECT config_val FROM server_config WHERE config_key = 'oidc'",
|
||||||
(rs, rowNum) -> {
|
(rs, rowNum) -> {
|
||||||
Array arr = rs.getArray("default_roles");
|
String json = rs.getString("config_val");
|
||||||
String[] roles = arr != null ? (String[]) arr.getArray() : new String[0];
|
try {
|
||||||
return new OidcConfig(
|
return objectMapper.readValue(json, OidcConfig.class);
|
||||||
rs.getBoolean("enabled"), rs.getString("issuer_uri"),
|
} catch (JsonProcessingException e) {
|
||||||
rs.getString("client_id"), rs.getString("client_secret"),
|
throw new RuntimeException("Failed to deserialize OIDC config", e);
|
||||||
rs.getString("roles_claim"), List.of(roles),
|
}
|
||||||
rs.getBoolean("auto_signup"), rs.getString("display_name_claim"));
|
|
||||||
});
|
});
|
||||||
return results.isEmpty() ? Optional.empty() : Optional.of(results.get(0));
|
return results.isEmpty() ? Optional.empty() : Optional.of(results.get(0));
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public void save(OidcConfig config) {
|
public void save(OidcConfig config) {
|
||||||
|
String json;
|
||||||
|
try {
|
||||||
|
json = objectMapper.writeValueAsString(config);
|
||||||
|
} catch (JsonProcessingException e) {
|
||||||
|
throw new RuntimeException("Failed to serialize OIDC config", e);
|
||||||
|
}
|
||||||
|
|
||||||
jdbc.update("""
|
jdbc.update("""
|
||||||
INSERT INTO oidc_config (config_id, enabled, issuer_uri, client_id, client_secret,
|
INSERT INTO server_config (config_key, config_val, updated_at)
|
||||||
roles_claim, default_roles, auto_signup, display_name_claim, updated_at)
|
VALUES ('oidc', ?::jsonb, now())
|
||||||
VALUES ('default', ?, ?, ?, ?, ?, ?, ?, ?, now())
|
ON CONFLICT (config_key) DO UPDATE SET
|
||||||
ON CONFLICT (config_id) DO UPDATE SET
|
config_val = EXCLUDED.config_val,
|
||||||
enabled = EXCLUDED.enabled, issuer_uri = EXCLUDED.issuer_uri,
|
|
||||||
client_id = EXCLUDED.client_id, client_secret = EXCLUDED.client_secret,
|
|
||||||
roles_claim = EXCLUDED.roles_claim, default_roles = EXCLUDED.default_roles,
|
|
||||||
auto_signup = EXCLUDED.auto_signup, display_name_claim = EXCLUDED.display_name_claim,
|
|
||||||
updated_at = now()
|
updated_at = now()
|
||||||
""",
|
""",
|
||||||
config.enabled(), config.issuerUri(), config.clientId(), config.clientSecret(),
|
json);
|
||||||
config.rolesClaim(), config.defaultRoles().toArray(new String[0]),
|
|
||||||
config.autoSignup(), config.displayNameClaim());
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public void delete() {
|
public void delete() {
|
||||||
jdbc.update("DELETE FROM oidc_config WHERE config_id = 'default'");
|
jdbc.update("DELETE FROM server_config WHERE config_key = 'oidc'");
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -0,0 +1,85 @@
|
|||||||
|
package com.cameleer3.server.app.storage;
|
||||||
|
|
||||||
|
import com.cameleer3.server.core.rbac.*;
|
||||||
|
import org.springframework.jdbc.core.JdbcTemplate;
|
||||||
|
import org.springframework.stereotype.Repository;
|
||||||
|
|
||||||
|
import java.util.*;
|
||||||
|
|
||||||
|
@Repository
|
||||||
|
public class PostgresRoleRepository implements RoleRepository {
|
||||||
|
|
||||||
|
private final JdbcTemplate jdbc;
|
||||||
|
|
||||||
|
public PostgresRoleRepository(JdbcTemplate jdbc) {
|
||||||
|
this.jdbc = jdbc;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public List<RoleDetail> findAll() {
|
||||||
|
return jdbc.query("""
|
||||||
|
SELECT id, name, description, scope, system, created_at FROM roles ORDER BY system DESC, name
|
||||||
|
""", (rs, rowNum) -> new RoleDetail(
|
||||||
|
rs.getObject("id", UUID.class),
|
||||||
|
rs.getString("name"),
|
||||||
|
rs.getString("description"),
|
||||||
|
rs.getString("scope"),
|
||||||
|
rs.getBoolean("system"),
|
||||||
|
rs.getTimestamp("created_at").toInstant(),
|
||||||
|
List.of(), List.of(), List.of()
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public Optional<RoleDetail> findById(UUID id) {
|
||||||
|
var rows = jdbc.query("""
|
||||||
|
SELECT id, name, description, scope, system, created_at FROM roles WHERE id = ?
|
||||||
|
""", (rs, rowNum) -> new RoleDetail(
|
||||||
|
rs.getObject("id", UUID.class),
|
||||||
|
rs.getString("name"),
|
||||||
|
rs.getString("description"),
|
||||||
|
rs.getString("scope"),
|
||||||
|
rs.getBoolean("system"),
|
||||||
|
rs.getTimestamp("created_at").toInstant(),
|
||||||
|
List.of(), List.of(), List.of()
|
||||||
|
), id);
|
||||||
|
if (rows.isEmpty()) return Optional.empty();
|
||||||
|
var r = rows.get(0);
|
||||||
|
|
||||||
|
List<GroupSummary> assignedGroups = jdbc.query("""
|
||||||
|
SELECT g.id, g.name FROM group_roles gr
|
||||||
|
JOIN groups g ON g.id = gr.group_id WHERE gr.role_id = ?
|
||||||
|
""", (rs, rowNum) -> new GroupSummary(rs.getObject("id", UUID.class),
|
||||||
|
rs.getString("name")), id);
|
||||||
|
|
||||||
|
List<UserSummary> directUsers = jdbc.query("""
|
||||||
|
SELECT u.user_id, u.display_name, u.provider FROM user_roles ur
|
||||||
|
JOIN users u ON u.user_id = ur.user_id WHERE ur.role_id = ?
|
||||||
|
""", (rs, rowNum) -> new UserSummary(rs.getString("user_id"),
|
||||||
|
rs.getString("display_name"), rs.getString("provider")), id);
|
||||||
|
|
||||||
|
return Optional.of(new RoleDetail(r.id(), r.name(), r.description(),
|
||||||
|
r.scope(), r.system(), r.createdAt(), assignedGroups, directUsers, List.of()));
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public UUID create(String name, String description, String scope) {
|
||||||
|
UUID id = UUID.randomUUID();
|
||||||
|
jdbc.update("INSERT INTO roles (id, name, description, scope, system) VALUES (?, ?, ?, ?, false)",
|
||||||
|
id, name, description, scope);
|
||||||
|
return id;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void update(UUID id, String name, String description, String scope) {
|
||||||
|
jdbc.update("""
|
||||||
|
UPDATE roles SET name = COALESCE(?, name), description = COALESCE(?, description),
|
||||||
|
scope = COALESCE(?, scope) WHERE id = ? AND system = false
|
||||||
|
""", name, description, scope, id);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void delete(UUID id) {
|
||||||
|
jdbc.update("DELETE FROM roles WHERE id = ? AND system = false", id);
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -29,9 +29,9 @@ public class PostgresStatsStore implements StatsStore {
|
|||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public ExecutionStats statsForApp(Instant from, Instant to, String groupName) {
|
public ExecutionStats statsForApp(Instant from, Instant to, String applicationName) {
|
||||||
return queryStats("stats_1m_app", from, to, List.of(
|
return queryStats("stats_1m_app", from, to, List.of(
|
||||||
new Filter("group_name", groupName)));
|
new Filter("application_name", applicationName)));
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
@@ -56,9 +56,9 @@ public class PostgresStatsStore implements StatsStore {
|
|||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public StatsTimeseries timeseriesForApp(Instant from, Instant to, int bucketCount, String groupName) {
|
public StatsTimeseries timeseriesForApp(Instant from, Instant to, int bucketCount, String applicationName) {
|
||||||
return queryTimeseries("stats_1m_app", from, to, bucketCount, List.of(
|
return queryTimeseries("stats_1m_app", from, to, bucketCount, List.of(
|
||||||
new Filter("group_name", groupName)), true);
|
new Filter("application_name", applicationName)), true);
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
|
|||||||
@@ -0,0 +1,58 @@
|
|||||||
|
package com.cameleer3.server.app.storage;
|
||||||
|
|
||||||
|
import com.cameleer3.server.core.admin.ThresholdConfig;
|
||||||
|
import com.cameleer3.server.core.admin.ThresholdRepository;
|
||||||
|
import com.fasterxml.jackson.core.JsonProcessingException;
|
||||||
|
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||||
|
import org.springframework.jdbc.core.JdbcTemplate;
|
||||||
|
import org.springframework.stereotype.Repository;
|
||||||
|
|
||||||
|
import java.util.List;
|
||||||
|
import java.util.Optional;
|
||||||
|
|
||||||
|
@Repository
|
||||||
|
public class PostgresThresholdRepository implements ThresholdRepository {
|
||||||
|
|
||||||
|
private final JdbcTemplate jdbc;
|
||||||
|
private final ObjectMapper objectMapper;
|
||||||
|
|
||||||
|
public PostgresThresholdRepository(JdbcTemplate jdbc, ObjectMapper objectMapper) {
|
||||||
|
this.jdbc = jdbc;
|
||||||
|
this.objectMapper = objectMapper;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public Optional<ThresholdConfig> find() {
|
||||||
|
List<ThresholdConfig> results = jdbc.query(
|
||||||
|
"SELECT config_val FROM server_config WHERE config_key = 'thresholds'",
|
||||||
|
(rs, rowNum) -> {
|
||||||
|
String json = rs.getString("config_val");
|
||||||
|
try {
|
||||||
|
return objectMapper.readValue(json, ThresholdConfig.class);
|
||||||
|
} catch (JsonProcessingException e) {
|
||||||
|
throw new RuntimeException("Failed to deserialize threshold config", e);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
return results.isEmpty() ? Optional.empty() : Optional.of(results.get(0));
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void save(ThresholdConfig config, String updatedBy) {
|
||||||
|
String json;
|
||||||
|
try {
|
||||||
|
json = objectMapper.writeValueAsString(config);
|
||||||
|
} catch (JsonProcessingException e) {
|
||||||
|
throw new RuntimeException("Failed to serialize threshold config", e);
|
||||||
|
}
|
||||||
|
|
||||||
|
jdbc.update("""
|
||||||
|
INSERT INTO server_config (config_key, config_val, updated_by, updated_at)
|
||||||
|
VALUES ('thresholds', ?::jsonb, ?, now())
|
||||||
|
ON CONFLICT (config_key) DO UPDATE SET
|
||||||
|
config_val = EXCLUDED.config_val,
|
||||||
|
updated_by = EXCLUDED.updated_by,
|
||||||
|
updated_at = now()
|
||||||
|
""",
|
||||||
|
json, updatedBy);
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -5,8 +5,6 @@ import com.cameleer3.server.core.security.UserRepository;
|
|||||||
import org.springframework.jdbc.core.JdbcTemplate;
|
import org.springframework.jdbc.core.JdbcTemplate;
|
||||||
import org.springframework.stereotype.Repository;
|
import org.springframework.stereotype.Repository;
|
||||||
|
|
||||||
import java.sql.Array;
|
|
||||||
import java.sql.Timestamp;
|
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
import java.util.Optional;
|
import java.util.Optional;
|
||||||
|
|
||||||
@@ -22,35 +20,28 @@ public class PostgresUserRepository implements UserRepository {
|
|||||||
@Override
|
@Override
|
||||||
public Optional<UserInfo> findById(String userId) {
|
public Optional<UserInfo> findById(String userId) {
|
||||||
var results = jdbc.query(
|
var results = jdbc.query(
|
||||||
"SELECT * FROM users WHERE user_id = ?",
|
"SELECT user_id, provider, email, display_name, created_at FROM users WHERE user_id = ?",
|
||||||
(rs, rowNum) -> mapUser(rs), userId);
|
(rs, rowNum) -> mapUser(rs), userId);
|
||||||
return results.isEmpty() ? Optional.empty() : Optional.of(results.get(0));
|
return results.isEmpty() ? Optional.empty() : Optional.of(results.get(0));
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public List<UserInfo> findAll() {
|
public List<UserInfo> findAll() {
|
||||||
return jdbc.query("SELECT * FROM users ORDER BY user_id",
|
return jdbc.query("SELECT user_id, provider, email, display_name, created_at FROM users ORDER BY user_id",
|
||||||
(rs, rowNum) -> mapUser(rs));
|
(rs, rowNum) -> mapUser(rs));
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public void upsert(UserInfo user) {
|
public void upsert(UserInfo user) {
|
||||||
jdbc.update("""
|
jdbc.update("""
|
||||||
INSERT INTO users (user_id, provider, email, display_name, roles, created_at, updated_at)
|
INSERT INTO users (user_id, provider, email, display_name, created_at, updated_at)
|
||||||
VALUES (?, ?, ?, ?, ?, now(), now())
|
VALUES (?, ?, ?, ?, now(), now())
|
||||||
ON CONFLICT (user_id) DO UPDATE SET
|
ON CONFLICT (user_id) DO UPDATE SET
|
||||||
provider = EXCLUDED.provider, email = EXCLUDED.email,
|
provider = EXCLUDED.provider, email = EXCLUDED.email,
|
||||||
display_name = EXCLUDED.display_name, roles = EXCLUDED.roles,
|
display_name = EXCLUDED.display_name,
|
||||||
updated_at = now()
|
updated_at = now()
|
||||||
""",
|
""",
|
||||||
user.userId(), user.provider(), user.email(), user.displayName(),
|
user.userId(), user.provider(), user.email(), user.displayName());
|
||||||
user.roles().toArray(new String[0]));
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public void updateRoles(String userId, List<String> roles) {
|
|
||||||
jdbc.update("UPDATE users SET roles = ?, updated_at = now() WHERE user_id = ?",
|
|
||||||
roles.toArray(new String[0]), userId);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
@@ -58,14 +49,27 @@ public class PostgresUserRepository implements UserRepository {
|
|||||||
jdbc.update("DELETE FROM users WHERE user_id = ?", userId);
|
jdbc.update("DELETE FROM users WHERE user_id = ?", userId);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void setPassword(String userId, String passwordHash) {
|
||||||
|
jdbc.update("UPDATE users SET password_hash = ? WHERE user_id = ?", passwordHash, userId);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public Optional<String> getPasswordHash(String userId) {
|
||||||
|
List<String> results = jdbc.query(
|
||||||
|
"SELECT password_hash FROM users WHERE user_id = ?",
|
||||||
|
(rs, rowNum) -> rs.getString("password_hash"),
|
||||||
|
userId);
|
||||||
|
if (results.isEmpty() || results.get(0) == null) return Optional.empty();
|
||||||
|
return Optional.of(results.get(0));
|
||||||
|
}
|
||||||
|
|
||||||
private UserInfo mapUser(java.sql.ResultSet rs) throws java.sql.SQLException {
|
private UserInfo mapUser(java.sql.ResultSet rs) throws java.sql.SQLException {
|
||||||
Array rolesArray = rs.getArray("roles");
|
|
||||||
String[] roles = rolesArray != null ? (String[]) rolesArray.getArray() : new String[0];
|
|
||||||
java.sql.Timestamp ts = rs.getTimestamp("created_at");
|
java.sql.Timestamp ts = rs.getTimestamp("created_at");
|
||||||
java.time.Instant createdAt = ts != null ? ts.toInstant() : null;
|
java.time.Instant createdAt = ts != null ? ts.toInstant() : null;
|
||||||
return new UserInfo(
|
return new UserInfo(
|
||||||
rs.getString("user_id"), rs.getString("provider"),
|
rs.getString("user_id"), rs.getString("provider"),
|
||||||
rs.getString("email"), rs.getString("display_name"),
|
rs.getString("email"), rs.getString("display_name"),
|
||||||
List.of(roles), createdAt);
|
createdAt);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -3,13 +3,18 @@ server:
|
|||||||
|
|
||||||
spring:
|
spring:
|
||||||
datasource:
|
datasource:
|
||||||
url: jdbc:postgresql://localhost:5432/cameleer3
|
url: jdbc:postgresql://localhost:5432/cameleer3?currentSchema=${CAMELEER_DB_SCHEMA:public}
|
||||||
username: cameleer
|
username: cameleer
|
||||||
password: ${CAMELEER_DB_PASSWORD:cameleer_dev}
|
password: ${CAMELEER_DB_PASSWORD:cameleer_dev}
|
||||||
driver-class-name: org.postgresql.Driver
|
driver-class-name: org.postgresql.Driver
|
||||||
flyway:
|
flyway:
|
||||||
enabled: true
|
enabled: true
|
||||||
locations: classpath:db/migration
|
locations: classpath:db/migration
|
||||||
|
url: jdbc:postgresql://localhost:5432/cameleer3?currentSchema=${CAMELEER_DB_SCHEMA:public},public
|
||||||
|
user: ${spring.datasource.username}
|
||||||
|
password: ${spring.datasource.password}
|
||||||
|
schemas: ${CAMELEER_DB_SCHEMA:public}
|
||||||
|
default-schema: ${CAMELEER_DB_SCHEMA:public}
|
||||||
mvc:
|
mvc:
|
||||||
async:
|
async:
|
||||||
request-timeout: -1
|
request-timeout: -1
|
||||||
@@ -34,6 +39,7 @@ ingestion:
|
|||||||
|
|
||||||
opensearch:
|
opensearch:
|
||||||
url: ${OPENSEARCH_URL:http://localhost:9200}
|
url: ${OPENSEARCH_URL:http://localhost:9200}
|
||||||
|
index-prefix: ${CAMELEER_OPENSEARCH_INDEX_PREFIX:executions-}
|
||||||
queue-size: ${CAMELEER_OPENSEARCH_QUEUE_SIZE:10000}
|
queue-size: ${CAMELEER_OPENSEARCH_QUEUE_SIZE:10000}
|
||||||
debounce-ms: ${CAMELEER_OPENSEARCH_DEBOUNCE_MS:2000}
|
debounce-ms: ${CAMELEER_OPENSEARCH_DEBOUNCE_MS:2000}
|
||||||
|
|
||||||
@@ -50,13 +56,7 @@ security:
|
|||||||
ui-password: ${CAMELEER_UI_PASSWORD:admin}
|
ui-password: ${CAMELEER_UI_PASSWORD:admin}
|
||||||
ui-origin: ${CAMELEER_UI_ORIGIN:http://localhost:5173}
|
ui-origin: ${CAMELEER_UI_ORIGIN:http://localhost:5173}
|
||||||
jwt-secret: ${CAMELEER_JWT_SECRET:}
|
jwt-secret: ${CAMELEER_JWT_SECRET:}
|
||||||
oidc:
|
|
||||||
enabled: ${CAMELEER_OIDC_ENABLED:false}
|
|
||||||
issuer-uri: ${CAMELEER_OIDC_ISSUER:}
|
|
||||||
client-id: ${CAMELEER_OIDC_CLIENT_ID:}
|
|
||||||
client-secret: ${CAMELEER_OIDC_CLIENT_SECRET:}
|
|
||||||
roles-claim: ${CAMELEER_OIDC_ROLES_CLAIM:realm_access.roles}
|
|
||||||
default-roles: ${CAMELEER_OIDC_DEFAULT_ROLES:VIEWER}
|
|
||||||
|
|
||||||
springdoc:
|
springdoc:
|
||||||
api-docs:
|
api-docs:
|
||||||
|
|||||||
@@ -1,2 +0,0 @@
|
|||||||
CREATE EXTENSION IF NOT EXISTS timescaledb;
|
|
||||||
CREATE EXTENSION IF NOT EXISTS timescaledb_toolkit;
|
|
||||||
@@ -0,0 +1,303 @@
|
|||||||
|
-- V1__init.sql - Consolidated schema for Cameleer3
|
||||||
|
|
||||||
|
-- Extensions
|
||||||
|
CREATE EXTENSION IF NOT EXISTS timescaledb;
|
||||||
|
CREATE EXTENSION IF NOT EXISTS timescaledb_toolkit;
|
||||||
|
|
||||||
|
-- =============================================================
|
||||||
|
-- RBAC
|
||||||
|
-- =============================================================
|
||||||
|
|
||||||
|
CREATE TABLE users (
|
||||||
|
user_id TEXT PRIMARY KEY,
|
||||||
|
provider TEXT NOT NULL,
|
||||||
|
email TEXT,
|
||||||
|
display_name TEXT,
|
||||||
|
password_hash TEXT,
|
||||||
|
created_at TIMESTAMPTZ NOT NULL DEFAULT now(),
|
||||||
|
updated_at TIMESTAMPTZ NOT NULL DEFAULT now()
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE TABLE roles (
|
||||||
|
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||||
|
name TEXT NOT NULL UNIQUE,
|
||||||
|
description TEXT NOT NULL DEFAULT '',
|
||||||
|
scope TEXT NOT NULL DEFAULT 'custom',
|
||||||
|
system BOOLEAN NOT NULL DEFAULT false,
|
||||||
|
created_at TIMESTAMPTZ NOT NULL DEFAULT now()
|
||||||
|
);
|
||||||
|
|
||||||
|
INSERT INTO roles (id, name, description, scope, system) VALUES
|
||||||
|
('00000000-0000-0000-0000-000000000001', 'AGENT', 'Agent registration and data ingestion', 'system-wide', true),
|
||||||
|
('00000000-0000-0000-0000-000000000002', 'VIEWER', 'Read-only access to dashboards and data', 'system-wide', true),
|
||||||
|
('00000000-0000-0000-0000-000000000003', 'OPERATOR', 'Operational commands (start/stop/configure agents)', 'system-wide', true),
|
||||||
|
('00000000-0000-0000-0000-000000000004', 'ADMIN', 'Full administrative access', 'system-wide', true);
|
||||||
|
|
||||||
|
CREATE TABLE groups (
|
||||||
|
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||||
|
name TEXT NOT NULL UNIQUE,
|
||||||
|
parent_group_id UUID REFERENCES groups(id) ON DELETE SET NULL,
|
||||||
|
created_at TIMESTAMPTZ NOT NULL DEFAULT now()
|
||||||
|
);
|
||||||
|
|
||||||
|
-- Built-in Admins group
|
||||||
|
INSERT INTO groups (id, name) VALUES
|
||||||
|
('00000000-0000-0000-0000-000000000010', 'Admins');
|
||||||
|
|
||||||
|
CREATE TABLE group_roles (
|
||||||
|
group_id UUID NOT NULL REFERENCES groups(id) ON DELETE CASCADE,
|
||||||
|
role_id UUID NOT NULL REFERENCES roles(id) ON DELETE CASCADE,
|
||||||
|
PRIMARY KEY (group_id, role_id)
|
||||||
|
);
|
||||||
|
|
||||||
|
-- Assign ADMIN role to Admins group
|
||||||
|
INSERT INTO group_roles (group_id, role_id) VALUES
|
||||||
|
('00000000-0000-0000-0000-000000000010', '00000000-0000-0000-0000-000000000004');
|
||||||
|
|
||||||
|
CREATE TABLE user_groups (
|
||||||
|
user_id TEXT NOT NULL REFERENCES users(user_id) ON DELETE CASCADE,
|
||||||
|
group_id UUID NOT NULL REFERENCES groups(id) ON DELETE CASCADE,
|
||||||
|
PRIMARY KEY (user_id, group_id)
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE TABLE user_roles (
|
||||||
|
user_id TEXT NOT NULL REFERENCES users(user_id) ON DELETE CASCADE,
|
||||||
|
role_id UUID NOT NULL REFERENCES roles(id) ON DELETE CASCADE,
|
||||||
|
PRIMARY KEY (user_id, role_id)
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE INDEX idx_user_roles_user_id ON user_roles(user_id);
|
||||||
|
CREATE INDEX idx_user_groups_user_id ON user_groups(user_id);
|
||||||
|
CREATE INDEX idx_group_roles_group_id ON group_roles(group_id);
|
||||||
|
CREATE INDEX idx_groups_parent ON groups(parent_group_id);
|
||||||
|
|
||||||
|
-- =============================================================
|
||||||
|
-- Execution data (TimescaleDB hypertables)
|
||||||
|
-- =============================================================
|
||||||
|
|
||||||
|
CREATE TABLE executions (
|
||||||
|
execution_id TEXT NOT NULL,
|
||||||
|
route_id TEXT NOT NULL,
|
||||||
|
agent_id TEXT NOT NULL,
|
||||||
|
application_name TEXT NOT NULL,
|
||||||
|
status TEXT NOT NULL,
|
||||||
|
correlation_id TEXT,
|
||||||
|
exchange_id TEXT,
|
||||||
|
start_time TIMESTAMPTZ NOT NULL,
|
||||||
|
end_time TIMESTAMPTZ,
|
||||||
|
duration_ms BIGINT,
|
||||||
|
error_message TEXT,
|
||||||
|
error_stacktrace TEXT,
|
||||||
|
diagram_content_hash TEXT,
|
||||||
|
created_at TIMESTAMPTZ NOT NULL DEFAULT now(),
|
||||||
|
updated_at TIMESTAMPTZ NOT NULL DEFAULT now(),
|
||||||
|
PRIMARY KEY (execution_id, start_time)
|
||||||
|
);
|
||||||
|
|
||||||
|
SELECT create_hypertable('executions', 'start_time', chunk_time_interval => INTERVAL '1 day');
|
||||||
|
|
||||||
|
CREATE INDEX idx_executions_agent_time ON executions (agent_id, start_time DESC);
|
||||||
|
CREATE INDEX idx_executions_route_time ON executions (route_id, start_time DESC);
|
||||||
|
CREATE INDEX idx_executions_app_time ON executions (application_name, start_time DESC);
|
||||||
|
CREATE INDEX idx_executions_correlation ON executions (correlation_id);
|
||||||
|
|
||||||
|
CREATE TABLE processor_executions (
|
||||||
|
id BIGSERIAL,
|
||||||
|
execution_id TEXT NOT NULL,
|
||||||
|
processor_id TEXT NOT NULL,
|
||||||
|
processor_type TEXT NOT NULL,
|
||||||
|
diagram_node_id TEXT,
|
||||||
|
application_name TEXT NOT NULL,
|
||||||
|
route_id TEXT NOT NULL,
|
||||||
|
depth INT NOT NULL,
|
||||||
|
parent_processor_id TEXT,
|
||||||
|
status TEXT NOT NULL,
|
||||||
|
start_time TIMESTAMPTZ NOT NULL,
|
||||||
|
end_time TIMESTAMPTZ,
|
||||||
|
duration_ms BIGINT,
|
||||||
|
error_message TEXT,
|
||||||
|
error_stacktrace TEXT,
|
||||||
|
input_body TEXT,
|
||||||
|
output_body TEXT,
|
||||||
|
input_headers JSONB,
|
||||||
|
output_headers JSONB,
|
||||||
|
created_at TIMESTAMPTZ NOT NULL DEFAULT now(),
|
||||||
|
UNIQUE (execution_id, processor_id, start_time)
|
||||||
|
);
|
||||||
|
|
||||||
|
SELECT create_hypertable('processor_executions', 'start_time', chunk_time_interval => INTERVAL '1 day');
|
||||||
|
|
||||||
|
CREATE INDEX idx_proc_exec_execution ON processor_executions (execution_id);
|
||||||
|
CREATE INDEX idx_proc_exec_type_time ON processor_executions (processor_type, start_time DESC);
|
||||||
|
|
||||||
|
-- =============================================================
|
||||||
|
-- Agent metrics
|
||||||
|
-- =============================================================
|
||||||
|
|
||||||
|
CREATE TABLE agent_metrics (
|
||||||
|
agent_id TEXT NOT NULL,
|
||||||
|
metric_name TEXT NOT NULL,
|
||||||
|
metric_value DOUBLE PRECISION NOT NULL,
|
||||||
|
tags JSONB,
|
||||||
|
collected_at TIMESTAMPTZ NOT NULL,
|
||||||
|
server_received_at TIMESTAMPTZ NOT NULL DEFAULT now()
|
||||||
|
);
|
||||||
|
|
||||||
|
SELECT create_hypertable('agent_metrics', 'collected_at', chunk_time_interval => INTERVAL '1 day');
|
||||||
|
|
||||||
|
CREATE INDEX idx_metrics_agent_name ON agent_metrics (agent_id, metric_name, collected_at DESC);
|
||||||
|
|
||||||
|
-- =============================================================
|
||||||
|
-- Route diagrams
|
||||||
|
-- =============================================================
|
||||||
|
|
||||||
|
CREATE TABLE route_diagrams (
|
||||||
|
content_hash TEXT PRIMARY KEY,
|
||||||
|
route_id TEXT NOT NULL,
|
||||||
|
agent_id TEXT NOT NULL,
|
||||||
|
definition TEXT NOT NULL,
|
||||||
|
created_at TIMESTAMPTZ NOT NULL DEFAULT now()
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE INDEX idx_diagrams_route_agent ON route_diagrams (route_id, agent_id);
|
||||||
|
|
||||||
|
-- =============================================================
|
||||||
|
-- Agent events
|
||||||
|
-- =============================================================
|
||||||
|
|
||||||
|
CREATE TABLE agent_events (
|
||||||
|
id BIGSERIAL PRIMARY KEY,
|
||||||
|
agent_id TEXT NOT NULL,
|
||||||
|
app_id TEXT NOT NULL,
|
||||||
|
event_type TEXT NOT NULL,
|
||||||
|
detail TEXT,
|
||||||
|
timestamp TIMESTAMPTZ NOT NULL DEFAULT NOW()
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE INDEX idx_agent_events_agent ON agent_events(agent_id, timestamp DESC);
|
||||||
|
CREATE INDEX idx_agent_events_app ON agent_events(app_id, timestamp DESC);
|
||||||
|
CREATE INDEX idx_agent_events_time ON agent_events(timestamp DESC);
|
||||||
|
|
||||||
|
-- =============================================================
|
||||||
|
-- Server configuration
|
||||||
|
-- =============================================================
|
||||||
|
|
||||||
|
CREATE TABLE server_config (
|
||||||
|
config_key TEXT PRIMARY KEY,
|
||||||
|
config_val JSONB NOT NULL,
|
||||||
|
updated_at TIMESTAMPTZ NOT NULL DEFAULT now(),
|
||||||
|
updated_by TEXT
|
||||||
|
);
|
||||||
|
|
||||||
|
-- =============================================================
|
||||||
|
-- Admin
|
||||||
|
-- =============================================================
|
||||||
|
|
||||||
|
CREATE TABLE audit_log (
|
||||||
|
id BIGSERIAL PRIMARY KEY,
|
||||||
|
timestamp TIMESTAMPTZ NOT NULL DEFAULT now(),
|
||||||
|
username TEXT NOT NULL,
|
||||||
|
action TEXT NOT NULL,
|
||||||
|
category TEXT NOT NULL,
|
||||||
|
target TEXT,
|
||||||
|
detail JSONB,
|
||||||
|
result TEXT NOT NULL,
|
||||||
|
ip_address TEXT,
|
||||||
|
user_agent TEXT
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE INDEX idx_audit_log_timestamp ON audit_log (timestamp DESC);
|
||||||
|
CREATE INDEX idx_audit_log_username ON audit_log (username);
|
||||||
|
CREATE INDEX idx_audit_log_category ON audit_log (category);
|
||||||
|
CREATE INDEX idx_audit_log_action ON audit_log (action);
|
||||||
|
CREATE INDEX idx_audit_log_target ON audit_log (target);
|
||||||
|
|
||||||
|
-- =============================================================
|
||||||
|
-- Continuous aggregates
|
||||||
|
-- =============================================================
|
||||||
|
|
||||||
|
CREATE MATERIALIZED VIEW stats_1m_all
|
||||||
|
WITH (timescaledb.continuous, timescaledb.materialized_only = false) AS
|
||||||
|
SELECT
|
||||||
|
time_bucket('1 minute', start_time) AS bucket,
|
||||||
|
COUNT(*) AS total_count,
|
||||||
|
COUNT(*) FILTER (WHERE status = 'FAILED') AS failed_count,
|
||||||
|
COUNT(*) FILTER (WHERE status = 'RUNNING') AS running_count,
|
||||||
|
SUM(duration_ms) AS duration_sum,
|
||||||
|
MAX(duration_ms) AS duration_max,
|
||||||
|
approx_percentile(0.99, percentile_agg(duration_ms::DOUBLE PRECISION)) AS p99_duration
|
||||||
|
FROM executions
|
||||||
|
WHERE status IS NOT NULL
|
||||||
|
GROUP BY bucket
|
||||||
|
WITH NO DATA;
|
||||||
|
|
||||||
|
|
||||||
|
CREATE MATERIALIZED VIEW stats_1m_app
|
||||||
|
WITH (timescaledb.continuous, timescaledb.materialized_only = false) AS
|
||||||
|
SELECT
|
||||||
|
time_bucket('1 minute', start_time) AS bucket,
|
||||||
|
application_name,
|
||||||
|
COUNT(*) AS total_count,
|
||||||
|
COUNT(*) FILTER (WHERE status = 'FAILED') AS failed_count,
|
||||||
|
COUNT(*) FILTER (WHERE status = 'RUNNING') AS running_count,
|
||||||
|
SUM(duration_ms) AS duration_sum,
|
||||||
|
MAX(duration_ms) AS duration_max,
|
||||||
|
approx_percentile(0.99, percentile_agg(duration_ms::DOUBLE PRECISION)) AS p99_duration
|
||||||
|
FROM executions
|
||||||
|
WHERE status IS NOT NULL
|
||||||
|
GROUP BY bucket, application_name
|
||||||
|
WITH NO DATA;
|
||||||
|
|
||||||
|
|
||||||
|
CREATE MATERIALIZED VIEW stats_1m_route
|
||||||
|
WITH (timescaledb.continuous, timescaledb.materialized_only = false) AS
|
||||||
|
SELECT
|
||||||
|
time_bucket('1 minute', start_time) AS bucket,
|
||||||
|
application_name,
|
||||||
|
route_id,
|
||||||
|
COUNT(*) AS total_count,
|
||||||
|
COUNT(*) FILTER (WHERE status = 'FAILED') AS failed_count,
|
||||||
|
COUNT(*) FILTER (WHERE status = 'RUNNING') AS running_count,
|
||||||
|
SUM(duration_ms) AS duration_sum,
|
||||||
|
MAX(duration_ms) AS duration_max,
|
||||||
|
approx_percentile(0.99, percentile_agg(duration_ms::DOUBLE PRECISION)) AS p99_duration
|
||||||
|
FROM executions
|
||||||
|
WHERE status IS NOT NULL
|
||||||
|
GROUP BY bucket, application_name, route_id
|
||||||
|
WITH NO DATA;
|
||||||
|
|
||||||
|
|
||||||
|
CREATE MATERIALIZED VIEW stats_1m_processor
|
||||||
|
WITH (timescaledb.continuous, timescaledb.materialized_only = false) AS
|
||||||
|
SELECT
|
||||||
|
time_bucket('1 minute', start_time) AS bucket,
|
||||||
|
application_name,
|
||||||
|
route_id,
|
||||||
|
processor_type,
|
||||||
|
COUNT(*) AS total_count,
|
||||||
|
COUNT(*) FILTER (WHERE status = 'FAILED') AS failed_count,
|
||||||
|
SUM(duration_ms) AS duration_sum,
|
||||||
|
MAX(duration_ms) AS duration_max,
|
||||||
|
approx_percentile(0.99, percentile_agg(duration_ms::DOUBLE PRECISION)) AS p99_duration
|
||||||
|
FROM processor_executions
|
||||||
|
GROUP BY bucket, application_name, route_id, processor_type
|
||||||
|
WITH NO DATA;
|
||||||
|
|
||||||
|
|
||||||
|
CREATE MATERIALIZED VIEW stats_1m_processor_detail
|
||||||
|
WITH (timescaledb.continuous, timescaledb.materialized_only = false) AS
|
||||||
|
SELECT
|
||||||
|
time_bucket('1 minute', start_time) AS bucket,
|
||||||
|
application_name,
|
||||||
|
route_id,
|
||||||
|
processor_id,
|
||||||
|
processor_type,
|
||||||
|
COUNT(*) AS total_count,
|
||||||
|
COUNT(*) FILTER (WHERE status = 'FAILED') AS failed_count,
|
||||||
|
SUM(duration_ms) AS duration_sum,
|
||||||
|
MAX(duration_ms) AS duration_max,
|
||||||
|
approx_percentile(0.99, percentile_agg(duration_ms)) AS p99_duration
|
||||||
|
FROM processor_executions
|
||||||
|
GROUP BY bucket, application_name, route_id, processor_id, processor_type
|
||||||
|
WITH NO DATA;
|
||||||
|
|
||||||
@@ -1,25 +0,0 @@
|
|||||||
CREATE TABLE executions (
|
|
||||||
execution_id TEXT NOT NULL,
|
|
||||||
route_id TEXT NOT NULL,
|
|
||||||
agent_id TEXT NOT NULL,
|
|
||||||
group_name TEXT NOT NULL,
|
|
||||||
status TEXT NOT NULL,
|
|
||||||
correlation_id TEXT,
|
|
||||||
exchange_id TEXT,
|
|
||||||
start_time TIMESTAMPTZ NOT NULL,
|
|
||||||
end_time TIMESTAMPTZ,
|
|
||||||
duration_ms BIGINT,
|
|
||||||
error_message TEXT,
|
|
||||||
error_stacktrace TEXT,
|
|
||||||
diagram_content_hash TEXT,
|
|
||||||
created_at TIMESTAMPTZ NOT NULL DEFAULT now(),
|
|
||||||
updated_at TIMESTAMPTZ NOT NULL DEFAULT now(),
|
|
||||||
PRIMARY KEY (execution_id, start_time)
|
|
||||||
);
|
|
||||||
|
|
||||||
SELECT create_hypertable('executions', 'start_time', chunk_time_interval => INTERVAL '1 day');
|
|
||||||
|
|
||||||
CREATE INDEX idx_executions_agent_time ON executions (agent_id, start_time DESC);
|
|
||||||
CREATE INDEX idx_executions_route_time ON executions (route_id, start_time DESC);
|
|
||||||
CREATE INDEX idx_executions_group_time ON executions (group_name, start_time DESC);
|
|
||||||
CREATE INDEX idx_executions_correlation ON executions (correlation_id);
|
|
||||||
@@ -0,0 +1,38 @@
|
|||||||
|
-- V2__policies.sql - TimescaleDB policies (must run outside transaction)
|
||||||
|
-- flyway:executeInTransaction=false
|
||||||
|
|
||||||
|
-- Agent metrics retention & compression
|
||||||
|
ALTER TABLE agent_metrics SET (timescaledb.compress);
|
||||||
|
SELECT add_retention_policy('agent_metrics', INTERVAL '90 days', if_not_exists => true);
|
||||||
|
SELECT add_compression_policy('agent_metrics', INTERVAL '7 days', if_not_exists => true);
|
||||||
|
|
||||||
|
-- Continuous aggregate refresh policies
|
||||||
|
SELECT add_continuous_aggregate_policy('stats_1m_all',
|
||||||
|
start_offset => INTERVAL '1 hour',
|
||||||
|
end_offset => INTERVAL '1 minute',
|
||||||
|
schedule_interval => INTERVAL '1 minute',
|
||||||
|
if_not_exists => true);
|
||||||
|
|
||||||
|
SELECT add_continuous_aggregate_policy('stats_1m_app',
|
||||||
|
start_offset => INTERVAL '1 hour',
|
||||||
|
end_offset => INTERVAL '1 minute',
|
||||||
|
schedule_interval => INTERVAL '1 minute',
|
||||||
|
if_not_exists => true);
|
||||||
|
|
||||||
|
SELECT add_continuous_aggregate_policy('stats_1m_route',
|
||||||
|
start_offset => INTERVAL '1 hour',
|
||||||
|
end_offset => INTERVAL '1 minute',
|
||||||
|
schedule_interval => INTERVAL '1 minute',
|
||||||
|
if_not_exists => true);
|
||||||
|
|
||||||
|
SELECT add_continuous_aggregate_policy('stats_1m_processor',
|
||||||
|
start_offset => INTERVAL '1 hour',
|
||||||
|
end_offset => INTERVAL '1 minute',
|
||||||
|
schedule_interval => INTERVAL '1 minute',
|
||||||
|
if_not_exists => true);
|
||||||
|
|
||||||
|
SELECT add_continuous_aggregate_policy('stats_1m_processor_detail',
|
||||||
|
start_offset => INTERVAL '1 hour',
|
||||||
|
end_offset => INTERVAL '1 minute',
|
||||||
|
schedule_interval => INTERVAL '1 minute',
|
||||||
|
if_not_exists => true);
|
||||||
@@ -1,28 +0,0 @@
|
|||||||
CREATE TABLE processor_executions (
|
|
||||||
id BIGSERIAL,
|
|
||||||
execution_id TEXT NOT NULL,
|
|
||||||
processor_id TEXT NOT NULL,
|
|
||||||
processor_type TEXT NOT NULL,
|
|
||||||
diagram_node_id TEXT,
|
|
||||||
group_name TEXT NOT NULL,
|
|
||||||
route_id TEXT NOT NULL,
|
|
||||||
depth INT NOT NULL,
|
|
||||||
parent_processor_id TEXT,
|
|
||||||
status TEXT NOT NULL,
|
|
||||||
start_time TIMESTAMPTZ NOT NULL,
|
|
||||||
end_time TIMESTAMPTZ,
|
|
||||||
duration_ms BIGINT,
|
|
||||||
error_message TEXT,
|
|
||||||
error_stacktrace TEXT,
|
|
||||||
input_body TEXT,
|
|
||||||
output_body TEXT,
|
|
||||||
input_headers JSONB,
|
|
||||||
output_headers JSONB,
|
|
||||||
created_at TIMESTAMPTZ NOT NULL DEFAULT now(),
|
|
||||||
UNIQUE (execution_id, processor_id, start_time)
|
|
||||||
);
|
|
||||||
|
|
||||||
SELECT create_hypertable('processor_executions', 'start_time', chunk_time_interval => INTERVAL '1 day');
|
|
||||||
|
|
||||||
CREATE INDEX idx_proc_exec_execution ON processor_executions (execution_id);
|
|
||||||
CREATE INDEX idx_proc_exec_type_time ON processor_executions (processor_type, start_time DESC);
|
|
||||||
@@ -1,12 +0,0 @@
|
|||||||
CREATE TABLE agent_metrics (
|
|
||||||
agent_id TEXT NOT NULL,
|
|
||||||
metric_name TEXT NOT NULL,
|
|
||||||
metric_value DOUBLE PRECISION NOT NULL,
|
|
||||||
tags JSONB,
|
|
||||||
collected_at TIMESTAMPTZ NOT NULL,
|
|
||||||
server_received_at TIMESTAMPTZ NOT NULL DEFAULT now()
|
|
||||||
);
|
|
||||||
|
|
||||||
SELECT create_hypertable('agent_metrics', 'collected_at', chunk_time_interval => INTERVAL '1 day');
|
|
||||||
|
|
||||||
CREATE INDEX idx_metrics_agent_name ON agent_metrics (agent_id, metric_name, collected_at DESC);
|
|
||||||
@@ -1,9 +0,0 @@
|
|||||||
CREATE TABLE route_diagrams (
|
|
||||||
content_hash TEXT PRIMARY KEY,
|
|
||||||
route_id TEXT NOT NULL,
|
|
||||||
agent_id TEXT NOT NULL,
|
|
||||||
definition TEXT NOT NULL,
|
|
||||||
created_at TIMESTAMPTZ NOT NULL DEFAULT now()
|
|
||||||
);
|
|
||||||
|
|
||||||
CREATE INDEX idx_diagrams_route_agent ON route_diagrams (route_id, agent_id);
|
|
||||||
@@ -1,9 +0,0 @@
|
|||||||
CREATE TABLE users (
|
|
||||||
user_id TEXT PRIMARY KEY,
|
|
||||||
provider TEXT NOT NULL,
|
|
||||||
email TEXT,
|
|
||||||
display_name TEXT,
|
|
||||||
roles TEXT[] NOT NULL DEFAULT '{}',
|
|
||||||
created_at TIMESTAMPTZ NOT NULL DEFAULT now(),
|
|
||||||
updated_at TIMESTAMPTZ NOT NULL DEFAULT now()
|
|
||||||
);
|
|
||||||
@@ -1,12 +0,0 @@
|
|||||||
CREATE TABLE oidc_config (
|
|
||||||
config_id TEXT PRIMARY KEY DEFAULT 'default',
|
|
||||||
enabled BOOLEAN NOT NULL DEFAULT false,
|
|
||||||
issuer_uri TEXT,
|
|
||||||
client_id TEXT,
|
|
||||||
client_secret TEXT,
|
|
||||||
roles_claim TEXT,
|
|
||||||
default_roles TEXT[] NOT NULL DEFAULT '{}',
|
|
||||||
auto_signup BOOLEAN DEFAULT false,
|
|
||||||
display_name_claim TEXT,
|
|
||||||
updated_at TIMESTAMPTZ NOT NULL DEFAULT now()
|
|
||||||
);
|
|
||||||
@@ -1,87 +0,0 @@
|
|||||||
-- Global stats
|
|
||||||
CREATE MATERIALIZED VIEW stats_1m_all
|
|
||||||
WITH (timescaledb.continuous, timescaledb.materialized_only = false) AS
|
|
||||||
SELECT
|
|
||||||
time_bucket('1 minute', start_time) AS bucket,
|
|
||||||
COUNT(*) AS total_count,
|
|
||||||
COUNT(*) FILTER (WHERE status = 'FAILED') AS failed_count,
|
|
||||||
COUNT(*) FILTER (WHERE status = 'RUNNING') AS running_count,
|
|
||||||
SUM(duration_ms) AS duration_sum,
|
|
||||||
MAX(duration_ms) AS duration_max,
|
|
||||||
approx_percentile(0.99, percentile_agg(duration_ms::DOUBLE PRECISION)) AS p99_duration
|
|
||||||
FROM executions
|
|
||||||
WHERE status IS NOT NULL
|
|
||||||
GROUP BY bucket
|
|
||||||
WITH NO DATA;
|
|
||||||
|
|
||||||
SELECT add_continuous_aggregate_policy('stats_1m_all',
|
|
||||||
start_offset => INTERVAL '1 hour',
|
|
||||||
end_offset => INTERVAL '1 minute',
|
|
||||||
schedule_interval => INTERVAL '1 minute');
|
|
||||||
|
|
||||||
-- Per-application stats
|
|
||||||
CREATE MATERIALIZED VIEW stats_1m_app
|
|
||||||
WITH (timescaledb.continuous, timescaledb.materialized_only = false) AS
|
|
||||||
SELECT
|
|
||||||
time_bucket('1 minute', start_time) AS bucket,
|
|
||||||
group_name,
|
|
||||||
COUNT(*) AS total_count,
|
|
||||||
COUNT(*) FILTER (WHERE status = 'FAILED') AS failed_count,
|
|
||||||
COUNT(*) FILTER (WHERE status = 'RUNNING') AS running_count,
|
|
||||||
SUM(duration_ms) AS duration_sum,
|
|
||||||
MAX(duration_ms) AS duration_max,
|
|
||||||
approx_percentile(0.99, percentile_agg(duration_ms::DOUBLE PRECISION)) AS p99_duration
|
|
||||||
FROM executions
|
|
||||||
WHERE status IS NOT NULL
|
|
||||||
GROUP BY bucket, group_name
|
|
||||||
WITH NO DATA;
|
|
||||||
|
|
||||||
SELECT add_continuous_aggregate_policy('stats_1m_app',
|
|
||||||
start_offset => INTERVAL '1 hour',
|
|
||||||
end_offset => INTERVAL '1 minute',
|
|
||||||
schedule_interval => INTERVAL '1 minute');
|
|
||||||
|
|
||||||
-- Per-route stats
|
|
||||||
CREATE MATERIALIZED VIEW stats_1m_route
|
|
||||||
WITH (timescaledb.continuous, timescaledb.materialized_only = false) AS
|
|
||||||
SELECT
|
|
||||||
time_bucket('1 minute', start_time) AS bucket,
|
|
||||||
group_name,
|
|
||||||
route_id,
|
|
||||||
COUNT(*) AS total_count,
|
|
||||||
COUNT(*) FILTER (WHERE status = 'FAILED') AS failed_count,
|
|
||||||
COUNT(*) FILTER (WHERE status = 'RUNNING') AS running_count,
|
|
||||||
SUM(duration_ms) AS duration_sum,
|
|
||||||
MAX(duration_ms) AS duration_max,
|
|
||||||
approx_percentile(0.99, percentile_agg(duration_ms::DOUBLE PRECISION)) AS p99_duration
|
|
||||||
FROM executions
|
|
||||||
WHERE status IS NOT NULL
|
|
||||||
GROUP BY bucket, group_name, route_id
|
|
||||||
WITH NO DATA;
|
|
||||||
|
|
||||||
SELECT add_continuous_aggregate_policy('stats_1m_route',
|
|
||||||
start_offset => INTERVAL '1 hour',
|
|
||||||
end_offset => INTERVAL '1 minute',
|
|
||||||
schedule_interval => INTERVAL '1 minute');
|
|
||||||
|
|
||||||
-- Per-processor stats (uses denormalized group_name/route_id on processor_executions)
|
|
||||||
CREATE MATERIALIZED VIEW stats_1m_processor
|
|
||||||
WITH (timescaledb.continuous, timescaledb.materialized_only = false) AS
|
|
||||||
SELECT
|
|
||||||
time_bucket('1 minute', start_time) AS bucket,
|
|
||||||
group_name,
|
|
||||||
route_id,
|
|
||||||
processor_type,
|
|
||||||
COUNT(*) AS total_count,
|
|
||||||
COUNT(*) FILTER (WHERE status = 'FAILED') AS failed_count,
|
|
||||||
SUM(duration_ms) AS duration_sum,
|
|
||||||
MAX(duration_ms) AS duration_max,
|
|
||||||
approx_percentile(0.99, percentile_agg(duration_ms::DOUBLE PRECISION)) AS p99_duration
|
|
||||||
FROM processor_executions
|
|
||||||
GROUP BY bucket, group_name, route_id, processor_type
|
|
||||||
WITH NO DATA;
|
|
||||||
|
|
||||||
SELECT add_continuous_aggregate_policy('stats_1m_processor',
|
|
||||||
start_offset => INTERVAL '1 hour',
|
|
||||||
end_offset => INTERVAL '1 minute',
|
|
||||||
schedule_interval => INTERVAL '1 minute');
|
|
||||||
@@ -1,5 +1,6 @@
|
|||||||
package com.cameleer3.server.app;
|
package com.cameleer3.server.app;
|
||||||
|
|
||||||
|
import org.opensearch.testcontainers.OpensearchContainer;
|
||||||
import org.springframework.beans.factory.annotation.Autowired;
|
import org.springframework.beans.factory.annotation.Autowired;
|
||||||
import org.springframework.boot.test.context.SpringBootTest;
|
import org.springframework.boot.test.context.SpringBootTest;
|
||||||
import org.springframework.jdbc.core.JdbcTemplate;
|
import org.springframework.jdbc.core.JdbcTemplate;
|
||||||
@@ -7,25 +8,29 @@ import org.springframework.test.context.ActiveProfiles;
|
|||||||
import org.springframework.test.context.DynamicPropertyRegistry;
|
import org.springframework.test.context.DynamicPropertyRegistry;
|
||||||
import org.springframework.test.context.DynamicPropertySource;
|
import org.springframework.test.context.DynamicPropertySource;
|
||||||
import org.testcontainers.containers.PostgreSQLContainer;
|
import org.testcontainers.containers.PostgreSQLContainer;
|
||||||
import org.testcontainers.junit.jupiter.Container;
|
|
||||||
import org.testcontainers.junit.jupiter.Testcontainers;
|
|
||||||
import org.testcontainers.utility.DockerImageName;
|
import org.testcontainers.utility.DockerImageName;
|
||||||
|
|
||||||
@SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.RANDOM_PORT)
|
@SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.RANDOM_PORT)
|
||||||
@ActiveProfiles("test")
|
@ActiveProfiles("test")
|
||||||
@Testcontainers
|
|
||||||
public abstract class AbstractPostgresIT {
|
public abstract class AbstractPostgresIT {
|
||||||
|
|
||||||
private static final DockerImageName TIMESCALEDB_IMAGE =
|
private static final DockerImageName TIMESCALEDB_IMAGE =
|
||||||
DockerImageName.parse("timescale/timescaledb-ha:pg16")
|
DockerImageName.parse("timescale/timescaledb-ha:pg16")
|
||||||
.asCompatibleSubstituteFor("postgres");
|
.asCompatibleSubstituteFor("postgres");
|
||||||
|
|
||||||
@Container
|
static final PostgreSQLContainer<?> postgres;
|
||||||
static final PostgreSQLContainer<?> postgres =
|
static final OpensearchContainer<?> opensearch;
|
||||||
new PostgreSQLContainer<>(TIMESCALEDB_IMAGE)
|
|
||||||
.withDatabaseName("cameleer3")
|
static {
|
||||||
.withUsername("cameleer")
|
postgres = new PostgreSQLContainer<>(TIMESCALEDB_IMAGE)
|
||||||
.withPassword("test");
|
.withDatabaseName("cameleer3")
|
||||||
|
.withUsername("cameleer")
|
||||||
|
.withPassword("test");
|
||||||
|
postgres.start();
|
||||||
|
|
||||||
|
opensearch = new OpensearchContainer<>("opensearchproject/opensearch:2.19.0");
|
||||||
|
opensearch.start();
|
||||||
|
}
|
||||||
|
|
||||||
@Autowired
|
@Autowired
|
||||||
protected JdbcTemplate jdbcTemplate;
|
protected JdbcTemplate jdbcTemplate;
|
||||||
@@ -37,5 +42,9 @@ public abstract class AbstractPostgresIT {
|
|||||||
registry.add("spring.datasource.password", postgres::getPassword);
|
registry.add("spring.datasource.password", postgres::getPassword);
|
||||||
registry.add("spring.datasource.driver-class-name", () -> "org.postgresql.Driver");
|
registry.add("spring.datasource.driver-class-name", () -> "org.postgresql.Driver");
|
||||||
registry.add("spring.flyway.enabled", () -> "true");
|
registry.add("spring.flyway.enabled", () -> "true");
|
||||||
|
registry.add("spring.flyway.url", postgres::getJdbcUrl);
|
||||||
|
registry.add("spring.flyway.user", postgres::getUsername);
|
||||||
|
registry.add("spring.flyway.password", postgres::getPassword);
|
||||||
|
registry.add("opensearch.url", opensearch::getHttpHostAddress);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -37,8 +37,8 @@ public class TestSecurityHelper {
|
|||||||
/**
|
/**
|
||||||
* Returns a valid JWT access token with the given roles (no agent registration).
|
* Returns a valid JWT access token with the given roles (no agent registration).
|
||||||
*/
|
*/
|
||||||
public String createToken(String subject, String group, List<String> roles) {
|
public String createToken(String subject, String application, List<String> roles) {
|
||||||
return jwtService.createAccessToken(subject, group, roles);
|
return jwtService.createAccessToken(subject, application, roles);
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
|||||||
@@ -0,0 +1,49 @@
|
|||||||
|
package com.cameleer3.server.app.admin;
|
||||||
|
|
||||||
|
import com.cameleer3.server.core.admin.*;
|
||||||
|
import jakarta.servlet.http.HttpServletRequest;
|
||||||
|
import org.junit.jupiter.api.BeforeEach;
|
||||||
|
import org.junit.jupiter.api.Test;
|
||||||
|
import org.mockito.ArgumentCaptor;
|
||||||
|
|
||||||
|
import java.util.Map;
|
||||||
|
|
||||||
|
import static org.junit.jupiter.api.Assertions.*;
|
||||||
|
import static org.mockito.Mockito.*;
|
||||||
|
|
||||||
|
class AuditServiceTest {
|
||||||
|
private AuditRepository mockRepository;
|
||||||
|
private AuditService auditService;
|
||||||
|
|
||||||
|
@BeforeEach
|
||||||
|
void setUp() {
|
||||||
|
mockRepository = mock(AuditRepository.class);
|
||||||
|
auditService = new AuditService(mockRepository);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
void log_withExplicitUsername_insertsRecordWithCorrectFields() {
|
||||||
|
var request = mock(HttpServletRequest.class);
|
||||||
|
when(request.getRemoteAddr()).thenReturn("192.168.1.1");
|
||||||
|
when(request.getHeader("User-Agent")).thenReturn("Mozilla/5.0");
|
||||||
|
|
||||||
|
auditService.log("admin", "kill_query", AuditCategory.INFRA, "PID 42",
|
||||||
|
Map.of("query", "SELECT 1"), AuditResult.SUCCESS, request);
|
||||||
|
|
||||||
|
var captor = ArgumentCaptor.forClass(AuditRecord.class);
|
||||||
|
verify(mockRepository).insert(captor.capture());
|
||||||
|
var record = captor.getValue();
|
||||||
|
assertEquals("admin", record.username());
|
||||||
|
assertEquals("kill_query", record.action());
|
||||||
|
assertEquals(AuditCategory.INFRA, record.category());
|
||||||
|
assertEquals("PID 42", record.target());
|
||||||
|
assertEquals("192.168.1.1", record.ipAddress());
|
||||||
|
assertEquals("Mozilla/5.0", record.userAgent());
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
void log_withNullRequest_handlesGracefully() {
|
||||||
|
auditService.log("admin", "test", AuditCategory.CONFIG, null, null, AuditResult.SUCCESS, null);
|
||||||
|
verify(mockRepository).insert(any(AuditRecord.class));
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -38,17 +38,17 @@ class AgentCommandControllerIT extends AbstractPostgresIT {
|
|||||||
operatorJwt = securityHelper.operatorToken();
|
operatorJwt = securityHelper.operatorToken();
|
||||||
}
|
}
|
||||||
|
|
||||||
private ResponseEntity<String> registerAgent(String agentId, String name, String group) {
|
private ResponseEntity<String> registerAgent(String agentId, String name, String application) {
|
||||||
String json = """
|
String json = """
|
||||||
{
|
{
|
||||||
"agentId": "%s",
|
"agentId": "%s",
|
||||||
"name": "%s",
|
"name": "%s",
|
||||||
"group": "%s",
|
"application": "%s",
|
||||||
"version": "1.0.0",
|
"version": "1.0.0",
|
||||||
"routeIds": ["route-1"],
|
"routeIds": ["route-1"],
|
||||||
"capabilities": {}
|
"capabilities": {}
|
||||||
}
|
}
|
||||||
""".formatted(agentId, name, group);
|
""".formatted(agentId, name, application);
|
||||||
|
|
||||||
return restTemplate.postForEntity(
|
return restTemplate.postForEntity(
|
||||||
"/api/v1/agents/register",
|
"/api/v1/agents/register",
|
||||||
|
|||||||
@@ -41,7 +41,7 @@ class AgentRegistrationControllerIT extends AbstractPostgresIT {
|
|||||||
{
|
{
|
||||||
"agentId": "%s",
|
"agentId": "%s",
|
||||||
"name": "%s",
|
"name": "%s",
|
||||||
"group": "test-group",
|
"application": "test-group",
|
||||||
"version": "1.0.0",
|
"version": "1.0.0",
|
||||||
"routeIds": ["route-1", "route-2"],
|
"routeIds": ["route-1", "route-2"],
|
||||||
"capabilities": {"tracing": true}
|
"capabilities": {"tracing": true}
|
||||||
|
|||||||
@@ -53,17 +53,17 @@ class AgentSseControllerIT extends AbstractPostgresIT {
|
|||||||
operatorJwt = securityHelper.operatorToken();
|
operatorJwt = securityHelper.operatorToken();
|
||||||
}
|
}
|
||||||
|
|
||||||
private ResponseEntity<String> registerAgent(String agentId, String name, String group) {
|
private ResponseEntity<String> registerAgent(String agentId, String name, String application) {
|
||||||
String json = """
|
String json = """
|
||||||
{
|
{
|
||||||
"agentId": "%s",
|
"agentId": "%s",
|
||||||
"name": "%s",
|
"name": "%s",
|
||||||
"group": "%s",
|
"application": "%s",
|
||||||
"version": "1.0.0",
|
"version": "1.0.0",
|
||||||
"routeIds": ["route-1"],
|
"routeIds": ["route-1"],
|
||||||
"capabilities": {}
|
"capabilities": {}
|
||||||
}
|
}
|
||||||
""".formatted(agentId, name, group);
|
""".formatted(agentId, name, application);
|
||||||
|
|
||||||
return restTemplate.postForEntity(
|
return restTemplate.postForEntity(
|
||||||
"/api/v1/agents/register",
|
"/api/v1/agents/register",
|
||||||
|
|||||||
@@ -0,0 +1,112 @@
|
|||||||
|
package com.cameleer3.server.app.controller;
|
||||||
|
|
||||||
|
import com.cameleer3.server.app.AbstractPostgresIT;
|
||||||
|
import com.cameleer3.server.app.TestSecurityHelper;
|
||||||
|
import com.cameleer3.server.core.admin.AuditCategory;
|
||||||
|
import com.cameleer3.server.core.admin.AuditResult;
|
||||||
|
import com.cameleer3.server.core.admin.AuditService;
|
||||||
|
import com.fasterxml.jackson.databind.JsonNode;
|
||||||
|
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||||
|
import org.junit.jupiter.api.BeforeEach;
|
||||||
|
import org.junit.jupiter.api.Test;
|
||||||
|
import org.springframework.beans.factory.annotation.Autowired;
|
||||||
|
import org.springframework.boot.test.web.client.TestRestTemplate;
|
||||||
|
import org.springframework.http.HttpEntity;
|
||||||
|
import org.springframework.http.HttpMethod;
|
||||||
|
import org.springframework.http.HttpStatus;
|
||||||
|
import org.springframework.http.ResponseEntity;
|
||||||
|
|
||||||
|
import java.util.Map;
|
||||||
|
|
||||||
|
import static org.assertj.core.api.Assertions.assertThat;
|
||||||
|
|
||||||
|
class AuditLogControllerIT extends AbstractPostgresIT {
|
||||||
|
|
||||||
|
@Autowired
|
||||||
|
private TestRestTemplate restTemplate;
|
||||||
|
|
||||||
|
@Autowired
|
||||||
|
private ObjectMapper objectMapper;
|
||||||
|
|
||||||
|
@Autowired
|
||||||
|
private TestSecurityHelper securityHelper;
|
||||||
|
|
||||||
|
@Autowired
|
||||||
|
private AuditService auditService;
|
||||||
|
|
||||||
|
private String adminJwt;
|
||||||
|
private String viewerJwt;
|
||||||
|
|
||||||
|
@BeforeEach
|
||||||
|
void setUp() {
|
||||||
|
adminJwt = securityHelper.adminToken();
|
||||||
|
viewerJwt = securityHelper.viewerToken();
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
void getAuditLog_asAdmin_returns200() throws Exception {
|
||||||
|
// Insert a test audit entry
|
||||||
|
auditService.log("test-admin", "test_action", AuditCategory.CONFIG,
|
||||||
|
"test-target", Map.of("key", "value"), AuditResult.SUCCESS, null);
|
||||||
|
|
||||||
|
ResponseEntity<String> response = restTemplate.exchange(
|
||||||
|
"/api/v1/admin/audit", HttpMethod.GET,
|
||||||
|
new HttpEntity<>(securityHelper.authHeadersNoBody(adminJwt)),
|
||||||
|
String.class);
|
||||||
|
|
||||||
|
assertThat(response.getStatusCode()).isEqualTo(HttpStatus.OK);
|
||||||
|
JsonNode body = objectMapper.readTree(response.getBody());
|
||||||
|
assertThat(body.has("items")).isTrue();
|
||||||
|
assertThat(body.has("totalCount")).isTrue();
|
||||||
|
assertThat(body.get("totalCount").asLong()).isGreaterThanOrEqualTo(1);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
void getAuditLog_asViewer_returns403() {
|
||||||
|
ResponseEntity<String> response = restTemplate.exchange(
|
||||||
|
"/api/v1/admin/audit", HttpMethod.GET,
|
||||||
|
new HttpEntity<>(securityHelper.authHeadersNoBody(viewerJwt)),
|
||||||
|
String.class);
|
||||||
|
|
||||||
|
assertThat(response.getStatusCode()).isEqualTo(HttpStatus.FORBIDDEN);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
void getAuditLog_withCategoryFilter_returnsFilteredResults() throws Exception {
|
||||||
|
auditService.log("filter-test", "infra_action", AuditCategory.INFRA,
|
||||||
|
"infra-target", null, AuditResult.SUCCESS, null);
|
||||||
|
|
||||||
|
ResponseEntity<String> response = restTemplate.exchange(
|
||||||
|
"/api/v1/admin/audit?category=INFRA", HttpMethod.GET,
|
||||||
|
new HttpEntity<>(securityHelper.authHeadersNoBody(adminJwt)),
|
||||||
|
String.class);
|
||||||
|
|
||||||
|
assertThat(response.getStatusCode()).isEqualTo(HttpStatus.OK);
|
||||||
|
JsonNode body = objectMapper.readTree(response.getBody());
|
||||||
|
assertThat(body.get("items").isArray()).isTrue();
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
void getAuditLog_withPagination_respectsPageSize() throws Exception {
|
||||||
|
ResponseEntity<String> response = restTemplate.exchange(
|
||||||
|
"/api/v1/admin/audit?page=0&size=5", HttpMethod.GET,
|
||||||
|
new HttpEntity<>(securityHelper.authHeadersNoBody(adminJwt)),
|
||||||
|
String.class);
|
||||||
|
|
||||||
|
assertThat(response.getStatusCode()).isEqualTo(HttpStatus.OK);
|
||||||
|
JsonNode body = objectMapper.readTree(response.getBody());
|
||||||
|
assertThat(body.get("pageSize").asInt()).isEqualTo(5);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
void getAuditLog_maxPageSizeEnforced() throws Exception {
|
||||||
|
ResponseEntity<String> response = restTemplate.exchange(
|
||||||
|
"/api/v1/admin/audit?size=500", HttpMethod.GET,
|
||||||
|
new HttpEntity<>(securityHelper.authHeadersNoBody(adminJwt)),
|
||||||
|
String.class);
|
||||||
|
|
||||||
|
assertThat(response.getStatusCode()).isEqualTo(HttpStatus.OK);
|
||||||
|
JsonNode body = objectMapper.readTree(response.getBody());
|
||||||
|
assertThat(body.get("pageSize").asInt()).isEqualTo(100);
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -0,0 +1,109 @@
|
|||||||
|
package com.cameleer3.server.app.controller;
|
||||||
|
|
||||||
|
import com.cameleer3.server.app.AbstractPostgresIT;
|
||||||
|
import com.cameleer3.server.app.TestSecurityHelper;
|
||||||
|
import com.fasterxml.jackson.databind.JsonNode;
|
||||||
|
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||||
|
import org.junit.jupiter.api.BeforeEach;
|
||||||
|
import org.junit.jupiter.api.Test;
|
||||||
|
import org.springframework.beans.factory.annotation.Autowired;
|
||||||
|
import org.springframework.boot.test.web.client.TestRestTemplate;
|
||||||
|
import org.springframework.http.HttpEntity;
|
||||||
|
import org.springframework.http.HttpMethod;
|
||||||
|
import org.springframework.http.HttpStatus;
|
||||||
|
import org.springframework.http.ResponseEntity;
|
||||||
|
|
||||||
|
import static org.assertj.core.api.Assertions.assertThat;
|
||||||
|
|
||||||
|
class DatabaseAdminControllerIT extends AbstractPostgresIT {
|
||||||
|
|
||||||
|
@Autowired
|
||||||
|
private TestRestTemplate restTemplate;
|
||||||
|
|
||||||
|
@Autowired
|
||||||
|
private ObjectMapper objectMapper;
|
||||||
|
|
||||||
|
@Autowired
|
||||||
|
private TestSecurityHelper securityHelper;
|
||||||
|
|
||||||
|
private String adminJwt;
|
||||||
|
private String viewerJwt;
|
||||||
|
|
||||||
|
@BeforeEach
|
||||||
|
void setUp() {
|
||||||
|
adminJwt = securityHelper.adminToken();
|
||||||
|
viewerJwt = securityHelper.viewerToken();
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
void getStatus_asAdmin_returns200WithConnected() throws Exception {
|
||||||
|
ResponseEntity<String> response = restTemplate.exchange(
|
||||||
|
"/api/v1/admin/database/status", HttpMethod.GET,
|
||||||
|
new HttpEntity<>(securityHelper.authHeadersNoBody(adminJwt)),
|
||||||
|
String.class);
|
||||||
|
|
||||||
|
assertThat(response.getStatusCode()).isEqualTo(HttpStatus.OK);
|
||||||
|
JsonNode body = objectMapper.readTree(response.getBody());
|
||||||
|
assertThat(body.get("connected").asBoolean()).isTrue();
|
||||||
|
assertThat(body.get("version").asText()).contains("PostgreSQL");
|
||||||
|
assertThat(body.has("schema")).isTrue();
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
void getStatus_asViewer_returns403() {
|
||||||
|
ResponseEntity<String> response = restTemplate.exchange(
|
||||||
|
"/api/v1/admin/database/status", HttpMethod.GET,
|
||||||
|
new HttpEntity<>(securityHelper.authHeadersNoBody(viewerJwt)),
|
||||||
|
String.class);
|
||||||
|
|
||||||
|
assertThat(response.getStatusCode()).isEqualTo(HttpStatus.FORBIDDEN);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
void getPool_asAdmin_returns200WithPoolStats() throws Exception {
|
||||||
|
ResponseEntity<String> response = restTemplate.exchange(
|
||||||
|
"/api/v1/admin/database/pool", HttpMethod.GET,
|
||||||
|
new HttpEntity<>(securityHelper.authHeadersNoBody(adminJwt)),
|
||||||
|
String.class);
|
||||||
|
|
||||||
|
assertThat(response.getStatusCode()).isEqualTo(HttpStatus.OK);
|
||||||
|
JsonNode body = objectMapper.readTree(response.getBody());
|
||||||
|
assertThat(body.has("activeConnections")).isTrue();
|
||||||
|
assertThat(body.has("idleConnections")).isTrue();
|
||||||
|
assertThat(body.get("maxPoolSize").asInt()).isGreaterThan(0);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
void getTables_asAdmin_returns200WithTableList() throws Exception {
|
||||||
|
ResponseEntity<String> response = restTemplate.exchange(
|
||||||
|
"/api/v1/admin/database/tables", HttpMethod.GET,
|
||||||
|
new HttpEntity<>(securityHelper.authHeadersNoBody(adminJwt)),
|
||||||
|
String.class);
|
||||||
|
|
||||||
|
assertThat(response.getStatusCode()).isEqualTo(HttpStatus.OK);
|
||||||
|
JsonNode body = objectMapper.readTree(response.getBody());
|
||||||
|
assertThat(body.isArray()).isTrue();
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
void getQueries_asAdmin_returns200() throws Exception {
|
||||||
|
ResponseEntity<String> response = restTemplate.exchange(
|
||||||
|
"/api/v1/admin/database/queries", HttpMethod.GET,
|
||||||
|
new HttpEntity<>(securityHelper.authHeadersNoBody(adminJwt)),
|
||||||
|
String.class);
|
||||||
|
|
||||||
|
assertThat(response.getStatusCode()).isEqualTo(HttpStatus.OK);
|
||||||
|
JsonNode body = objectMapper.readTree(response.getBody());
|
||||||
|
assertThat(body.isArray()).isTrue();
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
void killQuery_unknownPid_returns404() {
|
||||||
|
ResponseEntity<String> response = restTemplate.exchange(
|
||||||
|
"/api/v1/admin/database/queries/999999/kill", HttpMethod.POST,
|
||||||
|
new HttpEntity<>(securityHelper.authHeadersNoBody(adminJwt)),
|
||||||
|
String.class);
|
||||||
|
|
||||||
|
assertThat(response.getStatusCode()).isEqualTo(HttpStatus.NOT_FOUND);
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -123,13 +123,13 @@ class DetailControllerIT extends AbstractPostgresIT {
|
|||||||
// Wait for flush and get the execution_id
|
// Wait for flush and get the execution_id
|
||||||
await().atMost(10, SECONDS).untilAsserted(() -> {
|
await().atMost(10, SECONDS).untilAsserted(() -> {
|
||||||
Integer count = jdbcTemplate.queryForObject(
|
Integer count = jdbcTemplate.queryForObject(
|
||||||
"SELECT count(*) FROM route_executions WHERE route_id = 'detail-test-route'",
|
"SELECT count(*) FROM executions WHERE route_id = 'detail-test-route'",
|
||||||
Integer.class);
|
Integer.class);
|
||||||
assertThat(count).isGreaterThanOrEqualTo(1);
|
assertThat(count).isGreaterThanOrEqualTo(1);
|
||||||
});
|
});
|
||||||
|
|
||||||
seededExecutionId = jdbcTemplate.queryForObject(
|
seededExecutionId = jdbcTemplate.queryForObject(
|
||||||
"SELECT execution_id FROM route_executions WHERE route_id = 'detail-test-route' LIMIT 1",
|
"SELECT execution_id FROM executions WHERE route_id = 'detail-test-route' LIMIT 1",
|
||||||
String.class);
|
String.class);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -111,7 +111,7 @@ class ExecutionControllerIT extends AbstractPostgresIT {
|
|||||||
|
|
||||||
await().atMost(10, SECONDS).untilAsserted(() -> {
|
await().atMost(10, SECONDS).untilAsserted(() -> {
|
||||||
Integer count = jdbcTemplate.queryForObject(
|
Integer count = jdbcTemplate.queryForObject(
|
||||||
"SELECT count(*) FROM route_executions WHERE route_id = 'flush-test-route'",
|
"SELECT count(*) FROM executions WHERE route_id = 'flush-test-route'",
|
||||||
Integer.class);
|
Integer.class);
|
||||||
assertThat(count).isGreaterThanOrEqualTo(1);
|
assertThat(count).isGreaterThanOrEqualTo(1);
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -0,0 +1,112 @@
|
|||||||
|
package com.cameleer3.server.app.controller;
|
||||||
|
|
||||||
|
import com.cameleer3.server.app.AbstractPostgresIT;
|
||||||
|
import com.cameleer3.server.app.TestSecurityHelper;
|
||||||
|
import com.fasterxml.jackson.databind.JsonNode;
|
||||||
|
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||||
|
import org.junit.jupiter.api.BeforeEach;
|
||||||
|
import org.junit.jupiter.api.Test;
|
||||||
|
import org.springframework.beans.factory.annotation.Autowired;
|
||||||
|
import org.springframework.boot.test.web.client.TestRestTemplate;
|
||||||
|
import org.springframework.http.HttpEntity;
|
||||||
|
import org.springframework.http.HttpMethod;
|
||||||
|
import org.springframework.http.HttpStatus;
|
||||||
|
import org.springframework.http.ResponseEntity;
|
||||||
|
|
||||||
|
import static org.assertj.core.api.Assertions.assertThat;
|
||||||
|
|
||||||
|
class OpenSearchAdminControllerIT extends AbstractPostgresIT {
|
||||||
|
|
||||||
|
@Autowired
|
||||||
|
private TestRestTemplate restTemplate;
|
||||||
|
|
||||||
|
@Autowired
|
||||||
|
private ObjectMapper objectMapper;
|
||||||
|
|
||||||
|
@Autowired
|
||||||
|
private TestSecurityHelper securityHelper;
|
||||||
|
|
||||||
|
private String adminJwt;
|
||||||
|
private String viewerJwt;
|
||||||
|
|
||||||
|
@BeforeEach
|
||||||
|
void setUp() {
|
||||||
|
adminJwt = securityHelper.adminToken();
|
||||||
|
viewerJwt = securityHelper.viewerToken();
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
void getStatus_asAdmin_returns200() throws Exception {
|
||||||
|
ResponseEntity<String> response = restTemplate.exchange(
|
||||||
|
"/api/v1/admin/opensearch/status", HttpMethod.GET,
|
||||||
|
new HttpEntity<>(securityHelper.authHeadersNoBody(adminJwt)),
|
||||||
|
String.class);
|
||||||
|
|
||||||
|
assertThat(response.getStatusCode()).isEqualTo(HttpStatus.OK);
|
||||||
|
JsonNode body = objectMapper.readTree(response.getBody());
|
||||||
|
assertThat(body.get("reachable").asBoolean()).isTrue();
|
||||||
|
assertThat(body.has("clusterHealth")).isTrue();
|
||||||
|
assertThat(body.has("version")).isTrue();
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
void getStatus_asViewer_returns403() {
|
||||||
|
ResponseEntity<String> response = restTemplate.exchange(
|
||||||
|
"/api/v1/admin/opensearch/status", HttpMethod.GET,
|
||||||
|
new HttpEntity<>(securityHelper.authHeadersNoBody(viewerJwt)),
|
||||||
|
String.class);
|
||||||
|
|
||||||
|
assertThat(response.getStatusCode()).isEqualTo(HttpStatus.FORBIDDEN);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
void getPipeline_asAdmin_returns200() throws Exception {
|
||||||
|
ResponseEntity<String> response = restTemplate.exchange(
|
||||||
|
"/api/v1/admin/opensearch/pipeline", HttpMethod.GET,
|
||||||
|
new HttpEntity<>(securityHelper.authHeadersNoBody(adminJwt)),
|
||||||
|
String.class);
|
||||||
|
|
||||||
|
assertThat(response.getStatusCode()).isEqualTo(HttpStatus.OK);
|
||||||
|
JsonNode body = objectMapper.readTree(response.getBody());
|
||||||
|
assertThat(body.has("queueDepth")).isTrue();
|
||||||
|
assertThat(body.has("maxQueueSize")).isTrue();
|
||||||
|
assertThat(body.has("indexedCount")).isTrue();
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
void getIndices_asAdmin_returns200() throws Exception {
|
||||||
|
ResponseEntity<String> response = restTemplate.exchange(
|
||||||
|
"/api/v1/admin/opensearch/indices", HttpMethod.GET,
|
||||||
|
new HttpEntity<>(securityHelper.authHeadersNoBody(adminJwt)),
|
||||||
|
String.class);
|
||||||
|
|
||||||
|
assertThat(response.getStatusCode()).isEqualTo(HttpStatus.OK);
|
||||||
|
JsonNode body = objectMapper.readTree(response.getBody());
|
||||||
|
assertThat(body.has("indices")).isTrue();
|
||||||
|
assertThat(body.has("totalIndices")).isTrue();
|
||||||
|
assertThat(body.has("page")).isTrue();
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
void deleteIndex_nonExistent_returns404() {
|
||||||
|
ResponseEntity<String> response = restTemplate.exchange(
|
||||||
|
"/api/v1/admin/opensearch/indices/nonexistent-index-xyz", HttpMethod.DELETE,
|
||||||
|
new HttpEntity<>(securityHelper.authHeadersNoBody(adminJwt)),
|
||||||
|
String.class);
|
||||||
|
|
||||||
|
assertThat(response.getStatusCode()).isEqualTo(HttpStatus.NOT_FOUND);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
void getPerformance_asAdmin_returns200() throws Exception {
|
||||||
|
ResponseEntity<String> response = restTemplate.exchange(
|
||||||
|
"/api/v1/admin/opensearch/performance", HttpMethod.GET,
|
||||||
|
new HttpEntity<>(securityHelper.authHeadersNoBody(adminJwt)),
|
||||||
|
String.class);
|
||||||
|
|
||||||
|
assertThat(response.getStatusCode()).isEqualTo(HttpStatus.OK);
|
||||||
|
JsonNode body = objectMapper.readTree(response.getBody());
|
||||||
|
assertThat(body.has("queryCacheHitRate")).isTrue();
|
||||||
|
assertThat(body.has("jvmHeapUsedBytes")).isTrue();
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -14,7 +14,9 @@ import org.springframework.http.HttpMethod;
|
|||||||
import org.springframework.http.HttpStatus;
|
import org.springframework.http.HttpStatus;
|
||||||
import org.springframework.http.ResponseEntity;
|
import org.springframework.http.ResponseEntity;
|
||||||
|
|
||||||
|
import static java.util.concurrent.TimeUnit.SECONDS;
|
||||||
import static org.assertj.core.api.Assertions.assertThat;
|
import static org.assertj.core.api.Assertions.assertThat;
|
||||||
|
import static org.awaitility.Awaitility.await;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Integration tests for the search controller endpoints.
|
* Integration tests for the search controller endpoints.
|
||||||
@@ -153,11 +155,19 @@ class SearchControllerIT extends AbstractPostgresIT {
|
|||||||
""", i, i, i, i, i));
|
""", i, i, i, i, i));
|
||||||
}
|
}
|
||||||
|
|
||||||
// Verify all data is available (synchronous writes)
|
// Verify all data is in PostgreSQL (synchronous writes)
|
||||||
Integer count = jdbcTemplate.queryForObject(
|
Integer count = jdbcTemplate.queryForObject(
|
||||||
"SELECT count(*) FROM executions WHERE route_id LIKE 'search-route-%'",
|
"SELECT count(*) FROM executions WHERE route_id LIKE 'search-route-%'",
|
||||||
Integer.class);
|
Integer.class);
|
||||||
assertThat(count).isEqualTo(10);
|
assertThat(count).isEqualTo(10);
|
||||||
|
|
||||||
|
// Wait for async OpenSearch indexing (debounce + index time)
|
||||||
|
// Check for last seeded execution specifically to avoid false positives from other test classes
|
||||||
|
await().atMost(30, SECONDS).untilAsserted(() -> {
|
||||||
|
ResponseEntity<String> r = searchGet("?correlationId=corr-page-10");
|
||||||
|
JsonNode body = objectMapper.readTree(r.getBody());
|
||||||
|
assertThat(body.get("total").asLong()).isGreaterThanOrEqualTo(1);
|
||||||
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
|
|||||||
@@ -0,0 +1,126 @@
|
|||||||
|
package com.cameleer3.server.app.controller;
|
||||||
|
|
||||||
|
import com.cameleer3.server.app.AbstractPostgresIT;
|
||||||
|
import com.cameleer3.server.app.TestSecurityHelper;
|
||||||
|
import com.fasterxml.jackson.databind.JsonNode;
|
||||||
|
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||||
|
import org.junit.jupiter.api.BeforeEach;
|
||||||
|
import org.junit.jupiter.api.Test;
|
||||||
|
import org.springframework.beans.factory.annotation.Autowired;
|
||||||
|
import org.springframework.boot.test.web.client.TestRestTemplate;
|
||||||
|
import org.springframework.http.HttpEntity;
|
||||||
|
import org.springframework.http.HttpMethod;
|
||||||
|
import org.springframework.http.HttpStatus;
|
||||||
|
import org.springframework.http.ResponseEntity;
|
||||||
|
|
||||||
|
import static org.assertj.core.api.Assertions.assertThat;
|
||||||
|
|
||||||
|
class ThresholdAdminControllerIT extends AbstractPostgresIT {
|
||||||
|
|
||||||
|
@Autowired
|
||||||
|
private TestRestTemplate restTemplate;
|
||||||
|
|
||||||
|
@Autowired
|
||||||
|
private ObjectMapper objectMapper;
|
||||||
|
|
||||||
|
@Autowired
|
||||||
|
private TestSecurityHelper securityHelper;
|
||||||
|
|
||||||
|
private String adminJwt;
|
||||||
|
private String viewerJwt;
|
||||||
|
|
||||||
|
@BeforeEach
|
||||||
|
void setUp() {
|
||||||
|
adminJwt = securityHelper.adminToken();
|
||||||
|
viewerJwt = securityHelper.viewerToken();
|
||||||
|
jdbcTemplate.update("DELETE FROM server_config WHERE config_key = 'thresholds'");
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
void getThresholds_asAdmin_returnsDefaults() throws Exception {
|
||||||
|
ResponseEntity<String> response = restTemplate.exchange(
|
||||||
|
"/api/v1/admin/thresholds", HttpMethod.GET,
|
||||||
|
new HttpEntity<>(securityHelper.authHeadersNoBody(adminJwt)),
|
||||||
|
String.class);
|
||||||
|
|
||||||
|
assertThat(response.getStatusCode()).isEqualTo(HttpStatus.OK);
|
||||||
|
JsonNode body = objectMapper.readTree(response.getBody());
|
||||||
|
assertThat(body.has("database")).isTrue();
|
||||||
|
assertThat(body.has("opensearch")).isTrue();
|
||||||
|
assertThat(body.path("database").path("connectionPoolWarning").asInt()).isEqualTo(80);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
void getThresholds_asViewer_returns403() {
|
||||||
|
ResponseEntity<String> response = restTemplate.exchange(
|
||||||
|
"/api/v1/admin/thresholds", HttpMethod.GET,
|
||||||
|
new HttpEntity<>(securityHelper.authHeadersNoBody(viewerJwt)),
|
||||||
|
String.class);
|
||||||
|
|
||||||
|
assertThat(response.getStatusCode()).isEqualTo(HttpStatus.FORBIDDEN);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
void updateThresholds_asAdmin_returns200() throws Exception {
|
||||||
|
String json = """
|
||||||
|
{
|
||||||
|
"database": {
|
||||||
|
"connectionPoolWarning": 70,
|
||||||
|
"connectionPoolCritical": 90,
|
||||||
|
"queryDurationWarning": 2.0,
|
||||||
|
"queryDurationCritical": 15.0
|
||||||
|
},
|
||||||
|
"opensearch": {
|
||||||
|
"clusterHealthWarning": "YELLOW",
|
||||||
|
"clusterHealthCritical": "RED",
|
||||||
|
"queueDepthWarning": 200,
|
||||||
|
"queueDepthCritical": 1000,
|
||||||
|
"jvmHeapWarning": 80,
|
||||||
|
"jvmHeapCritical": 95,
|
||||||
|
"failedDocsWarning": 5,
|
||||||
|
"failedDocsCritical": 20
|
||||||
|
}
|
||||||
|
}
|
||||||
|
""";
|
||||||
|
|
||||||
|
ResponseEntity<String> response = restTemplate.exchange(
|
||||||
|
"/api/v1/admin/thresholds", HttpMethod.PUT,
|
||||||
|
new HttpEntity<>(json, securityHelper.authHeaders(adminJwt)),
|
||||||
|
String.class);
|
||||||
|
|
||||||
|
assertThat(response.getStatusCode()).isEqualTo(HttpStatus.OK);
|
||||||
|
JsonNode body = objectMapper.readTree(response.getBody());
|
||||||
|
assertThat(body.path("database").path("connectionPoolWarning").asInt()).isEqualTo(70);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
void updateThresholds_invalidWarningGreaterThanCritical_returns400() {
|
||||||
|
String json = """
|
||||||
|
{
|
||||||
|
"database": {
|
||||||
|
"connectionPoolWarning": 95,
|
||||||
|
"connectionPoolCritical": 80,
|
||||||
|
"queryDurationWarning": 2.0,
|
||||||
|
"queryDurationCritical": 15.0
|
||||||
|
},
|
||||||
|
"opensearch": {
|
||||||
|
"clusterHealthWarning": "YELLOW",
|
||||||
|
"clusterHealthCritical": "RED",
|
||||||
|
"queueDepthWarning": 100,
|
||||||
|
"queueDepthCritical": 500,
|
||||||
|
"jvmHeapWarning": 75,
|
||||||
|
"jvmHeapCritical": 90,
|
||||||
|
"failedDocsWarning": 1,
|
||||||
|
"failedDocsCritical": 10
|
||||||
|
}
|
||||||
|
}
|
||||||
|
""";
|
||||||
|
|
||||||
|
ResponseEntity<String> response = restTemplate.exchange(
|
||||||
|
"/api/v1/admin/thresholds", HttpMethod.PUT,
|
||||||
|
new HttpEntity<>(json, securityHelper.authHeaders(adminJwt)),
|
||||||
|
String.class);
|
||||||
|
|
||||||
|
assertThat(response.getStatusCode()).isEqualTo(HttpStatus.BAD_REQUEST);
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -10,29 +10,16 @@ import com.cameleer3.server.core.storage.model.ExecutionDocument.ProcessorDoc;
|
|||||||
import org.junit.jupiter.api.Test;
|
import org.junit.jupiter.api.Test;
|
||||||
import org.opensearch.client.opensearch.OpenSearchClient;
|
import org.opensearch.client.opensearch.OpenSearchClient;
|
||||||
import org.opensearch.client.opensearch.indices.RefreshRequest;
|
import org.opensearch.client.opensearch.indices.RefreshRequest;
|
||||||
import org.opensearch.testcontainers.OpensearchContainer;
|
|
||||||
import org.springframework.beans.factory.annotation.Autowired;
|
import org.springframework.beans.factory.annotation.Autowired;
|
||||||
import org.springframework.test.context.DynamicPropertyRegistry;
|
|
||||||
import org.springframework.test.context.DynamicPropertySource;
|
|
||||||
import org.testcontainers.junit.jupiter.Container;
|
|
||||||
|
|
||||||
import java.time.Instant;
|
import java.time.Instant;
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
|
|
||||||
import static org.junit.jupiter.api.Assertions.*;
|
import static org.junit.jupiter.api.Assertions.*;
|
||||||
|
|
||||||
// Extends AbstractPostgresIT for PostgreSQL datasource needed by Spring context
|
// Extends AbstractPostgresIT which provides both PostgreSQL and OpenSearch testcontainers
|
||||||
class OpenSearchIndexIT extends AbstractPostgresIT {
|
class OpenSearchIndexIT extends AbstractPostgresIT {
|
||||||
|
|
||||||
@Container
|
|
||||||
static final OpensearchContainer<?> opensearch =
|
|
||||||
new OpensearchContainer<>("opensearchproject/opensearch:2.19.0");
|
|
||||||
|
|
||||||
@DynamicPropertySource
|
|
||||||
static void configureOpenSearch(DynamicPropertyRegistry registry) {
|
|
||||||
registry.add("opensearch.url", opensearch::getHttpHostAddress);
|
|
||||||
}
|
|
||||||
|
|
||||||
@Autowired
|
@Autowired
|
||||||
SearchIndex searchIndex;
|
SearchIndex searchIndex;
|
||||||
|
|
||||||
|
|||||||
@@ -29,7 +29,7 @@ class BootstrapTokenIT extends AbstractPostgresIT {
|
|||||||
{
|
{
|
||||||
"agentId": "bootstrap-test-agent",
|
"agentId": "bootstrap-test-agent",
|
||||||
"name": "Bootstrap Test",
|
"name": "Bootstrap Test",
|
||||||
"group": "test-group",
|
"application": "test-group",
|
||||||
"version": "1.0.0",
|
"version": "1.0.0",
|
||||||
"routeIds": [],
|
"routeIds": [],
|
||||||
"capabilities": {}
|
"capabilities": {}
|
||||||
@@ -97,7 +97,7 @@ class BootstrapTokenIT extends AbstractPostgresIT {
|
|||||||
{
|
{
|
||||||
"agentId": "bootstrap-test-previous",
|
"agentId": "bootstrap-test-previous",
|
||||||
"name": "Previous Token Test",
|
"name": "Previous Token Test",
|
||||||
"group": "test-group",
|
"application": "test-group",
|
||||||
"version": "1.0.0",
|
"version": "1.0.0",
|
||||||
"routeIds": [],
|
"routeIds": [],
|
||||||
"capabilities": {}
|
"capabilities": {}
|
||||||
|
|||||||
@@ -39,7 +39,7 @@ class JwtRefreshIT extends AbstractPostgresIT {
|
|||||||
{
|
{
|
||||||
"agentId": "%s",
|
"agentId": "%s",
|
||||||
"name": "Refresh Test Agent",
|
"name": "Refresh Test Agent",
|
||||||
"group": "test-group",
|
"application": "test-group",
|
||||||
"version": "1.0.0",
|
"version": "1.0.0",
|
||||||
"routeIds": [],
|
"routeIds": [],
|
||||||
"capabilities": {}
|
"capabilities": {}
|
||||||
@@ -79,6 +79,8 @@ class JwtRefreshIT extends AbstractPostgresIT {
|
|||||||
|
|
||||||
JsonNode body = objectMapper.readTree(response.getBody());
|
JsonNode body = objectMapper.readTree(response.getBody());
|
||||||
assertThat(body.get("accessToken").asText()).isNotEmpty();
|
assertThat(body.get("accessToken").asText()).isNotEmpty();
|
||||||
|
assertThat(body.get("refreshToken").asText()).isNotEmpty();
|
||||||
|
assertThat(body.get("refreshToken").asText()).isNotEqualTo(refreshToken);
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
|
|||||||
@@ -78,7 +78,7 @@ class JwtServiceTest {
|
|||||||
String token = jwtService.createAccessToken("user:admin", "user", roles);
|
String token = jwtService.createAccessToken("user:admin", "user", roles);
|
||||||
JwtService.JwtValidationResult result = jwtService.validateAccessToken(token);
|
JwtService.JwtValidationResult result = jwtService.validateAccessToken(token);
|
||||||
assertEquals("user:admin", result.subject());
|
assertEquals("user:admin", result.subject());
|
||||||
assertEquals("user", result.group());
|
assertEquals("user", result.application());
|
||||||
assertEquals(roles, result.roles());
|
assertEquals(roles, result.roles());
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -88,7 +88,7 @@ class JwtServiceTest {
|
|||||||
String token = jwtService.createRefreshToken("agent-1", "default", roles);
|
String token = jwtService.createRefreshToken("agent-1", "default", roles);
|
||||||
JwtService.JwtValidationResult result = jwtService.validateRefreshToken(token);
|
JwtService.JwtValidationResult result = jwtService.validateRefreshToken(token);
|
||||||
assertEquals("agent-1", result.subject());
|
assertEquals("agent-1", result.subject());
|
||||||
assertEquals("default", result.group());
|
assertEquals("default", result.application());
|
||||||
assertEquals(roles, result.roles());
|
assertEquals(roles, result.roles());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -32,7 +32,7 @@ class RegistrationSecurityIT extends AbstractPostgresIT {
|
|||||||
{
|
{
|
||||||
"agentId": "%s",
|
"agentId": "%s",
|
||||||
"name": "Security Test Agent",
|
"name": "Security Test Agent",
|
||||||
"group": "test-group",
|
"application": "test-group",
|
||||||
"version": "1.0.0",
|
"version": "1.0.0",
|
||||||
"routeIds": [],
|
"routeIds": [],
|
||||||
"capabilities": {}
|
"capabilities": {}
|
||||||
|
|||||||
@@ -90,7 +90,7 @@ class SseSigningIT extends AbstractPostgresIT {
|
|||||||
{
|
{
|
||||||
"agentId": "%s",
|
"agentId": "%s",
|
||||||
"name": "SSE Signing Test Agent",
|
"name": "SSE Signing Test Agent",
|
||||||
"group": "test-group",
|
"application": "test-group",
|
||||||
"version": "1.0.0",
|
"version": "1.0.0",
|
||||||
"routeIds": ["route-1"],
|
"routeIds": ["route-1"],
|
||||||
"capabilities": {}
|
"capabilities": {}
|
||||||
|
|||||||
@@ -23,38 +23,41 @@ class PostgresStatsStoreIT extends AbstractPostgresIT {
|
|||||||
|
|
||||||
@Test
|
@Test
|
||||||
void statsReturnsCountsForTimeWindow() {
|
void statsReturnsCountsForTimeWindow() {
|
||||||
Instant now = Instant.now().truncatedTo(ChronoUnit.SECONDS);
|
// Use a unique route + statsForRoute to avoid data contamination from other tests
|
||||||
insertExecution("stats-1", "route-a", "app-1", "COMPLETED", now, 100L);
|
String uniqueRoute = "stats-route-" + System.nanoTime();
|
||||||
insertExecution("stats-2", "route-a", "app-1", "FAILED", now.plusSeconds(10), 200L);
|
Instant base = Instant.now().minus(5, ChronoUnit.MINUTES).truncatedTo(ChronoUnit.SECONDS);
|
||||||
insertExecution("stats-3", "route-b", "app-1", "COMPLETED", now.plusSeconds(20), 50L);
|
insertExecution("stats-1-" + uniqueRoute, uniqueRoute, "app-stats", "COMPLETED", base, 100L);
|
||||||
|
insertExecution("stats-2-" + uniqueRoute, uniqueRoute, "app-stats", "FAILED", base.plusSeconds(10), 200L);
|
||||||
|
insertExecution("stats-3-" + uniqueRoute, uniqueRoute, "app-stats", "COMPLETED", base.plusSeconds(20), 50L);
|
||||||
|
|
||||||
// Force continuous aggregate refresh
|
// Force continuous aggregate refresh
|
||||||
jdbc.execute("CALL refresh_continuous_aggregate('stats_1m_all', NOW() - INTERVAL '1 hour', NOW() + INTERVAL '1 hour')");
|
jdbc.execute("CALL refresh_continuous_aggregate('stats_1m_route', NOW() - INTERVAL '1 hour', NOW() + INTERVAL '1 hour')");
|
||||||
|
|
||||||
ExecutionStats stats = statsStore.stats(now.minusSeconds(60), now.plusSeconds(60));
|
ExecutionStats stats = statsStore.statsForRoute(base.minusSeconds(60), base.plusSeconds(60), uniqueRoute, null);
|
||||||
assertEquals(3, stats.totalCount());
|
assertEquals(3, stats.totalCount());
|
||||||
assertEquals(1, stats.failedCount());
|
assertEquals(1, stats.failedCount());
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
void timeseriesReturnsBuckets() {
|
void timeseriesReturnsBuckets() {
|
||||||
Instant now = Instant.now().truncatedTo(ChronoUnit.MINUTES);
|
String uniqueRoute = "ts-route-" + System.nanoTime();
|
||||||
|
Instant base = Instant.now().minus(10, ChronoUnit.MINUTES).truncatedTo(ChronoUnit.MINUTES);
|
||||||
for (int i = 0; i < 10; i++) {
|
for (int i = 0; i < 10; i++) {
|
||||||
insertExecution("ts-" + i, "route-a", "app-1", "COMPLETED",
|
insertExecution("ts-" + i + "-" + uniqueRoute, uniqueRoute, "app-ts", "COMPLETED",
|
||||||
now.plusSeconds(i * 30), 100L + i);
|
base.plusSeconds(i * 30), 100L + i);
|
||||||
}
|
}
|
||||||
|
|
||||||
jdbc.execute("CALL refresh_continuous_aggregate('stats_1m_all', NOW() - INTERVAL '1 hour', NOW() + INTERVAL '1 hour')");
|
jdbc.execute("CALL refresh_continuous_aggregate('stats_1m_route', NOW() - INTERVAL '1 hour', NOW() + INTERVAL '1 hour')");
|
||||||
|
|
||||||
StatsTimeseries ts = statsStore.timeseries(now.minus(1, ChronoUnit.MINUTES), now.plus(10, ChronoUnit.MINUTES), 5);
|
StatsTimeseries ts = statsStore.timeseriesForRoute(base.minus(1, ChronoUnit.MINUTES), base.plus(10, ChronoUnit.MINUTES), 5, uniqueRoute, null);
|
||||||
assertNotNull(ts);
|
assertNotNull(ts);
|
||||||
assertFalse(ts.buckets().isEmpty());
|
assertFalse(ts.buckets().isEmpty());
|
||||||
}
|
}
|
||||||
|
|
||||||
private void insertExecution(String id, String routeId, String groupName,
|
private void insertExecution(String id, String routeId, String applicationName,
|
||||||
String status, Instant startTime, long durationMs) {
|
String status, Instant startTime, long durationMs) {
|
||||||
executionStore.upsert(new ExecutionRecord(
|
executionStore.upsert(new ExecutionRecord(
|
||||||
id, routeId, "agent-1", groupName, status, null, null,
|
id, routeId, "agent-1", applicationName, status, null, null,
|
||||||
startTime, startTime.plusMillis(durationMs), durationMs,
|
startTime, startTime.plusMillis(durationMs), durationMs,
|
||||||
status.equals("FAILED") ? "error" : null, null, null));
|
status.equals("FAILED") ? "error" : null, null, null));
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -4,6 +4,7 @@ spring:
|
|||||||
|
|
||||||
opensearch:
|
opensearch:
|
||||||
url: http://localhost:9200
|
url: http://localhost:9200
|
||||||
|
debounce-ms: 100
|
||||||
|
|
||||||
ingestion:
|
ingestion:
|
||||||
buffer-capacity: 100
|
buffer-capacity: 100
|
||||||
|
|||||||
@@ -27,6 +27,16 @@
|
|||||||
<groupId>org.slf4j</groupId>
|
<groupId>org.slf4j</groupId>
|
||||||
<artifactId>slf4j-api</artifactId>
|
<artifactId>slf4j-api</artifactId>
|
||||||
</dependency>
|
</dependency>
|
||||||
|
<dependency>
|
||||||
|
<groupId>jakarta.servlet</groupId>
|
||||||
|
<artifactId>jakarta.servlet-api</artifactId>
|
||||||
|
<scope>provided</scope>
|
||||||
|
</dependency>
|
||||||
|
<dependency>
|
||||||
|
<groupId>org.springframework.security</groupId>
|
||||||
|
<artifactId>spring-security-core</artifactId>
|
||||||
|
<scope>provided</scope>
|
||||||
|
</dependency>
|
||||||
<dependency>
|
<dependency>
|
||||||
<groupId>org.junit.jupiter</groupId>
|
<groupId>org.junit.jupiter</groupId>
|
||||||
<artifactId>junit-jupiter</artifactId>
|
<artifactId>junit-jupiter</artifactId>
|
||||||
|
|||||||
@@ -0,0 +1,5 @@
|
|||||||
|
package com.cameleer3.server.core.admin;
|
||||||
|
|
||||||
|
public enum AuditCategory {
|
||||||
|
INFRA, AUTH, USER_MGMT, CONFIG, RBAC
|
||||||
|
}
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user