Compare commits
33 Commits
9e7e35cbe3
...
autoload-w
| Author | SHA1 | Date | |
|---|---|---|---|
| af5175b96a | |||
| 8af8c1af9c | |||
| d4c6240485 | |||
| 4d5a3b1bf5 | |||
| 8ba7e3bb84 | |||
| 0782675a2b | |||
| 5a18c73572 | |||
| 1c16f65476 | |||
| ae8029f9c4 | |||
| 882ba0da84 | |||
| ee4fc31b9d | |||
| c791495572 | |||
| 35182ccb28 | |||
| 16e6b69fc7 | |||
| a7962eec09 | |||
| 2182be1008 | |||
| 43b27044bb | |||
| 4df621c5c8 | |||
| 57fa3bf7cf | |||
| 1d59ff5de4 | |||
| f96861d417 | |||
| 643023b6d5 | |||
| feb070c165 | |||
| 6a86dd7ca6 | |||
| 6307888722 | |||
| 9b0ff4a6d2 | |||
| 5c0ff6f271 | |||
| 1645ad84ee | |||
| 765afc7d76 | |||
| b5d6bb2243 | |||
| a7ed135af2 | |||
| 71ea3f34ca | |||
| 5b45b17fa6 |
@@ -50,8 +50,8 @@ web/node_modules/
|
|||||||
web/dist/
|
web/dist/
|
||||||
web/.vite/
|
web/.vite/
|
||||||
|
|
||||||
# SQLx offline data (generated at build time)
|
# SQLx offline data (generated when using `cargo sqlx prepare`)
|
||||||
#.sqlx/
|
# .sqlx/
|
||||||
|
|
||||||
# Configuration files (copied selectively)
|
# Configuration files (copied selectively)
|
||||||
config.development.yaml
|
config.development.yaml
|
||||||
@@ -61,6 +61,7 @@ config.example.yaml
|
|||||||
|
|
||||||
# Scripts (not needed in runtime)
|
# Scripts (not needed in runtime)
|
||||||
scripts/
|
scripts/
|
||||||
|
!scripts/load_core_pack.py
|
||||||
|
|
||||||
# Cargo lock (workspace handles this)
|
# Cargo lock (workspace handles this)
|
||||||
# Uncomment if you want deterministic builds:
|
# Uncomment if you want deterministic builds:
|
||||||
|
|||||||
@@ -9,10 +9,12 @@ on:
|
|||||||
|
|
||||||
env:
|
env:
|
||||||
CARGO_TERM_COLOR: always
|
CARGO_TERM_COLOR: always
|
||||||
RUST_MIN_STACK: 16777216
|
RUST_MIN_STACK: 67108864
|
||||||
CARGO_INCREMENTAL: 0
|
CARGO_INCREMENTAL: 0
|
||||||
CARGO_NET_RETRY: 10
|
CARGO_NET_RETRY: 10
|
||||||
RUSTUP_MAX_RETRIES: 10
|
RUSTUP_MAX_RETRIES: 10
|
||||||
|
# Gitea Actions runner tool cache. Actions like setup-node/setup-python can reuse this.
|
||||||
|
RUNNER_TOOL_CACHE: /toolcache
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
rust-fmt:
|
rust-fmt:
|
||||||
@@ -22,6 +24,17 @@ jobs:
|
|||||||
- name: Checkout
|
- name: Checkout
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Cache Rust toolchain
|
||||||
|
uses: actions/cache@v4
|
||||||
|
with:
|
||||||
|
path: |
|
||||||
|
~/.rustup/toolchains
|
||||||
|
~/.rustup/update-hashes
|
||||||
|
key: rustup-rustfmt-${{ runner.os }}-stable-v1
|
||||||
|
restore-keys: |
|
||||||
|
rustup-${{ runner.os }}-stable-v1
|
||||||
|
rustup-
|
||||||
|
|
||||||
- name: Setup Rust
|
- name: Setup Rust
|
||||||
uses: dtolnay/rust-toolchain@stable
|
uses: dtolnay/rust-toolchain@stable
|
||||||
with:
|
with:
|
||||||
@@ -37,6 +50,17 @@ jobs:
|
|||||||
- name: Checkout
|
- name: Checkout
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Cache Rust toolchain
|
||||||
|
uses: actions/cache@v4
|
||||||
|
with:
|
||||||
|
path: |
|
||||||
|
~/.rustup/toolchains
|
||||||
|
~/.rustup/update-hashes
|
||||||
|
key: rustup-clippy-${{ runner.os }}-stable-v1
|
||||||
|
restore-keys: |
|
||||||
|
rustup-${{ runner.os }}-stable-v1
|
||||||
|
rustup-
|
||||||
|
|
||||||
- name: Setup Rust
|
- name: Setup Rust
|
||||||
uses: dtolnay/rust-toolchain@stable
|
uses: dtolnay/rust-toolchain@stable
|
||||||
with:
|
with:
|
||||||
@@ -72,6 +96,17 @@ jobs:
|
|||||||
- name: Checkout
|
- name: Checkout
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Cache Rust toolchain
|
||||||
|
uses: actions/cache@v4
|
||||||
|
with:
|
||||||
|
path: |
|
||||||
|
~/.rustup/toolchains
|
||||||
|
~/.rustup/update-hashes
|
||||||
|
key: rustup-test-${{ runner.os }}-stable-v1
|
||||||
|
restore-keys: |
|
||||||
|
rustup-${{ runner.os }}-stable-v1
|
||||||
|
rustup-
|
||||||
|
|
||||||
- name: Setup Rust
|
- name: Setup Rust
|
||||||
uses: dtolnay/rust-toolchain@stable
|
uses: dtolnay/rust-toolchain@stable
|
||||||
|
|
||||||
@@ -105,6 +140,17 @@ jobs:
|
|||||||
- name: Checkout
|
- name: Checkout
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Cache Rust toolchain
|
||||||
|
uses: actions/cache@v4
|
||||||
|
with:
|
||||||
|
path: |
|
||||||
|
~/.rustup/toolchains
|
||||||
|
~/.rustup/update-hashes
|
||||||
|
key: rustup-audit-${{ runner.os }}-stable-v1
|
||||||
|
restore-keys: |
|
||||||
|
rustup-${{ runner.os }}-stable-v1
|
||||||
|
rustup-
|
||||||
|
|
||||||
- name: Setup Rust
|
- name: Setup Rust
|
||||||
uses: dtolnay/rust-toolchain@stable
|
uses: dtolnay/rust-toolchain@stable
|
||||||
|
|
||||||
|
|||||||
333
.gitea/workflows/publish.yml
Normal file
333
.gitea/workflows/publish.yml
Normal file
@@ -0,0 +1,333 @@
|
|||||||
|
name: Publish Images And Chart
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_dispatch:
|
||||||
|
push:
|
||||||
|
branches:
|
||||||
|
- main
|
||||||
|
- master
|
||||||
|
tags:
|
||||||
|
- "v*"
|
||||||
|
|
||||||
|
env:
|
||||||
|
REGISTRY_HOST: ${{ vars.CLUSTER_GITEA_HOST }}
|
||||||
|
REGISTRY_NAMESPACE: ${{ vars.CONTAINER_REGISTRY_NAMESPACE }}
|
||||||
|
REGISTRY_PLAIN_HTTP: ${{ vars.CONTAINER_REGISTRY_INSECURE }}
|
||||||
|
CHART_NAME: attune
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
metadata:
|
||||||
|
name: Resolve Publish Metadata
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
outputs:
|
||||||
|
registry: ${{ steps.meta.outputs.registry }}
|
||||||
|
namespace: ${{ steps.meta.outputs.namespace }}
|
||||||
|
registry_plain_http: ${{ steps.meta.outputs.registry_plain_http }}
|
||||||
|
image_tag: ${{ steps.meta.outputs.image_tag }}
|
||||||
|
image_tags: ${{ steps.meta.outputs.image_tags }}
|
||||||
|
chart_version: ${{ steps.meta.outputs.chart_version }}
|
||||||
|
app_version: ${{ steps.meta.outputs.app_version }}
|
||||||
|
release_channel: ${{ steps.meta.outputs.release_channel }}
|
||||||
|
steps:
|
||||||
|
- name: Resolve tags and registry paths
|
||||||
|
id: meta
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
registry="${REGISTRY_HOST}"
|
||||||
|
namespace="${REGISTRY_NAMESPACE}"
|
||||||
|
registry_plain_http_raw="${REGISTRY_PLAIN_HTTP:-}"
|
||||||
|
registry_host_only="${registry%%:*}"
|
||||||
|
registry_plain_http_default="false"
|
||||||
|
|
||||||
|
if [ -z "$registry" ]; then
|
||||||
|
echo "CLUSTER_GITEA_HOST app variable is required"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [ -z "$namespace" ]; then
|
||||||
|
namespace="${{ github.repository_owner }}"
|
||||||
|
fi
|
||||||
|
|
||||||
|
if printf '%s' "$registry_host_only" | grep -Eq '(^|[.])svc[.]cluster[.]local$'; then
|
||||||
|
registry_plain_http_default="true"
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [ -n "$registry_plain_http_raw" ]; then
|
||||||
|
case "$(printf '%s' "$registry_plain_http_raw" | tr '[:upper:]' '[:lower:]')" in
|
||||||
|
1|true|yes|on)
|
||||||
|
registry_plain_http="true"
|
||||||
|
;;
|
||||||
|
0|false|no|off)
|
||||||
|
registry_plain_http="false"
|
||||||
|
;;
|
||||||
|
*)
|
||||||
|
echo "CONTAINER_REGISTRY_INSECURE must be a boolean when set"
|
||||||
|
exit 1
|
||||||
|
;;
|
||||||
|
esac
|
||||||
|
else
|
||||||
|
registry_plain_http="$registry_plain_http_default"
|
||||||
|
fi
|
||||||
|
|
||||||
|
short_sha="$(printf '%s' "${{ github.sha }}" | cut -c1-12)"
|
||||||
|
ref_type="${{ github.ref_type }}"
|
||||||
|
ref_name="${{ github.ref_name }}"
|
||||||
|
|
||||||
|
if [ "$ref_type" = "tag" ] && printf '%s' "$ref_name" | grep -Eq '^v[0-9]+\.[0-9]+\.[0-9]+([-.].*)?$'; then
|
||||||
|
version="${ref_name#v}"
|
||||||
|
image_tags="${version},latest,sha-${short_sha}"
|
||||||
|
chart_version="$version"
|
||||||
|
release_channel="release"
|
||||||
|
else
|
||||||
|
version="sha-${short_sha}"
|
||||||
|
image_tags="edge,sha-${short_sha}"
|
||||||
|
chart_version="0.0.0-dev.${{ github.run_number }}"
|
||||||
|
release_channel="edge"
|
||||||
|
fi
|
||||||
|
|
||||||
|
{
|
||||||
|
echo "registry=$registry"
|
||||||
|
echo "namespace=$namespace"
|
||||||
|
echo "registry_plain_http=$registry_plain_http"
|
||||||
|
echo "image_tag=$version"
|
||||||
|
echo "image_tags=$image_tags"
|
||||||
|
echo "chart_version=$chart_version"
|
||||||
|
echo "app_version=$version"
|
||||||
|
echo "release_channel=$release_channel"
|
||||||
|
} >> "$GITHUB_OUTPUT"
|
||||||
|
|
||||||
|
publish-images:
|
||||||
|
name: Publish ${{ matrix.image.name }}
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs: metadata
|
||||||
|
strategy:
|
||||||
|
fail-fast: false
|
||||||
|
matrix:
|
||||||
|
image:
|
||||||
|
- name: api
|
||||||
|
repository: attune-api
|
||||||
|
dockerfile: docker/Dockerfile.optimized
|
||||||
|
context: .
|
||||||
|
target: ""
|
||||||
|
build_args: |
|
||||||
|
SERVICE=api
|
||||||
|
- name: executor
|
||||||
|
repository: attune-executor
|
||||||
|
dockerfile: docker/Dockerfile.optimized
|
||||||
|
context: .
|
||||||
|
target: ""
|
||||||
|
build_args: |
|
||||||
|
SERVICE=executor
|
||||||
|
- name: notifier
|
||||||
|
repository: attune-notifier
|
||||||
|
dockerfile: docker/Dockerfile.optimized
|
||||||
|
context: .
|
||||||
|
target: ""
|
||||||
|
build_args: |
|
||||||
|
SERVICE=notifier
|
||||||
|
- name: sensor
|
||||||
|
repository: attune-sensor
|
||||||
|
dockerfile: docker/Dockerfile.sensor.optimized
|
||||||
|
context: .
|
||||||
|
target: sensor-full
|
||||||
|
build_args: ""
|
||||||
|
- name: worker
|
||||||
|
repository: attune-worker
|
||||||
|
dockerfile: docker/Dockerfile.worker.optimized
|
||||||
|
context: .
|
||||||
|
target: worker-full
|
||||||
|
build_args: ""
|
||||||
|
- name: web
|
||||||
|
repository: attune-web
|
||||||
|
dockerfile: docker/Dockerfile.web
|
||||||
|
context: .
|
||||||
|
target: ""
|
||||||
|
build_args: ""
|
||||||
|
- name: migrations
|
||||||
|
repository: attune-migrations
|
||||||
|
dockerfile: docker/Dockerfile.migrations
|
||||||
|
context: .
|
||||||
|
target: ""
|
||||||
|
build_args: ""
|
||||||
|
- name: init-user
|
||||||
|
repository: attune-init-user
|
||||||
|
dockerfile: docker/Dockerfile.init-user
|
||||||
|
context: .
|
||||||
|
target: ""
|
||||||
|
build_args: ""
|
||||||
|
- name: init-packs
|
||||||
|
repository: attune-init-packs
|
||||||
|
dockerfile: docker/Dockerfile.init-packs
|
||||||
|
context: .
|
||||||
|
target: ""
|
||||||
|
build_args: ""
|
||||||
|
- name: agent
|
||||||
|
repository: attune-agent
|
||||||
|
dockerfile: docker/Dockerfile.agent
|
||||||
|
context: .
|
||||||
|
target: agent-init
|
||||||
|
build_args: ""
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Setup Docker Buildx
|
||||||
|
if: needs.metadata.outputs.registry_plain_http != 'true'
|
||||||
|
uses: docker/setup-buildx-action@v3
|
||||||
|
|
||||||
|
- name: Setup Docker Buildx For Plain HTTP Registry
|
||||||
|
if: needs.metadata.outputs.registry_plain_http == 'true'
|
||||||
|
uses: docker/setup-buildx-action@v3
|
||||||
|
with:
|
||||||
|
buildkitd-config-inline: |
|
||||||
|
[registry."${{ needs.metadata.outputs.registry }}"]
|
||||||
|
http = true
|
||||||
|
insecure = true
|
||||||
|
|
||||||
|
- name: Configure OCI registry auth
|
||||||
|
shell: bash
|
||||||
|
env:
|
||||||
|
REGISTRY_USERNAME: ${{ secrets.CONTAINER_REGISTRY_USERNAME }}
|
||||||
|
REGISTRY_PASSWORD: ${{ secrets.CONTAINER_REGISTRY_PASSWORD }}
|
||||||
|
GITHUB_TOKEN_FALLBACK: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
username="${REGISTRY_USERNAME:-${{ github.actor }}}"
|
||||||
|
password="${REGISTRY_PASSWORD:-${GITHUB_TOKEN_FALLBACK:-}}"
|
||||||
|
registry="${{ needs.metadata.outputs.registry }}"
|
||||||
|
|
||||||
|
if [ -z "$password" ]; then
|
||||||
|
echo "Set CONTAINER_REGISTRY_PASSWORD or enable GITHUB_TOKEN package writes"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
mkdir -p "$HOME/.docker"
|
||||||
|
auth="$(printf '%s:%s' "$username" "$password" | base64 | tr -d '\n')"
|
||||||
|
|
||||||
|
cat > "$HOME/.docker/config.json" <<EOF
|
||||||
|
{
|
||||||
|
"auths": {
|
||||||
|
"${registry}": {
|
||||||
|
"auth": "${auth}"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
EOF
|
||||||
|
|
||||||
|
- name: Prepare image tags
|
||||||
|
id: tags
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
image_ref_base="${{ needs.metadata.outputs.registry }}/${{ needs.metadata.outputs.namespace }}/${{ matrix.image.repository }}"
|
||||||
|
tag_lines=""
|
||||||
|
IFS=',' read -ra tags <<< "${{ needs.metadata.outputs.image_tags }}"
|
||||||
|
for tag in "${tags[@]}"; do
|
||||||
|
tag_lines="${tag_lines}${image_ref_base}:${tag}"$'\n'
|
||||||
|
done
|
||||||
|
printf 'tags<<EOF\n%sEOF\n' "$tag_lines" >> "$GITHUB_OUTPUT"
|
||||||
|
|
||||||
|
- name: Build and push image
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
image_names_csv=""
|
||||||
|
build_cmd=(
|
||||||
|
docker buildx build
|
||||||
|
"${{ matrix.image.context }}"
|
||||||
|
--file "${{ matrix.image.dockerfile }}"
|
||||||
|
)
|
||||||
|
|
||||||
|
if [ -n "${{ matrix.image.target }}" ]; then
|
||||||
|
build_cmd+=(--target "${{ matrix.image.target }}")
|
||||||
|
fi
|
||||||
|
|
||||||
|
while IFS= read -r tag; do
|
||||||
|
if [ -n "$tag" ]; then
|
||||||
|
if [ -n "$image_names_csv" ]; then
|
||||||
|
image_names_csv="${image_names_csv},${tag}"
|
||||||
|
else
|
||||||
|
image_names_csv="${tag}"
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [ "${{ needs.metadata.outputs.registry_plain_http }}" != "true" ]; then
|
||||||
|
build_cmd+=(--tag "$tag")
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
done <<< "${{ steps.tags.outputs.tags }}"
|
||||||
|
|
||||||
|
while IFS= read -r build_arg; do
|
||||||
|
[ -n "$build_arg" ] && build_cmd+=(--build-arg "$build_arg")
|
||||||
|
done <<< "${{ matrix.image.build_args }}"
|
||||||
|
|
||||||
|
if [ "${{ needs.metadata.outputs.registry_plain_http }}" = "true" ]; then
|
||||||
|
build_cmd+=(--output "type=image,\"name=${image_names_csv}\",push=true,registry.insecure=true")
|
||||||
|
else
|
||||||
|
build_cmd+=(--push)
|
||||||
|
fi
|
||||||
|
|
||||||
|
"${build_cmd[@]}"
|
||||||
|
|
||||||
|
publish-chart:
|
||||||
|
name: Publish Helm Chart
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs:
|
||||||
|
- metadata
|
||||||
|
- publish-images
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Setup Helm
|
||||||
|
uses: azure/setup-helm@v4
|
||||||
|
|
||||||
|
- name: Log in to Gitea OCI registry
|
||||||
|
shell: bash
|
||||||
|
env:
|
||||||
|
REGISTRY_USERNAME: ${{ secrets.CONTAINER_REGISTRY_USERNAME }}
|
||||||
|
REGISTRY_PASSWORD: ${{ secrets.CONTAINER_REGISTRY_PASSWORD }}
|
||||||
|
GITHUB_TOKEN_FALLBACK: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
registry_username="${REGISTRY_USERNAME:-${{ github.actor }}}"
|
||||||
|
registry_password="${REGISTRY_PASSWORD:-${GITHUB_TOKEN_FALLBACK:-}}"
|
||||||
|
login_args=()
|
||||||
|
|
||||||
|
if [ -z "$registry_password" ]; then
|
||||||
|
echo "Set CONTAINER_REGISTRY_PASSWORD or enable GITHUB_TOKEN package writes"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [ "${{ needs.metadata.outputs.registry_plain_http }}" = "true" ]; then
|
||||||
|
login_args+=(--plain-http)
|
||||||
|
fi
|
||||||
|
|
||||||
|
printf '%s' "$registry_password" | helm registry login "${{ needs.metadata.outputs.registry }}" \
|
||||||
|
--username "$registry_username" \
|
||||||
|
"${login_args[@]}" \
|
||||||
|
--password-stdin
|
||||||
|
|
||||||
|
- name: Lint chart
|
||||||
|
run: |
|
||||||
|
helm lint charts/attune
|
||||||
|
|
||||||
|
- name: Package chart
|
||||||
|
run: |
|
||||||
|
mkdir -p dist
|
||||||
|
helm package charts/attune \
|
||||||
|
--destination dist \
|
||||||
|
--version "${{ needs.metadata.outputs.chart_version }}" \
|
||||||
|
--app-version "${{ needs.metadata.outputs.app_version }}"
|
||||||
|
|
||||||
|
- name: Push chart to OCI registry
|
||||||
|
run: |
|
||||||
|
push_args=()
|
||||||
|
if [ "${{ needs.metadata.outputs.registry_plain_http }}" = "true" ]; then
|
||||||
|
push_args+=(--plain-http)
|
||||||
|
fi
|
||||||
|
|
||||||
|
helm push "dist/${CHART_NAME}-${{ needs.metadata.outputs.chart_version }}.tgz" \
|
||||||
|
"oci://${{ needs.metadata.outputs.registry }}/${{ needs.metadata.outputs.namespace }}/helm" \
|
||||||
|
"${push_args[@]}"
|
||||||
15
.githooks/pre-commit
Executable file
15
.githooks/pre-commit
Executable file
@@ -0,0 +1,15 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
repo_root="$(git rev-parse --show-toplevel)"
|
||||||
|
cd "$repo_root"
|
||||||
|
|
||||||
|
echo "Formatting Rust code..."
|
||||||
|
cargo fmt --all
|
||||||
|
|
||||||
|
echo "Refreshing staged Rust files..."
|
||||||
|
git add --all '*.rs'
|
||||||
|
|
||||||
|
echo "Running pre-commit checks..."
|
||||||
|
make pre-commit
|
||||||
1
.gitignore
vendored
1
.gitignore
vendored
@@ -78,3 +78,4 @@ docker-compose.override.yml
|
|||||||
*.pid
|
*.pid
|
||||||
|
|
||||||
packs.examples/
|
packs.examples/
|
||||||
|
codex/
|
||||||
|
|||||||
@@ -2,6 +2,5 @@ target/
|
|||||||
web/dist/
|
web/dist/
|
||||||
web/node_modules/
|
web/node_modules/
|
||||||
web/src/api/
|
web/src/api/
|
||||||
packs/
|
|
||||||
packs.dev/
|
packs.dev/
|
||||||
packs.external/
|
packs.external/
|
||||||
|
|||||||
714
Cargo.lock
generated
714
Cargo.lock
generated
File diff suppressed because it is too large
Load Diff
10
Cargo.toml
10
Cargo.toml
@@ -21,7 +21,7 @@ repository = "https://git.rdrx.app/attune-system/attune"
|
|||||||
[workspace.dependencies]
|
[workspace.dependencies]
|
||||||
# Async runtime
|
# Async runtime
|
||||||
tokio = { version = "1.50", features = ["full"] }
|
tokio = { version = "1.50", features = ["full"] }
|
||||||
tokio-util = "0.7"
|
tokio-util = { version = "0.7", features = ["io"] }
|
||||||
tokio-stream = { version = "0.1", features = ["sync"] }
|
tokio-stream = { version = "0.1", features = ["sync"] }
|
||||||
|
|
||||||
# Web framework
|
# Web framework
|
||||||
@@ -52,17 +52,17 @@ config = "0.15"
|
|||||||
chrono = { version = "0.4", features = ["serde"] }
|
chrono = { version = "0.4", features = ["serde"] }
|
||||||
|
|
||||||
# UUID
|
# UUID
|
||||||
uuid = { version = "1.21", features = ["v4", "serde"] }
|
uuid = { version = "1.22", features = ["v4", "serde"] }
|
||||||
|
|
||||||
# Validation
|
# Validation
|
||||||
validator = { version = "0.20", features = ["derive"] }
|
validator = { version = "0.20", features = ["derive"] }
|
||||||
|
|
||||||
# CLI
|
# CLI
|
||||||
clap = { version = "4.5", features = ["derive"] }
|
clap = { version = "4.6", features = ["derive"] }
|
||||||
|
|
||||||
# Message queue / PubSub
|
# Message queue / PubSub
|
||||||
# RabbitMQ
|
# RabbitMQ
|
||||||
lapin = "4.1"
|
lapin = "4.3"
|
||||||
# Redis
|
# Redis
|
||||||
redis = { version = "1.0", features = ["tokio-comp", "connection-manager"] }
|
redis = { version = "1.0", features = ["tokio-comp", "connection-manager"] }
|
||||||
|
|
||||||
@@ -114,7 +114,7 @@ futures = "0.3"
|
|||||||
semver = { version = "1.0", features = ["serde"] }
|
semver = { version = "1.0", features = ["serde"] }
|
||||||
|
|
||||||
# Temp files
|
# Temp files
|
||||||
tempfile = "3.26"
|
tempfile = "3.27"
|
||||||
|
|
||||||
# Testing
|
# Testing
|
||||||
mockall = "0.14"
|
mockall = "0.14"
|
||||||
|
|||||||
92
Makefile
92
Makefile
@@ -3,7 +3,10 @@
|
|||||||
docker-up docker-down docker-cache-warm docker-stop-system-services dev watch generate-agents-index \
|
docker-up docker-down docker-cache-warm docker-stop-system-services dev watch generate-agents-index \
|
||||||
docker-build-workers docker-build-worker-base docker-build-worker-python \
|
docker-build-workers docker-build-worker-base docker-build-worker-python \
|
||||||
docker-build-worker-node docker-build-worker-full deny ci-rust ci-web-blocking ci-web-advisory \
|
docker-build-worker-node docker-build-worker-full deny ci-rust ci-web-blocking ci-web-advisory \
|
||||||
ci-security-blocking ci-security-advisory ci-blocking ci-advisory
|
ci-security-blocking ci-security-advisory ci-blocking ci-advisory \
|
||||||
|
fmt-check pre-commit install-git-hooks \
|
||||||
|
build-agent docker-build-agent run-agent run-agent-release \
|
||||||
|
docker-up-agent docker-down-agent
|
||||||
|
|
||||||
# Default target
|
# Default target
|
||||||
help:
|
help:
|
||||||
@@ -25,8 +28,12 @@ help:
|
|||||||
@echo ""
|
@echo ""
|
||||||
@echo "Code Quality:"
|
@echo "Code Quality:"
|
||||||
@echo " make fmt - Format all code"
|
@echo " make fmt - Format all code"
|
||||||
|
@echo " make fmt-check - Verify formatting without changing files"
|
||||||
@echo " make clippy - Run linter"
|
@echo " make clippy - Run linter"
|
||||||
@echo " make lint - Run both fmt and clippy"
|
@echo " make lint - Run both fmt and clippy"
|
||||||
|
@echo " make deny - Run cargo-deny checks"
|
||||||
|
@echo " make pre-commit - Run the git pre-commit checks locally"
|
||||||
|
@echo " make install-git-hooks - Configure git to use the repo hook scripts"
|
||||||
@echo ""
|
@echo ""
|
||||||
@echo "Running Services:"
|
@echo "Running Services:"
|
||||||
@echo " make run-api - Run API service"
|
@echo " make run-api - Run API service"
|
||||||
@@ -55,6 +62,14 @@ help:
|
|||||||
@echo " make docker-up - Start services with docker compose"
|
@echo " make docker-up - Start services with docker compose"
|
||||||
@echo " make docker-down - Stop services"
|
@echo " make docker-down - Stop services"
|
||||||
@echo ""
|
@echo ""
|
||||||
|
@echo "Agent (Universal Worker):"
|
||||||
|
@echo " make build-agent - Build statically-linked agent binary (musl)"
|
||||||
|
@echo " make docker-build-agent - Build agent Docker image"
|
||||||
|
@echo " make run-agent - Run agent in development mode"
|
||||||
|
@echo " make run-agent-release - Run agent in release mode"
|
||||||
|
@echo " make docker-up-agent - Start all services + agent workers (ruby, etc.)"
|
||||||
|
@echo " make docker-down-agent - Stop agent stack"
|
||||||
|
@echo ""
|
||||||
@echo "Development:"
|
@echo "Development:"
|
||||||
@echo " make watch - Watch and rebuild on changes"
|
@echo " make watch - Watch and rebuild on changes"
|
||||||
@echo " make install-tools - Install development tools"
|
@echo " make install-tools - Install development tools"
|
||||||
@@ -64,7 +79,7 @@ help:
|
|||||||
@echo ""
|
@echo ""
|
||||||
|
|
||||||
# Increase rustc stack size to prevent SIGSEGV during compilation
|
# Increase rustc stack size to prevent SIGSEGV during compilation
|
||||||
export RUST_MIN_STACK := 16777216
|
export RUST_MIN_STACK:=67108864
|
||||||
|
|
||||||
# Building
|
# Building
|
||||||
build:
|
build:
|
||||||
@@ -111,6 +126,9 @@ check:
|
|||||||
fmt:
|
fmt:
|
||||||
cargo fmt --all
|
cargo fmt --all
|
||||||
|
|
||||||
|
fmt-check:
|
||||||
|
cargo fmt --all -- --check
|
||||||
|
|
||||||
clippy:
|
clippy:
|
||||||
cargo clippy --all-features -- -D warnings
|
cargo clippy --all-features -- -D warnings
|
||||||
|
|
||||||
@@ -219,38 +237,53 @@ docker-build-api:
|
|||||||
docker-build-web:
|
docker-build-web:
|
||||||
docker compose build web
|
docker compose build web
|
||||||
|
|
||||||
# Build worker images
|
# Agent binary (statically-linked for injection into any container)
|
||||||
docker-build-workers: docker-build-worker-base docker-build-worker-python docker-build-worker-node docker-build-worker-full
|
build-agent:
|
||||||
@echo "✅ All worker images built successfully"
|
@echo "Installing musl target (if not already installed)..."
|
||||||
|
rustup target add x86_64-unknown-linux-musl 2>/dev/null || true
|
||||||
|
@echo "Building statically-linked worker and sensor agent binaries..."
|
||||||
|
SQLX_OFFLINE=true cargo build --release --target x86_64-unknown-linux-musl --bin attune-agent --bin attune-sensor-agent
|
||||||
|
strip target/x86_64-unknown-linux-musl/release/attune-agent
|
||||||
|
strip target/x86_64-unknown-linux-musl/release/attune-sensor-agent
|
||||||
|
@echo "✅ Agent binaries built:"
|
||||||
|
@echo " - target/x86_64-unknown-linux-musl/release/attune-agent"
|
||||||
|
@echo " - target/x86_64-unknown-linux-musl/release/attune-sensor-agent"
|
||||||
|
@ls -lh target/x86_64-unknown-linux-musl/release/attune-agent
|
||||||
|
@ls -lh target/x86_64-unknown-linux-musl/release/attune-sensor-agent
|
||||||
|
|
||||||
docker-build-worker-base:
|
docker-build-agent:
|
||||||
@echo "Building base worker (shell only)..."
|
@echo "Building agent Docker image (statically-linked binary)..."
|
||||||
DOCKER_BUILDKIT=1 docker build --target worker-base -t attune-worker:base -f docker/Dockerfile.worker .
|
DOCKER_BUILDKIT=1 docker buildx build --target agent-init -f docker/Dockerfile.agent -t attune-agent:latest .
|
||||||
@echo "✅ Base worker image built: attune-worker:base"
|
@echo "✅ Agent image built: attune-agent:latest"
|
||||||
|
|
||||||
docker-build-worker-python:
|
run-agent:
|
||||||
@echo "Building Python worker (shell + python)..."
|
cargo run --bin attune-agent
|
||||||
DOCKER_BUILDKIT=1 docker build --target worker-python -t attune-worker:python -f docker/Dockerfile.worker .
|
|
||||||
@echo "✅ Python worker image built: attune-worker:python"
|
|
||||||
|
|
||||||
docker-build-worker-node:
|
run-agent-release:
|
||||||
@echo "Building Node.js worker (shell + node)..."
|
cargo run --bin attune-agent --release
|
||||||
DOCKER_BUILDKIT=1 docker build --target worker-node -t attune-worker:node -f docker/Dockerfile.worker .
|
|
||||||
@echo "✅ Node.js worker image built: attune-worker:node"
|
|
||||||
|
|
||||||
docker-build-worker-full:
|
run-sensor-agent:
|
||||||
@echo "Building full worker (all runtimes)..."
|
cargo run --bin attune-sensor-agent
|
||||||
DOCKER_BUILDKIT=1 docker build --target worker-full -t attune-worker:full -f docker/Dockerfile.worker .
|
|
||||||
@echo "✅ Full worker image built: attune-worker:full"
|
run-sensor-agent-release:
|
||||||
|
cargo run --bin attune-sensor-agent --release
|
||||||
|
|
||||||
docker-up:
|
docker-up:
|
||||||
@echo "Starting all services with Docker Compose..."
|
@echo "Starting all services with Docker Compose..."
|
||||||
docker compose up -d
|
docker compose up -d
|
||||||
|
|
||||||
|
docker-up-agent:
|
||||||
|
@echo "Starting all services + agent-based workers..."
|
||||||
|
docker compose -f docker-compose.yaml -f docker-compose.agent.yaml up -d
|
||||||
|
|
||||||
docker-down:
|
docker-down:
|
||||||
@echo "Stopping all services..."
|
@echo "Stopping all services..."
|
||||||
docker compose down
|
docker compose down
|
||||||
|
|
||||||
|
docker-down-agent:
|
||||||
|
@echo "Stopping all services (including agent workers)..."
|
||||||
|
docker compose -f docker-compose.yaml -f docker-compose.agent.yaml down
|
||||||
|
|
||||||
docker-down-volumes:
|
docker-down-volumes:
|
||||||
@echo "Stopping all services and removing volumes (WARNING: deletes data)..."
|
@echo "Stopping all services and removing volumes (WARNING: deletes data)..."
|
||||||
docker compose down -v
|
docker compose down -v
|
||||||
@@ -341,6 +374,11 @@ ci-web-blocking:
|
|||||||
cd web && npm run typecheck
|
cd web && npm run typecheck
|
||||||
cd web && npm run build
|
cd web && npm run build
|
||||||
|
|
||||||
|
ci-web-pre-commit:
|
||||||
|
cd web && npm ci
|
||||||
|
cd web && npm run lint
|
||||||
|
cd web && npm run typecheck
|
||||||
|
|
||||||
ci-web-advisory:
|
ci-web-advisory:
|
||||||
cd web && npm ci
|
cd web && npm ci
|
||||||
cd web && npm run knip
|
cd web && npm run knip
|
||||||
@@ -381,9 +419,15 @@ licenses:
|
|||||||
cargo license --json > licenses.json
|
cargo license --json > licenses.json
|
||||||
@echo "License information saved to licenses.json"
|
@echo "License information saved to licenses.json"
|
||||||
|
|
||||||
# All-in-one check before committing
|
# Blocking checks run by the git pre-commit hook after formatting.
|
||||||
pre-commit: fmt clippy test
|
# Keep the local web step fast; full production builds stay in CI.
|
||||||
@echo "✅ All checks passed! Ready to commit."
|
pre-commit: deny ci-web-pre-commit ci-security-blocking
|
||||||
|
@echo "✅ Pre-commit checks passed."
|
||||||
|
|
||||||
|
install-git-hooks:
|
||||||
|
git config core.hooksPath .githooks
|
||||||
|
chmod +x .githooks/pre-commit
|
||||||
|
@echo "✅ Git hooks configured to use .githooks/"
|
||||||
|
|
||||||
# CI simulation
|
# CI simulation
|
||||||
ci: ci-blocking ci-advisory
|
ci: ci-blocking ci-advisory
|
||||||
|
|||||||
6
charts/attune/Chart.yaml
Normal file
6
charts/attune/Chart.yaml
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
apiVersion: v2
|
||||||
|
name: attune
|
||||||
|
description: Helm chart for deploying the Attune automation platform
|
||||||
|
type: application
|
||||||
|
version: 0.1.0
|
||||||
|
appVersion: "0.1.0"
|
||||||
26
charts/attune/templates/NOTES.txt
Normal file
26
charts/attune/templates/NOTES.txt
Normal file
@@ -0,0 +1,26 @@
|
|||||||
|
1. Set `global.imageRegistry`, `global.imageNamespace`, and `global.imageTag` so the chart pulls the images published by the Gitea workflow.
|
||||||
|
2. Set `web.config.apiUrl` and `web.config.wsUrl` to browser-reachable endpoints before exposing the web UI.
|
||||||
|
3. The shared `packs`, `runtime_envs`, and `artifacts` PVCs default to `ReadWriteMany`; your cluster storage class must support RWX or you need to override those claims.
|
||||||
|
{{- if .Values.agentWorkers }}
|
||||||
|
|
||||||
|
Agent-based workers enabled:
|
||||||
|
{{- range .Values.agentWorkers }}
|
||||||
|
- {{ .name }}: image={{ .image }}, replicas={{ .replicas | default 1 }}
|
||||||
|
{{- if .runtimes }} runtimes={{ join "," .runtimes }}{{ else }} runtimes=auto-detect{{ end }}
|
||||||
|
{{- end }}
|
||||||
|
|
||||||
|
Each agent worker uses an init container to copy the statically-linked
|
||||||
|
attune-agent binary into the worker pod via an emptyDir volume. The agent
|
||||||
|
auto-detects available runtimes in the container and registers with Attune.
|
||||||
|
|
||||||
|
The default sensor deployment also uses the same injection pattern, copying
|
||||||
|
`attune-sensor-agent` into the pod before starting a stock runtime image.
|
||||||
|
|
||||||
|
To add more agent workers, append entries to `agentWorkers` in your values:
|
||||||
|
|
||||||
|
agentWorkers:
|
||||||
|
- name: my-runtime
|
||||||
|
image: my-org/my-image:latest
|
||||||
|
replicas: 1
|
||||||
|
runtimes: [] # auto-detect
|
||||||
|
{{- end }}
|
||||||
113
charts/attune/templates/_helpers.tpl
Normal file
113
charts/attune/templates/_helpers.tpl
Normal file
@@ -0,0 +1,113 @@
|
|||||||
|
{{- define "attune.name" -}}
|
||||||
|
{{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" -}}
|
||||||
|
{{- end -}}
|
||||||
|
|
||||||
|
{{- define "attune.fullname" -}}
|
||||||
|
{{- if .Values.fullnameOverride -}}
|
||||||
|
{{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" -}}
|
||||||
|
{{- else -}}
|
||||||
|
{{- printf "%s-%s" .Release.Name (include "attune.name" .) | trunc 63 | trimSuffix "-" -}}
|
||||||
|
{{- end -}}
|
||||||
|
{{- end -}}
|
||||||
|
|
||||||
|
{{- define "attune.chart" -}}
|
||||||
|
{{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" -}}
|
||||||
|
{{- end -}}
|
||||||
|
|
||||||
|
{{- define "attune.labels" -}}
|
||||||
|
helm.sh/chart: {{ include "attune.chart" . }}
|
||||||
|
app.kubernetes.io/name: {{ include "attune.name" . }}
|
||||||
|
app.kubernetes.io/instance: {{ .Release.Name }}
|
||||||
|
app.kubernetes.io/version: {{ .Chart.AppVersion | quote }}
|
||||||
|
app.kubernetes.io/managed-by: {{ .Release.Service }}
|
||||||
|
{{- end -}}
|
||||||
|
|
||||||
|
{{- define "attune.selectorLabels" -}}
|
||||||
|
app.kubernetes.io/name: {{ include "attune.name" . }}
|
||||||
|
app.kubernetes.io/instance: {{ .Release.Name }}
|
||||||
|
{{- end -}}
|
||||||
|
|
||||||
|
{{- define "attune.componentLabels" -}}
|
||||||
|
{{ include "attune.selectorLabels" .root }}
|
||||||
|
app.kubernetes.io/component: {{ .component }}
|
||||||
|
{{- end -}}
|
||||||
|
|
||||||
|
{{- define "attune.image" -}}
|
||||||
|
{{- $root := .root -}}
|
||||||
|
{{- $image := .image -}}
|
||||||
|
{{- $registry := $root.Values.global.imageRegistry -}}
|
||||||
|
{{- $namespace := $root.Values.global.imageNamespace -}}
|
||||||
|
{{- $repository := $image.repository -}}
|
||||||
|
{{- $tag := default $root.Values.global.imageTag $image.tag -}}
|
||||||
|
{{- if and $registry $namespace -}}
|
||||||
|
{{- printf "%s/%s/%s:%s" $registry $namespace $repository $tag -}}
|
||||||
|
{{- else if $registry -}}
|
||||||
|
{{- printf "%s/%s:%s" $registry $repository $tag -}}
|
||||||
|
{{- else -}}
|
||||||
|
{{- printf "%s:%s" $repository $tag -}}
|
||||||
|
{{- end -}}
|
||||||
|
{{- end -}}
|
||||||
|
|
||||||
|
{{- define "attune.secretName" -}}
|
||||||
|
{{- if .Values.security.existingSecret -}}
|
||||||
|
{{- .Values.security.existingSecret -}}
|
||||||
|
{{- else -}}
|
||||||
|
{{- printf "%s-secrets" (include "attune.fullname" .) -}}
|
||||||
|
{{- end -}}
|
||||||
|
{{- end -}}
|
||||||
|
|
||||||
|
{{- define "attune.postgresqlServiceName" -}}
|
||||||
|
{{- if .Values.database.host -}}
|
||||||
|
{{- .Values.database.host -}}
|
||||||
|
{{- else -}}
|
||||||
|
{{- printf "%s-postgresql" (include "attune.fullname" .) -}}
|
||||||
|
{{- end -}}
|
||||||
|
{{- end -}}
|
||||||
|
|
||||||
|
{{- define "attune.rabbitmqServiceName" -}}
|
||||||
|
{{- if .Values.rabbitmq.host -}}
|
||||||
|
{{- .Values.rabbitmq.host -}}
|
||||||
|
{{- else -}}
|
||||||
|
{{- printf "%s-rabbitmq" (include "attune.fullname" .) -}}
|
||||||
|
{{- end -}}
|
||||||
|
{{- end -}}
|
||||||
|
|
||||||
|
{{- define "attune.redisServiceName" -}}
|
||||||
|
{{- if .Values.redis.host -}}
|
||||||
|
{{- .Values.redis.host -}}
|
||||||
|
{{- else -}}
|
||||||
|
{{- printf "%s-redis" (include "attune.fullname" .) -}}
|
||||||
|
{{- end -}}
|
||||||
|
{{- end -}}
|
||||||
|
|
||||||
|
{{- define "attune.databaseUrl" -}}
|
||||||
|
{{- if .Values.database.url -}}
|
||||||
|
{{- .Values.database.url -}}
|
||||||
|
{{- else -}}
|
||||||
|
{{- printf "postgresql://%s:%s@%s:%v/%s" .Values.database.username .Values.database.password (include "attune.postgresqlServiceName" .) .Values.database.port .Values.database.database -}}
|
||||||
|
{{- end -}}
|
||||||
|
{{- end -}}
|
||||||
|
|
||||||
|
{{- define "attune.rabbitmqUrl" -}}
|
||||||
|
{{- if .Values.rabbitmq.url -}}
|
||||||
|
{{- .Values.rabbitmq.url -}}
|
||||||
|
{{- else -}}
|
||||||
|
{{- printf "amqp://%s:%s@%s:%v" .Values.rabbitmq.username .Values.rabbitmq.password (include "attune.rabbitmqServiceName" .) .Values.rabbitmq.port -}}
|
||||||
|
{{- end -}}
|
||||||
|
{{- end -}}
|
||||||
|
|
||||||
|
{{- define "attune.redisUrl" -}}
|
||||||
|
{{- if .Values.redis.url -}}
|
||||||
|
{{- .Values.redis.url -}}
|
||||||
|
{{- else -}}
|
||||||
|
{{- printf "redis://%s:%v" (include "attune.redisServiceName" .) .Values.redis.port -}}
|
||||||
|
{{- end -}}
|
||||||
|
{{- end -}}
|
||||||
|
|
||||||
|
{{- define "attune.apiServiceName" -}}
|
||||||
|
{{- printf "%s-api" (include "attune.fullname" .) -}}
|
||||||
|
{{- end -}}
|
||||||
|
|
||||||
|
{{- define "attune.notifierServiceName" -}}
|
||||||
|
{{- printf "%s-notifier" (include "attune.fullname" .) -}}
|
||||||
|
{{- end -}}
|
||||||
137
charts/attune/templates/agent-workers.yaml
Normal file
137
charts/attune/templates/agent-workers.yaml
Normal file
@@ -0,0 +1,137 @@
|
|||||||
|
{{- range .Values.agentWorkers }}
|
||||||
|
---
|
||||||
|
apiVersion: apps/v1
|
||||||
|
kind: Deployment
|
||||||
|
metadata:
|
||||||
|
name: {{ include "attune.fullname" $ }}-agent-worker-{{ .name }}
|
||||||
|
labels:
|
||||||
|
{{- include "attune.labels" $ | nindent 4 }}
|
||||||
|
app.kubernetes.io/component: agent-worker-{{ .name }}
|
||||||
|
spec:
|
||||||
|
replicas: {{ .replicas | default 1 }}
|
||||||
|
selector:
|
||||||
|
matchLabels:
|
||||||
|
{{- include "attune.selectorLabels" $ | nindent 6 }}
|
||||||
|
app.kubernetes.io/component: agent-worker-{{ .name }}
|
||||||
|
template:
|
||||||
|
metadata:
|
||||||
|
labels:
|
||||||
|
{{- include "attune.selectorLabels" $ | nindent 8 }}
|
||||||
|
app.kubernetes.io/component: agent-worker-{{ .name }}
|
||||||
|
spec:
|
||||||
|
{{- if $.Values.global.imagePullSecrets }}
|
||||||
|
imagePullSecrets:
|
||||||
|
{{- toYaml $.Values.global.imagePullSecrets | nindent 8 }}
|
||||||
|
{{- end }}
|
||||||
|
{{- if .runtimeClassName }}
|
||||||
|
runtimeClassName: {{ .runtimeClassName }}
|
||||||
|
{{- end }}
|
||||||
|
{{- if .nodeSelector }}
|
||||||
|
nodeSelector:
|
||||||
|
{{- toYaml .nodeSelector | nindent 8 }}
|
||||||
|
{{- end }}
|
||||||
|
{{- if .tolerations }}
|
||||||
|
tolerations:
|
||||||
|
{{- toYaml .tolerations | nindent 8 }}
|
||||||
|
{{- end }}
|
||||||
|
{{- if .stopGracePeriod }}
|
||||||
|
terminationGracePeriodSeconds: {{ .stopGracePeriod }}
|
||||||
|
{{- else }}
|
||||||
|
terminationGracePeriodSeconds: 45
|
||||||
|
{{- end }}
|
||||||
|
initContainers:
|
||||||
|
- name: agent-loader
|
||||||
|
image: {{ include "attune.image" (dict "root" $ "image" $.Values.images.agent) }}
|
||||||
|
imagePullPolicy: {{ $.Values.images.agent.pullPolicy }}
|
||||||
|
command: ["cp", "/usr/local/bin/attune-agent", "/opt/attune/agent/attune-agent"]
|
||||||
|
volumeMounts:
|
||||||
|
- name: agent-bin
|
||||||
|
mountPath: /opt/attune/agent
|
||||||
|
- name: wait-for-schema
|
||||||
|
image: postgres:16-alpine
|
||||||
|
command: ["/bin/sh", "-ec"]
|
||||||
|
args:
|
||||||
|
- |
|
||||||
|
until PGPASSWORD="$DB_PASSWORD" psql -h "$DB_HOST" -p "$DB_PORT" -U "$DB_USER" -d "$DB_NAME" -tAc "SELECT to_regclass('${DB_SCHEMA}.identity')" | grep -q identity; do
|
||||||
|
echo "waiting for schema";
|
||||||
|
sleep 2;
|
||||||
|
done
|
||||||
|
envFrom:
|
||||||
|
- secretRef:
|
||||||
|
name: {{ include "attune.secretName" $ }}
|
||||||
|
- name: wait-for-packs
|
||||||
|
image: busybox:1.36
|
||||||
|
command: ["/bin/sh", "-ec"]
|
||||||
|
args:
|
||||||
|
- |
|
||||||
|
until [ -f /opt/attune/packs/core/pack.yaml ]; do
|
||||||
|
echo "waiting for packs";
|
||||||
|
sleep 2;
|
||||||
|
done
|
||||||
|
volumeMounts:
|
||||||
|
- name: packs
|
||||||
|
mountPath: /opt/attune/packs
|
||||||
|
containers:
|
||||||
|
- name: worker
|
||||||
|
image: {{ .image }}
|
||||||
|
{{- if .imagePullPolicy }}
|
||||||
|
imagePullPolicy: {{ .imagePullPolicy }}
|
||||||
|
{{- end }}
|
||||||
|
command: ["/opt/attune/agent/attune-agent"]
|
||||||
|
envFrom:
|
||||||
|
- secretRef:
|
||||||
|
name: {{ include "attune.secretName" $ }}
|
||||||
|
env:
|
||||||
|
- name: ATTUNE_CONFIG
|
||||||
|
value: /opt/attune/config.yaml
|
||||||
|
- name: ATTUNE__DATABASE__SCHEMA
|
||||||
|
value: {{ $.Values.database.schema | quote }}
|
||||||
|
- name: ATTUNE_WORKER_TYPE
|
||||||
|
value: container
|
||||||
|
- name: ATTUNE_WORKER_NAME
|
||||||
|
valueFrom:
|
||||||
|
fieldRef:
|
||||||
|
fieldPath: metadata.name
|
||||||
|
- name: ATTUNE_API_URL
|
||||||
|
value: http://{{ include "attune.apiServiceName" $ }}:{{ $.Values.api.service.port }}
|
||||||
|
- name: RUST_LOG
|
||||||
|
value: {{ .logLevel | default "info" }}
|
||||||
|
{{- if .runtimes }}
|
||||||
|
- name: ATTUNE_WORKER_RUNTIMES
|
||||||
|
value: {{ join "," .runtimes | quote }}
|
||||||
|
{{- end }}
|
||||||
|
{{- if .env }}
|
||||||
|
{{- toYaml .env | nindent 12 }}
|
||||||
|
{{- end }}
|
||||||
|
resources:
|
||||||
|
{{- toYaml (.resources | default dict) | nindent 12 }}
|
||||||
|
volumeMounts:
|
||||||
|
- name: agent-bin
|
||||||
|
mountPath: /opt/attune/agent
|
||||||
|
readOnly: true
|
||||||
|
- name: config
|
||||||
|
mountPath: /opt/attune/config.yaml
|
||||||
|
subPath: config.yaml
|
||||||
|
- name: packs
|
||||||
|
mountPath: /opt/attune/packs
|
||||||
|
readOnly: true
|
||||||
|
- name: runtime-envs
|
||||||
|
mountPath: /opt/attune/runtime_envs
|
||||||
|
- name: artifacts
|
||||||
|
mountPath: /opt/attune/artifacts
|
||||||
|
volumes:
|
||||||
|
- name: agent-bin
|
||||||
|
emptyDir: {}
|
||||||
|
- name: config
|
||||||
|
configMap:
|
||||||
|
name: {{ include "attune.fullname" $ }}-config
|
||||||
|
- name: packs
|
||||||
|
persistentVolumeClaim:
|
||||||
|
claimName: {{ include "attune.fullname" $ }}-packs
|
||||||
|
- name: runtime-envs
|
||||||
|
persistentVolumeClaim:
|
||||||
|
claimName: {{ include "attune.fullname" $ }}-runtime-envs
|
||||||
|
- name: artifacts
|
||||||
|
persistentVolumeClaim:
|
||||||
|
claimName: {{ include "attune.fullname" $ }}-artifacts
|
||||||
|
{{- end }}
|
||||||
542
charts/attune/templates/applications.yaml
Normal file
542
charts/attune/templates/applications.yaml
Normal file
@@ -0,0 +1,542 @@
|
|||||||
|
apiVersion: v1
|
||||||
|
kind: Service
|
||||||
|
metadata:
|
||||||
|
name: {{ include "attune.apiServiceName" . }}
|
||||||
|
labels:
|
||||||
|
{{- include "attune.labels" . | nindent 4 }}
|
||||||
|
spec:
|
||||||
|
type: {{ .Values.api.service.type }}
|
||||||
|
selector:
|
||||||
|
{{- include "attune.componentLabels" (dict "root" . "component" "api") | nindent 4 }}
|
||||||
|
ports:
|
||||||
|
- name: http
|
||||||
|
port: {{ .Values.api.service.port }}
|
||||||
|
targetPort: http
|
||||||
|
---
|
||||||
|
apiVersion: apps/v1
|
||||||
|
kind: Deployment
|
||||||
|
metadata:
|
||||||
|
name: {{ include "attune.apiServiceName" . }}
|
||||||
|
labels:
|
||||||
|
{{- include "attune.labels" . | nindent 4 }}
|
||||||
|
spec:
|
||||||
|
replicas: {{ .Values.api.replicaCount }}
|
||||||
|
selector:
|
||||||
|
matchLabels:
|
||||||
|
{{- include "attune.componentLabels" (dict "root" . "component" "api") | nindent 6 }}
|
||||||
|
template:
|
||||||
|
metadata:
|
||||||
|
labels:
|
||||||
|
{{- include "attune.componentLabels" (dict "root" . "component" "api") | nindent 8 }}
|
||||||
|
spec:
|
||||||
|
{{- if .Values.global.imagePullSecrets }}
|
||||||
|
imagePullSecrets:
|
||||||
|
{{- toYaml .Values.global.imagePullSecrets | nindent 8 }}
|
||||||
|
{{- end }}
|
||||||
|
initContainers:
|
||||||
|
- name: wait-for-schema
|
||||||
|
image: postgres:16-alpine
|
||||||
|
command: ["/bin/sh", "-ec"]
|
||||||
|
args:
|
||||||
|
- |
|
||||||
|
until PGPASSWORD="$DB_PASSWORD" psql -h "$DB_HOST" -p "$DB_PORT" -U "$DB_USER" -d "$DB_NAME" -tAc "SELECT to_regclass('${DB_SCHEMA}.identity')" | grep -q identity; do
|
||||||
|
echo "waiting for schema";
|
||||||
|
sleep 2;
|
||||||
|
done
|
||||||
|
envFrom:
|
||||||
|
- secretRef:
|
||||||
|
name: {{ include "attune.secretName" . }}
|
||||||
|
- name: wait-for-packs
|
||||||
|
image: busybox:1.36
|
||||||
|
command: ["/bin/sh", "-ec"]
|
||||||
|
args:
|
||||||
|
- |
|
||||||
|
until [ -f /opt/attune/packs/core/pack.yaml ]; do
|
||||||
|
echo "waiting for packs";
|
||||||
|
sleep 2;
|
||||||
|
done
|
||||||
|
volumeMounts:
|
||||||
|
- name: packs
|
||||||
|
mountPath: /opt/attune/packs
|
||||||
|
containers:
|
||||||
|
- name: api
|
||||||
|
image: {{ include "attune.image" (dict "root" . "image" .Values.images.api) }}
|
||||||
|
imagePullPolicy: {{ .Values.images.api.pullPolicy }}
|
||||||
|
envFrom:
|
||||||
|
- secretRef:
|
||||||
|
name: {{ include "attune.secretName" . }}
|
||||||
|
env:
|
||||||
|
- name: ATTUNE_CONFIG
|
||||||
|
value: /opt/attune/config.yaml
|
||||||
|
- name: ATTUNE__DATABASE__SCHEMA
|
||||||
|
value: {{ .Values.database.schema | quote }}
|
||||||
|
- name: ATTUNE__WORKER__WORKER_TYPE
|
||||||
|
value: container
|
||||||
|
ports:
|
||||||
|
- name: http
|
||||||
|
containerPort: 8080
|
||||||
|
readinessProbe:
|
||||||
|
httpGet:
|
||||||
|
path: /health
|
||||||
|
port: http
|
||||||
|
initialDelaySeconds: 10
|
||||||
|
periodSeconds: 10
|
||||||
|
livenessProbe:
|
||||||
|
httpGet:
|
||||||
|
path: /health
|
||||||
|
port: http
|
||||||
|
initialDelaySeconds: 20
|
||||||
|
periodSeconds: 15
|
||||||
|
resources:
|
||||||
|
{{- toYaml .Values.api.resources | nindent 12 }}
|
||||||
|
volumeMounts:
|
||||||
|
- name: config
|
||||||
|
mountPath: /opt/attune/config.yaml
|
||||||
|
subPath: config.yaml
|
||||||
|
- name: packs
|
||||||
|
mountPath: /opt/attune/packs
|
||||||
|
- name: runtime-envs
|
||||||
|
mountPath: /opt/attune/runtime_envs
|
||||||
|
- name: artifacts
|
||||||
|
mountPath: /opt/attune/artifacts
|
||||||
|
volumes:
|
||||||
|
- name: config
|
||||||
|
configMap:
|
||||||
|
name: {{ include "attune.fullname" . }}-config
|
||||||
|
- name: packs
|
||||||
|
persistentVolumeClaim:
|
||||||
|
claimName: {{ include "attune.fullname" . }}-packs
|
||||||
|
- name: runtime-envs
|
||||||
|
persistentVolumeClaim:
|
||||||
|
claimName: {{ include "attune.fullname" . }}-runtime-envs
|
||||||
|
- name: artifacts
|
||||||
|
persistentVolumeClaim:
|
||||||
|
claimName: {{ include "attune.fullname" . }}-artifacts
|
||||||
|
---
|
||||||
|
apiVersion: apps/v1
|
||||||
|
kind: Deployment
|
||||||
|
metadata:
|
||||||
|
name: {{ include "attune.fullname" . }}-executor
|
||||||
|
labels:
|
||||||
|
{{- include "attune.labels" . | nindent 4 }}
|
||||||
|
spec:
|
||||||
|
replicas: {{ .Values.executor.replicaCount }}
|
||||||
|
selector:
|
||||||
|
matchLabels:
|
||||||
|
{{- include "attune.componentLabels" (dict "root" . "component" "executor") | nindent 6 }}
|
||||||
|
template:
|
||||||
|
metadata:
|
||||||
|
labels:
|
||||||
|
{{- include "attune.componentLabels" (dict "root" . "component" "executor") | nindent 8 }}
|
||||||
|
spec:
|
||||||
|
{{- if .Values.global.imagePullSecrets }}
|
||||||
|
imagePullSecrets:
|
||||||
|
{{- toYaml .Values.global.imagePullSecrets | nindent 8 }}
|
||||||
|
{{- end }}
|
||||||
|
initContainers:
|
||||||
|
- name: wait-for-schema
|
||||||
|
image: postgres:16-alpine
|
||||||
|
command: ["/bin/sh", "-ec"]
|
||||||
|
args:
|
||||||
|
- |
|
||||||
|
until PGPASSWORD="$DB_PASSWORD" psql -h "$DB_HOST" -p "$DB_PORT" -U "$DB_USER" -d "$DB_NAME" -tAc "SELECT to_regclass('${DB_SCHEMA}.identity')" | grep -q identity; do
|
||||||
|
echo "waiting for schema";
|
||||||
|
sleep 2;
|
||||||
|
done
|
||||||
|
envFrom:
|
||||||
|
- secretRef:
|
||||||
|
name: {{ include "attune.secretName" . }}
|
||||||
|
- name: wait-for-packs
|
||||||
|
image: busybox:1.36
|
||||||
|
command: ["/bin/sh", "-ec"]
|
||||||
|
args:
|
||||||
|
- |
|
||||||
|
until [ -f /opt/attune/packs/core/pack.yaml ]; do
|
||||||
|
echo "waiting for packs";
|
||||||
|
sleep 2;
|
||||||
|
done
|
||||||
|
volumeMounts:
|
||||||
|
- name: packs
|
||||||
|
mountPath: /opt/attune/packs
|
||||||
|
containers:
|
||||||
|
- name: executor
|
||||||
|
image: {{ include "attune.image" (dict "root" . "image" .Values.images.executor) }}
|
||||||
|
imagePullPolicy: {{ .Values.images.executor.pullPolicy }}
|
||||||
|
envFrom:
|
||||||
|
- secretRef:
|
||||||
|
name: {{ include "attune.secretName" . }}
|
||||||
|
env:
|
||||||
|
- name: ATTUNE_CONFIG
|
||||||
|
value: /opt/attune/config.yaml
|
||||||
|
- name: ATTUNE__DATABASE__SCHEMA
|
||||||
|
value: {{ .Values.database.schema | quote }}
|
||||||
|
- name: ATTUNE__WORKER__WORKER_TYPE
|
||||||
|
value: container
|
||||||
|
resources:
|
||||||
|
{{- toYaml .Values.executor.resources | nindent 12 }}
|
||||||
|
volumeMounts:
|
||||||
|
- name: config
|
||||||
|
mountPath: /opt/attune/config.yaml
|
||||||
|
subPath: config.yaml
|
||||||
|
- name: packs
|
||||||
|
mountPath: /opt/attune/packs
|
||||||
|
- name: artifacts
|
||||||
|
mountPath: /opt/attune/artifacts
|
||||||
|
volumes:
|
||||||
|
- name: config
|
||||||
|
configMap:
|
||||||
|
name: {{ include "attune.fullname" . }}-config
|
||||||
|
- name: packs
|
||||||
|
persistentVolumeClaim:
|
||||||
|
claimName: {{ include "attune.fullname" . }}-packs
|
||||||
|
- name: artifacts
|
||||||
|
persistentVolumeClaim:
|
||||||
|
claimName: {{ include "attune.fullname" . }}-artifacts
|
||||||
|
---
|
||||||
|
apiVersion: apps/v1
|
||||||
|
kind: Deployment
|
||||||
|
metadata:
|
||||||
|
name: {{ include "attune.fullname" . }}-worker
|
||||||
|
labels:
|
||||||
|
{{- include "attune.labels" . | nindent 4 }}
|
||||||
|
spec:
|
||||||
|
replicas: {{ .Values.worker.replicaCount }}
|
||||||
|
selector:
|
||||||
|
matchLabels:
|
||||||
|
{{- include "attune.componentLabels" (dict "root" . "component" "worker") | nindent 6 }}
|
||||||
|
template:
|
||||||
|
metadata:
|
||||||
|
labels:
|
||||||
|
{{- include "attune.componentLabels" (dict "root" . "component" "worker") | nindent 8 }}
|
||||||
|
spec:
|
||||||
|
{{- if .Values.global.imagePullSecrets }}
|
||||||
|
imagePullSecrets:
|
||||||
|
{{- toYaml .Values.global.imagePullSecrets | nindent 8 }}
|
||||||
|
{{- end }}
|
||||||
|
initContainers:
|
||||||
|
- name: wait-for-schema
|
||||||
|
image: postgres:16-alpine
|
||||||
|
command: ["/bin/sh", "-ec"]
|
||||||
|
args:
|
||||||
|
- |
|
||||||
|
until PGPASSWORD="$DB_PASSWORD" psql -h "$DB_HOST" -p "$DB_PORT" -U "$DB_USER" -d "$DB_NAME" -tAc "SELECT to_regclass('${DB_SCHEMA}.identity')" | grep -q identity; do
|
||||||
|
echo "waiting for schema";
|
||||||
|
sleep 2;
|
||||||
|
done
|
||||||
|
envFrom:
|
||||||
|
- secretRef:
|
||||||
|
name: {{ include "attune.secretName" . }}
|
||||||
|
- name: wait-for-packs
|
||||||
|
image: busybox:1.36
|
||||||
|
command: ["/bin/sh", "-ec"]
|
||||||
|
args:
|
||||||
|
- |
|
||||||
|
until [ -f /opt/attune/packs/core/pack.yaml ]; do
|
||||||
|
echo "waiting for packs";
|
||||||
|
sleep 2;
|
||||||
|
done
|
||||||
|
volumeMounts:
|
||||||
|
- name: packs
|
||||||
|
mountPath: /opt/attune/packs
|
||||||
|
containers:
|
||||||
|
- name: worker
|
||||||
|
image: {{ include "attune.image" (dict "root" . "image" .Values.images.worker) }}
|
||||||
|
imagePullPolicy: {{ .Values.images.worker.pullPolicy }}
|
||||||
|
envFrom:
|
||||||
|
- secretRef:
|
||||||
|
name: {{ include "attune.secretName" . }}
|
||||||
|
env:
|
||||||
|
- name: ATTUNE_CONFIG
|
||||||
|
value: /opt/attune/config.yaml
|
||||||
|
- name: ATTUNE__DATABASE__SCHEMA
|
||||||
|
value: {{ .Values.database.schema | quote }}
|
||||||
|
- name: ATTUNE_WORKER_RUNTIMES
|
||||||
|
value: {{ .Values.worker.runtimes | quote }}
|
||||||
|
- name: ATTUNE_WORKER_TYPE
|
||||||
|
value: container
|
||||||
|
- name: ATTUNE_WORKER_NAME
|
||||||
|
value: {{ .Values.worker.name | quote }}
|
||||||
|
- name: ATTUNE_API_URL
|
||||||
|
value: http://{{ include "attune.apiServiceName" . }}:{{ .Values.api.service.port }}
|
||||||
|
resources:
|
||||||
|
{{- toYaml .Values.worker.resources | nindent 12 }}
|
||||||
|
volumeMounts:
|
||||||
|
- name: config
|
||||||
|
mountPath: /opt/attune/config.yaml
|
||||||
|
subPath: config.yaml
|
||||||
|
- name: packs
|
||||||
|
mountPath: /opt/attune/packs
|
||||||
|
- name: runtime-envs
|
||||||
|
mountPath: /opt/attune/runtime_envs
|
||||||
|
- name: artifacts
|
||||||
|
mountPath: /opt/attune/artifacts
|
||||||
|
volumes:
|
||||||
|
- name: config
|
||||||
|
configMap:
|
||||||
|
name: {{ include "attune.fullname" . }}-config
|
||||||
|
- name: packs
|
||||||
|
persistentVolumeClaim:
|
||||||
|
claimName: {{ include "attune.fullname" . }}-packs
|
||||||
|
- name: runtime-envs
|
||||||
|
persistentVolumeClaim:
|
||||||
|
claimName: {{ include "attune.fullname" . }}-runtime-envs
|
||||||
|
- name: artifacts
|
||||||
|
persistentVolumeClaim:
|
||||||
|
claimName: {{ include "attune.fullname" . }}-artifacts
|
||||||
|
---
|
||||||
|
apiVersion: apps/v1
|
||||||
|
kind: Deployment
|
||||||
|
metadata:
|
||||||
|
name: {{ include "attune.fullname" . }}-sensor
|
||||||
|
labels:
|
||||||
|
{{- include "attune.labels" . | nindent 4 }}
|
||||||
|
spec:
|
||||||
|
replicas: {{ .Values.sensor.replicaCount }}
|
||||||
|
selector:
|
||||||
|
matchLabels:
|
||||||
|
{{- include "attune.componentLabels" (dict "root" . "component" "sensor") | nindent 6 }}
|
||||||
|
template:
|
||||||
|
metadata:
|
||||||
|
labels:
|
||||||
|
{{- include "attune.componentLabels" (dict "root" . "component" "sensor") | nindent 8 }}
|
||||||
|
spec:
|
||||||
|
{{- if .Values.global.imagePullSecrets }}
|
||||||
|
imagePullSecrets:
|
||||||
|
{{- toYaml .Values.global.imagePullSecrets | nindent 8 }}
|
||||||
|
{{- end }}
|
||||||
|
terminationGracePeriodSeconds: 45
|
||||||
|
initContainers:
|
||||||
|
- name: sensor-agent-loader
|
||||||
|
image: {{ include "attune.image" (dict "root" . "image" .Values.images.agent) }}
|
||||||
|
imagePullPolicy: {{ .Values.images.agent.pullPolicy }}
|
||||||
|
command: ["cp", "/usr/local/bin/attune-sensor-agent", "/opt/attune/agent/attune-sensor-agent"]
|
||||||
|
volumeMounts:
|
||||||
|
- name: agent-bin
|
||||||
|
mountPath: /opt/attune/agent
|
||||||
|
- name: wait-for-schema
|
||||||
|
image: postgres:16-alpine
|
||||||
|
command: ["/bin/sh", "-ec"]
|
||||||
|
args:
|
||||||
|
- |
|
||||||
|
until PGPASSWORD="$DB_PASSWORD" psql -h "$DB_HOST" -p "$DB_PORT" -U "$DB_USER" -d "$DB_NAME" -tAc "SELECT to_regclass('${DB_SCHEMA}.identity')" | grep -q identity; do
|
||||||
|
echo "waiting for schema";
|
||||||
|
sleep 2;
|
||||||
|
done
|
||||||
|
envFrom:
|
||||||
|
- secretRef:
|
||||||
|
name: {{ include "attune.secretName" . }}
|
||||||
|
- name: wait-for-packs
|
||||||
|
image: busybox:1.36
|
||||||
|
command: ["/bin/sh", "-ec"]
|
||||||
|
args:
|
||||||
|
- |
|
||||||
|
until [ -f /opt/attune/packs/core/pack.yaml ]; do
|
||||||
|
echo "waiting for packs";
|
||||||
|
sleep 2;
|
||||||
|
done
|
||||||
|
volumeMounts:
|
||||||
|
- name: packs
|
||||||
|
mountPath: /opt/attune/packs
|
||||||
|
containers:
|
||||||
|
- name: sensor
|
||||||
|
image: {{ include "attune.image" (dict "root" . "image" .Values.images.sensor) }}
|
||||||
|
imagePullPolicy: {{ .Values.images.sensor.pullPolicy }}
|
||||||
|
command: ["/opt/attune/agent/attune-sensor-agent"]
|
||||||
|
envFrom:
|
||||||
|
- secretRef:
|
||||||
|
name: {{ include "attune.secretName" . }}
|
||||||
|
env:
|
||||||
|
- name: ATTUNE_CONFIG
|
||||||
|
value: /opt/attune/config.yaml
|
||||||
|
- name: ATTUNE__DATABASE__SCHEMA
|
||||||
|
value: {{ .Values.database.schema | quote }}
|
||||||
|
- name: ATTUNE__WORKER__WORKER_TYPE
|
||||||
|
value: container
|
||||||
|
- name: ATTUNE_SENSOR_RUNTIMES
|
||||||
|
value: {{ .Values.sensor.runtimes | quote }}
|
||||||
|
- name: ATTUNE_API_URL
|
||||||
|
value: http://{{ include "attune.apiServiceName" . }}:{{ .Values.api.service.port }}
|
||||||
|
- name: ATTUNE_MQ_URL
|
||||||
|
value: {{ include "attune.rabbitmqUrl" . | quote }}
|
||||||
|
- name: ATTUNE_PACKS_BASE_DIR
|
||||||
|
value: /opt/attune/packs
|
||||||
|
- name: RUST_LOG
|
||||||
|
value: {{ .Values.sensor.logLevel | quote }}
|
||||||
|
resources:
|
||||||
|
{{- toYaml .Values.sensor.resources | nindent 12 }}
|
||||||
|
volumeMounts:
|
||||||
|
- name: agent-bin
|
||||||
|
mountPath: /opt/attune/agent
|
||||||
|
readOnly: true
|
||||||
|
- name: config
|
||||||
|
mountPath: /opt/attune/config.yaml
|
||||||
|
subPath: config.yaml
|
||||||
|
- name: packs
|
||||||
|
mountPath: /opt/attune/packs
|
||||||
|
readOnly: true
|
||||||
|
- name: runtime-envs
|
||||||
|
mountPath: /opt/attune/runtime_envs
|
||||||
|
volumes:
|
||||||
|
- name: agent-bin
|
||||||
|
emptyDir: {}
|
||||||
|
- name: config
|
||||||
|
configMap:
|
||||||
|
name: {{ include "attune.fullname" . }}-config
|
||||||
|
- name: packs
|
||||||
|
persistentVolumeClaim:
|
||||||
|
claimName: {{ include "attune.fullname" . }}-packs
|
||||||
|
- name: runtime-envs
|
||||||
|
persistentVolumeClaim:
|
||||||
|
claimName: {{ include "attune.fullname" . }}-runtime-envs
|
||||||
|
---
|
||||||
|
apiVersion: v1
|
||||||
|
kind: Service
|
||||||
|
metadata:
|
||||||
|
name: {{ include "attune.notifierServiceName" . }}
|
||||||
|
labels:
|
||||||
|
{{- include "attune.labels" . | nindent 4 }}
|
||||||
|
spec:
|
||||||
|
type: {{ .Values.notifier.service.type }}
|
||||||
|
selector:
|
||||||
|
{{- include "attune.componentLabels" (dict "root" . "component" "notifier") | nindent 4 }}
|
||||||
|
ports:
|
||||||
|
- name: ws
|
||||||
|
port: {{ .Values.notifier.service.port }}
|
||||||
|
targetPort: ws
|
||||||
|
---
|
||||||
|
apiVersion: apps/v1
|
||||||
|
kind: Deployment
|
||||||
|
metadata:
|
||||||
|
name: {{ include "attune.notifierServiceName" . }}
|
||||||
|
labels:
|
||||||
|
{{- include "attune.labels" . | nindent 4 }}
|
||||||
|
spec:
|
||||||
|
replicas: {{ .Values.notifier.replicaCount }}
|
||||||
|
selector:
|
||||||
|
matchLabels:
|
||||||
|
{{- include "attune.componentLabels" (dict "root" . "component" "notifier") | nindent 6 }}
|
||||||
|
template:
|
||||||
|
metadata:
|
||||||
|
labels:
|
||||||
|
{{- include "attune.componentLabels" (dict "root" . "component" "notifier") | nindent 8 }}
|
||||||
|
spec:
|
||||||
|
{{- if .Values.global.imagePullSecrets }}
|
||||||
|
imagePullSecrets:
|
||||||
|
{{- toYaml .Values.global.imagePullSecrets | nindent 8 }}
|
||||||
|
{{- end }}
|
||||||
|
initContainers:
|
||||||
|
- name: wait-for-schema
|
||||||
|
image: postgres:16-alpine
|
||||||
|
command: ["/bin/sh", "-ec"]
|
||||||
|
args:
|
||||||
|
- |
|
||||||
|
until PGPASSWORD="$DB_PASSWORD" psql -h "$DB_HOST" -p "$DB_PORT" -U "$DB_USER" -d "$DB_NAME" -tAc "SELECT to_regclass('${DB_SCHEMA}.identity')" | grep -q identity; do
|
||||||
|
echo "waiting for schema";
|
||||||
|
sleep 2;
|
||||||
|
done
|
||||||
|
envFrom:
|
||||||
|
- secretRef:
|
||||||
|
name: {{ include "attune.secretName" . }}
|
||||||
|
containers:
|
||||||
|
- name: notifier
|
||||||
|
image: {{ include "attune.image" (dict "root" . "image" .Values.images.notifier) }}
|
||||||
|
imagePullPolicy: {{ .Values.images.notifier.pullPolicy }}
|
||||||
|
envFrom:
|
||||||
|
- secretRef:
|
||||||
|
name: {{ include "attune.secretName" . }}
|
||||||
|
env:
|
||||||
|
- name: ATTUNE_CONFIG
|
||||||
|
value: /opt/attune/config.yaml
|
||||||
|
- name: ATTUNE__DATABASE__SCHEMA
|
||||||
|
value: {{ .Values.database.schema | quote }}
|
||||||
|
- name: ATTUNE__WORKER__WORKER_TYPE
|
||||||
|
value: container
|
||||||
|
ports:
|
||||||
|
- name: ws
|
||||||
|
containerPort: 8081
|
||||||
|
readinessProbe:
|
||||||
|
httpGet:
|
||||||
|
path: /health
|
||||||
|
port: ws
|
||||||
|
initialDelaySeconds: 10
|
||||||
|
periodSeconds: 10
|
||||||
|
livenessProbe:
|
||||||
|
httpGet:
|
||||||
|
path: /health
|
||||||
|
port: ws
|
||||||
|
initialDelaySeconds: 20
|
||||||
|
periodSeconds: 15
|
||||||
|
resources:
|
||||||
|
{{- toYaml .Values.notifier.resources | nindent 12 }}
|
||||||
|
volumeMounts:
|
||||||
|
- name: config
|
||||||
|
mountPath: /opt/attune/config.yaml
|
||||||
|
subPath: config.yaml
|
||||||
|
volumes:
|
||||||
|
- name: config
|
||||||
|
configMap:
|
||||||
|
name: {{ include "attune.fullname" . }}-config
|
||||||
|
---
|
||||||
|
apiVersion: v1
|
||||||
|
kind: Service
|
||||||
|
metadata:
|
||||||
|
name: {{ include "attune.fullname" . }}-web
|
||||||
|
labels:
|
||||||
|
{{- include "attune.labels" . | nindent 4 }}
|
||||||
|
spec:
|
||||||
|
type: {{ .Values.web.service.type }}
|
||||||
|
selector:
|
||||||
|
{{- include "attune.componentLabels" (dict "root" . "component" "web") | nindent 4 }}
|
||||||
|
ports:
|
||||||
|
- name: http
|
||||||
|
port: {{ .Values.web.service.port }}
|
||||||
|
targetPort: http
|
||||||
|
---
|
||||||
|
apiVersion: apps/v1
|
||||||
|
kind: Deployment
|
||||||
|
metadata:
|
||||||
|
name: {{ include "attune.fullname" . }}-web
|
||||||
|
labels:
|
||||||
|
{{- include "attune.labels" . | nindent 4 }}
|
||||||
|
spec:
|
||||||
|
replicas: {{ .Values.web.replicaCount }}
|
||||||
|
selector:
|
||||||
|
matchLabels:
|
||||||
|
{{- include "attune.componentLabels" (dict "root" . "component" "web") | nindent 6 }}
|
||||||
|
template:
|
||||||
|
metadata:
|
||||||
|
labels:
|
||||||
|
{{- include "attune.componentLabels" (dict "root" . "component" "web") | nindent 8 }}
|
||||||
|
spec:
|
||||||
|
{{- if .Values.global.imagePullSecrets }}
|
||||||
|
imagePullSecrets:
|
||||||
|
{{- toYaml .Values.global.imagePullSecrets | nindent 8 }}
|
||||||
|
{{- end }}
|
||||||
|
containers:
|
||||||
|
- name: web
|
||||||
|
image: {{ include "attune.image" (dict "root" . "image" .Values.images.web) }}
|
||||||
|
imagePullPolicy: {{ .Values.images.web.pullPolicy }}
|
||||||
|
env:
|
||||||
|
- name: API_URL
|
||||||
|
value: {{ .Values.web.config.apiUrl | quote }}
|
||||||
|
- name: WS_URL
|
||||||
|
value: {{ .Values.web.config.wsUrl | quote }}
|
||||||
|
- name: ENVIRONMENT
|
||||||
|
value: {{ .Values.web.config.environment | quote }}
|
||||||
|
ports:
|
||||||
|
- name: http
|
||||||
|
containerPort: 80
|
||||||
|
readinessProbe:
|
||||||
|
httpGet:
|
||||||
|
path: /health
|
||||||
|
port: http
|
||||||
|
initialDelaySeconds: 10
|
||||||
|
periodSeconds: 10
|
||||||
|
livenessProbe:
|
||||||
|
httpGet:
|
||||||
|
path: /health
|
||||||
|
port: http
|
||||||
|
initialDelaySeconds: 20
|
||||||
|
periodSeconds: 15
|
||||||
|
resources:
|
||||||
|
{{- toYaml .Values.web.resources | nindent 12 }}
|
||||||
9
charts/attune/templates/configmap.yaml
Normal file
9
charts/attune/templates/configmap.yaml
Normal file
@@ -0,0 +1,9 @@
|
|||||||
|
apiVersion: v1
|
||||||
|
kind: ConfigMap
|
||||||
|
metadata:
|
||||||
|
name: {{ include "attune.fullname" . }}-config
|
||||||
|
labels:
|
||||||
|
{{- include "attune.labels" . | nindent 4 }}
|
||||||
|
data:
|
||||||
|
config.yaml: |
|
||||||
|
{{ .Files.Get "files/config.docker.yaml" | indent 4 }}
|
||||||
225
charts/attune/templates/infrastructure.yaml
Normal file
225
charts/attune/templates/infrastructure.yaml
Normal file
@@ -0,0 +1,225 @@
|
|||||||
|
{{- if .Values.database.postgresql.enabled }}
|
||||||
|
apiVersion: v1
|
||||||
|
kind: Service
|
||||||
|
metadata:
|
||||||
|
name: {{ include "attune.postgresqlServiceName" . }}
|
||||||
|
labels:
|
||||||
|
{{- include "attune.labels" . | nindent 4 }}
|
||||||
|
spec:
|
||||||
|
selector:
|
||||||
|
{{- include "attune.componentLabels" (dict "root" . "component" "postgresql") | nindent 4 }}
|
||||||
|
ports:
|
||||||
|
- name: postgres
|
||||||
|
port: {{ .Values.database.port }}
|
||||||
|
targetPort: postgres
|
||||||
|
---
|
||||||
|
apiVersion: apps/v1
|
||||||
|
kind: StatefulSet
|
||||||
|
metadata:
|
||||||
|
name: {{ include "attune.postgresqlServiceName" . }}
|
||||||
|
labels:
|
||||||
|
{{- include "attune.labels" . | nindent 4 }}
|
||||||
|
spec:
|
||||||
|
serviceName: {{ include "attune.postgresqlServiceName" . }}
|
||||||
|
replicas: 1
|
||||||
|
selector:
|
||||||
|
matchLabels:
|
||||||
|
{{- include "attune.componentLabels" (dict "root" . "component" "postgresql") | nindent 6 }}
|
||||||
|
template:
|
||||||
|
metadata:
|
||||||
|
labels:
|
||||||
|
{{- include "attune.componentLabels" (dict "root" . "component" "postgresql") | nindent 8 }}
|
||||||
|
spec:
|
||||||
|
containers:
|
||||||
|
- name: postgresql
|
||||||
|
image: "{{ .Values.database.postgresql.image.repository }}:{{ .Values.database.postgresql.image.tag }}"
|
||||||
|
imagePullPolicy: IfNotPresent
|
||||||
|
env:
|
||||||
|
- name: POSTGRES_USER
|
||||||
|
value: {{ .Values.database.username | quote }}
|
||||||
|
- name: POSTGRES_PASSWORD
|
||||||
|
value: {{ .Values.database.password | quote }}
|
||||||
|
- name: POSTGRES_DB
|
||||||
|
value: {{ .Values.database.database | quote }}
|
||||||
|
- name: PGDATA
|
||||||
|
value: /var/lib/postgresql/data/pgdata
|
||||||
|
ports:
|
||||||
|
- name: postgres
|
||||||
|
containerPort: 5432
|
||||||
|
livenessProbe:
|
||||||
|
exec:
|
||||||
|
command: ["pg_isready", "-U", "{{ .Values.database.username }}"]
|
||||||
|
initialDelaySeconds: 20
|
||||||
|
periodSeconds: 10
|
||||||
|
readinessProbe:
|
||||||
|
exec:
|
||||||
|
command: ["pg_isready", "-U", "{{ .Values.database.username }}"]
|
||||||
|
initialDelaySeconds: 10
|
||||||
|
periodSeconds: 10
|
||||||
|
resources:
|
||||||
|
{{- toYaml .Values.database.postgresql.resources | nindent 12 }}
|
||||||
|
volumeMounts:
|
||||||
|
- name: data
|
||||||
|
mountPath: /var/lib/postgresql/data
|
||||||
|
volumeClaimTemplates:
|
||||||
|
- metadata:
|
||||||
|
name: data
|
||||||
|
spec:
|
||||||
|
accessModes:
|
||||||
|
{{- toYaml .Values.database.postgresql.persistence.accessModes | nindent 10 }}
|
||||||
|
resources:
|
||||||
|
requests:
|
||||||
|
storage: {{ .Values.database.postgresql.persistence.size }}
|
||||||
|
{{- if .Values.database.postgresql.persistence.storageClassName }}
|
||||||
|
storageClassName: {{ .Values.database.postgresql.persistence.storageClassName }}
|
||||||
|
{{- end }}
|
||||||
|
{{- end }}
|
||||||
|
{{- if .Values.rabbitmq.enabled }}
|
||||||
|
---
|
||||||
|
apiVersion: v1
|
||||||
|
kind: Service
|
||||||
|
metadata:
|
||||||
|
name: {{ include "attune.rabbitmqServiceName" . }}
|
||||||
|
labels:
|
||||||
|
{{- include "attune.labels" . | nindent 4 }}
|
||||||
|
spec:
|
||||||
|
selector:
|
||||||
|
{{- include "attune.componentLabels" (dict "root" . "component" "rabbitmq") | nindent 4 }}
|
||||||
|
ports:
|
||||||
|
- name: amqp
|
||||||
|
port: {{ .Values.rabbitmq.port }}
|
||||||
|
targetPort: amqp
|
||||||
|
- name: management
|
||||||
|
port: {{ .Values.rabbitmq.managementPort }}
|
||||||
|
targetPort: management
|
||||||
|
---
|
||||||
|
apiVersion: apps/v1
|
||||||
|
kind: StatefulSet
|
||||||
|
metadata:
|
||||||
|
name: {{ include "attune.rabbitmqServiceName" . }}
|
||||||
|
labels:
|
||||||
|
{{- include "attune.labels" . | nindent 4 }}
|
||||||
|
spec:
|
||||||
|
serviceName: {{ include "attune.rabbitmqServiceName" . }}
|
||||||
|
replicas: 1
|
||||||
|
selector:
|
||||||
|
matchLabels:
|
||||||
|
{{- include "attune.componentLabels" (dict "root" . "component" "rabbitmq") | nindent 6 }}
|
||||||
|
template:
|
||||||
|
metadata:
|
||||||
|
labels:
|
||||||
|
{{- include "attune.componentLabels" (dict "root" . "component" "rabbitmq") | nindent 8 }}
|
||||||
|
spec:
|
||||||
|
containers:
|
||||||
|
- name: rabbitmq
|
||||||
|
image: "{{ .Values.rabbitmq.image.repository }}:{{ .Values.rabbitmq.image.tag }}"
|
||||||
|
imagePullPolicy: IfNotPresent
|
||||||
|
env:
|
||||||
|
- name: RABBITMQ_DEFAULT_USER
|
||||||
|
value: {{ .Values.rabbitmq.username | quote }}
|
||||||
|
- name: RABBITMQ_DEFAULT_PASS
|
||||||
|
value: {{ .Values.rabbitmq.password | quote }}
|
||||||
|
- name: RABBITMQ_DEFAULT_VHOST
|
||||||
|
value: /
|
||||||
|
ports:
|
||||||
|
- name: amqp
|
||||||
|
containerPort: 5672
|
||||||
|
- name: management
|
||||||
|
containerPort: 15672
|
||||||
|
livenessProbe:
|
||||||
|
exec:
|
||||||
|
command: ["rabbitmq-diagnostics", "-q", "ping"]
|
||||||
|
initialDelaySeconds: 20
|
||||||
|
periodSeconds: 15
|
||||||
|
readinessProbe:
|
||||||
|
exec:
|
||||||
|
command: ["rabbitmq-diagnostics", "-q", "ping"]
|
||||||
|
initialDelaySeconds: 10
|
||||||
|
periodSeconds: 10
|
||||||
|
resources:
|
||||||
|
{{- toYaml .Values.rabbitmq.resources | nindent 12 }}
|
||||||
|
volumeMounts:
|
||||||
|
- name: data
|
||||||
|
mountPath: /var/lib/rabbitmq
|
||||||
|
volumeClaimTemplates:
|
||||||
|
- metadata:
|
||||||
|
name: data
|
||||||
|
spec:
|
||||||
|
accessModes:
|
||||||
|
{{- toYaml .Values.rabbitmq.persistence.accessModes | nindent 10 }}
|
||||||
|
resources:
|
||||||
|
requests:
|
||||||
|
storage: {{ .Values.rabbitmq.persistence.size }}
|
||||||
|
{{- if .Values.rabbitmq.persistence.storageClassName }}
|
||||||
|
storageClassName: {{ .Values.rabbitmq.persistence.storageClassName }}
|
||||||
|
{{- end }}
|
||||||
|
{{- end }}
|
||||||
|
{{- if .Values.redis.enabled }}
|
||||||
|
---
|
||||||
|
apiVersion: v1
|
||||||
|
kind: Service
|
||||||
|
metadata:
|
||||||
|
name: {{ include "attune.redisServiceName" . }}
|
||||||
|
labels:
|
||||||
|
{{- include "attune.labels" . | nindent 4 }}
|
||||||
|
spec:
|
||||||
|
selector:
|
||||||
|
{{- include "attune.componentLabels" (dict "root" . "component" "redis") | nindent 4 }}
|
||||||
|
ports:
|
||||||
|
- name: redis
|
||||||
|
port: {{ .Values.redis.port }}
|
||||||
|
targetPort: redis
|
||||||
|
---
|
||||||
|
apiVersion: apps/v1
|
||||||
|
kind: StatefulSet
|
||||||
|
metadata:
|
||||||
|
name: {{ include "attune.redisServiceName" . }}
|
||||||
|
labels:
|
||||||
|
{{- include "attune.labels" . | nindent 4 }}
|
||||||
|
spec:
|
||||||
|
serviceName: {{ include "attune.redisServiceName" . }}
|
||||||
|
replicas: 1
|
||||||
|
selector:
|
||||||
|
matchLabels:
|
||||||
|
{{- include "attune.componentLabels" (dict "root" . "component" "redis") | nindent 6 }}
|
||||||
|
template:
|
||||||
|
metadata:
|
||||||
|
labels:
|
||||||
|
{{- include "attune.componentLabels" (dict "root" . "component" "redis") | nindent 8 }}
|
||||||
|
spec:
|
||||||
|
containers:
|
||||||
|
- name: redis
|
||||||
|
image: "{{ .Values.redis.image.repository }}:{{ .Values.redis.image.tag }}"
|
||||||
|
imagePullPolicy: IfNotPresent
|
||||||
|
command: ["redis-server", "--appendonly", "yes"]
|
||||||
|
ports:
|
||||||
|
- name: redis
|
||||||
|
containerPort: 6379
|
||||||
|
livenessProbe:
|
||||||
|
exec:
|
||||||
|
command: ["redis-cli", "ping"]
|
||||||
|
initialDelaySeconds: 15
|
||||||
|
periodSeconds: 10
|
||||||
|
readinessProbe:
|
||||||
|
exec:
|
||||||
|
command: ["redis-cli", "ping"]
|
||||||
|
initialDelaySeconds: 10
|
||||||
|
periodSeconds: 10
|
||||||
|
resources:
|
||||||
|
{{- toYaml .Values.redis.resources | nindent 12 }}
|
||||||
|
volumeMounts:
|
||||||
|
- name: data
|
||||||
|
mountPath: /data
|
||||||
|
volumeClaimTemplates:
|
||||||
|
- metadata:
|
||||||
|
name: data
|
||||||
|
spec:
|
||||||
|
accessModes:
|
||||||
|
{{- toYaml .Values.redis.persistence.accessModes | nindent 10 }}
|
||||||
|
resources:
|
||||||
|
requests:
|
||||||
|
storage: {{ .Values.redis.persistence.size }}
|
||||||
|
{{- if .Values.redis.persistence.storageClassName }}
|
||||||
|
storageClassName: {{ .Values.redis.persistence.storageClassName }}
|
||||||
|
{{- end }}
|
||||||
|
{{- end }}
|
||||||
35
charts/attune/templates/ingress.yaml
Normal file
35
charts/attune/templates/ingress.yaml
Normal file
@@ -0,0 +1,35 @@
|
|||||||
|
{{- if .Values.web.ingress.enabled }}
|
||||||
|
apiVersion: networking.k8s.io/v1
|
||||||
|
kind: Ingress
|
||||||
|
metadata:
|
||||||
|
name: {{ include "attune.fullname" . }}-web
|
||||||
|
labels:
|
||||||
|
{{- include "attune.labels" . | nindent 4 }}
|
||||||
|
{{- with .Values.web.ingress.annotations }}
|
||||||
|
annotations:
|
||||||
|
{{- toYaml . | nindent 4 }}
|
||||||
|
{{- end }}
|
||||||
|
spec:
|
||||||
|
{{- if .Values.web.ingress.className }}
|
||||||
|
ingressClassName: {{ .Values.web.ingress.className }}
|
||||||
|
{{- end }}
|
||||||
|
rules:
|
||||||
|
{{- range .Values.web.ingress.hosts }}
|
||||||
|
- host: {{ .host | quote }}
|
||||||
|
http:
|
||||||
|
paths:
|
||||||
|
{{- range .paths }}
|
||||||
|
- path: {{ .path }}
|
||||||
|
pathType: {{ .pathType }}
|
||||||
|
backend:
|
||||||
|
service:
|
||||||
|
name: {{ include "attune.fullname" $ }}-web
|
||||||
|
port:
|
||||||
|
number: {{ $.Values.web.service.port }}
|
||||||
|
{{- end }}
|
||||||
|
{{- end }}
|
||||||
|
{{- with .Values.web.ingress.tls }}
|
||||||
|
tls:
|
||||||
|
{{- toYaml . | nindent 4 }}
|
||||||
|
{{- end }}
|
||||||
|
{{- end }}
|
||||||
154
charts/attune/templates/jobs.yaml
Normal file
154
charts/attune/templates/jobs.yaml
Normal file
@@ -0,0 +1,154 @@
|
|||||||
|
apiVersion: batch/v1
|
||||||
|
kind: Job
|
||||||
|
metadata:
|
||||||
|
name: {{ include "attune.fullname" . }}-migrations
|
||||||
|
labels:
|
||||||
|
{{- include "attune.labels" . | nindent 4 }}
|
||||||
|
app.kubernetes.io/component: migrations
|
||||||
|
annotations:
|
||||||
|
helm.sh/hook: post-install,post-upgrade
|
||||||
|
helm.sh/hook-weight: "-20"
|
||||||
|
helm.sh/hook-delete-policy: before-hook-creation,hook-succeeded
|
||||||
|
spec:
|
||||||
|
ttlSecondsAfterFinished: {{ .Values.jobs.migrations.ttlSecondsAfterFinished }}
|
||||||
|
template:
|
||||||
|
metadata:
|
||||||
|
labels:
|
||||||
|
{{- include "attune.componentLabels" (dict "root" . "component" "migrations") | nindent 8 }}
|
||||||
|
spec:
|
||||||
|
restartPolicy: OnFailure
|
||||||
|
{{- if .Values.global.imagePullSecrets }}
|
||||||
|
imagePullSecrets:
|
||||||
|
{{- toYaml .Values.global.imagePullSecrets | nindent 8 }}
|
||||||
|
{{- end }}
|
||||||
|
containers:
|
||||||
|
- name: migrations
|
||||||
|
image: {{ include "attune.image" (dict "root" . "image" .Values.images.migrations) }}
|
||||||
|
imagePullPolicy: {{ .Values.images.migrations.pullPolicy }}
|
||||||
|
envFrom:
|
||||||
|
- secretRef:
|
||||||
|
name: {{ include "attune.secretName" . }}
|
||||||
|
env:
|
||||||
|
- name: MIGRATIONS_DIR
|
||||||
|
value: /migrations
|
||||||
|
resources:
|
||||||
|
{{- toYaml .Values.jobs.migrations.resources | nindent 12 }}
|
||||||
|
---
|
||||||
|
apiVersion: batch/v1
|
||||||
|
kind: Job
|
||||||
|
metadata:
|
||||||
|
name: {{ include "attune.fullname" . }}-init-user
|
||||||
|
labels:
|
||||||
|
{{- include "attune.labels" . | nindent 4 }}
|
||||||
|
app.kubernetes.io/component: init-user
|
||||||
|
annotations:
|
||||||
|
helm.sh/hook: post-install,post-upgrade
|
||||||
|
helm.sh/hook-weight: "-10"
|
||||||
|
helm.sh/hook-delete-policy: before-hook-creation,hook-succeeded
|
||||||
|
spec:
|
||||||
|
ttlSecondsAfterFinished: {{ .Values.jobs.initUser.ttlSecondsAfterFinished }}
|
||||||
|
template:
|
||||||
|
metadata:
|
||||||
|
labels:
|
||||||
|
{{- include "attune.componentLabels" (dict "root" . "component" "init-user") | nindent 8 }}
|
||||||
|
spec:
|
||||||
|
restartPolicy: OnFailure
|
||||||
|
{{- if .Values.global.imagePullSecrets }}
|
||||||
|
imagePullSecrets:
|
||||||
|
{{- toYaml .Values.global.imagePullSecrets | nindent 8 }}
|
||||||
|
{{- end }}
|
||||||
|
containers:
|
||||||
|
- name: init-user
|
||||||
|
image: {{ include "attune.image" (dict "root" . "image" .Values.images.initUser) }}
|
||||||
|
imagePullPolicy: {{ .Values.images.initUser.pullPolicy }}
|
||||||
|
envFrom:
|
||||||
|
- secretRef:
|
||||||
|
name: {{ include "attune.secretName" . }}
|
||||||
|
command: ["/bin/sh", "-ec"]
|
||||||
|
args:
|
||||||
|
- |
|
||||||
|
until PGPASSWORD="$DB_PASSWORD" psql -h "$DB_HOST" -p "$DB_PORT" -U "$DB_USER" -d "$DB_NAME" -tAc "SELECT to_regclass('${DB_SCHEMA}.identity')" | grep -q identity; do
|
||||||
|
echo "waiting for database schema";
|
||||||
|
sleep 2;
|
||||||
|
done
|
||||||
|
exec /init-user.sh
|
||||||
|
resources:
|
||||||
|
{{- toYaml .Values.jobs.initUser.resources | nindent 12 }}
|
||||||
|
---
|
||||||
|
apiVersion: batch/v1
|
||||||
|
kind: Job
|
||||||
|
metadata:
|
||||||
|
name: {{ include "attune.fullname" . }}-init-packs
|
||||||
|
labels:
|
||||||
|
{{- include "attune.labels" . | nindent 4 }}
|
||||||
|
app.kubernetes.io/component: init-packs
|
||||||
|
annotations:
|
||||||
|
helm.sh/hook: post-install,post-upgrade
|
||||||
|
helm.sh/hook-weight: "0"
|
||||||
|
helm.sh/hook-delete-policy: before-hook-creation,hook-succeeded
|
||||||
|
spec:
|
||||||
|
ttlSecondsAfterFinished: {{ .Values.jobs.initPacks.ttlSecondsAfterFinished }}
|
||||||
|
template:
|
||||||
|
metadata:
|
||||||
|
labels:
|
||||||
|
{{- include "attune.componentLabels" (dict "root" . "component" "init-packs") | nindent 8 }}
|
||||||
|
spec:
|
||||||
|
restartPolicy: OnFailure
|
||||||
|
{{- if .Values.global.imagePullSecrets }}
|
||||||
|
imagePullSecrets:
|
||||||
|
{{- toYaml .Values.global.imagePullSecrets | nindent 8 }}
|
||||||
|
{{- end }}
|
||||||
|
containers:
|
||||||
|
- name: init-packs
|
||||||
|
image: {{ include "attune.image" (dict "root" . "image" .Values.images.initPacks) }}
|
||||||
|
imagePullPolicy: {{ .Values.images.initPacks.pullPolicy }}
|
||||||
|
envFrom:
|
||||||
|
- secretRef:
|
||||||
|
name: {{ include "attune.secretName" . }}
|
||||||
|
command: ["/bin/sh", "-ec"]
|
||||||
|
args:
|
||||||
|
- |
|
||||||
|
until python3 - <<'PY'
|
||||||
|
import os
|
||||||
|
import psycopg2
|
||||||
|
|
||||||
|
conn = psycopg2.connect(
|
||||||
|
host=os.environ["DB_HOST"],
|
||||||
|
port=os.environ["DB_PORT"],
|
||||||
|
user=os.environ["DB_USER"],
|
||||||
|
password=os.environ["DB_PASSWORD"],
|
||||||
|
dbname=os.environ["DB_NAME"],
|
||||||
|
)
|
||||||
|
try:
|
||||||
|
with conn.cursor() as cur:
|
||||||
|
cur.execute("SET search_path TO %s, public" % os.environ["DB_SCHEMA"])
|
||||||
|
cur.execute("SELECT to_regclass(%s)", (f"{os.environ['DB_SCHEMA']}.identity",))
|
||||||
|
value = cur.fetchone()[0]
|
||||||
|
raise SystemExit(0 if value else 1)
|
||||||
|
finally:
|
||||||
|
conn.close()
|
||||||
|
PY
|
||||||
|
do
|
||||||
|
echo "waiting for database schema";
|
||||||
|
sleep 2;
|
||||||
|
done
|
||||||
|
exec /init-packs.sh
|
||||||
|
volumeMounts:
|
||||||
|
- name: packs
|
||||||
|
mountPath: /opt/attune/packs
|
||||||
|
- name: runtime-envs
|
||||||
|
mountPath: /opt/attune/runtime_envs
|
||||||
|
- name: artifacts
|
||||||
|
mountPath: /opt/attune/artifacts
|
||||||
|
resources:
|
||||||
|
{{- toYaml .Values.jobs.initPacks.resources | nindent 12 }}
|
||||||
|
volumes:
|
||||||
|
- name: packs
|
||||||
|
persistentVolumeClaim:
|
||||||
|
claimName: {{ include "attune.fullname" . }}-packs
|
||||||
|
- name: runtime-envs
|
||||||
|
persistentVolumeClaim:
|
||||||
|
claimName: {{ include "attune.fullname" . }}-runtime-envs
|
||||||
|
- name: artifacts
|
||||||
|
persistentVolumeClaim:
|
||||||
|
claimName: {{ include "attune.fullname" . }}-artifacts
|
||||||
53
charts/attune/templates/pvc.yaml
Normal file
53
charts/attune/templates/pvc.yaml
Normal file
@@ -0,0 +1,53 @@
|
|||||||
|
{{- if .Values.sharedStorage.packs.enabled }}
|
||||||
|
apiVersion: v1
|
||||||
|
kind: PersistentVolumeClaim
|
||||||
|
metadata:
|
||||||
|
name: {{ include "attune.fullname" . }}-packs
|
||||||
|
labels:
|
||||||
|
{{- include "attune.labels" . | nindent 4 }}
|
||||||
|
spec:
|
||||||
|
accessModes:
|
||||||
|
{{- toYaml .Values.sharedStorage.packs.accessModes | nindent 4 }}
|
||||||
|
resources:
|
||||||
|
requests:
|
||||||
|
storage: {{ .Values.sharedStorage.packs.size }}
|
||||||
|
{{- if .Values.sharedStorage.packs.storageClassName }}
|
||||||
|
storageClassName: {{ .Values.sharedStorage.packs.storageClassName }}
|
||||||
|
{{- end }}
|
||||||
|
---
|
||||||
|
{{- end }}
|
||||||
|
{{- if .Values.sharedStorage.runtimeEnvs.enabled }}
|
||||||
|
apiVersion: v1
|
||||||
|
kind: PersistentVolumeClaim
|
||||||
|
metadata:
|
||||||
|
name: {{ include "attune.fullname" . }}-runtime-envs
|
||||||
|
labels:
|
||||||
|
{{- include "attune.labels" . | nindent 4 }}
|
||||||
|
spec:
|
||||||
|
accessModes:
|
||||||
|
{{- toYaml .Values.sharedStorage.runtimeEnvs.accessModes | nindent 4 }}
|
||||||
|
resources:
|
||||||
|
requests:
|
||||||
|
storage: {{ .Values.sharedStorage.runtimeEnvs.size }}
|
||||||
|
{{- if .Values.sharedStorage.runtimeEnvs.storageClassName }}
|
||||||
|
storageClassName: {{ .Values.sharedStorage.runtimeEnvs.storageClassName }}
|
||||||
|
{{- end }}
|
||||||
|
---
|
||||||
|
{{- end }}
|
||||||
|
{{- if .Values.sharedStorage.artifacts.enabled }}
|
||||||
|
apiVersion: v1
|
||||||
|
kind: PersistentVolumeClaim
|
||||||
|
metadata:
|
||||||
|
name: {{ include "attune.fullname" . }}-artifacts
|
||||||
|
labels:
|
||||||
|
{{- include "attune.labels" . | nindent 4 }}
|
||||||
|
spec:
|
||||||
|
accessModes:
|
||||||
|
{{- toYaml .Values.sharedStorage.artifacts.accessModes | nindent 4 }}
|
||||||
|
resources:
|
||||||
|
requests:
|
||||||
|
storage: {{ .Values.sharedStorage.artifacts.size }}
|
||||||
|
{{- if .Values.sharedStorage.artifacts.storageClassName }}
|
||||||
|
storageClassName: {{ .Values.sharedStorage.artifacts.storageClassName }}
|
||||||
|
{{- end }}
|
||||||
|
{{- end }}
|
||||||
31
charts/attune/templates/secret.yaml
Normal file
31
charts/attune/templates/secret.yaml
Normal file
@@ -0,0 +1,31 @@
|
|||||||
|
{{- if not .Values.security.existingSecret }}
|
||||||
|
apiVersion: v1
|
||||||
|
kind: Secret
|
||||||
|
metadata:
|
||||||
|
name: {{ include "attune.secretName" . }}
|
||||||
|
labels:
|
||||||
|
{{- include "attune.labels" . | nindent 4 }}
|
||||||
|
type: Opaque
|
||||||
|
stringData:
|
||||||
|
ATTUNE__SECURITY__JWT_SECRET: {{ .Values.security.jwtSecret | quote }}
|
||||||
|
ATTUNE__SECURITY__ENCRYPTION_KEY: {{ .Values.security.encryptionKey | quote }}
|
||||||
|
ATTUNE__DATABASE__URL: {{ include "attune.databaseUrl" . | quote }}
|
||||||
|
ATTUNE__MESSAGE_QUEUE__URL: {{ include "attune.rabbitmqUrl" . | quote }}
|
||||||
|
ATTUNE__CACHE__URL: {{ include "attune.redisUrl" . | quote }}
|
||||||
|
DB_HOST: {{ include "attune.postgresqlServiceName" . | quote }}
|
||||||
|
DB_PORT: {{ .Values.database.port | quote }}
|
||||||
|
DB_USER: {{ .Values.database.username | quote }}
|
||||||
|
DB_PASSWORD: {{ .Values.database.password | quote }}
|
||||||
|
DB_NAME: {{ .Values.database.database | quote }}
|
||||||
|
DB_SCHEMA: {{ .Values.database.schema | quote }}
|
||||||
|
TEST_LOGIN: {{ .Values.bootstrap.testUser.login | quote }}
|
||||||
|
TEST_DISPLAY_NAME: {{ .Values.bootstrap.testUser.displayName | quote }}
|
||||||
|
TEST_PASSWORD: {{ .Values.bootstrap.testUser.password | quote }}
|
||||||
|
DEFAULT_ADMIN_LOGIN: {{ .Values.bootstrap.testUser.login | quote }}
|
||||||
|
DEFAULT_ADMIN_PERMISSION_SET_REF: "core.admin"
|
||||||
|
SOURCE_PACKS_DIR: "/source/packs"
|
||||||
|
TARGET_PACKS_DIR: "/opt/attune/packs"
|
||||||
|
RUNTIME_ENVS_DIR: "/opt/attune/runtime_envs"
|
||||||
|
ARTIFACTS_DIR: "/opt/attune/artifacts"
|
||||||
|
LOADER_SCRIPT: "/scripts/load_core_pack.py"
|
||||||
|
{{- end }}
|
||||||
253
charts/attune/values.yaml
Normal file
253
charts/attune/values.yaml
Normal file
@@ -0,0 +1,253 @@
|
|||||||
|
nameOverride: ""
|
||||||
|
fullnameOverride: ""
|
||||||
|
|
||||||
|
global:
|
||||||
|
imageRegistry: ""
|
||||||
|
imageNamespace: ""
|
||||||
|
imageTag: edge
|
||||||
|
imagePullSecrets: []
|
||||||
|
|
||||||
|
security:
|
||||||
|
existingSecret: ""
|
||||||
|
jwtSecret: change-me-in-production
|
||||||
|
encryptionKey: change-me-in-production-32-bytes-minimum
|
||||||
|
|
||||||
|
database:
|
||||||
|
schema: public
|
||||||
|
username: attune
|
||||||
|
password: attune
|
||||||
|
database: attune
|
||||||
|
host: ""
|
||||||
|
port: 5432
|
||||||
|
url: ""
|
||||||
|
postgresql:
|
||||||
|
enabled: true
|
||||||
|
image:
|
||||||
|
repository: timescale/timescaledb
|
||||||
|
tag: 2.17.2-pg16
|
||||||
|
persistence:
|
||||||
|
enabled: true
|
||||||
|
accessModes:
|
||||||
|
- ReadWriteOnce
|
||||||
|
size: 20Gi
|
||||||
|
storageClassName: ""
|
||||||
|
resources: {}
|
||||||
|
|
||||||
|
rabbitmq:
|
||||||
|
username: attune
|
||||||
|
password: attune
|
||||||
|
host: ""
|
||||||
|
port: 5672
|
||||||
|
url: ""
|
||||||
|
managementPort: 15672
|
||||||
|
enabled: true
|
||||||
|
image:
|
||||||
|
repository: rabbitmq
|
||||||
|
tag: 3.13-management-alpine
|
||||||
|
persistence:
|
||||||
|
enabled: true
|
||||||
|
accessModes:
|
||||||
|
- ReadWriteOnce
|
||||||
|
size: 8Gi
|
||||||
|
storageClassName: ""
|
||||||
|
resources: {}
|
||||||
|
|
||||||
|
redis:
|
||||||
|
enabled: true
|
||||||
|
host: ""
|
||||||
|
port: 6379
|
||||||
|
url: ""
|
||||||
|
image:
|
||||||
|
repository: redis
|
||||||
|
tag: 7-alpine
|
||||||
|
persistence:
|
||||||
|
enabled: true
|
||||||
|
accessModes:
|
||||||
|
- ReadWriteOnce
|
||||||
|
size: 8Gi
|
||||||
|
storageClassName: ""
|
||||||
|
resources: {}
|
||||||
|
|
||||||
|
bootstrap:
|
||||||
|
testUser:
|
||||||
|
login: test@attune.local
|
||||||
|
displayName: Test User
|
||||||
|
password: TestPass123!
|
||||||
|
|
||||||
|
sharedStorage:
|
||||||
|
packs:
|
||||||
|
enabled: true
|
||||||
|
accessModes:
|
||||||
|
- ReadWriteMany
|
||||||
|
size: 2Gi
|
||||||
|
storageClassName: ""
|
||||||
|
runtimeEnvs:
|
||||||
|
enabled: true
|
||||||
|
accessModes:
|
||||||
|
- ReadWriteMany
|
||||||
|
size: 10Gi
|
||||||
|
storageClassName: ""
|
||||||
|
artifacts:
|
||||||
|
enabled: true
|
||||||
|
accessModes:
|
||||||
|
- ReadWriteMany
|
||||||
|
size: 20Gi
|
||||||
|
storageClassName: ""
|
||||||
|
|
||||||
|
images:
|
||||||
|
api:
|
||||||
|
repository: attune-api
|
||||||
|
tag: ""
|
||||||
|
pullPolicy: IfNotPresent
|
||||||
|
executor:
|
||||||
|
repository: attune-executor
|
||||||
|
tag: ""
|
||||||
|
pullPolicy: IfNotPresent
|
||||||
|
worker:
|
||||||
|
repository: attune-worker
|
||||||
|
tag: ""
|
||||||
|
pullPolicy: IfNotPresent
|
||||||
|
sensor:
|
||||||
|
repository: nikolaik/python-nodejs
|
||||||
|
tag: python3.12-nodejs22-slim
|
||||||
|
pullPolicy: IfNotPresent
|
||||||
|
notifier:
|
||||||
|
repository: attune-notifier
|
||||||
|
tag: ""
|
||||||
|
pullPolicy: IfNotPresent
|
||||||
|
web:
|
||||||
|
repository: attune-web
|
||||||
|
tag: ""
|
||||||
|
pullPolicy: IfNotPresent
|
||||||
|
migrations:
|
||||||
|
repository: attune-migrations
|
||||||
|
tag: ""
|
||||||
|
pullPolicy: IfNotPresent
|
||||||
|
initUser:
|
||||||
|
repository: attune-init-user
|
||||||
|
tag: ""
|
||||||
|
pullPolicy: IfNotPresent
|
||||||
|
initPacks:
|
||||||
|
repository: attune-init-packs
|
||||||
|
tag: ""
|
||||||
|
pullPolicy: IfNotPresent
|
||||||
|
agent:
|
||||||
|
repository: attune-agent
|
||||||
|
tag: ""
|
||||||
|
pullPolicy: IfNotPresent
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
migrations:
|
||||||
|
ttlSecondsAfterFinished: 300
|
||||||
|
resources: {}
|
||||||
|
initUser:
|
||||||
|
ttlSecondsAfterFinished: 300
|
||||||
|
resources: {}
|
||||||
|
initPacks:
|
||||||
|
ttlSecondsAfterFinished: 300
|
||||||
|
resources: {}
|
||||||
|
|
||||||
|
api:
|
||||||
|
replicaCount: 1
|
||||||
|
service:
|
||||||
|
type: ClusterIP
|
||||||
|
port: 8080
|
||||||
|
resources: {}
|
||||||
|
|
||||||
|
executor:
|
||||||
|
replicaCount: 1
|
||||||
|
resources: {}
|
||||||
|
|
||||||
|
worker:
|
||||||
|
replicaCount: 1
|
||||||
|
runtimes: shell,python,node,native
|
||||||
|
name: worker-full-01
|
||||||
|
resources: {}
|
||||||
|
|
||||||
|
sensor:
|
||||||
|
replicaCount: 1
|
||||||
|
runtimes: shell,python,node,native
|
||||||
|
logLevel: debug
|
||||||
|
resources: {}
|
||||||
|
|
||||||
|
notifier:
|
||||||
|
replicaCount: 1
|
||||||
|
service:
|
||||||
|
type: ClusterIP
|
||||||
|
port: 8081
|
||||||
|
resources: {}
|
||||||
|
|
||||||
|
web:
|
||||||
|
replicaCount: 1
|
||||||
|
service:
|
||||||
|
type: ClusterIP
|
||||||
|
port: 80
|
||||||
|
config:
|
||||||
|
environment: kubernetes
|
||||||
|
apiUrl: http://localhost:8080
|
||||||
|
wsUrl: ws://localhost:8081
|
||||||
|
resources: {}
|
||||||
|
ingress:
|
||||||
|
enabled: false
|
||||||
|
className: ""
|
||||||
|
annotations: {}
|
||||||
|
hosts:
|
||||||
|
- host: attune.local
|
||||||
|
paths:
|
||||||
|
- path: /
|
||||||
|
pathType: Prefix
|
||||||
|
tls: []
|
||||||
|
|
||||||
|
# Agent-based workers
|
||||||
|
# These deploy the universal worker agent into any container image.
|
||||||
|
# The agent auto-detects available runtimes (python, ruby, node, etc.)
|
||||||
|
# and registers with the Attune platform.
|
||||||
|
#
|
||||||
|
# Each entry creates a separate Deployment with an init container that
|
||||||
|
# copies the statically-linked agent binary into the worker container.
|
||||||
|
#
|
||||||
|
# Supported fields per worker:
|
||||||
|
# name (required) - Unique name for this worker (used in resource names)
|
||||||
|
# image (required) - Container image with your desired runtime(s)
|
||||||
|
# replicas (optional) - Number of pod replicas (default: 1)
|
||||||
|
# runtimes (optional) - List of runtimes to expose; [] = auto-detect
|
||||||
|
# resources (optional) - Kubernetes resource requests/limits
|
||||||
|
# env (optional) - Extra environment variables (list of {name, value})
|
||||||
|
# imagePullPolicy (optional) - Pull policy for the worker image
|
||||||
|
# logLevel (optional) - RUST_LOG level (default: "info")
|
||||||
|
# runtimeClassName (optional) - Kubernetes RuntimeClass (e.g., "nvidia" for GPU)
|
||||||
|
# nodeSelector (optional) - Node selector map for pod scheduling
|
||||||
|
# tolerations (optional) - Tolerations list for pod scheduling
|
||||||
|
# stopGracePeriod (optional) - Termination grace period in seconds (default: 45)
|
||||||
|
#
|
||||||
|
# Examples:
|
||||||
|
# agentWorkers:
|
||||||
|
# - name: ruby
|
||||||
|
# image: ruby:3.3
|
||||||
|
# replicas: 2
|
||||||
|
# runtimes: [] # auto-detect
|
||||||
|
# resources: {}
|
||||||
|
#
|
||||||
|
# - name: python-gpu
|
||||||
|
# image: nvidia/cuda:12.3.1-runtime-ubuntu22.04
|
||||||
|
# replicas: 1
|
||||||
|
# runtimes: [python, shell]
|
||||||
|
# runtimeClassName: nvidia
|
||||||
|
# nodeSelector:
|
||||||
|
# gpu: "true"
|
||||||
|
# tolerations:
|
||||||
|
# - key: nvidia.com/gpu
|
||||||
|
# operator: Exists
|
||||||
|
# effect: NoSchedule
|
||||||
|
# resources:
|
||||||
|
# limits:
|
||||||
|
# nvidia.com/gpu: 1
|
||||||
|
#
|
||||||
|
# - name: custom
|
||||||
|
# image: my-org/my-custom-image:latest
|
||||||
|
# replicas: 1
|
||||||
|
# runtimes: []
|
||||||
|
# env:
|
||||||
|
# - name: MY_CUSTOM_VAR
|
||||||
|
# value: my-value
|
||||||
|
agentWorkers: []
|
||||||
@@ -46,6 +46,22 @@ security:
|
|||||||
jwt_refresh_expiration: 2592000 # 30 days
|
jwt_refresh_expiration: 2592000 # 30 days
|
||||||
encryption_key: test-encryption-key-32-chars-okay
|
encryption_key: test-encryption-key-32-chars-okay
|
||||||
enable_auth: true
|
enable_auth: true
|
||||||
|
allow_self_registration: true
|
||||||
|
oidc:
|
||||||
|
enabled: false
|
||||||
|
discovery_url: https://auth.rdrx.app/.well-known/openid-configuration
|
||||||
|
client_id: 31d194737840d32bd3afe6474826976bae346d77247a158c4dc43887278eb605
|
||||||
|
client_secret: null
|
||||||
|
redirect_uri: http://localhost:3000/auth/callback
|
||||||
|
post_logout_redirect_uri: http://localhost:3000/login
|
||||||
|
scopes:
|
||||||
|
- groups
|
||||||
|
ldap:
|
||||||
|
enabled: false
|
||||||
|
url: ldap://localhost:389
|
||||||
|
bind_dn_template: "uid={login},ou=users,dc=example,dc=com"
|
||||||
|
provider_name: ldap
|
||||||
|
provider_label: Development LDAP
|
||||||
|
|
||||||
# Packs directory (where pack action files are located)
|
# Packs directory (where pack action files are located)
|
||||||
packs_base_dir: ./packs
|
packs_base_dir: ./packs
|
||||||
@@ -109,3 +125,8 @@ executor:
|
|||||||
scheduled_timeout: 120 # 2 minutes (faster feedback in dev)
|
scheduled_timeout: 120 # 2 minutes (faster feedback in dev)
|
||||||
timeout_check_interval: 30 # Check every 30 seconds
|
timeout_check_interval: 30 # Check every 30 seconds
|
||||||
enable_timeout_monitor: true
|
enable_timeout_monitor: true
|
||||||
|
|
||||||
|
# Agent binary distribution (optional - for local development)
|
||||||
|
# Binary is built via: make build-agent
|
||||||
|
# agent:
|
||||||
|
# binary_dir: ./target/x86_64-unknown-linux-musl/release
|
||||||
|
|||||||
@@ -86,6 +86,48 @@ security:
|
|||||||
# Enable authentication
|
# Enable authentication
|
||||||
enable_auth: true
|
enable_auth: true
|
||||||
|
|
||||||
|
# Login page defaults for the web UI. Users can still override with:
|
||||||
|
# /login?auth=direct
|
||||||
|
# /login?auth=<provider_name>
|
||||||
|
login_page:
|
||||||
|
show_local_login: true
|
||||||
|
show_oidc_login: true
|
||||||
|
show_ldap_login: true
|
||||||
|
|
||||||
|
# Optional OIDC browser login configuration
|
||||||
|
oidc:
|
||||||
|
enabled: false
|
||||||
|
discovery_url: https://auth.example.com/.well-known/openid-configuration
|
||||||
|
client_id: your-confidential-client-id
|
||||||
|
provider_name: sso
|
||||||
|
provider_label: Example SSO
|
||||||
|
provider_icon_url: https://auth.example.com/assets/logo.svg
|
||||||
|
client_secret: your-confidential-client-secret
|
||||||
|
redirect_uri: http://localhost:3000/auth/callback
|
||||||
|
post_logout_redirect_uri: http://localhost:3000/login
|
||||||
|
scopes:
|
||||||
|
- groups
|
||||||
|
|
||||||
|
# Optional LDAP authentication configuration
|
||||||
|
ldap:
|
||||||
|
enabled: false
|
||||||
|
url: ldap://ldap.example.com:389
|
||||||
|
# Direct-bind mode: construct DN from template
|
||||||
|
# bind_dn_template: "uid={login},ou=users,dc=example,dc=com"
|
||||||
|
# Search-and-bind mode: search for user with a service account
|
||||||
|
user_search_base: "ou=users,dc=example,dc=com"
|
||||||
|
user_filter: "(uid={login})"
|
||||||
|
search_bind_dn: "cn=readonly,dc=example,dc=com"
|
||||||
|
search_bind_password: "readonly-password"
|
||||||
|
login_attr: uid
|
||||||
|
email_attr: mail
|
||||||
|
display_name_attr: cn
|
||||||
|
group_attr: memberOf
|
||||||
|
starttls: false
|
||||||
|
danger_skip_tls_verify: false
|
||||||
|
provider_name: ldap
|
||||||
|
provider_label: Company LDAP
|
||||||
|
|
||||||
# Worker configuration (optional, for worker services)
|
# Worker configuration (optional, for worker services)
|
||||||
# Uncomment and configure if running worker processes
|
# Uncomment and configure if running worker processes
|
||||||
# worker:
|
# worker:
|
||||||
|
|||||||
@@ -48,6 +48,7 @@ security:
|
|||||||
jwt_refresh_expiration: 3600 # 1 hour
|
jwt_refresh_expiration: 3600 # 1 hour
|
||||||
encryption_key: test-encryption-key-32-chars-okay
|
encryption_key: test-encryption-key-32-chars-okay
|
||||||
enable_auth: true
|
enable_auth: true
|
||||||
|
allow_self_registration: true
|
||||||
|
|
||||||
# Test packs directory (use /tmp for tests to avoid permission issues)
|
# Test packs directory (use /tmp for tests to avoid permission issues)
|
||||||
packs_base_dir: /tmp/attune-test-packs
|
packs_base_dir: /tmp/attune-test-packs
|
||||||
|
|||||||
@@ -27,6 +27,8 @@ futures = { workspace = true }
|
|||||||
|
|
||||||
# Web framework
|
# Web framework
|
||||||
axum = { workspace = true, features = ["multipart"] }
|
axum = { workspace = true, features = ["multipart"] }
|
||||||
|
axum-extra = { version = "0.10", features = ["cookie"] }
|
||||||
|
cookie = "0.18"
|
||||||
tower = { workspace = true }
|
tower = { workspace = true }
|
||||||
tower-http = { workspace = true }
|
tower-http = { workspace = true }
|
||||||
|
|
||||||
@@ -67,6 +69,9 @@ jsonschema = { workspace = true }
|
|||||||
|
|
||||||
# HTTP client
|
# HTTP client
|
||||||
reqwest = { workspace = true }
|
reqwest = { workspace = true }
|
||||||
|
openidconnect = "4.0"
|
||||||
|
ldap3 = "0.12"
|
||||||
|
url = { workspace = true }
|
||||||
|
|
||||||
# Archive/compression
|
# Archive/compression
|
||||||
tar = { workspace = true }
|
tar = { workspace = true }
|
||||||
@@ -84,10 +89,12 @@ hmac = "0.12"
|
|||||||
sha1 = "0.10"
|
sha1 = "0.10"
|
||||||
sha2 = { workspace = true }
|
sha2 = { workspace = true }
|
||||||
hex = "0.4"
|
hex = "0.4"
|
||||||
|
subtle = "2.6"
|
||||||
|
|
||||||
# OpenAPI/Swagger
|
# OpenAPI/Swagger
|
||||||
utoipa = { workspace = true, features = ["axum_extras"] }
|
utoipa = { workspace = true, features = ["axum_extras"] }
|
||||||
utoipa-swagger-ui = { version = "9.0", features = ["axum"] }
|
utoipa-swagger-ui = { version = "9.0", features = ["axum"] }
|
||||||
|
jsonwebtoken = { workspace = true, features = ["rust_crypto"] }
|
||||||
|
|
||||||
[dev-dependencies]
|
[dev-dependencies]
|
||||||
mockall = { workspace = true }
|
mockall = { workspace = true }
|
||||||
|
|||||||
479
crates/api/src/auth/ldap.rs
Normal file
479
crates/api/src/auth/ldap.rs
Normal file
@@ -0,0 +1,479 @@
|
|||||||
|
//! LDAP authentication helpers for username/password login.
|
||||||
|
|
||||||
|
use attune_common::{
|
||||||
|
config::LdapConfig,
|
||||||
|
repositories::{
|
||||||
|
identity::{CreateIdentityInput, IdentityRepository, UpdateIdentityInput},
|
||||||
|
Create, Update,
|
||||||
|
},
|
||||||
|
};
|
||||||
|
use ldap3::{dn_escape, ldap_escape, Ldap, LdapConnAsync, LdapConnSettings, Scope, SearchEntry};
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
use serde_json::json;
|
||||||
|
use sha2::{Digest, Sha256};
|
||||||
|
|
||||||
|
use crate::{
|
||||||
|
auth::jwt::{generate_access_token, generate_refresh_token},
|
||||||
|
dto::TokenResponse,
|
||||||
|
middleware::error::ApiError,
|
||||||
|
state::SharedState,
|
||||||
|
};
|
||||||
|
|
||||||
|
/// Claims extracted from the LDAP directory for an authenticated user.
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
|
pub struct LdapUserClaims {
|
||||||
|
/// The LDAP server URL the user was authenticated against.
|
||||||
|
pub server_url: String,
|
||||||
|
/// The user's full distinguished name.
|
||||||
|
pub dn: String,
|
||||||
|
/// Login attribute value (uid, sAMAccountName, etc.).
|
||||||
|
pub login: Option<String>,
|
||||||
|
/// Email address.
|
||||||
|
pub email: Option<String>,
|
||||||
|
/// Display name (cn).
|
||||||
|
pub display_name: Option<String>,
|
||||||
|
/// Group memberships (memberOf values).
|
||||||
|
pub groups: Vec<String>,
|
||||||
|
}
|
||||||
|
|
||||||
|
/// The result of a successful LDAP authentication.
|
||||||
|
#[derive(Debug, Clone)]
|
||||||
|
pub struct LdapAuthenticatedIdentity {
|
||||||
|
pub token_response: TokenResponse,
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Authenticate a user against the configured LDAP directory.
|
||||||
|
///
|
||||||
|
/// This performs a bind (either direct or search+bind) to verify
|
||||||
|
/// the user's credentials, then fetches their attributes and upserts
|
||||||
|
/// the identity in the database.
|
||||||
|
pub async fn authenticate(
|
||||||
|
state: &SharedState,
|
||||||
|
login: &str,
|
||||||
|
password: &str,
|
||||||
|
) -> Result<LdapAuthenticatedIdentity, ApiError> {
|
||||||
|
let ldap_config = ldap_config(state)?;
|
||||||
|
|
||||||
|
// Connect and authenticate
|
||||||
|
let claims = if ldap_config.bind_dn_template.is_some() {
|
||||||
|
direct_bind(&ldap_config, login, password).await?
|
||||||
|
} else {
|
||||||
|
search_and_bind(&ldap_config, login, password).await?
|
||||||
|
};
|
||||||
|
|
||||||
|
// Upsert identity in DB and issue JWT tokens
|
||||||
|
let identity = upsert_identity(state, &claims).await?;
|
||||||
|
let access_token = generate_access_token(identity.id, &identity.login, &state.jwt_config)?;
|
||||||
|
let refresh_token = generate_refresh_token(identity.id, &identity.login, &state.jwt_config)?;
|
||||||
|
|
||||||
|
let token_response = TokenResponse::new(
|
||||||
|
access_token,
|
||||||
|
refresh_token,
|
||||||
|
state.jwt_config.access_token_expiration,
|
||||||
|
)
|
||||||
|
.with_user(
|
||||||
|
identity.id,
|
||||||
|
identity.login.clone(),
|
||||||
|
identity.display_name.clone(),
|
||||||
|
);
|
||||||
|
|
||||||
|
Ok(LdapAuthenticatedIdentity { token_response })
|
||||||
|
}
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// Internal helpers
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
fn ldap_config(state: &SharedState) -> Result<LdapConfig, ApiError> {
|
||||||
|
let config = state
|
||||||
|
.config
|
||||||
|
.security
|
||||||
|
.ldap
|
||||||
|
.clone()
|
||||||
|
.filter(|ldap| ldap.enabled)
|
||||||
|
.ok_or_else(|| {
|
||||||
|
ApiError::NotImplemented("LDAP authentication is not configured".to_string())
|
||||||
|
})?;
|
||||||
|
|
||||||
|
// Reject partial service-account configuration: having exactly one of
|
||||||
|
// search_bind_dn / search_bind_password is almost certainly a config
|
||||||
|
// error and would silently fall back to anonymous search, which is a
|
||||||
|
// very different security posture than the admin intended.
|
||||||
|
let has_dn = config.search_bind_dn.is_some();
|
||||||
|
let has_pw = config.search_bind_password.is_some();
|
||||||
|
if has_dn != has_pw {
|
||||||
|
let missing = if has_dn {
|
||||||
|
"search_bind_password"
|
||||||
|
} else {
|
||||||
|
"search_bind_dn"
|
||||||
|
};
|
||||||
|
return Err(ApiError::InternalServerError(format!(
|
||||||
|
"LDAP misconfiguration: search_bind_dn and search_bind_password must both be set \
|
||||||
|
or both be omitted (missing {missing})"
|
||||||
|
)));
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(config)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Build an `LdapConnSettings` from the config.
|
||||||
|
fn conn_settings(config: &LdapConfig) -> LdapConnSettings {
|
||||||
|
let mut settings = LdapConnSettings::new();
|
||||||
|
if config.starttls {
|
||||||
|
settings = settings.set_starttls(true);
|
||||||
|
}
|
||||||
|
if config.danger_skip_tls_verify {
|
||||||
|
settings = settings.set_no_tls_verify(true);
|
||||||
|
}
|
||||||
|
settings
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Open a new LDAP connection.
|
||||||
|
async fn connect(config: &LdapConfig) -> Result<Ldap, ApiError> {
|
||||||
|
let settings = conn_settings(config);
|
||||||
|
let (conn, ldap) = LdapConnAsync::with_settings(settings, &config.url)
|
||||||
|
.await
|
||||||
|
.map_err(|err| {
|
||||||
|
ApiError::InternalServerError(format!("Failed to connect to LDAP server: {err}"))
|
||||||
|
})?;
|
||||||
|
// Drive the connection in the background
|
||||||
|
ldap3::drive!(conn);
|
||||||
|
Ok(ldap)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Direct-bind authentication: construct the DN from the template and bind.
|
||||||
|
async fn direct_bind(
|
||||||
|
config: &LdapConfig,
|
||||||
|
login: &str,
|
||||||
|
password: &str,
|
||||||
|
) -> Result<LdapUserClaims, ApiError> {
|
||||||
|
let template = config.bind_dn_template.as_deref().unwrap_or_default();
|
||||||
|
// Escape the login value for safe interpolation into a Distinguished Name
|
||||||
|
// (RFC 4514). Without this, characters like `,`, `+`, `"`, `\`, `<`, `>`,
|
||||||
|
// `;`, `=`, NUL, `#` (leading), or space (leading/trailing) in the username
|
||||||
|
// would alter the DN structure.
|
||||||
|
let escaped_login = dn_escape(login);
|
||||||
|
let bind_dn = template.replace("{login}", &escaped_login);
|
||||||
|
|
||||||
|
let mut ldap = connect(config).await?;
|
||||||
|
|
||||||
|
// Bind as the user
|
||||||
|
let result = ldap
|
||||||
|
.simple_bind(&bind_dn, password)
|
||||||
|
.await
|
||||||
|
.map_err(|err| ApiError::InternalServerError(format!("LDAP bind failed: {err}")))?;
|
||||||
|
|
||||||
|
if result.rc != 0 {
|
||||||
|
let _ = ldap.unbind().await;
|
||||||
|
return Err(ApiError::Unauthorized(
|
||||||
|
"Invalid LDAP credentials".to_string(),
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
|
// Fetch user attributes
|
||||||
|
let claims = fetch_user_attributes(config, &mut ldap, &bind_dn).await?;
|
||||||
|
|
||||||
|
let _ = ldap.unbind().await;
|
||||||
|
Ok(claims)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Search-and-bind authentication:
|
||||||
|
/// 1. Bind as the service account (or anonymous)
|
||||||
|
/// 2. Search for the user entry (must match exactly one)
|
||||||
|
/// 3. Re-bind as the user with their DN + password
|
||||||
|
async fn search_and_bind(
|
||||||
|
config: &LdapConfig,
|
||||||
|
login: &str,
|
||||||
|
password: &str,
|
||||||
|
) -> Result<LdapUserClaims, ApiError> {
|
||||||
|
let search_base = config.user_search_base.as_deref().ok_or_else(|| {
|
||||||
|
ApiError::InternalServerError(
|
||||||
|
"LDAP user_search_base is required when bind_dn_template is not set".to_string(),
|
||||||
|
)
|
||||||
|
})?;
|
||||||
|
|
||||||
|
let mut ldap = connect(config).await?;
|
||||||
|
|
||||||
|
// Step 1: Bind as service account or anonymous.
|
||||||
|
// Partial config (only one of dn/password) is already rejected by
|
||||||
|
// ldap_config(), so this match is exhaustive over valid states.
|
||||||
|
if let (Some(bind_dn), Some(bind_pw)) = (
|
||||||
|
config.search_bind_dn.as_deref(),
|
||||||
|
config.search_bind_password.as_deref(),
|
||||||
|
) {
|
||||||
|
let result = ldap.simple_bind(bind_dn, bind_pw).await.map_err(|err| {
|
||||||
|
ApiError::InternalServerError(format!("LDAP service bind failed: {err}"))
|
||||||
|
})?;
|
||||||
|
if result.rc != 0 {
|
||||||
|
let _ = ldap.unbind().await;
|
||||||
|
return Err(ApiError::InternalServerError(
|
||||||
|
"LDAP service account bind failed — check search_bind_dn and search_bind_password"
|
||||||
|
.to_string(),
|
||||||
|
));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// If no service account, we proceed with an anonymous connection (already connected)
|
||||||
|
|
||||||
|
// Step 2: Search for the user.
|
||||||
|
// Escape the login value for safe interpolation into an LDAP search filter
|
||||||
|
// (RFC 4515). Without this, characters like `(`, `)`, `*`, `\`, and NUL in
|
||||||
|
// the username could broaden the filter, match unintended entries, or break
|
||||||
|
// the search entirely.
|
||||||
|
let escaped_login = ldap_escape(login);
|
||||||
|
let filter = config.user_filter.replace("{login}", &escaped_login);
|
||||||
|
let attrs = vec![
|
||||||
|
config.login_attr.as_str(),
|
||||||
|
config.email_attr.as_str(),
|
||||||
|
config.display_name_attr.as_str(),
|
||||||
|
config.group_attr.as_str(),
|
||||||
|
"dn",
|
||||||
|
];
|
||||||
|
|
||||||
|
let (results, _result) = ldap
|
||||||
|
.search(search_base, Scope::Subtree, &filter, attrs)
|
||||||
|
.await
|
||||||
|
.map_err(|err| ApiError::InternalServerError(format!("LDAP user search failed: {err}")))?
|
||||||
|
.success()
|
||||||
|
.map_err(|err| ApiError::InternalServerError(format!("LDAP search error: {err}")))?;
|
||||||
|
|
||||||
|
// The search must return exactly one entry. Zero means the user was not
|
||||||
|
// found; more than one means the filter or directory layout is ambiguous
|
||||||
|
// and we must not guess which identity to authenticate.
|
||||||
|
let result_count = results.len();
|
||||||
|
if result_count == 0 {
|
||||||
|
let _ = ldap.unbind().await;
|
||||||
|
return Err(ApiError::Unauthorized(
|
||||||
|
"Invalid LDAP credentials".to_string(),
|
||||||
|
));
|
||||||
|
}
|
||||||
|
if result_count > 1 {
|
||||||
|
let _ = ldap.unbind().await;
|
||||||
|
return Err(ApiError::InternalServerError(format!(
|
||||||
|
"LDAP user search returned {result_count} entries (expected exactly 1) — \
|
||||||
|
tighten the user_filter or user_search_base to ensure uniqueness"
|
||||||
|
)));
|
||||||
|
}
|
||||||
|
|
||||||
|
// SAFETY: result_count == 1 guaranteed by the checks above.
|
||||||
|
let entry = results
|
||||||
|
.into_iter()
|
||||||
|
.next()
|
||||||
|
.expect("checked result_count == 1");
|
||||||
|
let search_entry = SearchEntry::construct(entry);
|
||||||
|
let user_dn = search_entry.dn.clone();
|
||||||
|
|
||||||
|
// Step 3: Re-bind as the user
|
||||||
|
let result = ldap
|
||||||
|
.simple_bind(&user_dn, password)
|
||||||
|
.await
|
||||||
|
.map_err(|err| ApiError::InternalServerError(format!("LDAP user bind failed: {err}")))?;
|
||||||
|
if result.rc != 0 {
|
||||||
|
let _ = ldap.unbind().await;
|
||||||
|
return Err(ApiError::Unauthorized(
|
||||||
|
"Invalid LDAP credentials".to_string(),
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
|
let claims = extract_claims(config, &search_entry);
|
||||||
|
let _ = ldap.unbind().await;
|
||||||
|
Ok(claims)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Fetch the user's LDAP attributes after a successful bind.
|
||||||
|
async fn fetch_user_attributes(
|
||||||
|
config: &LdapConfig,
|
||||||
|
ldap: &mut Ldap,
|
||||||
|
user_dn: &str,
|
||||||
|
) -> Result<LdapUserClaims, ApiError> {
|
||||||
|
let attrs = vec![
|
||||||
|
config.login_attr.as_str(),
|
||||||
|
config.email_attr.as_str(),
|
||||||
|
config.display_name_attr.as_str(),
|
||||||
|
config.group_attr.as_str(),
|
||||||
|
];
|
||||||
|
|
||||||
|
let (results, _result) = ldap
|
||||||
|
.search(user_dn, Scope::Base, "(objectClass=*)", attrs)
|
||||||
|
.await
|
||||||
|
.map_err(|err| {
|
||||||
|
ApiError::InternalServerError(format!(
|
||||||
|
"LDAP attribute fetch failed for DN {user_dn}: {err}"
|
||||||
|
))
|
||||||
|
})?
|
||||||
|
.success()
|
||||||
|
.map_err(|err| {
|
||||||
|
ApiError::InternalServerError(format!("LDAP attribute search error: {err}"))
|
||||||
|
})?;
|
||||||
|
|
||||||
|
let entry = results.into_iter().next().ok_or_else(|| {
|
||||||
|
ApiError::InternalServerError(format!("LDAP entry not found for DN: {user_dn}"))
|
||||||
|
})?;
|
||||||
|
let search_entry = SearchEntry::construct(entry);
|
||||||
|
|
||||||
|
Ok(extract_claims(config, &search_entry))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Extract user claims from an LDAP search entry.
|
||||||
|
fn extract_claims(config: &LdapConfig, entry: &SearchEntry) -> LdapUserClaims {
|
||||||
|
let first_attr =
|
||||||
|
|name: &str| -> Option<String> { entry.attrs.get(name).and_then(|v| v.first()).cloned() };
|
||||||
|
|
||||||
|
let groups = entry
|
||||||
|
.attrs
|
||||||
|
.get(&config.group_attr)
|
||||||
|
.cloned()
|
||||||
|
.unwrap_or_default();
|
||||||
|
|
||||||
|
LdapUserClaims {
|
||||||
|
server_url: config.url.clone(),
|
||||||
|
dn: entry.dn.clone(),
|
||||||
|
login: first_attr(&config.login_attr),
|
||||||
|
email: first_attr(&config.email_attr),
|
||||||
|
display_name: first_attr(&config.display_name_attr),
|
||||||
|
groups,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Upsert an identity row for the LDAP-authenticated user.
|
||||||
|
async fn upsert_identity(
|
||||||
|
state: &SharedState,
|
||||||
|
claims: &LdapUserClaims,
|
||||||
|
) -> Result<attune_common::models::identity::Identity, ApiError> {
|
||||||
|
let existing =
|
||||||
|
IdentityRepository::find_by_ldap_dn(&state.db, &claims.server_url, &claims.dn).await?;
|
||||||
|
let desired_login = derive_login(claims);
|
||||||
|
let display_name = claims.display_name.clone();
|
||||||
|
let attributes = json!({ "ldap": claims });
|
||||||
|
|
||||||
|
match existing {
|
||||||
|
Some(identity) => {
|
||||||
|
let updated = UpdateIdentityInput {
|
||||||
|
display_name,
|
||||||
|
password_hash: None,
|
||||||
|
attributes: Some(attributes),
|
||||||
|
};
|
||||||
|
IdentityRepository::update(&state.db, identity.id, updated)
|
||||||
|
.await
|
||||||
|
.map_err(Into::into)
|
||||||
|
}
|
||||||
|
None => {
|
||||||
|
// Avoid login collisions
|
||||||
|
let login = match IdentityRepository::find_by_login(&state.db, &desired_login).await? {
|
||||||
|
Some(_) => fallback_dn_login(claims),
|
||||||
|
None => desired_login,
|
||||||
|
};
|
||||||
|
|
||||||
|
IdentityRepository::create(
|
||||||
|
&state.db,
|
||||||
|
CreateIdentityInput {
|
||||||
|
login,
|
||||||
|
display_name,
|
||||||
|
password_hash: None,
|
||||||
|
attributes,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
.await
|
||||||
|
.map_err(Into::into)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Derive the login name from LDAP claims.
|
||||||
|
fn derive_login(claims: &LdapUserClaims) -> String {
|
||||||
|
claims
|
||||||
|
.login
|
||||||
|
.clone()
|
||||||
|
.or_else(|| claims.email.clone())
|
||||||
|
.unwrap_or_else(|| fallback_dn_login(claims))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Generate a deterministic fallback login from the LDAP server URL + DN.
|
||||||
|
fn fallback_dn_login(claims: &LdapUserClaims) -> String {
|
||||||
|
let mut hasher = Sha256::new();
|
||||||
|
hasher.update(claims.server_url.as_bytes());
|
||||||
|
hasher.update(b":");
|
||||||
|
hasher.update(claims.dn.as_bytes());
|
||||||
|
let digest = hex::encode(hasher.finalize());
|
||||||
|
format!("ldap:{}", &digest[..24])
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use super::*;
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn direct_bind_dn_escapes_special_characters() {
|
||||||
|
// Simulate what direct_bind does with the template
|
||||||
|
let template = "uid={login},ou=users,dc=example,dc=com";
|
||||||
|
let malicious_login = "admin,ou=admins,dc=evil,dc=com";
|
||||||
|
let escaped = dn_escape(malicious_login);
|
||||||
|
let bind_dn = template.replace("{login}", &escaped);
|
||||||
|
// The commas in the login value must be escaped so they don't
|
||||||
|
// introduce additional RDN components.
|
||||||
|
assert!(
|
||||||
|
bind_dn.contains("\\2c"),
|
||||||
|
"commas in login must be escaped in DN: {bind_dn}"
|
||||||
|
);
|
||||||
|
assert!(
|
||||||
|
bind_dn.starts_with("uid=admin\\2cou\\3dadmins\\2cdc\\3devil\\2cdc\\3dcom,ou=users"),
|
||||||
|
"DN structure must be preserved: {bind_dn}"
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn search_filter_escapes_special_characters() {
|
||||||
|
let filter_template = "(uid={login})";
|
||||||
|
let malicious_login = "admin)(|(uid=*))";
|
||||||
|
let escaped = ldap_escape(malicious_login);
|
||||||
|
let filter = filter_template.replace("{login}", &escaped);
|
||||||
|
// The parentheses and asterisk must be escaped so they don't
|
||||||
|
// alter the filter structure.
|
||||||
|
assert!(
|
||||||
|
!filter.contains(")("),
|
||||||
|
"parentheses in login must be escaped in filter: {filter}"
|
||||||
|
);
|
||||||
|
assert!(
|
||||||
|
filter.contains("\\28"),
|
||||||
|
"open-paren must be hex-escaped: {filter}"
|
||||||
|
);
|
||||||
|
assert!(
|
||||||
|
filter.contains("\\29"),
|
||||||
|
"close-paren must be hex-escaped: {filter}"
|
||||||
|
);
|
||||||
|
assert!(
|
||||||
|
filter.contains("\\2a"),
|
||||||
|
"asterisk must be hex-escaped: {filter}"
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn dn_escape_preserves_safe_usernames() {
|
||||||
|
let safe = "jdoe";
|
||||||
|
let escaped = dn_escape(safe);
|
||||||
|
assert_eq!(escaped.as_ref(), "jdoe");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn filter_escape_preserves_safe_usernames() {
|
||||||
|
let safe = "jdoe";
|
||||||
|
let escaped = ldap_escape(safe);
|
||||||
|
assert_eq!(escaped.as_ref(), "jdoe");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn fallback_dn_login_is_deterministic() {
|
||||||
|
let claims = LdapUserClaims {
|
||||||
|
server_url: "ldap://ldap.example.com".to_string(),
|
||||||
|
dn: "uid=test,ou=users,dc=example,dc=com".to_string(),
|
||||||
|
login: None,
|
||||||
|
email: None,
|
||||||
|
display_name: None,
|
||||||
|
groups: vec![],
|
||||||
|
};
|
||||||
|
let a = fallback_dn_login(&claims);
|
||||||
|
let b = fallback_dn_login(&claims);
|
||||||
|
assert_eq!(a, b);
|
||||||
|
assert!(a.starts_with("ldap:"));
|
||||||
|
assert_eq!(a.len(), "ldap:".len() + 24);
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -2,7 +2,7 @@
|
|||||||
|
|
||||||
use axum::{
|
use axum::{
|
||||||
extract::{Request, State},
|
extract::{Request, State},
|
||||||
http::{header::AUTHORIZATION, StatusCode},
|
http::{header::AUTHORIZATION, HeaderMap, StatusCode},
|
||||||
middleware::Next,
|
middleware::Next,
|
||||||
response::{IntoResponse, Response},
|
response::{IntoResponse, Response},
|
||||||
Json,
|
Json,
|
||||||
@@ -14,6 +14,8 @@ use attune_common::auth::jwt::{
|
|||||||
extract_token_from_header, validate_token, Claims, JwtConfig, TokenType,
|
extract_token_from_header, validate_token, Claims, JwtConfig, TokenType,
|
||||||
};
|
};
|
||||||
|
|
||||||
|
use super::oidc::{cookie_authenticated_user, ACCESS_COOKIE_NAME};
|
||||||
|
|
||||||
/// Authentication middleware state
|
/// Authentication middleware state
|
||||||
#[derive(Clone)]
|
#[derive(Clone)]
|
||||||
pub struct AuthMiddleware {
|
pub struct AuthMiddleware {
|
||||||
@@ -50,21 +52,7 @@ pub async fn require_auth(
|
|||||||
mut request: Request,
|
mut request: Request,
|
||||||
next: Next,
|
next: Next,
|
||||||
) -> Result<Response, AuthError> {
|
) -> Result<Response, AuthError> {
|
||||||
// Extract Authorization header
|
let claims = extract_claims(request.headers(), &auth.jwt_config)?;
|
||||||
let auth_header = request
|
|
||||||
.headers()
|
|
||||||
.get(AUTHORIZATION)
|
|
||||||
.and_then(|h| h.to_str().ok())
|
|
||||||
.ok_or(AuthError::MissingToken)?;
|
|
||||||
|
|
||||||
// Extract token from Bearer scheme
|
|
||||||
let token = extract_token_from_header(auth_header).ok_or(AuthError::InvalidToken)?;
|
|
||||||
|
|
||||||
// Validate token
|
|
||||||
let claims = validate_token(token, &auth.jwt_config).map_err(|e| match e {
|
|
||||||
super::jwt::JwtError::Expired => AuthError::ExpiredToken,
|
|
||||||
_ => AuthError::InvalidToken,
|
|
||||||
})?;
|
|
||||||
|
|
||||||
// Add claims to request extensions
|
// Add claims to request extensions
|
||||||
request
|
request
|
||||||
@@ -90,22 +78,13 @@ impl axum::extract::FromRequestParts<crate::state::SharedState> for RequireAuth
|
|||||||
return Ok(RequireAuth(user.clone()));
|
return Ok(RequireAuth(user.clone()));
|
||||||
}
|
}
|
||||||
|
|
||||||
// Otherwise, extract and validate token directly from header
|
let claims = if let Some(user) =
|
||||||
// Extract Authorization header
|
cookie_authenticated_user(&parts.headers, state).map_err(map_cookie_auth_error)?
|
||||||
let auth_header = parts
|
{
|
||||||
.headers
|
user.claims
|
||||||
.get(AUTHORIZATION)
|
} else {
|
||||||
.and_then(|h| h.to_str().ok())
|
extract_claims(&parts.headers, &state.jwt_config)?
|
||||||
.ok_or(AuthError::MissingToken)?;
|
};
|
||||||
|
|
||||||
// Extract token from Bearer scheme
|
|
||||||
let token = extract_token_from_header(auth_header).ok_or(AuthError::InvalidToken)?;
|
|
||||||
|
|
||||||
// Validate token using jwt_config from app state
|
|
||||||
let claims = validate_token(token, &state.jwt_config).map_err(|e| match e {
|
|
||||||
super::jwt::JwtError::Expired => AuthError::ExpiredToken,
|
|
||||||
_ => AuthError::InvalidToken,
|
|
||||||
})?;
|
|
||||||
|
|
||||||
// Allow access, sensor, and execution-scoped tokens
|
// Allow access, sensor, and execution-scoped tokens
|
||||||
if claims.token_type != TokenType::Access
|
if claims.token_type != TokenType::Access
|
||||||
@@ -119,6 +98,33 @@ impl axum::extract::FromRequestParts<crate::state::SharedState> for RequireAuth
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn extract_claims(headers: &HeaderMap, jwt_config: &JwtConfig) -> Result<Claims, AuthError> {
|
||||||
|
if let Some(auth_header) = headers.get(AUTHORIZATION).and_then(|h| h.to_str().ok()) {
|
||||||
|
let token = extract_token_from_header(auth_header).ok_or(AuthError::InvalidToken)?;
|
||||||
|
return validate_token(token, jwt_config).map_err(|e| match e {
|
||||||
|
super::jwt::JwtError::Expired => AuthError::ExpiredToken,
|
||||||
|
_ => AuthError::InvalidToken,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
if headers
|
||||||
|
.get(axum::http::header::COOKIE)
|
||||||
|
.and_then(|value| value.to_str().ok())
|
||||||
|
.is_some_and(|cookies| cookies.contains(ACCESS_COOKIE_NAME))
|
||||||
|
{
|
||||||
|
return Err(AuthError::InvalidToken);
|
||||||
|
}
|
||||||
|
|
||||||
|
Err(AuthError::MissingToken)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn map_cookie_auth_error(error: crate::middleware::error::ApiError) -> AuthError {
|
||||||
|
match error {
|
||||||
|
crate::middleware::error::ApiError::Unauthorized(_) => AuthError::InvalidToken,
|
||||||
|
_ => AuthError::InvalidToken,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
/// Authentication errors
|
/// Authentication errors
|
||||||
#[derive(Debug)]
|
#[derive(Debug)]
|
||||||
pub enum AuthError {
|
pub enum AuthError {
|
||||||
|
|||||||
@@ -1,7 +1,9 @@
|
|||||||
//! Authentication and authorization module
|
//! Authentication and authorization module
|
||||||
|
|
||||||
pub mod jwt;
|
pub mod jwt;
|
||||||
|
pub mod ldap;
|
||||||
pub mod middleware;
|
pub mod middleware;
|
||||||
|
pub mod oidc;
|
||||||
pub mod password;
|
pub mod password;
|
||||||
|
|
||||||
pub use jwt::{generate_token, validate_token, Claims};
|
pub use jwt::{generate_token, validate_token, Claims};
|
||||||
|
|||||||
773
crates/api/src/auth/oidc.rs
Normal file
773
crates/api/src/auth/oidc.rs
Normal file
@@ -0,0 +1,773 @@
|
|||||||
|
//! OpenID Connect helpers for browser login.
|
||||||
|
|
||||||
|
use attune_common::{
|
||||||
|
config::OidcConfig,
|
||||||
|
repositories::{
|
||||||
|
identity::{CreateIdentityInput, IdentityRepository, UpdateIdentityInput},
|
||||||
|
Create, Update,
|
||||||
|
},
|
||||||
|
};
|
||||||
|
use axum::{
|
||||||
|
http::{header, HeaderMap, HeaderValue, StatusCode},
|
||||||
|
response::{IntoResponse, Redirect, Response},
|
||||||
|
};
|
||||||
|
use axum_extra::extract::cookie::{Cookie, SameSite};
|
||||||
|
use cookie::time::Duration as CookieDuration;
|
||||||
|
use jsonwebtoken::{
|
||||||
|
decode, decode_header,
|
||||||
|
jwk::{AlgorithmParameters, JwkSet},
|
||||||
|
Algorithm, DecodingKey, Validation,
|
||||||
|
};
|
||||||
|
use openidconnect::{
|
||||||
|
core::{CoreAuthenticationFlow, CoreClient, CoreProviderMetadata, CoreUserInfoClaims},
|
||||||
|
reqwest::Client as OidcHttpClient,
|
||||||
|
AuthorizationCode, ClientId, ClientSecret, CsrfToken, LocalizedClaim, Nonce,
|
||||||
|
OAuth2TokenResponse, PkceCodeChallenge, PkceCodeVerifier, RedirectUrl, Scope,
|
||||||
|
TokenResponse as OidcTokenResponse,
|
||||||
|
};
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
use serde_json::{json, Value as JsonValue};
|
||||||
|
use sha2::{Digest, Sha256};
|
||||||
|
use url::{form_urlencoded::byte_serialize, Url};
|
||||||
|
|
||||||
|
use crate::{
|
||||||
|
auth::jwt::{generate_access_token, generate_refresh_token, validate_token},
|
||||||
|
dto::{CurrentUserResponse, TokenResponse},
|
||||||
|
middleware::error::ApiError,
|
||||||
|
state::SharedState,
|
||||||
|
};
|
||||||
|
|
||||||
|
pub const ACCESS_COOKIE_NAME: &str = "attune_access_token";
|
||||||
|
pub const REFRESH_COOKIE_NAME: &str = "attune_refresh_token";
|
||||||
|
pub const OIDC_ID_TOKEN_COOKIE_NAME: &str = "attune_oidc_id_token";
|
||||||
|
pub const OIDC_STATE_COOKIE_NAME: &str = "attune_oidc_state";
|
||||||
|
pub const OIDC_NONCE_COOKIE_NAME: &str = "attune_oidc_nonce";
|
||||||
|
pub const OIDC_PKCE_COOKIE_NAME: &str = "attune_oidc_pkce_verifier";
|
||||||
|
pub const OIDC_REDIRECT_COOKIE_NAME: &str = "attune_oidc_redirect_to";
|
||||||
|
|
||||||
|
const LOGIN_CALLBACK_PATH: &str = "/login/callback";
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Deserialize)]
|
||||||
|
pub struct OidcDiscoveryDocument {
|
||||||
|
#[serde(flatten)]
|
||||||
|
pub metadata: CoreProviderMetadata,
|
||||||
|
#[serde(default)]
|
||||||
|
pub end_session_endpoint: Option<String>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
|
pub struct OidcIdentityClaims {
|
||||||
|
pub issuer: String,
|
||||||
|
pub sub: String,
|
||||||
|
pub email: Option<String>,
|
||||||
|
pub email_verified: Option<bool>,
|
||||||
|
pub name: Option<String>,
|
||||||
|
pub preferred_username: Option<String>,
|
||||||
|
pub groups: Vec<String>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Deserialize)]
|
||||||
|
struct VerifiedIdTokenClaims {
|
||||||
|
iss: String,
|
||||||
|
sub: String,
|
||||||
|
#[serde(default)]
|
||||||
|
nonce: Option<String>,
|
||||||
|
#[serde(default)]
|
||||||
|
email: Option<String>,
|
||||||
|
#[serde(default)]
|
||||||
|
email_verified: Option<bool>,
|
||||||
|
#[serde(default)]
|
||||||
|
name: Option<String>,
|
||||||
|
#[serde(default)]
|
||||||
|
preferred_username: Option<String>,
|
||||||
|
#[serde(default)]
|
||||||
|
groups: Vec<String>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone)]
|
||||||
|
pub struct OidcAuthenticatedIdentity {
|
||||||
|
pub current_user: CurrentUserResponse,
|
||||||
|
pub token_response: TokenResponse,
|
||||||
|
pub id_token: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone)]
|
||||||
|
pub struct OidcLoginRedirect {
|
||||||
|
pub authorization_url: String,
|
||||||
|
pub cookies: Vec<Cookie<'static>>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone)]
|
||||||
|
pub struct OidcLogoutRedirect {
|
||||||
|
pub redirect_url: String,
|
||||||
|
pub cookies: Vec<Cookie<'static>>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Deserialize)]
|
||||||
|
pub struct OidcCallbackQuery {
|
||||||
|
pub code: Option<String>,
|
||||||
|
pub state: Option<String>,
|
||||||
|
pub error: Option<String>,
|
||||||
|
pub error_description: Option<String>,
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn build_login_redirect(
|
||||||
|
state: &SharedState,
|
||||||
|
redirect_to: Option<&str>,
|
||||||
|
) -> Result<OidcLoginRedirect, ApiError> {
|
||||||
|
let oidc = oidc_config(state)?;
|
||||||
|
let discovery = fetch_discovery_document(&oidc).await?;
|
||||||
|
let _http_client = OidcHttpClient::builder()
|
||||||
|
.redirect(openidconnect::reqwest::redirect::Policy::none())
|
||||||
|
.build()
|
||||||
|
.map_err(|err| {
|
||||||
|
ApiError::InternalServerError(format!("Failed to build OIDC HTTP client: {err}"))
|
||||||
|
})?;
|
||||||
|
let redirect_uri = RedirectUrl::new(oidc.redirect_uri.clone()).map_err(|err| {
|
||||||
|
ApiError::InternalServerError(format!("Invalid OIDC redirect URI: {err}"))
|
||||||
|
})?;
|
||||||
|
let client_secret = oidc.client_secret.clone().ok_or_else(|| {
|
||||||
|
ApiError::InternalServerError("OIDC client secret is missing".to_string())
|
||||||
|
})?;
|
||||||
|
let client = CoreClient::from_provider_metadata(
|
||||||
|
discovery.metadata.clone(),
|
||||||
|
ClientId::new(oidc.client_id.clone()),
|
||||||
|
Some(ClientSecret::new(client_secret)),
|
||||||
|
)
|
||||||
|
.set_redirect_uri(redirect_uri);
|
||||||
|
|
||||||
|
let redirect_target = sanitize_redirect_target(redirect_to);
|
||||||
|
let pkce = PkceCodeChallenge::new_random_sha256();
|
||||||
|
let (auth_url, csrf_state, nonce) = client
|
||||||
|
.authorize_url(
|
||||||
|
CoreAuthenticationFlow::AuthorizationCode,
|
||||||
|
CsrfToken::new_random,
|
||||||
|
Nonce::new_random,
|
||||||
|
)
|
||||||
|
.add_scope(Scope::new("openid".to_string()))
|
||||||
|
.add_scope(Scope::new("email".to_string()))
|
||||||
|
.add_scope(Scope::new("profile".to_string()))
|
||||||
|
.add_scopes(
|
||||||
|
oidc.scopes
|
||||||
|
.iter()
|
||||||
|
.filter(|scope| !matches!(scope.as_str(), "openid" | "email" | "profile"))
|
||||||
|
.cloned()
|
||||||
|
.map(Scope::new),
|
||||||
|
)
|
||||||
|
.set_pkce_challenge(pkce.0)
|
||||||
|
.url();
|
||||||
|
|
||||||
|
Ok(OidcLoginRedirect {
|
||||||
|
authorization_url: auth_url.to_string(),
|
||||||
|
cookies: vec![
|
||||||
|
build_cookie(
|
||||||
|
state,
|
||||||
|
OIDC_STATE_COOKIE_NAME,
|
||||||
|
csrf_state.secret().to_string(),
|
||||||
|
600,
|
||||||
|
true,
|
||||||
|
),
|
||||||
|
build_cookie(
|
||||||
|
state,
|
||||||
|
OIDC_NONCE_COOKIE_NAME,
|
||||||
|
nonce.secret().to_string(),
|
||||||
|
600,
|
||||||
|
true,
|
||||||
|
),
|
||||||
|
build_cookie(
|
||||||
|
state,
|
||||||
|
OIDC_PKCE_COOKIE_NAME,
|
||||||
|
pkce.1.secret().to_string(),
|
||||||
|
600,
|
||||||
|
true,
|
||||||
|
),
|
||||||
|
build_cookie(
|
||||||
|
state,
|
||||||
|
OIDC_REDIRECT_COOKIE_NAME,
|
||||||
|
redirect_target,
|
||||||
|
600,
|
||||||
|
false,
|
||||||
|
),
|
||||||
|
],
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Completes the OIDC authorization-code flow after the provider redirects back.
///
/// Validates the provider error / state / nonce / PKCE material carried in the
/// callback query against the short-lived cookies set during login, exchanges
/// the authorization code for tokens, verifies the ID token, best-effort
/// enriches claims from the UserInfo endpoint, and upserts a local identity.
///
/// # Errors
/// - `BadRequest` for a missing code or state parameter.
/// - `Unauthorized` for provider-reported errors, missing/mismatched
///   state, nonce, or PKCE material, or a failed token exchange/validation.
/// - `InternalServerError` for OIDC misconfiguration.
pub async fn handle_callback(
    state: &SharedState,
    headers: &HeaderMap,
    query: &OidcCallbackQuery,
) -> Result<OidcAuthenticatedIdentity, ApiError> {
    // Provider-reported errors (e.g. user denied consent) short-circuit the flow.
    if let Some(error) = &query.error {
        let description = query
            .error_description
            .as_deref()
            .unwrap_or("OpenID Connect login failed");
        return Err(ApiError::Unauthorized(format!("{error}: {description}")));
    }

    let code = query
        .code
        .as_ref()
        .ok_or_else(|| ApiError::BadRequest("Missing authorization code".to_string()))?;
    let returned_state = query
        .state
        .as_ref()
        .ok_or_else(|| ApiError::BadRequest("Missing OIDC state".to_string()))?;

    // These cookies were set by the login redirect; all three are required.
    let expected_state = get_cookie_value(headers, OIDC_STATE_COOKIE_NAME)
        .ok_or_else(|| ApiError::Unauthorized("Missing OIDC state cookie".to_string()))?;
    let expected_nonce = get_cookie_value(headers, OIDC_NONCE_COOKIE_NAME)
        .ok_or_else(|| ApiError::Unauthorized("Missing OIDC nonce cookie".to_string()))?;
    let pkce_verifier = get_cookie_value(headers, OIDC_PKCE_COOKIE_NAME)
        .ok_or_else(|| ApiError::Unauthorized("Missing OIDC PKCE verifier cookie".to_string()))?;

    // CSRF protection: the state echoed by the provider must match our cookie.
    if returned_state != &expected_state {
        return Err(ApiError::Unauthorized(
            "OIDC state validation failed".to_string(),
        ));
    }

    let oidc = oidc_config(state)?;
    let discovery = fetch_discovery_document(&oidc).await?;
    // Disable HTTP redirects on the token-exchange client, so the provider
    // cannot bounce the request elsewhere.
    let http_client = OidcHttpClient::builder()
        .redirect(openidconnect::reqwest::redirect::Policy::none())
        .build()
        .map_err(|err| {
            ApiError::InternalServerError(format!("Failed to build OIDC HTTP client: {err}"))
        })?;
    let redirect_uri = RedirectUrl::new(oidc.redirect_uri.clone()).map_err(|err| {
        ApiError::InternalServerError(format!("Invalid OIDC redirect URI: {err}"))
    })?;
    let client_secret = oidc.client_secret.clone().ok_or_else(|| {
        ApiError::InternalServerError("OIDC client secret is missing".to_string())
    })?;
    let client = CoreClient::from_provider_metadata(
        discovery.metadata.clone(),
        ClientId::new(oidc.client_id.clone()),
        Some(ClientSecret::new(client_secret)),
    )
    .set_redirect_uri(redirect_uri);

    // Exchange the code for tokens, proving possession of the PKCE verifier.
    let token_response = client
        .exchange_code(AuthorizationCode::new(code.clone()))
        .map_err(|err| {
            ApiError::InternalServerError(format!("OIDC token request is misconfigured: {err}"))
        })?
        .set_pkce_verifier(PkceCodeVerifier::new(pkce_verifier))
        .request_async(&http_client)
        .await
        .map_err(|err| ApiError::Unauthorized(format!("OIDC token exchange failed: {err}")))?;

    let id_token = token_response.id_token().ok_or_else(|| {
        ApiError::Unauthorized("OIDC provider did not return an ID token".to_string())
    })?;

    // Independently verify the raw ID token (signature, issuer, audience, nonce).
    let raw_id_token = id_token.to_string();
    let claims = verify_id_token(&raw_id_token, &discovery, &oidc, &expected_nonce).await?;

    let mut oidc_claims = OidcIdentityClaims {
        issuer: claims.iss,
        sub: claims.sub,
        email: claims.email,
        email_verified: claims.email_verified,
        name: claims.name,
        preferred_username: claims.preferred_username,
        groups: claims.groups,
    };

    // Best effort: fill in claims missing from the ID token via UserInfo.
    // Failures here are deliberately ignored — the ID token already suffices.
    if let Ok(userinfo_request) = client.user_info(token_response.access_token().to_owned(), None) {
        if let Ok(userinfo) = userinfo_request.request_async(&http_client).await {
            merge_userinfo_claims(&mut oidc_claims, &userinfo);
        }
    }

    // Map the external subject onto a local identity, then mint our own JWTs.
    let identity = upsert_identity(state, &oidc_claims).await?;
    let access_token = generate_access_token(identity.id, &identity.login, &state.jwt_config)?;
    let refresh_token = generate_refresh_token(identity.id, &identity.login, &state.jwt_config)?;

    let token_response = TokenResponse::new(
        access_token,
        refresh_token,
        state.jwt_config.access_token_expiration,
    )
    .with_user(
        identity.id,
        identity.login.clone(),
        identity.display_name.clone(),
    );

    Ok(OidcAuthenticatedIdentity {
        current_user: CurrentUserResponse {
            id: identity.id,
            login: identity.login.clone(),
            display_name: identity.display_name.clone(),
        },
        id_token: raw_id_token,
        token_response,
    })
}
|
||||||
|
|
||||||
|
/// Builds the browser redirect used to log the user out.
///
/// If the provider's discovery document advertises an RP-initiated logout
/// endpoint (`end_session_endpoint`), the redirect targets it with
/// `id_token_hint` (when the ID-token cookie is present),
/// `post_logout_redirect_uri`, and `client_id` appended as query parameters;
/// otherwise it goes straight to the post-logout target (default "/login").
/// All authentication cookies are cleared in either case.
pub async fn build_logout_redirect(
    state: &SharedState,
    headers: &HeaderMap,
) -> Result<OidcLogoutRedirect, ApiError> {
    let oidc = oidc_config(state)?;
    let discovery = fetch_discovery_document(&oidc).await?;
    let post_logout_redirect_uri = oidc
        .post_logout_redirect_uri
        .clone()
        .unwrap_or_else(|| "/login".to_string());

    let redirect_url = if let Some(end_session_endpoint) = discovery.end_session_endpoint {
        let mut url = Url::parse(&end_session_endpoint).map_err(|err| {
            ApiError::InternalServerError(format!("Invalid end_session_endpoint: {err}"))
        })?;
        {
            // Inner scope drops the mutable query-pairs borrow before the URL
            // is serialized below.
            let mut pairs = url.query_pairs_mut();
            if let Some(id_token_hint) = get_cookie_value(headers, OIDC_ID_TOKEN_COOKIE_NAME) {
                pairs.append_pair("id_token_hint", &id_token_hint);
            }
            pairs.append_pair("post_logout_redirect_uri", &post_logout_redirect_uri);
            pairs.append_pair("client_id", &oidc.client_id);
        }
        String::from(url)
    } else {
        // Provider has no RP-initiated logout; only clear our own session.
        post_logout_redirect_uri
    };

    Ok(OidcLogoutRedirect {
        redirect_url,
        cookies: clear_auth_cookies(state),
    })
}
|
||||||
|
|
||||||
|
pub fn clear_auth_cookies(state: &SharedState) -> Vec<Cookie<'static>> {
|
||||||
|
[
|
||||||
|
ACCESS_COOKIE_NAME,
|
||||||
|
REFRESH_COOKIE_NAME,
|
||||||
|
OIDC_ID_TOKEN_COOKIE_NAME,
|
||||||
|
OIDC_STATE_COOKIE_NAME,
|
||||||
|
OIDC_NONCE_COOKIE_NAME,
|
||||||
|
OIDC_PKCE_COOKIE_NAME,
|
||||||
|
OIDC_REDIRECT_COOKIE_NAME,
|
||||||
|
]
|
||||||
|
.into_iter()
|
||||||
|
.map(|name| remove_cookie(state, name))
|
||||||
|
.collect()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn build_auth_cookies(
|
||||||
|
state: &SharedState,
|
||||||
|
token_response: &TokenResponse,
|
||||||
|
id_token: &str,
|
||||||
|
) -> Vec<Cookie<'static>> {
|
||||||
|
let mut cookies = vec![
|
||||||
|
build_cookie(
|
||||||
|
state,
|
||||||
|
ACCESS_COOKIE_NAME,
|
||||||
|
token_response.access_token.clone(),
|
||||||
|
state.jwt_config.access_token_expiration,
|
||||||
|
true,
|
||||||
|
),
|
||||||
|
build_cookie(
|
||||||
|
state,
|
||||||
|
REFRESH_COOKIE_NAME,
|
||||||
|
token_response.refresh_token.clone(),
|
||||||
|
state.jwt_config.refresh_token_expiration,
|
||||||
|
true,
|
||||||
|
),
|
||||||
|
];
|
||||||
|
|
||||||
|
if !id_token.is_empty() {
|
||||||
|
cookies.push(build_cookie(
|
||||||
|
state,
|
||||||
|
OIDC_ID_TOKEN_COOKIE_NAME,
|
||||||
|
id_token.to_string(),
|
||||||
|
state.jwt_config.refresh_token_expiration,
|
||||||
|
true,
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
|
cookies
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn apply_cookies_to_headers(
|
||||||
|
headers: &mut HeaderMap,
|
||||||
|
cookies: &[Cookie<'static>],
|
||||||
|
) -> Result<(), ApiError> {
|
||||||
|
for cookie in cookies {
|
||||||
|
let value = HeaderValue::from_str(&cookie.to_string()).map_err(|err| {
|
||||||
|
ApiError::InternalServerError(format!("Failed to serialize cookie header: {err}"))
|
||||||
|
})?;
|
||||||
|
headers.append(header::SET_COOKIE, value);
|
||||||
|
}
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Builds the final browser redirect after a successful OIDC callback.
///
/// Tokens are delivered two ways: in the URL fragment (fragments are not sent
/// back to servers in subsequent requests, so they stay client-side for the
/// SPA to pick up) and as HTTP cookies. The transient state/nonce/PKCE/
/// redirect cookies from the login step are cleared in the same response.
pub fn oidc_callback_redirect_response(
    state: &SharedState,
    token_response: &TokenResponse,
    redirect_to: Option<String>,
    id_token: &str,
) -> Result<Response, ApiError> {
    // Reject external/absolute targets before echoing them back to the browser.
    let redirect_target = sanitize_redirect_target(redirect_to.as_deref());
    let redirect_url = format!(
        "{LOGIN_CALLBACK_PATH}#access_token={}&refresh_token={}&expires_in={}&redirect_to={}",
        encode_fragment_value(&token_response.access_token),
        encode_fragment_value(&token_response.refresh_token),
        token_response.expires_in,
        encode_fragment_value(&redirect_target),
    );

    let mut response = Redirect::temporary(&redirect_url).into_response();
    let mut cookies = build_auth_cookies(state, token_response, id_token);
    // One-shot login cookies are no longer needed once the flow completes.
    cookies.push(remove_cookie(state, OIDC_STATE_COOKIE_NAME));
    cookies.push(remove_cookie(state, OIDC_NONCE_COOKIE_NAME));
    cookies.push(remove_cookie(state, OIDC_PKCE_COOKIE_NAME));
    cookies.push(remove_cookie(state, OIDC_REDIRECT_COOKIE_NAME));
    apply_cookies_to_headers(response.headers_mut(), &cookies)?;
    Ok(response)
}
|
||||||
|
|
||||||
|
pub fn cookie_authenticated_user(
|
||||||
|
headers: &HeaderMap,
|
||||||
|
state: &SharedState,
|
||||||
|
) -> Result<Option<crate::auth::middleware::AuthenticatedUser>, ApiError> {
|
||||||
|
let Some(token) = get_cookie_value(headers, ACCESS_COOKIE_NAME) else {
|
||||||
|
return Ok(None);
|
||||||
|
};
|
||||||
|
|
||||||
|
let claims = validate_token(&token, &state.jwt_config).map_err(ApiError::from)?;
|
||||||
|
Ok(Some(crate::auth::middleware::AuthenticatedUser { claims }))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_cookie_value(headers: &HeaderMap, name: &str) -> Option<String> {
|
||||||
|
headers
|
||||||
|
.get_all(header::COOKIE)
|
||||||
|
.iter()
|
||||||
|
.filter_map(|value| value.to_str().ok())
|
||||||
|
.flat_map(|value| value.split(';'))
|
||||||
|
.filter_map(|part| {
|
||||||
|
let mut pieces = part.trim().splitn(2, '=');
|
||||||
|
let key = pieces.next()?.trim();
|
||||||
|
let value = pieces.next()?.trim();
|
||||||
|
if key == name {
|
||||||
|
Some(value.to_string())
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
}
|
||||||
|
})
|
||||||
|
.next()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn oidc_config(state: &SharedState) -> Result<OidcConfig, ApiError> {
|
||||||
|
state
|
||||||
|
.config
|
||||||
|
.security
|
||||||
|
.oidc
|
||||||
|
.clone()
|
||||||
|
.filter(|oidc| oidc.enabled)
|
||||||
|
.ok_or_else(|| {
|
||||||
|
ApiError::NotImplemented("OIDC authentication is not configured".to_string())
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Downloads and deserializes the provider's OIDC discovery document.
///
/// Fetched fresh on each call — no caching here; callers invoke this per
/// login/logout request.
async fn fetch_discovery_document(oidc: &OidcConfig) -> Result<OidcDiscoveryDocument, ApiError> {
    let discovery = reqwest::get(&oidc.discovery_url).await.map_err(|err| {
        ApiError::InternalServerError(format!("Failed to fetch OIDC discovery document: {err}"))
    })?;

    // Surface non-2xx responses explicitly instead of failing opaquely during
    // JSON deserialization below.
    if !discovery.status().is_success() {
        return Err(ApiError::InternalServerError(format!(
            "OIDC discovery request failed with status {}",
            discovery.status()
        )));
    }

    discovery
        .json::<OidcDiscoveryDocument>()
        .await
        .map_err(|err| {
            ApiError::InternalServerError(format!("Failed to parse OIDC discovery document: {err}"))
        })
}
|
||||||
|
|
||||||
|
/// Creates or updates a local identity for a verified OIDC login.
///
/// Lookup is by (issuer, subject). On a match, the display name and raw OIDC
/// claims (stored under the `"oidc"` attributes key) are refreshed. Otherwise
/// a new identity is created; if the preferred login is already taken by
/// another identity, a hash-derived login is used instead.
async fn upsert_identity(
    state: &SharedState,
    oidc_claims: &OidcIdentityClaims,
) -> Result<attune_common::models::identity::Identity, ApiError> {
    let existing_by_subject =
        IdentityRepository::find_by_oidc_subject(&state.db, &oidc_claims.issuer, &oidc_claims.sub)
            .await?;
    let desired_login = derive_login(oidc_claims);
    let display_name = derive_display_name(oidc_claims);
    let attributes = json!({
        "oidc": oidc_claims,
    });

    match existing_by_subject {
        Some(identity) => {
            // Known subject: refresh profile data; never touch any local
            // password (password_hash stays None on purpose).
            let updated = UpdateIdentityInput {
                display_name,
                password_hash: None,
                attributes: Some(attributes.clone()),
            };
            IdentityRepository::update(&state.db, identity.id, updated)
                .await
                .map_err(Into::into)
        }
        None => {
            // New subject: avoid login collisions with pre-existing identities
            // by falling back to a deterministic hashed login.
            let login = match IdentityRepository::find_by_login(&state.db, &desired_login).await? {
                Some(_) => fallback_subject_login(oidc_claims),
                None => desired_login,
            };

            IdentityRepository::create(
                &state.db,
                CreateIdentityInput {
                    login,
                    display_name,
                    password_hash: None,
                    attributes,
                },
            )
            .await
            .map_err(Into::into)
        }
    }
}
|
||||||
|
|
||||||
|
fn derive_login(oidc_claims: &OidcIdentityClaims) -> String {
|
||||||
|
oidc_claims
|
||||||
|
.email
|
||||||
|
.clone()
|
||||||
|
.or_else(|| oidc_claims.preferred_username.clone())
|
||||||
|
.unwrap_or_else(|| fallback_subject_login(oidc_claims))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Verifies an OIDC ID token's signature and claims against the provider's JWKS.
///
/// Accepts only RS256/RS384/RS512 signatures; validates issuer, audience
/// (our client_id), the required spec claims, and the login-time nonce.
///
/// # Errors
/// `Unauthorized` for any verification failure; `InternalServerError` when
/// the JWKS cannot be fetched or parsed.
async fn verify_id_token(
    raw_id_token: &str,
    discovery: &OidcDiscoveryDocument,
    oidc: &OidcConfig,
    expected_nonce: &str,
) -> Result<VerifiedIdTokenClaims, ApiError> {
    let header = decode_header(raw_id_token).map_err(|err| {
        ApiError::Unauthorized(format!("OIDC ID token header decode failed: {err}"))
    })?;

    // Allowlist of signing algorithms: RSA variants only.
    let algorithm = match header.alg {
        Algorithm::RS256 => Algorithm::RS256,
        Algorithm::RS384 => Algorithm::RS384,
        Algorithm::RS512 => Algorithm::RS512,
        other => {
            return Err(ApiError::Unauthorized(format!(
                "OIDC ID token uses unsupported signing algorithm: {other:?}"
            )))
        }
    };

    // Fetch the provider's key set fresh on every verification (no caching).
    let jwks = reqwest::get(discovery.metadata.jwks_uri().url().as_str())
        .await
        .map_err(|err| ApiError::InternalServerError(format!("Failed to fetch OIDC JWKS: {err}")))?
        .json::<JwkSet>()
        .await
        .map_err(|err| {
            ApiError::InternalServerError(format!("Failed to parse OIDC JWKS: {err}"))
        })?;

    // Select the signature key matching the token's `kid`.
    // NOTE(review): EC keys pass this filter even though only RS* algorithms
    // are accepted above; such a key would then fail during decoding below.
    // Confirm whether EC support is planned or the filter should be RSA-only.
    let jwk = jwks
        .keys
        .iter()
        .find(|jwk| {
            jwk.common.key_id == header.kid
                && matches!(
                    jwk.common.public_key_use,
                    Some(jsonwebtoken::jwk::PublicKeyUse::Signature)
                )
                && matches!(
                    jwk.algorithm,
                    AlgorithmParameters::RSA(_) | AlgorithmParameters::EllipticCurve(_)
                )
        })
        .ok_or_else(|| ApiError::Unauthorized("OIDC signing key not found in JWKS".to_string()))?;

    let decoding_key = DecodingKey::from_jwk(jwk)
        .map_err(|err| ApiError::Unauthorized(format!("OIDC JWK decode failed: {err}")))?;

    let issuer = discovery.metadata.issuer().to_string();
    let mut validation = Validation::new(algorithm);
    validation.set_issuer(&[issuer.as_str()]);
    validation.set_audience(&[oidc.client_id.as_str()]);
    validation.set_required_spec_claims(&["exp", "iat", "iss", "sub", "aud"]);
    // nbf is optional per spec and not all providers emit it.
    validation.validate_nbf = false;

    let token = decode::<VerifiedIdTokenClaims>(raw_id_token, &decoding_key, &validation)
        .map_err(|err| ApiError::Unauthorized(format!("OIDC ID token validation failed: {err}")))?;

    // The nonce binds this token to the login request that set our cookie,
    // preventing token replay from another session.
    if token.claims.nonce.as_deref() != Some(expected_nonce) {
        return Err(ApiError::Unauthorized(
            "OIDC nonce validation failed".to_string(),
        ));
    }

    Ok(token.claims)
}
|
||||||
|
|
||||||
|
fn derive_display_name(oidc_claims: &OidcIdentityClaims) -> Option<String> {
|
||||||
|
oidc_claims
|
||||||
|
.name
|
||||||
|
.clone()
|
||||||
|
.or_else(|| oidc_claims.preferred_username.clone())
|
||||||
|
.or_else(|| oidc_claims.email.clone())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn fallback_subject_login(oidc_claims: &OidcIdentityClaims) -> String {
|
||||||
|
let mut hasher = Sha256::new();
|
||||||
|
hasher.update(oidc_claims.issuer.as_bytes());
|
||||||
|
hasher.update(b":");
|
||||||
|
hasher.update(oidc_claims.sub.as_bytes());
|
||||||
|
let digest = hex::encode(hasher.finalize());
|
||||||
|
format!("oidc:{}", &digest[..24])
|
||||||
|
}
|
||||||
|
|
||||||
|
fn extract_groups_from_claims<T>(claims: &T) -> Vec<String>
|
||||||
|
where
|
||||||
|
T: Serialize,
|
||||||
|
{
|
||||||
|
let Ok(json) = serde_json::to_value(claims) else {
|
||||||
|
return Vec::new();
|
||||||
|
};
|
||||||
|
match json.get("groups") {
|
||||||
|
Some(JsonValue::Array(values)) => values
|
||||||
|
.iter()
|
||||||
|
.filter_map(|value| value.as_str().map(ToString::to_string))
|
||||||
|
.collect(),
|
||||||
|
Some(JsonValue::String(value)) => vec![value.to_string()],
|
||||||
|
_ => Vec::new(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn merge_userinfo_claims(oidc_claims: &mut OidcIdentityClaims, userinfo: &CoreUserInfoClaims) {
|
||||||
|
if oidc_claims.email.is_none() {
|
||||||
|
oidc_claims.email = userinfo.email().map(|email| email.as_str().to_string());
|
||||||
|
}
|
||||||
|
if oidc_claims.name.is_none() {
|
||||||
|
oidc_claims.name = userinfo.name().and_then(first_localized_claim);
|
||||||
|
}
|
||||||
|
if oidc_claims.preferred_username.is_none() {
|
||||||
|
oidc_claims.preferred_username = userinfo
|
||||||
|
.preferred_username()
|
||||||
|
.map(|username| username.as_str().to_string());
|
||||||
|
}
|
||||||
|
if oidc_claims.groups.is_empty() {
|
||||||
|
oidc_claims.groups = extract_groups_from_claims(userinfo.additional_claims());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn first_localized_claim<T>(claim: &LocalizedClaim<T>) -> Option<String>
|
||||||
|
where
|
||||||
|
T: std::ops::Deref<Target = String>,
|
||||||
|
{
|
||||||
|
claim
|
||||||
|
.iter()
|
||||||
|
.next()
|
||||||
|
.map(|(_, value)| value.as_str().to_string())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn build_cookie(
|
||||||
|
state: &SharedState,
|
||||||
|
name: &'static str,
|
||||||
|
value: String,
|
||||||
|
max_age_seconds: i64,
|
||||||
|
http_only: bool,
|
||||||
|
) -> Cookie<'static> {
|
||||||
|
let mut cookie = Cookie::build((name, value))
|
||||||
|
.path("/")
|
||||||
|
.same_site(SameSite::Lax)
|
||||||
|
.http_only(http_only)
|
||||||
|
.max_age(CookieDuration::seconds(max_age_seconds))
|
||||||
|
.build();
|
||||||
|
|
||||||
|
if should_use_secure_cookies(state) {
|
||||||
|
cookie.set_secure(true);
|
||||||
|
}
|
||||||
|
|
||||||
|
cookie
|
||||||
|
}
|
||||||
|
|
||||||
|
fn remove_cookie(state: &SharedState, name: &'static str) -> Cookie<'static> {
|
||||||
|
let mut cookie = Cookie::build((name, String::new()))
|
||||||
|
.path("/")
|
||||||
|
.same_site(SameSite::Lax)
|
||||||
|
.http_only(true)
|
||||||
|
.max_age(CookieDuration::seconds(0))
|
||||||
|
.build();
|
||||||
|
cookie.make_removal();
|
||||||
|
if should_use_secure_cookies(state) {
|
||||||
|
cookie.set_secure(true);
|
||||||
|
}
|
||||||
|
cookie
|
||||||
|
}
|
||||||
|
|
||||||
|
fn should_use_secure_cookies(state: &SharedState) -> bool {
|
||||||
|
state.config.is_production()
|
||||||
|
|| state
|
||||||
|
.config
|
||||||
|
.security
|
||||||
|
.oidc
|
||||||
|
.as_ref()
|
||||||
|
.map(|oidc| oidc.redirect_uri.starts_with("https://"))
|
||||||
|
.unwrap_or(false)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Restricts a post-login redirect target to a same-origin absolute path.
///
/// Anything that is missing, not rooted at `/`, or that browsers would treat
/// as scheme-relative (`//host` — and `/\host`, which browsers normalize to
/// `//host`) falls back to `/`, preventing open redirects.
fn sanitize_redirect_target(redirect_to: Option<&str>) -> String {
    const FALLBACK: &str = "/";
    let Some(target) = redirect_to else {
        return FALLBACK.to_string();
    };
    let is_local_path = target.starts_with('/')
        && !target.starts_with("//")
        // Browsers rewrite backslashes after the leading slash into a second
        // slash, so "/\evil.com" behaves like "//evil.com" — reject it too.
        && !target.starts_with("/\\");
    if is_local_path {
        target.to_string()
    } else {
        FALLBACK.to_string()
    }
}
|
||||||
|
|
||||||
|
/// Builds a redirect response for unauthenticated browser requests.
pub fn unauthorized_redirect(location: &str) -> Response {
    let mut response = Redirect::to(location).into_response();
    // Override whatever status `Redirect::to` chose and force 302 Found.
    *response.status_mut() = StatusCode::FOUND;
    response
}
|
||||||
|
|
||||||
|
/// Percent-encodes a value for safe embedding in the callback URL fragment.
fn encode_fragment_value(value: &str) -> String {
    // byte_serialize escapes every byte per form-urlencoding rules, so token
    // material cannot break the fragment's key=value structure.
    byte_serialize(value.as_bytes()).collect()
}
|
||||||
|
|
||||||
|
#[cfg(test)]
mod tests {
    use super::*;

    /// Only same-origin absolute paths may be used as post-login redirects;
    /// absolute and scheme-relative URLs must fall back to "/".
    #[test]
    fn sanitize_redirect_target_rejects_external_urls() {
        assert_eq!(sanitize_redirect_target(Some("https://example.com")), "/");
        assert_eq!(sanitize_redirect_target(Some("//example.com")), "/");
        assert_eq!(
            sanitize_redirect_target(Some("/executions/42")),
            "/executions/42"
        );
    }

    /// Providers send `groups` either as an array of strings or as one string;
    /// both shapes must normalize to a Vec<String>.
    #[test]
    fn extract_groups_from_claims_accepts_array_and_string() {
        let array_claims = serde_json::json!({ "groups": ["admins", "operators"] });
        let string_claims = serde_json::json!({ "groups": "admins" });

        assert_eq!(
            extract_groups_from_claims(&array_claims),
            vec!["admins".to_string(), "operators".to_string()]
        );
        assert_eq!(
            extract_groups_from_claims(&string_claims),
            vec!["admins".to_string()]
        );
    }
}
|
||||||
149
crates/api/src/authz.rs
Normal file
149
crates/api/src/authz.rs
Normal file
@@ -0,0 +1,149 @@
|
|||||||
|
//! RBAC authorization service for API handlers.
|
||||||
|
//!
|
||||||
|
//! This module evaluates grants assigned to user identities via
|
||||||
|
//! `permission_set` and `permission_assignment`.
|
||||||
|
|
||||||
|
use crate::{
|
||||||
|
auth::{jwt::TokenType, middleware::AuthenticatedUser},
|
||||||
|
middleware::ApiError,
|
||||||
|
};
|
||||||
|
use attune_common::{
|
||||||
|
rbac::{Action, AuthorizationContext, Grant, Resource},
|
||||||
|
repositories::{
|
||||||
|
identity::{IdentityRepository, PermissionSetRepository},
|
||||||
|
FindById,
|
||||||
|
},
|
||||||
|
};
|
||||||
|
use sqlx::PgPool;
|
||||||
|
|
||||||
|
/// A single permission check: a resource/action pair evaluated under a context.
#[derive(Debug, Clone)]
pub struct AuthorizationCheck {
    /// Resource category being accessed (e.g. packs, executions, identities).
    pub resource: Resource,
    /// Action being attempted on the resource.
    pub action: Action,
    /// Evaluation context; `identity_id` and `identity_attributes` are
    /// populated by `AuthorizationService::authorize` before grant evaluation.
    pub context: AuthorizationContext,
}
|
||||||
|
|
||||||
|
/// Evaluates RBAC grants for authenticated identities.
///
/// Cheap to clone; holds only a connection pool.
#[derive(Clone)]
pub struct AuthorizationService {
    /// Database pool used to load identities and their permission sets.
    db: PgPool,
}
|
||||||
|
|
||||||
|
impl AuthorizationService {
    /// Creates a service backed by the given database pool.
    pub fn new(db: PgPool) -> Self {
        Self { db }
    }

    /// Authorizes `user` for `check`, returning `Forbidden` when no grant allows it.
    ///
    /// Loads the identity, injects its id and attributes into the check
    /// context, then evaluates all effective grants.
    pub async fn authorize(
        &self,
        user: &AuthenticatedUser,
        mut check: AuthorizationCheck,
    ) -> Result<(), ApiError> {
        // Non-access tokens are governed by dedicated scope checks in route logic.
        // They are not evaluated through identity RBAC grants.
        if user.claims.token_type != TokenType::Access {
            return Ok(());
        }

        let identity_id = user.identity_id().map_err(|_| {
            ApiError::Unauthorized("Invalid authentication subject in access token".to_string())
        })?;

        // Ensure identity exists and load identity attributes used by attribute constraints.
        let identity = IdentityRepository::find_by_id(&self.db, identity_id)
            .await?
            .ok_or_else(|| ApiError::Unauthorized("Identity not found".to_string()))?;

        check.context.identity_id = identity_id;
        // Only a JSON object becomes attributes; any other shape yields an empty map.
        check.context.identity_attributes = match identity.attributes {
            serde_json::Value::Object(map) => map.into_iter().collect(),
            _ => Default::default(),
        };

        let grants = self.load_effective_grants(identity_id).await?;

        let allowed = Self::is_allowed(&grants, check.resource, check.action, &check.context);

        if !allowed {
            return Err(ApiError::Forbidden(format!(
                "Insufficient permissions: {}:{}",
                resource_name(check.resource),
                action_name(check.action)
            )));
        }

        Ok(())
    }

    /// Returns every grant effective for `user` (empty for non-access tokens).
    pub async fn effective_grants(&self, user: &AuthenticatedUser) -> Result<Vec<Grant>, ApiError> {
        if user.claims.token_type != TokenType::Access {
            return Ok(Vec::new());
        }

        let identity_id = user.identity_id().map_err(|_| {
            ApiError::Unauthorized("Invalid authentication subject in access token".to_string())
        })?;
        self.load_effective_grants(identity_id).await
    }

    /// Pure grant evaluation: true if any grant allows the resource/action
    /// under the given context.
    pub fn is_allowed(
        grants: &[Grant],
        resource: Resource,
        action: Action,
        context: &AuthorizationContext,
    ) -> bool {
        grants.iter().any(|g| g.allows(resource, action, context))
    }

    /// Loads and flattens the grants of every permission set assigned to the
    /// identity. A malformed grant document in any set aborts the whole load.
    async fn load_effective_grants(&self, identity_id: i64) -> Result<Vec<Grant>, ApiError> {
        let permission_sets =
            PermissionSetRepository::find_by_identity(&self.db, identity_id).await?;

        let mut grants = Vec::new();
        for permission_set in permission_sets {
            let set_grants: Vec<Grant> =
                serde_json::from_value(permission_set.grants).map_err(|e| {
                    ApiError::InternalServerError(format!(
                        "Invalid grant schema in permission set '{}': {}",
                        permission_set.r#ref, e
                    ))
                })?;
            grants.extend(set_grants);
        }

        Ok(grants)
    }
}
|
||||||
|
|
||||||
|
fn resource_name(resource: Resource) -> &'static str {
|
||||||
|
match resource {
|
||||||
|
Resource::Packs => "packs",
|
||||||
|
Resource::Actions => "actions",
|
||||||
|
Resource::Rules => "rules",
|
||||||
|
Resource::Triggers => "triggers",
|
||||||
|
Resource::Executions => "executions",
|
||||||
|
Resource::Events => "events",
|
||||||
|
Resource::Enforcements => "enforcements",
|
||||||
|
Resource::Inquiries => "inquiries",
|
||||||
|
Resource::Keys => "keys",
|
||||||
|
Resource::Artifacts => "artifacts",
|
||||||
|
Resource::Workflows => "workflows",
|
||||||
|
Resource::Webhooks => "webhooks",
|
||||||
|
Resource::Analytics => "analytics",
|
||||||
|
Resource::History => "history",
|
||||||
|
Resource::Identities => "identities",
|
||||||
|
Resource::Permissions => "permissions",
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn action_name(action: Action) -> &'static str {
|
||||||
|
match action {
|
||||||
|
Action::Read => "read",
|
||||||
|
Action::Create => "create",
|
||||||
|
Action::Update => "update",
|
||||||
|
Action::Delete => "delete",
|
||||||
|
Action::Execute => "execute",
|
||||||
|
Action::Cancel => "cancel",
|
||||||
|
Action::Respond => "respond",
|
||||||
|
Action::Manage => "manage",
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -76,9 +76,8 @@ pub struct UpdateActionRequest {
|
|||||||
#[schema(example = 1)]
|
#[schema(example = 1)]
|
||||||
pub runtime: Option<i64>,
|
pub runtime: Option<i64>,
|
||||||
|
|
||||||
/// Optional semver version constraint for the runtime (e.g., ">=3.12", ">=3.12,<4.0", "~18.0")
|
/// Optional semver version constraint patch for the runtime.
|
||||||
#[schema(example = ">=3.12", nullable = true)]
|
pub runtime_version_constraint: Option<RuntimeVersionConstraintPatch>,
|
||||||
pub runtime_version_constraint: Option<Option<String>>,
|
|
||||||
|
|
||||||
/// Parameter schema (StackStorm-style with inline required/secret)
|
/// Parameter schema (StackStorm-style with inline required/secret)
|
||||||
#[schema(value_type = Object, nullable = true)]
|
#[schema(value_type = Object, nullable = true)]
|
||||||
@@ -89,6 +88,14 @@ pub struct UpdateActionRequest {
|
|||||||
pub out_schema: Option<JsonValue>,
|
pub out_schema: Option<JsonValue>,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Explicit patch operation for a nullable runtime version constraint.
///
/// Serialized as a tagged object: `{"op": "set", "value": ">=3.12"}` or
/// `{"op": "clear"}`, making "set to null" distinguishable from "unchanged".
#[derive(Debug, Clone, Deserialize, Serialize, ToSchema)]
#[serde(tag = "op", content = "value", rename_all = "snake_case")]
pub enum RuntimeVersionConstraintPatch {
    /// Replace the constraint with the given value.
    Set(String),
    /// Remove the constraint entirely.
    Clear,
}
|
||||||
|
|
||||||
/// Response DTO for action information
|
/// Response DTO for action information
|
||||||
#[derive(Debug, Clone, Serialize, ToSchema)]
|
#[derive(Debug, Clone, Serialize, ToSchema)]
|
||||||
pub struct ActionResponse {
|
pub struct ActionResponse {
|
||||||
|
|||||||
@@ -97,19 +97,41 @@ pub struct UpdateArtifactRequest {
|
|||||||
pub retention_limit: Option<i32>,
|
pub retention_limit: Option<i32>,
|
||||||
|
|
||||||
/// Updated name
|
/// Updated name
|
||||||
pub name: Option<String>,
|
pub name: Option<ArtifactStringPatch>,
|
||||||
|
|
||||||
/// Updated description
|
/// Updated description
|
||||||
pub description: Option<String>,
|
pub description: Option<ArtifactStringPatch>,
|
||||||
|
|
||||||
/// Updated content type
|
/// Updated content type
|
||||||
pub content_type: Option<String>,
|
pub content_type: Option<ArtifactStringPatch>,
|
||||||
|
|
||||||
/// Updated execution ID (re-links artifact to a different execution)
|
/// Updated execution patch (set a new execution ID or clear the link)
|
||||||
pub execution: Option<i64>,
|
pub execution: Option<ArtifactExecutionPatch>,
|
||||||
|
|
||||||
/// Updated structured data (replaces existing data entirely)
|
/// Updated structured data (replaces existing data entirely)
|
||||||
pub data: Option<JsonValue>,
|
pub data: Option<ArtifactJsonPatch>,
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Explicit patch operation for a nullable execution link.
///
/// Tagged serialization (`{"op": "set", "value": 42}` / `{"op": "clear"}`)
/// distinguishes "clear the link" from "leave unchanged".
#[derive(Debug, Clone, Deserialize, Serialize, ToSchema)]
#[serde(tag = "op", content = "value", rename_all = "snake_case")]
pub enum ArtifactExecutionPatch {
    /// Re-link the artifact to the execution with this ID.
    Set(i64),
    /// Detach the artifact from any execution.
    Clear,
}
|
||||||
|
|
||||||
|
/// Explicit patch operation for a nullable string field of an artifact.
#[derive(Debug, Clone, Deserialize, Serialize, ToSchema)]
#[serde(tag = "op", content = "value", rename_all = "snake_case")]
pub enum ArtifactStringPatch {
    /// Replace the field with the given value.
    Set(String),
    /// Clear the field.
    Clear,
}
|
||||||
|
|
||||||
|
/// Explicit patch operation for an artifact's JSON data payload.
#[derive(Debug, Clone, Deserialize, Serialize, ToSchema)]
#[serde(tag = "op", content = "value", rename_all = "snake_case")]
pub enum ArtifactJsonPatch {
    /// Replace the payload with the given JSON value.
    Set(JsonValue),
    /// Clear the payload.
    Clear,
}
|
}
|
||||||
|
|
||||||
/// Request DTO for appending to a progress-type artifact
|
/// Request DTO for appending to a progress-type artifact
|
||||||
|
|||||||
@@ -136,3 +136,63 @@ pub struct CurrentUserResponse {
|
|||||||
#[schema(example = "Administrator")]
|
#[schema(example = "Administrator")]
|
||||||
pub display_name: Option<String>,
|
pub display_name: Option<String>,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Public authentication settings for the login page.
///
/// Intended for unauthenticated consumption; it describes which login
/// mechanisms are available and how to present them — it carries no secrets.
#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)]
pub struct AuthSettingsResponse {
    /// Whether authentication is enabled for the server.
    #[schema(example = true)]
    pub authentication_enabled: bool,

    /// Whether local username/password login is configured.
    #[schema(example = true)]
    pub local_password_enabled: bool,

    /// Whether local username/password login should be shown by default.
    #[schema(example = true)]
    pub local_password_visible_by_default: bool,

    /// Whether OIDC login is configured and enabled.
    #[schema(example = false)]
    pub oidc_enabled: bool,

    /// Whether OIDC login should be shown by default.
    #[schema(example = false)]
    pub oidc_visible_by_default: bool,

    /// Provider name for `?auth=<provider>`.
    #[schema(example = "sso")]
    pub oidc_provider_name: Option<String>,

    /// User-facing provider label for the login button.
    #[schema(example = "Example SSO")]
    pub oidc_provider_label: Option<String>,

    /// Optional icon URL shown beside the provider label.
    #[schema(example = "https://auth.example.com/assets/logo.svg")]
    pub oidc_provider_icon_url: Option<String>,

    /// Whether LDAP login is configured and enabled.
    #[schema(example = false)]
    pub ldap_enabled: bool,

    /// Whether LDAP login should be shown by default.
    #[schema(example = false)]
    pub ldap_visible_by_default: bool,

    /// Provider name for `?auth=<provider>`.
    #[schema(example = "ldap")]
    pub ldap_provider_name: Option<String>,

    /// User-facing provider label for the login button.
    #[schema(example = "Company LDAP")]
    pub ldap_provider_label: Option<String>,

    /// Optional icon URL shown beside the provider label.
    #[schema(example = "https://ldap.example.com/assets/logo.svg")]
    pub ldap_provider_icon_url: Option<String>,

    /// Whether unauthenticated self-service registration is allowed.
    #[schema(example = false)]
    pub self_registration_enabled: bool,
}
|
||||||
|
|||||||
@@ -52,10 +52,14 @@ pub struct ExecutionResponse {
|
|||||||
#[schema(example = 1)]
|
#[schema(example = 1)]
|
||||||
pub enforcement: Option<i64>,
|
pub enforcement: Option<i64>,
|
||||||
|
|
||||||
/// Executor ID (worker/executor that ran this)
|
/// Identity ID that initiated this execution
|
||||||
#[schema(example = 1)]
|
#[schema(example = 1)]
|
||||||
pub executor: Option<i64>,
|
pub executor: Option<i64>,
|
||||||
|
|
||||||
|
/// Worker ID currently assigned to this execution
|
||||||
|
#[schema(example = 1)]
|
||||||
|
pub worker: Option<i64>,
|
||||||
|
|
||||||
/// Execution status
|
/// Execution status
|
||||||
#[schema(example = "succeeded")]
|
#[schema(example = "succeeded")]
|
||||||
pub status: ExecutionStatus,
|
pub status: ExecutionStatus,
|
||||||
@@ -216,6 +220,7 @@ impl From<attune_common::models::execution::Execution> for ExecutionResponse {
|
|||||||
parent: execution.parent,
|
parent: execution.parent,
|
||||||
enforcement: execution.enforcement,
|
enforcement: execution.enforcement,
|
||||||
executor: execution.executor,
|
executor: execution.executor,
|
||||||
|
worker: execution.worker,
|
||||||
status: execution.status,
|
status: execution.status,
|
||||||
result: execution
|
result: execution
|
||||||
.result
|
.result
|
||||||
|
|||||||
@@ -11,7 +11,9 @@ pub mod history;
|
|||||||
pub mod inquiry;
|
pub mod inquiry;
|
||||||
pub mod key;
|
pub mod key;
|
||||||
pub mod pack;
|
pub mod pack;
|
||||||
|
pub mod permission;
|
||||||
pub mod rule;
|
pub mod rule;
|
||||||
|
pub mod runtime;
|
||||||
pub mod trigger;
|
pub mod trigger;
|
||||||
pub mod webhook;
|
pub mod webhook;
|
||||||
pub mod workflow;
|
pub mod workflow;
|
||||||
@@ -28,8 +30,8 @@ pub use artifact::{
|
|||||||
CreateVersionJsonRequest, SetDataRequest, UpdateArtifactRequest,
|
CreateVersionJsonRequest, SetDataRequest, UpdateArtifactRequest,
|
||||||
};
|
};
|
||||||
pub use auth::{
|
pub use auth::{
|
||||||
ChangePasswordRequest, CurrentUserResponse, LoginRequest, RefreshTokenRequest, RegisterRequest,
|
AuthSettingsResponse, ChangePasswordRequest, CurrentUserResponse, LoginRequest,
|
||||||
TokenResponse,
|
RefreshTokenRequest, RegisterRequest, TokenResponse,
|
||||||
};
|
};
|
||||||
pub use common::{
|
pub use common::{
|
||||||
ApiResponse, PaginatedResponse, PaginationMeta, PaginationParams, SuccessResponse,
|
ApiResponse, PaginatedResponse, PaginationMeta, PaginationParams, SuccessResponse,
|
||||||
@@ -48,7 +50,13 @@ pub use inquiry::{
|
|||||||
};
|
};
|
||||||
pub use key::{CreateKeyRequest, KeyQueryParams, KeyResponse, KeySummary, UpdateKeyRequest};
|
pub use key::{CreateKeyRequest, KeyQueryParams, KeyResponse, KeySummary, UpdateKeyRequest};
|
||||||
pub use pack::{CreatePackRequest, PackResponse, PackSummary, UpdatePackRequest};
|
pub use pack::{CreatePackRequest, PackResponse, PackSummary, UpdatePackRequest};
|
||||||
|
pub use permission::{
|
||||||
|
CreateIdentityRequest, CreatePermissionAssignmentRequest, IdentityResponse, IdentitySummary,
|
||||||
|
PermissionAssignmentResponse, PermissionSetQueryParams, PermissionSetSummary,
|
||||||
|
UpdateIdentityRequest,
|
||||||
|
};
|
||||||
pub use rule::{CreateRuleRequest, RuleResponse, RuleSummary, UpdateRuleRequest};
|
pub use rule::{CreateRuleRequest, RuleResponse, RuleSummary, UpdateRuleRequest};
|
||||||
|
pub use runtime::{CreateRuntimeRequest, RuntimeResponse, RuntimeSummary, UpdateRuntimeRequest};
|
||||||
pub use trigger::{
|
pub use trigger::{
|
||||||
CreateSensorRequest, CreateTriggerRequest, SensorResponse, SensorSummary, TriggerResponse,
|
CreateSensorRequest, CreateTriggerRequest, SensorResponse, SensorSummary, TriggerResponse,
|
||||||
TriggerSummary, UpdateSensorRequest, UpdateTriggerRequest,
|
TriggerSummary, UpdateSensorRequest, UpdateTriggerRequest,
|
||||||
|
|||||||
@@ -129,7 +129,7 @@ pub struct UpdatePackRequest {
|
|||||||
|
|
||||||
/// Pack description
|
/// Pack description
|
||||||
#[schema(example = "Enhanced Slack integration with new features")]
|
#[schema(example = "Enhanced Slack integration with new features")]
|
||||||
pub description: Option<String>,
|
pub description: Option<PackDescriptionPatch>,
|
||||||
|
|
||||||
/// Pack version
|
/// Pack version
|
||||||
#[validate(length(min = 1, max = 50))]
|
#[validate(length(min = 1, max = 50))]
|
||||||
@@ -165,6 +165,13 @@ pub struct UpdatePackRequest {
|
|||||||
pub is_standard: Option<bool>,
|
pub is_standard: Option<bool>,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Deserialize, Serialize, ToSchema)]
|
||||||
|
#[serde(tag = "op", content = "value", rename_all = "snake_case")]
|
||||||
|
pub enum PackDescriptionPatch {
|
||||||
|
Set(String),
|
||||||
|
Clear,
|
||||||
|
}
|
||||||
|
|
||||||
/// Response DTO for pack information
|
/// Response DTO for pack information
|
||||||
#[derive(Debug, Clone, Serialize, ToSchema)]
|
#[derive(Debug, Clone, Serialize, ToSchema)]
|
||||||
pub struct PackResponse {
|
pub struct PackResponse {
|
||||||
|
|||||||
65
crates/api/src/dto/permission.rs
Normal file
65
crates/api/src/dto/permission.rs
Normal file
@@ -0,0 +1,65 @@
|
|||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
use serde_json::Value as JsonValue;
|
||||||
|
use utoipa::{IntoParams, ToSchema};
|
||||||
|
use validator::Validate;
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Deserialize, IntoParams)]
|
||||||
|
pub struct PermissionSetQueryParams {
|
||||||
|
#[serde(default)]
|
||||||
|
pub pack_ref: Option<String>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Serialize, ToSchema)]
|
||||||
|
pub struct IdentitySummary {
|
||||||
|
pub id: i64,
|
||||||
|
pub login: String,
|
||||||
|
pub display_name: Option<String>,
|
||||||
|
pub attributes: JsonValue,
|
||||||
|
}
|
||||||
|
|
||||||
|
pub type IdentityResponse = IdentitySummary;
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Serialize, ToSchema)]
|
||||||
|
pub struct PermissionSetSummary {
|
||||||
|
pub id: i64,
|
||||||
|
pub r#ref: String,
|
||||||
|
pub pack_ref: Option<String>,
|
||||||
|
pub label: Option<String>,
|
||||||
|
pub description: Option<String>,
|
||||||
|
pub grants: JsonValue,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Serialize, ToSchema)]
|
||||||
|
pub struct PermissionAssignmentResponse {
|
||||||
|
pub id: i64,
|
||||||
|
pub identity_id: i64,
|
||||||
|
pub permission_set_id: i64,
|
||||||
|
pub permission_set_ref: String,
|
||||||
|
pub created: chrono::DateTime<chrono::Utc>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Deserialize, ToSchema)]
|
||||||
|
pub struct CreatePermissionAssignmentRequest {
|
||||||
|
pub identity_id: Option<i64>,
|
||||||
|
pub identity_login: Option<String>,
|
||||||
|
pub permission_set_ref: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Deserialize, Validate, ToSchema)]
|
||||||
|
pub struct CreateIdentityRequest {
|
||||||
|
#[validate(length(min = 3, max = 255))]
|
||||||
|
pub login: String,
|
||||||
|
#[validate(length(max = 255))]
|
||||||
|
pub display_name: Option<String>,
|
||||||
|
#[validate(length(min = 8, max = 128))]
|
||||||
|
pub password: Option<String>,
|
||||||
|
#[serde(default)]
|
||||||
|
pub attributes: JsonValue,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Deserialize, ToSchema)]
|
||||||
|
pub struct UpdateIdentityRequest {
|
||||||
|
pub display_name: Option<String>,
|
||||||
|
pub password: Option<String>,
|
||||||
|
pub attributes: Option<JsonValue>,
|
||||||
|
}
|
||||||
181
crates/api/src/dto/runtime.rs
Normal file
181
crates/api/src/dto/runtime.rs
Normal file
@@ -0,0 +1,181 @@
|
|||||||
|
//! Runtime DTOs for API requests and responses
|
||||||
|
|
||||||
|
use chrono::{DateTime, Utc};
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
use serde_json::Value as JsonValue;
|
||||||
|
use utoipa::ToSchema;
|
||||||
|
use validator::Validate;
|
||||||
|
|
||||||
|
/// Request DTO for creating a runtime.
|
||||||
|
#[derive(Debug, Clone, Deserialize, Validate, ToSchema)]
|
||||||
|
pub struct CreateRuntimeRequest {
|
||||||
|
/// Unique reference identifier (e.g. "core.python", "core.nodejs")
|
||||||
|
#[validate(length(min = 1, max = 255))]
|
||||||
|
#[schema(example = "core.python")]
|
||||||
|
pub r#ref: String,
|
||||||
|
|
||||||
|
/// Optional pack reference this runtime belongs to
|
||||||
|
#[validate(length(min = 1, max = 255))]
|
||||||
|
#[schema(example = "core", nullable = true)]
|
||||||
|
pub pack_ref: Option<String>,
|
||||||
|
|
||||||
|
/// Optional human-readable description
|
||||||
|
#[validate(length(min = 1))]
|
||||||
|
#[schema(example = "Python runtime with virtualenv support", nullable = true)]
|
||||||
|
pub description: Option<String>,
|
||||||
|
|
||||||
|
/// Display name
|
||||||
|
#[validate(length(min = 1, max = 255))]
|
||||||
|
#[schema(example = "Python")]
|
||||||
|
pub name: String,
|
||||||
|
|
||||||
|
/// Distribution metadata used for verification and platform support
|
||||||
|
#[serde(default)]
|
||||||
|
#[schema(value_type = Object, example = json!({"linux": {"supported": true}}))]
|
||||||
|
pub distributions: JsonValue,
|
||||||
|
|
||||||
|
/// Optional installation metadata
|
||||||
|
#[serde(skip_serializing_if = "Option::is_none")]
|
||||||
|
#[schema(value_type = Object, nullable = true, example = json!({"method": "system"}))]
|
||||||
|
pub installation: Option<JsonValue>,
|
||||||
|
|
||||||
|
/// Runtime execution configuration
|
||||||
|
#[serde(default)]
|
||||||
|
#[schema(value_type = Object, example = json!({"interpreter": {"command": "python3"}}))]
|
||||||
|
pub execution_config: JsonValue,
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Request DTO for updating a runtime.
|
||||||
|
#[derive(Debug, Clone, Deserialize, Validate, ToSchema)]
|
||||||
|
pub struct UpdateRuntimeRequest {
|
||||||
|
/// Optional human-readable description patch.
|
||||||
|
pub description: Option<NullableStringPatch>,
|
||||||
|
|
||||||
|
/// Display name
|
||||||
|
#[validate(length(min = 1, max = 255))]
|
||||||
|
#[schema(example = "Python 3")]
|
||||||
|
pub name: Option<String>,
|
||||||
|
|
||||||
|
/// Distribution metadata used for verification and platform support
|
||||||
|
#[schema(value_type = Object, nullable = true)]
|
||||||
|
pub distributions: Option<JsonValue>,
|
||||||
|
|
||||||
|
/// Optional installation metadata patch.
|
||||||
|
pub installation: Option<NullableJsonPatch>,
|
||||||
|
|
||||||
|
/// Runtime execution configuration
|
||||||
|
#[schema(value_type = Object, nullable = true)]
|
||||||
|
pub execution_config: Option<JsonValue>,
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Explicit patch operation for nullable string fields.
|
||||||
|
#[derive(Debug, Clone, Deserialize, Serialize, ToSchema)]
|
||||||
|
#[serde(tag = "op", content = "value", rename_all = "snake_case")]
|
||||||
|
pub enum NullableStringPatch {
|
||||||
|
#[schema(title = "SetString")]
|
||||||
|
Set(String),
|
||||||
|
Clear,
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Explicit patch operation for nullable JSON fields.
|
||||||
|
#[derive(Debug, Clone, Deserialize, Serialize, ToSchema)]
|
||||||
|
#[serde(tag = "op", content = "value", rename_all = "snake_case")]
|
||||||
|
pub enum NullableJsonPatch {
|
||||||
|
#[schema(title = "SetJson")]
|
||||||
|
Set(JsonValue),
|
||||||
|
Clear,
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Full runtime response.
|
||||||
|
#[derive(Debug, Clone, Serialize, ToSchema)]
|
||||||
|
pub struct RuntimeResponse {
|
||||||
|
#[schema(example = 1)]
|
||||||
|
pub id: i64,
|
||||||
|
|
||||||
|
#[schema(example = "core.python")]
|
||||||
|
pub r#ref: String,
|
||||||
|
|
||||||
|
#[schema(example = 1, nullable = true)]
|
||||||
|
pub pack: Option<i64>,
|
||||||
|
|
||||||
|
#[schema(example = "core", nullable = true)]
|
||||||
|
pub pack_ref: Option<String>,
|
||||||
|
|
||||||
|
#[schema(example = "Python runtime with virtualenv support", nullable = true)]
|
||||||
|
pub description: Option<String>,
|
||||||
|
|
||||||
|
#[schema(example = "Python")]
|
||||||
|
pub name: String,
|
||||||
|
|
||||||
|
#[schema(value_type = Object)]
|
||||||
|
pub distributions: JsonValue,
|
||||||
|
|
||||||
|
#[schema(value_type = Object, nullable = true)]
|
||||||
|
pub installation: Option<JsonValue>,
|
||||||
|
|
||||||
|
#[schema(value_type = Object)]
|
||||||
|
pub execution_config: JsonValue,
|
||||||
|
|
||||||
|
#[schema(example = "2024-01-13T10:30:00Z")]
|
||||||
|
pub created: DateTime<Utc>,
|
||||||
|
|
||||||
|
#[schema(example = "2024-01-13T10:30:00Z")]
|
||||||
|
pub updated: DateTime<Utc>,
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Runtime summary for list views.
|
||||||
|
#[derive(Debug, Clone, Serialize, ToSchema)]
|
||||||
|
pub struct RuntimeSummary {
|
||||||
|
#[schema(example = 1)]
|
||||||
|
pub id: i64,
|
||||||
|
|
||||||
|
#[schema(example = "core.python")]
|
||||||
|
pub r#ref: String,
|
||||||
|
|
||||||
|
#[schema(example = "core", nullable = true)]
|
||||||
|
pub pack_ref: Option<String>,
|
||||||
|
|
||||||
|
#[schema(example = "Python runtime with virtualenv support", nullable = true)]
|
||||||
|
pub description: Option<String>,
|
||||||
|
|
||||||
|
#[schema(example = "Python")]
|
||||||
|
pub name: String,
|
||||||
|
|
||||||
|
#[schema(example = "2024-01-13T10:30:00Z")]
|
||||||
|
pub created: DateTime<Utc>,
|
||||||
|
|
||||||
|
#[schema(example = "2024-01-13T10:30:00Z")]
|
||||||
|
pub updated: DateTime<Utc>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<attune_common::models::runtime::Runtime> for RuntimeResponse {
|
||||||
|
fn from(runtime: attune_common::models::runtime::Runtime) -> Self {
|
||||||
|
Self {
|
||||||
|
id: runtime.id,
|
||||||
|
r#ref: runtime.r#ref,
|
||||||
|
pack: runtime.pack,
|
||||||
|
pack_ref: runtime.pack_ref,
|
||||||
|
description: runtime.description,
|
||||||
|
name: runtime.name,
|
||||||
|
distributions: runtime.distributions,
|
||||||
|
installation: runtime.installation,
|
||||||
|
execution_config: runtime.execution_config,
|
||||||
|
created: runtime.created,
|
||||||
|
updated: runtime.updated,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<attune_common::models::runtime::Runtime> for RuntimeSummary {
|
||||||
|
fn from(runtime: attune_common::models::runtime::Runtime) -> Self {
|
||||||
|
Self {
|
||||||
|
id: runtime.id,
|
||||||
|
r#ref: runtime.r#ref,
|
||||||
|
pack_ref: runtime.pack_ref,
|
||||||
|
description: runtime.description,
|
||||||
|
name: runtime.name,
|
||||||
|
created: runtime.created,
|
||||||
|
updated: runtime.updated,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -54,21 +54,35 @@ pub struct UpdateTriggerRequest {
|
|||||||
|
|
||||||
/// Trigger description
|
/// Trigger description
|
||||||
#[schema(example = "Updated webhook trigger description")]
|
#[schema(example = "Updated webhook trigger description")]
|
||||||
pub description: Option<String>,
|
pub description: Option<TriggerStringPatch>,
|
||||||
|
|
||||||
/// Parameter schema (StackStorm-style with inline required/secret)
|
/// Parameter schema (StackStorm-style with inline required/secret)
|
||||||
#[schema(value_type = Object, nullable = true)]
|
#[schema(value_type = Object, nullable = true)]
|
||||||
pub param_schema: Option<JsonValue>,
|
pub param_schema: Option<TriggerJsonPatch>,
|
||||||
|
|
||||||
/// Output schema
|
/// Output schema
|
||||||
#[schema(value_type = Object, nullable = true)]
|
#[schema(value_type = Object, nullable = true)]
|
||||||
pub out_schema: Option<JsonValue>,
|
pub out_schema: Option<TriggerJsonPatch>,
|
||||||
|
|
||||||
/// Whether the trigger is enabled
|
/// Whether the trigger is enabled
|
||||||
#[schema(example = true)]
|
#[schema(example = true)]
|
||||||
pub enabled: Option<bool>,
|
pub enabled: Option<bool>,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Deserialize, Serialize, ToSchema)]
|
||||||
|
#[serde(tag = "op", content = "value", rename_all = "snake_case")]
|
||||||
|
pub enum TriggerStringPatch {
|
||||||
|
Set(String),
|
||||||
|
Clear,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Deserialize, Serialize, ToSchema)]
|
||||||
|
#[serde(tag = "op", content = "value", rename_all = "snake_case")]
|
||||||
|
pub enum TriggerJsonPatch {
|
||||||
|
Set(JsonValue),
|
||||||
|
Clear,
|
||||||
|
}
|
||||||
|
|
||||||
/// Response DTO for trigger information
|
/// Response DTO for trigger information
|
||||||
#[derive(Debug, Clone, Serialize, ToSchema)]
|
#[derive(Debug, Clone, Serialize, ToSchema)]
|
||||||
pub struct TriggerResponse {
|
pub struct TriggerResponse {
|
||||||
@@ -244,13 +258,20 @@ pub struct UpdateSensorRequest {
|
|||||||
|
|
||||||
/// Parameter schema (StackStorm-style with inline required/secret)
|
/// Parameter schema (StackStorm-style with inline required/secret)
|
||||||
#[schema(value_type = Object, nullable = true)]
|
#[schema(value_type = Object, nullable = true)]
|
||||||
pub param_schema: Option<JsonValue>,
|
pub param_schema: Option<SensorJsonPatch>,
|
||||||
|
|
||||||
/// Whether the sensor is enabled
|
/// Whether the sensor is enabled
|
||||||
#[schema(example = false)]
|
#[schema(example = false)]
|
||||||
pub enabled: Option<bool>,
|
pub enabled: Option<bool>,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Deserialize, Serialize, ToSchema)]
|
||||||
|
#[serde(tag = "op", content = "value", rename_all = "snake_case")]
|
||||||
|
pub enum SensorJsonPatch {
|
||||||
|
Set(JsonValue),
|
||||||
|
Clear,
|
||||||
|
}
|
||||||
|
|
||||||
/// Response DTO for sensor information
|
/// Response DTO for sensor information
|
||||||
#[derive(Debug, Clone, Serialize, ToSchema)]
|
#[derive(Debug, Clone, Serialize, ToSchema)]
|
||||||
pub struct SensorResponse {
|
pub struct SensorResponse {
|
||||||
|
|||||||
@@ -48,10 +48,6 @@ pub struct SaveWorkflowFileRequest {
|
|||||||
/// Tags for categorization
|
/// Tags for categorization
|
||||||
#[schema(example = json!(["deployment", "automation"]))]
|
#[schema(example = json!(["deployment", "automation"]))]
|
||||||
pub tags: Option<Vec<String>>,
|
pub tags: Option<Vec<String>>,
|
||||||
|
|
||||||
/// Whether the workflow is enabled
|
|
||||||
#[schema(example = true)]
|
|
||||||
pub enabled: Option<bool>,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Request DTO for creating a new workflow
|
/// Request DTO for creating a new workflow
|
||||||
@@ -96,10 +92,6 @@ pub struct CreateWorkflowRequest {
|
|||||||
/// Tags for categorization and search
|
/// Tags for categorization and search
|
||||||
#[schema(example = json!(["incident", "slack", "approval"]))]
|
#[schema(example = json!(["incident", "slack", "approval"]))]
|
||||||
pub tags: Option<Vec<String>>,
|
pub tags: Option<Vec<String>>,
|
||||||
|
|
||||||
/// Whether the workflow is enabled
|
|
||||||
#[schema(example = true)]
|
|
||||||
pub enabled: Option<bool>,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Request DTO for updating a workflow
|
/// Request DTO for updating a workflow
|
||||||
@@ -134,10 +126,6 @@ pub struct UpdateWorkflowRequest {
|
|||||||
/// Tags
|
/// Tags
|
||||||
#[schema(example = json!(["incident", "slack", "approval", "automation"]))]
|
#[schema(example = json!(["incident", "slack", "approval", "automation"]))]
|
||||||
pub tags: Option<Vec<String>>,
|
pub tags: Option<Vec<String>>,
|
||||||
|
|
||||||
/// Whether the workflow is enabled
|
|
||||||
#[schema(example = true)]
|
|
||||||
pub enabled: Option<bool>,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Response DTO for workflow information
|
/// Response DTO for workflow information
|
||||||
@@ -187,10 +175,6 @@ pub struct WorkflowResponse {
|
|||||||
#[schema(example = json!(["incident", "slack", "approval"]))]
|
#[schema(example = json!(["incident", "slack", "approval"]))]
|
||||||
pub tags: Vec<String>,
|
pub tags: Vec<String>,
|
||||||
|
|
||||||
/// Whether the workflow is enabled
|
|
||||||
#[schema(example = true)]
|
|
||||||
pub enabled: bool,
|
|
||||||
|
|
||||||
/// Creation timestamp
|
/// Creation timestamp
|
||||||
#[schema(example = "2024-01-13T10:30:00Z")]
|
#[schema(example = "2024-01-13T10:30:00Z")]
|
||||||
pub created: DateTime<Utc>,
|
pub created: DateTime<Utc>,
|
||||||
@@ -231,10 +215,6 @@ pub struct WorkflowSummary {
|
|||||||
#[schema(example = json!(["incident", "slack", "approval"]))]
|
#[schema(example = json!(["incident", "slack", "approval"]))]
|
||||||
pub tags: Vec<String>,
|
pub tags: Vec<String>,
|
||||||
|
|
||||||
/// Whether the workflow is enabled
|
|
||||||
#[schema(example = true)]
|
|
||||||
pub enabled: bool,
|
|
||||||
|
|
||||||
/// Creation timestamp
|
/// Creation timestamp
|
||||||
#[schema(example = "2024-01-13T10:30:00Z")]
|
#[schema(example = "2024-01-13T10:30:00Z")]
|
||||||
pub created: DateTime<Utc>,
|
pub created: DateTime<Utc>,
|
||||||
@@ -259,7 +239,6 @@ impl From<attune_common::models::workflow::WorkflowDefinition> for WorkflowRespo
|
|||||||
out_schema: workflow.out_schema,
|
out_schema: workflow.out_schema,
|
||||||
definition: workflow.definition,
|
definition: workflow.definition,
|
||||||
tags: workflow.tags,
|
tags: workflow.tags,
|
||||||
enabled: workflow.enabled,
|
|
||||||
created: workflow.created,
|
created: workflow.created,
|
||||||
updated: workflow.updated,
|
updated: workflow.updated,
|
||||||
}
|
}
|
||||||
@@ -277,7 +256,6 @@ impl From<attune_common::models::workflow::WorkflowDefinition> for WorkflowSumma
|
|||||||
description: workflow.description,
|
description: workflow.description,
|
||||||
version: workflow.version,
|
version: workflow.version,
|
||||||
tags: workflow.tags,
|
tags: workflow.tags,
|
||||||
enabled: workflow.enabled,
|
|
||||||
created: workflow.created,
|
created: workflow.created,
|
||||||
updated: workflow.updated,
|
updated: workflow.updated,
|
||||||
}
|
}
|
||||||
@@ -291,10 +269,6 @@ pub struct WorkflowSearchParams {
|
|||||||
#[param(example = "incident,approval")]
|
#[param(example = "incident,approval")]
|
||||||
pub tags: Option<String>,
|
pub tags: Option<String>,
|
||||||
|
|
||||||
/// Filter by enabled status
|
|
||||||
#[param(example = true)]
|
|
||||||
pub enabled: Option<bool>,
|
|
||||||
|
|
||||||
/// Search term for label/description (case-insensitive)
|
/// Search term for label/description (case-insensitive)
|
||||||
#[param(example = "incident")]
|
#[param(example = "incident")]
|
||||||
pub search: Option<String>,
|
pub search: Option<String>,
|
||||||
@@ -320,7 +294,6 @@ mod tests {
|
|||||||
out_schema: None,
|
out_schema: None,
|
||||||
definition: serde_json::json!({"tasks": []}),
|
definition: serde_json::json!({"tasks": []}),
|
||||||
tags: None,
|
tags: None,
|
||||||
enabled: None,
|
|
||||||
};
|
};
|
||||||
|
|
||||||
assert!(req.validate().is_err());
|
assert!(req.validate().is_err());
|
||||||
@@ -338,7 +311,6 @@ mod tests {
|
|||||||
out_schema: None,
|
out_schema: None,
|
||||||
definition: serde_json::json!({"tasks": []}),
|
definition: serde_json::json!({"tasks": []}),
|
||||||
tags: Some(vec!["test".to_string()]),
|
tags: Some(vec!["test".to_string()]),
|
||||||
enabled: Some(true),
|
|
||||||
};
|
};
|
||||||
|
|
||||||
assert!(req.validate().is_ok());
|
assert!(req.validate().is_ok());
|
||||||
@@ -354,7 +326,6 @@ mod tests {
|
|||||||
out_schema: None,
|
out_schema: None,
|
||||||
definition: None,
|
definition: None,
|
||||||
tags: None,
|
tags: None,
|
||||||
enabled: None,
|
|
||||||
};
|
};
|
||||||
|
|
||||||
// Should be valid even with all None values
|
// Should be valid even with all None values
|
||||||
@@ -365,7 +336,6 @@ mod tests {
|
|||||||
fn test_workflow_search_params() {
|
fn test_workflow_search_params() {
|
||||||
let params = WorkflowSearchParams {
|
let params = WorkflowSearchParams {
|
||||||
tags: Some("incident,approval".to_string()),
|
tags: Some("incident,approval".to_string()),
|
||||||
enabled: Some(true),
|
|
||||||
search: Some("response".to_string()),
|
search: Some("response".to_string()),
|
||||||
pack_ref: Some("core".to_string()),
|
pack_ref: Some("core".to_string()),
|
||||||
};
|
};
|
||||||
|
|||||||
@@ -5,6 +5,7 @@
|
|||||||
//! It is primarily used by the binary target and integration tests.
|
//! It is primarily used by the binary target and integration tests.
|
||||||
|
|
||||||
pub mod auth;
|
pub mod auth;
|
||||||
|
pub mod authz;
|
||||||
pub mod dto;
|
pub mod dto;
|
||||||
pub mod middleware;
|
pub mod middleware;
|
||||||
pub mod openapi;
|
pub mod openapi;
|
||||||
|
|||||||
@@ -115,8 +115,9 @@ async fn mq_reconnect_loop(state: Arc<AppState>, mq_url: String) {
|
|||||||
|
|
||||||
#[tokio::main]
|
#[tokio::main]
|
||||||
async fn main() -> Result<()> {
|
async fn main() -> Result<()> {
|
||||||
// Install HMAC-only JWT crypto provider (must be before any token operations)
|
// Install a JWT crypto provider that supports both Attune's HS tokens
|
||||||
attune_common::auth::install_crypto_provider();
|
// and external RS256 OIDC identity tokens.
|
||||||
|
let _ = jsonwebtoken::crypto::rust_crypto::DEFAULT_PROVIDER.install_default();
|
||||||
|
|
||||||
// Initialize tracing subscriber
|
// Initialize tracing subscriber
|
||||||
tracing_subscriber::fmt()
|
tracing_subscriber::fmt()
|
||||||
|
|||||||
@@ -10,8 +10,8 @@ use crate::dto::{
|
|||||||
ActionResponse, ActionSummary, CreateActionRequest, QueueStatsResponse, UpdateActionRequest,
|
ActionResponse, ActionSummary, CreateActionRequest, QueueStatsResponse, UpdateActionRequest,
|
||||||
},
|
},
|
||||||
auth::{
|
auth::{
|
||||||
ChangePasswordRequest, CurrentUserResponse, LoginRequest, RefreshTokenRequest,
|
AuthSettingsResponse, ChangePasswordRequest, CurrentUserResponse, LoginRequest,
|
||||||
RegisterRequest, TokenResponse,
|
RefreshTokenRequest, RegisterRequest, TokenResponse,
|
||||||
},
|
},
|
||||||
common::{ApiResponse, PaginatedResponse, PaginationMeta, SuccessResponse},
|
common::{ApiResponse, PaginatedResponse, PaginationMeta, SuccessResponse},
|
||||||
event::{EnforcementResponse, EnforcementSummary, EventResponse, EventSummary},
|
event::{EnforcementResponse, EnforcementSummary, EventResponse, EventSummary},
|
||||||
@@ -26,7 +26,12 @@ use crate::dto::{
|
|||||||
PackWorkflowSyncResponse, PackWorkflowValidationResponse, RegisterPackRequest,
|
PackWorkflowSyncResponse, PackWorkflowValidationResponse, RegisterPackRequest,
|
||||||
UpdatePackRequest, WorkflowSyncResult,
|
UpdatePackRequest, WorkflowSyncResult,
|
||||||
},
|
},
|
||||||
|
permission::{
|
||||||
|
CreateIdentityRequest, CreatePermissionAssignmentRequest, IdentityResponse,
|
||||||
|
IdentitySummary, PermissionAssignmentResponse, PermissionSetSummary, UpdateIdentityRequest,
|
||||||
|
},
|
||||||
rule::{CreateRuleRequest, RuleResponse, RuleSummary, UpdateRuleRequest},
|
rule::{CreateRuleRequest, RuleResponse, RuleSummary, UpdateRuleRequest},
|
||||||
|
runtime::{CreateRuntimeRequest, RuntimeResponse, RuntimeSummary, UpdateRuntimeRequest},
|
||||||
trigger::{
|
trigger::{
|
||||||
CreateSensorRequest, CreateTriggerRequest, SensorResponse, SensorSummary, TriggerResponse,
|
CreateSensorRequest, CreateTriggerRequest, SensorResponse, SensorSummary, TriggerResponse,
|
||||||
TriggerSummary, UpdateSensorRequest, UpdateTriggerRequest,
|
TriggerSummary, UpdateSensorRequest, UpdateTriggerRequest,
|
||||||
@@ -63,7 +68,9 @@ use crate::dto::{
|
|||||||
crate::routes::health::liveness,
|
crate::routes::health::liveness,
|
||||||
|
|
||||||
// Authentication
|
// Authentication
|
||||||
|
crate::routes::auth::auth_settings,
|
||||||
crate::routes::auth::login,
|
crate::routes::auth::login,
|
||||||
|
crate::routes::auth::ldap_login,
|
||||||
crate::routes::auth::register,
|
crate::routes::auth::register,
|
||||||
crate::routes::auth::refresh_token,
|
crate::routes::auth::refresh_token,
|
||||||
crate::routes::auth::get_current_user,
|
crate::routes::auth::get_current_user,
|
||||||
@@ -92,6 +99,14 @@ use crate::dto::{
|
|||||||
crate::routes::actions::delete_action,
|
crate::routes::actions::delete_action,
|
||||||
crate::routes::actions::get_queue_stats,
|
crate::routes::actions::get_queue_stats,
|
||||||
|
|
||||||
|
// Runtimes
|
||||||
|
crate::routes::runtimes::list_runtimes,
|
||||||
|
crate::routes::runtimes::list_runtimes_by_pack,
|
||||||
|
crate::routes::runtimes::get_runtime,
|
||||||
|
crate::routes::runtimes::create_runtime,
|
||||||
|
crate::routes::runtimes::update_runtime,
|
||||||
|
crate::routes::runtimes::delete_runtime,
|
||||||
|
|
||||||
// Triggers
|
// Triggers
|
||||||
crate::routes::triggers::list_triggers,
|
crate::routes::triggers::list_triggers,
|
||||||
crate::routes::triggers::list_enabled_triggers,
|
crate::routes::triggers::list_enabled_triggers,
|
||||||
@@ -160,6 +175,17 @@ use crate::dto::{
|
|||||||
crate::routes::keys::update_key,
|
crate::routes::keys::update_key,
|
||||||
crate::routes::keys::delete_key,
|
crate::routes::keys::delete_key,
|
||||||
|
|
||||||
|
// Permissions
|
||||||
|
crate::routes::permissions::list_identities,
|
||||||
|
crate::routes::permissions::get_identity,
|
||||||
|
crate::routes::permissions::create_identity,
|
||||||
|
crate::routes::permissions::update_identity,
|
||||||
|
crate::routes::permissions::delete_identity,
|
||||||
|
crate::routes::permissions::list_permission_sets,
|
||||||
|
crate::routes::permissions::list_identity_permissions,
|
||||||
|
crate::routes::permissions::create_permission_assignment,
|
||||||
|
crate::routes::permissions::delete_permission_assignment,
|
||||||
|
|
||||||
// Workflows
|
// Workflows
|
||||||
crate::routes::workflows::list_workflows,
|
crate::routes::workflows::list_workflows,
|
||||||
crate::routes::workflows::list_workflows_by_pack,
|
crate::routes::workflows::list_workflows_by_pack,
|
||||||
@@ -173,15 +199,21 @@ use crate::dto::{
|
|||||||
crate::routes::webhooks::disable_webhook,
|
crate::routes::webhooks::disable_webhook,
|
||||||
crate::routes::webhooks::regenerate_webhook_key,
|
crate::routes::webhooks::regenerate_webhook_key,
|
||||||
crate::routes::webhooks::receive_webhook,
|
crate::routes::webhooks::receive_webhook,
|
||||||
|
|
||||||
|
// Agent
|
||||||
|
crate::routes::agent::download_agent_binary,
|
||||||
|
crate::routes::agent::agent_info,
|
||||||
),
|
),
|
||||||
components(
|
components(
|
||||||
schemas(
|
schemas(
|
||||||
// Common types
|
// Common types
|
||||||
ApiResponse<TokenResponse>,
|
ApiResponse<TokenResponse>,
|
||||||
|
ApiResponse<AuthSettingsResponse>,
|
||||||
ApiResponse<CurrentUserResponse>,
|
ApiResponse<CurrentUserResponse>,
|
||||||
ApiResponse<PackResponse>,
|
ApiResponse<PackResponse>,
|
||||||
ApiResponse<PackInstallResponse>,
|
ApiResponse<PackInstallResponse>,
|
||||||
ApiResponse<ActionResponse>,
|
ApiResponse<ActionResponse>,
|
||||||
|
ApiResponse<RuntimeResponse>,
|
||||||
ApiResponse<TriggerResponse>,
|
ApiResponse<TriggerResponse>,
|
||||||
ApiResponse<SensorResponse>,
|
ApiResponse<SensorResponse>,
|
||||||
ApiResponse<RuleResponse>,
|
ApiResponse<RuleResponse>,
|
||||||
@@ -190,10 +222,13 @@ use crate::dto::{
|
|||||||
ApiResponse<EnforcementResponse>,
|
ApiResponse<EnforcementResponse>,
|
||||||
ApiResponse<InquiryResponse>,
|
ApiResponse<InquiryResponse>,
|
||||||
ApiResponse<KeyResponse>,
|
ApiResponse<KeyResponse>,
|
||||||
|
ApiResponse<IdentityResponse>,
|
||||||
|
ApiResponse<PermissionAssignmentResponse>,
|
||||||
ApiResponse<WorkflowResponse>,
|
ApiResponse<WorkflowResponse>,
|
||||||
ApiResponse<QueueStatsResponse>,
|
ApiResponse<QueueStatsResponse>,
|
||||||
PaginatedResponse<PackSummary>,
|
PaginatedResponse<PackSummary>,
|
||||||
PaginatedResponse<ActionSummary>,
|
PaginatedResponse<ActionSummary>,
|
||||||
|
PaginatedResponse<RuntimeSummary>,
|
||||||
PaginatedResponse<TriggerSummary>,
|
PaginatedResponse<TriggerSummary>,
|
||||||
PaginatedResponse<SensorSummary>,
|
PaginatedResponse<SensorSummary>,
|
||||||
PaginatedResponse<RuleSummary>,
|
PaginatedResponse<RuleSummary>,
|
||||||
@@ -202,12 +237,14 @@ use crate::dto::{
|
|||||||
PaginatedResponse<EnforcementSummary>,
|
PaginatedResponse<EnforcementSummary>,
|
||||||
PaginatedResponse<InquirySummary>,
|
PaginatedResponse<InquirySummary>,
|
||||||
PaginatedResponse<KeySummary>,
|
PaginatedResponse<KeySummary>,
|
||||||
|
PaginatedResponse<IdentitySummary>,
|
||||||
PaginatedResponse<WorkflowSummary>,
|
PaginatedResponse<WorkflowSummary>,
|
||||||
PaginationMeta,
|
PaginationMeta,
|
||||||
SuccessResponse,
|
SuccessResponse,
|
||||||
|
|
||||||
// Auth DTOs
|
// Auth DTOs
|
||||||
LoginRequest,
|
LoginRequest,
|
||||||
|
crate::routes::auth::LdapLoginRequest,
|
||||||
RegisterRequest,
|
RegisterRequest,
|
||||||
RefreshTokenRequest,
|
RefreshTokenRequest,
|
||||||
ChangePasswordRequest,
|
ChangePasswordRequest,
|
||||||
@@ -233,6 +270,21 @@ use crate::dto::{
|
|||||||
attune_common::models::pack_test::PackTestSummary,
|
attune_common::models::pack_test::PackTestSummary,
|
||||||
PaginatedResponse<attune_common::models::pack_test::PackTestSummary>,
|
PaginatedResponse<attune_common::models::pack_test::PackTestSummary>,
|
||||||
|
|
||||||
|
// Permission DTOs
|
||||||
|
CreateIdentityRequest,
|
||||||
|
UpdateIdentityRequest,
|
||||||
|
IdentityResponse,
|
||||||
|
PermissionSetSummary,
|
||||||
|
PermissionAssignmentResponse,
|
||||||
|
CreatePermissionAssignmentRequest,
|
||||||
|
|
||||||
|
// Runtime DTOs
|
||||||
|
CreateRuntimeRequest,
|
||||||
|
UpdateRuntimeRequest,
|
||||||
|
RuntimeResponse,
|
||||||
|
RuntimeSummary,
|
||||||
|
IdentitySummary,
|
||||||
|
|
||||||
// Action DTOs
|
// Action DTOs
|
||||||
CreateActionRequest,
|
CreateActionRequest,
|
||||||
UpdateActionRequest,
|
UpdateActionRequest,
|
||||||
@@ -293,6 +345,10 @@ use crate::dto::{
|
|||||||
WebhookReceiverRequest,
|
WebhookReceiverRequest,
|
||||||
WebhookReceiverResponse,
|
WebhookReceiverResponse,
|
||||||
ApiResponse<WebhookReceiverResponse>,
|
ApiResponse<WebhookReceiverResponse>,
|
||||||
|
|
||||||
|
// Agent DTOs
|
||||||
|
crate::routes::agent::AgentBinaryInfo,
|
||||||
|
crate::routes::agent::AgentArchInfo,
|
||||||
)
|
)
|
||||||
),
|
),
|
||||||
modifiers(&SecurityAddon),
|
modifiers(&SecurityAddon),
|
||||||
@@ -311,6 +367,7 @@ use crate::dto::{
|
|||||||
(name = "secrets", description = "Secret management endpoints"),
|
(name = "secrets", description = "Secret management endpoints"),
|
||||||
(name = "workflows", description = "Workflow management endpoints"),
|
(name = "workflows", description = "Workflow management endpoints"),
|
||||||
(name = "webhooks", description = "Webhook management and receiver endpoints"),
|
(name = "webhooks", description = "Webhook management and receiver endpoints"),
|
||||||
|
(name = "agent", description = "Agent binary distribution endpoints"),
|
||||||
)
|
)
|
||||||
)]
|
)]
|
||||||
pub struct ApiDoc;
|
pub struct ApiDoc;
|
||||||
@@ -393,18 +450,57 @@ mod tests {
|
|||||||
// We have 57 unique paths with 81 total operations (HTTP methods)
|
// We have 57 unique paths with 81 total operations (HTTP methods)
|
||||||
// This test ensures we don't accidentally remove endpoints
|
// This test ensures we don't accidentally remove endpoints
|
||||||
assert!(
|
assert!(
|
||||||
path_count >= 57,
|
path_count >= 59,
|
||||||
"Expected at least 57 unique API paths, found {}",
|
"Expected at least 59 unique API paths, found {}",
|
||||||
path_count
|
path_count
|
||||||
);
|
);
|
||||||
|
|
||||||
assert!(
|
assert!(
|
||||||
operation_count >= 81,
|
operation_count >= 83,
|
||||||
"Expected at least 81 API operations, found {}",
|
"Expected at least 83 API operations, found {}",
|
||||||
operation_count
|
operation_count
|
||||||
);
|
);
|
||||||
|
|
||||||
println!("Total API paths: {}", path_count);
|
println!("Total API paths: {}", path_count);
|
||||||
println!("Total API operations: {}", operation_count);
|
println!("Total API operations: {}", operation_count);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_auth_endpoints_registered() {
|
||||||
|
let doc = ApiDoc::openapi();
|
||||||
|
|
||||||
|
let expected_auth_paths = vec![
|
||||||
|
"/auth/settings",
|
||||||
|
"/auth/login",
|
||||||
|
"/auth/ldap/login",
|
||||||
|
"/auth/register",
|
||||||
|
"/auth/refresh",
|
||||||
|
"/auth/me",
|
||||||
|
"/auth/change-password",
|
||||||
|
];
|
||||||
|
|
||||||
|
for path in &expected_auth_paths {
|
||||||
|
assert!(
|
||||||
|
doc.paths.paths.contains_key(*path),
|
||||||
|
"Expected auth endpoint {} to be registered in OpenAPI spec, but it was missing. \
|
||||||
|
Registered paths: {:?}",
|
||||||
|
path,
|
||||||
|
doc.paths.paths.keys().collect::<Vec<_>>()
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_ldap_login_request_schema_registered() {
|
||||||
|
let doc = ApiDoc::openapi();
|
||||||
|
|
||||||
|
let components = doc.components.as_ref().expect("components should exist");
|
||||||
|
|
||||||
|
assert!(
|
||||||
|
components.schemas.contains_key("LdapLoginRequest"),
|
||||||
|
"Expected LdapLoginRequest schema to be registered in OpenAPI components. \
|
||||||
|
Registered schemas: {:?}",
|
||||||
|
components.schemas.keys().collect::<Vec<_>>()
|
||||||
|
);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -10,19 +10,21 @@ use axum::{
|
|||||||
use std::sync::Arc;
|
use std::sync::Arc;
|
||||||
use validator::Validate;
|
use validator::Validate;
|
||||||
|
|
||||||
|
use attune_common::rbac::{Action, AuthorizationContext, Resource};
|
||||||
use attune_common::repositories::{
|
use attune_common::repositories::{
|
||||||
action::{ActionRepository, ActionSearchFilters, CreateActionInput, UpdateActionInput},
|
action::{ActionRepository, ActionSearchFilters, CreateActionInput, UpdateActionInput},
|
||||||
pack::PackRepository,
|
pack::PackRepository,
|
||||||
queue_stats::QueueStatsRepository,
|
queue_stats::QueueStatsRepository,
|
||||||
Create, Delete, FindByRef, Update,
|
Create, Delete, FindByRef, Patch, Update,
|
||||||
};
|
};
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
auth::middleware::RequireAuth,
|
auth::middleware::RequireAuth,
|
||||||
|
authz::{AuthorizationCheck, AuthorizationService},
|
||||||
dto::{
|
dto::{
|
||||||
action::{
|
action::{
|
||||||
ActionResponse, ActionSummary, CreateActionRequest, QueueStatsResponse,
|
ActionResponse, ActionSummary, CreateActionRequest, QueueStatsResponse,
|
||||||
UpdateActionRequest,
|
RuntimeVersionConstraintPatch, UpdateActionRequest,
|
||||||
},
|
},
|
||||||
common::{PaginatedResponse, PaginationParams},
|
common::{PaginatedResponse, PaginationParams},
|
||||||
ApiResponse, SuccessResponse,
|
ApiResponse, SuccessResponse,
|
||||||
@@ -153,7 +155,7 @@ pub async fn get_action(
|
|||||||
)]
|
)]
|
||||||
pub async fn create_action(
|
pub async fn create_action(
|
||||||
State(state): State<Arc<AppState>>,
|
State(state): State<Arc<AppState>>,
|
||||||
RequireAuth(_user): RequireAuth,
|
RequireAuth(user): RequireAuth,
|
||||||
Json(request): Json<CreateActionRequest>,
|
Json(request): Json<CreateActionRequest>,
|
||||||
) -> ApiResult<impl IntoResponse> {
|
) -> ApiResult<impl IntoResponse> {
|
||||||
// Validate request
|
// Validate request
|
||||||
@@ -175,6 +177,26 @@ pub async fn create_action(
|
|||||||
.await?
|
.await?
|
||||||
.ok_or_else(|| ApiError::NotFound(format!("Pack '{}' not found", request.pack_ref)))?;
|
.ok_or_else(|| ApiError::NotFound(format!("Pack '{}' not found", request.pack_ref)))?;
|
||||||
|
|
||||||
|
if user.claims.token_type == crate::auth::jwt::TokenType::Access {
|
||||||
|
let identity_id = user
|
||||||
|
.identity_id()
|
||||||
|
.map_err(|_| ApiError::Unauthorized("Invalid user identity".to_string()))?;
|
||||||
|
let authz = AuthorizationService::new(state.db.clone());
|
||||||
|
let mut ctx = AuthorizationContext::new(identity_id);
|
||||||
|
ctx.pack_ref = Some(pack.r#ref.clone());
|
||||||
|
ctx.target_ref = Some(request.r#ref.clone());
|
||||||
|
authz
|
||||||
|
.authorize(
|
||||||
|
&user,
|
||||||
|
AuthorizationCheck {
|
||||||
|
resource: Resource::Actions,
|
||||||
|
action: Action::Create,
|
||||||
|
context: ctx,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
}
|
||||||
|
|
||||||
// If runtime is specified, we could verify it exists (future enhancement)
|
// If runtime is specified, we could verify it exists (future enhancement)
|
||||||
// For now, the database foreign key constraint will handle invalid runtime IDs
|
// For now, the database foreign key constraint will handle invalid runtime IDs
|
||||||
|
|
||||||
@@ -219,7 +241,7 @@ pub async fn create_action(
|
|||||||
)]
|
)]
|
||||||
pub async fn update_action(
|
pub async fn update_action(
|
||||||
State(state): State<Arc<AppState>>,
|
State(state): State<Arc<AppState>>,
|
||||||
RequireAuth(_user): RequireAuth,
|
RequireAuth(user): RequireAuth,
|
||||||
Path(action_ref): Path<String>,
|
Path(action_ref): Path<String>,
|
||||||
Json(request): Json<UpdateActionRequest>,
|
Json(request): Json<UpdateActionRequest>,
|
||||||
) -> ApiResult<impl IntoResponse> {
|
) -> ApiResult<impl IntoResponse> {
|
||||||
@@ -231,13 +253,37 @@ pub async fn update_action(
|
|||||||
.await?
|
.await?
|
||||||
.ok_or_else(|| ApiError::NotFound(format!("Action '{}' not found", action_ref)))?;
|
.ok_or_else(|| ApiError::NotFound(format!("Action '{}' not found", action_ref)))?;
|
||||||
|
|
||||||
|
if user.claims.token_type == crate::auth::jwt::TokenType::Access {
|
||||||
|
let identity_id = user
|
||||||
|
.identity_id()
|
||||||
|
.map_err(|_| ApiError::Unauthorized("Invalid user identity".to_string()))?;
|
||||||
|
let authz = AuthorizationService::new(state.db.clone());
|
||||||
|
let mut ctx = AuthorizationContext::new(identity_id);
|
||||||
|
ctx.target_id = Some(existing_action.id);
|
||||||
|
ctx.target_ref = Some(existing_action.r#ref.clone());
|
||||||
|
ctx.pack_ref = Some(existing_action.pack_ref.clone());
|
||||||
|
authz
|
||||||
|
.authorize(
|
||||||
|
&user,
|
||||||
|
AuthorizationCheck {
|
||||||
|
resource: Resource::Actions,
|
||||||
|
action: Action::Update,
|
||||||
|
context: ctx,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
}
|
||||||
|
|
||||||
// Create update input
|
// Create update input
|
||||||
let update_input = UpdateActionInput {
|
let update_input = UpdateActionInput {
|
||||||
label: request.label,
|
label: request.label,
|
||||||
description: request.description,
|
description: request.description,
|
||||||
entrypoint: request.entrypoint,
|
entrypoint: request.entrypoint,
|
||||||
runtime: request.runtime,
|
runtime: request.runtime,
|
||||||
runtime_version_constraint: request.runtime_version_constraint,
|
runtime_version_constraint: request.runtime_version_constraint.map(|patch| match patch {
|
||||||
|
RuntimeVersionConstraintPatch::Set(value) => Patch::Set(value),
|
||||||
|
RuntimeVersionConstraintPatch::Clear => Patch::Clear,
|
||||||
|
}),
|
||||||
param_schema: request.param_schema,
|
param_schema: request.param_schema,
|
||||||
out_schema: request.out_schema,
|
out_schema: request.out_schema,
|
||||||
parameter_delivery: None,
|
parameter_delivery: None,
|
||||||
@@ -269,7 +315,7 @@ pub async fn update_action(
|
|||||||
)]
|
)]
|
||||||
pub async fn delete_action(
|
pub async fn delete_action(
|
||||||
State(state): State<Arc<AppState>>,
|
State(state): State<Arc<AppState>>,
|
||||||
RequireAuth(_user): RequireAuth,
|
RequireAuth(user): RequireAuth,
|
||||||
Path(action_ref): Path<String>,
|
Path(action_ref): Path<String>,
|
||||||
) -> ApiResult<impl IntoResponse> {
|
) -> ApiResult<impl IntoResponse> {
|
||||||
// Check if action exists
|
// Check if action exists
|
||||||
@@ -277,6 +323,27 @@ pub async fn delete_action(
|
|||||||
.await?
|
.await?
|
||||||
.ok_or_else(|| ApiError::NotFound(format!("Action '{}' not found", action_ref)))?;
|
.ok_or_else(|| ApiError::NotFound(format!("Action '{}' not found", action_ref)))?;
|
||||||
|
|
||||||
|
if user.claims.token_type == crate::auth::jwt::TokenType::Access {
|
||||||
|
let identity_id = user
|
||||||
|
.identity_id()
|
||||||
|
.map_err(|_| ApiError::Unauthorized("Invalid user identity".to_string()))?;
|
||||||
|
let authz = AuthorizationService::new(state.db.clone());
|
||||||
|
let mut ctx = AuthorizationContext::new(identity_id);
|
||||||
|
ctx.target_id = Some(action.id);
|
||||||
|
ctx.target_ref = Some(action.r#ref.clone());
|
||||||
|
ctx.pack_ref = Some(action.pack_ref.clone());
|
||||||
|
authz
|
||||||
|
.authorize(
|
||||||
|
&user,
|
||||||
|
AuthorizationCheck {
|
||||||
|
resource: Resource::Actions,
|
||||||
|
action: Action::Delete,
|
||||||
|
context: ctx,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
}
|
||||||
|
|
||||||
// Delete the action
|
// Delete the action
|
||||||
let deleted = ActionRepository::delete(&state.db, action.id).await?;
|
let deleted = ActionRepository::delete(&state.db, action.id).await?;
|
||||||
|
|
||||||
|
|||||||
482
crates/api/src/routes/agent.rs
Normal file
482
crates/api/src/routes/agent.rs
Normal file
@@ -0,0 +1,482 @@
|
|||||||
|
//! Agent binary download endpoints
|
||||||
|
//!
|
||||||
|
//! Provides endpoints for downloading the attune-agent binary for injection
|
||||||
|
//! into arbitrary containers. This supports deployments where shared Docker
|
||||||
|
//! volumes are impractical (Kubernetes, ECS, remote Docker hosts).
|
||||||
|
|
||||||
|
use axum::{
|
||||||
|
body::Body,
|
||||||
|
extract::{Query, State},
|
||||||
|
http::{header, HeaderMap, StatusCode},
|
||||||
|
response::IntoResponse,
|
||||||
|
routing::get,
|
||||||
|
Json, Router,
|
||||||
|
};
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
use std::sync::Arc;
|
||||||
|
use subtle::ConstantTimeEq;
|
||||||
|
use tokio::fs;
|
||||||
|
use tokio_util::io::ReaderStream;
|
||||||
|
use utoipa::{IntoParams, ToSchema};
|
||||||
|
|
||||||
|
use crate::state::AppState;
|
||||||
|
|
||||||
|
/// Query parameters for the binary download endpoint
|
||||||
|
#[derive(Debug, Deserialize, IntoParams)]
|
||||||
|
pub struct BinaryDownloadParams {
|
||||||
|
/// Target architecture (x86_64, aarch64). Defaults to x86_64.
|
||||||
|
#[param(example = "x86_64")]
|
||||||
|
pub arch: Option<String>,
|
||||||
|
/// Optional bootstrap token for authentication
|
||||||
|
pub token: Option<String>,
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Agent binary metadata
|
||||||
|
#[derive(Debug, Serialize, ToSchema)]
|
||||||
|
pub struct AgentBinaryInfo {
|
||||||
|
/// Available architectures
|
||||||
|
pub architectures: Vec<AgentArchInfo>,
|
||||||
|
/// Agent version (from build)
|
||||||
|
pub version: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Per-architecture binary info
|
||||||
|
#[derive(Debug, Serialize, ToSchema)]
|
||||||
|
pub struct AgentArchInfo {
|
||||||
|
/// Architecture name
|
||||||
|
pub arch: String,
|
||||||
|
/// Binary size in bytes
|
||||||
|
pub size_bytes: u64,
|
||||||
|
/// Whether this binary is available
|
||||||
|
pub available: bool,
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Validate that the architecture name is safe (no path traversal) and normalize it.
|
||||||
|
fn validate_arch(arch: &str) -> Result<&str, (StatusCode, Json<serde_json::Value>)> {
|
||||||
|
match arch {
|
||||||
|
"x86_64" | "aarch64" => Ok(arch),
|
||||||
|
// Accept arm64 as an alias for aarch64
|
||||||
|
"arm64" => Ok("aarch64"),
|
||||||
|
_ => Err((
|
||||||
|
StatusCode::BAD_REQUEST,
|
||||||
|
Json(serde_json::json!({
|
||||||
|
"error": "Invalid architecture",
|
||||||
|
"message": format!("Unsupported architecture '{}'. Supported: x86_64, aarch64", arch),
|
||||||
|
})),
|
||||||
|
)),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Validate bootstrap token if configured.
|
||||||
|
///
|
||||||
|
/// If the agent config has a `bootstrap_token` set, the request must provide it
|
||||||
|
/// via the `X-Agent-Token` header or the `token` query parameter. If no token
|
||||||
|
/// is configured, access is unrestricted.
|
||||||
|
fn validate_token(
|
||||||
|
config: &attune_common::config::Config,
|
||||||
|
headers: &HeaderMap,
|
||||||
|
query_token: &Option<String>,
|
||||||
|
) -> Result<(), (StatusCode, Json<serde_json::Value>)> {
|
||||||
|
let expected_token = config
|
||||||
|
.agent
|
||||||
|
.as_ref()
|
||||||
|
.and_then(|ac| ac.bootstrap_token.as_ref());
|
||||||
|
|
||||||
|
let expected_token = match expected_token {
|
||||||
|
Some(t) => t,
|
||||||
|
None => {
|
||||||
|
use std::sync::Once;
|
||||||
|
static WARN_ONCE: Once = Once::new();
|
||||||
|
WARN_ONCE.call_once(|| {
|
||||||
|
tracing::warn!(
|
||||||
|
"Agent binary download endpoint has no bootstrap_token configured. \
|
||||||
|
Anyone with network access to the API can download the agent binary. \
|
||||||
|
Set agent.bootstrap_token in config to restrict access."
|
||||||
|
);
|
||||||
|
});
|
||||||
|
return Ok(());
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
// Check X-Agent-Token header first, then query param
|
||||||
|
let provided_token = headers
|
||||||
|
.get("x-agent-token")
|
||||||
|
.and_then(|v| v.to_str().ok())
|
||||||
|
.map(|s| s.to_string())
|
||||||
|
.or_else(|| query_token.clone());
|
||||||
|
|
||||||
|
match provided_token {
|
||||||
|
Some(ref t) if bool::from(t.as_bytes().ct_eq(expected_token.as_bytes())) => Ok(()),
|
||||||
|
Some(_) => Err((
|
||||||
|
StatusCode::UNAUTHORIZED,
|
||||||
|
Json(serde_json::json!({
|
||||||
|
"error": "Invalid token",
|
||||||
|
"message": "The provided bootstrap token is invalid",
|
||||||
|
})),
|
||||||
|
)),
|
||||||
|
None => Err((
|
||||||
|
StatusCode::UNAUTHORIZED,
|
||||||
|
Json(serde_json::json!({
|
||||||
|
"error": "Token required",
|
||||||
|
"message": "A bootstrap token is required. Provide via X-Agent-Token header or token query parameter.",
|
||||||
|
})),
|
||||||
|
)),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Download the agent binary
|
||||||
|
///
|
||||||
|
/// Returns the statically-linked attune-agent binary for the requested architecture.
|
||||||
|
/// The binary can be injected into any container to turn it into an Attune worker.
|
||||||
|
#[utoipa::path(
|
||||||
|
get,
|
||||||
|
path = "/api/v1/agent/binary",
|
||||||
|
params(BinaryDownloadParams),
|
||||||
|
responses(
|
||||||
|
(status = 200, description = "Agent binary", content_type = "application/octet-stream"),
|
||||||
|
(status = 400, description = "Invalid architecture"),
|
||||||
|
(status = 401, description = "Invalid or missing bootstrap token"),
|
||||||
|
(status = 404, description = "Agent binary not found"),
|
||||||
|
(status = 503, description = "Agent binary distribution not configured"),
|
||||||
|
),
|
||||||
|
tag = "agent"
|
||||||
|
)]
|
||||||
|
pub async fn download_agent_binary(
|
||||||
|
State(state): State<Arc<AppState>>,
|
||||||
|
headers: HeaderMap,
|
||||||
|
Query(params): Query<BinaryDownloadParams>,
|
||||||
|
) -> Result<impl IntoResponse, (StatusCode, Json<serde_json::Value>)> {
|
||||||
|
// Validate bootstrap token if configured
|
||||||
|
validate_token(&state.config, &headers, ¶ms.token)?;
|
||||||
|
|
||||||
|
let agent_config = state.config.agent.as_ref().ok_or_else(|| {
|
||||||
|
(
|
||||||
|
StatusCode::SERVICE_UNAVAILABLE,
|
||||||
|
Json(serde_json::json!({
|
||||||
|
"error": "Not configured",
|
||||||
|
"message": "Agent binary distribution is not configured. Set agent.binary_dir in config.",
|
||||||
|
})),
|
||||||
|
)
|
||||||
|
})?;
|
||||||
|
|
||||||
|
let arch = params.arch.as_deref().unwrap_or("x86_64");
|
||||||
|
let arch = validate_arch(arch)?;
|
||||||
|
|
||||||
|
let binary_dir = std::path::Path::new(&agent_config.binary_dir);
|
||||||
|
|
||||||
|
// Try arch-specific binary first, then fall back to generic name.
|
||||||
|
// IMPORTANT: The generic `attune-agent` binary is only safe to serve for
|
||||||
|
// x86_64 requests, because the current build pipeline produces an
|
||||||
|
// x86_64-unknown-linux-musl binary. Serving it for aarch64/arm64 would
|
||||||
|
// give the caller an incompatible executable (exec format error).
|
||||||
|
let arch_specific = binary_dir.join(format!("attune-agent-{}", arch));
|
||||||
|
let generic = binary_dir.join("attune-agent");
|
||||||
|
|
||||||
|
let binary_path = if arch_specific.exists() {
|
||||||
|
arch_specific
|
||||||
|
} else if arch == "x86_64" && generic.exists() {
|
||||||
|
tracing::debug!(
|
||||||
|
"Arch-specific binary not found at {:?}, falling back to generic {:?} (safe for x86_64)",
|
||||||
|
arch_specific,
|
||||||
|
generic
|
||||||
|
);
|
||||||
|
generic
|
||||||
|
} else {
|
||||||
|
tracing::warn!(
|
||||||
|
"Agent binary not found. Checked: {:?} and {:?}",
|
||||||
|
arch_specific,
|
||||||
|
generic
|
||||||
|
);
|
||||||
|
return Err((
|
||||||
|
StatusCode::NOT_FOUND,
|
||||||
|
Json(serde_json::json!({
|
||||||
|
"error": "Not found",
|
||||||
|
"message": format!(
|
||||||
|
"Agent binary not found for architecture '{}'. Ensure the agent binary is built and placed in '{}'.",
|
||||||
|
arch,
|
||||||
|
agent_config.binary_dir
|
||||||
|
),
|
||||||
|
})),
|
||||||
|
));
|
||||||
|
};
|
||||||
|
|
||||||
|
// Get file metadata for Content-Length
|
||||||
|
let metadata = fs::metadata(&binary_path).await.map_err(|e| {
|
||||||
|
tracing::error!(
|
||||||
|
"Failed to read agent binary metadata at {:?}: {}",
|
||||||
|
binary_path,
|
||||||
|
e
|
||||||
|
);
|
||||||
|
(
|
||||||
|
StatusCode::INTERNAL_SERVER_ERROR,
|
||||||
|
Json(serde_json::json!({
|
||||||
|
"error": "Internal error",
|
||||||
|
"message": "Failed to read agent binary",
|
||||||
|
})),
|
||||||
|
)
|
||||||
|
})?;
|
||||||
|
|
||||||
|
// Open file for streaming
|
||||||
|
let file = fs::File::open(&binary_path).await.map_err(|e| {
|
||||||
|
tracing::error!("Failed to open agent binary at {:?}: {}", binary_path, e);
|
||||||
|
(
|
||||||
|
StatusCode::INTERNAL_SERVER_ERROR,
|
||||||
|
Json(serde_json::json!({
|
||||||
|
"error": "Internal error",
|
||||||
|
"message": "Failed to open agent binary",
|
||||||
|
})),
|
||||||
|
)
|
||||||
|
})?;
|
||||||
|
|
||||||
|
let stream = ReaderStream::new(file);
|
||||||
|
let body = Body::from_stream(stream);
|
||||||
|
|
||||||
|
let headers_response = [
|
||||||
|
(header::CONTENT_TYPE, "application/octet-stream".to_string()),
|
||||||
|
(
|
||||||
|
header::CONTENT_DISPOSITION,
|
||||||
|
"attachment; filename=\"attune-agent\"".to_string(),
|
||||||
|
),
|
||||||
|
(header::CONTENT_LENGTH, metadata.len().to_string()),
|
||||||
|
(header::CACHE_CONTROL, "public, max-age=3600".to_string()),
|
||||||
|
];
|
||||||
|
|
||||||
|
tracing::info!(
|
||||||
|
arch = arch,
|
||||||
|
size_bytes = metadata.len(),
|
||||||
|
path = ?binary_path,
|
||||||
|
"Serving agent binary download"
|
||||||
|
);
|
||||||
|
|
||||||
|
Ok((headers_response, body))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get agent binary metadata
|
||||||
|
///
|
||||||
|
/// Returns information about available agent binaries, including
|
||||||
|
/// supported architectures and binary sizes.
|
||||||
|
#[utoipa::path(
|
||||||
|
get,
|
||||||
|
path = "/api/v1/agent/info",
|
||||||
|
responses(
|
||||||
|
(status = 200, description = "Agent binary info", body = AgentBinaryInfo),
|
||||||
|
(status = 503, description = "Agent binary distribution not configured"),
|
||||||
|
),
|
||||||
|
tag = "agent"
|
||||||
|
)]
|
||||||
|
pub async fn agent_info(
|
||||||
|
State(state): State<Arc<AppState>>,
|
||||||
|
) -> Result<impl IntoResponse, (StatusCode, Json<serde_json::Value>)> {
|
||||||
|
let agent_config = state.config.agent.as_ref().ok_or_else(|| {
|
||||||
|
(
|
||||||
|
StatusCode::SERVICE_UNAVAILABLE,
|
||||||
|
Json(serde_json::json!({
|
||||||
|
"error": "Not configured",
|
||||||
|
"message": "Agent binary distribution is not configured.",
|
||||||
|
})),
|
||||||
|
)
|
||||||
|
})?;
|
||||||
|
|
||||||
|
let binary_dir = std::path::Path::new(&agent_config.binary_dir);
|
||||||
|
let architectures = ["x86_64", "aarch64"];
|
||||||
|
|
||||||
|
let mut arch_infos = Vec::new();
|
||||||
|
for arch in &architectures {
|
||||||
|
let arch_specific = binary_dir.join(format!("attune-agent-{}", arch));
|
||||||
|
let generic = binary_dir.join("attune-agent");
|
||||||
|
|
||||||
|
// Only fall back to the generic binary for x86_64, since the build
|
||||||
|
// pipeline currently produces x86_64-only generic binaries.
|
||||||
|
let (available, size_bytes) = if arch_specific.exists() {
|
||||||
|
match fs::metadata(&arch_specific).await {
|
||||||
|
Ok(m) => (true, m.len()),
|
||||||
|
Err(_) => (false, 0),
|
||||||
|
}
|
||||||
|
} else if *arch == "x86_64" && generic.exists() {
|
||||||
|
match fs::metadata(&generic).await {
|
||||||
|
Ok(m) => (true, m.len()),
|
||||||
|
Err(_) => (false, 0),
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
(false, 0)
|
||||||
|
};
|
||||||
|
|
||||||
|
arch_infos.push(AgentArchInfo {
|
||||||
|
arch: arch.to_string(),
|
||||||
|
size_bytes,
|
||||||
|
available,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(Json(AgentBinaryInfo {
|
||||||
|
architectures: arch_infos,
|
||||||
|
version: env!("CARGO_PKG_VERSION").to_string(),
|
||||||
|
}))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Create agent routes
|
||||||
|
pub fn routes() -> Router<Arc<AppState>> {
|
||||||
|
Router::new()
|
||||||
|
.route("/agent/binary", get(download_agent_binary))
|
||||||
|
.route("/agent/info", get(agent_info))
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use super::*;
|
||||||
|
use attune_common::config::AgentConfig;
|
||||||
|
use axum::http::{HeaderMap, HeaderValue};
|
||||||
|
|
||||||
|
// ── validate_arch tests ─────────────────────────────────────────
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_validate_arch_valid_x86_64() {
|
||||||
|
let result = validate_arch("x86_64");
|
||||||
|
assert!(result.is_ok());
|
||||||
|
assert_eq!(result.unwrap(), "x86_64");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_validate_arch_valid_aarch64() {
|
||||||
|
let result = validate_arch("aarch64");
|
||||||
|
assert!(result.is_ok());
|
||||||
|
assert_eq!(result.unwrap(), "aarch64");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_validate_arch_arm64_alias() {
|
||||||
|
// "arm64" is an alias for "aarch64"
|
||||||
|
let result = validate_arch("arm64");
|
||||||
|
assert!(result.is_ok());
|
||||||
|
assert_eq!(result.unwrap(), "aarch64");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_validate_arch_invalid() {
|
||||||
|
let result = validate_arch("mips");
|
||||||
|
assert!(result.is_err());
|
||||||
|
let (status, body) = result.unwrap_err();
|
||||||
|
assert_eq!(status, StatusCode::BAD_REQUEST);
|
||||||
|
assert_eq!(body.0["error"], "Invalid architecture");
|
||||||
|
}
|
||||||
|
|
||||||
|
// ── validate_token tests ────────────────────────────────────────
|
||||||
|
|
||||||
|
/// Helper: build a minimal Config with the given agent config.
|
||||||
|
/// Only the `agent` field is relevant for `validate_token`.
|
||||||
|
fn test_config(agent: Option<AgentConfig>) -> attune_common::config::Config {
|
||||||
|
let manifest_dir = std::env::var("CARGO_MANIFEST_DIR").unwrap_or_else(|_| ".".to_string());
|
||||||
|
let config_path = format!("{}/../../config.test.yaml", manifest_dir);
|
||||||
|
let mut config = attune_common::config::Config::load_from_file(&config_path)
|
||||||
|
.expect("Failed to load test config");
|
||||||
|
config.agent = agent;
|
||||||
|
config
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_validate_token_no_config() {
|
||||||
|
// When no agent config is set at all, no token is required.
|
||||||
|
let config = test_config(None);
|
||||||
|
let headers = HeaderMap::new();
|
||||||
|
let query_token = None;
|
||||||
|
|
||||||
|
let result = validate_token(&config, &headers, &query_token);
|
||||||
|
assert!(result.is_ok());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_validate_token_no_bootstrap_token_configured() {
|
||||||
|
// Agent config exists but bootstrap_token is None → no token required.
|
||||||
|
let config = test_config(Some(AgentConfig {
|
||||||
|
binary_dir: "/tmp/test".to_string(),
|
||||||
|
bootstrap_token: None,
|
||||||
|
}));
|
||||||
|
let headers = HeaderMap::new();
|
||||||
|
let query_token = None;
|
||||||
|
|
||||||
|
let result = validate_token(&config, &headers, &query_token);
|
||||||
|
assert!(result.is_ok());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_validate_token_valid_from_header() {
|
||||||
|
let config = test_config(Some(AgentConfig {
|
||||||
|
binary_dir: "/tmp/test".to_string(),
|
||||||
|
bootstrap_token: Some("s3cret-bootstrap".to_string()),
|
||||||
|
}));
|
||||||
|
let mut headers = HeaderMap::new();
|
||||||
|
headers.insert(
|
||||||
|
"x-agent-token",
|
||||||
|
HeaderValue::from_static("s3cret-bootstrap"),
|
||||||
|
);
|
||||||
|
let query_token = None;
|
||||||
|
|
||||||
|
let result = validate_token(&config, &headers, &query_token);
|
||||||
|
assert!(result.is_ok());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_validate_token_valid_from_query() {
|
||||||
|
let config = test_config(Some(AgentConfig {
|
||||||
|
binary_dir: "/tmp/test".to_string(),
|
||||||
|
bootstrap_token: Some("s3cret-bootstrap".to_string()),
|
||||||
|
}));
|
||||||
|
let headers = HeaderMap::new();
|
||||||
|
let query_token = Some("s3cret-bootstrap".to_string());
|
||||||
|
|
||||||
|
let result = validate_token(&config, &headers, &query_token);
|
||||||
|
assert!(result.is_ok());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_validate_token_invalid() {
|
||||||
|
let config = test_config(Some(AgentConfig {
|
||||||
|
binary_dir: "/tmp/test".to_string(),
|
||||||
|
bootstrap_token: Some("correct-token".to_string()),
|
||||||
|
}));
|
||||||
|
let mut headers = HeaderMap::new();
|
||||||
|
headers.insert("x-agent-token", HeaderValue::from_static("wrong-token"));
|
||||||
|
let query_token = None;
|
||||||
|
|
||||||
|
let result = validate_token(&config, &headers, &query_token);
|
||||||
|
assert!(result.is_err());
|
||||||
|
let (status, body) = result.unwrap_err();
|
||||||
|
assert_eq!(status, StatusCode::UNAUTHORIZED);
|
||||||
|
assert_eq!(body.0["error"], "Invalid token");
|
||||||
|
}
|
||||||
|
|
||||||
|
/// When a bootstrap token is configured but the caller supplies neither a
/// header nor a query token, validation fails with 401 and "Token required".
#[test]
fn test_validate_token_missing_when_required() {
    let cfg = test_config(Some(AgentConfig {
        binary_dir: "/tmp/test".to_string(),
        bootstrap_token: Some("required-token".to_string()),
    }));

    let empty_headers = HeaderMap::new();
    let query_token: Option<String> = None;

    let outcome = validate_token(&cfg, &empty_headers, &query_token);
    let (status, body) = outcome.expect_err("missing token must be rejected");
    assert_eq!(status, StatusCode::UNAUTHORIZED);
    assert_eq!(body.0["error"], "Token required");
}
|
||||||
|
|
||||||
|
/// When both sources carry a token, the header is consulted first (it comes
/// first in the or_else chain). A valid header token therefore wins even
/// though the query-string token is wrong.
#[test]
fn test_validate_token_header_takes_precedence_over_query() {
    let cfg = test_config(Some(AgentConfig {
        binary_dir: "/tmp/test".to_string(),
        bootstrap_token: Some("the-real-token".to_string()),
    }));

    let mut header_map = HeaderMap::new();
    header_map.insert("x-agent-token", HeaderValue::from_static("the-real-token"));
    let query_token = Some("wrong-token".to_string());

    assert!(validate_token(&cfg, &header_map, &query_token).is_ok());
}
|
||||||
|
}
|
||||||
@@ -36,15 +36,16 @@ use attune_common::repositories::{
|
|||||||
ArtifactRepository, ArtifactSearchFilters, ArtifactVersionRepository, CreateArtifactInput,
|
ArtifactRepository, ArtifactSearchFilters, ArtifactVersionRepository, CreateArtifactInput,
|
||||||
CreateArtifactVersionInput, UpdateArtifactInput,
|
CreateArtifactVersionInput, UpdateArtifactInput,
|
||||||
},
|
},
|
||||||
Create, Delete, FindById, FindByRef, Update,
|
Create, Delete, FindById, FindByRef, Patch, Update,
|
||||||
};
|
};
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
auth::middleware::RequireAuth,
|
auth::middleware::RequireAuth,
|
||||||
dto::{
|
dto::{
|
||||||
artifact::{
|
artifact::{
|
||||||
AllocateFileVersionByRefRequest, AppendProgressRequest, ArtifactQueryParams,
|
AllocateFileVersionByRefRequest, AppendProgressRequest, ArtifactExecutionPatch,
|
||||||
ArtifactResponse, ArtifactSummary, ArtifactVersionResponse, ArtifactVersionSummary,
|
ArtifactJsonPatch, ArtifactQueryParams, ArtifactResponse, ArtifactStringPatch,
|
||||||
|
ArtifactSummary, ArtifactVersionResponse, ArtifactVersionSummary,
|
||||||
CreateArtifactRequest, CreateFileVersionRequest, CreateVersionJsonRequest,
|
CreateArtifactRequest, CreateFileVersionRequest, CreateVersionJsonRequest,
|
||||||
SetDataRequest, UpdateArtifactRequest,
|
SetDataRequest, UpdateArtifactRequest,
|
||||||
},
|
},
|
||||||
@@ -257,12 +258,27 @@ pub async fn update_artifact(
|
|||||||
visibility: request.visibility,
|
visibility: request.visibility,
|
||||||
retention_policy: request.retention_policy,
|
retention_policy: request.retention_policy,
|
||||||
retention_limit: request.retention_limit,
|
retention_limit: request.retention_limit,
|
||||||
name: request.name,
|
name: request.name.map(|patch| match patch {
|
||||||
description: request.description,
|
ArtifactStringPatch::Set(value) => Patch::Set(value),
|
||||||
content_type: request.content_type,
|
ArtifactStringPatch::Clear => Patch::Clear,
|
||||||
|
}),
|
||||||
|
description: request.description.map(|patch| match patch {
|
||||||
|
ArtifactStringPatch::Set(value) => Patch::Set(value),
|
||||||
|
ArtifactStringPatch::Clear => Patch::Clear,
|
||||||
|
}),
|
||||||
|
content_type: request.content_type.map(|patch| match patch {
|
||||||
|
ArtifactStringPatch::Set(value) => Patch::Set(value),
|
||||||
|
ArtifactStringPatch::Clear => Patch::Clear,
|
||||||
|
}),
|
||||||
size_bytes: None, // Managed by version creation trigger
|
size_bytes: None, // Managed by version creation trigger
|
||||||
execution: request.execution.map(Some),
|
execution: request.execution.map(|patch| match patch {
|
||||||
data: request.data,
|
ArtifactExecutionPatch::Set(value) => Patch::Set(value),
|
||||||
|
ArtifactExecutionPatch::Clear => Patch::Clear,
|
||||||
|
}),
|
||||||
|
data: request.data.map(|patch| match patch {
|
||||||
|
ArtifactJsonPatch::Set(value) => Patch::Set(value),
|
||||||
|
ArtifactJsonPatch::Clear => Patch::Clear,
|
||||||
|
}),
|
||||||
};
|
};
|
||||||
|
|
||||||
let updated = ArtifactRepository::update(&state.db, id, input).await?;
|
let updated = ArtifactRepository::update(&state.db, id, input).await?;
|
||||||
@@ -1155,7 +1171,7 @@ pub async fn upload_version_by_ref(
|
|||||||
description: None,
|
description: None,
|
||||||
content_type: None,
|
content_type: None,
|
||||||
size_bytes: None,
|
size_bytes: None,
|
||||||
execution: execution_id.map(Some),
|
execution: execution_id.map(Patch::Set),
|
||||||
data: None,
|
data: None,
|
||||||
};
|
};
|
||||||
ArtifactRepository::update(&state.db, existing.id, update_input).await?
|
ArtifactRepository::update(&state.db, existing.id, update_input).await?
|
||||||
@@ -1303,7 +1319,7 @@ pub async fn allocate_file_version_by_ref(
|
|||||||
description: None,
|
description: None,
|
||||||
content_type: None,
|
content_type: None,
|
||||||
size_bytes: None,
|
size_bytes: None,
|
||||||
execution: request.execution.map(Some),
|
execution: request.execution.map(Patch::Set),
|
||||||
data: None,
|
data: None,
|
||||||
};
|
};
|
||||||
ArtifactRepository::update(&state.db, existing.id, update_input).await?
|
ArtifactRepository::update(&state.db, existing.id, update_input).await?
|
||||||
|
|||||||
@@ -1,7 +1,9 @@
|
|||||||
//! Authentication routes
|
//! Authentication routes
|
||||||
|
|
||||||
use axum::{
|
use axum::{
|
||||||
extract::State,
|
extract::{Query, State},
|
||||||
|
http::HeaderMap,
|
||||||
|
response::{IntoResponse, Redirect, Response},
|
||||||
routing::{get, post},
|
routing::{get, post},
|
||||||
Json, Router,
|
Json, Router,
|
||||||
};
|
};
|
||||||
@@ -21,11 +23,16 @@ use crate::{
|
|||||||
TokenType,
|
TokenType,
|
||||||
},
|
},
|
||||||
middleware::RequireAuth,
|
middleware::RequireAuth,
|
||||||
|
oidc::{
|
||||||
|
apply_cookies_to_headers, build_login_redirect, build_logout_redirect,
|
||||||
|
cookie_authenticated_user, get_cookie_value, oidc_callback_redirect_response,
|
||||||
|
OidcCallbackQuery, REFRESH_COOKIE_NAME,
|
||||||
|
},
|
||||||
verify_password,
|
verify_password,
|
||||||
},
|
},
|
||||||
dto::{
|
dto::{
|
||||||
ApiResponse, ChangePasswordRequest, CurrentUserResponse, LoginRequest, RefreshTokenRequest,
|
ApiResponse, AuthSettingsResponse, ChangePasswordRequest, CurrentUserResponse,
|
||||||
RegisterRequest, SuccessResponse, TokenResponse,
|
LoginRequest, RefreshTokenRequest, RegisterRequest, SuccessResponse, TokenResponse,
|
||||||
},
|
},
|
||||||
middleware::error::ApiError,
|
middleware::error::ApiError,
|
||||||
state::SharedState,
|
state::SharedState,
|
||||||
@@ -63,7 +70,12 @@ pub struct SensorTokenResponse {
|
|||||||
/// Create authentication routes
|
/// Create authentication routes
|
||||||
pub fn routes() -> Router<SharedState> {
|
pub fn routes() -> Router<SharedState> {
|
||||||
Router::new()
|
Router::new()
|
||||||
|
.route("/settings", get(auth_settings))
|
||||||
.route("/login", post(login))
|
.route("/login", post(login))
|
||||||
|
.route("/oidc/login", get(oidc_login))
|
||||||
|
.route("/callback", get(oidc_callback))
|
||||||
|
.route("/ldap/login", post(ldap_login))
|
||||||
|
.route("/logout", get(logout))
|
||||||
.route("/register", post(register))
|
.route("/register", post(register))
|
||||||
.route("/refresh", post(refresh_token))
|
.route("/refresh", post(refresh_token))
|
||||||
.route("/me", get(get_current_user))
|
.route("/me", get(get_current_user))
|
||||||
@@ -72,6 +84,63 @@ pub fn routes() -> Router<SharedState> {
|
|||||||
.route("/internal/sensor-token", post(create_sensor_token_internal))
|
.route("/internal/sensor-token", post(create_sensor_token_internal))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Authentication settings endpoint
///
/// GET /auth/settings
///
/// Describes which login mechanisms (local password, OIDC, LDAP) are enabled
/// and how the login page should present them by default, so a client can
/// render the appropriate options before any authentication takes place.
#[utoipa::path(
    get,
    path = "/auth/settings",
    tag = "auth",
    responses(
        (status = 200, description = "Authentication settings", body = inline(ApiResponse<AuthSettingsResponse>))
    )
)]
pub async fn auth_settings(
    State(state): State<SharedState>,
) -> Result<Json<ApiResponse<AuthSettingsResponse>>, ApiError> {
    // OIDC counts as available only when the section is present AND enabled.
    let oidc = state
        .config
        .security
        .oidc
        .as_ref()
        .filter(|oidc| oidc.enabled);

    // Same rule for LDAP: configured but disabled is treated as absent.
    let ldap = state
        .config
        .security
        .ldap
        .as_ref()
        .filter(|ldap| ldap.enabled);

    let response = AuthSettingsResponse {
        authentication_enabled: state.config.security.enable_auth,
        // Local password login is available whenever auth is enabled at all.
        local_password_enabled: state.config.security.enable_auth,
        // Visibility is a separate UI hint gated by the login-page config.
        local_password_visible_by_default: state.config.security.enable_auth
            && state.config.security.login_page.show_local_login,
        oidc_enabled: oidc.is_some(),
        oidc_visible_by_default: oidc.is_some() && state.config.security.login_page.show_oidc_login,
        oidc_provider_name: oidc.map(|oidc| oidc.provider_name.clone()),
        // Display label falls back to the provider name when no label is set.
        oidc_provider_label: oidc.map(|oidc| {
            oidc.provider_label
                .clone()
                .unwrap_or_else(|| oidc.provider_name.clone())
        }),
        oidc_provider_icon_url: oidc.and_then(|oidc| oidc.provider_icon_url.clone()),
        ldap_enabled: ldap.is_some(),
        ldap_visible_by_default: ldap.is_some() && state.config.security.login_page.show_ldap_login,
        ldap_provider_name: ldap.map(|ldap| ldap.provider_name.clone()),
        // Same label fallback behaviour as for OIDC.
        ldap_provider_label: ldap.map(|ldap| {
            ldap.provider_label
                .clone()
                .unwrap_or_else(|| ldap.provider_name.clone())
        }),
        ldap_provider_icon_url: ldap.and_then(|ldap| ldap.provider_icon_url.clone()),
        self_registration_enabled: state.config.security.allow_self_registration,
    };

    Ok(Json(ApiResponse::new(response)))
}
|
||||||
|
|
||||||
/// Login endpoint
|
/// Login endpoint
|
||||||
///
|
///
|
||||||
/// POST /auth/login
|
/// POST /auth/login
|
||||||
@@ -152,6 +221,12 @@ pub async fn register(
|
|||||||
State(state): State<SharedState>,
|
State(state): State<SharedState>,
|
||||||
Json(payload): Json<RegisterRequest>,
|
Json(payload): Json<RegisterRequest>,
|
||||||
) -> Result<Json<ApiResponse<TokenResponse>>, ApiError> {
|
) -> Result<Json<ApiResponse<TokenResponse>>, ApiError> {
|
||||||
|
if !state.config.security.allow_self_registration {
|
||||||
|
return Err(ApiError::Forbidden(
|
||||||
|
"Self-service registration is disabled; identities must be provisioned by an administrator or identity provider".to_string(),
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
// Validate request
|
// Validate request
|
||||||
payload
|
payload
|
||||||
.validate()
|
.validate()
|
||||||
@@ -171,7 +246,7 @@ pub async fn register(
|
|||||||
// Hash password
|
// Hash password
|
||||||
let password_hash = hash_password(&payload.password)?;
|
let password_hash = hash_password(&payload.password)?;
|
||||||
|
|
||||||
// Create identity with password hash
|
// Registration creates an identity only; permission assignments are managed separately.
|
||||||
let input = CreateIdentityInput {
|
let input = CreateIdentityInput {
|
||||||
login: payload.login.clone(),
|
login: payload.login.clone(),
|
||||||
display_name: payload.display_name,
|
display_name: payload.display_name,
|
||||||
@@ -215,15 +290,22 @@ pub async fn register(
|
|||||||
)]
|
)]
|
||||||
pub async fn refresh_token(
|
pub async fn refresh_token(
|
||||||
State(state): State<SharedState>,
|
State(state): State<SharedState>,
|
||||||
Json(payload): Json<RefreshTokenRequest>,
|
headers: HeaderMap,
|
||||||
) -> Result<Json<ApiResponse<TokenResponse>>, ApiError> {
|
payload: Option<Json<RefreshTokenRequest>>,
|
||||||
// Validate request
|
) -> Result<Response, ApiError> {
|
||||||
payload
|
let browser_cookie_refresh = payload.is_none();
|
||||||
.validate()
|
let refresh_token = if let Some(Json(payload)) = payload {
|
||||||
.map_err(|e| ApiError::ValidationError(format!("Invalid refresh token request: {}", e)))?;
|
payload.validate().map_err(|e| {
|
||||||
|
ApiError::ValidationError(format!("Invalid refresh token request: {}", e))
|
||||||
|
})?;
|
||||||
|
payload.refresh_token
|
||||||
|
} else {
|
||||||
|
get_cookie_value(&headers, REFRESH_COOKIE_NAME)
|
||||||
|
.ok_or_else(|| ApiError::Unauthorized("Missing refresh token".to_string()))?
|
||||||
|
};
|
||||||
|
|
||||||
// Validate refresh token
|
// Validate refresh token
|
||||||
let claims = validate_token(&payload.refresh_token, &state.jwt_config)
|
let claims = validate_token(&refresh_token, &state.jwt_config)
|
||||||
.map_err(|_| ApiError::Unauthorized("Invalid or expired refresh token".to_string()))?;
|
.map_err(|_| ApiError::Unauthorized("Invalid or expired refresh token".to_string()))?;
|
||||||
|
|
||||||
// Ensure it's a refresh token
|
// Ensure it's a refresh token
|
||||||
@@ -251,8 +333,18 @@ pub async fn refresh_token(
|
|||||||
refresh_token,
|
refresh_token,
|
||||||
state.jwt_config.access_token_expiration,
|
state.jwt_config.access_token_expiration,
|
||||||
);
|
);
|
||||||
|
let response_body = Json(ApiResponse::new(response.clone()));
|
||||||
|
|
||||||
Ok(Json(ApiResponse::new(response)))
|
if browser_cookie_refresh {
|
||||||
|
let mut http_response = response_body.into_response();
|
||||||
|
apply_cookies_to_headers(
|
||||||
|
http_response.headers_mut(),
|
||||||
|
&crate::auth::oidc::build_auth_cookies(&state, &response, ""),
|
||||||
|
)?;
|
||||||
|
return Ok(http_response);
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(response_body.into_response())
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Get current user endpoint
|
/// Get current user endpoint
|
||||||
@@ -273,9 +365,15 @@ pub async fn refresh_token(
|
|||||||
)]
|
)]
|
||||||
pub async fn get_current_user(
|
pub async fn get_current_user(
|
||||||
State(state): State<SharedState>,
|
State(state): State<SharedState>,
|
||||||
RequireAuth(user): RequireAuth,
|
headers: HeaderMap,
|
||||||
|
user: Result<RequireAuth, crate::auth::middleware::AuthError>,
|
||||||
) -> Result<Json<ApiResponse<CurrentUserResponse>>, ApiError> {
|
) -> Result<Json<ApiResponse<CurrentUserResponse>>, ApiError> {
|
||||||
let identity_id = user.identity_id()?;
|
let authenticated_user = match user {
|
||||||
|
Ok(RequireAuth(user)) => user,
|
||||||
|
Err(_) => cookie_authenticated_user(&headers, &state)?
|
||||||
|
.ok_or_else(|| ApiError::Unauthorized("Unauthorized".to_string()))?,
|
||||||
|
};
|
||||||
|
let identity_id = authenticated_user.identity_id()?;
|
||||||
|
|
||||||
// Fetch identity from database
|
// Fetch identity from database
|
||||||
let identity = IdentityRepository::find_by_id(&state.db, identity_id)
|
let identity = IdentityRepository::find_by_id(&state.db, identity_id)
|
||||||
@@ -291,6 +389,106 @@ pub async fn get_current_user(
|
|||||||
Ok(Json(ApiResponse::new(response)))
|
Ok(Json(ApiResponse::new(response)))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Request body for LDAP login.
///
/// Both fields are length-validated (via `Validate`) before any directory
/// bind is attempted.
#[derive(Debug, Serialize, Deserialize, Validate, ToSchema)]
pub struct LdapLoginRequest {
    /// User login name (uid, sAMAccountName, etc.)
    #[validate(length(min = 1, max = 255))]
    pub login: String,
    /// User password
    #[validate(length(min = 1, max = 512))]
    pub password: String,
}
|
||||||
|
|
||||||
|
/// Query parameters accepted by the browser OIDC login entry point.
#[derive(Debug, Deserialize)]
pub struct OidcLoginParams {
    /// Optional post-login destination. NOTE(review): presumably persisted via
    /// the OIDC redirect cookie and consumed by `oidc_callback` — confirm in
    /// the oidc module.
    pub redirect_to: Option<String>,
}
|
||||||
|
|
||||||
|
/// Begin browser OIDC login by redirecting to the provider.
///
/// Builds the provider authorization URL (forwarding the caller's optional
/// `redirect_to` destination), replies with a temporary redirect to it, and
/// attaches the cookies produced by `build_login_redirect` so the callback
/// handler can validate the flow.
pub async fn oidc_login(
    State(state): State<SharedState>,
    Query(params): Query<OidcLoginParams>,
) -> Result<Response, ApiError> {
    let login_redirect = build_login_redirect(&state, params.redirect_to.as_deref()).await?;
    let mut response = Redirect::temporary(&login_redirect.authorization_url).into_response();
    // Cookies carry flow state across the round-trip to the identity provider.
    apply_cookies_to_headers(response.headers_mut(), &login_redirect.cookies)?;
    Ok(response)
}
|
||||||
|
|
||||||
|
/// Handle the OIDC authorization code callback.
///
/// Restores the pre-login destination from the redirect cookie (if any),
/// completes the code exchange via `handle_callback`, and returns a redirect
/// response that also establishes the browser session cookies.
pub async fn oidc_callback(
    State(state): State<SharedState>,
    headers: HeaderMap,
    Query(query): Query<OidcCallbackQuery>,
) -> Result<Response, ApiError> {
    // Where to send the user after login; set by `oidc_login` when requested.
    let redirect_to = get_cookie_value(&headers, crate::auth::oidc::OIDC_REDIRECT_COOKIE_NAME);
    let authenticated = crate::auth::oidc::handle_callback(&state, &headers, &query).await?;
    oidc_callback_redirect_response(
        &state,
        &authenticated.token_response,
        redirect_to,
        &authenticated.id_token,
    )
}
|
||||||
|
|
||||||
|
/// Authenticate via LDAP directory.
///
/// POST /auth/ldap/login
///
/// Validates the credentials' lengths, delegates the actual bind/lookup to
/// `crate::auth::ldap::authenticate`, and returns the resulting token pair.
#[utoipa::path(
    post,
    path = "/auth/ldap/login",
    tag = "auth",
    request_body = LdapLoginRequest,
    responses(
        (status = 200, description = "Successfully authenticated via LDAP", body = inline(ApiResponse<TokenResponse>)),
        (status = 401, description = "Invalid LDAP credentials"),
        (status = 501, description = "LDAP not configured")
    )
)]
pub async fn ldap_login(
    State(state): State<SharedState>,
    Json(payload): Json<LdapLoginRequest>,
) -> Result<Json<ApiResponse<TokenResponse>>, ApiError> {
    // Reject out-of-range field lengths before touching the directory.
    payload
        .validate()
        .map_err(|e| ApiError::ValidationError(format!("Invalid LDAP login request: {e}")))?;

    let authenticated =
        crate::auth::ldap::authenticate(&state, &payload.login, &payload.password).await?;

    Ok(Json(ApiResponse::new(authenticated.token_response)))
}
|
||||||
|
|
||||||
|
/// Logout the current browser session and optionally redirect through the provider logout flow.
///
/// When OIDC is enabled, the user is sent through the provider's logout
/// redirect (which also supplies cookie mutations); otherwise the local auth
/// cookies are cleared and the user is redirected to `/login`.
pub async fn logout(
    State(state): State<SharedState>,
    headers: HeaderMap,
) -> Result<Response, ApiError> {
    // OIDC must be both configured and enabled to use the provider flow.
    let oidc_enabled = state
        .config
        .security
        .oidc
        .as_ref()
        .is_some_and(|oidc| oidc.enabled);

    let response = if oidc_enabled {
        // Provider-driven logout: redirect URL and cookies come from the oidc module.
        let logout_redirect = build_logout_redirect(&state, &headers).await?;
        let mut response = Redirect::temporary(&logout_redirect.redirect_url).into_response();
        apply_cookies_to_headers(response.headers_mut(), &logout_redirect.cookies)?;
        response
    } else {
        // Local logout: drop the auth cookies and bounce to the login page.
        let mut response = Redirect::temporary("/login").into_response();
        apply_cookies_to_headers(
            response.headers_mut(),
            &crate::auth::oidc::clear_auth_cookies(&state),
        )?;
        response
    };

    Ok(response)
}
|
||||||
|
|
||||||
/// Change password endpoint
|
/// Change password endpoint
|
||||||
///
|
///
|
||||||
/// POST /auth/change-password
|
/// POST /auth/change-password
|
||||||
|
|||||||
@@ -10,6 +10,7 @@ use axum::{
|
|||||||
routing::get,
|
routing::get,
|
||||||
Json, Router,
|
Json, Router,
|
||||||
};
|
};
|
||||||
|
use chrono::Utc;
|
||||||
use futures::stream::{Stream, StreamExt};
|
use futures::stream::{Stream, StreamExt};
|
||||||
use std::sync::Arc;
|
use std::sync::Arc;
|
||||||
use tokio_stream::wrappers::BroadcastStream;
|
use tokio_stream::wrappers::BroadcastStream;
|
||||||
@@ -32,6 +33,7 @@ use sqlx::Row;
|
|||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
auth::middleware::RequireAuth,
|
auth::middleware::RequireAuth,
|
||||||
|
authz::{AuthorizationCheck, AuthorizationService},
|
||||||
dto::{
|
dto::{
|
||||||
common::{PaginatedResponse, PaginationParams},
|
common::{PaginatedResponse, PaginationParams},
|
||||||
execution::{
|
execution::{
|
||||||
@@ -42,6 +44,7 @@ use crate::{
|
|||||||
middleware::{ApiError, ApiResult},
|
middleware::{ApiError, ApiResult},
|
||||||
state::AppState,
|
state::AppState,
|
||||||
};
|
};
|
||||||
|
use attune_common::rbac::{Action, AuthorizationContext, Resource};
|
||||||
|
|
||||||
/// Create a new execution (manual execution)
|
/// Create a new execution (manual execution)
|
||||||
///
|
///
|
||||||
@@ -61,7 +64,7 @@ use crate::{
|
|||||||
)]
|
)]
|
||||||
pub async fn create_execution(
|
pub async fn create_execution(
|
||||||
State(state): State<Arc<AppState>>,
|
State(state): State<Arc<AppState>>,
|
||||||
RequireAuth(_user): RequireAuth,
|
RequireAuth(user): RequireAuth,
|
||||||
Json(request): Json<CreateExecutionRequest>,
|
Json(request): Json<CreateExecutionRequest>,
|
||||||
) -> ApiResult<impl IntoResponse> {
|
) -> ApiResult<impl IntoResponse> {
|
||||||
// Validate that the action exists
|
// Validate that the action exists
|
||||||
@@ -69,6 +72,42 @@ pub async fn create_execution(
|
|||||||
.await?
|
.await?
|
||||||
.ok_or_else(|| ApiError::NotFound(format!("Action '{}' not found", request.action_ref)))?;
|
.ok_or_else(|| ApiError::NotFound(format!("Action '{}' not found", request.action_ref)))?;
|
||||||
|
|
||||||
|
if user.claims.token_type == crate::auth::jwt::TokenType::Access {
|
||||||
|
let identity_id = user
|
||||||
|
.identity_id()
|
||||||
|
.map_err(|_| ApiError::Unauthorized("Invalid user identity".to_string()))?;
|
||||||
|
let authz = AuthorizationService::new(state.db.clone());
|
||||||
|
|
||||||
|
let mut action_ctx = AuthorizationContext::new(identity_id);
|
||||||
|
action_ctx.target_id = Some(action.id);
|
||||||
|
action_ctx.target_ref = Some(action.r#ref.clone());
|
||||||
|
action_ctx.pack_ref = Some(action.pack_ref.clone());
|
||||||
|
|
||||||
|
authz
|
||||||
|
.authorize(
|
||||||
|
&user,
|
||||||
|
AuthorizationCheck {
|
||||||
|
resource: Resource::Actions,
|
||||||
|
action: Action::Execute,
|
||||||
|
context: action_ctx,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
let mut execution_ctx = AuthorizationContext::new(identity_id);
|
||||||
|
execution_ctx.pack_ref = Some(action.pack_ref.clone());
|
||||||
|
authz
|
||||||
|
.authorize(
|
||||||
|
&user,
|
||||||
|
AuthorizationCheck {
|
||||||
|
resource: Resource::Executions,
|
||||||
|
action: Action::Create,
|
||||||
|
context: execution_ctx,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
}
|
||||||
|
|
||||||
// Create execution input
|
// Create execution input
|
||||||
let execution_input = CreateExecutionInput {
|
let execution_input = CreateExecutionInput {
|
||||||
action: Some(action.id),
|
action: Some(action.id),
|
||||||
@@ -84,6 +123,7 @@ pub async fn create_execution(
|
|||||||
parent: None,
|
parent: None,
|
||||||
enforcement: None,
|
enforcement: None,
|
||||||
executor: None,
|
executor: None,
|
||||||
|
worker: None,
|
||||||
status: ExecutionStatus::Requested,
|
status: ExecutionStatus::Requested,
|
||||||
result: None,
|
result: None,
|
||||||
workflow_task: None, // Non-workflow execution
|
workflow_task: None, // Non-workflow execution
|
||||||
@@ -440,9 +480,17 @@ pub async fn cancel_execution(
|
|||||||
..Default::default()
|
..Default::default()
|
||||||
};
|
};
|
||||||
let updated = ExecutionRepository::update(&state.db, id, update).await?;
|
let updated = ExecutionRepository::update(&state.db, id, update).await?;
|
||||||
|
let delegated_to_executor = publish_status_change_to_executor(
|
||||||
|
publisher.as_deref(),
|
||||||
|
&execution,
|
||||||
|
ExecutionStatus::Cancelled,
|
||||||
|
"api-service",
|
||||||
|
)
|
||||||
|
.await;
|
||||||
|
|
||||||
// Cascade to workflow children if this is a workflow execution
|
if !delegated_to_executor {
|
||||||
cancel_workflow_children(&state.db, publisher.as_deref(), id).await;
|
cancel_workflow_children(&state.db, publisher.as_deref(), id).await;
|
||||||
|
}
|
||||||
|
|
||||||
let response = ApiResponse::new(ExecutionResponse::from(updated));
|
let response = ApiResponse::new(ExecutionResponse::from(updated));
|
||||||
return Ok((StatusCode::OK, Json(response)));
|
return Ok((StatusCode::OK, Json(response)));
|
||||||
@@ -454,19 +502,27 @@ pub async fn cancel_execution(
|
|||||||
..Default::default()
|
..Default::default()
|
||||||
};
|
};
|
||||||
let updated = ExecutionRepository::update(&state.db, id, update).await?;
|
let updated = ExecutionRepository::update(&state.db, id, update).await?;
|
||||||
|
let delegated_to_executor = publish_status_change_to_executor(
|
||||||
|
publisher.as_deref(),
|
||||||
|
&execution,
|
||||||
|
ExecutionStatus::Canceling,
|
||||||
|
"api-service",
|
||||||
|
)
|
||||||
|
.await;
|
||||||
|
|
||||||
// Send cancel request to the worker via MQ
|
// Send cancel request to the worker via MQ
|
||||||
if let Some(worker_id) = execution.executor {
|
if let Some(worker_id) = execution.worker {
|
||||||
send_cancel_to_worker(publisher.as_deref(), id, worker_id).await;
|
send_cancel_to_worker(publisher.as_deref(), id, worker_id).await;
|
||||||
} else {
|
} else {
|
||||||
tracing::warn!(
|
tracing::warn!(
|
||||||
"Execution {} has no executor/worker assigned; marked as canceling but no MQ message sent",
|
"Execution {} has no worker assigned; marked as canceling but no MQ message sent",
|
||||||
id
|
id
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
// Cascade to workflow children if this is a workflow execution
|
if !delegated_to_executor {
|
||||||
cancel_workflow_children(&state.db, publisher.as_deref(), id).await;
|
cancel_workflow_children(&state.db, publisher.as_deref(), id).await;
|
||||||
|
}
|
||||||
|
|
||||||
let response = ApiResponse::new(ExecutionResponse::from(updated));
|
let response = ApiResponse::new(ExecutionResponse::from(updated));
|
||||||
Ok((StatusCode::OK, Json(response)))
|
Ok((StatusCode::OK, Json(response)))
|
||||||
@@ -504,6 +560,53 @@ async fn send_cancel_to_worker(publisher: Option<&Publisher>, execution_id: i64,
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Publish an `ExecutionStatusChanged` message for `execution` so an external
/// executor can react to the transition.
///
/// Returns `true` only when a publisher is available AND the envelope was
/// published successfully. Callers use the return value to decide whether the
/// status change was delegated to the executor or must be handled locally
/// (e.g. cascading cancellation to workflow children).
async fn publish_status_change_to_executor(
    publisher: Option<&Publisher>,
    execution: &attune_common::models::Execution,
    new_status: ExecutionStatus,
    source: &str,
) -> bool {
    // No message queue configured: nothing was delegated.
    let Some(publisher) = publisher else {
        return false;
    };

    // Map the enum to the lowercase wire-format string. Exhaustive on purpose:
    // adding a variant must force this mapping to be revisited.
    let new_status = match new_status {
        ExecutionStatus::Requested => "requested",
        ExecutionStatus::Scheduling => "scheduling",
        ExecutionStatus::Scheduled => "scheduled",
        ExecutionStatus::Running => "running",
        ExecutionStatus::Completed => "completed",
        ExecutionStatus::Failed => "failed",
        ExecutionStatus::Canceling => "canceling",
        ExecutionStatus::Cancelled => "cancelled",
        ExecutionStatus::Timeout => "timeout",
        ExecutionStatus::Abandoned => "abandoned",
    };

    let payload = attune_common::mq::ExecutionStatusChangedPayload {
        execution_id: execution.id,
        action_ref: execution.action_ref.clone(),
        // Previous status derives from Debug formatting; assumes Debug names
        // match the wire strings above when lowercased — TODO confirm.
        previous_status: format!("{:?}", execution.status).to_lowercase(),
        new_status: new_status.to_string(),
        changed_at: Utc::now(),
    };

    let envelope = MessageEnvelope::new(MessageType::ExecutionStatusChanged, payload)
        .with_source(source)
        .with_correlation_id(uuid::Uuid::new_v4());

    // Publish failures are logged but not propagated; the caller falls back to
    // local handling when we return false.
    if let Err(e) = publisher.publish_envelope(&envelope).await {
        tracing::error!(
            "Failed to publish status change for execution {} to executor: {}",
            execution.id,
            e
        );
        return false;
    }

    true
}
|
||||||
|
|
||||||
/// Resolve the [`CancellationPolicy`] for a workflow parent execution.
|
/// Resolve the [`CancellationPolicy`] for a workflow parent execution.
|
||||||
///
|
///
|
||||||
/// Looks up the `workflow_execution` → `workflow_definition` chain and
|
/// Looks up the `workflow_execution` → `workflow_definition` chain and
|
||||||
@@ -652,7 +755,7 @@ async fn cancel_workflow_children_with_policy(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if let Some(worker_id) = child.executor {
|
if let Some(worker_id) = child.worker {
|
||||||
send_cancel_to_worker(publisher, child_id, worker_id).await;
|
send_cancel_to_worker(publisher, child_id, worker_id).await;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -10,7 +10,6 @@ use axum::{
|
|||||||
use std::sync::Arc;
|
use std::sync::Arc;
|
||||||
use validator::Validate;
|
use validator::Validate;
|
||||||
|
|
||||||
use attune_common::models::OwnerType;
|
|
||||||
use attune_common::repositories::{
|
use attune_common::repositories::{
|
||||||
action::ActionRepository,
|
action::ActionRepository,
|
||||||
key::{CreateKeyInput, KeyRepository, KeySearchFilters, UpdateKeyInput},
|
key::{CreateKeyInput, KeyRepository, KeySearchFilters, UpdateKeyInput},
|
||||||
@@ -18,9 +17,14 @@ use attune_common::repositories::{
|
|||||||
trigger::SensorRepository,
|
trigger::SensorRepository,
|
||||||
Create, Delete, FindByRef, Update,
|
Create, Delete, FindByRef, Update,
|
||||||
};
|
};
|
||||||
|
use attune_common::{
|
||||||
|
models::{key::Key, OwnerType},
|
||||||
|
rbac::{Action, AuthorizationContext, Resource},
|
||||||
|
};
|
||||||
|
|
||||||
use crate::auth::RequireAuth;
|
use crate::auth::{jwt::TokenType, RequireAuth};
|
||||||
use crate::{
|
use crate::{
|
||||||
|
authz::{AuthorizationCheck, AuthorizationService},
|
||||||
dto::{
|
dto::{
|
||||||
common::{PaginatedResponse, PaginationParams},
|
common::{PaginatedResponse, PaginationParams},
|
||||||
key::{CreateKeyRequest, KeyQueryParams, KeyResponse, KeySummary, UpdateKeyRequest},
|
key::{CreateKeyRequest, KeyQueryParams, KeyResponse, KeySummary, UpdateKeyRequest},
|
||||||
@@ -42,7 +46,7 @@ use crate::{
|
|||||||
security(("bearer_auth" = []))
|
security(("bearer_auth" = []))
|
||||||
)]
|
)]
|
||||||
pub async fn list_keys(
|
pub async fn list_keys(
|
||||||
_user: RequireAuth,
|
user: RequireAuth,
|
||||||
State(state): State<Arc<AppState>>,
|
State(state): State<Arc<AppState>>,
|
||||||
Query(query): Query<KeyQueryParams>,
|
Query(query): Query<KeyQueryParams>,
|
||||||
) -> ApiResult<impl IntoResponse> {
|
) -> ApiResult<impl IntoResponse> {
|
||||||
@@ -55,8 +59,33 @@ pub async fn list_keys(
|
|||||||
};
|
};
|
||||||
|
|
||||||
let result = KeyRepository::search(&state.db, &filters).await?;
|
let result = KeyRepository::search(&state.db, &filters).await?;
|
||||||
|
let mut rows = result.rows;
|
||||||
|
|
||||||
let paginated_keys: Vec<KeySummary> = result.rows.into_iter().map(KeySummary::from).collect();
|
if user.0.claims.token_type == TokenType::Access {
|
||||||
|
let identity_id = user
|
||||||
|
.0
|
||||||
|
.identity_id()
|
||||||
|
.map_err(|_| ApiError::Unauthorized("Invalid user identity".to_string()))?;
|
||||||
|
let authz = AuthorizationService::new(state.db.clone());
|
||||||
|
let grants = authz.effective_grants(&user.0).await?;
|
||||||
|
|
||||||
|
// Ensure the principal can read at least some key records.
|
||||||
|
let can_read_any_key = grants
|
||||||
|
.iter()
|
||||||
|
.any(|g| g.resource == Resource::Keys && g.actions.contains(&Action::Read));
|
||||||
|
if !can_read_any_key {
|
||||||
|
return Err(ApiError::Forbidden(
|
||||||
|
"Insufficient permissions: keys:read".to_string(),
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
|
rows.retain(|key| {
|
||||||
|
let ctx = key_authorization_context(identity_id, key);
|
||||||
|
AuthorizationService::is_allowed(&grants, Resource::Keys, Action::Read, &ctx)
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
let paginated_keys: Vec<KeySummary> = rows.into_iter().map(KeySummary::from).collect();
|
||||||
|
|
||||||
let pagination_params = PaginationParams {
|
let pagination_params = PaginationParams {
|
||||||
page: query.page,
|
page: query.page,
|
||||||
@@ -83,7 +112,7 @@ pub async fn list_keys(
|
|||||||
security(("bearer_auth" = []))
|
security(("bearer_auth" = []))
|
||||||
)]
|
)]
|
||||||
pub async fn get_key(
|
pub async fn get_key(
|
||||||
_user: RequireAuth,
|
user: RequireAuth,
|
||||||
State(state): State<Arc<AppState>>,
|
State(state): State<Arc<AppState>>,
|
||||||
Path(key_ref): Path<String>,
|
Path(key_ref): Path<String>,
|
||||||
) -> ApiResult<impl IntoResponse> {
|
) -> ApiResult<impl IntoResponse> {
|
||||||
@@ -91,6 +120,26 @@ pub async fn get_key(
|
|||||||
.await?
|
.await?
|
||||||
.ok_or_else(|| ApiError::NotFound(format!("Key '{}' not found", key_ref)))?;
|
.ok_or_else(|| ApiError::NotFound(format!("Key '{}' not found", key_ref)))?;
|
||||||
|
|
||||||
|
if user.0.claims.token_type == TokenType::Access {
|
||||||
|
let identity_id = user
|
||||||
|
.0
|
||||||
|
.identity_id()
|
||||||
|
.map_err(|_| ApiError::Unauthorized("Invalid user identity".to_string()))?;
|
||||||
|
let authz = AuthorizationService::new(state.db.clone());
|
||||||
|
authz
|
||||||
|
.authorize(
|
||||||
|
&user.0,
|
||||||
|
AuthorizationCheck {
|
||||||
|
resource: Resource::Keys,
|
||||||
|
action: Action::Read,
|
||||||
|
context: key_authorization_context(identity_id, &key),
|
||||||
|
},
|
||||||
|
)
|
||||||
|
.await
|
||||||
|
// Hide unauthorized records behind 404 to reduce enumeration leakage.
|
||||||
|
.map_err(|_| ApiError::NotFound(format!("Key '{}' not found", key_ref)))?;
|
||||||
|
}
|
||||||
|
|
||||||
// Decrypt value if encrypted
|
// Decrypt value if encrypted
|
||||||
if key.encrypted {
|
if key.encrypted {
|
||||||
let encryption_key = state
|
let encryption_key = state
|
||||||
@@ -130,13 +179,37 @@ pub async fn get_key(
|
|||||||
security(("bearer_auth" = []))
|
security(("bearer_auth" = []))
|
||||||
)]
|
)]
|
||||||
pub async fn create_key(
|
pub async fn create_key(
|
||||||
_user: RequireAuth,
|
user: RequireAuth,
|
||||||
State(state): State<Arc<AppState>>,
|
State(state): State<Arc<AppState>>,
|
||||||
Json(request): Json<CreateKeyRequest>,
|
Json(request): Json<CreateKeyRequest>,
|
||||||
) -> ApiResult<impl IntoResponse> {
|
) -> ApiResult<impl IntoResponse> {
|
||||||
// Validate request
|
// Validate request
|
||||||
request.validate()?;
|
request.validate()?;
|
||||||
|
|
||||||
|
if user.0.claims.token_type == TokenType::Access {
|
||||||
|
let identity_id = user
|
||||||
|
.0
|
||||||
|
.identity_id()
|
||||||
|
.map_err(|_| ApiError::Unauthorized("Invalid user identity".to_string()))?;
|
||||||
|
let authz = AuthorizationService::new(state.db.clone());
|
||||||
|
let mut ctx = AuthorizationContext::new(identity_id);
|
||||||
|
ctx.owner_identity_id = request.owner_identity;
|
||||||
|
ctx.owner_type = Some(request.owner_type);
|
||||||
|
ctx.encrypted = Some(request.encrypted);
|
||||||
|
ctx.target_ref = Some(request.r#ref.clone());
|
||||||
|
|
||||||
|
authz
|
||||||
|
.authorize(
|
||||||
|
&user.0,
|
||||||
|
AuthorizationCheck {
|
||||||
|
resource: Resource::Keys,
|
||||||
|
action: Action::Create,
|
||||||
|
context: ctx,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
}
|
||||||
|
|
||||||
// Check if key with same ref already exists
|
// Check if key with same ref already exists
|
||||||
if KeyRepository::find_by_ref(&state.db, &request.r#ref)
|
if KeyRepository::find_by_ref(&state.db, &request.r#ref)
|
||||||
.await?
|
.await?
|
||||||
@@ -299,7 +372,7 @@ pub async fn create_key(
|
|||||||
security(("bearer_auth" = []))
|
security(("bearer_auth" = []))
|
||||||
)]
|
)]
|
||||||
pub async fn update_key(
|
pub async fn update_key(
|
||||||
_user: RequireAuth,
|
user: RequireAuth,
|
||||||
State(state): State<Arc<AppState>>,
|
State(state): State<Arc<AppState>>,
|
||||||
Path(key_ref): Path<String>,
|
Path(key_ref): Path<String>,
|
||||||
Json(request): Json<UpdateKeyRequest>,
|
Json(request): Json<UpdateKeyRequest>,
|
||||||
@@ -312,6 +385,24 @@ pub async fn update_key(
|
|||||||
.await?
|
.await?
|
||||||
.ok_or_else(|| ApiError::NotFound(format!("Key '{}' not found", key_ref)))?;
|
.ok_or_else(|| ApiError::NotFound(format!("Key '{}' not found", key_ref)))?;
|
||||||
|
|
||||||
|
if user.0.claims.token_type == TokenType::Access {
|
||||||
|
let identity_id = user
|
||||||
|
.0
|
||||||
|
.identity_id()
|
||||||
|
.map_err(|_| ApiError::Unauthorized("Invalid user identity".to_string()))?;
|
||||||
|
let authz = AuthorizationService::new(state.db.clone());
|
||||||
|
authz
|
||||||
|
.authorize(
|
||||||
|
&user.0,
|
||||||
|
AuthorizationCheck {
|
||||||
|
resource: Resource::Keys,
|
||||||
|
action: Action::Update,
|
||||||
|
context: key_authorization_context(identity_id, &existing),
|
||||||
|
},
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
}
|
||||||
|
|
||||||
// Handle value update with encryption
|
// Handle value update with encryption
|
||||||
let (value, encrypted, encryption_key_hash) = if let Some(new_value) = request.value {
|
let (value, encrypted, encryption_key_hash) = if let Some(new_value) = request.value {
|
||||||
let should_encrypt = request.encrypted.unwrap_or(existing.encrypted);
|
let should_encrypt = request.encrypted.unwrap_or(existing.encrypted);
|
||||||
@@ -395,7 +486,7 @@ pub async fn update_key(
|
|||||||
security(("bearer_auth" = []))
|
security(("bearer_auth" = []))
|
||||||
)]
|
)]
|
||||||
pub async fn delete_key(
|
pub async fn delete_key(
|
||||||
_user: RequireAuth,
|
user: RequireAuth,
|
||||||
State(state): State<Arc<AppState>>,
|
State(state): State<Arc<AppState>>,
|
||||||
Path(key_ref): Path<String>,
|
Path(key_ref): Path<String>,
|
||||||
) -> ApiResult<impl IntoResponse> {
|
) -> ApiResult<impl IntoResponse> {
|
||||||
@@ -404,6 +495,24 @@ pub async fn delete_key(
|
|||||||
.await?
|
.await?
|
||||||
.ok_or_else(|| ApiError::NotFound(format!("Key '{}' not found", key_ref)))?;
|
.ok_or_else(|| ApiError::NotFound(format!("Key '{}' not found", key_ref)))?;
|
||||||
|
|
||||||
|
if user.0.claims.token_type == TokenType::Access {
|
||||||
|
let identity_id = user
|
||||||
|
.0
|
||||||
|
.identity_id()
|
||||||
|
.map_err(|_| ApiError::Unauthorized("Invalid user identity".to_string()))?;
|
||||||
|
let authz = AuthorizationService::new(state.db.clone());
|
||||||
|
authz
|
||||||
|
.authorize(
|
||||||
|
&user.0,
|
||||||
|
AuthorizationCheck {
|
||||||
|
resource: Resource::Keys,
|
||||||
|
action: Action::Delete,
|
||||||
|
context: key_authorization_context(identity_id, &key),
|
||||||
|
},
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
}
|
||||||
|
|
||||||
// Delete the key
|
// Delete the key
|
||||||
let deleted = KeyRepository::delete(&state.db, key.id).await?;
|
let deleted = KeyRepository::delete(&state.db, key.id).await?;
|
||||||
|
|
||||||
@@ -425,3 +534,13 @@ pub fn routes() -> Router<Arc<AppState>> {
|
|||||||
get(get_key).put(update_key).delete(delete_key),
|
get(get_key).put(update_key).delete(delete_key),
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn key_authorization_context(identity_id: i64, key: &Key) -> AuthorizationContext {
|
||||||
|
let mut ctx = AuthorizationContext::new(identity_id);
|
||||||
|
ctx.target_id = Some(key.id);
|
||||||
|
ctx.target_ref = Some(key.r#ref.clone());
|
||||||
|
ctx.owner_identity_id = key.owner_identity;
|
||||||
|
ctx.owner_type = Some(key.owner_type);
|
||||||
|
ctx.encrypted = Some(key.encrypted);
|
||||||
|
ctx
|
||||||
|
}
|
||||||
|
|||||||
@@ -1,6 +1,7 @@
|
|||||||
//! API route modules
|
//! API route modules
|
||||||
|
|
||||||
pub mod actions;
|
pub mod actions;
|
||||||
|
pub mod agent;
|
||||||
pub mod analytics;
|
pub mod analytics;
|
||||||
pub mod artifacts;
|
pub mod artifacts;
|
||||||
pub mod auth;
|
pub mod auth;
|
||||||
@@ -11,12 +12,15 @@ pub mod history;
|
|||||||
pub mod inquiries;
|
pub mod inquiries;
|
||||||
pub mod keys;
|
pub mod keys;
|
||||||
pub mod packs;
|
pub mod packs;
|
||||||
|
pub mod permissions;
|
||||||
pub mod rules;
|
pub mod rules;
|
||||||
|
pub mod runtimes;
|
||||||
pub mod triggers;
|
pub mod triggers;
|
||||||
pub mod webhooks;
|
pub mod webhooks;
|
||||||
pub mod workflows;
|
pub mod workflows;
|
||||||
|
|
||||||
pub use actions::routes as action_routes;
|
pub use actions::routes as action_routes;
|
||||||
|
pub use agent::routes as agent_routes;
|
||||||
pub use analytics::routes as analytics_routes;
|
pub use analytics::routes as analytics_routes;
|
||||||
pub use artifacts::routes as artifact_routes;
|
pub use artifacts::routes as artifact_routes;
|
||||||
pub use auth::routes as auth_routes;
|
pub use auth::routes as auth_routes;
|
||||||
@@ -27,7 +31,9 @@ pub use history::routes as history_routes;
|
|||||||
pub use inquiries::routes as inquiry_routes;
|
pub use inquiries::routes as inquiry_routes;
|
||||||
pub use keys::routes as key_routes;
|
pub use keys::routes as key_routes;
|
||||||
pub use packs::routes as pack_routes;
|
pub use packs::routes as pack_routes;
|
||||||
|
pub use permissions::routes as permission_routes;
|
||||||
pub use rules::routes as rule_routes;
|
pub use rules::routes as rule_routes;
|
||||||
|
pub use runtimes::routes as runtime_routes;
|
||||||
pub use triggers::routes as trigger_routes;
|
pub use triggers::routes as trigger_routes;
|
||||||
pub use webhooks::routes as webhook_routes;
|
pub use webhooks::routes as webhook_routes;
|
||||||
pub use workflows::routes as workflow_routes;
|
pub use workflows::routes as workflow_routes;
|
||||||
|
|||||||
@@ -13,22 +13,26 @@ use validator::Validate;
|
|||||||
|
|
||||||
use attune_common::models::pack_test::PackTestResult;
|
use attune_common::models::pack_test::PackTestResult;
|
||||||
use attune_common::mq::{MessageEnvelope, MessageType, PackRegisteredPayload};
|
use attune_common::mq::{MessageEnvelope, MessageType, PackRegisteredPayload};
|
||||||
|
use attune_common::rbac::{Action, AuthorizationContext, Resource};
|
||||||
use attune_common::repositories::{
|
use attune_common::repositories::{
|
||||||
pack::{CreatePackInput, UpdatePackInput},
|
pack::{CreatePackInput, UpdatePackInput},
|
||||||
Create, Delete, FindById, FindByRef, PackRepository, PackTestRepository, Pagination, Update,
|
Create, Delete, FindById, FindByRef, PackRepository, PackTestRepository, Pagination, Patch,
|
||||||
|
Update,
|
||||||
};
|
};
|
||||||
use attune_common::workflow::{PackWorkflowService, PackWorkflowServiceConfig};
|
use attune_common::workflow::{PackWorkflowService, PackWorkflowServiceConfig};
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
auth::middleware::RequireAuth,
|
auth::middleware::RequireAuth,
|
||||||
|
authz::{AuthorizationCheck, AuthorizationService},
|
||||||
dto::{
|
dto::{
|
||||||
common::{PaginatedResponse, PaginationParams},
|
common::{PaginatedResponse, PaginationParams},
|
||||||
pack::{
|
pack::{
|
||||||
BuildPackEnvsRequest, BuildPackEnvsResponse, CreatePackRequest, DownloadPacksRequest,
|
BuildPackEnvsRequest, BuildPackEnvsResponse, CreatePackRequest, DownloadPacksRequest,
|
||||||
DownloadPacksResponse, GetPackDependenciesRequest, GetPackDependenciesResponse,
|
DownloadPacksResponse, GetPackDependenciesRequest, GetPackDependenciesResponse,
|
||||||
InstallPackRequest, PackInstallResponse, PackResponse, PackSummary,
|
InstallPackRequest, PackDescriptionPatch, PackInstallResponse, PackResponse,
|
||||||
PackWorkflowSyncResponse, PackWorkflowValidationResponse, RegisterPackRequest,
|
PackSummary, PackWorkflowSyncResponse, PackWorkflowValidationResponse,
|
||||||
RegisterPacksRequest, RegisterPacksResponse, UpdatePackRequest, WorkflowSyncResult,
|
RegisterPackRequest, RegisterPacksRequest, RegisterPacksResponse, UpdatePackRequest,
|
||||||
|
WorkflowSyncResult,
|
||||||
},
|
},
|
||||||
ApiResponse, SuccessResponse,
|
ApiResponse, SuccessResponse,
|
||||||
},
|
},
|
||||||
@@ -115,7 +119,7 @@ pub async fn get_pack(
|
|||||||
)]
|
)]
|
||||||
pub async fn create_pack(
|
pub async fn create_pack(
|
||||||
State(state): State<Arc<AppState>>,
|
State(state): State<Arc<AppState>>,
|
||||||
RequireAuth(_user): RequireAuth,
|
RequireAuth(user): RequireAuth,
|
||||||
Json(request): Json<CreatePackRequest>,
|
Json(request): Json<CreatePackRequest>,
|
||||||
) -> ApiResult<impl IntoResponse> {
|
) -> ApiResult<impl IntoResponse> {
|
||||||
// Validate request
|
// Validate request
|
||||||
@@ -129,6 +133,25 @@ pub async fn create_pack(
|
|||||||
)));
|
)));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if user.claims.token_type == crate::auth::jwt::TokenType::Access {
|
||||||
|
let identity_id = user
|
||||||
|
.identity_id()
|
||||||
|
.map_err(|_| ApiError::Unauthorized("Invalid user identity".to_string()))?;
|
||||||
|
let authz = AuthorizationService::new(state.db.clone());
|
||||||
|
let mut ctx = AuthorizationContext::new(identity_id);
|
||||||
|
ctx.target_ref = Some(request.r#ref.clone());
|
||||||
|
authz
|
||||||
|
.authorize(
|
||||||
|
&user,
|
||||||
|
AuthorizationCheck {
|
||||||
|
resource: Resource::Packs,
|
||||||
|
action: Action::Create,
|
||||||
|
context: ctx,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
}
|
||||||
|
|
||||||
// Create pack input
|
// Create pack input
|
||||||
let pack_input = CreatePackInput {
|
let pack_input = CreatePackInput {
|
||||||
r#ref: request.r#ref,
|
r#ref: request.r#ref,
|
||||||
@@ -202,7 +225,7 @@ pub async fn create_pack(
|
|||||||
)]
|
)]
|
||||||
pub async fn update_pack(
|
pub async fn update_pack(
|
||||||
State(state): State<Arc<AppState>>,
|
State(state): State<Arc<AppState>>,
|
||||||
RequireAuth(_user): RequireAuth,
|
RequireAuth(user): RequireAuth,
|
||||||
Path(pack_ref): Path<String>,
|
Path(pack_ref): Path<String>,
|
||||||
Json(request): Json<UpdatePackRequest>,
|
Json(request): Json<UpdatePackRequest>,
|
||||||
) -> ApiResult<impl IntoResponse> {
|
) -> ApiResult<impl IntoResponse> {
|
||||||
@@ -214,10 +237,33 @@ pub async fn update_pack(
|
|||||||
.await?
|
.await?
|
||||||
.ok_or_else(|| ApiError::NotFound(format!("Pack '{}' not found", pack_ref)))?;
|
.ok_or_else(|| ApiError::NotFound(format!("Pack '{}' not found", pack_ref)))?;
|
||||||
|
|
||||||
|
if user.claims.token_type == crate::auth::jwt::TokenType::Access {
|
||||||
|
let identity_id = user
|
||||||
|
.identity_id()
|
||||||
|
.map_err(|_| ApiError::Unauthorized("Invalid user identity".to_string()))?;
|
||||||
|
let authz = AuthorizationService::new(state.db.clone());
|
||||||
|
let mut ctx = AuthorizationContext::new(identity_id);
|
||||||
|
ctx.target_id = Some(existing_pack.id);
|
||||||
|
ctx.target_ref = Some(existing_pack.r#ref.clone());
|
||||||
|
authz
|
||||||
|
.authorize(
|
||||||
|
&user,
|
||||||
|
AuthorizationCheck {
|
||||||
|
resource: Resource::Packs,
|
||||||
|
action: Action::Update,
|
||||||
|
context: ctx,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
}
|
||||||
|
|
||||||
// Create update input
|
// Create update input
|
||||||
let update_input = UpdatePackInput {
|
let update_input = UpdatePackInput {
|
||||||
label: request.label,
|
label: request.label,
|
||||||
description: request.description,
|
description: request.description.map(|patch| match patch {
|
||||||
|
PackDescriptionPatch::Set(value) => Patch::Set(value),
|
||||||
|
PackDescriptionPatch::Clear => Patch::Clear,
|
||||||
|
}),
|
||||||
version: request.version,
|
version: request.version,
|
||||||
conf_schema: request.conf_schema,
|
conf_schema: request.conf_schema,
|
||||||
config: request.config,
|
config: request.config,
|
||||||
@@ -284,7 +330,7 @@ pub async fn update_pack(
|
|||||||
)]
|
)]
|
||||||
pub async fn delete_pack(
|
pub async fn delete_pack(
|
||||||
State(state): State<Arc<AppState>>,
|
State(state): State<Arc<AppState>>,
|
||||||
RequireAuth(_user): RequireAuth,
|
RequireAuth(user): RequireAuth,
|
||||||
Path(pack_ref): Path<String>,
|
Path(pack_ref): Path<String>,
|
||||||
) -> ApiResult<impl IntoResponse> {
|
) -> ApiResult<impl IntoResponse> {
|
||||||
// Check if pack exists
|
// Check if pack exists
|
||||||
@@ -292,6 +338,26 @@ pub async fn delete_pack(
|
|||||||
.await?
|
.await?
|
||||||
.ok_or_else(|| ApiError::NotFound(format!("Pack '{}' not found", pack_ref)))?;
|
.ok_or_else(|| ApiError::NotFound(format!("Pack '{}' not found", pack_ref)))?;
|
||||||
|
|
||||||
|
if user.claims.token_type == crate::auth::jwt::TokenType::Access {
|
||||||
|
let identity_id = user
|
||||||
|
.identity_id()
|
||||||
|
.map_err(|_| ApiError::Unauthorized("Invalid user identity".to_string()))?;
|
||||||
|
let authz = AuthorizationService::new(state.db.clone());
|
||||||
|
let mut ctx = AuthorizationContext::new(identity_id);
|
||||||
|
ctx.target_id = Some(pack.id);
|
||||||
|
ctx.target_ref = Some(pack.r#ref.clone());
|
||||||
|
authz
|
||||||
|
.authorize(
|
||||||
|
&user,
|
||||||
|
AuthorizationCheck {
|
||||||
|
resource: Resource::Packs,
|
||||||
|
action: Action::Delete,
|
||||||
|
context: ctx,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
}
|
||||||
|
|
||||||
// Delete the pack from the database (cascades to actions, triggers, sensors, rules, etc.
|
// Delete the pack from the database (cascades to actions, triggers, sensors, rules, etc.
|
||||||
// Foreign keys on execution, event, enforcement, and rule tables use ON DELETE SET NULL
|
// Foreign keys on execution, event, enforcement, and rule tables use ON DELETE SET NULL
|
||||||
// so historical records are preserved with their text ref fields intact.)
|
// so historical records are preserved with their text ref fields intact.)
|
||||||
@@ -475,6 +541,23 @@ pub async fn upload_pack(
|
|||||||
|
|
||||||
const MAX_PACK_SIZE: usize = 100 * 1024 * 1024; // 100 MB
|
const MAX_PACK_SIZE: usize = 100 * 1024 * 1024; // 100 MB
|
||||||
|
|
||||||
|
if user.claims.token_type == crate::auth::jwt::TokenType::Access {
|
||||||
|
let identity_id = user
|
||||||
|
.identity_id()
|
||||||
|
.map_err(|_| ApiError::Unauthorized("Invalid user identity".to_string()))?;
|
||||||
|
let authz = AuthorizationService::new(state.db.clone());
|
||||||
|
authz
|
||||||
|
.authorize(
|
||||||
|
&user,
|
||||||
|
AuthorizationCheck {
|
||||||
|
resource: Resource::Packs,
|
||||||
|
action: Action::Create,
|
||||||
|
context: AuthorizationContext::new(identity_id),
|
||||||
|
},
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
}
|
||||||
|
|
||||||
let mut pack_bytes: Option<Vec<u8>> = None;
|
let mut pack_bytes: Option<Vec<u8>> = None;
|
||||||
let mut force = false;
|
let mut force = false;
|
||||||
let mut skip_tests = false;
|
let mut skip_tests = false;
|
||||||
@@ -649,6 +732,23 @@ pub async fn register_pack(
|
|||||||
// Validate request
|
// Validate request
|
||||||
request.validate()?;
|
request.validate()?;
|
||||||
|
|
||||||
|
if user.claims.token_type == crate::auth::jwt::TokenType::Access {
|
||||||
|
let identity_id = user
|
||||||
|
.identity_id()
|
||||||
|
.map_err(|_| ApiError::Unauthorized("Invalid user identity".to_string()))?;
|
||||||
|
let authz = AuthorizationService::new(state.db.clone());
|
||||||
|
authz
|
||||||
|
.authorize(
|
||||||
|
&user,
|
||||||
|
AuthorizationCheck {
|
||||||
|
resource: Resource::Packs,
|
||||||
|
action: Action::Create,
|
||||||
|
context: AuthorizationContext::new(identity_id),
|
||||||
|
},
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
}
|
||||||
|
|
||||||
// Call internal registration logic
|
// Call internal registration logic
|
||||||
let pack_id = register_pack_internal(
|
let pack_id = register_pack_internal(
|
||||||
state.clone(),
|
state.clone(),
|
||||||
@@ -781,7 +881,10 @@ async fn register_pack_internal(
|
|||||||
// Update existing pack in place — preserves pack ID and all child entity IDs
|
// Update existing pack in place — preserves pack ID and all child entity IDs
|
||||||
let update_input = UpdatePackInput {
|
let update_input = UpdatePackInput {
|
||||||
label: Some(label),
|
label: Some(label),
|
||||||
description: Some(description.unwrap_or_default()),
|
description: Some(match description {
|
||||||
|
Some(value) => Patch::Set(value),
|
||||||
|
None => Patch::Clear,
|
||||||
|
}),
|
||||||
version: Some(version.clone()),
|
version: Some(version.clone()),
|
||||||
conf_schema: Some(conf_schema),
|
conf_schema: Some(conf_schema),
|
||||||
config: None, // preserve user-set config
|
config: None, // preserve user-set config
|
||||||
@@ -1207,6 +1310,23 @@ pub async fn install_pack(
|
|||||||
|
|
||||||
tracing::info!("Installing pack from source: {}", request.source);
|
tracing::info!("Installing pack from source: {}", request.source);
|
||||||
|
|
||||||
|
if user.claims.token_type == crate::auth::jwt::TokenType::Access {
|
||||||
|
let identity_id = user
|
||||||
|
.identity_id()
|
||||||
|
.map_err(|_| ApiError::Unauthorized("Invalid user identity".to_string()))?;
|
||||||
|
let authz = AuthorizationService::new(state.db.clone());
|
||||||
|
authz
|
||||||
|
.authorize(
|
||||||
|
&user,
|
||||||
|
AuthorizationCheck {
|
||||||
|
resource: Resource::Packs,
|
||||||
|
action: Action::Create,
|
||||||
|
context: AuthorizationContext::new(identity_id),
|
||||||
|
},
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
}
|
||||||
|
|
||||||
// Get user ID early to avoid borrow issues
|
// Get user ID early to avoid borrow issues
|
||||||
let user_id = user.identity_id().ok();
|
let user_id = user.identity_id().ok();
|
||||||
let user_sub = user.claims.sub.clone();
|
let user_sub = user.claims.sub.clone();
|
||||||
@@ -2247,6 +2367,23 @@ pub async fn register_packs_batch(
|
|||||||
RequireAuth(user): RequireAuth,
|
RequireAuth(user): RequireAuth,
|
||||||
Json(request): Json<RegisterPacksRequest>,
|
Json(request): Json<RegisterPacksRequest>,
|
||||||
) -> ApiResult<Json<ApiResponse<RegisterPacksResponse>>> {
|
) -> ApiResult<Json<ApiResponse<RegisterPacksResponse>>> {
|
||||||
|
if user.claims.token_type == crate::auth::jwt::TokenType::Access {
|
||||||
|
let identity_id = user
|
||||||
|
.identity_id()
|
||||||
|
.map_err(|_| ApiError::Unauthorized("Invalid user identity".to_string()))?;
|
||||||
|
let authz = AuthorizationService::new(state.db.clone());
|
||||||
|
authz
|
||||||
|
.authorize(
|
||||||
|
&user,
|
||||||
|
AuthorizationCheck {
|
||||||
|
resource: Resource::Packs,
|
||||||
|
action: Action::Create,
|
||||||
|
context: AuthorizationContext::new(identity_id),
|
||||||
|
},
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
}
|
||||||
|
|
||||||
let start = std::time::Instant::now();
|
let start = std::time::Instant::now();
|
||||||
let mut registered = Vec::new();
|
let mut registered = Vec::new();
|
||||||
let mut failed = Vec::new();
|
let mut failed = Vec::new();
|
||||||
|
|||||||
507
crates/api/src/routes/permissions.rs
Normal file
507
crates/api/src/routes/permissions.rs
Normal file
@@ -0,0 +1,507 @@
|
|||||||
|
use axum::{
|
||||||
|
extract::{Path, Query, State},
|
||||||
|
http::StatusCode,
|
||||||
|
response::IntoResponse,
|
||||||
|
routing::{delete, get, post},
|
||||||
|
Json, Router,
|
||||||
|
};
|
||||||
|
use std::sync::Arc;
|
||||||
|
use validator::Validate;
|
||||||
|
|
||||||
|
use attune_common::{
|
||||||
|
models::identity::{Identity, PermissionSet},
|
||||||
|
rbac::{Action, AuthorizationContext, Resource},
|
||||||
|
repositories::{
|
||||||
|
identity::{
|
||||||
|
CreateIdentityInput, CreatePermissionAssignmentInput, IdentityRepository,
|
||||||
|
PermissionAssignmentRepository, PermissionSetRepository, UpdateIdentityInput,
|
||||||
|
},
|
||||||
|
Create, Delete, FindById, FindByRef, List, Update,
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
use crate::{
|
||||||
|
auth::hash_password,
|
||||||
|
auth::middleware::RequireAuth,
|
||||||
|
authz::{AuthorizationCheck, AuthorizationService},
|
||||||
|
dto::{
|
||||||
|
common::{PaginatedResponse, PaginationParams},
|
||||||
|
ApiResponse, CreateIdentityRequest, CreatePermissionAssignmentRequest, IdentityResponse,
|
||||||
|
IdentitySummary, PermissionAssignmentResponse, PermissionSetQueryParams,
|
||||||
|
PermissionSetSummary, SuccessResponse, UpdateIdentityRequest,
|
||||||
|
},
|
||||||
|
middleware::{ApiError, ApiResult},
|
||||||
|
state::AppState,
|
||||||
|
};
|
||||||
|
|
||||||
|
#[utoipa::path(
|
||||||
|
get,
|
||||||
|
path = "/api/v1/identities",
|
||||||
|
tag = "permissions",
|
||||||
|
params(PaginationParams),
|
||||||
|
responses(
|
||||||
|
(status = 200, description = "List identities", body = PaginatedResponse<IdentitySummary>)
|
||||||
|
),
|
||||||
|
security(("bearer_auth" = []))
|
||||||
|
)]
|
||||||
|
pub async fn list_identities(
|
||||||
|
State(state): State<Arc<AppState>>,
|
||||||
|
RequireAuth(user): RequireAuth,
|
||||||
|
Query(query): Query<PaginationParams>,
|
||||||
|
) -> ApiResult<impl IntoResponse> {
|
||||||
|
authorize_permissions(&state, &user, Resource::Identities, Action::Read).await?;
|
||||||
|
|
||||||
|
let identities = IdentityRepository::list(&state.db).await?;
|
||||||
|
let total = identities.len() as u64;
|
||||||
|
let start = query.offset() as usize;
|
||||||
|
let end = (start + query.limit() as usize).min(identities.len());
|
||||||
|
let page_items = if start >= identities.len() {
|
||||||
|
Vec::new()
|
||||||
|
} else {
|
||||||
|
identities[start..end]
|
||||||
|
.iter()
|
||||||
|
.cloned()
|
||||||
|
.map(IdentitySummary::from)
|
||||||
|
.collect()
|
||||||
|
};
|
||||||
|
|
||||||
|
Ok((
|
||||||
|
StatusCode::OK,
|
||||||
|
Json(PaginatedResponse::new(page_items, &query, total)),
|
||||||
|
))
|
||||||
|
}
|
||||||
|
|
||||||
|
#[utoipa::path(
|
||||||
|
get,
|
||||||
|
path = "/api/v1/identities/{id}",
|
||||||
|
tag = "permissions",
|
||||||
|
params(
|
||||||
|
("id" = i64, Path, description = "Identity ID")
|
||||||
|
),
|
||||||
|
responses(
|
||||||
|
(status = 200, description = "Identity details", body = inline(ApiResponse<IdentityResponse>)),
|
||||||
|
(status = 404, description = "Identity not found")
|
||||||
|
),
|
||||||
|
security(("bearer_auth" = []))
|
||||||
|
)]
|
||||||
|
pub async fn get_identity(
|
||||||
|
State(state): State<Arc<AppState>>,
|
||||||
|
RequireAuth(user): RequireAuth,
|
||||||
|
Path(identity_id): Path<i64>,
|
||||||
|
) -> ApiResult<impl IntoResponse> {
|
||||||
|
authorize_permissions(&state, &user, Resource::Identities, Action::Read).await?;
|
||||||
|
|
||||||
|
let identity = IdentityRepository::find_by_id(&state.db, identity_id)
|
||||||
|
.await?
|
||||||
|
.ok_or_else(|| ApiError::NotFound(format!("Identity '{}' not found", identity_id)))?;
|
||||||
|
|
||||||
|
Ok((
|
||||||
|
StatusCode::OK,
|
||||||
|
Json(ApiResponse::new(IdentityResponse::from(identity))),
|
||||||
|
))
|
||||||
|
}
|
||||||
|
|
||||||
|
#[utoipa::path(
|
||||||
|
post,
|
||||||
|
path = "/api/v1/identities",
|
||||||
|
tag = "permissions",
|
||||||
|
request_body = CreateIdentityRequest,
|
||||||
|
responses(
|
||||||
|
(status = 201, description = "Identity created", body = inline(ApiResponse<IdentityResponse>)),
|
||||||
|
(status = 409, description = "Identity already exists")
|
||||||
|
),
|
||||||
|
security(("bearer_auth" = []))
|
||||||
|
)]
|
||||||
|
pub async fn create_identity(
|
||||||
|
State(state): State<Arc<AppState>>,
|
||||||
|
RequireAuth(user): RequireAuth,
|
||||||
|
Json(request): Json<CreateIdentityRequest>,
|
||||||
|
) -> ApiResult<impl IntoResponse> {
|
||||||
|
authorize_permissions(&state, &user, Resource::Identities, Action::Create).await?;
|
||||||
|
request.validate()?;
|
||||||
|
|
||||||
|
let password_hash = match request.password {
|
||||||
|
Some(password) => Some(hash_password(&password)?),
|
||||||
|
None => None,
|
||||||
|
};
|
||||||
|
|
||||||
|
let identity = IdentityRepository::create(
|
||||||
|
&state.db,
|
||||||
|
CreateIdentityInput {
|
||||||
|
login: request.login,
|
||||||
|
display_name: request.display_name,
|
||||||
|
password_hash,
|
||||||
|
attributes: request.attributes,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
Ok((
|
||||||
|
StatusCode::CREATED,
|
||||||
|
Json(ApiResponse::new(IdentityResponse::from(identity))),
|
||||||
|
))
|
||||||
|
}
|
||||||
|
|
||||||
|
#[utoipa::path(
|
||||||
|
put,
|
||||||
|
path = "/api/v1/identities/{id}",
|
||||||
|
tag = "permissions",
|
||||||
|
params(
|
||||||
|
("id" = i64, Path, description = "Identity ID")
|
||||||
|
),
|
||||||
|
request_body = UpdateIdentityRequest,
|
||||||
|
responses(
|
||||||
|
(status = 200, description = "Identity updated", body = inline(ApiResponse<IdentityResponse>)),
|
||||||
|
(status = 404, description = "Identity not found")
|
||||||
|
),
|
||||||
|
security(("bearer_auth" = []))
|
||||||
|
)]
|
||||||
|
pub async fn update_identity(
|
||||||
|
State(state): State<Arc<AppState>>,
|
||||||
|
RequireAuth(user): RequireAuth,
|
||||||
|
Path(identity_id): Path<i64>,
|
||||||
|
Json(request): Json<UpdateIdentityRequest>,
|
||||||
|
) -> ApiResult<impl IntoResponse> {
|
||||||
|
authorize_permissions(&state, &user, Resource::Identities, Action::Update).await?;
|
||||||
|
|
||||||
|
IdentityRepository::find_by_id(&state.db, identity_id)
|
||||||
|
.await?
|
||||||
|
.ok_or_else(|| ApiError::NotFound(format!("Identity '{}' not found", identity_id)))?;
|
||||||
|
|
||||||
|
let password_hash = match request.password {
|
||||||
|
Some(password) => Some(hash_password(&password)?),
|
||||||
|
None => None,
|
||||||
|
};
|
||||||
|
|
||||||
|
let identity = IdentityRepository::update(
|
||||||
|
&state.db,
|
||||||
|
identity_id,
|
||||||
|
UpdateIdentityInput {
|
||||||
|
display_name: request.display_name,
|
||||||
|
password_hash,
|
||||||
|
attributes: request.attributes,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
Ok((
|
||||||
|
StatusCode::OK,
|
||||||
|
Json(ApiResponse::new(IdentityResponse::from(identity))),
|
||||||
|
))
|
||||||
|
}
|
||||||
|
|
||||||
|
#[utoipa::path(
|
||||||
|
delete,
|
||||||
|
path = "/api/v1/identities/{id}",
|
||||||
|
tag = "permissions",
|
||||||
|
params(
|
||||||
|
("id" = i64, Path, description = "Identity ID")
|
||||||
|
),
|
||||||
|
responses(
|
||||||
|
(status = 200, description = "Identity deleted", body = inline(ApiResponse<SuccessResponse>)),
|
||||||
|
(status = 404, description = "Identity not found")
|
||||||
|
),
|
||||||
|
security(("bearer_auth" = []))
|
||||||
|
)]
|
||||||
|
pub async fn delete_identity(
|
||||||
|
State(state): State<Arc<AppState>>,
|
||||||
|
RequireAuth(user): RequireAuth,
|
||||||
|
Path(identity_id): Path<i64>,
|
||||||
|
) -> ApiResult<impl IntoResponse> {
|
||||||
|
authorize_permissions(&state, &user, Resource::Identities, Action::Delete).await?;
|
||||||
|
|
||||||
|
let caller_identity_id = user
|
||||||
|
.identity_id()
|
||||||
|
.map_err(|_| ApiError::Unauthorized("Invalid user identity".to_string()))?;
|
||||||
|
if caller_identity_id == identity_id {
|
||||||
|
return Err(ApiError::BadRequest(
|
||||||
|
"Refusing to delete the currently authenticated identity".to_string(),
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
|
let deleted = IdentityRepository::delete(&state.db, identity_id).await?;
|
||||||
|
if !deleted {
|
||||||
|
return Err(ApiError::NotFound(format!(
|
||||||
|
"Identity '{}' not found",
|
||||||
|
identity_id
|
||||||
|
)));
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok((
|
||||||
|
StatusCode::OK,
|
||||||
|
Json(ApiResponse::new(SuccessResponse::new(
|
||||||
|
"Identity deleted successfully",
|
||||||
|
))),
|
||||||
|
))
|
||||||
|
}
|
||||||
|
|
||||||
|
#[utoipa::path(
|
||||||
|
get,
|
||||||
|
path = "/api/v1/permissions/sets",
|
||||||
|
tag = "permissions",
|
||||||
|
params(PermissionSetQueryParams),
|
||||||
|
responses(
|
||||||
|
(status = 200, description = "List permission sets", body = Vec<PermissionSetSummary>)
|
||||||
|
),
|
||||||
|
security(("bearer_auth" = []))
|
||||||
|
)]
|
||||||
|
pub async fn list_permission_sets(
|
||||||
|
State(state): State<Arc<AppState>>,
|
||||||
|
RequireAuth(user): RequireAuth,
|
||||||
|
Query(query): Query<PermissionSetQueryParams>,
|
||||||
|
) -> ApiResult<impl IntoResponse> {
|
||||||
|
authorize_permissions(&state, &user, Resource::Permissions, Action::Read).await?;
|
||||||
|
|
||||||
|
let mut permission_sets = PermissionSetRepository::list(&state.db).await?;
|
||||||
|
if let Some(pack_ref) = &query.pack_ref {
|
||||||
|
permission_sets.retain(|ps| ps.pack_ref.as_deref() == Some(pack_ref.as_str()));
|
||||||
|
}
|
||||||
|
|
||||||
|
let response: Vec<PermissionSetSummary> = permission_sets
|
||||||
|
.into_iter()
|
||||||
|
.map(PermissionSetSummary::from)
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
Ok((StatusCode::OK, Json(response)))
|
||||||
|
}
|
||||||
|
|
||||||
|
#[utoipa::path(
|
||||||
|
get,
|
||||||
|
path = "/api/v1/identities/{id}/permissions",
|
||||||
|
tag = "permissions",
|
||||||
|
params(
|
||||||
|
("id" = i64, Path, description = "Identity ID")
|
||||||
|
),
|
||||||
|
responses(
|
||||||
|
(status = 200, description = "List permission assignments for an identity", body = Vec<PermissionAssignmentResponse>),
|
||||||
|
(status = 404, description = "Identity not found")
|
||||||
|
),
|
||||||
|
security(("bearer_auth" = []))
|
||||||
|
)]
|
||||||
|
pub async fn list_identity_permissions(
|
||||||
|
State(state): State<Arc<AppState>>,
|
||||||
|
RequireAuth(user): RequireAuth,
|
||||||
|
Path(identity_id): Path<i64>,
|
||||||
|
) -> ApiResult<impl IntoResponse> {
|
||||||
|
authorize_permissions(&state, &user, Resource::Permissions, Action::Read).await?;
|
||||||
|
|
||||||
|
IdentityRepository::find_by_id(&state.db, identity_id)
|
||||||
|
.await?
|
||||||
|
.ok_or_else(|| ApiError::NotFound(format!("Identity '{}' not found", identity_id)))?;
|
||||||
|
|
||||||
|
let assignments =
|
||||||
|
PermissionAssignmentRepository::find_by_identity(&state.db, identity_id).await?;
|
||||||
|
let permission_sets = PermissionSetRepository::find_by_identity(&state.db, identity_id).await?;
|
||||||
|
|
||||||
|
let permission_set_refs = permission_sets
|
||||||
|
.into_iter()
|
||||||
|
.map(|ps| (ps.id, ps.r#ref))
|
||||||
|
.collect::<std::collections::HashMap<_, _>>();
|
||||||
|
|
||||||
|
let response: Vec<PermissionAssignmentResponse> = assignments
|
||||||
|
.into_iter()
|
||||||
|
.filter_map(|assignment| {
|
||||||
|
permission_set_refs
|
||||||
|
.get(&assignment.permset)
|
||||||
|
.cloned()
|
||||||
|
.map(|permission_set_ref| PermissionAssignmentResponse {
|
||||||
|
id: assignment.id,
|
||||||
|
identity_id: assignment.identity,
|
||||||
|
permission_set_id: assignment.permset,
|
||||||
|
permission_set_ref,
|
||||||
|
created: assignment.created,
|
||||||
|
})
|
||||||
|
})
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
Ok((StatusCode::OK, Json(response)))
|
||||||
|
}
|
||||||
|
|
||||||
|
#[utoipa::path(
|
||||||
|
post,
|
||||||
|
path = "/api/v1/permissions/assignments",
|
||||||
|
tag = "permissions",
|
||||||
|
request_body = CreatePermissionAssignmentRequest,
|
||||||
|
responses(
|
||||||
|
(status = 201, description = "Permission assignment created", body = inline(ApiResponse<PermissionAssignmentResponse>)),
|
||||||
|
(status = 404, description = "Identity or permission set not found"),
|
||||||
|
(status = 409, description = "Assignment already exists")
|
||||||
|
),
|
||||||
|
security(("bearer_auth" = []))
|
||||||
|
)]
|
||||||
|
pub async fn create_permission_assignment(
|
||||||
|
State(state): State<Arc<AppState>>,
|
||||||
|
RequireAuth(user): RequireAuth,
|
||||||
|
Json(request): Json<CreatePermissionAssignmentRequest>,
|
||||||
|
) -> ApiResult<impl IntoResponse> {
|
||||||
|
authorize_permissions(&state, &user, Resource::Permissions, Action::Manage).await?;
|
||||||
|
|
||||||
|
let identity = resolve_identity(&state, &request).await?;
|
||||||
|
let permission_set =
|
||||||
|
PermissionSetRepository::find_by_ref(&state.db, &request.permission_set_ref)
|
||||||
|
.await?
|
||||||
|
.ok_or_else(|| {
|
||||||
|
ApiError::NotFound(format!(
|
||||||
|
"Permission set '{}' not found",
|
||||||
|
request.permission_set_ref
|
||||||
|
))
|
||||||
|
})?;
|
||||||
|
|
||||||
|
let assignment = PermissionAssignmentRepository::create(
|
||||||
|
&state.db,
|
||||||
|
CreatePermissionAssignmentInput {
|
||||||
|
identity: identity.id,
|
||||||
|
permset: permission_set.id,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
let response = PermissionAssignmentResponse {
|
||||||
|
id: assignment.id,
|
||||||
|
identity_id: assignment.identity,
|
||||||
|
permission_set_id: assignment.permset,
|
||||||
|
permission_set_ref: permission_set.r#ref,
|
||||||
|
created: assignment.created,
|
||||||
|
};
|
||||||
|
|
||||||
|
Ok((StatusCode::CREATED, Json(ApiResponse::new(response))))
|
||||||
|
}
|
||||||
|
|
||||||
|
#[utoipa::path(
|
||||||
|
delete,
|
||||||
|
path = "/api/v1/permissions/assignments/{id}",
|
||||||
|
tag = "permissions",
|
||||||
|
params(
|
||||||
|
("id" = i64, Path, description = "Permission assignment ID")
|
||||||
|
),
|
||||||
|
responses(
|
||||||
|
(status = 200, description = "Permission assignment deleted", body = inline(ApiResponse<SuccessResponse>)),
|
||||||
|
(status = 404, description = "Assignment not found")
|
||||||
|
),
|
||||||
|
security(("bearer_auth" = []))
|
||||||
|
)]
|
||||||
|
pub async fn delete_permission_assignment(
|
||||||
|
State(state): State<Arc<AppState>>,
|
||||||
|
RequireAuth(user): RequireAuth,
|
||||||
|
Path(assignment_id): Path<i64>,
|
||||||
|
) -> ApiResult<impl IntoResponse> {
|
||||||
|
authorize_permissions(&state, &user, Resource::Permissions, Action::Manage).await?;
|
||||||
|
|
||||||
|
let existing = PermissionAssignmentRepository::find_by_id(&state.db, assignment_id)
|
||||||
|
.await?
|
||||||
|
.ok_or_else(|| {
|
||||||
|
ApiError::NotFound(format!(
|
||||||
|
"Permission assignment '{}' not found",
|
||||||
|
assignment_id
|
||||||
|
))
|
||||||
|
})?;
|
||||||
|
|
||||||
|
let deleted = PermissionAssignmentRepository::delete(&state.db, existing.id).await?;
|
||||||
|
if !deleted {
|
||||||
|
return Err(ApiError::NotFound(format!(
|
||||||
|
"Permission assignment '{}' not found",
|
||||||
|
assignment_id
|
||||||
|
)));
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok((
|
||||||
|
StatusCode::OK,
|
||||||
|
Json(ApiResponse::new(SuccessResponse::new(
|
||||||
|
"Permission assignment deleted successfully",
|
||||||
|
))),
|
||||||
|
))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn routes() -> Router<Arc<AppState>> {
|
||||||
|
Router::new()
|
||||||
|
.route("/identities", get(list_identities).post(create_identity))
|
||||||
|
.route(
|
||||||
|
"/identities/{id}",
|
||||||
|
get(get_identity)
|
||||||
|
.put(update_identity)
|
||||||
|
.delete(delete_identity),
|
||||||
|
)
|
||||||
|
.route(
|
||||||
|
"/identities/{id}/permissions",
|
||||||
|
get(list_identity_permissions),
|
||||||
|
)
|
||||||
|
.route("/permissions/sets", get(list_permission_sets))
|
||||||
|
.route(
|
||||||
|
"/permissions/assignments",
|
||||||
|
post(create_permission_assignment),
|
||||||
|
)
|
||||||
|
.route(
|
||||||
|
"/permissions/assignments/{id}",
|
||||||
|
delete(delete_permission_assignment),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn authorize_permissions(
|
||||||
|
state: &Arc<AppState>,
|
||||||
|
user: &crate::auth::middleware::AuthenticatedUser,
|
||||||
|
resource: Resource,
|
||||||
|
action: Action,
|
||||||
|
) -> ApiResult<()> {
|
||||||
|
let identity_id = user
|
||||||
|
.identity_id()
|
||||||
|
.map_err(|_| ApiError::Unauthorized("Invalid user identity".to_string()))?;
|
||||||
|
let authz = AuthorizationService::new(state.db.clone());
|
||||||
|
authz
|
||||||
|
.authorize(
|
||||||
|
user,
|
||||||
|
AuthorizationCheck {
|
||||||
|
resource,
|
||||||
|
action,
|
||||||
|
context: AuthorizationContext::new(identity_id),
|
||||||
|
},
|
||||||
|
)
|
||||||
|
.await
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn resolve_identity(
|
||||||
|
state: &Arc<AppState>,
|
||||||
|
request: &CreatePermissionAssignmentRequest,
|
||||||
|
) -> ApiResult<Identity> {
|
||||||
|
match (request.identity_id, request.identity_login.as_deref()) {
|
||||||
|
(Some(identity_id), None) => IdentityRepository::find_by_id(&state.db, identity_id)
|
||||||
|
.await?
|
||||||
|
.ok_or_else(|| ApiError::NotFound(format!("Identity '{}' not found", identity_id))),
|
||||||
|
(None, Some(identity_login)) => {
|
||||||
|
IdentityRepository::find_by_login(&state.db, identity_login)
|
||||||
|
.await?
|
||||||
|
.ok_or_else(|| {
|
||||||
|
ApiError::NotFound(format!("Identity '{}' not found", identity_login))
|
||||||
|
})
|
||||||
|
}
|
||||||
|
(Some(_), Some(_)) => Err(ApiError::BadRequest(
|
||||||
|
"Provide either identity_id or identity_login, not both".to_string(),
|
||||||
|
)),
|
||||||
|
(None, None) => Err(ApiError::BadRequest(
|
||||||
|
"Either identity_id or identity_login is required".to_string(),
|
||||||
|
)),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<Identity> for IdentitySummary {
|
||||||
|
fn from(value: Identity) -> Self {
|
||||||
|
Self {
|
||||||
|
id: value.id,
|
||||||
|
login: value.login,
|
||||||
|
display_name: value.display_name,
|
||||||
|
attributes: value.attributes,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<PermissionSet> for PermissionSetSummary {
|
||||||
|
fn from(value: PermissionSet) -> Self {
|
||||||
|
Self {
|
||||||
|
id: value.id,
|
||||||
|
r#ref: value.r#ref,
|
||||||
|
pack_ref: value.pack_ref,
|
||||||
|
label: value.label,
|
||||||
|
description: value.description,
|
||||||
|
grants: value.grants,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -14,6 +14,7 @@ use validator::Validate;
|
|||||||
use attune_common::mq::{
|
use attune_common::mq::{
|
||||||
MessageEnvelope, MessageType, RuleCreatedPayload, RuleDisabledPayload, RuleEnabledPayload,
|
MessageEnvelope, MessageType, RuleCreatedPayload, RuleDisabledPayload, RuleEnabledPayload,
|
||||||
};
|
};
|
||||||
|
use attune_common::rbac::{Action, AuthorizationContext, Resource};
|
||||||
use attune_common::repositories::{
|
use attune_common::repositories::{
|
||||||
action::ActionRepository,
|
action::ActionRepository,
|
||||||
pack::PackRepository,
|
pack::PackRepository,
|
||||||
@@ -24,6 +25,7 @@ use attune_common::repositories::{
|
|||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
auth::middleware::RequireAuth,
|
auth::middleware::RequireAuth,
|
||||||
|
authz::{AuthorizationCheck, AuthorizationService},
|
||||||
dto::{
|
dto::{
|
||||||
common::{PaginatedResponse, PaginationParams},
|
common::{PaginatedResponse, PaginationParams},
|
||||||
rule::{CreateRuleRequest, RuleResponse, RuleSummary, UpdateRuleRequest},
|
rule::{CreateRuleRequest, RuleResponse, RuleSummary, UpdateRuleRequest},
|
||||||
@@ -283,7 +285,7 @@ pub async fn get_rule(
|
|||||||
)]
|
)]
|
||||||
pub async fn create_rule(
|
pub async fn create_rule(
|
||||||
State(state): State<Arc<AppState>>,
|
State(state): State<Arc<AppState>>,
|
||||||
RequireAuth(_user): RequireAuth,
|
RequireAuth(user): RequireAuth,
|
||||||
Json(request): Json<CreateRuleRequest>,
|
Json(request): Json<CreateRuleRequest>,
|
||||||
) -> ApiResult<impl IntoResponse> {
|
) -> ApiResult<impl IntoResponse> {
|
||||||
// Validate request
|
// Validate request
|
||||||
@@ -317,6 +319,26 @@ pub async fn create_rule(
|
|||||||
ApiError::NotFound(format!("Trigger '{}' not found", request.trigger_ref))
|
ApiError::NotFound(format!("Trigger '{}' not found", request.trigger_ref))
|
||||||
})?;
|
})?;
|
||||||
|
|
||||||
|
if user.claims.token_type == crate::auth::jwt::TokenType::Access {
|
||||||
|
let identity_id = user
|
||||||
|
.identity_id()
|
||||||
|
.map_err(|_| ApiError::Unauthorized("Invalid user identity".to_string()))?;
|
||||||
|
let authz = AuthorizationService::new(state.db.clone());
|
||||||
|
let mut ctx = AuthorizationContext::new(identity_id);
|
||||||
|
ctx.pack_ref = Some(pack.r#ref.clone());
|
||||||
|
ctx.target_ref = Some(request.r#ref.clone());
|
||||||
|
authz
|
||||||
|
.authorize(
|
||||||
|
&user,
|
||||||
|
AuthorizationCheck {
|
||||||
|
resource: Resource::Rules,
|
||||||
|
action: Action::Create,
|
||||||
|
context: ctx,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
}
|
||||||
|
|
||||||
// Validate trigger parameters against schema
|
// Validate trigger parameters against schema
|
||||||
validate_trigger_params(&trigger, &request.trigger_params)?;
|
validate_trigger_params(&trigger, &request.trigger_params)?;
|
||||||
|
|
||||||
@@ -392,7 +414,7 @@ pub async fn create_rule(
|
|||||||
)]
|
)]
|
||||||
pub async fn update_rule(
|
pub async fn update_rule(
|
||||||
State(state): State<Arc<AppState>>,
|
State(state): State<Arc<AppState>>,
|
||||||
RequireAuth(_user): RequireAuth,
|
RequireAuth(user): RequireAuth,
|
||||||
Path(rule_ref): Path<String>,
|
Path(rule_ref): Path<String>,
|
||||||
Json(request): Json<UpdateRuleRequest>,
|
Json(request): Json<UpdateRuleRequest>,
|
||||||
) -> ApiResult<impl IntoResponse> {
|
) -> ApiResult<impl IntoResponse> {
|
||||||
@@ -404,6 +426,27 @@ pub async fn update_rule(
|
|||||||
.await?
|
.await?
|
||||||
.ok_or_else(|| ApiError::NotFound(format!("Rule '{}' not found", rule_ref)))?;
|
.ok_or_else(|| ApiError::NotFound(format!("Rule '{}' not found", rule_ref)))?;
|
||||||
|
|
||||||
|
if user.claims.token_type == crate::auth::jwt::TokenType::Access {
|
||||||
|
let identity_id = user
|
||||||
|
.identity_id()
|
||||||
|
.map_err(|_| ApiError::Unauthorized("Invalid user identity".to_string()))?;
|
||||||
|
let authz = AuthorizationService::new(state.db.clone());
|
||||||
|
let mut ctx = AuthorizationContext::new(identity_id);
|
||||||
|
ctx.target_id = Some(existing_rule.id);
|
||||||
|
ctx.target_ref = Some(existing_rule.r#ref.clone());
|
||||||
|
ctx.pack_ref = Some(existing_rule.pack_ref.clone());
|
||||||
|
authz
|
||||||
|
.authorize(
|
||||||
|
&user,
|
||||||
|
AuthorizationCheck {
|
||||||
|
resource: Resource::Rules,
|
||||||
|
action: Action::Update,
|
||||||
|
context: ctx,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
}
|
||||||
|
|
||||||
// If action parameters are being updated, validate against the action's schema
|
// If action parameters are being updated, validate against the action's schema
|
||||||
if let Some(ref action_params) = request.action_params {
|
if let Some(ref action_params) = request.action_params {
|
||||||
let action = ActionRepository::find_by_ref(&state.db, &existing_rule.action_ref)
|
let action = ActionRepository::find_by_ref(&state.db, &existing_rule.action_ref)
|
||||||
@@ -489,7 +532,7 @@ pub async fn update_rule(
|
|||||||
)]
|
)]
|
||||||
pub async fn delete_rule(
|
pub async fn delete_rule(
|
||||||
State(state): State<Arc<AppState>>,
|
State(state): State<Arc<AppState>>,
|
||||||
RequireAuth(_user): RequireAuth,
|
RequireAuth(user): RequireAuth,
|
||||||
Path(rule_ref): Path<String>,
|
Path(rule_ref): Path<String>,
|
||||||
) -> ApiResult<impl IntoResponse> {
|
) -> ApiResult<impl IntoResponse> {
|
||||||
// Check if rule exists
|
// Check if rule exists
|
||||||
@@ -497,6 +540,27 @@ pub async fn delete_rule(
|
|||||||
.await?
|
.await?
|
||||||
.ok_or_else(|| ApiError::NotFound(format!("Rule '{}' not found", rule_ref)))?;
|
.ok_or_else(|| ApiError::NotFound(format!("Rule '{}' not found", rule_ref)))?;
|
||||||
|
|
||||||
|
if user.claims.token_type == crate::auth::jwt::TokenType::Access {
|
||||||
|
let identity_id = user
|
||||||
|
.identity_id()
|
||||||
|
.map_err(|_| ApiError::Unauthorized("Invalid user identity".to_string()))?;
|
||||||
|
let authz = AuthorizationService::new(state.db.clone());
|
||||||
|
let mut ctx = AuthorizationContext::new(identity_id);
|
||||||
|
ctx.target_id = Some(rule.id);
|
||||||
|
ctx.target_ref = Some(rule.r#ref.clone());
|
||||||
|
ctx.pack_ref = Some(rule.pack_ref.clone());
|
||||||
|
authz
|
||||||
|
.authorize(
|
||||||
|
&user,
|
||||||
|
AuthorizationCheck {
|
||||||
|
resource: Resource::Rules,
|
||||||
|
action: Action::Delete,
|
||||||
|
context: ctx,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
}
|
||||||
|
|
||||||
// Delete the rule
|
// Delete the rule
|
||||||
let deleted = RuleRepository::delete(&state.db, rule.id).await?;
|
let deleted = RuleRepository::delete(&state.db, rule.id).await?;
|
||||||
|
|
||||||
|
|||||||
307
crates/api/src/routes/runtimes.rs
Normal file
307
crates/api/src/routes/runtimes.rs
Normal file
@@ -0,0 +1,307 @@
|
|||||||
|
//! Runtime management API routes
|
||||||
|
|
||||||
|
use axum::{
|
||||||
|
extract::{Path, Query, State},
|
||||||
|
http::StatusCode,
|
||||||
|
response::IntoResponse,
|
||||||
|
routing::get,
|
||||||
|
Json, Router,
|
||||||
|
};
|
||||||
|
use std::sync::Arc;
|
||||||
|
use validator::Validate;
|
||||||
|
|
||||||
|
use attune_common::repositories::{
|
||||||
|
pack::PackRepository,
|
||||||
|
runtime::{CreateRuntimeInput, RuntimeRepository, UpdateRuntimeInput},
|
||||||
|
Create, Delete, FindByRef, List, Patch, Update,
|
||||||
|
};
|
||||||
|
|
||||||
|
use crate::{
|
||||||
|
auth::middleware::RequireAuth,
|
||||||
|
dto::{
|
||||||
|
common::{PaginatedResponse, PaginationParams},
|
||||||
|
runtime::{
|
||||||
|
CreateRuntimeRequest, NullableJsonPatch, NullableStringPatch, RuntimeResponse,
|
||||||
|
RuntimeSummary, UpdateRuntimeRequest,
|
||||||
|
},
|
||||||
|
ApiResponse, SuccessResponse,
|
||||||
|
},
|
||||||
|
middleware::{ApiError, ApiResult},
|
||||||
|
state::AppState,
|
||||||
|
};
|
||||||
|
|
||||||
|
#[utoipa::path(
|
||||||
|
get,
|
||||||
|
path = "/api/v1/runtimes",
|
||||||
|
tag = "runtimes",
|
||||||
|
params(PaginationParams),
|
||||||
|
responses(
|
||||||
|
(status = 200, description = "List of runtimes", body = PaginatedResponse<RuntimeSummary>)
|
||||||
|
),
|
||||||
|
security(("bearer_auth" = []))
|
||||||
|
)]
|
||||||
|
pub async fn list_runtimes(
|
||||||
|
State(state): State<Arc<AppState>>,
|
||||||
|
RequireAuth(_user): RequireAuth,
|
||||||
|
Query(pagination): Query<PaginationParams>,
|
||||||
|
) -> ApiResult<impl IntoResponse> {
|
||||||
|
let all_runtimes = RuntimeRepository::list(&state.db).await?;
|
||||||
|
let total = all_runtimes.len() as u64;
|
||||||
|
let rows: Vec<_> = all_runtimes
|
||||||
|
.into_iter()
|
||||||
|
.skip(pagination.offset() as usize)
|
||||||
|
.take(pagination.limit() as usize)
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
let response = PaginatedResponse::new(
|
||||||
|
rows.into_iter().map(RuntimeSummary::from).collect(),
|
||||||
|
&pagination,
|
||||||
|
total,
|
||||||
|
);
|
||||||
|
|
||||||
|
Ok((StatusCode::OK, Json(response)))
|
||||||
|
}
|
||||||
|
|
||||||
|
#[utoipa::path(
|
||||||
|
get,
|
||||||
|
path = "/api/v1/packs/{pack_ref}/runtimes",
|
||||||
|
tag = "runtimes",
|
||||||
|
params(
|
||||||
|
("pack_ref" = String, Path, description = "Pack reference identifier"),
|
||||||
|
PaginationParams
|
||||||
|
),
|
||||||
|
responses(
|
||||||
|
(status = 200, description = "List of runtimes for a pack", body = PaginatedResponse<RuntimeSummary>),
|
||||||
|
(status = 404, description = "Pack not found")
|
||||||
|
),
|
||||||
|
security(("bearer_auth" = []))
|
||||||
|
)]
|
||||||
|
pub async fn list_runtimes_by_pack(
|
||||||
|
State(state): State<Arc<AppState>>,
|
||||||
|
RequireAuth(_user): RequireAuth,
|
||||||
|
Path(pack_ref): Path<String>,
|
||||||
|
Query(pagination): Query<PaginationParams>,
|
||||||
|
) -> ApiResult<impl IntoResponse> {
|
||||||
|
let pack = PackRepository::find_by_ref(&state.db, &pack_ref)
|
||||||
|
.await?
|
||||||
|
.ok_or_else(|| ApiError::NotFound(format!("Pack '{}' not found", pack_ref)))?;
|
||||||
|
|
||||||
|
let all_runtimes = RuntimeRepository::find_by_pack(&state.db, pack.id).await?;
|
||||||
|
let total = all_runtimes.len() as u64;
|
||||||
|
let rows: Vec<_> = all_runtimes
|
||||||
|
.into_iter()
|
||||||
|
.skip(pagination.offset() as usize)
|
||||||
|
.take(pagination.limit() as usize)
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
let response = PaginatedResponse::new(
|
||||||
|
rows.into_iter().map(RuntimeSummary::from).collect(),
|
||||||
|
&pagination,
|
||||||
|
total,
|
||||||
|
);
|
||||||
|
|
||||||
|
Ok((StatusCode::OK, Json(response)))
|
||||||
|
}
|
||||||
|
|
||||||
|
#[utoipa::path(
|
||||||
|
get,
|
||||||
|
path = "/api/v1/runtimes/{ref}",
|
||||||
|
tag = "runtimes",
|
||||||
|
params(("ref" = String, Path, description = "Runtime reference identifier")),
|
||||||
|
responses(
|
||||||
|
(status = 200, description = "Runtime details", body = ApiResponse<RuntimeResponse>),
|
||||||
|
(status = 404, description = "Runtime not found")
|
||||||
|
),
|
||||||
|
security(("bearer_auth" = []))
|
||||||
|
)]
|
||||||
|
pub async fn get_runtime(
|
||||||
|
State(state): State<Arc<AppState>>,
|
||||||
|
RequireAuth(_user): RequireAuth,
|
||||||
|
Path(runtime_ref): Path<String>,
|
||||||
|
) -> ApiResult<impl IntoResponse> {
|
||||||
|
let runtime = RuntimeRepository::find_by_ref(&state.db, &runtime_ref)
|
||||||
|
.await?
|
||||||
|
.ok_or_else(|| ApiError::NotFound(format!("Runtime '{}' not found", runtime_ref)))?;
|
||||||
|
|
||||||
|
Ok((
|
||||||
|
StatusCode::OK,
|
||||||
|
Json(ApiResponse::new(RuntimeResponse::from(runtime))),
|
||||||
|
))
|
||||||
|
}
|
||||||
|
|
||||||
|
#[utoipa::path(
|
||||||
|
post,
|
||||||
|
path = "/api/v1/runtimes",
|
||||||
|
tag = "runtimes",
|
||||||
|
request_body = CreateRuntimeRequest,
|
||||||
|
responses(
|
||||||
|
(status = 201, description = "Runtime created successfully", body = ApiResponse<RuntimeResponse>),
|
||||||
|
(status = 400, description = "Validation error"),
|
||||||
|
(status = 404, description = "Pack not found"),
|
||||||
|
(status = 409, description = "Runtime with same ref already exists")
|
||||||
|
),
|
||||||
|
security(("bearer_auth" = []))
|
||||||
|
)]
|
||||||
|
pub async fn create_runtime(
|
||||||
|
State(state): State<Arc<AppState>>,
|
||||||
|
RequireAuth(_user): RequireAuth,
|
||||||
|
Json(request): Json<CreateRuntimeRequest>,
|
||||||
|
) -> ApiResult<impl IntoResponse> {
|
||||||
|
request.validate()?;
|
||||||
|
|
||||||
|
if RuntimeRepository::find_by_ref(&state.db, &request.r#ref)
|
||||||
|
.await?
|
||||||
|
.is_some()
|
||||||
|
{
|
||||||
|
return Err(ApiError::Conflict(format!(
|
||||||
|
"Runtime with ref '{}' already exists",
|
||||||
|
request.r#ref
|
||||||
|
)));
|
||||||
|
}
|
||||||
|
|
||||||
|
let (pack_id, pack_ref) = if let Some(ref pack_ref_str) = request.pack_ref {
|
||||||
|
let pack = PackRepository::find_by_ref(&state.db, pack_ref_str)
|
||||||
|
.await?
|
||||||
|
.ok_or_else(|| ApiError::NotFound(format!("Pack '{}' not found", pack_ref_str)))?;
|
||||||
|
(Some(pack.id), Some(pack.r#ref))
|
||||||
|
} else {
|
||||||
|
(None, None)
|
||||||
|
};
|
||||||
|
|
||||||
|
let runtime = RuntimeRepository::create(
|
||||||
|
&state.db,
|
||||||
|
CreateRuntimeInput {
|
||||||
|
r#ref: request.r#ref,
|
||||||
|
pack: pack_id,
|
||||||
|
pack_ref,
|
||||||
|
description: request.description,
|
||||||
|
name: request.name,
|
||||||
|
aliases: vec![],
|
||||||
|
distributions: request.distributions,
|
||||||
|
installation: request.installation,
|
||||||
|
execution_config: request.execution_config,
|
||||||
|
auto_detected: false,
|
||||||
|
detection_config: serde_json::json!({}),
|
||||||
|
},
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
Ok((
|
||||||
|
StatusCode::CREATED,
|
||||||
|
Json(ApiResponse::with_message(
|
||||||
|
RuntimeResponse::from(runtime),
|
||||||
|
"Runtime created successfully",
|
||||||
|
)),
|
||||||
|
))
|
||||||
|
}
|
||||||
|
|
||||||
|
#[utoipa::path(
|
||||||
|
put,
|
||||||
|
path = "/api/v1/runtimes/{ref}",
|
||||||
|
tag = "runtimes",
|
||||||
|
params(("ref" = String, Path, description = "Runtime reference identifier")),
|
||||||
|
request_body = UpdateRuntimeRequest,
|
||||||
|
responses(
|
||||||
|
(status = 200, description = "Runtime updated successfully", body = ApiResponse<RuntimeResponse>),
|
||||||
|
(status = 400, description = "Validation error"),
|
||||||
|
(status = 404, description = "Runtime not found")
|
||||||
|
),
|
||||||
|
security(("bearer_auth" = []))
|
||||||
|
)]
|
||||||
|
pub async fn update_runtime(
|
||||||
|
State(state): State<Arc<AppState>>,
|
||||||
|
RequireAuth(_user): RequireAuth,
|
||||||
|
Path(runtime_ref): Path<String>,
|
||||||
|
Json(request): Json<UpdateRuntimeRequest>,
|
||||||
|
) -> ApiResult<impl IntoResponse> {
|
||||||
|
request.validate()?;
|
||||||
|
|
||||||
|
let existing_runtime = RuntimeRepository::find_by_ref(&state.db, &runtime_ref)
|
||||||
|
.await?
|
||||||
|
.ok_or_else(|| ApiError::NotFound(format!("Runtime '{}' not found", runtime_ref)))?;
|
||||||
|
|
||||||
|
let runtime = RuntimeRepository::update(
|
||||||
|
&state.db,
|
||||||
|
existing_runtime.id,
|
||||||
|
UpdateRuntimeInput {
|
||||||
|
description: request.description.map(|patch| match patch {
|
||||||
|
NullableStringPatch::Set(value) => Patch::Set(value),
|
||||||
|
NullableStringPatch::Clear => Patch::Clear,
|
||||||
|
}),
|
||||||
|
name: request.name,
|
||||||
|
distributions: request.distributions,
|
||||||
|
installation: request.installation.map(|patch| match patch {
|
||||||
|
NullableJsonPatch::Set(value) => Patch::Set(value),
|
||||||
|
NullableJsonPatch::Clear => Patch::Clear,
|
||||||
|
}),
|
||||||
|
execution_config: request.execution_config,
|
||||||
|
..Default::default()
|
||||||
|
},
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
Ok((
|
||||||
|
StatusCode::OK,
|
||||||
|
Json(ApiResponse::with_message(
|
||||||
|
RuntimeResponse::from(runtime),
|
||||||
|
"Runtime updated successfully",
|
||||||
|
)),
|
||||||
|
))
|
||||||
|
}
|
||||||
|
|
||||||
|
#[utoipa::path(
|
||||||
|
delete,
|
||||||
|
path = "/api/v1/runtimes/{ref}",
|
||||||
|
tag = "runtimes",
|
||||||
|
params(("ref" = String, Path, description = "Runtime reference identifier")),
|
||||||
|
responses(
|
||||||
|
(status = 200, description = "Runtime deleted successfully", body = SuccessResponse),
|
||||||
|
(status = 404, description = "Runtime not found")
|
||||||
|
),
|
||||||
|
security(("bearer_auth" = []))
|
||||||
|
)]
|
||||||
|
pub async fn delete_runtime(
|
||||||
|
State(state): State<Arc<AppState>>,
|
||||||
|
RequireAuth(_user): RequireAuth,
|
||||||
|
Path(runtime_ref): Path<String>,
|
||||||
|
) -> ApiResult<impl IntoResponse> {
|
||||||
|
let runtime = RuntimeRepository::find_by_ref(&state.db, &runtime_ref)
|
||||||
|
.await?
|
||||||
|
.ok_or_else(|| ApiError::NotFound(format!("Runtime '{}' not found", runtime_ref)))?;
|
||||||
|
|
||||||
|
let deleted = RuntimeRepository::delete(&state.db, runtime.id).await?;
|
||||||
|
if !deleted {
|
||||||
|
return Err(ApiError::NotFound(format!(
|
||||||
|
"Runtime '{}' not found",
|
||||||
|
runtime_ref
|
||||||
|
)));
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok((
|
||||||
|
StatusCode::OK,
|
||||||
|
Json(SuccessResponse::new(format!(
|
||||||
|
"Runtime '{}' deleted successfully",
|
||||||
|
runtime_ref
|
||||||
|
))),
|
||||||
|
))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn routes() -> Router<Arc<AppState>> {
|
||||||
|
Router::new()
|
||||||
|
.route("/runtimes", get(list_runtimes).post(create_runtime))
|
||||||
|
.route(
|
||||||
|
"/runtimes/{ref}",
|
||||||
|
get(get_runtime).put(update_runtime).delete(delete_runtime),
|
||||||
|
)
|
||||||
|
.route("/packs/{pack_ref}/runtimes", get(list_runtimes_by_pack))
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use super::*;
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_runtime_routes_structure() {
|
||||||
|
let _router = routes();
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -17,7 +17,7 @@ use attune_common::repositories::{
|
|||||||
CreateSensorInput, CreateTriggerInput, SensorRepository, SensorSearchFilters,
|
CreateSensorInput, CreateTriggerInput, SensorRepository, SensorSearchFilters,
|
||||||
TriggerRepository, TriggerSearchFilters, UpdateSensorInput, UpdateTriggerInput,
|
TriggerRepository, TriggerSearchFilters, UpdateSensorInput, UpdateTriggerInput,
|
||||||
},
|
},
|
||||||
Create, Delete, FindByRef, Update,
|
Create, Delete, FindByRef, Patch, Update,
|
||||||
};
|
};
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
@@ -25,8 +25,9 @@ use crate::{
|
|||||||
dto::{
|
dto::{
|
||||||
common::{PaginatedResponse, PaginationParams},
|
common::{PaginatedResponse, PaginationParams},
|
||||||
trigger::{
|
trigger::{
|
||||||
CreateSensorRequest, CreateTriggerRequest, SensorResponse, SensorSummary,
|
CreateSensorRequest, CreateTriggerRequest, SensorJsonPatch, SensorResponse,
|
||||||
TriggerResponse, TriggerSummary, UpdateSensorRequest, UpdateTriggerRequest,
|
SensorSummary, TriggerJsonPatch, TriggerResponse, TriggerStringPatch, TriggerSummary,
|
||||||
|
UpdateSensorRequest, UpdateTriggerRequest,
|
||||||
},
|
},
|
||||||
ApiResponse, SuccessResponse,
|
ApiResponse, SuccessResponse,
|
||||||
},
|
},
|
||||||
@@ -274,10 +275,19 @@ pub async fn update_trigger(
|
|||||||
// Create update input
|
// Create update input
|
||||||
let update_input = UpdateTriggerInput {
|
let update_input = UpdateTriggerInput {
|
||||||
label: request.label,
|
label: request.label,
|
||||||
description: request.description,
|
description: request.description.map(|patch| match patch {
|
||||||
|
TriggerStringPatch::Set(value) => Patch::Set(value),
|
||||||
|
TriggerStringPatch::Clear => Patch::Clear,
|
||||||
|
}),
|
||||||
enabled: request.enabled,
|
enabled: request.enabled,
|
||||||
param_schema: request.param_schema,
|
param_schema: request.param_schema.map(|patch| match patch {
|
||||||
out_schema: request.out_schema,
|
TriggerJsonPatch::Set(value) => Patch::Set(value),
|
||||||
|
TriggerJsonPatch::Clear => Patch::Clear,
|
||||||
|
}),
|
||||||
|
out_schema: request.out_schema.map(|patch| match patch {
|
||||||
|
TriggerJsonPatch::Set(value) => Patch::Set(value),
|
||||||
|
TriggerJsonPatch::Clear => Patch::Clear,
|
||||||
|
}),
|
||||||
};
|
};
|
||||||
|
|
||||||
let trigger = TriggerRepository::update(&state.db, existing_trigger.id, update_input).await?;
|
let trigger = TriggerRepository::update(&state.db, existing_trigger.id, update_input).await?;
|
||||||
@@ -722,7 +732,10 @@ pub async fn update_sensor(
|
|||||||
trigger: None,
|
trigger: None,
|
||||||
trigger_ref: None,
|
trigger_ref: None,
|
||||||
enabled: request.enabled,
|
enabled: request.enabled,
|
||||||
param_schema: request.param_schema,
|
param_schema: request.param_schema.map(|patch| match patch {
|
||||||
|
SensorJsonPatch::Set(value) => Patch::Set(value),
|
||||||
|
SensorJsonPatch::Clear => Patch::Clear,
|
||||||
|
}),
|
||||||
config: None,
|
config: None,
|
||||||
};
|
};
|
||||||
|
|
||||||
|
|||||||
@@ -66,7 +66,6 @@ pub async fn list_workflows(
|
|||||||
let filters = WorkflowSearchFilters {
|
let filters = WorkflowSearchFilters {
|
||||||
pack: None,
|
pack: None,
|
||||||
pack_ref: search_params.pack_ref.clone(),
|
pack_ref: search_params.pack_ref.clone(),
|
||||||
enabled: search_params.enabled,
|
|
||||||
tags,
|
tags,
|
||||||
search: search_params.search.clone(),
|
search: search_params.search.clone(),
|
||||||
limit: pagination.limit(),
|
limit: pagination.limit(),
|
||||||
@@ -113,7 +112,6 @@ pub async fn list_workflows_by_pack(
|
|||||||
let filters = WorkflowSearchFilters {
|
let filters = WorkflowSearchFilters {
|
||||||
pack: None,
|
pack: None,
|
||||||
pack_ref: Some(pack_ref),
|
pack_ref: Some(pack_ref),
|
||||||
enabled: None,
|
|
||||||
tags: None,
|
tags: None,
|
||||||
search: None,
|
search: None,
|
||||||
limit: pagination.limit(),
|
limit: pagination.limit(),
|
||||||
@@ -208,7 +206,6 @@ pub async fn create_workflow(
|
|||||||
out_schema: request.out_schema.clone(),
|
out_schema: request.out_schema.clone(),
|
||||||
definition: request.definition,
|
definition: request.definition,
|
||||||
tags: request.tags.clone().unwrap_or_default(),
|
tags: request.tags.clone().unwrap_or_default(),
|
||||||
enabled: request.enabled.unwrap_or(true),
|
|
||||||
};
|
};
|
||||||
|
|
||||||
let workflow = WorkflowDefinitionRepository::create(&state.db, workflow_input).await?;
|
let workflow = WorkflowDefinitionRepository::create(&state.db, workflow_input).await?;
|
||||||
@@ -275,7 +272,6 @@ pub async fn update_workflow(
|
|||||||
out_schema: request.out_schema.clone(),
|
out_schema: request.out_schema.clone(),
|
||||||
definition: request.definition,
|
definition: request.definition,
|
||||||
tags: request.tags,
|
tags: request.tags,
|
||||||
enabled: request.enabled,
|
|
||||||
};
|
};
|
||||||
|
|
||||||
let workflow =
|
let workflow =
|
||||||
@@ -408,7 +404,6 @@ pub async fn save_workflow_file(
|
|||||||
out_schema: request.out_schema.clone(),
|
out_schema: request.out_schema.clone(),
|
||||||
definition: definition_json,
|
definition: definition_json,
|
||||||
tags: request.tags.clone().unwrap_or_default(),
|
tags: request.tags.clone().unwrap_or_default(),
|
||||||
enabled: request.enabled.unwrap_or(true),
|
|
||||||
};
|
};
|
||||||
|
|
||||||
let workflow = WorkflowDefinitionRepository::create(&state.db, workflow_input).await?;
|
let workflow = WorkflowDefinitionRepository::create(&state.db, workflow_input).await?;
|
||||||
@@ -489,7 +484,6 @@ pub async fn update_workflow_file(
|
|||||||
out_schema: request.out_schema.clone(),
|
out_schema: request.out_schema.clone(),
|
||||||
definition: Some(definition_json),
|
definition: Some(definition_json),
|
||||||
tags: request.tags,
|
tags: request.tags,
|
||||||
enabled: request.enabled,
|
|
||||||
};
|
};
|
||||||
|
|
||||||
let workflow =
|
let workflow =
|
||||||
@@ -647,7 +641,6 @@ fn build_action_yaml(pack_ref: &str, request: &SaveWorkflowFileRequest) -> Strin
|
|||||||
lines.push(format!("description: \"{}\"", desc.replace('"', "\\\"")));
|
lines.push(format!("description: \"{}\"", desc.replace('"', "\\\"")));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
lines.push("enabled: true".to_string());
|
|
||||||
lines.push(format!(
|
lines.push(format!(
|
||||||
"workflow_file: workflows/{}.workflow.yaml",
|
"workflow_file: workflows/{}.workflow.yaml",
|
||||||
request.name
|
request.name
|
||||||
|
|||||||
@@ -47,17 +47,20 @@ impl Server {
|
|||||||
let api_v1 = Router::new()
|
let api_v1 = Router::new()
|
||||||
.merge(routes::pack_routes())
|
.merge(routes::pack_routes())
|
||||||
.merge(routes::action_routes())
|
.merge(routes::action_routes())
|
||||||
|
.merge(routes::runtime_routes())
|
||||||
.merge(routes::rule_routes())
|
.merge(routes::rule_routes())
|
||||||
.merge(routes::execution_routes())
|
.merge(routes::execution_routes())
|
||||||
.merge(routes::trigger_routes())
|
.merge(routes::trigger_routes())
|
||||||
.merge(routes::inquiry_routes())
|
.merge(routes::inquiry_routes())
|
||||||
.merge(routes::event_routes())
|
.merge(routes::event_routes())
|
||||||
.merge(routes::key_routes())
|
.merge(routes::key_routes())
|
||||||
|
.merge(routes::permission_routes())
|
||||||
.merge(routes::workflow_routes())
|
.merge(routes::workflow_routes())
|
||||||
.merge(routes::webhook_routes())
|
.merge(routes::webhook_routes())
|
||||||
.merge(routes::history_routes())
|
.merge(routes::history_routes())
|
||||||
.merge(routes::analytics_routes())
|
.merge(routes::analytics_routes())
|
||||||
.merge(routes::artifact_routes())
|
.merge(routes::artifact_routes())
|
||||||
|
.merge(routes::agent_routes())
|
||||||
.with_state(self.state.clone());
|
.with_state(self.state.clone());
|
||||||
|
|
||||||
// Auth routes at root level (not versioned for frontend compatibility)
|
// Auth routes at root level (not versioned for frontend compatibility)
|
||||||
|
|||||||
138
crates/api/tests/agent_tests.rs
Normal file
138
crates/api/tests/agent_tests.rs
Normal file
@@ -0,0 +1,138 @@
|
|||||||
|
//! Integration tests for agent binary distribution endpoints
|
||||||
|
//!
|
||||||
|
//! The agent endpoints (`/api/v1/agent/binary` and `/api/v1/agent/info`) are
|
||||||
|
//! intentionally unauthenticated — the agent needs to download its binary
|
||||||
|
//! before it has JWT credentials. An optional `bootstrap_token` can restrict
|
||||||
|
//! access, but that is validated inside the handler, not via RequireAuth
|
||||||
|
//! middleware.
|
||||||
|
//!
|
||||||
|
//! The test configuration (`config.test.yaml`) does NOT include an `agent`
|
||||||
|
//! section, so both endpoints return 503 Service Unavailable. This is the
|
||||||
|
//! correct behaviour: the endpoints are reachable (no 401/404 from middleware)
|
||||||
|
//! but the feature is not configured.
|
||||||
|
|
||||||
|
use axum::http::StatusCode;
|
||||||
|
|
||||||
|
#[allow(dead_code)]
|
||||||
|
mod helpers;
|
||||||
|
use helpers::TestContext;
|
||||||
|
|
||||||
|
// ── /api/v1/agent/info ──────────────────────────────────────────────
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
#[ignore = "integration test — requires database"]
|
||||||
|
async fn test_agent_info_not_configured() {
|
||||||
|
let ctx = TestContext::new()
|
||||||
|
.await
|
||||||
|
.expect("Failed to create test context");
|
||||||
|
|
||||||
|
let response = ctx
|
||||||
|
.get("/api/v1/agent/info", None)
|
||||||
|
.await
|
||||||
|
.expect("Failed to make request");
|
||||||
|
|
||||||
|
// Agent config is not set in config.test.yaml, so the handler returns 503.
|
||||||
|
assert_eq!(response.status(), StatusCode::SERVICE_UNAVAILABLE);
|
||||||
|
|
||||||
|
let body: serde_json::Value = response.json().await.expect("Failed to parse JSON");
|
||||||
|
assert_eq!(body["error"], "Not configured");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
#[ignore = "integration test — requires database"]
|
||||||
|
async fn test_agent_info_no_auth_required() {
|
||||||
|
// Verify that the endpoint is reachable WITHOUT any JWT token.
|
||||||
|
// If RequireAuth middleware were applied, this would return 401.
|
||||||
|
// Instead we expect 503 (not configured) — proving the endpoint
|
||||||
|
// is publicly accessible.
|
||||||
|
let ctx = TestContext::new()
|
||||||
|
.await
|
||||||
|
.expect("Failed to create test context");
|
||||||
|
|
||||||
|
let response = ctx
|
||||||
|
.get("/api/v1/agent/info", None)
|
||||||
|
.await
|
||||||
|
.expect("Failed to make request");
|
||||||
|
|
||||||
|
// Must NOT be 401 Unauthorized — the endpoint has no auth middleware.
|
||||||
|
assert_ne!(
|
||||||
|
response.status(),
|
||||||
|
StatusCode::UNAUTHORIZED,
|
||||||
|
"agent/info should not require authentication"
|
||||||
|
);
|
||||||
|
// Should be 503 because agent config is absent.
|
||||||
|
assert_eq!(response.status(), StatusCode::SERVICE_UNAVAILABLE);
|
||||||
|
}
|
||||||
|
|
||||||
|
// ── /api/v1/agent/binary ────────────────────────────────────────────
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
#[ignore = "integration test — requires database"]
|
||||||
|
async fn test_agent_binary_not_configured() {
|
||||||
|
let ctx = TestContext::new()
|
||||||
|
.await
|
||||||
|
.expect("Failed to create test context");
|
||||||
|
|
||||||
|
let response = ctx
|
||||||
|
.get("/api/v1/agent/binary", None)
|
||||||
|
.await
|
||||||
|
.expect("Failed to make request");
|
||||||
|
|
||||||
|
// Agent config is not set in config.test.yaml, so the handler returns 503.
|
||||||
|
assert_eq!(response.status(), StatusCode::SERVICE_UNAVAILABLE);
|
||||||
|
|
||||||
|
let body: serde_json::Value = response.json().await.expect("Failed to parse JSON");
|
||||||
|
assert_eq!(body["error"], "Not configured");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
#[ignore = "integration test — requires database"]
|
||||||
|
async fn test_agent_binary_no_auth_required() {
|
||||||
|
// Same reasoning as test_agent_info_no_auth_required: the binary
|
||||||
|
// download endpoint must be publicly accessible (no RequireAuth).
|
||||||
|
// When no bootstrap_token is configured, any caller can reach the
|
||||||
|
// handler. We still get 503 because the agent feature itself is
|
||||||
|
// not configured in the test environment.
|
||||||
|
let ctx = TestContext::new()
|
||||||
|
.await
|
||||||
|
.expect("Failed to create test context");
|
||||||
|
|
||||||
|
let response = ctx
|
||||||
|
.get("/api/v1/agent/binary", None)
|
||||||
|
.await
|
||||||
|
.expect("Failed to make request");
|
||||||
|
|
||||||
|
// Must NOT be 401 Unauthorized — the endpoint has no auth middleware.
|
||||||
|
assert_ne!(
|
||||||
|
response.status(),
|
||||||
|
StatusCode::UNAUTHORIZED,
|
||||||
|
"agent/binary should not require authentication when no bootstrap_token is configured"
|
||||||
|
);
|
||||||
|
// Should be 503 because agent config is absent.
|
||||||
|
assert_eq!(response.status(), StatusCode::SERVICE_UNAVAILABLE);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
#[ignore = "integration test — requires database"]
|
||||||
|
async fn test_agent_binary_invalid_arch() {
|
||||||
|
// Architecture validation (`validate_arch`) rejects unsupported values
|
||||||
|
// with 400 Bad Request. However, in the handler the execution order is:
|
||||||
|
// 1. validate_token (passes — no bootstrap_token configured)
|
||||||
|
// 2. check agent config (fails with 503 — not configured)
|
||||||
|
// 3. validate_arch (never reached)
|
||||||
|
//
|
||||||
|
// So even with an invalid arch like "mips", we get 503 from the config
|
||||||
|
// check before the arch is ever validated. The arch validation is covered
|
||||||
|
// by unit tests in routes/agent.rs instead.
|
||||||
|
let ctx = TestContext::new()
|
||||||
|
.await
|
||||||
|
.expect("Failed to create test context");
|
||||||
|
|
||||||
|
let response = ctx
|
||||||
|
.get("/api/v1/agent/binary?arch=mips", None)
|
||||||
|
.await
|
||||||
|
.expect("Failed to make request");
|
||||||
|
|
||||||
|
// 503 from the agent-config-not-set check, NOT 400 from arch validation.
|
||||||
|
assert_eq!(response.status(), StatusCode::SERVICE_UNAVAILABLE);
|
||||||
|
}
|
||||||
@@ -305,6 +305,126 @@ async fn test_login_nonexistent_user() {
|
|||||||
assert_eq!(response.status(), StatusCode::UNAUTHORIZED);
|
assert_eq!(response.status(), StatusCode::UNAUTHORIZED);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// ── LDAP auth tests ──────────────────────────────────────────────────
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
#[ignore = "integration test — requires database"]
|
||||||
|
async fn test_ldap_login_returns_501_when_not_configured() {
|
||||||
|
let ctx = TestContext::new()
|
||||||
|
.await
|
||||||
|
.expect("Failed to create test context");
|
||||||
|
|
||||||
|
let response = ctx
|
||||||
|
.post(
|
||||||
|
"/auth/ldap/login",
|
||||||
|
json!({
|
||||||
|
"login": "jdoe",
|
||||||
|
"password": "secret"
|
||||||
|
}),
|
||||||
|
None,
|
||||||
|
)
|
||||||
|
.await
|
||||||
|
.expect("Failed to make request");
|
||||||
|
|
||||||
|
// LDAP is not configured in config.test.yaml, so the endpoint
|
||||||
|
// should return 501 Not Implemented.
|
||||||
|
assert_eq!(response.status(), StatusCode::NOT_IMPLEMENTED);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
#[ignore = "integration test — requires database"]
|
||||||
|
async fn test_ldap_login_validates_empty_login() {
|
||||||
|
let ctx = TestContext::new()
|
||||||
|
.await
|
||||||
|
.expect("Failed to create test context");
|
||||||
|
|
||||||
|
let response = ctx
|
||||||
|
.post(
|
||||||
|
"/auth/ldap/login",
|
||||||
|
json!({
|
||||||
|
"login": "",
|
||||||
|
"password": "secret"
|
||||||
|
}),
|
||||||
|
None,
|
||||||
|
)
|
||||||
|
.await
|
||||||
|
.expect("Failed to make request");
|
||||||
|
|
||||||
|
// Validation should fail before we even check LDAP config
|
||||||
|
assert_eq!(response.status(), StatusCode::UNPROCESSABLE_ENTITY);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
#[ignore = "integration test — requires database"]
|
||||||
|
async fn test_ldap_login_validates_empty_password() {
|
||||||
|
let ctx = TestContext::new()
|
||||||
|
.await
|
||||||
|
.expect("Failed to create test context");
|
||||||
|
|
||||||
|
let response = ctx
|
||||||
|
.post(
|
||||||
|
"/auth/ldap/login",
|
||||||
|
json!({
|
||||||
|
"login": "jdoe",
|
||||||
|
"password": ""
|
||||||
|
}),
|
||||||
|
None,
|
||||||
|
)
|
||||||
|
.await
|
||||||
|
.expect("Failed to make request");
|
||||||
|
|
||||||
|
assert_eq!(response.status(), StatusCode::UNPROCESSABLE_ENTITY);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
#[ignore = "integration test — requires database"]
|
||||||
|
async fn test_ldap_login_validates_missing_fields() {
|
||||||
|
let ctx = TestContext::new()
|
||||||
|
.await
|
||||||
|
.expect("Failed to create test context");
|
||||||
|
|
||||||
|
let response = ctx
|
||||||
|
.post("/auth/ldap/login", json!({}), None)
|
||||||
|
.await
|
||||||
|
.expect("Failed to make request");
|
||||||
|
|
||||||
|
// Missing required fields should return 422
|
||||||
|
assert_eq!(response.status(), StatusCode::UNPROCESSABLE_ENTITY);
|
||||||
|
}
|
||||||
|
|
||||||
|
// ── auth/settings LDAP field tests ──────────────────────────────────
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
#[ignore = "integration test — requires database"]
|
||||||
|
async fn test_auth_settings_includes_ldap_fields_disabled() {
|
||||||
|
let ctx = TestContext::new()
|
||||||
|
.await
|
||||||
|
.expect("Failed to create test context");
|
||||||
|
|
||||||
|
let response = ctx
|
||||||
|
.get("/auth/settings", None)
|
||||||
|
.await
|
||||||
|
.expect("Failed to make request");
|
||||||
|
|
||||||
|
assert_eq!(response.status(), StatusCode::OK);
|
||||||
|
|
||||||
|
let body: serde_json::Value = response.json().await.expect("Failed to parse JSON");
|
||||||
|
|
||||||
|
// LDAP is not configured in config.test.yaml, so these should all
|
||||||
|
// reflect the disabled state.
|
||||||
|
assert_eq!(body["data"]["ldap_enabled"], false);
|
||||||
|
assert_eq!(body["data"]["ldap_visible_by_default"], false);
|
||||||
|
assert!(body["data"]["ldap_provider_name"].is_null());
|
||||||
|
assert!(body["data"]["ldap_provider_label"].is_null());
|
||||||
|
assert!(body["data"]["ldap_provider_icon_url"].is_null());
|
||||||
|
|
||||||
|
// Existing fields should still be present
|
||||||
|
assert!(body["data"]["authentication_enabled"].is_boolean());
|
||||||
|
assert!(body["data"]["local_password_enabled"].is_boolean());
|
||||||
|
assert!(body["data"]["oidc_enabled"].is_boolean());
|
||||||
|
assert!(body["data"]["self_registration_enabled"].is_boolean());
|
||||||
|
}
|
||||||
|
|
||||||
#[tokio::test]
|
#[tokio::test]
|
||||||
#[ignore = "integration test — requires database"]
|
#[ignore = "integration test — requires database"]
|
||||||
async fn test_get_current_user() {
|
async fn test_get_current_user() {
|
||||||
|
|||||||
@@ -9,6 +9,10 @@ use attune_common::{
|
|||||||
models::*,
|
models::*,
|
||||||
repositories::{
|
repositories::{
|
||||||
action::{ActionRepository, CreateActionInput},
|
action::{ActionRepository, CreateActionInput},
|
||||||
|
identity::{
|
||||||
|
CreatePermissionAssignmentInput, CreatePermissionSetInput,
|
||||||
|
PermissionAssignmentRepository, PermissionSetRepository,
|
||||||
|
},
|
||||||
pack::{CreatePackInput, PackRepository},
|
pack::{CreatePackInput, PackRepository},
|
||||||
trigger::{CreateTriggerInput, TriggerRepository},
|
trigger::{CreateTriggerInput, TriggerRepository},
|
||||||
workflow::{CreateWorkflowDefinitionInput, WorkflowDefinitionRepository},
|
workflow::{CreateWorkflowDefinitionInput, WorkflowDefinitionRepository},
|
||||||
@@ -246,6 +250,48 @@ impl TestContext {
|
|||||||
Ok(self)
|
Ok(self)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Create and authenticate a test user with identity + permission admin grants.
|
||||||
|
#[allow(dead_code)]
|
||||||
|
pub async fn with_admin_auth(mut self) -> Result<Self> {
|
||||||
|
let unique_id = uuid::Uuid::new_v4().to_string().replace("-", "")[..8].to_string();
|
||||||
|
let login = format!("adminuser_{}", unique_id);
|
||||||
|
let token = self.create_test_user(&login).await?;
|
||||||
|
|
||||||
|
let identity = attune_common::repositories::identity::IdentityRepository::find_by_login(
|
||||||
|
&self.pool, &login,
|
||||||
|
)
|
||||||
|
.await?
|
||||||
|
.ok_or_else(|| format!("Failed to find newly created identity '{}'", login))?;
|
||||||
|
|
||||||
|
let permset = PermissionSetRepository::create(
|
||||||
|
&self.pool,
|
||||||
|
CreatePermissionSetInput {
|
||||||
|
r#ref: "core.admin".to_string(),
|
||||||
|
pack: None,
|
||||||
|
pack_ref: None,
|
||||||
|
label: Some("Admin".to_string()),
|
||||||
|
description: Some("Test admin permission set".to_string()),
|
||||||
|
grants: json!([
|
||||||
|
{"resource": "identities", "actions": ["read", "create", "update", "delete"]},
|
||||||
|
{"resource": "permissions", "actions": ["read", "create", "update", "delete", "manage"]}
|
||||||
|
]),
|
||||||
|
},
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
PermissionAssignmentRepository::create(
|
||||||
|
&self.pool,
|
||||||
|
CreatePermissionAssignmentInput {
|
||||||
|
identity: identity.id,
|
||||||
|
permset: permset.id,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
self.token = Some(token);
|
||||||
|
Ok(self)
|
||||||
|
}
|
||||||
|
|
||||||
/// Create a test user and return access token
|
/// Create a test user and return access token
|
||||||
async fn create_test_user(&self, login: &str) -> Result<String> {
|
async fn create_test_user(&self, login: &str) -> Result<String> {
|
||||||
// Register via API to get real token
|
// Register via API to get real token
|
||||||
@@ -506,7 +552,6 @@ pub async fn create_test_workflow(
|
|||||||
]
|
]
|
||||||
}),
|
}),
|
||||||
tags: vec!["test".to_string()],
|
tags: vec!["test".to_string()],
|
||||||
enabled: true,
|
|
||||||
};
|
};
|
||||||
|
|
||||||
Ok(WorkflowDefinitionRepository::create(pool, input).await?)
|
Ok(WorkflowDefinitionRepository::create(pool, input).await?)
|
||||||
|
|||||||
@@ -22,7 +22,6 @@ ref: {}.example_workflow
|
|||||||
label: Example Workflow
|
label: Example Workflow
|
||||||
description: A test workflow for integration testing
|
description: A test workflow for integration testing
|
||||||
version: "1.0.0"
|
version: "1.0.0"
|
||||||
enabled: true
|
|
||||||
parameters:
|
parameters:
|
||||||
message:
|
message:
|
||||||
type: string
|
type: string
|
||||||
@@ -46,7 +45,6 @@ ref: {}.another_workflow
|
|||||||
label: Another Workflow
|
label: Another Workflow
|
||||||
description: Second test workflow
|
description: Second test workflow
|
||||||
version: "1.0.0"
|
version: "1.0.0"
|
||||||
enabled: false
|
|
||||||
tasks:
|
tasks:
|
||||||
- name: task1
|
- name: task1
|
||||||
action: core.noop
|
action: core.noop
|
||||||
|
|||||||
178
crates/api/tests/permissions_api_tests.rs
Normal file
178
crates/api/tests/permissions_api_tests.rs
Normal file
@@ -0,0 +1,178 @@
|
|||||||
|
use axum::http::StatusCode;
|
||||||
|
use helpers::*;
|
||||||
|
use serde_json::json;
|
||||||
|
|
||||||
|
mod helpers;
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
#[ignore = "integration test — requires database"]
|
||||||
|
async fn test_identity_crud_and_permission_assignment_flow() {
|
||||||
|
let ctx = TestContext::new()
|
||||||
|
.await
|
||||||
|
.expect("Failed to create test context")
|
||||||
|
.with_admin_auth()
|
||||||
|
.await
|
||||||
|
.expect("Failed to create admin-authenticated test user");
|
||||||
|
|
||||||
|
let create_identity_response = ctx
|
||||||
|
.post(
|
||||||
|
"/api/v1/identities",
|
||||||
|
json!({
|
||||||
|
"login": "managed_user",
|
||||||
|
"display_name": "Managed User",
|
||||||
|
"password": "ManagedPass123!",
|
||||||
|
"attributes": {
|
||||||
|
"department": "platform"
|
||||||
|
}
|
||||||
|
}),
|
||||||
|
ctx.token(),
|
||||||
|
)
|
||||||
|
.await
|
||||||
|
.expect("Failed to create identity");
|
||||||
|
|
||||||
|
assert_eq!(create_identity_response.status(), StatusCode::CREATED);
|
||||||
|
|
||||||
|
let created_identity: serde_json::Value = create_identity_response
|
||||||
|
.json()
|
||||||
|
.await
|
||||||
|
.expect("Failed to parse identity create response");
|
||||||
|
let identity_id = created_identity["data"]["id"]
|
||||||
|
.as_i64()
|
||||||
|
.expect("Missing identity id");
|
||||||
|
|
||||||
|
let list_identities_response = ctx
|
||||||
|
.get("/api/v1/identities", ctx.token())
|
||||||
|
.await
|
||||||
|
.expect("Failed to list identities");
|
||||||
|
assert_eq!(list_identities_response.status(), StatusCode::OK);
|
||||||
|
|
||||||
|
let identities_body: serde_json::Value = list_identities_response
|
||||||
|
.json()
|
||||||
|
.await
|
||||||
|
.expect("Failed to parse identities response");
|
||||||
|
assert!(identities_body["data"]
|
||||||
|
.as_array()
|
||||||
|
.expect("Expected data array")
|
||||||
|
.iter()
|
||||||
|
.any(|item| item["login"] == "managed_user"));
|
||||||
|
|
||||||
|
let update_identity_response = ctx
|
||||||
|
.put(
|
||||||
|
&format!("/api/v1/identities/{}", identity_id),
|
||||||
|
json!({
|
||||||
|
"display_name": "Managed User Updated",
|
||||||
|
"attributes": {
|
||||||
|
"department": "security"
|
||||||
|
}
|
||||||
|
}),
|
||||||
|
ctx.token(),
|
||||||
|
)
|
||||||
|
.await
|
||||||
|
.expect("Failed to update identity");
|
||||||
|
assert_eq!(update_identity_response.status(), StatusCode::OK);
|
||||||
|
|
||||||
|
let get_identity_response = ctx
|
||||||
|
.get(&format!("/api/v1/identities/{}", identity_id), ctx.token())
|
||||||
|
.await
|
||||||
|
.expect("Failed to get identity");
|
||||||
|
assert_eq!(get_identity_response.status(), StatusCode::OK);
|
||||||
|
|
||||||
|
let identity_body: serde_json::Value = get_identity_response
|
||||||
|
.json()
|
||||||
|
.await
|
||||||
|
.expect("Failed to parse get identity response");
|
||||||
|
assert_eq!(
|
||||||
|
identity_body["data"]["display_name"],
|
||||||
|
"Managed User Updated"
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
identity_body["data"]["attributes"]["department"],
|
||||||
|
"security"
|
||||||
|
);
|
||||||
|
|
||||||
|
let permission_sets_response = ctx
|
||||||
|
.get("/api/v1/permissions/sets", ctx.token())
|
||||||
|
.await
|
||||||
|
.expect("Failed to list permission sets");
|
||||||
|
assert_eq!(permission_sets_response.status(), StatusCode::OK);
|
||||||
|
|
||||||
|
let assignment_response = ctx
|
||||||
|
.post(
|
||||||
|
"/api/v1/permissions/assignments",
|
||||||
|
json!({
|
||||||
|
"identity_id": identity_id,
|
||||||
|
"permission_set_ref": "core.admin"
|
||||||
|
}),
|
||||||
|
ctx.token(),
|
||||||
|
)
|
||||||
|
.await
|
||||||
|
.expect("Failed to create permission assignment");
|
||||||
|
assert_eq!(assignment_response.status(), StatusCode::CREATED);
|
||||||
|
|
||||||
|
let assignment_body: serde_json::Value = assignment_response
|
||||||
|
.json()
|
||||||
|
.await
|
||||||
|
.expect("Failed to parse permission assignment response");
|
||||||
|
let assignment_id = assignment_body["data"]["id"]
|
||||||
|
.as_i64()
|
||||||
|
.expect("Missing assignment id");
|
||||||
|
assert_eq!(assignment_body["data"]["permission_set_ref"], "core.admin");
|
||||||
|
|
||||||
|
let list_assignments_response = ctx
|
||||||
|
.get(
|
||||||
|
&format!("/api/v1/identities/{}/permissions", identity_id),
|
||||||
|
ctx.token(),
|
||||||
|
)
|
||||||
|
.await
|
||||||
|
.expect("Failed to list identity permissions");
|
||||||
|
assert_eq!(list_assignments_response.status(), StatusCode::OK);
|
||||||
|
|
||||||
|
let assignments_body: serde_json::Value = list_assignments_response
|
||||||
|
.json()
|
||||||
|
.await
|
||||||
|
.expect("Failed to parse identity permissions response");
|
||||||
|
assert!(assignments_body
|
||||||
|
.as_array()
|
||||||
|
.expect("Expected array response")
|
||||||
|
.iter()
|
||||||
|
.any(|item| item["permission_set_ref"] == "core.admin"));
|
||||||
|
|
||||||
|
let delete_assignment_response = ctx
|
||||||
|
.delete(
|
||||||
|
&format!("/api/v1/permissions/assignments/{}", assignment_id),
|
||||||
|
ctx.token(),
|
||||||
|
)
|
||||||
|
.await
|
||||||
|
.expect("Failed to delete assignment");
|
||||||
|
assert_eq!(delete_assignment_response.status(), StatusCode::OK);
|
||||||
|
|
||||||
|
let delete_identity_response = ctx
|
||||||
|
.delete(&format!("/api/v1/identities/{}", identity_id), ctx.token())
|
||||||
|
.await
|
||||||
|
.expect("Failed to delete identity");
|
||||||
|
assert_eq!(delete_identity_response.status(), StatusCode::OK);
|
||||||
|
|
||||||
|
let missing_identity_response = ctx
|
||||||
|
.get(&format!("/api/v1/identities/{}", identity_id), ctx.token())
|
||||||
|
.await
|
||||||
|
.expect("Failed to fetch deleted identity");
|
||||||
|
assert_eq!(missing_identity_response.status(), StatusCode::NOT_FOUND);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
#[ignore = "integration test — requires database"]
|
||||||
|
async fn test_plain_authenticated_user_cannot_manage_identities() {
|
||||||
|
let ctx = TestContext::new()
|
||||||
|
.await
|
||||||
|
.expect("Failed to create test context")
|
||||||
|
.with_auth()
|
||||||
|
.await
|
||||||
|
.expect("Failed to authenticate plain test user");
|
||||||
|
|
||||||
|
let response = ctx
|
||||||
|
.get("/api/v1/identities", ctx.token())
|
||||||
|
.await
|
||||||
|
.expect("Failed to call identities endpoint");
|
||||||
|
|
||||||
|
assert_eq!(response.status(), StatusCode::FORBIDDEN);
|
||||||
|
}
|
||||||
@@ -75,6 +75,7 @@ async fn create_test_execution(pool: &PgPool, action_id: i64) -> Result<Executio
|
|||||||
parent: None,
|
parent: None,
|
||||||
enforcement: None,
|
enforcement: None,
|
||||||
executor: None,
|
executor: None,
|
||||||
|
worker: None,
|
||||||
status: ExecutionStatus::Scheduled,
|
status: ExecutionStatus::Scheduled,
|
||||||
result: None,
|
result: None,
|
||||||
workflow_task: None,
|
workflow_task: None,
|
||||||
|
|||||||
@@ -46,8 +46,7 @@ async fn test_create_workflow_success() {
|
|||||||
}
|
}
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
"tags": ["test", "automation"],
|
"tags": ["test", "automation"]
|
||||||
"enabled": true
|
|
||||||
}),
|
}),
|
||||||
ctx.token(),
|
ctx.token(),
|
||||||
)
|
)
|
||||||
@@ -60,7 +59,6 @@ async fn test_create_workflow_success() {
|
|||||||
assert_eq!(body["data"]["ref"], "test-pack.test_workflow");
|
assert_eq!(body["data"]["ref"], "test-pack.test_workflow");
|
||||||
assert_eq!(body["data"]["label"], "Test Workflow");
|
assert_eq!(body["data"]["label"], "Test Workflow");
|
||||||
assert_eq!(body["data"]["version"], "1.0.0");
|
assert_eq!(body["data"]["version"], "1.0.0");
|
||||||
assert_eq!(body["data"]["enabled"], true);
|
|
||||||
assert!(body["data"]["tags"].as_array().unwrap().len() == 2);
|
assert!(body["data"]["tags"].as_array().unwrap().len() == 2);
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -85,7 +83,6 @@ async fn test_create_workflow_duplicate_ref() {
|
|||||||
out_schema: None,
|
out_schema: None,
|
||||||
definition: json!({"tasks": []}),
|
definition: json!({"tasks": []}),
|
||||||
tags: vec![],
|
tags: vec![],
|
||||||
enabled: true,
|
|
||||||
};
|
};
|
||||||
WorkflowDefinitionRepository::create(&ctx.pool, input)
|
WorkflowDefinitionRepository::create(&ctx.pool, input)
|
||||||
.await
|
.await
|
||||||
@@ -152,7 +149,6 @@ async fn test_get_workflow_by_ref() {
|
|||||||
out_schema: None,
|
out_schema: None,
|
||||||
definition: json!({"tasks": [{"name": "task1"}]}),
|
definition: json!({"tasks": [{"name": "task1"}]}),
|
||||||
tags: vec!["test".to_string()],
|
tags: vec!["test".to_string()],
|
||||||
enabled: true,
|
|
||||||
};
|
};
|
||||||
WorkflowDefinitionRepository::create(&ctx.pool, input)
|
WorkflowDefinitionRepository::create(&ctx.pool, input)
|
||||||
.await
|
.await
|
||||||
@@ -206,7 +202,6 @@ async fn test_list_workflows() {
|
|||||||
out_schema: None,
|
out_schema: None,
|
||||||
definition: json!({"tasks": []}),
|
definition: json!({"tasks": []}),
|
||||||
tags: vec!["test".to_string()],
|
tags: vec!["test".to_string()],
|
||||||
enabled: i % 2 == 1, // Odd ones enabled
|
|
||||||
};
|
};
|
||||||
WorkflowDefinitionRepository::create(&ctx.pool, input)
|
WorkflowDefinitionRepository::create(&ctx.pool, input)
|
||||||
.await
|
.await
|
||||||
@@ -256,7 +251,6 @@ async fn test_list_workflows_by_pack() {
|
|||||||
out_schema: None,
|
out_schema: None,
|
||||||
definition: json!({"tasks": []}),
|
definition: json!({"tasks": []}),
|
||||||
tags: vec![],
|
tags: vec![],
|
||||||
enabled: true,
|
|
||||||
};
|
};
|
||||||
WorkflowDefinitionRepository::create(&ctx.pool, input)
|
WorkflowDefinitionRepository::create(&ctx.pool, input)
|
||||||
.await
|
.await
|
||||||
@@ -275,7 +269,6 @@ async fn test_list_workflows_by_pack() {
|
|||||||
out_schema: None,
|
out_schema: None,
|
||||||
definition: json!({"tasks": []}),
|
definition: json!({"tasks": []}),
|
||||||
tags: vec![],
|
tags: vec![],
|
||||||
enabled: true,
|
|
||||||
};
|
};
|
||||||
WorkflowDefinitionRepository::create(&ctx.pool, input)
|
WorkflowDefinitionRepository::create(&ctx.pool, input)
|
||||||
.await
|
.await
|
||||||
@@ -308,14 +301,14 @@ async fn test_list_workflows_with_filters() {
|
|||||||
let pack_name = unique_pack_name();
|
let pack_name = unique_pack_name();
|
||||||
let pack = create_test_pack(&ctx.pool, &pack_name).await.unwrap();
|
let pack = create_test_pack(&ctx.pool, &pack_name).await.unwrap();
|
||||||
|
|
||||||
// Create workflows with different tags and enabled status
|
// Create workflows with different tags
|
||||||
let workflows = vec![
|
let workflows = vec![
|
||||||
("workflow1", vec!["incident", "approval"], true),
|
("workflow1", vec!["incident", "approval"]),
|
||||||
("workflow2", vec!["incident"], false),
|
("workflow2", vec!["incident"]),
|
||||||
("workflow3", vec!["automation"], true),
|
("workflow3", vec!["automation"]),
|
||||||
];
|
];
|
||||||
|
|
||||||
for (ref_name, tags, enabled) in workflows {
|
for (ref_name, tags) in workflows {
|
||||||
let input = CreateWorkflowDefinitionInput {
|
let input = CreateWorkflowDefinitionInput {
|
||||||
r#ref: format!("test-pack.{}", ref_name),
|
r#ref: format!("test-pack.{}", ref_name),
|
||||||
pack: pack.id,
|
pack: pack.id,
|
||||||
@@ -327,24 +320,12 @@ async fn test_list_workflows_with_filters() {
|
|||||||
out_schema: None,
|
out_schema: None,
|
||||||
definition: json!({"tasks": []}),
|
definition: json!({"tasks": []}),
|
||||||
tags: tags.iter().map(|s| s.to_string()).collect(),
|
tags: tags.iter().map(|s| s.to_string()).collect(),
|
||||||
enabled,
|
|
||||||
};
|
};
|
||||||
WorkflowDefinitionRepository::create(&ctx.pool, input)
|
WorkflowDefinitionRepository::create(&ctx.pool, input)
|
||||||
.await
|
.await
|
||||||
.unwrap();
|
.unwrap();
|
||||||
}
|
}
|
||||||
|
|
||||||
// Filter by enabled (and pack_ref for isolation)
|
|
||||||
let response = ctx
|
|
||||||
.get(
|
|
||||||
&format!("/api/v1/workflows?enabled=true&pack_ref={}", pack_name),
|
|
||||||
ctx.token(),
|
|
||||||
)
|
|
||||||
.await
|
|
||||||
.unwrap();
|
|
||||||
let body: Value = response.json().await.unwrap();
|
|
||||||
assert_eq!(body["data"].as_array().unwrap().len(), 2);
|
|
||||||
|
|
||||||
// Filter by tag (and pack_ref for isolation)
|
// Filter by tag (and pack_ref for isolation)
|
||||||
let response = ctx
|
let response = ctx
|
||||||
.get(
|
.get(
|
||||||
@@ -387,7 +368,6 @@ async fn test_update_workflow() {
|
|||||||
out_schema: None,
|
out_schema: None,
|
||||||
definition: json!({"tasks": []}),
|
definition: json!({"tasks": []}),
|
||||||
tags: vec!["test".to_string()],
|
tags: vec!["test".to_string()],
|
||||||
enabled: true,
|
|
||||||
};
|
};
|
||||||
WorkflowDefinitionRepository::create(&ctx.pool, input)
|
WorkflowDefinitionRepository::create(&ctx.pool, input)
|
||||||
.await
|
.await
|
||||||
@@ -400,8 +380,7 @@ async fn test_update_workflow() {
|
|||||||
json!({
|
json!({
|
||||||
"label": "Updated Label",
|
"label": "Updated Label",
|
||||||
"description": "Updated description",
|
"description": "Updated description",
|
||||||
"version": "1.1.0",
|
"version": "1.1.0"
|
||||||
"enabled": false
|
|
||||||
}),
|
}),
|
||||||
ctx.token(),
|
ctx.token(),
|
||||||
)
|
)
|
||||||
@@ -414,7 +393,6 @@ async fn test_update_workflow() {
|
|||||||
assert_eq!(body["data"]["label"], "Updated Label");
|
assert_eq!(body["data"]["label"], "Updated Label");
|
||||||
assert_eq!(body["data"]["description"], "Updated description");
|
assert_eq!(body["data"]["description"], "Updated description");
|
||||||
assert_eq!(body["data"]["version"], "1.1.0");
|
assert_eq!(body["data"]["version"], "1.1.0");
|
||||||
assert_eq!(body["data"]["enabled"], false);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[tokio::test]
|
#[tokio::test]
|
||||||
@@ -455,7 +433,6 @@ async fn test_delete_workflow() {
|
|||||||
out_schema: None,
|
out_schema: None,
|
||||||
definition: json!({"tasks": []}),
|
definition: json!({"tasks": []}),
|
||||||
tags: vec![],
|
tags: vec![],
|
||||||
enabled: true,
|
|
||||||
};
|
};
|
||||||
WorkflowDefinitionRepository::create(&ctx.pool, input)
|
WorkflowDefinitionRepository::create(&ctx.pool, input)
|
||||||
.await
|
.await
|
||||||
|
|||||||
@@ -69,7 +69,7 @@ tracing-subscriber = { workspace = true }
|
|||||||
[dev-dependencies]
|
[dev-dependencies]
|
||||||
tempfile = { workspace = true }
|
tempfile = { workspace = true }
|
||||||
wiremock = "0.6"
|
wiremock = "0.6"
|
||||||
assert_cmd = "2.1"
|
assert_cmd = "2.2"
|
||||||
predicates = "3.1"
|
predicates = "3.1"
|
||||||
mockito = "1.7"
|
mockito = "1.7"
|
||||||
tokio-test = "0.4"
|
tokio-test = "0.4"
|
||||||
|
|||||||
@@ -1775,19 +1775,25 @@ async fn handle_update(
|
|||||||
anyhow::bail!("At least one field must be provided to update");
|
anyhow::bail!("At least one field must be provided to update");
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[derive(Serialize)]
|
||||||
|
#[serde(tag = "op", content = "value", rename_all = "snake_case")]
|
||||||
|
enum PackDescriptionPatch {
|
||||||
|
Set(String),
|
||||||
|
}
|
||||||
|
|
||||||
#[derive(Serialize)]
|
#[derive(Serialize)]
|
||||||
struct UpdatePackRequest {
|
struct UpdatePackRequest {
|
||||||
#[serde(skip_serializing_if = "Option::is_none")]
|
#[serde(skip_serializing_if = "Option::is_none")]
|
||||||
label: Option<String>,
|
label: Option<String>,
|
||||||
#[serde(skip_serializing_if = "Option::is_none")]
|
#[serde(skip_serializing_if = "Option::is_none")]
|
||||||
description: Option<String>,
|
description: Option<PackDescriptionPatch>,
|
||||||
#[serde(skip_serializing_if = "Option::is_none")]
|
#[serde(skip_serializing_if = "Option::is_none")]
|
||||||
version: Option<String>,
|
version: Option<String>,
|
||||||
}
|
}
|
||||||
|
|
||||||
let request = UpdatePackRequest {
|
let request = UpdatePackRequest {
|
||||||
label,
|
label,
|
||||||
description,
|
description: description.map(PackDescriptionPatch::Set),
|
||||||
version,
|
version,
|
||||||
};
|
};
|
||||||
|
|
||||||
|
|||||||
@@ -254,19 +254,25 @@ async fn handle_update(
|
|||||||
anyhow::bail!("At least one field must be provided to update");
|
anyhow::bail!("At least one field must be provided to update");
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[derive(Serialize)]
|
||||||
|
#[serde(tag = "op", content = "value", rename_all = "snake_case")]
|
||||||
|
enum TriggerDescriptionPatch {
|
||||||
|
Set(String),
|
||||||
|
}
|
||||||
|
|
||||||
#[derive(Serialize)]
|
#[derive(Serialize)]
|
||||||
struct UpdateTriggerRequest {
|
struct UpdateTriggerRequest {
|
||||||
#[serde(skip_serializing_if = "Option::is_none")]
|
#[serde(skip_serializing_if = "Option::is_none")]
|
||||||
label: Option<String>,
|
label: Option<String>,
|
||||||
#[serde(skip_serializing_if = "Option::is_none")]
|
#[serde(skip_serializing_if = "Option::is_none")]
|
||||||
description: Option<String>,
|
description: Option<TriggerDescriptionPatch>,
|
||||||
#[serde(skip_serializing_if = "Option::is_none")]
|
#[serde(skip_serializing_if = "Option::is_none")]
|
||||||
enabled: Option<bool>,
|
enabled: Option<bool>,
|
||||||
}
|
}
|
||||||
|
|
||||||
let request = UpdateTriggerRequest {
|
let request = UpdateTriggerRequest {
|
||||||
label,
|
label,
|
||||||
description,
|
description: description.map(TriggerDescriptionPatch::Set),
|
||||||
enabled,
|
enabled,
|
||||||
};
|
};
|
||||||
|
|
||||||
|
|||||||
@@ -85,10 +85,6 @@ struct ActionYaml {
|
|||||||
/// Tags
|
/// Tags
|
||||||
#[serde(default)]
|
#[serde(default)]
|
||||||
tags: Option<Vec<String>>,
|
tags: Option<Vec<String>>,
|
||||||
|
|
||||||
/// Whether the action is enabled
|
|
||||||
#[serde(default)]
|
|
||||||
enabled: Option<bool>,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// ── API DTOs ────────────────────────────────────────────────────────────
|
// ── API DTOs ────────────────────────────────────────────────────────────
|
||||||
@@ -109,8 +105,6 @@ struct SaveWorkflowFileRequest {
|
|||||||
out_schema: Option<serde_json::Value>,
|
out_schema: Option<serde_json::Value>,
|
||||||
#[serde(skip_serializing_if = "Option::is_none")]
|
#[serde(skip_serializing_if = "Option::is_none")]
|
||||||
tags: Option<Vec<String>>,
|
tags: Option<Vec<String>>,
|
||||||
#[serde(skip_serializing_if = "Option::is_none")]
|
|
||||||
enabled: Option<bool>,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Serialize, Deserialize)]
|
#[derive(Debug, Serialize, Deserialize)]
|
||||||
@@ -127,7 +121,6 @@ struct WorkflowResponse {
|
|||||||
out_schema: Option<serde_json::Value>,
|
out_schema: Option<serde_json::Value>,
|
||||||
definition: serde_json::Value,
|
definition: serde_json::Value,
|
||||||
tags: Vec<String>,
|
tags: Vec<String>,
|
||||||
enabled: bool,
|
|
||||||
created: String,
|
created: String,
|
||||||
updated: String,
|
updated: String,
|
||||||
}
|
}
|
||||||
@@ -142,7 +135,6 @@ struct WorkflowSummary {
|
|||||||
description: Option<String>,
|
description: Option<String>,
|
||||||
version: String,
|
version: String,
|
||||||
tags: Vec<String>,
|
tags: Vec<String>,
|
||||||
enabled: bool,
|
|
||||||
created: String,
|
created: String,
|
||||||
updated: String,
|
updated: String,
|
||||||
}
|
}
|
||||||
@@ -281,7 +273,6 @@ async fn handle_upload(
|
|||||||
param_schema: action.parameters.clone(),
|
param_schema: action.parameters.clone(),
|
||||||
out_schema: action.output.clone(),
|
out_schema: action.output.clone(),
|
||||||
tags: action.tags.clone(),
|
tags: action.tags.clone(),
|
||||||
enabled: action.enabled,
|
|
||||||
};
|
};
|
||||||
|
|
||||||
// ── 6. Print progress ───────────────────────────────────────────────
|
// ── 6. Print progress ───────────────────────────────────────────────
|
||||||
@@ -357,7 +348,6 @@ async fn handle_upload(
|
|||||||
response.tags.join(", ")
|
response.tags.join(", ")
|
||||||
},
|
},
|
||||||
),
|
),
|
||||||
("Enabled", output::format_bool(response.enabled)),
|
|
||||||
]);
|
]);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -408,15 +398,7 @@ async fn handle_list(
|
|||||||
let mut table = output::create_table();
|
let mut table = output::create_table();
|
||||||
output::add_header(
|
output::add_header(
|
||||||
&mut table,
|
&mut table,
|
||||||
vec![
|
vec!["ID", "Reference", "Pack", "Label", "Version", "Tags"],
|
||||||
"ID",
|
|
||||||
"Reference",
|
|
||||||
"Pack",
|
|
||||||
"Label",
|
|
||||||
"Version",
|
|
||||||
"Enabled",
|
|
||||||
"Tags",
|
|
||||||
],
|
|
||||||
);
|
);
|
||||||
|
|
||||||
for wf in &workflows {
|
for wf in &workflows {
|
||||||
@@ -426,7 +408,6 @@ async fn handle_list(
|
|||||||
wf.pack_ref.clone(),
|
wf.pack_ref.clone(),
|
||||||
output::truncate(&wf.label, 30),
|
output::truncate(&wf.label, 30),
|
||||||
wf.version.clone(),
|
wf.version.clone(),
|
||||||
output::format_bool(wf.enabled),
|
|
||||||
if wf.tags.is_empty() {
|
if wf.tags.is_empty() {
|
||||||
"-".to_string()
|
"-".to_string()
|
||||||
} else {
|
} else {
|
||||||
@@ -478,7 +459,6 @@ async fn handle_show(
|
|||||||
.unwrap_or_else(|| "-".to_string()),
|
.unwrap_or_else(|| "-".to_string()),
|
||||||
),
|
),
|
||||||
("Version", workflow.version.clone()),
|
("Version", workflow.version.clone()),
|
||||||
("Enabled", output::format_bool(workflow.enabled)),
|
|
||||||
(
|
(
|
||||||
"Tags",
|
"Tags",
|
||||||
if workflow.tags.is_empty() {
|
if workflow.tags.is_empty() {
|
||||||
|
|||||||
@@ -401,8 +401,10 @@ mod tests {
|
|||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_effective_format_defaults_to_config() {
|
fn test_effective_format_defaults_to_config() {
|
||||||
let mut config = CliConfig::default();
|
let config = CliConfig {
|
||||||
config.format = "json".to_string();
|
format: "json".to_string(),
|
||||||
|
..Default::default()
|
||||||
|
};
|
||||||
|
|
||||||
// No CLI override → uses config
|
// No CLI override → uses config
|
||||||
assert_eq!(config.effective_format(None), OutputFormat::Json);
|
assert_eq!(config.effective_format(None), OutputFormat::Json);
|
||||||
@@ -410,8 +412,10 @@ mod tests {
|
|||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_effective_format_cli_overrides_config() {
|
fn test_effective_format_cli_overrides_config() {
|
||||||
let mut config = CliConfig::default();
|
let config = CliConfig {
|
||||||
config.format = "json".to_string();
|
format: "json".to_string(),
|
||||||
|
..Default::default()
|
||||||
|
};
|
||||||
|
|
||||||
// CLI override wins
|
// CLI override wins
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
|
|||||||
107
crates/common/src/agent_bootstrap.rs
Normal file
107
crates/common/src/agent_bootstrap.rs
Normal file
@@ -0,0 +1,107 @@
|
|||||||
|
//! Shared bootstrap helpers for injected agent binaries.
|
||||||
|
|
||||||
|
use crate::agent_runtime_detection::{
|
||||||
|
detect_runtimes, format_as_env_value, print_detection_report_for_env, DetectedRuntime,
|
||||||
|
};
|
||||||
|
use tracing::{info, warn};
|
||||||
|
|
||||||
|
#[derive(Debug, Clone)]
|
||||||
|
pub struct RuntimeBootstrapResult {
|
||||||
|
pub runtimes_override: Option<String>,
|
||||||
|
pub detected_runtimes: Option<Vec<DetectedRuntime>>,
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Detect runtimes and populate the agent runtime environment variable when needed.
|
||||||
|
///
|
||||||
|
/// This must run before the Tokio runtime starts because it may mutate process
|
||||||
|
/// environment variables.
|
||||||
|
pub fn bootstrap_runtime_env(env_var_name: &str) -> RuntimeBootstrapResult {
|
||||||
|
let runtimes_override = std::env::var(env_var_name).ok();
|
||||||
|
let mut detected_runtimes = None;
|
||||||
|
|
||||||
|
if let Some(ref override_value) = runtimes_override {
|
||||||
|
info!(
|
||||||
|
"{} already set (override): {}",
|
||||||
|
env_var_name, override_value
|
||||||
|
);
|
||||||
|
info!("Running auto-detection for override-specified runtimes...");
|
||||||
|
|
||||||
|
let detected = detect_runtimes();
|
||||||
|
let override_names: Vec<&str> = override_value.split(',').map(|s| s.trim()).collect();
|
||||||
|
|
||||||
|
let filtered: Vec<_> = detected
|
||||||
|
.into_iter()
|
||||||
|
.filter(|rt| {
|
||||||
|
let lower_name = rt.name.to_ascii_lowercase();
|
||||||
|
override_names
|
||||||
|
.iter()
|
||||||
|
.any(|ov| ov.to_ascii_lowercase() == lower_name)
|
||||||
|
})
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
if filtered.is_empty() {
|
||||||
|
warn!(
|
||||||
|
"None of the override runtimes ({}) were found on this system",
|
||||||
|
override_value
|
||||||
|
);
|
||||||
|
} else {
|
||||||
|
info!(
|
||||||
|
"Matched {} override runtime(s) to detected interpreters:",
|
||||||
|
filtered.len()
|
||||||
|
);
|
||||||
|
for rt in &filtered {
|
||||||
|
match &rt.version {
|
||||||
|
Some(ver) => info!(" ✓ {} — {} ({})", rt.name, rt.path, ver),
|
||||||
|
None => info!(" ✓ {} — {}", rt.name, rt.path),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
detected_runtimes = Some(filtered);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
info!("No {} override — running auto-detection...", env_var_name);
|
||||||
|
|
||||||
|
let detected = detect_runtimes();
|
||||||
|
|
||||||
|
if detected.is_empty() {
|
||||||
|
warn!("No runtimes detected! The agent may not be able to execute any work.");
|
||||||
|
} else {
|
||||||
|
info!("Detected {} runtime(s):", detected.len());
|
||||||
|
for rt in &detected {
|
||||||
|
match &rt.version {
|
||||||
|
Some(ver) => info!(" ✓ {} — {} ({})", rt.name, rt.path, ver),
|
||||||
|
None => info!(" ✓ {} — {}", rt.name, rt.path),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let runtime_csv = format_as_env_value(&detected);
|
||||||
|
info!("Setting {}={}", env_var_name, runtime_csv);
|
||||||
|
std::env::set_var(env_var_name, &runtime_csv);
|
||||||
|
detected_runtimes = Some(detected);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
RuntimeBootstrapResult {
|
||||||
|
runtimes_override,
|
||||||
|
detected_runtimes,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn print_detect_only_report(env_var_name: &str, result: &RuntimeBootstrapResult) {
|
||||||
|
if result.runtimes_override.is_some() {
|
||||||
|
info!("--detect-only: re-running detection to show what is available on this system...");
|
||||||
|
println!(
|
||||||
|
"NOTE: {} is set — auto-detection was skipped during normal startup.",
|
||||||
|
env_var_name
|
||||||
|
);
|
||||||
|
println!(" Showing what auto-detection would find on this system:");
|
||||||
|
println!();
|
||||||
|
|
||||||
|
let detected = detect_runtimes();
|
||||||
|
print_detection_report_for_env(env_var_name, &detected);
|
||||||
|
} else if let Some(ref detected) = result.detected_runtimes {
|
||||||
|
print_detection_report_for_env(env_var_name, detected);
|
||||||
|
} else {
|
||||||
|
let detected = detect_runtimes();
|
||||||
|
print_detection_report_for_env(env_var_name, &detected);
|
||||||
|
}
|
||||||
|
}
|
||||||
306
crates/common/src/agent_runtime_detection.rs
Normal file
306
crates/common/src/agent_runtime_detection.rs
Normal file
@@ -0,0 +1,306 @@
|
|||||||
|
//! Runtime auto-detection for injected Attune agent binaries.
|
||||||
|
//!
|
||||||
|
//! This module probes the local system directly for well-known interpreters,
|
||||||
|
//! without requiring database access.
|
||||||
|
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
use std::fmt;
|
||||||
|
use std::process::Command;
|
||||||
|
use tracing::{debug, info};
|
||||||
|
|
||||||
|
/// A runtime interpreter discovered on the local system.
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
|
pub struct DetectedRuntime {
|
||||||
|
/// Canonical runtime name (for example, "python" or "node").
|
||||||
|
pub name: String,
|
||||||
|
|
||||||
|
/// Absolute path to the interpreter binary.
|
||||||
|
pub path: String,
|
||||||
|
|
||||||
|
/// Version string if the version command succeeded.
|
||||||
|
pub version: Option<String>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl fmt::Display for DetectedRuntime {
|
||||||
|
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||||
|
match &self.version {
|
||||||
|
Some(v) => write!(f, "{} ({}, v{})", self.name, self.path, v),
|
||||||
|
None => write!(f, "{} ({})", self.name, self.path),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
struct RuntimeCandidate {
|
||||||
|
name: &'static str,
|
||||||
|
binaries: &'static [&'static str],
|
||||||
|
version_args: &'static [&'static str],
|
||||||
|
version_parser: VersionParser,
|
||||||
|
}
|
||||||
|
|
||||||
|
enum VersionParser {
|
||||||
|
SemverLike,
|
||||||
|
JavaStyle,
|
||||||
|
}
|
||||||
|
|
||||||
|
fn candidates() -> Vec<RuntimeCandidate> {
|
||||||
|
vec![
|
||||||
|
RuntimeCandidate {
|
||||||
|
name: "shell",
|
||||||
|
binaries: &["bash", "sh"],
|
||||||
|
version_args: &["--version"],
|
||||||
|
version_parser: VersionParser::SemverLike,
|
||||||
|
},
|
||||||
|
RuntimeCandidate {
|
||||||
|
name: "python",
|
||||||
|
binaries: &["python3", "python"],
|
||||||
|
version_args: &["--version"],
|
||||||
|
version_parser: VersionParser::SemverLike,
|
||||||
|
},
|
||||||
|
RuntimeCandidate {
|
||||||
|
name: "node",
|
||||||
|
binaries: &["node", "nodejs"],
|
||||||
|
version_args: &["--version"],
|
||||||
|
version_parser: VersionParser::SemverLike,
|
||||||
|
},
|
||||||
|
RuntimeCandidate {
|
||||||
|
name: "ruby",
|
||||||
|
binaries: &["ruby"],
|
||||||
|
version_args: &["--version"],
|
||||||
|
version_parser: VersionParser::SemverLike,
|
||||||
|
},
|
||||||
|
RuntimeCandidate {
|
||||||
|
name: "go",
|
||||||
|
binaries: &["go"],
|
||||||
|
version_args: &["version"],
|
||||||
|
version_parser: VersionParser::SemverLike,
|
||||||
|
},
|
||||||
|
RuntimeCandidate {
|
||||||
|
name: "java",
|
||||||
|
binaries: &["java"],
|
||||||
|
version_args: &["-version"],
|
||||||
|
version_parser: VersionParser::JavaStyle,
|
||||||
|
},
|
||||||
|
RuntimeCandidate {
|
||||||
|
name: "r",
|
||||||
|
binaries: &["Rscript"],
|
||||||
|
version_args: &["--version"],
|
||||||
|
version_parser: VersionParser::SemverLike,
|
||||||
|
},
|
||||||
|
RuntimeCandidate {
|
||||||
|
name: "perl",
|
||||||
|
binaries: &["perl"],
|
||||||
|
version_args: &["--version"],
|
||||||
|
version_parser: VersionParser::SemverLike,
|
||||||
|
},
|
||||||
|
]
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Detect available runtimes by probing the local system.
|
||||||
|
pub fn detect_runtimes() -> Vec<DetectedRuntime> {
|
||||||
|
info!("Starting runtime auto-detection...");
|
||||||
|
|
||||||
|
let mut detected = Vec::new();
|
||||||
|
|
||||||
|
for candidate in candidates() {
|
||||||
|
match detect_single_runtime(&candidate) {
|
||||||
|
Some(runtime) => {
|
||||||
|
info!(" ✓ Detected: {}", runtime);
|
||||||
|
detected.push(runtime);
|
||||||
|
}
|
||||||
|
None => {
|
||||||
|
debug!(" ✗ Not found: {}", candidate.name);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
info!(
|
||||||
|
"Runtime auto-detection complete: found {} runtime(s): [{}]",
|
||||||
|
detected.len(),
|
||||||
|
detected
|
||||||
|
.iter()
|
||||||
|
.map(|r| r.name.as_str())
|
||||||
|
.collect::<Vec<_>>()
|
||||||
|
.join(", ")
|
||||||
|
);
|
||||||
|
|
||||||
|
detected
|
||||||
|
}
|
||||||
|
|
||||||
|
fn detect_single_runtime(candidate: &RuntimeCandidate) -> Option<DetectedRuntime> {
|
||||||
|
for binary in candidate.binaries {
|
||||||
|
if let Some(path) = which_binary(binary) {
|
||||||
|
let version = get_version(&path, candidate.version_args, &candidate.version_parser);
|
||||||
|
|
||||||
|
return Some(DetectedRuntime {
|
||||||
|
name: candidate.name.to_string(),
|
||||||
|
path,
|
||||||
|
version,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
None
|
||||||
|
}
|
||||||
|
|
||||||
|
fn which_binary(binary: &str) -> Option<String> {
|
||||||
|
if binary == "bash" || binary == "sh" {
|
||||||
|
let absolute_path = format!("/bin/{}", binary);
|
||||||
|
if std::path::Path::new(&absolute_path).exists() {
|
||||||
|
return Some(absolute_path);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
match Command::new("which").arg(binary).output() {
|
||||||
|
Ok(output) if output.status.success() => {
|
||||||
|
let path = String::from_utf8_lossy(&output.stdout).trim().to_string();
|
||||||
|
if path.is_empty() {
|
||||||
|
None
|
||||||
|
} else {
|
||||||
|
Some(path)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Ok(_) => None,
|
||||||
|
Err(e) => {
|
||||||
|
debug!("'which' command failed ({}), trying 'command -v'", e);
|
||||||
|
match Command::new("sh")
|
||||||
|
.args(["-c", &format!("command -v {}", binary)])
|
||||||
|
.output()
|
||||||
|
{
|
||||||
|
Ok(output) if output.status.success() => {
|
||||||
|
let path = String::from_utf8_lossy(&output.stdout).trim().to_string();
|
||||||
|
if path.is_empty() {
|
||||||
|
None
|
||||||
|
} else {
|
||||||
|
Some(path)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
_ => None,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn get_version(binary_path: &str, version_args: &[&str], parser: &VersionParser) -> Option<String> {
|
||||||
|
let output = match Command::new(binary_path).args(version_args).output() {
|
||||||
|
Ok(output) => output,
|
||||||
|
Err(e) => {
|
||||||
|
debug!("Failed to run version command for {}: {}", binary_path, e);
|
||||||
|
return None;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
let stdout = String::from_utf8_lossy(&output.stdout);
|
||||||
|
let stderr = String::from_utf8_lossy(&output.stderr);
|
||||||
|
let combined = format!("{}{}", stdout, stderr);
|
||||||
|
|
||||||
|
match parser {
|
||||||
|
VersionParser::SemverLike => parse_semver_like(&combined),
|
||||||
|
VersionParser::JavaStyle => parse_java_version(&combined),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn parse_semver_like(output: &str) -> Option<String> {
|
||||||
|
let re = regex::Regex::new(r"(?:v|go)?(\d+\.\d+(?:\.\d+)?)").ok()?;
|
||||||
|
re.captures(output)
|
||||||
|
.and_then(|captures| captures.get(1).map(|m| m.as_str().to_string()))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn parse_java_version(output: &str) -> Option<String> {
|
||||||
|
let quoted_re = regex::Regex::new(r#"version\s+"([^"]+)""#).ok()?;
|
||||||
|
if let Some(captures) = quoted_re.captures(output) {
|
||||||
|
return captures.get(1).map(|m| m.as_str().to_string());
|
||||||
|
}
|
||||||
|
|
||||||
|
parse_semver_like(output)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn format_as_env_value(runtimes: &[DetectedRuntime]) -> String {
|
||||||
|
runtimes
|
||||||
|
.iter()
|
||||||
|
.map(|r| r.name.as_str())
|
||||||
|
.collect::<Vec<_>>()
|
||||||
|
.join(",")
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn print_detection_report_for_env(env_var_name: &str, runtimes: &[DetectedRuntime]) {
|
||||||
|
println!("=== Attune Agent Runtime Detection Report ===");
|
||||||
|
println!();
|
||||||
|
|
||||||
|
if runtimes.is_empty() {
|
||||||
|
println!("No runtimes detected!");
|
||||||
|
println!();
|
||||||
|
println!("The agent could not find any supported interpreter binaries.");
|
||||||
|
println!("Ensure at least one of the following is installed and on PATH:");
|
||||||
|
println!(" - bash / sh (shell scripts)");
|
||||||
|
println!(" - python3 / python (Python scripts)");
|
||||||
|
println!(" - node / nodejs (Node.js scripts)");
|
||||||
|
println!(" - ruby (Ruby scripts)");
|
||||||
|
println!(" - go (Go programs)");
|
||||||
|
println!(" - java (Java programs)");
|
||||||
|
println!(" - Rscript (R scripts)");
|
||||||
|
println!(" - perl (Perl scripts)");
|
||||||
|
} else {
|
||||||
|
println!("Detected {} runtime(s):", runtimes.len());
|
||||||
|
println!();
|
||||||
|
for rt in runtimes {
|
||||||
|
let version_str = rt.version.as_deref().unwrap_or("unknown version");
|
||||||
|
println!(" ✓ {:<10} {} ({})", rt.name, rt.path, version_str);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
println!();
|
||||||
|
println!("{}={}", env_var_name, format_as_env_value(runtimes));
|
||||||
|
println!();
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use super::*;
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_parse_semver_like_python() {
|
||||||
|
assert_eq!(
|
||||||
|
parse_semver_like("Python 3.12.1"),
|
||||||
|
Some("3.12.1".to_string())
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_parse_semver_like_node() {
|
||||||
|
assert_eq!(parse_semver_like("v20.11.0"), Some("20.11.0".to_string()));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_parse_semver_like_go() {
|
||||||
|
assert_eq!(
|
||||||
|
parse_semver_like("go version go1.22.0 linux/amd64"),
|
||||||
|
Some("1.22.0".to_string())
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_parse_java_version_openjdk() {
|
||||||
|
assert_eq!(
|
||||||
|
parse_java_version(r#"openjdk version "21.0.1" 2023-10-17"#),
|
||||||
|
Some("21.0.1".to_string())
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_format_as_env_value_multiple() {
|
||||||
|
let runtimes = vec![
|
||||||
|
DetectedRuntime {
|
||||||
|
name: "shell".to_string(),
|
||||||
|
path: "/bin/bash".to_string(),
|
||||||
|
version: Some("5.2.15".to_string()),
|
||||||
|
},
|
||||||
|
DetectedRuntime {
|
||||||
|
name: "python".to_string(),
|
||||||
|
path: "/usr/bin/python3".to_string(),
|
||||||
|
version: Some("3.12.1".to_string()),
|
||||||
|
},
|
||||||
|
];
|
||||||
|
|
||||||
|
assert_eq!(format_as_env_value(&runtimes), "shell,python");
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -295,6 +295,22 @@ pub struct SecurityConfig {
|
|||||||
/// Enable authentication
|
/// Enable authentication
|
||||||
#[serde(default = "default_true")]
|
#[serde(default = "default_true")]
|
||||||
pub enable_auth: bool,
|
pub enable_auth: bool,
|
||||||
|
|
||||||
|
/// Allow unauthenticated self-service user registration
|
||||||
|
#[serde(default)]
|
||||||
|
pub allow_self_registration: bool,
|
||||||
|
|
||||||
|
/// Login page visibility defaults for the web UI.
|
||||||
|
#[serde(default)]
|
||||||
|
pub login_page: LoginPageConfig,
|
||||||
|
|
||||||
|
/// Optional OpenID Connect configuration for browser login.
|
||||||
|
#[serde(default)]
|
||||||
|
pub oidc: Option<OidcConfig>,
|
||||||
|
|
||||||
|
/// Optional LDAP configuration for username/password login against a directory.
|
||||||
|
#[serde(default)]
|
||||||
|
pub ldap: Option<LdapConfig>,
|
||||||
}
|
}
|
||||||
|
|
||||||
fn default_jwt_access_expiration() -> u64 {
|
fn default_jwt_access_expiration() -> u64 {
|
||||||
@@ -305,6 +321,162 @@ fn default_jwt_refresh_expiration() -> u64 {
|
|||||||
604800 // 7 days
|
604800 // 7 days
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Web login page configuration.
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
|
pub struct LoginPageConfig {
|
||||||
|
/// Show the local username/password form by default.
|
||||||
|
#[serde(default = "default_true")]
|
||||||
|
pub show_local_login: bool,
|
||||||
|
|
||||||
|
/// Show the OIDC/SSO option by default when configured.
|
||||||
|
#[serde(default = "default_true")]
|
||||||
|
pub show_oidc_login: bool,
|
||||||
|
|
||||||
|
/// Show the LDAP option by default when configured.
|
||||||
|
#[serde(default = "default_true")]
|
||||||
|
pub show_ldap_login: bool,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Default for LoginPageConfig {
|
||||||
|
fn default() -> Self {
|
||||||
|
Self {
|
||||||
|
show_local_login: true,
|
||||||
|
show_oidc_login: true,
|
||||||
|
show_ldap_login: true,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// OpenID Connect configuration
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
|
pub struct OidcConfig {
|
||||||
|
/// Enable OpenID Connect login flow.
|
||||||
|
#[serde(default)]
|
||||||
|
pub enabled: bool,
|
||||||
|
|
||||||
|
/// OpenID Provider discovery document URL.
|
||||||
|
pub discovery_url: String,
|
||||||
|
|
||||||
|
/// Confidential client ID.
|
||||||
|
pub client_id: String,
|
||||||
|
|
||||||
|
/// Provider name used in login-page overrides such as `?auth=<provider_name>`.
|
||||||
|
#[serde(default = "default_oidc_provider_name")]
|
||||||
|
pub provider_name: String,
|
||||||
|
|
||||||
|
/// User-facing provider label shown on the login page.
|
||||||
|
pub provider_label: Option<String>,
|
||||||
|
|
||||||
|
/// Optional icon URL shown beside the provider label on the login page.
|
||||||
|
pub provider_icon_url: Option<String>,
|
||||||
|
|
||||||
|
/// Confidential client secret.
|
||||||
|
pub client_secret: Option<String>,
|
||||||
|
|
||||||
|
/// Redirect URI registered with the provider.
|
||||||
|
pub redirect_uri: String,
|
||||||
|
|
||||||
|
/// Optional post-logout redirect URI.
|
||||||
|
pub post_logout_redirect_uri: Option<String>,
|
||||||
|
|
||||||
|
/// Optional requested scopes in addition to `openid email profile`.
|
||||||
|
#[serde(default)]
|
||||||
|
pub scopes: Vec<String>,
|
||||||
|
}
|
||||||
|
|
||||||
|
fn default_oidc_provider_name() -> String {
|
||||||
|
"oidc".to_string()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// LDAP authentication configuration
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
|
pub struct LdapConfig {
|
||||||
|
/// Enable LDAP login flow.
|
||||||
|
#[serde(default)]
|
||||||
|
pub enabled: bool,
|
||||||
|
|
||||||
|
/// LDAP server URL (e.g., "ldap://ldap.example.com:389" or "ldaps://ldap.example.com:636").
|
||||||
|
pub url: String,
|
||||||
|
|
||||||
|
/// Bind DN template. Use `{login}` as placeholder for the user-supplied login.
|
||||||
|
/// Example: "uid={login},ou=users,dc=example,dc=com"
|
||||||
|
/// If not set, an anonymous bind is attempted first to search for the user.
|
||||||
|
pub bind_dn_template: Option<String>,
|
||||||
|
|
||||||
|
/// Base DN for user searches when bind_dn_template is not set.
|
||||||
|
/// Example: "ou=users,dc=example,dc=com"
|
||||||
|
pub user_search_base: Option<String>,
|
||||||
|
|
||||||
|
/// LDAP search filter template. Use `{login}` as placeholder.
|
||||||
|
/// Default: "(uid={login})"
|
||||||
|
#[serde(default = "default_ldap_user_filter")]
|
||||||
|
pub user_filter: String,
|
||||||
|
|
||||||
|
/// DN of a service account used to search for users (required when using search-based auth).
|
||||||
|
pub search_bind_dn: Option<String>,
|
||||||
|
|
||||||
|
/// Password for the search service account.
|
||||||
|
pub search_bind_password: Option<String>,
|
||||||
|
|
||||||
|
/// LDAP attribute to use as the login name. Default: "uid"
|
||||||
|
#[serde(default = "default_ldap_login_attr")]
|
||||||
|
pub login_attr: String,
|
||||||
|
|
||||||
|
/// LDAP attribute to use as the email. Default: "mail"
|
||||||
|
#[serde(default = "default_ldap_email_attr")]
|
||||||
|
pub email_attr: String,
|
||||||
|
|
||||||
|
/// LDAP attribute to use as the display name. Default: "cn"
|
||||||
|
#[serde(default = "default_ldap_display_name_attr")]
|
||||||
|
pub display_name_attr: String,
|
||||||
|
|
||||||
|
/// LDAP attribute that contains group membership. Default: "memberOf"
|
||||||
|
#[serde(default = "default_ldap_group_attr")]
|
||||||
|
pub group_attr: String,
|
||||||
|
|
||||||
|
/// Whether to use STARTTLS. Default: false
|
||||||
|
#[serde(default)]
|
||||||
|
pub starttls: bool,
|
||||||
|
|
||||||
|
/// Whether to skip TLS certificate verification (insecure!). Default: false
|
||||||
|
#[serde(default)]
|
||||||
|
pub danger_skip_tls_verify: bool,
|
||||||
|
|
||||||
|
/// Provider name used in login-page overrides such as `?auth=<provider_name>`.
|
||||||
|
#[serde(default = "default_ldap_provider_name")]
|
||||||
|
pub provider_name: String,
|
||||||
|
|
||||||
|
/// User-facing provider label shown on the login page.
|
||||||
|
pub provider_label: Option<String>,
|
||||||
|
|
||||||
|
/// Optional icon URL shown beside the provider label on the login page.
|
||||||
|
pub provider_icon_url: Option<String>,
|
||||||
|
}
|
||||||
|
|
||||||
|
fn default_ldap_provider_name() -> String {
|
||||||
|
"ldap".to_string()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn default_ldap_user_filter() -> String {
|
||||||
|
"(uid={login})".to_string()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn default_ldap_login_attr() -> String {
|
||||||
|
"uid".to_string()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn default_ldap_email_attr() -> String {
|
||||||
|
"mail".to_string()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn default_ldap_display_name_attr() -> String {
|
||||||
|
"cn".to_string()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn default_ldap_group_attr() -> String {
|
||||||
|
"memberOf".to_string()
|
||||||
|
}
|
||||||
|
|
||||||
/// Worker configuration
|
/// Worker configuration
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
pub struct WorkerConfig {
|
pub struct WorkerConfig {
|
||||||
@@ -505,6 +677,15 @@ impl Default for PackRegistryConfig {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Agent binary distribution configuration
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
|
pub struct AgentConfig {
|
||||||
|
/// Directory containing agent binary files
|
||||||
|
pub binary_dir: String,
|
||||||
|
/// Optional bootstrap token for authenticating agent binary downloads
|
||||||
|
pub bootstrap_token: Option<String>,
|
||||||
|
}
|
||||||
|
|
||||||
/// Executor service configuration
|
/// Executor service configuration
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
pub struct ExecutorConfig {
|
pub struct ExecutorConfig {
|
||||||
@@ -598,6 +779,9 @@ pub struct Config {
|
|||||||
|
|
||||||
/// Executor configuration (optional, for executor service)
|
/// Executor configuration (optional, for executor service)
|
||||||
pub executor: Option<ExecutorConfig>,
|
pub executor: Option<ExecutorConfig>,
|
||||||
|
|
||||||
|
/// Agent configuration (optional, for agent binary distribution)
|
||||||
|
pub agent: Option<AgentConfig>,
|
||||||
}
|
}
|
||||||
|
|
||||||
fn default_service_name() -> String {
|
fn default_service_name() -> String {
|
||||||
@@ -676,6 +860,10 @@ impl Default for SecurityConfig {
|
|||||||
jwt_refresh_expiration: default_jwt_refresh_expiration(),
|
jwt_refresh_expiration: default_jwt_refresh_expiration(),
|
||||||
encryption_key: None,
|
encryption_key: None,
|
||||||
enable_auth: true,
|
enable_auth: true,
|
||||||
|
allow_self_registration: false,
|
||||||
|
login_page: LoginPageConfig::default(),
|
||||||
|
oidc: None,
|
||||||
|
ldap: None,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -795,6 +983,37 @@ impl Config {
|
|||||||
));
|
));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if let Some(oidc) = &self.security.oidc {
|
||||||
|
if oidc.enabled {
|
||||||
|
if oidc.discovery_url.trim().is_empty() {
|
||||||
|
return Err(crate::Error::validation(
|
||||||
|
"OIDC discovery URL cannot be empty when OIDC is enabled",
|
||||||
|
));
|
||||||
|
}
|
||||||
|
if oidc.client_id.trim().is_empty() {
|
||||||
|
return Err(crate::Error::validation(
|
||||||
|
"OIDC client ID cannot be empty when OIDC is enabled",
|
||||||
|
));
|
||||||
|
}
|
||||||
|
if oidc
|
||||||
|
.client_secret
|
||||||
|
.as_deref()
|
||||||
|
.unwrap_or("")
|
||||||
|
.trim()
|
||||||
|
.is_empty()
|
||||||
|
{
|
||||||
|
return Err(crate::Error::validation(
|
||||||
|
"OIDC client secret is required when OIDC is enabled",
|
||||||
|
));
|
||||||
|
}
|
||||||
|
if oidc.redirect_uri.trim().is_empty() {
|
||||||
|
return Err(crate::Error::validation(
|
||||||
|
"OIDC redirect URI cannot be empty when OIDC is enabled",
|
||||||
|
));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
// Validate encryption key if provided
|
// Validate encryption key if provided
|
||||||
if let Some(ref key) = self.security.encryption_key {
|
if let Some(ref key) = self.security.encryption_key {
|
||||||
if key.len() < 32 {
|
if key.len() < 32 {
|
||||||
@@ -859,6 +1078,7 @@ mod tests {
|
|||||||
notifier: None,
|
notifier: None,
|
||||||
pack_registry: PackRegistryConfig::default(),
|
pack_registry: PackRegistryConfig::default(),
|
||||||
executor: None,
|
executor: None,
|
||||||
|
agent: None,
|
||||||
};
|
};
|
||||||
|
|
||||||
assert_eq!(config.service_name, "attune");
|
assert_eq!(config.service_name, "attune");
|
||||||
@@ -924,6 +1144,10 @@ mod tests {
|
|||||||
jwt_refresh_expiration: 604800,
|
jwt_refresh_expiration: 604800,
|
||||||
encryption_key: Some("a".repeat(32)),
|
encryption_key: Some("a".repeat(32)),
|
||||||
enable_auth: true,
|
enable_auth: true,
|
||||||
|
allow_self_registration: false,
|
||||||
|
login_page: LoginPageConfig::default(),
|
||||||
|
oidc: None,
|
||||||
|
ldap: None,
|
||||||
},
|
},
|
||||||
worker: None,
|
worker: None,
|
||||||
sensor: None,
|
sensor: None,
|
||||||
@@ -933,6 +1157,7 @@ mod tests {
|
|||||||
notifier: None,
|
notifier: None,
|
||||||
pack_registry: PackRegistryConfig::default(),
|
pack_registry: PackRegistryConfig::default(),
|
||||||
executor: None,
|
executor: None,
|
||||||
|
agent: None,
|
||||||
};
|
};
|
||||||
|
|
||||||
assert!(config.validate().is_ok());
|
assert!(config.validate().is_ok());
|
||||||
@@ -946,4 +1171,102 @@ mod tests {
|
|||||||
config.security.jwt_secret = None;
|
config.security.jwt_secret = None;
|
||||||
assert!(config.validate().is_err());
|
assert!(config.validate().is_err());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_ldap_config_defaults() {
|
||||||
|
let yaml = r#"
|
||||||
|
enabled: true
|
||||||
|
url: "ldap://localhost:389"
|
||||||
|
client_id: "test"
|
||||||
|
"#;
|
||||||
|
let cfg: LdapConfig = serde_yaml_ng::from_str(yaml).unwrap();
|
||||||
|
|
||||||
|
assert!(cfg.enabled);
|
||||||
|
assert_eq!(cfg.url, "ldap://localhost:389");
|
||||||
|
assert_eq!(cfg.user_filter, "(uid={login})");
|
||||||
|
assert_eq!(cfg.login_attr, "uid");
|
||||||
|
assert_eq!(cfg.email_attr, "mail");
|
||||||
|
assert_eq!(cfg.display_name_attr, "cn");
|
||||||
|
assert_eq!(cfg.group_attr, "memberOf");
|
||||||
|
assert_eq!(cfg.provider_name, "ldap");
|
||||||
|
assert!(!cfg.starttls);
|
||||||
|
assert!(!cfg.danger_skip_tls_verify);
|
||||||
|
assert!(cfg.bind_dn_template.is_none());
|
||||||
|
assert!(cfg.user_search_base.is_none());
|
||||||
|
assert!(cfg.search_bind_dn.is_none());
|
||||||
|
assert!(cfg.search_bind_password.is_none());
|
||||||
|
assert!(cfg.provider_label.is_none());
|
||||||
|
assert!(cfg.provider_icon_url.is_none());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_ldap_config_full_deserialization() {
|
||||||
|
let yaml = r#"
|
||||||
|
enabled: true
|
||||||
|
url: "ldaps://ldap.corp.com:636"
|
||||||
|
bind_dn_template: "uid={login},ou=people,dc=corp,dc=com"
|
||||||
|
user_search_base: "ou=people,dc=corp,dc=com"
|
||||||
|
user_filter: "(sAMAccountName={login})"
|
||||||
|
search_bind_dn: "cn=svc,dc=corp,dc=com"
|
||||||
|
search_bind_password: "secret"
|
||||||
|
login_attr: "sAMAccountName"
|
||||||
|
email_attr: "userPrincipalName"
|
||||||
|
display_name_attr: "displayName"
|
||||||
|
group_attr: "memberOf"
|
||||||
|
starttls: true
|
||||||
|
danger_skip_tls_verify: true
|
||||||
|
provider_name: "corpldap"
|
||||||
|
provider_label: "Corporate Directory"
|
||||||
|
provider_icon_url: "https://corp.com/icon.svg"
|
||||||
|
"#;
|
||||||
|
let cfg: LdapConfig = serde_yaml_ng::from_str(yaml).unwrap();
|
||||||
|
|
||||||
|
assert!(cfg.enabled);
|
||||||
|
assert_eq!(cfg.url, "ldaps://ldap.corp.com:636");
|
||||||
|
assert_eq!(
|
||||||
|
cfg.bind_dn_template.as_deref(),
|
||||||
|
Some("uid={login},ou=people,dc=corp,dc=com")
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
cfg.user_search_base.as_deref(),
|
||||||
|
Some("ou=people,dc=corp,dc=com")
|
||||||
|
);
|
||||||
|
assert_eq!(cfg.user_filter, "(sAMAccountName={login})");
|
||||||
|
assert_eq!(cfg.search_bind_dn.as_deref(), Some("cn=svc,dc=corp,dc=com"));
|
||||||
|
assert_eq!(cfg.search_bind_password.as_deref(), Some("secret"));
|
||||||
|
assert_eq!(cfg.login_attr, "sAMAccountName");
|
||||||
|
assert_eq!(cfg.email_attr, "userPrincipalName");
|
||||||
|
assert_eq!(cfg.display_name_attr, "displayName");
|
||||||
|
assert_eq!(cfg.group_attr, "memberOf");
|
||||||
|
assert!(cfg.starttls);
|
||||||
|
assert!(cfg.danger_skip_tls_verify);
|
||||||
|
assert_eq!(cfg.provider_name, "corpldap");
|
||||||
|
assert_eq!(cfg.provider_label.as_deref(), Some("Corporate Directory"));
|
||||||
|
assert_eq!(
|
||||||
|
cfg.provider_icon_url.as_deref(),
|
||||||
|
Some("https://corp.com/icon.svg")
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_security_config_ldap_none_by_default() {
|
||||||
|
let yaml = r#"jwt_secret: "s""#;
|
||||||
|
let cfg: SecurityConfig = serde_yaml_ng::from_str(yaml).unwrap();
|
||||||
|
|
||||||
|
assert!(cfg.ldap.is_none());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_login_page_show_ldap_default_true() {
|
||||||
|
let cfg: LoginPageConfig = serde_yaml_ng::from_str("{}").unwrap();
|
||||||
|
|
||||||
|
assert!(cfg.show_ldap_login);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_login_page_show_ldap_explicit_false() {
|
||||||
|
let cfg: LoginPageConfig = serde_yaml_ng::from_str("show_ldap_login: false").unwrap();
|
||||||
|
|
||||||
|
assert!(!cfg.show_ldap_login);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -6,6 +6,8 @@
|
|||||||
//! - Configuration
|
//! - Configuration
|
||||||
//! - Utilities
|
//! - Utilities
|
||||||
|
|
||||||
|
pub mod agent_bootstrap;
|
||||||
|
pub mod agent_runtime_detection;
|
||||||
pub mod auth;
|
pub mod auth;
|
||||||
pub mod config;
|
pub mod config;
|
||||||
pub mod crypto;
|
pub mod crypto;
|
||||||
@@ -15,6 +17,7 @@ pub mod models;
|
|||||||
pub mod mq;
|
pub mod mq;
|
||||||
pub mod pack_environment;
|
pub mod pack_environment;
|
||||||
pub mod pack_registry;
|
pub mod pack_registry;
|
||||||
|
pub mod rbac;
|
||||||
pub mod repositories;
|
pub mod repositories;
|
||||||
pub mod runtime_detection;
|
pub mod runtime_detection;
|
||||||
pub mod schema;
|
pub mod schema;
|
||||||
|
|||||||
@@ -430,6 +430,10 @@ pub mod runtime {
|
|||||||
#[serde(default)]
|
#[serde(default)]
|
||||||
pub interpreter: InterpreterConfig,
|
pub interpreter: InterpreterConfig,
|
||||||
|
|
||||||
|
/// Strategy for inline code execution.
|
||||||
|
#[serde(default)]
|
||||||
|
pub inline_execution: InlineExecutionConfig,
|
||||||
|
|
||||||
/// Optional isolated environment configuration (venv, node_modules, etc.)
|
/// Optional isolated environment configuration (venv, node_modules, etc.)
|
||||||
#[serde(default)]
|
#[serde(default)]
|
||||||
pub environment: Option<EnvironmentConfig>,
|
pub environment: Option<EnvironmentConfig>,
|
||||||
@@ -449,6 +453,33 @@ pub mod runtime {
|
|||||||
pub env_vars: HashMap<String, String>,
|
pub env_vars: HashMap<String, String>,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Controls how inline code is materialized before execution.
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
|
||||||
|
pub struct InlineExecutionConfig {
|
||||||
|
/// Whether inline code is passed directly to the interpreter or first
|
||||||
|
/// written to a temporary file.
|
||||||
|
#[serde(default)]
|
||||||
|
pub strategy: InlineExecutionStrategy,
|
||||||
|
|
||||||
|
/// Optional extension for temporary inline files (e.g. ".sh").
|
||||||
|
#[serde(default)]
|
||||||
|
pub extension: Option<String>,
|
||||||
|
|
||||||
|
/// When true, inline wrapper files export the merged input map as shell
|
||||||
|
/// environment variables (`PARAM_*` and bare names) before executing the
|
||||||
|
/// script body.
|
||||||
|
#[serde(default)]
|
||||||
|
pub inject_shell_helpers: bool,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq, Default)]
|
||||||
|
#[serde(rename_all = "snake_case")]
|
||||||
|
pub enum InlineExecutionStrategy {
|
||||||
|
#[default]
|
||||||
|
Direct,
|
||||||
|
TempFile,
|
||||||
|
}
|
||||||
|
|
||||||
/// Describes the interpreter binary and how it invokes action scripts.
|
/// Describes the interpreter binary and how it invokes action scripts.
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
pub struct InterpreterConfig {
|
pub struct InterpreterConfig {
|
||||||
@@ -745,10 +776,13 @@ pub mod runtime {
|
|||||||
pub pack_ref: Option<String>,
|
pub pack_ref: Option<String>,
|
||||||
pub description: Option<String>,
|
pub description: Option<String>,
|
||||||
pub name: String,
|
pub name: String,
|
||||||
|
pub aliases: Vec<String>,
|
||||||
pub distributions: JsonDict,
|
pub distributions: JsonDict,
|
||||||
pub installation: Option<JsonDict>,
|
pub installation: Option<JsonDict>,
|
||||||
pub installers: JsonDict,
|
pub installers: JsonDict,
|
||||||
pub execution_config: JsonDict,
|
pub execution_config: JsonDict,
|
||||||
|
pub auto_detected: bool,
|
||||||
|
pub detection_config: JsonDict,
|
||||||
pub created: DateTime<Utc>,
|
pub created: DateTime<Utc>,
|
||||||
pub updated: DateTime<Utc>,
|
pub updated: DateTime<Utc>,
|
||||||
}
|
}
|
||||||
@@ -1102,6 +1136,7 @@ pub mod execution {
|
|||||||
|
|
||||||
pub enforcement: Option<Id>,
|
pub enforcement: Option<Id>,
|
||||||
pub executor: Option<Id>,
|
pub executor: Option<Id>,
|
||||||
|
pub worker: Option<Id>,
|
||||||
pub status: ExecutionStatus,
|
pub status: ExecutionStatus,
|
||||||
pub result: Option<JsonDict>,
|
pub result: Option<JsonDict>,
|
||||||
|
|
||||||
@@ -1353,7 +1388,6 @@ pub mod workflow {
|
|||||||
pub out_schema: Option<JsonSchema>,
|
pub out_schema: Option<JsonSchema>,
|
||||||
pub definition: JsonDict,
|
pub definition: JsonDict,
|
||||||
pub tags: Vec<String>,
|
pub tags: Vec<String>,
|
||||||
pub enabled: bool,
|
|
||||||
pub created: DateTime<Utc>,
|
pub created: DateTime<Utc>,
|
||||||
pub updated: DateTime<Utc>,
|
pub updated: DateTime<Utc>,
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -481,9 +481,8 @@ pub struct PackRegisteredPayload {
|
|||||||
|
|
||||||
/// Payload for ExecutionCancelRequested message
|
/// Payload for ExecutionCancelRequested message
|
||||||
///
|
///
|
||||||
/// Sent by the API to the worker that is running a specific execution,
|
/// Sent by the API or executor to the worker that is running a specific
|
||||||
/// instructing it to gracefully terminate the process (SIGINT, then SIGTERM
|
/// execution, instructing it to terminate the process promptly.
|
||||||
/// after a grace period).
|
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
pub struct ExecutionCancelRequestedPayload {
|
pub struct ExecutionCancelRequestedPayload {
|
||||||
/// Execution ID to cancel
|
/// Execution ID to cancel
|
||||||
|
|||||||
@@ -10,7 +10,7 @@ use crate::config::Config;
|
|||||||
use crate::error::{Error, Result};
|
use crate::error::{Error, Result};
|
||||||
use crate::models::Runtime;
|
use crate::models::Runtime;
|
||||||
use crate::repositories::action::ActionRepository;
|
use crate::repositories::action::ActionRepository;
|
||||||
use crate::repositories::runtime::RuntimeRepository;
|
use crate::repositories::runtime::{self, RuntimeRepository};
|
||||||
use crate::repositories::FindById as _;
|
use crate::repositories::FindById as _;
|
||||||
use serde_json::Value as JsonValue;
|
use serde_json::Value as JsonValue;
|
||||||
use sqlx::{PgPool, Row};
|
use sqlx::{PgPool, Row};
|
||||||
@@ -370,19 +370,15 @@ impl PackEnvironmentManager {
|
|||||||
// ========================================================================
|
// ========================================================================
|
||||||
|
|
||||||
async fn get_runtime(&self, runtime_id: i64) -> Result<Runtime> {
|
async fn get_runtime(&self, runtime_id: i64) -> Result<Runtime> {
|
||||||
sqlx::query_as::<_, Runtime>(
|
let query = format!(
|
||||||
r#"
|
"SELECT {} FROM runtime WHERE id = $1",
|
||||||
SELECT id, ref, pack, pack_ref, description, name,
|
runtime::SELECT_COLUMNS
|
||||||
distributions, installation, installers, execution_config,
|
);
|
||||||
created, updated
|
sqlx::query_as::<_, Runtime>(&query)
|
||||||
FROM runtime
|
.bind(runtime_id)
|
||||||
WHERE id = $1
|
.fetch_one(&self.pool)
|
||||||
"#,
|
.await
|
||||||
)
|
.map_err(|e| Error::Internal(format!("Failed to fetch runtime: {}", e)))
|
||||||
.bind(runtime_id)
|
|
||||||
.fetch_one(&self.pool)
|
|
||||||
.await
|
|
||||||
.map_err(|e| Error::Internal(format!("Failed to fetch runtime: {}", e)))
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fn runtime_requires_environment(&self, runtime: &Runtime) -> Result<bool> {
|
fn runtime_requires_environment(&self, runtime: &Runtime) -> Result<bool> {
|
||||||
|
|||||||
@@ -1,14 +1,15 @@
|
|||||||
//! Pack Component Loader
|
//! Pack Component Loader
|
||||||
//!
|
//!
|
||||||
//! Reads runtime, action, trigger, and sensor YAML definitions from a pack directory
|
//! Reads permission set, runtime, action, trigger, and sensor YAML definitions from a pack directory
|
||||||
//! and registers them in the database. This is the Rust-native equivalent of
|
//! and registers them in the database. This is the Rust-native equivalent of
|
||||||
//! the Python `load_core_pack.py` script used during init-packs.
|
//! the Python `load_core_pack.py` script used during init-packs.
|
||||||
//!
|
//!
|
||||||
//! Components are loaded in dependency order:
|
//! Components are loaded in dependency order:
|
||||||
//! 1. Runtimes (no dependencies)
|
//! 1. Permission sets (no dependencies)
|
||||||
//! 2. Triggers (no dependencies)
|
//! 2. Runtimes (no dependencies)
|
||||||
//! 3. Actions (depend on runtime; workflow actions also create workflow_definition records)
|
//! 3. Triggers (no dependencies)
|
||||||
//! 4. Sensors (depend on triggers and runtime)
|
//! 4. Actions (depend on runtime; workflow actions also create workflow_definition records)
|
||||||
|
//! 5. Sensors (depend on triggers and runtime)
|
||||||
//!
|
//!
|
||||||
//! All loaders use **upsert** semantics: if an entity with the same ref already
|
//! All loaders use **upsert** semantics: if an entity with the same ref already
|
||||||
//! exists it is updated in place (preserving its database ID); otherwise a new
|
//! exists it is updated in place (preserving its database ID); otherwise a new
|
||||||
@@ -38,7 +39,9 @@ use tracing::{debug, info, warn};
|
|||||||
use crate::error::{Error, Result};
|
use crate::error::{Error, Result};
|
||||||
use crate::models::Id;
|
use crate::models::Id;
|
||||||
use crate::repositories::action::{ActionRepository, UpdateActionInput};
|
use crate::repositories::action::{ActionRepository, UpdateActionInput};
|
||||||
use crate::repositories::runtime::{CreateRuntimeInput, RuntimeRepository, UpdateRuntimeInput};
|
use crate::repositories::identity::{
|
||||||
|
CreatePermissionSetInput, PermissionSetRepository, UpdatePermissionSetInput,
|
||||||
|
};
|
||||||
use crate::repositories::runtime_version::{
|
use crate::repositories::runtime_version::{
|
||||||
CreateRuntimeVersionInput, RuntimeVersionRepository, UpdateRuntimeVersionInput,
|
CreateRuntimeVersionInput, RuntimeVersionRepository, UpdateRuntimeVersionInput,
|
||||||
};
|
};
|
||||||
@@ -49,13 +52,22 @@ use crate::repositories::trigger::{
|
|||||||
use crate::repositories::workflow::{
|
use crate::repositories::workflow::{
|
||||||
CreateWorkflowDefinitionInput, UpdateWorkflowDefinitionInput, WorkflowDefinitionRepository,
|
CreateWorkflowDefinitionInput, UpdateWorkflowDefinitionInput, WorkflowDefinitionRepository,
|
||||||
};
|
};
|
||||||
use crate::repositories::{Create, Delete, FindById, FindByRef, Update};
|
use crate::repositories::{
|
||||||
|
runtime::{CreateRuntimeInput, RuntimeRepository, UpdateRuntimeInput},
|
||||||
|
Create, Delete, FindById, FindByRef, Patch, Update,
|
||||||
|
};
|
||||||
use crate::version_matching::extract_version_components;
|
use crate::version_matching::extract_version_components;
|
||||||
use crate::workflow::parser::parse_workflow_yaml;
|
use crate::workflow::parser::parse_workflow_yaml;
|
||||||
|
|
||||||
/// Result of loading pack components into the database.
|
/// Result of loading pack components into the database.
|
||||||
#[derive(Debug, Default)]
|
#[derive(Debug, Default)]
|
||||||
pub struct PackLoadResult {
|
pub struct PackLoadResult {
|
||||||
|
/// Number of permission sets created
|
||||||
|
pub permission_sets_loaded: usize,
|
||||||
|
/// Number of permission sets updated
|
||||||
|
pub permission_sets_updated: usize,
|
||||||
|
/// Number of permission sets skipped
|
||||||
|
pub permission_sets_skipped: usize,
|
||||||
/// Number of runtimes created
|
/// Number of runtimes created
|
||||||
pub runtimes_loaded: usize,
|
pub runtimes_loaded: usize,
|
||||||
/// Number of runtimes updated (already existed)
|
/// Number of runtimes updated (already existed)
|
||||||
@@ -88,15 +100,27 @@ pub struct PackLoadResult {
|
|||||||
|
|
||||||
impl PackLoadResult {
|
impl PackLoadResult {
|
||||||
pub fn total_loaded(&self) -> usize {
|
pub fn total_loaded(&self) -> usize {
|
||||||
self.runtimes_loaded + self.triggers_loaded + self.actions_loaded + self.sensors_loaded
|
self.permission_sets_loaded
|
||||||
|
+ self.runtimes_loaded
|
||||||
|
+ self.triggers_loaded
|
||||||
|
+ self.actions_loaded
|
||||||
|
+ self.sensors_loaded
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn total_skipped(&self) -> usize {
|
pub fn total_skipped(&self) -> usize {
|
||||||
self.runtimes_skipped + self.triggers_skipped + self.actions_skipped + self.sensors_skipped
|
self.permission_sets_skipped
|
||||||
|
+ self.runtimes_skipped
|
||||||
|
+ self.triggers_skipped
|
||||||
|
+ self.actions_skipped
|
||||||
|
+ self.sensors_skipped
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn total_updated(&self) -> usize {
|
pub fn total_updated(&self) -> usize {
|
||||||
self.runtimes_updated + self.triggers_updated + self.actions_updated + self.sensors_updated
|
self.permission_sets_updated
|
||||||
|
+ self.runtimes_updated
|
||||||
|
+ self.triggers_updated
|
||||||
|
+ self.actions_updated
|
||||||
|
+ self.sensors_updated
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -132,22 +156,26 @@ impl<'a> PackComponentLoader<'a> {
|
|||||||
pack_dir.display()
|
pack_dir.display()
|
||||||
);
|
);
|
||||||
|
|
||||||
// 1. Load runtimes first (no dependencies)
|
// 1. Load permission sets first (no dependencies)
|
||||||
|
let permission_set_refs = self.load_permission_sets(pack_dir, &mut result).await?;
|
||||||
|
|
||||||
|
// 2. Load runtimes (no dependencies)
|
||||||
let runtime_refs = self.load_runtimes(pack_dir, &mut result).await?;
|
let runtime_refs = self.load_runtimes(pack_dir, &mut result).await?;
|
||||||
|
|
||||||
// 2. Load triggers (no dependencies)
|
// 3. Load triggers (no dependencies)
|
||||||
let (trigger_ids, trigger_refs) = self.load_triggers(pack_dir, &mut result).await?;
|
let (trigger_ids, trigger_refs) = self.load_triggers(pack_dir, &mut result).await?;
|
||||||
|
|
||||||
// 3. Load actions (depend on runtime)
|
// 4. Load actions (depend on runtime)
|
||||||
let action_refs = self.load_actions(pack_dir, &mut result).await?;
|
let action_refs = self.load_actions(pack_dir, &mut result).await?;
|
||||||
|
|
||||||
// 4. Load sensors (depend on triggers and runtime)
|
// 5. Load sensors (depend on triggers and runtime)
|
||||||
let sensor_refs = self
|
let sensor_refs = self
|
||||||
.load_sensors(pack_dir, &trigger_ids, &mut result)
|
.load_sensors(pack_dir, &trigger_ids, &mut result)
|
||||||
.await?;
|
.await?;
|
||||||
|
|
||||||
// 5. Clean up entities that are no longer in the pack's YAML files
|
// 6. Clean up entities that are no longer in the pack's YAML files
|
||||||
self.cleanup_removed_entities(
|
self.cleanup_removed_entities(
|
||||||
|
&permission_set_refs,
|
||||||
&runtime_refs,
|
&runtime_refs,
|
||||||
&trigger_refs,
|
&trigger_refs,
|
||||||
&action_refs,
|
&action_refs,
|
||||||
@@ -169,6 +197,146 @@ impl<'a> PackComponentLoader<'a> {
|
|||||||
Ok(result)
|
Ok(result)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Load permission set definitions from `pack_dir/permission_sets/*.yaml`.
|
||||||
|
///
|
||||||
|
/// Permission sets are pack-scoped authorization metadata. Their `grants`
|
||||||
|
/// payload is stored verbatim and interpreted by the API authorization
|
||||||
|
/// layer at request time.
|
||||||
|
async fn load_permission_sets(
|
||||||
|
&self,
|
||||||
|
pack_dir: &Path,
|
||||||
|
result: &mut PackLoadResult,
|
||||||
|
) -> Result<Vec<String>> {
|
||||||
|
let permission_sets_dir = pack_dir.join("permission_sets");
|
||||||
|
let mut loaded_refs = Vec::new();
|
||||||
|
|
||||||
|
if !permission_sets_dir.exists() {
|
||||||
|
info!(
|
||||||
|
"No permission_sets directory found for pack '{}'",
|
||||||
|
self.pack_ref
|
||||||
|
);
|
||||||
|
return Ok(loaded_refs);
|
||||||
|
}
|
||||||
|
|
||||||
|
let yaml_files = read_yaml_files(&permission_sets_dir)?;
|
||||||
|
info!(
|
||||||
|
"Found {} permission set definition(s) for pack '{}'",
|
||||||
|
yaml_files.len(),
|
||||||
|
self.pack_ref
|
||||||
|
);
|
||||||
|
|
||||||
|
for (filename, content) in &yaml_files {
|
||||||
|
let data: serde_yaml_ng::Value = serde_yaml_ng::from_str(content).map_err(|e| {
|
||||||
|
Error::validation(format!(
|
||||||
|
"Failed to parse permission set YAML {}: {}",
|
||||||
|
filename, e
|
||||||
|
))
|
||||||
|
})?;
|
||||||
|
|
||||||
|
let permission_set_ref = match data.get("ref").and_then(|v| v.as_str()) {
|
||||||
|
Some(r) => r.to_string(),
|
||||||
|
None => {
|
||||||
|
let msg = format!(
|
||||||
|
"Permission set YAML {} missing 'ref' field, skipping",
|
||||||
|
filename
|
||||||
|
);
|
||||||
|
warn!("{}", msg);
|
||||||
|
result.warnings.push(msg);
|
||||||
|
result.permission_sets_skipped += 1;
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
let label = data
|
||||||
|
.get("label")
|
||||||
|
.and_then(|v| v.as_str())
|
||||||
|
.map(|s| s.to_string());
|
||||||
|
|
||||||
|
let description = data
|
||||||
|
.get("description")
|
||||||
|
.and_then(|v| v.as_str())
|
||||||
|
.map(|s| s.to_string());
|
||||||
|
|
||||||
|
let grants = data
|
||||||
|
.get("grants")
|
||||||
|
.and_then(|v| serde_json::to_value(v).ok())
|
||||||
|
.unwrap_or_else(|| serde_json::json!([]));
|
||||||
|
|
||||||
|
if !grants.is_array() {
|
||||||
|
let msg = format!(
|
||||||
|
"Permission set '{}' has non-array 'grants', skipping",
|
||||||
|
permission_set_ref
|
||||||
|
);
|
||||||
|
warn!("{}", msg);
|
||||||
|
result.warnings.push(msg);
|
||||||
|
result.permission_sets_skipped += 1;
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
if let Some(existing) =
|
||||||
|
PermissionSetRepository::find_by_ref(self.pool, &permission_set_ref).await?
|
||||||
|
{
|
||||||
|
let update_input = UpdatePermissionSetInput {
|
||||||
|
label,
|
||||||
|
description,
|
||||||
|
grants: Some(grants),
|
||||||
|
};
|
||||||
|
|
||||||
|
match PermissionSetRepository::update(self.pool, existing.id, update_input).await {
|
||||||
|
Ok(_) => {
|
||||||
|
info!(
|
||||||
|
"Updated permission set '{}' (ID: {})",
|
||||||
|
permission_set_ref, existing.id
|
||||||
|
);
|
||||||
|
result.permission_sets_updated += 1;
|
||||||
|
}
|
||||||
|
Err(e) => {
|
||||||
|
let msg = format!(
|
||||||
|
"Failed to update permission set '{}': {}",
|
||||||
|
permission_set_ref, e
|
||||||
|
);
|
||||||
|
warn!("{}", msg);
|
||||||
|
result.warnings.push(msg);
|
||||||
|
result.permission_sets_skipped += 1;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
loaded_refs.push(permission_set_ref);
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
let input = CreatePermissionSetInput {
|
||||||
|
r#ref: permission_set_ref.clone(),
|
||||||
|
pack: Some(self.pack_id),
|
||||||
|
pack_ref: Some(self.pack_ref.clone()),
|
||||||
|
label,
|
||||||
|
description,
|
||||||
|
grants,
|
||||||
|
};
|
||||||
|
|
||||||
|
match PermissionSetRepository::create(self.pool, input).await {
|
||||||
|
Ok(permission_set) => {
|
||||||
|
info!(
|
||||||
|
"Created permission set '{}' (ID: {})",
|
||||||
|
permission_set_ref, permission_set.id
|
||||||
|
);
|
||||||
|
result.permission_sets_loaded += 1;
|
||||||
|
loaded_refs.push(permission_set_ref);
|
||||||
|
}
|
||||||
|
Err(e) => {
|
||||||
|
let msg = format!(
|
||||||
|
"Failed to create permission set '{}': {}",
|
||||||
|
permission_set_ref, e
|
||||||
|
);
|
||||||
|
warn!("{}", msg);
|
||||||
|
result.warnings.push(msg);
|
||||||
|
result.permission_sets_skipped += 1;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(loaded_refs)
|
||||||
|
}
|
||||||
|
|
||||||
/// Load runtime definitions from `pack_dir/runtimes/*.yaml`.
|
/// Load runtime definitions from `pack_dir/runtimes/*.yaml`.
|
||||||
///
|
///
|
||||||
/// Runtimes define how actions and sensors are executed (interpreter,
|
/// Runtimes define how actions and sensors are executed (interpreter,
|
||||||
@@ -236,14 +404,32 @@ impl<'a> PackComponentLoader<'a> {
|
|||||||
.and_then(|v| serde_json::to_value(v).ok())
|
.and_then(|v| serde_json::to_value(v).ok())
|
||||||
.unwrap_or_else(|| serde_json::json!({}));
|
.unwrap_or_else(|| serde_json::json!({}));
|
||||||
|
|
||||||
|
let aliases: Vec<String> = data
|
||||||
|
.get("aliases")
|
||||||
|
.and_then(|v| v.as_sequence())
|
||||||
|
.map(|arr| {
|
||||||
|
arr.iter()
|
||||||
|
.filter_map(|v| v.as_str().map(|s| s.to_ascii_lowercase()))
|
||||||
|
.collect()
|
||||||
|
})
|
||||||
|
.unwrap_or_default();
|
||||||
|
|
||||||
// Check if runtime already exists — update in place if so
|
// Check if runtime already exists — update in place if so
|
||||||
if let Some(existing) = RuntimeRepository::find_by_ref(self.pool, &runtime_ref).await? {
|
if let Some(existing) = RuntimeRepository::find_by_ref(self.pool, &runtime_ref).await? {
|
||||||
let update_input = UpdateRuntimeInput {
|
let update_input = UpdateRuntimeInput {
|
||||||
description,
|
description: Some(match description {
|
||||||
|
Some(description) => Patch::Set(description),
|
||||||
|
None => Patch::Clear,
|
||||||
|
}),
|
||||||
name: Some(name),
|
name: Some(name),
|
||||||
distributions: Some(distributions),
|
distributions: Some(distributions),
|
||||||
installation,
|
installation: Some(match installation {
|
||||||
|
Some(installation) => Patch::Set(installation),
|
||||||
|
None => Patch::Clear,
|
||||||
|
}),
|
||||||
execution_config: Some(execution_config),
|
execution_config: Some(execution_config),
|
||||||
|
aliases: Some(aliases),
|
||||||
|
..Default::default()
|
||||||
};
|
};
|
||||||
|
|
||||||
match RuntimeRepository::update(self.pool, existing.id, update_input).await {
|
match RuntimeRepository::update(self.pool, existing.id, update_input).await {
|
||||||
@@ -274,6 +460,9 @@ impl<'a> PackComponentLoader<'a> {
|
|||||||
distributions,
|
distributions,
|
||||||
installation,
|
installation,
|
||||||
execution_config,
|
execution_config,
|
||||||
|
aliases,
|
||||||
|
auto_detected: false,
|
||||||
|
detection_config: serde_json::json!({}),
|
||||||
};
|
};
|
||||||
|
|
||||||
match RuntimeRepository::create(self.pool, input).await {
|
match RuntimeRepository::create(self.pool, input).await {
|
||||||
@@ -381,9 +570,18 @@ impl<'a> PackComponentLoader<'a> {
|
|||||||
{
|
{
|
||||||
let update_input = UpdateRuntimeVersionInput {
|
let update_input = UpdateRuntimeVersionInput {
|
||||||
version: None, // version string doesn't change
|
version: None, // version string doesn't change
|
||||||
version_major: Some(version_major),
|
version_major: Some(match version_major {
|
||||||
version_minor: Some(version_minor),
|
Some(value) => Patch::Set(value),
|
||||||
version_patch: Some(version_patch),
|
None => Patch::Clear,
|
||||||
|
}),
|
||||||
|
version_minor: Some(match version_minor {
|
||||||
|
Some(value) => Patch::Set(value),
|
||||||
|
None => Patch::Clear,
|
||||||
|
}),
|
||||||
|
version_patch: Some(match version_patch {
|
||||||
|
Some(value) => Patch::Set(value),
|
||||||
|
None => Patch::Clear,
|
||||||
|
}),
|
||||||
execution_config: Some(execution_config),
|
execution_config: Some(execution_config),
|
||||||
distributions: Some(distributions),
|
distributions: Some(distributions),
|
||||||
is_default: Some(is_default),
|
is_default: Some(is_default),
|
||||||
@@ -547,10 +745,16 @@ impl<'a> PackComponentLoader<'a> {
|
|||||||
if let Some(existing) = TriggerRepository::find_by_ref(self.pool, &trigger_ref).await? {
|
if let Some(existing) = TriggerRepository::find_by_ref(self.pool, &trigger_ref).await? {
|
||||||
let update_input = UpdateTriggerInput {
|
let update_input = UpdateTriggerInput {
|
||||||
label: Some(label),
|
label: Some(label),
|
||||||
description: Some(description),
|
description: Some(Patch::Set(description)),
|
||||||
enabled: Some(enabled),
|
enabled: Some(enabled),
|
||||||
param_schema,
|
param_schema: Some(match param_schema {
|
||||||
out_schema,
|
Some(value) => Patch::Set(value),
|
||||||
|
None => Patch::Clear,
|
||||||
|
}),
|
||||||
|
out_schema: Some(match out_schema {
|
||||||
|
Some(value) => Patch::Set(value),
|
||||||
|
None => Patch::Clear,
|
||||||
|
}),
|
||||||
};
|
};
|
||||||
|
|
||||||
match TriggerRepository::update(self.pool, existing.id, update_input).await {
|
match TriggerRepository::update(self.pool, existing.id, update_input).await {
|
||||||
@@ -755,7 +959,10 @@ impl<'a> PackComponentLoader<'a> {
|
|||||||
description: Some(description),
|
description: Some(description),
|
||||||
entrypoint: Some(entrypoint),
|
entrypoint: Some(entrypoint),
|
||||||
runtime: runtime_id,
|
runtime: runtime_id,
|
||||||
runtime_version_constraint: Some(runtime_version_constraint),
|
runtime_version_constraint: Some(match runtime_version_constraint {
|
||||||
|
Some(value) => Patch::Set(value),
|
||||||
|
None => Patch::Clear,
|
||||||
|
}),
|
||||||
param_schema,
|
param_schema,
|
||||||
out_schema,
|
out_schema,
|
||||||
parameter_delivery: Some(parameter_delivery),
|
parameter_delivery: Some(parameter_delivery),
|
||||||
@@ -965,7 +1172,6 @@ impl<'a> PackComponentLoader<'a> {
|
|||||||
out_schema,
|
out_schema,
|
||||||
definition: Some(definition_json),
|
definition: Some(definition_json),
|
||||||
tags: Some(tags),
|
tags: Some(tags),
|
||||||
enabled: Some(true),
|
|
||||||
};
|
};
|
||||||
|
|
||||||
WorkflowDefinitionRepository::update(self.pool, existing.id, update_input).await?;
|
WorkflowDefinitionRepository::update(self.pool, existing.id, update_input).await?;
|
||||||
@@ -993,7 +1199,6 @@ impl<'a> PackComponentLoader<'a> {
|
|||||||
out_schema,
|
out_schema,
|
||||||
definition: definition_json,
|
definition: definition_json,
|
||||||
tags,
|
tags,
|
||||||
enabled: true,
|
|
||||||
};
|
};
|
||||||
|
|
||||||
let created = WorkflowDefinitionRepository::create(self.pool, create_input).await?;
|
let created = WorkflowDefinitionRepository::create(self.pool, create_input).await?;
|
||||||
@@ -1146,11 +1351,17 @@ impl<'a> PackComponentLoader<'a> {
|
|||||||
entrypoint: Some(entrypoint),
|
entrypoint: Some(entrypoint),
|
||||||
runtime: Some(sensor_runtime_id),
|
runtime: Some(sensor_runtime_id),
|
||||||
runtime_ref: Some(sensor_runtime_ref.clone()),
|
runtime_ref: Some(sensor_runtime_ref.clone()),
|
||||||
runtime_version_constraint: Some(runtime_version_constraint.clone()),
|
runtime_version_constraint: Some(match runtime_version_constraint.clone() {
|
||||||
|
Some(value) => Patch::Set(value),
|
||||||
|
None => Patch::Clear,
|
||||||
|
}),
|
||||||
trigger: Some(trigger_id.unwrap_or(existing.trigger)),
|
trigger: Some(trigger_id.unwrap_or(existing.trigger)),
|
||||||
trigger_ref: Some(trigger_ref.unwrap_or(existing.trigger_ref.clone())),
|
trigger_ref: Some(trigger_ref.unwrap_or(existing.trigger_ref.clone())),
|
||||||
enabled: Some(enabled),
|
enabled: Some(enabled),
|
||||||
param_schema,
|
param_schema: Some(match param_schema {
|
||||||
|
Some(value) => Patch::Set(value),
|
||||||
|
None => Patch::Clear,
|
||||||
|
}),
|
||||||
config: Some(config),
|
config: Some(config),
|
||||||
};
|
};
|
||||||
|
|
||||||
@@ -1308,12 +1519,37 @@ impl<'a> PackComponentLoader<'a> {
|
|||||||
/// removed.
|
/// removed.
|
||||||
async fn cleanup_removed_entities(
|
async fn cleanup_removed_entities(
|
||||||
&self,
|
&self,
|
||||||
|
permission_set_refs: &[String],
|
||||||
runtime_refs: &[String],
|
runtime_refs: &[String],
|
||||||
trigger_refs: &[String],
|
trigger_refs: &[String],
|
||||||
action_refs: &[String],
|
action_refs: &[String],
|
||||||
sensor_refs: &[String],
|
sensor_refs: &[String],
|
||||||
result: &mut PackLoadResult,
|
result: &mut PackLoadResult,
|
||||||
) {
|
) {
|
||||||
|
match PermissionSetRepository::delete_by_pack_excluding(
|
||||||
|
self.pool,
|
||||||
|
self.pack_id,
|
||||||
|
permission_set_refs,
|
||||||
|
)
|
||||||
|
.await
|
||||||
|
{
|
||||||
|
Ok(count) => {
|
||||||
|
if count > 0 {
|
||||||
|
info!(
|
||||||
|
"Removed {} stale permission set(s) from pack '{}'",
|
||||||
|
count, self.pack_ref
|
||||||
|
);
|
||||||
|
result.removed += count as usize;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Err(e) => {
|
||||||
|
warn!(
|
||||||
|
"Failed to clean up stale permission sets for pack '{}': {}",
|
||||||
|
self.pack_ref, e
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
// Clean up sensors first (they depend on triggers/runtimes)
|
// Clean up sensors first (they depend on triggers/runtimes)
|
||||||
match SensorRepository::delete_by_pack_excluding(self.pool, self.pack_id, sensor_refs).await
|
match SensorRepository::delete_by_pack_excluding(self.pool, self.pack_id, sensor_refs).await
|
||||||
{
|
{
|
||||||
|
|||||||
292
crates/common/src/rbac.rs
Normal file
292
crates/common/src/rbac.rs
Normal file
@@ -0,0 +1,292 @@
|
|||||||
|
//! Role-based access control (RBAC) model and evaluator.
|
||||||
|
//!
|
||||||
|
//! Permission sets store `grants` as a JSON array of [`Grant`].
|
||||||
|
//! This module defines the canonical grant schema and matching logic.
|
||||||
|
|
||||||
|
use crate::models::{ArtifactVisibility, Id, OwnerType};
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
use serde_json::Value as JsonValue;
|
||||||
|
use std::collections::HashMap;
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq, Hash)]
|
||||||
|
#[serde(rename_all = "snake_case")]
|
||||||
|
pub enum Resource {
|
||||||
|
Packs,
|
||||||
|
Actions,
|
||||||
|
Rules,
|
||||||
|
Triggers,
|
||||||
|
Executions,
|
||||||
|
Events,
|
||||||
|
Enforcements,
|
||||||
|
Inquiries,
|
||||||
|
Keys,
|
||||||
|
Artifacts,
|
||||||
|
Workflows,
|
||||||
|
Webhooks,
|
||||||
|
Analytics,
|
||||||
|
History,
|
||||||
|
Identities,
|
||||||
|
Permissions,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq, Hash)]
|
||||||
|
#[serde(rename_all = "snake_case")]
|
||||||
|
pub enum Action {
|
||||||
|
Read,
|
||||||
|
Create,
|
||||||
|
Update,
|
||||||
|
Delete,
|
||||||
|
Execute,
|
||||||
|
Cancel,
|
||||||
|
Respond,
|
||||||
|
Manage,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq)]
|
||||||
|
#[serde(rename_all = "snake_case")]
|
||||||
|
pub enum OwnerConstraint {
|
||||||
|
#[serde(rename = "self")]
|
||||||
|
SelfOnly,
|
||||||
|
Any,
|
||||||
|
None,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq)]
|
||||||
|
#[serde(rename_all = "snake_case")]
|
||||||
|
pub enum ExecutionScopeConstraint {
|
||||||
|
#[serde(rename = "self")]
|
||||||
|
SelfOnly,
|
||||||
|
Descendants,
|
||||||
|
Any,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize, Default, PartialEq)]
|
||||||
|
pub struct GrantConstraints {
|
||||||
|
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||||
|
pub pack_refs: Option<Vec<String>>,
|
||||||
|
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||||
|
pub owner: Option<OwnerConstraint>,
|
||||||
|
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||||
|
pub owner_types: Option<Vec<OwnerType>>,
|
||||||
|
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||||
|
pub visibility: Option<Vec<ArtifactVisibility>>,
|
||||||
|
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||||
|
pub execution_scope: Option<ExecutionScopeConstraint>,
|
||||||
|
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||||
|
pub refs: Option<Vec<String>>,
|
||||||
|
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||||
|
pub ids: Option<Vec<Id>>,
|
||||||
|
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||||
|
pub encrypted: Option<bool>,
|
||||||
|
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||||
|
pub attributes: Option<HashMap<String, JsonValue>>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
|
||||||
|
pub struct Grant {
|
||||||
|
pub resource: Resource,
|
||||||
|
pub actions: Vec<Action>,
|
||||||
|
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||||
|
pub constraints: Option<GrantConstraints>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone)]
|
||||||
|
pub struct AuthorizationContext {
|
||||||
|
pub identity_id: Id,
|
||||||
|
pub identity_attributes: HashMap<String, JsonValue>,
|
||||||
|
pub target_id: Option<Id>,
|
||||||
|
pub target_ref: Option<String>,
|
||||||
|
pub pack_ref: Option<String>,
|
||||||
|
pub owner_identity_id: Option<Id>,
|
||||||
|
pub owner_type: Option<OwnerType>,
|
||||||
|
pub visibility: Option<ArtifactVisibility>,
|
||||||
|
pub encrypted: Option<bool>,
|
||||||
|
pub execution_owner_identity_id: Option<Id>,
|
||||||
|
pub execution_ancestor_identity_ids: Vec<Id>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl AuthorizationContext {
|
||||||
|
pub fn new(identity_id: Id) -> Self {
|
||||||
|
Self {
|
||||||
|
identity_id,
|
||||||
|
identity_attributes: HashMap::new(),
|
||||||
|
target_id: None,
|
||||||
|
target_ref: None,
|
||||||
|
pack_ref: None,
|
||||||
|
owner_identity_id: None,
|
||||||
|
owner_type: None,
|
||||||
|
visibility: None,
|
||||||
|
encrypted: None,
|
||||||
|
execution_owner_identity_id: None,
|
||||||
|
execution_ancestor_identity_ids: Vec::new(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Grant {
|
||||||
|
pub fn allows(&self, resource: Resource, action: Action, ctx: &AuthorizationContext) -> bool {
|
||||||
|
self.resource == resource && self.actions.contains(&action) && self.constraints_match(ctx)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn constraints_match(&self, ctx: &AuthorizationContext) -> bool {
|
||||||
|
let Some(constraints) = &self.constraints else {
|
||||||
|
return true;
|
||||||
|
};
|
||||||
|
|
||||||
|
if let Some(pack_refs) = &constraints.pack_refs {
|
||||||
|
let Some(pack_ref) = &ctx.pack_ref else {
|
||||||
|
return false;
|
||||||
|
};
|
||||||
|
if !pack_refs.contains(pack_ref) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if let Some(owner) = constraints.owner {
|
||||||
|
let owner_match = match owner {
|
||||||
|
OwnerConstraint::SelfOnly => ctx.owner_identity_id == Some(ctx.identity_id),
|
||||||
|
OwnerConstraint::Any => true,
|
||||||
|
OwnerConstraint::None => ctx.owner_identity_id.is_none(),
|
||||||
|
};
|
||||||
|
if !owner_match {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if let Some(owner_types) = &constraints.owner_types {
|
||||||
|
let Some(owner_type) = ctx.owner_type else {
|
||||||
|
return false;
|
||||||
|
};
|
||||||
|
if !owner_types.contains(&owner_type) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if let Some(visibility) = &constraints.visibility {
|
||||||
|
let Some(target_visibility) = ctx.visibility else {
|
||||||
|
return false;
|
||||||
|
};
|
||||||
|
if !visibility.contains(&target_visibility) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if let Some(execution_scope) = constraints.execution_scope {
|
||||||
|
let execution_match = match execution_scope {
|
||||||
|
ExecutionScopeConstraint::SelfOnly => {
|
||||||
|
ctx.execution_owner_identity_id == Some(ctx.identity_id)
|
||||||
|
}
|
||||||
|
ExecutionScopeConstraint::Descendants => {
|
||||||
|
ctx.execution_owner_identity_id == Some(ctx.identity_id)
|
||||||
|
|| ctx
|
||||||
|
.execution_ancestor_identity_ids
|
||||||
|
.contains(&ctx.identity_id)
|
||||||
|
}
|
||||||
|
ExecutionScopeConstraint::Any => true,
|
||||||
|
};
|
||||||
|
if !execution_match {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if let Some(refs) = &constraints.refs {
|
||||||
|
let Some(target_ref) = &ctx.target_ref else {
|
||||||
|
return false;
|
||||||
|
};
|
||||||
|
if !refs.contains(target_ref) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if let Some(ids) = &constraints.ids {
|
||||||
|
let Some(target_id) = ctx.target_id else {
|
||||||
|
return false;
|
||||||
|
};
|
||||||
|
if !ids.contains(&target_id) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if let Some(encrypted) = constraints.encrypted {
|
||||||
|
let Some(target_encrypted) = ctx.encrypted else {
|
||||||
|
return false;
|
||||||
|
};
|
||||||
|
if encrypted != target_encrypted {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if let Some(attributes) = &constraints.attributes {
|
||||||
|
for (key, expected_value) in attributes {
|
||||||
|
let Some(actual_value) = ctx.identity_attributes.get(key) else {
|
||||||
|
return false;
|
||||||
|
};
|
||||||
|
if actual_value != expected_value {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use super::*;
|
||||||
|
use serde_json::json;
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn grant_without_constraints_allows() {
|
||||||
|
let grant = Grant {
|
||||||
|
resource: Resource::Actions,
|
||||||
|
actions: vec![Action::Read],
|
||||||
|
constraints: None,
|
||||||
|
};
|
||||||
|
let ctx = AuthorizationContext::new(42);
|
||||||
|
assert!(grant.allows(Resource::Actions, Action::Read, &ctx));
|
||||||
|
assert!(!grant.allows(Resource::Actions, Action::Create, &ctx));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn key_constraint_owner_type_and_encrypted() {
|
||||||
|
let grant = Grant {
|
||||||
|
resource: Resource::Keys,
|
||||||
|
actions: vec![Action::Read],
|
||||||
|
constraints: Some(GrantConstraints {
|
||||||
|
owner_types: Some(vec![OwnerType::System]),
|
||||||
|
encrypted: Some(false),
|
||||||
|
..Default::default()
|
||||||
|
}),
|
||||||
|
};
|
||||||
|
|
||||||
|
let mut ctx = AuthorizationContext::new(1);
|
||||||
|
ctx.owner_type = Some(OwnerType::System);
|
||||||
|
ctx.encrypted = Some(false);
|
||||||
|
assert!(grant.allows(Resource::Keys, Action::Read, &ctx));
|
||||||
|
|
||||||
|
ctx.encrypted = Some(true);
|
||||||
|
assert!(!grant.allows(Resource::Keys, Action::Read, &ctx));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn attributes_constraint_requires_exact_value_match() {
|
||||||
|
let grant = Grant {
|
||||||
|
resource: Resource::Packs,
|
||||||
|
actions: vec![Action::Read],
|
||||||
|
constraints: Some(GrantConstraints {
|
||||||
|
attributes: Some(HashMap::from([("team".to_string(), json!("platform"))])),
|
||||||
|
..Default::default()
|
||||||
|
}),
|
||||||
|
};
|
||||||
|
|
||||||
|
let mut ctx = AuthorizationContext::new(1);
|
||||||
|
ctx.identity_attributes
|
||||||
|
.insert("team".to_string(), json!("platform"));
|
||||||
|
assert!(grant.allows(Resource::Packs, Action::Read, &ctx));
|
||||||
|
|
||||||
|
ctx.identity_attributes
|
||||||
|
.insert("team".to_string(), json!("infra"));
|
||||||
|
assert!(!grant.allows(Resource::Packs, Action::Read, &ctx));
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -6,7 +6,7 @@ use crate::models::{action::*, enums::PolicyMethod, Id, JsonSchema};
|
|||||||
use crate::{Error, Result};
|
use crate::{Error, Result};
|
||||||
use sqlx::{Executor, Postgres, QueryBuilder};
|
use sqlx::{Executor, Postgres, QueryBuilder};
|
||||||
|
|
||||||
use super::{Create, Delete, FindById, FindByRef, List, Repository, Update};
|
use super::{Create, Delete, FindById, FindByRef, List, Patch, Repository, Update};
|
||||||
|
|
||||||
/// Columns selected in all Action queries. Must match the `Action` model's `FromRow` fields.
|
/// Columns selected in all Action queries. Must match the `Action` model's `FromRow` fields.
|
||||||
pub const ACTION_COLUMNS: &str = "id, ref, pack, pack_ref, label, description, entrypoint, \
|
pub const ACTION_COLUMNS: &str = "id, ref, pack, pack_ref, label, description, entrypoint, \
|
||||||
@@ -67,7 +67,7 @@ pub struct UpdateActionInput {
|
|||||||
pub description: Option<String>,
|
pub description: Option<String>,
|
||||||
pub entrypoint: Option<String>,
|
pub entrypoint: Option<String>,
|
||||||
pub runtime: Option<Id>,
|
pub runtime: Option<Id>,
|
||||||
pub runtime_version_constraint: Option<Option<String>>,
|
pub runtime_version_constraint: Option<Patch<String>>,
|
||||||
pub param_schema: Option<JsonSchema>,
|
pub param_schema: Option<JsonSchema>,
|
||||||
pub out_schema: Option<JsonSchema>,
|
pub out_schema: Option<JsonSchema>,
|
||||||
pub parameter_delivery: Option<String>,
|
pub parameter_delivery: Option<String>,
|
||||||
@@ -237,7 +237,10 @@ impl Update for ActionRepository {
|
|||||||
query.push(", ");
|
query.push(", ");
|
||||||
}
|
}
|
||||||
query.push("runtime_version_constraint = ");
|
query.push("runtime_version_constraint = ");
|
||||||
query.push_bind(runtime_version_constraint);
|
match runtime_version_constraint {
|
||||||
|
Patch::Set(value) => query.push_bind(value),
|
||||||
|
Patch::Clear => query.push_bind(Option::<String>::None),
|
||||||
|
};
|
||||||
has_updates = true;
|
has_updates = true;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -8,7 +8,7 @@ use crate::models::{
|
|||||||
use crate::Result;
|
use crate::Result;
|
||||||
use sqlx::{Executor, Postgres, QueryBuilder};
|
use sqlx::{Executor, Postgres, QueryBuilder};
|
||||||
|
|
||||||
use super::{Create, Delete, FindById, FindByRef, List, Repository, Update};
|
use super::{Create, Delete, FindById, FindByRef, List, Patch, Repository, Update};
|
||||||
|
|
||||||
// ============================================================================
|
// ============================================================================
|
||||||
// ArtifactRepository
|
// ArtifactRepository
|
||||||
@@ -48,12 +48,12 @@ pub struct UpdateArtifactInput {
|
|||||||
pub visibility: Option<ArtifactVisibility>,
|
pub visibility: Option<ArtifactVisibility>,
|
||||||
pub retention_policy: Option<RetentionPolicyType>,
|
pub retention_policy: Option<RetentionPolicyType>,
|
||||||
pub retention_limit: Option<i32>,
|
pub retention_limit: Option<i32>,
|
||||||
pub name: Option<String>,
|
pub name: Option<Patch<String>>,
|
||||||
pub description: Option<String>,
|
pub description: Option<Patch<String>>,
|
||||||
pub content_type: Option<String>,
|
pub content_type: Option<Patch<String>>,
|
||||||
pub size_bytes: Option<i64>,
|
pub size_bytes: Option<i64>,
|
||||||
pub execution: Option<Option<i64>>,
|
pub execution: Option<Patch<i64>>,
|
||||||
pub data: Option<serde_json::Value>,
|
pub data: Option<Patch<serde_json::Value>>,
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Filters for searching artifacts
|
/// Filters for searching artifacts
|
||||||
@@ -186,20 +186,62 @@ impl Update for ArtifactRepository {
|
|||||||
push_field!(input.visibility, "visibility");
|
push_field!(input.visibility, "visibility");
|
||||||
push_field!(input.retention_policy, "retention_policy");
|
push_field!(input.retention_policy, "retention_policy");
|
||||||
push_field!(input.retention_limit, "retention_limit");
|
push_field!(input.retention_limit, "retention_limit");
|
||||||
push_field!(&input.name, "name");
|
if let Some(name) = &input.name {
|
||||||
push_field!(&input.description, "description");
|
if has_updates {
|
||||||
push_field!(&input.content_type, "content_type");
|
query.push(", ");
|
||||||
|
}
|
||||||
|
query.push("name = ");
|
||||||
|
match name {
|
||||||
|
Patch::Set(value) => query.push_bind(value),
|
||||||
|
Patch::Clear => query.push_bind(Option::<String>::None),
|
||||||
|
};
|
||||||
|
has_updates = true;
|
||||||
|
}
|
||||||
|
if let Some(description) = &input.description {
|
||||||
|
if has_updates {
|
||||||
|
query.push(", ");
|
||||||
|
}
|
||||||
|
query.push("description = ");
|
||||||
|
match description {
|
||||||
|
Patch::Set(value) => query.push_bind(value),
|
||||||
|
Patch::Clear => query.push_bind(Option::<String>::None),
|
||||||
|
};
|
||||||
|
has_updates = true;
|
||||||
|
}
|
||||||
|
if let Some(content_type) = &input.content_type {
|
||||||
|
if has_updates {
|
||||||
|
query.push(", ");
|
||||||
|
}
|
||||||
|
query.push("content_type = ");
|
||||||
|
match content_type {
|
||||||
|
Patch::Set(value) => query.push_bind(value),
|
||||||
|
Patch::Clear => query.push_bind(Option::<String>::None),
|
||||||
|
};
|
||||||
|
has_updates = true;
|
||||||
|
}
|
||||||
push_field!(input.size_bytes, "size_bytes");
|
push_field!(input.size_bytes, "size_bytes");
|
||||||
// execution is Option<Option<i64>> — outer Option = "was field provided?",
|
|
||||||
// inner Option = nullable column value
|
|
||||||
if let Some(exec_val) = input.execution {
|
if let Some(exec_val) = input.execution {
|
||||||
if has_updates {
|
if has_updates {
|
||||||
query.push(", ");
|
query.push(", ");
|
||||||
}
|
}
|
||||||
query.push("execution = ").push_bind(exec_val);
|
query.push("execution = ");
|
||||||
|
match exec_val {
|
||||||
|
Patch::Set(value) => query.push_bind(value),
|
||||||
|
Patch::Clear => query.push_bind(Option::<i64>::None),
|
||||||
|
};
|
||||||
|
has_updates = true;
|
||||||
|
}
|
||||||
|
if let Some(data) = &input.data {
|
||||||
|
if has_updates {
|
||||||
|
query.push(", ");
|
||||||
|
}
|
||||||
|
query.push("data = ");
|
||||||
|
match data {
|
||||||
|
Patch::Set(value) => query.push_bind(value),
|
||||||
|
Patch::Clear => query.push_bind(Option::<serde_json::Value>::None),
|
||||||
|
};
|
||||||
has_updates = true;
|
has_updates = true;
|
||||||
}
|
}
|
||||||
push_field!(&input.data, "data");
|
|
||||||
|
|
||||||
if !has_updates {
|
if !has_updates {
|
||||||
return Self::get_by_id(executor, id).await;
|
return Self::get_by_id(executor, id).await;
|
||||||
|
|||||||
@@ -54,6 +54,7 @@ pub struct ExecutionWithRefs {
|
|||||||
pub parent: Option<Id>,
|
pub parent: Option<Id>,
|
||||||
pub enforcement: Option<Id>,
|
pub enforcement: Option<Id>,
|
||||||
pub executor: Option<Id>,
|
pub executor: Option<Id>,
|
||||||
|
pub worker: Option<Id>,
|
||||||
pub status: ExecutionStatus,
|
pub status: ExecutionStatus,
|
||||||
pub result: Option<JsonDict>,
|
pub result: Option<JsonDict>,
|
||||||
pub started_at: Option<DateTime<Utc>>,
|
pub started_at: Option<DateTime<Utc>>,
|
||||||
@@ -73,7 +74,7 @@ pub struct ExecutionWithRefs {
|
|||||||
/// are NOT in the Rust struct, so `SELECT *` must never be used.
|
/// are NOT in the Rust struct, so `SELECT *` must never be used.
|
||||||
pub const SELECT_COLUMNS: &str = "\
|
pub const SELECT_COLUMNS: &str = "\
|
||||||
id, action, action_ref, config, env_vars, parent, enforcement, \
|
id, action, action_ref, config, env_vars, parent, enforcement, \
|
||||||
executor, status, result, started_at, workflow_task, created, updated";
|
executor, worker, status, result, started_at, workflow_task, created, updated";
|
||||||
|
|
||||||
pub struct ExecutionRepository;
|
pub struct ExecutionRepository;
|
||||||
|
|
||||||
@@ -93,6 +94,7 @@ pub struct CreateExecutionInput {
|
|||||||
pub parent: Option<Id>,
|
pub parent: Option<Id>,
|
||||||
pub enforcement: Option<Id>,
|
pub enforcement: Option<Id>,
|
||||||
pub executor: Option<Id>,
|
pub executor: Option<Id>,
|
||||||
|
pub worker: Option<Id>,
|
||||||
pub status: ExecutionStatus,
|
pub status: ExecutionStatus,
|
||||||
pub result: Option<JsonDict>,
|
pub result: Option<JsonDict>,
|
||||||
pub workflow_task: Option<WorkflowTaskMetadata>,
|
pub workflow_task: Option<WorkflowTaskMetadata>,
|
||||||
@@ -103,6 +105,7 @@ pub struct UpdateExecutionInput {
|
|||||||
pub status: Option<ExecutionStatus>,
|
pub status: Option<ExecutionStatus>,
|
||||||
pub result: Option<JsonDict>,
|
pub result: Option<JsonDict>,
|
||||||
pub executor: Option<Id>,
|
pub executor: Option<Id>,
|
||||||
|
pub worker: Option<Id>,
|
||||||
pub started_at: Option<DateTime<Utc>>,
|
pub started_at: Option<DateTime<Utc>>,
|
||||||
pub workflow_task: Option<WorkflowTaskMetadata>,
|
pub workflow_task: Option<WorkflowTaskMetadata>,
|
||||||
}
|
}
|
||||||
@@ -113,6 +116,7 @@ impl From<Execution> for UpdateExecutionInput {
|
|||||||
status: Some(execution.status),
|
status: Some(execution.status),
|
||||||
result: execution.result,
|
result: execution.result,
|
||||||
executor: execution.executor,
|
executor: execution.executor,
|
||||||
|
worker: execution.worker,
|
||||||
started_at: execution.started_at,
|
started_at: execution.started_at,
|
||||||
workflow_task: execution.workflow_task,
|
workflow_task: execution.workflow_task,
|
||||||
}
|
}
|
||||||
@@ -158,8 +162,8 @@ impl Create for ExecutionRepository {
|
|||||||
{
|
{
|
||||||
let sql = format!(
|
let sql = format!(
|
||||||
"INSERT INTO execution \
|
"INSERT INTO execution \
|
||||||
(action, action_ref, config, env_vars, parent, enforcement, executor, status, result, workflow_task) \
|
(action, action_ref, config, env_vars, parent, enforcement, executor, worker, status, result, workflow_task) \
|
||||||
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10) \
|
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11) \
|
||||||
RETURNING {SELECT_COLUMNS}"
|
RETURNING {SELECT_COLUMNS}"
|
||||||
);
|
);
|
||||||
sqlx::query_as::<_, Execution>(&sql)
|
sqlx::query_as::<_, Execution>(&sql)
|
||||||
@@ -170,6 +174,7 @@ impl Create for ExecutionRepository {
|
|||||||
.bind(input.parent)
|
.bind(input.parent)
|
||||||
.bind(input.enforcement)
|
.bind(input.enforcement)
|
||||||
.bind(input.executor)
|
.bind(input.executor)
|
||||||
|
.bind(input.worker)
|
||||||
.bind(input.status)
|
.bind(input.status)
|
||||||
.bind(&input.result)
|
.bind(&input.result)
|
||||||
.bind(sqlx::types::Json(&input.workflow_task))
|
.bind(sqlx::types::Json(&input.workflow_task))
|
||||||
@@ -208,6 +213,13 @@ impl Update for ExecutionRepository {
|
|||||||
query.push("executor = ").push_bind(executor_id);
|
query.push("executor = ").push_bind(executor_id);
|
||||||
has_updates = true;
|
has_updates = true;
|
||||||
}
|
}
|
||||||
|
if let Some(worker_id) = input.worker {
|
||||||
|
if has_updates {
|
||||||
|
query.push(", ");
|
||||||
|
}
|
||||||
|
query.push("worker = ").push_bind(worker_id);
|
||||||
|
has_updates = true;
|
||||||
|
}
|
||||||
if let Some(started_at) = input.started_at {
|
if let Some(started_at) = input.started_at {
|
||||||
if has_updates {
|
if has_updates {
|
||||||
query.push(", ");
|
query.push(", ");
|
||||||
|
|||||||
@@ -4,7 +4,7 @@ use crate::models::{identity::*, Id, JsonDict};
|
|||||||
use crate::Result;
|
use crate::Result;
|
||||||
use sqlx::{Executor, Postgres, QueryBuilder};
|
use sqlx::{Executor, Postgres, QueryBuilder};
|
||||||
|
|
||||||
use super::{Create, Delete, FindById, List, Repository, Update};
|
use super::{Create, Delete, FindById, FindByRef, List, Repository, Update};
|
||||||
|
|
||||||
pub struct IdentityRepository;
|
pub struct IdentityRepository;
|
||||||
|
|
||||||
@@ -159,6 +159,48 @@ impl IdentityRepository {
|
|||||||
"SELECT id, login, display_name, password_hash, attributes, created, updated FROM identity WHERE login = $1"
|
"SELECT id, login, display_name, password_hash, attributes, created, updated FROM identity WHERE login = $1"
|
||||||
).bind(login).fetch_optional(executor).await.map_err(Into::into)
|
).bind(login).fetch_optional(executor).await.map_err(Into::into)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub async fn find_by_oidc_subject<'e, E>(
|
||||||
|
executor: E,
|
||||||
|
issuer: &str,
|
||||||
|
subject: &str,
|
||||||
|
) -> Result<Option<Identity>>
|
||||||
|
where
|
||||||
|
E: Executor<'e, Database = Postgres> + 'e,
|
||||||
|
{
|
||||||
|
sqlx::query_as::<_, Identity>(
|
||||||
|
"SELECT id, login, display_name, password_hash, attributes, created, updated
|
||||||
|
FROM identity
|
||||||
|
WHERE attributes->'oidc'->>'issuer' = $1
|
||||||
|
AND attributes->'oidc'->>'sub' = $2",
|
||||||
|
)
|
||||||
|
.bind(issuer)
|
||||||
|
.bind(subject)
|
||||||
|
.fetch_optional(executor)
|
||||||
|
.await
|
||||||
|
.map_err(Into::into)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn find_by_ldap_dn<'e, E>(
|
||||||
|
executor: E,
|
||||||
|
server_url: &str,
|
||||||
|
dn: &str,
|
||||||
|
) -> Result<Option<Identity>>
|
||||||
|
where
|
||||||
|
E: Executor<'e, Database = Postgres> + 'e,
|
||||||
|
{
|
||||||
|
sqlx::query_as::<_, Identity>(
|
||||||
|
"SELECT id, login, display_name, password_hash, attributes, created, updated
|
||||||
|
FROM identity
|
||||||
|
WHERE attributes->'ldap'->>'server_url' = $1
|
||||||
|
AND attributes->'ldap'->>'dn' = $2",
|
||||||
|
)
|
||||||
|
.bind(server_url)
|
||||||
|
.bind(dn)
|
||||||
|
.fetch_optional(executor)
|
||||||
|
.await
|
||||||
|
.map_err(Into::into)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Permission Set Repository
|
// Permission Set Repository
|
||||||
@@ -200,6 +242,22 @@ impl FindById for PermissionSetRepository {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[async_trait::async_trait]
|
||||||
|
impl FindByRef for PermissionSetRepository {
|
||||||
|
async fn find_by_ref<'e, E>(executor: E, ref_str: &str) -> Result<Option<Self::Entity>>
|
||||||
|
where
|
||||||
|
E: Executor<'e, Database = Postgres> + 'e,
|
||||||
|
{
|
||||||
|
sqlx::query_as::<_, PermissionSet>(
|
||||||
|
"SELECT id, ref, pack, pack_ref, label, description, grants, created, updated FROM permission_set WHERE ref = $1"
|
||||||
|
)
|
||||||
|
.bind(ref_str)
|
||||||
|
.fetch_optional(executor)
|
||||||
|
.await
|
||||||
|
.map_err(Into::into)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
#[async_trait::async_trait]
|
#[async_trait::async_trait]
|
||||||
impl List for PermissionSetRepository {
|
impl List for PermissionSetRepository {
|
||||||
async fn list<'e, E>(executor: E) -> Result<Vec<Self::Entity>>
|
async fn list<'e, E>(executor: E) -> Result<Vec<Self::Entity>>
|
||||||
@@ -287,6 +345,54 @@ impl Delete for PermissionSetRepository {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
impl PermissionSetRepository {
|
||||||
|
pub async fn find_by_identity<'e, E>(executor: E, identity_id: Id) -> Result<Vec<PermissionSet>>
|
||||||
|
where
|
||||||
|
E: Executor<'e, Database = Postgres> + 'e,
|
||||||
|
{
|
||||||
|
sqlx::query_as::<_, PermissionSet>(
|
||||||
|
"SELECT ps.id, ps.ref, ps.pack, ps.pack_ref, ps.label, ps.description, ps.grants, ps.created, ps.updated
|
||||||
|
FROM permission_set ps
|
||||||
|
INNER JOIN permission_assignment pa ON pa.permset = ps.id
|
||||||
|
WHERE pa.identity = $1
|
||||||
|
ORDER BY ps.ref ASC",
|
||||||
|
)
|
||||||
|
.bind(identity_id)
|
||||||
|
.fetch_all(executor)
|
||||||
|
.await
|
||||||
|
.map_err(Into::into)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Delete permission sets belonging to a pack whose refs are NOT in the given set.
|
||||||
|
///
|
||||||
|
/// Used during pack reinstallation to clean up permission sets that were
|
||||||
|
/// removed from the pack's metadata. Associated permission assignments are
|
||||||
|
/// cascade-deleted by the FK constraint.
|
||||||
|
pub async fn delete_by_pack_excluding<'e, E>(
|
||||||
|
executor: E,
|
||||||
|
pack_id: Id,
|
||||||
|
keep_refs: &[String],
|
||||||
|
) -> Result<u64>
|
||||||
|
where
|
||||||
|
E: Executor<'e, Database = Postgres> + 'e,
|
||||||
|
{
|
||||||
|
let result = if keep_refs.is_empty() {
|
||||||
|
sqlx::query("DELETE FROM permission_set WHERE pack = $1")
|
||||||
|
.bind(pack_id)
|
||||||
|
.execute(executor)
|
||||||
|
.await?
|
||||||
|
} else {
|
||||||
|
sqlx::query("DELETE FROM permission_set WHERE pack = $1 AND ref != ALL($2)")
|
||||||
|
.bind(pack_id)
|
||||||
|
.bind(keep_refs)
|
||||||
|
.execute(executor)
|
||||||
|
.await?
|
||||||
|
};
|
||||||
|
|
||||||
|
Ok(result.rows_affected())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
// Permission Assignment Repository
|
// Permission Assignment Repository
|
||||||
pub struct PermissionAssignmentRepository;
|
pub struct PermissionAssignmentRepository;
|
||||||
|
|
||||||
|
|||||||
@@ -66,6 +66,14 @@ pub use runtime_version::RuntimeVersionRepository;
|
|||||||
pub use trigger::{SensorRepository, TriggerRepository};
|
pub use trigger::{SensorRepository, TriggerRepository};
|
||||||
pub use workflow::{WorkflowDefinitionRepository, WorkflowExecutionRepository};
|
pub use workflow::{WorkflowDefinitionRepository, WorkflowExecutionRepository};
|
||||||
|
|
||||||
|
/// Explicit patch operation for update inputs where callers must distinguish
|
||||||
|
/// between "leave unchanged", "set value", and "clear to NULL".
|
||||||
|
#[derive(Debug, Clone, PartialEq)]
|
||||||
|
pub enum Patch<T> {
|
||||||
|
Set(T),
|
||||||
|
Clear,
|
||||||
|
}
|
||||||
|
|
||||||
/// Type alias for database connection/transaction
|
/// Type alias for database connection/transaction
|
||||||
pub type DbConnection<'c> = &'c mut Transaction<'c, Postgres>;
|
pub type DbConnection<'c> = &'c mut Transaction<'c, Postgres>;
|
||||||
|
|
||||||
|
|||||||
@@ -6,7 +6,7 @@ use crate::models::{pack::Pack, JsonDict, JsonSchema};
|
|||||||
use crate::{Error, Result};
|
use crate::{Error, Result};
|
||||||
use sqlx::{Executor, Postgres, QueryBuilder};
|
use sqlx::{Executor, Postgres, QueryBuilder};
|
||||||
|
|
||||||
use super::{Create, Delete, FindById, FindByRef, List, Pagination, Repository, Update};
|
use super::{Create, Delete, FindById, FindByRef, List, Pagination, Patch, Repository, Update};
|
||||||
|
|
||||||
/// Repository for Pack operations
|
/// Repository for Pack operations
|
||||||
pub struct PackRepository;
|
pub struct PackRepository;
|
||||||
@@ -40,7 +40,7 @@ pub struct CreatePackInput {
|
|||||||
#[derive(Debug, Clone, Default)]
|
#[derive(Debug, Clone, Default)]
|
||||||
pub struct UpdatePackInput {
|
pub struct UpdatePackInput {
|
||||||
pub label: Option<String>,
|
pub label: Option<String>,
|
||||||
pub description: Option<String>,
|
pub description: Option<Patch<String>>,
|
||||||
pub version: Option<String>,
|
pub version: Option<String>,
|
||||||
pub conf_schema: Option<JsonSchema>,
|
pub conf_schema: Option<JsonSchema>,
|
||||||
pub config: Option<JsonDict>,
|
pub config: Option<JsonDict>,
|
||||||
@@ -186,7 +186,10 @@ impl Update for PackRepository {
|
|||||||
query.push(", ");
|
query.push(", ");
|
||||||
}
|
}
|
||||||
query.push("description = ");
|
query.push("description = ");
|
||||||
query.push_bind(description);
|
match description {
|
||||||
|
Patch::Set(value) => query.push_bind(value),
|
||||||
|
Patch::Clear => query.push_bind(Option::<String>::None),
|
||||||
|
};
|
||||||
has_updates = true;
|
has_updates = true;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -10,7 +10,7 @@ use crate::models::{
|
|||||||
use crate::Result;
|
use crate::Result;
|
||||||
use sqlx::{Executor, Postgres, QueryBuilder};
|
use sqlx::{Executor, Postgres, QueryBuilder};
|
||||||
|
|
||||||
use super::{Create, Delete, FindById, FindByRef, List, Repository, Update};
|
use super::{Create, Delete, FindById, FindByRef, List, Patch, Repository, Update};
|
||||||
|
|
||||||
/// Repository for Runtime operations
|
/// Repository for Runtime operations
|
||||||
pub struct RuntimeRepository;
|
pub struct RuntimeRepository;
|
||||||
@@ -23,6 +23,13 @@ impl Repository for RuntimeRepository {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Columns selected for all Runtime queries. Centralised here so that
|
||||||
|
/// schema changes only need one update.
|
||||||
|
pub const SELECT_COLUMNS: &str = "id, ref, pack, pack_ref, description, name, aliases, \
|
||||||
|
distributions, installation, installers, execution_config, \
|
||||||
|
auto_detected, detection_config, \
|
||||||
|
created, updated";
|
||||||
|
|
||||||
/// Input for creating a new runtime
|
/// Input for creating a new runtime
|
||||||
#[derive(Debug, Clone)]
|
#[derive(Debug, Clone)]
|
||||||
pub struct CreateRuntimeInput {
|
pub struct CreateRuntimeInput {
|
||||||
@@ -31,19 +38,25 @@ pub struct CreateRuntimeInput {
|
|||||||
pub pack_ref: Option<String>,
|
pub pack_ref: Option<String>,
|
||||||
pub description: Option<String>,
|
pub description: Option<String>,
|
||||||
pub name: String,
|
pub name: String,
|
||||||
|
pub aliases: Vec<String>,
|
||||||
pub distributions: JsonDict,
|
pub distributions: JsonDict,
|
||||||
pub installation: Option<JsonDict>,
|
pub installation: Option<JsonDict>,
|
||||||
pub execution_config: JsonDict,
|
pub execution_config: JsonDict,
|
||||||
|
pub auto_detected: bool,
|
||||||
|
pub detection_config: JsonDict,
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Input for updating a runtime
|
/// Input for updating a runtime
|
||||||
#[derive(Debug, Clone, Default)]
|
#[derive(Debug, Clone, Default)]
|
||||||
pub struct UpdateRuntimeInput {
|
pub struct UpdateRuntimeInput {
|
||||||
pub description: Option<String>,
|
pub description: Option<Patch<String>>,
|
||||||
pub name: Option<String>,
|
pub name: Option<String>,
|
||||||
|
pub aliases: Option<Vec<String>>,
|
||||||
pub distributions: Option<JsonDict>,
|
pub distributions: Option<JsonDict>,
|
||||||
pub installation: Option<JsonDict>,
|
pub installation: Option<Patch<JsonDict>>,
|
||||||
pub execution_config: Option<JsonDict>,
|
pub execution_config: Option<JsonDict>,
|
||||||
|
pub auto_detected: Option<bool>,
|
||||||
|
pub detection_config: Option<JsonDict>,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[async_trait::async_trait]
|
#[async_trait::async_trait]
|
||||||
@@ -52,18 +65,11 @@ impl FindById for RuntimeRepository {
|
|||||||
where
|
where
|
||||||
E: Executor<'e, Database = Postgres> + 'e,
|
E: Executor<'e, Database = Postgres> + 'e,
|
||||||
{
|
{
|
||||||
let runtime = sqlx::query_as::<_, Runtime>(
|
let query = format!("SELECT {} FROM runtime WHERE id = $1", SELECT_COLUMNS);
|
||||||
r#"
|
let runtime = sqlx::query_as::<_, Runtime>(&query)
|
||||||
SELECT id, ref, pack, pack_ref, description, name,
|
.bind(id)
|
||||||
distributions, installation, installers, execution_config,
|
.fetch_optional(executor)
|
||||||
created, updated
|
.await?;
|
||||||
FROM runtime
|
|
||||||
WHERE id = $1
|
|
||||||
"#,
|
|
||||||
)
|
|
||||||
.bind(id)
|
|
||||||
.fetch_optional(executor)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
Ok(runtime)
|
Ok(runtime)
|
||||||
}
|
}
|
||||||
@@ -75,18 +81,11 @@ impl FindByRef for RuntimeRepository {
|
|||||||
where
|
where
|
||||||
E: Executor<'e, Database = Postgres> + 'e,
|
E: Executor<'e, Database = Postgres> + 'e,
|
||||||
{
|
{
|
||||||
let runtime = sqlx::query_as::<_, Runtime>(
|
let query = format!("SELECT {} FROM runtime WHERE ref = $1", SELECT_COLUMNS);
|
||||||
r#"
|
let runtime = sqlx::query_as::<_, Runtime>(&query)
|
||||||
SELECT id, ref, pack, pack_ref, description, name,
|
.bind(ref_str)
|
||||||
distributions, installation, installers, execution_config,
|
.fetch_optional(executor)
|
||||||
created, updated
|
.await?;
|
||||||
FROM runtime
|
|
||||||
WHERE ref = $1
|
|
||||||
"#,
|
|
||||||
)
|
|
||||||
.bind(ref_str)
|
|
||||||
.fetch_optional(executor)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
Ok(runtime)
|
Ok(runtime)
|
||||||
}
|
}
|
||||||
@@ -98,17 +97,10 @@ impl List for RuntimeRepository {
|
|||||||
where
|
where
|
||||||
E: Executor<'e, Database = Postgres> + 'e,
|
E: Executor<'e, Database = Postgres> + 'e,
|
||||||
{
|
{
|
||||||
let runtimes = sqlx::query_as::<_, Runtime>(
|
let query = format!("SELECT {} FROM runtime ORDER BY ref ASC", SELECT_COLUMNS);
|
||||||
r#"
|
let runtimes = sqlx::query_as::<_, Runtime>(&query)
|
||||||
SELECT id, ref, pack, pack_ref, description, name,
|
.fetch_all(executor)
|
||||||
distributions, installation, installers, execution_config,
|
.await?;
|
||||||
created, updated
|
|
||||||
FROM runtime
|
|
||||||
ORDER BY ref ASC
|
|
||||||
"#,
|
|
||||||
)
|
|
||||||
.fetch_all(executor)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
Ok(runtimes)
|
Ok(runtimes)
|
||||||
}
|
}
|
||||||
@@ -122,27 +114,29 @@ impl Create for RuntimeRepository {
|
|||||||
where
|
where
|
||||||
E: Executor<'e, Database = Postgres> + 'e,
|
E: Executor<'e, Database = Postgres> + 'e,
|
||||||
{
|
{
|
||||||
let runtime = sqlx::query_as::<_, Runtime>(
|
let query = format!(
|
||||||
r#"
|
"INSERT INTO runtime (ref, pack, pack_ref, description, name, aliases, \
|
||||||
INSERT INTO runtime (ref, pack, pack_ref, description, name,
|
distributions, installation, installers, execution_config, \
|
||||||
distributions, installation, installers, execution_config)
|
auto_detected, detection_config) \
|
||||||
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9)
|
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12) \
|
||||||
RETURNING id, ref, pack, pack_ref, description, name,
|
RETURNING {}",
|
||||||
distributions, installation, installers, execution_config,
|
SELECT_COLUMNS
|
||||||
created, updated
|
);
|
||||||
"#,
|
let runtime = sqlx::query_as::<_, Runtime>(&query)
|
||||||
)
|
.bind(&input.r#ref)
|
||||||
.bind(&input.r#ref)
|
.bind(input.pack)
|
||||||
.bind(input.pack)
|
.bind(&input.pack_ref)
|
||||||
.bind(&input.pack_ref)
|
.bind(&input.description)
|
||||||
.bind(&input.description)
|
.bind(&input.name)
|
||||||
.bind(&input.name)
|
.bind(&input.aliases)
|
||||||
.bind(&input.distributions)
|
.bind(&input.distributions)
|
||||||
.bind(&input.installation)
|
.bind(&input.installation)
|
||||||
.bind(serde_json::json!({}))
|
.bind(serde_json::json!({}))
|
||||||
.bind(&input.execution_config)
|
.bind(&input.execution_config)
|
||||||
.fetch_one(executor)
|
.bind(input.auto_detected)
|
||||||
.await?;
|
.bind(&input.detection_config)
|
||||||
|
.fetch_one(executor)
|
||||||
|
.await?;
|
||||||
|
|
||||||
Ok(runtime)
|
Ok(runtime)
|
||||||
}
|
}
|
||||||
@@ -163,7 +157,10 @@ impl Update for RuntimeRepository {
|
|||||||
|
|
||||||
if let Some(description) = &input.description {
|
if let Some(description) = &input.description {
|
||||||
query.push("description = ");
|
query.push("description = ");
|
||||||
query.push_bind(description);
|
match description {
|
||||||
|
Patch::Set(description) => query.push_bind(description),
|
||||||
|
Patch::Clear => query.push_bind(Option::<String>::None),
|
||||||
|
};
|
||||||
has_updates = true;
|
has_updates = true;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -176,6 +173,15 @@ impl Update for RuntimeRepository {
|
|||||||
has_updates = true;
|
has_updates = true;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if let Some(aliases) = &input.aliases {
|
||||||
|
if has_updates {
|
||||||
|
query.push(", ");
|
||||||
|
}
|
||||||
|
query.push("aliases = ");
|
||||||
|
query.push_bind(aliases.as_slice());
|
||||||
|
has_updates = true;
|
||||||
|
}
|
||||||
|
|
||||||
if let Some(distributions) = &input.distributions {
|
if let Some(distributions) = &input.distributions {
|
||||||
if has_updates {
|
if has_updates {
|
||||||
query.push(", ");
|
query.push(", ");
|
||||||
@@ -190,7 +196,10 @@ impl Update for RuntimeRepository {
|
|||||||
query.push(", ");
|
query.push(", ");
|
||||||
}
|
}
|
||||||
query.push("installation = ");
|
query.push("installation = ");
|
||||||
query.push_bind(installation);
|
match installation {
|
||||||
|
Patch::Set(installation) => query.push_bind(installation),
|
||||||
|
Patch::Clear => query.push_bind(Option::<JsonDict>::None),
|
||||||
|
};
|
||||||
has_updates = true;
|
has_updates = true;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -203,6 +212,24 @@ impl Update for RuntimeRepository {
|
|||||||
has_updates = true;
|
has_updates = true;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if let Some(auto_detected) = input.auto_detected {
|
||||||
|
if has_updates {
|
||||||
|
query.push(", ");
|
||||||
|
}
|
||||||
|
query.push("auto_detected = ");
|
||||||
|
query.push_bind(auto_detected);
|
||||||
|
has_updates = true;
|
||||||
|
}
|
||||||
|
|
||||||
|
if let Some(detection_config) = &input.detection_config {
|
||||||
|
if has_updates {
|
||||||
|
query.push(", ");
|
||||||
|
}
|
||||||
|
query.push("detection_config = ");
|
||||||
|
query.push_bind(detection_config);
|
||||||
|
has_updates = true;
|
||||||
|
}
|
||||||
|
|
||||||
if !has_updates {
|
if !has_updates {
|
||||||
// No updates requested, fetch and return existing entity
|
// No updates requested, fetch and return existing entity
|
||||||
return Self::get_by_id(executor, id).await;
|
return Self::get_by_id(executor, id).await;
|
||||||
@@ -210,10 +237,7 @@ impl Update for RuntimeRepository {
|
|||||||
|
|
||||||
query.push(", updated = NOW() WHERE id = ");
|
query.push(", updated = NOW() WHERE id = ");
|
||||||
query.push_bind(id);
|
query.push_bind(id);
|
||||||
query.push(
|
query.push(&format!(" RETURNING {}", SELECT_COLUMNS));
|
||||||
" RETURNING id, ref, pack, pack_ref, description, name, \
|
|
||||||
distributions, installation, installers, execution_config, created, updated",
|
|
||||||
);
|
|
||||||
|
|
||||||
let runtime = query
|
let runtime = query
|
||||||
.build_query_as::<Runtime>()
|
.build_query_as::<Runtime>()
|
||||||
@@ -245,19 +269,14 @@ impl RuntimeRepository {
|
|||||||
where
|
where
|
||||||
E: Executor<'e, Database = Postgres> + 'e,
|
E: Executor<'e, Database = Postgres> + 'e,
|
||||||
{
|
{
|
||||||
let runtimes = sqlx::query_as::<_, Runtime>(
|
let query = format!(
|
||||||
r#"
|
"SELECT {} FROM runtime WHERE pack = $1 ORDER BY ref ASC",
|
||||||
SELECT id, ref, pack, pack_ref, description, name,
|
SELECT_COLUMNS
|
||||||
distributions, installation, installers, execution_config,
|
);
|
||||||
created, updated
|
let runtimes = sqlx::query_as::<_, Runtime>(&query)
|
||||||
FROM runtime
|
.bind(pack_id)
|
||||||
WHERE pack = $1
|
.fetch_all(executor)
|
||||||
ORDER BY ref ASC
|
.await?;
|
||||||
"#,
|
|
||||||
)
|
|
||||||
.bind(pack_id)
|
|
||||||
.fetch_all(executor)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
Ok(runtimes)
|
Ok(runtimes)
|
||||||
}
|
}
|
||||||
@@ -267,23 +286,35 @@ impl RuntimeRepository {
|
|||||||
where
|
where
|
||||||
E: Executor<'e, Database = Postgres> + 'e,
|
E: Executor<'e, Database = Postgres> + 'e,
|
||||||
{
|
{
|
||||||
let runtime = sqlx::query_as::<_, Runtime>(
|
let query = format!(
|
||||||
r#"
|
"SELECT {} FROM runtime WHERE LOWER(name) = LOWER($1) LIMIT 1",
|
||||||
SELECT id, ref, pack, pack_ref, description, name,
|
SELECT_COLUMNS
|
||||||
distributions, installation, installers, execution_config,
|
);
|
||||||
created, updated
|
let runtime = sqlx::query_as::<_, Runtime>(&query)
|
||||||
FROM runtime
|
.bind(name)
|
||||||
WHERE LOWER(name) = LOWER($1)
|
.fetch_optional(executor)
|
||||||
LIMIT 1
|
.await?;
|
||||||
"#,
|
|
||||||
)
|
|
||||||
.bind(name)
|
|
||||||
.fetch_optional(executor)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
Ok(runtime)
|
Ok(runtime)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Find a runtime where the given alias appears in its `aliases` array.
|
||||||
|
/// Uses PostgreSQL's `@>` (array contains) operator with a GIN index.
|
||||||
|
pub async fn find_by_alias<'e, E>(executor: E, alias: &str) -> Result<Option<Runtime>>
|
||||||
|
where
|
||||||
|
E: Executor<'e, Database = Postgres> + 'e,
|
||||||
|
{
|
||||||
|
let query = format!(
|
||||||
|
"SELECT {} FROM runtime WHERE aliases @> ARRAY[$1]::text[] LIMIT 1",
|
||||||
|
SELECT_COLUMNS
|
||||||
|
);
|
||||||
|
let runtime = sqlx::query_as::<_, Runtime>(&query)
|
||||||
|
.bind(alias)
|
||||||
|
.fetch_optional(executor)
|
||||||
|
.await?;
|
||||||
|
Ok(runtime)
|
||||||
|
}
|
||||||
|
|
||||||
/// Delete runtimes belonging to a pack whose refs are NOT in the given set.
|
/// Delete runtimes belonging to a pack whose refs are NOT in the given set.
|
||||||
///
|
///
|
||||||
/// Used during pack reinstallation to clean up runtimes that were removed
|
/// Used during pack reinstallation to clean up runtimes that were removed
|
||||||
|
|||||||
@@ -5,7 +5,7 @@
|
|||||||
|
|
||||||
use crate::error::Result;
|
use crate::error::Result;
|
||||||
use crate::models::{Id, RuntimeVersion};
|
use crate::models::{Id, RuntimeVersion};
|
||||||
use crate::repositories::{Create, Delete, FindById, List, Repository, Update};
|
use crate::repositories::{Create, Delete, FindById, List, Patch, Repository, Update};
|
||||||
use sqlx::{Executor, Postgres, QueryBuilder};
|
use sqlx::{Executor, Postgres, QueryBuilder};
|
||||||
|
|
||||||
/// Repository for runtime version database operations
|
/// Repository for runtime version database operations
|
||||||
@@ -39,14 +39,14 @@ pub struct CreateRuntimeVersionInput {
|
|||||||
#[derive(Debug, Clone, Default)]
|
#[derive(Debug, Clone, Default)]
|
||||||
pub struct UpdateRuntimeVersionInput {
|
pub struct UpdateRuntimeVersionInput {
|
||||||
pub version: Option<String>,
|
pub version: Option<String>,
|
||||||
pub version_major: Option<Option<i32>>,
|
pub version_major: Option<Patch<i32>>,
|
||||||
pub version_minor: Option<Option<i32>>,
|
pub version_minor: Option<Patch<i32>>,
|
||||||
pub version_patch: Option<Option<i32>>,
|
pub version_patch: Option<Patch<i32>>,
|
||||||
pub execution_config: Option<serde_json::Value>,
|
pub execution_config: Option<serde_json::Value>,
|
||||||
pub distributions: Option<serde_json::Value>,
|
pub distributions: Option<serde_json::Value>,
|
||||||
pub is_default: Option<bool>,
|
pub is_default: Option<bool>,
|
||||||
pub available: Option<bool>,
|
pub available: Option<bool>,
|
||||||
pub verified_at: Option<Option<chrono::DateTime<chrono::Utc>>>,
|
pub verified_at: Option<Patch<chrono::DateTime<chrono::Utc>>>,
|
||||||
pub meta: Option<serde_json::Value>,
|
pub meta: Option<serde_json::Value>,
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -154,7 +154,10 @@ impl Update for RuntimeVersionRepository {
|
|||||||
query.push(", ");
|
query.push(", ");
|
||||||
}
|
}
|
||||||
query.push("version_major = ");
|
query.push("version_major = ");
|
||||||
query.push_bind(*version_major);
|
match version_major {
|
||||||
|
Patch::Set(value) => query.push_bind(*value),
|
||||||
|
Patch::Clear => query.push_bind(Option::<i32>::None),
|
||||||
|
};
|
||||||
has_updates = true;
|
has_updates = true;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -163,7 +166,10 @@ impl Update for RuntimeVersionRepository {
|
|||||||
query.push(", ");
|
query.push(", ");
|
||||||
}
|
}
|
||||||
query.push("version_minor = ");
|
query.push("version_minor = ");
|
||||||
query.push_bind(*version_minor);
|
match version_minor {
|
||||||
|
Patch::Set(value) => query.push_bind(*value),
|
||||||
|
Patch::Clear => query.push_bind(Option::<i32>::None),
|
||||||
|
};
|
||||||
has_updates = true;
|
has_updates = true;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -172,7 +178,10 @@ impl Update for RuntimeVersionRepository {
|
|||||||
query.push(", ");
|
query.push(", ");
|
||||||
}
|
}
|
||||||
query.push("version_patch = ");
|
query.push("version_patch = ");
|
||||||
query.push_bind(*version_patch);
|
match version_patch {
|
||||||
|
Patch::Set(value) => query.push_bind(*value),
|
||||||
|
Patch::Clear => query.push_bind(Option::<i32>::None),
|
||||||
|
};
|
||||||
has_updates = true;
|
has_updates = true;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -217,7 +226,10 @@ impl Update for RuntimeVersionRepository {
|
|||||||
query.push(", ");
|
query.push(", ");
|
||||||
}
|
}
|
||||||
query.push("verified_at = ");
|
query.push("verified_at = ");
|
||||||
query.push_bind(*verified_at);
|
match verified_at {
|
||||||
|
Patch::Set(value) => query.push_bind(*value),
|
||||||
|
Patch::Clear => query.push_bind(Option::<chrono::DateTime<chrono::Utc>>::None),
|
||||||
|
};
|
||||||
has_updates = true;
|
has_updates = true;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -7,7 +7,7 @@ use crate::Result;
|
|||||||
use serde_json::Value as JsonValue;
|
use serde_json::Value as JsonValue;
|
||||||
use sqlx::{Executor, Postgres, QueryBuilder};
|
use sqlx::{Executor, Postgres, QueryBuilder};
|
||||||
|
|
||||||
use super::{Create, Delete, FindById, FindByRef, List, Repository, Update};
|
use super::{Create, Delete, FindById, FindByRef, List, Patch, Repository, Update};
|
||||||
|
|
||||||
// ============================================================================
|
// ============================================================================
|
||||||
// Trigger Search
|
// Trigger Search
|
||||||
@@ -88,10 +88,10 @@ pub struct CreateTriggerInput {
|
|||||||
#[derive(Debug, Clone, Default)]
|
#[derive(Debug, Clone, Default)]
|
||||||
pub struct UpdateTriggerInput {
|
pub struct UpdateTriggerInput {
|
||||||
pub label: Option<String>,
|
pub label: Option<String>,
|
||||||
pub description: Option<String>,
|
pub description: Option<Patch<String>>,
|
||||||
pub enabled: Option<bool>,
|
pub enabled: Option<bool>,
|
||||||
pub param_schema: Option<JsonSchema>,
|
pub param_schema: Option<Patch<JsonSchema>>,
|
||||||
pub out_schema: Option<JsonSchema>,
|
pub out_schema: Option<Patch<JsonSchema>>,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[async_trait::async_trait]
|
#[async_trait::async_trait]
|
||||||
@@ -229,7 +229,10 @@ impl Update for TriggerRepository {
|
|||||||
query.push(", ");
|
query.push(", ");
|
||||||
}
|
}
|
||||||
query.push("description = ");
|
query.push("description = ");
|
||||||
query.push_bind(description);
|
match description {
|
||||||
|
Patch::Set(value) => query.push_bind(value),
|
||||||
|
Patch::Clear => query.push_bind(Option::<String>::None),
|
||||||
|
};
|
||||||
has_updates = true;
|
has_updates = true;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -247,7 +250,10 @@ impl Update for TriggerRepository {
|
|||||||
query.push(", ");
|
query.push(", ");
|
||||||
}
|
}
|
||||||
query.push("param_schema = ");
|
query.push("param_schema = ");
|
||||||
query.push_bind(param_schema);
|
match param_schema {
|
||||||
|
Patch::Set(value) => query.push_bind(value),
|
||||||
|
Patch::Clear => query.push_bind(Option::<JsonSchema>::None),
|
||||||
|
};
|
||||||
has_updates = true;
|
has_updates = true;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -256,7 +262,10 @@ impl Update for TriggerRepository {
|
|||||||
query.push(", ");
|
query.push(", ");
|
||||||
}
|
}
|
||||||
query.push("out_schema = ");
|
query.push("out_schema = ");
|
||||||
query.push_bind(out_schema);
|
match out_schema {
|
||||||
|
Patch::Set(value) => query.push_bind(value),
|
||||||
|
Patch::Clear => query.push_bind(Option::<JsonSchema>::None),
|
||||||
|
};
|
||||||
has_updates = true;
|
has_updates = true;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -676,11 +685,11 @@ pub struct UpdateSensorInput {
|
|||||||
pub entrypoint: Option<String>,
|
pub entrypoint: Option<String>,
|
||||||
pub runtime: Option<Id>,
|
pub runtime: Option<Id>,
|
||||||
pub runtime_ref: Option<String>,
|
pub runtime_ref: Option<String>,
|
||||||
pub runtime_version_constraint: Option<Option<String>>,
|
pub runtime_version_constraint: Option<Patch<String>>,
|
||||||
pub trigger: Option<Id>,
|
pub trigger: Option<Id>,
|
||||||
pub trigger_ref: Option<String>,
|
pub trigger_ref: Option<String>,
|
||||||
pub enabled: Option<bool>,
|
pub enabled: Option<bool>,
|
||||||
pub param_schema: Option<JsonSchema>,
|
pub param_schema: Option<Patch<JsonSchema>>,
|
||||||
pub config: Option<JsonValue>,
|
pub config: Option<JsonValue>,
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -866,7 +875,10 @@ impl Update for SensorRepository {
|
|||||||
query.push(", ");
|
query.push(", ");
|
||||||
}
|
}
|
||||||
query.push("runtime_version_constraint = ");
|
query.push("runtime_version_constraint = ");
|
||||||
query.push_bind(runtime_version_constraint);
|
match runtime_version_constraint {
|
||||||
|
Patch::Set(value) => query.push_bind(value),
|
||||||
|
Patch::Clear => query.push_bind(Option::<String>::None),
|
||||||
|
};
|
||||||
has_updates = true;
|
has_updates = true;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -893,7 +905,10 @@ impl Update for SensorRepository {
|
|||||||
query.push(", ");
|
query.push(", ");
|
||||||
}
|
}
|
||||||
query.push("param_schema = ");
|
query.push("param_schema = ");
|
||||||
query.push_bind(param_schema);
|
match param_schema {
|
||||||
|
Patch::Set(value) => query.push_bind(value),
|
||||||
|
Patch::Clear => query.push_bind(Option::<JsonSchema>::None),
|
||||||
|
};
|
||||||
has_updates = true;
|
has_updates = true;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -20,8 +20,6 @@ pub struct WorkflowSearchFilters {
|
|||||||
pub pack: Option<Id>,
|
pub pack: Option<Id>,
|
||||||
/// Filter by pack reference
|
/// Filter by pack reference
|
||||||
pub pack_ref: Option<String>,
|
pub pack_ref: Option<String>,
|
||||||
/// Filter by enabled status
|
|
||||||
pub enabled: Option<bool>,
|
|
||||||
/// Filter by tags (OR across tags — matches if any tag is present)
|
/// Filter by tags (OR across tags — matches if any tag is present)
|
||||||
pub tags: Option<Vec<String>>,
|
pub tags: Option<Vec<String>>,
|
||||||
/// Text search across label and description (case-insensitive substring)
|
/// Text search across label and description (case-insensitive substring)
|
||||||
@@ -62,7 +60,6 @@ pub struct CreateWorkflowDefinitionInput {
|
|||||||
pub out_schema: Option<JsonSchema>,
|
pub out_schema: Option<JsonSchema>,
|
||||||
pub definition: JsonDict,
|
pub definition: JsonDict,
|
||||||
pub tags: Vec<String>,
|
pub tags: Vec<String>,
|
||||||
pub enabled: bool,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Clone, Default)]
|
#[derive(Debug, Clone, Default)]
|
||||||
@@ -74,7 +71,6 @@ pub struct UpdateWorkflowDefinitionInput {
|
|||||||
pub out_schema: Option<JsonSchema>,
|
pub out_schema: Option<JsonSchema>,
|
||||||
pub definition: Option<JsonDict>,
|
pub definition: Option<JsonDict>,
|
||||||
pub tags: Option<Vec<String>>,
|
pub tags: Option<Vec<String>>,
|
||||||
pub enabled: Option<bool>,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[async_trait::async_trait]
|
#[async_trait::async_trait]
|
||||||
@@ -84,7 +80,7 @@ impl FindById for WorkflowDefinitionRepository {
|
|||||||
E: Executor<'e, Database = Postgres> + 'e,
|
E: Executor<'e, Database = Postgres> + 'e,
|
||||||
{
|
{
|
||||||
sqlx::query_as::<_, WorkflowDefinition>(
|
sqlx::query_as::<_, WorkflowDefinition>(
|
||||||
"SELECT id, ref, pack, pack_ref, label, description, version, param_schema, out_schema, definition, tags, enabled, created, updated
|
"SELECT id, ref, pack, pack_ref, label, description, version, param_schema, out_schema, definition, tags, created, updated
|
||||||
FROM workflow_definition
|
FROM workflow_definition
|
||||||
WHERE id = $1"
|
WHERE id = $1"
|
||||||
)
|
)
|
||||||
@@ -102,7 +98,7 @@ impl FindByRef for WorkflowDefinitionRepository {
|
|||||||
E: Executor<'e, Database = Postgres> + 'e,
|
E: Executor<'e, Database = Postgres> + 'e,
|
||||||
{
|
{
|
||||||
sqlx::query_as::<_, WorkflowDefinition>(
|
sqlx::query_as::<_, WorkflowDefinition>(
|
||||||
"SELECT id, ref, pack, pack_ref, label, description, version, param_schema, out_schema, definition, tags, enabled, created, updated
|
"SELECT id, ref, pack, pack_ref, label, description, version, param_schema, out_schema, definition, tags, created, updated
|
||||||
FROM workflow_definition
|
FROM workflow_definition
|
||||||
WHERE ref = $1"
|
WHERE ref = $1"
|
||||||
)
|
)
|
||||||
@@ -120,7 +116,7 @@ impl List for WorkflowDefinitionRepository {
|
|||||||
E: Executor<'e, Database = Postgres> + 'e,
|
E: Executor<'e, Database = Postgres> + 'e,
|
||||||
{
|
{
|
||||||
sqlx::query_as::<_, WorkflowDefinition>(
|
sqlx::query_as::<_, WorkflowDefinition>(
|
||||||
"SELECT id, ref, pack, pack_ref, label, description, version, param_schema, out_schema, definition, tags, enabled, created, updated
|
"SELECT id, ref, pack, pack_ref, label, description, version, param_schema, out_schema, definition, tags, created, updated
|
||||||
FROM workflow_definition
|
FROM workflow_definition
|
||||||
ORDER BY created DESC
|
ORDER BY created DESC
|
||||||
LIMIT 1000"
|
LIMIT 1000"
|
||||||
@@ -141,9 +137,9 @@ impl Create for WorkflowDefinitionRepository {
|
|||||||
{
|
{
|
||||||
sqlx::query_as::<_, WorkflowDefinition>(
|
sqlx::query_as::<_, WorkflowDefinition>(
|
||||||
"INSERT INTO workflow_definition
|
"INSERT INTO workflow_definition
|
||||||
(ref, pack, pack_ref, label, description, version, param_schema, out_schema, definition, tags, enabled)
|
(ref, pack, pack_ref, label, description, version, param_schema, out_schema, definition, tags)
|
||||||
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11)
|
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)
|
||||||
RETURNING id, ref, pack, pack_ref, label, description, version, param_schema, out_schema, definition, tags, enabled, created, updated"
|
RETURNING id, ref, pack, pack_ref, label, description, version, param_schema, out_schema, definition, tags, created, updated"
|
||||||
)
|
)
|
||||||
.bind(&input.r#ref)
|
.bind(&input.r#ref)
|
||||||
.bind(input.pack)
|
.bind(input.pack)
|
||||||
@@ -155,7 +151,6 @@ impl Create for WorkflowDefinitionRepository {
|
|||||||
.bind(&input.out_schema)
|
.bind(&input.out_schema)
|
||||||
.bind(&input.definition)
|
.bind(&input.definition)
|
||||||
.bind(&input.tags)
|
.bind(&input.tags)
|
||||||
.bind(input.enabled)
|
|
||||||
.fetch_one(executor)
|
.fetch_one(executor)
|
||||||
.await
|
.await
|
||||||
.map_err(Into::into)
|
.map_err(Into::into)
|
||||||
@@ -219,20 +214,12 @@ impl Update for WorkflowDefinitionRepository {
|
|||||||
query.push("tags = ").push_bind(tags);
|
query.push("tags = ").push_bind(tags);
|
||||||
has_updates = true;
|
has_updates = true;
|
||||||
}
|
}
|
||||||
if let Some(enabled) = input.enabled {
|
|
||||||
if has_updates {
|
|
||||||
query.push(", ");
|
|
||||||
}
|
|
||||||
query.push("enabled = ").push_bind(enabled);
|
|
||||||
has_updates = true;
|
|
||||||
}
|
|
||||||
|
|
||||||
if !has_updates {
|
if !has_updates {
|
||||||
return Self::get_by_id(executor, id).await;
|
return Self::get_by_id(executor, id).await;
|
||||||
}
|
}
|
||||||
|
|
||||||
query.push(", updated = NOW() WHERE id = ").push_bind(id);
|
query.push(", updated = NOW() WHERE id = ").push_bind(id);
|
||||||
query.push(" RETURNING id, ref, pack, pack_ref, label, description, version, param_schema, out_schema, definition, tags, enabled, created, updated");
|
query.push(" RETURNING id, ref, pack, pack_ref, label, description, version, param_schema, out_schema, definition, tags, created, updated");
|
||||||
|
|
||||||
query
|
query
|
||||||
.build_query_as::<WorkflowDefinition>()
|
.build_query_as::<WorkflowDefinition>()
|
||||||
@@ -269,7 +256,7 @@ impl WorkflowDefinitionRepository {
|
|||||||
where
|
where
|
||||||
E: Executor<'e, Database = Postgres> + Copy + 'e,
|
E: Executor<'e, Database = Postgres> + Copy + 'e,
|
||||||
{
|
{
|
||||||
let select_cols = "id, ref, pack, pack_ref, label, description, version, param_schema, out_schema, definition, tags, enabled, created, updated";
|
let select_cols = "id, ref, pack, pack_ref, label, description, version, param_schema, out_schema, definition, tags, created, updated";
|
||||||
|
|
||||||
let mut qb: QueryBuilder<'_, Postgres> =
|
let mut qb: QueryBuilder<'_, Postgres> =
|
||||||
QueryBuilder::new(format!("SELECT {select_cols} FROM workflow_definition"));
|
QueryBuilder::new(format!("SELECT {select_cols} FROM workflow_definition"));
|
||||||
@@ -301,9 +288,6 @@ impl WorkflowDefinitionRepository {
|
|||||||
if let Some(ref pack_ref) = filters.pack_ref {
|
if let Some(ref pack_ref) = filters.pack_ref {
|
||||||
push_condition!("pack_ref = ", pack_ref.clone());
|
push_condition!("pack_ref = ", pack_ref.clone());
|
||||||
}
|
}
|
||||||
if let Some(enabled) = filters.enabled {
|
|
||||||
push_condition!("enabled = ", enabled);
|
|
||||||
}
|
|
||||||
if let Some(ref tags) = filters.tags {
|
if let Some(ref tags) = filters.tags {
|
||||||
if !tags.is_empty() {
|
if !tags.is_empty() {
|
||||||
// Use PostgreSQL array overlap operator: tags && ARRAY[...]
|
// Use PostgreSQL array overlap operator: tags && ARRAY[...]
|
||||||
@@ -359,7 +343,7 @@ impl WorkflowDefinitionRepository {
|
|||||||
E: Executor<'e, Database = Postgres> + 'e,
|
E: Executor<'e, Database = Postgres> + 'e,
|
||||||
{
|
{
|
||||||
sqlx::query_as::<_, WorkflowDefinition>(
|
sqlx::query_as::<_, WorkflowDefinition>(
|
||||||
"SELECT id, ref, pack, pack_ref, label, description, version, param_schema, out_schema, definition, tags, enabled, created, updated
|
"SELECT id, ref, pack, pack_ref, label, description, version, param_schema, out_schema, definition, tags, created, updated
|
||||||
FROM workflow_definition
|
FROM workflow_definition
|
||||||
WHERE pack = $1
|
WHERE pack = $1
|
||||||
ORDER BY label"
|
ORDER BY label"
|
||||||
@@ -379,7 +363,7 @@ impl WorkflowDefinitionRepository {
|
|||||||
E: Executor<'e, Database = Postgres> + 'e,
|
E: Executor<'e, Database = Postgres> + 'e,
|
||||||
{
|
{
|
||||||
sqlx::query_as::<_, WorkflowDefinition>(
|
sqlx::query_as::<_, WorkflowDefinition>(
|
||||||
"SELECT id, ref, pack, pack_ref, label, description, version, param_schema, out_schema, definition, tags, enabled, created, updated
|
"SELECT id, ref, pack, pack_ref, label, description, version, param_schema, out_schema, definition, tags, created, updated
|
||||||
FROM workflow_definition
|
FROM workflow_definition
|
||||||
WHERE pack_ref = $1
|
WHERE pack_ref = $1
|
||||||
ORDER BY label"
|
ORDER BY label"
|
||||||
@@ -403,29 +387,13 @@ impl WorkflowDefinitionRepository {
|
|||||||
Ok(result.0)
|
Ok(result.0)
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Find all enabled workflows
|
|
||||||
pub async fn find_enabled<'e, E>(executor: E) -> Result<Vec<WorkflowDefinition>>
|
|
||||||
where
|
|
||||||
E: Executor<'e, Database = Postgres> + 'e,
|
|
||||||
{
|
|
||||||
sqlx::query_as::<_, WorkflowDefinition>(
|
|
||||||
"SELECT id, ref, pack, pack_ref, label, description, version, param_schema, out_schema, definition, tags, enabled, created, updated
|
|
||||||
FROM workflow_definition
|
|
||||||
WHERE enabled = true
|
|
||||||
ORDER BY label"
|
|
||||||
)
|
|
||||||
.fetch_all(executor)
|
|
||||||
.await
|
|
||||||
.map_err(Into::into)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Find workflows by tag
|
/// Find workflows by tag
|
||||||
pub async fn find_by_tag<'e, E>(executor: E, tag: &str) -> Result<Vec<WorkflowDefinition>>
|
pub async fn find_by_tag<'e, E>(executor: E, tag: &str) -> Result<Vec<WorkflowDefinition>>
|
||||||
where
|
where
|
||||||
E: Executor<'e, Database = Postgres> + 'e,
|
E: Executor<'e, Database = Postgres> + 'e,
|
||||||
{
|
{
|
||||||
sqlx::query_as::<_, WorkflowDefinition>(
|
sqlx::query_as::<_, WorkflowDefinition>(
|
||||||
"SELECT id, ref, pack, pack_ref, label, description, version, param_schema, out_schema, definition, tags, enabled, created, updated
|
"SELECT id, ref, pack, pack_ref, label, description, version, param_schema, out_schema, definition, tags, created, updated
|
||||||
FROM workflow_definition
|
FROM workflow_definition
|
||||||
WHERE $1 = ANY(tags)
|
WHERE $1 = ANY(tags)
|
||||||
ORDER BY label"
|
ORDER BY label"
|
||||||
|
|||||||
@@ -6,59 +6,41 @@
|
|||||||
//! 2. Config file specification (medium priority)
|
//! 2. Config file specification (medium priority)
|
||||||
//! 3. Database-driven detection with verification (lowest priority)
|
//! 3. Database-driven detection with verification (lowest priority)
|
||||||
//!
|
//!
|
||||||
//! Also provides [`normalize_runtime_name`] for alias-aware runtime name
|
//! Also provides alias-based matching functions ([`runtime_aliases_match_filter`]
|
||||||
//! comparison across the codebase (worker filters, env setup, etc.).
|
//! and [`runtime_aliases_contain`]) for comparing runtime alias lists against
|
||||||
|
//! worker filters and capability strings. Aliases are declared per-runtime in
|
||||||
|
//! pack manifests, so no hardcoded alias table is needed here.
|
||||||
|
|
||||||
use crate::config::Config;
|
use crate::config::Config;
|
||||||
use crate::error::Result;
|
use crate::error::Result;
|
||||||
use crate::models::Runtime;
|
use crate::models::Runtime;
|
||||||
|
use crate::repositories::runtime::SELECT_COLUMNS;
|
||||||
use serde_json::json;
|
use serde_json::json;
|
||||||
use sqlx::PgPool;
|
use sqlx::PgPool;
|
||||||
use std::collections::HashMap;
|
use std::collections::HashMap;
|
||||||
use std::process::Command;
|
use std::process::Command;
|
||||||
use tracing::{debug, info, warn};
|
use tracing::{debug, info, warn};
|
||||||
|
|
||||||
/// Normalize a runtime name to its canonical short form.
|
/// Check if a runtime's aliases overlap with a filter list.
|
||||||
///
|
///
|
||||||
/// This ensures that different ways of referring to the same runtime
|
/// The filter list comes from `ATTUNE_WORKER_RUNTIMES` (e.g., `["python", "shell"]`).
|
||||||
/// (e.g., "node", "nodejs", "node.js") all resolve to a single canonical
|
/// A runtime matches if any of its declared aliases appear in the filter list.
|
||||||
/// name. Used by worker runtime filters and environment setup to match
|
/// Comparison is case-insensitive.
|
||||||
/// database runtime names against short filter values.
|
pub fn runtime_aliases_match_filter(aliases: &[String], filter: &[String]) -> bool {
|
||||||
///
|
aliases.iter().any(|alias| {
|
||||||
/// The canonical names mirror the alias groups in
|
let lower_alias = alias.to_ascii_lowercase();
|
||||||
/// `PackComponentLoader::resolve_runtime`.
|
filter.iter().any(|f| f.to_ascii_lowercase() == lower_alias)
|
||||||
///
|
})
|
||||||
/// # Examples
|
|
||||||
/// ```
|
|
||||||
/// use attune_common::runtime_detection::normalize_runtime_name;
|
|
||||||
/// assert_eq!(normalize_runtime_name("node.js"), "node");
|
|
||||||
/// assert_eq!(normalize_runtime_name("nodejs"), "node");
|
|
||||||
/// assert_eq!(normalize_runtime_name("python3"), "python");
|
|
||||||
/// assert_eq!(normalize_runtime_name("shell"), "shell");
|
|
||||||
/// ```
|
|
||||||
pub fn normalize_runtime_name(name: &str) -> &str {
|
|
||||||
match name {
|
|
||||||
"node" | "nodejs" | "node.js" => "node",
|
|
||||||
"python" | "python3" => "python",
|
|
||||||
"bash" | "sh" | "shell" => "shell",
|
|
||||||
"native" | "builtin" | "standalone" => "native",
|
|
||||||
other => other,
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Check if a runtime name matches a filter entry, supporting common aliases.
|
/// Check if a runtime's aliases contain a specific name.
|
||||||
///
|
///
|
||||||
/// Both sides are lowercased and then normalized before comparison so that,
|
/// Used by the scheduler to check if a worker's capability string
|
||||||
/// e.g., a filter value of `"node"` matches a database runtime name `"Node.js"`.
|
/// (e.g., "python") matches a runtime's aliases (e.g., ["python", "python3"]).
|
||||||
pub fn runtime_matches_filter(rt_name: &str, filter_entry: &str) -> bool {
|
/// Comparison is case-insensitive.
|
||||||
let rt_lower = rt_name.to_ascii_lowercase();
|
pub fn runtime_aliases_contain(aliases: &[String], name: &str) -> bool {
|
||||||
let filter_lower = filter_entry.to_ascii_lowercase();
|
let lower = name.to_ascii_lowercase();
|
||||||
normalize_runtime_name(&rt_lower) == normalize_runtime_name(&filter_lower)
|
aliases.iter().any(|a| a.to_ascii_lowercase() == lower)
|
||||||
}
|
|
||||||
|
|
||||||
/// Check if a runtime name matches any entry in a filter list.
|
|
||||||
pub fn runtime_in_filter(rt_name: &str, filter: &[String]) -> bool {
|
|
||||||
filter.iter().any(|f| runtime_matches_filter(rt_name, f))
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Runtime detection service
|
/// Runtime detection service
|
||||||
@@ -156,17 +138,10 @@ impl RuntimeDetector {
|
|||||||
info!("Querying database for runtime definitions...");
|
info!("Querying database for runtime definitions...");
|
||||||
|
|
||||||
// Query all runtimes from database
|
// Query all runtimes from database
|
||||||
let runtimes = sqlx::query_as::<_, Runtime>(
|
let query = format!("SELECT {} FROM runtime ORDER BY ref", SELECT_COLUMNS);
|
||||||
r#"
|
let runtimes = sqlx::query_as::<_, Runtime>(&query)
|
||||||
SELECT id, ref, pack, pack_ref, description, name,
|
.fetch_all(&self.pool)
|
||||||
distributions, installation, installers, execution_config,
|
.await?;
|
||||||
created, updated
|
|
||||||
FROM runtime
|
|
||||||
ORDER BY ref
|
|
||||||
"#,
|
|
||||||
)
|
|
||||||
.fetch_all(&self.pool)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
info!("Found {} runtime(s) in database", runtimes.len());
|
info!("Found {} runtime(s) in database", runtimes.len());
|
||||||
|
|
||||||
@@ -337,69 +312,46 @@ mod tests {
|
|||||||
use serde_json::json;
|
use serde_json::json;
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_normalize_runtime_name_node_variants() {
|
fn test_runtime_aliases_match_filter() {
|
||||||
assert_eq!(normalize_runtime_name("node"), "node");
|
let aliases = vec!["python".to_string(), "python3".to_string()];
|
||||||
assert_eq!(normalize_runtime_name("nodejs"), "node");
|
let filter = vec!["python".to_string(), "shell".to_string()];
|
||||||
assert_eq!(normalize_runtime_name("node.js"), "node");
|
assert!(runtime_aliases_match_filter(&aliases, &filter));
|
||||||
|
|
||||||
|
let filter_no_match = vec!["node".to_string(), "ruby".to_string()];
|
||||||
|
assert!(!runtime_aliases_match_filter(&aliases, &filter_no_match));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_normalize_runtime_name_python_variants() {
|
fn test_runtime_aliases_match_filter_case_insensitive() {
|
||||||
assert_eq!(normalize_runtime_name("python"), "python");
|
let aliases = vec!["Python".to_string(), "python3".to_string()];
|
||||||
assert_eq!(normalize_runtime_name("python3"), "python");
|
let filter = vec!["python".to_string()];
|
||||||
|
assert!(runtime_aliases_match_filter(&aliases, &filter));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_normalize_runtime_name_shell_variants() {
|
fn test_runtime_aliases_match_filter_empty() {
|
||||||
assert_eq!(normalize_runtime_name("shell"), "shell");
|
let aliases: Vec<String> = vec![];
|
||||||
assert_eq!(normalize_runtime_name("bash"), "shell");
|
let filter = vec!["python".to_string()];
|
||||||
assert_eq!(normalize_runtime_name("sh"), "shell");
|
assert!(!runtime_aliases_match_filter(&aliases, &filter));
|
||||||
|
|
||||||
|
let aliases = vec!["python".to_string()];
|
||||||
|
let filter: Vec<String> = vec![];
|
||||||
|
assert!(!runtime_aliases_match_filter(&aliases, &filter));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_normalize_runtime_name_native_variants() {
|
fn test_runtime_aliases_contain() {
|
||||||
assert_eq!(normalize_runtime_name("native"), "native");
|
let aliases = vec!["ruby".to_string(), "rb".to_string()];
|
||||||
assert_eq!(normalize_runtime_name("builtin"), "native");
|
assert!(runtime_aliases_contain(&aliases, "ruby"));
|
||||||
assert_eq!(normalize_runtime_name("standalone"), "native");
|
assert!(runtime_aliases_contain(&aliases, "rb"));
|
||||||
|
assert!(!runtime_aliases_contain(&aliases, "python"));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_normalize_runtime_name_passthrough() {
|
fn test_runtime_aliases_contain_case_insensitive() {
|
||||||
assert_eq!(normalize_runtime_name("custom_runtime"), "custom_runtime");
|
let aliases = vec!["ruby".to_string(), "rb".to_string()];
|
||||||
}
|
assert!(runtime_aliases_contain(&aliases, "Ruby"));
|
||||||
|
assert!(runtime_aliases_contain(&aliases, "RB"));
|
||||||
#[test]
|
|
||||||
fn test_runtime_matches_filter() {
|
|
||||||
// Node.js DB name lowercased vs worker filter "node"
|
|
||||||
assert!(runtime_matches_filter("node.js", "node"));
|
|
||||||
assert!(runtime_matches_filter("node", "nodejs"));
|
|
||||||
assert!(runtime_matches_filter("nodejs", "node.js"));
|
|
||||||
// Exact match
|
|
||||||
assert!(runtime_matches_filter("shell", "shell"));
|
|
||||||
// No match
|
|
||||||
assert!(!runtime_matches_filter("python", "node"));
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_runtime_matches_filter_case_insensitive() {
|
|
||||||
// Database stores capitalized names (e.g., "Node.js", "Python")
|
|
||||||
// Worker capabilities store lowercase (e.g., "node", "python")
|
|
||||||
assert!(runtime_matches_filter("Node.js", "node"));
|
|
||||||
assert!(runtime_matches_filter("node", "Node.js"));
|
|
||||||
assert!(runtime_matches_filter("Python", "python"));
|
|
||||||
assert!(runtime_matches_filter("python", "Python"));
|
|
||||||
assert!(runtime_matches_filter("Shell", "shell"));
|
|
||||||
assert!(runtime_matches_filter("NODEJS", "node"));
|
|
||||||
assert!(!runtime_matches_filter("Python", "node"));
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_runtime_in_filter() {
|
|
||||||
let filter = vec!["shell".to_string(), "node".to_string()];
|
|
||||||
assert!(runtime_in_filter("shell", &filter));
|
|
||||||
assert!(runtime_in_filter("node.js", &filter));
|
|
||||||
assert!(runtime_in_filter("nodejs", &filter));
|
|
||||||
assert!(!runtime_in_filter("python", &filter));
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
|
|||||||
@@ -379,7 +379,6 @@ impl WorkflowRegistrar {
|
|||||||
out_schema: workflow.output.clone(),
|
out_schema: workflow.output.clone(),
|
||||||
definition,
|
definition,
|
||||||
tags: workflow.tags.clone(),
|
tags: workflow.tags.clone(),
|
||||||
enabled: true,
|
|
||||||
};
|
};
|
||||||
|
|
||||||
let created = WorkflowDefinitionRepository::create(&self.pool, input).await?;
|
let created = WorkflowDefinitionRepository::create(&self.pool, input).await?;
|
||||||
@@ -411,7 +410,6 @@ impl WorkflowRegistrar {
|
|||||||
out_schema: workflow.output.clone(),
|
out_schema: workflow.output.clone(),
|
||||||
definition: Some(definition),
|
definition: Some(definition),
|
||||||
tags: Some(workflow.tags.clone()),
|
tags: Some(workflow.tags.clone()),
|
||||||
enabled: Some(true),
|
|
||||||
};
|
};
|
||||||
|
|
||||||
let updated = WorkflowDefinitionRepository::update(&self.pool, *workflow_id, input).await?;
|
let updated = WorkflowDefinitionRepository::update(&self.pool, *workflow_id, input).await?;
|
||||||
|
|||||||
@@ -42,6 +42,7 @@ async fn test_create_execution_basic() {
|
|||||||
parent: None,
|
parent: None,
|
||||||
enforcement: None,
|
enforcement: None,
|
||||||
executor: None,
|
executor: None,
|
||||||
|
worker: None,
|
||||||
status: ExecutionStatus::Requested,
|
status: ExecutionStatus::Requested,
|
||||||
result: None,
|
result: None,
|
||||||
workflow_task: None,
|
workflow_task: None,
|
||||||
@@ -76,6 +77,7 @@ async fn test_create_execution_without_action() {
|
|||||||
parent: None,
|
parent: None,
|
||||||
enforcement: None,
|
enforcement: None,
|
||||||
executor: None,
|
executor: None,
|
||||||
|
worker: None,
|
||||||
status: ExecutionStatus::Requested,
|
status: ExecutionStatus::Requested,
|
||||||
result: None,
|
result: None,
|
||||||
workflow_task: None,
|
workflow_task: None,
|
||||||
@@ -110,6 +112,7 @@ async fn test_create_execution_with_all_fields() {
|
|||||||
parent: None,
|
parent: None,
|
||||||
enforcement: None,
|
enforcement: None,
|
||||||
executor: None, // Don't reference non-existent identity
|
executor: None, // Don't reference non-existent identity
|
||||||
|
worker: None,
|
||||||
status: ExecutionStatus::Scheduled,
|
status: ExecutionStatus::Scheduled,
|
||||||
result: Some(json!({"status": "ok"})),
|
result: Some(json!({"status": "ok"})),
|
||||||
workflow_task: None,
|
workflow_task: None,
|
||||||
@@ -146,6 +149,7 @@ async fn test_create_execution_with_parent() {
|
|||||||
parent: None,
|
parent: None,
|
||||||
enforcement: None,
|
enforcement: None,
|
||||||
executor: None,
|
executor: None,
|
||||||
|
worker: None,
|
||||||
status: ExecutionStatus::Running,
|
status: ExecutionStatus::Running,
|
||||||
result: None,
|
result: None,
|
||||||
workflow_task: None,
|
workflow_task: None,
|
||||||
@@ -164,6 +168,7 @@ async fn test_create_execution_with_parent() {
|
|||||||
parent: Some(parent.id),
|
parent: Some(parent.id),
|
||||||
enforcement: None,
|
enforcement: None,
|
||||||
executor: None,
|
executor: None,
|
||||||
|
worker: None,
|
||||||
status: ExecutionStatus::Requested,
|
status: ExecutionStatus::Requested,
|
||||||
result: None,
|
result: None,
|
||||||
workflow_task: None,
|
workflow_task: None,
|
||||||
@@ -203,6 +208,7 @@ async fn test_find_execution_by_id() {
|
|||||||
parent: None,
|
parent: None,
|
||||||
enforcement: None,
|
enforcement: None,
|
||||||
executor: None,
|
executor: None,
|
||||||
|
worker: None,
|
||||||
status: ExecutionStatus::Requested,
|
status: ExecutionStatus::Requested,
|
||||||
result: None,
|
result: None,
|
||||||
workflow_task: None,
|
workflow_task: None,
|
||||||
@@ -257,6 +263,7 @@ async fn test_list_executions() {
|
|||||||
parent: None,
|
parent: None,
|
||||||
enforcement: None,
|
enforcement: None,
|
||||||
executor: None,
|
executor: None,
|
||||||
|
worker: None,
|
||||||
status: ExecutionStatus::Requested,
|
status: ExecutionStatus::Requested,
|
||||||
result: None,
|
result: None,
|
||||||
workflow_task: None,
|
workflow_task: None,
|
||||||
@@ -303,6 +310,7 @@ async fn test_list_executions_ordered_by_created_desc() {
|
|||||||
parent: None,
|
parent: None,
|
||||||
enforcement: None,
|
enforcement: None,
|
||||||
executor: None,
|
executor: None,
|
||||||
|
worker: None,
|
||||||
status: ExecutionStatus::Requested,
|
status: ExecutionStatus::Requested,
|
||||||
result: None,
|
result: None,
|
||||||
workflow_task: None,
|
workflow_task: None,
|
||||||
@@ -354,6 +362,7 @@ async fn test_update_execution_status() {
|
|||||||
parent: None,
|
parent: None,
|
||||||
enforcement: None,
|
enforcement: None,
|
||||||
executor: None,
|
executor: None,
|
||||||
|
worker: None,
|
||||||
status: ExecutionStatus::Requested,
|
status: ExecutionStatus::Requested,
|
||||||
result: None,
|
result: None,
|
||||||
workflow_task: None,
|
workflow_task: None,
|
||||||
@@ -399,6 +408,7 @@ async fn test_update_execution_result() {
|
|||||||
parent: None,
|
parent: None,
|
||||||
enforcement: None,
|
enforcement: None,
|
||||||
executor: None,
|
executor: None,
|
||||||
|
worker: None,
|
||||||
status: ExecutionStatus::Running,
|
status: ExecutionStatus::Running,
|
||||||
result: None,
|
result: None,
|
||||||
workflow_task: None,
|
workflow_task: None,
|
||||||
@@ -445,6 +455,7 @@ async fn test_update_execution_executor() {
|
|||||||
parent: None,
|
parent: None,
|
||||||
enforcement: None,
|
enforcement: None,
|
||||||
executor: None,
|
executor: None,
|
||||||
|
worker: None,
|
||||||
status: ExecutionStatus::Requested,
|
status: ExecutionStatus::Requested,
|
||||||
result: None,
|
result: None,
|
||||||
workflow_task: None,
|
workflow_task: None,
|
||||||
@@ -489,6 +500,7 @@ async fn test_update_execution_status_transitions() {
|
|||||||
parent: None,
|
parent: None,
|
||||||
enforcement: None,
|
enforcement: None,
|
||||||
executor: None,
|
executor: None,
|
||||||
|
worker: None,
|
||||||
status: ExecutionStatus::Requested,
|
status: ExecutionStatus::Requested,
|
||||||
result: None,
|
result: None,
|
||||||
workflow_task: None,
|
workflow_task: None,
|
||||||
@@ -580,6 +592,7 @@ async fn test_update_execution_failed_status() {
|
|||||||
parent: None,
|
parent: None,
|
||||||
enforcement: None,
|
enforcement: None,
|
||||||
executor: None,
|
executor: None,
|
||||||
|
worker: None,
|
||||||
status: ExecutionStatus::Running,
|
status: ExecutionStatus::Running,
|
||||||
result: None,
|
result: None,
|
||||||
workflow_task: None,
|
workflow_task: None,
|
||||||
@@ -625,6 +638,7 @@ async fn test_update_execution_no_changes() {
|
|||||||
parent: None,
|
parent: None,
|
||||||
enforcement: None,
|
enforcement: None,
|
||||||
executor: None,
|
executor: None,
|
||||||
|
worker: None,
|
||||||
status: ExecutionStatus::Requested,
|
status: ExecutionStatus::Requested,
|
||||||
result: None,
|
result: None,
|
||||||
workflow_task: None,
|
workflow_task: None,
|
||||||
@@ -669,6 +683,7 @@ async fn test_delete_execution() {
|
|||||||
parent: None,
|
parent: None,
|
||||||
enforcement: None,
|
enforcement: None,
|
||||||
executor: None,
|
executor: None,
|
||||||
|
worker: None,
|
||||||
status: ExecutionStatus::Completed,
|
status: ExecutionStatus::Completed,
|
||||||
result: None,
|
result: None,
|
||||||
workflow_task: None,
|
workflow_task: None,
|
||||||
@@ -736,6 +751,7 @@ async fn test_find_executions_by_status() {
|
|||||||
parent: None,
|
parent: None,
|
||||||
enforcement: None,
|
enforcement: None,
|
||||||
executor: None,
|
executor: None,
|
||||||
|
worker: None,
|
||||||
status: *status,
|
status: *status,
|
||||||
result: None,
|
result: None,
|
||||||
workflow_task: None,
|
workflow_task: None,
|
||||||
@@ -783,6 +799,7 @@ async fn test_find_executions_by_enforcement() {
|
|||||||
parent: None,
|
parent: None,
|
||||||
enforcement: None,
|
enforcement: None,
|
||||||
executor: None,
|
executor: None,
|
||||||
|
worker: None,
|
||||||
status: ExecutionStatus::Requested,
|
status: ExecutionStatus::Requested,
|
||||||
result: None,
|
result: None,
|
||||||
workflow_task: None,
|
workflow_task: None,
|
||||||
@@ -801,6 +818,7 @@ async fn test_find_executions_by_enforcement() {
|
|||||||
parent: None,
|
parent: None,
|
||||||
enforcement: None, // Can't reference non-existent enforcement
|
enforcement: None, // Can't reference non-existent enforcement
|
||||||
executor: None,
|
executor: None,
|
||||||
|
worker: None,
|
||||||
status: ExecutionStatus::Requested,
|
status: ExecutionStatus::Requested,
|
||||||
result: None,
|
result: None,
|
||||||
workflow_task: None,
|
workflow_task: None,
|
||||||
@@ -845,6 +863,7 @@ async fn test_parent_child_execution_hierarchy() {
|
|||||||
parent: None,
|
parent: None,
|
||||||
enforcement: None,
|
enforcement: None,
|
||||||
executor: None,
|
executor: None,
|
||||||
|
worker: None,
|
||||||
status: ExecutionStatus::Running,
|
status: ExecutionStatus::Running,
|
||||||
result: None,
|
result: None,
|
||||||
workflow_task: None,
|
workflow_task: None,
|
||||||
@@ -865,6 +884,7 @@ async fn test_parent_child_execution_hierarchy() {
|
|||||||
parent: Some(parent.id),
|
parent: Some(parent.id),
|
||||||
enforcement: None,
|
enforcement: None,
|
||||||
executor: None,
|
executor: None,
|
||||||
|
worker: None,
|
||||||
status: ExecutionStatus::Requested,
|
status: ExecutionStatus::Requested,
|
||||||
result: None,
|
result: None,
|
||||||
workflow_task: None,
|
workflow_task: None,
|
||||||
@@ -909,6 +929,7 @@ async fn test_nested_execution_hierarchy() {
|
|||||||
parent: None,
|
parent: None,
|
||||||
enforcement: None,
|
enforcement: None,
|
||||||
executor: None,
|
executor: None,
|
||||||
|
worker: None,
|
||||||
status: ExecutionStatus::Running,
|
status: ExecutionStatus::Running,
|
||||||
result: None,
|
result: None,
|
||||||
workflow_task: None,
|
workflow_task: None,
|
||||||
@@ -927,6 +948,7 @@ async fn test_nested_execution_hierarchy() {
|
|||||||
parent: Some(grandparent.id),
|
parent: Some(grandparent.id),
|
||||||
enforcement: None,
|
enforcement: None,
|
||||||
executor: None,
|
executor: None,
|
||||||
|
worker: None,
|
||||||
status: ExecutionStatus::Running,
|
status: ExecutionStatus::Running,
|
||||||
result: None,
|
result: None,
|
||||||
workflow_task: None,
|
workflow_task: None,
|
||||||
@@ -945,6 +967,7 @@ async fn test_nested_execution_hierarchy() {
|
|||||||
parent: Some(parent.id),
|
parent: Some(parent.id),
|
||||||
enforcement: None,
|
enforcement: None,
|
||||||
executor: None,
|
executor: None,
|
||||||
|
worker: None,
|
||||||
status: ExecutionStatus::Requested,
|
status: ExecutionStatus::Requested,
|
||||||
result: None,
|
result: None,
|
||||||
workflow_task: None,
|
workflow_task: None,
|
||||||
@@ -987,6 +1010,7 @@ async fn test_execution_timestamps() {
|
|||||||
parent: None,
|
parent: None,
|
||||||
enforcement: None,
|
enforcement: None,
|
||||||
executor: None,
|
executor: None,
|
||||||
|
worker: None,
|
||||||
status: ExecutionStatus::Requested,
|
status: ExecutionStatus::Requested,
|
||||||
result: None,
|
result: None,
|
||||||
workflow_task: None,
|
workflow_task: None,
|
||||||
@@ -1058,6 +1082,7 @@ async fn test_execution_config_json() {
|
|||||||
parent: None,
|
parent: None,
|
||||||
enforcement: None,
|
enforcement: None,
|
||||||
executor: None,
|
executor: None,
|
||||||
|
worker: None,
|
||||||
status: ExecutionStatus::Requested,
|
status: ExecutionStatus::Requested,
|
||||||
result: None,
|
result: None,
|
||||||
workflow_task: None,
|
workflow_task: None,
|
||||||
@@ -1091,6 +1116,7 @@ async fn test_execution_result_json() {
|
|||||||
parent: None,
|
parent: None,
|
||||||
enforcement: None,
|
enforcement: None,
|
||||||
executor: None,
|
executor: None,
|
||||||
|
worker: None,
|
||||||
status: ExecutionStatus::Running,
|
status: ExecutionStatus::Running,
|
||||||
result: None,
|
result: None,
|
||||||
workflow_task: None,
|
workflow_task: None,
|
||||||
|
|||||||
@@ -961,9 +961,12 @@ impl RuntimeFixture {
|
|||||||
pack_ref: self.pack_ref,
|
pack_ref: self.pack_ref,
|
||||||
description: self.description,
|
description: self.description,
|
||||||
name: self.name,
|
name: self.name,
|
||||||
|
aliases: vec![],
|
||||||
distributions: self.distributions,
|
distributions: self.distributions,
|
||||||
installation: self.installation,
|
installation: self.installation,
|
||||||
execution_config: self.execution_config,
|
execution_config: self.execution_config,
|
||||||
|
auto_detected: false,
|
||||||
|
detection_config: serde_json::json!({}),
|
||||||
};
|
};
|
||||||
|
|
||||||
RuntimeRepository::create(pool, input).await
|
RuntimeRepository::create(pool, input).await
|
||||||
|
|||||||
@@ -479,3 +479,173 @@ async fn test_identity_login_case_sensitive() {
|
|||||||
.unwrap();
|
.unwrap();
|
||||||
assert_eq!(found_upper.id, identity2.id);
|
assert_eq!(found_upper.id, identity2.id);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// ── LDAP-specific tests ──────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
#[ignore = "integration test — requires database"]
|
||||||
|
async fn test_find_by_ldap_dn_found() {
|
||||||
|
let pool = create_test_pool().await.unwrap();
|
||||||
|
|
||||||
|
let login = unique_pack_ref("ldap_found");
|
||||||
|
let server_url = "ldap://ldap.example.com";
|
||||||
|
let dn = "uid=jdoe,ou=users,dc=example,dc=com";
|
||||||
|
|
||||||
|
let input = CreateIdentityInput {
|
||||||
|
login: login.clone(),
|
||||||
|
display_name: Some("LDAP User".to_string()),
|
||||||
|
attributes: json!({
|
||||||
|
"ldap": {
|
||||||
|
"server_url": server_url,
|
||||||
|
"dn": dn,
|
||||||
|
"login": "jdoe",
|
||||||
|
"email": "jdoe@example.com"
|
||||||
|
}
|
||||||
|
}),
|
||||||
|
password_hash: None,
|
||||||
|
};
|
||||||
|
|
||||||
|
let created = IdentityRepository::create(&pool, input).await.unwrap();
|
||||||
|
|
||||||
|
let found = IdentityRepository::find_by_ldap_dn(&pool, server_url, dn)
|
||||||
|
.await
|
||||||
|
.unwrap()
|
||||||
|
.expect("LDAP identity not found");
|
||||||
|
|
||||||
|
assert_eq!(found.id, created.id);
|
||||||
|
assert_eq!(found.login, login);
|
||||||
|
assert_eq!(found.attributes["ldap"]["server_url"], server_url);
|
||||||
|
assert_eq!(found.attributes["ldap"]["dn"], dn);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
#[ignore = "integration test — requires database"]
|
||||||
|
async fn test_find_by_ldap_dn_not_found() {
|
||||||
|
let pool = create_test_pool().await.unwrap();
|
||||||
|
|
||||||
|
let found = IdentityRepository::find_by_ldap_dn(
|
||||||
|
&pool,
|
||||||
|
"ldap://nonexistent.example.com",
|
||||||
|
"uid=nobody,ou=users,dc=example,dc=com",
|
||||||
|
)
|
||||||
|
.await
|
||||||
|
.unwrap();
|
||||||
|
|
||||||
|
assert!(found.is_none());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
#[ignore = "integration test — requires database"]
|
||||||
|
async fn test_find_by_ldap_dn_wrong_server() {
|
||||||
|
let pool = create_test_pool().await.unwrap();
|
||||||
|
|
||||||
|
let dn = "uid=jdoe,ou=users,dc=example,dc=com";
|
||||||
|
|
||||||
|
let input = CreateIdentityInput {
|
||||||
|
login: unique_pack_ref("ldap_wrong_srv"),
|
||||||
|
display_name: Some("Server A User".to_string()),
|
||||||
|
attributes: json!({
|
||||||
|
"ldap": {
|
||||||
|
"server_url": "ldap://server-a.example.com",
|
||||||
|
"dn": dn,
|
||||||
|
"login": "jdoe"
|
||||||
|
}
|
||||||
|
}),
|
||||||
|
password_hash: None,
|
||||||
|
};
|
||||||
|
|
||||||
|
IdentityRepository::create(&pool, input).await.unwrap();
|
||||||
|
|
||||||
|
// Search with same DN but different server — composite key must match both
|
||||||
|
let found = IdentityRepository::find_by_ldap_dn(&pool, "ldap://server-b.example.com", dn)
|
||||||
|
.await
|
||||||
|
.unwrap();
|
||||||
|
|
||||||
|
assert!(found.is_none());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
#[ignore = "integration test — requires database"]
|
||||||
|
async fn test_find_by_ldap_dn_multiple_identities_different_servers() {
|
||||||
|
let pool = create_test_pool().await.unwrap();
|
||||||
|
|
||||||
|
let dn = "uid=shared,ou=users,dc=example,dc=com";
|
||||||
|
let server_a = "ldap://multi-a.example.com";
|
||||||
|
let server_b = "ldap://multi-b.example.com";
|
||||||
|
|
||||||
|
let input_a = CreateIdentityInput {
|
||||||
|
login: unique_pack_ref("ldap_multi_a"),
|
||||||
|
display_name: Some("User on Server A".to_string()),
|
||||||
|
attributes: json!({
|
||||||
|
"ldap": {
|
||||||
|
"server_url": server_a,
|
||||||
|
"dn": dn,
|
||||||
|
"login": "shared_a"
|
||||||
|
}
|
||||||
|
}),
|
||||||
|
password_hash: None,
|
||||||
|
};
|
||||||
|
let identity_a = IdentityRepository::create(&pool, input_a).await.unwrap();
|
||||||
|
|
||||||
|
let input_b = CreateIdentityInput {
|
||||||
|
login: unique_pack_ref("ldap_multi_b"),
|
||||||
|
display_name: Some("User on Server B".to_string()),
|
||||||
|
attributes: json!({
|
||||||
|
"ldap": {
|
||||||
|
"server_url": server_b,
|
||||||
|
"dn": dn,
|
||||||
|
"login": "shared_b"
|
||||||
|
}
|
||||||
|
}),
|
||||||
|
password_hash: None,
|
||||||
|
};
|
||||||
|
let identity_b = IdentityRepository::create(&pool, input_b).await.unwrap();
|
||||||
|
|
||||||
|
// Query server A — should return identity_a
|
||||||
|
let found_a = IdentityRepository::find_by_ldap_dn(&pool, server_a, dn)
|
||||||
|
.await
|
||||||
|
.unwrap()
|
||||||
|
.expect("Identity for server A not found");
|
||||||
|
assert_eq!(found_a.id, identity_a.id);
|
||||||
|
assert_eq!(found_a.attributes["ldap"]["server_url"], server_a);
|
||||||
|
|
||||||
|
// Query server B — should return identity_b
|
||||||
|
let found_b = IdentityRepository::find_by_ldap_dn(&pool, server_b, dn)
|
||||||
|
.await
|
||||||
|
.unwrap()
|
||||||
|
.expect("Identity for server B not found");
|
||||||
|
assert_eq!(found_b.id, identity_b.id);
|
||||||
|
assert_eq!(found_b.attributes["ldap"]["server_url"], server_b);
|
||||||
|
|
||||||
|
// Confirm they are distinct identities
|
||||||
|
assert_ne!(found_a.id, found_b.id);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
#[ignore = "integration test — requires database"]
|
||||||
|
async fn test_find_by_ldap_dn_ignores_oidc_attributes() {
|
||||||
|
let pool = create_test_pool().await.unwrap();
|
||||||
|
|
||||||
|
// Create an identity with OIDC attributes (no "ldap" key)
|
||||||
|
let input = CreateIdentityInput {
|
||||||
|
login: unique_pack_ref("ldap_oidc"),
|
||||||
|
display_name: Some("OIDC User".to_string()),
|
||||||
|
attributes: json!({
|
||||||
|
"oidc": {
|
||||||
|
"issuer": "https://auth.example.com",
|
||||||
|
"subject": "abc123",
|
||||||
|
"email": "oidc@example.com"
|
||||||
|
}
|
||||||
|
}),
|
||||||
|
password_hash: None,
|
||||||
|
};
|
||||||
|
|
||||||
|
IdentityRepository::create(&pool, input).await.unwrap();
|
||||||
|
|
||||||
|
// Searching by LDAP DN should not match OIDC-only identities
|
||||||
|
let found = IdentityRepository::find_by_ldap_dn(&pool, "https://auth.example.com", "abc123")
|
||||||
|
.await
|
||||||
|
.unwrap();
|
||||||
|
|
||||||
|
assert!(found.is_none());
|
||||||
|
}
|
||||||
|
|||||||
@@ -49,6 +49,7 @@ async fn test_create_inquiry_minimal() {
|
|||||||
parent: None,
|
parent: None,
|
||||||
enforcement: None,
|
enforcement: None,
|
||||||
executor: None,
|
executor: None,
|
||||||
|
worker: None,
|
||||||
status: attune_common::models::enums::ExecutionStatus::Requested,
|
status: attune_common::models::enums::ExecutionStatus::Requested,
|
||||||
result: None,
|
result: None,
|
||||||
workflow_task: None,
|
workflow_task: None,
|
||||||
@@ -109,6 +110,7 @@ async fn test_create_inquiry_with_response_schema() {
|
|||||||
parent: None,
|
parent: None,
|
||||||
enforcement: None,
|
enforcement: None,
|
||||||
executor: None,
|
executor: None,
|
||||||
|
worker: None,
|
||||||
status: attune_common::models::enums::ExecutionStatus::Requested,
|
status: attune_common::models::enums::ExecutionStatus::Requested,
|
||||||
result: None,
|
result: None,
|
||||||
workflow_task: None,
|
workflow_task: None,
|
||||||
@@ -167,6 +169,7 @@ async fn test_create_inquiry_with_timeout() {
|
|||||||
parent: None,
|
parent: None,
|
||||||
enforcement: None,
|
enforcement: None,
|
||||||
executor: None,
|
executor: None,
|
||||||
|
worker: None,
|
||||||
status: attune_common::models::enums::ExecutionStatus::Requested,
|
status: attune_common::models::enums::ExecutionStatus::Requested,
|
||||||
result: None,
|
result: None,
|
||||||
workflow_task: None,
|
workflow_task: None,
|
||||||
@@ -221,6 +224,7 @@ async fn test_create_inquiry_with_assigned_user() {
|
|||||||
parent: None,
|
parent: None,
|
||||||
enforcement: None,
|
enforcement: None,
|
||||||
executor: None,
|
executor: None,
|
||||||
|
worker: None,
|
||||||
status: attune_common::models::enums::ExecutionStatus::Requested,
|
status: attune_common::models::enums::ExecutionStatus::Requested,
|
||||||
result: None,
|
result: None,
|
||||||
workflow_task: None,
|
workflow_task: None,
|
||||||
@@ -310,6 +314,7 @@ async fn test_find_inquiry_by_id() {
|
|||||||
parent: None,
|
parent: None,
|
||||||
enforcement: None,
|
enforcement: None,
|
||||||
executor: None,
|
executor: None,
|
||||||
|
worker: None,
|
||||||
status: attune_common::models::enums::ExecutionStatus::Requested,
|
status: attune_common::models::enums::ExecutionStatus::Requested,
|
||||||
result: None,
|
result: None,
|
||||||
workflow_task: None,
|
workflow_task: None,
|
||||||
@@ -372,6 +377,7 @@ async fn test_get_inquiry_by_id() {
|
|||||||
parent: None,
|
parent: None,
|
||||||
enforcement: None,
|
enforcement: None,
|
||||||
executor: None,
|
executor: None,
|
||||||
|
worker: None,
|
||||||
status: attune_common::models::enums::ExecutionStatus::Requested,
|
status: attune_common::models::enums::ExecutionStatus::Requested,
|
||||||
result: None,
|
result: None,
|
||||||
workflow_task: None,
|
workflow_task: None,
|
||||||
@@ -443,6 +449,7 @@ async fn test_list_inquiries() {
|
|||||||
parent: None,
|
parent: None,
|
||||||
enforcement: None,
|
enforcement: None,
|
||||||
executor: None,
|
executor: None,
|
||||||
|
worker: None,
|
||||||
status: attune_common::models::enums::ExecutionStatus::Requested,
|
status: attune_common::models::enums::ExecutionStatus::Requested,
|
||||||
result: None,
|
result: None,
|
||||||
workflow_task: None,
|
workflow_task: None,
|
||||||
@@ -504,6 +511,7 @@ async fn test_update_inquiry_status() {
|
|||||||
parent: None,
|
parent: None,
|
||||||
enforcement: None,
|
enforcement: None,
|
||||||
executor: None,
|
executor: None,
|
||||||
|
worker: None,
|
||||||
status: attune_common::models::enums::ExecutionStatus::Requested,
|
status: attune_common::models::enums::ExecutionStatus::Requested,
|
||||||
result: None,
|
result: None,
|
||||||
workflow_task: None,
|
workflow_task: None,
|
||||||
@@ -560,6 +568,7 @@ async fn test_update_inquiry_status_transitions() {
|
|||||||
parent: None,
|
parent: None,
|
||||||
enforcement: None,
|
enforcement: None,
|
||||||
executor: None,
|
executor: None,
|
||||||
|
worker: None,
|
||||||
status: attune_common::models::enums::ExecutionStatus::Requested,
|
status: attune_common::models::enums::ExecutionStatus::Requested,
|
||||||
result: None,
|
result: None,
|
||||||
workflow_task: None,
|
workflow_task: None,
|
||||||
@@ -645,6 +654,7 @@ async fn test_update_inquiry_response() {
|
|||||||
parent: None,
|
parent: None,
|
||||||
enforcement: None,
|
enforcement: None,
|
||||||
executor: None,
|
executor: None,
|
||||||
|
worker: None,
|
||||||
status: attune_common::models::enums::ExecutionStatus::Requested,
|
status: attune_common::models::enums::ExecutionStatus::Requested,
|
||||||
result: None,
|
result: None,
|
||||||
workflow_task: None,
|
workflow_task: None,
|
||||||
@@ -703,6 +713,7 @@ async fn test_update_inquiry_with_response_and_status() {
|
|||||||
parent: None,
|
parent: None,
|
||||||
enforcement: None,
|
enforcement: None,
|
||||||
executor: None,
|
executor: None,
|
||||||
|
worker: None,
|
||||||
status: attune_common::models::enums::ExecutionStatus::Requested,
|
status: attune_common::models::enums::ExecutionStatus::Requested,
|
||||||
result: None,
|
result: None,
|
||||||
workflow_task: None,
|
workflow_task: None,
|
||||||
@@ -761,6 +772,7 @@ async fn test_update_inquiry_assignment() {
|
|||||||
parent: None,
|
parent: None,
|
||||||
enforcement: None,
|
enforcement: None,
|
||||||
executor: None,
|
executor: None,
|
||||||
|
worker: None,
|
||||||
status: attune_common::models::enums::ExecutionStatus::Requested,
|
status: attune_common::models::enums::ExecutionStatus::Requested,
|
||||||
result: None,
|
result: None,
|
||||||
workflow_task: None,
|
workflow_task: None,
|
||||||
@@ -828,6 +840,7 @@ async fn test_update_inquiry_no_changes() {
|
|||||||
parent: None,
|
parent: None,
|
||||||
enforcement: None,
|
enforcement: None,
|
||||||
executor: None,
|
executor: None,
|
||||||
|
worker: None,
|
||||||
status: attune_common::models::enums::ExecutionStatus::Requested,
|
status: attune_common::models::enums::ExecutionStatus::Requested,
|
||||||
result: None,
|
result: None,
|
||||||
workflow_task: None,
|
workflow_task: None,
|
||||||
@@ -905,6 +918,7 @@ async fn test_delete_inquiry() {
|
|||||||
parent: None,
|
parent: None,
|
||||||
enforcement: None,
|
enforcement: None,
|
||||||
executor: None,
|
executor: None,
|
||||||
|
worker: None,
|
||||||
status: attune_common::models::enums::ExecutionStatus::Requested,
|
status: attune_common::models::enums::ExecutionStatus::Requested,
|
||||||
result: None,
|
result: None,
|
||||||
workflow_task: None,
|
workflow_task: None,
|
||||||
@@ -965,6 +979,7 @@ async fn test_delete_execution_cascades_to_inquiries() {
|
|||||||
parent: None,
|
parent: None,
|
||||||
enforcement: None,
|
enforcement: None,
|
||||||
executor: None,
|
executor: None,
|
||||||
|
worker: None,
|
||||||
status: attune_common::models::enums::ExecutionStatus::Requested,
|
status: attune_common::models::enums::ExecutionStatus::Requested,
|
||||||
result: None,
|
result: None,
|
||||||
workflow_task: None,
|
workflow_task: None,
|
||||||
@@ -1032,6 +1047,7 @@ async fn test_find_inquiries_by_status() {
|
|||||||
parent: None,
|
parent: None,
|
||||||
enforcement: None,
|
enforcement: None,
|
||||||
executor: None,
|
executor: None,
|
||||||
|
worker: None,
|
||||||
status: attune_common::models::enums::ExecutionStatus::Requested,
|
status: attune_common::models::enums::ExecutionStatus::Requested,
|
||||||
result: None,
|
result: None,
|
||||||
workflow_task: None,
|
workflow_task: None,
|
||||||
@@ -1111,6 +1127,7 @@ async fn test_find_inquiries_by_execution() {
|
|||||||
parent: None,
|
parent: None,
|
||||||
enforcement: None,
|
enforcement: None,
|
||||||
executor: None,
|
executor: None,
|
||||||
|
worker: None,
|
||||||
status: attune_common::models::enums::ExecutionStatus::Requested,
|
status: attune_common::models::enums::ExecutionStatus::Requested,
|
||||||
result: None,
|
result: None,
|
||||||
workflow_task: None,
|
workflow_task: None,
|
||||||
@@ -1129,6 +1146,7 @@ async fn test_find_inquiries_by_execution() {
|
|||||||
parent: None,
|
parent: None,
|
||||||
enforcement: None,
|
enforcement: None,
|
||||||
executor: None,
|
executor: None,
|
||||||
|
worker: None,
|
||||||
status: attune_common::models::enums::ExecutionStatus::Requested,
|
status: attune_common::models::enums::ExecutionStatus::Requested,
|
||||||
result: None,
|
result: None,
|
||||||
workflow_task: None,
|
workflow_task: None,
|
||||||
@@ -1193,6 +1211,7 @@ async fn test_inquiry_timestamps_auto_managed() {
|
|||||||
parent: None,
|
parent: None,
|
||||||
enforcement: None,
|
enforcement: None,
|
||||||
executor: None,
|
executor: None,
|
||||||
|
worker: None,
|
||||||
status: attune_common::models::enums::ExecutionStatus::Requested,
|
status: attune_common::models::enums::ExecutionStatus::Requested,
|
||||||
result: None,
|
result: None,
|
||||||
workflow_task: None,
|
workflow_task: None,
|
||||||
@@ -1260,6 +1279,7 @@ async fn test_inquiry_complex_response_schema() {
|
|||||||
parent: None,
|
parent: None,
|
||||||
enforcement: None,
|
enforcement: None,
|
||||||
executor: None,
|
executor: None,
|
||||||
|
worker: None,
|
||||||
status: attune_common::models::enums::ExecutionStatus::Requested,
|
status: attune_common::models::enums::ExecutionStatus::Requested,
|
||||||
result: None,
|
result: None,
|
||||||
workflow_task: None,
|
workflow_task: None,
|
||||||
|
|||||||
@@ -6,7 +6,9 @@
|
|||||||
mod helpers;
|
mod helpers;
|
||||||
|
|
||||||
use attune_common::repositories::pack::{self, PackRepository};
|
use attune_common::repositories::pack::{self, PackRepository};
|
||||||
use attune_common::repositories::{Create, Delete, FindById, FindByRef, List, Pagination, Update};
|
use attune_common::repositories::{
|
||||||
|
Create, Delete, FindById, FindByRef, List, Pagination, Patch, Update,
|
||||||
|
};
|
||||||
use attune_common::Error;
|
use attune_common::Error;
|
||||||
use helpers::*;
|
use helpers::*;
|
||||||
use serde_json::json;
|
use serde_json::json;
|
||||||
@@ -214,7 +216,7 @@ async fn test_update_pack() {
|
|||||||
let update_input = pack::UpdatePackInput {
|
let update_input = pack::UpdatePackInput {
|
||||||
label: Some("Updated Label".to_string()),
|
label: Some("Updated Label".to_string()),
|
||||||
version: Some("2.0.0".to_string()),
|
version: Some("2.0.0".to_string()),
|
||||||
description: Some("Updated description".to_string()),
|
description: Some(Patch::Set("Updated description".to_string())),
|
||||||
..Default::default()
|
..Default::default()
|
||||||
};
|
};
|
||||||
|
|
||||||
|
|||||||
@@ -9,7 +9,7 @@ use attune_common::models::enums::{
|
|||||||
use attune_common::repositories::artifact::{
|
use attune_common::repositories::artifact::{
|
||||||
ArtifactRepository, CreateArtifactInput, UpdateArtifactInput,
|
ArtifactRepository, CreateArtifactInput, UpdateArtifactInput,
|
||||||
};
|
};
|
||||||
use attune_common::repositories::{Create, Delete, FindById, FindByRef, List, Update};
|
use attune_common::repositories::{Create, Delete, FindById, FindByRef, List, Patch, Update};
|
||||||
use attune_common::Error;
|
use attune_common::Error;
|
||||||
use sqlx::PgPool;
|
use sqlx::PgPool;
|
||||||
use std::collections::hash_map::DefaultHasher;
|
use std::collections::hash_map::DefaultHasher;
|
||||||
@@ -267,11 +267,11 @@ async fn test_update_artifact_all_fields() {
|
|||||||
visibility: Some(ArtifactVisibility::Public),
|
visibility: Some(ArtifactVisibility::Public),
|
||||||
retention_policy: Some(RetentionPolicyType::Days),
|
retention_policy: Some(RetentionPolicyType::Days),
|
||||||
retention_limit: Some(30),
|
retention_limit: Some(30),
|
||||||
name: Some("Updated Name".to_string()),
|
name: Some(Patch::Set("Updated Name".to_string())),
|
||||||
description: Some("Updated description".to_string()),
|
description: Some(Patch::Set("Updated description".to_string())),
|
||||||
content_type: Some("image/png".to_string()),
|
content_type: Some(Patch::Set("image/png".to_string())),
|
||||||
size_bytes: Some(12345),
|
size_bytes: Some(12345),
|
||||||
data: Some(serde_json::json!({"key": "value"})),
|
data: Some(Patch::Set(serde_json::json!({"key": "value"}))),
|
||||||
execution: None,
|
execution: None,
|
||||||
};
|
};
|
||||||
|
|
||||||
|
|||||||
@@ -6,7 +6,7 @@
|
|||||||
use attune_common::repositories::runtime::{
|
use attune_common::repositories::runtime::{
|
||||||
CreateRuntimeInput, RuntimeRepository, UpdateRuntimeInput,
|
CreateRuntimeInput, RuntimeRepository, UpdateRuntimeInput,
|
||||||
};
|
};
|
||||||
use attune_common::repositories::{Create, Delete, FindById, FindByRef, List, Update};
|
use attune_common::repositories::{Create, Delete, FindById, FindByRef, List, Patch, Update};
|
||||||
use serde_json::json;
|
use serde_json::json;
|
||||||
use sqlx::PgPool;
|
use sqlx::PgPool;
|
||||||
use std::collections::hash_map::DefaultHasher;
|
use std::collections::hash_map::DefaultHasher;
|
||||||
@@ -64,6 +64,7 @@ impl RuntimeFixture {
|
|||||||
pack_ref: None,
|
pack_ref: None,
|
||||||
description: Some(format!("Test runtime {}", seq)),
|
description: Some(format!("Test runtime {}", seq)),
|
||||||
name,
|
name,
|
||||||
|
aliases: vec![],
|
||||||
distributions: json!({
|
distributions: json!({
|
||||||
"linux": { "supported": true, "versions": ["ubuntu20.04", "ubuntu22.04"] },
|
"linux": { "supported": true, "versions": ["ubuntu20.04", "ubuntu22.04"] },
|
||||||
"darwin": { "supported": true, "versions": ["12", "13"] }
|
"darwin": { "supported": true, "versions": ["12", "13"] }
|
||||||
@@ -79,6 +80,8 @@ impl RuntimeFixture {
|
|||||||
"file_extension": ".py"
|
"file_extension": ".py"
|
||||||
}
|
}
|
||||||
}),
|
}),
|
||||||
|
auto_detected: false,
|
||||||
|
detection_config: json!({}),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -93,6 +96,7 @@ impl RuntimeFixture {
|
|||||||
pack_ref: None,
|
pack_ref: None,
|
||||||
description: None,
|
description: None,
|
||||||
name,
|
name,
|
||||||
|
aliases: vec![],
|
||||||
distributions: json!({}),
|
distributions: json!({}),
|
||||||
installation: None,
|
installation: None,
|
||||||
execution_config: json!({
|
execution_config: json!({
|
||||||
@@ -102,6 +106,8 @@ impl RuntimeFixture {
|
|||||||
"file_extension": ".sh"
|
"file_extension": ".sh"
|
||||||
}
|
}
|
||||||
}),
|
}),
|
||||||
|
auto_detected: false,
|
||||||
|
detection_config: json!({}),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -259,15 +265,16 @@ async fn test_update_runtime() {
|
|||||||
.expect("Failed to create runtime");
|
.expect("Failed to create runtime");
|
||||||
|
|
||||||
let update_input = UpdateRuntimeInput {
|
let update_input = UpdateRuntimeInput {
|
||||||
description: Some("Updated description".to_string()),
|
description: Some(Patch::Set("Updated description".to_string())),
|
||||||
name: Some("updated_name".to_string()),
|
name: Some("updated_name".to_string()),
|
||||||
distributions: Some(json!({
|
distributions: Some(json!({
|
||||||
"linux": { "supported": false }
|
"linux": { "supported": false }
|
||||||
})),
|
})),
|
||||||
installation: Some(json!({
|
installation: Some(Patch::Set(json!({
|
||||||
"method": "npm"
|
"method": "npm"
|
||||||
})),
|
}))),
|
||||||
execution_config: None,
|
execution_config: None,
|
||||||
|
..Default::default()
|
||||||
};
|
};
|
||||||
|
|
||||||
let updated = RuntimeRepository::update(&pool, created.id, update_input.clone())
|
let updated = RuntimeRepository::update(&pool, created.id, update_input.clone())
|
||||||
@@ -275,10 +282,10 @@ async fn test_update_runtime() {
|
|||||||
.expect("Failed to update runtime");
|
.expect("Failed to update runtime");
|
||||||
|
|
||||||
assert_eq!(updated.id, created.id);
|
assert_eq!(updated.id, created.id);
|
||||||
assert_eq!(updated.description, update_input.description);
|
assert_eq!(updated.description, Some("Updated description".to_string()));
|
||||||
assert_eq!(updated.name, update_input.name.unwrap());
|
assert_eq!(updated.name, update_input.name.unwrap());
|
||||||
assert_eq!(updated.distributions, update_input.distributions.unwrap());
|
assert_eq!(updated.distributions, update_input.distributions.unwrap());
|
||||||
assert_eq!(updated.installation, update_input.installation);
|
assert_eq!(updated.installation, Some(json!({ "method": "npm" })));
|
||||||
assert!(updated.updated > created.updated);
|
assert!(updated.updated > created.updated);
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -294,18 +301,22 @@ async fn test_update_runtime_partial() {
|
|||||||
.expect("Failed to create runtime");
|
.expect("Failed to create runtime");
|
||||||
|
|
||||||
let update_input = UpdateRuntimeInput {
|
let update_input = UpdateRuntimeInput {
|
||||||
description: Some("Only description changed".to_string()),
|
description: Some(Patch::Set("Only description changed".to_string())),
|
||||||
name: None,
|
name: None,
|
||||||
distributions: None,
|
distributions: None,
|
||||||
installation: None,
|
installation: None,
|
||||||
execution_config: None,
|
execution_config: None,
|
||||||
|
..Default::default()
|
||||||
};
|
};
|
||||||
|
|
||||||
let updated = RuntimeRepository::update(&pool, created.id, update_input.clone())
|
let updated = RuntimeRepository::update(&pool, created.id, update_input.clone())
|
||||||
.await
|
.await
|
||||||
.expect("Failed to update runtime");
|
.expect("Failed to update runtime");
|
||||||
|
|
||||||
assert_eq!(updated.description, update_input.description);
|
assert_eq!(
|
||||||
|
updated.description,
|
||||||
|
Some("Only description changed".to_string())
|
||||||
|
);
|
||||||
assert_eq!(updated.name, created.name);
|
assert_eq!(updated.name, created.name);
|
||||||
assert_eq!(updated.distributions, created.distributions);
|
assert_eq!(updated.distributions, created.distributions);
|
||||||
assert_eq!(updated.installation, created.installation);
|
assert_eq!(updated.installation, created.installation);
|
||||||
@@ -610,7 +621,7 @@ async fn test_update_changes_timestamp() {
|
|||||||
tokio::time::sleep(tokio::time::Duration::from_millis(100)).await;
|
tokio::time::sleep(tokio::time::Duration::from_millis(100)).await;
|
||||||
|
|
||||||
let update_input = UpdateRuntimeInput {
|
let update_input = UpdateRuntimeInput {
|
||||||
description: Some("Updated".to_string()),
|
description: Some(Patch::Set("Updated".to_string())),
|
||||||
..Default::default()
|
..Default::default()
|
||||||
};
|
};
|
||||||
|
|
||||||
|
|||||||
@@ -574,6 +574,7 @@ async fn test_worker_with_runtime() {
|
|||||||
pack_ref: None,
|
pack_ref: None,
|
||||||
description: Some("Test runtime".to_string()),
|
description: Some("Test runtime".to_string()),
|
||||||
name: "test_runtime".to_string(),
|
name: "test_runtime".to_string(),
|
||||||
|
aliases: vec![],
|
||||||
distributions: json!({}),
|
distributions: json!({}),
|
||||||
installation: None,
|
installation: None,
|
||||||
execution_config: json!({
|
execution_config: json!({
|
||||||
@@ -583,6 +584,8 @@ async fn test_worker_with_runtime() {
|
|||||||
"file_extension": ".sh"
|
"file_extension": ".sh"
|
||||||
}
|
}
|
||||||
}),
|
}),
|
||||||
|
auto_detected: false,
|
||||||
|
detection_config: json!({}),
|
||||||
};
|
};
|
||||||
|
|
||||||
let runtime = RuntimeRepository::create(&pool, runtime_input)
|
let runtime = RuntimeRepository::create(&pool, runtime_input)
|
||||||
|
|||||||
@@ -8,7 +8,7 @@ mod helpers;
|
|||||||
use attune_common::{
|
use attune_common::{
|
||||||
repositories::{
|
repositories::{
|
||||||
trigger::{CreateSensorInput, SensorRepository, UpdateSensorInput},
|
trigger::{CreateSensorInput, SensorRepository, UpdateSensorInput},
|
||||||
Create, Delete, FindById, FindByRef, List, Update,
|
Create, Delete, FindById, FindByRef, List, Patch, Update,
|
||||||
},
|
},
|
||||||
Error,
|
Error,
|
||||||
};
|
};
|
||||||
@@ -888,7 +888,7 @@ async fn test_update_param_schema() {
|
|||||||
});
|
});
|
||||||
|
|
||||||
let input = UpdateSensorInput {
|
let input = UpdateSensorInput {
|
||||||
param_schema: Some(new_schema.clone()),
|
param_schema: Some(Patch::Set(new_schema.clone())),
|
||||||
..Default::default()
|
..Default::default()
|
||||||
};
|
};
|
||||||
|
|
||||||
@@ -937,7 +937,7 @@ async fn test_update_multiple_fields() {
|
|||||||
description: Some("Updated multiple fields".to_string()),
|
description: Some("Updated multiple fields".to_string()),
|
||||||
entrypoint: Some("sensors/multi.py".to_string()),
|
entrypoint: Some("sensors/multi.py".to_string()),
|
||||||
enabled: Some(false),
|
enabled: Some(false),
|
||||||
param_schema: Some(json!({"type": "object"})),
|
param_schema: Some(Patch::Set(json!({"type": "object"}))),
|
||||||
..Default::default()
|
..Default::default()
|
||||||
};
|
};
|
||||||
|
|
||||||
@@ -1766,7 +1766,7 @@ async fn test_param_schema_can_be_null() {
|
|||||||
// Update to add schema
|
// Update to add schema
|
||||||
let schema = json!({"type": "object"});
|
let schema = json!({"type": "object"});
|
||||||
let input = UpdateSensorInput {
|
let input = UpdateSensorInput {
|
||||||
param_schema: Some(schema.clone()),
|
param_schema: Some(Patch::Set(schema.clone())),
|
||||||
..Default::default()
|
..Default::default()
|
||||||
};
|
};
|
||||||
|
|
||||||
|
|||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user