Compare commits
7 Commits
0782675a2b
...
perms-mgmt
| Author | SHA1 | Date | |
|---|---|---|---|
| 62307e8c65 | |||
| 2ebb03b868 | |||
| af5175b96a | |||
| 8af8c1af9c | |||
| d4c6240485 | |||
| 4d5a3b1bf5 | |||
| 8ba7e3bb84 |
0
.codex_write_test
Normal file
0
.codex_write_test
Normal file
@@ -1,7 +1,27 @@
|
|||||||
name: Publish Images And Chart
|
name: Publish Images
|
||||||
|
|
||||||
on:
|
on:
|
||||||
workflow_dispatch:
|
workflow_dispatch:
|
||||||
|
inputs:
|
||||||
|
target_arch:
|
||||||
|
description: Architecture to publish
|
||||||
|
type: choice
|
||||||
|
options:
|
||||||
|
- all
|
||||||
|
- amd64
|
||||||
|
- arm64
|
||||||
|
default: all
|
||||||
|
target_image:
|
||||||
|
description: Image to publish
|
||||||
|
type: choice
|
||||||
|
options:
|
||||||
|
- all
|
||||||
|
- api
|
||||||
|
- executor
|
||||||
|
- notifier
|
||||||
|
- agent
|
||||||
|
- web
|
||||||
|
default: all
|
||||||
push:
|
push:
|
||||||
branches:
|
branches:
|
||||||
- main
|
- main
|
||||||
@@ -13,21 +33,26 @@ env:
|
|||||||
REGISTRY_HOST: ${{ vars.CLUSTER_GITEA_HOST }}
|
REGISTRY_HOST: ${{ vars.CLUSTER_GITEA_HOST }}
|
||||||
REGISTRY_NAMESPACE: ${{ vars.CONTAINER_REGISTRY_NAMESPACE }}
|
REGISTRY_NAMESPACE: ${{ vars.CONTAINER_REGISTRY_NAMESPACE }}
|
||||||
REGISTRY_PLAIN_HTTP: ${{ vars.CONTAINER_REGISTRY_INSECURE }}
|
REGISTRY_PLAIN_HTTP: ${{ vars.CONTAINER_REGISTRY_INSECURE }}
|
||||||
CHART_NAME: attune
|
ARTIFACT_REPOSITORY: attune-build-artifacts
|
||||||
|
CARGO_TERM_COLOR: always
|
||||||
|
CARGO_INCREMENTAL: 0
|
||||||
|
CARGO_NET_RETRY: 10
|
||||||
|
RUSTUP_MAX_RETRIES: 10
|
||||||
|
RUST_MIN_STACK: 67108864
|
||||||
|
SQLX_OFFLINE: true
|
||||||
|
RUNNER_TOOL_CACHE: /toolcache
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
metadata:
|
metadata:
|
||||||
name: Resolve Publish Metadata
|
name: Resolve Publish Metadata
|
||||||
runs-on: ubuntu-latest
|
runs-on: build-amd64
|
||||||
outputs:
|
outputs:
|
||||||
registry: ${{ steps.meta.outputs.registry }}
|
registry: ${{ steps.meta.outputs.registry }}
|
||||||
namespace: ${{ steps.meta.outputs.namespace }}
|
namespace: ${{ steps.meta.outputs.namespace }}
|
||||||
registry_plain_http: ${{ steps.meta.outputs.registry_plain_http }}
|
registry_plain_http: ${{ steps.meta.outputs.registry_plain_http }}
|
||||||
image_tag: ${{ steps.meta.outputs.image_tag }}
|
image_tag: ${{ steps.meta.outputs.image_tag }}
|
||||||
image_tags: ${{ steps.meta.outputs.image_tags }}
|
image_tags: ${{ steps.meta.outputs.image_tags }}
|
||||||
chart_version: ${{ steps.meta.outputs.chart_version }}
|
artifact_ref_base: ${{ steps.meta.outputs.artifact_ref_base }}
|
||||||
app_version: ${{ steps.meta.outputs.app_version }}
|
|
||||||
release_channel: ${{ steps.meta.outputs.release_channel }}
|
|
||||||
steps:
|
steps:
|
||||||
- name: Resolve tags and registry paths
|
- name: Resolve tags and registry paths
|
||||||
id: meta
|
id: meta
|
||||||
@@ -78,91 +103,400 @@ jobs:
|
|||||||
if [ "$ref_type" = "tag" ] && printf '%s' "$ref_name" | grep -Eq '^v[0-9]+\.[0-9]+\.[0-9]+([-.].*)?$'; then
|
if [ "$ref_type" = "tag" ] && printf '%s' "$ref_name" | grep -Eq '^v[0-9]+\.[0-9]+\.[0-9]+([-.].*)?$'; then
|
||||||
version="${ref_name#v}"
|
version="${ref_name#v}"
|
||||||
image_tags="${version},latest,sha-${short_sha}"
|
image_tags="${version},latest,sha-${short_sha}"
|
||||||
chart_version="$version"
|
|
||||||
release_channel="release"
|
|
||||||
else
|
else
|
||||||
version="sha-${short_sha}"
|
version="sha-${short_sha}"
|
||||||
image_tags="edge,sha-${short_sha}"
|
image_tags="edge,sha-${short_sha}"
|
||||||
chart_version="0.0.0-dev.${{ github.run_number }}"
|
|
||||||
release_channel="edge"
|
|
||||||
fi
|
fi
|
||||||
|
|
||||||
|
artifact_ref_base="${registry}/${namespace}/${ARTIFACT_REPOSITORY}"
|
||||||
|
|
||||||
{
|
{
|
||||||
echo "registry=$registry"
|
echo "registry=$registry"
|
||||||
echo "namespace=$namespace"
|
echo "namespace=$namespace"
|
||||||
echo "registry_plain_http=$registry_plain_http"
|
echo "registry_plain_http=$registry_plain_http"
|
||||||
echo "image_tag=$version"
|
echo "image_tag=$version"
|
||||||
echo "image_tags=$image_tags"
|
echo "image_tags=$image_tags"
|
||||||
echo "chart_version=$chart_version"
|
echo "artifact_ref_base=$artifact_ref_base"
|
||||||
echo "app_version=$version"
|
|
||||||
echo "release_channel=$release_channel"
|
|
||||||
} >> "$GITHUB_OUTPUT"
|
} >> "$GITHUB_OUTPUT"
|
||||||
|
|
||||||
publish-images:
|
build-rust-bundles:
|
||||||
name: Publish ${{ matrix.image.name }}
|
name: Build Rust Bundles (${{ matrix.arch }})
|
||||||
runs-on: ubuntu-latest
|
runs-on: ${{ matrix.runner_label }}
|
||||||
needs: metadata
|
needs: metadata
|
||||||
|
if: |
|
||||||
|
github.event_name != 'workflow_dispatch' ||
|
||||||
|
inputs.target_arch == 'all' ||
|
||||||
|
inputs.target_arch == matrix.arch
|
||||||
strategy:
|
strategy:
|
||||||
fail-fast: false
|
fail-fast: false
|
||||||
matrix:
|
matrix:
|
||||||
|
include:
|
||||||
|
- arch: amd64
|
||||||
|
runner_label: build-amd64
|
||||||
|
musl_target: x86_64-unknown-linux-musl
|
||||||
|
- arch: arm64
|
||||||
|
runner_label: build-arm64
|
||||||
|
musl_target: aarch64-unknown-linux-musl
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Cache Rust toolchain
|
||||||
|
uses: actions/cache@v4
|
||||||
|
with:
|
||||||
|
path: |
|
||||||
|
~/.rustup/toolchains
|
||||||
|
~/.rustup/update-hashes
|
||||||
|
key: rustup-publish-${{ runner.os }}-${{ matrix.arch }}-stable-v1
|
||||||
|
restore-keys: |
|
||||||
|
rustup-${{ runner.os }}-${{ matrix.arch }}-stable-v1
|
||||||
|
rustup-${{ runner.os }}-stable-v1
|
||||||
|
rustup-
|
||||||
|
|
||||||
|
- name: Setup Rust
|
||||||
|
uses: dtolnay/rust-toolchain@stable
|
||||||
|
with:
|
||||||
|
targets: ${{ matrix.musl_target }}
|
||||||
|
|
||||||
|
- name: Cache Cargo registry + index
|
||||||
|
uses: actions/cache@v4
|
||||||
|
with:
|
||||||
|
path: |
|
||||||
|
~/.cargo/registry/index
|
||||||
|
~/.cargo/registry/cache
|
||||||
|
~/.cargo/git/db
|
||||||
|
key: cargo-registry-publish-${{ matrix.arch }}-${{ hashFiles('**/Cargo.lock') }}
|
||||||
|
restore-keys: |
|
||||||
|
cargo-registry-publish-${{ matrix.arch }}-
|
||||||
|
cargo-registry-
|
||||||
|
|
||||||
|
- name: Cache Cargo build artifacts
|
||||||
|
uses: actions/cache@v4
|
||||||
|
with:
|
||||||
|
path: target
|
||||||
|
key: cargo-publish-${{ matrix.arch }}-${{ hashFiles('**/Cargo.lock') }}-${{ hashFiles('**/*.rs', '**/Cargo.toml') }}
|
||||||
|
restore-keys: |
|
||||||
|
cargo-publish-${{ matrix.arch }}-${{ hashFiles('**/Cargo.lock') }}-
|
||||||
|
cargo-publish-${{ matrix.arch }}-
|
||||||
|
|
||||||
|
- name: Install native build dependencies
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
apt-get update
|
||||||
|
apt-get install -y pkg-config libssl-dev musl-tools file
|
||||||
|
|
||||||
|
- name: Build release binaries
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
cargo build --release \
|
||||||
|
--bin attune-api \
|
||||||
|
--bin attune-executor \
|
||||||
|
--bin attune-notifier
|
||||||
|
|
||||||
|
- name: Build static agent binaries
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
cargo build --release \
|
||||||
|
--target "${{ matrix.musl_target }}" \
|
||||||
|
--bin attune-agent \
|
||||||
|
--bin attune-sensor-agent
|
||||||
|
|
||||||
|
- name: Assemble binary bundle
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
bundle_root="dist/bundle/${{ matrix.arch }}"
|
||||||
|
mkdir -p "$bundle_root/bin" "$bundle_root/agent"
|
||||||
|
|
||||||
|
cp target/release/attune-api "$bundle_root/bin/"
|
||||||
|
cp target/release/attune-executor "$bundle_root/bin/"
|
||||||
|
cp target/release/attune-notifier "$bundle_root/bin/"
|
||||||
|
cp target/${{ matrix.musl_target }}/release/attune-agent "$bundle_root/agent/"
|
||||||
|
cp target/${{ matrix.musl_target }}/release/attune-sensor-agent "$bundle_root/agent/"
|
||||||
|
|
||||||
|
cat > "$bundle_root/metadata.json" <<EOF
|
||||||
|
{
|
||||||
|
"git_sha": "${{ github.sha }}",
|
||||||
|
"ref": "${{ github.ref }}",
|
||||||
|
"arch": "${{ matrix.arch }}",
|
||||||
|
"image_tag": "${{ needs.metadata.outputs.image_tag }}"
|
||||||
|
}
|
||||||
|
EOF
|
||||||
|
|
||||||
|
tar -C dist/bundle/${{ matrix.arch }} -czf "dist/attune-binaries-${{ matrix.arch }}.tar.gz" .
|
||||||
|
|
||||||
|
- name: Setup ORAS
|
||||||
|
uses: oras-project/setup-oras@v1
|
||||||
|
|
||||||
|
- name: Log in to registry for artifacts
|
||||||
|
shell: bash
|
||||||
|
env:
|
||||||
|
REGISTRY_USERNAME: ${{ secrets.CONTAINER_REGISTRY_USERNAME }}
|
||||||
|
REGISTRY_PASSWORD: ${{ secrets.CONTAINER_REGISTRY_PASSWORD }}
|
||||||
|
GITHUB_TOKEN_FALLBACK: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
registry_username="${REGISTRY_USERNAME:-${{ github.actor }}}"
|
||||||
|
registry_password="${REGISTRY_PASSWORD:-${GITHUB_TOKEN_FALLBACK:-}}"
|
||||||
|
login_args=()
|
||||||
|
|
||||||
|
if [ -z "$registry_password" ]; then
|
||||||
|
echo "Set CONTAINER_REGISTRY_PASSWORD or enable GITHUB_TOKEN package writes"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [ "${{ needs.metadata.outputs.registry_plain_http }}" = "true" ]; then
|
||||||
|
login_args+=(--plain-http)
|
||||||
|
fi
|
||||||
|
|
||||||
|
oras login "${{ needs.metadata.outputs.registry }}" \
|
||||||
|
"${login_args[@]}" \
|
||||||
|
--username "$registry_username" \
|
||||||
|
--password "$registry_password"
|
||||||
|
|
||||||
|
- name: Push binary bundle artifact
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
push_args=()
|
||||||
|
|
||||||
|
if [ "${{ needs.metadata.outputs.registry_plain_http }}" = "true" ]; then
|
||||||
|
push_args+=(--plain-http)
|
||||||
|
fi
|
||||||
|
|
||||||
|
oras push \
|
||||||
|
"${push_args[@]}" \
|
||||||
|
"${{ needs.metadata.outputs.artifact_ref_base }}:rust-binaries-${{ needs.metadata.outputs.image_tag }}-${{ matrix.arch }}" \
|
||||||
|
--artifact-type application/vnd.attune.rust-binaries.v1 \
|
||||||
|
"dist/attune-binaries-${{ matrix.arch }}.tar.gz:application/vnd.attune.rust-binaries.layer.v1.tar+gzip"
|
||||||
|
|
||||||
|
publish-rust-images:
|
||||||
|
name: Publish ${{ matrix.image.name }} (${{ matrix.arch }})
|
||||||
|
runs-on: ${{ matrix.runner_label }}
|
||||||
|
needs:
|
||||||
|
- metadata
|
||||||
|
- build-rust-bundles
|
||||||
|
if: |
|
||||||
|
(github.event_name != 'workflow_dispatch' ||
|
||||||
|
inputs.target_arch == 'all' ||
|
||||||
|
inputs.target_arch == matrix.arch) &&
|
||||||
|
(github.event_name != 'workflow_dispatch' ||
|
||||||
|
inputs.target_image == 'all' ||
|
||||||
|
inputs.target_image == matrix.image.name)
|
||||||
|
strategy:
|
||||||
|
fail-fast: false
|
||||||
|
matrix:
|
||||||
|
include:
|
||||||
|
- arch: amd64
|
||||||
|
runner_label: build-amd64
|
||||||
|
platform: linux/amd64
|
||||||
image:
|
image:
|
||||||
- name: api
|
name: api
|
||||||
repository: attune-api
|
repository: attune-api
|
||||||
dockerfile: docker/Dockerfile.optimized
|
source_path: bin/attune-api
|
||||||
context: .
|
dockerfile: docker/Dockerfile.runtime
|
||||||
target: ""
|
- arch: amd64
|
||||||
build_args: |
|
runner_label: build-amd64
|
||||||
SERVICE=api
|
platform: linux/amd64
|
||||||
- name: executor
|
image:
|
||||||
|
name: executor
|
||||||
repository: attune-executor
|
repository: attune-executor
|
||||||
dockerfile: docker/Dockerfile.optimized
|
source_path: bin/attune-executor
|
||||||
context: .
|
dockerfile: docker/Dockerfile.runtime
|
||||||
target: ""
|
- arch: amd64
|
||||||
build_args: |
|
runner_label: build-amd64
|
||||||
SERVICE=executor
|
platform: linux/amd64
|
||||||
- name: notifier
|
image:
|
||||||
|
name: notifier
|
||||||
repository: attune-notifier
|
repository: attune-notifier
|
||||||
dockerfile: docker/Dockerfile.optimized
|
source_path: bin/attune-notifier
|
||||||
context: .
|
dockerfile: docker/Dockerfile.runtime
|
||||||
target: ""
|
- arch: amd64
|
||||||
build_args: |
|
runner_label: build-amd64
|
||||||
SERVICE=notifier
|
platform: linux/amd64
|
||||||
- name: sensor
|
image:
|
||||||
repository: attune-sensor
|
name: agent
|
||||||
dockerfile: docker/Dockerfile.sensor.optimized
|
repository: attune-agent
|
||||||
context: .
|
source_path: agent/attune-agent
|
||||||
target: sensor-full
|
dockerfile: docker/Dockerfile.agent-package
|
||||||
build_args: ""
|
- arch: arm64
|
||||||
- name: worker
|
runner_label: build-arm64
|
||||||
repository: attune-worker
|
platform: linux/arm64
|
||||||
dockerfile: docker/Dockerfile.worker.optimized
|
image:
|
||||||
context: .
|
name: api
|
||||||
target: worker-full
|
repository: attune-api
|
||||||
build_args: ""
|
source_path: bin/attune-api
|
||||||
- name: web
|
dockerfile: docker/Dockerfile.runtime
|
||||||
repository: attune-web
|
- arch: arm64
|
||||||
dockerfile: docker/Dockerfile.web
|
runner_label: build-arm64
|
||||||
context: .
|
platform: linux/arm64
|
||||||
target: ""
|
image:
|
||||||
build_args: ""
|
name: executor
|
||||||
- name: migrations
|
repository: attune-executor
|
||||||
repository: attune-migrations
|
source_path: bin/attune-executor
|
||||||
dockerfile: docker/Dockerfile.migrations
|
dockerfile: docker/Dockerfile.runtime
|
||||||
context: .
|
- arch: arm64
|
||||||
target: ""
|
runner_label: build-arm64
|
||||||
build_args: ""
|
platform: linux/arm64
|
||||||
- name: init-user
|
image:
|
||||||
repository: attune-init-user
|
name: notifier
|
||||||
dockerfile: docker/Dockerfile.init-user
|
repository: attune-notifier
|
||||||
context: .
|
source_path: bin/attune-notifier
|
||||||
target: ""
|
dockerfile: docker/Dockerfile.runtime
|
||||||
build_args: ""
|
- arch: arm64
|
||||||
- name: init-packs
|
runner_label: build-arm64
|
||||||
repository: attune-init-packs
|
platform: linux/arm64
|
||||||
dockerfile: docker/Dockerfile.init-packs
|
image:
|
||||||
context: .
|
name: agent
|
||||||
target: ""
|
repository: attune-agent
|
||||||
build_args: ""
|
source_path: agent/attune-agent
|
||||||
|
dockerfile: docker/Dockerfile.agent-package
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Setup ORAS
|
||||||
|
uses: oras-project/setup-oras@v1
|
||||||
|
|
||||||
|
- name: Setup Docker Buildx
|
||||||
|
if: needs.metadata.outputs.registry_plain_http != 'true'
|
||||||
|
uses: docker/setup-buildx-action@v3
|
||||||
|
|
||||||
|
- name: Setup Docker Buildx For Plain HTTP Registry
|
||||||
|
if: needs.metadata.outputs.registry_plain_http == 'true'
|
||||||
|
uses: docker/setup-buildx-action@v3
|
||||||
|
with:
|
||||||
|
buildkitd-config-inline: |
|
||||||
|
[registry."${{ needs.metadata.outputs.registry }}"]
|
||||||
|
http = true
|
||||||
|
insecure = true
|
||||||
|
|
||||||
|
- name: Log in to registry
|
||||||
|
shell: bash
|
||||||
|
env:
|
||||||
|
REGISTRY_USERNAME: ${{ secrets.CONTAINER_REGISTRY_USERNAME }}
|
||||||
|
REGISTRY_PASSWORD: ${{ secrets.CONTAINER_REGISTRY_PASSWORD }}
|
||||||
|
GITHUB_TOKEN_FALLBACK: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
registry_username="${REGISTRY_USERNAME:-${{ github.actor }}}"
|
||||||
|
registry_password="${REGISTRY_PASSWORD:-${GITHUB_TOKEN_FALLBACK:-}}"
|
||||||
|
|
||||||
|
if [ -z "$registry_password" ]; then
|
||||||
|
echo "Set CONTAINER_REGISTRY_PASSWORD or enable GITHUB_TOKEN package writes"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
mkdir -p "$HOME/.docker"
|
||||||
|
auth="$(printf '%s:%s' "$registry_username" "$registry_password" | base64 | tr -d '\n')"
|
||||||
|
|
||||||
|
cat > "$HOME/.docker/config.json" <<EOF
|
||||||
|
{
|
||||||
|
"auths": {
|
||||||
|
"${{ needs.metadata.outputs.registry }}": {
|
||||||
|
"auth": "${auth}"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
EOF
|
||||||
|
|
||||||
|
oras_login_args=()
|
||||||
|
if [ "${{ needs.metadata.outputs.registry_plain_http }}" = "true" ]; then
|
||||||
|
oras_login_args+=(--plain-http)
|
||||||
|
fi
|
||||||
|
|
||||||
|
oras login "${{ needs.metadata.outputs.registry }}" \
|
||||||
|
"${oras_login_args[@]}" \
|
||||||
|
--username "$registry_username" \
|
||||||
|
--password "$registry_password"
|
||||||
|
|
||||||
|
- name: Pull binary bundle
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
pull_args=()
|
||||||
|
|
||||||
|
if [ "${{ needs.metadata.outputs.registry_plain_http }}" = "true" ]; then
|
||||||
|
pull_args+=(--plain-http)
|
||||||
|
fi
|
||||||
|
|
||||||
|
mkdir -p dist/artifact
|
||||||
|
cd dist/artifact
|
||||||
|
|
||||||
|
oras pull \
|
||||||
|
"${pull_args[@]}" \
|
||||||
|
"${{ needs.metadata.outputs.artifact_ref_base }}:rust-binaries-${{ needs.metadata.outputs.image_tag }}-${{ matrix.arch }}"
|
||||||
|
|
||||||
|
tar -xzf "attune-binaries-${{ matrix.arch }}.tar.gz"
|
||||||
|
|
||||||
|
- name: Prepare packaging context
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
rm -rf dist/image
|
||||||
|
mkdir -p dist/image
|
||||||
|
|
||||||
|
case "${{ matrix.image.name }}" in
|
||||||
|
api|executor|notifier)
|
||||||
|
cp "dist/artifact/${{ matrix.image.source_path }}" dist/attune-service-binary
|
||||||
|
;;
|
||||||
|
agent)
|
||||||
|
cp dist/artifact/agent/attune-agent dist/attune-agent
|
||||||
|
cp dist/artifact/agent/attune-sensor-agent dist/attune-sensor-agent
|
||||||
|
;;
|
||||||
|
*)
|
||||||
|
echo "Unsupported image: ${{ matrix.image.name }}"
|
||||||
|
exit 1
|
||||||
|
;;
|
||||||
|
esac
|
||||||
|
|
||||||
|
- name: Push architecture image
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
image_ref="${{ needs.metadata.outputs.registry }}/${{ needs.metadata.outputs.namespace }}/${{ matrix.image.repository }}:${{ needs.metadata.outputs.image_tag }}-${{ matrix.arch }}"
|
||||||
|
|
||||||
|
build_cmd=(
|
||||||
|
docker buildx build
|
||||||
|
.
|
||||||
|
--platform "${{ matrix.platform }}"
|
||||||
|
--file "${{ matrix.image.dockerfile }}"
|
||||||
|
)
|
||||||
|
|
||||||
|
if [ "${{ needs.metadata.outputs.registry_plain_http }}" = "true" ]; then
|
||||||
|
build_cmd+=(--output "type=image,\"name=${image_ref}\",push=true,registry.insecure=true")
|
||||||
|
else
|
||||||
|
build_cmd+=(--tag "$image_ref" --push)
|
||||||
|
fi
|
||||||
|
|
||||||
|
"${build_cmd[@]}"
|
||||||
|
|
||||||
|
publish-web-images:
|
||||||
|
name: Publish web (${{ matrix.arch }})
|
||||||
|
runs-on: ${{ matrix.runner_label }}
|
||||||
|
needs: metadata
|
||||||
|
if: |
|
||||||
|
(github.event_name != 'workflow_dispatch' ||
|
||||||
|
inputs.target_arch == 'all' ||
|
||||||
|
inputs.target_arch == matrix.arch) &&
|
||||||
|
(github.event_name != 'workflow_dispatch' ||
|
||||||
|
inputs.target_image == 'all' ||
|
||||||
|
inputs.target_image == 'web')
|
||||||
|
strategy:
|
||||||
|
fail-fast: false
|
||||||
|
matrix:
|
||||||
|
include:
|
||||||
|
- arch: amd64
|
||||||
|
runner_label: build-amd64
|
||||||
|
platform: linux/amd64
|
||||||
|
- arch: arm64
|
||||||
|
runner_label: build-arm64
|
||||||
|
platform: linux/arm64
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout
|
- name: Checkout
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v4
|
||||||
@@ -190,7 +524,6 @@ jobs:
|
|||||||
set -euo pipefail
|
set -euo pipefail
|
||||||
username="${REGISTRY_USERNAME:-${{ github.actor }}}"
|
username="${REGISTRY_USERNAME:-${{ github.actor }}}"
|
||||||
password="${REGISTRY_PASSWORD:-${GITHUB_TOKEN_FALLBACK:-}}"
|
password="${REGISTRY_PASSWORD:-${GITHUB_TOKEN_FALLBACK:-}}"
|
||||||
registry="${{ needs.metadata.outputs.registry }}"
|
|
||||||
|
|
||||||
if [ -z "$password" ]; then
|
if [ -z "$password" ]; then
|
||||||
echo "Set CONTAINER_REGISTRY_PASSWORD or enable GITHUB_TOKEN package writes"
|
echo "Set CONTAINER_REGISTRY_PASSWORD or enable GITHUB_TOKEN package writes"
|
||||||
@@ -203,81 +536,56 @@ jobs:
|
|||||||
cat > "$HOME/.docker/config.json" <<EOF
|
cat > "$HOME/.docker/config.json" <<EOF
|
||||||
{
|
{
|
||||||
"auths": {
|
"auths": {
|
||||||
"${registry}": {
|
"${{ needs.metadata.outputs.registry }}": {
|
||||||
"auth": "${auth}"
|
"auth": "${auth}"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
EOF
|
EOF
|
||||||
|
|
||||||
- name: Prepare image tags
|
- name: Push architecture image
|
||||||
id: tags
|
|
||||||
shell: bash
|
shell: bash
|
||||||
run: |
|
run: |
|
||||||
set -euo pipefail
|
set -euo pipefail
|
||||||
image_ref_base="${{ needs.metadata.outputs.registry }}/${{ needs.metadata.outputs.namespace }}/${{ matrix.image.repository }}"
|
|
||||||
tag_lines=""
|
|
||||||
IFS=',' read -ra tags <<< "${{ needs.metadata.outputs.image_tags }}"
|
|
||||||
for tag in "${tags[@]}"; do
|
|
||||||
tag_lines="${tag_lines}${image_ref_base}:${tag}"$'\n'
|
|
||||||
done
|
|
||||||
printf 'tags<<EOF\n%sEOF\n' "$tag_lines" >> "$GITHUB_OUTPUT"
|
|
||||||
|
|
||||||
- name: Build and push image
|
image_ref="${{ needs.metadata.outputs.registry }}/${{ needs.metadata.outputs.namespace }}/attune-web:${{ needs.metadata.outputs.image_tag }}-${{ matrix.arch }}"
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
set -euo pipefail
|
|
||||||
image_names_csv=""
|
|
||||||
build_cmd=(
|
build_cmd=(
|
||||||
docker buildx build
|
docker buildx build
|
||||||
"${{ matrix.image.context }}"
|
.
|
||||||
--file "${{ matrix.image.dockerfile }}"
|
--platform "${{ matrix.platform }}"
|
||||||
|
--file docker/Dockerfile.web
|
||||||
)
|
)
|
||||||
|
|
||||||
if [ -n "${{ matrix.image.target }}" ]; then
|
|
||||||
build_cmd+=(--target "${{ matrix.image.target }}")
|
|
||||||
fi
|
|
||||||
|
|
||||||
while IFS= read -r tag; do
|
|
||||||
if [ -n "$tag" ]; then
|
|
||||||
if [ -n "$image_names_csv" ]; then
|
|
||||||
image_names_csv="${image_names_csv},${tag}"
|
|
||||||
else
|
|
||||||
image_names_csv="${tag}"
|
|
||||||
fi
|
|
||||||
|
|
||||||
if [ "${{ needs.metadata.outputs.registry_plain_http }}" != "true" ]; then
|
|
||||||
build_cmd+=(--tag "$tag")
|
|
||||||
fi
|
|
||||||
fi
|
|
||||||
done <<< "${{ steps.tags.outputs.tags }}"
|
|
||||||
|
|
||||||
while IFS= read -r build_arg; do
|
|
||||||
[ -n "$build_arg" ] && build_cmd+=(--build-arg "$build_arg")
|
|
||||||
done <<< "${{ matrix.image.build_args }}"
|
|
||||||
|
|
||||||
if [ "${{ needs.metadata.outputs.registry_plain_http }}" = "true" ]; then
|
if [ "${{ needs.metadata.outputs.registry_plain_http }}" = "true" ]; then
|
||||||
build_cmd+=(--output "type=image,\"name=${image_names_csv}\",push=true,registry.insecure=true")
|
build_cmd+=(--output "type=image,\"name=${image_ref}\",push=true,registry.insecure=true")
|
||||||
else
|
else
|
||||||
build_cmd+=(--push)
|
build_cmd+=(--tag "$image_ref" --push)
|
||||||
fi
|
fi
|
||||||
|
|
||||||
"${build_cmd[@]}"
|
"${build_cmd[@]}"
|
||||||
|
|
||||||
publish-chart:
|
publish-manifests:
|
||||||
name: Publish Helm Chart
|
name: Publish manifest ${{ matrix.repository }}
|
||||||
runs-on: ubuntu-latest
|
runs-on: build-amd64
|
||||||
needs:
|
needs:
|
||||||
- metadata
|
- metadata
|
||||||
- publish-images
|
- publish-rust-images
|
||||||
|
- publish-web-images
|
||||||
|
if: |
|
||||||
|
github.event_name != 'workflow_dispatch' ||
|
||||||
|
(inputs.target_arch == 'all' && inputs.target_image == 'all')
|
||||||
|
strategy:
|
||||||
|
fail-fast: false
|
||||||
|
matrix:
|
||||||
|
repository:
|
||||||
|
- attune-api
|
||||||
|
- attune-executor
|
||||||
|
- attune-notifier
|
||||||
|
- attune-agent
|
||||||
|
- attune-web
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout
|
- name: Configure OCI registry auth
|
||||||
uses: actions/checkout@v4
|
|
||||||
|
|
||||||
- name: Setup Helm
|
|
||||||
uses: azure/setup-helm@v4
|
|
||||||
|
|
||||||
- name: Log in to Gitea OCI registry
|
|
||||||
shell: bash
|
shell: bash
|
||||||
env:
|
env:
|
||||||
REGISTRY_USERNAME: ${{ secrets.CONTAINER_REGISTRY_USERNAME }}
|
REGISTRY_USERNAME: ${{ secrets.CONTAINER_REGISTRY_USERNAME }}
|
||||||
@@ -285,43 +593,48 @@ jobs:
|
|||||||
GITHUB_TOKEN_FALLBACK: ${{ secrets.GITHUB_TOKEN }}
|
GITHUB_TOKEN_FALLBACK: ${{ secrets.GITHUB_TOKEN }}
|
||||||
run: |
|
run: |
|
||||||
set -euo pipefail
|
set -euo pipefail
|
||||||
registry_username="${REGISTRY_USERNAME:-${{ github.actor }}}"
|
username="${REGISTRY_USERNAME:-${{ github.actor }}}"
|
||||||
registry_password="${REGISTRY_PASSWORD:-${GITHUB_TOKEN_FALLBACK:-}}"
|
password="${REGISTRY_PASSWORD:-${GITHUB_TOKEN_FALLBACK:-}}"
|
||||||
login_args=()
|
|
||||||
|
|
||||||
if [ -z "$registry_password" ]; then
|
if [ -z "$password" ]; then
|
||||||
echo "Set CONTAINER_REGISTRY_PASSWORD or enable GITHUB_TOKEN package writes"
|
echo "Set CONTAINER_REGISTRY_PASSWORD or enable GITHUB_TOKEN package writes"
|
||||||
exit 1
|
exit 1
|
||||||
fi
|
fi
|
||||||
|
|
||||||
if [ "${{ needs.metadata.outputs.registry_plain_http }}" = "true" ]; then
|
mkdir -p "$HOME/.docker"
|
||||||
login_args+=(--plain-http)
|
auth="$(printf '%s:%s' "$username" "$password" | base64 | tr -d '\n')"
|
||||||
fi
|
|
||||||
|
|
||||||
printf '%s' "$registry_password" | helm registry login "${{ needs.metadata.outputs.registry }}" \
|
cat > "$HOME/.docker/config.json" <<EOF
|
||||||
--username "$registry_username" \
|
{
|
||||||
"${login_args[@]}" \
|
"auths": {
|
||||||
--password-stdin
|
"${{ needs.metadata.outputs.registry }}": {
|
||||||
|
"auth": "${auth}"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
EOF
|
||||||
|
|
||||||
- name: Lint chart
|
- name: Publish manifest tags
|
||||||
|
shell: bash
|
||||||
run: |
|
run: |
|
||||||
helm lint charts/attune
|
set -euo pipefail
|
||||||
|
|
||||||
- name: Package chart
|
image_base="${{ needs.metadata.outputs.registry }}/${{ needs.metadata.outputs.namespace }}/${{ matrix.repository }}"
|
||||||
run: |
|
|
||||||
mkdir -p dist
|
|
||||||
helm package charts/attune \
|
|
||||||
--destination dist \
|
|
||||||
--version "${{ needs.metadata.outputs.chart_version }}" \
|
|
||||||
--app-version "${{ needs.metadata.outputs.app_version }}"
|
|
||||||
|
|
||||||
- name: Push chart to OCI registry
|
|
||||||
run: |
|
|
||||||
push_args=()
|
push_args=()
|
||||||
|
|
||||||
if [ "${{ needs.metadata.outputs.registry_plain_http }}" = "true" ]; then
|
if [ "${{ needs.metadata.outputs.registry_plain_http }}" = "true" ]; then
|
||||||
push_args+=(--plain-http)
|
push_args+=(--insecure)
|
||||||
fi
|
fi
|
||||||
|
|
||||||
helm push "dist/${CHART_NAME}-${{ needs.metadata.outputs.chart_version }}.tgz" \
|
IFS=',' read -ra tags <<< "${{ needs.metadata.outputs.image_tags }}"
|
||||||
"oci://${{ needs.metadata.outputs.registry }}/${{ needs.metadata.outputs.namespace }}/helm" \
|
for tag in "${tags[@]}"; do
|
||||||
"${push_args[@]}"
|
manifest_ref="${image_base}:${tag}"
|
||||||
|
amd64_ref="${image_base}:${{ needs.metadata.outputs.image_tag }}-amd64"
|
||||||
|
arm64_ref="${image_base}:${{ needs.metadata.outputs.image_tag }}-arm64"
|
||||||
|
|
||||||
|
docker manifest rm "$manifest_ref" >/dev/null 2>&1 || true
|
||||||
|
docker manifest create "$manifest_ref" "$amd64_ref" "$arm64_ref"
|
||||||
|
docker manifest annotate "$manifest_ref" "$amd64_ref" --os linux --arch amd64
|
||||||
|
docker manifest annotate "$manifest_ref" "$arm64_ref" --os linux --arch arm64
|
||||||
|
docker manifest push "${push_args[@]}" "$manifest_ref"
|
||||||
|
done
|
||||||
|
|||||||
1
.gitignore
vendored
1
.gitignore
vendored
@@ -78,4 +78,5 @@ docker-compose.override.yml
|
|||||||
*.pid
|
*.pid
|
||||||
|
|
||||||
packs.examples/
|
packs.examples/
|
||||||
|
packs.external/
|
||||||
codex/
|
codex/
|
||||||
|
|||||||
1
Cargo.lock
generated
1
Cargo.lock
generated
@@ -490,6 +490,7 @@ dependencies = [
|
|||||||
"sha1",
|
"sha1",
|
||||||
"sha2",
|
"sha2",
|
||||||
"sqlx",
|
"sqlx",
|
||||||
|
"subtle",
|
||||||
"tar",
|
"tar",
|
||||||
"tempfile",
|
"tempfile",
|
||||||
"thiserror 2.0.18",
|
"thiserror 2.0.18",
|
||||||
|
|||||||
@@ -21,7 +21,7 @@ repository = "https://git.rdrx.app/attune-system/attune"
|
|||||||
[workspace.dependencies]
|
[workspace.dependencies]
|
||||||
# Async runtime
|
# Async runtime
|
||||||
tokio = { version = "1.50", features = ["full"] }
|
tokio = { version = "1.50", features = ["full"] }
|
||||||
tokio-util = "0.7"
|
tokio-util = { version = "0.7", features = ["io"] }
|
||||||
tokio-stream = { version = "0.1", features = ["sync"] }
|
tokio-stream = { version = "0.1", features = ["sync"] }
|
||||||
|
|
||||||
# Web framework
|
# Web framework
|
||||||
|
|||||||
65
Makefile
65
Makefile
@@ -4,7 +4,9 @@
|
|||||||
docker-build-workers docker-build-worker-base docker-build-worker-python \
|
docker-build-workers docker-build-worker-base docker-build-worker-python \
|
||||||
docker-build-worker-node docker-build-worker-full deny ci-rust ci-web-blocking ci-web-advisory \
|
docker-build-worker-node docker-build-worker-full deny ci-rust ci-web-blocking ci-web-advisory \
|
||||||
ci-security-blocking ci-security-advisory ci-blocking ci-advisory \
|
ci-security-blocking ci-security-advisory ci-blocking ci-advisory \
|
||||||
fmt-check pre-commit install-git-hooks
|
fmt-check pre-commit install-git-hooks \
|
||||||
|
build-agent docker-build-agent run-agent run-agent-release \
|
||||||
|
docker-up-agent docker-down-agent
|
||||||
|
|
||||||
# Default target
|
# Default target
|
||||||
help:
|
help:
|
||||||
@@ -60,6 +62,14 @@ help:
|
|||||||
@echo " make docker-up - Start services with docker compose"
|
@echo " make docker-up - Start services with docker compose"
|
||||||
@echo " make docker-down - Stop services"
|
@echo " make docker-down - Stop services"
|
||||||
@echo ""
|
@echo ""
|
||||||
|
@echo "Agent (Universal Worker):"
|
||||||
|
@echo " make build-agent - Build statically-linked agent binary (musl)"
|
||||||
|
@echo " make docker-build-agent - Build agent Docker image"
|
||||||
|
@echo " make run-agent - Run agent in development mode"
|
||||||
|
@echo " make run-agent-release - Run agent in release mode"
|
||||||
|
@echo " make docker-up-agent - Start all services + agent workers (ruby, etc.)"
|
||||||
|
@echo " make docker-down-agent - Stop agent stack"
|
||||||
|
@echo ""
|
||||||
@echo "Development:"
|
@echo "Development:"
|
||||||
@echo " make watch - Watch and rebuild on changes"
|
@echo " make watch - Watch and rebuild on changes"
|
||||||
@echo " make install-tools - Install development tools"
|
@echo " make install-tools - Install development tools"
|
||||||
@@ -227,38 +237,53 @@ docker-build-api:
|
|||||||
docker-build-web:
|
docker-build-web:
|
||||||
docker compose build web
|
docker compose build web
|
||||||
|
|
||||||
# Build worker images
|
# Agent binary (statically-linked for injection into any container)
|
||||||
docker-build-workers: docker-build-worker-base docker-build-worker-python docker-build-worker-node docker-build-worker-full
|
build-agent:
|
||||||
@echo "✅ All worker images built successfully"
|
@echo "Installing musl target (if not already installed)..."
|
||||||
|
rustup target add x86_64-unknown-linux-musl 2>/dev/null || true
|
||||||
|
@echo "Building statically-linked worker and sensor agent binaries..."
|
||||||
|
SQLX_OFFLINE=true cargo build --release --target x86_64-unknown-linux-musl --bin attune-agent --bin attune-sensor-agent
|
||||||
|
strip target/x86_64-unknown-linux-musl/release/attune-agent
|
||||||
|
strip target/x86_64-unknown-linux-musl/release/attune-sensor-agent
|
||||||
|
@echo "✅ Agent binaries built:"
|
||||||
|
@echo " - target/x86_64-unknown-linux-musl/release/attune-agent"
|
||||||
|
@echo " - target/x86_64-unknown-linux-musl/release/attune-sensor-agent"
|
||||||
|
@ls -lh target/x86_64-unknown-linux-musl/release/attune-agent
|
||||||
|
@ls -lh target/x86_64-unknown-linux-musl/release/attune-sensor-agent
|
||||||
|
|
||||||
docker-build-worker-base:
|
docker-build-agent:
|
||||||
@echo "Building base worker (shell only)..."
|
@echo "Building agent Docker image (statically-linked binary)..."
|
||||||
DOCKER_BUILDKIT=1 docker build --target worker-base -t attune-worker:base -f docker/Dockerfile.worker.optimized .
|
DOCKER_BUILDKIT=1 docker buildx build --target agent-init -f docker/Dockerfile.agent -t attune-agent:latest .
|
||||||
@echo "✅ Base worker image built: attune-worker:base"
|
@echo "✅ Agent image built: attune-agent:latest"
|
||||||
|
|
||||||
docker-build-worker-python:
|
run-agent:
|
||||||
@echo "Building Python worker (shell + python)..."
|
cargo run --bin attune-agent
|
||||||
DOCKER_BUILDKIT=1 docker build --target worker-python -t attune-worker:python -f docker/Dockerfile.worker.optimized .
|
|
||||||
@echo "✅ Python worker image built: attune-worker:python"
|
|
||||||
|
|
||||||
docker-build-worker-node:
|
run-agent-release:
|
||||||
@echo "Building Node.js worker (shell + node)..."
|
cargo run --bin attune-agent --release
|
||||||
DOCKER_BUILDKIT=1 docker build --target worker-node -t attune-worker:node -f docker/Dockerfile.worker.optimized .
|
|
||||||
@echo "✅ Node.js worker image built: attune-worker:node"
|
|
||||||
|
|
||||||
docker-build-worker-full:
|
run-sensor-agent:
|
||||||
@echo "Building full worker (all runtimes)..."
|
cargo run --bin attune-sensor-agent
|
||||||
DOCKER_BUILDKIT=1 docker build --target worker-full -t attune-worker:full -f docker/Dockerfile.worker.optimized .
|
|
||||||
@echo "✅ Full worker image built: attune-worker:full"
|
run-sensor-agent-release:
|
||||||
|
cargo run --bin attune-sensor-agent --release
|
||||||
|
|
||||||
docker-up:
|
docker-up:
|
||||||
@echo "Starting all services with Docker Compose..."
|
@echo "Starting all services with Docker Compose..."
|
||||||
docker compose up -d
|
docker compose up -d
|
||||||
|
|
||||||
|
docker-up-agent:
|
||||||
|
@echo "Starting all services + agent-based workers..."
|
||||||
|
docker compose -f docker-compose.yaml -f docker-compose.agent.yaml up -d
|
||||||
|
|
||||||
docker-down:
|
docker-down:
|
||||||
@echo "Stopping all services..."
|
@echo "Stopping all services..."
|
||||||
docker compose down
|
docker compose down
|
||||||
|
|
||||||
|
docker-down-agent:
|
||||||
|
@echo "Stopping all services (including agent workers)..."
|
||||||
|
docker compose -f docker-compose.yaml -f docker-compose.agent.yaml down
|
||||||
|
|
||||||
docker-down-volumes:
|
docker-down-volumes:
|
||||||
@echo "Stopping all services and removing volumes (WARNING: deletes data)..."
|
@echo "Stopping all services and removing volumes (WARNING: deletes data)..."
|
||||||
docker compose down -v
|
docker compose down -v
|
||||||
|
|||||||
@@ -1,3 +1,26 @@
|
|||||||
1. Set `global.imageRegistry`, `global.imageNamespace`, and `global.imageTag` so the chart pulls the images published by the Gitea workflow.
|
1. Set `global.imageRegistry`, `global.imageNamespace`, and `global.imageTag` so the chart pulls the images published by the Gitea workflow.
|
||||||
2. Set `web.config.apiUrl` and `web.config.wsUrl` to browser-reachable endpoints before exposing the web UI.
|
2. Set `web.config.apiUrl` and `web.config.wsUrl` to browser-reachable endpoints before exposing the web UI.
|
||||||
3. The shared `packs`, `runtime_envs`, and `artifacts` PVCs default to `ReadWriteMany`; your cluster storage class must support RWX or you need to override those claims.
|
3. The shared `packs`, `runtime_envs`, and `artifacts` PVCs default to `ReadWriteMany`; your cluster storage class must support RWX or you need to override those claims.
|
||||||
|
{{- if .Values.agentWorkers }}
|
||||||
|
|
||||||
|
Agent-based workers enabled:
|
||||||
|
{{- range .Values.agentWorkers }}
|
||||||
|
- {{ .name }}: image={{ .image }}, replicas={{ .replicas | default 1 }}
|
||||||
|
{{- if .runtimes }} runtimes={{ join "," .runtimes }}{{ else }} runtimes=auto-detect{{ end }}
|
||||||
|
{{- end }}
|
||||||
|
|
||||||
|
Each agent worker uses an init container to copy the statically-linked
|
||||||
|
attune-agent binary into the worker pod via an emptyDir volume. The agent
|
||||||
|
auto-detects available runtimes in the container and registers with Attune.
|
||||||
|
|
||||||
|
The default sensor deployment also uses the same injection pattern, copying
|
||||||
|
`attune-sensor-agent` into the pod before starting a stock runtime image.
|
||||||
|
|
||||||
|
To add more agent workers, append entries to `agentWorkers` in your values:
|
||||||
|
|
||||||
|
agentWorkers:
|
||||||
|
- name: my-runtime
|
||||||
|
image: my-org/my-image:latest
|
||||||
|
replicas: 1
|
||||||
|
runtimes: [] # auto-detect
|
||||||
|
{{- end }}
|
||||||
|
|||||||
137
charts/attune/templates/agent-workers.yaml
Normal file
137
charts/attune/templates/agent-workers.yaml
Normal file
@@ -0,0 +1,137 @@
|
|||||||
|
{{- range .Values.agentWorkers }}
|
||||||
|
---
|
||||||
|
apiVersion: apps/v1
|
||||||
|
kind: Deployment
|
||||||
|
metadata:
|
||||||
|
name: {{ include "attune.fullname" $ }}-agent-worker-{{ .name }}
|
||||||
|
labels:
|
||||||
|
{{- include "attune.labels" $ | nindent 4 }}
|
||||||
|
app.kubernetes.io/component: agent-worker-{{ .name }}
|
||||||
|
spec:
|
||||||
|
replicas: {{ .replicas | default 1 }}
|
||||||
|
selector:
|
||||||
|
matchLabels:
|
||||||
|
{{- include "attune.selectorLabels" $ | nindent 6 }}
|
||||||
|
app.kubernetes.io/component: agent-worker-{{ .name }}
|
||||||
|
template:
|
||||||
|
metadata:
|
||||||
|
labels:
|
||||||
|
{{- include "attune.selectorLabels" $ | nindent 8 }}
|
||||||
|
app.kubernetes.io/component: agent-worker-{{ .name }}
|
||||||
|
spec:
|
||||||
|
{{- if $.Values.global.imagePullSecrets }}
|
||||||
|
imagePullSecrets:
|
||||||
|
{{- toYaml $.Values.global.imagePullSecrets | nindent 8 }}
|
||||||
|
{{- end }}
|
||||||
|
{{- if .runtimeClassName }}
|
||||||
|
runtimeClassName: {{ .runtimeClassName }}
|
||||||
|
{{- end }}
|
||||||
|
{{- if .nodeSelector }}
|
||||||
|
nodeSelector:
|
||||||
|
{{- toYaml .nodeSelector | nindent 8 }}
|
||||||
|
{{- end }}
|
||||||
|
{{- if .tolerations }}
|
||||||
|
tolerations:
|
||||||
|
{{- toYaml .tolerations | nindent 8 }}
|
||||||
|
{{- end }}
|
||||||
|
{{- if .stopGracePeriod }}
|
||||||
|
terminationGracePeriodSeconds: {{ .stopGracePeriod }}
|
||||||
|
{{- else }}
|
||||||
|
terminationGracePeriodSeconds: 45
|
||||||
|
{{- end }}
|
||||||
|
initContainers:
|
||||||
|
- name: agent-loader
|
||||||
|
image: {{ include "attune.image" (dict "root" $ "image" $.Values.images.agent) }}
|
||||||
|
imagePullPolicy: {{ $.Values.images.agent.pullPolicy }}
|
||||||
|
command: ["cp", "/usr/local/bin/attune-agent", "/opt/attune/agent/attune-agent"]
|
||||||
|
volumeMounts:
|
||||||
|
- name: agent-bin
|
||||||
|
mountPath: /opt/attune/agent
|
||||||
|
- name: wait-for-schema
|
||||||
|
image: postgres:16-alpine
|
||||||
|
command: ["/bin/sh", "-ec"]
|
||||||
|
args:
|
||||||
|
- |
|
||||||
|
until PGPASSWORD="$DB_PASSWORD" psql -h "$DB_HOST" -p "$DB_PORT" -U "$DB_USER" -d "$DB_NAME" -tAc "SELECT to_regclass('${DB_SCHEMA}.identity')" | grep -q identity; do
|
||||||
|
echo "waiting for schema";
|
||||||
|
sleep 2;
|
||||||
|
done
|
||||||
|
envFrom:
|
||||||
|
- secretRef:
|
||||||
|
name: {{ include "attune.secretName" $ }}
|
||||||
|
- name: wait-for-packs
|
||||||
|
image: busybox:1.36
|
||||||
|
command: ["/bin/sh", "-ec"]
|
||||||
|
args:
|
||||||
|
- |
|
||||||
|
until [ -f /opt/attune/packs/core/pack.yaml ]; do
|
||||||
|
echo "waiting for packs";
|
||||||
|
sleep 2;
|
||||||
|
done
|
||||||
|
volumeMounts:
|
||||||
|
- name: packs
|
||||||
|
mountPath: /opt/attune/packs
|
||||||
|
containers:
|
||||||
|
- name: worker
|
||||||
|
image: {{ .image }}
|
||||||
|
{{- if .imagePullPolicy }}
|
||||||
|
imagePullPolicy: {{ .imagePullPolicy }}
|
||||||
|
{{- end }}
|
||||||
|
command: ["/opt/attune/agent/attune-agent"]
|
||||||
|
envFrom:
|
||||||
|
- secretRef:
|
||||||
|
name: {{ include "attune.secretName" $ }}
|
||||||
|
env:
|
||||||
|
- name: ATTUNE_CONFIG
|
||||||
|
value: /opt/attune/config.yaml
|
||||||
|
- name: ATTUNE__DATABASE__SCHEMA
|
||||||
|
value: {{ $.Values.database.schema | quote }}
|
||||||
|
- name: ATTUNE_WORKER_TYPE
|
||||||
|
value: container
|
||||||
|
- name: ATTUNE_WORKER_NAME
|
||||||
|
valueFrom:
|
||||||
|
fieldRef:
|
||||||
|
fieldPath: metadata.name
|
||||||
|
- name: ATTUNE_API_URL
|
||||||
|
value: http://{{ include "attune.apiServiceName" $ }}:{{ $.Values.api.service.port }}
|
||||||
|
- name: RUST_LOG
|
||||||
|
value: {{ .logLevel | default "info" }}
|
||||||
|
{{- if .runtimes }}
|
||||||
|
- name: ATTUNE_WORKER_RUNTIMES
|
||||||
|
value: {{ join "," .runtimes | quote }}
|
||||||
|
{{- end }}
|
||||||
|
{{- if .env }}
|
||||||
|
{{- toYaml .env | nindent 12 }}
|
||||||
|
{{- end }}
|
||||||
|
resources:
|
||||||
|
{{- toYaml (.resources | default dict) | nindent 12 }}
|
||||||
|
volumeMounts:
|
||||||
|
- name: agent-bin
|
||||||
|
mountPath: /opt/attune/agent
|
||||||
|
readOnly: true
|
||||||
|
- name: config
|
||||||
|
mountPath: /opt/attune/config.yaml
|
||||||
|
subPath: config.yaml
|
||||||
|
- name: packs
|
||||||
|
mountPath: /opt/attune/packs
|
||||||
|
readOnly: true
|
||||||
|
- name: runtime-envs
|
||||||
|
mountPath: /opt/attune/runtime_envs
|
||||||
|
- name: artifacts
|
||||||
|
mountPath: /opt/attune/artifacts
|
||||||
|
volumes:
|
||||||
|
- name: agent-bin
|
||||||
|
emptyDir: {}
|
||||||
|
- name: config
|
||||||
|
configMap:
|
||||||
|
name: {{ include "attune.fullname" $ }}-config
|
||||||
|
- name: packs
|
||||||
|
persistentVolumeClaim:
|
||||||
|
claimName: {{ include "attune.fullname" $ }}-packs
|
||||||
|
- name: runtime-envs
|
||||||
|
persistentVolumeClaim:
|
||||||
|
claimName: {{ include "attune.fullname" $ }}-runtime-envs
|
||||||
|
- name: artifacts
|
||||||
|
persistentVolumeClaim:
|
||||||
|
claimName: {{ include "attune.fullname" $ }}-artifacts
|
||||||
|
{{- end }}
|
||||||
@@ -304,7 +304,15 @@ spec:
|
|||||||
imagePullSecrets:
|
imagePullSecrets:
|
||||||
{{- toYaml .Values.global.imagePullSecrets | nindent 8 }}
|
{{- toYaml .Values.global.imagePullSecrets | nindent 8 }}
|
||||||
{{- end }}
|
{{- end }}
|
||||||
|
terminationGracePeriodSeconds: 45
|
||||||
initContainers:
|
initContainers:
|
||||||
|
- name: sensor-agent-loader
|
||||||
|
image: {{ include "attune.image" (dict "root" . "image" .Values.images.agent) }}
|
||||||
|
imagePullPolicy: {{ .Values.images.agent.pullPolicy }}
|
||||||
|
command: ["cp", "/usr/local/bin/attune-sensor-agent", "/opt/attune/agent/attune-sensor-agent"]
|
||||||
|
volumeMounts:
|
||||||
|
- name: agent-bin
|
||||||
|
mountPath: /opt/attune/agent
|
||||||
- name: wait-for-schema
|
- name: wait-for-schema
|
||||||
image: postgres:16-alpine
|
image: postgres:16-alpine
|
||||||
command: ["/bin/sh", "-ec"]
|
command: ["/bin/sh", "-ec"]
|
||||||
@@ -333,6 +341,7 @@ spec:
|
|||||||
- name: sensor
|
- name: sensor
|
||||||
image: {{ include "attune.image" (dict "root" . "image" .Values.images.sensor) }}
|
image: {{ include "attune.image" (dict "root" . "image" .Values.images.sensor) }}
|
||||||
imagePullPolicy: {{ .Values.images.sensor.pullPolicy }}
|
imagePullPolicy: {{ .Values.images.sensor.pullPolicy }}
|
||||||
|
command: ["/opt/attune/agent/attune-sensor-agent"]
|
||||||
envFrom:
|
envFrom:
|
||||||
- secretRef:
|
- secretRef:
|
||||||
name: {{ include "attune.secretName" . }}
|
name: {{ include "attune.secretName" . }}
|
||||||
@@ -343,23 +352,33 @@ spec:
|
|||||||
value: {{ .Values.database.schema | quote }}
|
value: {{ .Values.database.schema | quote }}
|
||||||
- name: ATTUNE__WORKER__WORKER_TYPE
|
- name: ATTUNE__WORKER__WORKER_TYPE
|
||||||
value: container
|
value: container
|
||||||
|
- name: ATTUNE_SENSOR_RUNTIMES
|
||||||
|
value: {{ .Values.sensor.runtimes | quote }}
|
||||||
- name: ATTUNE_API_URL
|
- name: ATTUNE_API_URL
|
||||||
value: http://{{ include "attune.apiServiceName" . }}:{{ .Values.api.service.port }}
|
value: http://{{ include "attune.apiServiceName" . }}:{{ .Values.api.service.port }}
|
||||||
- name: ATTUNE_MQ_URL
|
- name: ATTUNE_MQ_URL
|
||||||
value: {{ include "attune.rabbitmqUrl" . | quote }}
|
value: {{ include "attune.rabbitmqUrl" . | quote }}
|
||||||
- name: ATTUNE_PACKS_BASE_DIR
|
- name: ATTUNE_PACKS_BASE_DIR
|
||||||
value: /opt/attune/packs
|
value: /opt/attune/packs
|
||||||
|
- name: RUST_LOG
|
||||||
|
value: {{ .Values.sensor.logLevel | quote }}
|
||||||
resources:
|
resources:
|
||||||
{{- toYaml .Values.sensor.resources | nindent 12 }}
|
{{- toYaml .Values.sensor.resources | nindent 12 }}
|
||||||
volumeMounts:
|
volumeMounts:
|
||||||
|
- name: agent-bin
|
||||||
|
mountPath: /opt/attune/agent
|
||||||
|
readOnly: true
|
||||||
- name: config
|
- name: config
|
||||||
mountPath: /opt/attune/config.yaml
|
mountPath: /opt/attune/config.yaml
|
||||||
subPath: config.yaml
|
subPath: config.yaml
|
||||||
- name: packs
|
- name: packs
|
||||||
mountPath: /opt/attune/packs
|
mountPath: /opt/attune/packs
|
||||||
|
readOnly: true
|
||||||
- name: runtime-envs
|
- name: runtime-envs
|
||||||
mountPath: /opt/attune/runtime_envs
|
mountPath: /opt/attune/runtime_envs
|
||||||
volumes:
|
volumes:
|
||||||
|
- name: agent-bin
|
||||||
|
emptyDir: {}
|
||||||
- name: config
|
- name: config
|
||||||
configMap:
|
configMap:
|
||||||
name: {{ include "attune.fullname" . }}-config
|
name: {{ include "attune.fullname" . }}-config
|
||||||
|
|||||||
@@ -108,8 +108,8 @@ images:
|
|||||||
tag: ""
|
tag: ""
|
||||||
pullPolicy: IfNotPresent
|
pullPolicy: IfNotPresent
|
||||||
sensor:
|
sensor:
|
||||||
repository: attune-sensor
|
repository: nikolaik/python-nodejs
|
||||||
tag: ""
|
tag: python3.12-nodejs22-slim
|
||||||
pullPolicy: IfNotPresent
|
pullPolicy: IfNotPresent
|
||||||
notifier:
|
notifier:
|
||||||
repository: attune-notifier
|
repository: attune-notifier
|
||||||
@@ -131,6 +131,10 @@ images:
|
|||||||
repository: attune-init-packs
|
repository: attune-init-packs
|
||||||
tag: ""
|
tag: ""
|
||||||
pullPolicy: IfNotPresent
|
pullPolicy: IfNotPresent
|
||||||
|
agent:
|
||||||
|
repository: attune-agent
|
||||||
|
tag: ""
|
||||||
|
pullPolicy: IfNotPresent
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
migrations:
|
migrations:
|
||||||
@@ -162,6 +166,8 @@ worker:
|
|||||||
|
|
||||||
sensor:
|
sensor:
|
||||||
replicaCount: 1
|
replicaCount: 1
|
||||||
|
runtimes: shell,python,node,native
|
||||||
|
logLevel: debug
|
||||||
resources: {}
|
resources: {}
|
||||||
|
|
||||||
notifier:
|
notifier:
|
||||||
@@ -191,3 +197,57 @@ web:
|
|||||||
- path: /
|
- path: /
|
||||||
pathType: Prefix
|
pathType: Prefix
|
||||||
tls: []
|
tls: []
|
||||||
|
|
||||||
|
# Agent-based workers
|
||||||
|
# These deploy the universal worker agent into any container image.
|
||||||
|
# The agent auto-detects available runtimes (python, ruby, node, etc.)
|
||||||
|
# and registers with the Attune platform.
|
||||||
|
#
|
||||||
|
# Each entry creates a separate Deployment with an init container that
|
||||||
|
# copies the statically-linked agent binary into the worker container.
|
||||||
|
#
|
||||||
|
# Supported fields per worker:
|
||||||
|
# name (required) - Unique name for this worker (used in resource names)
|
||||||
|
# image (required) - Container image with your desired runtime(s)
|
||||||
|
# replicas (optional) - Number of pod replicas (default: 1)
|
||||||
|
# runtimes (optional) - List of runtimes to expose; [] = auto-detect
|
||||||
|
# resources (optional) - Kubernetes resource requests/limits
|
||||||
|
# env (optional) - Extra environment variables (list of {name, value})
|
||||||
|
# imagePullPolicy (optional) - Pull policy for the worker image
|
||||||
|
# logLevel (optional) - RUST_LOG level (default: "info")
|
||||||
|
# runtimeClassName (optional) - Kubernetes RuntimeClass (e.g., "nvidia" for GPU)
|
||||||
|
# nodeSelector (optional) - Node selector map for pod scheduling
|
||||||
|
# tolerations (optional) - Tolerations list for pod scheduling
|
||||||
|
# stopGracePeriod (optional) - Termination grace period in seconds (default: 45)
|
||||||
|
#
|
||||||
|
# Examples:
|
||||||
|
# agentWorkers:
|
||||||
|
# - name: ruby
|
||||||
|
# image: ruby:3.3
|
||||||
|
# replicas: 2
|
||||||
|
# runtimes: [] # auto-detect
|
||||||
|
# resources: {}
|
||||||
|
#
|
||||||
|
# - name: python-gpu
|
||||||
|
# image: nvidia/cuda:12.3.1-runtime-ubuntu22.04
|
||||||
|
# replicas: 1
|
||||||
|
# runtimes: [python, shell]
|
||||||
|
# runtimeClassName: nvidia
|
||||||
|
# nodeSelector:
|
||||||
|
# gpu: "true"
|
||||||
|
# tolerations:
|
||||||
|
# - key: nvidia.com/gpu
|
||||||
|
# operator: Exists
|
||||||
|
# effect: NoSchedule
|
||||||
|
# resources:
|
||||||
|
# limits:
|
||||||
|
# nvidia.com/gpu: 1
|
||||||
|
#
|
||||||
|
# - name: custom
|
||||||
|
# image: my-org/my-custom-image:latest
|
||||||
|
# replicas: 1
|
||||||
|
# runtimes: []
|
||||||
|
# env:
|
||||||
|
# - name: MY_CUSTOM_VAR
|
||||||
|
# value: my-value
|
||||||
|
agentWorkers: []
|
||||||
|
|||||||
@@ -125,3 +125,8 @@ executor:
|
|||||||
scheduled_timeout: 120 # 2 minutes (faster feedback in dev)
|
scheduled_timeout: 120 # 2 minutes (faster feedback in dev)
|
||||||
timeout_check_interval: 30 # Check every 30 seconds
|
timeout_check_interval: 30 # Check every 30 seconds
|
||||||
enable_timeout_monitor: true
|
enable_timeout_monitor: true
|
||||||
|
|
||||||
|
# Agent binary distribution (optional - for local development)
|
||||||
|
# Binary is built via: make build-agent
|
||||||
|
# agent:
|
||||||
|
# binary_dir: ./target/x86_64-unknown-linux-musl/release
|
||||||
|
|||||||
@@ -89,6 +89,7 @@ hmac = "0.12"
|
|||||||
sha1 = "0.10"
|
sha1 = "0.10"
|
||||||
sha2 = { workspace = true }
|
sha2 = { workspace = true }
|
||||||
hex = "0.4"
|
hex = "0.4"
|
||||||
|
subtle = "2.6"
|
||||||
|
|
||||||
# OpenAPI/Swagger
|
# OpenAPI/Swagger
|
||||||
utoipa = { workspace = true, features = ["axum_extras"] }
|
utoipa = { workspace = true, features = ["axum_extras"] }
|
||||||
|
|||||||
@@ -3,7 +3,10 @@
|
|||||||
use attune_common::{
|
use attune_common::{
|
||||||
config::LdapConfig,
|
config::LdapConfig,
|
||||||
repositories::{
|
repositories::{
|
||||||
identity::{CreateIdentityInput, IdentityRepository, UpdateIdentityInput},
|
identity::{
|
||||||
|
CreateIdentityInput, IdentityRepository, IdentityRoleAssignmentRepository,
|
||||||
|
UpdateIdentityInput,
|
||||||
|
},
|
||||||
Create, Update,
|
Create, Update,
|
||||||
},
|
},
|
||||||
};
|
};
|
||||||
@@ -63,6 +66,11 @@ pub async fn authenticate(
|
|||||||
|
|
||||||
// Upsert identity in DB and issue JWT tokens
|
// Upsert identity in DB and issue JWT tokens
|
||||||
let identity = upsert_identity(state, &claims).await?;
|
let identity = upsert_identity(state, &claims).await?;
|
||||||
|
if identity.frozen {
|
||||||
|
return Err(ApiError::Forbidden(
|
||||||
|
"Identity is frozen and cannot authenticate".to_string(),
|
||||||
|
));
|
||||||
|
}
|
||||||
let access_token = generate_access_token(identity.id, &identity.login, &state.jwt_config)?;
|
let access_token = generate_access_token(identity.id, &identity.login, &state.jwt_config)?;
|
||||||
let refresh_token = generate_refresh_token(identity.id, &identity.login, &state.jwt_config)?;
|
let refresh_token = generate_refresh_token(identity.id, &identity.login, &state.jwt_config)?;
|
||||||
|
|
||||||
@@ -351,10 +359,13 @@ async fn upsert_identity(
|
|||||||
display_name,
|
display_name,
|
||||||
password_hash: None,
|
password_hash: None,
|
||||||
attributes: Some(attributes),
|
attributes: Some(attributes),
|
||||||
|
frozen: None,
|
||||||
};
|
};
|
||||||
IdentityRepository::update(&state.db, identity.id, updated)
|
let identity = IdentityRepository::update(&state.db, identity.id, updated)
|
||||||
.await
|
.await
|
||||||
.map_err(Into::into)
|
.map_err(ApiError::from)?;
|
||||||
|
sync_roles(&state.db, identity.id, "ldap", &claims.groups).await?;
|
||||||
|
Ok(identity)
|
||||||
}
|
}
|
||||||
None => {
|
None => {
|
||||||
// Avoid login collisions
|
// Avoid login collisions
|
||||||
@@ -363,7 +374,7 @@ async fn upsert_identity(
|
|||||||
None => desired_login,
|
None => desired_login,
|
||||||
};
|
};
|
||||||
|
|
||||||
IdentityRepository::create(
|
let identity = IdentityRepository::create(
|
||||||
&state.db,
|
&state.db,
|
||||||
CreateIdentityInput {
|
CreateIdentityInput {
|
||||||
login,
|
login,
|
||||||
@@ -372,11 +383,24 @@ async fn upsert_identity(
|
|||||||
attributes,
|
attributes,
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
|
.await
|
||||||
|
.map_err(ApiError::from)?;
|
||||||
|
sync_roles(&state.db, identity.id, "ldap", &claims.groups).await?;
|
||||||
|
Ok(identity)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn sync_roles(
|
||||||
|
db: &sqlx::PgPool,
|
||||||
|
identity_id: i64,
|
||||||
|
source: &str,
|
||||||
|
roles: &[String],
|
||||||
|
) -> Result<(), ApiError> {
|
||||||
|
IdentityRoleAssignmentRepository::replace_managed_roles(db, identity_id, source, roles)
|
||||||
.await
|
.await
|
||||||
.map_err(Into::into)
|
.map_err(Into::into)
|
||||||
}
|
}
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Derive the login name from LDAP claims.
|
/// Derive the login name from LDAP claims.
|
||||||
fn derive_login(claims: &LdapUserClaims) -> String {
|
fn derive_login(claims: &LdapUserClaims) -> String {
|
||||||
|
|||||||
@@ -3,7 +3,10 @@
|
|||||||
use attune_common::{
|
use attune_common::{
|
||||||
config::OidcConfig,
|
config::OidcConfig,
|
||||||
repositories::{
|
repositories::{
|
||||||
identity::{CreateIdentityInput, IdentityRepository, UpdateIdentityInput},
|
identity::{
|
||||||
|
CreateIdentityInput, IdentityRepository, IdentityRoleAssignmentRepository,
|
||||||
|
UpdateIdentityInput,
|
||||||
|
},
|
||||||
Create, Update,
|
Create, Update,
|
||||||
},
|
},
|
||||||
};
|
};
|
||||||
@@ -282,6 +285,11 @@ pub async fn handle_callback(
|
|||||||
}
|
}
|
||||||
|
|
||||||
let identity = upsert_identity(state, &oidc_claims).await?;
|
let identity = upsert_identity(state, &oidc_claims).await?;
|
||||||
|
if identity.frozen {
|
||||||
|
return Err(ApiError::Forbidden(
|
||||||
|
"Identity is frozen and cannot authenticate".to_string(),
|
||||||
|
));
|
||||||
|
}
|
||||||
let access_token = generate_access_token(identity.id, &identity.login, &state.jwt_config)?;
|
let access_token = generate_access_token(identity.id, &identity.login, &state.jwt_config)?;
|
||||||
let refresh_token = generate_refresh_token(identity.id, &identity.login, &state.jwt_config)?;
|
let refresh_token = generate_refresh_token(identity.id, &identity.login, &state.jwt_config)?;
|
||||||
|
|
||||||
@@ -511,10 +519,13 @@ async fn upsert_identity(
|
|||||||
display_name,
|
display_name,
|
||||||
password_hash: None,
|
password_hash: None,
|
||||||
attributes: Some(attributes.clone()),
|
attributes: Some(attributes.clone()),
|
||||||
|
frozen: None,
|
||||||
};
|
};
|
||||||
IdentityRepository::update(&state.db, identity.id, updated)
|
let identity = IdentityRepository::update(&state.db, identity.id, updated)
|
||||||
.await
|
.await
|
||||||
.map_err(Into::into)
|
.map_err(ApiError::from)?;
|
||||||
|
sync_roles(&state.db, identity.id, "oidc", &oidc_claims.groups).await?;
|
||||||
|
Ok(identity)
|
||||||
}
|
}
|
||||||
None => {
|
None => {
|
||||||
let login = match IdentityRepository::find_by_login(&state.db, &desired_login).await? {
|
let login = match IdentityRepository::find_by_login(&state.db, &desired_login).await? {
|
||||||
@@ -522,7 +533,7 @@ async fn upsert_identity(
|
|||||||
None => desired_login,
|
None => desired_login,
|
||||||
};
|
};
|
||||||
|
|
||||||
IdentityRepository::create(
|
let identity = IdentityRepository::create(
|
||||||
&state.db,
|
&state.db,
|
||||||
CreateIdentityInput {
|
CreateIdentityInput {
|
||||||
login,
|
login,
|
||||||
@@ -531,11 +542,24 @@ async fn upsert_identity(
|
|||||||
attributes,
|
attributes,
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
|
.await
|
||||||
|
.map_err(ApiError::from)?;
|
||||||
|
sync_roles(&state.db, identity.id, "oidc", &oidc_claims.groups).await?;
|
||||||
|
Ok(identity)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn sync_roles(
|
||||||
|
db: &sqlx::PgPool,
|
||||||
|
identity_id: i64,
|
||||||
|
source: &str,
|
||||||
|
roles: &[String],
|
||||||
|
) -> Result<(), ApiError> {
|
||||||
|
IdentityRoleAssignmentRepository::replace_managed_roles(db, identity_id, source, roles)
|
||||||
.await
|
.await
|
||||||
.map_err(Into::into)
|
.map_err(Into::into)
|
||||||
}
|
}
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn derive_login(oidc_claims: &OidcIdentityClaims) -> String {
|
fn derive_login(oidc_claims: &OidcIdentityClaims) -> String {
|
||||||
oidc_claims
|
oidc_claims
|
||||||
|
|||||||
@@ -10,7 +10,7 @@ use crate::{
|
|||||||
use attune_common::{
|
use attune_common::{
|
||||||
rbac::{Action, AuthorizationContext, Grant, Resource},
|
rbac::{Action, AuthorizationContext, Grant, Resource},
|
||||||
repositories::{
|
repositories::{
|
||||||
identity::{IdentityRepository, PermissionSetRepository},
|
identity::{IdentityRepository, IdentityRoleAssignmentRepository, PermissionSetRepository},
|
||||||
FindById,
|
FindById,
|
||||||
},
|
},
|
||||||
};
|
};
|
||||||
@@ -95,8 +95,16 @@ impl AuthorizationService {
|
|||||||
}
|
}
|
||||||
|
|
||||||
async fn load_effective_grants(&self, identity_id: i64) -> Result<Vec<Grant>, ApiError> {
|
async fn load_effective_grants(&self, identity_id: i64) -> Result<Vec<Grant>, ApiError> {
|
||||||
let permission_sets =
|
let mut permission_sets =
|
||||||
PermissionSetRepository::find_by_identity(&self.db, identity_id).await?;
|
PermissionSetRepository::find_by_identity(&self.db, identity_id).await?;
|
||||||
|
let roles =
|
||||||
|
IdentityRoleAssignmentRepository::find_role_names_by_identity(&self.db, identity_id)
|
||||||
|
.await?;
|
||||||
|
let role_permission_sets = PermissionSetRepository::find_by_roles(&self.db, &roles).await?;
|
||||||
|
permission_sets.extend(role_permission_sets);
|
||||||
|
|
||||||
|
let mut seen_permission_sets = std::collections::HashSet::new();
|
||||||
|
permission_sets.retain(|permission_set| seen_permission_sets.insert(permission_set.id));
|
||||||
|
|
||||||
let mut grants = Vec::new();
|
let mut grants = Vec::new();
|
||||||
for permission_set in permission_sets {
|
for permission_set in permission_sets {
|
||||||
@@ -126,10 +134,6 @@ fn resource_name(resource: Resource) -> &'static str {
|
|||||||
Resource::Inquiries => "inquiries",
|
Resource::Inquiries => "inquiries",
|
||||||
Resource::Keys => "keys",
|
Resource::Keys => "keys",
|
||||||
Resource::Artifacts => "artifacts",
|
Resource::Artifacts => "artifacts",
|
||||||
Resource::Workflows => "workflows",
|
|
||||||
Resource::Webhooks => "webhooks",
|
|
||||||
Resource::Analytics => "analytics",
|
|
||||||
Resource::History => "history",
|
|
||||||
Resource::Identities => "identities",
|
Resource::Identities => "identities",
|
||||||
Resource::Permissions => "permissions",
|
Resource::Permissions => "permissions",
|
||||||
}
|
}
|
||||||
@@ -145,5 +149,6 @@ fn action_name(action: Action) -> &'static str {
|
|||||||
Action::Cancel => "cancel",
|
Action::Cancel => "cancel",
|
||||||
Action::Respond => "respond",
|
Action::Respond => "respond",
|
||||||
Action::Manage => "manage",
|
Action::Manage => "manage",
|
||||||
|
Action::Decrypt => "decrypt",
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -25,9 +25,8 @@ pub struct CreateActionRequest {
|
|||||||
pub label: String,
|
pub label: String,
|
||||||
|
|
||||||
/// Action description
|
/// Action description
|
||||||
#[validate(length(min = 1))]
|
|
||||||
#[schema(example = "Posts a message to a Slack channel")]
|
#[schema(example = "Posts a message to a Slack channel")]
|
||||||
pub description: String,
|
pub description: Option<String>,
|
||||||
|
|
||||||
/// Entry point for action execution (e.g., path to script, function name)
|
/// Entry point for action execution (e.g., path to script, function name)
|
||||||
#[validate(length(min = 1, max = 1024))]
|
#[validate(length(min = 1, max = 1024))]
|
||||||
@@ -63,7 +62,6 @@ pub struct UpdateActionRequest {
|
|||||||
pub label: Option<String>,
|
pub label: Option<String>,
|
||||||
|
|
||||||
/// Action description
|
/// Action description
|
||||||
#[validate(length(min = 1))]
|
|
||||||
#[schema(example = "Posts a message to a Slack channel with enhanced features")]
|
#[schema(example = "Posts a message to a Slack channel with enhanced features")]
|
||||||
pub description: Option<String>,
|
pub description: Option<String>,
|
||||||
|
|
||||||
@@ -121,7 +119,7 @@ pub struct ActionResponse {
|
|||||||
|
|
||||||
/// Action description
|
/// Action description
|
||||||
#[schema(example = "Posts a message to a Slack channel")]
|
#[schema(example = "Posts a message to a Slack channel")]
|
||||||
pub description: String,
|
pub description: Option<String>,
|
||||||
|
|
||||||
/// Entry point
|
/// Entry point
|
||||||
#[schema(example = "/actions/slack/post_message.py")]
|
#[schema(example = "/actions/slack/post_message.py")]
|
||||||
@@ -183,7 +181,7 @@ pub struct ActionSummary {
|
|||||||
|
|
||||||
/// Action description
|
/// Action description
|
||||||
#[schema(example = "Posts a message to a Slack channel")]
|
#[schema(example = "Posts a message to a Slack channel")]
|
||||||
pub description: String,
|
pub description: Option<String>,
|
||||||
|
|
||||||
/// Entry point
|
/// Entry point
|
||||||
#[schema(example = "/actions/slack/post_message.py")]
|
#[schema(example = "/actions/slack/post_message.py")]
|
||||||
@@ -321,7 +319,7 @@ mod tests {
|
|||||||
r#ref: "".to_string(), // Invalid: empty
|
r#ref: "".to_string(), // Invalid: empty
|
||||||
pack_ref: "test-pack".to_string(),
|
pack_ref: "test-pack".to_string(),
|
||||||
label: "Test Action".to_string(),
|
label: "Test Action".to_string(),
|
||||||
description: "Test description".to_string(),
|
description: Some("Test description".to_string()),
|
||||||
entrypoint: "/actions/test.py".to_string(),
|
entrypoint: "/actions/test.py".to_string(),
|
||||||
runtime: None,
|
runtime: None,
|
||||||
runtime_version_constraint: None,
|
runtime_version_constraint: None,
|
||||||
@@ -338,7 +336,7 @@ mod tests {
|
|||||||
r#ref: "test.action".to_string(),
|
r#ref: "test.action".to_string(),
|
||||||
pack_ref: "test-pack".to_string(),
|
pack_ref: "test-pack".to_string(),
|
||||||
label: "Test Action".to_string(),
|
label: "Test Action".to_string(),
|
||||||
description: "Test description".to_string(),
|
description: Some("Test description".to_string()),
|
||||||
entrypoint: "/actions/test.py".to_string(),
|
entrypoint: "/actions/test.py".to_string(),
|
||||||
runtime: None,
|
runtime: None,
|
||||||
runtime_version_constraint: None,
|
runtime_version_constraint: None,
|
||||||
|
|||||||
@@ -51,9 +51,10 @@ pub use inquiry::{
|
|||||||
pub use key::{CreateKeyRequest, KeyQueryParams, KeyResponse, KeySummary, UpdateKeyRequest};
|
pub use key::{CreateKeyRequest, KeyQueryParams, KeyResponse, KeySummary, UpdateKeyRequest};
|
||||||
pub use pack::{CreatePackRequest, PackResponse, PackSummary, UpdatePackRequest};
|
pub use pack::{CreatePackRequest, PackResponse, PackSummary, UpdatePackRequest};
|
||||||
pub use permission::{
|
pub use permission::{
|
||||||
CreateIdentityRequest, CreatePermissionAssignmentRequest, IdentityResponse, IdentitySummary,
|
CreateIdentityRequest, CreateIdentityRoleAssignmentRequest, CreatePermissionAssignmentRequest,
|
||||||
PermissionAssignmentResponse, PermissionSetQueryParams, PermissionSetSummary,
|
CreatePermissionSetRoleAssignmentRequest, IdentityResponse, IdentityRoleAssignmentResponse,
|
||||||
UpdateIdentityRequest,
|
IdentitySummary, PermissionAssignmentResponse, PermissionSetQueryParams,
|
||||||
|
PermissionSetRoleAssignmentResponse, PermissionSetSummary, UpdateIdentityRequest,
|
||||||
};
|
};
|
||||||
pub use rule::{CreateRuleRequest, RuleResponse, RuleSummary, UpdateRuleRequest};
|
pub use rule::{CreateRuleRequest, RuleResponse, RuleSummary, UpdateRuleRequest};
|
||||||
pub use runtime::{CreateRuntimeRequest, RuntimeResponse, RuntimeSummary, UpdateRuntimeRequest};
|
pub use runtime::{CreateRuntimeRequest, RuntimeResponse, RuntimeSummary, UpdateRuntimeRequest};
|
||||||
|
|||||||
@@ -14,10 +14,32 @@ pub struct IdentitySummary {
|
|||||||
pub id: i64,
|
pub id: i64,
|
||||||
pub login: String,
|
pub login: String,
|
||||||
pub display_name: Option<String>,
|
pub display_name: Option<String>,
|
||||||
|
pub frozen: bool,
|
||||||
pub attributes: JsonValue,
|
pub attributes: JsonValue,
|
||||||
|
pub roles: Vec<String>,
|
||||||
}
|
}
|
||||||
|
|
||||||
pub type IdentityResponse = IdentitySummary;
|
#[derive(Debug, Clone, Serialize, ToSchema)]
|
||||||
|
pub struct IdentityRoleAssignmentResponse {
|
||||||
|
pub id: i64,
|
||||||
|
pub identity_id: i64,
|
||||||
|
pub role: String,
|
||||||
|
pub source: String,
|
||||||
|
pub managed: bool,
|
||||||
|
pub created: chrono::DateTime<chrono::Utc>,
|
||||||
|
pub updated: chrono::DateTime<chrono::Utc>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Serialize, ToSchema)]
|
||||||
|
pub struct IdentityResponse {
|
||||||
|
pub id: i64,
|
||||||
|
pub login: String,
|
||||||
|
pub display_name: Option<String>,
|
||||||
|
pub frozen: bool,
|
||||||
|
pub attributes: JsonValue,
|
||||||
|
pub roles: Vec<IdentityRoleAssignmentResponse>,
|
||||||
|
pub direct_permissions: Vec<PermissionAssignmentResponse>,
|
||||||
|
}
|
||||||
|
|
||||||
#[derive(Debug, Clone, Serialize, ToSchema)]
|
#[derive(Debug, Clone, Serialize, ToSchema)]
|
||||||
pub struct PermissionSetSummary {
|
pub struct PermissionSetSummary {
|
||||||
@@ -27,6 +49,7 @@ pub struct PermissionSetSummary {
|
|||||||
pub label: Option<String>,
|
pub label: Option<String>,
|
||||||
pub description: Option<String>,
|
pub description: Option<String>,
|
||||||
pub grants: JsonValue,
|
pub grants: JsonValue,
|
||||||
|
pub roles: Vec<PermissionSetRoleAssignmentResponse>,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Clone, Serialize, ToSchema)]
|
#[derive(Debug, Clone, Serialize, ToSchema)]
|
||||||
@@ -38,6 +61,15 @@ pub struct PermissionAssignmentResponse {
|
|||||||
pub created: chrono::DateTime<chrono::Utc>,
|
pub created: chrono::DateTime<chrono::Utc>,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Serialize, ToSchema)]
|
||||||
|
pub struct PermissionSetRoleAssignmentResponse {
|
||||||
|
pub id: i64,
|
||||||
|
pub permission_set_id: i64,
|
||||||
|
pub permission_set_ref: Option<String>,
|
||||||
|
pub role: String,
|
||||||
|
pub created: chrono::DateTime<chrono::Utc>,
|
||||||
|
}
|
||||||
|
|
||||||
#[derive(Debug, Clone, Deserialize, ToSchema)]
|
#[derive(Debug, Clone, Deserialize, ToSchema)]
|
||||||
pub struct CreatePermissionAssignmentRequest {
|
pub struct CreatePermissionAssignmentRequest {
|
||||||
pub identity_id: Option<i64>,
|
pub identity_id: Option<i64>,
|
||||||
@@ -45,6 +77,18 @@ pub struct CreatePermissionAssignmentRequest {
|
|||||||
pub permission_set_ref: String,
|
pub permission_set_ref: String,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Deserialize, Validate, ToSchema)]
|
||||||
|
pub struct CreateIdentityRoleAssignmentRequest {
|
||||||
|
#[validate(length(min = 1, max = 255))]
|
||||||
|
pub role: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Deserialize, Validate, ToSchema)]
|
||||||
|
pub struct CreatePermissionSetRoleAssignmentRequest {
|
||||||
|
#[validate(length(min = 1, max = 255))]
|
||||||
|
pub role: String,
|
||||||
|
}
|
||||||
|
|
||||||
#[derive(Debug, Clone, Deserialize, Validate, ToSchema)]
|
#[derive(Debug, Clone, Deserialize, Validate, ToSchema)]
|
||||||
pub struct CreateIdentityRequest {
|
pub struct CreateIdentityRequest {
|
||||||
#[validate(length(min = 3, max = 255))]
|
#[validate(length(min = 3, max = 255))]
|
||||||
@@ -62,4 +106,5 @@ pub struct UpdateIdentityRequest {
|
|||||||
pub display_name: Option<String>,
|
pub display_name: Option<String>,
|
||||||
pub password: Option<String>,
|
pub password: Option<String>,
|
||||||
pub attributes: Option<JsonValue>,
|
pub attributes: Option<JsonValue>,
|
||||||
|
pub frozen: Option<bool>,
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -25,9 +25,8 @@ pub struct CreateRuleRequest {
|
|||||||
pub label: String,
|
pub label: String,
|
||||||
|
|
||||||
/// Rule description
|
/// Rule description
|
||||||
#[validate(length(min = 1))]
|
|
||||||
#[schema(example = "Send Slack notification when an error occurs")]
|
#[schema(example = "Send Slack notification when an error occurs")]
|
||||||
pub description: String,
|
pub description: Option<String>,
|
||||||
|
|
||||||
/// Action reference to execute when rule matches
|
/// Action reference to execute when rule matches
|
||||||
#[validate(length(min = 1, max = 255))]
|
#[validate(length(min = 1, max = 255))]
|
||||||
@@ -69,7 +68,6 @@ pub struct UpdateRuleRequest {
|
|||||||
pub label: Option<String>,
|
pub label: Option<String>,
|
||||||
|
|
||||||
/// Rule description
|
/// Rule description
|
||||||
#[validate(length(min = 1))]
|
|
||||||
#[schema(example = "Enhanced error notification with filtering")]
|
#[schema(example = "Enhanced error notification with filtering")]
|
||||||
pub description: Option<String>,
|
pub description: Option<String>,
|
||||||
|
|
||||||
@@ -115,7 +113,7 @@ pub struct RuleResponse {
|
|||||||
|
|
||||||
/// Rule description
|
/// Rule description
|
||||||
#[schema(example = "Send Slack notification when an error occurs")]
|
#[schema(example = "Send Slack notification when an error occurs")]
|
||||||
pub description: String,
|
pub description: Option<String>,
|
||||||
|
|
||||||
/// Action ID (null if the referenced action has been deleted)
|
/// Action ID (null if the referenced action has been deleted)
|
||||||
#[schema(example = 1)]
|
#[schema(example = 1)]
|
||||||
@@ -183,7 +181,7 @@ pub struct RuleSummary {
|
|||||||
|
|
||||||
/// Rule description
|
/// Rule description
|
||||||
#[schema(example = "Send Slack notification when an error occurs")]
|
#[schema(example = "Send Slack notification when an error occurs")]
|
||||||
pub description: String,
|
pub description: Option<String>,
|
||||||
|
|
||||||
/// Action reference
|
/// Action reference
|
||||||
#[schema(example = "slack.post_message")]
|
#[schema(example = "slack.post_message")]
|
||||||
@@ -297,7 +295,7 @@ mod tests {
|
|||||||
r#ref: "".to_string(), // Invalid: empty
|
r#ref: "".to_string(), // Invalid: empty
|
||||||
pack_ref: "test-pack".to_string(),
|
pack_ref: "test-pack".to_string(),
|
||||||
label: "Test Rule".to_string(),
|
label: "Test Rule".to_string(),
|
||||||
description: "Test description".to_string(),
|
description: Some("Test description".to_string()),
|
||||||
action_ref: "test.action".to_string(),
|
action_ref: "test.action".to_string(),
|
||||||
trigger_ref: "test.trigger".to_string(),
|
trigger_ref: "test.trigger".to_string(),
|
||||||
conditions: default_empty_object(),
|
conditions: default_empty_object(),
|
||||||
@@ -315,7 +313,7 @@ mod tests {
|
|||||||
r#ref: "test.rule".to_string(),
|
r#ref: "test.rule".to_string(),
|
||||||
pack_ref: "test-pack".to_string(),
|
pack_ref: "test-pack".to_string(),
|
||||||
label: "Test Rule".to_string(),
|
label: "Test Rule".to_string(),
|
||||||
description: "Test description".to_string(),
|
description: Some("Test description".to_string()),
|
||||||
action_ref: "test.action".to_string(),
|
action_ref: "test.action".to_string(),
|
||||||
trigger_ref: "test.trigger".to_string(),
|
trigger_ref: "test.trigger".to_string(),
|
||||||
conditions: serde_json::json!({
|
conditions: serde_json::json!({
|
||||||
|
|||||||
@@ -203,9 +203,8 @@ pub struct CreateSensorRequest {
|
|||||||
pub label: String,
|
pub label: String,
|
||||||
|
|
||||||
/// Sensor description
|
/// Sensor description
|
||||||
#[validate(length(min = 1))]
|
|
||||||
#[schema(example = "Monitors CPU usage and generates events")]
|
#[schema(example = "Monitors CPU usage and generates events")]
|
||||||
pub description: String,
|
pub description: Option<String>,
|
||||||
|
|
||||||
/// Entry point for sensor execution (e.g., path to script, function name)
|
/// Entry point for sensor execution (e.g., path to script, function name)
|
||||||
#[validate(length(min = 1, max = 1024))]
|
#[validate(length(min = 1, max = 1024))]
|
||||||
@@ -247,7 +246,6 @@ pub struct UpdateSensorRequest {
|
|||||||
pub label: Option<String>,
|
pub label: Option<String>,
|
||||||
|
|
||||||
/// Sensor description
|
/// Sensor description
|
||||||
#[validate(length(min = 1))]
|
|
||||||
#[schema(example = "Enhanced CPU monitoring with alerts")]
|
#[schema(example = "Enhanced CPU monitoring with alerts")]
|
||||||
pub description: Option<String>,
|
pub description: Option<String>,
|
||||||
|
|
||||||
@@ -297,7 +295,7 @@ pub struct SensorResponse {
|
|||||||
|
|
||||||
/// Sensor description
|
/// Sensor description
|
||||||
#[schema(example = "Monitors CPU usage and generates events")]
|
#[schema(example = "Monitors CPU usage and generates events")]
|
||||||
pub description: String,
|
pub description: Option<String>,
|
||||||
|
|
||||||
/// Entry point
|
/// Entry point
|
||||||
#[schema(example = "/sensors/monitoring/cpu_monitor.py")]
|
#[schema(example = "/sensors/monitoring/cpu_monitor.py")]
|
||||||
@@ -357,7 +355,7 @@ pub struct SensorSummary {
|
|||||||
|
|
||||||
/// Sensor description
|
/// Sensor description
|
||||||
#[schema(example = "Monitors CPU usage and generates events")]
|
#[schema(example = "Monitors CPU usage and generates events")]
|
||||||
pub description: String,
|
pub description: Option<String>,
|
||||||
|
|
||||||
/// Trigger reference
|
/// Trigger reference
|
||||||
#[schema(example = "monitoring.cpu_threshold")]
|
#[schema(example = "monitoring.cpu_threshold")]
|
||||||
@@ -499,7 +497,7 @@ mod tests {
|
|||||||
r#ref: "test.sensor".to_string(),
|
r#ref: "test.sensor".to_string(),
|
||||||
pack_ref: "test-pack".to_string(),
|
pack_ref: "test-pack".to_string(),
|
||||||
label: "Test Sensor".to_string(),
|
label: "Test Sensor".to_string(),
|
||||||
description: "Test description".to_string(),
|
description: Some("Test description".to_string()),
|
||||||
entrypoint: "/sensors/test.py".to_string(),
|
entrypoint: "/sensors/test.py".to_string(),
|
||||||
runtime_ref: "python3".to_string(),
|
runtime_ref: "python3".to_string(),
|
||||||
trigger_ref: "test.trigger".to_string(),
|
trigger_ref: "test.trigger".to_string(),
|
||||||
|
|||||||
@@ -27,8 +27,11 @@ use crate::dto::{
|
|||||||
UpdatePackRequest, WorkflowSyncResult,
|
UpdatePackRequest, WorkflowSyncResult,
|
||||||
},
|
},
|
||||||
permission::{
|
permission::{
|
||||||
CreateIdentityRequest, CreatePermissionAssignmentRequest, IdentityResponse,
|
CreateIdentityRequest, CreateIdentityRoleAssignmentRequest,
|
||||||
IdentitySummary, PermissionAssignmentResponse, PermissionSetSummary, UpdateIdentityRequest,
|
CreatePermissionAssignmentRequest, CreatePermissionSetRoleAssignmentRequest,
|
||||||
|
IdentityResponse, IdentityRoleAssignmentResponse, IdentitySummary,
|
||||||
|
PermissionAssignmentResponse, PermissionSetRoleAssignmentResponse, PermissionSetSummary,
|
||||||
|
UpdateIdentityRequest,
|
||||||
},
|
},
|
||||||
rule::{CreateRuleRequest, RuleResponse, RuleSummary, UpdateRuleRequest},
|
rule::{CreateRuleRequest, RuleResponse, RuleSummary, UpdateRuleRequest},
|
||||||
runtime::{CreateRuntimeRequest, RuntimeResponse, RuntimeSummary, UpdateRuntimeRequest},
|
runtime::{CreateRuntimeRequest, RuntimeResponse, RuntimeSummary, UpdateRuntimeRequest},
|
||||||
@@ -185,6 +188,12 @@ use crate::dto::{
|
|||||||
crate::routes::permissions::list_identity_permissions,
|
crate::routes::permissions::list_identity_permissions,
|
||||||
crate::routes::permissions::create_permission_assignment,
|
crate::routes::permissions::create_permission_assignment,
|
||||||
crate::routes::permissions::delete_permission_assignment,
|
crate::routes::permissions::delete_permission_assignment,
|
||||||
|
crate::routes::permissions::create_identity_role_assignment,
|
||||||
|
crate::routes::permissions::delete_identity_role_assignment,
|
||||||
|
crate::routes::permissions::create_permission_set_role_assignment,
|
||||||
|
crate::routes::permissions::delete_permission_set_role_assignment,
|
||||||
|
crate::routes::permissions::freeze_identity,
|
||||||
|
crate::routes::permissions::unfreeze_identity,
|
||||||
|
|
||||||
// Workflows
|
// Workflows
|
||||||
crate::routes::workflows::list_workflows,
|
crate::routes::workflows::list_workflows,
|
||||||
@@ -199,6 +208,10 @@ use crate::dto::{
|
|||||||
crate::routes::webhooks::disable_webhook,
|
crate::routes::webhooks::disable_webhook,
|
||||||
crate::routes::webhooks::regenerate_webhook_key,
|
crate::routes::webhooks::regenerate_webhook_key,
|
||||||
crate::routes::webhooks::receive_webhook,
|
crate::routes::webhooks::receive_webhook,
|
||||||
|
|
||||||
|
// Agent
|
||||||
|
crate::routes::agent::download_agent_binary,
|
||||||
|
crate::routes::agent::agent_info,
|
||||||
),
|
),
|
||||||
components(
|
components(
|
||||||
schemas(
|
schemas(
|
||||||
@@ -273,6 +286,10 @@ use crate::dto::{
|
|||||||
PermissionSetSummary,
|
PermissionSetSummary,
|
||||||
PermissionAssignmentResponse,
|
PermissionAssignmentResponse,
|
||||||
CreatePermissionAssignmentRequest,
|
CreatePermissionAssignmentRequest,
|
||||||
|
CreateIdentityRoleAssignmentRequest,
|
||||||
|
IdentityRoleAssignmentResponse,
|
||||||
|
CreatePermissionSetRoleAssignmentRequest,
|
||||||
|
PermissionSetRoleAssignmentResponse,
|
||||||
|
|
||||||
// Runtime DTOs
|
// Runtime DTOs
|
||||||
CreateRuntimeRequest,
|
CreateRuntimeRequest,
|
||||||
@@ -341,6 +358,10 @@ use crate::dto::{
|
|||||||
WebhookReceiverRequest,
|
WebhookReceiverRequest,
|
||||||
WebhookReceiverResponse,
|
WebhookReceiverResponse,
|
||||||
ApiResponse<WebhookReceiverResponse>,
|
ApiResponse<WebhookReceiverResponse>,
|
||||||
|
|
||||||
|
// Agent DTOs
|
||||||
|
crate::routes::agent::AgentBinaryInfo,
|
||||||
|
crate::routes::agent::AgentArchInfo,
|
||||||
)
|
)
|
||||||
),
|
),
|
||||||
modifiers(&SecurityAddon),
|
modifiers(&SecurityAddon),
|
||||||
@@ -359,6 +380,7 @@ use crate::dto::{
|
|||||||
(name = "secrets", description = "Secret management endpoints"),
|
(name = "secrets", description = "Secret management endpoints"),
|
||||||
(name = "workflows", description = "Workflow management endpoints"),
|
(name = "workflows", description = "Workflow management endpoints"),
|
||||||
(name = "webhooks", description = "Webhook management and receiver endpoints"),
|
(name = "webhooks", description = "Webhook management and receiver endpoints"),
|
||||||
|
(name = "agent", description = "Agent binary distribution endpoints"),
|
||||||
)
|
)
|
||||||
)]
|
)]
|
||||||
pub struct ApiDoc;
|
pub struct ApiDoc;
|
||||||
@@ -441,14 +463,14 @@ mod tests {
|
|||||||
// We have 57 unique paths with 81 total operations (HTTP methods)
|
// We have 57 unique paths with 81 total operations (HTTP methods)
|
||||||
// This test ensures we don't accidentally remove endpoints
|
// This test ensures we don't accidentally remove endpoints
|
||||||
assert!(
|
assert!(
|
||||||
path_count >= 57,
|
path_count >= 59,
|
||||||
"Expected at least 57 unique API paths, found {}",
|
"Expected at least 59 unique API paths, found {}",
|
||||||
path_count
|
path_count
|
||||||
);
|
);
|
||||||
|
|
||||||
assert!(
|
assert!(
|
||||||
operation_count >= 81,
|
operation_count >= 83,
|
||||||
"Expected at least 81 API operations, found {}",
|
"Expected at least 83 API operations, found {}",
|
||||||
operation_count
|
operation_count
|
||||||
);
|
);
|
||||||
|
|
||||||
|
|||||||
@@ -277,7 +277,7 @@ pub async fn update_action(
|
|||||||
// Create update input
|
// Create update input
|
||||||
let update_input = UpdateActionInput {
|
let update_input = UpdateActionInput {
|
||||||
label: request.label,
|
label: request.label,
|
||||||
description: request.description,
|
description: request.description.map(Patch::Set),
|
||||||
entrypoint: request.entrypoint,
|
entrypoint: request.entrypoint,
|
||||||
runtime: request.runtime,
|
runtime: request.runtime,
|
||||||
runtime_version_constraint: request.runtime_version_constraint.map(|patch| match patch {
|
runtime_version_constraint: request.runtime_version_constraint.map(|patch| match patch {
|
||||||
|
|||||||
482
crates/api/src/routes/agent.rs
Normal file
482
crates/api/src/routes/agent.rs
Normal file
@@ -0,0 +1,482 @@
|
|||||||
|
//! Agent binary download endpoints
|
||||||
|
//!
|
||||||
|
//! Provides endpoints for downloading the attune-agent binary for injection
|
||||||
|
//! into arbitrary containers. This supports deployments where shared Docker
|
||||||
|
//! volumes are impractical (Kubernetes, ECS, remote Docker hosts).
|
||||||
|
|
||||||
|
use axum::{
|
||||||
|
body::Body,
|
||||||
|
extract::{Query, State},
|
||||||
|
http::{header, HeaderMap, StatusCode},
|
||||||
|
response::IntoResponse,
|
||||||
|
routing::get,
|
||||||
|
Json, Router,
|
||||||
|
};
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
use std::sync::Arc;
|
||||||
|
use subtle::ConstantTimeEq;
|
||||||
|
use tokio::fs;
|
||||||
|
use tokio_util::io::ReaderStream;
|
||||||
|
use utoipa::{IntoParams, ToSchema};
|
||||||
|
|
||||||
|
use crate::state::AppState;
|
||||||
|
|
||||||
|
/// Query parameters for the binary download endpoint
|
||||||
|
#[derive(Debug, Deserialize, IntoParams)]
|
||||||
|
pub struct BinaryDownloadParams {
|
||||||
|
/// Target architecture (x86_64, aarch64). Defaults to x86_64.
|
||||||
|
#[param(example = "x86_64")]
|
||||||
|
pub arch: Option<String>,
|
||||||
|
/// Optional bootstrap token for authentication
|
||||||
|
pub token: Option<String>,
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Agent binary metadata
|
||||||
|
#[derive(Debug, Serialize, ToSchema)]
|
||||||
|
pub struct AgentBinaryInfo {
|
||||||
|
/// Available architectures
|
||||||
|
pub architectures: Vec<AgentArchInfo>,
|
||||||
|
/// Agent version (from build)
|
||||||
|
pub version: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Per-architecture binary info
|
||||||
|
#[derive(Debug, Serialize, ToSchema)]
|
||||||
|
pub struct AgentArchInfo {
|
||||||
|
/// Architecture name
|
||||||
|
pub arch: String,
|
||||||
|
/// Binary size in bytes
|
||||||
|
pub size_bytes: u64,
|
||||||
|
/// Whether this binary is available
|
||||||
|
pub available: bool,
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Validate that the architecture name is safe (no path traversal) and normalize it.
|
||||||
|
fn validate_arch(arch: &str) -> Result<&str, (StatusCode, Json<serde_json::Value>)> {
|
||||||
|
match arch {
|
||||||
|
"x86_64" | "aarch64" => Ok(arch),
|
||||||
|
// Accept arm64 as an alias for aarch64
|
||||||
|
"arm64" => Ok("aarch64"),
|
||||||
|
_ => Err((
|
||||||
|
StatusCode::BAD_REQUEST,
|
||||||
|
Json(serde_json::json!({
|
||||||
|
"error": "Invalid architecture",
|
||||||
|
"message": format!("Unsupported architecture '{}'. Supported: x86_64, aarch64", arch),
|
||||||
|
})),
|
||||||
|
)),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Validate bootstrap token if configured.
|
||||||
|
///
|
||||||
|
/// If the agent config has a `bootstrap_token` set, the request must provide it
|
||||||
|
/// via the `X-Agent-Token` header or the `token` query parameter. If no token
|
||||||
|
/// is configured, access is unrestricted.
|
||||||
|
fn validate_token(
|
||||||
|
config: &attune_common::config::Config,
|
||||||
|
headers: &HeaderMap,
|
||||||
|
query_token: &Option<String>,
|
||||||
|
) -> Result<(), (StatusCode, Json<serde_json::Value>)> {
|
||||||
|
let expected_token = config
|
||||||
|
.agent
|
||||||
|
.as_ref()
|
||||||
|
.and_then(|ac| ac.bootstrap_token.as_ref());
|
||||||
|
|
||||||
|
let expected_token = match expected_token {
|
||||||
|
Some(t) => t,
|
||||||
|
None => {
|
||||||
|
use std::sync::Once;
|
||||||
|
static WARN_ONCE: Once = Once::new();
|
||||||
|
WARN_ONCE.call_once(|| {
|
||||||
|
tracing::warn!(
|
||||||
|
"Agent binary download endpoint has no bootstrap_token configured. \
|
||||||
|
Anyone with network access to the API can download the agent binary. \
|
||||||
|
Set agent.bootstrap_token in config to restrict access."
|
||||||
|
);
|
||||||
|
});
|
||||||
|
return Ok(());
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
// Check X-Agent-Token header first, then query param
|
||||||
|
let provided_token = headers
|
||||||
|
.get("x-agent-token")
|
||||||
|
.and_then(|v| v.to_str().ok())
|
||||||
|
.map(|s| s.to_string())
|
||||||
|
.or_else(|| query_token.clone());
|
||||||
|
|
||||||
|
match provided_token {
|
||||||
|
Some(ref t) if bool::from(t.as_bytes().ct_eq(expected_token.as_bytes())) => Ok(()),
|
||||||
|
Some(_) => Err((
|
||||||
|
StatusCode::UNAUTHORIZED,
|
||||||
|
Json(serde_json::json!({
|
||||||
|
"error": "Invalid token",
|
||||||
|
"message": "The provided bootstrap token is invalid",
|
||||||
|
})),
|
||||||
|
)),
|
||||||
|
None => Err((
|
||||||
|
StatusCode::UNAUTHORIZED,
|
||||||
|
Json(serde_json::json!({
|
||||||
|
"error": "Token required",
|
||||||
|
"message": "A bootstrap token is required. Provide via X-Agent-Token header or token query parameter.",
|
||||||
|
})),
|
||||||
|
)),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Download the agent binary
|
||||||
|
///
|
||||||
|
/// Returns the statically-linked attune-agent binary for the requested architecture.
|
||||||
|
/// The binary can be injected into any container to turn it into an Attune worker.
|
||||||
|
#[utoipa::path(
|
||||||
|
get,
|
||||||
|
path = "/api/v1/agent/binary",
|
||||||
|
params(BinaryDownloadParams),
|
||||||
|
responses(
|
||||||
|
(status = 200, description = "Agent binary", content_type = "application/octet-stream"),
|
||||||
|
(status = 400, description = "Invalid architecture"),
|
||||||
|
(status = 401, description = "Invalid or missing bootstrap token"),
|
||||||
|
(status = 404, description = "Agent binary not found"),
|
||||||
|
(status = 503, description = "Agent binary distribution not configured"),
|
||||||
|
),
|
||||||
|
tag = "agent"
|
||||||
|
)]
|
||||||
|
pub async fn download_agent_binary(
|
||||||
|
State(state): State<Arc<AppState>>,
|
||||||
|
headers: HeaderMap,
|
||||||
|
Query(params): Query<BinaryDownloadParams>,
|
||||||
|
) -> Result<impl IntoResponse, (StatusCode, Json<serde_json::Value>)> {
|
||||||
|
// Validate bootstrap token if configured
|
||||||
|
validate_token(&state.config, &headers, ¶ms.token)?;
|
||||||
|
|
||||||
|
let agent_config = state.config.agent.as_ref().ok_or_else(|| {
|
||||||
|
(
|
||||||
|
StatusCode::SERVICE_UNAVAILABLE,
|
||||||
|
Json(serde_json::json!({
|
||||||
|
"error": "Not configured",
|
||||||
|
"message": "Agent binary distribution is not configured. Set agent.binary_dir in config.",
|
||||||
|
})),
|
||||||
|
)
|
||||||
|
})?;
|
||||||
|
|
||||||
|
let arch = params.arch.as_deref().unwrap_or("x86_64");
|
||||||
|
let arch = validate_arch(arch)?;
|
||||||
|
|
||||||
|
let binary_dir = std::path::Path::new(&agent_config.binary_dir);
|
||||||
|
|
||||||
|
// Try arch-specific binary first, then fall back to generic name.
|
||||||
|
// IMPORTANT: The generic `attune-agent` binary is only safe to serve for
|
||||||
|
// x86_64 requests, because the current build pipeline produces an
|
||||||
|
// x86_64-unknown-linux-musl binary. Serving it for aarch64/arm64 would
|
||||||
|
// give the caller an incompatible executable (exec format error).
|
||||||
|
let arch_specific = binary_dir.join(format!("attune-agent-{}", arch));
|
||||||
|
let generic = binary_dir.join("attune-agent");
|
||||||
|
|
||||||
|
let binary_path = if arch_specific.exists() {
|
||||||
|
arch_specific
|
||||||
|
} else if arch == "x86_64" && generic.exists() {
|
||||||
|
tracing::debug!(
|
||||||
|
"Arch-specific binary not found at {:?}, falling back to generic {:?} (safe for x86_64)",
|
||||||
|
arch_specific,
|
||||||
|
generic
|
||||||
|
);
|
||||||
|
generic
|
||||||
|
} else {
|
||||||
|
tracing::warn!(
|
||||||
|
"Agent binary not found. Checked: {:?} and {:?}",
|
||||||
|
arch_specific,
|
||||||
|
generic
|
||||||
|
);
|
||||||
|
return Err((
|
||||||
|
StatusCode::NOT_FOUND,
|
||||||
|
Json(serde_json::json!({
|
||||||
|
"error": "Not found",
|
||||||
|
"message": format!(
|
||||||
|
"Agent binary not found for architecture '{}'. Ensure the agent binary is built and placed in '{}'.",
|
||||||
|
arch,
|
||||||
|
agent_config.binary_dir
|
||||||
|
),
|
||||||
|
})),
|
||||||
|
));
|
||||||
|
};
|
||||||
|
|
||||||
|
// Get file metadata for Content-Length
|
||||||
|
let metadata = fs::metadata(&binary_path).await.map_err(|e| {
|
||||||
|
tracing::error!(
|
||||||
|
"Failed to read agent binary metadata at {:?}: {}",
|
||||||
|
binary_path,
|
||||||
|
e
|
||||||
|
);
|
||||||
|
(
|
||||||
|
StatusCode::INTERNAL_SERVER_ERROR,
|
||||||
|
Json(serde_json::json!({
|
||||||
|
"error": "Internal error",
|
||||||
|
"message": "Failed to read agent binary",
|
||||||
|
})),
|
||||||
|
)
|
||||||
|
})?;
|
||||||
|
|
||||||
|
// Open file for streaming
|
||||||
|
let file = fs::File::open(&binary_path).await.map_err(|e| {
|
||||||
|
tracing::error!("Failed to open agent binary at {:?}: {}", binary_path, e);
|
||||||
|
(
|
||||||
|
StatusCode::INTERNAL_SERVER_ERROR,
|
||||||
|
Json(serde_json::json!({
|
||||||
|
"error": "Internal error",
|
||||||
|
"message": "Failed to open agent binary",
|
||||||
|
})),
|
||||||
|
)
|
||||||
|
})?;
|
||||||
|
|
||||||
|
let stream = ReaderStream::new(file);
|
||||||
|
let body = Body::from_stream(stream);
|
||||||
|
|
||||||
|
let headers_response = [
|
||||||
|
(header::CONTENT_TYPE, "application/octet-stream".to_string()),
|
||||||
|
(
|
||||||
|
header::CONTENT_DISPOSITION,
|
||||||
|
"attachment; filename=\"attune-agent\"".to_string(),
|
||||||
|
),
|
||||||
|
(header::CONTENT_LENGTH, metadata.len().to_string()),
|
||||||
|
(header::CACHE_CONTROL, "public, max-age=3600".to_string()),
|
||||||
|
];
|
||||||
|
|
||||||
|
tracing::info!(
|
||||||
|
arch = arch,
|
||||||
|
size_bytes = metadata.len(),
|
||||||
|
path = ?binary_path,
|
||||||
|
"Serving agent binary download"
|
||||||
|
);
|
||||||
|
|
||||||
|
Ok((headers_response, body))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get agent binary metadata
|
||||||
|
///
|
||||||
|
/// Returns information about available agent binaries, including
|
||||||
|
/// supported architectures and binary sizes.
|
||||||
|
#[utoipa::path(
|
||||||
|
get,
|
||||||
|
path = "/api/v1/agent/info",
|
||||||
|
responses(
|
||||||
|
(status = 200, description = "Agent binary info", body = AgentBinaryInfo),
|
||||||
|
(status = 503, description = "Agent binary distribution not configured"),
|
||||||
|
),
|
||||||
|
tag = "agent"
|
||||||
|
)]
|
||||||
|
pub async fn agent_info(
|
||||||
|
State(state): State<Arc<AppState>>,
|
||||||
|
) -> Result<impl IntoResponse, (StatusCode, Json<serde_json::Value>)> {
|
||||||
|
let agent_config = state.config.agent.as_ref().ok_or_else(|| {
|
||||||
|
(
|
||||||
|
StatusCode::SERVICE_UNAVAILABLE,
|
||||||
|
Json(serde_json::json!({
|
||||||
|
"error": "Not configured",
|
||||||
|
"message": "Agent binary distribution is not configured.",
|
||||||
|
})),
|
||||||
|
)
|
||||||
|
})?;
|
||||||
|
|
||||||
|
let binary_dir = std::path::Path::new(&agent_config.binary_dir);
|
||||||
|
let architectures = ["x86_64", "aarch64"];
|
||||||
|
|
||||||
|
let mut arch_infos = Vec::new();
|
||||||
|
for arch in &architectures {
|
||||||
|
let arch_specific = binary_dir.join(format!("attune-agent-{}", arch));
|
||||||
|
let generic = binary_dir.join("attune-agent");
|
||||||
|
|
||||||
|
// Only fall back to the generic binary for x86_64, since the build
|
||||||
|
// pipeline currently produces x86_64-only generic binaries.
|
||||||
|
let (available, size_bytes) = if arch_specific.exists() {
|
||||||
|
match fs::metadata(&arch_specific).await {
|
||||||
|
Ok(m) => (true, m.len()),
|
||||||
|
Err(_) => (false, 0),
|
||||||
|
}
|
||||||
|
} else if *arch == "x86_64" && generic.exists() {
|
||||||
|
match fs::metadata(&generic).await {
|
||||||
|
Ok(m) => (true, m.len()),
|
||||||
|
Err(_) => (false, 0),
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
(false, 0)
|
||||||
|
};
|
||||||
|
|
||||||
|
arch_infos.push(AgentArchInfo {
|
||||||
|
arch: arch.to_string(),
|
||||||
|
size_bytes,
|
||||||
|
available,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(Json(AgentBinaryInfo {
|
||||||
|
architectures: arch_infos,
|
||||||
|
version: env!("CARGO_PKG_VERSION").to_string(),
|
||||||
|
}))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Create agent routes
|
||||||
|
pub fn routes() -> Router<Arc<AppState>> {
|
||||||
|
Router::new()
|
||||||
|
.route("/agent/binary", get(download_agent_binary))
|
||||||
|
.route("/agent/info", get(agent_info))
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use super::*;
|
||||||
|
use attune_common::config::AgentConfig;
|
||||||
|
use axum::http::{HeaderMap, HeaderValue};
|
||||||
|
|
||||||
|
// ── validate_arch tests ─────────────────────────────────────────
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_validate_arch_valid_x86_64() {
|
||||||
|
let result = validate_arch("x86_64");
|
||||||
|
assert!(result.is_ok());
|
||||||
|
assert_eq!(result.unwrap(), "x86_64");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_validate_arch_valid_aarch64() {
|
||||||
|
let result = validate_arch("aarch64");
|
||||||
|
assert!(result.is_ok());
|
||||||
|
assert_eq!(result.unwrap(), "aarch64");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_validate_arch_arm64_alias() {
|
||||||
|
// "arm64" is an alias for "aarch64"
|
||||||
|
let result = validate_arch("arm64");
|
||||||
|
assert!(result.is_ok());
|
||||||
|
assert_eq!(result.unwrap(), "aarch64");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_validate_arch_invalid() {
|
||||||
|
let result = validate_arch("mips");
|
||||||
|
assert!(result.is_err());
|
||||||
|
let (status, body) = result.unwrap_err();
|
||||||
|
assert_eq!(status, StatusCode::BAD_REQUEST);
|
||||||
|
assert_eq!(body.0["error"], "Invalid architecture");
|
||||||
|
}
|
||||||
|
|
||||||
|
// ── validate_token tests ────────────────────────────────────────
|
||||||
|
|
||||||
|
/// Helper: build a minimal Config with the given agent config.
|
||||||
|
/// Only the `agent` field is relevant for `validate_token`.
|
||||||
|
fn test_config(agent: Option<AgentConfig>) -> attune_common::config::Config {
|
||||||
|
let manifest_dir = std::env::var("CARGO_MANIFEST_DIR").unwrap_or_else(|_| ".".to_string());
|
||||||
|
let config_path = format!("{}/../../config.test.yaml", manifest_dir);
|
||||||
|
let mut config = attune_common::config::Config::load_from_file(&config_path)
|
||||||
|
.expect("Failed to load test config");
|
||||||
|
config.agent = agent;
|
||||||
|
config
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_validate_token_no_config() {
|
||||||
|
// When no agent config is set at all, no token is required.
|
||||||
|
let config = test_config(None);
|
||||||
|
let headers = HeaderMap::new();
|
||||||
|
let query_token = None;
|
||||||
|
|
||||||
|
let result = validate_token(&config, &headers, &query_token);
|
||||||
|
assert!(result.is_ok());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_validate_token_no_bootstrap_token_configured() {
|
||||||
|
// Agent config exists but bootstrap_token is None → no token required.
|
||||||
|
let config = test_config(Some(AgentConfig {
|
||||||
|
binary_dir: "/tmp/test".to_string(),
|
||||||
|
bootstrap_token: None,
|
||||||
|
}));
|
||||||
|
let headers = HeaderMap::new();
|
||||||
|
let query_token = None;
|
||||||
|
|
||||||
|
let result = validate_token(&config, &headers, &query_token);
|
||||||
|
assert!(result.is_ok());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_validate_token_valid_from_header() {
|
||||||
|
let config = test_config(Some(AgentConfig {
|
||||||
|
binary_dir: "/tmp/test".to_string(),
|
||||||
|
bootstrap_token: Some("s3cret-bootstrap".to_string()),
|
||||||
|
}));
|
||||||
|
let mut headers = HeaderMap::new();
|
||||||
|
headers.insert(
|
||||||
|
"x-agent-token",
|
||||||
|
HeaderValue::from_static("s3cret-bootstrap"),
|
||||||
|
);
|
||||||
|
let query_token = None;
|
||||||
|
|
||||||
|
let result = validate_token(&config, &headers, &query_token);
|
||||||
|
assert!(result.is_ok());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_validate_token_valid_from_query() {
|
||||||
|
let config = test_config(Some(AgentConfig {
|
||||||
|
binary_dir: "/tmp/test".to_string(),
|
||||||
|
bootstrap_token: Some("s3cret-bootstrap".to_string()),
|
||||||
|
}));
|
||||||
|
let headers = HeaderMap::new();
|
||||||
|
let query_token = Some("s3cret-bootstrap".to_string());
|
||||||
|
|
||||||
|
let result = validate_token(&config, &headers, &query_token);
|
||||||
|
assert!(result.is_ok());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_validate_token_invalid() {
|
||||||
|
let config = test_config(Some(AgentConfig {
|
||||||
|
binary_dir: "/tmp/test".to_string(),
|
||||||
|
bootstrap_token: Some("correct-token".to_string()),
|
||||||
|
}));
|
||||||
|
let mut headers = HeaderMap::new();
|
||||||
|
headers.insert("x-agent-token", HeaderValue::from_static("wrong-token"));
|
||||||
|
let query_token = None;
|
||||||
|
|
||||||
|
let result = validate_token(&config, &headers, &query_token);
|
||||||
|
assert!(result.is_err());
|
||||||
|
let (status, body) = result.unwrap_err();
|
||||||
|
assert_eq!(status, StatusCode::UNAUTHORIZED);
|
||||||
|
assert_eq!(body.0["error"], "Invalid token");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_validate_token_missing_when_required() {
|
||||||
|
// bootstrap_token is configured but caller provides nothing.
|
||||||
|
let config = test_config(Some(AgentConfig {
|
||||||
|
binary_dir: "/tmp/test".to_string(),
|
||||||
|
bootstrap_token: Some("required-token".to_string()),
|
||||||
|
}));
|
||||||
|
let headers = HeaderMap::new();
|
||||||
|
let query_token = None;
|
||||||
|
|
||||||
|
let result = validate_token(&config, &headers, &query_token);
|
||||||
|
assert!(result.is_err());
|
||||||
|
let (status, body) = result.unwrap_err();
|
||||||
|
assert_eq!(status, StatusCode::UNAUTHORIZED);
|
||||||
|
assert_eq!(body.0["error"], "Token required");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_validate_token_header_takes_precedence_over_query() {
|
||||||
|
// When both header and query provide a token, the header value is
|
||||||
|
// checked first (it appears first in the or_else chain). Provide a
|
||||||
|
// valid token in the header and an invalid one in the query — should
|
||||||
|
// succeed because the header matches.
|
||||||
|
let config = test_config(Some(AgentConfig {
|
||||||
|
binary_dir: "/tmp/test".to_string(),
|
||||||
|
bootstrap_token: Some("the-real-token".to_string()),
|
||||||
|
}));
|
||||||
|
let mut headers = HeaderMap::new();
|
||||||
|
headers.insert("x-agent-token", HeaderValue::from_static("the-real-token"));
|
||||||
|
let query_token = Some("wrong-token".to_string());
|
||||||
|
|
||||||
|
let result = validate_token(&config, &headers, &query_token);
|
||||||
|
assert!(result.is_ok());
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -40,7 +40,8 @@ use attune_common::repositories::{
|
|||||||
};
|
};
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
auth::middleware::RequireAuth,
|
auth::{jwt::TokenType, middleware::AuthenticatedUser, middleware::RequireAuth},
|
||||||
|
authz::{AuthorizationCheck, AuthorizationService},
|
||||||
dto::{
|
dto::{
|
||||||
artifact::{
|
artifact::{
|
||||||
AllocateFileVersionByRefRequest, AppendProgressRequest, ArtifactExecutionPatch,
|
AllocateFileVersionByRefRequest, AppendProgressRequest, ArtifactExecutionPatch,
|
||||||
@@ -55,6 +56,7 @@ use crate::{
|
|||||||
middleware::{ApiError, ApiResult},
|
middleware::{ApiError, ApiResult},
|
||||||
state::AppState,
|
state::AppState,
|
||||||
};
|
};
|
||||||
|
use attune_common::rbac::{Action, AuthorizationContext, Resource};
|
||||||
|
|
||||||
// ============================================================================
|
// ============================================================================
|
||||||
// Artifact CRUD
|
// Artifact CRUD
|
||||||
@@ -72,7 +74,7 @@ use crate::{
|
|||||||
security(("bearer_auth" = []))
|
security(("bearer_auth" = []))
|
||||||
)]
|
)]
|
||||||
pub async fn list_artifacts(
|
pub async fn list_artifacts(
|
||||||
RequireAuth(_user): RequireAuth,
|
RequireAuth(user): RequireAuth,
|
||||||
State(state): State<Arc<AppState>>,
|
State(state): State<Arc<AppState>>,
|
||||||
Query(query): Query<ArtifactQueryParams>,
|
Query(query): Query<ArtifactQueryParams>,
|
||||||
) -> ApiResult<impl IntoResponse> {
|
) -> ApiResult<impl IntoResponse> {
|
||||||
@@ -88,8 +90,16 @@ pub async fn list_artifacts(
|
|||||||
};
|
};
|
||||||
|
|
||||||
let result = ArtifactRepository::search(&state.db, &filters).await?;
|
let result = ArtifactRepository::search(&state.db, &filters).await?;
|
||||||
|
let mut rows = result.rows;
|
||||||
|
|
||||||
let items: Vec<ArtifactSummary> = result.rows.into_iter().map(ArtifactSummary::from).collect();
|
if let Some((identity_id, grants)) = ensure_can_read_any_artifact(&state, &user).await? {
|
||||||
|
rows.retain(|artifact| {
|
||||||
|
let ctx = artifact_authorization_context(identity_id, artifact);
|
||||||
|
AuthorizationService::is_allowed(&grants, Resource::Artifacts, Action::Read, &ctx)
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
let items: Vec<ArtifactSummary> = rows.into_iter().map(ArtifactSummary::from).collect();
|
||||||
|
|
||||||
let pagination = PaginationParams {
|
let pagination = PaginationParams {
|
||||||
page: query.page,
|
page: query.page,
|
||||||
@@ -113,7 +123,7 @@ pub async fn list_artifacts(
|
|||||||
security(("bearer_auth" = []))
|
security(("bearer_auth" = []))
|
||||||
)]
|
)]
|
||||||
pub async fn get_artifact(
|
pub async fn get_artifact(
|
||||||
RequireAuth(_user): RequireAuth,
|
RequireAuth(user): RequireAuth,
|
||||||
State(state): State<Arc<AppState>>,
|
State(state): State<Arc<AppState>>,
|
||||||
Path(id): Path<i64>,
|
Path(id): Path<i64>,
|
||||||
) -> ApiResult<impl IntoResponse> {
|
) -> ApiResult<impl IntoResponse> {
|
||||||
@@ -121,6 +131,10 @@ pub async fn get_artifact(
|
|||||||
.await?
|
.await?
|
||||||
.ok_or_else(|| ApiError::NotFound(format!("Artifact with ID {} not found", id)))?;
|
.ok_or_else(|| ApiError::NotFound(format!("Artifact with ID {} not found", id)))?;
|
||||||
|
|
||||||
|
authorize_artifact_action(&state, &user, Action::Read, &artifact)
|
||||||
|
.await
|
||||||
|
.map_err(|_| ApiError::NotFound(format!("Artifact with ID {} not found", id)))?;
|
||||||
|
|
||||||
Ok((
|
Ok((
|
||||||
StatusCode::OK,
|
StatusCode::OK,
|
||||||
Json(ApiResponse::new(ArtifactResponse::from(artifact))),
|
Json(ApiResponse::new(ArtifactResponse::from(artifact))),
|
||||||
@@ -140,7 +154,7 @@ pub async fn get_artifact(
|
|||||||
security(("bearer_auth" = []))
|
security(("bearer_auth" = []))
|
||||||
)]
|
)]
|
||||||
pub async fn get_artifact_by_ref(
|
pub async fn get_artifact_by_ref(
|
||||||
RequireAuth(_user): RequireAuth,
|
RequireAuth(user): RequireAuth,
|
||||||
State(state): State<Arc<AppState>>,
|
State(state): State<Arc<AppState>>,
|
||||||
Path(artifact_ref): Path<String>,
|
Path(artifact_ref): Path<String>,
|
||||||
) -> ApiResult<impl IntoResponse> {
|
) -> ApiResult<impl IntoResponse> {
|
||||||
@@ -148,6 +162,10 @@ pub async fn get_artifact_by_ref(
|
|||||||
.await?
|
.await?
|
||||||
.ok_or_else(|| ApiError::NotFound(format!("Artifact '{}' not found", artifact_ref)))?;
|
.ok_or_else(|| ApiError::NotFound(format!("Artifact '{}' not found", artifact_ref)))?;
|
||||||
|
|
||||||
|
authorize_artifact_action(&state, &user, Action::Read, &artifact)
|
||||||
|
.await
|
||||||
|
.map_err(|_| ApiError::NotFound(format!("Artifact '{}' not found", artifact_ref)))?;
|
||||||
|
|
||||||
Ok((
|
Ok((
|
||||||
StatusCode::OK,
|
StatusCode::OK,
|
||||||
Json(ApiResponse::new(ArtifactResponse::from(artifact))),
|
Json(ApiResponse::new(ArtifactResponse::from(artifact))),
|
||||||
@@ -168,7 +186,7 @@ pub async fn get_artifact_by_ref(
|
|||||||
security(("bearer_auth" = []))
|
security(("bearer_auth" = []))
|
||||||
)]
|
)]
|
||||||
pub async fn create_artifact(
|
pub async fn create_artifact(
|
||||||
RequireAuth(_user): RequireAuth,
|
RequireAuth(user): RequireAuth,
|
||||||
State(state): State<Arc<AppState>>,
|
State(state): State<Arc<AppState>>,
|
||||||
Json(request): Json<CreateArtifactRequest>,
|
Json(request): Json<CreateArtifactRequest>,
|
||||||
) -> ApiResult<impl IntoResponse> {
|
) -> ApiResult<impl IntoResponse> {
|
||||||
@@ -200,6 +218,16 @@ pub async fn create_artifact(
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
|
authorize_artifact_create(
|
||||||
|
&state,
|
||||||
|
&user,
|
||||||
|
&request.r#ref,
|
||||||
|
request.scope,
|
||||||
|
&request.owner,
|
||||||
|
visibility,
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
let input = CreateArtifactInput {
|
let input = CreateArtifactInput {
|
||||||
r#ref: request.r#ref,
|
r#ref: request.r#ref,
|
||||||
scope: request.scope,
|
scope: request.scope,
|
||||||
@@ -240,16 +268,18 @@ pub async fn create_artifact(
|
|||||||
security(("bearer_auth" = []))
|
security(("bearer_auth" = []))
|
||||||
)]
|
)]
|
||||||
pub async fn update_artifact(
|
pub async fn update_artifact(
|
||||||
RequireAuth(_user): RequireAuth,
|
RequireAuth(user): RequireAuth,
|
||||||
State(state): State<Arc<AppState>>,
|
State(state): State<Arc<AppState>>,
|
||||||
Path(id): Path<i64>,
|
Path(id): Path<i64>,
|
||||||
Json(request): Json<UpdateArtifactRequest>,
|
Json(request): Json<UpdateArtifactRequest>,
|
||||||
) -> ApiResult<impl IntoResponse> {
|
) -> ApiResult<impl IntoResponse> {
|
||||||
// Verify artifact exists
|
// Verify artifact exists
|
||||||
ArtifactRepository::find_by_id(&state.db, id)
|
let artifact = ArtifactRepository::find_by_id(&state.db, id)
|
||||||
.await?
|
.await?
|
||||||
.ok_or_else(|| ApiError::NotFound(format!("Artifact with ID {} not found", id)))?;
|
.ok_or_else(|| ApiError::NotFound(format!("Artifact with ID {} not found", id)))?;
|
||||||
|
|
||||||
|
authorize_artifact_action(&state, &user, Action::Update, &artifact).await?;
|
||||||
|
|
||||||
let input = UpdateArtifactInput {
|
let input = UpdateArtifactInput {
|
||||||
r#ref: None, // Ref is immutable after creation
|
r#ref: None, // Ref is immutable after creation
|
||||||
scope: request.scope,
|
scope: request.scope,
|
||||||
@@ -305,7 +335,7 @@ pub async fn update_artifact(
|
|||||||
security(("bearer_auth" = []))
|
security(("bearer_auth" = []))
|
||||||
)]
|
)]
|
||||||
pub async fn delete_artifact(
|
pub async fn delete_artifact(
|
||||||
RequireAuth(_user): RequireAuth,
|
RequireAuth(user): RequireAuth,
|
||||||
State(state): State<Arc<AppState>>,
|
State(state): State<Arc<AppState>>,
|
||||||
Path(id): Path<i64>,
|
Path(id): Path<i64>,
|
||||||
) -> ApiResult<impl IntoResponse> {
|
) -> ApiResult<impl IntoResponse> {
|
||||||
@@ -313,6 +343,8 @@ pub async fn delete_artifact(
|
|||||||
.await?
|
.await?
|
||||||
.ok_or_else(|| ApiError::NotFound(format!("Artifact with ID {} not found", id)))?;
|
.ok_or_else(|| ApiError::NotFound(format!("Artifact with ID {} not found", id)))?;
|
||||||
|
|
||||||
|
authorize_artifact_action(&state, &user, Action::Delete, &artifact).await?;
|
||||||
|
|
||||||
// Before deleting DB rows, clean up any file-backed versions on disk
|
// Before deleting DB rows, clean up any file-backed versions on disk
|
||||||
let file_versions =
|
let file_versions =
|
||||||
ArtifactVersionRepository::find_file_versions_by_artifact(&state.db, id).await?;
|
ArtifactVersionRepository::find_file_versions_by_artifact(&state.db, id).await?;
|
||||||
@@ -355,11 +387,17 @@ pub async fn delete_artifact(
|
|||||||
security(("bearer_auth" = []))
|
security(("bearer_auth" = []))
|
||||||
)]
|
)]
|
||||||
pub async fn list_artifacts_by_execution(
|
pub async fn list_artifacts_by_execution(
|
||||||
RequireAuth(_user): RequireAuth,
|
RequireAuth(user): RequireAuth,
|
||||||
State(state): State<Arc<AppState>>,
|
State(state): State<Arc<AppState>>,
|
||||||
Path(execution_id): Path<i64>,
|
Path(execution_id): Path<i64>,
|
||||||
) -> ApiResult<impl IntoResponse> {
|
) -> ApiResult<impl IntoResponse> {
|
||||||
let artifacts = ArtifactRepository::find_by_execution(&state.db, execution_id).await?;
|
let mut artifacts = ArtifactRepository::find_by_execution(&state.db, execution_id).await?;
|
||||||
|
if let Some((identity_id, grants)) = ensure_can_read_any_artifact(&state, &user).await? {
|
||||||
|
artifacts.retain(|artifact| {
|
||||||
|
let ctx = artifact_authorization_context(identity_id, artifact);
|
||||||
|
AuthorizationService::is_allowed(&grants, Resource::Artifacts, Action::Read, &ctx)
|
||||||
|
});
|
||||||
|
}
|
||||||
let items: Vec<ArtifactSummary> = artifacts.into_iter().map(ArtifactSummary::from).collect();
|
let items: Vec<ArtifactSummary> = artifacts.into_iter().map(ArtifactSummary::from).collect();
|
||||||
|
|
||||||
Ok((StatusCode::OK, Json(ApiResponse::new(items))))
|
Ok((StatusCode::OK, Json(ApiResponse::new(items))))
|
||||||
@@ -387,7 +425,7 @@ pub async fn list_artifacts_by_execution(
|
|||||||
security(("bearer_auth" = []))
|
security(("bearer_auth" = []))
|
||||||
)]
|
)]
|
||||||
pub async fn append_progress(
|
pub async fn append_progress(
|
||||||
RequireAuth(_user): RequireAuth,
|
RequireAuth(user): RequireAuth,
|
||||||
State(state): State<Arc<AppState>>,
|
State(state): State<Arc<AppState>>,
|
||||||
Path(id): Path<i64>,
|
Path(id): Path<i64>,
|
||||||
Json(request): Json<AppendProgressRequest>,
|
Json(request): Json<AppendProgressRequest>,
|
||||||
@@ -396,6 +434,8 @@ pub async fn append_progress(
|
|||||||
.await?
|
.await?
|
||||||
.ok_or_else(|| ApiError::NotFound(format!("Artifact with ID {} not found", id)))?;
|
.ok_or_else(|| ApiError::NotFound(format!("Artifact with ID {} not found", id)))?;
|
||||||
|
|
||||||
|
authorize_artifact_action(&state, &user, Action::Update, &artifact).await?;
|
||||||
|
|
||||||
if artifact.r#type != ArtifactType::Progress {
|
if artifact.r#type != ArtifactType::Progress {
|
||||||
return Err(ApiError::BadRequest(format!(
|
return Err(ApiError::BadRequest(format!(
|
||||||
"Artifact '{}' is type {:?}, not progress. Use version endpoints for file artifacts.",
|
"Artifact '{}' is type {:?}, not progress. Use version endpoints for file artifacts.",
|
||||||
@@ -430,16 +470,18 @@ pub async fn append_progress(
|
|||||||
security(("bearer_auth" = []))
|
security(("bearer_auth" = []))
|
||||||
)]
|
)]
|
||||||
pub async fn set_artifact_data(
|
pub async fn set_artifact_data(
|
||||||
RequireAuth(_user): RequireAuth,
|
RequireAuth(user): RequireAuth,
|
||||||
State(state): State<Arc<AppState>>,
|
State(state): State<Arc<AppState>>,
|
||||||
Path(id): Path<i64>,
|
Path(id): Path<i64>,
|
||||||
Json(request): Json<SetDataRequest>,
|
Json(request): Json<SetDataRequest>,
|
||||||
) -> ApiResult<impl IntoResponse> {
|
) -> ApiResult<impl IntoResponse> {
|
||||||
// Verify exists
|
// Verify exists
|
||||||
ArtifactRepository::find_by_id(&state.db, id)
|
let artifact = ArtifactRepository::find_by_id(&state.db, id)
|
||||||
.await?
|
.await?
|
||||||
.ok_or_else(|| ApiError::NotFound(format!("Artifact with ID {} not found", id)))?;
|
.ok_or_else(|| ApiError::NotFound(format!("Artifact with ID {} not found", id)))?;
|
||||||
|
|
||||||
|
authorize_artifact_action(&state, &user, Action::Update, &artifact).await?;
|
||||||
|
|
||||||
let updated = ArtifactRepository::set_data(&state.db, id, &request.data).await?;
|
let updated = ArtifactRepository::set_data(&state.db, id, &request.data).await?;
|
||||||
|
|
||||||
Ok((
|
Ok((
|
||||||
@@ -468,15 +510,19 @@ pub async fn set_artifact_data(
|
|||||||
security(("bearer_auth" = []))
|
security(("bearer_auth" = []))
|
||||||
)]
|
)]
|
||||||
pub async fn list_versions(
|
pub async fn list_versions(
|
||||||
RequireAuth(_user): RequireAuth,
|
RequireAuth(user): RequireAuth,
|
||||||
State(state): State<Arc<AppState>>,
|
State(state): State<Arc<AppState>>,
|
||||||
Path(id): Path<i64>,
|
Path(id): Path<i64>,
|
||||||
) -> ApiResult<impl IntoResponse> {
|
) -> ApiResult<impl IntoResponse> {
|
||||||
// Verify artifact exists
|
// Verify artifact exists
|
||||||
ArtifactRepository::find_by_id(&state.db, id)
|
let artifact = ArtifactRepository::find_by_id(&state.db, id)
|
||||||
.await?
|
.await?
|
||||||
.ok_or_else(|| ApiError::NotFound(format!("Artifact with ID {} not found", id)))?;
|
.ok_or_else(|| ApiError::NotFound(format!("Artifact with ID {} not found", id)))?;
|
||||||
|
|
||||||
|
authorize_artifact_action(&state, &user, Action::Read, &artifact)
|
||||||
|
.await
|
||||||
|
.map_err(|_| ApiError::NotFound(format!("Artifact with ID {} not found", id)))?;
|
||||||
|
|
||||||
let versions = ArtifactVersionRepository::list_by_artifact(&state.db, id).await?;
|
let versions = ArtifactVersionRepository::list_by_artifact(&state.db, id).await?;
|
||||||
let items: Vec<ArtifactVersionSummary> = versions
|
let items: Vec<ArtifactVersionSummary> = versions
|
||||||
.into_iter()
|
.into_iter()
|
||||||
@@ -502,15 +548,19 @@ pub async fn list_versions(
|
|||||||
security(("bearer_auth" = []))
|
security(("bearer_auth" = []))
|
||||||
)]
|
)]
|
||||||
pub async fn get_version(
|
pub async fn get_version(
|
||||||
RequireAuth(_user): RequireAuth,
|
RequireAuth(user): RequireAuth,
|
||||||
State(state): State<Arc<AppState>>,
|
State(state): State<Arc<AppState>>,
|
||||||
Path((id, version)): Path<(i64, i32)>,
|
Path((id, version)): Path<(i64, i32)>,
|
||||||
) -> ApiResult<impl IntoResponse> {
|
) -> ApiResult<impl IntoResponse> {
|
||||||
// Verify artifact exists
|
// Verify artifact exists
|
||||||
ArtifactRepository::find_by_id(&state.db, id)
|
let artifact = ArtifactRepository::find_by_id(&state.db, id)
|
||||||
.await?
|
.await?
|
||||||
.ok_or_else(|| ApiError::NotFound(format!("Artifact with ID {} not found", id)))?;
|
.ok_or_else(|| ApiError::NotFound(format!("Artifact with ID {} not found", id)))?;
|
||||||
|
|
||||||
|
authorize_artifact_action(&state, &user, Action::Read, &artifact)
|
||||||
|
.await
|
||||||
|
.map_err(|_| ApiError::NotFound(format!("Artifact with ID {} not found", id)))?;
|
||||||
|
|
||||||
let ver = ArtifactVersionRepository::find_by_version(&state.db, id, version)
|
let ver = ArtifactVersionRepository::find_by_version(&state.db, id, version)
|
||||||
.await?
|
.await?
|
||||||
.ok_or_else(|| {
|
.ok_or_else(|| {
|
||||||
@@ -536,14 +586,18 @@ pub async fn get_version(
|
|||||||
security(("bearer_auth" = []))
|
security(("bearer_auth" = []))
|
||||||
)]
|
)]
|
||||||
pub async fn get_latest_version(
|
pub async fn get_latest_version(
|
||||||
RequireAuth(_user): RequireAuth,
|
RequireAuth(user): RequireAuth,
|
||||||
State(state): State<Arc<AppState>>,
|
State(state): State<Arc<AppState>>,
|
||||||
Path(id): Path<i64>,
|
Path(id): Path<i64>,
|
||||||
) -> ApiResult<impl IntoResponse> {
|
) -> ApiResult<impl IntoResponse> {
|
||||||
ArtifactRepository::find_by_id(&state.db, id)
|
let artifact = ArtifactRepository::find_by_id(&state.db, id)
|
||||||
.await?
|
.await?
|
||||||
.ok_or_else(|| ApiError::NotFound(format!("Artifact with ID {} not found", id)))?;
|
.ok_or_else(|| ApiError::NotFound(format!("Artifact with ID {} not found", id)))?;
|
||||||
|
|
||||||
|
authorize_artifact_action(&state, &user, Action::Read, &artifact)
|
||||||
|
.await
|
||||||
|
.map_err(|_| ApiError::NotFound(format!("Artifact with ID {} not found", id)))?;
|
||||||
|
|
||||||
let ver = ArtifactVersionRepository::find_latest(&state.db, id)
|
let ver = ArtifactVersionRepository::find_latest(&state.db, id)
|
||||||
.await?
|
.await?
|
||||||
.ok_or_else(|| ApiError::NotFound(format!("No versions found for artifact {}", id)))?;
|
.ok_or_else(|| ApiError::NotFound(format!("No versions found for artifact {}", id)))?;
|
||||||
@@ -568,15 +622,17 @@ pub async fn get_latest_version(
|
|||||||
security(("bearer_auth" = []))
|
security(("bearer_auth" = []))
|
||||||
)]
|
)]
|
||||||
pub async fn create_version_json(
|
pub async fn create_version_json(
|
||||||
RequireAuth(_user): RequireAuth,
|
RequireAuth(user): RequireAuth,
|
||||||
State(state): State<Arc<AppState>>,
|
State(state): State<Arc<AppState>>,
|
||||||
Path(id): Path<i64>,
|
Path(id): Path<i64>,
|
||||||
Json(request): Json<CreateVersionJsonRequest>,
|
Json(request): Json<CreateVersionJsonRequest>,
|
||||||
) -> ApiResult<impl IntoResponse> {
|
) -> ApiResult<impl IntoResponse> {
|
||||||
ArtifactRepository::find_by_id(&state.db, id)
|
let artifact = ArtifactRepository::find_by_id(&state.db, id)
|
||||||
.await?
|
.await?
|
||||||
.ok_or_else(|| ApiError::NotFound(format!("Artifact with ID {} not found", id)))?;
|
.ok_or_else(|| ApiError::NotFound(format!("Artifact with ID {} not found", id)))?;
|
||||||
|
|
||||||
|
authorize_artifact_action(&state, &user, Action::Update, &artifact).await?;
|
||||||
|
|
||||||
let input = CreateArtifactVersionInput {
|
let input = CreateArtifactVersionInput {
|
||||||
artifact: id,
|
artifact: id,
|
||||||
content_type: Some(
|
content_type: Some(
|
||||||
@@ -624,7 +680,7 @@ pub async fn create_version_json(
|
|||||||
security(("bearer_auth" = []))
|
security(("bearer_auth" = []))
|
||||||
)]
|
)]
|
||||||
pub async fn create_version_file(
|
pub async fn create_version_file(
|
||||||
RequireAuth(_user): RequireAuth,
|
RequireAuth(user): RequireAuth,
|
||||||
State(state): State<Arc<AppState>>,
|
State(state): State<Arc<AppState>>,
|
||||||
Path(id): Path<i64>,
|
Path(id): Path<i64>,
|
||||||
Json(request): Json<CreateFileVersionRequest>,
|
Json(request): Json<CreateFileVersionRequest>,
|
||||||
@@ -633,6 +689,8 @@ pub async fn create_version_file(
|
|||||||
.await?
|
.await?
|
||||||
.ok_or_else(|| ApiError::NotFound(format!("Artifact with ID {} not found", id)))?;
|
.ok_or_else(|| ApiError::NotFound(format!("Artifact with ID {} not found", id)))?;
|
||||||
|
|
||||||
|
authorize_artifact_action(&state, &user, Action::Update, &artifact).await?;
|
||||||
|
|
||||||
// Validate this is a file-type artifact
|
// Validate this is a file-type artifact
|
||||||
if !is_file_backed_type(artifact.r#type) {
|
if !is_file_backed_type(artifact.r#type) {
|
||||||
return Err(ApiError::BadRequest(format!(
|
return Err(ApiError::BadRequest(format!(
|
||||||
@@ -726,15 +784,17 @@ pub async fn create_version_file(
|
|||||||
security(("bearer_auth" = []))
|
security(("bearer_auth" = []))
|
||||||
)]
|
)]
|
||||||
pub async fn upload_version(
|
pub async fn upload_version(
|
||||||
RequireAuth(_user): RequireAuth,
|
RequireAuth(user): RequireAuth,
|
||||||
State(state): State<Arc<AppState>>,
|
State(state): State<Arc<AppState>>,
|
||||||
Path(id): Path<i64>,
|
Path(id): Path<i64>,
|
||||||
mut multipart: Multipart,
|
mut multipart: Multipart,
|
||||||
) -> ApiResult<impl IntoResponse> {
|
) -> ApiResult<impl IntoResponse> {
|
||||||
ArtifactRepository::find_by_id(&state.db, id)
|
let artifact = ArtifactRepository::find_by_id(&state.db, id)
|
||||||
.await?
|
.await?
|
||||||
.ok_or_else(|| ApiError::NotFound(format!("Artifact with ID {} not found", id)))?;
|
.ok_or_else(|| ApiError::NotFound(format!("Artifact with ID {} not found", id)))?;
|
||||||
|
|
||||||
|
authorize_artifact_action(&state, &user, Action::Update, &artifact).await?;
|
||||||
|
|
||||||
let mut file_data: Option<Vec<u8>> = None;
|
let mut file_data: Option<Vec<u8>> = None;
|
||||||
let mut content_type: Option<String> = None;
|
let mut content_type: Option<String> = None;
|
||||||
let mut meta: Option<serde_json::Value> = None;
|
let mut meta: Option<serde_json::Value> = None;
|
||||||
@@ -854,7 +914,7 @@ pub async fn upload_version(
|
|||||||
security(("bearer_auth" = []))
|
security(("bearer_auth" = []))
|
||||||
)]
|
)]
|
||||||
pub async fn download_version(
|
pub async fn download_version(
|
||||||
RequireAuth(_user): RequireAuth,
|
RequireAuth(user): RequireAuth,
|
||||||
State(state): State<Arc<AppState>>,
|
State(state): State<Arc<AppState>>,
|
||||||
Path((id, version)): Path<(i64, i32)>,
|
Path((id, version)): Path<(i64, i32)>,
|
||||||
) -> ApiResult<impl IntoResponse> {
|
) -> ApiResult<impl IntoResponse> {
|
||||||
@@ -862,6 +922,10 @@ pub async fn download_version(
|
|||||||
.await?
|
.await?
|
||||||
.ok_or_else(|| ApiError::NotFound(format!("Artifact with ID {} not found", id)))?;
|
.ok_or_else(|| ApiError::NotFound(format!("Artifact with ID {} not found", id)))?;
|
||||||
|
|
||||||
|
authorize_artifact_action(&state, &user, Action::Read, &artifact)
|
||||||
|
.await
|
||||||
|
.map_err(|_| ApiError::NotFound(format!("Artifact with ID {} not found", id)))?;
|
||||||
|
|
||||||
// First try without content (cheaper query) to check for file_path
|
// First try without content (cheaper query) to check for file_path
|
||||||
let ver = ArtifactVersionRepository::find_by_version(&state.db, id, version)
|
let ver = ArtifactVersionRepository::find_by_version(&state.db, id, version)
|
||||||
.await?
|
.await?
|
||||||
@@ -904,7 +968,7 @@ pub async fn download_version(
|
|||||||
security(("bearer_auth" = []))
|
security(("bearer_auth" = []))
|
||||||
)]
|
)]
|
||||||
pub async fn download_latest(
|
pub async fn download_latest(
|
||||||
RequireAuth(_user): RequireAuth,
|
RequireAuth(user): RequireAuth,
|
||||||
State(state): State<Arc<AppState>>,
|
State(state): State<Arc<AppState>>,
|
||||||
Path(id): Path<i64>,
|
Path(id): Path<i64>,
|
||||||
) -> ApiResult<impl IntoResponse> {
|
) -> ApiResult<impl IntoResponse> {
|
||||||
@@ -912,6 +976,10 @@ pub async fn download_latest(
|
|||||||
.await?
|
.await?
|
||||||
.ok_or_else(|| ApiError::NotFound(format!("Artifact with ID {} not found", id)))?;
|
.ok_or_else(|| ApiError::NotFound(format!("Artifact with ID {} not found", id)))?;
|
||||||
|
|
||||||
|
authorize_artifact_action(&state, &user, Action::Read, &artifact)
|
||||||
|
.await
|
||||||
|
.map_err(|_| ApiError::NotFound(format!("Artifact with ID {} not found", id)))?;
|
||||||
|
|
||||||
// First try without content (cheaper query) to check for file_path
|
// First try without content (cheaper query) to check for file_path
|
||||||
let ver = ArtifactVersionRepository::find_latest(&state.db, id)
|
let ver = ArtifactVersionRepository::find_latest(&state.db, id)
|
||||||
.await?
|
.await?
|
||||||
@@ -955,7 +1023,7 @@ pub async fn download_latest(
|
|||||||
security(("bearer_auth" = []))
|
security(("bearer_auth" = []))
|
||||||
)]
|
)]
|
||||||
pub async fn delete_version(
|
pub async fn delete_version(
|
||||||
RequireAuth(_user): RequireAuth,
|
RequireAuth(user): RequireAuth,
|
||||||
State(state): State<Arc<AppState>>,
|
State(state): State<Arc<AppState>>,
|
||||||
Path((id, version)): Path<(i64, i32)>,
|
Path((id, version)): Path<(i64, i32)>,
|
||||||
) -> ApiResult<impl IntoResponse> {
|
) -> ApiResult<impl IntoResponse> {
|
||||||
@@ -964,6 +1032,8 @@ pub async fn delete_version(
|
|||||||
.await?
|
.await?
|
||||||
.ok_or_else(|| ApiError::NotFound(format!("Artifact with ID {} not found", id)))?;
|
.ok_or_else(|| ApiError::NotFound(format!("Artifact with ID {} not found", id)))?;
|
||||||
|
|
||||||
|
authorize_artifact_action(&state, &user, Action::Delete, &artifact).await?;
|
||||||
|
|
||||||
// Find the version by artifact + version number
|
// Find the version by artifact + version number
|
||||||
let ver = ArtifactVersionRepository::find_by_version(&state.db, id, version)
|
let ver = ArtifactVersionRepository::find_by_version(&state.db, id, version)
|
||||||
.await?
|
.await?
|
||||||
@@ -1042,7 +1112,7 @@ pub async fn delete_version(
|
|||||||
security(("bearer_auth" = []))
|
security(("bearer_auth" = []))
|
||||||
)]
|
)]
|
||||||
pub async fn upload_version_by_ref(
|
pub async fn upload_version_by_ref(
|
||||||
RequireAuth(_user): RequireAuth,
|
RequireAuth(user): RequireAuth,
|
||||||
State(state): State<Arc<AppState>>,
|
State(state): State<Arc<AppState>>,
|
||||||
Path(artifact_ref): Path<String>,
|
Path(artifact_ref): Path<String>,
|
||||||
mut multipart: Multipart,
|
mut multipart: Multipart,
|
||||||
@@ -1157,6 +1227,8 @@ pub async fn upload_version_by_ref(
|
|||||||
// Upsert: find existing artifact or create a new one
|
// Upsert: find existing artifact or create a new one
|
||||||
let artifact = match ArtifactRepository::find_by_ref(&state.db, &artifact_ref).await? {
|
let artifact = match ArtifactRepository::find_by_ref(&state.db, &artifact_ref).await? {
|
||||||
Some(existing) => {
|
Some(existing) => {
|
||||||
|
authorize_artifact_action(&state, &user, Action::Update, &existing).await?;
|
||||||
|
|
||||||
// Update execution link if a new execution ID was provided
|
// Update execution link if a new execution ID was provided
|
||||||
if execution_id.is_some() && execution_id != existing.execution {
|
if execution_id.is_some() && execution_id != existing.execution {
|
||||||
let update_input = UpdateArtifactInput {
|
let update_input = UpdateArtifactInput {
|
||||||
@@ -1211,6 +1283,16 @@ pub async fn upload_version_by_ref(
|
|||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
|
authorize_artifact_create(
|
||||||
|
&state,
|
||||||
|
&user,
|
||||||
|
&artifact_ref,
|
||||||
|
a_scope,
|
||||||
|
owner.as_deref().unwrap_or_default(),
|
||||||
|
a_visibility,
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
// Parse retention
|
// Parse retention
|
||||||
let a_retention_policy: RetentionPolicyType = match &retention_policy {
|
let a_retention_policy: RetentionPolicyType = match &retention_policy {
|
||||||
Some(rp) if !rp.is_empty() => {
|
Some(rp) if !rp.is_empty() => {
|
||||||
@@ -1297,7 +1379,7 @@ pub async fn upload_version_by_ref(
|
|||||||
security(("bearer_auth" = []))
|
security(("bearer_auth" = []))
|
||||||
)]
|
)]
|
||||||
pub async fn allocate_file_version_by_ref(
|
pub async fn allocate_file_version_by_ref(
|
||||||
RequireAuth(_user): RequireAuth,
|
RequireAuth(user): RequireAuth,
|
||||||
State(state): State<Arc<AppState>>,
|
State(state): State<Arc<AppState>>,
|
||||||
Path(artifact_ref): Path<String>,
|
Path(artifact_ref): Path<String>,
|
||||||
Json(request): Json<AllocateFileVersionByRefRequest>,
|
Json(request): Json<AllocateFileVersionByRefRequest>,
|
||||||
@@ -1305,6 +1387,8 @@ pub async fn allocate_file_version_by_ref(
|
|||||||
// Upsert: find existing artifact or create a new one
|
// Upsert: find existing artifact or create a new one
|
||||||
let artifact = match ArtifactRepository::find_by_ref(&state.db, &artifact_ref).await? {
|
let artifact = match ArtifactRepository::find_by_ref(&state.db, &artifact_ref).await? {
|
||||||
Some(existing) => {
|
Some(existing) => {
|
||||||
|
authorize_artifact_action(&state, &user, Action::Update, &existing).await?;
|
||||||
|
|
||||||
// Update execution link if a new execution ID was provided
|
// Update execution link if a new execution ID was provided
|
||||||
if request.execution.is_some() && request.execution != existing.execution {
|
if request.execution.is_some() && request.execution != existing.execution {
|
||||||
let update_input = UpdateArtifactInput {
|
let update_input = UpdateArtifactInput {
|
||||||
@@ -1347,6 +1431,16 @@ pub async fn allocate_file_version_by_ref(
|
|||||||
.unwrap_or(RetentionPolicyType::Versions);
|
.unwrap_or(RetentionPolicyType::Versions);
|
||||||
let a_retention_limit = request.retention_limit.unwrap_or(10);
|
let a_retention_limit = request.retention_limit.unwrap_or(10);
|
||||||
|
|
||||||
|
authorize_artifact_create(
|
||||||
|
&state,
|
||||||
|
&user,
|
||||||
|
&artifact_ref,
|
||||||
|
a_scope,
|
||||||
|
request.owner.as_deref().unwrap_or_default(),
|
||||||
|
a_visibility,
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
let create_input = CreateArtifactInput {
|
let create_input = CreateArtifactInput {
|
||||||
r#ref: artifact_ref.clone(),
|
r#ref: artifact_ref.clone(),
|
||||||
scope: a_scope,
|
scope: a_scope,
|
||||||
@@ -1437,6 +1531,105 @@ pub async fn allocate_file_version_by_ref(
|
|||||||
// Helpers
|
// Helpers
|
||||||
// ============================================================================
|
// ============================================================================
|
||||||
|
|
||||||
|
async fn authorize_artifact_action(
|
||||||
|
state: &Arc<AppState>,
|
||||||
|
user: &AuthenticatedUser,
|
||||||
|
action: Action,
|
||||||
|
artifact: &attune_common::models::artifact::Artifact,
|
||||||
|
) -> Result<(), ApiError> {
|
||||||
|
if user.claims.token_type != TokenType::Access {
|
||||||
|
return Ok(());
|
||||||
|
}
|
||||||
|
|
||||||
|
let identity_id = user
|
||||||
|
.identity_id()
|
||||||
|
.map_err(|_| ApiError::Unauthorized("Invalid user identity".to_string()))?;
|
||||||
|
let authz = AuthorizationService::new(state.db.clone());
|
||||||
|
authz
|
||||||
|
.authorize(
|
||||||
|
user,
|
||||||
|
AuthorizationCheck {
|
||||||
|
resource: Resource::Artifacts,
|
||||||
|
action,
|
||||||
|
context: artifact_authorization_context(identity_id, artifact),
|
||||||
|
},
|
||||||
|
)
|
||||||
|
.await
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn authorize_artifact_create(
|
||||||
|
state: &Arc<AppState>,
|
||||||
|
user: &AuthenticatedUser,
|
||||||
|
artifact_ref: &str,
|
||||||
|
scope: OwnerType,
|
||||||
|
owner: &str,
|
||||||
|
visibility: ArtifactVisibility,
|
||||||
|
) -> Result<(), ApiError> {
|
||||||
|
if user.claims.token_type != TokenType::Access {
|
||||||
|
return Ok(());
|
||||||
|
}
|
||||||
|
|
||||||
|
let identity_id = user
|
||||||
|
.identity_id()
|
||||||
|
.map_err(|_| ApiError::Unauthorized("Invalid user identity".to_string()))?;
|
||||||
|
let authz = AuthorizationService::new(state.db.clone());
|
||||||
|
let mut ctx = AuthorizationContext::new(identity_id);
|
||||||
|
ctx.target_ref = Some(artifact_ref.to_string());
|
||||||
|
ctx.owner_type = Some(scope);
|
||||||
|
ctx.owner_ref = Some(owner.to_string());
|
||||||
|
ctx.visibility = Some(visibility);
|
||||||
|
|
||||||
|
authz
|
||||||
|
.authorize(
|
||||||
|
user,
|
||||||
|
AuthorizationCheck {
|
||||||
|
resource: Resource::Artifacts,
|
||||||
|
action: Action::Create,
|
||||||
|
context: ctx,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
.await
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn ensure_can_read_any_artifact(
|
||||||
|
state: &Arc<AppState>,
|
||||||
|
user: &AuthenticatedUser,
|
||||||
|
) -> Result<Option<(i64, Vec<attune_common::rbac::Grant>)>, ApiError> {
|
||||||
|
if user.claims.token_type != TokenType::Access {
|
||||||
|
return Ok(None);
|
||||||
|
}
|
||||||
|
|
||||||
|
let identity_id = user
|
||||||
|
.identity_id()
|
||||||
|
.map_err(|_| ApiError::Unauthorized("Invalid user identity".to_string()))?;
|
||||||
|
let authz = AuthorizationService::new(state.db.clone());
|
||||||
|
let grants = authz.effective_grants(user).await?;
|
||||||
|
|
||||||
|
let can_read_any_artifact = grants
|
||||||
|
.iter()
|
||||||
|
.any(|g| g.resource == Resource::Artifacts && g.actions.contains(&Action::Read));
|
||||||
|
if !can_read_any_artifact {
|
||||||
|
return Err(ApiError::Forbidden(
|
||||||
|
"Insufficient permissions: artifacts:read".to_string(),
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(Some((identity_id, grants)))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn artifact_authorization_context(
|
||||||
|
identity_id: i64,
|
||||||
|
artifact: &attune_common::models::artifact::Artifact,
|
||||||
|
) -> AuthorizationContext {
|
||||||
|
let mut ctx = AuthorizationContext::new(identity_id);
|
||||||
|
ctx.target_id = Some(artifact.id);
|
||||||
|
ctx.target_ref = Some(artifact.r#ref.clone());
|
||||||
|
ctx.owner_type = Some(artifact.scope);
|
||||||
|
ctx.owner_ref = Some(artifact.owner.clone());
|
||||||
|
ctx.visibility = Some(artifact.visibility);
|
||||||
|
ctx
|
||||||
|
}
|
||||||
|
|
||||||
/// Returns true for artifact types that should use file-backed storage on disk.
|
/// Returns true for artifact types that should use file-backed storage on disk.
|
||||||
fn is_file_backed_type(artifact_type: ArtifactType) -> bool {
|
fn is_file_backed_type(artifact_type: ArtifactType) -> bool {
|
||||||
matches!(
|
matches!(
|
||||||
@@ -1775,14 +1968,19 @@ pub async fn stream_artifact(
|
|||||||
let token = params.token.as_ref().ok_or(ApiError::Unauthorized(
|
let token = params.token.as_ref().ok_or(ApiError::Unauthorized(
|
||||||
"Missing authentication token".to_string(),
|
"Missing authentication token".to_string(),
|
||||||
))?;
|
))?;
|
||||||
validate_token(token, &state.jwt_config)
|
let claims = validate_token(token, &state.jwt_config)
|
||||||
.map_err(|_| ApiError::Unauthorized("Invalid authentication token".to_string()))?;
|
.map_err(|_| ApiError::Unauthorized("Invalid authentication token".to_string()))?;
|
||||||
|
let user = AuthenticatedUser { claims };
|
||||||
|
|
||||||
// --- resolve artifact + latest version ---------------------------------
|
// --- resolve artifact + latest version ---------------------------------
|
||||||
let artifact = ArtifactRepository::find_by_id(&state.db, id)
|
let artifact = ArtifactRepository::find_by_id(&state.db, id)
|
||||||
.await?
|
.await?
|
||||||
.ok_or_else(|| ApiError::NotFound(format!("Artifact with ID {} not found", id)))?;
|
.ok_or_else(|| ApiError::NotFound(format!("Artifact with ID {} not found", id)))?;
|
||||||
|
|
||||||
|
authorize_artifact_action(&state, &user, Action::Read, &artifact)
|
||||||
|
.await
|
||||||
|
.map_err(|_| ApiError::NotFound(format!("Artifact with ID {} not found", id)))?;
|
||||||
|
|
||||||
if !is_file_backed_type(artifact.r#type) {
|
if !is_file_backed_type(artifact.r#type) {
|
||||||
return Err(ApiError::BadRequest(format!(
|
return Err(ApiError::BadRequest(format!(
|
||||||
"Artifact '{}' is type {:?} which is not file-backed. \
|
"Artifact '{}' is type {:?} which is not file-backed. \
|
||||||
|
|||||||
@@ -169,6 +169,12 @@ pub async fn login(
|
|||||||
.await?
|
.await?
|
||||||
.ok_or_else(|| ApiError::Unauthorized("Invalid login or password".to_string()))?;
|
.ok_or_else(|| ApiError::Unauthorized("Invalid login or password".to_string()))?;
|
||||||
|
|
||||||
|
if identity.frozen {
|
||||||
|
return Err(ApiError::Forbidden(
|
||||||
|
"Identity is frozen and cannot authenticate".to_string(),
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
// Check if identity has a password set
|
// Check if identity has a password set
|
||||||
let password_hash = identity
|
let password_hash = identity
|
||||||
.password_hash
|
.password_hash
|
||||||
@@ -324,6 +330,12 @@ pub async fn refresh_token(
|
|||||||
.await?
|
.await?
|
||||||
.ok_or_else(|| ApiError::Unauthorized("Identity not found".to_string()))?;
|
.ok_or_else(|| ApiError::Unauthorized("Identity not found".to_string()))?;
|
||||||
|
|
||||||
|
if identity.frozen {
|
||||||
|
return Err(ApiError::Forbidden(
|
||||||
|
"Identity is frozen and cannot authenticate".to_string(),
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
// Generate new tokens
|
// Generate new tokens
|
||||||
let access_token = generate_access_token(identity.id, &identity.login, &state.jwt_config)?;
|
let access_token = generate_access_token(identity.id, &identity.login, &state.jwt_config)?;
|
||||||
let refresh_token = generate_refresh_token(identity.id, &identity.login, &state.jwt_config)?;
|
let refresh_token = generate_refresh_token(identity.id, &identity.login, &state.jwt_config)?;
|
||||||
@@ -380,6 +392,12 @@ pub async fn get_current_user(
|
|||||||
.await?
|
.await?
|
||||||
.ok_or_else(|| ApiError::NotFound("Identity not found".to_string()))?;
|
.ok_or_else(|| ApiError::NotFound("Identity not found".to_string()))?;
|
||||||
|
|
||||||
|
if identity.frozen {
|
||||||
|
return Err(ApiError::Forbidden(
|
||||||
|
"Identity is frozen and cannot authenticate".to_string(),
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
let response = CurrentUserResponse {
|
let response = CurrentUserResponse {
|
||||||
id: identity.id,
|
id: identity.id,
|
||||||
login: identity.login,
|
login: identity.login,
|
||||||
@@ -551,6 +569,7 @@ pub async fn change_password(
|
|||||||
display_name: None,
|
display_name: None,
|
||||||
password_hash: Some(new_password_hash),
|
password_hash: Some(new_password_hash),
|
||||||
attributes: None,
|
attributes: None,
|
||||||
|
frozen: None,
|
||||||
};
|
};
|
||||||
|
|
||||||
IdentityRepository::update(&state.db, identity_id, update_input).await?;
|
IdentityRepository::update(&state.db, identity_id, update_input).await?;
|
||||||
|
|||||||
@@ -82,6 +82,17 @@ pub async fn create_event(
|
|||||||
State(state): State<Arc<AppState>>,
|
State(state): State<Arc<AppState>>,
|
||||||
Json(payload): Json<CreateEventRequest>,
|
Json(payload): Json<CreateEventRequest>,
|
||||||
) -> ApiResult<impl IntoResponse> {
|
) -> ApiResult<impl IntoResponse> {
|
||||||
|
// Only sensor and execution tokens may create events directly.
|
||||||
|
// User sessions must go through the webhook receiver instead.
|
||||||
|
use crate::auth::jwt::TokenType;
|
||||||
|
if user.0.claims.token_type == TokenType::Access {
|
||||||
|
return Err(ApiError::Forbidden(
|
||||||
|
"Events may only be created by sensor services. To fire an event as a user, \
|
||||||
|
enable webhooks on the trigger and POST to its webhook URL."
|
||||||
|
.to_string(),
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
// Validate request
|
// Validate request
|
||||||
payload
|
payload
|
||||||
.validate()
|
.validate()
|
||||||
@@ -128,7 +139,6 @@ pub async fn create_event(
|
|||||||
};
|
};
|
||||||
|
|
||||||
// Determine source (sensor) from authenticated user if it's a sensor token
|
// Determine source (sensor) from authenticated user if it's a sensor token
|
||||||
use crate::auth::jwt::TokenType;
|
|
||||||
let (source_id, source_ref) = match user.0.claims.token_type {
|
let (source_id, source_ref) = match user.0.claims.token_type {
|
||||||
TokenType::Sensor => {
|
TokenType::Sensor => {
|
||||||
// Extract sensor reference from login
|
// Extract sensor reference from login
|
||||||
|
|||||||
@@ -93,19 +93,6 @@ pub async fn create_execution(
|
|||||||
},
|
},
|
||||||
)
|
)
|
||||||
.await?;
|
.await?;
|
||||||
|
|
||||||
let mut execution_ctx = AuthorizationContext::new(identity_id);
|
|
||||||
execution_ctx.pack_ref = Some(action.pack_ref.clone());
|
|
||||||
authz
|
|
||||||
.authorize(
|
|
||||||
&user,
|
|
||||||
AuthorizationCheck {
|
|
||||||
resource: Resource::Executions,
|
|
||||||
action: Action::Create,
|
|
||||||
context: execution_ctx,
|
|
||||||
},
|
|
||||||
)
|
|
||||||
.await?;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Create execution input
|
// Create execution input
|
||||||
|
|||||||
@@ -120,12 +120,16 @@ pub async fn get_key(
|
|||||||
.await?
|
.await?
|
||||||
.ok_or_else(|| ApiError::NotFound(format!("Key '{}' not found", key_ref)))?;
|
.ok_or_else(|| ApiError::NotFound(format!("Key '{}' not found", key_ref)))?;
|
||||||
|
|
||||||
if user.0.claims.token_type == TokenType::Access {
|
// For encrypted keys, track whether this caller is permitted to see the value.
|
||||||
|
// Non-Access tokens (sensor, execution) always get full access.
|
||||||
|
let can_decrypt = if user.0.claims.token_type == TokenType::Access {
|
||||||
let identity_id = user
|
let identity_id = user
|
||||||
.0
|
.0
|
||||||
.identity_id()
|
.identity_id()
|
||||||
.map_err(|_| ApiError::Unauthorized("Invalid user identity".to_string()))?;
|
.map_err(|_| ApiError::Unauthorized("Invalid user identity".to_string()))?;
|
||||||
let authz = AuthorizationService::new(state.db.clone());
|
let authz = AuthorizationService::new(state.db.clone());
|
||||||
|
|
||||||
|
// Basic read check — hide behind 404 to prevent enumeration.
|
||||||
authz
|
authz
|
||||||
.authorize(
|
.authorize(
|
||||||
&user.0,
|
&user.0,
|
||||||
@@ -136,19 +140,43 @@ pub async fn get_key(
|
|||||||
},
|
},
|
||||||
)
|
)
|
||||||
.await
|
.await
|
||||||
// Hide unauthorized records behind 404 to reduce enumeration leakage.
|
|
||||||
.map_err(|_| ApiError::NotFound(format!("Key '{}' not found", key_ref)))?;
|
.map_err(|_| ApiError::NotFound(format!("Key '{}' not found", key_ref)))?;
|
||||||
}
|
|
||||||
|
|
||||||
// Decrypt value if encrypted
|
// For encrypted keys, separately check Keys::Decrypt.
|
||||||
|
// Failing this is not an error — we just return the value as null.
|
||||||
if key.encrypted {
|
if key.encrypted {
|
||||||
let encryption_key = state
|
authz
|
||||||
|
.authorize(
|
||||||
|
&user.0,
|
||||||
|
AuthorizationCheck {
|
||||||
|
resource: Resource::Keys,
|
||||||
|
action: Action::Decrypt,
|
||||||
|
context: key_authorization_context(identity_id, &key),
|
||||||
|
},
|
||||||
|
)
|
||||||
|
.await
|
||||||
|
.is_ok()
|
||||||
|
} else {
|
||||||
|
true
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
true
|
||||||
|
};
|
||||||
|
|
||||||
|
// Decrypt value if encrypted and caller has permission.
|
||||||
|
// If they lack Keys::Decrypt, return null rather than the ciphertext.
|
||||||
|
if key.encrypted {
|
||||||
|
if can_decrypt {
|
||||||
|
let encryption_key =
|
||||||
|
state
|
||||||
.config
|
.config
|
||||||
.security
|
.security
|
||||||
.encryption_key
|
.encryption_key
|
||||||
.as_ref()
|
.as_ref()
|
||||||
.ok_or_else(|| {
|
.ok_or_else(|| {
|
||||||
ApiError::InternalServerError("Encryption key not configured on server".to_string())
|
ApiError::InternalServerError(
|
||||||
|
"Encryption key not configured on server".to_string(),
|
||||||
|
)
|
||||||
})?;
|
})?;
|
||||||
|
|
||||||
let decrypted_value = attune_common::crypto::decrypt_json(&key.value, encryption_key)
|
let decrypted_value = attune_common::crypto::decrypt_json(&key.value, encryption_key)
|
||||||
@@ -158,6 +186,9 @@ pub async fn get_key(
|
|||||||
})?;
|
})?;
|
||||||
|
|
||||||
key.value = decrypted_value;
|
key.value = decrypted_value;
|
||||||
|
} else {
|
||||||
|
key.value = serde_json::Value::Null;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
let response = ApiResponse::new(KeyResponse::from(key));
|
let response = ApiResponse::new(KeyResponse::from(key));
|
||||||
@@ -195,6 +226,7 @@ pub async fn create_key(
|
|||||||
let mut ctx = AuthorizationContext::new(identity_id);
|
let mut ctx = AuthorizationContext::new(identity_id);
|
||||||
ctx.owner_identity_id = request.owner_identity;
|
ctx.owner_identity_id = request.owner_identity;
|
||||||
ctx.owner_type = Some(request.owner_type);
|
ctx.owner_type = Some(request.owner_type);
|
||||||
|
ctx.owner_ref = requested_key_owner_ref(&request);
|
||||||
ctx.encrypted = Some(request.encrypted);
|
ctx.encrypted = Some(request.encrypted);
|
||||||
ctx.target_ref = Some(request.r#ref.clone());
|
ctx.target_ref = Some(request.r#ref.clone());
|
||||||
|
|
||||||
@@ -541,6 +573,38 @@ fn key_authorization_context(identity_id: i64, key: &Key) -> AuthorizationContex
|
|||||||
ctx.target_ref = Some(key.r#ref.clone());
|
ctx.target_ref = Some(key.r#ref.clone());
|
||||||
ctx.owner_identity_id = key.owner_identity;
|
ctx.owner_identity_id = key.owner_identity;
|
||||||
ctx.owner_type = Some(key.owner_type);
|
ctx.owner_type = Some(key.owner_type);
|
||||||
|
ctx.owner_ref = key_owner_ref(
|
||||||
|
key.owner_type,
|
||||||
|
key.owner.as_deref(),
|
||||||
|
key.owner_pack_ref.as_deref(),
|
||||||
|
key.owner_action_ref.as_deref(),
|
||||||
|
key.owner_sensor_ref.as_deref(),
|
||||||
|
);
|
||||||
ctx.encrypted = Some(key.encrypted);
|
ctx.encrypted = Some(key.encrypted);
|
||||||
ctx
|
ctx
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn requested_key_owner_ref(request: &CreateKeyRequest) -> Option<String> {
|
||||||
|
key_owner_ref(
|
||||||
|
request.owner_type,
|
||||||
|
request.owner.as_deref(),
|
||||||
|
request.owner_pack_ref.as_deref(),
|
||||||
|
request.owner_action_ref.as_deref(),
|
||||||
|
request.owner_sensor_ref.as_deref(),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn key_owner_ref(
|
||||||
|
owner_type: OwnerType,
|
||||||
|
owner: Option<&str>,
|
||||||
|
owner_pack_ref: Option<&str>,
|
||||||
|
owner_action_ref: Option<&str>,
|
||||||
|
owner_sensor_ref: Option<&str>,
|
||||||
|
) -> Option<String> {
|
||||||
|
match owner_type {
|
||||||
|
OwnerType::Pack => owner_pack_ref.map(str::to_string),
|
||||||
|
OwnerType::Action => owner_action_ref.map(str::to_string),
|
||||||
|
OwnerType::Sensor => owner_sensor_ref.map(str::to_string),
|
||||||
|
_ => owner.map(str::to_string),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|||||||
@@ -1,6 +1,7 @@
|
|||||||
//! API route modules
|
//! API route modules
|
||||||
|
|
||||||
pub mod actions;
|
pub mod actions;
|
||||||
|
pub mod agent;
|
||||||
pub mod analytics;
|
pub mod analytics;
|
||||||
pub mod artifacts;
|
pub mod artifacts;
|
||||||
pub mod auth;
|
pub mod auth;
|
||||||
@@ -19,6 +20,7 @@ pub mod webhooks;
|
|||||||
pub mod workflows;
|
pub mod workflows;
|
||||||
|
|
||||||
pub use actions::routes as action_routes;
|
pub use actions::routes as action_routes;
|
||||||
|
pub use agent::routes as agent_routes;
|
||||||
pub use analytics::routes as analytics_routes;
|
pub use analytics::routes as analytics_routes;
|
||||||
pub use artifacts::routes as artifact_routes;
|
pub use artifacts::routes as artifact_routes;
|
||||||
pub use auth::routes as auth_routes;
|
pub use auth::routes as auth_routes;
|
||||||
|
|||||||
@@ -9,12 +9,14 @@ use std::sync::Arc;
|
|||||||
use validator::Validate;
|
use validator::Validate;
|
||||||
|
|
||||||
use attune_common::{
|
use attune_common::{
|
||||||
models::identity::{Identity, PermissionSet},
|
models::identity::{Identity, IdentityRoleAssignment},
|
||||||
rbac::{Action, AuthorizationContext, Resource},
|
rbac::{Action, AuthorizationContext, Resource},
|
||||||
repositories::{
|
repositories::{
|
||||||
identity::{
|
identity::{
|
||||||
CreateIdentityInput, CreatePermissionAssignmentInput, IdentityRepository,
|
CreateIdentityInput, CreateIdentityRoleAssignmentInput,
|
||||||
PermissionAssignmentRepository, PermissionSetRepository, UpdateIdentityInput,
|
CreatePermissionAssignmentInput, CreatePermissionSetRoleAssignmentInput,
|
||||||
|
IdentityRepository, IdentityRoleAssignmentRepository, PermissionAssignmentRepository,
|
||||||
|
PermissionSetRepository, PermissionSetRoleAssignmentRepository, UpdateIdentityInput,
|
||||||
},
|
},
|
||||||
Create, Delete, FindById, FindByRef, List, Update,
|
Create, Delete, FindById, FindByRef, List, Update,
|
||||||
},
|
},
|
||||||
@@ -26,9 +28,12 @@ use crate::{
|
|||||||
authz::{AuthorizationCheck, AuthorizationService},
|
authz::{AuthorizationCheck, AuthorizationService},
|
||||||
dto::{
|
dto::{
|
||||||
common::{PaginatedResponse, PaginationParams},
|
common::{PaginatedResponse, PaginationParams},
|
||||||
ApiResponse, CreateIdentityRequest, CreatePermissionAssignmentRequest, IdentityResponse,
|
ApiResponse, CreateIdentityRequest, CreateIdentityRoleAssignmentRequest,
|
||||||
IdentitySummary, PermissionAssignmentResponse, PermissionSetQueryParams,
|
CreatePermissionAssignmentRequest, CreatePermissionSetRoleAssignmentRequest,
|
||||||
PermissionSetSummary, SuccessResponse, UpdateIdentityRequest,
|
IdentityResponse, IdentityRoleAssignmentResponse, IdentitySummary,
|
||||||
|
PermissionAssignmentResponse, PermissionSetQueryParams,
|
||||||
|
PermissionSetRoleAssignmentResponse, PermissionSetSummary, SuccessResponse,
|
||||||
|
UpdateIdentityRequest,
|
||||||
},
|
},
|
||||||
middleware::{ApiError, ApiResult},
|
middleware::{ApiError, ApiResult},
|
||||||
state::AppState,
|
state::AppState,
|
||||||
@@ -58,16 +63,22 @@ pub async fn list_identities(
|
|||||||
let page_items = if start >= identities.len() {
|
let page_items = if start >= identities.len() {
|
||||||
Vec::new()
|
Vec::new()
|
||||||
} else {
|
} else {
|
||||||
identities[start..end]
|
identities[start..end].to_vec()
|
||||||
.iter()
|
|
||||||
.cloned()
|
|
||||||
.map(IdentitySummary::from)
|
|
||||||
.collect()
|
|
||||||
};
|
};
|
||||||
|
|
||||||
|
let mut summaries = Vec::with_capacity(page_items.len());
|
||||||
|
for identity in page_items {
|
||||||
|
let role_assignments =
|
||||||
|
IdentityRoleAssignmentRepository::find_by_identity(&state.db, identity.id).await?;
|
||||||
|
let roles = role_assignments.into_iter().map(|ra| ra.role).collect();
|
||||||
|
let mut summary = IdentitySummary::from(identity);
|
||||||
|
summary.roles = roles;
|
||||||
|
summaries.push(summary);
|
||||||
|
}
|
||||||
|
|
||||||
Ok((
|
Ok((
|
||||||
StatusCode::OK,
|
StatusCode::OK,
|
||||||
Json(PaginatedResponse::new(page_items, &query, total)),
|
Json(PaginatedResponse::new(summaries, &query, total)),
|
||||||
))
|
))
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -94,10 +105,42 @@ pub async fn get_identity(
|
|||||||
let identity = IdentityRepository::find_by_id(&state.db, identity_id)
|
let identity = IdentityRepository::find_by_id(&state.db, identity_id)
|
||||||
.await?
|
.await?
|
||||||
.ok_or_else(|| ApiError::NotFound(format!("Identity '{}' not found", identity_id)))?;
|
.ok_or_else(|| ApiError::NotFound(format!("Identity '{}' not found", identity_id)))?;
|
||||||
|
let roles = IdentityRoleAssignmentRepository::find_by_identity(&state.db, identity_id).await?;
|
||||||
|
let assignments =
|
||||||
|
PermissionAssignmentRepository::find_by_identity(&state.db, identity_id).await?;
|
||||||
|
let permission_sets = PermissionSetRepository::find_by_identity(&state.db, identity_id).await?;
|
||||||
|
let permission_set_refs = permission_sets
|
||||||
|
.into_iter()
|
||||||
|
.map(|ps| (ps.id, ps.r#ref))
|
||||||
|
.collect::<std::collections::HashMap<_, _>>();
|
||||||
|
|
||||||
Ok((
|
Ok((
|
||||||
StatusCode::OK,
|
StatusCode::OK,
|
||||||
Json(ApiResponse::new(IdentityResponse::from(identity))),
|
Json(ApiResponse::new(IdentityResponse {
|
||||||
|
id: identity.id,
|
||||||
|
login: identity.login,
|
||||||
|
display_name: identity.display_name,
|
||||||
|
frozen: identity.frozen,
|
||||||
|
attributes: identity.attributes,
|
||||||
|
roles: roles
|
||||||
|
.into_iter()
|
||||||
|
.map(IdentityRoleAssignmentResponse::from)
|
||||||
|
.collect(),
|
||||||
|
direct_permissions: assignments
|
||||||
|
.into_iter()
|
||||||
|
.filter_map(|assignment| {
|
||||||
|
permission_set_refs.get(&assignment.permset).cloned().map(
|
||||||
|
|permission_set_ref| PermissionAssignmentResponse {
|
||||||
|
id: assignment.id,
|
||||||
|
identity_id: assignment.identity,
|
||||||
|
permission_set_id: assignment.permset,
|
||||||
|
permission_set_ref,
|
||||||
|
created: assignment.created,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
})
|
||||||
|
.collect(),
|
||||||
|
})),
|
||||||
))
|
))
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -180,6 +223,7 @@ pub async fn update_identity(
|
|||||||
display_name: request.display_name,
|
display_name: request.display_name,
|
||||||
password_hash,
|
password_hash,
|
||||||
attributes: request.attributes,
|
attributes: request.attributes,
|
||||||
|
frozen: request.frozen,
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
.await?;
|
.await?;
|
||||||
@@ -257,10 +301,33 @@ pub async fn list_permission_sets(
|
|||||||
permission_sets.retain(|ps| ps.pack_ref.as_deref() == Some(pack_ref.as_str()));
|
permission_sets.retain(|ps| ps.pack_ref.as_deref() == Some(pack_ref.as_str()));
|
||||||
}
|
}
|
||||||
|
|
||||||
let response: Vec<PermissionSetSummary> = permission_sets
|
let mut response = Vec::with_capacity(permission_sets.len());
|
||||||
|
for permission_set in permission_sets {
|
||||||
|
let permission_set_ref = permission_set.r#ref.clone();
|
||||||
|
let roles = PermissionSetRoleAssignmentRepository::find_by_permission_set(
|
||||||
|
&state.db,
|
||||||
|
permission_set.id,
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
response.push(PermissionSetSummary {
|
||||||
|
id: permission_set.id,
|
||||||
|
r#ref: permission_set.r#ref,
|
||||||
|
pack_ref: permission_set.pack_ref,
|
||||||
|
label: permission_set.label,
|
||||||
|
description: permission_set.description,
|
||||||
|
grants: permission_set.grants,
|
||||||
|
roles: roles
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.map(PermissionSetSummary::from)
|
.map(|assignment| PermissionSetRoleAssignmentResponse {
|
||||||
.collect();
|
id: assignment.id,
|
||||||
|
permission_set_id: assignment.permset,
|
||||||
|
permission_set_ref: Some(permission_set_ref.clone()),
|
||||||
|
role: assignment.role,
|
||||||
|
created: assignment.created,
|
||||||
|
})
|
||||||
|
.collect(),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
Ok((StatusCode::OK, Json(response)))
|
Ok((StatusCode::OK, Json(response)))
|
||||||
}
|
}
|
||||||
@@ -412,6 +479,229 @@ pub async fn delete_permission_assignment(
|
|||||||
))
|
))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[utoipa::path(
|
||||||
|
post,
|
||||||
|
path = "/api/v1/identities/{id}/roles",
|
||||||
|
tag = "permissions",
|
||||||
|
params(
|
||||||
|
("id" = i64, Path, description = "Identity ID")
|
||||||
|
),
|
||||||
|
request_body = CreateIdentityRoleAssignmentRequest,
|
||||||
|
responses(
|
||||||
|
(status = 201, description = "Identity role assignment created", body = inline(ApiResponse<IdentityRoleAssignmentResponse>)),
|
||||||
|
(status = 404, description = "Identity not found")
|
||||||
|
),
|
||||||
|
security(("bearer_auth" = []))
|
||||||
|
)]
|
||||||
|
pub async fn create_identity_role_assignment(
|
||||||
|
State(state): State<Arc<AppState>>,
|
||||||
|
RequireAuth(user): RequireAuth,
|
||||||
|
Path(identity_id): Path<i64>,
|
||||||
|
Json(request): Json<CreateIdentityRoleAssignmentRequest>,
|
||||||
|
) -> ApiResult<impl IntoResponse> {
|
||||||
|
authorize_permissions(&state, &user, Resource::Permissions, Action::Manage).await?;
|
||||||
|
request.validate()?;
|
||||||
|
|
||||||
|
IdentityRepository::find_by_id(&state.db, identity_id)
|
||||||
|
.await?
|
||||||
|
.ok_or_else(|| ApiError::NotFound(format!("Identity '{}' not found", identity_id)))?;
|
||||||
|
|
||||||
|
let assignment = IdentityRoleAssignmentRepository::create(
|
||||||
|
&state.db,
|
||||||
|
CreateIdentityRoleAssignmentInput {
|
||||||
|
identity: identity_id,
|
||||||
|
role: request.role,
|
||||||
|
source: "manual".to_string(),
|
||||||
|
managed: false,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
Ok((
|
||||||
|
StatusCode::CREATED,
|
||||||
|
Json(ApiResponse::new(IdentityRoleAssignmentResponse::from(
|
||||||
|
assignment,
|
||||||
|
))),
|
||||||
|
))
|
||||||
|
}
|
||||||
|
|
||||||
|
#[utoipa::path(
|
||||||
|
delete,
|
||||||
|
path = "/api/v1/identities/roles/{id}",
|
||||||
|
tag = "permissions",
|
||||||
|
params(
|
||||||
|
("id" = i64, Path, description = "Identity role assignment ID")
|
||||||
|
),
|
||||||
|
responses(
|
||||||
|
(status = 200, description = "Identity role assignment deleted", body = inline(ApiResponse<SuccessResponse>)),
|
||||||
|
(status = 404, description = "Identity role assignment not found")
|
||||||
|
),
|
||||||
|
security(("bearer_auth" = []))
|
||||||
|
)]
|
||||||
|
pub async fn delete_identity_role_assignment(
|
||||||
|
State(state): State<Arc<AppState>>,
|
||||||
|
RequireAuth(user): RequireAuth,
|
||||||
|
Path(assignment_id): Path<i64>,
|
||||||
|
) -> ApiResult<impl IntoResponse> {
|
||||||
|
authorize_permissions(&state, &user, Resource::Permissions, Action::Manage).await?;
|
||||||
|
|
||||||
|
let assignment = IdentityRoleAssignmentRepository::find_by_id(&state.db, assignment_id)
|
||||||
|
.await?
|
||||||
|
.ok_or_else(|| {
|
||||||
|
ApiError::NotFound(format!(
|
||||||
|
"Identity role assignment '{}' not found",
|
||||||
|
assignment_id
|
||||||
|
))
|
||||||
|
})?;
|
||||||
|
|
||||||
|
if assignment.managed {
|
||||||
|
return Err(ApiError::BadRequest(
|
||||||
|
"Managed role assignments must be updated through the identity provider sync"
|
||||||
|
.to_string(),
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
|
IdentityRoleAssignmentRepository::delete(&state.db, assignment_id).await?;
|
||||||
|
|
||||||
|
Ok((
|
||||||
|
StatusCode::OK,
|
||||||
|
Json(ApiResponse::new(SuccessResponse::new(
|
||||||
|
"Identity role assignment deleted successfully",
|
||||||
|
))),
|
||||||
|
))
|
||||||
|
}
|
||||||
|
|
||||||
|
#[utoipa::path(
|
||||||
|
post,
|
||||||
|
path = "/api/v1/permissions/sets/{id}/roles",
|
||||||
|
tag = "permissions",
|
||||||
|
params(
|
||||||
|
("id" = i64, Path, description = "Permission set ID")
|
||||||
|
),
|
||||||
|
request_body = CreatePermissionSetRoleAssignmentRequest,
|
||||||
|
responses(
|
||||||
|
(status = 201, description = "Permission set role assignment created", body = inline(ApiResponse<PermissionSetRoleAssignmentResponse>)),
|
||||||
|
(status = 404, description = "Permission set not found")
|
||||||
|
),
|
||||||
|
security(("bearer_auth" = []))
|
||||||
|
)]
|
||||||
|
pub async fn create_permission_set_role_assignment(
|
||||||
|
State(state): State<Arc<AppState>>,
|
||||||
|
RequireAuth(user): RequireAuth,
|
||||||
|
Path(permission_set_id): Path<i64>,
|
||||||
|
Json(request): Json<CreatePermissionSetRoleAssignmentRequest>,
|
||||||
|
) -> ApiResult<impl IntoResponse> {
|
||||||
|
authorize_permissions(&state, &user, Resource::Permissions, Action::Manage).await?;
|
||||||
|
request.validate()?;
|
||||||
|
|
||||||
|
let permission_set = PermissionSetRepository::find_by_id(&state.db, permission_set_id)
|
||||||
|
.await?
|
||||||
|
.ok_or_else(|| {
|
||||||
|
ApiError::NotFound(format!("Permission set '{}' not found", permission_set_id))
|
||||||
|
})?;
|
||||||
|
|
||||||
|
let assignment = PermissionSetRoleAssignmentRepository::create(
|
||||||
|
&state.db,
|
||||||
|
CreatePermissionSetRoleAssignmentInput {
|
||||||
|
permset: permission_set_id,
|
||||||
|
role: request.role,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
Ok((
|
||||||
|
StatusCode::CREATED,
|
||||||
|
Json(ApiResponse::new(PermissionSetRoleAssignmentResponse {
|
||||||
|
id: assignment.id,
|
||||||
|
permission_set_id: assignment.permset,
|
||||||
|
permission_set_ref: Some(permission_set.r#ref),
|
||||||
|
role: assignment.role,
|
||||||
|
created: assignment.created,
|
||||||
|
})),
|
||||||
|
))
|
||||||
|
}
|
||||||
|
|
||||||
|
#[utoipa::path(
|
||||||
|
delete,
|
||||||
|
path = "/api/v1/permissions/sets/roles/{id}",
|
||||||
|
tag = "permissions",
|
||||||
|
params(
|
||||||
|
("id" = i64, Path, description = "Permission set role assignment ID")
|
||||||
|
),
|
||||||
|
responses(
|
||||||
|
(status = 200, description = "Permission set role assignment deleted", body = inline(ApiResponse<SuccessResponse>)),
|
||||||
|
(status = 404, description = "Permission set role assignment not found")
|
||||||
|
),
|
||||||
|
security(("bearer_auth" = []))
|
||||||
|
)]
|
||||||
|
pub async fn delete_permission_set_role_assignment(
|
||||||
|
State(state): State<Arc<AppState>>,
|
||||||
|
RequireAuth(user): RequireAuth,
|
||||||
|
Path(assignment_id): Path<i64>,
|
||||||
|
) -> ApiResult<impl IntoResponse> {
|
||||||
|
authorize_permissions(&state, &user, Resource::Permissions, Action::Manage).await?;
|
||||||
|
|
||||||
|
PermissionSetRoleAssignmentRepository::find_by_id(&state.db, assignment_id)
|
||||||
|
.await?
|
||||||
|
.ok_or_else(|| {
|
||||||
|
ApiError::NotFound(format!(
|
||||||
|
"Permission set role assignment '{}' not found",
|
||||||
|
assignment_id
|
||||||
|
))
|
||||||
|
})?;
|
||||||
|
|
||||||
|
PermissionSetRoleAssignmentRepository::delete(&state.db, assignment_id).await?;
|
||||||
|
|
||||||
|
Ok((
|
||||||
|
StatusCode::OK,
|
||||||
|
Json(ApiResponse::new(SuccessResponse::new(
|
||||||
|
"Permission set role assignment deleted successfully",
|
||||||
|
))),
|
||||||
|
))
|
||||||
|
}
|
||||||
|
|
||||||
|
#[utoipa::path(
|
||||||
|
post,
|
||||||
|
path = "/api/v1/identities/{id}/freeze",
|
||||||
|
tag = "permissions",
|
||||||
|
params(
|
||||||
|
("id" = i64, Path, description = "Identity ID")
|
||||||
|
),
|
||||||
|
responses(
|
||||||
|
(status = 200, description = "Identity frozen", body = inline(ApiResponse<SuccessResponse>)),
|
||||||
|
(status = 404, description = "Identity not found")
|
||||||
|
),
|
||||||
|
security(("bearer_auth" = []))
|
||||||
|
)]
|
||||||
|
pub async fn freeze_identity(
|
||||||
|
State(state): State<Arc<AppState>>,
|
||||||
|
RequireAuth(user): RequireAuth,
|
||||||
|
Path(identity_id): Path<i64>,
|
||||||
|
) -> ApiResult<impl IntoResponse> {
|
||||||
|
set_identity_frozen(&state, &user, identity_id, true).await
|
||||||
|
}
|
||||||
|
|
||||||
|
#[utoipa::path(
|
||||||
|
post,
|
||||||
|
path = "/api/v1/identities/{id}/unfreeze",
|
||||||
|
tag = "permissions",
|
||||||
|
params(
|
||||||
|
("id" = i64, Path, description = "Identity ID")
|
||||||
|
),
|
||||||
|
responses(
|
||||||
|
(status = 200, description = "Identity unfrozen", body = inline(ApiResponse<SuccessResponse>)),
|
||||||
|
(status = 404, description = "Identity not found")
|
||||||
|
),
|
||||||
|
security(("bearer_auth" = []))
|
||||||
|
)]
|
||||||
|
pub async fn unfreeze_identity(
|
||||||
|
State(state): State<Arc<AppState>>,
|
||||||
|
RequireAuth(user): RequireAuth,
|
||||||
|
Path(identity_id): Path<i64>,
|
||||||
|
) -> ApiResult<impl IntoResponse> {
|
||||||
|
set_identity_frozen(&state, &user, identity_id, false).await
|
||||||
|
}
|
||||||
|
|
||||||
pub fn routes() -> Router<Arc<AppState>> {
|
pub fn routes() -> Router<Arc<AppState>> {
|
||||||
Router::new()
|
Router::new()
|
||||||
.route("/identities", get(list_identities).post(create_identity))
|
.route("/identities", get(list_identities).post(create_identity))
|
||||||
@@ -421,11 +711,29 @@ pub fn routes() -> Router<Arc<AppState>> {
|
|||||||
.put(update_identity)
|
.put(update_identity)
|
||||||
.delete(delete_identity),
|
.delete(delete_identity),
|
||||||
)
|
)
|
||||||
|
.route(
|
||||||
|
"/identities/{id}/roles",
|
||||||
|
post(create_identity_role_assignment),
|
||||||
|
)
|
||||||
.route(
|
.route(
|
||||||
"/identities/{id}/permissions",
|
"/identities/{id}/permissions",
|
||||||
get(list_identity_permissions),
|
get(list_identity_permissions),
|
||||||
)
|
)
|
||||||
|
.route("/identities/{id}/freeze", post(freeze_identity))
|
||||||
|
.route("/identities/{id}/unfreeze", post(unfreeze_identity))
|
||||||
|
.route(
|
||||||
|
"/identities/roles/{id}",
|
||||||
|
delete(delete_identity_role_assignment),
|
||||||
|
)
|
||||||
.route("/permissions/sets", get(list_permission_sets))
|
.route("/permissions/sets", get(list_permission_sets))
|
||||||
|
.route(
|
||||||
|
"/permissions/sets/{id}/roles",
|
||||||
|
post(create_permission_set_role_assignment),
|
||||||
|
)
|
||||||
|
.route(
|
||||||
|
"/permissions/sets/roles/{id}",
|
||||||
|
delete(delete_permission_set_role_assignment),
|
||||||
|
)
|
||||||
.route(
|
.route(
|
||||||
"/permissions/assignments",
|
"/permissions/assignments",
|
||||||
post(create_permission_assignment),
|
post(create_permission_assignment),
|
||||||
@@ -488,20 +796,82 @@ impl From<Identity> for IdentitySummary {
|
|||||||
id: value.id,
|
id: value.id,
|
||||||
login: value.login,
|
login: value.login,
|
||||||
display_name: value.display_name,
|
display_name: value.display_name,
|
||||||
|
frozen: value.frozen,
|
||||||
attributes: value.attributes,
|
attributes: value.attributes,
|
||||||
|
roles: Vec::new(),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl From<PermissionSet> for PermissionSetSummary {
|
impl From<IdentityRoleAssignment> for IdentityRoleAssignmentResponse {
|
||||||
fn from(value: PermissionSet) -> Self {
|
fn from(value: IdentityRoleAssignment) -> Self {
|
||||||
Self {
|
Self {
|
||||||
id: value.id,
|
id: value.id,
|
||||||
r#ref: value.r#ref,
|
identity_id: value.identity,
|
||||||
pack_ref: value.pack_ref,
|
role: value.role,
|
||||||
label: value.label,
|
source: value.source,
|
||||||
description: value.description,
|
managed: value.managed,
|
||||||
grants: value.grants,
|
created: value.created,
|
||||||
|
updated: value.updated,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
impl From<Identity> for IdentityResponse {
|
||||||
|
fn from(value: Identity) -> Self {
|
||||||
|
Self {
|
||||||
|
id: value.id,
|
||||||
|
login: value.login,
|
||||||
|
display_name: value.display_name,
|
||||||
|
frozen: value.frozen,
|
||||||
|
attributes: value.attributes,
|
||||||
|
roles: Vec::new(),
|
||||||
|
direct_permissions: Vec::new(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn set_identity_frozen(
|
||||||
|
state: &Arc<AppState>,
|
||||||
|
user: &crate::auth::middleware::AuthenticatedUser,
|
||||||
|
identity_id: i64,
|
||||||
|
frozen: bool,
|
||||||
|
) -> ApiResult<impl IntoResponse> {
|
||||||
|
authorize_permissions(state, user, Resource::Identities, Action::Update).await?;
|
||||||
|
|
||||||
|
let caller_identity_id = user
|
||||||
|
.identity_id()
|
||||||
|
.map_err(|_| ApiError::Unauthorized("Invalid user identity".to_string()))?;
|
||||||
|
if caller_identity_id == identity_id && frozen {
|
||||||
|
return Err(ApiError::BadRequest(
|
||||||
|
"Refusing to freeze the currently authenticated identity".to_string(),
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
|
IdentityRepository::find_by_id(&state.db, identity_id)
|
||||||
|
.await?
|
||||||
|
.ok_or_else(|| ApiError::NotFound(format!("Identity '{}' not found", identity_id)))?;
|
||||||
|
|
||||||
|
IdentityRepository::update(
|
||||||
|
&state.db,
|
||||||
|
identity_id,
|
||||||
|
UpdateIdentityInput {
|
||||||
|
display_name: None,
|
||||||
|
password_hash: None,
|
||||||
|
attributes: None,
|
||||||
|
frozen: Some(frozen),
|
||||||
|
},
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
let message = if frozen {
|
||||||
|
"Identity frozen successfully"
|
||||||
|
} else {
|
||||||
|
"Identity unfrozen successfully"
|
||||||
|
};
|
||||||
|
|
||||||
|
Ok((
|
||||||
|
StatusCode::OK,
|
||||||
|
Json(ApiResponse::new(SuccessResponse::new(message))),
|
||||||
|
))
|
||||||
|
}
|
||||||
|
|||||||
@@ -20,7 +20,7 @@ use attune_common::repositories::{
|
|||||||
pack::PackRepository,
|
pack::PackRepository,
|
||||||
rule::{CreateRuleInput, RuleRepository, RuleSearchFilters, UpdateRuleInput},
|
rule::{CreateRuleInput, RuleRepository, RuleSearchFilters, UpdateRuleInput},
|
||||||
trigger::TriggerRepository,
|
trigger::TriggerRepository,
|
||||||
Create, Delete, FindByRef, Update,
|
Create, Delete, FindByRef, Patch, Update,
|
||||||
};
|
};
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
@@ -474,7 +474,7 @@ pub async fn update_rule(
|
|||||||
// Create update input
|
// Create update input
|
||||||
let update_input = UpdateRuleInput {
|
let update_input = UpdateRuleInput {
|
||||||
label: request.label,
|
label: request.label,
|
||||||
description: request.description,
|
description: request.description.map(Patch::Set),
|
||||||
conditions: request.conditions,
|
conditions: request.conditions,
|
||||||
action_params: request.action_params,
|
action_params: request.action_params,
|
||||||
trigger_params: request.trigger_params,
|
trigger_params: request.trigger_params,
|
||||||
|
|||||||
@@ -176,9 +176,12 @@ pub async fn create_runtime(
|
|||||||
pack_ref,
|
pack_ref,
|
||||||
description: request.description,
|
description: request.description,
|
||||||
name: request.name,
|
name: request.name,
|
||||||
|
aliases: vec![],
|
||||||
distributions: request.distributions,
|
distributions: request.distributions,
|
||||||
installation: request.installation,
|
installation: request.installation,
|
||||||
execution_config: request.execution_config,
|
execution_config: request.execution_config,
|
||||||
|
auto_detected: false,
|
||||||
|
detection_config: serde_json::json!({}),
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
.await?;
|
.await?;
|
||||||
@@ -232,6 +235,7 @@ pub async fn update_runtime(
|
|||||||
NullableJsonPatch::Clear => Patch::Clear,
|
NullableJsonPatch::Clear => Patch::Clear,
|
||||||
}),
|
}),
|
||||||
execution_config: request.execution_config,
|
execution_config: request.execution_config,
|
||||||
|
..Default::default()
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
.await?;
|
.await?;
|
||||||
|
|||||||
@@ -724,7 +724,7 @@ pub async fn update_sensor(
|
|||||||
// Create update input
|
// Create update input
|
||||||
let update_input = UpdateSensorInput {
|
let update_input = UpdateSensorInput {
|
||||||
label: request.label,
|
label: request.label,
|
||||||
description: request.description,
|
description: request.description.map(Patch::Set),
|
||||||
entrypoint: request.entrypoint,
|
entrypoint: request.entrypoint,
|
||||||
runtime: None,
|
runtime: None,
|
||||||
runtime_ref: None,
|
runtime_ref: None,
|
||||||
|
|||||||
@@ -20,8 +20,11 @@ use attune_common::{
|
|||||||
},
|
},
|
||||||
};
|
};
|
||||||
|
|
||||||
|
use attune_common::rbac::{Action, AuthorizationContext, Resource};
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
auth::middleware::RequireAuth,
|
auth::middleware::RequireAuth,
|
||||||
|
authz::{AuthorizationCheck, AuthorizationService},
|
||||||
dto::{
|
dto::{
|
||||||
trigger::TriggerResponse,
|
trigger::TriggerResponse,
|
||||||
webhook::{WebhookReceiverRequest, WebhookReceiverResponse},
|
webhook::{WebhookReceiverRequest, WebhookReceiverResponse},
|
||||||
@@ -170,7 +173,7 @@ fn get_webhook_config_array(
|
|||||||
)]
|
)]
|
||||||
pub async fn enable_webhook(
|
pub async fn enable_webhook(
|
||||||
State(state): State<Arc<AppState>>,
|
State(state): State<Arc<AppState>>,
|
||||||
RequireAuth(_user): RequireAuth,
|
RequireAuth(user): RequireAuth,
|
||||||
Path(trigger_ref): Path<String>,
|
Path(trigger_ref): Path<String>,
|
||||||
) -> ApiResult<impl IntoResponse> {
|
) -> ApiResult<impl IntoResponse> {
|
||||||
// First, find the trigger by ref to get its ID
|
// First, find the trigger by ref to get its ID
|
||||||
@@ -179,6 +182,26 @@ pub async fn enable_webhook(
|
|||||||
.map_err(|e| ApiError::InternalServerError(e.to_string()))?
|
.map_err(|e| ApiError::InternalServerError(e.to_string()))?
|
||||||
.ok_or_else(|| ApiError::NotFound(format!("Trigger '{}' not found", trigger_ref)))?;
|
.ok_or_else(|| ApiError::NotFound(format!("Trigger '{}' not found", trigger_ref)))?;
|
||||||
|
|
||||||
|
if user.claims.token_type == crate::auth::jwt::TokenType::Access {
|
||||||
|
let identity_id = user
|
||||||
|
.identity_id()
|
||||||
|
.map_err(|_| ApiError::Unauthorized("Invalid user identity".to_string()))?;
|
||||||
|
let authz = AuthorizationService::new(state.db.clone());
|
||||||
|
let mut ctx = AuthorizationContext::new(identity_id);
|
||||||
|
ctx.target_ref = Some(trigger.r#ref.clone());
|
||||||
|
ctx.pack_ref = trigger.pack_ref.clone();
|
||||||
|
authz
|
||||||
|
.authorize(
|
||||||
|
&user,
|
||||||
|
AuthorizationCheck {
|
||||||
|
resource: Resource::Triggers,
|
||||||
|
action: Action::Update,
|
||||||
|
context: ctx,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
}
|
||||||
|
|
||||||
// Enable webhooks for this trigger
|
// Enable webhooks for this trigger
|
||||||
let _webhook_info = TriggerRepository::enable_webhook(&state.db, trigger.id)
|
let _webhook_info = TriggerRepository::enable_webhook(&state.db, trigger.id)
|
||||||
.await
|
.await
|
||||||
@@ -213,7 +236,7 @@ pub async fn enable_webhook(
|
|||||||
)]
|
)]
|
||||||
pub async fn disable_webhook(
|
pub async fn disable_webhook(
|
||||||
State(state): State<Arc<AppState>>,
|
State(state): State<Arc<AppState>>,
|
||||||
RequireAuth(_user): RequireAuth,
|
RequireAuth(user): RequireAuth,
|
||||||
Path(trigger_ref): Path<String>,
|
Path(trigger_ref): Path<String>,
|
||||||
) -> ApiResult<impl IntoResponse> {
|
) -> ApiResult<impl IntoResponse> {
|
||||||
// First, find the trigger by ref to get its ID
|
// First, find the trigger by ref to get its ID
|
||||||
@@ -222,6 +245,26 @@ pub async fn disable_webhook(
|
|||||||
.map_err(|e| ApiError::InternalServerError(e.to_string()))?
|
.map_err(|e| ApiError::InternalServerError(e.to_string()))?
|
||||||
.ok_or_else(|| ApiError::NotFound(format!("Trigger '{}' not found", trigger_ref)))?;
|
.ok_or_else(|| ApiError::NotFound(format!("Trigger '{}' not found", trigger_ref)))?;
|
||||||
|
|
||||||
|
if user.claims.token_type == crate::auth::jwt::TokenType::Access {
|
||||||
|
let identity_id = user
|
||||||
|
.identity_id()
|
||||||
|
.map_err(|_| ApiError::Unauthorized("Invalid user identity".to_string()))?;
|
||||||
|
let authz = AuthorizationService::new(state.db.clone());
|
||||||
|
let mut ctx = AuthorizationContext::new(identity_id);
|
||||||
|
ctx.target_ref = Some(trigger.r#ref.clone());
|
||||||
|
ctx.pack_ref = trigger.pack_ref.clone();
|
||||||
|
authz
|
||||||
|
.authorize(
|
||||||
|
&user,
|
||||||
|
AuthorizationCheck {
|
||||||
|
resource: Resource::Triggers,
|
||||||
|
action: Action::Update,
|
||||||
|
context: ctx,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
}
|
||||||
|
|
||||||
// Disable webhooks for this trigger
|
// Disable webhooks for this trigger
|
||||||
TriggerRepository::disable_webhook(&state.db, trigger.id)
|
TriggerRepository::disable_webhook(&state.db, trigger.id)
|
||||||
.await
|
.await
|
||||||
@@ -257,7 +300,7 @@ pub async fn disable_webhook(
|
|||||||
)]
|
)]
|
||||||
pub async fn regenerate_webhook_key(
|
pub async fn regenerate_webhook_key(
|
||||||
State(state): State<Arc<AppState>>,
|
State(state): State<Arc<AppState>>,
|
||||||
RequireAuth(_user): RequireAuth,
|
RequireAuth(user): RequireAuth,
|
||||||
Path(trigger_ref): Path<String>,
|
Path(trigger_ref): Path<String>,
|
||||||
) -> ApiResult<impl IntoResponse> {
|
) -> ApiResult<impl IntoResponse> {
|
||||||
// First, find the trigger by ref to get its ID
|
// First, find the trigger by ref to get its ID
|
||||||
@@ -266,6 +309,26 @@ pub async fn regenerate_webhook_key(
|
|||||||
.map_err(|e| ApiError::InternalServerError(e.to_string()))?
|
.map_err(|e| ApiError::InternalServerError(e.to_string()))?
|
||||||
.ok_or_else(|| ApiError::NotFound(format!("Trigger '{}' not found", trigger_ref)))?;
|
.ok_or_else(|| ApiError::NotFound(format!("Trigger '{}' not found", trigger_ref)))?;
|
||||||
|
|
||||||
|
if user.claims.token_type == crate::auth::jwt::TokenType::Access {
|
||||||
|
let identity_id = user
|
||||||
|
.identity_id()
|
||||||
|
.map_err(|_| ApiError::Unauthorized("Invalid user identity".to_string()))?;
|
||||||
|
let authz = AuthorizationService::new(state.db.clone());
|
||||||
|
let mut ctx = AuthorizationContext::new(identity_id);
|
||||||
|
ctx.target_ref = Some(trigger.r#ref.clone());
|
||||||
|
ctx.pack_ref = trigger.pack_ref.clone();
|
||||||
|
authz
|
||||||
|
.authorize(
|
||||||
|
&user,
|
||||||
|
AuthorizationCheck {
|
||||||
|
resource: Resource::Triggers,
|
||||||
|
action: Action::Update,
|
||||||
|
context: ctx,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
}
|
||||||
|
|
||||||
// Check if webhooks are enabled
|
// Check if webhooks are enabled
|
||||||
if !trigger.webhook_enabled {
|
if !trigger.webhook_enabled {
|
||||||
return Err(ApiError::BadRequest(
|
return Err(ApiError::BadRequest(
|
||||||
|
|||||||
@@ -18,7 +18,7 @@ use attune_common::repositories::{
|
|||||||
CreateWorkflowDefinitionInput, UpdateWorkflowDefinitionInput, WorkflowDefinitionRepository,
|
CreateWorkflowDefinitionInput, UpdateWorkflowDefinitionInput, WorkflowDefinitionRepository,
|
||||||
WorkflowSearchFilters,
|
WorkflowSearchFilters,
|
||||||
},
|
},
|
||||||
Create, Delete, FindByRef, Update,
|
Create, Delete, FindByRef, Patch, Update,
|
||||||
};
|
};
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
@@ -217,7 +217,7 @@ pub async fn create_workflow(
|
|||||||
pack.id,
|
pack.id,
|
||||||
&pack.r#ref,
|
&pack.r#ref,
|
||||||
&request.label,
|
&request.label,
|
||||||
&request.description.clone().unwrap_or_default(),
|
request.description.as_deref(),
|
||||||
"workflow",
|
"workflow",
|
||||||
request.param_schema.as_ref(),
|
request.param_schema.as_ref(),
|
||||||
request.out_schema.as_ref(),
|
request.out_schema.as_ref(),
|
||||||
@@ -416,7 +416,7 @@ pub async fn save_workflow_file(
|
|||||||
pack.id,
|
pack.id,
|
||||||
&pack.r#ref,
|
&pack.r#ref,
|
||||||
&request.label,
|
&request.label,
|
||||||
&request.description.clone().unwrap_or_default(),
|
request.description.as_deref(),
|
||||||
&entrypoint,
|
&entrypoint,
|
||||||
request.param_schema.as_ref(),
|
request.param_schema.as_ref(),
|
||||||
request.out_schema.as_ref(),
|
request.out_schema.as_ref(),
|
||||||
@@ -499,7 +499,7 @@ pub async fn update_workflow_file(
|
|||||||
pack.id,
|
pack.id,
|
||||||
&pack.r#ref,
|
&pack.r#ref,
|
||||||
&request.label,
|
&request.label,
|
||||||
&request.description.unwrap_or_default(),
|
request.description.as_deref(),
|
||||||
&entrypoint,
|
&entrypoint,
|
||||||
request.param_schema.as_ref(),
|
request.param_schema.as_ref(),
|
||||||
request.out_schema.as_ref(),
|
request.out_schema.as_ref(),
|
||||||
@@ -702,7 +702,7 @@ async fn create_companion_action(
|
|||||||
pack_id: i64,
|
pack_id: i64,
|
||||||
pack_ref: &str,
|
pack_ref: &str,
|
||||||
label: &str,
|
label: &str,
|
||||||
description: &str,
|
description: Option<&str>,
|
||||||
entrypoint: &str,
|
entrypoint: &str,
|
||||||
param_schema: Option<&serde_json::Value>,
|
param_schema: Option<&serde_json::Value>,
|
||||||
out_schema: Option<&serde_json::Value>,
|
out_schema: Option<&serde_json::Value>,
|
||||||
@@ -713,7 +713,7 @@ async fn create_companion_action(
|
|||||||
pack: pack_id,
|
pack: pack_id,
|
||||||
pack_ref: pack_ref.to_string(),
|
pack_ref: pack_ref.to_string(),
|
||||||
label: label.to_string(),
|
label: label.to_string(),
|
||||||
description: description.to_string(),
|
description: description.map(|s| s.to_string()),
|
||||||
entrypoint: entrypoint.to_string(),
|
entrypoint: entrypoint.to_string(),
|
||||||
runtime: None,
|
runtime: None,
|
||||||
runtime_version_constraint: None,
|
runtime_version_constraint: None,
|
||||||
@@ -787,7 +787,7 @@ async fn update_companion_action(
|
|||||||
if let Some(action) = existing_action {
|
if let Some(action) = existing_action {
|
||||||
let update_input = UpdateActionInput {
|
let update_input = UpdateActionInput {
|
||||||
label: label.map(|s| s.to_string()),
|
label: label.map(|s| s.to_string()),
|
||||||
description: description.map(|s| s.to_string()),
|
description: description.map(|s| Patch::Set(s.to_string())),
|
||||||
entrypoint: None,
|
entrypoint: None,
|
||||||
runtime: None,
|
runtime: None,
|
||||||
runtime_version_constraint: None,
|
runtime_version_constraint: None,
|
||||||
@@ -838,7 +838,7 @@ async fn ensure_companion_action(
|
|||||||
pack_id: i64,
|
pack_id: i64,
|
||||||
pack_ref: &str,
|
pack_ref: &str,
|
||||||
label: &str,
|
label: &str,
|
||||||
description: &str,
|
description: Option<&str>,
|
||||||
entrypoint: &str,
|
entrypoint: &str,
|
||||||
param_schema: Option<&serde_json::Value>,
|
param_schema: Option<&serde_json::Value>,
|
||||||
out_schema: Option<&serde_json::Value>,
|
out_schema: Option<&serde_json::Value>,
|
||||||
@@ -853,7 +853,10 @@ async fn ensure_companion_action(
|
|||||||
// Update existing companion action
|
// Update existing companion action
|
||||||
let update_input = UpdateActionInput {
|
let update_input = UpdateActionInput {
|
||||||
label: Some(label.to_string()),
|
label: Some(label.to_string()),
|
||||||
description: Some(description.to_string()),
|
description: Some(match description {
|
||||||
|
Some(description) => Patch::Set(description.to_string()),
|
||||||
|
None => Patch::Clear,
|
||||||
|
}),
|
||||||
entrypoint: Some(entrypoint.to_string()),
|
entrypoint: Some(entrypoint.to_string()),
|
||||||
runtime: None,
|
runtime: None,
|
||||||
runtime_version_constraint: None,
|
runtime_version_constraint: None,
|
||||||
|
|||||||
@@ -60,6 +60,7 @@ impl Server {
|
|||||||
.merge(routes::history_routes())
|
.merge(routes::history_routes())
|
||||||
.merge(routes::analytics_routes())
|
.merge(routes::analytics_routes())
|
||||||
.merge(routes::artifact_routes())
|
.merge(routes::artifact_routes())
|
||||||
|
.merge(routes::agent_routes())
|
||||||
.with_state(self.state.clone());
|
.with_state(self.state.clone());
|
||||||
|
|
||||||
// Auth routes at root level (not versioned for frontend compatibility)
|
// Auth routes at root level (not versioned for frontend compatibility)
|
||||||
|
|||||||
@@ -362,7 +362,7 @@ mod tests {
|
|||||||
pack: 1,
|
pack: 1,
|
||||||
pack_ref: "test".to_string(),
|
pack_ref: "test".to_string(),
|
||||||
label: "Test Action".to_string(),
|
label: "Test Action".to_string(),
|
||||||
description: "Test action".to_string(),
|
description: Some("Test action".to_string()),
|
||||||
entrypoint: "test.sh".to_string(),
|
entrypoint: "test.sh".to_string(),
|
||||||
runtime: Some(1),
|
runtime: Some(1),
|
||||||
runtime_version_constraint: None,
|
runtime_version_constraint: None,
|
||||||
|
|||||||
138
crates/api/tests/agent_tests.rs
Normal file
138
crates/api/tests/agent_tests.rs
Normal file
@@ -0,0 +1,138 @@
|
|||||||
|
//! Integration tests for agent binary distribution endpoints
|
||||||
|
//!
|
||||||
|
//! The agent endpoints (`/api/v1/agent/binary` and `/api/v1/agent/info`) are
|
||||||
|
//! intentionally unauthenticated — the agent needs to download its binary
|
||||||
|
//! before it has JWT credentials. An optional `bootstrap_token` can restrict
|
||||||
|
//! access, but that is validated inside the handler, not via RequireAuth
|
||||||
|
//! middleware.
|
||||||
|
//!
|
||||||
|
//! The test configuration (`config.test.yaml`) does NOT include an `agent`
|
||||||
|
//! section, so both endpoints return 503 Service Unavailable. This is the
|
||||||
|
//! correct behaviour: the endpoints are reachable (no 401/404 from middleware)
|
||||||
|
//! but the feature is not configured.
|
||||||
|
|
||||||
|
use axum::http::StatusCode;
|
||||||
|
|
||||||
|
#[allow(dead_code)]
|
||||||
|
mod helpers;
|
||||||
|
use helpers::TestContext;
|
||||||
|
|
||||||
|
// ── /api/v1/agent/info ──────────────────────────────────────────────
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
#[ignore = "integration test — requires database"]
|
||||||
|
async fn test_agent_info_not_configured() {
|
||||||
|
let ctx = TestContext::new()
|
||||||
|
.await
|
||||||
|
.expect("Failed to create test context");
|
||||||
|
|
||||||
|
let response = ctx
|
||||||
|
.get("/api/v1/agent/info", None)
|
||||||
|
.await
|
||||||
|
.expect("Failed to make request");
|
||||||
|
|
||||||
|
// Agent config is not set in config.test.yaml, so the handler returns 503.
|
||||||
|
assert_eq!(response.status(), StatusCode::SERVICE_UNAVAILABLE);
|
||||||
|
|
||||||
|
let body: serde_json::Value = response.json().await.expect("Failed to parse JSON");
|
||||||
|
assert_eq!(body["error"], "Not configured");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
#[ignore = "integration test — requires database"]
|
||||||
|
async fn test_agent_info_no_auth_required() {
|
||||||
|
// Verify that the endpoint is reachable WITHOUT any JWT token.
|
||||||
|
// If RequireAuth middleware were applied, this would return 401.
|
||||||
|
// Instead we expect 503 (not configured) — proving the endpoint
|
||||||
|
// is publicly accessible.
|
||||||
|
let ctx = TestContext::new()
|
||||||
|
.await
|
||||||
|
.expect("Failed to create test context");
|
||||||
|
|
||||||
|
let response = ctx
|
||||||
|
.get("/api/v1/agent/info", None)
|
||||||
|
.await
|
||||||
|
.expect("Failed to make request");
|
||||||
|
|
||||||
|
// Must NOT be 401 Unauthorized — the endpoint has no auth middleware.
|
||||||
|
assert_ne!(
|
||||||
|
response.status(),
|
||||||
|
StatusCode::UNAUTHORIZED,
|
||||||
|
"agent/info should not require authentication"
|
||||||
|
);
|
||||||
|
// Should be 503 because agent config is absent.
|
||||||
|
assert_eq!(response.status(), StatusCode::SERVICE_UNAVAILABLE);
|
||||||
|
}
|
||||||
|
|
||||||
|
// ── /api/v1/agent/binary ────────────────────────────────────────────
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
#[ignore = "integration test — requires database"]
|
||||||
|
async fn test_agent_binary_not_configured() {
|
||||||
|
let ctx = TestContext::new()
|
||||||
|
.await
|
||||||
|
.expect("Failed to create test context");
|
||||||
|
|
||||||
|
let response = ctx
|
||||||
|
.get("/api/v1/agent/binary", None)
|
||||||
|
.await
|
||||||
|
.expect("Failed to make request");
|
||||||
|
|
||||||
|
// Agent config is not set in config.test.yaml, so the handler returns 503.
|
||||||
|
assert_eq!(response.status(), StatusCode::SERVICE_UNAVAILABLE);
|
||||||
|
|
||||||
|
let body: serde_json::Value = response.json().await.expect("Failed to parse JSON");
|
||||||
|
assert_eq!(body["error"], "Not configured");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
#[ignore = "integration test — requires database"]
|
||||||
|
async fn test_agent_binary_no_auth_required() {
|
||||||
|
// Same reasoning as test_agent_info_no_auth_required: the binary
|
||||||
|
// download endpoint must be publicly accessible (no RequireAuth).
|
||||||
|
// When no bootstrap_token is configured, any caller can reach the
|
||||||
|
// handler. We still get 503 because the agent feature itself is
|
||||||
|
// not configured in the test environment.
|
||||||
|
let ctx = TestContext::new()
|
||||||
|
.await
|
||||||
|
.expect("Failed to create test context");
|
||||||
|
|
||||||
|
let response = ctx
|
||||||
|
.get("/api/v1/agent/binary", None)
|
||||||
|
.await
|
||||||
|
.expect("Failed to make request");
|
||||||
|
|
||||||
|
// Must NOT be 401 Unauthorized — the endpoint has no auth middleware.
|
||||||
|
assert_ne!(
|
||||||
|
response.status(),
|
||||||
|
StatusCode::UNAUTHORIZED,
|
||||||
|
"agent/binary should not require authentication when no bootstrap_token is configured"
|
||||||
|
);
|
||||||
|
// Should be 503 because agent config is absent.
|
||||||
|
assert_eq!(response.status(), StatusCode::SERVICE_UNAVAILABLE);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
#[ignore = "integration test — requires database"]
|
||||||
|
async fn test_agent_binary_invalid_arch() {
|
||||||
|
// Architecture validation (`validate_arch`) rejects unsupported values
|
||||||
|
// with 400 Bad Request. However, in the handler the execution order is:
|
||||||
|
// 1. validate_token (passes — no bootstrap_token configured)
|
||||||
|
// 2. check agent config (fails with 503 — not configured)
|
||||||
|
// 3. validate_arch (never reached)
|
||||||
|
//
|
||||||
|
// So even with an invalid arch like "mips", we get 503 from the config
|
||||||
|
// check before the arch is ever validated. The arch validation is covered
|
||||||
|
// by unit tests in routes/agent.rs instead.
|
||||||
|
let ctx = TestContext::new()
|
||||||
|
.await
|
||||||
|
.expect("Failed to create test context");
|
||||||
|
|
||||||
|
let response = ctx
|
||||||
|
.get("/api/v1/agent/binary?arch=mips", None)
|
||||||
|
.await
|
||||||
|
.expect("Failed to make request");
|
||||||
|
|
||||||
|
// 503 from the agent-config-not-set check, NOT 400 from arch validation.
|
||||||
|
assert_eq!(response.status(), StatusCode::SERVICE_UNAVAILABLE);
|
||||||
|
}
|
||||||
@@ -241,6 +241,7 @@ impl TestContext {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/// Create and authenticate a test user
|
/// Create and authenticate a test user
|
||||||
|
#[allow(dead_code)]
|
||||||
pub async fn with_auth(mut self) -> Result<Self> {
|
pub async fn with_auth(mut self) -> Result<Self> {
|
||||||
// Generate unique username to avoid conflicts in parallel tests
|
// Generate unique username to avoid conflicts in parallel tests
|
||||||
let unique_id = uuid::Uuid::new_v4().to_string().replace("-", "")[..8].to_string();
|
let unique_id = uuid::Uuid::new_v4().to_string().replace("-", "")[..8].to_string();
|
||||||
@@ -394,6 +395,7 @@ impl TestContext {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/// Get authenticated token
|
/// Get authenticated token
|
||||||
|
#[allow(dead_code)]
|
||||||
pub fn token(&self) -> Option<&str> {
|
pub fn token(&self) -> Option<&str> {
|
||||||
self.token.as_deref()
|
self.token.as_deref()
|
||||||
}
|
}
|
||||||
@@ -495,7 +497,7 @@ pub async fn create_test_action(pool: &PgPool, pack_id: i64, ref_name: &str) ->
|
|||||||
pack: pack_id,
|
pack: pack_id,
|
||||||
pack_ref: format!("pack_{}", pack_id),
|
pack_ref: format!("pack_{}", pack_id),
|
||||||
label: format!("Test Action {}", ref_name),
|
label: format!("Test Action {}", ref_name),
|
||||||
description: format!("Test action for {}", ref_name),
|
description: Some(format!("Test action for {}", ref_name)),
|
||||||
entrypoint: "main.py".to_string(),
|
entrypoint: "main.py".to_string(),
|
||||||
runtime: None,
|
runtime: None,
|
||||||
runtime_version_constraint: None,
|
runtime_version_constraint: None,
|
||||||
|
|||||||
276
crates/api/tests/rbac_scoped_resources_api_tests.rs
Normal file
276
crates/api/tests/rbac_scoped_resources_api_tests.rs
Normal file
@@ -0,0 +1,276 @@
|
|||||||
|
use axum::http::StatusCode;
|
||||||
|
use helpers::*;
|
||||||
|
use serde_json::json;
|
||||||
|
|
||||||
|
use attune_common::{
|
||||||
|
models::enums::{ArtifactType, ArtifactVisibility, OwnerType, RetentionPolicyType},
|
||||||
|
repositories::{
|
||||||
|
artifact::{ArtifactRepository, CreateArtifactInput},
|
||||||
|
identity::{
|
||||||
|
CreatePermissionAssignmentInput, CreatePermissionSetInput, IdentityRepository,
|
||||||
|
PermissionAssignmentRepository, PermissionSetRepository,
|
||||||
|
},
|
||||||
|
key::{CreateKeyInput, KeyRepository},
|
||||||
|
Create,
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
mod helpers;
|
||||||
|
|
||||||
|
async fn register_scoped_user(
|
||||||
|
ctx: &TestContext,
|
||||||
|
login: &str,
|
||||||
|
grants: serde_json::Value,
|
||||||
|
) -> Result<String> {
|
||||||
|
let response = ctx
|
||||||
|
.post(
|
||||||
|
"/auth/register",
|
||||||
|
json!({
|
||||||
|
"login": login,
|
||||||
|
"password": "TestPassword123!",
|
||||||
|
"display_name": format!("Scoped User {}", login),
|
||||||
|
}),
|
||||||
|
None,
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
assert_eq!(response.status(), StatusCode::CREATED);
|
||||||
|
let body: serde_json::Value = response.json().await?;
|
||||||
|
let token = body["data"]["access_token"]
|
||||||
|
.as_str()
|
||||||
|
.expect("missing access token")
|
||||||
|
.to_string();
|
||||||
|
|
||||||
|
let identity = IdentityRepository::find_by_login(&ctx.pool, login)
|
||||||
|
.await?
|
||||||
|
.expect("registered identity should exist");
|
||||||
|
|
||||||
|
let permset = PermissionSetRepository::create(
|
||||||
|
&ctx.pool,
|
||||||
|
CreatePermissionSetInput {
|
||||||
|
r#ref: format!("test.scoped_{}", uuid::Uuid::new_v4().simple()),
|
||||||
|
pack: None,
|
||||||
|
pack_ref: None,
|
||||||
|
label: Some("Scoped Test Permission Set".to_string()),
|
||||||
|
description: Some("Scoped test grants".to_string()),
|
||||||
|
grants,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
PermissionAssignmentRepository::create(
|
||||||
|
&ctx.pool,
|
||||||
|
CreatePermissionAssignmentInput {
|
||||||
|
identity: identity.id,
|
||||||
|
permset: permset.id,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
Ok(token)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
#[ignore = "integration test — requires database"]
|
||||||
|
async fn test_pack_scoped_key_permissions_enforce_owner_refs() {
|
||||||
|
let ctx = TestContext::new()
|
||||||
|
.await
|
||||||
|
.expect("Failed to create test context");
|
||||||
|
|
||||||
|
let token = register_scoped_user(
|
||||||
|
&ctx,
|
||||||
|
&format!("scoped_keys_{}", uuid::Uuid::new_v4().simple()),
|
||||||
|
json!([
|
||||||
|
{
|
||||||
|
"resource": "keys",
|
||||||
|
"actions": ["read"],
|
||||||
|
"constraints": {
|
||||||
|
"owner_types": ["pack"],
|
||||||
|
"owner_refs": ["python_example"]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
]),
|
||||||
|
)
|
||||||
|
.await
|
||||||
|
.expect("Failed to register scoped user");
|
||||||
|
|
||||||
|
KeyRepository::create(
|
||||||
|
&ctx.pool,
|
||||||
|
CreateKeyInput {
|
||||||
|
r#ref: format!("python_example_key_{}", uuid::Uuid::new_v4().simple()),
|
||||||
|
owner_type: OwnerType::Pack,
|
||||||
|
owner: Some("python_example".to_string()),
|
||||||
|
owner_identity: None,
|
||||||
|
owner_pack: None,
|
||||||
|
owner_pack_ref: Some("python_example".to_string()),
|
||||||
|
owner_action: None,
|
||||||
|
owner_action_ref: None,
|
||||||
|
owner_sensor: None,
|
||||||
|
owner_sensor_ref: None,
|
||||||
|
name: "Python Example Key".to_string(),
|
||||||
|
encrypted: false,
|
||||||
|
encryption_key_hash: None,
|
||||||
|
value: json!("allowed"),
|
||||||
|
},
|
||||||
|
)
|
||||||
|
.await
|
||||||
|
.expect("Failed to create scoped key");
|
||||||
|
|
||||||
|
let blocked_key = KeyRepository::create(
|
||||||
|
&ctx.pool,
|
||||||
|
CreateKeyInput {
|
||||||
|
r#ref: format!("other_pack_key_{}", uuid::Uuid::new_v4().simple()),
|
||||||
|
owner_type: OwnerType::Pack,
|
||||||
|
owner: Some("other_pack".to_string()),
|
||||||
|
owner_identity: None,
|
||||||
|
owner_pack: None,
|
||||||
|
owner_pack_ref: Some("other_pack".to_string()),
|
||||||
|
owner_action: None,
|
||||||
|
owner_action_ref: None,
|
||||||
|
owner_sensor: None,
|
||||||
|
owner_sensor_ref: None,
|
||||||
|
name: "Other Pack Key".to_string(),
|
||||||
|
encrypted: false,
|
||||||
|
encryption_key_hash: None,
|
||||||
|
value: json!("blocked"),
|
||||||
|
},
|
||||||
|
)
|
||||||
|
.await
|
||||||
|
.expect("Failed to create blocked key");
|
||||||
|
|
||||||
|
let allowed_list = ctx
|
||||||
|
.get("/api/v1/keys", Some(&token))
|
||||||
|
.await
|
||||||
|
.expect("Failed to list keys");
|
||||||
|
assert_eq!(allowed_list.status(), StatusCode::OK);
|
||||||
|
let allowed_body: serde_json::Value = allowed_list.json().await.expect("Invalid key list");
|
||||||
|
assert_eq!(
|
||||||
|
allowed_body["data"]
|
||||||
|
.as_array()
|
||||||
|
.expect("expected list")
|
||||||
|
.len(),
|
||||||
|
1
|
||||||
|
);
|
||||||
|
assert_eq!(allowed_body["data"][0]["owner"], "python_example");
|
||||||
|
|
||||||
|
let blocked_get = ctx
|
||||||
|
.get(&format!("/api/v1/keys/{}", blocked_key.r#ref), Some(&token))
|
||||||
|
.await
|
||||||
|
.expect("Failed to fetch blocked key");
|
||||||
|
assert_eq!(blocked_get.status(), StatusCode::NOT_FOUND);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
#[ignore = "integration test — requires database"]
|
||||||
|
async fn test_pack_scoped_artifact_permissions_enforce_owner_refs() {
|
||||||
|
let ctx = TestContext::new()
|
||||||
|
.await
|
||||||
|
.expect("Failed to create test context");
|
||||||
|
|
||||||
|
let token = register_scoped_user(
|
||||||
|
&ctx,
|
||||||
|
&format!("scoped_artifacts_{}", uuid::Uuid::new_v4().simple()),
|
||||||
|
json!([
|
||||||
|
{
|
||||||
|
"resource": "artifacts",
|
||||||
|
"actions": ["read", "create"],
|
||||||
|
"constraints": {
|
||||||
|
"owner_types": ["pack"],
|
||||||
|
"owner_refs": ["python_example"]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
]),
|
||||||
|
)
|
||||||
|
.await
|
||||||
|
.expect("Failed to register scoped user");
|
||||||
|
|
||||||
|
let allowed_artifact = ArtifactRepository::create(
|
||||||
|
&ctx.pool,
|
||||||
|
CreateArtifactInput {
|
||||||
|
r#ref: format!("python_example.allowed_{}", uuid::Uuid::new_v4().simple()),
|
||||||
|
scope: OwnerType::Pack,
|
||||||
|
owner: "python_example".to_string(),
|
||||||
|
r#type: ArtifactType::FileText,
|
||||||
|
visibility: ArtifactVisibility::Private,
|
||||||
|
retention_policy: RetentionPolicyType::Versions,
|
||||||
|
retention_limit: 5,
|
||||||
|
name: Some("Allowed Artifact".to_string()),
|
||||||
|
description: None,
|
||||||
|
content_type: Some("text/plain".to_string()),
|
||||||
|
execution: None,
|
||||||
|
data: None,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
.await
|
||||||
|
.expect("Failed to create allowed artifact");
|
||||||
|
|
||||||
|
let blocked_artifact = ArtifactRepository::create(
|
||||||
|
&ctx.pool,
|
||||||
|
CreateArtifactInput {
|
||||||
|
r#ref: format!("other_pack.blocked_{}", uuid::Uuid::new_v4().simple()),
|
||||||
|
scope: OwnerType::Pack,
|
||||||
|
owner: "other_pack".to_string(),
|
||||||
|
r#type: ArtifactType::FileText,
|
||||||
|
visibility: ArtifactVisibility::Private,
|
||||||
|
retention_policy: RetentionPolicyType::Versions,
|
||||||
|
retention_limit: 5,
|
||||||
|
name: Some("Blocked Artifact".to_string()),
|
||||||
|
description: None,
|
||||||
|
content_type: Some("text/plain".to_string()),
|
||||||
|
execution: None,
|
||||||
|
data: None,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
.await
|
||||||
|
.expect("Failed to create blocked artifact");
|
||||||
|
|
||||||
|
let allowed_get = ctx
|
||||||
|
.get(
|
||||||
|
&format!("/api/v1/artifacts/{}", allowed_artifact.id),
|
||||||
|
Some(&token),
|
||||||
|
)
|
||||||
|
.await
|
||||||
|
.expect("Failed to fetch allowed artifact");
|
||||||
|
assert_eq!(allowed_get.status(), StatusCode::OK);
|
||||||
|
|
||||||
|
let blocked_get = ctx
|
||||||
|
.get(
|
||||||
|
&format!("/api/v1/artifacts/{}", blocked_artifact.id),
|
||||||
|
Some(&token),
|
||||||
|
)
|
||||||
|
.await
|
||||||
|
.expect("Failed to fetch blocked artifact");
|
||||||
|
assert_eq!(blocked_get.status(), StatusCode::NOT_FOUND);
|
||||||
|
|
||||||
|
let create_allowed = ctx
|
||||||
|
.post(
|
||||||
|
"/api/v1/artifacts",
|
||||||
|
json!({
|
||||||
|
"ref": format!("python_example.created_{}", uuid::Uuid::new_v4().simple()),
|
||||||
|
"scope": "pack",
|
||||||
|
"owner": "python_example",
|
||||||
|
"type": "file_text",
|
||||||
|
"name": "Created Artifact"
|
||||||
|
}),
|
||||||
|
Some(&token),
|
||||||
|
)
|
||||||
|
.await
|
||||||
|
.expect("Failed to create allowed artifact");
|
||||||
|
assert_eq!(create_allowed.status(), StatusCode::CREATED);
|
||||||
|
|
||||||
|
let create_blocked = ctx
|
||||||
|
.post(
|
||||||
|
"/api/v1/artifacts",
|
||||||
|
json!({
|
||||||
|
"ref": format!("other_pack.created_{}", uuid::Uuid::new_v4().simple()),
|
||||||
|
"scope": "pack",
|
||||||
|
"owner": "other_pack",
|
||||||
|
"type": "file_text",
|
||||||
|
"name": "Blocked Artifact"
|
||||||
|
}),
|
||||||
|
Some(&token),
|
||||||
|
)
|
||||||
|
.await
|
||||||
|
.expect("Failed to create blocked artifact");
|
||||||
|
assert_eq!(create_blocked.status(), StatusCode::FORBIDDEN);
|
||||||
|
}
|
||||||
@@ -52,7 +52,7 @@ async fn setup_test_pack_and_action(pool: &PgPool) -> Result<(Pack, Action)> {
|
|||||||
pack: pack.id,
|
pack: pack.id,
|
||||||
pack_ref: pack.r#ref.clone(),
|
pack_ref: pack.r#ref.clone(),
|
||||||
label: "Test Action".to_string(),
|
label: "Test Action".to_string(),
|
||||||
description: "Test action for SSE tests".to_string(),
|
description: Some("Test action for SSE tests".to_string()),
|
||||||
entrypoint: "test.sh".to_string(),
|
entrypoint: "test.sh".to_string(),
|
||||||
runtime: None,
|
runtime: None,
|
||||||
runtime_version_constraint: None,
|
runtime_version_constraint: None,
|
||||||
|
|||||||
@@ -90,7 +90,7 @@ struct Action {
|
|||||||
action_ref: String,
|
action_ref: String,
|
||||||
pack_ref: String,
|
pack_ref: String,
|
||||||
label: String,
|
label: String,
|
||||||
description: String,
|
description: Option<String>,
|
||||||
entrypoint: String,
|
entrypoint: String,
|
||||||
runtime: Option<i64>,
|
runtime: Option<i64>,
|
||||||
created: String,
|
created: String,
|
||||||
@@ -105,7 +105,7 @@ struct ActionDetail {
|
|||||||
pack: i64,
|
pack: i64,
|
||||||
pack_ref: String,
|
pack_ref: String,
|
||||||
label: String,
|
label: String,
|
||||||
description: String,
|
description: Option<String>,
|
||||||
entrypoint: String,
|
entrypoint: String,
|
||||||
runtime: Option<i64>,
|
runtime: Option<i64>,
|
||||||
param_schema: Option<serde_json::Value>,
|
param_schema: Option<serde_json::Value>,
|
||||||
@@ -253,7 +253,7 @@ async fn handle_list(
|
|||||||
.runtime
|
.runtime
|
||||||
.map(|r| r.to_string())
|
.map(|r| r.to_string())
|
||||||
.unwrap_or_else(|| "none".to_string()),
|
.unwrap_or_else(|| "none".to_string()),
|
||||||
output::truncate(&action.description, 40),
|
output::truncate(&action.description.unwrap_or_default(), 40),
|
||||||
]);
|
]);
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -288,7 +288,10 @@ async fn handle_show(
|
|||||||
("Reference", action.action_ref.clone()),
|
("Reference", action.action_ref.clone()),
|
||||||
("Pack", action.pack_ref.clone()),
|
("Pack", action.pack_ref.clone()),
|
||||||
("Label", action.label.clone()),
|
("Label", action.label.clone()),
|
||||||
("Description", action.description.clone()),
|
(
|
||||||
|
"Description",
|
||||||
|
action.description.unwrap_or_else(|| "None".to_string()),
|
||||||
|
),
|
||||||
("Entry Point", action.entrypoint.clone()),
|
("Entry Point", action.entrypoint.clone()),
|
||||||
(
|
(
|
||||||
"Runtime",
|
"Runtime",
|
||||||
@@ -356,7 +359,10 @@ async fn handle_update(
|
|||||||
("Ref", action.action_ref.clone()),
|
("Ref", action.action_ref.clone()),
|
||||||
("Pack", action.pack_ref.clone()),
|
("Pack", action.pack_ref.clone()),
|
||||||
("Label", action.label.clone()),
|
("Label", action.label.clone()),
|
||||||
("Description", action.description.clone()),
|
(
|
||||||
|
"Description",
|
||||||
|
action.description.unwrap_or_else(|| "None".to_string()),
|
||||||
|
),
|
||||||
("Entrypoint", action.entrypoint.clone()),
|
("Entrypoint", action.entrypoint.clone()),
|
||||||
(
|
(
|
||||||
"Runtime",
|
"Runtime",
|
||||||
|
|||||||
@@ -112,7 +112,7 @@ struct Rule {
|
|||||||
pack: Option<i64>,
|
pack: Option<i64>,
|
||||||
pack_ref: String,
|
pack_ref: String,
|
||||||
label: String,
|
label: String,
|
||||||
description: String,
|
description: Option<String>,
|
||||||
#[serde(default)]
|
#[serde(default)]
|
||||||
trigger: Option<i64>,
|
trigger: Option<i64>,
|
||||||
trigger_ref: String,
|
trigger_ref: String,
|
||||||
@@ -133,7 +133,7 @@ struct RuleDetail {
|
|||||||
pack: Option<i64>,
|
pack: Option<i64>,
|
||||||
pack_ref: String,
|
pack_ref: String,
|
||||||
label: String,
|
label: String,
|
||||||
description: String,
|
description: Option<String>,
|
||||||
#[serde(default)]
|
#[serde(default)]
|
||||||
trigger: Option<i64>,
|
trigger: Option<i64>,
|
||||||
trigger_ref: String,
|
trigger_ref: String,
|
||||||
@@ -321,7 +321,10 @@ async fn handle_show(
|
|||||||
("Ref", rule.rule_ref.clone()),
|
("Ref", rule.rule_ref.clone()),
|
||||||
("Pack", rule.pack_ref.clone()),
|
("Pack", rule.pack_ref.clone()),
|
||||||
("Label", rule.label.clone()),
|
("Label", rule.label.clone()),
|
||||||
("Description", rule.description.clone()),
|
(
|
||||||
|
"Description",
|
||||||
|
rule.description.unwrap_or_else(|| "None".to_string()),
|
||||||
|
),
|
||||||
("Trigger", rule.trigger_ref.clone()),
|
("Trigger", rule.trigger_ref.clone()),
|
||||||
("Action", rule.action_ref.clone()),
|
("Action", rule.action_ref.clone()),
|
||||||
("Enabled", output::format_bool(rule.enabled)),
|
("Enabled", output::format_bool(rule.enabled)),
|
||||||
@@ -440,7 +443,10 @@ async fn handle_update(
|
|||||||
("Ref", rule.rule_ref.clone()),
|
("Ref", rule.rule_ref.clone()),
|
||||||
("Pack", rule.pack_ref.clone()),
|
("Pack", rule.pack_ref.clone()),
|
||||||
("Label", rule.label.clone()),
|
("Label", rule.label.clone()),
|
||||||
("Description", rule.description.clone()),
|
(
|
||||||
|
"Description",
|
||||||
|
rule.description.unwrap_or_else(|| "None".to_string()),
|
||||||
|
),
|
||||||
("Trigger", rule.trigger_ref.clone()),
|
("Trigger", rule.trigger_ref.clone()),
|
||||||
("Action", rule.action_ref.clone()),
|
("Action", rule.action_ref.clone()),
|
||||||
("Enabled", output::format_bool(rule.enabled)),
|
("Enabled", output::format_bool(rule.enabled)),
|
||||||
|
|||||||
107
crates/common/src/agent_bootstrap.rs
Normal file
107
crates/common/src/agent_bootstrap.rs
Normal file
@@ -0,0 +1,107 @@
|
|||||||
|
//! Shared bootstrap helpers for injected agent binaries.
|
||||||
|
|
||||||
|
use crate::agent_runtime_detection::{
|
||||||
|
detect_runtimes, format_as_env_value, print_detection_report_for_env, DetectedRuntime,
|
||||||
|
};
|
||||||
|
use tracing::{info, warn};
|
||||||
|
|
||||||
|
#[derive(Debug, Clone)]
|
||||||
|
pub struct RuntimeBootstrapResult {
|
||||||
|
pub runtimes_override: Option<String>,
|
||||||
|
pub detected_runtimes: Option<Vec<DetectedRuntime>>,
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Detect runtimes and populate the agent runtime environment variable when needed.
|
||||||
|
///
|
||||||
|
/// This must run before the Tokio runtime starts because it may mutate process
|
||||||
|
/// environment variables.
|
||||||
|
pub fn bootstrap_runtime_env(env_var_name: &str) -> RuntimeBootstrapResult {
|
||||||
|
let runtimes_override = std::env::var(env_var_name).ok();
|
||||||
|
let mut detected_runtimes = None;
|
||||||
|
|
||||||
|
if let Some(ref override_value) = runtimes_override {
|
||||||
|
info!(
|
||||||
|
"{} already set (override): {}",
|
||||||
|
env_var_name, override_value
|
||||||
|
);
|
||||||
|
info!("Running auto-detection for override-specified runtimes...");
|
||||||
|
|
||||||
|
let detected = detect_runtimes();
|
||||||
|
let override_names: Vec<&str> = override_value.split(',').map(|s| s.trim()).collect();
|
||||||
|
|
||||||
|
let filtered: Vec<_> = detected
|
||||||
|
.into_iter()
|
||||||
|
.filter(|rt| {
|
||||||
|
let lower_name = rt.name.to_ascii_lowercase();
|
||||||
|
override_names
|
||||||
|
.iter()
|
||||||
|
.any(|ov| ov.to_ascii_lowercase() == lower_name)
|
||||||
|
})
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
if filtered.is_empty() {
|
||||||
|
warn!(
|
||||||
|
"None of the override runtimes ({}) were found on this system",
|
||||||
|
override_value
|
||||||
|
);
|
||||||
|
} else {
|
||||||
|
info!(
|
||||||
|
"Matched {} override runtime(s) to detected interpreters:",
|
||||||
|
filtered.len()
|
||||||
|
);
|
||||||
|
for rt in &filtered {
|
||||||
|
match &rt.version {
|
||||||
|
Some(ver) => info!(" ✓ {} — {} ({})", rt.name, rt.path, ver),
|
||||||
|
None => info!(" ✓ {} — {}", rt.name, rt.path),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
detected_runtimes = Some(filtered);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
info!("No {} override — running auto-detection...", env_var_name);
|
||||||
|
|
||||||
|
let detected = detect_runtimes();
|
||||||
|
|
||||||
|
if detected.is_empty() {
|
||||||
|
warn!("No runtimes detected! The agent may not be able to execute any work.");
|
||||||
|
} else {
|
||||||
|
info!("Detected {} runtime(s):", detected.len());
|
||||||
|
for rt in &detected {
|
||||||
|
match &rt.version {
|
||||||
|
Some(ver) => info!(" ✓ {} — {} ({})", rt.name, rt.path, ver),
|
||||||
|
None => info!(" ✓ {} — {}", rt.name, rt.path),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let runtime_csv = format_as_env_value(&detected);
|
||||||
|
info!("Setting {}={}", env_var_name, runtime_csv);
|
||||||
|
std::env::set_var(env_var_name, &runtime_csv);
|
||||||
|
detected_runtimes = Some(detected);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
RuntimeBootstrapResult {
|
||||||
|
runtimes_override,
|
||||||
|
detected_runtimes,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn print_detect_only_report(env_var_name: &str, result: &RuntimeBootstrapResult) {
|
||||||
|
if result.runtimes_override.is_some() {
|
||||||
|
info!("--detect-only: re-running detection to show what is available on this system...");
|
||||||
|
println!(
|
||||||
|
"NOTE: {} is set — auto-detection was skipped during normal startup.",
|
||||||
|
env_var_name
|
||||||
|
);
|
||||||
|
println!(" Showing what auto-detection would find on this system:");
|
||||||
|
println!();
|
||||||
|
|
||||||
|
let detected = detect_runtimes();
|
||||||
|
print_detection_report_for_env(env_var_name, &detected);
|
||||||
|
} else if let Some(ref detected) = result.detected_runtimes {
|
||||||
|
print_detection_report_for_env(env_var_name, detected);
|
||||||
|
} else {
|
||||||
|
let detected = detect_runtimes();
|
||||||
|
print_detection_report_for_env(env_var_name, &detected);
|
||||||
|
}
|
||||||
|
}
|
||||||
306
crates/common/src/agent_runtime_detection.rs
Normal file
306
crates/common/src/agent_runtime_detection.rs
Normal file
@@ -0,0 +1,306 @@
|
|||||||
|
//! Runtime auto-detection for injected Attune agent binaries.
|
||||||
|
//!
|
||||||
|
//! This module probes the local system directly for well-known interpreters,
|
||||||
|
//! without requiring database access.
|
||||||
|
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
use std::fmt;
|
||||||
|
use std::process::Command;
|
||||||
|
use tracing::{debug, info};
|
||||||
|
|
||||||
|
/// A runtime interpreter discovered on the local system.
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
|
pub struct DetectedRuntime {
|
||||||
|
/// Canonical runtime name (for example, "python" or "node").
|
||||||
|
pub name: String,
|
||||||
|
|
||||||
|
/// Absolute path to the interpreter binary.
|
||||||
|
pub path: String,
|
||||||
|
|
||||||
|
/// Version string if the version command succeeded.
|
||||||
|
pub version: Option<String>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl fmt::Display for DetectedRuntime {
|
||||||
|
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||||
|
match &self.version {
|
||||||
|
Some(v) => write!(f, "{} ({}, v{})", self.name, self.path, v),
|
||||||
|
None => write!(f, "{} ({})", self.name, self.path),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
struct RuntimeCandidate {
|
||||||
|
name: &'static str,
|
||||||
|
binaries: &'static [&'static str],
|
||||||
|
version_args: &'static [&'static str],
|
||||||
|
version_parser: VersionParser,
|
||||||
|
}
|
||||||
|
|
||||||
|
enum VersionParser {
|
||||||
|
SemverLike,
|
||||||
|
JavaStyle,
|
||||||
|
}
|
||||||
|
|
||||||
|
fn candidates() -> Vec<RuntimeCandidate> {
|
||||||
|
vec![
|
||||||
|
RuntimeCandidate {
|
||||||
|
name: "shell",
|
||||||
|
binaries: &["bash", "sh"],
|
||||||
|
version_args: &["--version"],
|
||||||
|
version_parser: VersionParser::SemverLike,
|
||||||
|
},
|
||||||
|
RuntimeCandidate {
|
||||||
|
name: "python",
|
||||||
|
binaries: &["python3", "python"],
|
||||||
|
version_args: &["--version"],
|
||||||
|
version_parser: VersionParser::SemverLike,
|
||||||
|
},
|
||||||
|
RuntimeCandidate {
|
||||||
|
name: "node",
|
||||||
|
binaries: &["node", "nodejs"],
|
||||||
|
version_args: &["--version"],
|
||||||
|
version_parser: VersionParser::SemverLike,
|
||||||
|
},
|
||||||
|
RuntimeCandidate {
|
||||||
|
name: "ruby",
|
||||||
|
binaries: &["ruby"],
|
||||||
|
version_args: &["--version"],
|
||||||
|
version_parser: VersionParser::SemverLike,
|
||||||
|
},
|
||||||
|
RuntimeCandidate {
|
||||||
|
name: "go",
|
||||||
|
binaries: &["go"],
|
||||||
|
version_args: &["version"],
|
||||||
|
version_parser: VersionParser::SemverLike,
|
||||||
|
},
|
||||||
|
RuntimeCandidate {
|
||||||
|
name: "java",
|
||||||
|
binaries: &["java"],
|
||||||
|
version_args: &["-version"],
|
||||||
|
version_parser: VersionParser::JavaStyle,
|
||||||
|
},
|
||||||
|
RuntimeCandidate {
|
||||||
|
name: "r",
|
||||||
|
binaries: &["Rscript"],
|
||||||
|
version_args: &["--version"],
|
||||||
|
version_parser: VersionParser::SemverLike,
|
||||||
|
},
|
||||||
|
RuntimeCandidate {
|
||||||
|
name: "perl",
|
||||||
|
binaries: &["perl"],
|
||||||
|
version_args: &["--version"],
|
||||||
|
version_parser: VersionParser::SemverLike,
|
||||||
|
},
|
||||||
|
]
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Detect available runtimes by probing the local system.
|
||||||
|
pub fn detect_runtimes() -> Vec<DetectedRuntime> {
|
||||||
|
info!("Starting runtime auto-detection...");
|
||||||
|
|
||||||
|
let mut detected = Vec::new();
|
||||||
|
|
||||||
|
for candidate in candidates() {
|
||||||
|
match detect_single_runtime(&candidate) {
|
||||||
|
Some(runtime) => {
|
||||||
|
info!(" ✓ Detected: {}", runtime);
|
||||||
|
detected.push(runtime);
|
||||||
|
}
|
||||||
|
None => {
|
||||||
|
debug!(" ✗ Not found: {}", candidate.name);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
info!(
|
||||||
|
"Runtime auto-detection complete: found {} runtime(s): [{}]",
|
||||||
|
detected.len(),
|
||||||
|
detected
|
||||||
|
.iter()
|
||||||
|
.map(|r| r.name.as_str())
|
||||||
|
.collect::<Vec<_>>()
|
||||||
|
.join(", ")
|
||||||
|
);
|
||||||
|
|
||||||
|
detected
|
||||||
|
}
|
||||||
|
|
||||||
|
fn detect_single_runtime(candidate: &RuntimeCandidate) -> Option<DetectedRuntime> {
|
||||||
|
for binary in candidate.binaries {
|
||||||
|
if let Some(path) = which_binary(binary) {
|
||||||
|
let version = get_version(&path, candidate.version_args, &candidate.version_parser);
|
||||||
|
|
||||||
|
return Some(DetectedRuntime {
|
||||||
|
name: candidate.name.to_string(),
|
||||||
|
path,
|
||||||
|
version,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
None
|
||||||
|
}
|
||||||
|
|
||||||
|
fn which_binary(binary: &str) -> Option<String> {
|
||||||
|
if binary == "bash" || binary == "sh" {
|
||||||
|
let absolute_path = format!("/bin/{}", binary);
|
||||||
|
if std::path::Path::new(&absolute_path).exists() {
|
||||||
|
return Some(absolute_path);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
match Command::new("which").arg(binary).output() {
|
||||||
|
Ok(output) if output.status.success() => {
|
||||||
|
let path = String::from_utf8_lossy(&output.stdout).trim().to_string();
|
||||||
|
if path.is_empty() {
|
||||||
|
None
|
||||||
|
} else {
|
||||||
|
Some(path)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Ok(_) => None,
|
||||||
|
Err(e) => {
|
||||||
|
debug!("'which' command failed ({}), trying 'command -v'", e);
|
||||||
|
match Command::new("sh")
|
||||||
|
.args(["-c", &format!("command -v {}", binary)])
|
||||||
|
.output()
|
||||||
|
{
|
||||||
|
Ok(output) if output.status.success() => {
|
||||||
|
let path = String::from_utf8_lossy(&output.stdout).trim().to_string();
|
||||||
|
if path.is_empty() {
|
||||||
|
None
|
||||||
|
} else {
|
||||||
|
Some(path)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
_ => None,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn get_version(binary_path: &str, version_args: &[&str], parser: &VersionParser) -> Option<String> {
|
||||||
|
let output = match Command::new(binary_path).args(version_args).output() {
|
||||||
|
Ok(output) => output,
|
||||||
|
Err(e) => {
|
||||||
|
debug!("Failed to run version command for {}: {}", binary_path, e);
|
||||||
|
return None;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
let stdout = String::from_utf8_lossy(&output.stdout);
|
||||||
|
let stderr = String::from_utf8_lossy(&output.stderr);
|
||||||
|
let combined = format!("{}{}", stdout, stderr);
|
||||||
|
|
||||||
|
match parser {
|
||||||
|
VersionParser::SemverLike => parse_semver_like(&combined),
|
||||||
|
VersionParser::JavaStyle => parse_java_version(&combined),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn parse_semver_like(output: &str) -> Option<String> {
|
||||||
|
let re = regex::Regex::new(r"(?:v|go)?(\d+\.\d+(?:\.\d+)?)").ok()?;
|
||||||
|
re.captures(output)
|
||||||
|
.and_then(|captures| captures.get(1).map(|m| m.as_str().to_string()))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn parse_java_version(output: &str) -> Option<String> {
|
||||||
|
let quoted_re = regex::Regex::new(r#"version\s+"([^"]+)""#).ok()?;
|
||||||
|
if let Some(captures) = quoted_re.captures(output) {
|
||||||
|
return captures.get(1).map(|m| m.as_str().to_string());
|
||||||
|
}
|
||||||
|
|
||||||
|
parse_semver_like(output)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn format_as_env_value(runtimes: &[DetectedRuntime]) -> String {
|
||||||
|
runtimes
|
||||||
|
.iter()
|
||||||
|
.map(|r| r.name.as_str())
|
||||||
|
.collect::<Vec<_>>()
|
||||||
|
.join(",")
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn print_detection_report_for_env(env_var_name: &str, runtimes: &[DetectedRuntime]) {
|
||||||
|
println!("=== Attune Agent Runtime Detection Report ===");
|
||||||
|
println!();
|
||||||
|
|
||||||
|
if runtimes.is_empty() {
|
||||||
|
println!("No runtimes detected!");
|
||||||
|
println!();
|
||||||
|
println!("The agent could not find any supported interpreter binaries.");
|
||||||
|
println!("Ensure at least one of the following is installed and on PATH:");
|
||||||
|
println!(" - bash / sh (shell scripts)");
|
||||||
|
println!(" - python3 / python (Python scripts)");
|
||||||
|
println!(" - node / nodejs (Node.js scripts)");
|
||||||
|
println!(" - ruby (Ruby scripts)");
|
||||||
|
println!(" - go (Go programs)");
|
||||||
|
println!(" - java (Java programs)");
|
||||||
|
println!(" - Rscript (R scripts)");
|
||||||
|
println!(" - perl (Perl scripts)");
|
||||||
|
} else {
|
||||||
|
println!("Detected {} runtime(s):", runtimes.len());
|
||||||
|
println!();
|
||||||
|
for rt in runtimes {
|
||||||
|
let version_str = rt.version.as_deref().unwrap_or("unknown version");
|
||||||
|
println!(" ✓ {:<10} {} ({})", rt.name, rt.path, version_str);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
println!();
|
||||||
|
println!("{}={}", env_var_name, format_as_env_value(runtimes));
|
||||||
|
println!();
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use super::*;
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_parse_semver_like_python() {
|
||||||
|
assert_eq!(
|
||||||
|
parse_semver_like("Python 3.12.1"),
|
||||||
|
Some("3.12.1".to_string())
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_parse_semver_like_node() {
|
||||||
|
assert_eq!(parse_semver_like("v20.11.0"), Some("20.11.0".to_string()));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_parse_semver_like_go() {
|
||||||
|
assert_eq!(
|
||||||
|
parse_semver_like("go version go1.22.0 linux/amd64"),
|
||||||
|
Some("1.22.0".to_string())
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_parse_java_version_openjdk() {
|
||||||
|
assert_eq!(
|
||||||
|
parse_java_version(r#"openjdk version "21.0.1" 2023-10-17"#),
|
||||||
|
Some("21.0.1".to_string())
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_format_as_env_value_multiple() {
|
||||||
|
let runtimes = vec![
|
||||||
|
DetectedRuntime {
|
||||||
|
name: "shell".to_string(),
|
||||||
|
path: "/bin/bash".to_string(),
|
||||||
|
version: Some("5.2.15".to_string()),
|
||||||
|
},
|
||||||
|
DetectedRuntime {
|
||||||
|
name: "python".to_string(),
|
||||||
|
path: "/usr/bin/python3".to_string(),
|
||||||
|
version: Some("3.12.1".to_string()),
|
||||||
|
},
|
||||||
|
];
|
||||||
|
|
||||||
|
assert_eq!(format_as_env_value(&runtimes), "shell,python");
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -677,6 +677,15 @@ impl Default for PackRegistryConfig {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Agent binary distribution configuration
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
|
pub struct AgentConfig {
|
||||||
|
/// Directory containing agent binary files
|
||||||
|
pub binary_dir: String,
|
||||||
|
/// Optional bootstrap token for authenticating agent binary downloads
|
||||||
|
pub bootstrap_token: Option<String>,
|
||||||
|
}
|
||||||
|
|
||||||
/// Executor service configuration
|
/// Executor service configuration
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
pub struct ExecutorConfig {
|
pub struct ExecutorConfig {
|
||||||
@@ -770,6 +779,9 @@ pub struct Config {
|
|||||||
|
|
||||||
/// Executor configuration (optional, for executor service)
|
/// Executor configuration (optional, for executor service)
|
||||||
pub executor: Option<ExecutorConfig>,
|
pub executor: Option<ExecutorConfig>,
|
||||||
|
|
||||||
|
/// Agent configuration (optional, for agent binary distribution)
|
||||||
|
pub agent: Option<AgentConfig>,
|
||||||
}
|
}
|
||||||
|
|
||||||
fn default_service_name() -> String {
|
fn default_service_name() -> String {
|
||||||
@@ -1066,6 +1078,7 @@ mod tests {
|
|||||||
notifier: None,
|
notifier: None,
|
||||||
pack_registry: PackRegistryConfig::default(),
|
pack_registry: PackRegistryConfig::default(),
|
||||||
executor: None,
|
executor: None,
|
||||||
|
agent: None,
|
||||||
};
|
};
|
||||||
|
|
||||||
assert_eq!(config.service_name, "attune");
|
assert_eq!(config.service_name, "attune");
|
||||||
@@ -1144,6 +1157,7 @@ mod tests {
|
|||||||
notifier: None,
|
notifier: None,
|
||||||
pack_registry: PackRegistryConfig::default(),
|
pack_registry: PackRegistryConfig::default(),
|
||||||
executor: None,
|
executor: None,
|
||||||
|
agent: None,
|
||||||
};
|
};
|
||||||
|
|
||||||
assert!(config.validate().is_ok());
|
assert!(config.validate().is_ok());
|
||||||
|
|||||||
@@ -6,6 +6,8 @@
|
|||||||
//! - Configuration
|
//! - Configuration
|
||||||
//! - Utilities
|
//! - Utilities
|
||||||
|
|
||||||
|
pub mod agent_bootstrap;
|
||||||
|
pub mod agent_runtime_detection;
|
||||||
pub mod auth;
|
pub mod auth;
|
||||||
pub mod config;
|
pub mod config;
|
||||||
pub mod crypto;
|
pub mod crypto;
|
||||||
|
|||||||
@@ -444,13 +444,55 @@ pub mod runtime {
|
|||||||
|
|
||||||
/// Optional environment variables to set during action execution.
|
/// Optional environment variables to set during action execution.
|
||||||
///
|
///
|
||||||
/// Values support the same template variables as other fields:
|
/// Entries support the same template variables as other fields:
|
||||||
/// `{pack_dir}`, `{env_dir}`, `{interpreter}`, `{manifest_path}`.
|
/// `{pack_dir}`, `{env_dir}`, `{interpreter}`, `{manifest_path}`.
|
||||||
///
|
///
|
||||||
/// Example: `{"NODE_PATH": "{env_dir}/node_modules"}` ensures Node.js
|
/// The shorthand string form replaces the variable entirely:
|
||||||
/// can find packages installed in the isolated runtime environment.
|
/// `{"NODE_PATH": "{env_dir}/node_modules"}`
|
||||||
|
///
|
||||||
|
/// The object form supports declarative merge semantics:
|
||||||
|
/// `{"PYTHONPATH": {"value": "{pack_dir}/lib", "operation": "prepend"}}`
|
||||||
#[serde(default)]
|
#[serde(default)]
|
||||||
pub env_vars: HashMap<String, String>,
|
pub env_vars: HashMap<String, RuntimeEnvVarConfig>,
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Declarative configuration for a single runtime environment variable.
|
||||||
|
///
|
||||||
|
/// The string form is shorthand for `{ "value": "...", "operation": "set" }`.
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
|
||||||
|
#[serde(untagged)]
|
||||||
|
pub enum RuntimeEnvVarConfig {
|
||||||
|
Value(String),
|
||||||
|
Spec(RuntimeEnvVarSpec),
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Full configuration for a runtime environment variable.
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
|
||||||
|
pub struct RuntimeEnvVarSpec {
|
||||||
|
/// Template value to resolve for this variable.
|
||||||
|
pub value: String,
|
||||||
|
|
||||||
|
/// How the resolved value should be merged with any existing value.
|
||||||
|
#[serde(default)]
|
||||||
|
pub operation: RuntimeEnvVarOperation,
|
||||||
|
|
||||||
|
/// Separator used for prepend/append operations.
|
||||||
|
#[serde(default = "default_env_var_separator")]
|
||||||
|
pub separator: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Merge behavior for runtime-provided environment variables.
|
||||||
|
#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq, Default)]
|
||||||
|
#[serde(rename_all = "snake_case")]
|
||||||
|
pub enum RuntimeEnvVarOperation {
|
||||||
|
#[default]
|
||||||
|
Set,
|
||||||
|
Prepend,
|
||||||
|
Append,
|
||||||
|
}
|
||||||
|
|
||||||
|
fn default_env_var_separator() -> String {
|
||||||
|
":".to_string()
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Controls how inline code is materialized before execution.
|
/// Controls how inline code is materialized before execution.
|
||||||
@@ -768,6 +810,43 @@ pub mod runtime {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
impl RuntimeEnvVarConfig {
|
||||||
|
/// Resolve this environment variable against the current template
|
||||||
|
/// variables and any existing value already present in the process env.
|
||||||
|
pub fn resolve(
|
||||||
|
&self,
|
||||||
|
vars: &HashMap<&str, String>,
|
||||||
|
existing_value: Option<&str>,
|
||||||
|
) -> String {
|
||||||
|
match self {
|
||||||
|
Self::Value(value) => RuntimeExecutionConfig::resolve_template(value, vars),
|
||||||
|
Self::Spec(spec) => {
|
||||||
|
let resolved = RuntimeExecutionConfig::resolve_template(&spec.value, vars);
|
||||||
|
match spec.operation {
|
||||||
|
RuntimeEnvVarOperation::Set => resolved,
|
||||||
|
RuntimeEnvVarOperation::Prepend => {
|
||||||
|
join_env_var_values(&resolved, existing_value, &spec.separator)
|
||||||
|
}
|
||||||
|
RuntimeEnvVarOperation::Append => join_env_var_values(
|
||||||
|
existing_value.unwrap_or_default(),
|
||||||
|
Some(&resolved),
|
||||||
|
&spec.separator,
|
||||||
|
),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn join_env_var_values(left: &str, right: Option<&str>, separator: &str) -> String {
|
||||||
|
match (left.is_empty(), right.unwrap_or_default().is_empty()) {
|
||||||
|
(true, true) => String::new(),
|
||||||
|
(false, true) => left.to_string(),
|
||||||
|
(true, false) => right.unwrap_or_default().to_string(),
|
||||||
|
(false, false) => format!("{}{}{}", left, separator, right.unwrap_or_default()),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
|
#[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
|
||||||
pub struct Runtime {
|
pub struct Runtime {
|
||||||
pub id: Id,
|
pub id: Id,
|
||||||
@@ -776,10 +855,13 @@ pub mod runtime {
|
|||||||
pub pack_ref: Option<String>,
|
pub pack_ref: Option<String>,
|
||||||
pub description: Option<String>,
|
pub description: Option<String>,
|
||||||
pub name: String,
|
pub name: String,
|
||||||
|
pub aliases: Vec<String>,
|
||||||
pub distributions: JsonDict,
|
pub distributions: JsonDict,
|
||||||
pub installation: Option<JsonDict>,
|
pub installation: Option<JsonDict>,
|
||||||
pub installers: JsonDict,
|
pub installers: JsonDict,
|
||||||
pub execution_config: JsonDict,
|
pub execution_config: JsonDict,
|
||||||
|
pub auto_detected: bool,
|
||||||
|
pub detection_config: JsonDict,
|
||||||
pub created: DateTime<Utc>,
|
pub created: DateTime<Utc>,
|
||||||
pub updated: DateTime<Utc>,
|
pub updated: DateTime<Utc>,
|
||||||
}
|
}
|
||||||
@@ -884,7 +966,7 @@ pub mod trigger {
|
|||||||
pub pack: Option<Id>,
|
pub pack: Option<Id>,
|
||||||
pub pack_ref: Option<String>,
|
pub pack_ref: Option<String>,
|
||||||
pub label: String,
|
pub label: String,
|
||||||
pub description: String,
|
pub description: Option<String>,
|
||||||
pub entrypoint: String,
|
pub entrypoint: String,
|
||||||
pub runtime: Id,
|
pub runtime: Id,
|
||||||
pub runtime_ref: String,
|
pub runtime_ref: String,
|
||||||
@@ -912,7 +994,7 @@ pub mod action {
|
|||||||
pub pack: Id,
|
pub pack: Id,
|
||||||
pub pack_ref: String,
|
pub pack_ref: String,
|
||||||
pub label: String,
|
pub label: String,
|
||||||
pub description: String,
|
pub description: Option<String>,
|
||||||
pub entrypoint: String,
|
pub entrypoint: String,
|
||||||
pub runtime: Option<Id>,
|
pub runtime: Option<Id>,
|
||||||
/// Optional semver version constraint for the runtime
|
/// Optional semver version constraint for the runtime
|
||||||
@@ -962,7 +1044,7 @@ pub mod rule {
|
|||||||
pub pack: Id,
|
pub pack: Id,
|
||||||
pub pack_ref: String,
|
pub pack_ref: String,
|
||||||
pub label: String,
|
pub label: String,
|
||||||
pub description: String,
|
pub description: Option<String>,
|
||||||
pub action: Option<Id>,
|
pub action: Option<Id>,
|
||||||
pub action_ref: String,
|
pub action_ref: String,
|
||||||
pub trigger: Option<Id>,
|
pub trigger: Option<Id>,
|
||||||
@@ -1218,6 +1300,7 @@ pub mod identity {
|
|||||||
pub display_name: Option<String>,
|
pub display_name: Option<String>,
|
||||||
pub password_hash: Option<String>,
|
pub password_hash: Option<String>,
|
||||||
pub attributes: JsonDict,
|
pub attributes: JsonDict,
|
||||||
|
pub frozen: bool,
|
||||||
pub created: DateTime<Utc>,
|
pub created: DateTime<Utc>,
|
||||||
pub updated: DateTime<Utc>,
|
pub updated: DateTime<Utc>,
|
||||||
}
|
}
|
||||||
@@ -1242,6 +1325,25 @@ pub mod identity {
|
|||||||
pub permset: Id,
|
pub permset: Id,
|
||||||
pub created: DateTime<Utc>,
|
pub created: DateTime<Utc>,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
|
||||||
|
pub struct IdentityRoleAssignment {
|
||||||
|
pub id: Id,
|
||||||
|
pub identity: Id,
|
||||||
|
pub role: String,
|
||||||
|
pub source: String,
|
||||||
|
pub managed: bool,
|
||||||
|
pub created: DateTime<Utc>,
|
||||||
|
pub updated: DateTime<Utc>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
|
||||||
|
pub struct PermissionSetRoleAssignment {
|
||||||
|
pub id: Id,
|
||||||
|
pub permset: Id,
|
||||||
|
pub role: String,
|
||||||
|
pub created: DateTime<Utc>,
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Key/Value storage
|
/// Key/Value storage
|
||||||
@@ -1617,3 +1719,68 @@ pub mod entity_history {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use super::runtime::{
|
||||||
|
RuntimeEnvVarConfig, RuntimeEnvVarOperation, RuntimeEnvVarSpec, RuntimeExecutionConfig,
|
||||||
|
};
|
||||||
|
use serde_json::json;
|
||||||
|
use std::collections::HashMap;
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn runtime_execution_config_env_vars_accept_string_and_object_forms() {
|
||||||
|
let config: RuntimeExecutionConfig = serde_json::from_value(json!({
|
||||||
|
"env_vars": {
|
||||||
|
"NODE_PATH": "{env_dir}/node_modules",
|
||||||
|
"PYTHONPATH": {
|
||||||
|
"value": "{pack_dir}/lib",
|
||||||
|
"operation": "prepend",
|
||||||
|
"separator": ":"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}))
|
||||||
|
.expect("runtime execution config should deserialize");
|
||||||
|
|
||||||
|
assert!(matches!(
|
||||||
|
config.env_vars.get("NODE_PATH"),
|
||||||
|
Some(RuntimeEnvVarConfig::Value(value)) if value == "{env_dir}/node_modules"
|
||||||
|
));
|
||||||
|
|
||||||
|
assert!(matches!(
|
||||||
|
config.env_vars.get("PYTHONPATH"),
|
||||||
|
Some(RuntimeEnvVarConfig::Spec(RuntimeEnvVarSpec {
|
||||||
|
value,
|
||||||
|
operation: RuntimeEnvVarOperation::Prepend,
|
||||||
|
separator,
|
||||||
|
})) if value == "{pack_dir}/lib" && separator == ":"
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn runtime_env_var_config_resolves_prepend_and_append_against_existing_values() {
|
||||||
|
let mut vars = HashMap::new();
|
||||||
|
vars.insert("pack_dir", "/packs/example".to_string());
|
||||||
|
vars.insert("env_dir", "/runtime_envs/example/python".to_string());
|
||||||
|
|
||||||
|
let prepend = RuntimeEnvVarConfig::Spec(RuntimeEnvVarSpec {
|
||||||
|
value: "{pack_dir}/lib".to_string(),
|
||||||
|
operation: RuntimeEnvVarOperation::Prepend,
|
||||||
|
separator: ":".to_string(),
|
||||||
|
});
|
||||||
|
assert_eq!(
|
||||||
|
prepend.resolve(&vars, Some("/already/set")),
|
||||||
|
"/packs/example/lib:/already/set"
|
||||||
|
);
|
||||||
|
|
||||||
|
let append = RuntimeEnvVarConfig::Spec(RuntimeEnvVarSpec {
|
||||||
|
value: "{env_dir}/node_modules".to_string(),
|
||||||
|
operation: RuntimeEnvVarOperation::Append,
|
||||||
|
separator: ":".to_string(),
|
||||||
|
});
|
||||||
|
assert_eq!(
|
||||||
|
append.resolve(&vars, Some("/base/modules")),
|
||||||
|
"/base/modules:/runtime_envs/example/python/node_modules"
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|||||||
@@ -10,7 +10,7 @@ use crate::config::Config;
|
|||||||
use crate::error::{Error, Result};
|
use crate::error::{Error, Result};
|
||||||
use crate::models::Runtime;
|
use crate::models::Runtime;
|
||||||
use crate::repositories::action::ActionRepository;
|
use crate::repositories::action::ActionRepository;
|
||||||
use crate::repositories::runtime::RuntimeRepository;
|
use crate::repositories::runtime::{self, RuntimeRepository};
|
||||||
use crate::repositories::FindById as _;
|
use crate::repositories::FindById as _;
|
||||||
use serde_json::Value as JsonValue;
|
use serde_json::Value as JsonValue;
|
||||||
use sqlx::{PgPool, Row};
|
use sqlx::{PgPool, Row};
|
||||||
@@ -370,15 +370,11 @@ impl PackEnvironmentManager {
|
|||||||
// ========================================================================
|
// ========================================================================
|
||||||
|
|
||||||
async fn get_runtime(&self, runtime_id: i64) -> Result<Runtime> {
|
async fn get_runtime(&self, runtime_id: i64) -> Result<Runtime> {
|
||||||
sqlx::query_as::<_, Runtime>(
|
let query = format!(
|
||||||
r#"
|
"SELECT {} FROM runtime WHERE id = $1",
|
||||||
SELECT id, ref, pack, pack_ref, description, name,
|
runtime::SELECT_COLUMNS
|
||||||
distributions, installation, installers, execution_config,
|
);
|
||||||
created, updated
|
sqlx::query_as::<_, Runtime>(&query)
|
||||||
FROM runtime
|
|
||||||
WHERE id = $1
|
|
||||||
"#,
|
|
||||||
)
|
|
||||||
.bind(runtime_id)
|
.bind(runtime_id)
|
||||||
.fetch_one(&self.pool)
|
.fetch_one(&self.pool)
|
||||||
.await
|
.await
|
||||||
|
|||||||
@@ -404,6 +404,16 @@ impl<'a> PackComponentLoader<'a> {
|
|||||||
.and_then(|v| serde_json::to_value(v).ok())
|
.and_then(|v| serde_json::to_value(v).ok())
|
||||||
.unwrap_or_else(|| serde_json::json!({}));
|
.unwrap_or_else(|| serde_json::json!({}));
|
||||||
|
|
||||||
|
let aliases: Vec<String> = data
|
||||||
|
.get("aliases")
|
||||||
|
.and_then(|v| v.as_sequence())
|
||||||
|
.map(|arr| {
|
||||||
|
arr.iter()
|
||||||
|
.filter_map(|v| v.as_str().map(|s| s.to_ascii_lowercase()))
|
||||||
|
.collect()
|
||||||
|
})
|
||||||
|
.unwrap_or_default();
|
||||||
|
|
||||||
// Check if runtime already exists — update in place if so
|
// Check if runtime already exists — update in place if so
|
||||||
if let Some(existing) = RuntimeRepository::find_by_ref(self.pool, &runtime_ref).await? {
|
if let Some(existing) = RuntimeRepository::find_by_ref(self.pool, &runtime_ref).await? {
|
||||||
let update_input = UpdateRuntimeInput {
|
let update_input = UpdateRuntimeInput {
|
||||||
@@ -418,6 +428,8 @@ impl<'a> PackComponentLoader<'a> {
|
|||||||
None => Patch::Clear,
|
None => Patch::Clear,
|
||||||
}),
|
}),
|
||||||
execution_config: Some(execution_config),
|
execution_config: Some(execution_config),
|
||||||
|
aliases: Some(aliases),
|
||||||
|
..Default::default()
|
||||||
};
|
};
|
||||||
|
|
||||||
match RuntimeRepository::update(self.pool, existing.id, update_input).await {
|
match RuntimeRepository::update(self.pool, existing.id, update_input).await {
|
||||||
@@ -448,6 +460,9 @@ impl<'a> PackComponentLoader<'a> {
|
|||||||
distributions,
|
distributions,
|
||||||
installation,
|
installation,
|
||||||
execution_config,
|
execution_config,
|
||||||
|
aliases,
|
||||||
|
auto_detected: false,
|
||||||
|
detection_config: serde_json::json!({}),
|
||||||
};
|
};
|
||||||
|
|
||||||
match RuntimeRepository::create(self.pool, input).await {
|
match RuntimeRepository::create(self.pool, input).await {
|
||||||
@@ -710,8 +725,7 @@ impl<'a> PackComponentLoader<'a> {
|
|||||||
let description = data
|
let description = data
|
||||||
.get("description")
|
.get("description")
|
||||||
.and_then(|v| v.as_str())
|
.and_then(|v| v.as_str())
|
||||||
.unwrap_or("")
|
.map(|s| s.to_string());
|
||||||
.to_string();
|
|
||||||
|
|
||||||
let enabled = data
|
let enabled = data
|
||||||
.get("enabled")
|
.get("enabled")
|
||||||
@@ -730,7 +744,10 @@ impl<'a> PackComponentLoader<'a> {
|
|||||||
if let Some(existing) = TriggerRepository::find_by_ref(self.pool, &trigger_ref).await? {
|
if let Some(existing) = TriggerRepository::find_by_ref(self.pool, &trigger_ref).await? {
|
||||||
let update_input = UpdateTriggerInput {
|
let update_input = UpdateTriggerInput {
|
||||||
label: Some(label),
|
label: Some(label),
|
||||||
description: Some(Patch::Set(description)),
|
description: Some(match description {
|
||||||
|
Some(description) => Patch::Set(description),
|
||||||
|
None => Patch::Clear,
|
||||||
|
}),
|
||||||
enabled: Some(enabled),
|
enabled: Some(enabled),
|
||||||
param_schema: Some(match param_schema {
|
param_schema: Some(match param_schema {
|
||||||
Some(value) => Patch::Set(value),
|
Some(value) => Patch::Set(value),
|
||||||
@@ -763,7 +780,7 @@ impl<'a> PackComponentLoader<'a> {
|
|||||||
pack: Some(self.pack_id),
|
pack: Some(self.pack_id),
|
||||||
pack_ref: Some(self.pack_ref.clone()),
|
pack_ref: Some(self.pack_ref.clone()),
|
||||||
label,
|
label,
|
||||||
description: Some(description),
|
description,
|
||||||
enabled,
|
enabled,
|
||||||
param_schema,
|
param_schema,
|
||||||
out_schema,
|
out_schema,
|
||||||
@@ -843,8 +860,7 @@ impl<'a> PackComponentLoader<'a> {
|
|||||||
let description = data
|
let description = data
|
||||||
.get("description")
|
.get("description")
|
||||||
.and_then(|v| v.as_str())
|
.and_then(|v| v.as_str())
|
||||||
.unwrap_or("")
|
.map(|s| s.to_string());
|
||||||
.to_string();
|
|
||||||
|
|
||||||
// ── Workflow file handling ──────────────────────────────────
|
// ── Workflow file handling ──────────────────────────────────
|
||||||
// If the action declares `workflow_file`, load the referenced
|
// If the action declares `workflow_file`, load the referenced
|
||||||
@@ -861,7 +877,7 @@ impl<'a> PackComponentLoader<'a> {
|
|||||||
wf_path,
|
wf_path,
|
||||||
&action_ref,
|
&action_ref,
|
||||||
&label,
|
&label,
|
||||||
&description,
|
description.as_deref().unwrap_or(""),
|
||||||
&data,
|
&data,
|
||||||
)
|
)
|
||||||
.await
|
.await
|
||||||
@@ -941,7 +957,10 @@ impl<'a> PackComponentLoader<'a> {
|
|||||||
if let Some(existing) = ActionRepository::find_by_ref(self.pool, &action_ref).await? {
|
if let Some(existing) = ActionRepository::find_by_ref(self.pool, &action_ref).await? {
|
||||||
let update_input = UpdateActionInput {
|
let update_input = UpdateActionInput {
|
||||||
label: Some(label),
|
label: Some(label),
|
||||||
description: Some(description),
|
description: Some(match description {
|
||||||
|
Some(description) => Patch::Set(description),
|
||||||
|
None => Patch::Clear,
|
||||||
|
}),
|
||||||
entrypoint: Some(entrypoint),
|
entrypoint: Some(entrypoint),
|
||||||
runtime: runtime_id,
|
runtime: runtime_id,
|
||||||
runtime_version_constraint: Some(match runtime_version_constraint {
|
runtime_version_constraint: Some(match runtime_version_constraint {
|
||||||
@@ -1295,8 +1314,7 @@ impl<'a> PackComponentLoader<'a> {
|
|||||||
let description = data
|
let description = data
|
||||||
.get("description")
|
.get("description")
|
||||||
.and_then(|v| v.as_str())
|
.and_then(|v| v.as_str())
|
||||||
.unwrap_or("")
|
.map(|s| s.to_string());
|
||||||
.to_string();
|
|
||||||
|
|
||||||
let enabled = data
|
let enabled = data
|
||||||
.get("enabled")
|
.get("enabled")
|
||||||
@@ -1332,7 +1350,10 @@ impl<'a> PackComponentLoader<'a> {
|
|||||||
if let Some(existing) = SensorRepository::find_by_ref(self.pool, &sensor_ref).await? {
|
if let Some(existing) = SensorRepository::find_by_ref(self.pool, &sensor_ref).await? {
|
||||||
let update_input = UpdateSensorInput {
|
let update_input = UpdateSensorInput {
|
||||||
label: Some(label),
|
label: Some(label),
|
||||||
description: Some(description),
|
description: Some(match description {
|
||||||
|
Some(description) => Patch::Set(description),
|
||||||
|
None => Patch::Clear,
|
||||||
|
}),
|
||||||
entrypoint: Some(entrypoint),
|
entrypoint: Some(entrypoint),
|
||||||
runtime: Some(sensor_runtime_id),
|
runtime: Some(sensor_runtime_id),
|
||||||
runtime_ref: Some(sensor_runtime_ref.clone()),
|
runtime_ref: Some(sensor_runtime_ref.clone()),
|
||||||
|
|||||||
@@ -21,10 +21,6 @@ pub enum Resource {
|
|||||||
Inquiries,
|
Inquiries,
|
||||||
Keys,
|
Keys,
|
||||||
Artifacts,
|
Artifacts,
|
||||||
Workflows,
|
|
||||||
Webhooks,
|
|
||||||
Analytics,
|
|
||||||
History,
|
|
||||||
Identities,
|
Identities,
|
||||||
Permissions,
|
Permissions,
|
||||||
}
|
}
|
||||||
@@ -40,6 +36,7 @@ pub enum Action {
|
|||||||
Cancel,
|
Cancel,
|
||||||
Respond,
|
Respond,
|
||||||
Manage,
|
Manage,
|
||||||
|
Decrypt,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq)]
|
#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq)]
|
||||||
@@ -69,6 +66,8 @@ pub struct GrantConstraints {
|
|||||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||||
pub owner_types: Option<Vec<OwnerType>>,
|
pub owner_types: Option<Vec<OwnerType>>,
|
||||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||||
|
pub owner_refs: Option<Vec<String>>,
|
||||||
|
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||||
pub visibility: Option<Vec<ArtifactVisibility>>,
|
pub visibility: Option<Vec<ArtifactVisibility>>,
|
||||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||||
pub execution_scope: Option<ExecutionScopeConstraint>,
|
pub execution_scope: Option<ExecutionScopeConstraint>,
|
||||||
@@ -99,6 +98,7 @@ pub struct AuthorizationContext {
|
|||||||
pub pack_ref: Option<String>,
|
pub pack_ref: Option<String>,
|
||||||
pub owner_identity_id: Option<Id>,
|
pub owner_identity_id: Option<Id>,
|
||||||
pub owner_type: Option<OwnerType>,
|
pub owner_type: Option<OwnerType>,
|
||||||
|
pub owner_ref: Option<String>,
|
||||||
pub visibility: Option<ArtifactVisibility>,
|
pub visibility: Option<ArtifactVisibility>,
|
||||||
pub encrypted: Option<bool>,
|
pub encrypted: Option<bool>,
|
||||||
pub execution_owner_identity_id: Option<Id>,
|
pub execution_owner_identity_id: Option<Id>,
|
||||||
@@ -115,6 +115,7 @@ impl AuthorizationContext {
|
|||||||
pack_ref: None,
|
pack_ref: None,
|
||||||
owner_identity_id: None,
|
owner_identity_id: None,
|
||||||
owner_type: None,
|
owner_type: None,
|
||||||
|
owner_ref: None,
|
||||||
visibility: None,
|
visibility: None,
|
||||||
encrypted: None,
|
encrypted: None,
|
||||||
execution_owner_identity_id: None,
|
execution_owner_identity_id: None,
|
||||||
@@ -162,6 +163,15 @@ impl Grant {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if let Some(owner_refs) = &constraints.owner_refs {
|
||||||
|
let Some(owner_ref) = &ctx.owner_ref else {
|
||||||
|
return false;
|
||||||
|
};
|
||||||
|
if !owner_refs.contains(owner_ref) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
if let Some(visibility) = &constraints.visibility {
|
if let Some(visibility) = &constraints.visibility {
|
||||||
let Some(target_visibility) = ctx.visibility else {
|
let Some(target_visibility) = ctx.visibility else {
|
||||||
return false;
|
return false;
|
||||||
@@ -289,4 +299,28 @@ mod tests {
|
|||||||
.insert("team".to_string(), json!("infra"));
|
.insert("team".to_string(), json!("infra"));
|
||||||
assert!(!grant.allows(Resource::Packs, Action::Read, &ctx));
|
assert!(!grant.allows(Resource::Packs, Action::Read, &ctx));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn owner_ref_constraint_requires_exact_value_match() {
|
||||||
|
let grant = Grant {
|
||||||
|
resource: Resource::Artifacts,
|
||||||
|
actions: vec![Action::Read],
|
||||||
|
constraints: Some(GrantConstraints {
|
||||||
|
owner_types: Some(vec![OwnerType::Pack]),
|
||||||
|
owner_refs: Some(vec!["python_example".to_string()]),
|
||||||
|
..Default::default()
|
||||||
|
}),
|
||||||
|
};
|
||||||
|
|
||||||
|
let mut ctx = AuthorizationContext::new(1);
|
||||||
|
ctx.owner_type = Some(OwnerType::Pack);
|
||||||
|
ctx.owner_ref = Some("python_example".to_string());
|
||||||
|
assert!(grant.allows(Resource::Artifacts, Action::Read, &ctx));
|
||||||
|
|
||||||
|
ctx.owner_ref = Some("other_pack".to_string());
|
||||||
|
assert!(!grant.allows(Resource::Artifacts, Action::Read, &ctx));
|
||||||
|
|
||||||
|
ctx.owner_ref = None;
|
||||||
|
assert!(!grant.allows(Resource::Artifacts, Action::Read, &ctx));
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -51,7 +51,7 @@ pub struct CreateActionInput {
|
|||||||
pub pack: Id,
|
pub pack: Id,
|
||||||
pub pack_ref: String,
|
pub pack_ref: String,
|
||||||
pub label: String,
|
pub label: String,
|
||||||
pub description: String,
|
pub description: Option<String>,
|
||||||
pub entrypoint: String,
|
pub entrypoint: String,
|
||||||
pub runtime: Option<Id>,
|
pub runtime: Option<Id>,
|
||||||
pub runtime_version_constraint: Option<String>,
|
pub runtime_version_constraint: Option<String>,
|
||||||
@@ -64,7 +64,7 @@ pub struct CreateActionInput {
|
|||||||
#[derive(Debug, Clone, Default)]
|
#[derive(Debug, Clone, Default)]
|
||||||
pub struct UpdateActionInput {
|
pub struct UpdateActionInput {
|
||||||
pub label: Option<String>,
|
pub label: Option<String>,
|
||||||
pub description: Option<String>,
|
pub description: Option<Patch<String>>,
|
||||||
pub entrypoint: Option<String>,
|
pub entrypoint: Option<String>,
|
||||||
pub runtime: Option<Id>,
|
pub runtime: Option<Id>,
|
||||||
pub runtime_version_constraint: Option<Patch<String>>,
|
pub runtime_version_constraint: Option<Patch<String>>,
|
||||||
@@ -210,7 +210,10 @@ impl Update for ActionRepository {
|
|||||||
query.push(", ");
|
query.push(", ");
|
||||||
}
|
}
|
||||||
query.push("description = ");
|
query.push("description = ");
|
||||||
query.push_bind(description);
|
match description {
|
||||||
|
Patch::Set(value) => query.push_bind(value),
|
||||||
|
Patch::Clear => query.push_bind(Option::<String>::None),
|
||||||
|
};
|
||||||
has_updates = true;
|
has_updates = true;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -577,6 +577,14 @@ pub struct CreateArtifactVersionInput {
|
|||||||
}
|
}
|
||||||
|
|
||||||
impl ArtifactVersionRepository {
|
impl ArtifactVersionRepository {
|
||||||
|
fn select_columns_with_alias(alias: &str) -> String {
|
||||||
|
format!(
|
||||||
|
"{alias}.id, {alias}.artifact, {alias}.version, {alias}.content_type, \
|
||||||
|
{alias}.size_bytes, NULL::bytea AS content, {alias}.content_json, \
|
||||||
|
{alias}.file_path, {alias}.meta, {alias}.created_by, {alias}.created"
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
/// Find a version by ID (without binary content for performance)
|
/// Find a version by ID (without binary content for performance)
|
||||||
pub async fn find_by_id<'e, E>(executor: E, id: i64) -> Result<Option<ArtifactVersion>>
|
pub async fn find_by_id<'e, E>(executor: E, id: i64) -> Result<Option<ArtifactVersion>>
|
||||||
where
|
where
|
||||||
@@ -812,14 +820,11 @@ impl ArtifactVersionRepository {
|
|||||||
E: Executor<'e, Database = Postgres> + 'e,
|
E: Executor<'e, Database = Postgres> + 'e,
|
||||||
{
|
{
|
||||||
let query = format!(
|
let query = format!(
|
||||||
"SELECT av.{} \
|
"SELECT {} \
|
||||||
FROM artifact_version av \
|
FROM artifact_version av \
|
||||||
JOIN artifact a ON av.artifact = a.id \
|
JOIN artifact a ON av.artifact = a.id \
|
||||||
WHERE a.execution = $1 AND av.file_path IS NOT NULL",
|
WHERE a.execution = $1 AND av.file_path IS NOT NULL",
|
||||||
artifact_version::SELECT_COLUMNS
|
Self::select_columns_with_alias("av")
|
||||||
.split(", ")
|
|
||||||
.collect::<Vec<_>>()
|
|
||||||
.join(", av.")
|
|
||||||
);
|
);
|
||||||
sqlx::query_as::<_, ArtifactVersion>(&query)
|
sqlx::query_as::<_, ArtifactVersion>(&query)
|
||||||
.bind(execution_id)
|
.bind(execution_id)
|
||||||
@@ -847,3 +852,18 @@ impl ArtifactVersionRepository {
|
|||||||
.map_err(Into::into)
|
.map_err(Into::into)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use super::ArtifactVersionRepository;
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn aliased_select_columns_keep_null_content_expression_unqualified() {
|
||||||
|
let columns = ArtifactVersionRepository::select_columns_with_alias("av");
|
||||||
|
|
||||||
|
assert!(columns.contains("av.id"));
|
||||||
|
assert!(columns.contains("av.file_path"));
|
||||||
|
assert!(columns.contains("NULL::bytea AS content"));
|
||||||
|
assert!(!columns.contains("av.NULL::bytea AS content"));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|||||||
@@ -28,6 +28,7 @@ pub struct UpdateIdentityInput {
|
|||||||
pub display_name: Option<String>,
|
pub display_name: Option<String>,
|
||||||
pub password_hash: Option<String>,
|
pub password_hash: Option<String>,
|
||||||
pub attributes: Option<JsonDict>,
|
pub attributes: Option<JsonDict>,
|
||||||
|
pub frozen: Option<bool>,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[async_trait::async_trait]
|
#[async_trait::async_trait]
|
||||||
@@ -37,7 +38,7 @@ impl FindById for IdentityRepository {
|
|||||||
E: Executor<'e, Database = Postgres> + 'e,
|
E: Executor<'e, Database = Postgres> + 'e,
|
||||||
{
|
{
|
||||||
sqlx::query_as::<_, Identity>(
|
sqlx::query_as::<_, Identity>(
|
||||||
"SELECT id, login, display_name, password_hash, attributes, created, updated FROM identity WHERE id = $1"
|
"SELECT id, login, display_name, password_hash, attributes, frozen, created, updated FROM identity WHERE id = $1"
|
||||||
).bind(id).fetch_optional(executor).await.map_err(Into::into)
|
).bind(id).fetch_optional(executor).await.map_err(Into::into)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -49,7 +50,7 @@ impl List for IdentityRepository {
|
|||||||
E: Executor<'e, Database = Postgres> + 'e,
|
E: Executor<'e, Database = Postgres> + 'e,
|
||||||
{
|
{
|
||||||
sqlx::query_as::<_, Identity>(
|
sqlx::query_as::<_, Identity>(
|
||||||
"SELECT id, login, display_name, password_hash, attributes, created, updated FROM identity ORDER BY login ASC"
|
"SELECT id, login, display_name, password_hash, attributes, frozen, created, updated FROM identity ORDER BY login ASC"
|
||||||
).fetch_all(executor).await.map_err(Into::into)
|
).fetch_all(executor).await.map_err(Into::into)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -62,7 +63,7 @@ impl Create for IdentityRepository {
|
|||||||
E: Executor<'e, Database = Postgres> + 'e,
|
E: Executor<'e, Database = Postgres> + 'e,
|
||||||
{
|
{
|
||||||
sqlx::query_as::<_, Identity>(
|
sqlx::query_as::<_, Identity>(
|
||||||
"INSERT INTO identity (login, display_name, password_hash, attributes) VALUES ($1, $2, $3, $4) RETURNING id, login, display_name, password_hash, attributes, created, updated"
|
"INSERT INTO identity (login, display_name, password_hash, attributes) VALUES ($1, $2, $3, $4) RETURNING id, login, display_name, password_hash, attributes, frozen, created, updated"
|
||||||
)
|
)
|
||||||
.bind(&input.login)
|
.bind(&input.login)
|
||||||
.bind(&input.display_name)
|
.bind(&input.display_name)
|
||||||
@@ -111,6 +112,13 @@ impl Update for IdentityRepository {
|
|||||||
query.push("attributes = ").push_bind(attributes);
|
query.push("attributes = ").push_bind(attributes);
|
||||||
has_updates = true;
|
has_updates = true;
|
||||||
}
|
}
|
||||||
|
if let Some(frozen) = input.frozen {
|
||||||
|
if has_updates {
|
||||||
|
query.push(", ");
|
||||||
|
}
|
||||||
|
query.push("frozen = ").push_bind(frozen);
|
||||||
|
has_updates = true;
|
||||||
|
}
|
||||||
|
|
||||||
if !has_updates {
|
if !has_updates {
|
||||||
// No updates requested, fetch and return existing entity
|
// No updates requested, fetch and return existing entity
|
||||||
@@ -119,7 +127,7 @@ impl Update for IdentityRepository {
|
|||||||
|
|
||||||
query.push(", updated = NOW() WHERE id = ").push_bind(id);
|
query.push(", updated = NOW() WHERE id = ").push_bind(id);
|
||||||
query.push(
|
query.push(
|
||||||
" RETURNING id, login, display_name, password_hash, attributes, created, updated",
|
" RETURNING id, login, display_name, password_hash, attributes, frozen, created, updated",
|
||||||
);
|
);
|
||||||
|
|
||||||
query
|
query
|
||||||
@@ -156,7 +164,7 @@ impl IdentityRepository {
|
|||||||
E: Executor<'e, Database = Postgres> + 'e,
|
E: Executor<'e, Database = Postgres> + 'e,
|
||||||
{
|
{
|
||||||
sqlx::query_as::<_, Identity>(
|
sqlx::query_as::<_, Identity>(
|
||||||
"SELECT id, login, display_name, password_hash, attributes, created, updated FROM identity WHERE login = $1"
|
"SELECT id, login, display_name, password_hash, attributes, frozen, created, updated FROM identity WHERE login = $1"
|
||||||
).bind(login).fetch_optional(executor).await.map_err(Into::into)
|
).bind(login).fetch_optional(executor).await.map_err(Into::into)
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -169,7 +177,7 @@ impl IdentityRepository {
|
|||||||
E: Executor<'e, Database = Postgres> + 'e,
|
E: Executor<'e, Database = Postgres> + 'e,
|
||||||
{
|
{
|
||||||
sqlx::query_as::<_, Identity>(
|
sqlx::query_as::<_, Identity>(
|
||||||
"SELECT id, login, display_name, password_hash, attributes, created, updated
|
"SELECT id, login, display_name, password_hash, attributes, frozen, created, updated
|
||||||
FROM identity
|
FROM identity
|
||||||
WHERE attributes->'oidc'->>'issuer' = $1
|
WHERE attributes->'oidc'->>'issuer' = $1
|
||||||
AND attributes->'oidc'->>'sub' = $2",
|
AND attributes->'oidc'->>'sub' = $2",
|
||||||
@@ -190,7 +198,7 @@ impl IdentityRepository {
|
|||||||
E: Executor<'e, Database = Postgres> + 'e,
|
E: Executor<'e, Database = Postgres> + 'e,
|
||||||
{
|
{
|
||||||
sqlx::query_as::<_, Identity>(
|
sqlx::query_as::<_, Identity>(
|
||||||
"SELECT id, login, display_name, password_hash, attributes, created, updated
|
"SELECT id, login, display_name, password_hash, attributes, frozen, created, updated
|
||||||
FROM identity
|
FROM identity
|
||||||
WHERE attributes->'ldap'->>'server_url' = $1
|
WHERE attributes->'ldap'->>'server_url' = $1
|
||||||
AND attributes->'ldap'->>'dn' = $2",
|
AND attributes->'ldap'->>'dn' = $2",
|
||||||
@@ -363,6 +371,27 @@ impl PermissionSetRepository {
|
|||||||
.map_err(Into::into)
|
.map_err(Into::into)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub async fn find_by_roles<'e, E>(executor: E, roles: &[String]) -> Result<Vec<PermissionSet>>
|
||||||
|
where
|
||||||
|
E: Executor<'e, Database = Postgres> + 'e,
|
||||||
|
{
|
||||||
|
if roles.is_empty() {
|
||||||
|
return Ok(Vec::new());
|
||||||
|
}
|
||||||
|
|
||||||
|
sqlx::query_as::<_, PermissionSet>(
|
||||||
|
"SELECT DISTINCT ps.id, ps.ref, ps.pack, ps.pack_ref, ps.label, ps.description, ps.grants, ps.created, ps.updated
|
||||||
|
FROM permission_set ps
|
||||||
|
INNER JOIN permission_set_role_assignment psra ON psra.permset = ps.id
|
||||||
|
WHERE psra.role = ANY($1)
|
||||||
|
ORDER BY ps.ref ASC",
|
||||||
|
)
|
||||||
|
.bind(roles)
|
||||||
|
.fetch_all(executor)
|
||||||
|
.await
|
||||||
|
.map_err(Into::into)
|
||||||
|
}
|
||||||
|
|
||||||
/// Delete permission sets belonging to a pack whose refs are NOT in the given set.
|
/// Delete permission sets belonging to a pack whose refs are NOT in the given set.
|
||||||
///
|
///
|
||||||
/// Used during pack reinstallation to clean up permission sets that were
|
/// Used during pack reinstallation to clean up permission sets that were
|
||||||
@@ -481,3 +510,231 @@ impl PermissionAssignmentRepository {
|
|||||||
.map_err(Into::into)
|
.map_err(Into::into)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub struct IdentityRoleAssignmentRepository;
|
||||||
|
|
||||||
|
impl Repository for IdentityRoleAssignmentRepository {
|
||||||
|
type Entity = IdentityRoleAssignment;
|
||||||
|
fn table_name() -> &'static str {
|
||||||
|
"identity_role_assignment"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone)]
|
||||||
|
pub struct CreateIdentityRoleAssignmentInput {
|
||||||
|
pub identity: Id,
|
||||||
|
pub role: String,
|
||||||
|
pub source: String,
|
||||||
|
pub managed: bool,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[async_trait::async_trait]
|
||||||
|
impl FindById for IdentityRoleAssignmentRepository {
|
||||||
|
async fn find_by_id<'e, E>(executor: E, id: i64) -> Result<Option<Self::Entity>>
|
||||||
|
where
|
||||||
|
E: Executor<'e, Database = Postgres> + 'e,
|
||||||
|
{
|
||||||
|
sqlx::query_as::<_, IdentityRoleAssignment>(
|
||||||
|
"SELECT id, identity, role, source, managed, created, updated FROM identity_role_assignment WHERE id = $1"
|
||||||
|
)
|
||||||
|
.bind(id)
|
||||||
|
.fetch_optional(executor)
|
||||||
|
.await
|
||||||
|
.map_err(Into::into)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[async_trait::async_trait]
|
||||||
|
impl Create for IdentityRoleAssignmentRepository {
|
||||||
|
type CreateInput = CreateIdentityRoleAssignmentInput;
|
||||||
|
async fn create<'e, E>(executor: E, input: Self::CreateInput) -> Result<Self::Entity>
|
||||||
|
where
|
||||||
|
E: Executor<'e, Database = Postgres> + 'e,
|
||||||
|
{
|
||||||
|
sqlx::query_as::<_, IdentityRoleAssignment>(
|
||||||
|
"INSERT INTO identity_role_assignment (identity, role, source, managed)
|
||||||
|
VALUES ($1, $2, $3, $4)
|
||||||
|
RETURNING id, identity, role, source, managed, created, updated",
|
||||||
|
)
|
||||||
|
.bind(input.identity)
|
||||||
|
.bind(&input.role)
|
||||||
|
.bind(&input.source)
|
||||||
|
.bind(input.managed)
|
||||||
|
.fetch_one(executor)
|
||||||
|
.await
|
||||||
|
.map_err(Into::into)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[async_trait::async_trait]
|
||||||
|
impl Delete for IdentityRoleAssignmentRepository {
|
||||||
|
async fn delete<'e, E>(executor: E, id: i64) -> Result<bool>
|
||||||
|
where
|
||||||
|
E: Executor<'e, Database = Postgres> + 'e,
|
||||||
|
{
|
||||||
|
let result = sqlx::query("DELETE FROM identity_role_assignment WHERE id = $1")
|
||||||
|
.bind(id)
|
||||||
|
.execute(executor)
|
||||||
|
.await?;
|
||||||
|
Ok(result.rows_affected() > 0)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl IdentityRoleAssignmentRepository {
|
||||||
|
pub async fn find_by_identity<'e, E>(
|
||||||
|
executor: E,
|
||||||
|
identity_id: Id,
|
||||||
|
) -> Result<Vec<IdentityRoleAssignment>>
|
||||||
|
where
|
||||||
|
E: Executor<'e, Database = Postgres> + 'e,
|
||||||
|
{
|
||||||
|
sqlx::query_as::<_, IdentityRoleAssignment>(
|
||||||
|
"SELECT id, identity, role, source, managed, created, updated
|
||||||
|
FROM identity_role_assignment
|
||||||
|
WHERE identity = $1
|
||||||
|
ORDER BY role ASC",
|
||||||
|
)
|
||||||
|
.bind(identity_id)
|
||||||
|
.fetch_all(executor)
|
||||||
|
.await
|
||||||
|
.map_err(Into::into)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn find_role_names_by_identity<'e, E>(
|
||||||
|
executor: E,
|
||||||
|
identity_id: Id,
|
||||||
|
) -> Result<Vec<String>>
|
||||||
|
where
|
||||||
|
E: Executor<'e, Database = Postgres> + 'e,
|
||||||
|
{
|
||||||
|
sqlx::query_scalar::<_, String>(
|
||||||
|
"SELECT role FROM identity_role_assignment WHERE identity = $1 ORDER BY role ASC",
|
||||||
|
)
|
||||||
|
.bind(identity_id)
|
||||||
|
.fetch_all(executor)
|
||||||
|
.await
|
||||||
|
.map_err(Into::into)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn replace_managed_roles<'e, E>(
|
||||||
|
executor: E,
|
||||||
|
identity_id: Id,
|
||||||
|
source: &str,
|
||||||
|
roles: &[String],
|
||||||
|
) -> Result<()>
|
||||||
|
where
|
||||||
|
E: Executor<'e, Database = Postgres> + Copy + 'e,
|
||||||
|
{
|
||||||
|
sqlx::query(
|
||||||
|
"DELETE FROM identity_role_assignment WHERE identity = $1 AND source = $2 AND managed = true",
|
||||||
|
)
|
||||||
|
.bind(identity_id)
|
||||||
|
.bind(source)
|
||||||
|
.execute(executor)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
for role in roles {
|
||||||
|
sqlx::query(
|
||||||
|
"INSERT INTO identity_role_assignment (identity, role, source, managed)
|
||||||
|
VALUES ($1, $2, $3, true)
|
||||||
|
ON CONFLICT (identity, role) DO UPDATE
|
||||||
|
SET source = EXCLUDED.source,
|
||||||
|
managed = EXCLUDED.managed,
|
||||||
|
updated = NOW()",
|
||||||
|
)
|
||||||
|
.bind(identity_id)
|
||||||
|
.bind(role)
|
||||||
|
.bind(source)
|
||||||
|
.execute(executor)
|
||||||
|
.await?;
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub struct PermissionSetRoleAssignmentRepository;
|
||||||
|
|
||||||
|
impl Repository for PermissionSetRoleAssignmentRepository {
|
||||||
|
type Entity = PermissionSetRoleAssignment;
|
||||||
|
fn table_name() -> &'static str {
|
||||||
|
"permission_set_role_assignment"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone)]
|
||||||
|
pub struct CreatePermissionSetRoleAssignmentInput {
|
||||||
|
pub permset: Id,
|
||||||
|
pub role: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[async_trait::async_trait]
|
||||||
|
impl FindById for PermissionSetRoleAssignmentRepository {
|
||||||
|
async fn find_by_id<'e, E>(executor: E, id: i64) -> Result<Option<Self::Entity>>
|
||||||
|
where
|
||||||
|
E: Executor<'e, Database = Postgres> + 'e,
|
||||||
|
{
|
||||||
|
sqlx::query_as::<_, PermissionSetRoleAssignment>(
|
||||||
|
"SELECT id, permset, role, created FROM permission_set_role_assignment WHERE id = $1",
|
||||||
|
)
|
||||||
|
.bind(id)
|
||||||
|
.fetch_optional(executor)
|
||||||
|
.await
|
||||||
|
.map_err(Into::into)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[async_trait::async_trait]
|
||||||
|
impl Create for PermissionSetRoleAssignmentRepository {
|
||||||
|
type CreateInput = CreatePermissionSetRoleAssignmentInput;
|
||||||
|
async fn create<'e, E>(executor: E, input: Self::CreateInput) -> Result<Self::Entity>
|
||||||
|
where
|
||||||
|
E: Executor<'e, Database = Postgres> + 'e,
|
||||||
|
{
|
||||||
|
sqlx::query_as::<_, PermissionSetRoleAssignment>(
|
||||||
|
"INSERT INTO permission_set_role_assignment (permset, role)
|
||||||
|
VALUES ($1, $2)
|
||||||
|
RETURNING id, permset, role, created",
|
||||||
|
)
|
||||||
|
.bind(input.permset)
|
||||||
|
.bind(&input.role)
|
||||||
|
.fetch_one(executor)
|
||||||
|
.await
|
||||||
|
.map_err(Into::into)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[async_trait::async_trait]
|
||||||
|
impl Delete for PermissionSetRoleAssignmentRepository {
|
||||||
|
async fn delete<'e, E>(executor: E, id: i64) -> Result<bool>
|
||||||
|
where
|
||||||
|
E: Executor<'e, Database = Postgres> + 'e,
|
||||||
|
{
|
||||||
|
let result = sqlx::query("DELETE FROM permission_set_role_assignment WHERE id = $1")
|
||||||
|
.bind(id)
|
||||||
|
.execute(executor)
|
||||||
|
.await?;
|
||||||
|
Ok(result.rows_affected() > 0)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl PermissionSetRoleAssignmentRepository {
|
||||||
|
pub async fn find_by_permission_set<'e, E>(
|
||||||
|
executor: E,
|
||||||
|
permset_id: Id,
|
||||||
|
) -> Result<Vec<PermissionSetRoleAssignment>>
|
||||||
|
where
|
||||||
|
E: Executor<'e, Database = Postgres> + 'e,
|
||||||
|
{
|
||||||
|
sqlx::query_as::<_, PermissionSetRoleAssignment>(
|
||||||
|
"SELECT id, permset, role, created
|
||||||
|
FROM permission_set_role_assignment
|
||||||
|
WHERE permset = $1
|
||||||
|
ORDER BY role ASC",
|
||||||
|
)
|
||||||
|
.bind(permset_id)
|
||||||
|
.fetch_all(executor)
|
||||||
|
.await
|
||||||
|
.map_err(Into::into)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|||||||
@@ -6,7 +6,7 @@ use crate::models::{rule::*, Id};
|
|||||||
use crate::{Error, Result};
|
use crate::{Error, Result};
|
||||||
use sqlx::{Executor, Postgres, QueryBuilder};
|
use sqlx::{Executor, Postgres, QueryBuilder};
|
||||||
|
|
||||||
use super::{Create, Delete, FindById, FindByRef, List, Repository, Update};
|
use super::{Create, Delete, FindById, FindByRef, List, Patch, Repository, Update};
|
||||||
|
|
||||||
/// Filters for [`RuleRepository::list_search`].
|
/// Filters for [`RuleRepository::list_search`].
|
||||||
///
|
///
|
||||||
@@ -41,7 +41,7 @@ pub struct RestoreRuleInput {
|
|||||||
pub pack: Id,
|
pub pack: Id,
|
||||||
pub pack_ref: String,
|
pub pack_ref: String,
|
||||||
pub label: String,
|
pub label: String,
|
||||||
pub description: String,
|
pub description: Option<String>,
|
||||||
pub action: Option<Id>,
|
pub action: Option<Id>,
|
||||||
pub action_ref: String,
|
pub action_ref: String,
|
||||||
pub trigger: Option<Id>,
|
pub trigger: Option<Id>,
|
||||||
@@ -70,7 +70,7 @@ pub struct CreateRuleInput {
|
|||||||
pub pack: Id,
|
pub pack: Id,
|
||||||
pub pack_ref: String,
|
pub pack_ref: String,
|
||||||
pub label: String,
|
pub label: String,
|
||||||
pub description: String,
|
pub description: Option<String>,
|
||||||
pub action: Id,
|
pub action: Id,
|
||||||
pub action_ref: String,
|
pub action_ref: String,
|
||||||
pub trigger: Id,
|
pub trigger: Id,
|
||||||
@@ -86,7 +86,7 @@ pub struct CreateRuleInput {
|
|||||||
#[derive(Debug, Clone, Default)]
|
#[derive(Debug, Clone, Default)]
|
||||||
pub struct UpdateRuleInput {
|
pub struct UpdateRuleInput {
|
||||||
pub label: Option<String>,
|
pub label: Option<String>,
|
||||||
pub description: Option<String>,
|
pub description: Option<Patch<String>>,
|
||||||
pub conditions: Option<serde_json::Value>,
|
pub conditions: Option<serde_json::Value>,
|
||||||
pub action_params: Option<serde_json::Value>,
|
pub action_params: Option<serde_json::Value>,
|
||||||
pub trigger_params: Option<serde_json::Value>,
|
pub trigger_params: Option<serde_json::Value>,
|
||||||
@@ -228,7 +228,10 @@ impl Update for RuleRepository {
|
|||||||
query.push(", ");
|
query.push(", ");
|
||||||
}
|
}
|
||||||
query.push("description = ");
|
query.push("description = ");
|
||||||
query.push_bind(description);
|
match description {
|
||||||
|
Patch::Set(value) => query.push_bind(value),
|
||||||
|
Patch::Clear => query.push_bind(Option::<String>::None),
|
||||||
|
};
|
||||||
has_updates = true;
|
has_updates = true;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -23,6 +23,13 @@ impl Repository for RuntimeRepository {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Columns selected for all Runtime queries. Centralised here so that
|
||||||
|
/// schema changes only need one update.
|
||||||
|
pub const SELECT_COLUMNS: &str = "id, ref, pack, pack_ref, description, name, aliases, \
|
||||||
|
distributions, installation, installers, execution_config, \
|
||||||
|
auto_detected, detection_config, \
|
||||||
|
created, updated";
|
||||||
|
|
||||||
/// Input for creating a new runtime
|
/// Input for creating a new runtime
|
||||||
#[derive(Debug, Clone)]
|
#[derive(Debug, Clone)]
|
||||||
pub struct CreateRuntimeInput {
|
pub struct CreateRuntimeInput {
|
||||||
@@ -31,9 +38,12 @@ pub struct CreateRuntimeInput {
|
|||||||
pub pack_ref: Option<String>,
|
pub pack_ref: Option<String>,
|
||||||
pub description: Option<String>,
|
pub description: Option<String>,
|
||||||
pub name: String,
|
pub name: String,
|
||||||
|
pub aliases: Vec<String>,
|
||||||
pub distributions: JsonDict,
|
pub distributions: JsonDict,
|
||||||
pub installation: Option<JsonDict>,
|
pub installation: Option<JsonDict>,
|
||||||
pub execution_config: JsonDict,
|
pub execution_config: JsonDict,
|
||||||
|
pub auto_detected: bool,
|
||||||
|
pub detection_config: JsonDict,
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Input for updating a runtime
|
/// Input for updating a runtime
|
||||||
@@ -41,9 +51,12 @@ pub struct CreateRuntimeInput {
|
|||||||
pub struct UpdateRuntimeInput {
|
pub struct UpdateRuntimeInput {
|
||||||
pub description: Option<Patch<String>>,
|
pub description: Option<Patch<String>>,
|
||||||
pub name: Option<String>,
|
pub name: Option<String>,
|
||||||
|
pub aliases: Option<Vec<String>>,
|
||||||
pub distributions: Option<JsonDict>,
|
pub distributions: Option<JsonDict>,
|
||||||
pub installation: Option<Patch<JsonDict>>,
|
pub installation: Option<Patch<JsonDict>>,
|
||||||
pub execution_config: Option<JsonDict>,
|
pub execution_config: Option<JsonDict>,
|
||||||
|
pub auto_detected: Option<bool>,
|
||||||
|
pub detection_config: Option<JsonDict>,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[async_trait::async_trait]
|
#[async_trait::async_trait]
|
||||||
@@ -52,15 +65,8 @@ impl FindById for RuntimeRepository {
|
|||||||
where
|
where
|
||||||
E: Executor<'e, Database = Postgres> + 'e,
|
E: Executor<'e, Database = Postgres> + 'e,
|
||||||
{
|
{
|
||||||
let runtime = sqlx::query_as::<_, Runtime>(
|
let query = format!("SELECT {} FROM runtime WHERE id = $1", SELECT_COLUMNS);
|
||||||
r#"
|
let runtime = sqlx::query_as::<_, Runtime>(&query)
|
||||||
SELECT id, ref, pack, pack_ref, description, name,
|
|
||||||
distributions, installation, installers, execution_config,
|
|
||||||
created, updated
|
|
||||||
FROM runtime
|
|
||||||
WHERE id = $1
|
|
||||||
"#,
|
|
||||||
)
|
|
||||||
.bind(id)
|
.bind(id)
|
||||||
.fetch_optional(executor)
|
.fetch_optional(executor)
|
||||||
.await?;
|
.await?;
|
||||||
@@ -75,15 +81,8 @@ impl FindByRef for RuntimeRepository {
|
|||||||
where
|
where
|
||||||
E: Executor<'e, Database = Postgres> + 'e,
|
E: Executor<'e, Database = Postgres> + 'e,
|
||||||
{
|
{
|
||||||
let runtime = sqlx::query_as::<_, Runtime>(
|
let query = format!("SELECT {} FROM runtime WHERE ref = $1", SELECT_COLUMNS);
|
||||||
r#"
|
let runtime = sqlx::query_as::<_, Runtime>(&query)
|
||||||
SELECT id, ref, pack, pack_ref, description, name,
|
|
||||||
distributions, installation, installers, execution_config,
|
|
||||||
created, updated
|
|
||||||
FROM runtime
|
|
||||||
WHERE ref = $1
|
|
||||||
"#,
|
|
||||||
)
|
|
||||||
.bind(ref_str)
|
.bind(ref_str)
|
||||||
.fetch_optional(executor)
|
.fetch_optional(executor)
|
||||||
.await?;
|
.await?;
|
||||||
@@ -98,15 +97,8 @@ impl List for RuntimeRepository {
|
|||||||
where
|
where
|
||||||
E: Executor<'e, Database = Postgres> + 'e,
|
E: Executor<'e, Database = Postgres> + 'e,
|
||||||
{
|
{
|
||||||
let runtimes = sqlx::query_as::<_, Runtime>(
|
let query = format!("SELECT {} FROM runtime ORDER BY ref ASC", SELECT_COLUMNS);
|
||||||
r#"
|
let runtimes = sqlx::query_as::<_, Runtime>(&query)
|
||||||
SELECT id, ref, pack, pack_ref, description, name,
|
|
||||||
distributions, installation, installers, execution_config,
|
|
||||||
created, updated
|
|
||||||
FROM runtime
|
|
||||||
ORDER BY ref ASC
|
|
||||||
"#,
|
|
||||||
)
|
|
||||||
.fetch_all(executor)
|
.fetch_all(executor)
|
||||||
.await?;
|
.await?;
|
||||||
|
|
||||||
@@ -122,25 +114,27 @@ impl Create for RuntimeRepository {
|
|||||||
where
|
where
|
||||||
E: Executor<'e, Database = Postgres> + 'e,
|
E: Executor<'e, Database = Postgres> + 'e,
|
||||||
{
|
{
|
||||||
let runtime = sqlx::query_as::<_, Runtime>(
|
let query = format!(
|
||||||
r#"
|
"INSERT INTO runtime (ref, pack, pack_ref, description, name, aliases, \
|
||||||
INSERT INTO runtime (ref, pack, pack_ref, description, name,
|
distributions, installation, installers, execution_config, \
|
||||||
distributions, installation, installers, execution_config)
|
auto_detected, detection_config) \
|
||||||
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9)
|
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12) \
|
||||||
RETURNING id, ref, pack, pack_ref, description, name,
|
RETURNING {}",
|
||||||
distributions, installation, installers, execution_config,
|
SELECT_COLUMNS
|
||||||
created, updated
|
);
|
||||||
"#,
|
let runtime = sqlx::query_as::<_, Runtime>(&query)
|
||||||
)
|
|
||||||
.bind(&input.r#ref)
|
.bind(&input.r#ref)
|
||||||
.bind(input.pack)
|
.bind(input.pack)
|
||||||
.bind(&input.pack_ref)
|
.bind(&input.pack_ref)
|
||||||
.bind(&input.description)
|
.bind(&input.description)
|
||||||
.bind(&input.name)
|
.bind(&input.name)
|
||||||
|
.bind(&input.aliases)
|
||||||
.bind(&input.distributions)
|
.bind(&input.distributions)
|
||||||
.bind(&input.installation)
|
.bind(&input.installation)
|
||||||
.bind(serde_json::json!({}))
|
.bind(serde_json::json!({}))
|
||||||
.bind(&input.execution_config)
|
.bind(&input.execution_config)
|
||||||
|
.bind(input.auto_detected)
|
||||||
|
.bind(&input.detection_config)
|
||||||
.fetch_one(executor)
|
.fetch_one(executor)
|
||||||
.await?;
|
.await?;
|
||||||
|
|
||||||
@@ -179,6 +173,15 @@ impl Update for RuntimeRepository {
|
|||||||
has_updates = true;
|
has_updates = true;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if let Some(aliases) = &input.aliases {
|
||||||
|
if has_updates {
|
||||||
|
query.push(", ");
|
||||||
|
}
|
||||||
|
query.push("aliases = ");
|
||||||
|
query.push_bind(aliases.as_slice());
|
||||||
|
has_updates = true;
|
||||||
|
}
|
||||||
|
|
||||||
if let Some(distributions) = &input.distributions {
|
if let Some(distributions) = &input.distributions {
|
||||||
if has_updates {
|
if has_updates {
|
||||||
query.push(", ");
|
query.push(", ");
|
||||||
@@ -209,6 +212,24 @@ impl Update for RuntimeRepository {
|
|||||||
has_updates = true;
|
has_updates = true;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if let Some(auto_detected) = input.auto_detected {
|
||||||
|
if has_updates {
|
||||||
|
query.push(", ");
|
||||||
|
}
|
||||||
|
query.push("auto_detected = ");
|
||||||
|
query.push_bind(auto_detected);
|
||||||
|
has_updates = true;
|
||||||
|
}
|
||||||
|
|
||||||
|
if let Some(detection_config) = &input.detection_config {
|
||||||
|
if has_updates {
|
||||||
|
query.push(", ");
|
||||||
|
}
|
||||||
|
query.push("detection_config = ");
|
||||||
|
query.push_bind(detection_config);
|
||||||
|
has_updates = true;
|
||||||
|
}
|
||||||
|
|
||||||
if !has_updates {
|
if !has_updates {
|
||||||
// No updates requested, fetch and return existing entity
|
// No updates requested, fetch and return existing entity
|
||||||
return Self::get_by_id(executor, id).await;
|
return Self::get_by_id(executor, id).await;
|
||||||
@@ -216,10 +237,7 @@ impl Update for RuntimeRepository {
|
|||||||
|
|
||||||
query.push(", updated = NOW() WHERE id = ");
|
query.push(", updated = NOW() WHERE id = ");
|
||||||
query.push_bind(id);
|
query.push_bind(id);
|
||||||
query.push(
|
query.push(&format!(" RETURNING {}", SELECT_COLUMNS));
|
||||||
" RETURNING id, ref, pack, pack_ref, description, name, \
|
|
||||||
distributions, installation, installers, execution_config, created, updated",
|
|
||||||
);
|
|
||||||
|
|
||||||
let runtime = query
|
let runtime = query
|
||||||
.build_query_as::<Runtime>()
|
.build_query_as::<Runtime>()
|
||||||
@@ -251,16 +269,11 @@ impl RuntimeRepository {
|
|||||||
where
|
where
|
||||||
E: Executor<'e, Database = Postgres> + 'e,
|
E: Executor<'e, Database = Postgres> + 'e,
|
||||||
{
|
{
|
||||||
let runtimes = sqlx::query_as::<_, Runtime>(
|
let query = format!(
|
||||||
r#"
|
"SELECT {} FROM runtime WHERE pack = $1 ORDER BY ref ASC",
|
||||||
SELECT id, ref, pack, pack_ref, description, name,
|
SELECT_COLUMNS
|
||||||
distributions, installation, installers, execution_config,
|
);
|
||||||
created, updated
|
let runtimes = sqlx::query_as::<_, Runtime>(&query)
|
||||||
FROM runtime
|
|
||||||
WHERE pack = $1
|
|
||||||
ORDER BY ref ASC
|
|
||||||
"#,
|
|
||||||
)
|
|
||||||
.bind(pack_id)
|
.bind(pack_id)
|
||||||
.fetch_all(executor)
|
.fetch_all(executor)
|
||||||
.await?;
|
.await?;
|
||||||
@@ -273,16 +286,11 @@ impl RuntimeRepository {
|
|||||||
where
|
where
|
||||||
E: Executor<'e, Database = Postgres> + 'e,
|
E: Executor<'e, Database = Postgres> + 'e,
|
||||||
{
|
{
|
||||||
let runtime = sqlx::query_as::<_, Runtime>(
|
let query = format!(
|
||||||
r#"
|
"SELECT {} FROM runtime WHERE LOWER(name) = LOWER($1) LIMIT 1",
|
||||||
SELECT id, ref, pack, pack_ref, description, name,
|
SELECT_COLUMNS
|
||||||
distributions, installation, installers, execution_config,
|
);
|
||||||
created, updated
|
let runtime = sqlx::query_as::<_, Runtime>(&query)
|
||||||
FROM runtime
|
|
||||||
WHERE LOWER(name) = LOWER($1)
|
|
||||||
LIMIT 1
|
|
||||||
"#,
|
|
||||||
)
|
|
||||||
.bind(name)
|
.bind(name)
|
||||||
.fetch_optional(executor)
|
.fetch_optional(executor)
|
||||||
.await?;
|
.await?;
|
||||||
@@ -290,6 +298,23 @@ impl RuntimeRepository {
|
|||||||
Ok(runtime)
|
Ok(runtime)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Find a runtime where the given alias appears in its `aliases` array.
|
||||||
|
/// Uses PostgreSQL's `@>` (array contains) operator with a GIN index.
|
||||||
|
pub async fn find_by_alias<'e, E>(executor: E, alias: &str) -> Result<Option<Runtime>>
|
||||||
|
where
|
||||||
|
E: Executor<'e, Database = Postgres> + 'e,
|
||||||
|
{
|
||||||
|
let query = format!(
|
||||||
|
"SELECT {} FROM runtime WHERE aliases @> ARRAY[$1]::text[] LIMIT 1",
|
||||||
|
SELECT_COLUMNS
|
||||||
|
);
|
||||||
|
let runtime = sqlx::query_as::<_, Runtime>(&query)
|
||||||
|
.bind(alias)
|
||||||
|
.fetch_optional(executor)
|
||||||
|
.await?;
|
||||||
|
Ok(runtime)
|
||||||
|
}
|
||||||
|
|
||||||
/// Delete runtimes belonging to a pack whose refs are NOT in the given set.
|
/// Delete runtimes belonging to a pack whose refs are NOT in the given set.
|
||||||
///
|
///
|
||||||
/// Used during pack reinstallation to clean up runtimes that were removed
|
/// Used during pack reinstallation to clean up runtimes that were removed
|
||||||
|
|||||||
@@ -665,7 +665,7 @@ pub struct CreateSensorInput {
|
|||||||
pub pack: Option<Id>,
|
pub pack: Option<Id>,
|
||||||
pub pack_ref: Option<String>,
|
pub pack_ref: Option<String>,
|
||||||
pub label: String,
|
pub label: String,
|
||||||
pub description: String,
|
pub description: Option<String>,
|
||||||
pub entrypoint: String,
|
pub entrypoint: String,
|
||||||
pub runtime: Id,
|
pub runtime: Id,
|
||||||
pub runtime_ref: String,
|
pub runtime_ref: String,
|
||||||
@@ -681,7 +681,7 @@ pub struct CreateSensorInput {
|
|||||||
#[derive(Debug, Clone, Default)]
|
#[derive(Debug, Clone, Default)]
|
||||||
pub struct UpdateSensorInput {
|
pub struct UpdateSensorInput {
|
||||||
pub label: Option<String>,
|
pub label: Option<String>,
|
||||||
pub description: Option<String>,
|
pub description: Option<Patch<String>>,
|
||||||
pub entrypoint: Option<String>,
|
pub entrypoint: Option<String>,
|
||||||
pub runtime: Option<Id>,
|
pub runtime: Option<Id>,
|
||||||
pub runtime_ref: Option<String>,
|
pub runtime_ref: Option<String>,
|
||||||
@@ -830,7 +830,10 @@ impl Update for SensorRepository {
|
|||||||
query.push(", ");
|
query.push(", ");
|
||||||
}
|
}
|
||||||
query.push("description = ");
|
query.push("description = ");
|
||||||
query.push_bind(description);
|
match description {
|
||||||
|
Patch::Set(value) => query.push_bind(value),
|
||||||
|
Patch::Clear => query.push_bind(Option::<String>::None),
|
||||||
|
};
|
||||||
has_updates = true;
|
has_updates = true;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -6,59 +6,41 @@
|
|||||||
//! 2. Config file specification (medium priority)
|
//! 2. Config file specification (medium priority)
|
||||||
//! 3. Database-driven detection with verification (lowest priority)
|
//! 3. Database-driven detection with verification (lowest priority)
|
||||||
//!
|
//!
|
||||||
//! Also provides [`normalize_runtime_name`] for alias-aware runtime name
|
//! Also provides alias-based matching functions ([`runtime_aliases_match_filter`]
|
||||||
//! comparison across the codebase (worker filters, env setup, etc.).
|
//! and [`runtime_aliases_contain`]) for comparing runtime alias lists against
|
||||||
|
//! worker filters and capability strings. Aliases are declared per-runtime in
|
||||||
|
//! pack manifests, so no hardcoded alias table is needed here.
|
||||||
|
|
||||||
use crate::config::Config;
|
use crate::config::Config;
|
||||||
use crate::error::Result;
|
use crate::error::Result;
|
||||||
use crate::models::Runtime;
|
use crate::models::Runtime;
|
||||||
|
use crate::repositories::runtime::SELECT_COLUMNS;
|
||||||
use serde_json::json;
|
use serde_json::json;
|
||||||
use sqlx::PgPool;
|
use sqlx::PgPool;
|
||||||
use std::collections::HashMap;
|
use std::collections::HashMap;
|
||||||
use std::process::Command;
|
use std::process::Command;
|
||||||
use tracing::{debug, info, warn};
|
use tracing::{debug, info, warn};
|
||||||
|
|
||||||
/// Normalize a runtime name to its canonical short form.
|
/// Check if a runtime's aliases overlap with a filter list.
|
||||||
///
|
///
|
||||||
/// This ensures that different ways of referring to the same runtime
|
/// The filter list comes from `ATTUNE_WORKER_RUNTIMES` (e.g., `["python", "shell"]`).
|
||||||
/// (e.g., "node", "nodejs", "node.js") all resolve to a single canonical
|
/// A runtime matches if any of its declared aliases appear in the filter list.
|
||||||
/// name. Used by worker runtime filters and environment setup to match
|
/// Comparison is case-insensitive.
|
||||||
/// database runtime names against short filter values.
|
pub fn runtime_aliases_match_filter(aliases: &[String], filter: &[String]) -> bool {
|
||||||
///
|
aliases.iter().any(|alias| {
|
||||||
/// The canonical names mirror the alias groups in
|
let lower_alias = alias.to_ascii_lowercase();
|
||||||
/// `PackComponentLoader::resolve_runtime`.
|
filter.iter().any(|f| f.to_ascii_lowercase() == lower_alias)
|
||||||
///
|
})
|
||||||
/// # Examples
|
|
||||||
/// ```
|
|
||||||
/// use attune_common::runtime_detection::normalize_runtime_name;
|
|
||||||
/// assert_eq!(normalize_runtime_name("node.js"), "node");
|
|
||||||
/// assert_eq!(normalize_runtime_name("nodejs"), "node");
|
|
||||||
/// assert_eq!(normalize_runtime_name("python3"), "python");
|
|
||||||
/// assert_eq!(normalize_runtime_name("shell"), "shell");
|
|
||||||
/// ```
|
|
||||||
pub fn normalize_runtime_name(name: &str) -> &str {
|
|
||||||
match name {
|
|
||||||
"node" | "nodejs" | "node.js" => "node",
|
|
||||||
"python" | "python3" => "python",
|
|
||||||
"bash" | "sh" | "shell" => "shell",
|
|
||||||
"native" | "builtin" | "standalone" => "native",
|
|
||||||
other => other,
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Check if a runtime name matches a filter entry, supporting common aliases.
|
/// Check if a runtime's aliases contain a specific name.
|
||||||
///
|
///
|
||||||
/// Both sides are lowercased and then normalized before comparison so that,
|
/// Used by the scheduler to check if a worker's capability string
|
||||||
/// e.g., a filter value of `"node"` matches a database runtime name `"Node.js"`.
|
/// (e.g., "python") matches a runtime's aliases (e.g., ["python", "python3"]).
|
||||||
pub fn runtime_matches_filter(rt_name: &str, filter_entry: &str) -> bool {
|
/// Comparison is case-insensitive.
|
||||||
let rt_lower = rt_name.to_ascii_lowercase();
|
pub fn runtime_aliases_contain(aliases: &[String], name: &str) -> bool {
|
||||||
let filter_lower = filter_entry.to_ascii_lowercase();
|
let lower = name.to_ascii_lowercase();
|
||||||
normalize_runtime_name(&rt_lower) == normalize_runtime_name(&filter_lower)
|
aliases.iter().any(|a| a.to_ascii_lowercase() == lower)
|
||||||
}
|
|
||||||
|
|
||||||
/// Check if a runtime name matches any entry in a filter list.
|
|
||||||
pub fn runtime_in_filter(rt_name: &str, filter: &[String]) -> bool {
|
|
||||||
filter.iter().any(|f| runtime_matches_filter(rt_name, f))
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Runtime detection service
|
/// Runtime detection service
|
||||||
@@ -156,15 +138,8 @@ impl RuntimeDetector {
|
|||||||
info!("Querying database for runtime definitions...");
|
info!("Querying database for runtime definitions...");
|
||||||
|
|
||||||
// Query all runtimes from database
|
// Query all runtimes from database
|
||||||
let runtimes = sqlx::query_as::<_, Runtime>(
|
let query = format!("SELECT {} FROM runtime ORDER BY ref", SELECT_COLUMNS);
|
||||||
r#"
|
let runtimes = sqlx::query_as::<_, Runtime>(&query)
|
||||||
SELECT id, ref, pack, pack_ref, description, name,
|
|
||||||
distributions, installation, installers, execution_config,
|
|
||||||
created, updated
|
|
||||||
FROM runtime
|
|
||||||
ORDER BY ref
|
|
||||||
"#,
|
|
||||||
)
|
|
||||||
.fetch_all(&self.pool)
|
.fetch_all(&self.pool)
|
||||||
.await?;
|
.await?;
|
||||||
|
|
||||||
@@ -337,69 +312,46 @@ mod tests {
|
|||||||
use serde_json::json;
|
use serde_json::json;
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_normalize_runtime_name_node_variants() {
|
fn test_runtime_aliases_match_filter() {
|
||||||
assert_eq!(normalize_runtime_name("node"), "node");
|
let aliases = vec!["python".to_string(), "python3".to_string()];
|
||||||
assert_eq!(normalize_runtime_name("nodejs"), "node");
|
let filter = vec!["python".to_string(), "shell".to_string()];
|
||||||
assert_eq!(normalize_runtime_name("node.js"), "node");
|
assert!(runtime_aliases_match_filter(&aliases, &filter));
|
||||||
|
|
||||||
|
let filter_no_match = vec!["node".to_string(), "ruby".to_string()];
|
||||||
|
assert!(!runtime_aliases_match_filter(&aliases, &filter_no_match));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_normalize_runtime_name_python_variants() {
|
fn test_runtime_aliases_match_filter_case_insensitive() {
|
||||||
assert_eq!(normalize_runtime_name("python"), "python");
|
let aliases = vec!["Python".to_string(), "python3".to_string()];
|
||||||
assert_eq!(normalize_runtime_name("python3"), "python");
|
let filter = vec!["python".to_string()];
|
||||||
|
assert!(runtime_aliases_match_filter(&aliases, &filter));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_normalize_runtime_name_shell_variants() {
|
fn test_runtime_aliases_match_filter_empty() {
|
||||||
assert_eq!(normalize_runtime_name("shell"), "shell");
|
let aliases: Vec<String> = vec![];
|
||||||
assert_eq!(normalize_runtime_name("bash"), "shell");
|
let filter = vec!["python".to_string()];
|
||||||
assert_eq!(normalize_runtime_name("sh"), "shell");
|
assert!(!runtime_aliases_match_filter(&aliases, &filter));
|
||||||
|
|
||||||
|
let aliases = vec!["python".to_string()];
|
||||||
|
let filter: Vec<String> = vec![];
|
||||||
|
assert!(!runtime_aliases_match_filter(&aliases, &filter));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_normalize_runtime_name_native_variants() {
|
fn test_runtime_aliases_contain() {
|
||||||
assert_eq!(normalize_runtime_name("native"), "native");
|
let aliases = vec!["ruby".to_string(), "rb".to_string()];
|
||||||
assert_eq!(normalize_runtime_name("builtin"), "native");
|
assert!(runtime_aliases_contain(&aliases, "ruby"));
|
||||||
assert_eq!(normalize_runtime_name("standalone"), "native");
|
assert!(runtime_aliases_contain(&aliases, "rb"));
|
||||||
|
assert!(!runtime_aliases_contain(&aliases, "python"));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_normalize_runtime_name_passthrough() {
|
fn test_runtime_aliases_contain_case_insensitive() {
|
||||||
assert_eq!(normalize_runtime_name("custom_runtime"), "custom_runtime");
|
let aliases = vec!["ruby".to_string(), "rb".to_string()];
|
||||||
}
|
assert!(runtime_aliases_contain(&aliases, "Ruby"));
|
||||||
|
assert!(runtime_aliases_contain(&aliases, "RB"));
|
||||||
#[test]
|
|
||||||
fn test_runtime_matches_filter() {
|
|
||||||
// Node.js DB name lowercased vs worker filter "node"
|
|
||||||
assert!(runtime_matches_filter("node.js", "node"));
|
|
||||||
assert!(runtime_matches_filter("node", "nodejs"));
|
|
||||||
assert!(runtime_matches_filter("nodejs", "node.js"));
|
|
||||||
// Exact match
|
|
||||||
assert!(runtime_matches_filter("shell", "shell"));
|
|
||||||
// No match
|
|
||||||
assert!(!runtime_matches_filter("python", "node"));
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_runtime_matches_filter_case_insensitive() {
|
|
||||||
// Database stores capitalized names (e.g., "Node.js", "Python")
|
|
||||||
// Worker capabilities store lowercase (e.g., "node", "python")
|
|
||||||
assert!(runtime_matches_filter("Node.js", "node"));
|
|
||||||
assert!(runtime_matches_filter("node", "Node.js"));
|
|
||||||
assert!(runtime_matches_filter("Python", "python"));
|
|
||||||
assert!(runtime_matches_filter("python", "Python"));
|
|
||||||
assert!(runtime_matches_filter("Shell", "shell"));
|
|
||||||
assert!(runtime_matches_filter("NODEJS", "node"));
|
|
||||||
assert!(!runtime_matches_filter("Python", "node"));
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_runtime_in_filter() {
|
|
||||||
let filter = vec!["shell".to_string(), "node".to_string()];
|
|
||||||
assert!(runtime_in_filter("shell", &filter));
|
|
||||||
assert!(runtime_in_filter("node.js", &filter));
|
|
||||||
assert!(runtime_in_filter("nodejs", &filter));
|
|
||||||
assert!(!runtime_in_filter("python", &filter));
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
|
|||||||
@@ -13,6 +13,7 @@
|
|||||||
use crate::error::{Error, Result};
|
use crate::error::{Error, Result};
|
||||||
use crate::repositories::action::{ActionRepository, CreateActionInput, UpdateActionInput};
|
use crate::repositories::action::{ActionRepository, CreateActionInput, UpdateActionInput};
|
||||||
use crate::repositories::workflow::{CreateWorkflowDefinitionInput, UpdateWorkflowDefinitionInput};
|
use crate::repositories::workflow::{CreateWorkflowDefinitionInput, UpdateWorkflowDefinitionInput};
|
||||||
|
use crate::repositories::Patch;
|
||||||
use crate::repositories::{
|
use crate::repositories::{
|
||||||
Create, Delete, FindByRef, PackRepository, Update, WorkflowDefinitionRepository,
|
Create, Delete, FindByRef, PackRepository, Update, WorkflowDefinitionRepository,
|
||||||
};
|
};
|
||||||
@@ -270,7 +271,7 @@ impl WorkflowRegistrar {
|
|||||||
pack: pack_id,
|
pack: pack_id,
|
||||||
pack_ref: pack_ref.to_string(),
|
pack_ref: pack_ref.to_string(),
|
||||||
label: effective_label.to_string(),
|
label: effective_label.to_string(),
|
||||||
description: workflow.description.clone().unwrap_or_default(),
|
description: workflow.description.clone(),
|
||||||
entrypoint,
|
entrypoint,
|
||||||
runtime: None,
|
runtime: None,
|
||||||
runtime_version_constraint: None,
|
runtime_version_constraint: None,
|
||||||
@@ -317,7 +318,10 @@ impl WorkflowRegistrar {
|
|||||||
// Update the existing companion action to stay in sync
|
// Update the existing companion action to stay in sync
|
||||||
let update_input = UpdateActionInput {
|
let update_input = UpdateActionInput {
|
||||||
label: Some(effective_label.to_string()),
|
label: Some(effective_label.to_string()),
|
||||||
description: workflow.description.clone(),
|
description: Some(match workflow.description.clone() {
|
||||||
|
Some(description) => Patch::Set(description),
|
||||||
|
None => Patch::Clear,
|
||||||
|
}),
|
||||||
entrypoint: Some(format!("workflows/{}.workflow.yaml", workflow_name)),
|
entrypoint: Some(format!("workflows/{}.workflow.yaml", workflow_name)),
|
||||||
runtime: None,
|
runtime: None,
|
||||||
runtime_version_constraint: None,
|
runtime_version_constraint: None,
|
||||||
|
|||||||
@@ -66,7 +66,10 @@ async fn test_create_action_with_optional_fields() {
|
|||||||
.unwrap();
|
.unwrap();
|
||||||
|
|
||||||
assert_eq!(action.label, "Full Test Action");
|
assert_eq!(action.label, "Full Test Action");
|
||||||
assert_eq!(action.description, "Action with all optional fields");
|
assert_eq!(
|
||||||
|
action.description,
|
||||||
|
Some("Action with all optional fields".to_string())
|
||||||
|
);
|
||||||
assert_eq!(action.entrypoint, "custom.py");
|
assert_eq!(action.entrypoint, "custom.py");
|
||||||
assert!(action.param_schema.is_some());
|
assert!(action.param_schema.is_some());
|
||||||
assert!(action.out_schema.is_some());
|
assert!(action.out_schema.is_some());
|
||||||
@@ -204,7 +207,9 @@ async fn test_update_action() {
|
|||||||
|
|
||||||
let update = UpdateActionInput {
|
let update = UpdateActionInput {
|
||||||
label: Some("Updated Label".to_string()),
|
label: Some("Updated Label".to_string()),
|
||||||
description: Some("Updated description".to_string()),
|
description: Some(attune_common::repositories::Patch::Set(
|
||||||
|
"Updated description".to_string(),
|
||||||
|
)),
|
||||||
..Default::default()
|
..Default::default()
|
||||||
};
|
};
|
||||||
|
|
||||||
@@ -214,7 +219,7 @@ async fn test_update_action() {
|
|||||||
|
|
||||||
assert_eq!(updated.id, action.id);
|
assert_eq!(updated.id, action.id);
|
||||||
assert_eq!(updated.label, "Updated Label");
|
assert_eq!(updated.label, "Updated Label");
|
||||||
assert_eq!(updated.description, "Updated description");
|
assert_eq!(updated.description, Some("Updated description".to_string()));
|
||||||
assert_eq!(updated.entrypoint, action.entrypoint); // Unchanged
|
assert_eq!(updated.entrypoint, action.entrypoint); // Unchanged
|
||||||
assert!(updated.updated > original_updated);
|
assert!(updated.updated > original_updated);
|
||||||
}
|
}
|
||||||
@@ -338,7 +343,7 @@ async fn test_action_foreign_key_constraint() {
|
|||||||
pack: 99999,
|
pack: 99999,
|
||||||
pack_ref: "nonexistent.pack".to_string(),
|
pack_ref: "nonexistent.pack".to_string(),
|
||||||
label: "Test Action".to_string(),
|
label: "Test Action".to_string(),
|
||||||
description: "Test".to_string(),
|
description: Some("Test".to_string()),
|
||||||
entrypoint: "main.py".to_string(),
|
entrypoint: "main.py".to_string(),
|
||||||
runtime: None,
|
runtime: None,
|
||||||
runtime_version_constraint: None,
|
runtime_version_constraint: None,
|
||||||
|
|||||||
@@ -49,7 +49,7 @@ async fn test_create_enforcement_minimal() {
|
|||||||
pack: pack.id,
|
pack: pack.id,
|
||||||
pack_ref: pack.r#ref.clone(),
|
pack_ref: pack.r#ref.clone(),
|
||||||
label: "Test Rule".to_string(),
|
label: "Test Rule".to_string(),
|
||||||
description: "Test".to_string(),
|
description: Some("Test".to_string()),
|
||||||
action: action.id,
|
action: action.id,
|
||||||
action_ref: action.r#ref.clone(),
|
action_ref: action.r#ref.clone(),
|
||||||
trigger: trigger.id,
|
trigger: trigger.id,
|
||||||
@@ -121,7 +121,7 @@ async fn test_create_enforcement_with_event() {
|
|||||||
pack: pack.id,
|
pack: pack.id,
|
||||||
pack_ref: pack.r#ref.clone(),
|
pack_ref: pack.r#ref.clone(),
|
||||||
label: "Test Rule".to_string(),
|
label: "Test Rule".to_string(),
|
||||||
description: "Test".to_string(),
|
description: Some("Test".to_string()),
|
||||||
action: action.id,
|
action: action.id,
|
||||||
action_ref: action.r#ref.clone(),
|
action_ref: action.r#ref.clone(),
|
||||||
trigger: trigger.id,
|
trigger: trigger.id,
|
||||||
@@ -189,7 +189,7 @@ async fn test_create_enforcement_with_conditions() {
|
|||||||
pack: pack.id,
|
pack: pack.id,
|
||||||
pack_ref: pack.r#ref.clone(),
|
pack_ref: pack.r#ref.clone(),
|
||||||
label: "Test Rule".to_string(),
|
label: "Test Rule".to_string(),
|
||||||
description: "Test".to_string(),
|
description: Some("Test".to_string()),
|
||||||
action: action.id,
|
action: action.id,
|
||||||
action_ref: action.r#ref.clone(),
|
action_ref: action.r#ref.clone(),
|
||||||
trigger: trigger.id,
|
trigger: trigger.id,
|
||||||
@@ -255,7 +255,7 @@ async fn test_create_enforcement_with_any_condition() {
|
|||||||
pack: pack.id,
|
pack: pack.id,
|
||||||
pack_ref: pack.r#ref.clone(),
|
pack_ref: pack.r#ref.clone(),
|
||||||
label: "Test Rule".to_string(),
|
label: "Test Rule".to_string(),
|
||||||
description: "Test".to_string(),
|
description: Some("Test".to_string()),
|
||||||
action: action.id,
|
action: action.id,
|
||||||
action_ref: action.r#ref.clone(),
|
action_ref: action.r#ref.clone(),
|
||||||
trigger: trigger.id,
|
trigger: trigger.id,
|
||||||
@@ -397,7 +397,7 @@ async fn test_find_enforcement_by_id() {
|
|||||||
pack: pack.id,
|
pack: pack.id,
|
||||||
pack_ref: pack.r#ref.clone(),
|
pack_ref: pack.r#ref.clone(),
|
||||||
label: "Test Rule".to_string(),
|
label: "Test Rule".to_string(),
|
||||||
description: "Test".to_string(),
|
description: Some("Test".to_string()),
|
||||||
action: action.id,
|
action: action.id,
|
||||||
action_ref: action.r#ref.clone(),
|
action_ref: action.r#ref.clone(),
|
||||||
trigger: trigger.id,
|
trigger: trigger.id,
|
||||||
@@ -471,7 +471,7 @@ async fn test_get_enforcement_by_id() {
|
|||||||
pack: pack.id,
|
pack: pack.id,
|
||||||
pack_ref: pack.r#ref.clone(),
|
pack_ref: pack.r#ref.clone(),
|
||||||
label: "Test Rule".to_string(),
|
label: "Test Rule".to_string(),
|
||||||
description: "Test".to_string(),
|
description: Some("Test".to_string()),
|
||||||
action: action.id,
|
action: action.id,
|
||||||
action_ref: action.r#ref.clone(),
|
action_ref: action.r#ref.clone(),
|
||||||
trigger: trigger.id,
|
trigger: trigger.id,
|
||||||
@@ -552,7 +552,7 @@ async fn test_list_enforcements() {
|
|||||||
pack: pack.id,
|
pack: pack.id,
|
||||||
pack_ref: pack.r#ref.clone(),
|
pack_ref: pack.r#ref.clone(),
|
||||||
label: "Test Rule".to_string(),
|
label: "Test Rule".to_string(),
|
||||||
description: "Test".to_string(),
|
description: Some("Test".to_string()),
|
||||||
action: action.id,
|
action: action.id,
|
||||||
action_ref: action.r#ref.clone(),
|
action_ref: action.r#ref.clone(),
|
||||||
trigger: trigger.id,
|
trigger: trigger.id,
|
||||||
@@ -624,7 +624,7 @@ async fn test_update_enforcement_status() {
|
|||||||
pack: pack.id,
|
pack: pack.id,
|
||||||
pack_ref: pack.r#ref.clone(),
|
pack_ref: pack.r#ref.clone(),
|
||||||
label: "Test Rule".to_string(),
|
label: "Test Rule".to_string(),
|
||||||
description: "Test".to_string(),
|
description: Some("Test".to_string()),
|
||||||
action: action.id,
|
action: action.id,
|
||||||
action_ref: action.r#ref.clone(),
|
action_ref: action.r#ref.clone(),
|
||||||
trigger: trigger.id,
|
trigger: trigger.id,
|
||||||
@@ -690,7 +690,7 @@ async fn test_update_enforcement_status_transitions() {
|
|||||||
pack: pack.id,
|
pack: pack.id,
|
||||||
pack_ref: pack.r#ref.clone(),
|
pack_ref: pack.r#ref.clone(),
|
||||||
label: "Test Rule".to_string(),
|
label: "Test Rule".to_string(),
|
||||||
description: "Test".to_string(),
|
description: Some("Test".to_string()),
|
||||||
action: action.id,
|
action: action.id,
|
||||||
action_ref: action.r#ref.clone(),
|
action_ref: action.r#ref.clone(),
|
||||||
trigger: trigger.id,
|
trigger: trigger.id,
|
||||||
@@ -769,7 +769,7 @@ async fn test_update_enforcement_payload() {
|
|||||||
pack: pack.id,
|
pack: pack.id,
|
||||||
pack_ref: pack.r#ref.clone(),
|
pack_ref: pack.r#ref.clone(),
|
||||||
label: "Test Rule".to_string(),
|
label: "Test Rule".to_string(),
|
||||||
description: "Test".to_string(),
|
description: Some("Test".to_string()),
|
||||||
action: action.id,
|
action: action.id,
|
||||||
action_ref: action.r#ref.clone(),
|
action_ref: action.r#ref.clone(),
|
||||||
trigger: trigger.id,
|
trigger: trigger.id,
|
||||||
@@ -832,7 +832,7 @@ async fn test_update_enforcement_both_fields() {
|
|||||||
pack: pack.id,
|
pack: pack.id,
|
||||||
pack_ref: pack.r#ref.clone(),
|
pack_ref: pack.r#ref.clone(),
|
||||||
label: "Test Rule".to_string(),
|
label: "Test Rule".to_string(),
|
||||||
description: "Test".to_string(),
|
description: Some("Test".to_string()),
|
||||||
action: action.id,
|
action: action.id,
|
||||||
action_ref: action.r#ref.clone(),
|
action_ref: action.r#ref.clone(),
|
||||||
trigger: trigger.id,
|
trigger: trigger.id,
|
||||||
@@ -896,7 +896,7 @@ async fn test_update_enforcement_no_changes() {
|
|||||||
pack: pack.id,
|
pack: pack.id,
|
||||||
pack_ref: pack.r#ref.clone(),
|
pack_ref: pack.r#ref.clone(),
|
||||||
label: "Test Rule".to_string(),
|
label: "Test Rule".to_string(),
|
||||||
description: "Test".to_string(),
|
description: Some("Test".to_string()),
|
||||||
action: action.id,
|
action: action.id,
|
||||||
action_ref: action.r#ref.clone(),
|
action_ref: action.r#ref.clone(),
|
||||||
trigger: trigger.id,
|
trigger: trigger.id,
|
||||||
@@ -981,7 +981,7 @@ async fn test_delete_enforcement() {
|
|||||||
pack: pack.id,
|
pack: pack.id,
|
||||||
pack_ref: pack.r#ref.clone(),
|
pack_ref: pack.r#ref.clone(),
|
||||||
label: "Test Rule".to_string(),
|
label: "Test Rule".to_string(),
|
||||||
description: "Test".to_string(),
|
description: Some("Test".to_string()),
|
||||||
action: action.id,
|
action: action.id,
|
||||||
action_ref: action.r#ref.clone(),
|
action_ref: action.r#ref.clone(),
|
||||||
trigger: trigger.id,
|
trigger: trigger.id,
|
||||||
@@ -1056,7 +1056,7 @@ async fn test_find_enforcements_by_rule() {
|
|||||||
pack: pack.id,
|
pack: pack.id,
|
||||||
pack_ref: pack.r#ref.clone(),
|
pack_ref: pack.r#ref.clone(),
|
||||||
label: "Rule 1".to_string(),
|
label: "Rule 1".to_string(),
|
||||||
description: "Test".to_string(),
|
description: Some("Test".to_string()),
|
||||||
action: action.id,
|
action: action.id,
|
||||||
action_ref: action.r#ref.clone(),
|
action_ref: action.r#ref.clone(),
|
||||||
trigger: trigger.id,
|
trigger: trigger.id,
|
||||||
@@ -1078,7 +1078,7 @@ async fn test_find_enforcements_by_rule() {
|
|||||||
pack: pack.id,
|
pack: pack.id,
|
||||||
pack_ref: pack.r#ref.clone(),
|
pack_ref: pack.r#ref.clone(),
|
||||||
label: "Rule 2".to_string(),
|
label: "Rule 2".to_string(),
|
||||||
description: "Test".to_string(),
|
description: Some("Test".to_string()),
|
||||||
action: action.id,
|
action: action.id,
|
||||||
action_ref: action.r#ref.clone(),
|
action_ref: action.r#ref.clone(),
|
||||||
trigger: trigger.id,
|
trigger: trigger.id,
|
||||||
@@ -1149,7 +1149,7 @@ async fn test_find_enforcements_by_status() {
|
|||||||
pack: pack.id,
|
pack: pack.id,
|
||||||
pack_ref: pack.r#ref.clone(),
|
pack_ref: pack.r#ref.clone(),
|
||||||
label: "Test Rule".to_string(),
|
label: "Test Rule".to_string(),
|
||||||
description: "Test".to_string(),
|
description: Some("Test".to_string()),
|
||||||
action: action.id,
|
action: action.id,
|
||||||
action_ref: action.r#ref.clone(),
|
action_ref: action.r#ref.clone(),
|
||||||
trigger: trigger.id,
|
trigger: trigger.id,
|
||||||
@@ -1239,7 +1239,7 @@ async fn test_find_enforcements_by_event() {
|
|||||||
pack: pack.id,
|
pack: pack.id,
|
||||||
pack_ref: pack.r#ref.clone(),
|
pack_ref: pack.r#ref.clone(),
|
||||||
label: "Test Rule".to_string(),
|
label: "Test Rule".to_string(),
|
||||||
description: "Test".to_string(),
|
description: Some("Test".to_string()),
|
||||||
action: action.id,
|
action: action.id,
|
||||||
action_ref: action.r#ref.clone(),
|
action_ref: action.r#ref.clone(),
|
||||||
trigger: trigger.id,
|
trigger: trigger.id,
|
||||||
@@ -1324,7 +1324,7 @@ async fn test_delete_rule_sets_enforcement_rule_to_null() {
|
|||||||
pack: pack.id,
|
pack: pack.id,
|
||||||
pack_ref: pack.r#ref.clone(),
|
pack_ref: pack.r#ref.clone(),
|
||||||
label: "Test Rule".to_string(),
|
label: "Test Rule".to_string(),
|
||||||
description: "Test".to_string(),
|
description: Some("Test".to_string()),
|
||||||
action: action.id,
|
action: action.id,
|
||||||
action_ref: action.r#ref.clone(),
|
action_ref: action.r#ref.clone(),
|
||||||
trigger: trigger.id,
|
trigger: trigger.id,
|
||||||
@@ -1390,7 +1390,7 @@ async fn test_enforcement_resolved_at_lifecycle() {
|
|||||||
pack: pack.id,
|
pack: pack.id,
|
||||||
pack_ref: pack.r#ref.clone(),
|
pack_ref: pack.r#ref.clone(),
|
||||||
label: "Test Rule".to_string(),
|
label: "Test Rule".to_string(),
|
||||||
description: "Test".to_string(),
|
description: Some("Test".to_string()),
|
||||||
action: action.id,
|
action: action.id,
|
||||||
action_ref: action.r#ref.clone(),
|
action_ref: action.r#ref.clone(),
|
||||||
trigger: trigger.id,
|
trigger: trigger.id,
|
||||||
|
|||||||
@@ -449,7 +449,7 @@ async fn test_delete_event_enforcement_retains_event_id() {
|
|||||||
pack: pack.id,
|
pack: pack.id,
|
||||||
pack_ref: pack.r#ref.clone(),
|
pack_ref: pack.r#ref.clone(),
|
||||||
label: "Test Rule".to_string(),
|
label: "Test Rule".to_string(),
|
||||||
description: "Test".to_string(),
|
description: Some("Test".to_string()),
|
||||||
action: action.id,
|
action: action.id,
|
||||||
action_ref: action.r#ref.clone(),
|
action_ref: action.r#ref.clone(),
|
||||||
trigger: trigger.id,
|
trigger: trigger.id,
|
||||||
|
|||||||
@@ -454,7 +454,7 @@ impl ActionFixture {
|
|||||||
pack_ref: self.pack_ref,
|
pack_ref: self.pack_ref,
|
||||||
r#ref: self.r#ref,
|
r#ref: self.r#ref,
|
||||||
label: self.label,
|
label: self.label,
|
||||||
description: self.description,
|
description: Some(self.description),
|
||||||
entrypoint: self.entrypoint,
|
entrypoint: self.entrypoint,
|
||||||
runtime: self.runtime,
|
runtime: self.runtime,
|
||||||
runtime_version_constraint: None,
|
runtime_version_constraint: None,
|
||||||
@@ -961,9 +961,12 @@ impl RuntimeFixture {
|
|||||||
pack_ref: self.pack_ref,
|
pack_ref: self.pack_ref,
|
||||||
description: self.description,
|
description: self.description,
|
||||||
name: self.name,
|
name: self.name,
|
||||||
|
aliases: vec![],
|
||||||
distributions: self.distributions,
|
distributions: self.distributions,
|
||||||
installation: self.installation,
|
installation: self.installation,
|
||||||
execution_config: self.execution_config,
|
execution_config: self.execution_config,
|
||||||
|
auto_detected: false,
|
||||||
|
detection_config: serde_json::json!({}),
|
||||||
};
|
};
|
||||||
|
|
||||||
RuntimeRepository::create(pool, input).await
|
RuntimeRepository::create(pool, input).await
|
||||||
@@ -1085,7 +1088,7 @@ impl SensorFixture {
|
|||||||
pack: self.pack_id,
|
pack: self.pack_id,
|
||||||
pack_ref: self.pack_ref,
|
pack_ref: self.pack_ref,
|
||||||
label: self.label,
|
label: self.label,
|
||||||
description: self.description,
|
description: Some(self.description),
|
||||||
entrypoint: self.entrypoint,
|
entrypoint: self.entrypoint,
|
||||||
runtime: self.runtime_id,
|
runtime: self.runtime_id,
|
||||||
runtime_ref: self.runtime_ref,
|
runtime_ref: self.runtime_ref,
|
||||||
|
|||||||
@@ -219,6 +219,7 @@ async fn test_update_identity() {
|
|||||||
display_name: Some("Updated Name".to_string()),
|
display_name: Some("Updated Name".to_string()),
|
||||||
password_hash: None,
|
password_hash: None,
|
||||||
attributes: Some(json!({"key": "updated", "new_key": "new_value"})),
|
attributes: Some(json!({"key": "updated", "new_key": "new_value"})),
|
||||||
|
frozen: None,
|
||||||
};
|
};
|
||||||
|
|
||||||
let updated = IdentityRepository::update(&pool, identity.id, update_input)
|
let updated = IdentityRepository::update(&pool, identity.id, update_input)
|
||||||
@@ -252,6 +253,7 @@ async fn test_update_identity_partial() {
|
|||||||
display_name: Some("Only Display Name Changed".to_string()),
|
display_name: Some("Only Display Name Changed".to_string()),
|
||||||
password_hash: None,
|
password_hash: None,
|
||||||
attributes: None,
|
attributes: None,
|
||||||
|
frozen: None,
|
||||||
};
|
};
|
||||||
|
|
||||||
let updated = IdentityRepository::update(&pool, identity.id, update_input)
|
let updated = IdentityRepository::update(&pool, identity.id, update_input)
|
||||||
@@ -274,6 +276,7 @@ async fn test_update_identity_not_found() {
|
|||||||
display_name: Some("Updated Name".to_string()),
|
display_name: Some("Updated Name".to_string()),
|
||||||
password_hash: None,
|
password_hash: None,
|
||||||
attributes: None,
|
attributes: None,
|
||||||
|
frozen: None,
|
||||||
};
|
};
|
||||||
|
|
||||||
let result = IdentityRepository::update(&pool, 999999, update_input).await;
|
let result = IdentityRepository::update(&pool, 999999, update_input).await;
|
||||||
@@ -380,6 +383,7 @@ async fn test_identity_updated_changes_on_update() {
|
|||||||
display_name: Some("Updated".to_string()),
|
display_name: Some("Updated".to_string()),
|
||||||
password_hash: None,
|
password_hash: None,
|
||||||
attributes: None,
|
attributes: None,
|
||||||
|
frozen: None,
|
||||||
};
|
};
|
||||||
|
|
||||||
let updated = IdentityRepository::update(&pool, identity.id, update_input)
|
let updated = IdentityRepository::update(&pool, identity.id, update_input)
|
||||||
|
|||||||
@@ -64,6 +64,7 @@ impl RuntimeFixture {
|
|||||||
pack_ref: None,
|
pack_ref: None,
|
||||||
description: Some(format!("Test runtime {}", seq)),
|
description: Some(format!("Test runtime {}", seq)),
|
||||||
name,
|
name,
|
||||||
|
aliases: vec![],
|
||||||
distributions: json!({
|
distributions: json!({
|
||||||
"linux": { "supported": true, "versions": ["ubuntu20.04", "ubuntu22.04"] },
|
"linux": { "supported": true, "versions": ["ubuntu20.04", "ubuntu22.04"] },
|
||||||
"darwin": { "supported": true, "versions": ["12", "13"] }
|
"darwin": { "supported": true, "versions": ["12", "13"] }
|
||||||
@@ -79,6 +80,8 @@ impl RuntimeFixture {
|
|||||||
"file_extension": ".py"
|
"file_extension": ".py"
|
||||||
}
|
}
|
||||||
}),
|
}),
|
||||||
|
auto_detected: false,
|
||||||
|
detection_config: json!({}),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -93,6 +96,7 @@ impl RuntimeFixture {
|
|||||||
pack_ref: None,
|
pack_ref: None,
|
||||||
description: None,
|
description: None,
|
||||||
name,
|
name,
|
||||||
|
aliases: vec![],
|
||||||
distributions: json!({}),
|
distributions: json!({}),
|
||||||
installation: None,
|
installation: None,
|
||||||
execution_config: json!({
|
execution_config: json!({
|
||||||
@@ -102,6 +106,8 @@ impl RuntimeFixture {
|
|||||||
"file_extension": ".sh"
|
"file_extension": ".sh"
|
||||||
}
|
}
|
||||||
}),
|
}),
|
||||||
|
auto_detected: false,
|
||||||
|
detection_config: json!({}),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -268,6 +274,7 @@ async fn test_update_runtime() {
|
|||||||
"method": "npm"
|
"method": "npm"
|
||||||
}))),
|
}))),
|
||||||
execution_config: None,
|
execution_config: None,
|
||||||
|
..Default::default()
|
||||||
};
|
};
|
||||||
|
|
||||||
let updated = RuntimeRepository::update(&pool, created.id, update_input.clone())
|
let updated = RuntimeRepository::update(&pool, created.id, update_input.clone())
|
||||||
@@ -299,6 +306,7 @@ async fn test_update_runtime_partial() {
|
|||||||
distributions: None,
|
distributions: None,
|
||||||
installation: None,
|
installation: None,
|
||||||
execution_config: None,
|
execution_config: None,
|
||||||
|
..Default::default()
|
||||||
};
|
};
|
||||||
|
|
||||||
let updated = RuntimeRepository::update(&pool, created.id, update_input.clone())
|
let updated = RuntimeRepository::update(&pool, created.id, update_input.clone())
|
||||||
|
|||||||
@@ -574,6 +574,7 @@ async fn test_worker_with_runtime() {
|
|||||||
pack_ref: None,
|
pack_ref: None,
|
||||||
description: Some("Test runtime".to_string()),
|
description: Some("Test runtime".to_string()),
|
||||||
name: "test_runtime".to_string(),
|
name: "test_runtime".to_string(),
|
||||||
|
aliases: vec![],
|
||||||
distributions: json!({}),
|
distributions: json!({}),
|
||||||
installation: None,
|
installation: None,
|
||||||
execution_config: json!({
|
execution_config: json!({
|
||||||
@@ -583,6 +584,8 @@ async fn test_worker_with_runtime() {
|
|||||||
"file_extension": ".sh"
|
"file_extension": ".sh"
|
||||||
}
|
}
|
||||||
}),
|
}),
|
||||||
|
auto_detected: false,
|
||||||
|
detection_config: json!({}),
|
||||||
};
|
};
|
||||||
|
|
||||||
let runtime = RuntimeRepository::create(&pool, runtime_input)
|
let runtime = RuntimeRepository::create(&pool, runtime_input)
|
||||||
|
|||||||
@@ -8,7 +8,7 @@ mod helpers;
|
|||||||
use attune_common::{
|
use attune_common::{
|
||||||
repositories::{
|
repositories::{
|
||||||
rule::{CreateRuleInput, RuleRepository, UpdateRuleInput},
|
rule::{CreateRuleInput, RuleRepository, UpdateRuleInput},
|
||||||
Create, Delete, FindById, FindByRef, List, Update,
|
Create, Delete, FindById, FindByRef, List, Patch, Update,
|
||||||
},
|
},
|
||||||
Error,
|
Error,
|
||||||
};
|
};
|
||||||
@@ -48,7 +48,7 @@ async fn test_create_rule() {
|
|||||||
pack: pack.id,
|
pack: pack.id,
|
||||||
pack_ref: pack.r#ref.clone(),
|
pack_ref: pack.r#ref.clone(),
|
||||||
label: "Test Rule".to_string(),
|
label: "Test Rule".to_string(),
|
||||||
description: "A test rule".to_string(),
|
description: Some("A test rule".to_string()),
|
||||||
action: action.id,
|
action: action.id,
|
||||||
action_ref: action.r#ref.clone(),
|
action_ref: action.r#ref.clone(),
|
||||||
trigger: trigger.id,
|
trigger: trigger.id,
|
||||||
@@ -66,7 +66,7 @@ async fn test_create_rule() {
|
|||||||
assert_eq!(rule.pack, pack.id);
|
assert_eq!(rule.pack, pack.id);
|
||||||
assert_eq!(rule.pack_ref, pack.r#ref);
|
assert_eq!(rule.pack_ref, pack.r#ref);
|
||||||
assert_eq!(rule.label, "Test Rule");
|
assert_eq!(rule.label, "Test Rule");
|
||||||
assert_eq!(rule.description, "A test rule");
|
assert_eq!(rule.description, Some("A test rule".to_string()));
|
||||||
assert_eq!(rule.action, Some(action.id));
|
assert_eq!(rule.action, Some(action.id));
|
||||||
assert_eq!(rule.action_ref, action.r#ref);
|
assert_eq!(rule.action_ref, action.r#ref);
|
||||||
assert_eq!(rule.trigger, Some(trigger.id));
|
assert_eq!(rule.trigger, Some(trigger.id));
|
||||||
@@ -105,7 +105,7 @@ async fn test_create_rule_disabled() {
|
|||||||
pack: pack.id,
|
pack: pack.id,
|
||||||
pack_ref: pack.r#ref.clone(),
|
pack_ref: pack.r#ref.clone(),
|
||||||
label: "Disabled Rule".to_string(),
|
label: "Disabled Rule".to_string(),
|
||||||
description: "A disabled rule".to_string(),
|
description: Some("A disabled rule".to_string()),
|
||||||
action: action.id,
|
action: action.id,
|
||||||
action_ref: action.r#ref.clone(),
|
action_ref: action.r#ref.clone(),
|
||||||
trigger: trigger.id,
|
trigger: trigger.id,
|
||||||
@@ -155,7 +155,7 @@ async fn test_create_rule_with_complex_conditions() {
|
|||||||
pack: pack.id,
|
pack: pack.id,
|
||||||
pack_ref: pack.r#ref.clone(),
|
pack_ref: pack.r#ref.clone(),
|
||||||
label: "Complex Rule".to_string(),
|
label: "Complex Rule".to_string(),
|
||||||
description: "Rule with complex conditions".to_string(),
|
description: Some("Rule with complex conditions".to_string()),
|
||||||
action: action.id,
|
action: action.id,
|
||||||
action_ref: action.r#ref.clone(),
|
action_ref: action.r#ref.clone(),
|
||||||
trigger: trigger.id,
|
trigger: trigger.id,
|
||||||
@@ -200,7 +200,7 @@ async fn test_create_rule_duplicate_ref() {
|
|||||||
pack: pack.id,
|
pack: pack.id,
|
||||||
pack_ref: pack.r#ref.clone(),
|
pack_ref: pack.r#ref.clone(),
|
||||||
label: "First Rule".to_string(),
|
label: "First Rule".to_string(),
|
||||||
description: "First".to_string(),
|
description: Some("First".to_string()),
|
||||||
action: action.id,
|
action: action.id,
|
||||||
action_ref: action.r#ref.clone(),
|
action_ref: action.r#ref.clone(),
|
||||||
trigger: trigger.id,
|
trigger: trigger.id,
|
||||||
@@ -220,7 +220,7 @@ async fn test_create_rule_duplicate_ref() {
|
|||||||
pack: pack.id,
|
pack: pack.id,
|
||||||
pack_ref: pack.r#ref.clone(),
|
pack_ref: pack.r#ref.clone(),
|
||||||
label: "Second Rule".to_string(),
|
label: "Second Rule".to_string(),
|
||||||
description: "Second".to_string(),
|
description: Some("Second".to_string()),
|
||||||
action: action.id,
|
action: action.id,
|
||||||
action_ref: action.r#ref.clone(),
|
action_ref: action.r#ref.clone(),
|
||||||
trigger: trigger.id,
|
trigger: trigger.id,
|
||||||
@@ -274,7 +274,7 @@ async fn test_create_rule_invalid_ref_format_uppercase() {
|
|||||||
pack: pack.id,
|
pack: pack.id,
|
||||||
pack_ref: pack.r#ref.clone(),
|
pack_ref: pack.r#ref.clone(),
|
||||||
label: "Upper Rule".to_string(),
|
label: "Upper Rule".to_string(),
|
||||||
description: "Invalid uppercase ref".to_string(),
|
description: Some("Invalid uppercase ref".to_string()),
|
||||||
action: action.id,
|
action: action.id,
|
||||||
action_ref: action.r#ref.clone(),
|
action_ref: action.r#ref.clone(),
|
||||||
trigger: trigger.id,
|
trigger: trigger.id,
|
||||||
@@ -316,7 +316,7 @@ async fn test_create_rule_invalid_ref_format_no_dot() {
|
|||||||
pack: pack.id,
|
pack: pack.id,
|
||||||
pack_ref: pack.r#ref.clone(),
|
pack_ref: pack.r#ref.clone(),
|
||||||
label: "No Dot Rule".to_string(),
|
label: "No Dot Rule".to_string(),
|
||||||
description: "Invalid ref without dot".to_string(),
|
description: Some("Invalid ref without dot".to_string()),
|
||||||
action: action.id,
|
action: action.id,
|
||||||
action_ref: action.r#ref.clone(),
|
action_ref: action.r#ref.clone(),
|
||||||
trigger: trigger.id,
|
trigger: trigger.id,
|
||||||
@@ -362,7 +362,7 @@ async fn test_find_rule_by_id() {
|
|||||||
pack: pack.id,
|
pack: pack.id,
|
||||||
pack_ref: pack.r#ref.clone(),
|
pack_ref: pack.r#ref.clone(),
|
||||||
label: "Find Rule".to_string(),
|
label: "Find Rule".to_string(),
|
||||||
description: "Rule to find".to_string(),
|
description: Some("Rule to find".to_string()),
|
||||||
action: action.id,
|
action: action.id,
|
||||||
action_ref: action.r#ref.clone(),
|
action_ref: action.r#ref.clone(),
|
||||||
trigger: trigger.id,
|
trigger: trigger.id,
|
||||||
@@ -422,7 +422,7 @@ async fn test_find_rule_by_ref() {
|
|||||||
pack: pack.id,
|
pack: pack.id,
|
||||||
pack_ref: pack.r#ref.clone(),
|
pack_ref: pack.r#ref.clone(),
|
||||||
label: "Find By Ref Rule".to_string(),
|
label: "Find By Ref Rule".to_string(),
|
||||||
description: "Find by ref".to_string(),
|
description: Some("Find by ref".to_string()),
|
||||||
action: action.id,
|
action: action.id,
|
||||||
action_ref: action.r#ref.clone(),
|
action_ref: action.r#ref.clone(),
|
||||||
trigger: trigger.id,
|
trigger: trigger.id,
|
||||||
@@ -484,7 +484,7 @@ async fn test_list_rules() {
|
|||||||
pack: pack.id,
|
pack: pack.id,
|
||||||
pack_ref: pack.r#ref.clone(),
|
pack_ref: pack.r#ref.clone(),
|
||||||
label: format!("List Rule {}", i),
|
label: format!("List Rule {}", i),
|
||||||
description: format!("Rule {}", i),
|
description: Some(format!("Rule {}", i)),
|
||||||
action: action.id,
|
action: action.id,
|
||||||
action_ref: action.r#ref.clone(),
|
action_ref: action.r#ref.clone(),
|
||||||
trigger: trigger.id,
|
trigger: trigger.id,
|
||||||
@@ -538,7 +538,7 @@ async fn test_list_rules_ordered_by_ref() {
|
|||||||
pack: pack.id,
|
pack: pack.id,
|
||||||
pack_ref: pack.r#ref.clone(),
|
pack_ref: pack.r#ref.clone(),
|
||||||
label: name.to_string(),
|
label: name.to_string(),
|
||||||
description: name.to_string(),
|
description: Some(name.to_string()),
|
||||||
action: action.id,
|
action: action.id,
|
||||||
action_ref: action.r#ref.clone(),
|
action_ref: action.r#ref.clone(),
|
||||||
trigger: trigger.id,
|
trigger: trigger.id,
|
||||||
@@ -594,7 +594,7 @@ async fn test_update_rule_label() {
|
|||||||
pack: pack.id,
|
pack: pack.id,
|
||||||
pack_ref: pack.r#ref.clone(),
|
pack_ref: pack.r#ref.clone(),
|
||||||
label: "Original Label".to_string(),
|
label: "Original Label".to_string(),
|
||||||
description: "Original".to_string(),
|
description: Some("Original".to_string()),
|
||||||
action: action.id,
|
action: action.id,
|
||||||
action_ref: action.r#ref.clone(),
|
action_ref: action.r#ref.clone(),
|
||||||
trigger: trigger.id,
|
trigger: trigger.id,
|
||||||
@@ -618,7 +618,7 @@ async fn test_update_rule_label() {
|
|||||||
.unwrap();
|
.unwrap();
|
||||||
|
|
||||||
assert_eq!(updated.label, "Updated Label");
|
assert_eq!(updated.label, "Updated Label");
|
||||||
assert_eq!(updated.description, "Original"); // unchanged
|
assert_eq!(updated.description, Some("Original".to_string())); // unchanged
|
||||||
assert!(updated.updated > created.updated);
|
assert!(updated.updated > created.updated);
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -647,7 +647,7 @@ async fn test_update_rule_description() {
|
|||||||
pack: pack.id,
|
pack: pack.id,
|
||||||
pack_ref: pack.r#ref.clone(),
|
pack_ref: pack.r#ref.clone(),
|
||||||
label: "Test".to_string(),
|
label: "Test".to_string(),
|
||||||
description: "Old description".to_string(),
|
description: Some("Old description".to_string()),
|
||||||
action: action.id,
|
action: action.id,
|
||||||
action_ref: action.r#ref.clone(),
|
action_ref: action.r#ref.clone(),
|
||||||
trigger: trigger.id,
|
trigger: trigger.id,
|
||||||
@@ -662,7 +662,7 @@ async fn test_update_rule_description() {
|
|||||||
let created = RuleRepository::create(&pool, input).await.unwrap();
|
let created = RuleRepository::create(&pool, input).await.unwrap();
|
||||||
|
|
||||||
let update = UpdateRuleInput {
|
let update = UpdateRuleInput {
|
||||||
description: Some("New description".to_string()),
|
description: Some(Patch::Set("New description".to_string())),
|
||||||
..Default::default()
|
..Default::default()
|
||||||
};
|
};
|
||||||
|
|
||||||
@@ -670,7 +670,7 @@ async fn test_update_rule_description() {
|
|||||||
.await
|
.await
|
||||||
.unwrap();
|
.unwrap();
|
||||||
|
|
||||||
assert_eq!(updated.description, "New description");
|
assert_eq!(updated.description, Some("New description".to_string()));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[tokio::test]
|
#[tokio::test]
|
||||||
@@ -698,7 +698,7 @@ async fn test_update_rule_conditions() {
|
|||||||
pack: pack.id,
|
pack: pack.id,
|
||||||
pack_ref: pack.r#ref.clone(),
|
pack_ref: pack.r#ref.clone(),
|
||||||
label: "Test".to_string(),
|
label: "Test".to_string(),
|
||||||
description: "Test".to_string(),
|
description: Some("Test".to_string()),
|
||||||
action: action.id,
|
action: action.id,
|
||||||
action_ref: action.r#ref.clone(),
|
action_ref: action.r#ref.clone(),
|
||||||
trigger: trigger.id,
|
trigger: trigger.id,
|
||||||
@@ -750,7 +750,7 @@ async fn test_update_rule_enabled() {
|
|||||||
pack: pack.id,
|
pack: pack.id,
|
||||||
pack_ref: pack.r#ref.clone(),
|
pack_ref: pack.r#ref.clone(),
|
||||||
label: "Test".to_string(),
|
label: "Test".to_string(),
|
||||||
description: "Test".to_string(),
|
description: Some("Test".to_string()),
|
||||||
action: action.id,
|
action: action.id,
|
||||||
action_ref: action.r#ref.clone(),
|
action_ref: action.r#ref.clone(),
|
||||||
trigger: trigger.id,
|
trigger: trigger.id,
|
||||||
@@ -803,7 +803,7 @@ async fn test_update_rule_multiple_fields() {
|
|||||||
pack: pack.id,
|
pack: pack.id,
|
||||||
pack_ref: pack.r#ref.clone(),
|
pack_ref: pack.r#ref.clone(),
|
||||||
label: "Old".to_string(),
|
label: "Old".to_string(),
|
||||||
description: "Old".to_string(),
|
description: Some("Old".to_string()),
|
||||||
action: action.id,
|
action: action.id,
|
||||||
action_ref: action.r#ref.clone(),
|
action_ref: action.r#ref.clone(),
|
||||||
trigger: trigger.id,
|
trigger: trigger.id,
|
||||||
@@ -819,7 +819,7 @@ async fn test_update_rule_multiple_fields() {
|
|||||||
|
|
||||||
let update = UpdateRuleInput {
|
let update = UpdateRuleInput {
|
||||||
label: Some("New Label".to_string()),
|
label: Some("New Label".to_string()),
|
||||||
description: Some("New Description".to_string()),
|
description: Some(Patch::Set("New Description".to_string())),
|
||||||
conditions: Some(json!({"updated": true})),
|
conditions: Some(json!({"updated": true})),
|
||||||
action_params: None,
|
action_params: None,
|
||||||
trigger_params: None,
|
trigger_params: None,
|
||||||
@@ -831,7 +831,7 @@ async fn test_update_rule_multiple_fields() {
|
|||||||
.unwrap();
|
.unwrap();
|
||||||
|
|
||||||
assert_eq!(updated.label, "New Label");
|
assert_eq!(updated.label, "New Label");
|
||||||
assert_eq!(updated.description, "New Description");
|
assert_eq!(updated.description, Some("New Description".to_string()));
|
||||||
assert_eq!(updated.conditions, json!({"updated": true}));
|
assert_eq!(updated.conditions, json!({"updated": true}));
|
||||||
assert!(!updated.enabled);
|
assert!(!updated.enabled);
|
||||||
}
|
}
|
||||||
@@ -861,7 +861,7 @@ async fn test_update_rule_no_changes() {
|
|||||||
pack: pack.id,
|
pack: pack.id,
|
||||||
pack_ref: pack.r#ref.clone(),
|
pack_ref: pack.r#ref.clone(),
|
||||||
label: "Test".to_string(),
|
label: "Test".to_string(),
|
||||||
description: "Test".to_string(),
|
description: Some("Test".to_string()),
|
||||||
action: action.id,
|
action: action.id,
|
||||||
action_ref: action.r#ref.clone(),
|
action_ref: action.r#ref.clone(),
|
||||||
trigger: trigger.id,
|
trigger: trigger.id,
|
||||||
@@ -914,7 +914,7 @@ async fn test_delete_rule() {
|
|||||||
pack: pack.id,
|
pack: pack.id,
|
||||||
pack_ref: pack.r#ref.clone(),
|
pack_ref: pack.r#ref.clone(),
|
||||||
label: "To Delete".to_string(),
|
label: "To Delete".to_string(),
|
||||||
description: "Will be deleted".to_string(),
|
description: Some("Will be deleted".to_string()),
|
||||||
action: action.id,
|
action: action.id,
|
||||||
action_ref: action.r#ref.clone(),
|
action_ref: action.r#ref.clone(),
|
||||||
trigger: trigger.id,
|
trigger: trigger.id,
|
||||||
@@ -995,7 +995,7 @@ async fn test_find_rules_by_pack() {
|
|||||||
pack: pack1.id,
|
pack: pack1.id,
|
||||||
pack_ref: pack1.r#ref.clone(),
|
pack_ref: pack1.r#ref.clone(),
|
||||||
label: format!("Rule {}", i),
|
label: format!("Rule {}", i),
|
||||||
description: format!("Rule {}", i),
|
description: Some(format!("Rule {}", i)),
|
||||||
action: action1.id,
|
action: action1.id,
|
||||||
action_ref: action1.r#ref.clone(),
|
action_ref: action1.r#ref.clone(),
|
||||||
trigger: trigger1.id,
|
trigger: trigger1.id,
|
||||||
@@ -1016,7 +1016,7 @@ async fn test_find_rules_by_pack() {
|
|||||||
pack: pack2.id,
|
pack: pack2.id,
|
||||||
pack_ref: pack2.r#ref.clone(),
|
pack_ref: pack2.r#ref.clone(),
|
||||||
label: "Pack2 Rule".to_string(),
|
label: "Pack2 Rule".to_string(),
|
||||||
description: "Pack2".to_string(),
|
description: Some("Pack2".to_string()),
|
||||||
action: action2.id,
|
action: action2.id,
|
||||||
action_ref: action2.r#ref.clone(),
|
action_ref: action2.r#ref.clone(),
|
||||||
trigger: trigger2.id,
|
trigger: trigger2.id,
|
||||||
@@ -1073,7 +1073,7 @@ async fn test_find_rules_by_action() {
|
|||||||
pack: pack.id,
|
pack: pack.id,
|
||||||
pack_ref: pack.r#ref.clone(),
|
pack_ref: pack.r#ref.clone(),
|
||||||
label: format!("Action1 Rule {}", i),
|
label: format!("Action1 Rule {}", i),
|
||||||
description: "Test".to_string(),
|
description: Some("Test".to_string()),
|
||||||
action: action1.id,
|
action: action1.id,
|
||||||
action_ref: action1.r#ref.clone(),
|
action_ref: action1.r#ref.clone(),
|
||||||
trigger: trigger.id,
|
trigger: trigger.id,
|
||||||
@@ -1094,7 +1094,7 @@ async fn test_find_rules_by_action() {
|
|||||||
pack: pack.id,
|
pack: pack.id,
|
||||||
pack_ref: pack.r#ref.clone(),
|
pack_ref: pack.r#ref.clone(),
|
||||||
label: "Action2 Rule".to_string(),
|
label: "Action2 Rule".to_string(),
|
||||||
description: "Test".to_string(),
|
description: Some("Test".to_string()),
|
||||||
action: action2.id,
|
action: action2.id,
|
||||||
action_ref: action2.r#ref.clone(),
|
action_ref: action2.r#ref.clone(),
|
||||||
trigger: trigger.id,
|
trigger: trigger.id,
|
||||||
@@ -1155,7 +1155,7 @@ async fn test_find_rules_by_trigger() {
|
|||||||
pack: pack.id,
|
pack: pack.id,
|
||||||
pack_ref: pack.r#ref.clone(),
|
pack_ref: pack.r#ref.clone(),
|
||||||
label: format!("Trigger1 Rule {}", i),
|
label: format!("Trigger1 Rule {}", i),
|
||||||
description: "Test".to_string(),
|
description: Some("Test".to_string()),
|
||||||
action: action.id,
|
action: action.id,
|
||||||
action_ref: action.r#ref.clone(),
|
action_ref: action.r#ref.clone(),
|
||||||
trigger: trigger1.id,
|
trigger: trigger1.id,
|
||||||
@@ -1176,7 +1176,7 @@ async fn test_find_rules_by_trigger() {
|
|||||||
pack: pack.id,
|
pack: pack.id,
|
||||||
pack_ref: pack.r#ref.clone(),
|
pack_ref: pack.r#ref.clone(),
|
||||||
label: "Trigger2 Rule".to_string(),
|
label: "Trigger2 Rule".to_string(),
|
||||||
description: "Test".to_string(),
|
description: Some("Test".to_string()),
|
||||||
action: action.id,
|
action: action.id,
|
||||||
action_ref: action.r#ref.clone(),
|
action_ref: action.r#ref.clone(),
|
||||||
trigger: trigger2.id,
|
trigger: trigger2.id,
|
||||||
@@ -1234,7 +1234,7 @@ async fn test_find_enabled_rules() {
|
|||||||
pack: pack.id,
|
pack: pack.id,
|
||||||
pack_ref: pack.r#ref.clone(),
|
pack_ref: pack.r#ref.clone(),
|
||||||
label: format!("Enabled {}", i),
|
label: format!("Enabled {}", i),
|
||||||
description: "Test".to_string(),
|
description: Some("Test".to_string()),
|
||||||
action: action.id,
|
action: action.id,
|
||||||
action_ref: action.r#ref.clone(),
|
action_ref: action.r#ref.clone(),
|
||||||
trigger: trigger.id,
|
trigger: trigger.id,
|
||||||
@@ -1256,7 +1256,7 @@ async fn test_find_enabled_rules() {
|
|||||||
pack: pack.id,
|
pack: pack.id,
|
||||||
pack_ref: pack.r#ref.clone(),
|
pack_ref: pack.r#ref.clone(),
|
||||||
label: format!("Disabled {}", i),
|
label: format!("Disabled {}", i),
|
||||||
description: "Test".to_string(),
|
description: Some("Test".to_string()),
|
||||||
action: action.id,
|
action: action.id,
|
||||||
action_ref: action.r#ref.clone(),
|
action_ref: action.r#ref.clone(),
|
||||||
trigger: trigger.id,
|
trigger: trigger.id,
|
||||||
@@ -1312,7 +1312,7 @@ async fn test_cascade_delete_pack_deletes_rules() {
|
|||||||
pack: pack.id,
|
pack: pack.id,
|
||||||
pack_ref: pack.r#ref.clone(),
|
pack_ref: pack.r#ref.clone(),
|
||||||
label: "Cascade Rule".to_string(),
|
label: "Cascade Rule".to_string(),
|
||||||
description: "Will be cascade deleted".to_string(),
|
description: Some("Will be cascade deleted".to_string()),
|
||||||
action: action.id,
|
action: action.id,
|
||||||
action_ref: action.r#ref.clone(),
|
action_ref: action.r#ref.clone(),
|
||||||
trigger: trigger.id,
|
trigger: trigger.id,
|
||||||
@@ -1368,7 +1368,7 @@ async fn test_rule_timestamps() {
|
|||||||
pack: pack.id,
|
pack: pack.id,
|
||||||
pack_ref: pack.r#ref.clone(),
|
pack_ref: pack.r#ref.clone(),
|
||||||
label: "Timestamp Rule".to_string(),
|
label: "Timestamp Rule".to_string(),
|
||||||
description: "Test timestamps".to_string(),
|
description: Some("Test timestamps".to_string()),
|
||||||
action: action.id,
|
action: action.id,
|
||||||
action_ref: action.r#ref.clone(),
|
action_ref: action.r#ref.clone(),
|
||||||
trigger: trigger.id,
|
trigger: trigger.id,
|
||||||
|
|||||||
@@ -179,7 +179,7 @@ async fn test_create_sensor_duplicate_ref_fails() {
|
|||||||
pack: Some(pack.id),
|
pack: Some(pack.id),
|
||||||
pack_ref: Some(pack.r#ref.clone()),
|
pack_ref: Some(pack.r#ref.clone()),
|
||||||
label: "Duplicate Sensor".to_string(),
|
label: "Duplicate Sensor".to_string(),
|
||||||
description: "Test sensor".to_string(),
|
description: Some("Test sensor".to_string()),
|
||||||
entrypoint: "sensors/dup.py".to_string(),
|
entrypoint: "sensors/dup.py".to_string(),
|
||||||
runtime: runtime.id,
|
runtime: runtime.id,
|
||||||
runtime_ref: runtime.r#ref.clone(),
|
runtime_ref: runtime.r#ref.clone(),
|
||||||
@@ -235,7 +235,7 @@ async fn test_create_sensor_invalid_ref_format_fails() {
|
|||||||
pack: Some(pack.id),
|
pack: Some(pack.id),
|
||||||
pack_ref: Some(pack.r#ref.clone()),
|
pack_ref: Some(pack.r#ref.clone()),
|
||||||
label: "Invalid Sensor".to_string(),
|
label: "Invalid Sensor".to_string(),
|
||||||
description: "Test sensor".to_string(),
|
description: Some("Test sensor".to_string()),
|
||||||
entrypoint: "sensors/invalid.py".to_string(),
|
entrypoint: "sensors/invalid.py".to_string(),
|
||||||
runtime: runtime.id,
|
runtime: runtime.id,
|
||||||
runtime_ref: runtime.r#ref.clone(),
|
runtime_ref: runtime.r#ref.clone(),
|
||||||
@@ -276,7 +276,7 @@ async fn test_create_sensor_invalid_pack_fails() {
|
|||||||
pack: Some(99999), // Non-existent pack
|
pack: Some(99999), // Non-existent pack
|
||||||
pack_ref: Some("invalid".to_string()),
|
pack_ref: Some("invalid".to_string()),
|
||||||
label: "Invalid Pack Sensor".to_string(),
|
label: "Invalid Pack Sensor".to_string(),
|
||||||
description: "Test sensor".to_string(),
|
description: Some("Test sensor".to_string()),
|
||||||
entrypoint: "sensors/invalid.py".to_string(),
|
entrypoint: "sensors/invalid.py".to_string(),
|
||||||
runtime: runtime.id,
|
runtime: runtime.id,
|
||||||
runtime_ref: runtime.r#ref.clone(),
|
runtime_ref: runtime.r#ref.clone(),
|
||||||
@@ -308,7 +308,7 @@ async fn test_create_sensor_invalid_trigger_fails() {
|
|||||||
pack: None,
|
pack: None,
|
||||||
pack_ref: None,
|
pack_ref: None,
|
||||||
label: "Invalid Trigger Sensor".to_string(),
|
label: "Invalid Trigger Sensor".to_string(),
|
||||||
description: "Test sensor".to_string(),
|
description: Some("Test sensor".to_string()),
|
||||||
entrypoint: "sensors/invalid.py".to_string(),
|
entrypoint: "sensors/invalid.py".to_string(),
|
||||||
runtime: runtime.id,
|
runtime: runtime.id,
|
||||||
runtime_ref: runtime.r#ref.clone(),
|
runtime_ref: runtime.r#ref.clone(),
|
||||||
@@ -340,7 +340,7 @@ async fn test_create_sensor_invalid_runtime_fails() {
|
|||||||
pack: None,
|
pack: None,
|
||||||
pack_ref: None,
|
pack_ref: None,
|
||||||
label: "Invalid Runtime Sensor".to_string(),
|
label: "Invalid Runtime Sensor".to_string(),
|
||||||
description: "Test sensor".to_string(),
|
description: Some("Test sensor".to_string()),
|
||||||
entrypoint: "sensors/invalid.py".to_string(),
|
entrypoint: "sensors/invalid.py".to_string(),
|
||||||
runtime: 99999, // Non-existent runtime
|
runtime: 99999, // Non-existent runtime
|
||||||
runtime_ref: "invalid.runtime".to_string(),
|
runtime_ref: "invalid.runtime".to_string(),
|
||||||
@@ -728,7 +728,7 @@ async fn test_update_description() {
|
|||||||
.unwrap();
|
.unwrap();
|
||||||
|
|
||||||
let input = UpdateSensorInput {
|
let input = UpdateSensorInput {
|
||||||
description: Some("New description for the sensor".to_string()),
|
description: Some(Patch::Set("New description for the sensor".to_string())),
|
||||||
..Default::default()
|
..Default::default()
|
||||||
};
|
};
|
||||||
|
|
||||||
@@ -736,7 +736,10 @@ async fn test_update_description() {
|
|||||||
.await
|
.await
|
||||||
.unwrap();
|
.unwrap();
|
||||||
|
|
||||||
assert_eq!(updated.description, "New description for the sensor");
|
assert_eq!(
|
||||||
|
updated.description,
|
||||||
|
Some("New description for the sensor".to_string())
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[tokio::test]
|
#[tokio::test]
|
||||||
@@ -934,7 +937,7 @@ async fn test_update_multiple_fields() {
|
|||||||
|
|
||||||
let input = UpdateSensorInput {
|
let input = UpdateSensorInput {
|
||||||
label: Some("Multi Update".to_string()),
|
label: Some("Multi Update".to_string()),
|
||||||
description: Some("Updated multiple fields".to_string()),
|
description: Some(Patch::Set("Updated multiple fields".to_string())),
|
||||||
entrypoint: Some("sensors/multi.py".to_string()),
|
entrypoint: Some("sensors/multi.py".to_string()),
|
||||||
enabled: Some(false),
|
enabled: Some(false),
|
||||||
param_schema: Some(Patch::Set(json!({"type": "object"}))),
|
param_schema: Some(Patch::Set(json!({"type": "object"}))),
|
||||||
@@ -946,7 +949,10 @@ async fn test_update_multiple_fields() {
|
|||||||
.unwrap();
|
.unwrap();
|
||||||
|
|
||||||
assert_eq!(updated.label, "Multi Update");
|
assert_eq!(updated.label, "Multi Update");
|
||||||
assert_eq!(updated.description, "Updated multiple fields");
|
assert_eq!(
|
||||||
|
updated.description,
|
||||||
|
Some("Updated multiple fields".to_string())
|
||||||
|
);
|
||||||
assert_eq!(updated.entrypoint, "sensors/multi.py");
|
assert_eq!(updated.entrypoint, "sensors/multi.py");
|
||||||
assert!(!updated.enabled);
|
assert!(!updated.enabled);
|
||||||
assert_eq!(updated.param_schema, Some(json!({"type": "object"})));
|
assert_eq!(updated.param_schema, Some(json!({"type": "object"})));
|
||||||
|
|||||||
@@ -368,7 +368,7 @@ mod tests {
|
|||||||
pack: 1,
|
pack: 1,
|
||||||
pack_ref: "test".to_string(),
|
pack_ref: "test".to_string(),
|
||||||
label: "Test Rule".to_string(),
|
label: "Test Rule".to_string(),
|
||||||
description: "Test rule description".to_string(),
|
description: Some("Test rule description".to_string()),
|
||||||
trigger_ref: "test.trigger".to_string(),
|
trigger_ref: "test.trigger".to_string(),
|
||||||
trigger: Some(1),
|
trigger: Some(1),
|
||||||
action_ref: "test.action".to_string(),
|
action_ref: "test.action".to_string(),
|
||||||
|
|||||||
@@ -13,8 +13,11 @@
|
|||||||
|
|
||||||
use anyhow::Result;
|
use anyhow::Result;
|
||||||
use attune_common::{
|
use attune_common::{
|
||||||
models::{enums::ExecutionStatus, execution::WorkflowTaskMetadata, Action, Execution},
|
models::{enums::ExecutionStatus, execution::WorkflowTaskMetadata, Action, Execution, Runtime},
|
||||||
mq::{Consumer, ExecutionRequestedPayload, MessageEnvelope, MessageType, Publisher},
|
mq::{
|
||||||
|
Consumer, ExecutionCompletedPayload, ExecutionRequestedPayload, MessageEnvelope,
|
||||||
|
MessageType, Publisher,
|
||||||
|
},
|
||||||
repositories::{
|
repositories::{
|
||||||
action::ActionRepository,
|
action::ActionRepository,
|
||||||
execution::{CreateExecutionInput, ExecutionRepository, UpdateExecutionInput},
|
execution::{CreateExecutionInput, ExecutionRepository, UpdateExecutionInput},
|
||||||
@@ -24,7 +27,7 @@ use attune_common::{
|
|||||||
},
|
},
|
||||||
Create, FindById, FindByRef, Update,
|
Create, FindById, FindByRef, Update,
|
||||||
},
|
},
|
||||||
runtime_detection::runtime_matches_filter,
|
runtime_detection::runtime_aliases_contain,
|
||||||
workflow::WorkflowDefinition,
|
workflow::WorkflowDefinition,
|
||||||
};
|
};
|
||||||
use chrono::Utc;
|
use chrono::Utc;
|
||||||
@@ -205,7 +208,23 @@ impl ExecutionScheduler {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Regular action: select appropriate worker (round-robin among compatible workers)
|
// Regular action: select appropriate worker (round-robin among compatible workers)
|
||||||
let worker = Self::select_worker(pool, &action, round_robin_counter).await?;
|
let worker = match Self::select_worker(pool, &action, round_robin_counter).await {
|
||||||
|
Ok(worker) => worker,
|
||||||
|
Err(err) if Self::is_unschedulable_error(&err) => {
|
||||||
|
Self::fail_unschedulable_execution(
|
||||||
|
pool,
|
||||||
|
publisher,
|
||||||
|
envelope,
|
||||||
|
execution_id,
|
||||||
|
action.id,
|
||||||
|
&action.r#ref,
|
||||||
|
&err.to_string(),
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
return Ok(());
|
||||||
|
}
|
||||||
|
Err(err) => return Err(err),
|
||||||
|
};
|
||||||
|
|
||||||
info!(
|
info!(
|
||||||
"Selected worker {} for execution {}",
|
"Selected worker {} for execution {}",
|
||||||
@@ -1561,7 +1580,7 @@ impl ExecutionScheduler {
|
|||||||
let compatible_workers: Vec<_> = if let Some(ref runtime) = runtime {
|
let compatible_workers: Vec<_> = if let Some(ref runtime) = runtime {
|
||||||
workers
|
workers
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.filter(|w| Self::worker_supports_runtime(w, &runtime.name))
|
.filter(|w| Self::worker_supports_runtime(w, runtime))
|
||||||
.collect()
|
.collect()
|
||||||
} else {
|
} else {
|
||||||
workers
|
workers
|
||||||
@@ -1619,20 +1638,26 @@ impl ExecutionScheduler {
|
|||||||
|
|
||||||
/// Check if a worker supports a given runtime
|
/// Check if a worker supports a given runtime
|
||||||
///
|
///
|
||||||
/// This checks the worker's capabilities.runtimes array for the runtime name.
|
/// This checks the worker's capabilities.runtimes array against the runtime's aliases.
|
||||||
/// Falls back to checking the deprecated runtime column if capabilities are not set.
|
/// If aliases are missing, fall back to the runtime's canonical name.
|
||||||
fn worker_supports_runtime(worker: &attune_common::models::Worker, runtime_name: &str) -> bool {
|
fn worker_supports_runtime(worker: &attune_common::models::Worker, runtime: &Runtime) -> bool {
|
||||||
// First, try to parse capabilities and check runtimes array
|
let runtime_names = Self::runtime_capability_names(runtime);
|
||||||
|
|
||||||
|
// Try to parse capabilities and check runtimes array
|
||||||
if let Some(ref capabilities) = worker.capabilities {
|
if let Some(ref capabilities) = worker.capabilities {
|
||||||
if let Some(runtimes) = capabilities.get("runtimes") {
|
if let Some(runtimes) = capabilities.get("runtimes") {
|
||||||
if let Some(runtime_array) = runtimes.as_array() {
|
if let Some(runtime_array) = runtimes.as_array() {
|
||||||
// Check if any runtime in the array matches (alias-aware)
|
// Check if any runtime in the array matches via aliases
|
||||||
for runtime_value in runtime_array {
|
for runtime_value in runtime_array {
|
||||||
if let Some(runtime_str) = runtime_value.as_str() {
|
if let Some(runtime_str) = runtime_value.as_str() {
|
||||||
if runtime_matches_filter(runtime_name, runtime_str) {
|
if runtime_names
|
||||||
|
.iter()
|
||||||
|
.any(|candidate| candidate.eq_ignore_ascii_case(runtime_str))
|
||||||
|
|| runtime_aliases_contain(&runtime.aliases, runtime_str)
|
||||||
|
{
|
||||||
debug!(
|
debug!(
|
||||||
"Worker {} supports runtime '{}' via capabilities (matched '{}')",
|
"Worker {} supports runtime '{}' via capabilities (matched '{}', candidates: {:?})",
|
||||||
worker.name, runtime_name, runtime_str
|
worker.name, runtime.name, runtime_str, runtime_names
|
||||||
);
|
);
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
@@ -1642,25 +1667,90 @@ impl ExecutionScheduler {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Fallback: check deprecated runtime column
|
|
||||||
// This is kept for backward compatibility but should be removed in the future
|
|
||||||
if worker.runtime.is_some() {
|
|
||||||
debug!(
|
debug!(
|
||||||
"Worker {} using deprecated runtime column for matching",
|
"Worker {} does not support runtime '{}' (candidates: {:?})",
|
||||||
worker.name
|
worker.name, runtime.name, runtime_names
|
||||||
);
|
|
||||||
// Note: This fallback is incomplete because we'd need to look up the runtime name
|
|
||||||
// from the ID, which would require an async call. Since we're moving to capabilities,
|
|
||||||
// we'll just return false here and require workers to set capabilities properly.
|
|
||||||
}
|
|
||||||
|
|
||||||
debug!(
|
|
||||||
"Worker {} does not support runtime '{}'",
|
|
||||||
worker.name, runtime_name
|
|
||||||
);
|
);
|
||||||
false
|
false
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn runtime_capability_names(runtime: &Runtime) -> Vec<String> {
|
||||||
|
let mut names: Vec<String> = runtime
|
||||||
|
.aliases
|
||||||
|
.iter()
|
||||||
|
.map(|alias| alias.to_ascii_lowercase())
|
||||||
|
.filter(|alias| !alias.is_empty())
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
let runtime_name = runtime.name.to_ascii_lowercase();
|
||||||
|
if !runtime_name.is_empty() && !names.iter().any(|name| name == &runtime_name) {
|
||||||
|
names.push(runtime_name);
|
||||||
|
}
|
||||||
|
|
||||||
|
names
|
||||||
|
}
|
||||||
|
|
||||||
|
fn is_unschedulable_error(error: &anyhow::Error) -> bool {
|
||||||
|
let message = error.to_string();
|
||||||
|
message.starts_with("No compatible workers found")
|
||||||
|
|| message.starts_with("No action workers available")
|
||||||
|
|| message.starts_with("No active workers available")
|
||||||
|
|| message.starts_with("No workers with fresh heartbeats available")
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn fail_unschedulable_execution(
|
||||||
|
pool: &PgPool,
|
||||||
|
publisher: &Publisher,
|
||||||
|
envelope: &MessageEnvelope<ExecutionRequestedPayload>,
|
||||||
|
execution_id: i64,
|
||||||
|
action_id: i64,
|
||||||
|
action_ref: &str,
|
||||||
|
error_message: &str,
|
||||||
|
) -> Result<()> {
|
||||||
|
let completed_at = Utc::now();
|
||||||
|
let result = serde_json::json!({
|
||||||
|
"error": "Execution is unschedulable",
|
||||||
|
"message": error_message,
|
||||||
|
"action_ref": action_ref,
|
||||||
|
"failed_by": "execution_scheduler",
|
||||||
|
"failed_at": completed_at.to_rfc3339(),
|
||||||
|
});
|
||||||
|
|
||||||
|
ExecutionRepository::update(
|
||||||
|
pool,
|
||||||
|
execution_id,
|
||||||
|
UpdateExecutionInput {
|
||||||
|
status: Some(ExecutionStatus::Failed),
|
||||||
|
result: Some(result.clone()),
|
||||||
|
..Default::default()
|
||||||
|
},
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
let completed = MessageEnvelope::new(
|
||||||
|
MessageType::ExecutionCompleted,
|
||||||
|
ExecutionCompletedPayload {
|
||||||
|
execution_id,
|
||||||
|
action_id,
|
||||||
|
action_ref: action_ref.to_string(),
|
||||||
|
status: "failed".to_string(),
|
||||||
|
result: Some(result),
|
||||||
|
completed_at,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
.with_correlation_id(envelope.correlation_id)
|
||||||
|
.with_source("attune-executor");
|
||||||
|
|
||||||
|
publisher.publish_envelope(&completed).await?;
|
||||||
|
|
||||||
|
warn!(
|
||||||
|
"Execution {} marked failed as unschedulable: {}",
|
||||||
|
execution_id, error_message
|
||||||
|
);
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
/// Check if a worker's heartbeat is fresh enough to schedule work
|
/// Check if a worker's heartbeat is fresh enough to schedule work
|
||||||
///
|
///
|
||||||
/// A worker is considered fresh if its last heartbeat is within
|
/// A worker is considered fresh if its last heartbeat is within
|
||||||
@@ -1826,6 +1916,70 @@ mod tests {
|
|||||||
// Real tests will require database and message queue setup
|
// Real tests will require database and message queue setup
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_worker_supports_runtime_with_alias_match() {
|
||||||
|
let worker = create_test_worker("test-worker", 5);
|
||||||
|
let runtime = Runtime {
|
||||||
|
id: 1,
|
||||||
|
r#ref: "core.shell".to_string(),
|
||||||
|
pack: None,
|
||||||
|
pack_ref: Some("core".to_string()),
|
||||||
|
description: Some("Shell runtime".to_string()),
|
||||||
|
name: "Shell".to_string(),
|
||||||
|
aliases: vec!["shell".to_string(), "bash".to_string()],
|
||||||
|
distributions: serde_json::json!({}),
|
||||||
|
installation: None,
|
||||||
|
installers: serde_json::json!({}),
|
||||||
|
execution_config: serde_json::json!({}),
|
||||||
|
auto_detected: false,
|
||||||
|
detection_config: serde_json::json!({}),
|
||||||
|
created: Utc::now(),
|
||||||
|
updated: Utc::now(),
|
||||||
|
};
|
||||||
|
|
||||||
|
assert!(ExecutionScheduler::worker_supports_runtime(
|
||||||
|
&worker, &runtime
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_worker_supports_runtime_falls_back_to_runtime_name_when_aliases_missing() {
|
||||||
|
let worker = create_test_worker("test-worker", 5);
|
||||||
|
let runtime = Runtime {
|
||||||
|
id: 1,
|
||||||
|
r#ref: "core.shell".to_string(),
|
||||||
|
pack: None,
|
||||||
|
pack_ref: Some("core".to_string()),
|
||||||
|
description: Some("Shell runtime".to_string()),
|
||||||
|
name: "Shell".to_string(),
|
||||||
|
aliases: vec![],
|
||||||
|
distributions: serde_json::json!({}),
|
||||||
|
installation: None,
|
||||||
|
installers: serde_json::json!({}),
|
||||||
|
execution_config: serde_json::json!({}),
|
||||||
|
auto_detected: false,
|
||||||
|
detection_config: serde_json::json!({}),
|
||||||
|
created: Utc::now(),
|
||||||
|
updated: Utc::now(),
|
||||||
|
};
|
||||||
|
|
||||||
|
assert!(ExecutionScheduler::worker_supports_runtime(
|
||||||
|
&worker, &runtime
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_unschedulable_error_classification() {
|
||||||
|
assert!(ExecutionScheduler::is_unschedulable_error(
|
||||||
|
&anyhow::anyhow!(
|
||||||
|
"No compatible workers found for action: core.sleep (requires runtime: Shell)"
|
||||||
|
)
|
||||||
|
));
|
||||||
|
assert!(!ExecutionScheduler::is_unschedulable_error(
|
||||||
|
&anyhow::anyhow!("database temporarily unavailable")
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_concurrency_limit_dispatch_count() {
|
fn test_concurrency_limit_dispatch_count() {
|
||||||
// Verify the dispatch_count calculation used by dispatch_with_items_task
|
// Verify the dispatch_count calculation used by dispatch_with_items_task
|
||||||
|
|||||||
@@ -72,6 +72,7 @@ async fn _create_test_runtime(pool: &PgPool, suffix: &str) -> i64 {
|
|||||||
pack_ref: None,
|
pack_ref: None,
|
||||||
description: Some(format!("Test runtime {}", suffix)),
|
description: Some(format!("Test runtime {}", suffix)),
|
||||||
name: format!("Python {}", suffix),
|
name: format!("Python {}", suffix),
|
||||||
|
aliases: vec![],
|
||||||
distributions: json!({"ubuntu": "python3"}),
|
distributions: json!({"ubuntu": "python3"}),
|
||||||
installation: Some(json!({"method": "apt"})),
|
installation: Some(json!({"method": "apt"})),
|
||||||
execution_config: json!({
|
execution_config: json!({
|
||||||
@@ -81,6 +82,8 @@ async fn _create_test_runtime(pool: &PgPool, suffix: &str) -> i64 {
|
|||||||
"file_extension": ".py"
|
"file_extension": ".py"
|
||||||
}
|
}
|
||||||
}),
|
}),
|
||||||
|
auto_detected: false,
|
||||||
|
detection_config: json!({}),
|
||||||
};
|
};
|
||||||
|
|
||||||
RuntimeRepository::create(pool, runtime_input)
|
RuntimeRepository::create(pool, runtime_input)
|
||||||
@@ -96,7 +99,7 @@ async fn create_test_action(pool: &PgPool, pack_id: i64, pack_ref: &str, suffix:
|
|||||||
pack: pack_id,
|
pack: pack_id,
|
||||||
pack_ref: pack_ref.to_string(),
|
pack_ref: pack_ref.to_string(),
|
||||||
label: format!("FIFO Test Action {}", suffix),
|
label: format!("FIFO Test Action {}", suffix),
|
||||||
description: format!("Test action {}", suffix),
|
description: Some(format!("Test action {}", suffix)),
|
||||||
entrypoint: "echo test".to_string(),
|
entrypoint: "echo test".to_string(),
|
||||||
runtime: None,
|
runtime: None,
|
||||||
runtime_version_constraint: None,
|
runtime_version_constraint: None,
|
||||||
|
|||||||
@@ -67,6 +67,7 @@ async fn create_test_runtime(pool: &PgPool, suffix: &str) -> i64 {
|
|||||||
pack_ref: None,
|
pack_ref: None,
|
||||||
description: Some(format!("Test runtime {}", suffix)),
|
description: Some(format!("Test runtime {}", suffix)),
|
||||||
name: format!("Python {}", suffix),
|
name: format!("Python {}", suffix),
|
||||||
|
aliases: vec![],
|
||||||
distributions: json!({"ubuntu": "python3"}),
|
distributions: json!({"ubuntu": "python3"}),
|
||||||
installation: Some(json!({"method": "apt"})),
|
installation: Some(json!({"method": "apt"})),
|
||||||
execution_config: json!({
|
execution_config: json!({
|
||||||
@@ -76,6 +77,8 @@ async fn create_test_runtime(pool: &PgPool, suffix: &str) -> i64 {
|
|||||||
"file_extension": ".py"
|
"file_extension": ".py"
|
||||||
}
|
}
|
||||||
}),
|
}),
|
||||||
|
auto_detected: false,
|
||||||
|
detection_config: json!({}),
|
||||||
};
|
};
|
||||||
|
|
||||||
let runtime = RuntimeRepository::create(pool, runtime_input)
|
let runtime = RuntimeRepository::create(pool, runtime_input)
|
||||||
@@ -91,7 +94,7 @@ async fn create_test_action(pool: &PgPool, pack_id: i64, suffix: &str) -> i64 {
|
|||||||
pack: pack_id,
|
pack: pack_id,
|
||||||
pack_ref: format!("test_pack_{}", suffix),
|
pack_ref: format!("test_pack_{}", suffix),
|
||||||
label: format!("Test Action {}", suffix),
|
label: format!("Test Action {}", suffix),
|
||||||
description: format!("Test action {}", suffix),
|
description: Some(format!("Test action {}", suffix)),
|
||||||
entrypoint: "echo test".to_string(),
|
entrypoint: "echo test".to_string(),
|
||||||
runtime: None,
|
runtime: None,
|
||||||
runtime_version_constraint: None,
|
runtime_version_constraint: None,
|
||||||
|
|||||||
@@ -14,6 +14,10 @@ path = "src/lib.rs"
|
|||||||
name = "attune-sensor"
|
name = "attune-sensor"
|
||||||
path = "src/main.rs"
|
path = "src/main.rs"
|
||||||
|
|
||||||
|
[[bin]]
|
||||||
|
name = "attune-sensor-agent"
|
||||||
|
path = "src/agent_main.rs"
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
attune-common = { path = "../common" }
|
attune-common = { path = "../common" }
|
||||||
tokio = { workspace = true }
|
tokio = { workspace = true }
|
||||||
|
|||||||
79
crates/sensor/src/agent_main.rs
Normal file
79
crates/sensor/src/agent_main.rs
Normal file
@@ -0,0 +1,79 @@
|
|||||||
|
//! Attune Universal Sensor Agent.
|
||||||
|
|
||||||
|
use anyhow::Result;
|
||||||
|
use attune_common::agent_bootstrap::{bootstrap_runtime_env, print_detect_only_report};
|
||||||
|
use attune_common::config::Config;
|
||||||
|
use attune_sensor::startup::{
|
||||||
|
apply_sensor_name_override, init_tracing, log_config_details, run_sensor_service,
|
||||||
|
set_config_path,
|
||||||
|
};
|
||||||
|
use clap::Parser;
|
||||||
|
use tracing::info;
|
||||||
|
|
||||||
|
#[derive(Parser, Debug)]
|
||||||
|
#[command(name = "attune-sensor-agent")]
|
||||||
|
#[command(
|
||||||
|
version,
|
||||||
|
about = "Attune Universal Sensor Agent - Injected into runtime containers to auto-detect sensor runtimes"
|
||||||
|
)]
|
||||||
|
struct Args {
|
||||||
|
/// Path to configuration file (optional)
|
||||||
|
#[arg(short, long)]
|
||||||
|
config: Option<String>,
|
||||||
|
|
||||||
|
/// Sensor worker name override
|
||||||
|
#[arg(short, long)]
|
||||||
|
name: Option<String>,
|
||||||
|
|
||||||
|
/// Run runtime detection, print results, and exit
|
||||||
|
#[arg(long)]
|
||||||
|
detect_only: bool,
|
||||||
|
}
|
||||||
|
|
||||||
|
fn main() -> Result<()> {
|
||||||
|
attune_common::auth::install_crypto_provider();
|
||||||
|
init_tracing(tracing::Level::INFO);
|
||||||
|
|
||||||
|
let args = Args::parse();
|
||||||
|
|
||||||
|
info!("Starting Attune Universal Sensor Agent");
|
||||||
|
info!(
|
||||||
|
"Agent binary: attune-sensor-agent {}",
|
||||||
|
env!("CARGO_PKG_VERSION")
|
||||||
|
);
|
||||||
|
|
||||||
|
// Safe: no async runtime or worker threads are running yet.
|
||||||
|
std::env::set_var("ATTUNE_SENSOR_AGENT_MODE", "true");
|
||||||
|
std::env::set_var("ATTUNE_SENSOR_AGENT_BINARY_NAME", "attune-sensor-agent");
|
||||||
|
std::env::set_var(
|
||||||
|
"ATTUNE_SENSOR_AGENT_BINARY_VERSION",
|
||||||
|
env!("CARGO_PKG_VERSION"),
|
||||||
|
);
|
||||||
|
|
||||||
|
let bootstrap = bootstrap_runtime_env("ATTUNE_SENSOR_RUNTIMES");
|
||||||
|
|
||||||
|
if args.detect_only {
|
||||||
|
print_detect_only_report("ATTUNE_SENSOR_RUNTIMES", &bootstrap);
|
||||||
|
return Ok(());
|
||||||
|
}
|
||||||
|
|
||||||
|
set_config_path(args.config.as_deref());
|
||||||
|
|
||||||
|
let runtime = tokio::runtime::Runtime::new()?;
|
||||||
|
runtime.block_on(async_main(args))
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn async_main(args: Args) -> Result<()> {
|
||||||
|
let mut config = Config::load()?;
|
||||||
|
config.validate()?;
|
||||||
|
|
||||||
|
if let Some(name) = args.name {
|
||||||
|
apply_sensor_name_override(&mut config, name);
|
||||||
|
}
|
||||||
|
|
||||||
|
log_config_details(&config);
|
||||||
|
run_sensor_service(config, "Attune Sensor Agent is ready").await?;
|
||||||
|
info!("Attune Sensor Agent shutdown complete");
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
@@ -8,6 +8,7 @@ pub mod rule_lifecycle_listener;
|
|||||||
pub mod sensor_manager;
|
pub mod sensor_manager;
|
||||||
pub mod sensor_worker_registration;
|
pub mod sensor_worker_registration;
|
||||||
pub mod service;
|
pub mod service;
|
||||||
|
pub mod startup;
|
||||||
|
|
||||||
// Re-export template resolver from common crate
|
// Re-export template resolver from common crate
|
||||||
pub mod template_resolver {
|
pub mod template_resolver {
|
||||||
|
|||||||
@@ -1,15 +1,14 @@
|
|||||||
//! Attune Sensor Service
|
//! Attune Sensor Service
|
||||||
//!
|
//!
|
||||||
//! The Sensor Service monitors for trigger conditions and generates events.
|
//! The Sensor Service monitors for trigger conditions and generates events.
|
||||||
//! It executes custom sensor code, manages sensor lifecycle, and publishes
|
|
||||||
//! events to the message queue for rule matching and enforcement creation.
|
|
||||||
|
|
||||||
use anyhow::Result;
|
use anyhow::Result;
|
||||||
use attune_common::config::Config;
|
use attune_common::config::Config;
|
||||||
use attune_sensor::service::SensorService;
|
use attune_sensor::startup::{
|
||||||
|
init_tracing, log_config_details, run_sensor_service, set_config_path,
|
||||||
|
};
|
||||||
use clap::Parser;
|
use clap::Parser;
|
||||||
use tokio::signal::unix::{signal, SignalKind};
|
use tracing::info;
|
||||||
use tracing::{error, info};
|
|
||||||
|
|
||||||
#[derive(Parser, Debug)]
|
#[derive(Parser, Debug)]
|
||||||
#[command(name = "attune-sensor")]
|
#[command(name = "attune-sensor")]
|
||||||
@@ -26,114 +25,23 @@ struct Args {
|
|||||||
|
|
||||||
#[tokio::main]
|
#[tokio::main]
|
||||||
async fn main() -> Result<()> {
|
async fn main() -> Result<()> {
|
||||||
// Install HMAC-only JWT crypto provider (must be before any token operations)
|
|
||||||
attune_common::auth::install_crypto_provider();
|
attune_common::auth::install_crypto_provider();
|
||||||
|
|
||||||
let args = Args::parse();
|
let args = Args::parse();
|
||||||
|
|
||||||
// Initialize tracing with specified log level
|
|
||||||
let log_level = args.log_level.parse().unwrap_or(tracing::Level::INFO);
|
let log_level = args.log_level.parse().unwrap_or(tracing::Level::INFO);
|
||||||
tracing_subscriber::fmt()
|
init_tracing(log_level);
|
||||||
.with_max_level(log_level)
|
|
||||||
.with_target(false)
|
|
||||||
.with_thread_ids(true)
|
|
||||||
.with_file(true)
|
|
||||||
.with_line_number(true)
|
|
||||||
.init();
|
|
||||||
|
|
||||||
info!("Starting Attune Sensor Service");
|
info!("Starting Attune Sensor Service");
|
||||||
info!("Version: {}", env!("CARGO_PKG_VERSION"));
|
info!("Version: {}", env!("CARGO_PKG_VERSION"));
|
||||||
|
|
||||||
// Load configuration
|
set_config_path(args.config.as_deref());
|
||||||
if let Some(config_path) = args.config {
|
|
||||||
info!("Loading configuration from: {}", config_path);
|
|
||||||
std::env::set_var("ATTUNE_CONFIG", config_path);
|
|
||||||
}
|
|
||||||
|
|
||||||
let config = Config::load()?;
|
let config = Config::load()?;
|
||||||
config.validate()?;
|
config.validate()?;
|
||||||
|
|
||||||
info!("Configuration loaded successfully");
|
log_config_details(&config);
|
||||||
info!("Environment: {}", config.environment);
|
run_sensor_service(config, "Attune Sensor Service is ready").await?;
|
||||||
info!("Database: {}", mask_connection_string(&config.database.url));
|
|
||||||
if let Some(ref mq_config) = config.message_queue {
|
|
||||||
info!("Message Queue: {}", mask_connection_string(&mq_config.url));
|
|
||||||
}
|
|
||||||
|
|
||||||
// Create and start sensor service
|
|
||||||
let service = SensorService::new(config).await?;
|
|
||||||
|
|
||||||
info!("Sensor Service initialized successfully");
|
|
||||||
|
|
||||||
// Start the service (spawns background tasks and returns)
|
|
||||||
info!("Starting Sensor Service components...");
|
|
||||||
service.start().await?;
|
|
||||||
|
|
||||||
info!("Attune Sensor Service is ready");
|
|
||||||
|
|
||||||
// Setup signal handlers for graceful shutdown
|
|
||||||
let mut sigint = signal(SignalKind::interrupt())?;
|
|
||||||
let mut sigterm = signal(SignalKind::terminate())?;
|
|
||||||
|
|
||||||
tokio::select! {
|
|
||||||
_ = sigint.recv() => {
|
|
||||||
info!("Received SIGINT signal");
|
|
||||||
}
|
|
||||||
_ = sigterm.recv() => {
|
|
||||||
info!("Received SIGTERM signal");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
info!("Shutting down gracefully...");
|
|
||||||
|
|
||||||
// Stop the service: deregister worker, stop sensors, clean up connections
|
|
||||||
if let Err(e) = service.stop().await {
|
|
||||||
error!("Error during shutdown: {}", e);
|
|
||||||
}
|
|
||||||
|
|
||||||
info!("Attune Sensor Service shutdown complete");
|
info!("Attune Sensor Service shutdown complete");
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Mask sensitive parts of connection strings for logging
|
|
||||||
fn mask_connection_string(url: &str) -> String {
|
|
||||||
if let Some(at_pos) = url.find('@') {
|
|
||||||
if let Some(proto_end) = url.find("://") {
|
|
||||||
let protocol = &url[..proto_end + 3];
|
|
||||||
let host_and_path = &url[at_pos..];
|
|
||||||
return format!("{}***:***{}", protocol, host_and_path);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
"***:***@***".to_string()
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
|
||||||
mod tests {
|
|
||||||
use super::*;
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_mask_connection_string() {
|
|
||||||
let url = "postgresql://user:password@localhost:5432/attune";
|
|
||||||
let masked = mask_connection_string(url);
|
|
||||||
assert!(!masked.contains("user"));
|
|
||||||
assert!(!masked.contains("password"));
|
|
||||||
assert!(masked.contains("@localhost"));
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_mask_connection_string_no_credentials() {
|
|
||||||
let url = "postgresql://localhost:5432/attune";
|
|
||||||
let masked = mask_connection_string(url);
|
|
||||||
assert_eq!(masked, "***:***@***");
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_mask_rabbitmq_connection() {
|
|
||||||
let url = "amqp://admin:secret@rabbitmq:5672/%2F";
|
|
||||||
let masked = mask_connection_string(url);
|
|
||||||
assert!(!masked.contains("admin"));
|
|
||||||
assert!(!masked.contains("secret"));
|
|
||||||
assert!(masked.contains("@rabbitmq"));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|||||||
@@ -11,7 +11,7 @@
|
|||||||
//! - Monitoring sensor health and restarting failed sensors
|
//! - Monitoring sensor health and restarting failed sensors
|
||||||
|
|
||||||
use anyhow::{anyhow, Result};
|
use anyhow::{anyhow, Result};
|
||||||
use attune_common::models::{Id, Sensor, Trigger};
|
use attune_common::models::{runtime::RuntimeExecutionConfig, Id, Sensor, Trigger};
|
||||||
use attune_common::repositories::{FindById, List, RuntimeRepository};
|
use attune_common::repositories::{FindById, List, RuntimeRepository};
|
||||||
|
|
||||||
use sqlx::{PgPool, Row};
|
use sqlx::{PgPool, Row};
|
||||||
@@ -27,6 +27,37 @@ use tracing::{debug, error, info, warn};
|
|||||||
|
|
||||||
use crate::api_client::ApiClient;
|
use crate::api_client::ApiClient;
|
||||||
|
|
||||||
|
fn existing_command_env(cmd: &Command, key: &str) -> Option<String> {
|
||||||
|
cmd.as_std()
|
||||||
|
.get_envs()
|
||||||
|
.find_map(|(env_key, value)| {
|
||||||
|
if env_key == key {
|
||||||
|
value.map(|value| value.to_string_lossy().into_owned())
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
}
|
||||||
|
})
|
||||||
|
.or_else(|| std::env::var(key).ok())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn apply_runtime_env_vars(
|
||||||
|
cmd: &mut Command,
|
||||||
|
exec_config: &RuntimeExecutionConfig,
|
||||||
|
pack_dir: &std::path::Path,
|
||||||
|
env_dir: Option<&std::path::Path>,
|
||||||
|
) {
|
||||||
|
if exec_config.env_vars.is_empty() {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
let vars = exec_config.build_template_vars_with_env(pack_dir, env_dir);
|
||||||
|
for (key, env_var_config) in &exec_config.env_vars {
|
||||||
|
let resolved = env_var_config.resolve(&vars, existing_command_env(cmd, key).as_deref());
|
||||||
|
debug!("Setting sensor runtime env var: {}={}", key, resolved);
|
||||||
|
cmd.env(key, resolved);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
/// Sensor manager that coordinates all sensor instances
|
/// Sensor manager that coordinates all sensor instances
|
||||||
#[derive(Clone)]
|
#[derive(Clone)]
|
||||||
pub struct SensorManager {
|
pub struct SensorManager {
|
||||||
@@ -162,6 +193,127 @@ impl SensorManager {
|
|||||||
Ok(enabled_sensors)
|
Ok(enabled_sensors)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
async fn ensure_runtime_environment(
|
||||||
|
&self,
|
||||||
|
exec_config: &RuntimeExecutionConfig,
|
||||||
|
pack_dir: &std::path::Path,
|
||||||
|
env_dir: &std::path::Path,
|
||||||
|
) -> Result<()> {
|
||||||
|
let env_cfg = match &exec_config.environment {
|
||||||
|
Some(cfg) if cfg.env_type != "none" => cfg,
|
||||||
|
_ => return Ok(()),
|
||||||
|
};
|
||||||
|
|
||||||
|
let vars = exec_config.build_template_vars_with_env(pack_dir, Some(env_dir));
|
||||||
|
|
||||||
|
if !env_dir.exists() {
|
||||||
|
if env_cfg.create_command.is_empty() {
|
||||||
|
return Err(anyhow!(
|
||||||
|
"Runtime environment '{}' requires create_command but none is configured",
|
||||||
|
env_cfg.env_type
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
|
if let Some(parent) = env_dir.parent() {
|
||||||
|
tokio::fs::create_dir_all(parent).await.map_err(|e| {
|
||||||
|
anyhow!(
|
||||||
|
"Failed to create runtime environment parent directory {}: {}",
|
||||||
|
parent.display(),
|
||||||
|
e
|
||||||
|
)
|
||||||
|
})?;
|
||||||
|
}
|
||||||
|
|
||||||
|
let resolved_cmd =
|
||||||
|
RuntimeExecutionConfig::resolve_command(&env_cfg.create_command, &vars);
|
||||||
|
let (program, args) = resolved_cmd
|
||||||
|
.split_first()
|
||||||
|
.ok_or_else(|| anyhow!("Empty create_command for runtime environment"))?;
|
||||||
|
|
||||||
|
info!(
|
||||||
|
"Creating sensor runtime environment at {}: {:?}",
|
||||||
|
env_dir.display(),
|
||||||
|
resolved_cmd
|
||||||
|
);
|
||||||
|
|
||||||
|
let output = Command::new(program)
|
||||||
|
.args(args)
|
||||||
|
.current_dir(pack_dir)
|
||||||
|
.output()
|
||||||
|
.await
|
||||||
|
.map_err(|e| anyhow!("Failed to run create command '{}': {}", program, e))?;
|
||||||
|
|
||||||
|
if !output.status.success() {
|
||||||
|
let stderr = String::from_utf8_lossy(&output.stderr);
|
||||||
|
return Err(anyhow!(
|
||||||
|
"Runtime environment creation failed (exit {}): {}",
|
||||||
|
output.status.code().unwrap_or(-1),
|
||||||
|
stderr.trim()
|
||||||
|
));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let dep_cfg = match &exec_config.dependencies {
|
||||||
|
Some(cfg) => cfg,
|
||||||
|
None => return Ok(()),
|
||||||
|
};
|
||||||
|
|
||||||
|
let manifest_path = pack_dir.join(&dep_cfg.manifest_file);
|
||||||
|
if !manifest_path.exists() || dep_cfg.install_command.is_empty() {
|
||||||
|
return Ok(());
|
||||||
|
}
|
||||||
|
|
||||||
|
let install_marker = env_dir.join(".attune_sensor_deps_installed");
|
||||||
|
if install_marker.exists() {
|
||||||
|
return Ok(());
|
||||||
|
}
|
||||||
|
|
||||||
|
let resolved_cmd = RuntimeExecutionConfig::resolve_command(&dep_cfg.install_command, &vars);
|
||||||
|
let (program, args) = resolved_cmd
|
||||||
|
.split_first()
|
||||||
|
.ok_or_else(|| anyhow!("Empty install_command for runtime dependencies"))?;
|
||||||
|
|
||||||
|
info!(
|
||||||
|
"Installing sensor runtime dependencies for {} using {:?}",
|
||||||
|
pack_dir.display(),
|
||||||
|
resolved_cmd
|
||||||
|
);
|
||||||
|
|
||||||
|
let output = Command::new(program)
|
||||||
|
.args(args)
|
||||||
|
.current_dir(pack_dir)
|
||||||
|
.output()
|
||||||
|
.await
|
||||||
|
.map_err(|e| {
|
||||||
|
anyhow!(
|
||||||
|
"Failed to run dependency install command '{}': {}",
|
||||||
|
program,
|
||||||
|
e
|
||||||
|
)
|
||||||
|
})?;
|
||||||
|
|
||||||
|
if !output.status.success() {
|
||||||
|
let stderr = String::from_utf8_lossy(&output.stderr);
|
||||||
|
return Err(anyhow!(
|
||||||
|
"Runtime dependency installation failed (exit {}): {}",
|
||||||
|
output.status.code().unwrap_or(-1),
|
||||||
|
stderr.trim()
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
|
tokio::fs::write(&install_marker, b"ok")
|
||||||
|
.await
|
||||||
|
.map_err(|e| {
|
||||||
|
anyhow!(
|
||||||
|
"Failed to write dependency install marker {}: {}",
|
||||||
|
install_marker.display(),
|
||||||
|
e
|
||||||
|
)
|
||||||
|
})?;
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
/// Start a sensor instance
|
/// Start a sensor instance
|
||||||
async fn start_sensor(&self, sensor: Sensor) -> Result<()> {
|
async fn start_sensor(&self, sensor: Sensor) -> Result<()> {
|
||||||
info!("Starting sensor {} ({})", sensor.r#ref, sensor.id);
|
info!("Starting sensor {} ({})", sensor.r#ref, sensor.id);
|
||||||
@@ -231,6 +383,12 @@ impl SensorManager {
|
|||||||
|
|
||||||
let exec_config = runtime.parsed_execution_config();
|
let exec_config = runtime.parsed_execution_config();
|
||||||
let rt_name = runtime.name.to_lowercase();
|
let rt_name = runtime.name.to_lowercase();
|
||||||
|
let runtime_env_suffix = runtime
|
||||||
|
.r#ref
|
||||||
|
.rsplit('.')
|
||||||
|
.next()
|
||||||
|
.filter(|suffix| !suffix.is_empty())
|
||||||
|
.unwrap_or(&rt_name);
|
||||||
|
|
||||||
info!(
|
info!(
|
||||||
"Sensor {} runtime details: id={}, ref='{}', name='{}', execution_config={}",
|
"Sensor {} runtime details: id={}, ref='{}', name='{}', execution_config={}",
|
||||||
@@ -242,7 +400,19 @@ impl SensorManager {
|
|||||||
let pack_dir = std::path::PathBuf::from(&self.inner.packs_base_dir).join(pack_ref);
|
let pack_dir = std::path::PathBuf::from(&self.inner.packs_base_dir).join(pack_ref);
|
||||||
let env_dir = std::path::PathBuf::from(&self.inner.runtime_envs_dir)
|
let env_dir = std::path::PathBuf::from(&self.inner.runtime_envs_dir)
|
||||||
.join(pack_ref)
|
.join(pack_ref)
|
||||||
.join(&rt_name);
|
.join(runtime_env_suffix);
|
||||||
|
if let Err(e) = self
|
||||||
|
.ensure_runtime_environment(&exec_config, &pack_dir, &env_dir)
|
||||||
|
.await
|
||||||
|
{
|
||||||
|
warn!(
|
||||||
|
"Failed to ensure sensor runtime environment for {} at {}: {}",
|
||||||
|
sensor.r#ref,
|
||||||
|
env_dir.display(),
|
||||||
|
e
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
let env_dir_opt = if env_dir.exists() {
|
let env_dir_opt = if env_dir.exists() {
|
||||||
Some(env_dir.as_path())
|
Some(env_dir.as_path())
|
||||||
} else {
|
} else {
|
||||||
@@ -354,15 +524,18 @@ impl SensorManager {
|
|||||||
|
|
||||||
// Start the standalone sensor with token and configuration
|
// Start the standalone sensor with token and configuration
|
||||||
// Pass sensor ref (e.g., "core.interval_timer_sensor") for proper identification
|
// Pass sensor ref (e.g., "core.interval_timer_sensor") for proper identification
|
||||||
let mut child = cmd
|
cmd.env("ATTUNE_API_URL", &self.inner.api_url)
|
||||||
.env("ATTUNE_API_URL", &self.inner.api_url)
|
|
||||||
.env("ATTUNE_API_TOKEN", &token_response.token)
|
.env("ATTUNE_API_TOKEN", &token_response.token)
|
||||||
.env("ATTUNE_SENSOR_ID", sensor.id.to_string())
|
.env("ATTUNE_SENSOR_ID", sensor.id.to_string())
|
||||||
.env("ATTUNE_SENSOR_REF", &sensor.r#ref)
|
.env("ATTUNE_SENSOR_REF", &sensor.r#ref)
|
||||||
.env("ATTUNE_SENSOR_TRIGGERS", &trigger_instances_json)
|
.env("ATTUNE_SENSOR_TRIGGERS", &trigger_instances_json)
|
||||||
.env("ATTUNE_MQ_URL", &self.inner.mq_url)
|
.env("ATTUNE_MQ_URL", &self.inner.mq_url)
|
||||||
.env("ATTUNE_MQ_EXCHANGE", "attune.events")
|
.env("ATTUNE_MQ_EXCHANGE", "attune.events")
|
||||||
.env("ATTUNE_LOG_LEVEL", "info")
|
.env("ATTUNE_LOG_LEVEL", "info");
|
||||||
|
|
||||||
|
apply_runtime_env_vars(&mut cmd, &exec_config, &pack_dir, env_dir_opt);
|
||||||
|
|
||||||
|
let mut child = cmd
|
||||||
.stdin(Stdio::null())
|
.stdin(Stdio::null())
|
||||||
.stdout(Stdio::piped())
|
.stdout(Stdio::piped())
|
||||||
.stderr(Stdio::piped())
|
.stderr(Stdio::piped())
|
||||||
@@ -371,13 +544,14 @@ impl SensorManager {
|
|||||||
anyhow!(
|
anyhow!(
|
||||||
"Failed to start sensor process for '{}': {} \
|
"Failed to start sensor process for '{}': {} \
|
||||||
(binary='{}', is_native={}, runtime_ref='{}', \
|
(binary='{}', is_native={}, runtime_ref='{}', \
|
||||||
interpreter_config='{}')",
|
interpreter_config='{}', env_dir='{}')",
|
||||||
sensor.r#ref,
|
sensor.r#ref,
|
||||||
e,
|
e,
|
||||||
spawn_binary,
|
spawn_binary,
|
||||||
is_native,
|
is_native,
|
||||||
runtime.r#ref,
|
runtime.r#ref,
|
||||||
interpreter_binary
|
interpreter_binary,
|
||||||
|
env_dir.display()
|
||||||
)
|
)
|
||||||
})?;
|
})?;
|
||||||
|
|
||||||
@@ -748,6 +922,10 @@ pub struct SensorStatus {
|
|||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
mod tests {
|
mod tests {
|
||||||
use super::*;
|
use super::*;
|
||||||
|
use attune_common::models::runtime::{
|
||||||
|
RuntimeEnvVarConfig, RuntimeEnvVarOperation, RuntimeEnvVarSpec,
|
||||||
|
};
|
||||||
|
use std::collections::HashMap;
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_sensor_status_default() {
|
fn test_sensor_status_default() {
|
||||||
@@ -757,4 +935,46 @@ mod tests {
|
|||||||
assert_eq!(status.failure_count, 0);
|
assert_eq!(status.failure_count, 0);
|
||||||
assert!(status.last_poll.is_none());
|
assert!(status.last_poll.is_none());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_apply_runtime_env_vars_prepends_to_existing_command_env() {
|
||||||
|
let mut env_vars = HashMap::new();
|
||||||
|
env_vars.insert(
|
||||||
|
"PYTHONPATH".to_string(),
|
||||||
|
RuntimeEnvVarConfig::Spec(RuntimeEnvVarSpec {
|
||||||
|
value: "{pack_dir}/lib".to_string(),
|
||||||
|
operation: RuntimeEnvVarOperation::Prepend,
|
||||||
|
separator: ":".to_string(),
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
let exec_config = RuntimeExecutionConfig {
|
||||||
|
env_vars,
|
||||||
|
..RuntimeExecutionConfig::default()
|
||||||
|
};
|
||||||
|
|
||||||
|
let mut cmd = Command::new("python3");
|
||||||
|
cmd.env("PYTHONPATH", "/existing/pythonpath");
|
||||||
|
|
||||||
|
apply_runtime_env_vars(
|
||||||
|
&mut cmd,
|
||||||
|
&exec_config,
|
||||||
|
std::path::Path::new("/packs/testpack"),
|
||||||
|
None,
|
||||||
|
);
|
||||||
|
|
||||||
|
let resolved = cmd
|
||||||
|
.as_std()
|
||||||
|
.get_envs()
|
||||||
|
.find_map(|(key, value)| {
|
||||||
|
if key == "PYTHONPATH" {
|
||||||
|
value.map(|value| value.to_string_lossy().into_owned())
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
}
|
||||||
|
})
|
||||||
|
.expect("PYTHONPATH should be set");
|
||||||
|
|
||||||
|
assert_eq!(resolved, "/packs/testpack/lib:/existing/pythonpath");
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -15,6 +15,10 @@ use sqlx::{PgPool, Row};
|
|||||||
use std::collections::HashMap;
|
use std::collections::HashMap;
|
||||||
use tracing::{debug, info};
|
use tracing::{debug, info};
|
||||||
|
|
||||||
|
const ATTUNE_SENSOR_AGENT_MODE_ENV: &str = "ATTUNE_SENSOR_AGENT_MODE";
|
||||||
|
const ATTUNE_SENSOR_AGENT_BINARY_NAME_ENV: &str = "ATTUNE_SENSOR_AGENT_BINARY_NAME";
|
||||||
|
const ATTUNE_SENSOR_AGENT_BINARY_VERSION_ENV: &str = "ATTUNE_SENSOR_AGENT_BINARY_VERSION";
|
||||||
|
|
||||||
/// Sensor worker registration manager
|
/// Sensor worker registration manager
|
||||||
pub struct SensorWorkerRegistration {
|
pub struct SensorWorkerRegistration {
|
||||||
pool: PgPool,
|
pool: PgPool,
|
||||||
@@ -25,6 +29,33 @@ pub struct SensorWorkerRegistration {
|
|||||||
}
|
}
|
||||||
|
|
||||||
impl SensorWorkerRegistration {
|
impl SensorWorkerRegistration {
|
||||||
|
fn env_truthy(name: &str) -> bool {
|
||||||
|
std::env::var(name)
|
||||||
|
.ok()
|
||||||
|
.map(|value| matches!(value.trim().to_ascii_lowercase().as_str(), "1" | "true"))
|
||||||
|
.unwrap_or(false)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn inject_agent_capabilities(capabilities: &mut HashMap<String, serde_json::Value>) {
|
||||||
|
if Self::env_truthy(ATTUNE_SENSOR_AGENT_MODE_ENV) {
|
||||||
|
capabilities.insert("agent_mode".to_string(), json!(true));
|
||||||
|
}
|
||||||
|
|
||||||
|
if let Ok(binary_name) = std::env::var(ATTUNE_SENSOR_AGENT_BINARY_NAME_ENV) {
|
||||||
|
let binary_name = binary_name.trim();
|
||||||
|
if !binary_name.is_empty() {
|
||||||
|
capabilities.insert("agent_binary_name".to_string(), json!(binary_name));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if let Ok(binary_version) = std::env::var(ATTUNE_SENSOR_AGENT_BINARY_VERSION_ENV) {
|
||||||
|
let binary_version = binary_version.trim();
|
||||||
|
if !binary_version.is_empty() {
|
||||||
|
capabilities.insert("agent_binary_version".to_string(), json!(binary_version));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
/// Create a new sensor worker registration manager
|
/// Create a new sensor worker registration manager
|
||||||
pub fn new(pool: PgPool, config: &Config) -> Self {
|
pub fn new(pool: PgPool, config: &Config) -> Self {
|
||||||
let worker_name = config
|
let worker_name = config
|
||||||
@@ -67,6 +98,8 @@ impl SensorWorkerRegistration {
|
|||||||
json!(env!("CARGO_PKG_VERSION")),
|
json!(env!("CARGO_PKG_VERSION")),
|
||||||
);
|
);
|
||||||
|
|
||||||
|
Self::inject_agent_capabilities(&mut capabilities);
|
||||||
|
|
||||||
// Placeholder for runtimes (will be detected asynchronously)
|
// Placeholder for runtimes (will be detected asynchronously)
|
||||||
capabilities.insert("runtimes".to_string(), json!(Vec::<String>::new()));
|
capabilities.insert("runtimes".to_string(), json!(Vec::<String>::new()));
|
||||||
|
|
||||||
@@ -351,4 +384,28 @@ mod tests {
|
|||||||
|
|
||||||
registration.deregister().await.unwrap();
|
registration.deregister().await.unwrap();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_inject_agent_capabilities_from_env() {
|
||||||
|
std::env::set_var(ATTUNE_SENSOR_AGENT_MODE_ENV, "1");
|
||||||
|
std::env::set_var(ATTUNE_SENSOR_AGENT_BINARY_NAME_ENV, "attune-sensor-agent");
|
||||||
|
std::env::set_var(ATTUNE_SENSOR_AGENT_BINARY_VERSION_ENV, "1.2.3");
|
||||||
|
|
||||||
|
let mut capabilities = HashMap::new();
|
||||||
|
SensorWorkerRegistration::inject_agent_capabilities(&mut capabilities);
|
||||||
|
|
||||||
|
assert_eq!(capabilities.get("agent_mode"), Some(&json!(true)));
|
||||||
|
assert_eq!(
|
||||||
|
capabilities.get("agent_binary_name"),
|
||||||
|
Some(&json!("attune-sensor-agent"))
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
capabilities.get("agent_binary_version"),
|
||||||
|
Some(&json!("1.2.3"))
|
||||||
|
);
|
||||||
|
|
||||||
|
std::env::remove_var(ATTUNE_SENSOR_AGENT_MODE_ENV);
|
||||||
|
std::env::remove_var(ATTUNE_SENSOR_AGENT_BINARY_NAME_ENV);
|
||||||
|
std::env::remove_var(ATTUNE_SENSOR_AGENT_BINARY_VERSION_ENV);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
119
crates/sensor/src/startup.rs
Normal file
119
crates/sensor/src/startup.rs
Normal file
@@ -0,0 +1,119 @@
|
|||||||
|
use crate::service::SensorService;
|
||||||
|
use anyhow::Result;
|
||||||
|
use attune_common::config::{Config, SensorConfig};
|
||||||
|
use tokio::signal::unix::{signal, SignalKind};
|
||||||
|
use tracing::{error, info};
|
||||||
|
|
||||||
|
pub fn init_tracing(log_level: tracing::Level) {
|
||||||
|
tracing_subscriber::fmt()
|
||||||
|
.with_max_level(log_level)
|
||||||
|
.with_target(false)
|
||||||
|
.with_thread_ids(true)
|
||||||
|
.with_file(true)
|
||||||
|
.with_line_number(true)
|
||||||
|
.init();
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn set_config_path(config_path: Option<&str>) {
|
||||||
|
if let Some(config_path) = config_path {
|
||||||
|
info!("Loading configuration from: {}", config_path);
|
||||||
|
std::env::set_var("ATTUNE_CONFIG", config_path);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn apply_sensor_name_override(config: &mut Config, name: String) {
|
||||||
|
if let Some(ref mut sensor_config) = config.sensor {
|
||||||
|
sensor_config.worker_name = Some(name);
|
||||||
|
} else {
|
||||||
|
config.sensor = Some(SensorConfig {
|
||||||
|
worker_name: Some(name),
|
||||||
|
host: None,
|
||||||
|
capabilities: None,
|
||||||
|
max_concurrent_sensors: None,
|
||||||
|
heartbeat_interval: 30,
|
||||||
|
poll_interval: 30,
|
||||||
|
sensor_timeout: 30,
|
||||||
|
shutdown_timeout: 30,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn log_config_details(config: &Config) {
|
||||||
|
info!("Configuration loaded successfully");
|
||||||
|
info!("Environment: {}", config.environment);
|
||||||
|
info!("Database: {}", mask_connection_string(&config.database.url));
|
||||||
|
if let Some(ref mq_config) = config.message_queue {
|
||||||
|
info!("Message Queue: {}", mask_connection_string(&mq_config.url));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn run_sensor_service(config: Config, ready_message: &str) -> Result<()> {
|
||||||
|
let service = SensorService::new(config).await?;
|
||||||
|
|
||||||
|
info!("Sensor Service initialized successfully");
|
||||||
|
info!("Starting Sensor Service components...");
|
||||||
|
service.start().await?;
|
||||||
|
info!("{}", ready_message);
|
||||||
|
|
||||||
|
let mut sigint = signal(SignalKind::interrupt())?;
|
||||||
|
let mut sigterm = signal(SignalKind::terminate())?;
|
||||||
|
|
||||||
|
tokio::select! {
|
||||||
|
_ = sigint.recv() => {
|
||||||
|
info!("Received SIGINT signal");
|
||||||
|
}
|
||||||
|
_ = sigterm.recv() => {
|
||||||
|
info!("Received SIGTERM signal");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
info!("Shutting down gracefully...");
|
||||||
|
|
||||||
|
if let Err(e) = service.stop().await {
|
||||||
|
error!("Error during shutdown: {}", e);
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Mask sensitive parts of connection strings for logging.
|
||||||
|
pub fn mask_connection_string(url: &str) -> String {
|
||||||
|
if let Some(at_pos) = url.find('@') {
|
||||||
|
if let Some(proto_end) = url.find("://") {
|
||||||
|
let protocol = &url[..proto_end + 3];
|
||||||
|
let host_and_path = &url[at_pos..];
|
||||||
|
return format!("{}***:***{}", protocol, host_and_path);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
"***:***@***".to_string()
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use super::*;
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_mask_connection_string() {
|
||||||
|
let url = "postgresql://user:password@localhost:5432/attune";
|
||||||
|
let masked = mask_connection_string(url);
|
||||||
|
assert!(!masked.contains("user"));
|
||||||
|
assert!(!masked.contains("password"));
|
||||||
|
assert!(masked.contains("@localhost"));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_mask_connection_string_no_credentials() {
|
||||||
|
let url = "postgresql://localhost:5432/attune";
|
||||||
|
let masked = mask_connection_string(url);
|
||||||
|
assert_eq!(masked, "***:***@***");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_mask_rabbitmq_connection() {
|
||||||
|
let url = "amqp://admin:secret@rabbitmq:5672/%2F";
|
||||||
|
let masked = mask_connection_string(url);
|
||||||
|
assert!(!masked.contains("admin"));
|
||||||
|
assert!(!masked.contains("secret"));
|
||||||
|
assert!(masked.contains("@rabbitmq"));
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -10,6 +10,10 @@ repository.workspace = true
|
|||||||
name = "attune-worker"
|
name = "attune-worker"
|
||||||
path = "src/main.rs"
|
path = "src/main.rs"
|
||||||
|
|
||||||
|
[[bin]]
|
||||||
|
name = "attune-agent"
|
||||||
|
path = "src/agent_main.rs"
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
attune-common = { path = "../common" }
|
attune-common = { path = "../common" }
|
||||||
tokio = { workspace = true }
|
tokio = { workspace = true }
|
||||||
|
|||||||
220
crates/worker/src/agent_main.rs
Normal file
220
crates/worker/src/agent_main.rs
Normal file
@@ -0,0 +1,220 @@
|
|||||||
|
//! Attune Universal Worker Agent
|
||||||
|
//!
|
||||||
|
//! This is the entrypoint for the universal worker agent binary (`attune-agent`).
|
||||||
|
//! Unlike the standard `attune-worker` binary which requires explicit runtime
|
||||||
|
//! configuration, the agent automatically detects available interpreters in the
|
||||||
|
//! container environment and configures itself accordingly.
|
||||||
|
//!
|
||||||
|
//! ## Usage
|
||||||
|
//!
|
||||||
|
//! The agent is designed to be injected into any container image. On startup it:
|
||||||
|
//!
|
||||||
|
//! 1. Probes the system for available interpreters (python3, node, bash, etc.)
|
||||||
|
//! 2. Sets `ATTUNE_WORKER_RUNTIMES` based on what it finds
|
||||||
|
//! 3. Loads configuration (env vars are the primary config source)
|
||||||
|
//! 4. Initializes and runs the standard `WorkerService`
|
||||||
|
//!
|
||||||
|
//! ## Configuration
|
||||||
|
//!
|
||||||
|
//! Environment variables (primary):
|
||||||
|
//! - `ATTUNE__DATABASE__URL` — PostgreSQL connection string
|
||||||
|
//! - `ATTUNE__MESSAGE_QUEUE__URL` — RabbitMQ connection string
|
||||||
|
//! - `ATTUNE_WORKER_RUNTIMES` — Override auto-detection with explicit runtime list
|
||||||
|
//! - `ATTUNE_CONFIG` — Path to optional config YAML file
|
||||||
|
//!
|
||||||
|
//! CLI arguments:
|
||||||
|
//! - `--config` / `-c` — Path to configuration file (optional)
|
||||||
|
//! - `--name` / `-n` — Worker name override
|
||||||
|
//! - `--detect-only` — Run runtime detection, print results, and exit
|
||||||
|
|
||||||
|
use anyhow::Result;
|
||||||
|
use attune_common::agent_bootstrap::{bootstrap_runtime_env, print_detect_only_report};
|
||||||
|
use attune_common::config::Config;
|
||||||
|
use clap::Parser;
|
||||||
|
use tokio::signal::unix::{signal, SignalKind};
|
||||||
|
use tracing::{info, warn};
|
||||||
|
|
||||||
|
use attune_worker::dynamic_runtime::auto_register_detected_runtimes;
|
||||||
|
use attune_worker::runtime_detect::DetectedRuntime;
|
||||||
|
use attune_worker::service::WorkerService;
|
||||||
|
|
||||||
|
#[derive(Parser, Debug)]
|
||||||
|
#[command(name = "attune-agent")]
|
||||||
|
#[command(
|
||||||
|
version,
|
||||||
|
about = "Attune Universal Worker Agent - Injected into any container to auto-detect and execute actions",
|
||||||
|
long_about = "The Attune Agent automatically discovers available runtime interpreters \
|
||||||
|
in the current environment and registers as a worker capable of executing \
|
||||||
|
actions for those runtimes. It is designed to be injected into arbitrary \
|
||||||
|
container images without requiring manual runtime configuration."
|
||||||
|
)]
|
||||||
|
struct Args {
|
||||||
|
/// Path to configuration file (optional — env vars are the primary config source)
|
||||||
|
#[arg(short, long)]
|
||||||
|
config: Option<String>,
|
||||||
|
|
||||||
|
/// Worker name (overrides config and auto-generated name)
|
||||||
|
#[arg(short, long)]
|
||||||
|
name: Option<String>,
|
||||||
|
|
||||||
|
/// Run runtime detection, print results, and exit without starting the worker
|
||||||
|
#[arg(long)]
|
||||||
|
detect_only: bool,
|
||||||
|
}
|
||||||
|
|
||||||
|
fn main() -> Result<()> {
|
||||||
|
// Install HMAC-only JWT crypto provider (must be before any token operations)
|
||||||
|
attune_common::auth::install_crypto_provider();
|
||||||
|
|
||||||
|
// Initialize tracing
|
||||||
|
tracing_subscriber::fmt()
|
||||||
|
.with_target(false)
|
||||||
|
.with_thread_ids(true)
|
||||||
|
.init();
|
||||||
|
|
||||||
|
let args = Args::parse();
|
||||||
|
|
||||||
|
info!("Starting Attune Universal Worker Agent");
|
||||||
|
info!("Agent binary: attune-agent {}", env!("CARGO_PKG_VERSION"));
|
||||||
|
|
||||||
|
// Safe: no async runtime or worker threads are running yet.
|
||||||
|
std::env::set_var("ATTUNE_AGENT_MODE", "true");
|
||||||
|
std::env::set_var("ATTUNE_AGENT_BINARY_NAME", "attune-agent");
|
||||||
|
std::env::set_var("ATTUNE_AGENT_BINARY_VERSION", env!("CARGO_PKG_VERSION"));
|
||||||
|
|
||||||
|
let bootstrap = bootstrap_runtime_env("ATTUNE_WORKER_RUNTIMES");
|
||||||
|
let agent_detected_runtimes = bootstrap.detected_runtimes.clone();
|
||||||
|
|
||||||
|
// --- Handle --detect-only (synchronous, no async runtime needed) ---
|
||||||
|
if args.detect_only {
|
||||||
|
print_detect_only_report("ATTUNE_WORKER_RUNTIMES", &bootstrap);
|
||||||
|
return Ok(());
|
||||||
|
}
|
||||||
|
|
||||||
|
// --- Set config path env var (synchronous, before tokio runtime) ---
|
||||||
|
if let Some(ref config_path) = args.config {
|
||||||
|
// Safe: no other threads are running yet (tokio runtime not started).
|
||||||
|
std::env::set_var("ATTUNE_CONFIG", config_path);
|
||||||
|
}
|
||||||
|
|
||||||
|
// --- Build the tokio runtime and run the async portion ---
|
||||||
|
let runtime = tokio::runtime::Runtime::new()?;
|
||||||
|
runtime.block_on(async_main(args, agent_detected_runtimes))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// The async portion of the agent entrypoint. Called from `main()` via
|
||||||
|
/// `runtime.block_on()` after all environment variable mutations are complete.
|
||||||
|
async fn async_main(
|
||||||
|
args: Args,
|
||||||
|
agent_detected_runtimes: Option<Vec<DetectedRuntime>>,
|
||||||
|
) -> Result<()> {
|
||||||
|
// --- Phase 2: Load configuration ---
|
||||||
|
let mut config = Config::load()?;
|
||||||
|
config.validate()?;
|
||||||
|
|
||||||
|
// Override worker name if provided via CLI
|
||||||
|
if let Some(name) = args.name {
|
||||||
|
if let Some(ref mut worker_config) = config.worker {
|
||||||
|
worker_config.name = Some(name);
|
||||||
|
} else {
|
||||||
|
config.worker = Some(attune_common::config::WorkerConfig {
|
||||||
|
name: Some(name),
|
||||||
|
worker_type: None,
|
||||||
|
runtime_id: None,
|
||||||
|
host: None,
|
||||||
|
port: None,
|
||||||
|
capabilities: None,
|
||||||
|
max_concurrent_tasks: 10,
|
||||||
|
heartbeat_interval: 30,
|
||||||
|
task_timeout: 300,
|
||||||
|
max_stdout_bytes: 10 * 1024 * 1024,
|
||||||
|
max_stderr_bytes: 10 * 1024 * 1024,
|
||||||
|
shutdown_timeout: Some(30),
|
||||||
|
stream_logs: true,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
info!("Configuration loaded successfully");
|
||||||
|
info!("Environment: {}", config.environment);
|
||||||
|
|
||||||
|
// --- Phase 2b: Dynamic runtime registration ---
|
||||||
|
//
|
||||||
|
// Before creating the WorkerService (which loads runtimes from the DB into
|
||||||
|
// its runtime registry), ensure that every detected runtime has a
|
||||||
|
// corresponding entry in the database. This handles the case where the
|
||||||
|
// agent detects a runtime (e.g., Ruby) that has a template in the core
|
||||||
|
// pack but hasn't been explicitly loaded by this agent before.
|
||||||
|
if let Some(ref detected) = agent_detected_runtimes {
|
||||||
|
info!(
|
||||||
|
"Ensuring {} detected runtime(s) are registered in the database...",
|
||||||
|
detected.len()
|
||||||
|
);
|
||||||
|
|
||||||
|
// We need a temporary DB connection for dynamic registration.
|
||||||
|
// WorkerService::new() will create its own connection, so this is
|
||||||
|
// a short-lived pool just for the registration step.
|
||||||
|
let db = attune_common::db::Database::new(&config.database).await?;
|
||||||
|
let pool = db.pool().clone();
|
||||||
|
|
||||||
|
match auto_register_detected_runtimes(&pool, detected).await {
|
||||||
|
Ok(count) => {
|
||||||
|
if count > 0 {
|
||||||
|
info!(
|
||||||
|
"Dynamic registration complete: {} new runtime(s) added to database",
|
||||||
|
count
|
||||||
|
);
|
||||||
|
} else {
|
||||||
|
info!("Dynamic registration: all detected runtimes already in database");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Err(e) => {
|
||||||
|
warn!(
|
||||||
|
"Dynamic runtime registration failed (non-fatal, continuing): {}",
|
||||||
|
e
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// --- Phase 3: Initialize and run the worker service ---
|
||||||
|
let service = WorkerService::new(config).await?;
|
||||||
|
|
||||||
|
// If we auto-detected runtimes, pass them to the worker service so that
|
||||||
|
// registration includes the full `detected_interpreters` capability
|
||||||
|
// (binary paths + versions) and the `agent_mode` flag.
|
||||||
|
let mut service = if let Some(detected) = agent_detected_runtimes {
|
||||||
|
info!(
|
||||||
|
"Passing {} detected runtime(s) to worker registration",
|
||||||
|
detected.len()
|
||||||
|
);
|
||||||
|
service.with_detected_runtimes(detected)
|
||||||
|
} else {
|
||||||
|
service
|
||||||
|
};
|
||||||
|
|
||||||
|
info!("Attune Agent is ready");
|
||||||
|
|
||||||
|
service.start().await?;
|
||||||
|
|
||||||
|
// Setup signal handlers for graceful shutdown
|
||||||
|
let mut sigint = signal(SignalKind::interrupt())?;
|
||||||
|
let mut sigterm = signal(SignalKind::terminate())?;
|
||||||
|
|
||||||
|
tokio::select! {
|
||||||
|
_ = sigint.recv() => {
|
||||||
|
info!("Received SIGINT signal");
|
||||||
|
}
|
||||||
|
_ = sigterm.recv() => {
|
||||||
|
info!("Received SIGTERM signal");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
info!("Shutting down gracefully...");
|
||||||
|
|
||||||
|
service.stop().await?;
|
||||||
|
|
||||||
|
info!("Attune Agent shutdown complete");
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
541
crates/worker/src/dynamic_runtime.rs
Normal file
541
crates/worker/src/dynamic_runtime.rs
Normal file
@@ -0,0 +1,541 @@
|
|||||||
|
//! Dynamic Runtime Registration Module
|
||||||
|
//!
|
||||||
|
//! When the agent detects an interpreter on the local system (e.g., Ruby, Go, Perl)
|
||||||
|
//! that does not yet have a corresponding runtime entry in the database, this module
|
||||||
|
//! handles auto-registering it so that the normal runtime-loading pipeline in
|
||||||
|
//! `WorkerService::new()` picks it up.
|
||||||
|
//!
|
||||||
|
//! ## Registration Strategy
|
||||||
|
//!
|
||||||
|
//! For each detected runtime the agent found:
|
||||||
|
//!
|
||||||
|
//! 1. **Look up by name** in the database using alias-aware matching.
|
||||||
|
//! 2. **If found** → already registered (either from a pack YAML or a previous
|
||||||
|
//! agent run). Nothing to do.
|
||||||
|
//! 3. **If not found** → search for a runtime *template* in loaded packs whose
|
||||||
|
//! normalized name matches. Templates are pack-registered runtimes (e.g.,
|
||||||
|
//! `core.ruby`) that provide the full `execution_config` needed to invoke
|
||||||
|
//! the interpreter, manage environments, and install dependencies.
|
||||||
|
//! 4. **If a template is found** → clone it as an auto-detected runtime with
|
||||||
|
//! `auto_detected = true` and populate `detection_config` with what the
|
||||||
|
//! agent discovered (binary path, version, etc.).
|
||||||
|
//! 5. **If no template exists** → create a minimal runtime with just the
|
||||||
|
//! detected interpreter binary path and file extension. This lets the agent
|
||||||
|
//! execute simple scripts immediately, even without a full template.
|
||||||
|
//! 6. Mark all auto-registered runtimes with `auto_detected = true`.
|
||||||
|
|
||||||
|
use attune_common::error::Result;
|
||||||
|
use attune_common::models::runtime::Runtime;
|
||||||
|
use attune_common::repositories::runtime::{CreateRuntimeInput, RuntimeRepository};
|
||||||
|
use attune_common::repositories::{Create, FindByRef, List};
|
||||||
|
|
||||||
|
use serde_json::json;
|
||||||
|
use sqlx::PgPool;
|
||||||
|
use tracing::{debug, info, warn};
|
||||||
|
|
||||||
|
use crate::runtime_detect::DetectedRuntime;
|
||||||
|
|
||||||
|
/// Canonical file extension for each runtime name this module knows about,
/// or `None` for an unrecognized runtime. Used when creating minimal runtime
/// entries that have no pack template to copy the extension from.
///
/// Matching is exact (lowercase canonical names only).
fn default_file_extension(runtime_name: &str) -> Option<&'static str> {
    // Lookup table of (canonical runtime name, file extension).
    const EXTENSIONS: [(&str, &str); 8] = [
        ("shell", ".sh"),
        ("python", ".py"),
        ("node", ".js"),
        ("ruby", ".rb"),
        ("go", ".go"),
        ("java", ".java"),
        ("perl", ".pl"),
        ("r", ".R"),
    ];
    EXTENSIONS
        .iter()
        .find(|(name, _)| *name == runtime_name)
        .map(|(_, ext)| *ext)
}
|
||||||
|
|
||||||
|
/// Ensure that every detected runtime has a corresponding entry in the
|
||||||
|
/// `runtime` table. Runtimes that already exist (from pack loading or a
|
||||||
|
/// previous agent run) are left untouched. Missing runtimes are created
|
||||||
|
/// either from a matching pack template or as a minimal auto-detected entry.
|
||||||
|
///
|
||||||
|
/// This function should be called **before** `WorkerService::new()` so that
|
||||||
|
/// the normal runtime-loading pipeline finds all detected runtimes in the DB.
|
||||||
|
///
|
||||||
|
/// Returns the number of runtimes that were newly registered.
|
||||||
|
pub async fn auto_register_detected_runtimes(
|
||||||
|
pool: &PgPool,
|
||||||
|
detected: &[DetectedRuntime],
|
||||||
|
) -> Result<usize> {
|
||||||
|
if detected.is_empty() {
|
||||||
|
return Ok(0);
|
||||||
|
}
|
||||||
|
|
||||||
|
info!(
|
||||||
|
"Checking {} detected runtime(s) for dynamic registration...",
|
||||||
|
detected.len()
|
||||||
|
);
|
||||||
|
|
||||||
|
// Load all existing runtimes once to avoid repeated queries.
|
||||||
|
let existing_runtimes = RuntimeRepository::list(pool).await.unwrap_or_default();
|
||||||
|
|
||||||
|
let mut registered_count = 0;
|
||||||
|
|
||||||
|
for detected_rt in detected {
|
||||||
|
let canonical_name = detected_rt.name.to_ascii_lowercase();
|
||||||
|
|
||||||
|
// Check if a runtime with a matching name already exists in the DB.
|
||||||
|
// Primary: check if the detected name appears in any existing runtime's aliases.
|
||||||
|
// Secondary: check if the ref ends with the canonical name (e.g., "core.ruby").
|
||||||
|
let already_exists = existing_runtimes.iter().any(|r| {
|
||||||
|
// Primary: check if the detected name is in this runtime's aliases
|
||||||
|
r.aliases.iter().any(|a| a == &canonical_name)
|
||||||
|
// Secondary: check if the ref ends with the canonical name (e.g., "core.ruby")
|
||||||
|
|| r.r#ref.ends_with(&format!(".{}", canonical_name))
|
||||||
|
});
|
||||||
|
|
||||||
|
if already_exists {
|
||||||
|
debug!(
|
||||||
|
"Runtime '{}' (canonical: '{}') already exists in database, skipping",
|
||||||
|
detected_rt.name, canonical_name
|
||||||
|
);
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
// No existing runtime — try to find a template from loaded packs.
|
||||||
|
// Templates are pack-registered runtimes whose normalized name matches
|
||||||
|
// (e.g., `core.ruby` for detected runtime "ruby"). Since we already
|
||||||
|
// checked `existing_runtimes` above and found nothing, we look for
|
||||||
|
// runtimes by ref pattern. Common convention: `core.<name>`.
|
||||||
|
let template_ref = format!("core.{}", canonical_name);
|
||||||
|
let template = RuntimeRepository::find_by_ref(pool, &template_ref)
|
||||||
|
.await
|
||||||
|
.unwrap_or(None);
|
||||||
|
|
||||||
|
let detection_config = build_detection_config(detected_rt);
|
||||||
|
|
||||||
|
if let Some(tmpl) = template {
|
||||||
|
// Clone the template as an auto-detected runtime.
|
||||||
|
// The template already has the full execution_config, distributions, etc.
|
||||||
|
// We just re-create it with auto_detected = true.
|
||||||
|
info!(
|
||||||
|
"Found template '{}' for detected runtime '{}', registering auto-detected clone",
|
||||||
|
tmpl.r#ref, detected_rt.name
|
||||||
|
);
|
||||||
|
|
||||||
|
// Use a distinct ref so we don't collide with the template.
|
||||||
|
let auto_ref = format!("auto.{}", canonical_name);
|
||||||
|
|
||||||
|
// Check if the auto ref already exists (race condition / previous run)
|
||||||
|
if RuntimeRepository::find_by_ref(pool, &auto_ref)
|
||||||
|
.await
|
||||||
|
.unwrap_or(None)
|
||||||
|
.is_some()
|
||||||
|
{
|
||||||
|
debug!(
|
||||||
|
"Auto-detected runtime '{}' already registered from a previous run",
|
||||||
|
auto_ref
|
||||||
|
);
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
let input = CreateRuntimeInput {
|
||||||
|
r#ref: auto_ref.clone(),
|
||||||
|
pack: tmpl.pack,
|
||||||
|
pack_ref: tmpl.pack_ref.clone(),
|
||||||
|
description: Some(format!(
|
||||||
|
"Auto-detected {} runtime (from template {})",
|
||||||
|
detected_rt.name, tmpl.r#ref
|
||||||
|
)),
|
||||||
|
name: tmpl.name.clone(),
|
||||||
|
aliases: tmpl.aliases.clone(),
|
||||||
|
distributions: tmpl.distributions.clone(),
|
||||||
|
installation: tmpl.installation.clone(),
|
||||||
|
execution_config: build_execution_config_from_template(&tmpl, detected_rt),
|
||||||
|
auto_detected: true,
|
||||||
|
detection_config,
|
||||||
|
};
|
||||||
|
|
||||||
|
match RuntimeRepository::create(pool, input).await {
|
||||||
|
Ok(rt) => {
|
||||||
|
info!(
|
||||||
|
"Auto-registered runtime '{}' (ID: {}) from template '{}'",
|
||||||
|
auto_ref, rt.id, tmpl.r#ref
|
||||||
|
);
|
||||||
|
registered_count += 1;
|
||||||
|
}
|
||||||
|
Err(e) => {
|
||||||
|
// Unique constraint violation is fine (concurrent agent start)
|
||||||
|
warn!("Failed to auto-register runtime '{}': {}", auto_ref, e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// No template found — create a minimal runtime entry.
|
||||||
|
info!(
|
||||||
|
"No template found for detected runtime '{}', creating minimal entry",
|
||||||
|
detected_rt.name
|
||||||
|
);
|
||||||
|
|
||||||
|
let auto_ref = format!("auto.{}", canonical_name);
|
||||||
|
|
||||||
|
if RuntimeRepository::find_by_ref(pool, &auto_ref)
|
||||||
|
.await
|
||||||
|
.unwrap_or(None)
|
||||||
|
.is_some()
|
||||||
|
{
|
||||||
|
debug!(
|
||||||
|
"Auto-detected runtime '{}' already registered from a previous run",
|
||||||
|
auto_ref
|
||||||
|
);
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
let execution_config = build_minimal_execution_config(detected_rt);
|
||||||
|
|
||||||
|
let input = CreateRuntimeInput {
|
||||||
|
r#ref: auto_ref.clone(),
|
||||||
|
pack: None,
|
||||||
|
pack_ref: None,
|
||||||
|
description: Some(format!(
|
||||||
|
"Auto-detected {} runtime at {}",
|
||||||
|
detected_rt.name, detected_rt.path
|
||||||
|
)),
|
||||||
|
name: capitalize_runtime_name(&canonical_name),
|
||||||
|
aliases: default_aliases(&canonical_name),
|
||||||
|
distributions: build_minimal_distributions(detected_rt),
|
||||||
|
installation: None,
|
||||||
|
execution_config,
|
||||||
|
auto_detected: true,
|
||||||
|
detection_config,
|
||||||
|
};
|
||||||
|
|
||||||
|
match RuntimeRepository::create(pool, input).await {
|
||||||
|
Ok(rt) => {
|
||||||
|
info!(
|
||||||
|
"Auto-registered minimal runtime '{}' (ID: {})",
|
||||||
|
auto_ref, rt.id
|
||||||
|
);
|
||||||
|
registered_count += 1;
|
||||||
|
}
|
||||||
|
Err(e) => {
|
||||||
|
warn!("Failed to auto-register runtime '{}': {}", auto_ref, e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if registered_count > 0 {
|
||||||
|
info!(
|
||||||
|
"Dynamic runtime registration complete: {} new runtime(s) registered",
|
||||||
|
registered_count
|
||||||
|
);
|
||||||
|
} else {
|
||||||
|
info!("Dynamic runtime registration complete: all detected runtimes already in database");
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(registered_count)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Build the `detection_config` JSONB value from a detected runtime.
|
||||||
|
/// This metadata records how the agent discovered this runtime, enabling
|
||||||
|
/// re-verification and diagnostics.
|
||||||
|
fn build_detection_config(detected: &DetectedRuntime) -> serde_json::Value {
|
||||||
|
let mut config = json!({
|
||||||
|
"detected_path": detected.path,
|
||||||
|
"detected_name": detected.name,
|
||||||
|
});
|
||||||
|
|
||||||
|
if let Some(ref version) = detected.version {
|
||||||
|
config["detected_version"] = json!(version);
|
||||||
|
}
|
||||||
|
|
||||||
|
config
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Build an execution config based on a template runtime, but with the
|
||||||
|
/// detected interpreter path substituted in. This ensures the auto-detected
|
||||||
|
/// runtime uses the actual binary path found on the system.
|
||||||
|
fn build_execution_config_from_template(
|
||||||
|
template: &Runtime,
|
||||||
|
detected: &DetectedRuntime,
|
||||||
|
) -> serde_json::Value {
|
||||||
|
let mut config = template.execution_config.clone();
|
||||||
|
|
||||||
|
// If the template has an interpreter config, update the binary path
|
||||||
|
// to the one we actually detected on this system.
|
||||||
|
if let Some(interpreter) = config.get_mut("interpreter") {
|
||||||
|
if let Some(obj) = interpreter.as_object_mut() {
|
||||||
|
obj.insert("binary".to_string(), json!(detected.path));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// If the template has an environment config with an interpreter_path
|
||||||
|
// that uses a template variable, leave it as-is (it will be resolved
|
||||||
|
// at execution time). But if it's a hardcoded absolute path, update it.
|
||||||
|
if let Some(env) = config.get_mut("environment") {
|
||||||
|
if let Some(obj) = env.as_object_mut() {
|
||||||
|
if let Some(interp_path) = obj.get("interpreter_path") {
|
||||||
|
if let Some(path_str) = interp_path.as_str() {
|
||||||
|
// Only leave template variables alone
|
||||||
|
if !path_str.contains('{') {
|
||||||
|
obj.insert("interpreter_path".to_string(), json!(detected.path));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
config
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Build a minimal execution config for a runtime with no template.
|
||||||
|
/// This provides enough information for `ProcessRuntime` to invoke the
|
||||||
|
/// interpreter directly, without environment or dependency management.
|
||||||
|
fn build_minimal_execution_config(detected: &DetectedRuntime) -> serde_json::Value {
|
||||||
|
let canonical = detected.name.to_ascii_lowercase();
|
||||||
|
let file_ext = default_file_extension(&canonical);
|
||||||
|
|
||||||
|
let mut config = json!({
|
||||||
|
"interpreter": {
|
||||||
|
"binary": detected.path,
|
||||||
|
"args": [],
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
if let Some(ext) = file_ext {
|
||||||
|
config["interpreter"]["file_extension"] = json!(ext);
|
||||||
|
}
|
||||||
|
|
||||||
|
config
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Build minimal distributions metadata for a runtime with no template.
|
||||||
|
/// Includes a basic verification command using the detected binary path.
|
||||||
|
fn build_minimal_distributions(detected: &DetectedRuntime) -> serde_json::Value {
|
||||||
|
json!({
|
||||||
|
"verification": {
|
||||||
|
"commands": [
|
||||||
|
{
|
||||||
|
"binary": &detected.path,
|
||||||
|
"args": ["--version"],
|
||||||
|
"exit_code": 0,
|
||||||
|
"priority": 1
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Default aliases for auto-detected runtimes that have no template.
/// Mirrors the alias lists the core pack YAMLs declare, serving as a
/// fallback when the template hasn't been loaded. Unknown names get a
/// single-element list containing just the canonical name itself.
fn default_aliases(canonical_name: &str) -> Vec<String> {
    let known: &[&str] = match canonical_name {
        "shell" => &["shell", "bash", "sh"],
        "python" => &["python", "python3"],
        "node" => &["node", "nodejs", "node.js"],
        "ruby" => &["ruby", "rb"],
        "go" => &["go", "golang"],
        "java" => &["java", "jdk", "openjdk"],
        "perl" => &["perl", "perl5"],
        "r" => &["r", "rscript"],
        _ => return vec![canonical_name.to_string()],
    };
    known.iter().map(|alias| alias.to_string()).collect()
}
|
||||||
|
|
||||||
|
/// Capitalize a runtime name for display (e.g., "ruby" → "Ruby", "r" → "R").
/// Only the first character is uppercased (Unicode-aware, may expand to
/// multiple chars); the remainder is passed through unchanged. Empty input
/// yields an empty string.
fn capitalize_runtime_name(name: &str) -> String {
    let mut remainder = name.chars();
    match remainder.next() {
        Some(first) => first.to_uppercase().chain(remainder).collect(),
        None => String::new(),
    }
}
|
||||||
|
|
||||||
|
#[cfg(test)]
mod tests {
    use super::*;

    /// Convenience constructor for `DetectedRuntime` fixtures.
    fn detected(name: &str, path: &str, version: Option<&str>) -> DetectedRuntime {
        DetectedRuntime {
            name: name.to_string(),
            path: path.to_string(),
            version: version.map(str::to_string),
        }
    }

    /// Convenience constructor for template `Runtime` rows used by the
    /// `build_execution_config_from_template` tests.
    fn template(
        r#ref: &str,
        name: &str,
        aliases: &[&str],
        description: Option<&str>,
        execution_config: serde_json::Value,
    ) -> Runtime {
        Runtime {
            id: 1,
            r#ref: r#ref.to_string(),
            pack: Some(1),
            pack_ref: Some("core".to_string()),
            description: description.map(str::to_string),
            name: name.to_string(),
            aliases: aliases.iter().map(|a| a.to_string()).collect(),
            distributions: json!({}),
            installation: None,
            installers: json!({}),
            execution_config,
            auto_detected: false,
            detection_config: json!({}),
            created: chrono::Utc::now(),
            updated: chrono::Utc::now(),
        }
    }

    #[test]
    fn test_default_file_extension() {
        let cases = [
            ("shell", Some(".sh")),
            ("python", Some(".py")),
            ("node", Some(".js")),
            ("ruby", Some(".rb")),
            ("go", Some(".go")),
            ("java", Some(".java")),
            ("perl", Some(".pl")),
            ("r", Some(".R")),
            ("unknown", None),
        ];
        for &(name, expected) in cases.iter() {
            assert_eq!(default_file_extension(name), expected);
        }
    }

    #[test]
    fn test_capitalize_runtime_name() {
        let cases = [
            ("ruby", "Ruby"),
            ("go", "Go"),
            ("r", "R"),
            ("perl", "Perl"),
            ("java", "Java"),
            ("", ""),
        ];
        for &(input, expected) in cases.iter() {
            assert_eq!(capitalize_runtime_name(input), expected);
        }
    }

    #[test]
    fn test_build_detection_config_with_version() {
        let config = build_detection_config(&detected("ruby", "/usr/bin/ruby", Some("3.3.0")));
        assert_eq!(config["detected_path"], "/usr/bin/ruby");
        assert_eq!(config["detected_name"], "ruby");
        assert_eq!(config["detected_version"], "3.3.0");
    }

    #[test]
    fn test_build_detection_config_without_version() {
        let config = build_detection_config(&detected("perl", "/usr/bin/perl", None));
        assert_eq!(config["detected_path"], "/usr/bin/perl");
        assert_eq!(config["detected_name"], "perl");
        // No version detected → the key must be absent, not null.
        assert!(config.get("detected_version").is_none());
    }

    #[test]
    fn test_build_minimal_execution_config() {
        let config =
            build_minimal_execution_config(&detected("ruby", "/usr/bin/ruby", Some("3.3.0")));
        assert_eq!(config["interpreter"]["binary"], "/usr/bin/ruby");
        assert_eq!(config["interpreter"]["file_extension"], ".rb");
        assert_eq!(config["interpreter"]["args"], json!([]));
    }

    #[test]
    fn test_build_minimal_execution_config_unknown_runtime() {
        let config =
            build_minimal_execution_config(&detected("custom", "/opt/custom/bin/custom", None));
        assert_eq!(config["interpreter"]["binary"], "/opt/custom/bin/custom");
        // Unknown runtime has no file extension.
        assert!(config["interpreter"].get("file_extension").is_none());
    }

    #[test]
    fn test_build_minimal_distributions() {
        let distros = build_minimal_distributions(&detected("ruby", "/usr/bin/ruby", Some("3.3.0")));
        let commands = distros["verification"]["commands"].as_array().unwrap();
        assert_eq!(commands.len(), 1);
        assert_eq!(commands[0]["binary"], "/usr/bin/ruby");
    }

    #[test]
    fn test_build_execution_config_from_template_updates_binary() {
        let tmpl = template(
            "core.ruby",
            "Ruby",
            &["ruby", "rb"],
            Some("Ruby Runtime"),
            json!({
                "interpreter": {
                    "binary": "ruby",
                    "args": [],
                    "file_extension": ".rb"
                },
                "env_vars": {
                    "GEM_HOME": "{env_dir}/gems"
                }
            }),
        );

        let config = build_execution_config_from_template(
            &tmpl,
            &detected("ruby", "/usr/local/bin/ruby3.3", Some("3.3.0")),
        );

        // Binary should be updated to the detected path...
        assert_eq!(config["interpreter"]["binary"], "/usr/local/bin/ruby3.3");
        // ...while every other field is preserved from the template.
        assert_eq!(config["interpreter"]["file_extension"], ".rb");
        assert_eq!(config["env_vars"]["GEM_HOME"], "{env_dir}/gems");
    }

    #[test]
    fn test_build_execution_config_from_template_preserves_template_vars() {
        let tmpl = template(
            "core.python",
            "Python",
            &["python", "python3"],
            None,
            json!({
                "interpreter": {
                    "binary": "python3",
                    "file_extension": ".py"
                },
                "environment": {
                    "interpreter_path": "{env_dir}/bin/python3",
                    "create_command": ["python3", "-m", "venv", "{env_dir}"]
                }
            }),
        );

        let config = build_execution_config_from_template(
            &tmpl,
            &detected("python", "/usr/bin/python3.12", Some("3.12.1")),
        );

        // Binary should be updated.
        assert_eq!(config["interpreter"]["binary"], "/usr/bin/python3.12");
        // Template-variable interpreter_path must survive untouched (contains '{').
        assert_eq!(
            config["environment"]["interpreter_path"],
            "{env_dir}/bin/python3"
        );
    }
}
|
||||||
@@ -35,7 +35,7 @@ use attune_common::repositories::pack::PackRepository;
|
|||||||
use attune_common::repositories::runtime::RuntimeRepository;
|
use attune_common::repositories::runtime::RuntimeRepository;
|
||||||
use attune_common::repositories::runtime_version::RuntimeVersionRepository;
|
use attune_common::repositories::runtime_version::RuntimeVersionRepository;
|
||||||
use attune_common::repositories::{FindById, List};
|
use attune_common::repositories::{FindById, List};
|
||||||
use attune_common::runtime_detection::runtime_in_filter;
|
use attune_common::runtime_detection::runtime_aliases_match_filter;
|
||||||
|
|
||||||
// Re-export the utility that the API also uses so callers can reach it from
|
// Re-export the utility that the API also uses so callers can reach it from
|
||||||
// either crate without adding a direct common dependency for this one function.
|
// either crate without adding a direct common dependency for this one function.
|
||||||
@@ -207,7 +207,7 @@ pub async fn setup_environments_for_registered_pack(
|
|||||||
.iter()
|
.iter()
|
||||||
.filter(|name| {
|
.filter(|name| {
|
||||||
if let Some(filter) = runtime_filter {
|
if let Some(filter) = runtime_filter {
|
||||||
runtime_in_filter(name, filter)
|
runtime_aliases_match_filter(&[name.to_string()], filter)
|
||||||
} else {
|
} else {
|
||||||
true
|
true
|
||||||
}
|
}
|
||||||
@@ -463,12 +463,12 @@ async fn process_runtime_for_pack(
|
|||||||
runtime_envs_dir: &Path,
|
runtime_envs_dir: &Path,
|
||||||
pack_result: &mut PackEnvSetupResult,
|
pack_result: &mut PackEnvSetupResult,
|
||||||
) {
|
) {
|
||||||
// Apply worker runtime filter (alias-aware matching)
|
// Apply worker runtime filter (alias-aware matching via declared aliases)
|
||||||
if let Some(filter) = runtime_filter {
|
if let Some(filter) = runtime_filter {
|
||||||
if !runtime_in_filter(rt_name, filter) {
|
if !runtime_aliases_match_filter(&rt.aliases, filter) {
|
||||||
debug!(
|
debug!(
|
||||||
"Runtime '{}' not in worker filter, skipping for pack '{}'",
|
"Runtime '{}' not in worker filter (aliases: {:?}), skipping for pack '{}'",
|
||||||
rt_name, pack_ref,
|
rt_name, rt.aliases, pack_ref,
|
||||||
);
|
);
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -19,6 +19,7 @@ use attune_common::models::runtime::RuntimeExecutionConfig;
|
|||||||
use attune_common::models::{runtime::Runtime as RuntimeModel, Action, Execution, ExecutionStatus};
|
use attune_common::models::{runtime::Runtime as RuntimeModel, Action, Execution, ExecutionStatus};
|
||||||
use attune_common::repositories::artifact::{ArtifactRepository, ArtifactVersionRepository};
|
use attune_common::repositories::artifact::{ArtifactRepository, ArtifactVersionRepository};
|
||||||
use attune_common::repositories::execution::{ExecutionRepository, UpdateExecutionInput};
|
use attune_common::repositories::execution::{ExecutionRepository, UpdateExecutionInput};
|
||||||
|
use attune_common::repositories::runtime::SELECT_COLUMNS as RUNTIME_SELECT_COLUMNS;
|
||||||
use attune_common::repositories::runtime_version::RuntimeVersionRepository;
|
use attune_common::repositories::runtime_version::RuntimeVersionRepository;
|
||||||
use attune_common::repositories::{FindById, Update};
|
use attune_common::repositories::{FindById, Update};
|
||||||
use attune_common::version_matching::select_best_version;
|
use attune_common::version_matching::select_best_version;
|
||||||
@@ -410,12 +411,11 @@ impl ActionExecutor {
|
|||||||
|
|
||||||
// Load runtime information if specified
|
// Load runtime information if specified
|
||||||
let runtime_record = if let Some(runtime_id) = action.runtime {
|
let runtime_record = if let Some(runtime_id) = action.runtime {
|
||||||
match sqlx::query_as::<_, RuntimeModel>(
|
let query = format!(
|
||||||
r#"SELECT id, ref, pack, pack_ref, description, name,
|
"SELECT {} FROM runtime WHERE id = $1",
|
||||||
distributions, installation, installers, execution_config,
|
RUNTIME_SELECT_COLUMNS
|
||||||
created, updated
|
);
|
||||||
FROM runtime WHERE id = $1"#,
|
match sqlx::query_as::<_, RuntimeModel>(&query)
|
||||||
)
|
|
||||||
.bind(runtime_id)
|
.bind(runtime_id)
|
||||||
.fetch_optional(&self.pool)
|
.fetch_optional(&self.pool)
|
||||||
.await
|
.await
|
||||||
|
|||||||
@@ -4,16 +4,19 @@
|
|||||||
//! which executes actions in various runtime environments.
|
//! which executes actions in various runtime environments.
|
||||||
|
|
||||||
pub mod artifacts;
|
pub mod artifacts;
|
||||||
|
pub mod dynamic_runtime;
|
||||||
pub mod env_setup;
|
pub mod env_setup;
|
||||||
pub mod executor;
|
pub mod executor;
|
||||||
pub mod heartbeat;
|
pub mod heartbeat;
|
||||||
pub mod registration;
|
pub mod registration;
|
||||||
pub mod runtime;
|
pub mod runtime;
|
||||||
|
pub mod runtime_detect;
|
||||||
pub mod secrets;
|
pub mod secrets;
|
||||||
pub mod service;
|
pub mod service;
|
||||||
pub mod version_verify;
|
pub mod version_verify;
|
||||||
|
|
||||||
// Re-export commonly used types
|
// Re-export commonly used types
|
||||||
|
pub use dynamic_runtime::auto_register_detected_runtimes;
|
||||||
pub use executor::ActionExecutor;
|
pub use executor::ActionExecutor;
|
||||||
pub use heartbeat::HeartbeatManager;
|
pub use heartbeat::HeartbeatManager;
|
||||||
pub use registration::WorkerRegistration;
|
pub use registration::WorkerRegistration;
|
||||||
@@ -21,7 +24,8 @@ pub use runtime::{
|
|||||||
ExecutionContext, ExecutionResult, LocalRuntime, NativeRuntime, ProcessRuntime, Runtime,
|
ExecutionContext, ExecutionResult, LocalRuntime, NativeRuntime, ProcessRuntime, Runtime,
|
||||||
RuntimeError, RuntimeResult,
|
RuntimeError, RuntimeResult,
|
||||||
};
|
};
|
||||||
|
pub use runtime_detect::DetectedRuntime;
|
||||||
pub use secrets::SecretManager;
|
pub use secrets::SecretManager;
|
||||||
pub use service::WorkerService;
|
pub use service::{StartupMode, WorkerService};
|
||||||
// Re-export test executor from common (shared business logic)
|
// Re-export test executor from common (shared business logic)
|
||||||
pub use attune_common::test_executor::{TestConfig, TestExecutor};
|
pub use attune_common::test_executor::{TestConfig, TestExecutor};
|
||||||
|
|||||||
@@ -13,6 +13,12 @@ use sqlx::PgPool;
|
|||||||
use std::collections::HashMap;
|
use std::collections::HashMap;
|
||||||
use tracing::{info, warn};
|
use tracing::{info, warn};
|
||||||
|
|
||||||
|
use crate::runtime_detect::DetectedRuntime;
|
||||||
|
|
||||||
|
const ATTUNE_AGENT_MODE_ENV: &str = "ATTUNE_AGENT_MODE";
|
||||||
|
const ATTUNE_AGENT_BINARY_NAME_ENV: &str = "ATTUNE_AGENT_BINARY_NAME";
|
||||||
|
const ATTUNE_AGENT_BINARY_VERSION_ENV: &str = "ATTUNE_AGENT_BINARY_VERSION";
|
||||||
|
|
||||||
/// Worker registration manager
|
/// Worker registration manager
|
||||||
pub struct WorkerRegistration {
|
pub struct WorkerRegistration {
|
||||||
pool: PgPool,
|
pool: PgPool,
|
||||||
@@ -27,12 +33,60 @@ pub struct WorkerRegistration {
|
|||||||
}
|
}
|
||||||
|
|
||||||
impl WorkerRegistration {
|
impl WorkerRegistration {
|
||||||
|
fn env_truthy(name: &str) -> bool {
|
||||||
|
std::env::var(name)
|
||||||
|
.ok()
|
||||||
|
.map(|value| matches!(value.trim().to_ascii_lowercase().as_str(), "1" | "true"))
|
||||||
|
.unwrap_or(false)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn inject_agent_capabilities(capabilities: &mut HashMap<String, serde_json::Value>) {
|
||||||
|
if Self::env_truthy(ATTUNE_AGENT_MODE_ENV) {
|
||||||
|
capabilities.insert("agent_mode".to_string(), json!(true));
|
||||||
|
}
|
||||||
|
|
||||||
|
if let Ok(binary_name) = std::env::var(ATTUNE_AGENT_BINARY_NAME_ENV) {
|
||||||
|
let binary_name = binary_name.trim();
|
||||||
|
if !binary_name.is_empty() {
|
||||||
|
capabilities.insert("agent_binary_name".to_string(), json!(binary_name));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if let Ok(binary_version) = std::env::var(ATTUNE_AGENT_BINARY_VERSION_ENV) {
|
||||||
|
let binary_version = binary_version.trim();
|
||||||
|
if !binary_version.is_empty() {
|
||||||
|
capabilities.insert("agent_binary_version".to_string(), json!(binary_version));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn legacy_worker_name() -> Option<String> {
|
||||||
|
std::env::var("ATTUNE_WORKER_NAME")
|
||||||
|
.ok()
|
||||||
|
.map(|value| value.trim().to_string())
|
||||||
|
.filter(|value| !value.is_empty())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn legacy_worker_type() -> Option<WorkerType> {
|
||||||
|
let value = std::env::var("ATTUNE_WORKER_TYPE").ok()?;
|
||||||
|
match value.trim().to_ascii_lowercase().as_str() {
|
||||||
|
"local" => Some(WorkerType::Local),
|
||||||
|
"remote" => Some(WorkerType::Remote),
|
||||||
|
"container" => Some(WorkerType::Container),
|
||||||
|
other => {
|
||||||
|
warn!("Ignoring unrecognized ATTUNE_WORKER_TYPE value: {}", other);
|
||||||
|
None
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
/// Create a new worker registration manager
|
/// Create a new worker registration manager
|
||||||
pub fn new(pool: PgPool, config: &Config) -> Self {
|
pub fn new(pool: PgPool, config: &Config) -> Self {
|
||||||
let worker_name = config
|
let worker_name = config
|
||||||
.worker
|
.worker
|
||||||
.as_ref()
|
.as_ref()
|
||||||
.and_then(|w| w.name.clone())
|
.and_then(|w| w.name.clone())
|
||||||
|
.or_else(Self::legacy_worker_name)
|
||||||
.unwrap_or_else(|| {
|
.unwrap_or_else(|| {
|
||||||
format!(
|
format!(
|
||||||
"worker-{}",
|
"worker-{}",
|
||||||
@@ -46,6 +100,7 @@ impl WorkerRegistration {
|
|||||||
.worker
|
.worker
|
||||||
.as_ref()
|
.as_ref()
|
||||||
.and_then(|w| w.worker_type)
|
.and_then(|w| w.worker_type)
|
||||||
|
.or_else(Self::legacy_worker_type)
|
||||||
.unwrap_or(WorkerType::Local);
|
.unwrap_or(WorkerType::Local);
|
||||||
|
|
||||||
let worker_role = WorkerRole::Action;
|
let worker_role = WorkerRole::Action;
|
||||||
@@ -84,6 +139,8 @@ impl WorkerRegistration {
|
|||||||
json!(env!("CARGO_PKG_VERSION")),
|
json!(env!("CARGO_PKG_VERSION")),
|
||||||
);
|
);
|
||||||
|
|
||||||
|
Self::inject_agent_capabilities(&mut capabilities);
|
||||||
|
|
||||||
// Placeholder for runtimes (will be detected asynchronously)
|
// Placeholder for runtimes (will be detected asynchronously)
|
||||||
capabilities.insert("runtimes".to_string(), json!(Vec::<String>::new()));
|
capabilities.insert("runtimes".to_string(), json!(Vec::<String>::new()));
|
||||||
|
|
||||||
@@ -100,6 +157,51 @@ impl WorkerRegistration {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Store detected runtime interpreter metadata in capabilities.
|
||||||
|
///
|
||||||
|
/// This is used by the agent (`attune-agent`) to record the full details of
|
||||||
|
/// auto-detected interpreters — binary paths and versions — alongside the
|
||||||
|
/// simple `runtimes` string list used for backward compatibility.
|
||||||
|
///
|
||||||
|
/// The data is stored under the `detected_interpreters` capability key as a
|
||||||
|
/// JSON array of objects:
|
||||||
|
/// ```json
|
||||||
|
/// [
|
||||||
|
/// {"name": "python", "path": "/usr/bin/python3", "version": "3.12.1"},
|
||||||
|
/// {"name": "shell", "path": "/bin/bash", "version": "5.2.15"}
|
||||||
|
/// ]
|
||||||
|
/// ```
|
||||||
|
pub fn set_detected_runtimes(&mut self, runtimes: Vec<DetectedRuntime>) {
|
||||||
|
let interpreters: Vec<serde_json::Value> = runtimes
|
||||||
|
.iter()
|
||||||
|
.map(|rt| {
|
||||||
|
json!({
|
||||||
|
"name": rt.name,
|
||||||
|
"path": rt.path,
|
||||||
|
"version": rt.version,
|
||||||
|
})
|
||||||
|
})
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
self.capabilities
|
||||||
|
.insert("detected_interpreters".to_string(), json!(interpreters));
|
||||||
|
|
||||||
|
info!(
|
||||||
|
"Stored {} detected interpreter(s) in capabilities",
|
||||||
|
runtimes.len()
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Mark this worker as running in agent mode.
|
||||||
|
///
|
||||||
|
/// Agent-mode workers auto-detect their runtimes at startup (as opposed to
|
||||||
|
/// being configured via `ATTUNE_WORKER_RUNTIMES` or config files). Setting
|
||||||
|
/// this flag allows the system to distinguish agents from standard workers.
|
||||||
|
pub fn set_agent_mode(&mut self, is_agent: bool) {
|
||||||
|
self.capabilities
|
||||||
|
.insert("agent_mode".to_string(), json!(is_agent));
|
||||||
|
}
|
||||||
|
|
||||||
/// Detect available runtimes using the unified runtime detector
|
/// Detect available runtimes using the unified runtime detector
|
||||||
pub async fn detect_capabilities(&mut self, config: &Config) -> Result<()> {
|
pub async fn detect_capabilities(&mut self, config: &Config) -> Result<()> {
|
||||||
info!("Detecting worker capabilities...");
|
info!("Detecting worker capabilities...");
|
||||||
@@ -346,4 +448,96 @@ mod tests {
|
|||||||
|
|
||||||
registration.deregister().await.unwrap();
|
registration.deregister().await.unwrap();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_detected_runtimes_json_structure() {
|
||||||
|
// Test the JSON structure that set_detected_runtimes builds
|
||||||
|
let runtimes = vec![
|
||||||
|
DetectedRuntime {
|
||||||
|
name: "python".to_string(),
|
||||||
|
path: "/usr/bin/python3".to_string(),
|
||||||
|
version: Some("3.12.1".to_string()),
|
||||||
|
},
|
||||||
|
DetectedRuntime {
|
||||||
|
name: "shell".to_string(),
|
||||||
|
path: "/bin/bash".to_string(),
|
||||||
|
version: None,
|
||||||
|
},
|
||||||
|
];
|
||||||
|
|
||||||
|
let interpreters: Vec<serde_json::Value> = runtimes
|
||||||
|
.iter()
|
||||||
|
.map(|rt| {
|
||||||
|
json!({
|
||||||
|
"name": rt.name,
|
||||||
|
"path": rt.path,
|
||||||
|
"version": rt.version,
|
||||||
|
})
|
||||||
|
})
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
let json_value = json!(interpreters);
|
||||||
|
|
||||||
|
// Verify structure
|
||||||
|
let arr = json_value.as_array().unwrap();
|
||||||
|
assert_eq!(arr.len(), 2);
|
||||||
|
assert_eq!(arr[0]["name"], "python");
|
||||||
|
assert_eq!(arr[0]["path"], "/usr/bin/python3");
|
||||||
|
assert_eq!(arr[0]["version"], "3.12.1");
|
||||||
|
assert_eq!(arr[1]["name"], "shell");
|
||||||
|
assert_eq!(arr[1]["path"], "/bin/bash");
|
||||||
|
assert!(arr[1]["version"].is_null());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_detected_runtimes_empty() {
|
||||||
|
let runtimes: Vec<DetectedRuntime> = vec![];
|
||||||
|
let interpreters: Vec<serde_json::Value> = runtimes
|
||||||
|
.iter()
|
||||||
|
.map(|rt| {
|
||||||
|
json!({
|
||||||
|
"name": rt.name,
|
||||||
|
"path": rt.path,
|
||||||
|
"version": rt.version,
|
||||||
|
})
|
||||||
|
})
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
let json_value = json!(interpreters);
|
||||||
|
assert_eq!(json_value.as_array().unwrap().len(), 0);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_agent_mode_capability_value() {
|
||||||
|
// Verify the JSON value for agent_mode capability
|
||||||
|
let value = json!(true);
|
||||||
|
assert_eq!(value, true);
|
||||||
|
|
||||||
|
let value = json!(false);
|
||||||
|
assert_eq!(value, false);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_inject_agent_capabilities_from_env() {
|
||||||
|
std::env::set_var(ATTUNE_AGENT_MODE_ENV, "TRUE");
|
||||||
|
std::env::set_var(ATTUNE_AGENT_BINARY_NAME_ENV, "attune-agent");
|
||||||
|
std::env::set_var(ATTUNE_AGENT_BINARY_VERSION_ENV, "1.2.3");
|
||||||
|
|
||||||
|
let mut capabilities = HashMap::new();
|
||||||
|
WorkerRegistration::inject_agent_capabilities(&mut capabilities);
|
||||||
|
|
||||||
|
assert_eq!(capabilities.get("agent_mode"), Some(&json!(true)));
|
||||||
|
assert_eq!(
|
||||||
|
capabilities.get("agent_binary_name"),
|
||||||
|
Some(&json!("attune-agent"))
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
capabilities.get("agent_binary_version"),
|
||||||
|
Some(&json!("1.2.3"))
|
||||||
|
);
|
||||||
|
|
||||||
|
std::env::remove_var(ATTUNE_AGENT_MODE_ENV);
|
||||||
|
std::env::remove_var(ATTUNE_AGENT_BINARY_NAME_ENV);
|
||||||
|
std::env::remove_var(ATTUNE_AGENT_BINARY_VERSION_ENV);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -24,13 +24,77 @@ use attune_common::models::runtime::{
|
|||||||
};
|
};
|
||||||
use std::collections::HashMap;
|
use std::collections::HashMap;
|
||||||
use std::path::{Path, PathBuf};
|
use std::path::{Path, PathBuf};
|
||||||
|
use std::sync::{Arc, Mutex as StdMutex, OnceLock};
|
||||||
use tokio::process::Command;
|
use tokio::process::Command;
|
||||||
use tracing::{debug, error, info, warn};
|
use tracing::{debug, error, info, warn};
|
||||||
|
|
||||||
|
/// Per-directory locks for lazy environment setup to prevent concurrent
|
||||||
|
/// setup of the same environment from corrupting it. When two executions
|
||||||
|
/// for the same pack arrive concurrently (e.g. in agent mode), both may
|
||||||
|
/// see `!env_dir.exists()` and race to run `setup_pack_environment`.
|
||||||
|
/// This map provides a per-directory async mutex so that only one setup
|
||||||
|
/// runs at a time for each env_dir path.
|
||||||
|
static ENV_SETUP_LOCKS: OnceLock<StdMutex<HashMap<PathBuf, Arc<tokio::sync::Mutex<()>>>>> =
|
||||||
|
OnceLock::new();
|
||||||
|
|
||||||
|
fn get_env_setup_lock(env_dir: &Path) -> Arc<tokio::sync::Mutex<()>> {
|
||||||
|
let locks = ENV_SETUP_LOCKS.get_or_init(|| StdMutex::new(HashMap::new()));
|
||||||
|
let mut map = locks.lock().unwrap();
|
||||||
|
map.entry(env_dir.to_path_buf())
|
||||||
|
.or_insert_with(|| Arc::new(tokio::sync::Mutex::new(())))
|
||||||
|
.clone()
|
||||||
|
}
|
||||||
|
|
||||||
fn bash_single_quote_escape(s: &str) -> String {
|
fn bash_single_quote_escape(s: &str) -> String {
|
||||||
s.replace('\'', "'\\''")
|
s.replace('\'', "'\\''")
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn format_command_for_log(cmd: &Command) -> String {
|
||||||
|
let program = cmd.as_std().get_program().to_string_lossy().into_owned();
|
||||||
|
let args = cmd
|
||||||
|
.as_std()
|
||||||
|
.get_args()
|
||||||
|
.map(|arg| arg.to_string_lossy().into_owned())
|
||||||
|
.collect::<Vec<_>>();
|
||||||
|
let cwd = cmd
|
||||||
|
.as_std()
|
||||||
|
.get_current_dir()
|
||||||
|
.map(|dir| dir.display().to_string())
|
||||||
|
.unwrap_or_else(|| "<inherit>".to_string());
|
||||||
|
let env = cmd
|
||||||
|
.as_std()
|
||||||
|
.get_envs()
|
||||||
|
.map(|(key, value)| {
|
||||||
|
let key = key.to_string_lossy().into_owned();
|
||||||
|
let value = value
|
||||||
|
.map(|v| {
|
||||||
|
if is_sensitive_env_var(&key) {
|
||||||
|
"<redacted>".to_string()
|
||||||
|
} else {
|
||||||
|
v.to_string_lossy().into_owned()
|
||||||
|
}
|
||||||
|
})
|
||||||
|
.unwrap_or_else(|| "<unset>".to_string());
|
||||||
|
format!("{key}={value}")
|
||||||
|
})
|
||||||
|
.collect::<Vec<_>>();
|
||||||
|
|
||||||
|
format!(
|
||||||
|
"program={program}, args={args:?}, cwd={cwd}, env={env:?}",
|
||||||
|
args = args,
|
||||||
|
env = env,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn is_sensitive_env_var(key: &str) -> bool {
|
||||||
|
let upper = key.to_ascii_uppercase();
|
||||||
|
upper.contains("TOKEN")
|
||||||
|
|| upper.contains("SECRET")
|
||||||
|
|| upper.contains("PASSWORD")
|
||||||
|
|| upper.ends_with("_KEY")
|
||||||
|
|| upper == "KEY"
|
||||||
|
}
|
||||||
|
|
||||||
/// A generic runtime driven by `RuntimeExecutionConfig` from the database.
|
/// A generic runtime driven by `RuntimeExecutionConfig` from the database.
|
||||||
///
|
///
|
||||||
/// Each `ProcessRuntime` instance corresponds to a row in the `runtime` table.
|
/// Each `ProcessRuntime` instance corresponds to a row in the `runtime` table.
|
||||||
@@ -615,30 +679,68 @@ impl Runtime for ProcessRuntime {
|
|||||||
None
|
None
|
||||||
};
|
};
|
||||||
|
|
||||||
// Runtime environments are set up proactively — either at worker startup
|
// Lazy environment setup: if the environment directory doesn't exist but
|
||||||
// (scanning all registered packs) or via pack.registered MQ events when a
|
// should (i.e., there's an environment config and the pack dir exists),
|
||||||
// new pack is installed. We only log a warning here if the expected
|
// create it on-demand. This is the primary code path for agent mode where
|
||||||
// environment directory is missing so operators can investigate.
|
// proactive startup setup is skipped, but it also serves as a safety net
|
||||||
if effective_config.environment.is_some() && pack_dir.exists() && !env_dir.exists() {
|
// for standard workers if the environment was somehow missed.
|
||||||
warn!(
|
// Acquire a per-directory async lock to serialize environment setup.
|
||||||
|
// This prevents concurrent executions for the same pack from racing
|
||||||
|
// to create or repair the environment simultaneously.
|
||||||
|
if effective_config.environment.is_some() && pack_dir.exists() {
|
||||||
|
let env_lock = get_env_setup_lock(&env_dir);
|
||||||
|
let _guard = env_lock.lock().await;
|
||||||
|
|
||||||
|
// --- Lazy environment creation (double-checked after lock) ---
|
||||||
|
if !env_dir.exists() {
|
||||||
|
info!(
|
||||||
"Runtime environment for pack '{}' not found at {}. \
|
"Runtime environment for pack '{}' not found at {}. \
|
||||||
The environment should have been created at startup or on pack registration. \
|
Creating on first use (lazy setup).",
|
||||||
Proceeding with system interpreter as fallback.",
|
context.action_ref,
|
||||||
|
env_dir.display(),
|
||||||
|
);
|
||||||
|
|
||||||
|
let setup_runtime = ProcessRuntime::new(
|
||||||
|
self.runtime_name.clone(),
|
||||||
|
effective_config.clone(),
|
||||||
|
self.packs_base_dir.clone(),
|
||||||
|
self.runtime_envs_dir.clone(),
|
||||||
|
);
|
||||||
|
match setup_runtime
|
||||||
|
.setup_pack_environment(&pack_dir, &env_dir)
|
||||||
|
.await
|
||||||
|
{
|
||||||
|
Ok(()) => {
|
||||||
|
info!(
|
||||||
|
"Successfully created environment for pack '{}' at {} (lazy setup)",
|
||||||
context.action_ref,
|
context.action_ref,
|
||||||
env_dir.display(),
|
env_dir.display(),
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
Err(e) => {
|
||||||
|
warn!(
|
||||||
|
"Failed to create environment for pack '{}' at {}: {}. \
|
||||||
|
Proceeding with system interpreter as fallback.",
|
||||||
|
context.action_ref,
|
||||||
|
env_dir.display(),
|
||||||
|
e,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// --- Broken-symlink repair (also under the per-directory lock) ---
|
||||||
// If the environment directory exists but contains a broken interpreter
|
// If the environment directory exists but contains a broken interpreter
|
||||||
// (e.g. broken symlinks from a venv created in a different container),
|
// (e.g. broken symlinks from a venv created in a different container),
|
||||||
// attempt to recreate it before resolving the interpreter.
|
// attempt to recreate it before resolving the interpreter.
|
||||||
if effective_config.environment.is_some() && env_dir.exists() && pack_dir.exists() {
|
if env_dir.exists() {
|
||||||
if let Some(ref env_cfg) = effective_config.environment {
|
if let Some(ref env_cfg) = effective_config.environment {
|
||||||
if let Some(ref interp_template) = env_cfg.interpreter_path {
|
if let Some(ref interp_template) = env_cfg.interpreter_path {
|
||||||
let mut vars = std::collections::HashMap::new();
|
let mut vars = std::collections::HashMap::new();
|
||||||
vars.insert("env_dir", env_dir.to_string_lossy().to_string());
|
vars.insert("env_dir", env_dir.to_string_lossy().to_string());
|
||||||
vars.insert("pack_dir", pack_dir.to_string_lossy().to_string());
|
vars.insert("pack_dir", pack_dir.to_string_lossy().to_string());
|
||||||
let resolved = RuntimeExecutionConfig::resolve_template(interp_template, &vars);
|
let resolved =
|
||||||
|
RuntimeExecutionConfig::resolve_template(interp_template, &vars);
|
||||||
let resolved_path = std::path::PathBuf::from(&resolved);
|
let resolved_path = std::path::PathBuf::from(&resolved);
|
||||||
|
|
||||||
// Check for a broken symlink: symlink_metadata succeeds for
|
// Check for a broken symlink: symlink_metadata succeeds for
|
||||||
@@ -704,6 +806,7 @@ impl Runtime for ProcessRuntime {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
let interpreter = effective_config.resolve_interpreter_with_env(&pack_dir, env_dir_opt);
|
let interpreter = effective_config.resolve_interpreter_with_env(&pack_dir, env_dir_opt);
|
||||||
|
|
||||||
@@ -727,12 +830,9 @@ impl Runtime for ProcessRuntime {
|
|||||||
// resolved against the current pack/env directories.
|
// resolved against the current pack/env directories.
|
||||||
if !effective_config.env_vars.is_empty() {
|
if !effective_config.env_vars.is_empty() {
|
||||||
let vars = effective_config.build_template_vars_with_env(&pack_dir, env_dir_opt);
|
let vars = effective_config.build_template_vars_with_env(&pack_dir, env_dir_opt);
|
||||||
for (key, value_template) in &effective_config.env_vars {
|
for (key, env_var_config) in &effective_config.env_vars {
|
||||||
let resolved = RuntimeExecutionConfig::resolve_template(value_template, &vars);
|
let resolved = env_var_config.resolve(&vars, env.get(key).map(String::as_str));
|
||||||
debug!(
|
debug!("Setting runtime env var: {}={}", key, resolved);
|
||||||
"Setting runtime env var: {}={} (template: {})",
|
|
||||||
key, resolved, value_template
|
|
||||||
);
|
|
||||||
env.insert(key.clone(), resolved);
|
env.insert(key.clone(), resolved);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -840,10 +940,10 @@ impl Runtime for ProcessRuntime {
|
|||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
// Log the full command about to be executed
|
// Log the spawned process accurately instead of using Command's shell-like Debug output.
|
||||||
info!(
|
info!(
|
||||||
"Running command: {:?} (action: '{}', execution_id: {}, working_dir: {:?})",
|
"Running command: {} (action: '{}', execution_id: {}, working_dir: {:?})",
|
||||||
cmd,
|
format_command_for_log(&cmd),
|
||||||
context.action_ref,
|
context.action_ref,
|
||||||
context.execution_id,
|
context.execution_id,
|
||||||
working_dir
|
working_dir
|
||||||
@@ -959,7 +1059,8 @@ mod tests {
|
|||||||
use super::*;
|
use super::*;
|
||||||
use attune_common::models::runtime::{
|
use attune_common::models::runtime::{
|
||||||
DependencyConfig, EnvironmentConfig, InlineExecutionConfig, InlineExecutionStrategy,
|
DependencyConfig, EnvironmentConfig, InlineExecutionConfig, InlineExecutionStrategy,
|
||||||
InterpreterConfig, RuntimeExecutionConfig,
|
InterpreterConfig, RuntimeEnvVarConfig, RuntimeEnvVarOperation, RuntimeEnvVarSpec,
|
||||||
|
RuntimeExecutionConfig,
|
||||||
};
|
};
|
||||||
use attune_common::models::{OutputFormat, ParameterDelivery, ParameterFormat};
|
use attune_common::models::{OutputFormat, ParameterDelivery, ParameterFormat};
|
||||||
use std::collections::HashMap;
|
use std::collections::HashMap;
|
||||||
@@ -1274,6 +1375,88 @@ mod tests {
|
|||||||
assert!(result.stdout.contains("hello from python process runtime"));
|
assert!(result.stdout.contains("hello from python process runtime"));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
async fn test_execute_python_file_with_pack_lib_on_pythonpath() {
|
||||||
|
let temp_dir = TempDir::new().unwrap();
|
||||||
|
let packs_dir = temp_dir.path().join("packs");
|
||||||
|
let pack_dir = packs_dir.join("testpack");
|
||||||
|
let actions_dir = pack_dir.join("actions");
|
||||||
|
let lib_dir = pack_dir.join("lib");
|
||||||
|
std::fs::create_dir_all(&actions_dir).unwrap();
|
||||||
|
std::fs::create_dir_all(&lib_dir).unwrap();
|
||||||
|
|
||||||
|
std::fs::write(
|
||||||
|
lib_dir.join("helper.py"),
|
||||||
|
"def message():\n return 'hello from pack lib'\n",
|
||||||
|
)
|
||||||
|
.unwrap();
|
||||||
|
std::fs::write(
|
||||||
|
actions_dir.join("hello.py"),
|
||||||
|
"import helper\nimport os\nprint(helper.message())\nprint(os.environ['PYTHONPATH'])\n",
|
||||||
|
)
|
||||||
|
.unwrap();
|
||||||
|
|
||||||
|
let mut env_vars = HashMap::new();
|
||||||
|
env_vars.insert(
|
||||||
|
"PYTHONPATH".to_string(),
|
||||||
|
RuntimeEnvVarConfig::Spec(RuntimeEnvVarSpec {
|
||||||
|
value: "{pack_dir}/lib".to_string(),
|
||||||
|
operation: RuntimeEnvVarOperation::Prepend,
|
||||||
|
separator: ":".to_string(),
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
let runtime = ProcessRuntime::new(
|
||||||
|
"python".to_string(),
|
||||||
|
RuntimeExecutionConfig {
|
||||||
|
interpreter: InterpreterConfig {
|
||||||
|
binary: "python3".to_string(),
|
||||||
|
args: vec![],
|
||||||
|
file_extension: Some(".py".to_string()),
|
||||||
|
},
|
||||||
|
inline_execution: InlineExecutionConfig::default(),
|
||||||
|
environment: None,
|
||||||
|
dependencies: None,
|
||||||
|
env_vars,
|
||||||
|
},
|
||||||
|
packs_dir,
|
||||||
|
temp_dir.path().join("runtime_envs"),
|
||||||
|
);
|
||||||
|
|
||||||
|
let mut env = HashMap::new();
|
||||||
|
env.insert("PYTHONPATH".to_string(), "/existing/pythonpath".to_string());
|
||||||
|
|
||||||
|
let context = ExecutionContext {
|
||||||
|
execution_id: 3,
|
||||||
|
action_ref: "testpack.hello".to_string(),
|
||||||
|
parameters: HashMap::new(),
|
||||||
|
env,
|
||||||
|
secrets: HashMap::new(),
|
||||||
|
timeout: Some(10),
|
||||||
|
working_dir: None,
|
||||||
|
entry_point: "hello.py".to_string(),
|
||||||
|
code: None,
|
||||||
|
code_path: Some(actions_dir.join("hello.py")),
|
||||||
|
runtime_name: Some("python".to_string()),
|
||||||
|
runtime_config_override: None,
|
||||||
|
runtime_env_dir_suffix: None,
|
||||||
|
selected_runtime_version: None,
|
||||||
|
max_stdout_bytes: 1024 * 1024,
|
||||||
|
max_stderr_bytes: 1024 * 1024,
|
||||||
|
parameter_delivery: ParameterDelivery::default(),
|
||||||
|
parameter_format: ParameterFormat::default(),
|
||||||
|
output_format: OutputFormat::default(),
|
||||||
|
cancel_token: None,
|
||||||
|
};
|
||||||
|
|
||||||
|
let result = runtime.execute(context).await.unwrap();
|
||||||
|
assert_eq!(result.exit_code, 0);
|
||||||
|
assert!(result.stdout.contains("hello from pack lib"));
|
||||||
|
assert!(result
|
||||||
|
.stdout
|
||||||
|
.contains(&format!("{}/lib:/existing/pythonpath", pack_dir.display())));
|
||||||
|
}
|
||||||
|
|
||||||
#[tokio::test]
|
#[tokio::test]
|
||||||
async fn test_execute_inline_code() {
|
async fn test_execute_inline_code() {
|
||||||
let temp_dir = TempDir::new().unwrap();
|
let temp_dir = TempDir::new().unwrap();
|
||||||
|
|||||||
12
crates/worker/src/runtime_detect.rs
Normal file
12
crates/worker/src/runtime_detect.rs
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
//! Compatibility wrapper around the shared agent runtime detection module.
|
||||||
|
|
||||||
|
pub use attune_common::agent_runtime_detection::{
|
||||||
|
detect_runtimes, format_as_env_value, DetectedRuntime,
|
||||||
|
};
|
||||||
|
|
||||||
|
pub fn print_detection_report(runtimes: &[DetectedRuntime]) {
|
||||||
|
attune_common::agent_runtime_detection::print_detection_report_for_env(
|
||||||
|
"ATTUNE_WORKER_RUNTIMES",
|
||||||
|
runtimes,
|
||||||
|
);
|
||||||
|
}
|
||||||
@@ -23,7 +23,7 @@ use attune_common::mq::{
|
|||||||
MessageEnvelope, MessageType, PackRegisteredPayload, Publisher, PublisherConfig,
|
MessageEnvelope, MessageType, PackRegisteredPayload, Publisher, PublisherConfig,
|
||||||
};
|
};
|
||||||
use attune_common::repositories::{execution::ExecutionRepository, FindById};
|
use attune_common::repositories::{execution::ExecutionRepository, FindById};
|
||||||
use attune_common::runtime_detection::runtime_in_filter;
|
use attune_common::runtime_detection::runtime_aliases_match_filter;
|
||||||
use chrono::Utc;
|
use chrono::Utc;
|
||||||
use serde::{Deserialize, Serialize};
|
use serde::{Deserialize, Serialize};
|
||||||
use sqlx::PgPool;
|
use sqlx::PgPool;
|
||||||
@@ -45,12 +45,32 @@ use crate::runtime::local::LocalRuntime;
|
|||||||
use crate::runtime::native::NativeRuntime;
|
use crate::runtime::native::NativeRuntime;
|
||||||
use crate::runtime::process::ProcessRuntime;
|
use crate::runtime::process::ProcessRuntime;
|
||||||
use crate::runtime::RuntimeRegistry;
|
use crate::runtime::RuntimeRegistry;
|
||||||
|
use crate::runtime_detect::DetectedRuntime;
|
||||||
use crate::secrets::SecretManager;
|
use crate::secrets::SecretManager;
|
||||||
use crate::version_verify;
|
use crate::version_verify;
|
||||||
|
|
||||||
use attune_common::repositories::runtime::RuntimeRepository;
|
use attune_common::repositories::runtime::RuntimeRepository;
|
||||||
use attune_common::repositories::List;
|
use attune_common::repositories::List;
|
||||||
|
|
||||||
|
/// Controls how the worker initializes its runtime environment.
|
||||||
|
///
|
||||||
|
/// The standard `attune-worker` binary uses `Worker` mode with proactive
|
||||||
|
/// setup at startup, while the `attune-agent` binary uses `Agent` mode
|
||||||
|
/// with lazy (on-demand) initialization.
|
||||||
|
#[derive(Debug, Clone)]
|
||||||
|
pub enum StartupMode {
|
||||||
|
/// Full worker mode: proactive environment setup, full version
|
||||||
|
/// verification sweep at startup. Used by `attune-worker`.
|
||||||
|
Worker,
|
||||||
|
|
||||||
|
/// Agent mode: lazy environment setup (on first use), on-demand
|
||||||
|
/// version verification, auto-detected runtimes. Used by `attune-agent`.
|
||||||
|
Agent {
|
||||||
|
/// Runtimes detected by the auto-detection module.
|
||||||
|
detected_runtimes: Vec<DetectedRuntime>,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
/// Message payload for execution.scheduled events
|
/// Message payload for execution.scheduled events
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
pub struct ExecutionScheduledPayload {
|
pub struct ExecutionScheduledPayload {
|
||||||
@@ -93,6 +113,10 @@ pub struct WorkerService {
|
|||||||
/// Tracks cancellation requests that arrived before the in-memory token
|
/// Tracks cancellation requests that arrived before the in-memory token
|
||||||
/// for an execution had been registered.
|
/// for an execution had been registered.
|
||||||
pending_cancellations: Arc<Mutex<HashSet<i64>>>,
|
pending_cancellations: Arc<Mutex<HashSet<i64>>>,
|
||||||
|
/// Controls whether this worker runs in full `Worker` mode (proactive
|
||||||
|
/// environment setup, full version verification) or `Agent` mode (lazy
|
||||||
|
/// setup, auto-detected runtimes).
|
||||||
|
startup_mode: StartupMode,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl WorkerService {
|
impl WorkerService {
|
||||||
@@ -229,10 +253,10 @@ impl WorkerService {
|
|||||||
// Uses alias-aware matching so that e.g. filter "node"
|
// Uses alias-aware matching so that e.g. filter "node"
|
||||||
// matches DB runtime name "Node.js" (lowercased to "node.js").
|
// matches DB runtime name "Node.js" (lowercased to "node.js").
|
||||||
if let Some(ref filter) = runtime_filter {
|
if let Some(ref filter) = runtime_filter {
|
||||||
if !runtime_in_filter(&rt_name, filter) {
|
if !runtime_aliases_match_filter(&rt.aliases, filter) {
|
||||||
debug!(
|
debug!(
|
||||||
"Skipping runtime '{}' (not in ATTUNE_WORKER_RUNTIMES filter)",
|
"Skipping runtime '{}' (aliases {:?} not in ATTUNE_WORKER_RUNTIMES filter)",
|
||||||
rt_name
|
rt_name, rt.aliases
|
||||||
);
|
);
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
@@ -402,9 +426,26 @@ impl WorkerService {
|
|||||||
in_flight_tasks: Arc::new(Mutex::new(JoinSet::new())),
|
in_flight_tasks: Arc::new(Mutex::new(JoinSet::new())),
|
||||||
cancel_tokens: Arc::new(Mutex::new(HashMap::new())),
|
cancel_tokens: Arc::new(Mutex::new(HashMap::new())),
|
||||||
pending_cancellations: Arc::new(Mutex::new(HashSet::new())),
|
pending_cancellations: Arc::new(Mutex::new(HashSet::new())),
|
||||||
|
startup_mode: StartupMode::Worker,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Set agent-detected runtimes for inclusion in worker registration.
|
||||||
|
///
|
||||||
|
/// When the worker is started as `attune-agent`, the agent entrypoint
|
||||||
|
/// auto-detects available interpreters and passes them here. During
|
||||||
|
/// [`start()`](Self::start), the detection results are stored in the
|
||||||
|
/// worker's capabilities as `detected_interpreters` (structured JSON
|
||||||
|
/// with binary paths and versions) and the `agent_mode` flag is set.
|
||||||
|
///
|
||||||
|
/// This method is a no-op for the standard `attune-worker` binary.
|
||||||
|
pub fn with_detected_runtimes(mut self, runtimes: Vec<DetectedRuntime>) -> Self {
|
||||||
|
self.startup_mode = StartupMode::Agent {
|
||||||
|
detected_runtimes: runtimes,
|
||||||
|
};
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
/// Start the worker service
|
/// Start the worker service
|
||||||
pub async fn start(&mut self) -> Result<()> {
|
pub async fn start(&mut self) -> Result<()> {
|
||||||
info!("Starting Worker Service");
|
info!("Starting Worker Service");
|
||||||
@@ -413,6 +454,21 @@ impl WorkerService {
|
|||||||
let worker_id = {
|
let worker_id = {
|
||||||
let mut reg = self.registration.write().await;
|
let mut reg = self.registration.write().await;
|
||||||
reg.detect_capabilities(&self.config).await?;
|
reg.detect_capabilities(&self.config).await?;
|
||||||
|
|
||||||
|
// If running as an agent, store the detected interpreter metadata
|
||||||
|
// and set the agent_mode flag before registering.
|
||||||
|
if let StartupMode::Agent {
|
||||||
|
ref detected_runtimes,
|
||||||
|
} = self.startup_mode
|
||||||
|
{
|
||||||
|
reg.set_detected_runtimes(detected_runtimes.clone());
|
||||||
|
reg.set_agent_mode(true);
|
||||||
|
info!(
|
||||||
|
"Agent mode: {} detected interpreter(s) will be stored in capabilities",
|
||||||
|
detected_runtimes.len()
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
reg.register().await?
|
reg.register().await?
|
||||||
};
|
};
|
||||||
self.worker_id = Some(worker_id);
|
self.worker_id = Some(worker_id);
|
||||||
@@ -430,6 +486,8 @@ impl WorkerService {
|
|||||||
})?;
|
})?;
|
||||||
info!("Worker-specific message queue infrastructure setup completed");
|
info!("Worker-specific message queue infrastructure setup completed");
|
||||||
|
|
||||||
|
match &self.startup_mode {
|
||||||
|
StartupMode::Worker => {
|
||||||
// Verify which runtime versions are available on this system.
|
// Verify which runtime versions are available on this system.
|
||||||
// This updates the `available` flag in the database so that
|
// This updates the `available` flag in the database so that
|
||||||
// `select_best_version()` only considers genuinely present versions.
|
// `select_best_version()` only considers genuinely present versions.
|
||||||
@@ -440,6 +498,14 @@ impl WorkerService {
|
|||||||
// environments are ready by the time the first execution arrives.
|
// environments are ready by the time the first execution arrives.
|
||||||
// Now version-aware: creates per-version environments where needed.
|
// Now version-aware: creates per-version environments where needed.
|
||||||
self.scan_and_setup_environments().await;
|
self.scan_and_setup_environments().await;
|
||||||
|
}
|
||||||
|
StartupMode::Agent { .. } => {
|
||||||
|
// Skip proactive setup — will happen lazily on first execution
|
||||||
|
info!(
|
||||||
|
"Agent mode: deferring environment setup and version verification to first use"
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
// Start heartbeat
|
// Start heartbeat
|
||||||
self.heartbeat.start().await?;
|
self.heartbeat.start().await?;
|
||||||
|
|||||||
@@ -17,7 +17,7 @@ use tracing::{debug, info, warn};
|
|||||||
|
|
||||||
use attune_common::models::RuntimeVersion;
|
use attune_common::models::RuntimeVersion;
|
||||||
use attune_common::repositories::runtime_version::RuntimeVersionRepository;
|
use attune_common::repositories::runtime_version::RuntimeVersionRepository;
|
||||||
use attune_common::runtime_detection::runtime_in_filter;
|
use attune_common::runtime_detection::runtime_aliases_match_filter;
|
||||||
|
|
||||||
/// Result of verifying all runtime versions at startup.
|
/// Result of verifying all runtime versions at startup.
|
||||||
#[derive(Debug)]
|
#[derive(Debug)]
|
||||||
@@ -95,7 +95,7 @@ pub async fn verify_all_runtime_versions(
|
|||||||
.to_lowercase();
|
.to_lowercase();
|
||||||
|
|
||||||
if let Some(filter) = runtime_filter {
|
if let Some(filter) = runtime_filter {
|
||||||
if !runtime_in_filter(&rt_base_name, filter) {
|
if !runtime_aliases_match_filter(&[rt_base_name.to_string()], filter) {
|
||||||
debug!(
|
debug!(
|
||||||
"Skipping version '{}' of runtime '{}' (not in worker runtime filter)",
|
"Skipping version '{}' of runtime '{}' (not in worker runtime filter)",
|
||||||
version.version, version.runtime_ref,
|
version.version, version.runtime_ref,
|
||||||
|
|||||||
216
docker-compose.agent.yaml
Normal file
216
docker-compose.agent.yaml
Normal file
@@ -0,0 +1,216 @@
|
|||||||
|
# Agent-Based Worker Services
|
||||||
|
#
|
||||||
|
# This override file demonstrates how to add custom runtime workers to Attune
|
||||||
|
# by injecting the universal worker agent into any container image.
|
||||||
|
#
|
||||||
|
# Usage:
|
||||||
|
# docker compose -f docker-compose.yaml -f docker-compose.agent.yaml up -d
|
||||||
|
#
|
||||||
|
# Prerequisites:
|
||||||
|
# The init-agent service (defined in docker-compose.yaml) must be present.
|
||||||
|
# It builds the statically-linked worker and sensor agent binaries and populates the agent_bin volume.
|
||||||
|
#
|
||||||
|
# How it works:
|
||||||
|
# 1. init-agent builds musl-static injected agent binaries and copies them to the agent_bin volume
|
||||||
|
# 2. Each worker service mounts agent_bin read-only and uses the agent as its entrypoint
|
||||||
|
# 3. The agent auto-detects available runtimes in the container (python, ruby, node, etc.)
|
||||||
|
# 4. No Dockerfile needed — just point at any container image with your desired runtime
|
||||||
|
#
|
||||||
|
# Adding your own worker:
|
||||||
|
# Copy one of the examples below and change:
|
||||||
|
# - service name (e.g., worker-my-runtime)
|
||||||
|
# - image (any Docker image with your runtime installed)
|
||||||
|
# - ATTUNE_WORKER_NAME (unique name for this worker)
|
||||||
|
# - Optionally set ATTUNE_WORKER_RUNTIMES to override auto-detection
|
||||||
|
|
||||||
|
services:
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Ruby Worker — Official Ruby image with auto-detected runtime
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
worker-ruby:
|
||||||
|
image: ruby:3.3-slim
|
||||||
|
container_name: attune-worker-ruby
|
||||||
|
depends_on:
|
||||||
|
init-agent:
|
||||||
|
condition: service_completed_successfully
|
||||||
|
init-packs:
|
||||||
|
condition: service_completed_successfully
|
||||||
|
migrations:
|
||||||
|
condition: service_completed_successfully
|
||||||
|
postgres:
|
||||||
|
condition: service_healthy
|
||||||
|
rabbitmq:
|
||||||
|
condition: service_healthy
|
||||||
|
entrypoint: ["/opt/attune/agent/attune-agent"]
|
||||||
|
stop_grace_period: 45s
|
||||||
|
environment:
|
||||||
|
RUST_LOG: info
|
||||||
|
ATTUNE_CONFIG: /opt/attune/config/config.yaml
|
||||||
|
ATTUNE_WORKER_NAME: worker-ruby-01
|
||||||
|
ATTUNE_WORKER_TYPE: container
|
||||||
|
ATTUNE__SECURITY__JWT_SECRET: ${JWT_SECRET:-docker-dev-secret-change-in-production}
|
||||||
|
ATTUNE__SECURITY__ENCRYPTION_KEY: ${ENCRYPTION_KEY:-docker-dev-encryption-key-please-change-in-production-32plus}
|
||||||
|
ATTUNE__DATABASE__URL: postgresql://attune:attune@postgres:5432/attune
|
||||||
|
ATTUNE__MESSAGE_QUEUE__URL: amqp://attune:attune@rabbitmq:5672
|
||||||
|
ATTUNE_API_URL: http://attune-api:8080
|
||||||
|
# ATTUNE_WORKER_RUNTIMES omitted — auto-detected as ruby,shell
|
||||||
|
volumes:
|
||||||
|
- agent_bin:/opt/attune/agent:ro
|
||||||
|
- ${ATTUNE_DOCKER_CONFIG_PATH:-./config.docker.yaml}:/opt/attune/config/config.yaml:ro
|
||||||
|
- packs_data:/opt/attune/packs:ro
|
||||||
|
- runtime_envs:/opt/attune/runtime_envs
|
||||||
|
- artifacts_data:/opt/attune/artifacts
|
||||||
|
healthcheck:
|
||||||
|
test: ["CMD-SHELL", "pgrep -f attune-agent || exit 1"]
|
||||||
|
interval: 30s
|
||||||
|
timeout: 10s
|
||||||
|
retries: 3
|
||||||
|
start_period: 20s
|
||||||
|
networks:
|
||||||
|
- attune-network
|
||||||
|
restart: unless-stopped
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Python 3.12 Worker — Use a specific Python version via the agent
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# This demonstrates using the agent with a specific Python version instead of
|
||||||
|
# the built-in worker-python service (which uses debian:bookworm-slim + apt).
|
||||||
|
#
|
||||||
|
# worker-python312:
|
||||||
|
# image: python:3.12-slim
|
||||||
|
# container_name: attune-worker-python312
|
||||||
|
# depends_on:
|
||||||
|
# init-agent:
|
||||||
|
# condition: service_completed_successfully
|
||||||
|
# init-packs:
|
||||||
|
# condition: service_completed_successfully
|
||||||
|
# migrations:
|
||||||
|
# condition: service_completed_successfully
|
||||||
|
# postgres:
|
||||||
|
# condition: service_healthy
|
||||||
|
# rabbitmq:
|
||||||
|
# condition: service_healthy
|
||||||
|
# entrypoint: ["/opt/attune/agent/attune-agent"]
|
||||||
|
# stop_grace_period: 45s
|
||||||
|
# environment:
|
||||||
|
# RUST_LOG: info
|
||||||
|
# ATTUNE_CONFIG: /opt/attune/config/config.yaml
|
||||||
|
# ATTUNE_WORKER_NAME: worker-python312-01
|
||||||
|
# ATTUNE_WORKER_TYPE: container
|
||||||
|
# ATTUNE__SECURITY__JWT_SECRET: ${JWT_SECRET:-docker-dev-secret-change-in-production}
|
||||||
|
# ATTUNE__SECURITY__ENCRYPTION_KEY: ${ENCRYPTION_KEY:-docker-dev-encryption-key-please-change-in-production-32plus}
|
||||||
|
# ATTUNE__DATABASE__URL: postgresql://attune:attune@postgres:5432/attune
|
||||||
|
# ATTUNE__MESSAGE_QUEUE__URL: amqp://attune:attune@rabbitmq:5672
|
||||||
|
# ATTUNE_API_URL: http://attune-api:8080
|
||||||
|
# volumes:
|
||||||
|
# - agent_bin:/opt/attune/agent:ro
|
||||||
|
# - ${ATTUNE_DOCKER_CONFIG_PATH:-./config.docker.yaml}:/opt/attune/config/config.yaml:ro
|
||||||
|
# - packs_data:/opt/attune/packs:ro
|
||||||
|
# - runtime_envs:/opt/attune/runtime_envs
|
||||||
|
# - artifacts_data:/opt/attune/artifacts
|
||||||
|
# healthcheck:
|
||||||
|
# test: ["CMD-SHELL", "pgrep -f attune-agent || exit 1"]
|
||||||
|
# interval: 30s
|
||||||
|
# timeout: 10s
|
||||||
|
# retries: 3
|
||||||
|
# start_period: 20s
|
||||||
|
# networks:
|
||||||
|
# - attune-network
|
||||||
|
# restart: unless-stopped
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# GPU Worker — NVIDIA CUDA image for GPU-accelerated workloads
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Requires: NVIDIA Container Toolkit installed on the Docker host
|
||||||
|
# See: https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/
|
||||||
|
#
|
||||||
|
# worker-gpu:
|
||||||
|
# image: nvidia/cuda:12.3.1-runtime-ubuntu22.04
|
||||||
|
# container_name: attune-worker-gpu
|
||||||
|
# depends_on:
|
||||||
|
# init-agent:
|
||||||
|
# condition: service_completed_successfully
|
||||||
|
# init-packs:
|
||||||
|
# condition: service_completed_successfully
|
||||||
|
# migrations:
|
||||||
|
# condition: service_completed_successfully
|
||||||
|
# postgres:
|
||||||
|
# condition: service_healthy
|
||||||
|
# rabbitmq:
|
||||||
|
# condition: service_healthy
|
||||||
|
# entrypoint: ["/opt/attune/agent/attune-agent"]
|
||||||
|
# runtime: nvidia
|
||||||
|
# stop_grace_period: 45s
|
||||||
|
# environment:
|
||||||
|
# RUST_LOG: info
|
||||||
|
# ATTUNE_CONFIG: /opt/attune/config/config.yaml
|
||||||
|
# ATTUNE_WORKER_NAME: worker-gpu-01
|
||||||
|
# ATTUNE_WORKER_TYPE: container
|
||||||
|
# ATTUNE_WORKER_RUNTIMES: python,shell # Manual override — CUDA image has python pre-installed
|
||||||
|
# ATTUNE__SECURITY__JWT_SECRET: ${JWT_SECRET:-docker-dev-secret-change-in-production}
|
||||||
|
# ATTUNE__SECURITY__ENCRYPTION_KEY: ${ENCRYPTION_KEY:-docker-dev-encryption-key-please-change-in-production-32plus}
|
||||||
|
# ATTUNE__DATABASE__URL: postgresql://attune:attune@postgres:5432/attune
|
||||||
|
# ATTUNE__MESSAGE_QUEUE__URL: amqp://attune:attune@rabbitmq:5672
|
||||||
|
# ATTUNE_API_URL: http://attune-api:8080
|
||||||
|
# volumes:
|
||||||
|
# - agent_bin:/opt/attune/agent:ro
|
||||||
|
# - ${ATTUNE_DOCKER_CONFIG_PATH:-./config.docker.yaml}:/opt/attune/config/config.yaml:ro
|
||||||
|
# - packs_data:/opt/attune/packs:ro
|
||||||
|
# - runtime_envs:/opt/attune/runtime_envs
|
||||||
|
# - artifacts_data:/opt/attune/artifacts
|
||||||
|
# healthcheck:
|
||||||
|
# test: ["CMD-SHELL", "pgrep -f attune-agent || exit 1"]
|
||||||
|
# interval: 30s
|
||||||
|
# timeout: 10s
|
||||||
|
# retries: 3
|
||||||
|
# start_period: 20s
|
||||||
|
# networks:
|
||||||
|
# - attune-network
|
||||||
|
# restart: unless-stopped
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Custom Image Worker — Template for any container image
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Copy this template and customize for your runtime:
|
||||||
|
#
|
||||||
|
# worker-custom:
|
||||||
|
# image: my-org/my-custom-image:latest
|
||||||
|
# container_name: attune-worker-custom
|
||||||
|
# depends_on:
|
||||||
|
# init-agent:
|
||||||
|
# condition: service_completed_successfully
|
||||||
|
# init-packs:
|
||||||
|
# condition: service_completed_successfully
|
||||||
|
# migrations:
|
||||||
|
# condition: service_completed_successfully
|
||||||
|
# postgres:
|
||||||
|
# condition: service_healthy
|
||||||
|
# rabbitmq:
|
||||||
|
# condition: service_healthy
|
||||||
|
# entrypoint: ["/opt/attune/agent/attune-agent"]
|
||||||
|
# stop_grace_period: 45s
|
||||||
|
# environment:
|
||||||
|
# RUST_LOG: info
|
||||||
|
# ATTUNE_CONFIG: /opt/attune/config/config.yaml
|
||||||
|
# ATTUNE_WORKER_NAME: worker-custom-01
|
||||||
|
# ATTUNE_WORKER_TYPE: container
|
||||||
|
# ATTUNE__SECURITY__JWT_SECRET: ${JWT_SECRET:-docker-dev-secret-change-in-production}
|
||||||
|
# ATTUNE__SECURITY__ENCRYPTION_KEY: ${ENCRYPTION_KEY:-docker-dev-encryption-key-please-change-in-production-32plus}
|
||||||
|
# ATTUNE__DATABASE__URL: postgresql://attune:attune@postgres:5432/attune
|
||||||
|
# ATTUNE__MESSAGE_QUEUE__URL: amqp://attune:attune@rabbitmq:5672
|
||||||
|
# ATTUNE_API_URL: http://attune-api:8080
|
||||||
|
# volumes:
|
||||||
|
# - agent_bin:/opt/attune/agent:ro
|
||||||
|
# - ${ATTUNE_DOCKER_CONFIG_PATH:-./config.docker.yaml}:/opt/attune/config/config.yaml:ro
|
||||||
|
# - packs_data:/opt/attune/packs:ro
|
||||||
|
# - runtime_envs:/opt/attune/runtime_envs
|
||||||
|
# - artifacts_data:/opt/attune/artifacts
|
||||||
|
# healthcheck:
|
||||||
|
# test: ["CMD-SHELL", "pgrep -f attune-agent || exit 1"]
|
||||||
|
# interval: 30s
|
||||||
|
# timeout: 10s
|
||||||
|
# retries: 3
|
||||||
|
# start_period: 20s
|
||||||
|
# networks:
|
||||||
|
# - attune-network
|
||||||
|
# restart: unless-stopped
|
||||||
@@ -126,6 +126,29 @@ services:
|
|||||||
restart: on-failure
|
restart: on-failure
|
||||||
entrypoint: "" # Override Python image entrypoint
|
entrypoint: "" # Override Python image entrypoint
|
||||||
|
|
||||||
|
# Agent binary volume population (builds the statically-linked worker and sensor agents)
|
||||||
|
# Other containers can use these binaries by mounting agent_bin and running
|
||||||
|
# /opt/attune/agent/attune-agent or /opt/attune/agent/attune-sensor-agent.
|
||||||
|
init-agent:
|
||||||
|
build:
|
||||||
|
context: .
|
||||||
|
dockerfile: docker/Dockerfile.agent
|
||||||
|
target: agent-init
|
||||||
|
args:
|
||||||
|
BUILDKIT_INLINE_CACHE: 1
|
||||||
|
container_name: attune-init-agent
|
||||||
|
volumes:
|
||||||
|
- agent_bin:/opt/attune/agent
|
||||||
|
entrypoint:
|
||||||
|
[
|
||||||
|
"/bin/sh",
|
||||||
|
"-c",
|
||||||
|
"cp /usr/local/bin/attune-agent /opt/attune/agent/attune-agent && cp /usr/local/bin/attune-sensor-agent /opt/attune/agent/attune-sensor-agent && chmod +x /opt/attune/agent/attune-agent /opt/attune/agent/attune-sensor-agent && /usr/local/bin/attune-agent --version > /opt/attune/agent/attune-agent.version && /usr/local/bin/attune-sensor-agent --version > /opt/attune/agent/attune-sensor-agent.version && echo 'Agent binaries copied successfully'",
|
||||||
|
]
|
||||||
|
restart: "no"
|
||||||
|
networks:
|
||||||
|
- attune-network
|
||||||
|
|
||||||
rabbitmq:
|
rabbitmq:
|
||||||
image: rabbitmq:3.13-management-alpine
|
image: rabbitmq:3.13-management-alpine
|
||||||
container_name: attune-rabbitmq
|
container_name: attune-rabbitmq
|
||||||
@@ -199,7 +222,10 @@ services:
|
|||||||
- runtime_envs:/opt/attune/runtime_envs
|
- runtime_envs:/opt/attune/runtime_envs
|
||||||
- artifacts_data:/opt/attune/artifacts
|
- artifacts_data:/opt/attune/artifacts
|
||||||
- api_logs:/opt/attune/logs
|
- api_logs:/opt/attune/logs
|
||||||
|
- agent_bin:/opt/attune/agent:ro
|
||||||
depends_on:
|
depends_on:
|
||||||
|
init-agent:
|
||||||
|
condition: service_completed_successfully
|
||||||
init-packs:
|
init-packs:
|
||||||
condition: service_completed_successfully
|
condition: service_completed_successfully
|
||||||
init-user:
|
init-user:
|
||||||
@@ -271,19 +297,17 @@ services:
|
|||||||
# ============================================================================
|
# ============================================================================
|
||||||
# Workers
|
# Workers
|
||||||
# ============================================================================
|
# ============================================================================
|
||||||
|
# Default agent-based workers
|
||||||
|
# These use stock runtime images and inject the statically-linked attune-agent
|
||||||
|
# from the shared agent_bin volume instead of baking attune-worker into each image.
|
||||||
worker-shell:
|
worker-shell:
|
||||||
build:
|
image: debian:bookworm-slim
|
||||||
context: .
|
|
||||||
dockerfile: docker/Dockerfile.worker.optimized
|
|
||||||
target: worker-base
|
|
||||||
args:
|
|
||||||
BUILDKIT_INLINE_CACHE: 1
|
|
||||||
container_name: attune-worker-shell
|
container_name: attune-worker-shell
|
||||||
|
entrypoint: ["/opt/attune/agent/attune-agent"]
|
||||||
stop_grace_period: 45s
|
stop_grace_period: 45s
|
||||||
environment:
|
environment:
|
||||||
RUST_LOG: info
|
RUST_LOG: info
|
||||||
ATTUNE_CONFIG: /opt/attune/config/config.yaml
|
ATTUNE_CONFIG: /opt/attune/config/config.yaml
|
||||||
ATTUNE_WORKER_RUNTIMES: shell
|
|
||||||
ATTUNE_WORKER_TYPE: container
|
ATTUNE_WORKER_TYPE: container
|
||||||
ATTUNE_WORKER_NAME: worker-shell-01
|
ATTUNE_WORKER_NAME: worker-shell-01
|
||||||
ATTUNE__SECURITY__JWT_SECRET: ${JWT_SECRET:-docker-dev-secret-change-in-production}
|
ATTUNE__SECURITY__JWT_SECRET: ${JWT_SECRET:-docker-dev-secret-change-in-production}
|
||||||
@@ -292,6 +316,7 @@ services:
|
|||||||
ATTUNE__MESSAGE_QUEUE__URL: amqp://attune:attune@rabbitmq:5672
|
ATTUNE__MESSAGE_QUEUE__URL: amqp://attune:attune@rabbitmq:5672
|
||||||
ATTUNE_API_URL: http://attune-api:8080
|
ATTUNE_API_URL: http://attune-api:8080
|
||||||
volumes:
|
volumes:
|
||||||
|
- agent_bin:/opt/attune/agent:ro
|
||||||
- ${ATTUNE_DOCKER_CONFIG_PATH:-./config.docker.yaml}:/opt/attune/config/config.yaml:ro
|
- ${ATTUNE_DOCKER_CONFIG_PATH:-./config.docker.yaml}:/opt/attune/config/config.yaml:ro
|
||||||
- packs_data:/opt/attune/packs:ro
|
- packs_data:/opt/attune/packs:ro
|
||||||
- ./packs.dev:/opt/attune/packs.dev:rw
|
- ./packs.dev:/opt/attune/packs.dev:rw
|
||||||
@@ -299,6 +324,8 @@ services:
|
|||||||
- artifacts_data:/opt/attune/artifacts
|
- artifacts_data:/opt/attune/artifacts
|
||||||
- worker_shell_logs:/opt/attune/logs
|
- worker_shell_logs:/opt/attune/logs
|
||||||
depends_on:
|
depends_on:
|
||||||
|
init-agent:
|
||||||
|
condition: service_completed_successfully
|
||||||
init-packs:
|
init-packs:
|
||||||
condition: service_completed_successfully
|
condition: service_completed_successfully
|
||||||
init-user:
|
init-user:
|
||||||
@@ -310,7 +337,7 @@ services:
|
|||||||
rabbitmq:
|
rabbitmq:
|
||||||
condition: service_healthy
|
condition: service_healthy
|
||||||
healthcheck:
|
healthcheck:
|
||||||
test: ["CMD-SHELL", "pgrep -f attune-worker || exit 1"]
|
test: ["CMD-SHELL", "pgrep -f attune-agent || exit 1"]
|
||||||
interval: 30s
|
interval: 30s
|
||||||
timeout: 10s
|
timeout: 10s
|
||||||
retries: 3
|
retries: 3
|
||||||
@@ -319,20 +346,15 @@ services:
|
|||||||
- attune-network
|
- attune-network
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
|
|
||||||
# Python worker - Shell + Python runtime
|
# Python worker - official Python image with agent auto-detection
|
||||||
worker-python:
|
worker-python:
|
||||||
build:
|
image: python:3.12-slim
|
||||||
context: .
|
|
||||||
dockerfile: docker/Dockerfile.worker.optimized
|
|
||||||
target: worker-python
|
|
||||||
args:
|
|
||||||
BUILDKIT_INLINE_CACHE: 1
|
|
||||||
container_name: attune-worker-python
|
container_name: attune-worker-python
|
||||||
|
entrypoint: ["/opt/attune/agent/attune-agent"]
|
||||||
stop_grace_period: 45s
|
stop_grace_period: 45s
|
||||||
environment:
|
environment:
|
||||||
RUST_LOG: info
|
RUST_LOG: info
|
||||||
ATTUNE_CONFIG: /opt/attune/config/config.yaml
|
ATTUNE_CONFIG: /opt/attune/config/config.yaml
|
||||||
ATTUNE_WORKER_RUNTIMES: shell,python
|
|
||||||
ATTUNE_WORKER_TYPE: container
|
ATTUNE_WORKER_TYPE: container
|
||||||
ATTUNE_WORKER_NAME: worker-python-01
|
ATTUNE_WORKER_NAME: worker-python-01
|
||||||
ATTUNE__SECURITY__JWT_SECRET: ${JWT_SECRET:-docker-dev-secret-change-in-production}
|
ATTUNE__SECURITY__JWT_SECRET: ${JWT_SECRET:-docker-dev-secret-change-in-production}
|
||||||
@@ -341,6 +363,7 @@ services:
|
|||||||
ATTUNE__MESSAGE_QUEUE__URL: amqp://attune:attune@rabbitmq:5672
|
ATTUNE__MESSAGE_QUEUE__URL: amqp://attune:attune@rabbitmq:5672
|
||||||
ATTUNE_API_URL: http://attune-api:8080
|
ATTUNE_API_URL: http://attune-api:8080
|
||||||
volumes:
|
volumes:
|
||||||
|
- agent_bin:/opt/attune/agent:ro
|
||||||
- ${ATTUNE_DOCKER_CONFIG_PATH:-./config.docker.yaml}:/opt/attune/config/config.yaml:ro
|
- ${ATTUNE_DOCKER_CONFIG_PATH:-./config.docker.yaml}:/opt/attune/config/config.yaml:ro
|
||||||
- packs_data:/opt/attune/packs:ro
|
- packs_data:/opt/attune/packs:ro
|
||||||
- ./packs.dev:/opt/attune/packs.dev:rw
|
- ./packs.dev:/opt/attune/packs.dev:rw
|
||||||
@@ -348,6 +371,8 @@ services:
|
|||||||
- artifacts_data:/opt/attune/artifacts
|
- artifacts_data:/opt/attune/artifacts
|
||||||
- worker_python_logs:/opt/attune/logs
|
- worker_python_logs:/opt/attune/logs
|
||||||
depends_on:
|
depends_on:
|
||||||
|
init-agent:
|
||||||
|
condition: service_completed_successfully
|
||||||
init-packs:
|
init-packs:
|
||||||
condition: service_completed_successfully
|
condition: service_completed_successfully
|
||||||
init-user:
|
init-user:
|
||||||
@@ -359,7 +384,7 @@ services:
|
|||||||
rabbitmq:
|
rabbitmq:
|
||||||
condition: service_healthy
|
condition: service_healthy
|
||||||
healthcheck:
|
healthcheck:
|
||||||
test: ["CMD-SHELL", "pgrep -f attune-worker || exit 1"]
|
test: ["CMD-SHELL", "pgrep -f attune-agent || exit 1"]
|
||||||
interval: 30s
|
interval: 30s
|
||||||
timeout: 10s
|
timeout: 10s
|
||||||
retries: 3
|
retries: 3
|
||||||
@@ -368,20 +393,15 @@ services:
|
|||||||
- attune-network
|
- attune-network
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
|
|
||||||
# Node worker - Shell + Node.js runtime
|
# Node worker - official Node.js image with agent auto-detection
|
||||||
worker-node:
|
worker-node:
|
||||||
build:
|
image: node:22-slim
|
||||||
context: .
|
|
||||||
dockerfile: docker/Dockerfile.worker.optimized
|
|
||||||
target: worker-node
|
|
||||||
args:
|
|
||||||
BUILDKIT_INLINE_CACHE: 1
|
|
||||||
container_name: attune-worker-node
|
container_name: attune-worker-node
|
||||||
|
entrypoint: ["/opt/attune/agent/attune-agent"]
|
||||||
stop_grace_period: 45s
|
stop_grace_period: 45s
|
||||||
environment:
|
environment:
|
||||||
RUST_LOG: info
|
RUST_LOG: info
|
||||||
ATTUNE_CONFIG: /opt/attune/config/config.yaml
|
ATTUNE_CONFIG: /opt/attune/config/config.yaml
|
||||||
ATTUNE_WORKER_RUNTIMES: shell,node
|
|
||||||
ATTUNE_WORKER_TYPE: container
|
ATTUNE_WORKER_TYPE: container
|
||||||
ATTUNE_WORKER_NAME: worker-node-01
|
ATTUNE_WORKER_NAME: worker-node-01
|
||||||
ATTUNE__SECURITY__JWT_SECRET: ${JWT_SECRET:-docker-dev-secret-change-in-production}
|
ATTUNE__SECURITY__JWT_SECRET: ${JWT_SECRET:-docker-dev-secret-change-in-production}
|
||||||
@@ -390,6 +410,7 @@ services:
|
|||||||
ATTUNE__MESSAGE_QUEUE__URL: amqp://attune:attune@rabbitmq:5672
|
ATTUNE__MESSAGE_QUEUE__URL: amqp://attune:attune@rabbitmq:5672
|
||||||
ATTUNE_API_URL: http://attune-api:8080
|
ATTUNE_API_URL: http://attune-api:8080
|
||||||
volumes:
|
volumes:
|
||||||
|
- agent_bin:/opt/attune/agent:ro
|
||||||
- ${ATTUNE_DOCKER_CONFIG_PATH:-./config.docker.yaml}:/opt/attune/config/config.yaml:ro
|
- ${ATTUNE_DOCKER_CONFIG_PATH:-./config.docker.yaml}:/opt/attune/config/config.yaml:ro
|
||||||
- packs_data:/opt/attune/packs:ro
|
- packs_data:/opt/attune/packs:ro
|
||||||
- ./packs.dev:/opt/attune/packs.dev:rw
|
- ./packs.dev:/opt/attune/packs.dev:rw
|
||||||
@@ -397,6 +418,8 @@ services:
|
|||||||
- artifacts_data:/opt/attune/artifacts
|
- artifacts_data:/opt/attune/artifacts
|
||||||
- worker_node_logs:/opt/attune/logs
|
- worker_node_logs:/opt/attune/logs
|
||||||
depends_on:
|
depends_on:
|
||||||
|
init-agent:
|
||||||
|
condition: service_completed_successfully
|
||||||
init-packs:
|
init-packs:
|
||||||
condition: service_completed_successfully
|
condition: service_completed_successfully
|
||||||
init-user:
|
init-user:
|
||||||
@@ -408,7 +431,7 @@ services:
|
|||||||
rabbitmq:
|
rabbitmq:
|
||||||
condition: service_healthy
|
condition: service_healthy
|
||||||
healthcheck:
|
healthcheck:
|
||||||
test: ["CMD-SHELL", "pgrep -f attune-worker || exit 1"]
|
test: ["CMD-SHELL", "pgrep -f attune-agent || exit 1"]
|
||||||
interval: 30s
|
interval: 30s
|
||||||
timeout: 10s
|
timeout: 10s
|
||||||
retries: 3
|
retries: 3
|
||||||
@@ -417,19 +440,17 @@ services:
|
|||||||
- attune-network
|
- attune-network
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
|
|
||||||
# Full worker - All runtimes (shell, python, node, native)
|
# Full worker - Python + Node image with manual native capability override
|
||||||
worker-full:
|
worker-full:
|
||||||
build:
|
image: nikolaik/python-nodejs:python3.12-nodejs22-slim
|
||||||
context: .
|
|
||||||
dockerfile: docker/Dockerfile.worker.optimized
|
|
||||||
target: worker-full
|
|
||||||
args:
|
|
||||||
BUILDKIT_INLINE_CACHE: 1
|
|
||||||
container_name: attune-worker-full
|
container_name: attune-worker-full
|
||||||
|
entrypoint: ["/opt/attune/agent/attune-agent"]
|
||||||
stop_grace_period: 45s
|
stop_grace_period: 45s
|
||||||
environment:
|
environment:
|
||||||
RUST_LOG: info
|
RUST_LOG: info
|
||||||
ATTUNE_CONFIG: /opt/attune/config/config.yaml
|
ATTUNE_CONFIG: /opt/attune/config/config.yaml
|
||||||
|
# Keep native support enabled explicitly; the agent auto-detects interpreters
|
||||||
|
# but "native" is a capability flag rather than a discoverable interpreter.
|
||||||
ATTUNE_WORKER_RUNTIMES: shell,python,node,native
|
ATTUNE_WORKER_RUNTIMES: shell,python,node,native
|
||||||
ATTUNE_WORKER_TYPE: container
|
ATTUNE_WORKER_TYPE: container
|
||||||
ATTUNE_WORKER_NAME: worker-full-01
|
ATTUNE_WORKER_NAME: worker-full-01
|
||||||
@@ -439,6 +460,7 @@ services:
|
|||||||
ATTUNE__MESSAGE_QUEUE__URL: amqp://attune:attune@rabbitmq:5672
|
ATTUNE__MESSAGE_QUEUE__URL: amqp://attune:attune@rabbitmq:5672
|
||||||
ATTUNE_API_URL: http://attune-api:8080
|
ATTUNE_API_URL: http://attune-api:8080
|
||||||
volumes:
|
volumes:
|
||||||
|
- agent_bin:/opt/attune/agent:ro
|
||||||
- ${ATTUNE_DOCKER_CONFIG_PATH:-./config.docker.yaml}:/opt/attune/config/config.yaml:ro
|
- ${ATTUNE_DOCKER_CONFIG_PATH:-./config.docker.yaml}:/opt/attune/config/config.yaml:ro
|
||||||
- packs_data:/opt/attune/packs:ro
|
- packs_data:/opt/attune/packs:ro
|
||||||
- ./packs.dev:/opt/attune/packs.dev:rw
|
- ./packs.dev:/opt/attune/packs.dev:rw
|
||||||
@@ -446,6 +468,8 @@ services:
|
|||||||
- artifacts_data:/opt/attune/artifacts
|
- artifacts_data:/opt/attune/artifacts
|
||||||
- worker_full_logs:/opt/attune/logs
|
- worker_full_logs:/opt/attune/logs
|
||||||
depends_on:
|
depends_on:
|
||||||
|
init-agent:
|
||||||
|
condition: service_completed_successfully
|
||||||
init-packs:
|
init-packs:
|
||||||
condition: service_completed_successfully
|
condition: service_completed_successfully
|
||||||
init-user:
|
init-user:
|
||||||
@@ -457,7 +481,7 @@ services:
|
|||||||
rabbitmq:
|
rabbitmq:
|
||||||
condition: service_healthy
|
condition: service_healthy
|
||||||
healthcheck:
|
healthcheck:
|
||||||
test: ["CMD-SHELL", "pgrep -f attune-worker || exit 1"]
|
test: ["CMD-SHELL", "pgrep -f attune-agent || exit 1"]
|
||||||
interval: 30s
|
interval: 30s
|
||||||
timeout: 10s
|
timeout: 10s
|
||||||
retries: 3
|
retries: 3
|
||||||
@@ -466,18 +490,18 @@ services:
|
|||||||
- attune-network
|
- attune-network
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
|
|
||||||
|
# Default sensor service now uses the injected sensor agent inside a stock runtime image.
|
||||||
sensor:
|
sensor:
|
||||||
build:
|
image: nikolaik/python-nodejs:python3.12-nodejs22-slim
|
||||||
context: .
|
|
||||||
dockerfile: docker/Dockerfile.sensor.optimized
|
|
||||||
target: sensor-full
|
|
||||||
args:
|
|
||||||
BUILDKIT_INLINE_CACHE: 1
|
|
||||||
container_name: attune-sensor
|
container_name: attune-sensor
|
||||||
|
entrypoint: ["/opt/attune/agent/attune-sensor-agent"]
|
||||||
stop_grace_period: 45s
|
stop_grace_period: 45s
|
||||||
environment:
|
environment:
|
||||||
RUST_LOG: debug
|
RUST_LOG: debug
|
||||||
ATTUNE_CONFIG: /opt/attune/config/config.yaml
|
ATTUNE_CONFIG: /opt/attune/config/config.yaml
|
||||||
|
# Keep native support enabled explicitly; interpreter auto-detection does
|
||||||
|
# not infer the synthetic "native" capability.
|
||||||
|
ATTUNE_SENSOR_RUNTIMES: shell,python,node,native
|
||||||
ATTUNE__SECURITY__JWT_SECRET: ${JWT_SECRET:-docker-dev-secret-change-in-production}
|
ATTUNE__SECURITY__JWT_SECRET: ${JWT_SECRET:-docker-dev-secret-change-in-production}
|
||||||
ATTUNE__SECURITY__ENCRYPTION_KEY: ${ENCRYPTION_KEY:-docker-dev-encryption-key-please-change-in-production-32plus}
|
ATTUNE__SECURITY__ENCRYPTION_KEY: ${ENCRYPTION_KEY:-docker-dev-encryption-key-please-change-in-production-32plus}
|
||||||
ATTUNE__DATABASE__URL: postgresql://attune:attune@postgres:5432/attune
|
ATTUNE__DATABASE__URL: postgresql://attune:attune@postgres:5432/attune
|
||||||
@@ -488,12 +512,15 @@ services:
|
|||||||
ATTUNE_MQ_URL: amqp://attune:attune@rabbitmq:5672
|
ATTUNE_MQ_URL: amqp://attune:attune@rabbitmq:5672
|
||||||
ATTUNE_PACKS_BASE_DIR: /opt/attune/packs
|
ATTUNE_PACKS_BASE_DIR: /opt/attune/packs
|
||||||
volumes:
|
volumes:
|
||||||
|
- agent_bin:/opt/attune/agent:ro
|
||||||
- ${ATTUNE_DOCKER_CONFIG_PATH:-./config.docker.yaml}:/opt/attune/config/config.yaml:ro
|
- ${ATTUNE_DOCKER_CONFIG_PATH:-./config.docker.yaml}:/opt/attune/config/config.yaml:ro
|
||||||
- packs_data:/opt/attune/packs:rw
|
- packs_data:/opt/attune/packs:rw
|
||||||
- ./packs.dev:/opt/attune/packs.dev:rw
|
- ./packs.dev:/opt/attune/packs.dev:rw
|
||||||
- runtime_envs:/opt/attune/runtime_envs
|
- runtime_envs:/opt/attune/runtime_envs
|
||||||
- sensor_logs:/opt/attune/logs
|
- sensor_logs:/opt/attune/logs
|
||||||
depends_on:
|
depends_on:
|
||||||
|
init-agent:
|
||||||
|
condition: service_completed_successfully
|
||||||
init-packs:
|
init-packs:
|
||||||
condition: service_completed_successfully
|
condition: service_completed_successfully
|
||||||
init-user:
|
init-user:
|
||||||
@@ -621,6 +648,8 @@ volumes:
|
|||||||
driver: local
|
driver: local
|
||||||
artifacts_data:
|
artifacts_data:
|
||||||
driver: local
|
driver: local
|
||||||
|
agent_bin:
|
||||||
|
driver: local
|
||||||
|
|
||||||
# ============================================================================
|
# ============================================================================
|
||||||
# Networks
|
# Networks
|
||||||
|
|||||||
159
docker/Dockerfile.agent
Normal file
159
docker/Dockerfile.agent
Normal file
@@ -0,0 +1,159 @@
|
|||||||
|
# Multi-stage Dockerfile for the Attune injected agent binaries
|
||||||
|
#
|
||||||
|
# Builds statically-linked `attune-agent` and `attune-sensor-agent` binaries
|
||||||
|
# using musl, suitable for injection into arbitrary runtime containers.
|
||||||
|
#
|
||||||
|
# Stages:
|
||||||
|
# builder - Cross-compile with musl for a fully static binary
|
||||||
|
# agent-binary - Minimal scratch image containing just the binary
|
||||||
|
# agent-init - BusyBox-based image for use as a Kubernetes init container
|
||||||
|
# or Docker Compose volume-populating service (has `cp`)
|
||||||
|
#
|
||||||
|
# Usage:
|
||||||
|
# # Build the minimal binary-only image:
|
||||||
|
# DOCKER_BUILDKIT=1 docker buildx build --target agent-binary -f docker/Dockerfile.agent -t attune-agent:binary .
|
||||||
|
#
|
||||||
|
# # Build the init container image (for volume population via `cp`):
|
||||||
|
# DOCKER_BUILDKIT=1 docker buildx build --target agent-init -f docker/Dockerfile.agent -t attune-agent:latest .
|
||||||
|
#
|
||||||
|
# # Use in docker-compose.yaml to populate a shared volume:
|
||||||
|
# # agent-init:
|
||||||
|
# # image: attune-agent:latest
|
||||||
|
# # command: ["cp", "/usr/local/bin/attune-agent", "/shared/attune-agent"]
|
||||||
|
# # volumes:
|
||||||
|
# # - agent_binary:/shared
|
||||||
|
#
|
||||||
|
# Note: `attune-agent` lives in the worker crate and `attune-sensor-agent`
|
||||||
|
# lives in the sensor crate.
|
||||||
|
|
||||||
|
ARG RUST_VERSION=1.92
|
||||||
|
ARG DEBIAN_VERSION=bookworm
|
||||||
|
|
||||||
|
# ============================================================================
|
||||||
|
# Stage 1: Builder - Cross-compile a statically-linked binary with musl
|
||||||
|
# ============================================================================
|
||||||
|
FROM rust:${RUST_VERSION}-${DEBIAN_VERSION} AS builder
|
||||||
|
|
||||||
|
# Install musl toolchain for static linking
|
||||||
|
RUN apt-get update && apt-get install -y \
|
||||||
|
musl-tools \
|
||||||
|
pkg-config \
|
||||||
|
libssl-dev \
|
||||||
|
ca-certificates \
|
||||||
|
&& rm -rf /var/lib/apt/lists/*
|
||||||
|
|
||||||
|
# Add the musl target for fully static binaries
|
||||||
|
RUN rustup target add x86_64-unknown-linux-musl
|
||||||
|
|
||||||
|
WORKDIR /build
|
||||||
|
|
||||||
|
# Increase rustc stack size to prevent SIGSEGV during release builds
|
||||||
|
ENV RUST_MIN_STACK=67108864
|
||||||
|
|
||||||
|
# Enable SQLx offline mode — compile-time query checking without a live database
|
||||||
|
ENV SQLX_OFFLINE=true
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Dependency caching layer
|
||||||
|
# Copy only Cargo metadata first so `cargo fetch` is cached when only source
|
||||||
|
# code changes. This follows the same selective-copy optimization pattern as
|
||||||
|
# the other active Dockerfiles in this directory.
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
COPY Cargo.toml Cargo.lock ./
|
||||||
|
COPY crates/common/Cargo.toml ./crates/common/Cargo.toml
|
||||||
|
COPY crates/api/Cargo.toml ./crates/api/Cargo.toml
|
||||||
|
COPY crates/executor/Cargo.toml ./crates/executor/Cargo.toml
|
||||||
|
COPY crates/sensor/Cargo.toml ./crates/sensor/Cargo.toml
|
||||||
|
COPY crates/core-timer-sensor/Cargo.toml ./crates/core-timer-sensor/Cargo.toml
|
||||||
|
COPY crates/worker/Cargo.toml ./crates/worker/Cargo.toml
|
||||||
|
COPY crates/notifier/Cargo.toml ./crates/notifier/Cargo.toml
|
||||||
|
COPY crates/cli/Cargo.toml ./crates/cli/Cargo.toml
|
||||||
|
|
||||||
|
# Create minimal stub sources so cargo can resolve the workspace and fetch deps.
|
||||||
|
# These are ONLY used for `cargo fetch` — never compiled.
|
||||||
|
# NOTE: The worker crate has TWO binary targets and the sensor crate now has
|
||||||
|
# two binary targets as well, so we create stubs for all of them.
|
||||||
|
RUN mkdir -p crates/common/src && echo "" > crates/common/src/lib.rs && \
|
||||||
|
mkdir -p crates/api/src && echo "fn main(){}" > crates/api/src/main.rs && \
|
||||||
|
mkdir -p crates/executor/src && echo "fn main(){}" > crates/executor/src/main.rs && \
|
||||||
|
mkdir -p crates/executor/benches && echo "fn main(){}" > crates/executor/benches/context_clone.rs && \
|
||||||
|
mkdir -p crates/sensor/src && echo "fn main(){}" > crates/sensor/src/main.rs && \
|
||||||
|
echo "fn main(){}" > crates/sensor/src/agent_main.rs && \
|
||||||
|
mkdir -p crates/core-timer-sensor/src && echo "fn main(){}" > crates/core-timer-sensor/src/main.rs && \
|
||||||
|
mkdir -p crates/worker/src && echo "fn main(){}" > crates/worker/src/main.rs && \
|
||||||
|
echo "fn main(){}" > crates/worker/src/agent_main.rs && \
|
||||||
|
mkdir -p crates/notifier/src && echo "fn main(){}" > crates/notifier/src/main.rs && \
|
||||||
|
mkdir -p crates/cli/src && echo "fn main(){}" > crates/cli/src/main.rs
|
||||||
|
|
||||||
|
# Download all dependencies (cached unless Cargo.toml/Cargo.lock change)
|
||||||
|
# registry/git use sharing=shared — cargo handles concurrent reads safely
|
||||||
|
RUN --mount=type=cache,target=/usr/local/cargo/registry,sharing=shared \
|
||||||
|
--mount=type=cache,target=/usr/local/cargo/git,sharing=shared \
|
||||||
|
cargo fetch
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Build layer
|
||||||
|
# Copy real source code and compile only the agent binary with musl
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
COPY migrations/ ./migrations/
|
||||||
|
COPY crates/ ./crates/
|
||||||
|
|
||||||
|
# Build the injected agent binaries, statically linked with musl.
|
||||||
|
# Uses a dedicated cache ID (agent-target) so the musl target directory
|
||||||
|
# doesn't collide with the glibc target cache used by other Dockerfiles.
|
||||||
|
RUN --mount=type=cache,target=/usr/local/cargo/registry,sharing=shared \
|
||||||
|
--mount=type=cache,target=/usr/local/cargo/git,sharing=shared \
|
||||||
|
--mount=type=cache,id=agent-target,target=/build/target,sharing=locked \
|
||||||
|
cargo build --release --target x86_64-unknown-linux-musl --bin attune-agent --bin attune-sensor-agent && \
|
||||||
|
cp /build/target/x86_64-unknown-linux-musl/release/attune-agent /build/attune-agent && \
|
||||||
|
cp /build/target/x86_64-unknown-linux-musl/release/attune-sensor-agent /build/attune-sensor-agent
|
||||||
|
|
||||||
|
# Strip the binaries to minimize size
|
||||||
|
RUN strip /build/attune-agent && strip /build/attune-sensor-agent
|
||||||
|
|
||||||
|
# Verify the binaries are statically linked and functional
|
||||||
|
RUN ls -lh /build/attune-agent /build/attune-sensor-agent && \
|
||||||
|
file /build/attune-agent && \
|
||||||
|
file /build/attune-sensor-agent && \
|
||||||
|
ldd /build/attune-agent 2>&1 || true && \
|
||||||
|
ldd /build/attune-sensor-agent 2>&1 || true
|
||||||
|
|
||||||
|
# ============================================================================
|
||||||
|
# Stage 2: agent-binary - Minimal image with just the static binary
|
||||||
|
# ============================================================================
|
||||||
|
# This is the smallest possible image — a single static binary on scratch.
|
||||||
|
# Useful when you only need to extract the binary (e.g., via COPY --from).
|
||||||
|
FROM scratch AS agent-binary
|
||||||
|
|
||||||
|
COPY --from=builder /build/attune-agent /usr/local/bin/attune-agent
|
||||||
|
COPY --from=builder /build/attune-sensor-agent /usr/local/bin/attune-sensor-agent
|
||||||
|
|
||||||
|
ENTRYPOINT ["/usr/local/bin/attune-agent"]
|
||||||
|
|
||||||
|
# ============================================================================
|
||||||
|
# Stage 3: agent-init - Init container for volume population
|
||||||
|
# ============================================================================
|
||||||
|
# Uses busybox so we have `cp`, `sh`, etc. for use as a Docker Compose or
|
||||||
|
# Kubernetes init container that copies the agent binary into a shared volume.
|
||||||
|
#
|
||||||
|
# Example docker-compose.yaml usage:
|
||||||
|
# agent-init:
|
||||||
|
# image: attune-agent:latest
|
||||||
|
# command: ["cp", "/usr/local/bin/attune-agent", "/shared/attune-agent"]
|
||||||
|
# volumes:
|
||||||
|
# - agent_binary:/shared
|
||||||
|
#
|
||||||
|
# my-worker-container:
|
||||||
|
# image: python:3.12
|
||||||
|
# command: ["/agent/attune-agent"]
|
||||||
|
# volumes:
|
||||||
|
# - agent_binary:/agent:ro
|
||||||
|
# depends_on:
|
||||||
|
# agent-init:
|
||||||
|
# condition: service_completed_successfully
|
||||||
|
FROM busybox:1.36 AS agent-init
|
||||||
|
|
||||||
|
COPY --from=builder /build/attune-agent /usr/local/bin/attune-agent
|
||||||
|
COPY --from=builder /build/attune-sensor-agent /usr/local/bin/attune-sensor-agent
|
||||||
|
|
||||||
|
ENTRYPOINT ["/usr/local/bin/attune-agent"]
|
||||||
6
docker/Dockerfile.agent-package
Normal file
6
docker/Dockerfile.agent-package
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
FROM busybox:1.36
|
||||||
|
|
||||||
|
COPY dist/attune-agent /usr/local/bin/attune-agent
|
||||||
|
COPY dist/attune-sensor-agent /usr/local/bin/attune-sensor-agent
|
||||||
|
|
||||||
|
ENTRYPOINT ["/usr/local/bin/attune-agent"]
|
||||||
@@ -1,10 +0,0 @@
|
|||||||
FROM python:3.11-slim
|
|
||||||
|
|
||||||
COPY packs /source/packs
|
|
||||||
COPY scripts/load_core_pack.py /scripts/load_core_pack.py
|
|
||||||
COPY docker/init-packs.sh /init-packs.sh
|
|
||||||
|
|
||||||
RUN pip install --no-cache-dir psycopg2-binary pyyaml && \
|
|
||||||
chmod +x /init-packs.sh
|
|
||||||
|
|
||||||
CMD ["/bin/sh", "/init-packs.sh"]
|
|
||||||
@@ -1,7 +0,0 @@
|
|||||||
FROM postgres:16-alpine
|
|
||||||
|
|
||||||
COPY docker/init-user.sh /init-user.sh
|
|
||||||
|
|
||||||
RUN chmod +x /init-user.sh
|
|
||||||
|
|
||||||
CMD ["/bin/sh", "/init-user.sh"]
|
|
||||||
@@ -1,9 +0,0 @@
|
|||||||
FROM postgres:16-alpine
|
|
||||||
|
|
||||||
COPY migrations /migrations
|
|
||||||
COPY docker/run-migrations.sh /run-migrations.sh
|
|
||||||
COPY docker/init-roles.sql /docker/init-roles.sql
|
|
||||||
|
|
||||||
RUN chmod +x /run-migrations.sh
|
|
||||||
|
|
||||||
CMD ["/bin/sh", "/run-migrations.sh"]
|
|
||||||
@@ -51,6 +51,7 @@ RUN mkdir -p crates/common/src && echo "" > crates/common/src/lib.rs && \
|
|||||||
mkdir -p crates/sensor/src && echo "fn main(){}" > crates/sensor/src/main.rs && \
|
mkdir -p crates/sensor/src && echo "fn main(){}" > crates/sensor/src/main.rs && \
|
||||||
mkdir -p crates/core-timer-sensor/src && echo "fn main(){}" > crates/core-timer-sensor/src/main.rs && \
|
mkdir -p crates/core-timer-sensor/src && echo "fn main(){}" > crates/core-timer-sensor/src/main.rs && \
|
||||||
mkdir -p crates/worker/src && echo "fn main(){}" > crates/worker/src/main.rs && \
|
mkdir -p crates/worker/src && echo "fn main(){}" > crates/worker/src/main.rs && \
|
||||||
|
echo "fn main(){}" > crates/worker/src/agent_main.rs && \
|
||||||
mkdir -p crates/notifier/src && echo "fn main(){}" > crates/notifier/src/main.rs && \
|
mkdir -p crates/notifier/src && echo "fn main(){}" > crates/notifier/src/main.rs && \
|
||||||
mkdir -p crates/cli/src && echo "fn main(){}" > crates/cli/src/main.rs
|
mkdir -p crates/cli/src && echo "fn main(){}" > crates/cli/src/main.rs
|
||||||
|
|
||||||
|
|||||||
@@ -51,6 +51,7 @@ RUN mkdir -p crates/executor/src && echo "fn main() {}" > crates/executor/src/ma
|
|||||||
RUN mkdir -p crates/executor/benches && echo "fn main() {}" > crates/executor/benches/context_clone.rs
|
RUN mkdir -p crates/executor/benches && echo "fn main() {}" > crates/executor/benches/context_clone.rs
|
||||||
RUN mkdir -p crates/sensor/src && echo "fn main() {}" > crates/sensor/src/main.rs
|
RUN mkdir -p crates/sensor/src && echo "fn main() {}" > crates/sensor/src/main.rs
|
||||||
RUN mkdir -p crates/worker/src && echo "fn main() {}" > crates/worker/src/main.rs
|
RUN mkdir -p crates/worker/src && echo "fn main() {}" > crates/worker/src/main.rs
|
||||||
|
RUN echo "fn main() {}" > crates/worker/src/agent_main.rs
|
||||||
RUN mkdir -p crates/notifier/src && echo "fn main() {}" > crates/notifier/src/main.rs
|
RUN mkdir -p crates/notifier/src && echo "fn main() {}" > crates/notifier/src/main.rs
|
||||||
RUN mkdir -p crates/cli/src && echo "fn main() {}" > crates/cli/src/main.rs
|
RUN mkdir -p crates/cli/src && echo "fn main() {}" > crates/cli/src/main.rs
|
||||||
|
|
||||||
|
|||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user