publishing with intentional architecture
Some checks failed
Publish Images / Resolve Publish Metadata (push) Successful in 18s
Publish Images / Publish web (arm64) (push) Successful in 7m16s
CI / Rustfmt (push) Has been cancelled
CI / Clippy (push) Has been cancelled
CI / Security Advisory Checks (push) Has been cancelled
CI / Tests (push) Has been cancelled
CI / Cargo Audit & Deny (push) Has been cancelled
CI / Web Blocking Checks (push) Has been cancelled
CI / Security Blocking Checks (push) Has been cancelled
CI / Web Advisory Checks (push) Has been cancelled
Publish Images / Publish agent (amd64) (push) Has been cancelled
Publish Images / Publish api (amd64) (push) Has been cancelled
Publish Images / Publish executor (amd64) (push) Has been cancelled
Publish Images / Publish notifier (amd64) (push) Has been cancelled
Publish Images / Publish agent (arm64) (push) Has been cancelled
Publish Images / Publish api (arm64) (push) Has been cancelled
Publish Images / Publish executor (arm64) (push) Has been cancelled
Publish Images / Publish notifier (arm64) (push) Has been cancelled
Publish Images / Publish web (amd64) (push) Has been cancelled
Publish Images / Build Rust Bundles (amd64) (push) Has started running
Publish Images / Publish manifest attune-agent (push) Has been cancelled
Publish Images / Publish manifest attune-api (push) Has been cancelled
Publish Images / Publish manifest attune-executor (push) Has been cancelled
Publish Images / Publish manifest attune-notifier (push) Has been cancelled
Publish Images / Build Rust Bundles (arm64) (push) Has been cancelled
Publish Images / Publish manifest attune-web (push) Has been cancelled

This commit is contained in:
2026-03-25 01:10:10 -05:00
parent 2ebb03b868
commit 62307e8c65
8 changed files with 874 additions and 185 deletions

View File

@@ -1,7 +1,27 @@
name: Publish Images And Chart name: Publish Images
on: on:
workflow_dispatch: workflow_dispatch:
inputs:
target_arch:
description: Architecture to publish
type: choice
options:
- all
- amd64
- arm64
default: all
target_image:
description: Image to publish
type: choice
options:
- all
- api
- executor
- notifier
- agent
- web
default: all
push: push:
branches: branches:
- main - main
@@ -13,21 +33,26 @@ env:
REGISTRY_HOST: ${{ vars.CLUSTER_GITEA_HOST }} REGISTRY_HOST: ${{ vars.CLUSTER_GITEA_HOST }}
REGISTRY_NAMESPACE: ${{ vars.CONTAINER_REGISTRY_NAMESPACE }} REGISTRY_NAMESPACE: ${{ vars.CONTAINER_REGISTRY_NAMESPACE }}
REGISTRY_PLAIN_HTTP: ${{ vars.CONTAINER_REGISTRY_INSECURE }} REGISTRY_PLAIN_HTTP: ${{ vars.CONTAINER_REGISTRY_INSECURE }}
CHART_NAME: attune ARTIFACT_REPOSITORY: attune-build-artifacts
CARGO_TERM_COLOR: always
CARGO_INCREMENTAL: 0
CARGO_NET_RETRY: 10
RUSTUP_MAX_RETRIES: 10
RUST_MIN_STACK: 67108864
SQLX_OFFLINE: true
RUNNER_TOOL_CACHE: /toolcache
jobs: jobs:
metadata: metadata:
name: Resolve Publish Metadata name: Resolve Publish Metadata
runs-on: ubuntu-latest runs-on: build-amd64
outputs: outputs:
registry: ${{ steps.meta.outputs.registry }} registry: ${{ steps.meta.outputs.registry }}
namespace: ${{ steps.meta.outputs.namespace }} namespace: ${{ steps.meta.outputs.namespace }}
registry_plain_http: ${{ steps.meta.outputs.registry_plain_http }} registry_plain_http: ${{ steps.meta.outputs.registry_plain_http }}
image_tag: ${{ steps.meta.outputs.image_tag }} image_tag: ${{ steps.meta.outputs.image_tag }}
image_tags: ${{ steps.meta.outputs.image_tags }} image_tags: ${{ steps.meta.outputs.image_tags }}
chart_version: ${{ steps.meta.outputs.chart_version }} artifact_ref_base: ${{ steps.meta.outputs.artifact_ref_base }}
app_version: ${{ steps.meta.outputs.app_version }}
release_channel: ${{ steps.meta.outputs.release_channel }}
steps: steps:
- name: Resolve tags and registry paths - name: Resolve tags and registry paths
id: meta id: meta
@@ -78,97 +103,400 @@ jobs:
if [ "$ref_type" = "tag" ] && printf '%s' "$ref_name" | grep -Eq '^v[0-9]+\.[0-9]+\.[0-9]+([-.].*)?$'; then if [ "$ref_type" = "tag" ] && printf '%s' "$ref_name" | grep -Eq '^v[0-9]+\.[0-9]+\.[0-9]+([-.].*)?$'; then
version="${ref_name#v}" version="${ref_name#v}"
image_tags="${version},latest,sha-${short_sha}" image_tags="${version},latest,sha-${short_sha}"
chart_version="$version"
release_channel="release"
else else
version="sha-${short_sha}" version="sha-${short_sha}"
image_tags="edge,sha-${short_sha}" image_tags="edge,sha-${short_sha}"
chart_version="0.0.0-dev.${{ github.run_number }}"
release_channel="edge"
fi fi
artifact_ref_base="${registry}/${namespace}/${ARTIFACT_REPOSITORY}"
{ {
echo "registry=$registry" echo "registry=$registry"
echo "namespace=$namespace" echo "namespace=$namespace"
echo "registry_plain_http=$registry_plain_http" echo "registry_plain_http=$registry_plain_http"
echo "image_tag=$version" echo "image_tag=$version"
echo "image_tags=$image_tags" echo "image_tags=$image_tags"
echo "chart_version=$chart_version" echo "artifact_ref_base=$artifact_ref_base"
echo "app_version=$version"
echo "release_channel=$release_channel"
} >> "$GITHUB_OUTPUT" } >> "$GITHUB_OUTPUT"
publish-images: build-rust-bundles:
name: Publish ${{ matrix.image.name }} name: Build Rust Bundles (${{ matrix.arch }})
runs-on: ubuntu-latest runs-on: ${{ matrix.runner_label }}
needs: metadata needs: metadata
if: |
github.event_name != 'workflow_dispatch' ||
inputs.target_arch == 'all' ||
inputs.target_arch == matrix.arch
strategy: strategy:
fail-fast: false fail-fast: false
matrix: matrix:
include:
- arch: amd64
runner_label: build-amd64
musl_target: x86_64-unknown-linux-musl
- arch: arm64
runner_label: build-arm64
musl_target: aarch64-unknown-linux-musl
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Cache Rust toolchain
uses: actions/cache@v4
with:
path: |
~/.rustup/toolchains
~/.rustup/update-hashes
key: rustup-publish-${{ runner.os }}-${{ matrix.arch }}-stable-v1
restore-keys: |
rustup-${{ runner.os }}-${{ matrix.arch }}-stable-v1
rustup-${{ runner.os }}-stable-v1
rustup-
- name: Setup Rust
uses: dtolnay/rust-toolchain@stable
with:
targets: ${{ matrix.musl_target }}
- name: Cache Cargo registry + index
uses: actions/cache@v4
with:
path: |
~/.cargo/registry/index
~/.cargo/registry/cache
~/.cargo/git/db
key: cargo-registry-publish-${{ matrix.arch }}-${{ hashFiles('**/Cargo.lock') }}
restore-keys: |
cargo-registry-publish-${{ matrix.arch }}-
cargo-registry-
- name: Cache Cargo build artifacts
uses: actions/cache@v4
with:
path: target
key: cargo-publish-${{ matrix.arch }}-${{ hashFiles('**/Cargo.lock') }}-${{ hashFiles('**/*.rs', '**/Cargo.toml') }}
restore-keys: |
cargo-publish-${{ matrix.arch }}-${{ hashFiles('**/Cargo.lock') }}-
cargo-publish-${{ matrix.arch }}-
- name: Install native build dependencies
shell: bash
run: |
set -euo pipefail
apt-get update
apt-get install -y pkg-config libssl-dev musl-tools file
- name: Build release binaries
shell: bash
run: |
set -euo pipefail
cargo build --release \
--bin attune-api \
--bin attune-executor \
--bin attune-notifier
- name: Build static agent binaries
shell: bash
run: |
set -euo pipefail
cargo build --release \
--target "${{ matrix.musl_target }}" \
--bin attune-agent \
--bin attune-sensor-agent
- name: Assemble binary bundle
shell: bash
run: |
set -euo pipefail
bundle_root="dist/bundle/${{ matrix.arch }}"
mkdir -p "$bundle_root/bin" "$bundle_root/agent"
cp target/release/attune-api "$bundle_root/bin/"
cp target/release/attune-executor "$bundle_root/bin/"
cp target/release/attune-notifier "$bundle_root/bin/"
cp target/${{ matrix.musl_target }}/release/attune-agent "$bundle_root/agent/"
cp target/${{ matrix.musl_target }}/release/attune-sensor-agent "$bundle_root/agent/"
cat > "$bundle_root/metadata.json" <<EOF
{
"git_sha": "${{ github.sha }}",
"ref": "${{ github.ref }}",
"arch": "${{ matrix.arch }}",
"image_tag": "${{ needs.metadata.outputs.image_tag }}"
}
EOF
tar -C dist/bundle/${{ matrix.arch }} -czf "dist/attune-binaries-${{ matrix.arch }}.tar.gz" .
- name: Setup ORAS
uses: oras-project/setup-oras@v1
- name: Log in to registry for artifacts
shell: bash
env:
REGISTRY_USERNAME: ${{ secrets.CONTAINER_REGISTRY_USERNAME }}
REGISTRY_PASSWORD: ${{ secrets.CONTAINER_REGISTRY_PASSWORD }}
GITHUB_TOKEN_FALLBACK: ${{ secrets.GITHUB_TOKEN }}
run: |
set -euo pipefail
registry_username="${REGISTRY_USERNAME:-${{ github.actor }}}"
registry_password="${REGISTRY_PASSWORD:-${GITHUB_TOKEN_FALLBACK:-}}"
login_args=()
if [ -z "$registry_password" ]; then
echo "Set CONTAINER_REGISTRY_PASSWORD or enable GITHUB_TOKEN package writes"
exit 1
fi
if [ "${{ needs.metadata.outputs.registry_plain_http }}" = "true" ]; then
login_args+=(--plain-http)
fi
oras login "${{ needs.metadata.outputs.registry }}" \
"${login_args[@]}" \
--username "$registry_username" \
--password "$registry_password"
- name: Push binary bundle artifact
shell: bash
run: |
set -euo pipefail
push_args=()
if [ "${{ needs.metadata.outputs.registry_plain_http }}" = "true" ]; then
push_args+=(--plain-http)
fi
oras push \
"${push_args[@]}" \
"${{ needs.metadata.outputs.artifact_ref_base }}:rust-binaries-${{ needs.metadata.outputs.image_tag }}-${{ matrix.arch }}" \
--artifact-type application/vnd.attune.rust-binaries.v1 \
"dist/attune-binaries-${{ matrix.arch }}.tar.gz:application/vnd.attune.rust-binaries.layer.v1.tar+gzip"
publish-rust-images:
name: Publish ${{ matrix.image.name }} (${{ matrix.arch }})
runs-on: ${{ matrix.runner_label }}
needs:
- metadata
- build-rust-bundles
if: |
(github.event_name != 'workflow_dispatch' ||
inputs.target_arch == 'all' ||
inputs.target_arch == matrix.arch) &&
(github.event_name != 'workflow_dispatch' ||
inputs.target_image == 'all' ||
inputs.target_image == matrix.image.name)
strategy:
fail-fast: false
matrix:
include:
- arch: amd64
runner_label: build-amd64
platform: linux/amd64
image: image:
- name: api name: api
repository: attune-api repository: attune-api
dockerfile: docker/Dockerfile.optimized source_path: bin/attune-api
context: . dockerfile: docker/Dockerfile.runtime
target: "" - arch: amd64
build_args: | runner_label: build-amd64
SERVICE=api platform: linux/amd64
- name: executor image:
name: executor
repository: attune-executor repository: attune-executor
dockerfile: docker/Dockerfile.optimized source_path: bin/attune-executor
context: . dockerfile: docker/Dockerfile.runtime
target: "" - arch: amd64
build_args: | runner_label: build-amd64
SERVICE=executor platform: linux/amd64
- name: notifier image:
name: notifier
repository: attune-notifier repository: attune-notifier
dockerfile: docker/Dockerfile.optimized source_path: bin/attune-notifier
context: . dockerfile: docker/Dockerfile.runtime
target: "" - arch: amd64
build_args: | runner_label: build-amd64
SERVICE=notifier platform: linux/amd64
- name: sensor image:
repository: attune-sensor name: agent
dockerfile: docker/Dockerfile.sensor.optimized
context: .
target: sensor-full
build_args: ""
- name: worker
repository: attune-worker
dockerfile: docker/Dockerfile.worker.optimized
context: .
target: worker-full
build_args: ""
- name: web
repository: attune-web
dockerfile: docker/Dockerfile.web
context: .
target: ""
build_args: ""
- name: migrations
repository: attune-migrations
dockerfile: docker/Dockerfile.migrations
context: .
target: ""
build_args: ""
- name: init-user
repository: attune-init-user
dockerfile: docker/Dockerfile.init-user
context: .
target: ""
build_args: ""
- name: init-packs
repository: attune-init-packs
dockerfile: docker/Dockerfile.init-packs
context: .
target: ""
build_args: ""
- name: agent
repository: attune-agent repository: attune-agent
dockerfile: docker/Dockerfile.agent source_path: agent/attune-agent
context: . dockerfile: docker/Dockerfile.agent-package
target: agent-init - arch: arm64
build_args: "" runner_label: build-arm64
platform: linux/arm64
image:
name: api
repository: attune-api
source_path: bin/attune-api
dockerfile: docker/Dockerfile.runtime
- arch: arm64
runner_label: build-arm64
platform: linux/arm64
image:
name: executor
repository: attune-executor
source_path: bin/attune-executor
dockerfile: docker/Dockerfile.runtime
- arch: arm64
runner_label: build-arm64
platform: linux/arm64
image:
name: notifier
repository: attune-notifier
source_path: bin/attune-notifier
dockerfile: docker/Dockerfile.runtime
- arch: arm64
runner_label: build-arm64
platform: linux/arm64
image:
name: agent
repository: attune-agent
source_path: agent/attune-agent
dockerfile: docker/Dockerfile.agent-package
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Setup ORAS
uses: oras-project/setup-oras@v1
- name: Setup Docker Buildx
if: needs.metadata.outputs.registry_plain_http != 'true'
uses: docker/setup-buildx-action@v3
- name: Setup Docker Buildx For Plain HTTP Registry
if: needs.metadata.outputs.registry_plain_http == 'true'
uses: docker/setup-buildx-action@v3
with:
buildkitd-config-inline: |
[registry."${{ needs.metadata.outputs.registry }}"]
http = true
insecure = true
- name: Log in to registry
shell: bash
env:
REGISTRY_USERNAME: ${{ secrets.CONTAINER_REGISTRY_USERNAME }}
REGISTRY_PASSWORD: ${{ secrets.CONTAINER_REGISTRY_PASSWORD }}
GITHUB_TOKEN_FALLBACK: ${{ secrets.GITHUB_TOKEN }}
run: |
set -euo pipefail
registry_username="${REGISTRY_USERNAME:-${{ github.actor }}}"
registry_password="${REGISTRY_PASSWORD:-${GITHUB_TOKEN_FALLBACK:-}}"
if [ -z "$registry_password" ]; then
echo "Set CONTAINER_REGISTRY_PASSWORD or enable GITHUB_TOKEN package writes"
exit 1
fi
mkdir -p "$HOME/.docker"
auth="$(printf '%s:%s' "$registry_username" "$registry_password" | base64 | tr -d '\n')"
cat > "$HOME/.docker/config.json" <<EOF
{
"auths": {
"${{ needs.metadata.outputs.registry }}": {
"auth": "${auth}"
}
}
}
EOF
oras_login_args=()
if [ "${{ needs.metadata.outputs.registry_plain_http }}" = "true" ]; then
oras_login_args+=(--plain-http)
fi
oras login "${{ needs.metadata.outputs.registry }}" \
"${oras_login_args[@]}" \
--username "$registry_username" \
--password "$registry_password"
- name: Pull binary bundle
shell: bash
run: |
set -euo pipefail
pull_args=()
if [ "${{ needs.metadata.outputs.registry_plain_http }}" = "true" ]; then
pull_args+=(--plain-http)
fi
mkdir -p dist/artifact
cd dist/artifact
oras pull \
"${pull_args[@]}" \
"${{ needs.metadata.outputs.artifact_ref_base }}:rust-binaries-${{ needs.metadata.outputs.image_tag }}-${{ matrix.arch }}"
tar -xzf "attune-binaries-${{ matrix.arch }}.tar.gz"
- name: Prepare packaging context
shell: bash
run: |
set -euo pipefail
rm -rf dist/image
mkdir -p dist/image
case "${{ matrix.image.name }}" in
api|executor|notifier)
cp "dist/artifact/${{ matrix.image.source_path }}" dist/attune-service-binary
;;
agent)
cp dist/artifact/agent/attune-agent dist/attune-agent
cp dist/artifact/agent/attune-sensor-agent dist/attune-sensor-agent
;;
*)
echo "Unsupported image: ${{ matrix.image.name }}"
exit 1
;;
esac
- name: Push architecture image
shell: bash
run: |
set -euo pipefail
image_ref="${{ needs.metadata.outputs.registry }}/${{ needs.metadata.outputs.namespace }}/${{ matrix.image.repository }}:${{ needs.metadata.outputs.image_tag }}-${{ matrix.arch }}"
build_cmd=(
docker buildx build
.
--platform "${{ matrix.platform }}"
--file "${{ matrix.image.dockerfile }}"
)
if [ "${{ needs.metadata.outputs.registry_plain_http }}" = "true" ]; then
build_cmd+=(--output "type=image,\"name=${image_ref}\",push=true,registry.insecure=true")
else
build_cmd+=(--tag "$image_ref" --push)
fi
"${build_cmd[@]}"
publish-web-images:
name: Publish web (${{ matrix.arch }})
runs-on: ${{ matrix.runner_label }}
needs: metadata
if: |
(github.event_name != 'workflow_dispatch' ||
inputs.target_arch == 'all' ||
inputs.target_arch == matrix.arch) &&
(github.event_name != 'workflow_dispatch' ||
inputs.target_image == 'all' ||
inputs.target_image == 'web')
strategy:
fail-fast: false
matrix:
include:
- arch: amd64
runner_label: build-amd64
platform: linux/amd64
- arch: arm64
runner_label: build-arm64
platform: linux/arm64
steps: steps:
- name: Checkout - name: Checkout
uses: actions/checkout@v4 uses: actions/checkout@v4
@@ -196,7 +524,6 @@ jobs:
set -euo pipefail set -euo pipefail
username="${REGISTRY_USERNAME:-${{ github.actor }}}" username="${REGISTRY_USERNAME:-${{ github.actor }}}"
password="${REGISTRY_PASSWORD:-${GITHUB_TOKEN_FALLBACK:-}}" password="${REGISTRY_PASSWORD:-${GITHUB_TOKEN_FALLBACK:-}}"
registry="${{ needs.metadata.outputs.registry }}"
if [ -z "$password" ]; then if [ -z "$password" ]; then
echo "Set CONTAINER_REGISTRY_PASSWORD or enable GITHUB_TOKEN package writes" echo "Set CONTAINER_REGISTRY_PASSWORD or enable GITHUB_TOKEN package writes"
@@ -209,81 +536,56 @@ jobs:
cat > "$HOME/.docker/config.json" <<EOF cat > "$HOME/.docker/config.json" <<EOF
{ {
"auths": { "auths": {
"${registry}": { "${{ needs.metadata.outputs.registry }}": {
"auth": "${auth}" "auth": "${auth}"
} }
} }
} }
EOF EOF
- name: Prepare image tags - name: Push architecture image
id: tags
shell: bash shell: bash
run: | run: |
set -euo pipefail set -euo pipefail
image_ref_base="${{ needs.metadata.outputs.registry }}/${{ needs.metadata.outputs.namespace }}/${{ matrix.image.repository }}"
tag_lines=""
IFS=',' read -ra tags <<< "${{ needs.metadata.outputs.image_tags }}"
for tag in "${tags[@]}"; do
tag_lines="${tag_lines}${image_ref_base}:${tag}"$'\n'
done
printf 'tags<<EOF\n%sEOF\n' "$tag_lines" >> "$GITHUB_OUTPUT"
- name: Build and push image image_ref="${{ needs.metadata.outputs.registry }}/${{ needs.metadata.outputs.namespace }}/attune-web:${{ needs.metadata.outputs.image_tag }}-${{ matrix.arch }}"
shell: bash
run: |
set -euo pipefail
image_names_csv=""
build_cmd=( build_cmd=(
docker buildx build docker buildx build
"${{ matrix.image.context }}" .
--file "${{ matrix.image.dockerfile }}" --platform "${{ matrix.platform }}"
--file docker/Dockerfile.web
) )
if [ -n "${{ matrix.image.target }}" ]; then
build_cmd+=(--target "${{ matrix.image.target }}")
fi
while IFS= read -r tag; do
if [ -n "$tag" ]; then
if [ -n "$image_names_csv" ]; then
image_names_csv="${image_names_csv},${tag}"
else
image_names_csv="${tag}"
fi
if [ "${{ needs.metadata.outputs.registry_plain_http }}" != "true" ]; then
build_cmd+=(--tag "$tag")
fi
fi
done <<< "${{ steps.tags.outputs.tags }}"
while IFS= read -r build_arg; do
[ -n "$build_arg" ] && build_cmd+=(--build-arg "$build_arg")
done <<< "${{ matrix.image.build_args }}"
if [ "${{ needs.metadata.outputs.registry_plain_http }}" = "true" ]; then if [ "${{ needs.metadata.outputs.registry_plain_http }}" = "true" ]; then
build_cmd+=(--output "type=image,\"name=${image_names_csv}\",push=true,registry.insecure=true") build_cmd+=(--output "type=image,\"name=${image_ref}\",push=true,registry.insecure=true")
else else
build_cmd+=(--push) build_cmd+=(--tag "$image_ref" --push)
fi fi
"${build_cmd[@]}" "${build_cmd[@]}"
publish-chart: publish-manifests:
name: Publish Helm Chart name: Publish manifest ${{ matrix.repository }}
runs-on: ubuntu-latest runs-on: build-amd64
needs: needs:
- metadata - metadata
- publish-images - publish-rust-images
- publish-web-images
if: |
github.event_name != 'workflow_dispatch' ||
(inputs.target_arch == 'all' && inputs.target_image == 'all')
strategy:
fail-fast: false
matrix:
repository:
- attune-api
- attune-executor
- attune-notifier
- attune-agent
- attune-web
steps: steps:
- name: Checkout - name: Configure OCI registry auth
uses: actions/checkout@v4
- name: Setup Helm
uses: azure/setup-helm@v4
- name: Log in to Gitea OCI registry
shell: bash shell: bash
env: env:
REGISTRY_USERNAME: ${{ secrets.CONTAINER_REGISTRY_USERNAME }} REGISTRY_USERNAME: ${{ secrets.CONTAINER_REGISTRY_USERNAME }}
@@ -291,43 +593,48 @@ jobs:
GITHUB_TOKEN_FALLBACK: ${{ secrets.GITHUB_TOKEN }} GITHUB_TOKEN_FALLBACK: ${{ secrets.GITHUB_TOKEN }}
run: | run: |
set -euo pipefail set -euo pipefail
registry_username="${REGISTRY_USERNAME:-${{ github.actor }}}" username="${REGISTRY_USERNAME:-${{ github.actor }}}"
registry_password="${REGISTRY_PASSWORD:-${GITHUB_TOKEN_FALLBACK:-}}" password="${REGISTRY_PASSWORD:-${GITHUB_TOKEN_FALLBACK:-}}"
login_args=()
if [ -z "$registry_password" ]; then if [ -z "$password" ]; then
echo "Set CONTAINER_REGISTRY_PASSWORD or enable GITHUB_TOKEN package writes" echo "Set CONTAINER_REGISTRY_PASSWORD or enable GITHUB_TOKEN package writes"
exit 1 exit 1
fi fi
if [ "${{ needs.metadata.outputs.registry_plain_http }}" = "true" ]; then mkdir -p "$HOME/.docker"
login_args+=(--plain-http) auth="$(printf '%s:%s' "$username" "$password" | base64 | tr -d '\n')"
fi
printf '%s' "$registry_password" | helm registry login "${{ needs.metadata.outputs.registry }}" \ cat > "$HOME/.docker/config.json" <<EOF
--username "$registry_username" \ {
"${login_args[@]}" \ "auths": {
--password-stdin "${{ needs.metadata.outputs.registry }}": {
"auth": "${auth}"
}
}
}
EOF
- name: Lint chart - name: Publish manifest tags
shell: bash
run: | run: |
helm lint charts/attune set -euo pipefail
- name: Package chart image_base="${{ needs.metadata.outputs.registry }}/${{ needs.metadata.outputs.namespace }}/${{ matrix.repository }}"
run: |
mkdir -p dist
helm package charts/attune \
--destination dist \
--version "${{ needs.metadata.outputs.chart_version }}" \
--app-version "${{ needs.metadata.outputs.app_version }}"
- name: Push chart to OCI registry
run: |
push_args=() push_args=()
if [ "${{ needs.metadata.outputs.registry_plain_http }}" = "true" ]; then if [ "${{ needs.metadata.outputs.registry_plain_http }}" = "true" ]; then
push_args+=(--plain-http) push_args+=(--insecure)
fi fi
helm push "dist/${CHART_NAME}-${{ needs.metadata.outputs.chart_version }}.tgz" \ IFS=',' read -ra tags <<< "${{ needs.metadata.outputs.image_tags }}"
"oci://${{ needs.metadata.outputs.registry }}/${{ needs.metadata.outputs.namespace }}/helm" \ for tag in "${tags[@]}"; do
"${push_args[@]}" manifest_ref="${image_base}:${tag}"
amd64_ref="${image_base}:${{ needs.metadata.outputs.image_tag }}-amd64"
arm64_ref="${image_base}:${{ needs.metadata.outputs.image_tag }}-arm64"
docker manifest rm "$manifest_ref" >/dev/null 2>&1 || true
docker manifest create "$manifest_ref" "$amd64_ref" "$arm64_ref"
docker manifest annotate "$manifest_ref" "$amd64_ref" --os linux --arch amd64
docker manifest annotate "$manifest_ref" "$arm64_ref" --os linux --arch arm64
docker manifest push "${push_args[@]}" "$manifest_ref"
done

View File

@@ -444,13 +444,55 @@ pub mod runtime {
/// Optional environment variables to set during action execution. /// Optional environment variables to set during action execution.
/// ///
/// Values support the same template variables as other fields: /// Entries support the same template variables as other fields:
/// `{pack_dir}`, `{env_dir}`, `{interpreter}`, `{manifest_path}`. /// `{pack_dir}`, `{env_dir}`, `{interpreter}`, `{manifest_path}`.
/// ///
/// Example: `{"NODE_PATH": "{env_dir}/node_modules"}` ensures Node.js /// The shorthand string form replaces the variable entirely:
/// can find packages installed in the isolated runtime environment. /// `{"NODE_PATH": "{env_dir}/node_modules"}`
///
/// The object form supports declarative merge semantics:
/// `{"PYTHONPATH": {"value": "{pack_dir}/lib", "operation": "prepend"}}`
#[serde(default)] #[serde(default)]
pub env_vars: HashMap<String, String>, pub env_vars: HashMap<String, RuntimeEnvVarConfig>,
}
/// Declarative configuration for a single runtime environment variable.
///
/// The string form is shorthand for `{ "value": "...", "operation": "set" }`.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
#[serde(untagged)]
pub enum RuntimeEnvVarConfig {
Value(String),
Spec(RuntimeEnvVarSpec),
}
/// Full configuration for a runtime environment variable.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct RuntimeEnvVarSpec {
/// Template value to resolve for this variable.
pub value: String,
/// How the resolved value should be merged with any existing value.
#[serde(default)]
pub operation: RuntimeEnvVarOperation,
/// Separator used for prepend/append operations.
#[serde(default = "default_env_var_separator")]
pub separator: String,
}
/// Merge behavior for runtime-provided environment variables.
#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq, Default)]
#[serde(rename_all = "snake_case")]
pub enum RuntimeEnvVarOperation {
#[default]
Set,
Prepend,
Append,
}
fn default_env_var_separator() -> String {
":".to_string()
} }
/// Controls how inline code is materialized before execution. /// Controls how inline code is materialized before execution.
@@ -768,6 +810,43 @@ pub mod runtime {
} }
} }
impl RuntimeEnvVarConfig {
/// Resolve this environment variable against the current template
/// variables and any existing value already present in the process env.
pub fn resolve(
&self,
vars: &HashMap<&str, String>,
existing_value: Option<&str>,
) -> String {
match self {
Self::Value(value) => RuntimeExecutionConfig::resolve_template(value, vars),
Self::Spec(spec) => {
let resolved = RuntimeExecutionConfig::resolve_template(&spec.value, vars);
match spec.operation {
RuntimeEnvVarOperation::Set => resolved,
RuntimeEnvVarOperation::Prepend => {
join_env_var_values(&resolved, existing_value, &spec.separator)
}
RuntimeEnvVarOperation::Append => join_env_var_values(
existing_value.unwrap_or_default(),
Some(&resolved),
&spec.separator,
),
}
}
}
}
}
fn join_env_var_values(left: &str, right: Option<&str>, separator: &str) -> String {
match (left.is_empty(), right.unwrap_or_default().is_empty()) {
(true, true) => String::new(),
(false, true) => left.to_string(),
(true, false) => right.unwrap_or_default().to_string(),
(false, false) => format!("{}{}{}", left, separator, right.unwrap_or_default()),
}
}
#[derive(Debug, Clone, Serialize, Deserialize, FromRow)] #[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
pub struct Runtime { pub struct Runtime {
pub id: Id, pub id: Id,
@@ -1640,3 +1719,68 @@ pub mod entity_history {
} }
} }
} }
#[cfg(test)]
mod tests {
use super::runtime::{
RuntimeEnvVarConfig, RuntimeEnvVarOperation, RuntimeEnvVarSpec, RuntimeExecutionConfig,
};
use serde_json::json;
use std::collections::HashMap;
#[test]
fn runtime_execution_config_env_vars_accept_string_and_object_forms() {
let config: RuntimeExecutionConfig = serde_json::from_value(json!({
"env_vars": {
"NODE_PATH": "{env_dir}/node_modules",
"PYTHONPATH": {
"value": "{pack_dir}/lib",
"operation": "prepend",
"separator": ":"
}
}
}))
.expect("runtime execution config should deserialize");
assert!(matches!(
config.env_vars.get("NODE_PATH"),
Some(RuntimeEnvVarConfig::Value(value)) if value == "{env_dir}/node_modules"
));
assert!(matches!(
config.env_vars.get("PYTHONPATH"),
Some(RuntimeEnvVarConfig::Spec(RuntimeEnvVarSpec {
value,
operation: RuntimeEnvVarOperation::Prepend,
separator,
})) if value == "{pack_dir}/lib" && separator == ":"
));
}
#[test]
fn runtime_env_var_config_resolves_prepend_and_append_against_existing_values() {
let mut vars = HashMap::new();
vars.insert("pack_dir", "/packs/example".to_string());
vars.insert("env_dir", "/runtime_envs/example/python".to_string());
let prepend = RuntimeEnvVarConfig::Spec(RuntimeEnvVarSpec {
value: "{pack_dir}/lib".to_string(),
operation: RuntimeEnvVarOperation::Prepend,
separator: ":".to_string(),
});
assert_eq!(
prepend.resolve(&vars, Some("/already/set")),
"/packs/example/lib:/already/set"
);
let append = RuntimeEnvVarConfig::Spec(RuntimeEnvVarSpec {
value: "{env_dir}/node_modules".to_string(),
operation: RuntimeEnvVarOperation::Append,
separator: ":".to_string(),
});
assert_eq!(
append.resolve(&vars, Some("/base/modules")),
"/base/modules:/runtime_envs/example/python/node_modules"
);
}
}

View File

@@ -27,6 +27,37 @@ use tracing::{debug, error, info, warn};
use crate::api_client::ApiClient; use crate::api_client::ApiClient;
fn existing_command_env(cmd: &Command, key: &str) -> Option<String> {
cmd.as_std()
.get_envs()
.find_map(|(env_key, value)| {
if env_key == key {
value.map(|value| value.to_string_lossy().into_owned())
} else {
None
}
})
.or_else(|| std::env::var(key).ok())
}
fn apply_runtime_env_vars(
cmd: &mut Command,
exec_config: &RuntimeExecutionConfig,
pack_dir: &std::path::Path,
env_dir: Option<&std::path::Path>,
) {
if exec_config.env_vars.is_empty() {
return;
}
let vars = exec_config.build_template_vars_with_env(pack_dir, env_dir);
for (key, env_var_config) in &exec_config.env_vars {
let resolved = env_var_config.resolve(&vars, existing_command_env(cmd, key).as_deref());
debug!("Setting sensor runtime env var: {}={}", key, resolved);
cmd.env(key, resolved);
}
}
/// Sensor manager that coordinates all sensor instances /// Sensor manager that coordinates all sensor instances
#[derive(Clone)] #[derive(Clone)]
pub struct SensorManager { pub struct SensorManager {
@@ -502,20 +533,7 @@ impl SensorManager {
.env("ATTUNE_MQ_EXCHANGE", "attune.events") .env("ATTUNE_MQ_EXCHANGE", "attune.events")
.env("ATTUNE_LOG_LEVEL", "info"); .env("ATTUNE_LOG_LEVEL", "info");
if !exec_config.env_vars.is_empty() { apply_runtime_env_vars(&mut cmd, &exec_config, &pack_dir, env_dir_opt);
let vars = exec_config.build_template_vars_with_env(&pack_dir, env_dir_opt);
for (key, value_template) in &exec_config.env_vars {
let resolved = attune_common::models::RuntimeExecutionConfig::resolve_template(
value_template,
&vars,
);
debug!(
"Setting sensor runtime env var: {}={} (template: {})",
key, resolved, value_template
);
cmd.env(key, resolved);
}
}
let mut child = cmd let mut child = cmd
.stdin(Stdio::null()) .stdin(Stdio::null())
@@ -904,6 +922,10 @@ pub struct SensorStatus {
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*; use super::*;
use attune_common::models::runtime::{
RuntimeEnvVarConfig, RuntimeEnvVarOperation, RuntimeEnvVarSpec,
};
use std::collections::HashMap;
#[test] #[test]
fn test_sensor_status_default() { fn test_sensor_status_default() {
@@ -913,4 +935,46 @@ mod tests {
assert_eq!(status.failure_count, 0); assert_eq!(status.failure_count, 0);
assert!(status.last_poll.is_none()); assert!(status.last_poll.is_none());
} }
#[test]
fn test_apply_runtime_env_vars_prepends_to_existing_command_env() {
let mut env_vars = HashMap::new();
env_vars.insert(
"PYTHONPATH".to_string(),
RuntimeEnvVarConfig::Spec(RuntimeEnvVarSpec {
value: "{pack_dir}/lib".to_string(),
operation: RuntimeEnvVarOperation::Prepend,
separator: ":".to_string(),
}),
);
let exec_config = RuntimeExecutionConfig {
env_vars,
..RuntimeExecutionConfig::default()
};
let mut cmd = Command::new("python3");
cmd.env("PYTHONPATH", "/existing/pythonpath");
apply_runtime_env_vars(
&mut cmd,
&exec_config,
std::path::Path::new("/packs/testpack"),
None,
);
let resolved = cmd
.as_std()
.get_envs()
.find_map(|(key, value)| {
if key == "PYTHONPATH" {
value.map(|value| value.to_string_lossy().into_owned())
} else {
None
}
})
.expect("PYTHONPATH should be set");
assert_eq!(resolved, "/packs/testpack/lib:/existing/pythonpath");
}
} }

View File

@@ -830,12 +830,9 @@ impl Runtime for ProcessRuntime {
// resolved against the current pack/env directories. // resolved against the current pack/env directories.
if !effective_config.env_vars.is_empty() { if !effective_config.env_vars.is_empty() {
let vars = effective_config.build_template_vars_with_env(&pack_dir, env_dir_opt); let vars = effective_config.build_template_vars_with_env(&pack_dir, env_dir_opt);
for (key, value_template) in &effective_config.env_vars { for (key, env_var_config) in &effective_config.env_vars {
let resolved = RuntimeExecutionConfig::resolve_template(value_template, &vars); let resolved = env_var_config.resolve(&vars, env.get(key).map(String::as_str));
debug!( debug!("Setting runtime env var: {}={}", key, resolved);
"Setting runtime env var: {}={} (template: {})",
key, resolved, value_template
);
env.insert(key.clone(), resolved); env.insert(key.clone(), resolved);
} }
} }
@@ -1062,7 +1059,8 @@ mod tests {
use super::*; use super::*;
use attune_common::models::runtime::{ use attune_common::models::runtime::{
DependencyConfig, EnvironmentConfig, InlineExecutionConfig, InlineExecutionStrategy, DependencyConfig, EnvironmentConfig, InlineExecutionConfig, InlineExecutionStrategy,
InterpreterConfig, RuntimeExecutionConfig, InterpreterConfig, RuntimeEnvVarConfig, RuntimeEnvVarOperation, RuntimeEnvVarSpec,
RuntimeExecutionConfig,
}; };
use attune_common::models::{OutputFormat, ParameterDelivery, ParameterFormat}; use attune_common::models::{OutputFormat, ParameterDelivery, ParameterFormat};
use std::collections::HashMap; use std::collections::HashMap;
@@ -1377,6 +1375,88 @@ mod tests {
assert!(result.stdout.contains("hello from python process runtime")); assert!(result.stdout.contains("hello from python process runtime"));
} }
#[tokio::test]
async fn test_execute_python_file_with_pack_lib_on_pythonpath() {
// End-to-end check: a prepend-style PYTHONPATH env var config puts the pack's
// lib/ directory ahead of a pre-existing PYTHONPATH, so an action script can
// import helper modules shipped with the pack.
// NOTE(review): assumes a `python3` binary is available on PATH — confirm
// this test is gated/skipped in environments without it.
//
// On-disk layout built below: <tmp>/packs/testpack/{actions,lib}
let temp_dir = TempDir::new().unwrap();
let packs_dir = temp_dir.path().join("packs");
let pack_dir = packs_dir.join("testpack");
let actions_dir = pack_dir.join("actions");
let lib_dir = pack_dir.join("lib");
std::fs::create_dir_all(&actions_dir).unwrap();
std::fs::create_dir_all(&lib_dir).unwrap();
// Helper module the action imports; only reachable via the prepended lib dir.
std::fs::write(
lib_dir.join("helper.py"),
"def message():\n    return 'hello from pack lib'\n",
)
.unwrap();
// The action prints the helper's message plus the effective PYTHONPATH so the
// assertions below can verify both import success and merge ordering.
std::fs::write(
actions_dir.join("hello.py"),
"import helper\nimport os\nprint(helper.message())\nprint(os.environ['PYTHONPATH'])\n",
)
.unwrap();
// Declarative env var: prepend "{pack_dir}/lib" with ':' as the separator.
let mut env_vars = HashMap::new();
env_vars.insert(
"PYTHONPATH".to_string(),
RuntimeEnvVarConfig::Spec(RuntimeEnvVarSpec {
value: "{pack_dir}/lib".to_string(),
operation: RuntimeEnvVarOperation::Prepend,
separator: ":".to_string(),
}),
);
// Minimal python runtime: plain python3 interpreter, no managed environment
// or dependency manifest, only the env_vars under test.
let runtime = ProcessRuntime::new(
"python".to_string(),
RuntimeExecutionConfig {
interpreter: InterpreterConfig {
binary: "python3".to_string(),
args: vec![],
file_extension: Some(".py".to_string()),
},
inline_execution: InlineExecutionConfig::default(),
environment: None,
dependencies: None,
env_vars,
},
packs_dir,
temp_dir.path().join("runtime_envs"),
);
// Seed the execution env with an existing PYTHONPATH that must be preserved
// after (i.e. to the right of) the pack lib entry.
let mut env = HashMap::new();
env.insert("PYTHONPATH".to_string(), "/existing/pythonpath".to_string());
let context = ExecutionContext {
execution_id: 3,
action_ref: "testpack.hello".to_string(),
parameters: HashMap::new(),
env,
secrets: HashMap::new(),
timeout: Some(10),
working_dir: None,
entry_point: "hello.py".to_string(),
code: None,
code_path: Some(actions_dir.join("hello.py")),
runtime_name: Some("python".to_string()),
runtime_config_override: None,
runtime_env_dir_suffix: None,
selected_runtime_version: None,
max_stdout_bytes: 1024 * 1024,
max_stderr_bytes: 1024 * 1024,
parameter_delivery: ParameterDelivery::default(),
parameter_format: ParameterFormat::default(),
output_format: OutputFormat::default(),
cancel_token: None,
};
let result = runtime.execute(context).await.unwrap();
assert_eq!(result.exit_code, 0);
// Import worked only if the helper's message made it to stdout...
assert!(result.stdout.contains("hello from pack lib"));
// ...and the printed PYTHONPATH shows pack lib first, existing value second.
assert!(result
.stdout
.contains(&format!("{}/lib:/existing/pythonpath", pack_dir.display())));
}
#[tokio::test] #[tokio::test]
async fn test_execute_inline_code() { async fn test_execute_inline_code() {
let temp_dir = TempDir::new().unwrap(); let temp_dir = TempDir::new().unwrap();

View File

@@ -0,0 +1,6 @@
# Minimal agent image: prebuilt binaries layered onto busybox.
FROM busybox:1.36
# Binaries are expected under dist/ in the build context at image-build time.
COPY dist/attune-agent /usr/local/bin/attune-agent
COPY dist/attune-sensor-agent /usr/local/bin/attune-sensor-agent
# The primary agent runs by default; presumably the sensor binary is invoked
# via an entrypoint/command override — confirm against deployment manifests.
ENTRYPOINT ["/usr/local/bin/attune-agent"]

33
docker/Dockerfile.runtime Normal file
View File

@@ -0,0 +1,33 @@
# Shared runtime image for the attune service binary.
ARG DEBIAN_VERSION=bookworm
FROM debian:${DEBIAN_VERSION}-slim AS runtime

# Install only the required runtime packages; --no-install-recommends keeps
# the image small, and removing the apt lists keeps them out of the layer.
RUN apt-get update && apt-get install -y --no-install-recommends \
    ca-certificates \
    libssl3 \
    curl \
    git \
    && rm -rf /var/lib/apt/lists/*

# Non-root service user owning all mutable state under /opt/attune.
RUN useradd -m -u 1000 attune && \
    mkdir -p /opt/attune/packs /opt/attune/logs /opt/attune/runtime_envs /opt/attune/config /opt/attune/artifacts /opt/attune/agent && \
    chown -R attune:attune /opt/attune

WORKDIR /opt/attune

# Service binary stays root-owned in /usr/local/bin; it only needs execute.
COPY dist/attune-service-binary /usr/local/bin/attune-service
# Copy migrations with correct ownership directly, replacing the previous
# whole-tree `RUN chown -R` layer that re-chowned everything after COPY.
COPY --chown=attune:attune migrations/ ./migrations/

USER attune

ENV RUST_LOG=info
ENV ATTUNE_CONFIG=/opt/attune/config/config.yaml

# curl is installed above specifically so this probe works.
HEALTHCHECK --interval=30s --timeout=3s --start-period=10s --retries=3 \
    CMD curl -f http://localhost:8080/health || exit 1

EXPOSE 8080
CMD ["/usr/local/bin/attune-service"]

View File

@@ -12,6 +12,41 @@ Each runtime YAML file contains only the fields that are stored in the database:
- `description` - Brief description of the runtime - `description` - Brief description of the runtime
- `distributions` - Runtime verification and capability metadata (JSONB) - `distributions` - Runtime verification and capability metadata (JSONB)
- `installation` - Installation requirements and metadata (JSONB) - `installation` - Installation requirements and metadata (JSONB)
- `execution_config` - Interpreter, environment, dependency, and execution-time env var metadata
## `execution_config.env_vars`
Runtime authors can declare execution-time environment variables declaratively.
Plain string values replace the variable entirely (shorthand for the `set` operation described below):
```yaml
env_vars:
NODE_PATH: "{env_dir}/node_modules"
```
Object values support merge semantics against an existing value already present in the execution environment:
```yaml
env_vars:
PYTHONPATH:
operation: prepend
value: "{pack_dir}/lib"
separator: ":"
```
Supported operations:
- `set` - Replace the variable with the resolved value
- `prepend` - Add the resolved value before the existing value
- `append` - Add the resolved value after the existing value
Supported template variables:
- `{pack_dir}`
- `{env_dir}`
- `{interpreter}`
- `{manifest_path}`
## Available Runtimes ## Available Runtimes

View File

@@ -54,6 +54,11 @@ execution_config:
- install - install
- "-r" - "-r"
- "{manifest_path}" - "{manifest_path}"
env_vars:
PYTHONPATH:
operation: prepend
value: "{pack_dir}/lib"
separator: ":"
# Version-specific execution configurations. # Version-specific execution configurations.
# Each entry describes how to invoke a particular Python version. # Each entry describes how to invoke a particular Python version.
@@ -96,6 +101,11 @@ versions:
- install - install
- "-r" - "-r"
- "{manifest_path}" - "{manifest_path}"
env_vars:
PYTHONPATH:
operation: prepend
value: "{pack_dir}/lib"
separator: ":"
- version: "3.12" - version: "3.12"
is_default: true is_default: true
@@ -133,6 +143,11 @@ versions:
- install - install
- "-r" - "-r"
- "{manifest_path}" - "{manifest_path}"
env_vars:
PYTHONPATH:
operation: prepend
value: "{pack_dir}/lib"
separator: ":"
- version: "3.13" - version: "3.13"
distributions: distributions:
@@ -169,3 +184,8 @@ versions:
- install - install
- "-r" - "-r"
- "{manifest_path}" - "{manifest_path}"
env_vars:
PYTHONPATH:
operation: prepend
value: "{pack_dir}/lib"
separator: ":"