Trying first Mélodium CI impl

pull/1416/head
Quentin VIGNAUD 3 months ago
parent d1fdbb9930
commit 09ba8a2185

@ -12,358 +12,12 @@ env:
CARGO_TERM_COLOR: always
jobs:
lint:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v5
- uses: dtolnay/rust-toolchain@nightly
with:
components: clippy,miri,rustfmt,rust-src
# Installed *after* nightly so it is the default.
- uses: dtolnay/rust-toolchain@stable
- uses: Swatinem/rust-cache@v2
- uses: taiki-e/install-action@v2
with:
tool: cargo-hack,taplo-cli
- run: git ls-files -- '*.c' '*.h' | xargs clang-format --dry-run --Werror
- uses: DavidAnson/markdownlint-cli2-action@v20
- run: taplo fmt --check
- run: cargo +nightly fmt --all -- --check
- run: ./clippy.sh
# On the `aya-rs/aya` repository, regenerate the public API on a schedule.
#
# On all other events and repositories assert the public API is up to date.
- run: cargo xtask public-api
if: ${{ !(github.event_name == 'schedule' && github.repository == 'aya-rs/aya') }}
- run: cargo xtask public-api --bless
if: ${{ (github.event_name == 'schedule' && github.repository == 'aya-rs/aya') }}
- uses: peter-evans/create-pull-request@v7
if: ${{ (github.event_name == 'schedule' && github.repository == 'aya-rs/aya') }}
with:
# GitHub actions aren't allowed to trigger other actions to prevent
# abuse; the canonical workaround is to use a sufficiently authorized
# token.
#
# See https://github.com/peter-evans/create-pull-request/issues/48.
token: ${{ secrets.CRABBY_GITHUB_TOKEN }}
branch: create-pull-request/public-api
commit-message: 'public-api: regenerate'
title: 'public-api: regenerate'
body: |
**Automated changes**
- name: Run miri
run: |
set -euxo pipefail
cargo +nightly hack miri test --all-targets --feature-powerset \
--exclude aya-ebpf \
--exclude aya-ebpf-bindings \
--exclude aya-log-ebpf \
--exclude integration-ebpf \
--exclude integration-test \
--workspace
build-test-aya:
strategy:
fail-fast: false
matrix:
arch:
- aarch64-unknown-linux-gnu
- armv7-unknown-linux-gnueabi
- powerpc64le-unknown-linux-gnu
- riscv64gc-unknown-linux-gnu
- s390x-unknown-linux-gnu
- x86_64-unknown-linux-gnu
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v5
- uses: dtolnay/rust-toolchain@stable
with:
targets: ${{ matrix.arch }}
- uses: Swatinem/rust-cache@v2
- uses: taiki-e/install-action@cargo-hack
# This is magic, it sets `$CARGO_BUILD_TARGET`.
- uses: taiki-e/setup-cross-toolchain-action@v1
with:
target: ${{ matrix.arch }}
- name: Build
run: |
set -euxo pipefail
cargo hack build --all-targets --feature-powerset \
--exclude aya-ebpf \
--exclude aya-ebpf-bindings \
--exclude aya-log-ebpf \
--exclude integration-ebpf \
--exclude xtask \
--workspace
- name: Test
env:
RUST_BACKTRACE: full
run: |
set -euxo pipefail
cargo hack test --all-targets --feature-powerset \
--exclude aya-ebpf \
--exclude aya-ebpf-bindings \
--exclude aya-log-ebpf \
--exclude integration-ebpf \
--exclude integration-test \
--exclude xtask \
--workspace
- name: Doctests
env:
RUST_BACKTRACE: full
run: |
set -euxo pipefail
cargo hack test --doc --feature-powerset \
--exclude aya-ebpf \
--exclude aya-ebpf-bindings \
--exclude aya-log-ebpf \
--exclude init \
--exclude integration-ebpf \
--exclude integration-test \
--exclude xtask \
--workspace
build-test-aya-ebpf:
strategy:
fail-fast: false
matrix:
bpf_target_arch:
- aarch64
- arm
- mips
- powerpc64
- riscv64
- s390x
- x86_64
target:
- bpfel-unknown-none
- bpfeb-unknown-none
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v5
- uses: dtolnay/rust-toolchain@nightly
with:
components: rust-src
# Installed *after* nightly so it is the default.
- uses: dtolnay/rust-toolchain@stable
- uses: Swatinem/rust-cache@v2
- run: cargo install --git https://github.com/aya-rs/bpf-linker.git bpf-linker --features llvm-21
- uses: taiki-e/install-action@cargo-hack
- name: Build
env:
CARGO_CFG_BPF_TARGET_ARCH: ${{ matrix.bpf_target_arch }}
run: |
set -euxo pipefail
cargo +nightly hack build \
--target ${{ matrix.target }} \
-Z build-std=core \
--package aya-ebpf \
--package aya-log-ebpf \
--feature-powerset
- name: Test
env:
CARGO_CFG_BPF_TARGET_ARCH: ${{ matrix.bpf_target_arch }}
RUST_BACKTRACE: full
run: |
set -euxo pipefail
cargo hack test \
--doc \
--package aya-ebpf \
--package aya-log-ebpf \
--feature-powerset
run-integration-test:
strategy:
fail-fast: false
matrix:
include:
- target: x86_64-apple-darwin
# macos-15 is arm64[0] which doesn't support nested
# virtualization[1].
#
# [0] https://github.com/actions/runner-images#available-images
#
# [1] https://docs.github.com/en/actions/reference/runners/github-hosted-runners#limitations-for-arm64-macos-runners
os: macos-15-intel
# We don't use ubuntu-latest because we care about the apt packages available.
- target: x86_64-unknown-linux-gnu
os: ubuntu-22.04
- target: aarch64-unknown-linux-gnu
os: ubuntu-22.04-arm
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v5
with:
submodules: recursive
- name: Install prerequisites
if: runner.os == 'Linux'
# ubuntu-22.04 comes with clang 13-15[0]; support for signed and 64bit
# enum values was added in clang 15[1] which isn't in `$PATH`.
#
# [0] https://github.com/actions/runner-images/blob/main/images/ubuntu/Ubuntu2204-Readme.md
#
# [1] https://github.com/llvm/llvm-project/commit/dc1c43d
run: |
set -euxo pipefail
sudo apt update
sudo apt -y install lynx qemu-system-{arm,x86} musl-tools
echo /usr/lib/llvm-15/bin >> $GITHUB_PATH
- name: Install prerequisites
if: runner.os == 'macOS'
# The curl shipped on macOS doesn't contain
# https://github.com/curl/curl/commit/85efbb92b8e6679705e122cee45ce76c56414a3e which is
# needed for proper handling of `--etag-{compare,save}`.
#
# The tar shipped on macOS doesn't support --wildcards, so we need GNU tar.
#
# The clang shipped on macOS doesn't support BPF, so we need LLVM from brew.
#
# We need a musl C toolchain to compile our `test-distro` since some of
# our dependencies have build scripts that compile C code (e.g. xz2).
# This is provided by `brew install filosottile/musl-cross/musl-cross`.
run: |
set -euxo pipefail
brew update
# https://github.com/actions/setup-python/issues/577
find /usr/local/bin -type l -exec sh -c 'readlink -f "$1" \
| grep -q ^/Library/Frameworks/Python.framework/Versions/' _ {} \; -exec rm -v {} \;
brew install --formula curl dpkg gnu-tar llvm lynx pkg-config qemu
echo $(brew --prefix curl)/bin >> $GITHUB_PATH
echo $(brew --prefix gnu-tar)/libexec/gnubin >> $GITHUB_PATH
echo $(brew --prefix llvm)/bin >> $GITHUB_PATH
brew install filosottile/musl-cross/musl-cross
ln -s "$(brew --prefix musl-cross)/bin/x86_64-linux-musl-gcc" /usr/local/bin/musl-gcc
- uses: dtolnay/rust-toolchain@nightly
with:
components: rust-src
# Installed *after* nightly so it is the default.
- uses: dtolnay/rust-toolchain@stable
with:
targets: aarch64-unknown-linux-musl,x86_64-unknown-linux-musl
- uses: Swatinem/rust-cache@v2
- name: Install libLLVM
# Download libLLVM from Rust CI to ensure that the libLLVM version
# matches exactly with the version used by the current Rust nightly. A
# mismatch between libLLVM (used by bpf-linker) and Rust's LLVM version
# can lead to linking issues.
run: |
set -euxo pipefail
# Get the partial SHA from Rust nightly.
rustc_sha=$(rustc +nightly --version | grep -oE '[a-f0-9]{7,40}')
# Get the full SHA from GitHub.
rustc_sha=$(curl -sfSL https://api.github.com/repos/rust-lang/rust/commits/$rustc_sha \
--header 'authorization: Bearer ${{ secrets.GITHUB_TOKEN }}' \
--header 'content-type: application/json' \
| jq -r '.sha')
mkdir -p /tmp/rustc-llvm
curl -sfSL https://ci-artifacts.rust-lang.org/rustc-builds/$rustc_sha/rust-dev-nightly-${{ matrix.target }}.tar.xz | \
tar -xJ --strip-components 2 -C /tmp/rustc-llvm
echo /tmp/rustc-llvm/bin >> $GITHUB_PATH
# NB: rustc doesn't ship libLLVM.so on macOS, so disable proxying (default feature). We also
# --force so that bpf-linker gets always relinked against the latest LLVM downloaded above.
#
# Do this on all systems (not just macOS) to avoid relying on rustc-provided libLLVM.so.
- run: cargo install --git https://github.com/aya-rs/bpf-linker.git bpf-linker --no-default-features --features llvm-21 --force
- uses: actions/cache@v4
with:
path: test/.tmp
key: ${{ runner.arch }}-${{ runner.os }}-test-cache
- name: Download debian kernels
if: runner.arch == 'ARM64'
# TODO: enable tests on kernels before 6.0.
run: .github/scripts/download_kernel_images.sh test/.tmp/debian-kernels/arm64 arm64 6.1 6.12
- name: Download debian kernels
if: runner.arch == 'X64'
# TODO: enable tests on kernels before 6.0.
run: .github/scripts/download_kernel_images.sh test/.tmp/debian-kernels/amd64 amd64 6.1 6.12
- name: Cleanup stale kernels and modules
run: |
set -euxo pipefail
rm -rf test/.tmp/boot test/.tmp/lib
- name: Extract debian kernels
run: |
set -euxo pipefail
# The wildcard '**/boot/*' extracts kernel images and config.
# The wildcard '**/modules/*' extracts kernel modules.
# Modules are required since not all parts of the kernel we want to
# test are built-in.
find test/.tmp -name '*.deb' -print0 | xargs -t -0 -I {} \
sh -c "dpkg --fsys-tarfile {} | tar -C test/.tmp \
--wildcards --extract '**/boot/*' '**/modules/*' --file -"
- name: Run local integration tests
if: runner.os == 'Linux'
run: cargo xtask integration-test local
- name: Run virtualized integration tests
if: runner.os == 'Linux'
run: |
set -euxo pipefail
.github/scripts/find_kernels.py | xargs -t -0 \
cargo xtask integration-test vm --cache-dir test/.tmp \
--github-api-token ${{ secrets.GITHUB_TOKEN }} \
- name: Run virtualized integration tests
if: runner.os == 'macOS'
env:
# This sets the linker to the one installed by FiloSottile/musl-cross.
CARGO_TARGET_X86_64_UNKNOWN_LINUX_MUSL_LINKER: x86_64-linux-musl-gcc
run: |
set -euxo pipefail
.github/scripts/find_kernels.py | xargs -t -0 \
cargo xtask integration-test vm --cache-dir test/.tmp \
--github-api-token ${{ secrets.GITHUB_TOKEN }} \
# Provides a single status check for the entire build workflow.
# This is used for merge automation, like Mergify, since GH actions
# has no concept of "when all status checks pass".
# https://docs.mergify.com/conditions/#validating-all-status-checks
build-workflow-complete:
needs:
- lint
- build-test-aya
- build-test-aya-ebpf
- run-integration-test
runs-on: ubuntu-latest
steps:
- run: echo 'Build Complete'
ciUbuntuLatest:
uses: melodium-tech/github-actions/.github/workflows/melodium-ubuntu.yml@v0.9.0
with:
version: 0.9.0
command: |
run .melodium-ci/Compo.toml buildTestAya --logs_directory '"logs/"' --github_contexts '$''{{{{ { "github": ${{ toJSON(github) }}, "vars": ${{ toJSON(vars) }}, "secrets": { "GITHUB_TOKEN": "'"$GITHUB_TOKEN"'" } } }}}}'
artifact-path: logs/
secrets:
token: "${{ secrets.GITHUB_TOKEN }}"

@ -0,0 +1,21 @@
# Mélodium package manifest for the "ci" package that re-implements this
# repository's GitHub Actions CI as Mélodium treatments.
name = "ci"
version = "0.1.0"
# All dependencies are pinned to the same ^0.9.0 series as the
# melodium-ubuntu workflow version referenced from the Actions side.
[dependencies]
cicd = "^0.9.0"
distrib = "^0.9.0"
fs = "^0.9.0"
http = "^0.9.0"
javascript = "^0.9.0"
json = "^0.9.0"
log = "^0.9.0"
net = "^0.9.0"
process = "^0.9.0"
std = "^0.9.0"
work = "^0.9.0"
# Entrypoints callable via `run .melodium-ci/Compo.toml <entrypoint>`.
# Only buildTestAya is active; the commented ones are generated but
# intentionally disabled for this first CI attempt.
[entrypoints]
buildTestAya = "ci/workflows::buildTestAyaEntrypoint"
#ciUbuntuLatest = "ci/workflows::ciUbuntuLatestEntrypoint"
#cimatrixOs = "ci/workflows::cimatrixOsEntrypoint"
#gen = "ci/workflows::genEntrypoint"

@ -0,0 +1,120 @@
use cicd/services/github::replicateContextsWithInputs
use cicd/services/github::runAction
use javascript::JavaScriptEngine
use log/logger::Logger
use std/data/string_map::StringMap
use std/data/string_map::|entry
use std/data/string_map::|map
use std/ops/option::|wrap
// Generated Mélodium wrapper around the GitHub `actions/cache@v4` action.
// The main step runs the action's vendored `dist/restore` Node entry point;
// the post step runs `dist/save`, mirroring the action's main/post lifecycle.
// Parameters mirror the action's string-typed inputs one-to-one.
#[generated(true)]
#[github_action(actions/cache@v4)]
treatment cache[contexts: JavaScriptEngine, logger: Logger](
var enable_cross_os_archive: string = "false",
var fail_on_cache_miss: string = "false",
var key: string = "",
var lookup_only: string = "false",
var path: string = "",
var restore_keys: string = "",
var save_always: string = "false",
var upload_chunk_size: string = ""
)
// `trigger` starts the restore (main) phase; `post_trigger` starts the save
// (post) phase, to be fired by the caller at end of job.
input post_trigger: Block<void>
input trigger: Block<void>
output completed: Block<void>
output continue: Block<void>
output failed: Block<void>
output post_completed: Block<void>
// Dedicated JavaScript engine in which the action's Node code executes.
model node_contexts: JavaScriptEngine()
{
// Main step: restore the cache. Inputs are passed the way the GitHub
// runner does it, as `INPUT_<NAME>` environment variables (upper-cased,
// dashes preserved — e.g. `restore-keys` -> `INPUT_RESTORE-KEYS`).
mainAction: runAction[contexts = node_contexts, logger = logger](
commands = "node .melodium-ci/github/actions/third/actions/cache/ref_v4/dist/restore/index.js",
env = |wrap<StringMap>(
|map(
[|entry(
"INPUT_PATH",
path
),
|entry(
"INPUT_KEY",
key
),
|entry(
"INPUT_RESTORE-KEYS",
restore_keys
),
|entry(
"INPUT_UPLOAD-CHUNK-SIZE",
upload_chunk_size
),
|entry(
"INPUT_ENABLECROSSOSARCHIVE",
enable_cross_os_archive
),
|entry(
"INPUT_FAIL-ON-CACHE-MISS",
fail_on_cache_miss
),
|entry(
"INPUT_LOOKUP-ONLY",
lookup_only
),
|entry(
"INPUT_SAVE-ALWAYS",
save_always
)]
)
),
name = "cache"
)
// Post step: save the cache, only when the job so far succeeded.
postAction: runAction[contexts = node_contexts, logger = logger](
commands = "node .melodium-ci/github/actions/third/actions/cache/ref_v4/dist/save/index.js",
if = "success()",
name = "post:cache"
)
// Replicates the caller's GitHub contexts into `node_contexts` and exposes
// the action inputs (original kebab/camel-cased names) to expressions there.
replicateContextsWithInputs[action_contexts = node_contexts, main_contexts = contexts](
inputs = |map(
[|entry(
"path",
path
),
|entry(
"key",
key
),
|entry(
"restore-keys",
restore_keys
),
|entry(
"upload-chunk-size",
upload_chunk_size
),
|entry(
"enableCrossOsArchive",
enable_cross_os_archive
),
|entry(
"fail-on-cache-miss",
fail_on_cache_miss
),
|entry(
"lookup-only",
lookup_only
),
|entry(
"save-always",
save_always
)]
)
)
// Wiring: trigger -> contexts replication -> main action; post phase is
// wired independently from post_trigger.
Self.post_trigger -> postAction.trigger
postAction.completed -> Self.post_completed
Self.trigger -> replicateContextsWithInputs.trigger
replicateContextsWithInputs.ready -> mainAction.trigger
mainAction.completed -> Self.completed
mainAction.failed -> Self.failed
mainAction.continue -> Self.continue
}

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

@ -0,0 +1,227 @@
use cicd/services/github::replicateContextsWithInputs
use cicd/services/github::runAction
use javascript::JavaScriptEngine
use log/logger::Logger
use std/data/string_map::StringMap
use std/data/string_map::|entry
use std/data/string_map::|map
use std/ops/option::|wrap
// Generated Mélodium wrapper around the GitHub `actions/checkout@v5` action.
// Main and post steps both run the action's single `dist/index.js` entry
// point (checkout uses one script for checkout and post-job cleanup).
// Parameters mirror the action's inputs and their upstream defaults.
#[github_action(actions/checkout@v5)]
#[generated(true)]
treatment checkout[contexts: JavaScriptEngine, logger: Logger](
var clean: string = "true",
var fetch_depth: string = "1",
var fetch_tags: string = "false",
var filter: string = "",
var github_server_url: string = "",
var lfs: string = "false",
var path: string = "",
var persist_credentials: string = "true",
var ref: string = "",
var repository: string = "${{ github.repository }}",
var set_safe_directory: string = "true",
var show_progress: string = "true",
var sparse_checkout: string = "",
var sparse_checkout_cone_mode: string = "true",
var ssh_key: string = "",
var ssh_known_hosts: string = "",
var ssh_strict: string = "true",
var ssh_user: string = "git",
var submodules: string = "false",
var token: string = "${{ github.token }}"
)
input post_trigger: Block<void>
input trigger: Block<void>
output completed: Block<void>
output continue: Block<void>
output failed: Block<void>
output post_completed: Block<void>
// Dedicated JavaScript engine in which the action's Node code executes.
model node_contexts: JavaScriptEngine()
{
// Main step: inputs are passed as `INPUT_<NAME>` environment variables,
// following the GitHub runner convention (upper-cased, dashes preserved).
mainAction: runAction[contexts = node_contexts, logger = logger](
commands = "node .melodium-ci/github/actions/third/actions/checkout/ref_v5/dist/index.js",
env = |wrap<StringMap>(
|map(
[|entry(
"INPUT_REPOSITORY",
repository
),
|entry(
"INPUT_REF",
ref
),
|entry(
"INPUT_TOKEN",
token
),
|entry(
"INPUT_SSH-KEY",
ssh_key
),
|entry(
"INPUT_SSH-KNOWN-HOSTS",
ssh_known_hosts
),
|entry(
"INPUT_SSH-STRICT",
ssh_strict
),
|entry(
"INPUT_SSH-USER",
ssh_user
),
|entry(
"INPUT_PERSIST-CREDENTIALS",
persist_credentials
),
|entry(
"INPUT_PATH",
path
),
|entry(
"INPUT_CLEAN",
clean
),
|entry(
"INPUT_FILTER",
filter
),
|entry(
"INPUT_SPARSE-CHECKOUT",
sparse_checkout
),
|entry(
"INPUT_SPARSE-CHECKOUT-CONE-MODE",
sparse_checkout_cone_mode
),
|entry(
"INPUT_FETCH-DEPTH",
fetch_depth
),
|entry(
"INPUT_FETCH-TAGS",
fetch_tags
),
|entry(
"INPUT_SHOW-PROGRESS",
show_progress
),
|entry(
"INPUT_LFS",
lfs
),
|entry(
"INPUT_SUBMODULES",
submodules
),
|entry(
"INPUT_SET-SAFE-DIRECTORY",
set_safe_directory
),
|entry(
"INPUT_GITHUB-SERVER-URL",
github_server_url
)]
)
),
name = "checkout"
)
// Post step: same script, run in post mode (no INPUT_* env is passed here;
// NOTE(review): upstream checkout reads saved state in the post phase —
// confirm the wrapper's state handling covers credential cleanup).
postAction: runAction[contexts = node_contexts, logger = logger](
commands = "node .melodium-ci/github/actions/third/actions/checkout/ref_v5/dist/index.js",
name = "post:checkout"
)
// Replicates caller contexts into `node_contexts` and exposes the inputs
// under their original action input names.
replicateContextsWithInputs[action_contexts = node_contexts, main_contexts = contexts](
inputs = |map(
[|entry(
"repository",
repository
),
|entry(
"ref",
ref
),
|entry(
"token",
token
),
|entry(
"ssh-key",
ssh_key
),
|entry(
"ssh-known-hosts",
ssh_known_hosts
),
|entry(
"ssh-strict",
ssh_strict
),
|entry(
"ssh-user",
ssh_user
),
|entry(
"persist-credentials",
persist_credentials
),
|entry(
"path",
path
),
|entry(
"clean",
clean
),
|entry(
"filter",
filter
),
|entry(
"sparse-checkout",
sparse_checkout
),
|entry(
"sparse-checkout-cone-mode",
sparse_checkout_cone_mode
),
|entry(
"fetch-depth",
fetch_depth
),
|entry(
"fetch-tags",
fetch_tags
),
|entry(
"show-progress",
show_progress
),
|entry(
"lfs",
lfs
),
|entry(
"submodules",
submodules
),
|entry(
"set-safe-directory",
set_safe_directory
),
|entry(
"github-server-url",
github_server_url
)]
)
)
// Wiring: trigger -> contexts replication -> main action; post phase is
// wired independently from post_trigger.
Self.post_trigger -> postAction.trigger
postAction.completed -> Self.post_completed
Self.trigger -> replicateContextsWithInputs.trigger
replicateContextsWithInputs.ready -> mainAction.trigger
mainAction.completed -> Self.completed
mainAction.failed -> Self.failed
mainAction.continue -> Self.continue
}

File diff suppressed because one or more lines are too long

@ -0,0 +1,76 @@
use cicd/services/github::replicateContextsWithInputs
use cicd/services/github::runAction
use javascript::JavaScriptEngine
use log/logger::Logger
use std/data/string_map::StringMap
use std/data/string_map::|entry
use std/data/string_map::|map
use std/ops/option::|wrap
// Generated Mélodium wrapper around `DavidAnson/markdownlint-cli2-action@v20`.
// Single (main-only) action: runs the vendored `dist/index.mjs` entry point;
// there is no post phase, hence no post_trigger/post_completed ports.
// The `separator` default is a raw block containing a literal newline.
#[github_action(DavidAnson/markdownlint-cli2-action@v20)]
#[generated(true)]
treatment markdownlintCli2Action[contexts: JavaScriptEngine, logger: Logger](
var config: string = "",
var fix: string = "",
var globs: string = "*.{md,markdown}",
var separator: string = ${
}
)
input trigger: Block<void>
output completed: Block<void>
output continue: Block<void>
output failed: Block<void>
// Dedicated JavaScript engine in which the action's Node code executes.
model node_contexts: JavaScriptEngine()
{
// Main step: inputs passed as `INPUT_<NAME>` environment variables,
// matching the GitHub runner convention.
mainAction: runAction[contexts = node_contexts, logger = logger](
commands = "node .melodium-ci/github/actions/third/david_anson/markdownlint_cli2_action/ref_v20/dist/index.mjs",
env = |wrap<StringMap>(
|map(
[|entry(
"INPUT_CONFIG",
config
),
|entry(
"INPUT_FIX",
fix
),
|entry(
"INPUT_GLOBS",
globs
),
|entry(
"INPUT_SEPARATOR",
separator
)]
)
),
name = "markdownlintCli2Action"
)
// Replicates caller contexts into `node_contexts` and exposes the inputs
// under their original action input names.
replicateContextsWithInputs[action_contexts = node_contexts, main_contexts = contexts](
inputs = |map(
[|entry(
"config",
config
),
|entry(
"fix",
fix
),
|entry(
"globs",
globs
),
|entry(
"separator",
separator
)]
)
)
// Wiring: trigger -> contexts replication -> main action.
Self.trigger -> replicateContextsWithInputs.trigger
replicateContextsWithInputs.ready -> mainAction.trigger
mainAction.completed -> Self.completed
mainAction.failed -> Self.failed
mainAction.continue -> Self.continue
}

File diff suppressed because one or more lines are too long

@ -0,0 +1,218 @@
use cicd/services/github::replicateContextsWithInputs
use cicd/services/github::runAction
use javascript::JavaScriptEngine
use log/logger::Logger
use std/data/string_map::StringMap
use std/data/string_map::|entry
use std/data/string_map::|map
use std/ops/option::|wrap
// Generated Mélodium wrapper around the composite action
// `dtolnay/rust-toolchain@nightly`. Each shell step of the composite action
// becomes one `runAction` below; the chain of `.continue -> .trigger`
// connections at the bottom reproduces the action's step ordering.
// Embedded scripts are raw blocks (`${`/`${{`/`${{{` … matching close);
// the delimiter's brace count appears chosen so the closing delimiter
// cannot collide with braces inside the script.
#[github_action(dtolnay/rust-toolchain@nightly)]
#[generated(true)]
treatment rustToolchain[contexts: JavaScriptEngine, logger: Logger](
var components: string = "",
var target: string = "",
var targets: string = "",
var toolchain: string = "nightly"
)
input trigger: Block<void>
output completed: Block<void>
output continue: Block<void>
output failed: Block<void>
// Engine holding the replicated GitHub contexts for expression evaluation.
model composite_contexts: JavaScriptEngine()
{
// Builds the `--target`/`--component` flag strings and the conditional
// `--allow-downgrade` flag, writing them to $GITHUB_OUTPUT.
flags: runAction[contexts = composite_contexts, logger = logger](
commands = ${{{: construct rustup command line
echo "targets=$(for t in ${targets//,/ }; do echo -n ' --target' $t; done)" >> $GITHUB_OUTPUT
echo "components=$(for c in ${components//,/ }; do echo -n ' --component' $c; done)" >> $GITHUB_OUTPUT
echo "downgrade=${{steps.parse.outputs.toolchain == 'nightly' && inputs.components && ' --allow-downgrade' || ''}}" >> $GITHUB_OUTPUT
}}},
env = |wrap<StringMap>(
|map(
[|entry(
"targets",
"${{inputs.targets || inputs.target || ''}}"
),
|entry(
"components",
"${{inputs.components}}"
)]
)
),
id = "flags",
name = "flags",
shell = "bash"
)
// Resolves aliases like "stable 3 months ago" / "stable minus N releases"
// / "1.NN" to a concrete toolchain version in steps.parse.outputs.
parse: runAction[contexts = composite_contexts, logger = logger](
commands = ${{{: parse toolchain version
if [[ -z $toolchain ]]; then
# GitHub does not enforce `required: true` inputs itself. https://github.com/actions/runner/issues/1070
echo "'toolchain' is a required input" >&2
exit 1
elif [[ $toolchain =~ ^stable' '[0-9]+' '(year|month|week|day)s?' 'ago$ ]]; then
if [[ ${{runner.os}} == macOS ]]; then
echo "toolchain=1.$((($(date -v-$(sed 's/stable \([0-9]*\) \(.\).*/\1\2/' <<< $toolchain) +%s)/60/60/24-16569)/7/6))" >> $GITHUB_OUTPUT
else
echo "toolchain=1.$((($(date --date "${toolchain#stable }" +%s)/60/60/24-16569)/7/6))" >> $GITHUB_OUTPUT
fi
elif [[ $toolchain =~ ^stable' 'minus' '[0-9]+' 'releases?$ ]]; then
echo "toolchain=1.$((($(date +%s)/60/60/24-16569)/7/6-${toolchain//[^0-9]/}))" >> $GITHUB_OUTPUT
elif [[ $toolchain =~ ^1\.[0-9]+$ ]]; then
echo "toolchain=1.$((i=${toolchain#1.}, c=($(date +%s)/60/60/24-16569)/7/6, i+9*i*(10*i<=c)+90*i*(100*i<=c)))" >> $GITHUB_OUTPUT
else
echo "toolchain=$toolchain" >> $GITHUB_OUTPUT
fi
}}},
env = |wrap<StringMap>(
|map(
[|entry(
"toolchain",
"${{inputs.toolchain}}"
)]
)
),
id = "parse",
name = "parse",
shell = "bash"
)
// Exposes the treatment parameters as `inputs.*` to the steps above.
replicateContextsWithInputs[action_contexts = composite_contexts, main_contexts = contexts](
inputs = |map(
[|entry(
"toolchain",
toolchain
),
|entry(
"targets",
targets
),
|entry(
"target",
target
),
|entry(
"components",
components
)]
)
)
// Emits a `cachekey` output derived from rustc's commit date and hash.
rustcVersion: runAction[contexts = composite_contexts, logger = logger](
commands = ${{{: create cachekey
DATE=$(rustc +${{steps.parse.outputs.toolchain}} --version --verbose | sed -ne 's/^commit-date: \(20[0-9][0-9]\)-\([01][0-9]\)-\([0-3][0-9]\)$/\1\2\3/p')
HASH=$(rustc +${{steps.parse.outputs.toolchain}} --version --verbose | sed -ne 's/^commit-hash: //p')
echo "cachekey=$(echo $DATE$HASH | head -c12)" >> $GITHUB_OUTPUT
}}},
id = "rustc-version",
name = "rustcVersion",
shell = "bash"
)
// Installs the resolved toolchain with the flags computed by `flags`.
rustupToolchainInstallStepsParseOutputsToolchain: runAction[contexts = composite_contexts, logger = logger](
commands = "rustup toolchain install ${{steps.parse.outputs.toolchain}}${{steps.flags.outputs.targets}}${{steps.flags.outputs.components}} --profile minimal${{steps.flags.outputs.downgrade}} --no-self-update",
display_name = "rustup toolchain install ${{steps.parse.outputs.toolchain}}",
name = "rustupToolchainInstallStepsParseOutputsToolchain",
shell = "bash"
)
// Selects the Cargo registry protocol for old (1.66-1.69) toolchains.
step10: runAction[contexts = composite_contexts, logger = logger](
commands = ${{{: enable Cargo sparse registry
# implemented in 1.66, stabilized in 1.68, made default in 1.70
if [ -z "${CARGO_REGISTRIES_CRATES_IO_PROTOCOL+set}" -o -f "${{runner.temp}}"/.implicit_cargo_registries_crates_io_protocol ]; then
if rustc +${{steps.parse.outputs.toolchain}} --version --verbose | grep -q '^release: 1\.6[89]\.'; then
touch "${{runner.temp}}"/.implicit_cargo_registries_crates_io_protocol || true
echo CARGO_REGISTRIES_CRATES_IO_PROTOCOL=sparse >> $GITHUB_ENV
elif rustc +${{steps.parse.outputs.toolchain}} --version --verbose | grep -q '^release: 1\.6[67]\.'; then
touch "${{runner.temp}}"/.implicit_cargo_registries_crates_io_protocol || true
echo CARGO_REGISTRIES_CRATES_IO_PROTOCOL=git >> $GITHUB_ENV
fi
fi
}}},
name = "step10",
shell = "bash"
)
// Disables HTTP multiplexing on 1.70/1.71 to dodge curl 8.0 errors.
step11: runAction[contexts = composite_contexts, logger = logger](
commands = ${{{: work around spurious network errors in curl 8.0
# https://rust-lang.zulipchat.com/#narrow/stream/246057-t-cargo/topic/timeout.20investigation
if rustc +${{steps.parse.outputs.toolchain}} --version --verbose | grep -q '^release: 1\.7[01]\.'; then
echo CARGO_HTTP_MULTIPLEXING=false >> $GITHUB_ENV
fi
}}},
name = "step11",
shell = "bash"
)
// Final step: print the installed rustc version (terminal of the chain).
step12: runAction[contexts = composite_contexts, logger = logger](
commands = "rustc +${{steps.parse.outputs.toolchain}} --version --verbose",
name = "step12",
shell = "bash"
)
// Ensures $CARGO_HOME is set (platform-dependent default).
step2: runAction[contexts = composite_contexts, logger = logger](
commands = ${{{: set $CARGO_HOME
echo CARGO_HOME=${CARGO_HOME:-"${{runner.os == 'Windows' && '$USERPROFILE\.cargo' || '$HOME/.cargo'}}"} >> $GITHUB_ENV
}}},
name = "step2",
shell = "bash"
)
// Bootstraps rustup on non-Windows runners if absent.
step3: runAction[contexts = composite_contexts, logger = logger](
commands = ${: install rustup if needed
if ! command -v rustup &>/dev/null; then
curl --proto '=https' --tlsv1.2 --retry 10 --retry-connrefused --location --silent --show-error --fail https://sh.rustup.rs | sh -s -- --default-toolchain none -y
echo "$CARGO_HOME/bin" >> $GITHUB_PATH
fi
},
if = "runner.os != 'Windows'",
name = "step3",
shell = "bash"
)
// Bootstraps rustup on Windows runners if absent.
step4: runAction[contexts = composite_contexts, logger = logger](
commands = ${{{: install rustup if needed on windows
if ! command -v rustup &>/dev/null; then
curl --proto '=https' --tlsv1.2 --retry 10 --retry-connrefused --location --silent --show-error --fail https://win.rustup.rs/${{runner.arch == 'ARM64' && 'aarch64' || 'x86_64'}} --output '${{runner.temp}}\rustup-init.exe'
'${{runner.temp}}\rustup-init.exe' --default-toolchain none --no-modify-path -y
echo "$CARGO_HOME\bin" >> $GITHUB_PATH
fi
}}},
if = "runner.os == 'Windows'",
name = "step4",
shell = "bash"
)
// Sets the default toolchain; failure tolerated (continue_on_error).
step6: runAction[contexts = composite_contexts, logger = logger](
commands = "rustup default ${{steps.parse.outputs.toolchain}}",
continue_on_error = "true",
name = "step6",
shell = "bash"
)
// Defaults CARGO_INCREMENTAL=0 unless the caller already set it.
step8: runAction[contexts = composite_contexts, logger = logger](
commands = ${{: disable incremental compilation
if [ -z "${CARGO_INCREMENTAL+set}" ]; then
echo CARGO_INCREMENTAL=0 >> $GITHUB_ENV
fi
}},
name = "step8",
shell = "bash"
)
// Defaults CARGO_TERM_COLOR=always unless the caller already set it.
step9: runAction[contexts = composite_contexts, logger = logger](
commands = ${{: enable colors in Cargo output
if [ -z "${CARGO_TERM_COLOR+set}" ]; then
echo CARGO_TERM_COLOR=always >> $GITHUB_ENV
fi
}},
name = "step9",
shell = "bash"
)
// Step chain: parse -> flags -> env setup -> rustup bootstrap -> install
// -> default -> cachekey -> cargo env tweaks -> version print; outputs of
// the treatment are wired to the last step (step12).
Self.trigger -> replicateContextsWithInputs.trigger
replicateContextsWithInputs.ready -> parse.trigger
parse.continue -> flags.trigger
flags.continue -> step2.trigger
step2.continue -> step3.trigger
step3.continue -> step4.trigger
step4.continue -> rustupToolchainInstallStepsParseOutputsToolchain.trigger
rustupToolchainInstallStepsParseOutputsToolchain.continue -> step6.trigger
step6.continue -> rustcVersion.trigger
rustcVersion.continue -> step8.trigger
step8.continue -> step9.trigger
step9.continue -> step10.trigger
step10.continue -> step11.trigger
step11.continue -> step12.trigger
step12.completed -> Self.completed
step12.failed -> Self.failed
step12.continue -> Self.continue
}

@ -0,0 +1,218 @@
use cicd/services/github::replicateContextsWithInputs
use cicd/services/github::runAction
use javascript::JavaScriptEngine
use log/logger::Logger
use std/data/string_map::StringMap
use std/data/string_map::|entry
use std/data/string_map::|map
use std/ops/option::|wrap
// Generated Mélodium wrapper around the composite action
// `dtolnay/rust-toolchain@stable`. Identical structure to the nightly
// variant in this commit except for the attribute reference and the
// `toolchain` default ("stable"); each composite shell step is one
// `runAction`, ordered by the `.continue -> .trigger` chain at the bottom.
#[generated(true)]
#[github_action(dtolnay/rust-toolchain@stable)]
treatment rustToolchain[contexts: JavaScriptEngine, logger: Logger](
var components: string = "",
var target: string = "",
var targets: string = "",
var toolchain: string = "stable"
)
input trigger: Block<void>
output completed: Block<void>
output continue: Block<void>
output failed: Block<void>
// Engine holding the replicated GitHub contexts for expression evaluation.
model composite_contexts: JavaScriptEngine()
{
// Builds the `--target`/`--component` flag strings and the conditional
// `--allow-downgrade` flag, writing them to $GITHUB_OUTPUT.
flags: runAction[contexts = composite_contexts, logger = logger](
commands = ${{{: construct rustup command line
echo "targets=$(for t in ${targets//,/ }; do echo -n ' --target' $t; done)" >> $GITHUB_OUTPUT
echo "components=$(for c in ${components//,/ }; do echo -n ' --component' $c; done)" >> $GITHUB_OUTPUT
echo "downgrade=${{steps.parse.outputs.toolchain == 'nightly' && inputs.components && ' --allow-downgrade' || ''}}" >> $GITHUB_OUTPUT
}}},
env = |wrap<StringMap>(
|map(
[|entry(
"targets",
"${{inputs.targets || inputs.target || ''}}"
),
|entry(
"components",
"${{inputs.components}}"
)]
)
),
id = "flags",
name = "flags",
shell = "bash"
)
// Resolves aliases like "stable 3 months ago" / "stable minus N releases"
// / "1.NN" to a concrete toolchain version in steps.parse.outputs.
parse: runAction[contexts = composite_contexts, logger = logger](
commands = ${{{: parse toolchain version
if [[ -z $toolchain ]]; then
# GitHub does not enforce `required: true` inputs itself. https://github.com/actions/runner/issues/1070
echo "'toolchain' is a required input" >&2
exit 1
elif [[ $toolchain =~ ^stable' '[0-9]+' '(year|month|week|day)s?' 'ago$ ]]; then
if [[ ${{runner.os}} == macOS ]]; then
echo "toolchain=1.$((($(date -v-$(sed 's/stable \([0-9]*\) \(.\).*/\1\2/' <<< $toolchain) +%s)/60/60/24-16569)/7/6))" >> $GITHUB_OUTPUT
else
echo "toolchain=1.$((($(date --date "${toolchain#stable }" +%s)/60/60/24-16569)/7/6))" >> $GITHUB_OUTPUT
fi
elif [[ $toolchain =~ ^stable' 'minus' '[0-9]+' 'releases?$ ]]; then
echo "toolchain=1.$((($(date +%s)/60/60/24-16569)/7/6-${toolchain//[^0-9]/}))" >> $GITHUB_OUTPUT
elif [[ $toolchain =~ ^1\.[0-9]+$ ]]; then
echo "toolchain=1.$((i=${toolchain#1.}, c=($(date +%s)/60/60/24-16569)/7/6, i+9*i*(10*i<=c)+90*i*(100*i<=c)))" >> $GITHUB_OUTPUT
else
echo "toolchain=$toolchain" >> $GITHUB_OUTPUT
fi
}}},
env = |wrap<StringMap>(
|map(
[|entry(
"toolchain",
"${{inputs.toolchain}}"
)]
)
),
id = "parse",
name = "parse",
shell = "bash"
)
// Exposes the treatment parameters as `inputs.*` to the steps above.
replicateContextsWithInputs[action_contexts = composite_contexts, main_contexts = contexts](
inputs = |map(
[|entry(
"toolchain",
toolchain
),
|entry(
"targets",
targets
),
|entry(
"target",
target
),
|entry(
"components",
components
)]
)
)
// Emits a `cachekey` output derived from rustc's commit date and hash.
rustcVersion: runAction[contexts = composite_contexts, logger = logger](
commands = ${{{: create cachekey
DATE=$(rustc +${{steps.parse.outputs.toolchain}} --version --verbose | sed -ne 's/^commit-date: \(20[0-9][0-9]\)-\([01][0-9]\)-\([0-3][0-9]\)$/\1\2\3/p')
HASH=$(rustc +${{steps.parse.outputs.toolchain}} --version --verbose | sed -ne 's/^commit-hash: //p')
echo "cachekey=$(echo $DATE$HASH | head -c12)" >> $GITHUB_OUTPUT
}}},
id = "rustc-version",
name = "rustcVersion",
shell = "bash"
)
// Installs the resolved toolchain with the flags computed by `flags`.
rustupToolchainInstallStepsParseOutputsToolchain: runAction[contexts = composite_contexts, logger = logger](
commands = "rustup toolchain install ${{steps.parse.outputs.toolchain}}${{steps.flags.outputs.targets}}${{steps.flags.outputs.components}} --profile minimal${{steps.flags.outputs.downgrade}} --no-self-update",
display_name = "rustup toolchain install ${{steps.parse.outputs.toolchain}}",
name = "rustupToolchainInstallStepsParseOutputsToolchain",
shell = "bash"
)
// Selects the Cargo registry protocol for old (1.66-1.69) toolchains.
step10: runAction[contexts = composite_contexts, logger = logger](
commands = ${{{: enable Cargo sparse registry
# implemented in 1.66, stabilized in 1.68, made default in 1.70
if [ -z "${CARGO_REGISTRIES_CRATES_IO_PROTOCOL+set}" -o -f "${{runner.temp}}"/.implicit_cargo_registries_crates_io_protocol ]; then
if rustc +${{steps.parse.outputs.toolchain}} --version --verbose | grep -q '^release: 1\.6[89]\.'; then
touch "${{runner.temp}}"/.implicit_cargo_registries_crates_io_protocol || true
echo CARGO_REGISTRIES_CRATES_IO_PROTOCOL=sparse >> $GITHUB_ENV
elif rustc +${{steps.parse.outputs.toolchain}} --version --verbose | grep -q '^release: 1\.6[67]\.'; then
touch "${{runner.temp}}"/.implicit_cargo_registries_crates_io_protocol || true
echo CARGO_REGISTRIES_CRATES_IO_PROTOCOL=git >> $GITHUB_ENV
fi
fi
}}},
name = "step10",
shell = "bash"
)
// Disables HTTP multiplexing on 1.70/1.71 to dodge curl 8.0 errors.
step11: runAction[contexts = composite_contexts, logger = logger](
commands = ${{{: work around spurious network errors in curl 8.0
# https://rust-lang.zulipchat.com/#narrow/stream/246057-t-cargo/topic/timeout.20investigation
if rustc +${{steps.parse.outputs.toolchain}} --version --verbose | grep -q '^release: 1\.7[01]\.'; then
echo CARGO_HTTP_MULTIPLEXING=false >> $GITHUB_ENV
fi
}}},
name = "step11",
shell = "bash"
)
// Final step: print the installed rustc version (terminal of the chain).
step12: runAction[contexts = composite_contexts, logger = logger](
commands = "rustc +${{steps.parse.outputs.toolchain}} --version --verbose",
name = "step12",
shell = "bash"
)
// Ensures $CARGO_HOME is set (platform-dependent default).
step2: runAction[contexts = composite_contexts, logger = logger](
commands = ${{{: set $CARGO_HOME
echo CARGO_HOME=${CARGO_HOME:-"${{runner.os == 'Windows' && '$USERPROFILE\.cargo' || '$HOME/.cargo'}}"} >> $GITHUB_ENV
}}},
name = "step2",
shell = "bash"
)
// Bootstraps rustup on non-Windows runners if absent.
step3: runAction[contexts = composite_contexts, logger = logger](
commands = ${: install rustup if needed
if ! command -v rustup &>/dev/null; then
curl --proto '=https' --tlsv1.2 --retry 10 --retry-connrefused --location --silent --show-error --fail https://sh.rustup.rs | sh -s -- --default-toolchain none -y
echo "$CARGO_HOME/bin" >> $GITHUB_PATH
fi
},
if = "runner.os != 'Windows'",
name = "step3",
shell = "bash"
)
// Bootstraps rustup on Windows runners if absent.
step4: runAction[contexts = composite_contexts, logger = logger](
commands = ${{{: install rustup if needed on windows
if ! command -v rustup &>/dev/null; then
curl --proto '=https' --tlsv1.2 --retry 10 --retry-connrefused --location --silent --show-error --fail https://win.rustup.rs/${{runner.arch == 'ARM64' && 'aarch64' || 'x86_64'}} --output '${{runner.temp}}\rustup-init.exe'
'${{runner.temp}}\rustup-init.exe' --default-toolchain none --no-modify-path -y
echo "$CARGO_HOME\bin" >> $GITHUB_PATH
fi
}}},
if = "runner.os == 'Windows'",
name = "step4",
shell = "bash"
)
// Sets the default toolchain; failure tolerated (continue_on_error).
step6: runAction[contexts = composite_contexts, logger = logger](
commands = "rustup default ${{steps.parse.outputs.toolchain}}",
continue_on_error = "true",
name = "step6",
shell = "bash"
)
// Defaults CARGO_INCREMENTAL=0 unless the caller already set it.
step8: runAction[contexts = composite_contexts, logger = logger](
commands = ${{: disable incremental compilation
if [ -z "${CARGO_INCREMENTAL+set}" ]; then
echo CARGO_INCREMENTAL=0 >> $GITHUB_ENV
fi
}},
name = "step8",
shell = "bash"
)
// Defaults CARGO_TERM_COLOR=always unless the caller already set it.
step9: runAction[contexts = composite_contexts, logger = logger](
commands = ${{: enable colors in Cargo output
if [ -z "${CARGO_TERM_COLOR+set}" ]; then
echo CARGO_TERM_COLOR=always >> $GITHUB_ENV
fi
}},
name = "step9",
shell = "bash"
)
// Step chain: parse -> flags -> env setup -> rustup bootstrap -> install
// -> default -> cachekey -> cargo env tweaks -> version print; outputs of
// the treatment are wired to the last step (step12).
Self.trigger -> replicateContextsWithInputs.trigger
replicateContextsWithInputs.ready -> parse.trigger
parse.continue -> flags.trigger
flags.continue -> step2.trigger
step2.continue -> step3.trigger
step3.continue -> step4.trigger
step4.continue -> rustupToolchainInstallStepsParseOutputsToolchain.trigger
rustupToolchainInstallStepsParseOutputsToolchain.continue -> step6.trigger
step6.continue -> rustcVersion.trigger
rustcVersion.continue -> step8.trigger
step8.continue -> step9.trigger
step9.continue -> step10.trigger
step10.continue -> step11.trigger
step11.continue -> step12.trigger
step12.completed -> Self.completed
step12.failed -> Self.failed
step12.continue -> Self.continue
}

@@ -0,0 +1,255 @@
use cicd/services/github::replicateContextsWithInputs
use cicd/services/github::runAction
use javascript::JavaScriptEngine
use log/logger::Logger
use std/data/string_map::StringMap
use std/data/string_map::|entry
use std/data/string_map::|map
use std/ops/option::|wrap
#[generated(true)]
#[github_action(peter-evans/create-pull-request@v7)]
treatment createPullRequest[contexts: JavaScriptEngine, logger: Logger](
var add_paths: string = "",
var assignees: string = "",
var author: string = "${{ github.actor }} <${{ github.actor_id }}+${{ github.actor }}@users.noreply.github.com>",
var base: string = "",
var body: string = "Automated changes by [create-pull-request](https://github.com/peter-evans/create-pull-request) GitHub action",
var body_path: string = "",
var branch: string = "create-pull-request/patch",
var branch_suffix: string = "",
var branch_token: string = "",
var commit_message: string = "[create-pull-request] automated change",
var committer: string = "github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>",
var delete_branch: string = "false",
var draft: string = "false",
var labels: string = "",
var maintainer_can_modify: string = "true",
var milestone: string = "",
var path: string = "",
var push_to_fork: string = "",
var reviewers: string = "",
var sign_commits: string = "false",
var signoff: string = "false",
var team_reviewers: string = "",
var title: string = "Changes by create-pull-request action",
var token: string = "${{ github.token }}"
)
input trigger: Block<void>
output completed: Block<void>
output continue: Block<void>
output failed: Block<void>
model node_contexts: JavaScriptEngine()
{
mainAction: runAction[contexts = node_contexts, logger = logger](
commands = "node .melodium-ci/github/actions/third/peter_evans/create_pull_request/ref_v7/dist/index.js",
env = |wrap<StringMap>(
|map(
[|entry(
"INPUT_TOKEN",
token
),
|entry(
"INPUT_BRANCH-TOKEN",
branch_token
),
|entry(
"INPUT_PATH",
path
),
|entry(
"INPUT_ADD-PATHS",
add_paths
),
|entry(
"INPUT_COMMIT-MESSAGE",
commit_message
),
|entry(
"INPUT_COMMITTER",
committer
),
|entry(
"INPUT_AUTHOR",
author
),
|entry(
"INPUT_SIGNOFF",
signoff
),
|entry(
"INPUT_BRANCH",
branch
),
|entry(
"INPUT_DELETE-BRANCH",
delete_branch
),
|entry(
"INPUT_BRANCH-SUFFIX",
branch_suffix
),
|entry(
"INPUT_BASE",
base
),
|entry(
"INPUT_PUSH-TO-FORK",
push_to_fork
),
|entry(
"INPUT_SIGN-COMMITS",
sign_commits
),
|entry(
"INPUT_TITLE",
title
),
|entry(
"INPUT_BODY",
body
),
|entry(
"INPUT_BODY-PATH",
body_path
),
|entry(
"INPUT_LABELS",
labels
),
|entry(
"INPUT_ASSIGNEES",
assignees
),
|entry(
"INPUT_REVIEWERS",
reviewers
),
|entry(
"INPUT_TEAM-REVIEWERS",
team_reviewers
),
|entry(
"INPUT_MILESTONE",
milestone
),
|entry(
"INPUT_DRAFT",
draft
),
|entry(
"INPUT_MAINTAINER-CAN-MODIFY",
maintainer_can_modify
)]
)
),
name = "createPullRequest"
)
replicateContextsWithInputs[action_contexts = node_contexts, main_contexts = contexts](
inputs = |map(
[|entry(
"token",
token
),
|entry(
"branch-token",
branch_token
),
|entry(
"path",
path
),
|entry(
"add-paths",
add_paths
),
|entry(
"commit-message",
commit_message
),
|entry(
"committer",
committer
),
|entry(
"author",
author
),
|entry(
"signoff",
signoff
),
|entry(
"branch",
branch
),
|entry(
"delete-branch",
delete_branch
),
|entry(
"branch-suffix",
branch_suffix
),
|entry(
"base",
base
),
|entry(
"push-to-fork",
push_to_fork
),
|entry(
"sign-commits",
sign_commits
),
|entry(
"title",
title
),
|entry(
"body",
body
),
|entry(
"body-path",
body_path
),
|entry(
"labels",
labels
),
|entry(
"assignees",
assignees
),
|entry(
"reviewers",
reviewers
),
|entry(
"team-reviewers",
team_reviewers
),
|entry(
"milestone",
milestone
),
|entry(
"draft",
draft
),
|entry(
"maintainer-can-modify",
maintainer_can_modify
)]
)
)
Self.trigger -> replicateContextsWithInputs.trigger
replicateContextsWithInputs.ready -> mainAction.trigger
mainAction.completed -> Self.completed
mainAction.failed -> Self.failed
mainAction.continue -> Self.continue
}

File diff suppressed because one or more lines are too long

@@ -0,0 +1,174 @@
use cicd/services/github::replicateContextsWithInputs
use cicd/services/github::runAction
use javascript::JavaScriptEngine
use log/logger::Logger
use std/data/string_map::StringMap
use std/data/string_map::|entry
use std/data/string_map::|map
use std/ops/option::|wrap
#[generated(true)]
#[github_action(Swatinem/rust-cache@v2)]
treatment rustCache[contexts: JavaScriptEngine, logger: Logger](
var cache_all_crates: string = "false",
var cache_bin: string = "true",
var cache_directories: string = "",
var cache_on_failure: string = "",
var cache_provider: string = "github",
var cache_targets: string = "true",
var cache_workspace_crates: string = "false",
var env_vars: string = "",
var key: string = "",
var lookup_only: string = "false",
var prefix_key: string = "v0-rust",
var save_if: string = "true",
var shared_key: string = "",
var workspaces: string = ""
)
input post_trigger: Block<void>
input trigger: Block<void>
output completed: Block<void>
output continue: Block<void>
output failed: Block<void>
output post_completed: Block<void>
model node_contexts: JavaScriptEngine()
{
mainAction: runAction[contexts = node_contexts, logger = logger](
commands = "node .melodium-ci/github/actions/third/swatinem/rust_cache/ref_v2/dist/restore/index.js",
env = |wrap<StringMap>(
|map(
[|entry(
"INPUT_PREFIX-KEY",
prefix_key
),
|entry(
"INPUT_SHARED-KEY",
shared_key
),
|entry(
"INPUT_KEY",
key
),
|entry(
"INPUT_ENV-VARS",
env_vars
),
|entry(
"INPUT_WORKSPACES",
workspaces
),
|entry(
"INPUT_CACHE-DIRECTORIES",
cache_directories
),
|entry(
"INPUT_CACHE-TARGETS",
cache_targets
),
|entry(
"INPUT_CACHE-ON-FAILURE",
cache_on_failure
),
|entry(
"INPUT_CACHE-ALL-CRATES",
cache_all_crates
),
|entry(
"INPUT_CACHE-WORKSPACE-CRATES",
cache_workspace_crates
),
|entry(
"INPUT_SAVE-IF",
save_if
),
|entry(
"INPUT_CACHE-PROVIDER",
cache_provider
),
|entry(
"INPUT_CACHE-BIN",
cache_bin
),
|entry(
"INPUT_LOOKUP-ONLY",
lookup_only
)]
)
),
name = "rustCache"
)
postAction: runAction[contexts = node_contexts, logger = logger](
commands = "node .melodium-ci/github/actions/third/swatinem/rust_cache/ref_v2/dist/save/index.js",
if = "success() || env.CACHE_ON_FAILURE == 'true'",
name = "post:rustCache"
)
replicateContextsWithInputs[action_contexts = node_contexts, main_contexts = contexts](
inputs = |map(
[|entry(
"prefix-key",
prefix_key
),
|entry(
"shared-key",
shared_key
),
|entry(
"key",
key
),
|entry(
"env-vars",
env_vars
),
|entry(
"workspaces",
workspaces
),
|entry(
"cache-directories",
cache_directories
),
|entry(
"cache-targets",
cache_targets
),
|entry(
"cache-on-failure",
cache_on_failure
),
|entry(
"cache-all-crates",
cache_all_crates
),
|entry(
"cache-workspace-crates",
cache_workspace_crates
),
|entry(
"save-if",
save_if
),
|entry(
"cache-provider",
cache_provider
),
|entry(
"cache-bin",
cache_bin
),
|entry(
"lookup-only",
lookup_only
)]
)
)
Self.post_trigger -> postAction.trigger
postAction.completed -> Self.post_completed
Self.trigger -> replicateContextsWithInputs.trigger
replicateContextsWithInputs.ready -> mainAction.trigger
mainAction.completed -> Self.completed
mainAction.failed -> Self.failed
mainAction.continue -> Self.continue
}

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

@@ -0,0 +1,100 @@
use cicd/services/github::replicateContextsWithInputs
use cicd/services/github::runAction
use javascript::JavaScriptEngine
use log/logger::Logger
use std/data/string_map::StringMap
use std/data/string_map::|entry
use std/data/string_map::|map
use std/ops/option::|wrap
#[github_action(taiki-e/install-action@cargo-hack)]
#[generated(true)]
treatment installAction[contexts: JavaScriptEngine, logger: Logger](
var checksum: string = "true",
var fallback: string = "cargo-binstall",
var tool: string = "cargo-hack"
)
input trigger: Block<void>
output completed: Block<void>
output continue: Block<void>
output failed: Block<void>
model composite_contexts: JavaScriptEngine()
{
replicateContextsWithInputs[action_contexts = composite_contexts, main_contexts = contexts](
inputs = |map(
[|entry(
"tool",
tool
),
|entry(
"checksum",
checksum
),
|entry(
"fallback",
fallback
)]
)
)
step0: runAction[contexts = composite_contexts, logger = logger](
commands = ${set -eu
if ! command -v bash >/dev/null; then
if grep -Eq '^ID=alpine' /etc/os-release; then
printf '::group::Install packages required for install-action (bash)\n'
# NB: sync with apk_install in main.sh
if command -v sudo >/dev/null; then
sudo apk --no-cache add bash
elif command -v doas >/dev/null; then
doas apk --no-cache add bash
else
apk --no-cache add bash
fi
printf '::endgroup::\n'
else
printf '::error::install-action requires bash\n'
exit 1
fi
fi
},
if = "runner.os == 'Linux'",
name = "step0",
shell = "sh"
)
step1: runAction[contexts = composite_contexts, logger = logger](
commands = "bash --noprofile --norc \"${GITHUB_ACTION_PATH:?}/main.sh\"",
env = |wrap<StringMap>(
|map(
[|entry(
"ACTION_USER_AGENT",
"${{ github.action_repository }} (${{ github.action_ref }})"
),
|entry(
"INPUT_CHECKSUM",
"${{ inputs.checksum }}"
),
|entry(
"INPUT_TOOL",
"${{ inputs.tool }}"
),
|entry(
"INPUT_FALLBACK",
"${{ inputs.fallback }}"
),
|entry(
"DEFAULT_GITHUB_TOKEN",
"${{ github.token }}"
)]
)
),
name = "step1",
shell = "bash"
)
Self.trigger -> replicateContextsWithInputs.trigger
replicateContextsWithInputs.ready -> step0.trigger
step0.continue -> step1.trigger
step1.completed -> Self.completed
step1.failed -> Self.failed
step1.continue -> Self.continue
}

@@ -0,0 +1,100 @@
use cicd/services/github::replicateContextsWithInputs
use cicd/services/github::runAction
use javascript::JavaScriptEngine
use log/logger::Logger
use std/data/string_map::StringMap
use std/data/string_map::|entry
use std/data/string_map::|map
use std/ops/option::|wrap
#[generated(true)]
#[github_action(taiki-e/install-action@v2)]
treatment installAction[contexts: JavaScriptEngine, logger: Logger](
var checksum: string = "true",
var fallback: string = "cargo-binstall",
var tool: string = ""
)
input trigger: Block<void>
output completed: Block<void>
output continue: Block<void>
output failed: Block<void>
model composite_contexts: JavaScriptEngine()
{
replicateContextsWithInputs[action_contexts = composite_contexts, main_contexts = contexts](
inputs = |map(
[|entry(
"tool",
tool
),
|entry(
"checksum",
checksum
),
|entry(
"fallback",
fallback
)]
)
)
step0: runAction[contexts = composite_contexts, logger = logger](
commands = ${set -eu
if ! command -v bash >/dev/null; then
if grep -Eq '^ID=alpine' /etc/os-release; then
printf '::group::Install packages required for install-action (bash)\n'
# NB: sync with apk_install in main.sh
if command -v sudo >/dev/null; then
sudo apk --no-cache add bash
elif command -v doas >/dev/null; then
doas apk --no-cache add bash
else
apk --no-cache add bash
fi
printf '::endgroup::\n'
else
printf '::error::install-action requires bash\n'
exit 1
fi
fi
},
if = "runner.os == 'Linux'",
name = "step0",
shell = "sh"
)
step1: runAction[contexts = composite_contexts, logger = logger](
commands = "bash --noprofile --norc \"${GITHUB_ACTION_PATH:?}/main.sh\"",
env = |wrap<StringMap>(
|map(
[|entry(
"ACTION_USER_AGENT",
"${{ github.action_repository }} (${{ github.action_ref }})"
),
|entry(
"INPUT_CHECKSUM",
"${{ inputs.checksum }}"
),
|entry(
"DEFAULT_GITHUB_TOKEN",
"${{ github.token }}"
),
|entry(
"INPUT_FALLBACK",
"${{ inputs.fallback }}"
),
|entry(
"INPUT_TOOL",
"${{ inputs.tool }}"
)]
)
),
name = "step1",
shell = "bash"
)
Self.trigger -> replicateContextsWithInputs.trigger
replicateContextsWithInputs.ready -> step0.trigger
step0.continue -> step1.trigger
step1.completed -> Self.completed
step1.failed -> Self.failed
step1.continue -> Self.continue
}

@@ -0,0 +1,76 @@
use cicd/services/github::replicateContextsWithInputs
use cicd/services/github::runAction
use javascript::JavaScriptEngine
use log/logger::Logger
use std/data/string_map::StringMap
use std/data/string_map::|entry
use std/data/string_map::|map
use std/ops/option::|wrap
#[generated(true)]
#[github_action(taiki-e/setup-cross-toolchain-action@v1)]
treatment setupCrossToolchainAction[contexts: JavaScriptEngine, logger: Logger](
var qemu: string = "",
var runner: string = "",
var target: string = "",
var wine: string = ""
)
input trigger: Block<void>
output completed: Block<void>
output continue: Block<void>
output failed: Block<void>
model composite_contexts: JavaScriptEngine()
{
replicateContextsWithInputs[action_contexts = composite_contexts, main_contexts = contexts](
inputs = |map(
[|entry(
"target",
target
),
|entry(
"runner",
runner
),
|entry(
"qemu",
qemu
),
|entry(
"wine",
wine
)]
)
)
step0: runAction[contexts = composite_contexts, logger = logger](
commands = "bash --noprofile --norc \"${GITHUB_ACTION_PATH:?}/main.sh\"",
env = |wrap<StringMap>(
|map(
[|entry(
"INPUT_WINE",
"${{ inputs.wine }}"
),
|entry(
"INPUT_RUNNER",
"${{ inputs.runner }}"
),
|entry(
"INPUT_TARGET",
"${{ inputs.target }}"
),
|entry(
"INPUT_QEMU",
"${{ inputs.qemu }}"
)]
)
),
name = "step0",
shell = "bash"
)
Self.trigger -> replicateContextsWithInputs.trigger
replicateContextsWithInputs.ready -> step0.trigger
step0.completed -> Self.completed
step0.failed -> Self.failed
step0.continue -> Self.continue
}

@@ -0,0 +1,619 @@
use root/github/actions/third/actions/cache/ref_v4::cache
use root/github/actions/third/actions/checkout/ref_v5::checkout
use root/github/actions/third/david_anson/markdownlint_cli2_action/ref_v20::markdownlintCli2Action
use root/github/actions/third/dtolnay/rust_toolchain/ref_nightly::rustToolchain as RefNightlyrustToolchain
use root/github/actions/third/dtolnay/rust_toolchain/ref_stable::rustToolchain as RefStablerustToolchain
use root/github/actions/third/peter_evans/create_pull_request/ref_v7::createPullRequest
use root/github/actions/third/swatinem/rust_cache/ref_v2::rustCache
use root/github/actions/third/taiki_e/install_action/ref_cargo_hack::installAction as RefCargoHackinstallAction
use root/github/actions/third/taiki_e/install_action/ref_v2::installAction as RefV2installAction
use root/github/actions/third/taiki_e/setup_cross_toolchain_action/ref_v1::setupCrossToolchainAction
use cicd/services/github::githubJobResult
use cicd/services/github::includeNeeds
use cicd/services/github::prepareContexts
use cicd/services/github::runAction
use javascript::JavaScriptEngine
use json/value::|null
use json::Json
use log/logger::Logger
use std/data/string_map::StringMap
use std/data/string_map::|entry
use std/data/string_map::|map
use std/flow/concentrate::Concentrator
use std/flow/concentrate::concentrateBlock
use std/flow/concentrate::concentrated
use std/flow::trigger
use std/ops/option::|wrap
treatment buildTestAyaAll[logger: Logger](
var github_contexts: string
)
input trigger: Block<void>
output finished: Block<void>
model finishConcentrator: Concentrator()
{
aarch64: buildTestAya[logger=logger](target_arch="aarch64-unknown-linux-gnu", github_contexts=github_contexts)
armv7: buildTestAya[logger=logger](target_arch="armv7-unknown-linux-gnueabi", github_contexts=github_contexts)
powerpc64le: buildTestAya[logger=logger](target_arch="powerpc64le-unknown-linux-gnu", github_contexts=github_contexts)
riscv64gc: buildTestAya[logger=logger](target_arch="riscv64gc-unknown-linux-gnu", github_contexts=github_contexts)
s390x: buildTestAya[logger=logger](target_arch="s390x-unknown-linux-gnu", github_contexts=github_contexts)
x86_64: buildTestAya[logger=logger](target_arch="x86_64-unknown-linux-gnu", github_contexts=github_contexts)
aarch64Finished: concentrateBlock<void>[concentrator = finishConcentrator]()
armv7Finished: concentrateBlock<void>[concentrator = finishConcentrator]()
powerpc64leFinished: concentrateBlock<void>[concentrator = finishConcentrator]()
riscv64gcFinished: concentrateBlock<void>[concentrator = finishConcentrator]()
s390xFinished: concentrateBlock<void>[concentrator = finishConcentrator]()
x86_64Finished: concentrateBlock<void>[concentrator = finishConcentrator]()
Self.trigger -> aarch64.trigger,finished -> aarch64Finished.data
Self.trigger -> armv7.trigger,finished -> armv7Finished.data
Self.trigger -> powerpc64le.trigger,finished -> powerpc64leFinished.data
Self.trigger -> riscv64gc.trigger,finished -> riscv64gcFinished.data
Self.trigger -> s390x.trigger,finished -> s390xFinished.data
Self.trigger -> x86_64.trigger,finished -> x86_64Finished.data
finish: concentrated<void>[concentrator = finishConcentrator]()
finishTrigger: trigger<void>()
Self.trigger -> finish.trigger,data -> finishTrigger.stream,end -> Self.finished
}
#[job(buildTestAya)]
#[generated(true)]
treatment buildTestAya[logger: Logger](
var github_contexts: string,
var target_arch: string
)
input trigger: Block<void>
output finished: Block<void>
output result: Block<Json>
model contexts: JavaScriptEngine()
{
jobResult: githubJobResult[contexts = contexts](
local_context = |null(
),
name = "buildTestAya",
outputs = |map(
[]
)
)
prepareContexts[contexts = contexts](
github_contexts = github_contexts
)
step0: checkout[contexts = contexts, logger = logger]()
step1: RefStablerustToolchain[contexts = contexts, logger = logger](
targets = target_arch
)
step2: rustCache[contexts = contexts, logger = logger]()
step3: RefCargoHackinstallAction[contexts = contexts, logger = logger]()
step4: setupCrossToolchainAction[contexts = contexts, logger = logger](
target = target_arch
)
build: runAction[contexts = contexts, logger = logger](
commands = ${
set -euxo pipefail
cargo hack build --all-targets --feature-powerset \
--exclude aya-ebpf \
--exclude aya-ebpf-bindings \
--exclude aya-log-ebpf \
--exclude integration-ebpf \
--exclude xtask \
--workspace
},
display_name = "Build",
name = "build"
)
test: runAction[contexts = contexts, logger = logger](
commands = ${set -euxo pipefail
cargo hack test --all-targets --feature-powerset \
--exclude aya-ebpf \
--exclude aya-ebpf-bindings \
--exclude aya-log-ebpf \
--exclude integration-ebpf \
--exclude integration-test \
--exclude xtask \
--workspace
},
display_name = "Test",
env = |wrap<StringMap>(
|map(
[|entry(
"RUST_BACKTRACE",
"full"
)]
)
),
name = "test"
)
doctests: runAction[contexts = contexts, logger = logger](
commands = ${
set -euxo pipefail
cargo hack test --doc --feature-powerset \
--exclude aya-ebpf \
--exclude aya-ebpf-bindings \
--exclude aya-log-ebpf \
--exclude init \
--exclude integration-ebpf \
--exclude integration-test \
--exclude xtask \
--workspace
},
display_name = "Doctests",
env = |wrap<StringMap>(
|map(
[|entry(
"RUST_BACKTRACE",
"full"
)]
)
),
name = "doctests"
)
Self.trigger -> prepareContexts.trigger
prepareContexts.ready -> step0.trigger
step0.continue -> step1.trigger
step1.continue -> step2.trigger
step2.continue -> step3.trigger
step3.continue -> step4.trigger
step4.continue -> build.trigger
build.continue -> test.trigger
test.continue -> doctests.trigger
doctests.continue -> jobResult.trigger_release
jobResult.result -> Self.result
jobResult.finished -> Self.finished
jobResult.finished -> step0.post_trigger
jobResult.finished -> step2.post_trigger
}
#[generated(true)]
#[job(buildTestAyaEbpf)]
treatment buildTestAyaEbpf[logger: Logger](
var github_contexts: string
)
input trigger: Block<void>
output finished: Block<void>
output result: Block<Json>
model contexts: JavaScriptEngine()
{
build: runAction[contexts = contexts, logger = logger](
commands = ${{{set -euxo pipefail
cargo +nightly hack build \
--target ${{ matrix.target }} \
-Z build-std=core \
--package aya-ebpf \
--package aya-log-ebpf \
--feature-powerset
}}},
display_name = "Build",
env = |wrap<StringMap>(
|map(
[|entry(
"CARGO_CFG_BPF_TARGET_ARCH",
"${{ matrix.bpf_target_arch }}"
)]
)
),
name = "build"
)
jobResult: githubJobResult[contexts = contexts](
local_context = |null(
),
name = "buildTestAyaEbpf",
outputs = |map(
[]
)
)
prepareContexts[contexts = contexts](
github_contexts = github_contexts
)
step0: checkout[contexts = contexts, logger = logger]()
step1: RefNightlyrustToolchain[contexts = contexts, logger = logger](
components = "rust-src"
)
step2: RefStablerustToolchain[contexts = contexts, logger = logger]()
step3: rustCache[contexts = contexts, logger = logger]()
step4: runAction[contexts = contexts, logger = logger](
commands = "cargo install --git https://github.com/aya-rs/bpf-linker.git bpf-linker --features llvm-21",
name = "step4"
)
step5: RefCargoHackinstallAction[contexts = contexts, logger = logger]()
test: runAction[contexts = contexts, logger = logger](
commands = ${set -euxo pipefail
cargo hack test \
--doc \
--package aya-ebpf \
--package aya-log-ebpf \
--feature-powerset
},
display_name = "Test",
env = |wrap<StringMap>(
|map(
[|entry(
"RUST_BACKTRACE",
"full"
),
|entry(
"CARGO_CFG_BPF_TARGET_ARCH",
"${{ matrix.bpf_target_arch }}"
)]
)
),
name = "test"
)
Self.trigger -> prepareContexts.trigger
prepareContexts.ready -> step0.trigger
step0.continue -> step1.trigger
step1.continue -> step2.trigger
step2.continue -> step3.trigger
step3.continue -> step4.trigger
step4.continue -> step5.trigger
step5.continue -> build.trigger
build.continue -> test.trigger
test.continue -> jobResult.trigger_release
jobResult.result -> Self.result
jobResult.finished -> Self.finished
jobResult.finished -> step0.post_trigger
jobResult.finished -> step3.post_trigger
}
#[generated(true)]
#[job(buildWorkflowComplete)]
treatment buildWorkflowComplete[logger: Logger](
var github_contexts: string
)
#[job(build-test-aya)]
input need_build_test_aya: Block<Json>
#[job(build-test-aya-ebpf)]
input need_build_test_aya_ebpf: Block<Json>
#[job(lint)]
input need_lint: Block<Json>
#[job(run-integration-test)]
input need_run_integration_test: Block<Json>
input trigger: Block<void>
output finished: Block<void>
output result: Block<Json>
model contexts: JavaScriptEngine()
model readinessConcentrator: Concentrator()
{
includeNeedsBuildTestAya: includeNeeds[contexts = contexts](
from = "build-test-aya"
)
includeNeedsBuildTestAyaEbpf: includeNeeds[contexts = contexts](
from = "build-test-aya-ebpf"
)
includeNeedsBuildTestAyaEbpfReady: concentrateBlock<void>[concentrator = readinessConcentrator]()
includeNeedsBuildTestAyaReady: concentrateBlock<void>[concentrator = readinessConcentrator]()
includeNeedsLint: includeNeeds[contexts = contexts](
from = "lint"
)
includeNeedsLintReady: concentrateBlock<void>[concentrator = readinessConcentrator]()
includeNeedsRunIntegrationTest: includeNeeds[contexts = contexts](
from = "run-integration-test"
)
includeNeedsRunIntegrationTestReady: concentrateBlock<void>[concentrator = readinessConcentrator]()
jobResult: githubJobResult[contexts = contexts](
local_context = |null(
),
name = "buildWorkflowComplete",
outputs = |map(
[]
)
)
prepareContexts[contexts = contexts](
github_contexts = github_contexts
)
readiness: concentrated<void>[concentrator = readinessConcentrator]()
readinessTrigger: trigger<void>()
step0: runAction[contexts = contexts, logger = logger](
commands = "echo 'Build Complete'",
name = "step0"
)
Self.trigger -> prepareContexts.trigger
Self.need_lint -> includeNeedsLint.needs
prepareContexts.ready -> includeNeedsLint.trigger
includeNeedsLint.ready -> includeNeedsLintReady.data
Self.need_build_test_aya -> includeNeedsBuildTestAya.needs
prepareContexts.ready -> includeNeedsBuildTestAya.trigger
includeNeedsBuildTestAya.ready -> includeNeedsBuildTestAyaReady.data
Self.need_build_test_aya_ebpf -> includeNeedsBuildTestAyaEbpf.needs
prepareContexts.ready -> includeNeedsBuildTestAyaEbpf.trigger
includeNeedsBuildTestAyaEbpf.ready -> includeNeedsBuildTestAyaEbpfReady.data
Self.need_run_integration_test -> includeNeedsRunIntegrationTest.needs
prepareContexts.ready -> includeNeedsRunIntegrationTest.trigger
includeNeedsRunIntegrationTest.ready -> includeNeedsRunIntegrationTestReady.data
Self.trigger -> readiness.trigger
readiness.data -> readinessTrigger.stream
readinessTrigger.last -> step0.trigger
step0.continue -> jobResult.trigger_release
jobResult.result -> Self.result
jobResult.finished -> Self.finished
}
#[generated(true)]
#[job(lint)]
treatment lint[logger: Logger](
var github_contexts: string
)
input trigger: Block<void>
output finished: Block<void>
output result: Block<Json>
model contexts: JavaScriptEngine()
{
jobResult: githubJobResult[contexts = contexts](
local_context = |null(
),
name = "lint",
outputs = |map(
[]
)
)
prepareContexts[contexts = contexts](
github_contexts = github_contexts
)
runMiri: runAction[contexts = contexts, logger = logger](
commands = ${set -euxo pipefail
cargo +nightly hack miri test --all-targets --feature-powerset \
--exclude aya-ebpf \
--exclude aya-ebpf-bindings \
--exclude aya-log-ebpf \
--exclude integration-ebpf \
--exclude integration-test \
--workspace
},
display_name = "Run miri",
name = "runMiri"
)
step0: checkout[contexts = contexts, logger = logger]()
step1: RefNightlyrustToolchain[contexts = contexts, logger = logger](
components = "clippy,miri,rustfmt,rust-src"
)
step10: runAction[contexts = contexts, logger = logger](
commands = "cargo xtask public-api",
if = "${{ !(github.event_name == 'schedule' && github.repository == 'aya-rs/aya') }}",
name = "step10"
)
step11: runAction[contexts = contexts, logger = logger](
commands = "cargo xtask public-api --bless",
if = "${{ (github.event_name == 'schedule' && github.repository == 'aya-rs/aya') }}",
name = "step11"
)
step12: createPullRequest[contexts = contexts, logger = logger](
body = ${**Automated changes**
},
branch = "create-pull-request/public-api",
commit_message = "public-api: regenerate",
title = "public-api: regenerate",
token = "${{ secrets.CRABBY_GITHUB_TOKEN }}"
)
step2: RefStablerustToolchain[contexts = contexts, logger = logger]()
step3: rustCache[contexts = contexts, logger = logger]()
step4: RefV2installAction[contexts = contexts, logger = logger](
tool = "cargo-hack,taplo-cli"
)
step5: runAction[contexts = contexts, logger = logger](
commands = "git ls-files -- '*.c' '*.h' | xargs clang-format --dry-run --Werror",
name = "step5"
)
step6: markdownlintCli2Action[contexts = contexts, logger = logger]()
step7: runAction[contexts = contexts, logger = logger](
commands = "taplo fmt --check",
name = "step7"
)
step8: runAction[contexts = contexts, logger = logger](
commands = "cargo +nightly fmt --all -- --check",
name = "step8"
)
step9: runAction[contexts = contexts, logger = logger](
commands = "./clippy.sh",
name = "step9"
)
Self.trigger -> prepareContexts.trigger
prepareContexts.ready -> step0.trigger
step0.continue -> step1.trigger
step1.continue -> step2.trigger
step2.continue -> step3.trigger
step3.continue -> step4.trigger
step4.continue -> step5.trigger
step5.continue -> step6.trigger
step6.continue -> step7.trigger
step7.continue -> step8.trigger
step8.continue -> step9.trigger
step9.continue -> step10.trigger
step10.continue -> step11.trigger
step11.continue -> step12.trigger
step12.continue -> runMiri.trigger
runMiri.continue -> jobResult.trigger_release
jobResult.result -> Self.result
jobResult.finished -> Self.finished
jobResult.finished -> step0.post_trigger
jobResult.finished -> step3.post_trigger
}
#[generated(true)]
#[job(runIntegrationTest)]
treatment runIntegrationTest[logger: Logger](
var github_contexts: string
)
input trigger: Block<void>
output finished: Block<void>
output result: Block<Json>
model contexts: JavaScriptEngine()
{
cleanupStaleKernelsAndModules: runAction[contexts = contexts, logger = logger](
commands = ${set -euxo pipefail
rm -rf test/.tmp/boot test/.tmp/lib
},
display_name = "Cleanup stale kernels and modules",
name = "cleanupStaleKernelsAndModules"
)
downloadDebianKernels: runAction[contexts = contexts, logger = logger](
commands = ".github/scripts/download_kernel_images.sh test/.tmp/debian-kernels/arm64 arm64 6.1 6.12",
display_name = "Download debian kernels",
if = "runner.arch == 'ARM64'",
name = "downloadDebianKernels"
)
downloadDebianKernels_10: runAction[contexts = contexts, logger = logger](
commands = ".github/scripts/download_kernel_images.sh test/.tmp/debian-kernels/amd64 amd64 6.1 6.12",
display_name = "Download debian kernels",
if = "runner.arch == 'X64'",
name = "downloadDebianKernels_10"
)
extractDebianKernels: runAction[contexts = contexts, logger = logger](
commands = ${{set -euxo pipefail
# The wildcard '**/boot/*' extracts kernel images and config.
# The wildcard '**/modules/*' extracts kernel modules.
# Modules are required since not all parts of the kernel we want to
# test are built-in.
find test/.tmp -name '*.deb' -print0 | xargs -t -0 -I {} \
sh -c "dpkg --fsys-tarfile {} | tar -C test/.tmp \
--wildcards --extract '**/boot/*' '**/modules/*' --file -"
}},
display_name = "Extract debian kernels",
name = "extractDebianKernels"
)
installLibLlvm: runAction[contexts = contexts, logger = logger](
commands = ${{{set -euxo pipefail
# Get the partial SHA from Rust nightly.
rustc_sha=$(rustc +nightly --version | grep -oE '[a-f0-9]{7,40}')
# Get the full SHA from GitHub.
rustc_sha=$(curl -sfSL https://api.github.com/repos/rust-lang/rust/commits/$rustc_sha \
--header 'authorization: Bearer ${{ secrets.GITHUB_TOKEN }}' \
--header 'content-type: application/json' \
| jq -r '.sha')
mkdir -p /tmp/rustc-llvm
curl -sfSL https://ci-artifacts.rust-lang.org/rustc-builds/$rustc_sha/rust-dev-nightly-${{ matrix.target }}.tar.xz | \
tar -xJ --strip-components 2 -C /tmp/rustc-llvm
echo /tmp/rustc-llvm/bin >> $GITHUB_PATH
}}},
display_name = "Install libLLVM",
name = "installLibLlvm"
)
installPrerequisites: runAction[contexts = contexts, logger = logger](
commands = ${{set -euxo pipefail
sudo apt update
sudo apt -y install lynx qemu-system-{arm,x86} musl-tools
echo /usr/lib/llvm-15/bin >> $GITHUB_PATH
}},
display_name = "Install prerequisites",
if = "runner.os == 'Linux'",
name = "installPrerequisites"
)
installPrerequisites_2: runAction[contexts = contexts, logger = logger](
commands = ${{set -euxo pipefail
brew update
# https://github.com/actions/setup-python/issues/577
find /usr/local/bin -type l -exec sh -c 'readlink -f "$1" \
| grep -q ^/Library/Frameworks/Python.framework/Versions/' _ {} \; -exec rm -v {} \;
brew install --formula curl dpkg gnu-tar llvm lynx pkg-config qemu
echo $(brew --prefix curl)/bin >> $GITHUB_PATH
echo $(brew --prefix gnu-tar)/libexec/gnubin >> $GITHUB_PATH
echo $(brew --prefix llvm)/bin >> $GITHUB_PATH
brew install filosottile/musl-cross/musl-cross
ln -s "$(brew --prefix musl-cross)/bin/x86_64-linux-musl-gcc" /usr/local/bin/musl-gcc
}},
display_name = "Install prerequisites",
if = "runner.os == 'macOS'",
name = "installPrerequisites_2"
)
jobResult: githubJobResult[contexts = contexts](
local_context = |null(
),
name = "runIntegrationTest",
outputs = |map(
[]
)
)
prepareContexts[contexts = contexts](
github_contexts = github_contexts
)
runLocalIntegrationTests: runAction[contexts = contexts, logger = logger](
commands = "cargo xtask integration-test local",
display_name = "Run local integration tests",
if = "runner.os == 'Linux'",
name = "runLocalIntegrationTests"
)
runVirtualizedIntegrationTests: runAction[contexts = contexts, logger = logger](
commands = ${{{set -euxo pipefail
.github/scripts/find_kernels.py | xargs -t -0 \
cargo xtask integration-test vm --cache-dir test/.tmp \
--github-api-token ${{ secrets.GITHUB_TOKEN }} \
}}},
display_name = "Run virtualized integration tests",
if = "runner.os == 'Linux'",
name = "runVirtualizedIntegrationTests"
)
runVirtualizedIntegrationTests_15: runAction[contexts = contexts, logger = logger](
commands = ${{{set -euxo pipefail
.github/scripts/find_kernels.py | xargs -t -0 \
cargo xtask integration-test vm --cache-dir test/.tmp \
--github-api-token ${{ secrets.GITHUB_TOKEN }} \
}}},
display_name = "Run virtualized integration tests",
env = |wrap<StringMap>(
|map(
[|entry(
"CARGO_TARGET_X86_64_UNKNOWN_LINUX_MUSL_LINKER",
"x86_64-linux-musl-gcc"
)]
)
),
if = "runner.os == 'macOS'",
name = "runVirtualizedIntegrationTests_15"
)
step0: checkout[contexts = contexts, logger = logger](
submodules = "recursive"
)
step3: RefNightlyrustToolchain[contexts = contexts, logger = logger](
components = "rust-src"
)
step4: RefStablerustToolchain[contexts = contexts, logger = logger](
targets = "aarch64-unknown-linux-musl,x86_64-unknown-linux-musl"
)
step5: rustCache[contexts = contexts, logger = logger]()
step7: runAction[contexts = contexts, logger = logger](
commands = "cargo install --git https://github.com/aya-rs/bpf-linker.git bpf-linker --no-default-features --features llvm-21 --force",
name = "step7"
)
step8: cache[contexts = contexts, logger = logger](
key = "${{ runner.arch }}-${{ runner.os }}-test-cache",
path = "test/.tmp"
)
Self.trigger -> prepareContexts.trigger
prepareContexts.ready -> step0.trigger
step0.continue -> installPrerequisites.trigger
installPrerequisites.continue -> installPrerequisites_2.trigger
installPrerequisites_2.continue -> step3.trigger
step3.continue -> step4.trigger
step4.continue -> step5.trigger
step5.continue -> installLibLlvm.trigger
installLibLlvm.continue -> step7.trigger
step7.continue -> step8.trigger
step8.continue -> downloadDebianKernels.trigger
downloadDebianKernels.continue -> downloadDebianKernels_10.trigger
downloadDebianKernels_10.continue -> cleanupStaleKernelsAndModules.trigger
cleanupStaleKernelsAndModules.continue -> extractDebianKernels.trigger
extractDebianKernels.continue -> runLocalIntegrationTests.trigger
runLocalIntegrationTests.continue -> runVirtualizedIntegrationTests.trigger
runVirtualizedIntegrationTests.continue -> runVirtualizedIntegrationTests_15.trigger
runVirtualizedIntegrationTests_15.continue -> jobResult.trigger_release
jobResult.result -> Self.result
jobResult.finished -> Self.finished
jobResult.finished -> step0.post_trigger
jobResult.finished -> step5.post_trigger
jobResult.finished -> step8.post_trigger
}

@ -0,0 +1,99 @@
use root/github/actions/third/actions/checkout/ref_v5::checkout
use root/github/actions/third/dtolnay/rust_toolchain/ref_nightly::rustToolchain
use root/github/actions/third/peter_evans/create_pull_request/ref_v7::createPullRequest
use root/github/actions/third/swatinem/rust_cache/ref_v2::rustCache
use cicd/services/github::githubJobResult
use cicd/services/github::prepareContexts
use cicd/services/github::runAction
use javascript::JavaScriptEngine
use json/value::|null
use json::Json
use log/logger::Logger
use std/data/string_map::|map
// Mélodium treatment mirroring the GitHub Actions `codegen` job: checks out
// the sources (with submodules), installs a Rust toolchain plus apt
// cross-compilation headers, regenerates bindings via `cargo xtask codegen`,
// reformats and re-blesses the public API, then opens a pull request that
// records the libbpf submodule commit it generated against.
//
// Parameters:
//   logger          — logging model shared by every step.
//   github_contexts — serialized GitHub contexts handed to `prepareContexts`,
//                     which exposes them through the `contexts` JS engine.
// Inputs/outputs:
//   trigger  — starts the job.
//   finished — emitted once `jobResult` has completed (after post steps fire).
//   result   — job outcome as Json, produced by `githubJobResult`.
#[job(codegen)]
#[generated(true)]
treatment codegen[logger: Logger](
var github_contexts: string
)
input trigger: Block<void>
output finished: Block<void>
output result: Block<Json>
// JavaScript engine evaluating GitHub expression contexts for all steps.
model contexts: JavaScriptEngine()
{
// Installs libelf and the multi-arch libc headers (arm64, armel, loong64,
// riscv64, ppc64el, s390x, mips) needed by the codegen step — the Mélodium
// equivalent of the workflow's "Install headers" run step.
installHeaders: runAction[contexts = contexts, logger = logger](
commands = ${{set -euxo pipefail
sudo apt -y update
sudo apt -y install libelf-dev libc6-dev libc6-dev-{arm64,armel,loong64,riscv64,ppc64el,s390x,mips}-cross
}},
display_name = "Install headers",
name = "installHeaders"
)
// Aggregates the job outcome; no local context and no declared step outputs.
jobResult: githubJobResult[contexts = contexts](
local_context = |null(
),
name = "codegen",
outputs = |map(
[]
)
)
// Captures the libbpf submodule HEAD SHA into `$GITHUB_OUTPUT` so the
// pull-request body and commit message can reference
// `steps.libbpf.outputs.sha`.
libbpf: runAction[contexts = contexts, logger = logger](
commands = "echo \"sha=$(git rev-parse HEAD)\" >> \"$GITHUB_OUTPUT\"",
id = "libbpf",
name = "libbpf",
working_directory = "xtask/libbpf"
)
prepareContexts[contexts = contexts](
github_contexts = github_contexts
)
// Checkout with submodules (libbpf lives under xtask/libbpf).
step0: checkout[contexts = contexts, logger = logger](
submodules = "recursive"
)
// Nightly toolchain (per the `ref_nightly` import) with rustfmt and clippy.
step1: rustToolchain[contexts = contexts, logger = logger](
components = "rustfmt, clippy"
)
step2: rustCache[contexts = contexts, logger = logger]()
step4: runAction[contexts = contexts, logger = logger](
commands = "cargo xtask codegen",
name = "step4"
)
step5: runAction[contexts = contexts, logger = logger](
commands = "cargo fmt --all",
name = "step5"
)
step6: runAction[contexts = contexts, logger = logger](
commands = "cargo xtask public-api --bless",
name = "step6"
)
// Opens the regeneration PR. Uses the CRABBY_GITHUB_TOKEN secret — an
// elevated token is the canonical workaround for actions not being able to
// trigger other actions (same rationale as the lint job's PR step).
step8: createPullRequest[contexts = contexts, logger = logger](
body = ${{{**Automated changes**
libbpf commit: ${{ steps.libbpf.outputs.sha}}
}}},
branch = "create-pull-request/codegen",
commit_message = ${{{aya-obj, aya-ebpf-bindings: regenerate
libbpf commit: ${{ steps.libbpf.outputs.sha }}
}}},
title = "aya-obj, aya-ebpf-bindings: regenerate",
token = "${{ secrets.CRABBY_GITHUB_TOKEN }}"
)
// Sequential chain: contexts -> checkout -> toolchain -> cache -> headers
// -> codegen -> fmt -> public-api bless -> libbpf sha -> create PR -> result.
Self.trigger -> prepareContexts.trigger
prepareContexts.ready -> step0.trigger
step0.continue -> step1.trigger
step1.continue -> step2.trigger
step2.continue -> installHeaders.trigger
installHeaders.continue -> step4.trigger
step4.continue -> step5.trigger
step5.continue -> step6.trigger
step6.continue -> libbpf.trigger
libbpf.continue -> step8.trigger
step8.continue -> jobResult.trigger_release
jobResult.result -> Self.result
jobResult.finished -> Self.finished
// Post phase: checkout and cache run their post steps once the job result
// is known (mirrors GitHub Actions' post-step execution order).
jobResult.finished -> step0.post_trigger
jobResult.finished -> step2.post_trigger
}

@ -0,0 +1,275 @@
use root/github/ci::buildTestAyaAll
use root/github/ci::buildTestAya
use root/github/ci::buildTestAyaEbpf
use root/github/ci::buildWorkflowComplete
use root/github/ci::lint
use root/github/ci::runIntegrationTest
use root/github/gen::codegen
use cicd/logging::manageLogs
use json/value::|null
use json::Json
use json::|to_json
use log/logger::Logger
use std/engine/util::startup
use std/flow/concentrate::Concentrator
use std/flow/concentrate::concentrateBlock
use std/flow/concentrate::concentrated
use std/flow::emit
use std/flow::trigger
use std/ops/option::|unwrap_or
/*
#[generated(true)]
#[github_workflow(/.github/workflows/ci.yml)]
treatment ciUbuntuLatest[logger: Logger](
var github_contexts: string
)
input need_run_integration_test: Block<Json>
input trigger: Block<void>
output finished: Block<void>
model finishConcentrator: Concentrator()
{
buildTestAya[logger = logger](
github_contexts = github_contexts
)
buildTestAyaEbpf[logger = logger](
github_contexts = github_contexts
)
buildTestAyaEbpfFinished: concentrateBlock<void>[concentrator = finishConcentrator]()
buildTestAyaFinished: concentrateBlock<void>[concentrator = finishConcentrator]()
buildWorkflowComplete[logger = logger](
github_contexts = github_contexts
)
buildWorkflowCompleteFinished: concentrateBlock<void>[concentrator = finishConcentrator]()
finish: concentrated<void>[concentrator = finishConcentrator]()
finishTrigger: trigger<void>()
lint[logger = logger](
github_contexts = github_contexts
)
lintFinished: concentrateBlock<void>[concentrator = finishConcentrator]()
runIntegrationTest[logger = logger](
github_contexts = github_contexts
)
runIntegrationTestFinished: concentrateBlock<void>[concentrator = finishConcentrator]()
Self.trigger -> finish.trigger
finish.data -> finishTrigger.stream
Self.trigger -> lint.trigger
lint.finished -> lintFinished.data
Self.trigger -> buildTestAya.trigger
buildTestAya.finished -> buildTestAyaFinished.data
Self.trigger -> buildTestAyaEbpf.trigger
buildTestAyaEbpf.finished -> buildTestAyaEbpfFinished.data
Self.trigger -> runIntegrationTest.trigger
runIntegrationTest.finished -> runIntegrationTestFinished.data
Self.trigger -> buildWorkflowComplete.trigger
buildWorkflowComplete.finished -> buildWorkflowCompleteFinished.data
lint.result -> buildWorkflowComplete.need_lint
buildTestAya.result -> buildWorkflowComplete.need_build_test_aya
buildTestAyaEbpf.result -> buildWorkflowComplete.need_build_test_aya_ebpf
Self.need_run_integration_test -> buildWorkflowComplete.need_run_integration_test
finishTrigger.end -> Self.finished
}
treatment ciUbuntuLatestEntrypoint(
var github_contexts: string,
var logs_directory: string,
var need_run_integration_test: string
)
model logger: Logger()
{
ciUbuntuLatest[logger = logger](
github_contexts = github_contexts
)
emitRunIntegrationTest: emit<Json>(
value = |unwrap_or<Json>(
|to_json(
need_run_integration_test
),
|null(
)
)
)
manageLogs[logger = logger](
output_directory = logs_directory
)
startup()
startup.trigger -> ciUbuntuLatest.trigger
ciUbuntuLatest.finished -> manageLogs.stop
startup.trigger -> emitRunIntegrationTest.trigger
emitRunIntegrationTest.emit -> ciUbuntuLatest.need_run_integration_test
}
*/
// Standalone entrypoint for the `buildTestAyaAll` job: triggers it at engine
// startup and stops log collection once the job reports finished.
//
// Parameters:
//   github_contexts — serialized GitHub contexts forwarded to the job.
//   logs_directory  — directory where `manageLogs` writes its output.
treatment buildTestAyaEntrypoint(
var github_contexts: string,
var logs_directory: string
)
model logger: Logger()
{
buildTestAyaAll[logger = logger](
github_contexts = github_contexts
)
manageLogs[logger = logger](
output_directory = logs_directory
)
startup()
startup.trigger -> buildTestAyaAll.trigger
buildTestAyaAll.finished -> manageLogs.stop
}
// Generated Mélodium equivalent of the ci.yml workflow for one entry of the
// OS matrix: lint, buildTestAya, buildTestAyaEbpf, runIntegrationTest and
// buildWorkflowComplete are all triggered from `Self.trigger` and their
// `finished` blocks are concentrated so `finished` fires only once every
// job is done.
//
// The `need_lint` / `need_build_test_aya` / `need_build_test_aya_ebpf`
// inputs carry results produced outside this treatment (presumably from
// another matrix entry or a prior run — TODO confirm with the generator),
// while the locally-run `runIntegrationTest.result` feeds
// `need_run_integration_test` directly.
//
// NOTE(review): the locally-run lint/buildTestAya/buildTestAyaEbpf results
// are NOT wired into buildWorkflowComplete here — only their `finished`
// signals are concentrated; the external `need_*` inputs supply the results
// instead. Confirm this asymmetry is intended (the commented-out
// ciUbuntuLatest variant wires it the opposite way).
#[generated(true)]
#[github_workflow(/.github/workflows/ci.yml)]
treatment cimatrixOs[logger: Logger](
var github_contexts: string
)
input need_build_test_aya: Block<Json>
input need_build_test_aya_ebpf: Block<Json>
input need_lint: Block<Json>
input trigger: Block<void>
output finished: Block<void>
// Concentrator collecting every job's `finished` block into one signal.
model finishConcentrator: Concentrator()
{
buildTestAya[logger = logger](
github_contexts = github_contexts
)
buildTestAyaEbpf[logger = logger](
github_contexts = github_contexts
)
buildTestAyaEbpfFinished: concentrateBlock<void>[concentrator = finishConcentrator]()
buildTestAyaFinished: concentrateBlock<void>[concentrator = finishConcentrator]()
buildWorkflowComplete[logger = logger](
github_contexts = github_contexts
)
buildWorkflowCompleteFinished: concentrateBlock<void>[concentrator = finishConcentrator]()
finish: concentrated<void>[concentrator = finishConcentrator]()
finishTrigger: trigger<void>()
lint[logger = logger](
github_contexts = github_contexts
)
lintFinished: concentrateBlock<void>[concentrator = finishConcentrator]()
runIntegrationTest[logger = logger](
github_contexts = github_contexts
)
runIntegrationTestFinished: concentrateBlock<void>[concentrator = finishConcentrator]()
// All jobs start in parallel from the single trigger; each one's
// `finished` is folded into the concentrator.
Self.trigger -> finish.trigger
finish.data -> finishTrigger.stream
Self.trigger -> lint.trigger
lint.finished -> lintFinished.data
Self.trigger -> buildTestAya.trigger
buildTestAya.finished -> buildTestAyaFinished.data
Self.trigger -> buildTestAyaEbpf.trigger
buildTestAyaEbpf.finished -> buildTestAyaEbpfFinished.data
Self.trigger -> runIntegrationTest.trigger
runIntegrationTest.finished -> runIntegrationTestFinished.data
Self.trigger -> buildWorkflowComplete.trigger
buildWorkflowComplete.finished -> buildWorkflowCompleteFinished.data
// Dependency results for the workflow-complete gate: three come from the
// external inputs, the integration-test result comes from the local run.
Self.need_lint -> buildWorkflowComplete.need_lint
Self.need_build_test_aya -> buildWorkflowComplete.need_build_test_aya
Self.need_build_test_aya_ebpf -> buildWorkflowComplete.need_build_test_aya_ebpf
runIntegrationTest.result -> buildWorkflowComplete.need_run_integration_test
finishTrigger.end -> Self.finished
}
// Entrypoint for `cimatrixOs`: at engine startup, each `need_*` string
// parameter is converted to Json (falling back to Json null when `|to_json`
// yields nothing — presumably it returns an option, hence the `|unwrap_or`)
// and emitted into the corresponding job input; log collection stops when
// the workflow finishes.
//
// Parameters:
//   github_contexts          — serialized GitHub contexts for the workflow.
//   logs_directory           — directory where `manageLogs` writes output.
//   need_build_test_aya      — JSON-encoded result of the build-test-aya job.
//   need_build_test_aya_ebpf — JSON-encoded result of the ebpf build job.
//   need_lint                — JSON-encoded result of the lint job.
treatment cimatrixOsEntrypoint(
var github_contexts: string,
var logs_directory: string,
var need_build_test_aya: string,
var need_build_test_aya_ebpf: string,
var need_lint: string
)
model logger: Logger()
{
cimatrixOs[logger = logger](
github_contexts = github_contexts
)
// Each emitter turns one string parameter into a Block<Json>, defaulting
// to null when the string does not parse as JSON.
emitBuildTestAya: emit<Json>(
value = |unwrap_or<Json>(
|to_json(
need_build_test_aya
),
|null(
)
)
)
emitBuildTestAyaEbpf: emit<Json>(
value = |unwrap_or<Json>(
|to_json(
need_build_test_aya_ebpf
),
|null(
)
)
)
emitLint: emit<Json>(
value = |unwrap_or<Json>(
|to_json(
need_lint
),
|null(
)
)
)
manageLogs[logger = logger](
output_directory = logs_directory
)
startup()
startup.trigger -> cimatrixOs.trigger
cimatrixOs.finished -> manageLogs.stop
startup.trigger -> emitLint.trigger
emitLint.emit -> cimatrixOs.need_lint
startup.trigger -> emitBuildTestAya.trigger
emitBuildTestAya.emit -> cimatrixOs.need_build_test_aya
startup.trigger -> emitBuildTestAyaEbpf.trigger
emitBuildTestAyaEbpf.emit -> cimatrixOs.need_build_test_aya_ebpf
}
// Generated Mélodium equivalent of the gen.yml workflow: runs the single
// `codegen` job and signals `finished` when it completes. The concentrator
// pattern is kept even though only one job feeds it, matching the structure
// the generator emits for multi-job workflows.
//
// Parameters:
//   logger          — logging model forwarded to the job.
//   github_contexts — serialized GitHub contexts forwarded to the job.
#[github_workflow(/.github/workflows/gen.yml)]
#[generated(true)]
treatment gen[logger: Logger](
var github_contexts: string
)
input trigger: Block<void>
output finished: Block<void>
model finishConcentrator: Concentrator()
{
codegen[logger = logger](
github_contexts = github_contexts
)
codegenFinished: concentrateBlock<void>[concentrator = finishConcentrator]()
finish: concentrated<void>[concentrator = finishConcentrator]()
finishTrigger: trigger<void>()
Self.trigger -> finish.trigger
finish.data -> finishTrigger.stream
Self.trigger -> codegen.trigger
codegen.finished -> codegenFinished.data
// `finished` fires once the concentrated stream ends, i.e. after codegen.
finishTrigger.end -> Self.finished
}
// Standalone entrypoint for the `gen` workflow: triggers it at engine
// startup and stops log collection once it reports finished.
//
// Parameters:
//   github_contexts — serialized GitHub contexts forwarded to the workflow.
//   logs_directory  — directory where `manageLogs` writes its output.
treatment genEntrypoint(
var github_contexts: string,
var logs_directory: string
)
model logger: Logger()
{
gen[logger = logger](
github_contexts = github_contexts
)
manageLogs[logger = logger](
output_directory = logs_directory
)
startup()
startup.trigger -> gen.trigger
gen.finished -> manageLogs.stop
}
Loading…
Cancel
Save