diff --git a/.cargo/config b/.cargo/config.toml similarity index 100% rename from .cargo/config rename to .cargo/config.toml diff --git a/.github/workflows/gen.yml b/.github/workflows/gen.yml index cf5939df..ac271be7 100644 --- a/.github/workflows/gen.yml +++ b/.github/workflows/gen.yml @@ -9,11 +9,6 @@ jobs: steps: - uses: actions/checkout@v3 - - uses: actions/checkout@v3 - with: - repository: libbpf/libbpf - path: libbpf - - name: libbpf-version working-directory: libbpf run: echo "LIBBPF_SHA=$(git rev-parse HEAD)" >> $GITHUB_ENV @@ -32,7 +27,7 @@ jobs: - name: Run codegen run: | - cargo xtask codegen --libbpf-dir ./libbpf + cargo xtask codegen - name: Check for changes run: | diff --git a/.github/workflows/integration-tests.yml b/.github/workflows/integration-tests.yml index e3e21f81..11674142 100644 --- a/.github/workflows/integration-tests.yml +++ b/.github/workflows/integration-tests.yml @@ -17,11 +17,9 @@ jobs: runs-on: macos-latest steps: - - uses: actions/checkout@v3 - uses: actions/checkout@v3 with: - repository: libbpf/libbpf - path: libbpf + submodules: recursive - name: Install Pre-requisites run: | @@ -40,4 +38,4 @@ jobs: key: tmp-files-${{ hashFiles('test/run.sh') }} - name: Run integration tests - run: test/run.sh ./libbpf + run: test/run.sh diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 713f36fa..fae57e42 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -29,7 +29,7 @@ jobs: run: cargo fmt --all -- --check - name: Run clippy - run: cargo clippy --all-targets --workspace --exclude integration-test -- --deny warnings + run: cargo clippy --all-targets --workspace -- --deny warnings - name: Run miri run: cargo miri test --all-targets diff --git a/.gitignore b/.gitignore index 0248fbf2..38ed638e 100644 --- a/.gitignore +++ b/.gitignore @@ -1,6 +1,5 @@ Cargo.lock target/ -libbpf/ .vscode/ !.vscode/settings.json site/ diff --git a/.gitmodules b/.gitmodules new file mode 100644 index 00000000..e606d2b6 --- /dev/null +++ b/.gitmodules @@ -0,0 +1,3 @@ +[submodule "libbpf"] + path = libbpf + url = https://github.com/libbpf/libbpf diff --git a/.vscode/settings.json b/.vscode/settings.json index 07c919f9..f922a6df 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -1,4 +1,4 @@ { - "rust-analyzer.checkOnSave.allTargets": false, - "rust-analyzer.checkOnSave.command": "clippy" + "rust-analyzer.check.allTargets": true, + "rust-analyzer.check.command": "clippy" } diff --git a/aya/src/util.rs b/aya/src/util.rs index aaca0f5f..fb1026af 100644 --- a/aya/src/util.rs +++ b/aya/src/util.rs @@ -299,7 +299,7 @@ macro_rules! 
include_bytes_aligned { pub bytes: Bytes, } - static ALIGNED: &Aligned<[u8]> = &Aligned { + const ALIGNED: &Aligned<[u8]> = &Aligned { _align: [], bytes: *include_bytes!($path), }; diff --git a/bpf/.cargo/config.toml b/bpf/.cargo/config.toml deleted file mode 100644 index 5d7e5915..00000000 --- a/bpf/.cargo/config.toml +++ /dev/null @@ -1,6 +0,0 @@ -[build] -target-dir = "../target" -target = "bpfel-unknown-none" - -[unstable] -build-std = ["core"] \ No newline at end of file diff --git a/libbpf b/libbpf new file mode 160000 index 00000000..a2258003 --- /dev/null +++ b/libbpf @@ -0,0 +1 @@ +Subproject commit a2258003f21d9d52afd48aa64787b65ef80bd355 diff --git a/test/README.md b/test/README.md index 09a7c49d..9b4dedf4 100644 --- a/test/README.md +++ b/test/README.md @@ -30,13 +30,13 @@ From the root of this repository: ### Native ``` -cargo xtask integration-test --libbpf-dir /path/to/libbpf +cargo xtask integration-test ``` ### Virtualized ``` -./test/run.sh /path/to/libbpf +./test/run.sh ``` ### Writing an integration test @@ -44,10 +44,11 @@ cargo xtask integration-test --libbpf-dir /path/to/libbpf Tests should follow these guidelines: - Rust eBPF code should live in `integration-ebpf/${NAME}.rs` and included in - `integration-ebpf/Cargo.toml`. -- C eBPF code should live in `integration-ebpf/src/bpf/${NAME}.bpf.c`. It's automatically compiled - and made available as `${OUT_DIR}/${NAME}.bpf.o`. -- Any bytecode should be included in the integration test binary using `include_bytes_aligned!`. + `integration-ebpf/Cargo.toml` and `integration-test/src/lib.rs` using + `include_bytes_aligned!`. +- C eBPF code should live in `integration-test/bpf/${NAME}.bpf.c`. It should be + added to the list of files in `integration-test/build.rs` and the list of + constants in `integration-test/src/lib.rs` using `include_bytes_aligned!`. - Tests should be added to `integration-test/tests`. - You may add a new module, or use an existing one. 
 - Test functions should not return `anyhow::Result<()>` since this produces errors without stack
diff --git a/test/integration-ebpf/.cargo/config.toml b/test/integration-ebpf/.cargo/config.toml
deleted file mode 100644
index d89a98b0..00000000
--- a/test/integration-ebpf/.cargo/config.toml
+++ /dev/null
@@ -1,6 +0,0 @@
-[build]
-target-dir = "../../target"
-target = "bpfel-unknown-none"
-
-[unstable]
-build-std = ["core"]
diff --git a/test/integration-ebpf/Cargo.toml b/test/integration-ebpf/Cargo.toml
index 73edf5fd..ff35ddb7 100644
--- a/test/integration-ebpf/Cargo.toml
+++ b/test/integration-ebpf/Cargo.toml
@@ -34,4 +34,4 @@ path = "src/relocations.rs"
 
 [[bin]]
 name = "bpf_probe_read"
-path = "src/bpf_probe_read.rs"
\ No newline at end of file
+path = "src/bpf_probe_read.rs"
diff --git a/test/integration-ebpf/rust-toolchain.toml b/test/integration-ebpf/rust-toolchain.toml
deleted file mode 100644
index c046a094..00000000
--- a/test/integration-ebpf/rust-toolchain.toml
+++ /dev/null
@@ -1,2 +0,0 @@
-[toolchain]
-channel="nightly"
diff --git a/test/integration-ebpf/rustfmt.toml b/test/integration-ebpf/rustfmt.toml
deleted file mode 120000
index 760eb840..00000000
--- a/test/integration-ebpf/rustfmt.toml
+++ /dev/null
@@ -1 +0,0 @@
-../../rustfmt.toml
\ No newline at end of file
diff --git a/test/integration-test/Cargo.toml b/test/integration-test/Cargo.toml
index a4363549..b4d1dd10 100644
--- a/test/integration-test/Cargo.toml
+++ b/test/integration-test/Cargo.toml
@@ -18,5 +18,7 @@ object = { version = "0.31", default-features = false, features = [
   "elf",
 ] }
 rbpf = "0.2.0"
-tempfile = "3.3.0"
 tokio = { version = "1.24", default-features = false, features = ["time"] }
+
+[build-dependencies]
+cargo_metadata = "0.15.4"
diff --git a/test/integration-ebpf/src/bpf/ext.bpf.c b/test/integration-test/bpf/ext.bpf.c
similarity index 100%
rename from test/integration-ebpf/src/bpf/ext.bpf.c
rename to test/integration-test/bpf/ext.bpf.c
diff --git a/test/integration-ebpf/src/bpf/main.bpf.c b/test/integration-test/bpf/main.bpf.c
similarity index 100%
rename from test/integration-ebpf/src/bpf/main.bpf.c
rename to test/integration-test/bpf/main.bpf.c
diff --git a/test/integration-ebpf/src/bpf/multimap-btf.bpf.c b/test/integration-test/bpf/multimap-btf.bpf.c
similarity index 100%
rename from test/integration-ebpf/src/bpf/multimap-btf.bpf.c
rename to test/integration-test/bpf/multimap-btf.bpf.c
diff --git a/test/integration-ebpf/src/bpf/text_64_64_reloc.c b/test/integration-test/bpf/text_64_64_reloc.c
similarity index 100%
rename from test/integration-ebpf/src/bpf/text_64_64_reloc.c
rename to test/integration-test/bpf/text_64_64_reloc.c
diff --git a/test/integration-test/build.rs b/test/integration-test/build.rs
new file mode 100644
index 00000000..c36d20a4
--- /dev/null
+++ b/test/integration-test/build.rs
@@ -0,0 +1,196 @@
+use std::{
+    env,
+    ffi::OsString,
+    fmt::Write as _,
+    fs,
+    io::BufReader,
+    path::PathBuf,
+    process::{Child, Command, Stdio},
+};
+
+use cargo_metadata::{
+    Artifact, CompilerMessage, Message, Metadata, MetadataCommand, Package, Target,
+};
+
+fn main() {
+    const AYA_BUILD_INTEGRATION_BPF: &str = "AYA_BUILD_INTEGRATION_BPF";
+
+    println!("cargo:rerun-if-env-changed={}", AYA_BUILD_INTEGRATION_BPF);
+
+    let build_integration_bpf = match env::var_os(AYA_BUILD_INTEGRATION_BPF) {
+        None => false,
+        Some(s) => {
+            let s = s.to_str().unwrap();
+            s.parse::<bool>().unwrap()
+        }
+    };
+
+    let manifest_dir = env::var_os("CARGO_MANIFEST_DIR").unwrap();
+    let manifest_dir = PathBuf::from(manifest_dir);
+    let out_dir = env::var_os("OUT_DIR").unwrap();
+    let out_dir = PathBuf::from(out_dir);
+
+    let endian = env::var_os("CARGO_CFG_TARGET_ENDIAN").unwrap();
+    let target = if endian == "big" {
+        "bpfeb"
+    } else if endian == "little" {
+        "bpfel"
+    } else {
+        panic!("unsupported endian={:?}", endian)
+    };
+
+    const C_BPF_PROBES: &[(&str, &str)] = &[
+        ("ext.bpf.c", "ext.bpf.o"),
+        ("main.bpf.c", "main.bpf.o"),
+        ("multimap-btf.bpf.c", "multimap-btf.bpf.o"),
+        ("text_64_64_reloc.c", "text_64_64_reloc.o"),
+    ];
+
+    let c_bpf_probes = C_BPF_PROBES
+        .iter()
+        .map(|(src, dst)| (src, out_dir.join(dst)));
+
+    if build_integration_bpf {
+        let libbpf_dir = manifest_dir
+            .parent()
+            .unwrap()
+            .parent()
+            .unwrap()
+            .join("libbpf");
+
+        let libbpf_headers_dir = out_dir.join("libbpf_headers");
+
+        let mut includedir = OsString::new();
+        includedir.push("INCLUDEDIR=");
+        includedir.push(&libbpf_headers_dir);
+
+        let mut cmd = Command::new("make");
+        cmd.arg("-C")
+            .arg(libbpf_dir.join("src"))
+            .arg(includedir)
+            .arg("install_headers");
+        let status = cmd
+            .status()
+            .unwrap_or_else(|err| panic!("failed to run {cmd:?}: {err}"));
+        match status.code() {
+            Some(code) => match code {
+                0 => {}
+                code => panic!("{cmd:?} exited with code {code}"),
+            },
+            None => panic!("{cmd:?} terminated by signal"),
+        }
+
+        let bpf_dir = manifest_dir.join("bpf");
+
+        let mut target_arch = OsString::new();
+        target_arch.push("-D__TARGET_ARCH_");
+
+        let arch = env::var_os("CARGO_CFG_TARGET_ARCH").unwrap();
+        if arch == "x86_64" {
+            target_arch.push("x86");
+        } else if arch == "aarch64" {
+            target_arch.push("arm64");
+        } else {
+            target_arch.push(arch);
+        };
+
+        for (src, dst) in c_bpf_probes {
+            let src = bpf_dir.join(src);
+            let mut cmd = Command::new("clang");
+            cmd.arg("-I")
+                .arg(&libbpf_headers_dir)
+                .args(["-g", "-O2", "-target", target, "-c"])
+                .arg(&target_arch)
+                .arg(src)
+                .arg("-o")
+                .arg(dst);
+            let status = cmd
+                .status()
+                .unwrap_or_else(|err| panic!("failed to run {cmd:?}: {err}"));
+            match status.code() {
+                Some(code) => match code {
+                    0 => {}
+                    code => panic!("{cmd:?} exited with code {code}"),
+                },
+                None => panic!("{cmd:?} terminated by signal"),
+            }
+        }
+
+        let ebpf_dir = manifest_dir.parent().unwrap().join("integration-ebpf");
+        let target = format!("{target}-unknown-none");
+
+        let mut cmd = Command::new("cargo");
+        cmd.current_dir(&ebpf_dir).args([
+            "build",
+            "-Z",
+            "build-std=core",
+            "--release",
+            "--message-format=json",
+            "--target",
+            &target,
+        ]);
+        let mut child = cmd
+            .stdout(Stdio::piped())
+            .spawn()
+            .unwrap_or_else(|err| panic!("failed to spawn {cmd:?}: {err}"));
+        let Child { stdout, .. } = &mut child;
+        let stdout = stdout.take().unwrap();
+        let reader = BufReader::new(stdout);
+        let mut executables = Vec::new();
+        let mut compiler_messages = String::new();
+        for message in Message::parse_stream(reader) {
+            #[allow(clippy::collapsible_match)]
+            match message.expect("valid JSON") {
+                Message::CompilerArtifact(Artifact {
+                    executable,
+                    target: Target { name, .. },
+                    ..
+                }) => {
+                    if let Some(executable) = executable {
+                        executables.push((name, executable.into_std_path_buf()));
+                    }
+                }
+                Message::CompilerMessage(CompilerMessage { message, .. }) => {
+                    writeln!(&mut compiler_messages, "{message}").unwrap()
+                }
+                _ => {}
+            }
+        }
+
+        let status = child
+            .wait()
+            .unwrap_or_else(|err| panic!("failed to wait for {cmd:?}: {err}"));
+
+        match status.code() {
+            Some(code) => match code {
+                0 => {}
+                code => panic!("{cmd:?} exited with status code {code}:\n{compiler_messages}"),
+            },
+            None => panic!("{cmd:?} terminated by signal"),
+        }
+
+        for (name, binary) in executables {
+            let dst = out_dir.join(name);
+            let _: u64 = fs::copy(&binary, &dst)
+                .unwrap_or_else(|err| panic!("failed to copy {binary:?} to {dst:?}: {err}"));
+        }
+    } else {
+        for (_src, dst) in c_bpf_probes {
+            fs::write(&dst, []).unwrap_or_else(|err| panic!("failed to create {dst:?}: {err}"));
+        }
+
+        let Metadata { packages, .. } = MetadataCommand::new().no_deps().exec().unwrap();
+        for Package { name, targets, .. } in packages {
+            if name != "integration-ebpf" {
+                continue;
+            }
+            for Target { name, kind, .. } in targets {
+                if kind != ["bin"] {
+                    continue;
+                }
+                let dst = out_dir.join(name);
+                fs::write(&dst, []).unwrap_or_else(|err| panic!("failed to create {dst:?}: {err}"));
+            }
+        }
+    }
+}
diff --git a/test/integration-test/src/lib.rs b/test/integration-test/src/lib.rs
index 8b137891..7d571104 100644
--- a/test/integration-test/src/lib.rs
+++ b/test/integration-test/src/lib.rs
@@ -1 +1,20 @@
+use aya::include_bytes_aligned;
 
+pub const EXT: &[u8] = include_bytes_aligned!(concat!(env!("OUT_DIR"), "/ext.bpf.o"));
+pub const MAIN: &[u8] = include_bytes_aligned!(concat!(env!("OUT_DIR"), "/main.bpf.o"));
+pub const MULTIMAP_BTF: &[u8] =
+    include_bytes_aligned!(concat!(env!("OUT_DIR"), "/multimap-btf.bpf.o"));
+pub const TEXT_64_64_RELOC: &[u8] =
+    include_bytes_aligned!(concat!(env!("OUT_DIR"), "/text_64_64_reloc.o"));
+
+pub const LOG: &[u8] = include_bytes_aligned!(concat!(env!("OUT_DIR"), "/log"));
+pub const MAP_TEST: &[u8] = include_bytes_aligned!(concat!(env!("OUT_DIR"), "/map_test"));
+pub const NAME_TEST: &[u8] = include_bytes_aligned!(concat!(env!("OUT_DIR"), "/name_test"));
+pub const PASS: &[u8] = include_bytes_aligned!(concat!(env!("OUT_DIR"), "/pass"));
+pub const TEST: &[u8] = include_bytes_aligned!(concat!(env!("OUT_DIR"), "/test"));
+pub const RELOCATIONS: &[u8] = include_bytes_aligned!(concat!(env!("OUT_DIR"), "/relocations"));
+pub const BPF_PROBE_READ: &[u8] =
+    include_bytes_aligned!(concat!(env!("OUT_DIR"), "/bpf_probe_read"));
+
+#[cfg(test)]
+mod tests;
diff --git a/test/integration-test/src/tests.rs b/test/integration-test/src/tests.rs
new file mode 100644
index 00000000..dd8565b0
--- /dev/null
+++ b/test/integration-test/src/tests.rs
@@ -0,0 +1,8 @@
+mod bpf_probe_read;
+mod btf_relocations;
+mod elf;
+mod load;
+mod log;
+mod rbpf;
+mod relocations;
+mod smoke;
diff --git a/test/integration-test/tests/bpf_probe_read.rs b/test/integration-test/src/tests/bpf_probe_read.rs
similarity index 93%
rename from test/integration-test/tests/bpf_probe_read.rs
rename to test/integration-test/src/tests/bpf_probe_read.rs
index d10f82e1..20e6a134 100644
--- a/test/integration-test/tests/bpf_probe_read.rs
+++ b/test/integration-test/src/tests/bpf_probe_read.rs
@@ -1,4 +1,4 @@
-use aya::{include_bytes_aligned, maps::Array, programs::UProbe, Bpf};
+use aya::{maps::Array, programs::UProbe, Bpf};
 
 const RESULT_BUF_LEN: usize = 1024;
 
@@ -68,7 +68,7 @@ fn set_user_buffer(bytes: &[u8], dest_len: usize) -> Bpf {
     let bpf = load_and_attach_uprobe(
         "test_bpf_probe_read_user_str_bytes",
         "trigger_bpf_probe_read_user",
-        include_bytes_aligned!("../../../target/bpfel-unknown-none/release/bpf_probe_read"),
+        crate::BPF_PROBE_READ,
     );
     trigger_bpf_probe_read_user(bytes.as_ptr(), dest_len);
     bpf
@@ -78,7 +78,7 @@ fn set_kernel_buffer(bytes: &[u8], dest_len: usize) -> Bpf {
     let mut bpf = load_and_attach_uprobe(
         "test_bpf_probe_read_kernel_str_bytes",
         "trigger_bpf_probe_read_kernel",
-        include_bytes_aligned!("../../../target/bpfel-unknown-none/release/bpf_probe_read"),
+        crate::BPF_PROBE_READ,
     );
     set_kernel_buffer_element(&mut bpf, bytes);
     trigger_bpf_probe_read_kernel(dest_len);
diff --git a/test/integration-test/tests/btf_relocations.rs b/test/integration-test/src/tests/btf_relocations.rs
similarity index 83%
rename from test/integration-test/tests/btf_relocations.rs
rename to test/integration-test/src/tests/btf_relocations.rs
index 37a138bf..75f36223 100644
--- a/test/integration-test/tests/btf_relocations.rs
+++ b/test/integration-test/src/tests/btf_relocations.rs
@@ -1,6 +1,9 @@
-use anyhow::{bail, Context as _, Result};
-use std::{path::PathBuf, process::Command, thread::sleep, time::Duration};
-use tempfile::TempDir;
+use anyhow::{anyhow, bail, Context as _, Result};
+use std::{
+    process::{Child, ChildStdout, Command, Stdio},
+    thread::sleep,
+    time::Duration,
+};
 
 use aya::{maps::Array, programs::TracePoint, util::KernelVersion, BpfLoader, Btf, Endianness};
 
@@ -215,9 +218,15 @@ impl RelocationTest {
     /// - Generate the source eBPF filling a template
     /// - Compile it with clang
     fn build_ebpf(&self) -> Result<Vec<u8>> {
-        let local_definition = self.local_definition;
-        let relocation_code = self.relocation_code;
-        let (_tmp_dir, compiled_file) = compile(&format!(
+        use std::io::Read as _;
+
+        let Self {
+            local_definition,
+            relocation_code,
+            ..
+        } = self;
+
+        let mut stdout = compile(&format!(
             r#"
                 #include
 
@@ -250,23 +259,29 @@ impl RelocationTest {
                 char _license[] __attribute__((section("license"), used)) = "GPL";
             "#
         ))
-        .context("Failed to compile eBPF program")?;
-        let bytecode =
-            std::fs::read(compiled_file).context("Error reading compiled eBPF program")?;
-        Ok(bytecode)
+        .context("failed to compile eBPF program")?;
+        let mut output = Vec::new();
+        stdout.read_to_end(&mut output)?;
+        Ok(output)
     }
 
     /// - Generate the target BTF source with a mock main()
     /// - Compile it with clang
     /// - Extract the BTF with llvm-objcopy
    fn build_btf(&self) -> Result<Btf> {
-        let target_btf = self.target_btf;
-        let relocation_code = self.relocation_code;
+        use std::io::Read as _;
+
+        let Self {
+            target_btf,
+            relocation_code,
+            ..
+        } = self;
+
         // BTF files can be generated and inspected with these commands:
         // $ clang -c -g -O2 -target bpf target.c
         // $ pahole --btf_encode_detached=target.btf -V target.o
         // $ bpftool btf dump file ./target.btf format c
-        let (tmp_dir, compiled_file) = compile(&format!(
+        let stdout = compile(&format!(
             r#"
                 #include
 
@@ -280,14 +295,20 @@ impl RelocationTest {
             }}
             "#
         ))
-        .context("Failed to compile BTF")?;
+        .context("failed to compile BTF")?;
+
         let mut cmd = Command::new("llvm-objcopy");
-        cmd.current_dir(tmp_dir.path())
-            .args(["--dump-section", ".BTF=target.btf"])
-            .arg(compiled_file);
-        let status = cmd
-            .status()
-            .with_context(|| format!("Failed to run {cmd:?}"))?;
+        cmd.args(["--dump-section", ".BTF=-", "-"])
+            .stdin(stdout)
+            .stdout(Stdio::piped());
+        let mut child = cmd
+            .spawn()
+            .with_context(|| format!("failed to spawn {cmd:?}"))?;
+        let Child { stdout, .. } = &mut child;
+        let mut stdout = stdout.take().ok_or(anyhow!("failed to open stdout"))?;
+        let status = child
+            .wait()
+            .with_context(|| format!("failed to wait for {cmd:?}"))?;
         match status.code() {
             Some(code) => match code {
                 0 => {}
@@ -295,25 +316,39 @@
             },
             None => bail!("{cmd:?} terminated by signal"),
         }
-        let btf = Btf::parse_file(tmp_dir.path().join("target.btf"), Endianness::default())
-            .context("Error parsing generated BTF")?;
-        Ok(btf)
+
+        let mut output = Vec::new();
+        stdout.read_to_end(&mut output)?;
+
+        Btf::parse(output.as_slice(), Endianness::default())
+            .context("failed to parse generated BTF")
     }
 }
 
-/// Compile an eBPF program and return the path of the compiled object.
-/// Also returns a TempDir handler, dropping it will clear the created dicretory.
-fn compile(source_code: &str) -> Result<(TempDir, PathBuf)> {
-    let tmp_dir = tempfile::tempdir().context("Error making temp dir")?;
-    let source = tmp_dir.path().join("source.c");
-    std::fs::write(&source, source_code).context("Writing bpf program failed")?;
+/// Compile an eBPF program and return its bytes.
+fn compile(source_code: &str) -> Result<ChildStdout> {
+    use std::io::Write as _;
+
     let mut cmd = Command::new("clang");
-    cmd.current_dir(&tmp_dir)
-        .args(["-c", "-g", "-O2", "-target", "bpf"])
-        .arg(&source);
-    let status = cmd
-        .status()
-        .with_context(|| format!("Failed to run {cmd:?}"))?;
+    cmd.args([
+        "-c", "-g", "-O2", "-target", "bpf", "-x", "c", "-", "-o", "-",
+    ])
+    .stdin(Stdio::piped())
+    .stdout(Stdio::piped());
+    let mut child = cmd
+        .spawn()
+        .with_context(|| format!("failed to spawn {cmd:?}"))?;
+    let Child { stdin, stdout, .. } = &mut child;
+    {
+        let mut stdin = stdin.take().ok_or(anyhow!("failed to open stdin"))?;
+        stdin
+            .write_all(source_code.as_bytes())
+            .context("failed to write to stdin")?;
+    }
+    let stdout = stdout.take().ok_or(anyhow!("failed to open stdout"))?;
+    let status = child
+        .wait()
+        .with_context(|| format!("failed to wait for {cmd:?}"))?;
     match status.code() {
         Some(code) => match code {
             0 => {}
@@ -321,7 +356,7 @@ fn compile(source_code: &str) -> Result<(TempDir, PathBuf)> {
         },
         None => bail!("{cmd:?} terminated by signal"),
     }
-    Ok((tmp_dir, source.with_extension("o")))
+    Ok(stdout)
 }
 
 struct RelocationTestRunner {
diff --git a/test/integration-test/tests/elf.rs b/test/integration-test/src/tests/elf.rs
similarity index 70%
rename from test/integration-test/tests/elf.rs
rename to test/integration-test/src/tests/elf.rs
index 3d37ddb4..3ebbdf58 100644
--- a/test/integration-test/tests/elf.rs
+++ b/test/integration-test/src/tests/elf.rs
@@ -1,10 +1,8 @@
-use aya::include_bytes_aligned;
 use object::{Object, ObjectSymbol};
 
 #[test]
 fn test_maps() {
-    let bytes = include_bytes_aligned!("../../../target/bpfel-unknown-none/release/map_test");
-    let obj_file = object::File::parse(bytes).unwrap();
+    let obj_file = object::File::parse(crate::MAP_TEST).unwrap();
     if obj_file.section_by_name("maps").is_none() {
         panic!("No 'maps' ELF section");
     }
diff --git a/test/integration-test/tests/load.rs b/test/integration-test/src/tests/load.rs
similarity index 87%
rename from test/integration-test/tests/load.rs
rename to test/integration-test/src/tests/load.rs
index 3f35e06e..ede471e2 100644
--- a/test/integration-test/tests/load.rs
+++ b/test/integration-test/src/tests/load.rs
@@ -1,7 +1,6 @@
 use std::{convert::TryInto as _, thread, time};
 
 use aya::{
-    include_bytes_aligned,
     maps::Array,
     programs::{
         links::{FdLink, PinnedLink},
@@ -16,8 +15,7 @@ const RETRY_DURATION_MS: u64 = 10;
#[test] fn long_name() { - let bytes = include_bytes_aligned!("../../../target/bpfel-unknown-none/release/name_test"); - let mut bpf = Bpf::load(bytes).unwrap(); + let mut bpf = Bpf::load(crate::NAME_TEST).unwrap(); let name_prog: &mut Xdp = bpf .program_mut("ihaveaverylongname") .unwrap() @@ -33,9 +31,7 @@ fn long_name() { #[test] fn multiple_btf_maps() { - let bytes = - include_bytes_aligned!("../../../target/bpfel-unknown-none/release/multimap-btf.bpf.o"); - let mut bpf = Bpf::load(bytes).unwrap(); + let mut bpf = Bpf::load(crate::MULTIMAP_BTF).unwrap(); let map_1: Array<_, u64> = bpf.take_map("map_1").unwrap().try_into().unwrap(); let map_2: Array<_, u64> = bpf.take_map("map_2").unwrap().try_into().unwrap(); @@ -71,8 +67,7 @@ macro_rules! assert_loaded { #[test] fn unload_xdp() { - let bytes = include_bytes_aligned!("../../../target/bpfel-unknown-none/release/test"); - let mut bpf = Bpf::load(bytes).unwrap(); + let mut bpf = Bpf::load(crate::TEST).unwrap(); let prog: &mut Xdp = bpf .program_mut("test_unload_xdp") .unwrap() @@ -101,8 +96,7 @@ fn unload_xdp() { #[test] fn unload_kprobe() { - let bytes = include_bytes_aligned!("../../../target/bpfel-unknown-none/release/test"); - let mut bpf = Bpf::load(bytes).unwrap(); + let mut bpf = Bpf::load(crate::TEST).unwrap(); let prog: &mut KProbe = bpf .program_mut("test_unload_kpr") .unwrap() @@ -137,8 +131,7 @@ fn pin_link() { return; } - let bytes = include_bytes_aligned!("../../../target/bpfel-unknown-none/release/test"); - let mut bpf = Bpf::load(bytes).unwrap(); + let mut bpf = Bpf::load(crate::TEST).unwrap(); let prog: &mut Xdp = bpf .program_mut("test_unload_xdp") .unwrap() @@ -173,11 +166,9 @@ fn pin_lifecycle() { return; } - let bytes = include_bytes_aligned!("../../../target/bpfel-unknown-none/release/pass"); - // 1. Load Program and Pin { - let mut bpf = Bpf::load(bytes).unwrap(); + let mut bpf = Bpf::load(crate::PASS).unwrap(); let prog: &mut Xdp = bpf.program_mut("pass").unwrap().try_into().unwrap(); prog.load().unwrap(); prog.pin("/sys/fs/bpf/aya-xdp-test-prog").unwrap(); @@ -211,7 +202,7 @@ fn pin_lifecycle() { // 4. 
Load a new version of the program, unpin link, and atomically replace old program { - let mut bpf = Bpf::load(bytes).unwrap(); + let mut bpf = Bpf::load(crate::PASS).unwrap(); let prog: &mut Xdp = bpf.program_mut("pass").unwrap().try_into().unwrap(); prog.load().unwrap(); diff --git a/test/integration-test/tests/log.rs b/test/integration-test/src/tests/log.rs similarity index 95% rename from test/integration-test/tests/log.rs rename to test/integration-test/src/tests/log.rs index 1c4251db..b3d92893 100644 --- a/test/integration-test/tests/log.rs +++ b/test/integration-test/src/tests/log.rs @@ -1,6 +1,6 @@ use std::sync::{Arc, LockResult, Mutex, MutexGuard}; -use aya::{include_bytes_aligned, programs::UProbe, Bpf}; +use aya::{programs::UProbe, Bpf}; use aya_log::BpfLogger; use log::{Level, Log, Record}; use tokio::time::{sleep, Duration}; @@ -89,8 +89,7 @@ impl Log for TestingLogger { #[tokio::test] async fn log() { - let bytes = include_bytes_aligned!("../../../target/bpfel-unknown-none/release/log"); - let mut bpf = Bpf::load(bytes).unwrap(); + let mut bpf = Bpf::load(crate::LOG).unwrap(); let (logger, captured_logs) = TestingLogger::with_capacity(5); BpfLogger::init_with_logger(&mut bpf, logger).unwrap(); diff --git a/test/integration-test/tests/rbpf.rs b/test/integration-test/src/tests/rbpf.rs similarity index 92% rename from test/integration-test/tests/rbpf.rs rename to test/integration-test/src/tests/rbpf.rs index cfeb1334..1f2fe2cb 100644 --- a/test/integration-test/tests/rbpf.rs +++ b/test/integration-test/src/tests/rbpf.rs @@ -1,13 +1,11 @@ use core::{mem::size_of, ptr::null_mut, slice::from_raw_parts}; use std::collections::HashMap; -use aya::include_bytes_aligned; use aya_obj::{generated::bpf_insn, Object, ProgramSection}; #[test] fn run_with_rbpf() { - let bytes = include_bytes_aligned!("../../../target/bpfel-unknown-none/release/pass"); - let object = Object::parse(bytes).unwrap(); + let object = Object::parse(crate::PASS).unwrap(); assert_eq!(object.programs.len(), 1); matches::assert_matches!(object.programs["pass"].section, ProgramSection::Xdp { .. 
}); @@ -34,9 +32,7 @@ static mut MULTIMAP_MAPS: [*mut Vec; 2] = [null_mut(), null_mut()]; #[test] fn use_map_with_rbpf() { - let bytes = - include_bytes_aligned!("../../../target/bpfel-unknown-none/release/multimap-btf.bpf.o"); - let mut object = Object::parse(bytes).unwrap(); + let mut object = Object::parse(crate::MULTIMAP_BTF).unwrap(); assert_eq!(object.programs.len(), 1); matches::assert_matches!( diff --git a/test/integration-test/tests/relocations.rs b/test/integration-test/src/tests/relocations.rs similarity index 74% rename from test/integration-test/tests/relocations.rs rename to test/integration-test/src/tests/relocations.rs index 702494c3..8642dc4b 100644 --- a/test/integration-test/tests/relocations.rs +++ b/test/integration-test/src/tests/relocations.rs @@ -1,13 +1,10 @@ use std::time::Duration; -use aya::{include_bytes_aligned, programs::UProbe, Bpf}; +use aya::{programs::UProbe, Bpf}; #[test] fn relocations() { - let bpf = load_and_attach( - "test_64_32_call_relocs", - include_bytes_aligned!("../../../target/bpfel-unknown-none/release/relocations"), - ); + let bpf = load_and_attach("test_64_32_call_relocs", crate::RELOCATIONS); trigger_relocations_program(); std::thread::sleep(Duration::from_millis(100)); @@ -20,10 +17,7 @@ fn relocations() { #[test] fn text_64_64_reloc() { - let mut bpf = load_and_attach( - "test_text_64_64_reloc", - include_bytes_aligned!("../../../target/bpfel-unknown-none/release/text_64_64_reloc.o"), - ); + let mut bpf = load_and_attach("test_text_64_64_reloc", crate::TEXT_64_64_RELOC); let mut m = aya::maps::Array::<_, u64>::try_from(bpf.map_mut("RESULTS").unwrap()).unwrap(); m.set(0, 1, 0).unwrap(); diff --git a/test/integration-test/tests/smoke.rs b/test/integration-test/src/tests/smoke.rs similarity index 69% rename from test/integration-test/tests/smoke.rs rename to test/integration-test/src/tests/smoke.rs index 304772e8..8b09ac92 100644 --- a/test/integration-test/tests/smoke.rs +++ b/test/integration-test/src/tests/smoke.rs @@ -1,5 +1,4 @@ use aya::{ - include_bytes_aligned, programs::{Extension, Xdp, XdpFlags}, util::KernelVersion, Bpf, BpfLoader, @@ -13,8 +12,7 @@ fn xdp() { return; } - let bytes = include_bytes_aligned!("../../../target/bpfel-unknown-none/release/pass"); - let mut bpf = Bpf::load(bytes).unwrap(); + let mut bpf = Bpf::load(crate::PASS).unwrap(); let dispatcher: &mut Xdp = bpf.program_mut("pass").unwrap().try_into().unwrap(); dispatcher.load().unwrap(); dispatcher.attach("lo", XdpFlags::default()).unwrap(); @@ -27,15 +25,12 @@ fn extension() { eprintln!("skipping test on kernel {kernel_version:?}, XDP uses netlink"); return; } - let main_bytes = - include_bytes_aligned!("../../../target/bpfel-unknown-none/release/main.bpf.o"); - let mut bpf = Bpf::load(main_bytes).unwrap(); + let mut bpf = Bpf::load(crate::MAIN).unwrap(); let pass: &mut Xdp = bpf.program_mut("pass").unwrap().try_into().unwrap(); pass.load().unwrap(); pass.attach("lo", XdpFlags::default()).unwrap(); - let ext_bytes = include_bytes_aligned!("../../../target/bpfel-unknown-none/release/ext.bpf.o"); - let mut bpf = BpfLoader::new().extension("drop").load(ext_bytes).unwrap(); + let mut bpf = BpfLoader::new().extension("drop").load(crate::EXT).unwrap(); let drop_: &mut Extension = bpf.program_mut("drop").unwrap().try_into().unwrap(); drop_.load(pass.fd().unwrap(), "xdp_pass").unwrap(); } diff --git a/test/run.sh b/test/run.sh index dd2461c7..23c0dd1a 100755 --- a/test/run.sh +++ b/test/run.sh @@ -7,7 +7,6 @@ if [ "$(uname -s)" = "Darwin" ]; then fi 
AYA_SOURCE_DIR="$(realpath $(dirname $0)/..)" -LIBBPF_DIR=$1 # Temporary directory for tests to use. AYA_TMPDIR="${AYA_SOURCE_DIR}/.tmp" @@ -236,25 +235,16 @@ cleanup_vm() { fi } -if [ -z "$LIBBPF_DIR" ]; then - echo "path to libbpf required" - exit 1 -fi - start_vm trap cleanup_vm EXIT -# make sure we always use fresh aya and libbpf (also see comment at the end) -exec_vm "rm -rf aya/* libbpf" +# make sure we always use fresh sources (also see comment at the end) +exec_vm "rm -rf aya/*" rsync_vm "--exclude=target --exclude=.tmp $AYA_SOURCE_DIR" -rsync_vm "$LIBBPF_DIR" -# need to build or linting will fail trying to include object files; don't run the tests though. -exec_vm "cd aya; cargo xtask integration-test --libbpf-dir ~/libbpf -- filter-that-matches-nothing" -exec_vm "cd aya; cargo clippy --all-targets -p integration-test -- --deny warnings" -exec_vm "cd aya; cargo xtask integration-test --libbpf-dir ~/libbpf" +exec_vm "cd aya; cargo xtask integration-test" # we rm and sync but it doesn't seem to work reliably - I guess we could sleep a # few seconds after but ain't nobody got time for that. Instead we also rm # before rsyncing. -exec_vm "rm -rf aya/* libbpf; sync" +exec_vm "rm -rf aya/*; sync" diff --git a/xtask/src/build_ebpf.rs b/xtask/src/build_ebpf.rs deleted file mode 100644 index bdbf44ed..00000000 --- a/xtask/src/build_ebpf.rs +++ /dev/null @@ -1,138 +0,0 @@ -use std::{ - borrow::Cow, - env, - ffi::{OsStr, OsString}, - fs, - path::{Path, PathBuf}, - process::Command, -}; - -use anyhow::Result; -use clap::Parser; - -use crate::utils::{exec, workspace_root}; - -#[derive(Debug, Copy, Clone)] -pub enum Architecture { - BpfEl, - BpfEb, -} - -impl std::str::FromStr for Architecture { - type Err = &'static str; - - fn from_str(s: &str) -> Result { - Ok(match s { - "bpfel-unknown-none" => Architecture::BpfEl, - "bpfeb-unknown-none" => Architecture::BpfEb, - _ => return Err("invalid target"), - }) - } -} - -impl std::fmt::Display for Architecture { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - f.write_str(match self { - Architecture::BpfEl => "bpfel-unknown-none", - Architecture::BpfEb => "bpfeb-unknown-none", - }) - } -} - -#[derive(Debug, Parser)] -pub struct BuildEbpfOptions { - /// Set the endianness of the BPF target - #[clap(default_value = "bpfel-unknown-none", long)] - pub target: Architecture, - /// Libbpf dir, required for compiling C code - #[clap(long, action)] - pub libbpf_dir: PathBuf, -} - -pub fn build_ebpf(opts: BuildEbpfOptions) -> Result<()> { - build_rust_ebpf(&opts)?; - build_c_ebpf(&opts) -} - -fn build_rust_ebpf(opts: &BuildEbpfOptions) -> Result<()> { - let BuildEbpfOptions { - target, - libbpf_dir: _, - } = opts; - - let mut dir = PathBuf::from(workspace_root()); - dir.push("test/integration-ebpf"); - - exec( - Command::new("cargo") - .current_dir(&dir) - .args(["+nightly", "build", "--release", "--target"]) - .arg(target.to_string()) - .args(["-Z", "build-std=core"]) - .current_dir(&dir), - ) -} - -fn get_libbpf_headers(libbpf_dir: &Path, include_path: &Path) -> Result<()> { - fs::create_dir_all(include_path)?; - let mut includedir = OsString::new(); - includedir.push("INCLUDEDIR="); - includedir.push(include_path); - exec( - Command::new("make") - .current_dir(libbpf_dir.join("src")) - .arg(includedir) - .arg("install_headers"), - ) -} - -fn build_c_ebpf(opts: &BuildEbpfOptions) -> Result<()> { - let BuildEbpfOptions { target, libbpf_dir } = opts; - - let mut src = PathBuf::from(workspace_root()); - 
src.push("test/integration-ebpf/src/bpf"); - - let mut out_path = PathBuf::from(workspace_root()); - out_path.push("target"); - out_path.push(target.to_string()); - out_path.push("release"); - - let include_path = out_path.join("include"); - get_libbpf_headers(libbpf_dir, &include_path)?; - let files = fs::read_dir(&src).unwrap(); - for file in files { - let p = file.unwrap().path(); - if let Some(ext) = p.extension() { - if ext == "c" { - let mut out = PathBuf::from(&out_path); - out.push(p.file_name().unwrap()); - out.set_extension("o"); - compile_with_clang(&p, &out, &include_path)?; - } - } - } - Ok(()) -} - -/// Build eBPF programs with clang and libbpf headers. -fn compile_with_clang(src: &Path, out: &Path, include_path: &Path) -> Result<()> { - let clang: Cow<'_, _> = match env::var_os("CLANG") { - Some(val) => val.into(), - None => OsStr::new("/usr/bin/clang").into(), - }; - let arch = match env::consts::ARCH { - "x86_64" => "x86", - "aarch64" => "arm64", - arch => arch, - }; - exec( - Command::new(clang) - .arg("-I") - .arg(include_path) - .args(["-g", "-O2", "-target", "bpf", "-c"]) - .arg(format!("-D__TARGET_ARCH_{arch}")) - .arg(src) - .arg("-o") - .arg(out), - ) -} diff --git a/xtask/src/codegen/aya.rs b/xtask/src/codegen/aya.rs index 6758eed4..c9f48b48 100644 --- a/xtask/src/codegen/aya.rs +++ b/xtask/src/codegen/aya.rs @@ -3,20 +3,22 @@ use std::path::PathBuf; use aya_tool::{bindgen, write_to_file}; -use crate::codegen::{Architecture, Options}; +use crate::codegen::{Architecture, SysrootOptions}; -pub fn codegen(opts: &Options) -> Result<(), anyhow::Error> { - codegen_internal_btf_bindings(opts)?; +pub fn codegen(opts: &SysrootOptions) -> Result<(), anyhow::Error> { + codegen_internal_btf_bindings()?; codegen_bindings(opts) } -fn codegen_internal_btf_bindings(opts: &Options) -> Result<(), anyhow::Error> { +fn codegen_internal_btf_bindings() -> Result<(), anyhow::Error> { let dir = PathBuf::from("aya-obj"); let generated = dir.join("src/generated"); + let libbpf_dir = PathBuf::from("libbpf"); + let mut bindgen = bindgen::user_builder() .clang_arg(format!( "-I{}", - opts.libbpf_dir + libbpf_dir .join("include/uapi") .canonicalize() .unwrap() @@ -24,17 +26,13 @@ fn codegen_internal_btf_bindings(opts: &Options) -> Result<(), anyhow::Error> { )) .clang_arg(format!( "-I{}", - opts.libbpf_dir + libbpf_dir .join("include") .canonicalize() .unwrap() .to_string_lossy() )) - .header( - opts.libbpf_dir - .join("src/libbpf_internal.h") - .to_string_lossy(), - ) + .header(libbpf_dir.join("src/libbpf_internal.h").to_string_lossy()) .constified_enum_module("bpf_core_relo_kind"); let types = ["bpf_core_relo", "btf_ext_header"]; @@ -54,7 +52,13 @@ fn codegen_internal_btf_bindings(opts: &Options) -> Result<(), anyhow::Error> { Ok(()) } -fn codegen_bindings(opts: &Options) -> Result<(), anyhow::Error> { +fn codegen_bindings(opts: &SysrootOptions) -> Result<(), anyhow::Error> { + let SysrootOptions { + x86_64_sysroot, + aarch64_sysroot, + armv7_sysroot, + riscv64_sysroot, + } = opts; let types = [ // BPF "BPF_TYPES", @@ -158,15 +162,13 @@ fn codegen_bindings(opts: &Options) -> Result<(), anyhow::Error> { let dir = PathBuf::from("aya-obj"); let generated = dir.join("src/generated"); + let libbpf_dir = PathBuf::from("libbpf"); let builder = || { bindgen::user_builder() .header(dir.join("include/linux_wrapper.h").to_string_lossy()) - .clang_args(&[ - "-I", - &*opts.libbpf_dir.join("include/uapi").to_string_lossy(), - ]) - .clang_args(&["-I", &*opts.libbpf_dir.join("include").to_string_lossy()]) + 
.clang_args(&["-I", &*libbpf_dir.join("include/uapi").to_string_lossy()]) + .clang_args(&["-I", &*libbpf_dir.join("include").to_string_lossy()]) }; for arch in Architecture::supported() { @@ -185,10 +187,10 @@ fn codegen_bindings(opts: &Options) -> Result<(), anyhow::Error> { // Set the sysroot. This is needed to ensure that the correct arch // specific headers are imported. let sysroot = match arch { - Architecture::X86_64 => &opts.x86_64_sysroot, - Architecture::ARMv7 => &opts.armv7_sysroot, - Architecture::AArch64 => &opts.aarch64_sysroot, - Architecture::RISCV64 => &opts.riscv64_sysroot, + Architecture::X86_64 => x86_64_sysroot, + Architecture::ARMv7 => armv7_sysroot, + Architecture::AArch64 => aarch64_sysroot, + Architecture::RISCV64 => riscv64_sysroot, }; bindgen = bindgen.clang_args(&["-I", &*sysroot.to_string_lossy()]); diff --git a/xtask/src/codegen/aya_bpf_bindings.rs b/xtask/src/codegen/aya_bpf_bindings.rs index 1bfcbc5b..d0af3221 100644 --- a/xtask/src/codegen/aya_bpf_bindings.rs +++ b/xtask/src/codegen/aya_bpf_bindings.rs @@ -8,11 +8,19 @@ use syn::{parse_str, Item}; use crate::codegen::{ helpers::{expand_helpers, extract_helpers}, - Architecture, Options, + Architecture, SysrootOptions, }; -pub fn codegen(opts: &Options) -> Result<(), anyhow::Error> { +pub fn codegen(opts: &SysrootOptions) -> Result<(), anyhow::Error> { + let SysrootOptions { + x86_64_sysroot, + aarch64_sysroot, + armv7_sysroot, + riscv64_sysroot, + } = opts; + let dir = PathBuf::from("bpf/aya-bpf-bindings"); + let libbpf_dir = PathBuf::from("libbpf"); let builder = || { let mut bindgen = bindgen::bpf_builder() @@ -20,12 +28,9 @@ pub fn codegen(opts: &Options) -> Result<(), anyhow::Error> { // aya-tool uses aya_bpf::cty. We can't use that here since aya-bpf // depends on aya-bpf-bindings so it would create a circular dep. .ctypes_prefix("::aya_bpf_cty") - .clang_args(&[ - "-I", - &*opts.libbpf_dir.join("include/uapi").to_string_lossy(), - ]) - .clang_args(&["-I", &*opts.libbpf_dir.join("include").to_string_lossy()]) - .clang_args(&["-I", &*opts.libbpf_dir.join("src").to_string_lossy()]) + .clang_args(&["-I", &*libbpf_dir.join("include/uapi").to_string_lossy()]) + .clang_args(&["-I", &*libbpf_dir.join("include").to_string_lossy()]) + .clang_args(&["-I", &*libbpf_dir.join("src").to_string_lossy()]) // open aya-bpf-bindings/.../bindings.rs and look for mod // _bindgen, those are anonymous enums .constified_enum("BPF_F_.*") @@ -82,10 +87,10 @@ pub fn codegen(opts: &Options) -> Result<(), anyhow::Error> { // Set the sysroot. This is needed to ensure that the correct arch // specific headers are imported. let sysroot = match arch { - Architecture::X86_64 => &opts.x86_64_sysroot, - Architecture::ARMv7 => &opts.armv7_sysroot, - Architecture::AArch64 => &opts.aarch64_sysroot, - Architecture::RISCV64 => &opts.riscv64_sysroot, + Architecture::X86_64 => x86_64_sysroot, + Architecture::ARMv7 => armv7_sysroot, + Architecture::AArch64 => aarch64_sysroot, + Architecture::RISCV64 => riscv64_sysroot, }; bindgen = bindgen.clang_args(&["-I", &*sysroot.to_string_lossy()]); diff --git a/xtask/src/codegen/mod.rs b/xtask/src/codegen/mod.rs index e2321142..6722c660 100644 --- a/xtask/src/codegen/mod.rs +++ b/xtask/src/codegen/mod.rs @@ -52,13 +52,10 @@ impl std::fmt::Display for Architecture { } } +// sysroot options. Default to ubuntu headers installed by the +// libc6-dev-{arm64,armel}-cross packages. #[derive(Parser)] -pub struct Options { - #[arg(long, action)] - libbpf_dir: PathBuf, - - // sysroot options. 
Default to ubuntu headers installed by the - // libc6-dev-{arm64,armel}-cross packages. +pub struct SysrootOptions { #[arg(long, default_value = "/usr/include/x86_64-linux-gnu", action)] x86_64_sysroot: PathBuf, @@ -70,6 +67,12 @@ pub struct Options { #[arg(long, default_value = "/usr/riscv64-linux-gnu/include", action)] riscv64_sysroot: PathBuf, +} + +#[derive(Parser)] +pub struct Options { + #[command(flatten)] + sysroot_options: SysrootOptions, #[command(subcommand)] command: Option, @@ -84,13 +87,18 @@ enum Command { } pub fn codegen(opts: Options) -> Result<(), anyhow::Error> { - use Command::*; - match opts.command { - Some(Aya) => aya::codegen(&opts), - Some(AyaBpfBindings) => aya_bpf_bindings::codegen(&opts), + let Options { + sysroot_options, + command, + } = opts; + match command { + Some(command) => match command { + Command::Aya => aya::codegen(&sysroot_options), + Command::AyaBpfBindings => aya_bpf_bindings::codegen(&sysroot_options), + }, None => { - aya::codegen(&opts)?; - aya_bpf_bindings::codegen(&opts) + aya::codegen(&sysroot_options)?; + aya_bpf_bindings::codegen(&sysroot_options) } } } diff --git a/xtask/src/docs/mod.rs b/xtask/src/docs/mod.rs index d392798c..c7333d4c 100644 --- a/xtask/src/docs/mod.rs +++ b/xtask/src/docs/mod.rs @@ -1,5 +1,4 @@ -use crate::utils::exec; -use anyhow::{Context as _, Result}; +use anyhow::{anyhow, Context as _, Result}; use std::{ path::{Path, PathBuf}, process::Command, @@ -9,6 +8,19 @@ use std::{fs, io, io::Write}; use indoc::indoc; +pub fn exec(cmd: &mut Command) -> Result<()> { + let status = cmd + .status() + .with_context(|| format!("failed to run {cmd:?}"))?; + match status.code() { + Some(code) => match code { + 0 => Ok(()), + code => Err(anyhow!("{cmd:?} exited with code {code}")), + }, + None => Err(anyhow!("{cmd:?} terminated by signal")), + } +} + pub fn docs() -> Result<()> { let current_dir = PathBuf::from("."); let header_path = current_dir.join("header.html"); diff --git a/xtask/src/main.rs b/xtask/src/main.rs index c86da059..47b425e4 100644 --- a/xtask/src/main.rs +++ b/xtask/src/main.rs @@ -1,12 +1,9 @@ -mod build_ebpf; mod codegen; mod docs; mod run; -pub(crate) mod utils; - -use std::process::exit; use clap::Parser; + #[derive(Parser)] pub struct XtaskOptions { #[clap(subcommand)] @@ -17,20 +14,27 @@ pub struct XtaskOptions { enum Command { Codegen(codegen::Options), Docs, + BuildIntegrationTest(run::BuildOptions), IntegrationTest(run::Options), } -fn main() { +fn main() -> anyhow::Result<()> { let XtaskOptions { command } = Parser::parse(); - let ret = match command { + match command { Command::Codegen(opts) => codegen::codegen(opts), Command::Docs => docs::docs(), - Command::IntegrationTest(opts) => run::run(opts), - }; + Command::BuildIntegrationTest(opts) => { + let binaries = run::build(opts)?; + let mut stdout = std::io::stdout(); + for (_name, binary) in binaries { + use std::{io::Write as _, os::unix::ffi::OsStrExt as _}; - if let Err(e) = ret { - eprintln!("{e:#}"); - exit(1); + stdout.write_all(binary.as_os_str().as_bytes())?; + stdout.write_all("\n".as_bytes())?; + } + Ok(()) + } + Command::IntegrationTest(opts) => run::run(opts), } } diff --git a/xtask/src/run.rs b/xtask/src/run.rs index 0c7a241d..396d9ece 100644 --- a/xtask/src/run.rs +++ b/xtask/src/run.rs @@ -9,31 +9,33 @@ use anyhow::{Context as _, Result}; use cargo_metadata::{Artifact, CompilerMessage, Message, Target}; use clap::Parser; -use crate::build_ebpf::{build_ebpf, Architecture, BuildEbpfOptions as BuildOptions}; - #[derive(Debug, Parser)] 
-pub struct Options {
-    /// Set the endianness of the BPF target
-    #[clap(default_value = "bpfel-unknown-none", long)]
-    pub bpf_target: Architecture,
-    /// Build and run the release target
+pub struct BuildOptions {
+    /// Pass --release to `cargo build`.
     #[clap(long)]
     pub release: bool,
-    /// The command used to wrap your application
+    /// Pass --target to `cargo build`.
+    #[clap(long)]
+    pub target: Option<String>,
+}
+
+#[derive(Debug, Parser)]
+pub struct Options {
+    #[command(flatten)]
+    pub build_options: BuildOptions,
+    /// The command used to wrap your application.
     #[clap(short, long, default_value = "sudo -E")]
     pub runner: String,
-    /// libbpf directory
-    #[clap(long, action)]
-    pub libbpf_dir: PathBuf,
-    /// Arguments to pass to your application
+    /// Arguments to pass to your application.
     #[clap(name = "args", last = true)]
     pub run_args: Vec,
 }
 
 /// Build the project
-fn build(release: bool) -> Result<Vec<(PathBuf, PathBuf)>> {
+pub fn build(opts: BuildOptions) -> Result<Vec<(String, PathBuf)>> {
+    let BuildOptions { release, target } = opts;
     let mut cmd = Command::new("cargo");
-    cmd.args([
+    cmd.env("AYA_BUILD_INTEGRATION_BPF", "true").args([
         "build",
         "--tests",
         "--message-format=json",
@@ -42,6 +44,9 @@
     if release {
         cmd.arg("--release");
     }
+    if let Some(target) = target {
+        cmd.args(["--target", &target]);
+    }
     let mut cmd = cmd
         .stdout(Stdio::piped())
         .spawn()
@@ -55,16 +60,17 @@
         match message.context("valid JSON")? {
             Message::CompilerArtifact(Artifact {
                 executable,
-                target: Target { src_path, .. },
+                target: Target { name, .. },
                 ..
             }) => {
                 if let Some(executable) = executable {
-                    executables.push((src_path.into(), executable.into()));
+                    executables.push((name, executable.into()));
                 }
             }
             Message::CompilerMessage(CompilerMessage { message, .. }) => {
-                assert_eq!(writeln!(&mut compiler_messages, "{message}"), Ok(()));
+                writeln!(&mut compiler_messages, "{message}").context("String write failed")?
             }
+            _ => {}
         }
     }
 
@@ -87,27 +93,18 @@
 /// Build and run the project
 pub fn run(opts: Options) -> Result<()> {
     let Options {
-        bpf_target,
-        release,
+        build_options,
         runner,
-        libbpf_dir,
         run_args,
     } = opts;
-    // build our ebpf program followed by our application
-    build_ebpf(BuildOptions {
-        target: bpf_target,
-        libbpf_dir,
-    })
-    .context("error while building eBPF program")?;
-
-    let binaries = build(release).context("error while building userspace application")?;
+    let binaries = build(build_options).context("error while building userspace application")?;
 
     let mut args = runner.trim().split_terminator(' ');
     let runner = args.next().ok_or(anyhow::anyhow!("no first argument"))?;
     let args = args.collect::<Vec<_>>();
 
     let mut failures = String::new();
-    for (src_path, binary) in binaries {
+    for (name, binary) in binaries {
         let mut cmd = Command::new(runner);
         let cmd = cmd
             .args(args.iter())
@@ -115,7 +112,7 @@
             .args(run_args.iter())
             .arg("--test-threads=1");
 
-        println!("{} running {cmd:?}", src_path.display());
+        println!("{} running {cmd:?}", name);
 
         let status = cmd
             .status()
@@ -123,19 +120,11 @@
         match status.code() {
             Some(code) => match code {
                 0 => {}
-                code => assert_eq!(
-                    writeln!(
-                        &mut failures,
-                        "{} exited with status code {code}",
-                        src_path.display()
-                    ),
-                    Ok(())
-                ),
+                code => writeln!(&mut failures, "{} exited with status code {code}", name)
+                    .context("String write failed")?,
             },
-            None => assert_eq!(
-                writeln!(&mut failures, "{} terminated by signal", src_path.display()),
-                Ok(())
-            ),
+            None => writeln!(&mut failures, "{} terminated by signal", name)
+                .context("String write failed")?,
         }
     }
     if failures.is_empty() {
diff --git a/xtask/src/utils.rs b/xtask/src/utils.rs
deleted file mode 100644
index f2cd2471..00000000
--- a/xtask/src/utils.rs
+++ /dev/null
@@ -1,24 +0,0 @@
-use std::{cell::OnceCell, process::Command};
-
-use anyhow::{bail, Context as _, Result};
-
-pub fn workspace_root() -> &'static str {
-    static mut WORKSPACE_ROOT: OnceCell<String> = OnceCell::new();
-    unsafe { &mut WORKSPACE_ROOT }.get_or_init(|| {
-        let cmd = cargo_metadata::MetadataCommand::new();
-        cmd.exec().unwrap().workspace_root.to_string()
-    })
-}
-
-pub fn exec(cmd: &mut Command) -> Result<()> {
-    let status = cmd
-        .status()
-        .with_context(|| format!("failed to run {cmd:?}"))?;
-    match status.code() {
-        Some(code) => match code {
-            0 => Ok(()),
-            code => bail!("{cmd:?} exited with code {code}"),
-        },
-        None => bail!("{cmd:?} terminated by signal"),
-    }
-}
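For reference, the test layout this diff documents in `test/README.md` looks like the following in practice. This is a minimal illustrative sketch, not part of the patch: the module name `example.rs` is hypothetical, and the body simply mirrors the existing `smoke.rs` XDP test, loading the `PASS` constant that `integration-test/src/lib.rs` re-exports from `OUT_DIR`:

```rust
// test/integration-test/src/tests/example.rs (hypothetical; register it in src/tests.rs)
use aya::{
    programs::{Xdp, XdpFlags},
    Bpf,
};

#[test]
fn example_pass() {
    // Bytecode is compiled by build.rs into OUT_DIR and re-exported as crate::PASS.
    let mut bpf = Bpf::load(crate::PASS).unwrap();
    let pass: &mut Xdp = bpf.program_mut("pass").unwrap().try_into().unwrap();
    pass.load().unwrap();
    pass.attach("lo", XdpFlags::default()).unwrap();
}
```

Running it through `cargo xtask integration-test` sets `AYA_BUILD_INTEGRATION_BPF=true` so `build.rs` actually compiles and embeds the probes, and wraps the test binary with the privileged runner (`sudo -E` by default).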