integration-test: Remove integration-test-macros

The integration-test-macros crate doesn't add any value. Use `cargo build --tests`
with `--message-format=json` instead, and parse the output using `cargo_metadata`
to discover the location of the test binary.
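
For context, that approach looks roughly like the following (a minimal sketch, assuming `cargo_metadata` 0.15; the helper name is illustrative and the real logic lives in the `xtask` changes below):

    use std::{
        io::BufReader,
        process::{Command, Stdio},
    };

    use cargo_metadata::{Artifact, Message};

    // Hypothetical helper: build the tests and return the path of each produced
    // test executable by parsing cargo's JSON compiler messages.
    fn test_binaries() -> anyhow::Result<Vec<cargo_metadata::camino::Utf8PathBuf>> {
        let mut child = Command::new("cargo")
            .args([
                "build",
                "--tests",
                "--package=integration-test",
                "--message-format=json",
            ])
            .stdout(Stdio::piped())
            .spawn()?;
        let reader = BufReader::new(child.stdout.take().unwrap());
        let mut executables = Vec::new();
        for message in Message::parse_stream(reader) {
            // CompilerArtifact messages carry the path of any executable that was built.
            if let Message::CompilerArtifact(Artifact {
                executable: Some(path),
                ..
            }) = message?
            {
                executables.push(path);
            }
        }
        anyhow::ensure!(child.wait()?.success(), "cargo build failed");
        Ok(executables)
    }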
Tamir Duberstein 2 years ago
parent ecc03ecfad
commit b3231ed200

@@ -1,12 +1,33 @@
 [workspace]
 members = [
-"aya", "aya-obj", "aya-tool", "aya-log", "aya-log-common", "aya-log-parser", "test/integration-test", "test/integration-test-macros", "xtask",
+"aya",
+"aya-obj",
+"aya-tool",
+"aya-log",
+"aya-log-common",
+"aya-log-parser",
+"test/integration-test",
+"xtask",
 # macros
-"aya-bpf-macros", "aya-log-ebpf-macros",
+"aya-bpf-macros",
+"aya-log-ebpf-macros",
 # ebpf crates
-"bpf/aya-bpf", "bpf/aya-bpf-bindings", "bpf/aya-log-ebpf", "test/integration-ebpf"
+"bpf/aya-bpf",
+"bpf/aya-bpf-bindings",
+"bpf/aya-log-ebpf",
+"test/integration-ebpf",
 ]
+default-members = [
+"aya",
+"aya-obj",
+"aya-tool",
+"aya-log",
+"aya-bpf-macros",
+"aya-log-ebpf-macros",
+]
-default-members = ["aya", "aya-obj", "aya-tool", "aya-log", "aya-bpf-macros", "aya-log-ebpf-macros"]
 [profile.dev]
 panic = "abort"

@@ -43,11 +43,9 @@ cargo xtask integration-test --libbpf-dir /path/to/libbpf
 Tests should follow these guidelines:
-- Rust eBPF code should live in `integration-ebpf/${NAME}.rs` and included in `integration-ebpf/Cargo.toml`
+- Rust eBPF code should live in `integration-ebpf/${NAME}.rs` and included in `integration-ebpf/Cargo.toml`.
 - C eBPF code should live in `integration-test/src/bpf/${NAME}.bpf.c`. It's automatically compiled and made available as `${OUT_DIR}/${NAME}.bpf.o`.
-- Any bytecode should be included in the integration test binary using `include_bytes_aligned!`
-- Tests should be added to `integration-test/src/test`
-- You may add a new module, or use an existing one
-- Integration tests must use the `#[integration_test]` macro to be included in the build
-- Test functions should return `anyhow::Result<()>` since this allows the use of `?` to return errors.
-- You may either `panic!` when an assertion fails or `bail!`. The former is preferred since the stack trace will point directly to the failed line.
+- Any bytecode should be included in the integration test binary using `include_bytes_aligned!`.
+- Tests should be added to `integration-test/src/test`.
+- You may add a new module, or use an existing one.
+- Test functions should not return `anyhow::Result<()>` since this produces errors without stack traces. Prefer to `panic!` instead.
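
Taken together, the updated guidelines mean a test now looks roughly like the sketch below (the program name and bytecode path are illustrative only):

    use aya::{
        include_bytes_aligned,
        programs::{Xdp, XdpFlags},
        Bpf,
    };

    #[test]
    fn example_pass() {
        // Bytecode path is illustrative; real tests embed artifacts produced by the eBPF build.
        let bytes = include_bytes_aligned!("../../../../target/bpfel-unknown-none/release/pass");
        let mut bpf = Bpf::load(bytes).unwrap();
        // Panic (via unwrap) on failure so the backtrace points at the failing line.
        let pass: &mut Xdp = bpf.program_mut("pass").unwrap().try_into().unwrap();
        pass.load().unwrap();
        pass.attach("lo", XdpFlags::default()).unwrap();
    }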

@@ -1,13 +0,0 @@
-[package]
-name = "integration-test-macros"
-version = "0.1.0"
-edition = "2021"
-publish = false
-
-[dependencies]
-quote = "1"
-proc-macro2 = "1.0"
-syn = {version = "2.0", features = ["full"]}
-
-[lib]
-proc-macro = true

@@ -1,46 +0,0 @@
-use proc_macro::TokenStream;
-use proc_macro2::Span;
-use quote::quote;
-use syn::{parse_macro_input, Ident, ItemFn};
-
-#[proc_macro_attribute]
-pub fn integration_test(_attr: TokenStream, item: TokenStream) -> TokenStream {
-let item = parse_macro_input!(item as ItemFn);
-let name = &item.sig.ident;
-let name_str = &item.sig.ident.to_string();
-let expanded = quote! {
-#item
-inventory::submit!(crate::IntegrationTest {
-name: concat!(module_path!(), "::", #name_str),
-test_fn: #name,
-});
-};
-TokenStream::from(expanded)
-}
-
-#[proc_macro_attribute]
-pub fn tokio_integration_test(_attr: TokenStream, item: TokenStream) -> TokenStream {
-let item = parse_macro_input!(item as ItemFn);
-let name = &item.sig.ident;
-let name_str = &item.sig.ident.to_string();
-let sync_name_str = format!("sync_{name_str}");
-let sync_name = Ident::new(&sync_name_str, Span::call_site());
-let expanded = quote! {
-#item
-fn #sync_name() {
-let rt = tokio::runtime::Builder::new_current_thread()
-.enable_all()
-.build()
-.unwrap();
-rt.block_on(#name());
-}
-inventory::submit!(crate::IntegrationTest {
-name: concat!(module_path!(), "::", #sync_name_str),
-test_fn: #sync_name,
-});
-};
-TokenStream::from(expanded)
-}

@@ -13,7 +13,6 @@ clap = { version = "4", features = ["derive"] }
 env_logger = "0.10"
 futures-core = "0.3"
 inventory = "0.3"
-integration-test-macros = { path = "../integration-test-macros" }
 libc = { version = "0.2.105" }
 log = "0.4"
 object = { version = "0.31", default-features = false, features = ["std", "read_core", "elf"] }

@@ -1,21 +0,0 @@
-use libtest_mimic::{Arguments, Trial};
-
-mod tests;
-use tests::IntegrationTest;
-
-fn main() {
-env_logger::init();
-let mut args = Arguments::from_args();
-// Force to run single-threaded
-args.test_threads = Some(1);
-let tests = inventory::iter::<IntegrationTest>
-.into_iter()
-.map(|test| {
-Trial::test(test.name, move || {
-(test.test_fn)();
-Ok(())
-})
-})
-.collect();
-libtest_mimic::run(&args, tests).exit();
-}

@@ -6,7 +6,6 @@ use aya::{
 programs::{ProgramError, UProbe},
 Bpf,
 };
-use integration_test_macros::integration_test;
 const RESULT_BUF_LEN: usize = 1024;
@@ -20,13 +19,13 @@ struct TestResult {
 unsafe impl aya::Pod for TestResult {}
-#[integration_test]
+#[test]
 fn bpf_probe_read_user_str_bytes() {
 let bpf = set_user_buffer(b"foo\0", RESULT_BUF_LEN);
 assert_eq!(result_bytes(&bpf), b"foo");
 }
-#[integration_test]
+#[test]
 fn bpf_probe_read_user_str_bytes_truncate() {
 let s = vec![b'a'; RESULT_BUF_LEN];
 let bpf = set_user_buffer(&s, RESULT_BUF_LEN);
@@ -34,25 +33,25 @@ fn bpf_probe_read_user_str_bytes_truncate() {
 assert_eq!(result_bytes(&bpf), &s[..RESULT_BUF_LEN - 1]);
 }
-#[integration_test]
+#[test]
 fn bpf_probe_read_user_str_bytes_empty_string() {
 let bpf = set_user_buffer(b"\0", RESULT_BUF_LEN);
 assert_eq!(result_bytes(&bpf), b"");
 }
-#[integration_test]
+#[test]
 fn bpf_probe_read_user_str_bytes_empty_dest() {
 let bpf = set_user_buffer(b"foo\0", 0);
 assert_eq!(result_bytes(&bpf), b"");
 }
-#[integration_test]
+#[test]
 fn bpf_probe_read_kernel_str_bytes() {
 let bpf = set_kernel_buffer(b"foo\0", RESULT_BUF_LEN);
 assert_eq!(result_bytes(&bpf), b"foo");
 }
-#[integration_test]
+#[test]
 fn bpf_probe_read_kernel_str_bytes_truncate() {
 let s = vec![b'a'; RESULT_BUF_LEN];
 let bpf = set_kernel_buffer(&s, RESULT_BUF_LEN);
@@ -60,13 +59,13 @@ fn bpf_probe_read_kernel_str_bytes_truncate() {
 assert_eq!(result_bytes(&bpf), &s[..RESULT_BUF_LEN - 1]);
 }
-#[integration_test]
+#[test]
 fn bpf_probe_read_kernel_str_bytes_empty_string() {
 let bpf = set_kernel_buffer(b"\0", RESULT_BUF_LEN);
 assert_eq!(result_bytes(&bpf), b"");
 }
-#[integration_test]
+#[test]
 fn bpf_probe_read_kernel_str_bytes_empty_dest() {
 let bpf = set_kernel_buffer(b"foo\0", 0);
 assert_eq!(result_bytes(&bpf), b"");

@@ -4,12 +4,10 @@ use tempfile::TempDir;
 use aya::{maps::Array, programs::TracePoint, BpfLoader, Btf, Endianness};
-use super::integration_test;
 // In the tests below we often use values like 0xAAAAAAAA or -0x7AAAAAAA. Those values have no
 // special meaning, they just have "nice" bit patterns that can be helpful while debugging.
-#[integration_test]
+#[test]
 fn relocate_field() {
 let test = RelocationTest {
 local_definition: r#"
@@ -40,7 +38,7 @@ fn relocate_field() {
 assert_eq!(test.run_no_btf().unwrap(), 3);
 }
-#[integration_test]
+#[test]
 fn relocate_enum() {
 let test = RelocationTest {
 local_definition: r#"
@@ -60,7 +58,7 @@ fn relocate_enum() {
 assert_eq!(test.run_no_btf().unwrap(), 0xAAAAAAAA);
 }
-#[integration_test]
+#[test]
 fn relocate_enum_signed() {
 let test = RelocationTest {
 local_definition: r#"
@@ -80,7 +78,7 @@ fn relocate_enum_signed() {
 assert_eq!(test.run_no_btf().unwrap() as i64, -0x7AAAAAAAi64);
 }
-#[integration_test]
+#[test]
 fn relocate_enum64() {
 let test = RelocationTest {
 local_definition: r#"
@@ -100,7 +98,7 @@ fn relocate_enum64() {
 assert_eq!(test.run_no_btf().unwrap(), 0xAAAAAAAABBBBBBBB);
 }
-#[integration_test]
+#[test]
 fn relocate_enum64_signed() {
 let test = RelocationTest {
 local_definition: r#"
@@ -120,7 +118,7 @@ fn relocate_enum64_signed() {
 assert_eq!(test.run_no_btf().unwrap() as i64, -0xAAAAAAABBBBBBBBi64);
 }
-#[integration_test]
+#[test]
 fn relocate_pointer() {
 let test = RelocationTest {
 local_definition: r#"
@@ -143,7 +141,7 @@ fn relocate_pointer() {
 assert_eq!(test.run_no_btf().unwrap(), 42);
 }
-#[integration_test]
+#[test]
 fn relocate_struct_flavors() {
 let definition = r#"
 struct foo {};

@@ -1,9 +1,7 @@
-use super::integration_test;
 use aya::include_bytes_aligned;
 use object::{Object, ObjectSymbol};
-#[integration_test]
+#[test]
 fn test_maps() {
 let bytes = include_bytes_aligned!("../../../../target/bpfel-unknown-none/release/map_test");
 let obj_file = object::File::parse(bytes).unwrap();

@@ -13,12 +13,10 @@ use log::warn;
 use crate::tests::kernel_version;
-use super::integration_test;
 const MAX_RETRIES: u32 = 100;
 const RETRY_DURATION_MS: u64 = 10;
-#[integration_test]
+#[test]
 fn long_name() {
 let bytes = include_bytes_aligned!("../../../../target/bpfel-unknown-none/release/name_test");
 let mut bpf = Bpf::load(bytes).unwrap();
@@ -35,7 +33,7 @@ fn long_name() {
 // Therefore, as long as we were able to load the program, this is good enough.
 }
-#[integration_test]
+#[test]
 fn multiple_btf_maps() {
 let bytes =
 include_bytes_aligned!("../../../../target/bpfel-unknown-none/release/multimap-btf.bpf.o");
@@ -73,7 +71,7 @@ macro_rules! assert_loaded {
 };
 }
-#[integration_test]
+#[test]
 fn unload_xdp() {
 let bytes = include_bytes_aligned!("../../../../target/bpfel-unknown-none/release/test");
 let mut bpf = Bpf::load(bytes).unwrap();
@@ -103,7 +101,7 @@ fn unload_xdp() {
 assert_loaded!("test_unload_xdp", false);
 }
-#[integration_test]
+#[test]
 fn unload_kprobe() {
 let bytes = include_bytes_aligned!("../../../../target/bpfel-unknown-none/release/test");
 let mut bpf = Bpf::load(bytes).unwrap();
@@ -133,7 +131,7 @@ fn unload_kprobe() {
 assert_loaded!("test_unload_kpr", false);
 }
-#[integration_test]
+#[test]
 fn pin_link() {
 if kernel_version().unwrap() < (5, 9, 0) {
 warn!("skipping test, XDP uses netlink");
@@ -168,7 +166,7 @@ fn pin_link() {
 assert_loaded!("test_unload_xdp", false);
 }
-#[integration_test]
+#[test]
 fn pin_lifecycle() {
 if kernel_version().unwrap() < (5, 9, 0) {
 warn!("skipping test, XDP uses netlink");

@@ -5,8 +5,6 @@ use aya_log::BpfLogger;
 use log::{Level, Log, Record};
 use tokio::time::{sleep, Duration};
-use super::tokio_integration_test;
 const MAX_ATTEMPTS: usize = 10;
 const TIMEOUT_MS: u64 = 10;
@@ -89,7 +87,7 @@ impl Log for TestingLogger {
 }
 }
-#[tokio_integration_test]
+#[tokio::test]
 async fn log() {
 let bytes = include_bytes_aligned!("../../../../target/bpfel-unknown-none/release/log");
 let mut bpf = Bpf::load(bytes).unwrap();

@@ -12,8 +12,6 @@ pub mod rbpf;
 pub mod relocations;
 pub mod smoke;
-pub use integration_test_macros::{integration_test, tokio_integration_test};
 #[derive(Debug)]
 pub struct IntegrationTest {
 pub name: &'static str,

@@ -4,9 +4,7 @@ use std::collections::HashMap;
 use aya::include_bytes_aligned;
 use aya_obj::{generated::bpf_insn, Object, ProgramSection};
-use super::integration_test;
-#[integration_test]
+#[test]
 fn run_with_rbpf() {
 let bytes = include_bytes_aligned!("../../../../target/bpfel-unknown-none/release/pass");
 let object = Object::parse(bytes).unwrap();
@@ -37,7 +35,7 @@ fn run_with_rbpf() {
 static mut MULTIMAP_MAPS: [*mut Vec<u64>; 2] = [null_mut(), null_mut()];
-#[integration_test]
+#[test]
 fn use_map_with_rbpf() {
 let bytes =
 include_bytes_aligned!("../../../../target/bpfel-unknown-none/release/multimap-btf.bpf.o");

@@ -5,9 +5,8 @@ use aya::{
 programs::{ProgramError, UProbe},
 Bpf,
 };
-use integration_test_macros::integration_test;
-#[integration_test]
+#[test]
 fn relocations() {
 let bpf = load_and_attach(
 "test_64_32_call_relocs",
@@ -23,7 +22,7 @@ fn relocations() {
 assert_eq!(m.get(&2, 0).unwrap(), 3);
 }
-#[integration_test]
+#[test]
 fn text_64_64_reloc() {
 let mut bpf = load_and_attach(
 "test_text_64_64_reloc",

@@ -5,9 +5,9 @@ use aya::{
 };
 use log::info;
-use super::{integration_test, kernel_version};
+use super::kernel_version;
-#[integration_test]
+#[test]
 fn xdp() {
 let bytes = include_bytes_aligned!("../../../../target/bpfel-unknown-none/release/pass");
 let mut bpf = Bpf::load(bytes).unwrap();
@@ -16,7 +16,7 @@ fn xdp() {
 dispatcher.attach("lo", XdpFlags::default()).unwrap();
 }
-#[integration_test]
+#[test]
 fn extension() {
 let (major, minor, _) = kernel_version().unwrap();
 if major < 5 || (minor == 5 && minor < 9) {

@@ -5,11 +5,12 @@ authors = ["Alessandro Decina <alessandro.d@gmail.com>"]
 edition = "2021"
 [dependencies]
+anyhow = "1"
 aya-tool = { path = "../aya-tool" }
+cargo_metadata = "0.15.4"
 clap = { version = "4", features = ["derive"] }
-anyhow = "1"
-syn = "2"
-quote = "1"
-proc-macro2 = "1"
 indoc = "2.0"
+proc-macro2 = "1"
+quote = "1"
 serde_json = "1"
+syn = "2"

@@ -4,7 +4,7 @@ use std::{
 ffi::{OsStr, OsString},
 fs,
 path::{Path, PathBuf},
-process::Command,
+process::{Command, Output},
 };
 use anyhow::{bail, Context};
@@ -148,16 +148,20 @@ fn compile_with_clang<P: Clone + AsRef<Path>>(
 .arg("-o")
 .arg(out.as_ref().as_os_str());
-let output = cmd.output().context("Failed to execute clang")?;
-if !output.status.success() {
+let Output {
+status,
+stdout,
+stderr,
+} = cmd.output().context("Failed to execute clang")?;
+if !status.success() {
 bail!(
 "Failed to compile eBPF programs\n \
 stdout=\n \
 {}\n \
 stderr=\n \
 {}\n",
-String::from_utf8(output.stdout).unwrap(),
-String::from_utf8(output.stderr).unwrap()
+String::from_utf8(stdout).unwrap(),
+String::from_utf8(stderr).unwrap()
 );
 }

@@ -1,10 +1,13 @@
 use std::{
-os::unix::process::CommandExt,
-path::{Path, PathBuf},
-process::Command,
+fmt::Write as _,
+io::BufReader,
+os::unix::process::CommandExt as _,
+path::PathBuf,
+process::{Command, Stdio},
 };
 use anyhow::Context as _;
+use cargo_metadata::{Artifact, CompilerMessage, Message};
 use clap::Parser;
 use crate::build_ebpf::{build_ebpf, Architecture, BuildEbpfOptions as BuildOptions};
@@ -29,19 +32,54 @@ pub struct Options {
 }
 /// Build the project
-fn build(release: bool) -> Result<(), anyhow::Error> {
+fn build(release: bool) -> Result<PathBuf, anyhow::Error> {
 let mut cmd = Command::new("cargo");
-cmd.arg("build").arg("-p").arg("integration-test");
+cmd.arg("build")
+.arg("--tests")
+.arg("--message-format=json")
+.arg("--package=integration-test");
 if release {
 cmd.arg("--release");
 }
+let mut cmd = cmd
+.stdout(Stdio::piped())
+.spawn()
+.with_context(|| format!("failed to spawn {cmd:?}"))?;
-let status = cmd.status().expect("failed to build userspace");
+let reader = BufReader::new(cmd.stdout.take().unwrap());
+let mut executables = Vec::new();
+let mut compiler_messages = String::new();
+for message in Message::parse_stream(reader) {
+#[allow(clippy::collapsible_match)]
+match message.context("valid JSON")? {
+Message::CompilerArtifact(Artifact { executable, .. }) => {
+if let Some(executable) = executable {
+executables.push(executable);
+}
+}
+Message::CompilerMessage(CompilerMessage { message, .. }) => {
+assert_eq!(writeln!(&mut compiler_messages, "{message}"), Ok(()));
+}
+_ => {}
+}
+}
+let status = cmd
+.wait()
+.with_context(|| format!("failed to wait for {cmd:?}"))?;
 match status.code() {
 Some(code) => match code {
-0 => Ok(()),
-code => Err(anyhow::anyhow!("{cmd:?} exited with status code: {code}")),
+0 => match &*executables.into_boxed_slice() {
+[executable] => Ok(executable.into()),
+executables => Err(anyhow::anyhow!(
+"expected one executable, found {:?}",
+executables
+)),
+},
+code => Err(anyhow::anyhow!(
+"{cmd:?} exited with status code {code}:\n{compiler_messages}"
+)),
 },
 None => Err(anyhow::anyhow!("process terminated by signal")),
 }
@@ -63,15 +101,15 @@ pub fn run(opts: Options) -> Result<(), anyhow::Error> {
 libbpf_dir,
 })
 .context("Error while building eBPF program")?;
-build(release).context("Error while building userspace application")?;
-// profile we are building (release or debug)
-let profile = if release { "release" } else { "debug" };
-let bin_path = Path::new("target").join(profile).join("integration-test");
+let bin_path = build(release).context("Error while building userspace application")?;
 let mut args = runner.trim().split_terminator(' ');
-let mut cmd = Command::new(args.next().expect("No first argument"));
-cmd.args(args).arg(bin_path).args(run_args);
+let runner = args.next().ok_or(anyhow::anyhow!("no first argument"))?;
+let mut cmd = Command::new(runner);
+cmd.args(args)
+.arg(bin_path)
+.args(run_args)
+.arg("--test-threads=1");
 // spawn the command
 let err = cmd.exec();
