Merge pull request #350 from dave-tucker/monorepo

Bring aya-log into aya, creating a Monorepo
Authored by Michal Rostecki, committed by GitHub
commit f37a51433f

@@ -1,5 +1,7 @@
 [alias]
 xtask = "run --package xtask --"
+build-bpfel = "build -Zbuild-std=core --target=bpfel-unknown-none"
+build-bpfeb = "build -Zbuild-std=core --target=bpfeb-unknown-none"

 [target.armv7-unknown-linux-gnueabi]
 linker = "arm-linux-gnueabi-gcc"

@@ -31,59 +31,19 @@ jobs:
       - uses: actions-rs/toolchain@v1
         with:
           toolchain: nightly
+          components: rust-src
           override: true
       - uses: Swatinem/rust-cache@v1
       - name: Prereqs
-        run: cargo install cross --git https://github.com/cross-rs/cross
+        run: cargo install bpf-linker
       - name: Build
         env:
           CARGO_CFG_BPF_TARGET_ARCH: ${{ matrix.arch }}
         run: |
-          pushd bpf
-          cargo build --workspace --exclude aya-bpf-macros --verbose
-          popd
+          cargo build-bpfel -p aya-bpf --verbose
+          cargo build-bpfeb -p aya-bpf --verbose
+          cargo build-bpfel -p aya-log-ebpf --verbose
+          cargo build-bpfeb -p aya-log-ebpf --verbose
-      - name: Run tests
-        env:
-          CARGO_CFG_BPF_TARGET_ARCH: ${{ matrix.arch }}
-        run: |
-          pushd bpf
-          cargo test --workspace --exclude aya-bpf-macros --verbose
-          popd
-  build-macros:
-    strategy:
-      matrix:
-        arch:
-          - x86_64-unknown-linux-gnu
-          - aarch64-unknown-linux-gnu
-          - armv7-unknown-linux-gnueabi
-          - riscv64gc-unknown-none-elf
-    runs-on: ubuntu-20.04
-    steps:
-      - uses: actions/checkout@v2
-      - uses: actions-rs/toolchain@v1
-        with:
-          toolchain: nightly
-          override: true
-      - uses: Swatinem/rust-cache@v1
-      - name: Prereqs
-        run: cargo install cross --git https://github.com/cross-rs/cross
-      - name: Build bpf macros
-        run: |
-          pushd bpf
-          cross build -p aya-bpf-macros --verbose
-          popd
-      - name: Test bpf macros
-        run: |
-          pushd bpf
-          RUST_BACKTRACE=full cross test -p aya-bpf-macros --verbose
-          popd

@@ -21,19 +21,28 @@ jobs:
           - x86_64-unknown-linux-gnu
           - aarch64-unknown-linux-gnu
           - armv7-unknown-linux-gnueabi
-          - riscv64gc-unknown-none-elf
+          - riscv64gc-unknown-linux-gnu
     runs-on: ubuntu-20.04
     steps:
       - uses: actions/checkout@v2
+      - uses: actions-rs/toolchain@v1
+        with:
+          toolchain: stable
+          override: true
       - uses: Swatinem/rust-cache@v1
       - name: Prereqs
         run: cargo install cross --git https://github.com/cross-rs/cross
       - name: Build
-        run: cross build --verbose
+        run: cross build --verbose --target ${{matrix.arch}}
       - name: Run test
-        run: RUST_BACKTRACE=full cross test --verbose
+        env:
+          RUST_BACKTRACE: full
+        run: |
+          cross test --verbose --target ${{matrix.arch}}

   test:
     runs-on: ubuntu-20.04
@@ -60,12 +69,10 @@ jobs:
           sudo apt-get -qy install linux-tools-common qemu-system-x86 cloud-image-utils openssh-client libelf-dev gcc-multilib
           cargo install bpf-linker
       - name: Lint integration tests
         run: |
           cargo xtask build-integration-test-ebpf --libbpf-dir ./libbpf
           cargo clippy -p integration-test -- --deny warnings
-          cargo clippy -p integration-test-macros -- --deny warnings
       - name: Run integration tests
         run: |

@@ -30,22 +30,11 @@ jobs:
       - name: Check formatting
         run: |
           cargo fmt --all -- --check
-          (cd bpf && cargo fmt --all -- --check)
-          (cd test/integration-ebpf && cargo fmt --all -- --check)

       - name: Run clippy
         run: |
-          cargo clippy -p aya -- --deny warnings
-          cargo clippy -p aya-gen -- --deny warnings
-          cargo clippy -p xtask -- --deny warnings
-          (cd bpf && cargo clippy -p aya-bpf -- --deny warnings)
-          (cd test/integration-ebpf && cargo clippy -- --deny warnings)
+          cargo clippy --workspace --exclude integration-test -- --deny warnings

       - name: Run miri
-        env:
-          MIRIFLAGS: -Zmiri-disable-stacked-borrows
         run: |
           cargo miri test --all-targets
-          pushd bpf
-          cargo miri test
-          popd

@@ -1,4 +1,4 @@
 {
-    "rust-analyzer.linkedProjects": ["Cargo.toml", "bpf/Cargo.toml", "test/integration-ebpf/Cargo.toml"],
-    "rust-analyzer.checkOnSave.allTargets": false
+    "rust-analyzer.checkOnSave.allTargets": false,
+    "rust-analyzer.checkOnSave.command": "clippy"
 }

@@ -1,3 +1,22 @@
 [workspace]
-members = ["aya", "aya-gen", "test/integration-test", "test/integration-test-macros", "xtask"]
-default-members = ["aya", "aya-gen"]
+members = [
+    "aya", "aya-gen", "aya-log", "aya-log-common", "test/integration-test", "test/integration-test-macros", "xtask",
+    # macros
+    "aya-bpf-macros", "aya-log-ebpf-macros",
+    # ebpf crates
+    "bpf/aya-bpf", "bpf/aya-bpf-bindings", "bpf/aya-log-ebpf", "test/integration-ebpf"
+]
+default-members = ["aya", "aya-gen", "aya-log", "aya-bpf-macros", "aya-log-ebpf-macros"]
+
+[profile.dev]
+panic = "abort"
+
+[profile.release]
+panic = "abort"
+
+[profile.dev.package.integration-ebpf]
+opt-level = 2
+overflow-checks = false
+
+[profile.release.package.integration-ebpf]
+debug = 2

@@ -13,4 +13,4 @@ quote = "1.0"
 syn = {version = "1.0", features = ["full"]}

 [dev-dependencies]
-aya-bpf = { path = "../aya-bpf" }
+aya-bpf = { path = "../bpf/aya-bpf" }

@@ -0,0 +1,20 @@
[package]
name = "aya-log-common"
version = "0.1.11-dev.0"
description = "A logging library for eBPF programs."
keywords = ["ebpf", "bpf", "log", "logging"]
license = "MIT OR Apache-2.0"
authors = ["The Aya Contributors"]
repository = "https://github.com/aya-rs/aya-log"
documentation = "https://docs.rs/aya-log"
edition = "2018"

[features]
default = []
userspace = [ "aya" ]

[dependencies]
aya = { path = "../aya", version = "0.11.0", optional=true }

[lib]
path = "src/lib.rs"

@@ -0,0 +1 @@
shared-version = true

@@ -0,0 +1,185 @@
#![no_std]
use core::{cmp, mem, ptr};
pub const LOG_BUF_CAPACITY: usize = 8192;
pub const LOG_FIELDS: usize = 7;
#[repr(usize)]
#[derive(Copy, Clone, Eq, PartialEq, Debug, Hash)]
pub enum Level {
/// The "error" level.
///
/// Designates very serious errors.
Error = 1,
/// The "warn" level.
///
/// Designates hazardous situations.
Warn,
/// The "info" level.
///
/// Designates useful information.
Info,
/// The "debug" level.
///
/// Designates lower priority information.
Debug,
/// The "trace" level.
///
/// Designates very low priority, often extremely verbose, information.
Trace,
}
#[repr(usize)]
#[derive(Copy, Clone, Debug)]
pub enum RecordField {
Target = 1,
Level,
Module,
File,
Line,
NumArgs,
Log,
}
#[repr(usize)]
#[derive(Copy, Clone, Debug)]
pub enum ArgType {
I8,
I16,
I32,
I64,
I128,
Isize,
U8,
U16,
U32,
U64,
U128,
Usize,
F32,
F64,
Str,
}
#[cfg(feature = "userspace")]
mod userspace {
use super::*;
unsafe impl aya::Pod for RecordField {}
unsafe impl aya::Pod for ArgType {}
}
struct TagLenValue<'a, T> {
tag: T,
value: &'a [u8],
}
impl<'a, T> TagLenValue<'a, T>
where
T: Copy,
{
#[inline(always)]
pub(crate) fn new(tag: T, value: &'a [u8]) -> TagLenValue<'a, T> {
TagLenValue { tag, value }
}
pub(crate) fn write(&self, mut buf: &mut [u8]) -> Result<usize, ()> {
let size = mem::size_of::<T>() + mem::size_of::<usize>() + self.value.len();
let remaining = cmp::min(buf.len(), LOG_BUF_CAPACITY);
// Check if the size doesn't exceed the buffer bounds.
if size > remaining {
return Err(());
}
unsafe { ptr::write_unaligned(buf.as_mut_ptr() as *mut _, self.tag) };
buf = &mut buf[mem::size_of::<T>()..];
unsafe { ptr::write_unaligned(buf.as_mut_ptr() as *mut _, self.value.len()) };
buf = &mut buf[mem::size_of::<usize>()..];
let len = cmp::min(buf.len(), self.value.len());
// The verifier isn't happy with `len` being unbounded, so compare it
// with `LOG_BUF_CAPACITY`.
if len > LOG_BUF_CAPACITY {
return Err(());
}
buf[..len].copy_from_slice(&self.value[..len]);
Ok(size)
}
}
pub trait WriteToBuf {
#[allow(clippy::result_unit_err)]
fn write(&self, buf: &mut [u8]) -> Result<usize, ()>;
}
macro_rules! impl_write_to_buf {
($type:ident, $arg_type:expr) => {
impl WriteToBuf for $type {
fn write(&self, buf: &mut [u8]) -> Result<usize, ()> {
TagLenValue::<ArgType>::new($arg_type, &self.to_ne_bytes()).write(buf)
}
}
};
}
impl_write_to_buf!(i8, ArgType::I8);
impl_write_to_buf!(i16, ArgType::I16);
impl_write_to_buf!(i32, ArgType::I32);
impl_write_to_buf!(i64, ArgType::I64);
impl_write_to_buf!(i128, ArgType::I128);
impl_write_to_buf!(isize, ArgType::Isize);
impl_write_to_buf!(u8, ArgType::U8);
impl_write_to_buf!(u16, ArgType::U16);
impl_write_to_buf!(u32, ArgType::U32);
impl_write_to_buf!(u64, ArgType::U64);
impl_write_to_buf!(u128, ArgType::U128);
impl_write_to_buf!(usize, ArgType::Usize);
impl_write_to_buf!(f32, ArgType::F32);
impl_write_to_buf!(f64, ArgType::F64);
impl WriteToBuf for str {
fn write(&self, buf: &mut [u8]) -> Result<usize, ()> {
TagLenValue::<ArgType>::new(ArgType::Str, self.as_bytes()).write(buf)
}
}
#[allow(clippy::result_unit_err)]
#[doc(hidden)]
#[inline(always)]
pub fn write_record_header(
buf: &mut [u8],
target: &str,
level: Level,
module: &str,
file: &str,
line: u32,
num_args: usize,
) -> Result<usize, ()> {
let mut size = 0;
for attr in [
TagLenValue::<RecordField>::new(RecordField::Target, target.as_bytes()),
TagLenValue::<RecordField>::new(RecordField::Level, &(level as usize).to_ne_bytes()),
TagLenValue::<RecordField>::new(RecordField::Module, module.as_bytes()),
TagLenValue::<RecordField>::new(RecordField::File, file.as_bytes()),
TagLenValue::<RecordField>::new(RecordField::Line, &line.to_ne_bytes()),
TagLenValue::<RecordField>::new(RecordField::NumArgs, &num_args.to_ne_bytes()),
] {
size += attr.write(&mut buf[size..])?;
}
Ok(size)
}
#[allow(clippy::result_unit_err)]
#[doc(hidden)]
pub fn write_record_message(buf: &mut [u8], msg: &str) -> Result<usize, ()> {
TagLenValue::<RecordField>::new(RecordField::Log, msg.as_bytes()).write(buf)
}
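
The format implemented above is a plain tag-length-value encoding: each record field or argument is written as its tag (`RecordField` or `ArgType`, both `repr(usize)`), a `usize` length, and the raw value bytes, all native-endian. The snippet below is a minimal userspace sketch of that layout for a single `u32` argument; it is not part of this change, and the buffer size and value are arbitrary (a 64-bit host is assumed).

```rust
use core::{convert::TryInto, mem};

use aya_log_common::{ArgType, WriteToBuf};

fn main() {
    let mut buf = [0u8; 64];

    // Serialize one u32 argument into the buffer.
    let written = 0xABCD_EF01u32.write(&mut buf).unwrap();

    // Layout: tag (ArgType, repr(usize)) + length (usize) + 4 value bytes.
    assert_eq!(
        written,
        mem::size_of::<ArgType>() + mem::size_of::<usize>() + mem::size_of::<u32>()
    );

    // The length word records how many value bytes follow.
    let len_start = mem::size_of::<ArgType>();
    let len_end = len_start + mem::size_of::<usize>();
    let len = usize::from_ne_bytes(buf[len_start..len_end].try_into().unwrap());
    assert_eq!(len, mem::size_of::<u32>());
}
```

`aya-log` walks the same layout back on the user-space side with `TagLenValue::try_read`.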

@@ -0,0 +1,12 @@
[package]
name = "aya-log-ebpf-macros"
version = "0.1.0"
edition = "2018"

[dependencies]
proc-macro2 = "1.0"
quote = "1.0"
syn = "1.0"

[lib]
proc-macro = true

@@ -0,0 +1,189 @@
use proc_macro2::TokenStream;
use quote::quote;
use syn::{
parse::{Parse, ParseStream},
punctuated::Punctuated,
Error, Expr, LitStr, Result, Token,
};
pub(crate) struct LogArgs {
pub(crate) ctx: Expr,
pub(crate) target: Option<Expr>,
pub(crate) level: Option<Expr>,
pub(crate) format_string: LitStr,
pub(crate) formatting_args: Option<Punctuated<Expr, Token![,]>>,
}
mod kw {
syn::custom_keyword!(target);
}
impl Parse for LogArgs {
fn parse(input: ParseStream) -> Result<Self> {
let ctx: Expr = input.parse()?;
input.parse::<Token![,]>()?;
// Parse `target: &str`, which is an optional argument.
let target: Option<Expr> = if input.peek(kw::target) {
input.parse::<kw::target>()?;
input.parse::<Token![:]>()?;
let t: Expr = input.parse()?;
input.parse::<Token![,]>()?;
Some(t)
} else {
None
};
// Check whether the next token is `format_string: &str` (which is
// always provided) or `level` (which is an optional expression).
// If `level` is provided, it comes before `format_string`.
let (level, format_string): (Option<Expr>, LitStr) = if input.peek(LitStr) {
// Only `format_string` is provided.
(None, input.parse()?)
} else {
// Both `level` and `format_string` are provided.
let level: Expr = input.parse()?;
input.parse::<Token![,]>()?;
let format_string: LitStr = input.parse()?;
(Some(level), format_string)
};
// Parse variadic arguments.
let formatting_args: Option<Punctuated<Expr, Token![,]>> = if input.is_empty() {
None
} else {
input.parse::<Token![,]>()?;
Some(Punctuated::parse_terminated(input)?)
};
Ok(Self {
ctx,
target,
level,
format_string,
formatting_args,
})
}
}
pub(crate) fn log(args: LogArgs, level: Option<TokenStream>) -> Result<TokenStream> {
let ctx = args.ctx;
let target = match args.target {
Some(t) => quote! { #t },
None => quote! { module_path!() },
};
let lvl: TokenStream = if let Some(l) = level {
l
} else if let Some(l) = args.level {
quote! { #l }
} else {
return Err(Error::new(
args.format_string.span(),
"missing `level` argument: try passing an `aya_log_ebpf::Level` value",
));
};
let format_string = args.format_string;
let (num_args, write_args) = match args.formatting_args {
Some(formatting_args) => {
let formatting_exprs = formatting_args.iter();
let num_args = formatting_exprs.len();
let write_args = quote! {{
use ::aya_log_ebpf::WriteToBuf;
Ok::<_, ()>(record_len) #( .and_then(|record_len| {
if record_len >= buf.buf.len() {
return Err(());
}
{ #formatting_exprs }.write(&mut buf.buf[record_len..]).map(|len| record_len + len)
}) )*
}};
(num_args, write_args)
}
None => (0, quote! {}),
};
// The way of writing to the perf buffer is different depending on whether
// we have variadic arguments or not.
let write_to_perf_buffer = if num_args > 0 {
// Writing with variadic arguments.
quote! {
if let Ok(record_len) = #write_args {
unsafe { ::aya_log_ebpf::AYA_LOGS.output(
#ctx,
&buf.buf[..record_len], 0
)}
}
}
} else {
// Writing with no variadic arguments.
quote! {
unsafe { ::aya_log_ebpf::AYA_LOGS.output(
#ctx,
&buf.buf[..record_len], 0
)}
}
};
Ok(quote! {
{
if let Some(buf_ptr) = unsafe { ::aya_log_ebpf::AYA_LOG_BUF.get_ptr_mut(0) } {
let buf = unsafe { &mut *buf_ptr };
if let Ok(header_len) = ::aya_log_ebpf::write_record_header(
&mut buf.buf,
#target,
#lvl,
module_path!(),
file!(),
line!(),
#num_args,
) {
if let Ok(message_len) = ::aya_log_ebpf::write_record_message(
&mut buf.buf[header_len..],
#format_string,
) {
let record_len = header_len + message_len;
#write_to_perf_buffer
}
}
}
}
})
}
pub(crate) fn error(args: LogArgs) -> Result<TokenStream> {
log(
args,
Some(quote! { ::aya_log_ebpf::macro_support::Level::Error }),
)
}
pub(crate) fn warn(args: LogArgs) -> Result<TokenStream> {
log(
args,
Some(quote! { ::aya_log_ebpf::macro_support::Level::Warn }),
)
}
pub(crate) fn info(args: LogArgs) -> Result<TokenStream> {
log(
args,
Some(quote! { ::aya_log_ebpf::macro_support::Level::Info }),
)
}
pub(crate) fn debug(args: LogArgs) -> Result<TokenStream> {
log(
args,
Some(quote! { ::aya_log_ebpf::macro_support::Level::Debug }),
)
}
pub(crate) fn trace(args: LogArgs) -> Result<TokenStream> {
log(
args,
Some(quote! { ::aya_log_ebpf::macro_support::Level::Trace }),
)
}
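
The grammar accepted by this parser is: the context expression first, then an optional `target: <expr>`, then an optional level expression (required by the bare `log!` macro, implied by the named wrappers), then the format string, then any formatting arguments. A hypothetical call site, assuming an `XdpContext` named `ctx` and a previously parsed `port`; this only builds inside a BPF-target crate that depends on `aya-bpf` and `aya-log-ebpf`:

```rust
use aya_bpf::programs::XdpContext;
use aya_log_ebpf::{info, log, warn, Level};

fn handle(ctx: &XdpContext, port: u16) {
    // Level implied by the macro name; target defaults to module_path!().
    info!(ctx, "new packet seen");

    // An explicit target: overrides module_path!().
    warn!(ctx, target: "firewall", "blocked port: {}", port);

    // The bare log! form takes the level as an expression.
    log!(ctx, Level::Debug, "dispatching port: {}", port);
}
```

Each `{}` argument is serialized with `WriteToBuf` into the per-CPU `AYA_LOG_BUF` slot before the record is pushed to the `AYA_LOGS` perf array.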

@@ -0,0 +1,52 @@
use proc_macro::TokenStream;
use syn::parse_macro_input;
mod expand;
#[proc_macro]
pub fn log(args: TokenStream) -> TokenStream {
let args = parse_macro_input!(args as expand::LogArgs);
expand::log(args, None)
.unwrap_or_else(|err| err.to_compile_error())
.into()
}
#[proc_macro]
pub fn error(args: TokenStream) -> TokenStream {
let args = parse_macro_input!(args as expand::LogArgs);
expand::error(args)
.unwrap_or_else(|err| err.to_compile_error())
.into()
}
#[proc_macro]
pub fn warn(args: TokenStream) -> TokenStream {
let args = parse_macro_input!(args as expand::LogArgs);
expand::warn(args)
.unwrap_or_else(|err| err.to_compile_error())
.into()
}
#[proc_macro]
pub fn info(args: TokenStream) -> TokenStream {
let args = parse_macro_input!(args as expand::LogArgs);
expand::info(args)
.unwrap_or_else(|err| err.to_compile_error())
.into()
}
#[proc_macro]
pub fn debug(args: TokenStream) -> TokenStream {
let args = parse_macro_input!(args as expand::LogArgs);
expand::debug(args)
.unwrap_or_else(|err| err.to_compile_error())
.into()
}
#[proc_macro]
pub fn trace(args: TokenStream) -> TokenStream {
let args = parse_macro_input!(args as expand::LogArgs);
expand::trace(args)
.unwrap_or_else(|err| err.to_compile_error())
.into()
}

@@ -0,0 +1,27 @@
[package]
name = "aya-log"
version = "0.1.11-dev.0"
description = "A logging library for eBPF programs."
keywords = ["ebpf", "bpf", "log", "logging"]
license = "MIT OR Apache-2.0"
authors = ["The Aya Contributors"]
repository = "https://github.com/aya-rs/aya-log"
readme = "README.md"
documentation = "https://docs.rs/aya-log"
edition = "2018"

[dependencies]
aya = { path = "../aya", version = "0.11.0", features=["async_tokio"] }
aya-log-common = { path = "../aya-log-common", version = "0.1.11-dev.0", features=["userspace"] }
dyn-fmt = "0.3.0"
thiserror = "1"
log = "0.4"
bytes = "1.1"
tokio = { version = "1.2.0" }

[dev-dependencies]
simplelog = "0.12"
testing_logger = "0.1.1"

[lib]
path = "src/lib.rs"

@@ -0,0 +1,73 @@
# aya-log - a logging library for eBPF programs
## Overview
`aya-log` is a logging library for eBPF programs written using [aya]. Think of
it as the [log] crate for eBPF.
## Installation
### User space
Add `aya-log` to `Cargo.toml`:
```toml
[dependencies]
aya-log = { git = "https://github.com/aya-rs/aya-log", branch = "main" }
```
### eBPF side
Add `aya-log-ebpf` to `Cargo.toml`:
```toml
[dependencies]
aya-log-ebpf = { git = "https://github.com/aya-rs/aya-log", branch = "main" }
```
## Example
Here's an example that uses `aya-log` in conjunction with the [simplelog] crate
to log eBPF messages to the terminal.
### User space code
```rust
use simplelog::{ColorChoice, ConfigBuilder, LevelFilter, TermLogger, TerminalMode};
use aya_log::BpfLogger;
TermLogger::init(
LevelFilter::Debug,
ConfigBuilder::new()
.set_target_level(LevelFilter::Error)
.set_location_level(LevelFilter::Error)
.build(),
TerminalMode::Mixed,
ColorChoice::Auto,
)
.unwrap();
// Will log using the default logger, which is TermLogger in this case
BpfLogger::init(&mut bpf).unwrap();
```
### eBPF code
```rust
use aya_log_ebpf::info;
fn try_xdp_firewall(ctx: XdpContext) -> Result<u32, ()> {
if let Some(port) = tcp_dest_port(&ctx)? {
if block_port(port) {
info!(&ctx, "❌ blocked incoming connection on port: {}", port);
return Ok(XDP_DROP);
}
}
Ok(XDP_PASS)
}
```
[aya]: https://github.com/aya-rs/aya
[log]: https://docs.rs/log
[simplelog]: https://docs.rs/simplelog

@@ -0,0 +1 @@
shared-version = true

@@ -0,0 +1,381 @@
//! A logging framework for eBPF programs.
//!
//! This is the user space side of the [Aya] logging framework. For the eBPF
//! side, see the `aya-log-ebpf` crate.
//!
//! `aya-log` provides the [BpfLogger] type, which reads log records created by
//! `aya-log-ebpf` and logs them using the [log] crate. Any logger that
//! implements the [Log] trait can be used with this crate.
//!
//! # Example:
//!
//! This example uses the [simplelog] crate to log messages to the terminal.
//!
//! ```no_run
//! # let mut bpf = aya::Bpf::load(&[]).unwrap();
//! use simplelog::{ColorChoice, ConfigBuilder, LevelFilter, TermLogger, TerminalMode};
//! use aya_log::BpfLogger;
//!
//! // initialize simplelog::TermLogger as the default logger
//! TermLogger::init(
//! LevelFilter::Debug,
//! ConfigBuilder::new()
//! .set_target_level(LevelFilter::Error)
//! .set_location_level(LevelFilter::Error)
//! .build(),
//! TerminalMode::Mixed,
//! ColorChoice::Auto,
//! )
//! .unwrap();
//!
//! // start reading aya-log records and log them using the default logger
//! BpfLogger::init(&mut bpf).unwrap();
//! ```
//!
//! With the following eBPF code:
//!
//! ```ignore
//! # let ctx = ();
//! use aya_log_ebpf::{debug, error, info, trace, warn};
//!
//! error!(&ctx, "this is an error message 🚨");
//! warn!(&ctx, "this is a warning message ⚠️");
//! info!(&ctx, "this is an info message ");
//! debug!(&ctx, "this is a debug message ️🐝");
//! trace!(&ctx, "this is a trace message 🔍");
//! ```
//! Outputs:
//!
//! ```text
//! 21:58:55 [ERROR] xxx: [src/main.rs:35] this is an error message 🚨
//! 21:58:55 [WARN] xxx: [src/main.rs:36] this is a warning message ⚠️
//! 21:58:55 [INFO] xxx: [src/main.rs:37] this is an info message
//! 21:58:55 [DEBUG] (7) xxx: [src/main.rs:38] this is a debug message ️🐝
//! 21:58:55 [TRACE] (7) xxx: [src/main.rs:39] this is a trace message 🔍
//! ```
//!
//! [Aya]: https://docs.rs/aya
//! [simplelog]: https://docs.rs/simplelog
//! [Log]: https://docs.rs/log/0.4.14/log/trait.Log.html
//! [log]: https://docs.rs/log
//!
use std::{convert::TryInto, io, mem, ptr, str, sync::Arc};
use aya_log_common::{ArgType, RecordField, LOG_BUF_CAPACITY, LOG_FIELDS};
use bytes::BytesMut;
use dyn_fmt::AsStrFormatExt;
use log::{error, Level, Log, Record};
use thiserror::Error;
use aya::{
maps::{
perf::{AsyncPerfEventArray, PerfBufferError},
MapError,
},
util::online_cpus,
Bpf, Pod,
};
/// Log messages generated by `aya_log_ebpf` using the [log] crate.
///
/// For more details see the [module level documentation](crate).
pub struct BpfLogger;
impl BpfLogger {
/// Starts reading log records created with `aya-log-ebpf` and logs them
/// with the default logger. See [log::logger].
pub fn init(bpf: &mut Bpf) -> Result<BpfLogger, Error> {
BpfLogger::init_with_logger(bpf, DefaultLogger {})
}
/// Starts reading log records created with `aya-log-ebpf` and logs them
/// with the given logger.
pub fn init_with_logger<T: Log + 'static>(
bpf: &mut Bpf,
logger: T,
) -> Result<BpfLogger, Error> {
let logger = Arc::new(logger);
let mut logs: AsyncPerfEventArray<_> = bpf.map_mut("AYA_LOGS")?.try_into()?;
for cpu_id in online_cpus().map_err(Error::InvalidOnlineCpu)? {
let mut buf = logs.open(cpu_id, None)?;
let log = logger.clone();
tokio::spawn(async move {
let mut buffers = (0..10)
.map(|_| BytesMut::with_capacity(LOG_BUF_CAPACITY))
.collect::<Vec<_>>();
loop {
let events = buf.read_events(&mut buffers).await.unwrap();
#[allow(clippy::needless_range_loop)]
for i in 0..events.read {
let buf = &mut buffers[i];
log_buf(buf, &*log).unwrap();
}
}
});
}
Ok(BpfLogger {})
}
}
#[derive(Copy, Clone, Debug)]
struct DefaultLogger;
impl Log for DefaultLogger {
fn enabled(&self, metadata: &log::Metadata) -> bool {
log::logger().enabled(metadata)
}
fn log(&self, record: &Record) {
log::logger().log(record)
}
fn flush(&self) {
log::logger().flush()
}
}
#[derive(Error, Debug)]
pub enum Error {
#[error("error opening log event array")]
MapError(#[from] MapError),
#[error("error opening log buffer")]
PerfBufferError(#[from] PerfBufferError),
#[error("invalid /sys/devices/system/cpu/online format")]
InvalidOnlineCpu(#[source] io::Error),
}
fn log_buf(mut buf: &[u8], logger: &dyn Log) -> Result<(), ()> {
let mut target = None;
let mut level = Level::Trace;
let mut module = None;
let mut file = None;
let mut line = None;
let mut log = None;
let mut num_args = None;
for _ in 0..LOG_FIELDS {
let (attr, rest) = unsafe { TagLenValue::<'_, RecordField>::try_read(buf)? };
match attr.tag {
RecordField::Target => {
target = Some(std::str::from_utf8(attr.value).map_err(|_| ())?);
}
RecordField::Level => {
level = unsafe { ptr::read_unaligned(attr.value.as_ptr() as *const _) }
}
RecordField::Module => {
module = Some(std::str::from_utf8(attr.value).map_err(|_| ())?);
}
RecordField::File => {
file = Some(std::str::from_utf8(attr.value).map_err(|_| ())?);
}
RecordField::Line => {
line = Some(u32::from_ne_bytes(attr.value.try_into().map_err(|_| ())?));
}
RecordField::NumArgs => {
num_args = Some(usize::from_ne_bytes(attr.value.try_into().map_err(|_| ())?));
}
RecordField::Log => {
log = Some(std::str::from_utf8(attr.value).map_err(|_| ())?);
}
}
buf = rest;
}
let log_msg = log.ok_or(())?;
let full_log_msg = match num_args {
Some(n) => {
let mut args: Vec<String> = Vec::new();
for _ in 0..n {
let (attr, rest) = unsafe { TagLenValue::<'_, ArgType>::try_read(buf)? };
match attr.tag {
ArgType::I8 => {
args.push(
i8::from_ne_bytes(attr.value.try_into().map_err(|_| ())?).to_string(),
);
}
ArgType::I16 => {
args.push(
i16::from_ne_bytes(attr.value.try_into().map_err(|_| ())?).to_string(),
);
}
ArgType::I32 => {
args.push(
i32::from_ne_bytes(attr.value.try_into().map_err(|_| ())?).to_string(),
);
}
ArgType::I64 => {
args.push(
i64::from_ne_bytes(attr.value.try_into().map_err(|_| ())?).to_string(),
);
}
ArgType::I128 => {
args.push(
i128::from_ne_bytes(attr.value.try_into().map_err(|_| ())?).to_string(),
);
}
ArgType::Isize => {
args.push(
isize::from_ne_bytes(attr.value.try_into().map_err(|_| ())?)
.to_string(),
);
}
ArgType::U8 => {
args.push(
u8::from_ne_bytes(attr.value.try_into().map_err(|_| ())?).to_string(),
);
}
ArgType::U16 => {
args.push(
u16::from_ne_bytes(attr.value.try_into().map_err(|_| ())?).to_string(),
);
}
ArgType::U32 => {
args.push(
u32::from_ne_bytes(attr.value.try_into().map_err(|_| ())?).to_string(),
);
}
ArgType::U64 => {
args.push(
u64::from_ne_bytes(attr.value.try_into().map_err(|_| ())?).to_string(),
);
}
ArgType::U128 => {
args.push(
u128::from_ne_bytes(attr.value.try_into().map_err(|_| ())?).to_string(),
);
}
ArgType::Usize => {
args.push(
usize::from_ne_bytes(attr.value.try_into().map_err(|_| ())?)
.to_string(),
);
}
ArgType::F32 => {
args.push(
f32::from_ne_bytes(attr.value.try_into().map_err(|_| ())?).to_string(),
);
}
ArgType::F64 => {
args.push(
f64::from_ne_bytes(attr.value.try_into().map_err(|_| ())?).to_string(),
);
}
ArgType::Str => match str::from_utf8(attr.value) {
Ok(v) => args.push(v.to_string()),
Err(e) => error!("received invalid utf8 string: {}", e),
},
}
buf = rest;
}
log_msg.format(&args)
}
None => log_msg.to_string(),
};
logger.log(
&Record::builder()
.args(format_args!("{}", full_log_msg))
.target(target.ok_or(())?)
.level(level)
.module_path(module)
.file(file)
.line(line)
.build(),
);
logger.flush();
Ok(())
}
struct TagLenValue<'a, T: Pod> {
tag: T,
value: &'a [u8],
}
impl<'a, T: Pod> TagLenValue<'a, T> {
unsafe fn try_read(mut buf: &'a [u8]) -> Result<(TagLenValue<'a, T>, &'a [u8]), ()> {
if buf.len() < mem::size_of::<T>() + mem::size_of::<usize>() {
return Err(());
}
let tag = ptr::read_unaligned(buf.as_ptr() as *const T);
buf = &buf[mem::size_of::<T>()..];
let len = usize::from_ne_bytes(buf[..mem::size_of::<usize>()].try_into().unwrap());
buf = &buf[mem::size_of::<usize>()..];
if buf.len() < len {
return Err(());
}
Ok((
TagLenValue {
tag,
value: &buf[..len],
},
&buf[len..],
))
}
}
#[cfg(test)]
mod test {
use super::*;
use aya_log_common::{write_record_header, write_record_message, WriteToBuf};
use log::logger;
use testing_logger;
fn new_log(msg: &str, args: usize) -> Result<(usize, Vec<u8>), ()> {
let mut buf = vec![0; 8192];
let mut len = write_record_header(
&mut buf,
"test",
aya_log_common::Level::Info,
"test",
"test.rs",
123,
args,
)?;
len += write_record_message(&mut buf[len..], msg)?;
Ok((len, buf))
}
#[test]
fn test_str() {
testing_logger::setup();
let (_, input) = new_log("test", 0).unwrap();
let logger = logger();
let _ = log_buf(&input, logger);
testing_logger::validate(|captured_logs| {
assert_eq!(captured_logs.len(), 1);
assert_eq!(captured_logs[0].body, "test");
assert_eq!(captured_logs[0].level, Level::Info);
});
}
#[test]
fn test_str_with_args() {
testing_logger::setup();
let (len, mut input) = new_log("hello {}", 1).unwrap();
let name = "test";
(*name).write(&mut input[len..]).unwrap();
let logger = logger();
let _ = log_buf(&input, logger);
testing_logger::validate(|captured_logs| {
assert_eq!(captured_logs.len(), 1);
assert_eq!(captured_logs[0].body, "hello test");
assert_eq!(captured_logs[0].level, Level::Info);
});
}
}
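
Besides the default-logger path shown in the module docs, `init_with_logger` accepts any `log::Log` implementation. A small sketch with a hypothetical logger that forwards records to stderr; it assumes the call happens inside a Tokio runtime, since `BpfLogger` spawns one reader task per online CPU:

```rust
use aya::Bpf;
use aya_log::BpfLogger;
use log::{Log, Metadata, Record};

// Hypothetical logger that writes every eBPF log record to stderr.
struct StderrLogger;

impl Log for StderrLogger {
    fn enabled(&self, _metadata: &Metadata) -> bool {
        true
    }

    fn log(&self, record: &Record) {
        eprintln!("[{}] {}", record.level(), record.args());
    }

    fn flush(&self) {}
}

fn attach_logger(bpf: &mut Bpf) -> Result<(), aya_log::Error> {
    // Must be called from within a Tokio runtime: the per-CPU readers are
    // started with tokio::spawn.
    BpfLogger::init_with_logger(bpf, StderrLogger)?;
    Ok(())
}
```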

@@ -33,7 +33,7 @@
 //! versa. Because of that, all map values must be plain old data and therefore
 //! implement the [Pod] trait.
 use std::{
-    convert::TryFrom,
+    convert::{TryFrom, TryInto},
     ffi::CString,
     fmt, io,
     marker::PhantomData,

@@ -226,7 +226,7 @@ impl AsRawFd for MapFd {
 }

 #[derive(PartialEq, Eq, PartialOrd, Ord)]
-struct RlimitSize(u64);
+struct RlimitSize(usize);
 impl fmt::Display for RlimitSize {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         if self.0 < 1024 {

@@ -246,8 +246,9 @@ fn maybe_warn_rlimit() {
     let ret = unsafe { getrlimit(RLIMIT_MEMLOCK, limit.as_mut_ptr()) };
     if ret == 0 {
         let limit = unsafe { limit.assume_init() };
-        let limit: RlimitSize = RlimitSize(limit.rlim_cur);
-        if limit.0 == RLIM_INFINITY {
+        let limit: RlimitSize = RlimitSize(limit.rlim_cur.try_into().unwrap());
+        if limit.0 == RLIM_INFINITY.try_into().unwrap() {
             return;
         }
         warn!(

@@ -0,0 +1,6 @@
[build]
target-dir = "../target"
target = "bpfel-unknown-none"

[unstable]
build-std = ["core"]

@@ -1,2 +0,0 @@
-[workspace]
-members = ["aya-bpf", "aya-bpf-macros", "aya-bpf-bindings"]

@@ -6,5 +6,8 @@ edition = "2018"
 [dependencies]
 aya-bpf-cty = { path = "../aya-bpf-cty" }
-aya-bpf-macros = { path = "../aya-bpf-macros" }
+aya-bpf-macros = { path = "../../aya-bpf-macros" }
 aya-bpf-bindings = { path = "../aya-bpf-bindings" }
+
+[build-dependencies]
+rustversion = "1.0"

@@ -1,6 +1,7 @@
 use std::env;

 fn main() {
+    check_rust_version();
     println!("cargo:rerun-if-env-changed=CARGO_CFG_BPF_TARGET_ARCH");
     if let Ok(arch) = env::var("CARGO_CFG_BPF_TARGET_ARCH") {
         println!("cargo:rustc-cfg=bpf_target_arch=\"{}\"", arch);

@@ -10,3 +11,11 @@ fn main() {
         println!("cargo:rustc-cfg=bpf_target_arch=\"{}\"", arch);
     }
 }
+
+#[rustversion::nightly]
+fn check_rust_version() {
+    println!("cargo:rustc-cfg=unstable");
+}
+
+#[rustversion::not(nightly)]
+fn check_rust_version() {}

@@ -9,7 +9,7 @@
     html_logo_url = "https://aya-rs.dev/assets/images/crabby.svg",
     html_favicon_url = "https://aya-rs.dev/assets/images/crabby.svg"
 )]
-#![feature(never_type)]
+#![cfg_attr(unstable, feature(never_type))]
 #![allow(clippy::missing_safety_doc)]
 #![no_std]

@@ -80,6 +80,30 @@ impl ProgramArray {
     ///
     /// On success, this function **does not return** into the original program.
     /// On failure, a negative error is returned, wrapped in `Err()`.
+    #[cfg(not(unstable))]
+    pub unsafe fn tail_call<C: BpfContext>(&self, ctx: &C, index: u32) -> Result<(), c_long> {
+        let res = bpf_tail_call(ctx.as_ptr(), self.def.get() as *mut _, index);
+        if res != 0 {
+            Err(res)
+        } else {
+            unreachable_unchecked()
+        }
+    }
+
+    /// Perform a tail call into a program indexed by this map.
+    ///
+    /// # Safety
+    ///
+    /// This function is inherently unsafe, since it causes control flow to jump into
+    /// another eBPF program. This can have side effects, such as drop methods not being
+    /// called. Note that tail calling into an eBPF program is not the same thing as
+    /// a function call -- control flow never returns to the caller.
+    ///
+    /// # Return Value
+    ///
+    /// On success, this function **does not return** into the original program.
+    /// On failure, a negative error is returned, wrapped in `Err()`.
+    #[cfg(unstable)]
     pub unsafe fn tail_call<C: BpfContext>(&self, ctx: &C, index: u32) -> Result<!, c_long> {
         let res = bpf_tail_call(ctx.as_ptr(), self.def.get() as *mut _, index);
         if res != 0 {
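
A sketch of what a call site for `tail_call` can look like in an XDP program; the map name, its size, the index, and the bare `#[xdp]` attribute are illustrative assumptions, and the array is expected to be populated from user space before packets arrive:

```rust
#![no_std]
#![no_main]

use aya_bpf::{
    bindings::xdp_action,
    macros::{map, xdp},
    maps::ProgramArray,
    programs::XdpContext,
};

// Hypothetical jump table; slot 0 is assumed to be filled by user space.
#[map]
static mut JUMP_TABLE: ProgramArray = ProgramArray::with_max_entries(4, 0);

#[xdp]
pub fn dispatcher(ctx: XdpContext) -> u32 {
    // On success the tail call never returns here; on failure (for example an
    // empty slot) we fall through and let the packet continue up the stack.
    let _ = unsafe { JUMP_TABLE.tail_call(&ctx, 0) };
    xdp_action::XDP_PASS
}

#[panic_handler]
fn panic(_info: &core::panic::PanicInfo) -> ! {
    unsafe { core::hint::unreachable_unchecked() }
}
```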

@@ -0,0 +1,12 @@
[package]
name = "aya-log-ebpf"
version = "0.1.0"
edition = "2018"

[dependencies]
aya-bpf = { path = "../aya-bpf" }
aya-log-common = { path = "../../aya-log-common" }
aya-log-ebpf-macros = { path = "../../aya-log-ebpf-macros" }

[lib]
path = "src/lib.rs"

@@ -0,0 +1,29 @@
#![no_std]
use aya_bpf::{
macros::map,
maps::{PerCpuArray, PerfEventByteArray},
};
pub use aya_log_common::{
write_record_header, write_record_message, Level, WriteToBuf, LOG_BUF_CAPACITY,
};
pub use aya_log_ebpf_macros::{debug, error, info, log, trace, warn};
#[doc(hidden)]
#[repr(C)]
pub struct LogBuf {
pub buf: [u8; LOG_BUF_CAPACITY],
}
#[doc(hidden)]
#[map]
pub static mut AYA_LOG_BUF: PerCpuArray<LogBuf> = PerCpuArray::with_max_entries(1, 0);
#[doc(hidden)]
#[map]
pub static mut AYA_LOGS: PerfEventByteArray = PerfEventByteArray::new(0);
#[doc(hidden)]
pub mod macro_support {
pub use aya_log_common::{Level, LOG_BUF_CAPACITY};
pub use aya_log_ebpf_macros::log;
}

@@ -0,0 +1,2 @@
[toolchain]
channel = "nightly"

@@ -1 +0,0 @@
-../rustfmt.toml

@@ -1,3 +1,3 @@
 [build]
 publish = "site"
-command = "rustup toolchain install nightly && cargo xtask docs"
+command = "rustup toolchain install nightly -c rust-src && cargo xtask docs"

@@ -0,0 +1,6 @@
pre-release-commit-message = "{{crate_name}}: release version {{version}}"
post-release-commit-message = "{{crate_name}}: start next development iteration {{next_version}}"
consolidate-pushes = true
consolidate-commits = true
dev-version = true
dev-version-ext = "dev.0"

@@ -22,15 +22,3 @@ path = "src/pass.rs"
 [[bin]]
 name = "test"
 path = "src/test.rs"
-
-[profile.dev]
-panic = "abort"
-opt-level = 2
-overflow-checks = false
-
-[profile.release]
-panic = "abort"
-debug = 2
-
-[workspace]
-members = []

@@ -8,67 +8,19 @@ use std::{fs, io, io::Write};
 use indoc::indoc;

 pub fn docs() -> Result<(), anyhow::Error> {
-    let mut working_dir = PathBuf::from(".");
+    let current_dir = PathBuf::from(".");
+    let header_path = current_dir.join("header.html");
-    let replace = Command::new("sed")
-        .current_dir(&working_dir)
-        .args(vec![
-            "-i.bak",
-            "s/crabby.svg/crabby_dev.svg/",
-            "aya/src/lib.rs",
-        ])
-        .status()
-        .expect("failed to replace logo");
-    assert!(replace.success());
-    let mut header_path = PathBuf::from(".");
-    header_path.push("header.html");
     let mut header = fs::File::create(&header_path).expect("can't create header.html");
     header
         .write_all(r#"<meta name="robots" content="noindex">"#.as_bytes())
         .expect("can't write header.html contents");
     header.flush().expect("couldn't flush contents");
     let abs_header_path = fs::canonicalize(&header_path).unwrap();
-    let args = vec!["+nightly", "doc", "--no-deps", "--all-features"];
-    let status = Command::new("cargo")
-        .current_dir(&working_dir)
-        .env(
-            "RUSTDOCFLAGS",
-            format!("--html-in-header {}", abs_header_path.to_str().unwrap()),
-        )
-        .args(&args)
-        .status()
-        .expect("failed to build aya docs");
-    assert!(status.success());
-    working_dir.push("bpf");
-    let replace = Command::new("sed")
-        .current_dir(&working_dir)
-        .args(vec![
-            "-i.bak",
-            "s/crabby.svg/crabby_dev.svg/",
-            "aya-bpf/src/lib.rs",
-        ])
-        .status()
-        .expect("failed to replace logo");
-    assert!(replace.success());
-    let status = Command::new("cargo")
-        .current_dir(&working_dir)
-        .env(
-            "RUSTDOCFLAGS",
-            format!("--html-in-header {}", abs_header_path.to_str().unwrap()),
-        )
-        .args(&args)
-        .status()
-        .expect("failed to build aya-bpf docs");
-    assert!(status.success());
-    copy_dir_all("./target/doc", "site/user")?;
-    copy_dir_all("./bpf/target/doc", "site/bpf")?;
+    build_docs(&current_dir.join("aya"), &abs_header_path)?;
+    build_docs(&current_dir.join("bpf/aya-bpf"), &abs_header_path)?;
+    copy_dir_all("./target/doc", "./site/user")?;
+    copy_dir_all("./target/bpfel-unknown-none/doc", "./site/bpf")?;
     let mut robots = fs::File::create("site/robots.txt").expect("can't create robots.txt");
     robots

@@ -98,14 +50,38 @@ pub fn docs() -> Result<(), anyhow::Error> {
             .as_bytes(),
         )
         .expect("can't write index.html");
+    Ok(())
+}
+
+fn build_docs(working_dir: &PathBuf, abs_header_path: &Path) -> Result<(), anyhow::Error> {
+    let replace = Command::new("sed")
+        .current_dir(&working_dir)
+        .args(vec!["-i.bak", "s/crabby.svg/crabby_dev.svg/", "src/lib.rs"])
+        .status()
+        .expect("failed to replace logo");
+    assert!(replace.success());
-    fs::rename("aya/src/lib.rs.bak", "aya/src/lib.rs").unwrap();
-    fs::rename("bpf/aya-bpf/src/lib.rs.bak", "bpf/aya-bpf/src/lib.rs").unwrap();
+    let args = vec!["+nightly", "doc", "--no-deps", "--all-features"];
+    let status = Command::new("cargo")
+        .current_dir(&working_dir)
+        .env(
+            "RUSTDOCFLAGS",
+            format!("--html-in-header {}", abs_header_path.to_str().unwrap()),
+        )
+        .args(&args)
+        .status()
+        .expect("failed to build aya docs");
+    assert!(status.success());
+    fs::rename(
+        working_dir.join("src/lib.rs.bak"),
+        working_dir.join("src/lib.rs"),
+    )
+    .unwrap();
     Ok(())
 }

-fn copy_dir_all<P: AsRef<Path>>(src: P, dst: P) -> io::Result<()> {
+fn copy_dir_all<P1: AsRef<Path>, P2: AsRef<Path>>(src: P1, dst: P2) -> io::Result<()> {
     fs::create_dir_all(&dst)?;
     for entry in fs::read_dir(src)? {
         let entry = entry?;
