diff --git a/aya-obj/src/btf/relocation.rs b/aya-obj/src/btf/relocation.rs
index 3098ab7f..a352236d 100644
--- a/aya-obj/src/btf/relocation.rs
+++ b/aya-obj/src/btf/relocation.rs
@@ -14,8 +14,8 @@ use crate::{
         IntEncoding, Struct, Union, MAX_SPEC_LEN,
     },
     generated::{
-        bpf_core_relo, bpf_core_relo_kind::*, bpf_insn, BPF_ALU, BPF_ALU64, BPF_B, BPF_DW, BPF_H,
-        BPF_K, BPF_LD, BPF_LDX, BPF_ST, BPF_STX, BPF_W, BTF_INT_SIGNED,
+        bpf_core_relo, bpf_core_relo_kind::*, bpf_insn, BPF_ALU, BPF_ALU64, BPF_B, BPF_CALL,
+        BPF_DW, BPF_H, BPF_JMP, BPF_K, BPF_LD, BPF_LDX, BPF_ST, BPF_STX, BPF_W, BTF_INT_SIGNED,
     },
     util::HashMap,
     Object, Program, ProgramSection,
@@ -314,13 +314,18 @@ fn relocate_btf_program<'target>(
             // same value, else the relocation is ambiguous and can't be applied
             let conflicts = matches
                 .filter_map(|(cand_name, cand_spec, cand_comp_rel)| {
-                    if cand_spec.bit_offset != target_spec.bit_offset
-                        || cand_comp_rel.target.value != target_comp_rel.target.value
-                    {
-                        Some(cand_name)
-                    } else {
-                        None
+                    if cand_spec.bit_offset != target_spec.bit_offset {
+                        return Some(cand_name);
+                    } else if let (Some(cand_comp_rel_target), Some(target_comp_rel_target)) = (
+                        cand_comp_rel.target.as_ref(),
+                        target_comp_rel.target.as_ref(),
+                    ) {
+                        if cand_comp_rel_target.value != target_comp_rel_target.value {
+                            return Some(cand_name);
+                        }
                     }
+
+                    None
                 })
                 .collect::<Vec<_>>();
             if !conflicts.is_empty() {
@@ -809,7 +814,7 @@ struct Candidate<'a> {
 #[derive(Debug)]
 struct ComputedRelocation {
     local: ComputedRelocationValue,
-    target: ComputedRelocationValue,
+    target: Option<ComputedRelocationValue>,
 }

 #[derive(Debug)]
@@ -819,6 +824,14 @@ struct ComputedRelocationValue {
     type_id: Option<u32>,
 }

+fn poison_insn(ins: &mut bpf_insn) {
+    ins.code = (BPF_JMP | BPF_CALL) as u8;
+    ins.set_dst_reg(0);
+    ins.set_src_reg(0);
+    ins.off = 0;
+    ins.imm = 0xBAD2310;
+}
+
 impl ComputedRelocation {
     fn new(
         rel: &Relocation,
@@ -830,15 +843,15 @@ impl ComputedRelocation {
             FieldByteOffset | FieldByteSize | FieldExists | FieldSigned | FieldLShift64
             | FieldRShift64 => ComputedRelocation {
                 local: Self::compute_field_relocation(rel, Some(local_spec))?,
-                target: Self::compute_field_relocation(rel, target_spec)?,
+                target: Self::compute_field_relocation(rel, target_spec).ok(),
             },
             TypeIdLocal | TypeIdTarget | TypeExists | TypeSize => ComputedRelocation {
                 local: Self::compute_type_relocation(rel, local_spec, target_spec)?,
-                target: Self::compute_type_relocation(rel, local_spec, target_spec)?,
+                target: Self::compute_type_relocation(rel, local_spec, target_spec).ok(),
             },
             EnumVariantExists | EnumVariantValue => ComputedRelocation {
                 local: Self::compute_enum_relocation(rel, Some(local_spec))?,
-                target: Self::compute_enum_relocation(rel, target_spec)?,
+                target: Self::compute_enum_relocation(rel, target_spec).ok(),
             },
         };

@@ -864,9 +877,31 @@ impl ComputedRelocation {
                     relocation_number: rel.number,
                 })?;

+        let target = if let Some(target) = self.target.as_ref() {
+            target
+        } else {
+            let is_ld_imm64 = ins.code == (BPF_LD | BPF_DW) as u8;
+
+            poison_insn(ins);
+
+            if is_ld_imm64 {
+                let next_ins = instructions.get_mut(ins_index + 1).ok_or(
+                    RelocationError::InvalidInstructionIndex {
+                        index: (ins_index + 1) * mem::size_of::<bpf_insn>(),
+                        num_instructions,
+                        relocation_number: rel.number,
+                    },
+                )?;
+
+                poison_insn(next_ins);
+            }
+
+            return Ok(());
+        };
+
         let class = (ins.code & 0x07) as u32;

-        let target_value = self.target.value;
+        let target_value = target.value;

         match class {
             BPF_ALU | BPF_ALU64 => {
@@ -892,9 +927,9 @@ impl ComputedRelocation {

                 ins.off = target_value as i16;

-                if self.local.size != self.target.size {
+                if self.local.size != target.size {
                     let local_ty = local_btf.type_by_id(self.local.type_id.unwrap())?;
-                    let target_ty = target_btf.type_by_id(self.target.type_id.unwrap())?;
+                    let target_ty = target_btf.type_by_id(target.type_id.unwrap())?;
                     let unsigned = |info: u32| ((info >> 24) & 0x0F) & BTF_INT_SIGNED == 0;
                     use BtfType::*;
                     match (local_ty, target_ty) {
@@ -910,13 +945,13 @@ impl ComputedRelocation {
                                 err_type_name(&local_btf.err_type_name(local_ty)),
                                 self.local.size,
                                 err_type_name(&target_btf.err_type_name(target_ty)),
-                                self.target.size,
+                                target.size,
                             ),
                         })
                     }
                 }

-                let size = match self.target.size {
+                let size = match target.size {
                     8 => BPF_DW,
                     4 => BPF_W,
                     2 => BPF_H,
diff --git a/test/integration-test/src/tests/btf_relocations.rs b/test/integration-test/src/tests/btf_relocations.rs
index 85291ef4..f8f37c8f 100644
--- a/test/integration-test/src/tests/btf_relocations.rs
+++ b/test/integration-test/src/tests/btf_relocations.rs
@@ -143,6 +143,35 @@ fn relocate_pointer() {
     assert_eq!(test.run_no_btf().unwrap(), 42);
 }

+#[integration_test]
+fn relocate_struct_flavors() {
+    let definition = r#"
+        struct foo {};
+        struct bar { struct foo *f; };
+        struct bar___cafe { struct foo *e; struct foo *f; };
+    "#;
+
+    let relocation_code = r#"
+        __u8 memory[] = {42, 0, 0, 0, 0, 0, 0, 0, 21, 0, 0, 0, 0, 0, 0, 0};
+        struct bar* ptr = (struct bar *) &memory;
+
+        if (__builtin_preserve_field_info((((typeof(struct bar___cafe) *)0)->e), 2)) {
+            value = (__u64) __builtin_preserve_access_index(((struct bar___cafe *)ptr)->e);
+        } else {
+            value = (__u64) __builtin_preserve_access_index(ptr->f);
+        }
+    "#;
+
+    let test_no_flavor = RelocationTest {
+        local_definition: definition,
+        target_btf: definition,
+        relocation_code,
+    }
+    .build()
+    .unwrap();
+    assert_eq!(test_no_flavor.run_no_btf().unwrap(), 42);
+}
+
 /// Utility code for running relocation tests:
 /// - Generates the eBPF program using probided local definition and relocation code
 /// - Generates the BTF from the target btf code