author    Yonghong Song <yonghong.song@linux.dev>  2024-01-09 21:13:55 -0800
committer Alexei Starovoitov <ast@kernel.org>      2024-01-23 14:40:23 -0800
commit    6ae99ac8b7da30c9fdb15e380624dbc41f8200c8 (patch)
tree      f5c6c46df17c2350447ed9c8bad37bc2a4aeee47 /tools/testing/selftests/bpf
parent    bpf: Track aligned st store as imprecise spilled registers (diff)
selftests/bpf: Add a selftest with not-8-byte aligned BPF_ST
Add a selftest with a 4-byte BPF_ST of 0 where the store is not
8-byte aligned. The goal is to ensure that STACK_ZERO is properly
marked in stack slots and that the STACK_ZERO value can propagate
properly during the load.

Acked-by: Andrii Nakryiko <andrii@kernel.org>
Signed-off-by: Yonghong Song <yonghong.song@linux.dev>
Link: https://lore.kernel.org/r/20240110051355.2737232-1-yonghong.song@linux.dev
Signed-off-by: Alexei Starovoitov <ast@kernel.org>
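The store under test is injected as raw instruction bytes via the
".8byte %[fp4_st_zero];" / __imm_insn() pattern visible in the diff
below; per the patch's inline comment, only LLVM 18+ emits such a
BPF_ST immediate store directly from this source pattern. As a sketch
(not part of the patch; field layout per the uapi <linux/bpf.h>, and
BPF_REG_FP in the diff is an alias for the frame pointer BPF_REG_10),
the injected instruction spelled out:

	#include <linux/bpf.h>

	static const struct bpf_insn fp4_st_zero = {
		.code    = BPF_ST | BPF_MEM | BPF_W, /* opcode 0x62: *(u32 *)(dst + off) = imm */
		.dst_reg = BPF_REG_10,               /* r10, the frame pointer */
		.src_reg = 0,                        /* unused by BPF_ST */
		.off     = -4,                       /* fp-4: 4-byte aligned, not 8-byte */
		.imm     = 0,                        /* the zero being stored */
	};

The "(62)" in the expected verifier-log lines below is exactly this
opcode byte.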
Diffstat (limited to 'tools/testing/selftests/bpf')
-rw-r--r--   tools/testing/selftests/bpf/progs/verifier_spill_fill.c   41
1 file changed, 41 insertions(+), 0 deletions(-)
diff --git a/tools/testing/selftests/bpf/progs/verifier_spill_fill.c b/tools/testing/selftests/bpf/progs/verifier_spill_fill.c
index e38f29e73be7..7013a9694163 100644
--- a/tools/testing/selftests/bpf/progs/verifier_spill_fill.c
+++ b/tools/testing/selftests/bpf/progs/verifier_spill_fill.c
@@ -581,6 +581,47 @@ __naked void partial_stack_load_preserves_zeros(void)
: __clobber_common);
}
+SEC("raw_tp")
+__log_level(2)
+__success
+/* fp-4 is STACK_ZERO */
+__msg("2: (62) *(u32 *)(r10 -4) = 0 ; R10=fp0 fp-8=0000????")
+__msg("4: (71) r2 = *(u8 *)(r10 -1) ; R2_w=0 R10=fp0 fp-8=0000????")
+__msg("5: (0f) r1 += r2")
+__msg("mark_precise: frame0: last_idx 5 first_idx 0 subseq_idx -1")
+__msg("mark_precise: frame0: regs=r2 stack= before 4: (71) r2 = *(u8 *)(r10 -1)")
+__naked void partial_stack_load_preserves_partial_zeros(void)
+{
+	asm volatile (
+		/* fp-4 is value zero */
+		".8byte %[fp4_st_zero];" /* LLVM-18+: *(u32 *)(r10 -4) = 0; */
+
+		/* load single U8 from non-aligned stack zero slot */
+		"r1 = %[single_byte_buf];"
+		"r2 = *(u8 *)(r10 -1);"
+		"r1 += r2;"
+		"*(u8 *)(r1 + 0) = r2;" /* this should be fine */
+
+		/* load single U16 from non-aligned stack zero slot */
+		"r1 = %[single_byte_buf];"
+		"r2 = *(u16 *)(r10 -2);"
+		"r1 += r2;"
+		"*(u8 *)(r1 + 0) = r2;" /* this should be fine */
+
+		/* load single U32 from non-aligned stack zero slot */
+		"r1 = %[single_byte_buf];"
+		"r2 = *(u32 *)(r10 -4);"
+		"r1 += r2;"
+		"*(u8 *)(r1 + 0) = r2;" /* this should be fine */
+
+		"r0 = 0;"
+		"exit;"
+	:
+	: __imm_ptr(single_byte_buf),
+	  __imm_insn(fp4_st_zero, BPF_ST_MEM(BPF_W, BPF_REG_FP, -4, 0))
+	: __clobber_common);
+}
+
char two_byte_buf[2] SEC(".data.two_byte_buf");
SEC("raw_tp")