Files
linux/tools/testing/selftests/bpf/progs/verifier_may_goto_1.c
Yonghong Song 14a627fe79 selftests/bpf: Add some tests related to 'may_goto 0' insns
Add both asm-based and C-based tests which have 'may_goto 0' insns.

For the following code in C-based test,
   int i, tmp[3];
   for (i = 0; i < 3 && can_loop; i++)
       tmp[i] = 0;

The clang compiler (clang 19 and 20) generates
   may_goto 2
   may_goto 1
   may_goto 0
   r1 = 0
   r2 = 0
   r3 = 0

The above asm code is produced by the llvm SROAPass pass. It verifies
successfully since tmp[0-2] are all initialized.  Otherwise, code
generated without SROAPass, like
   may_goto 5
   r1 = 0
   may_goto 3
   r2 = 0
   may_goto 1
   r3 = 0
will have verification failure.

Although, judging from the source code, the C-based test should fail
verification, clang compiler optimization generates code that verifies
successfully. If gcc generates different asm code than clang, the
following code can be used for gcc:
   int i, tmp[3];
   for (i = 0; i < 3; i++)
       tmp[i] = 0;

Acked-by: Eduard Zingerman <eddyz87@gmail.com>
Signed-off-by: Yonghong Song <yonghong.song@linux.dev>
Link: https://lore.kernel.org/r/20250118192034.2124952-1-yonghong.song@linux.dev
Signed-off-by: Alexei Starovoitov <ast@kernel.org>
2025-01-20 09:47:14 -08:00

98 lines
2.2 KiB
C

// SPDX-License-Identifier: GPL-2.0
/* Copyright (c) 2025 Meta Platforms, Inc. and affiliates. */
#include <linux/bpf.h>
#include <bpf/bpf_helpers.h>
#include "../../../include/linux/filter.h"
#include "bpf_misc.h"
/* Check that the verifier removes a lone 'may_goto 0' insn (jump offset 0):
 * it can only fall through to the next insn, so it is a no-op.  Both
 * may_goto insns below must be gone from the translated program, leaving
 * only 'r0 = 1; exit' (see the __xlated expectations).
 */
SEC("raw_tp")
__description("may_goto 0")
__arch_x86_64
__xlated("0: r0 = 1")
__xlated("1: exit")
__success
__naked void may_goto_simple(void)
{
asm volatile (
".8byte %[may_goto];"	/* may_goto +0: no-op, expected to be removed */
"r0 = 1;"
".8byte %[may_goto];"	/* may_goto +0 before exit, also removed */
"exit;"
:
: __imm_insn(may_goto, BPF_RAW_INSN(BPF_JMP | BPF_JCOND, 0, 0, 0 /* offset */, 0))
: __clobber_all);
}
/* Same as may_goto_simple, but with batches of two consecutive
 * 'may_goto 0' insns.  Each insn in a batch is still a no-op (offset 0),
 * so the verifier must remove all four, leaving only 'r0 = 1; exit'.
 */
SEC("raw_tp")
__description("batch 2 of may_goto 0")
__arch_x86_64
__xlated("0: r0 = 1")
__xlated("1: exit")
__success
__naked void may_goto_batch_0(void)
{
asm volatile (
".8byte %[may_goto1];"	/* batch of two may_goto +0 insns, all removed */
".8byte %[may_goto1];"
"r0 = 1;"
".8byte %[may_goto1];"	/* second batch, also removed */
".8byte %[may_goto1];"
"exit;"
:
: __imm_insn(may_goto1, BPF_RAW_INSN(BPF_JMP | BPF_JCOND, 0, 0, 0 /* offset */, 0))
: __clobber_all);
}
/* A batch of may_goto insns with offsets 2/1/0: each insn in the batch
 * targets the first insn after the batch (offset 2 from the first,
 * 1 from the second, 0 from the third), which is the pattern clang's
 * SROAPass emits for 'for (i = 0; i < 3 && can_loop; i++)'.  Since the
 * whole batch reduces to a fallthrough, the verifier is expected to
 * remove all six insns, leaving only 'r0 = 1; exit'.
 */
SEC("raw_tp")
__description("may_goto batch with offsets 2/1/0")
__arch_x86_64
__xlated("0: r0 = 1")
__xlated("1: exit")
__success
__naked void may_goto_batch_1(void)
{
asm volatile (
".8byte %[may_goto1];"	/* may_goto +2 \                         */
".8byte %[may_goto2];"	/* may_goto +1  > all jump past the batch */
".8byte %[may_goto3];"	/* may_goto +0 /                         */
"r0 = 1;"
".8byte %[may_goto1];"	/* identical batch before exit, also removed */
".8byte %[may_goto2];"
".8byte %[may_goto3];"
"exit;"
:
: __imm_insn(may_goto1, BPF_RAW_INSN(BPF_JMP | BPF_JCOND, 0, 0, 2 /* offset */, 0)),
  __imm_insn(may_goto2, BPF_RAW_INSN(BPF_JMP | BPF_JCOND, 0, 0, 1 /* offset */, 0)),
  __imm_insn(may_goto3, BPF_RAW_INSN(BPF_JMP | BPF_JCOND, 0, 0, 0 /* offset */, 0))
: __clobber_all);
}
/* Mixed batch: 'may_goto +2' skips over 'r0 = 1' (it is a real
 * conditional jump and cannot be removed), while the following
 * 'may_goto +0' is a no-op and is removed.  The __xlated lines show the
 * surviving may_goto expanded into its full form: an iteration counter
 * (8388608 == BPF_MAX_LOOPS) spilled at r10-8, reloaded into r11,
 * a zero test that jumps to 'r0 = 2' (pc+3), and a decrement + re-spill
 * on the fallthrough path.
 */
SEC("raw_tp")
__description("may_goto batch with offsets 2/0")
__arch_x86_64
__xlated("0: *(u64 *)(r10 -8) = 8388608")
__xlated("1: r11 = *(u64 *)(r10 -8)")
__xlated("2: if r11 == 0x0 goto pc+3")
__xlated("3: r11 -= 1")
__xlated("4: *(u64 *)(r10 -8) = r11")
__xlated("5: r0 = 1")
__xlated("6: r0 = 2")
__xlated("7: exit")
__success
__naked void may_goto_batch_2(void)
{
asm volatile (
".8byte %[may_goto1];"	/* may_goto +2: skips 'r0 = 1', kept and expanded */
".8byte %[may_goto3];"	/* may_goto +0: no-op, removed */
"r0 = 1;"
"r0 = 2;"
"exit;"
:
: __imm_insn(may_goto1, BPF_RAW_INSN(BPF_JMP | BPF_JCOND, 0, 0, 2 /* offset */, 0)),
  __imm_insn(may_goto3, BPF_RAW_INSN(BPF_JMP | BPF_JCOND, 0, 0, 0 /* offset */, 0))
: __clobber_all);
}
char _license[] SEC("license") = "GPL";