arm64 fix for v7.0

- Implement a basic static call trampoline to fix CFI failures with the
   generic implementation.
 -----BEGIN PGP SIGNATURE-----
 
 iQFEBAABCgAuFiEEPxTL6PPUbjXGY88ct6xw3ITBYzQFAmnPh0AQHHdpbGxAa2Vy
 bmVsLm9yZwAKCRC3rHDchMFjNHRVB/97IOb/LZAq2yguGy6rMptm3tCdCsUmgPkh
 aPBeI4BE1JXofRcyM1oaavM/wC6M3ASb8JJbg5Ceta3wXwPfjzR2F9+6OEzipXzC
 nQzm0Da5GvwiHOY6GGhOgUy91+JJB1g7402ALIRjCiaadDBTLgys/YzDFUGC4+8N
 QKToOJykO4sCUR4lpYpuJvd1NQv1VkJo4ZgtlWvanHo9ovkTXOuCJsCTBv6EHMo6
 nJg9iSZOMj3L20VSmnY5fa0MpCNCXH8cfYtbmHBYBxI3e3sKYI8A2j0H22FP4oIH
 2+tkIg5TxQsmejf9u9V1JES2/0712SmG/hS0y1BsQtYzVuDp7pBZ
 =qSXb
 -----END PGP SIGNATURE-----

Merge tag 'arm64-fixes' of git://git.kernel.org/pub/scm/linux/kernel/git/arm64/linux

Pull arm64 fix from Will Deacon:

 - Implement a basic static call trampoline to fix CFI failures with the
   generic implementation

* tag 'arm64-fixes' of git://git.kernel.org/pub/scm/linux/kernel/git/arm64/linux:
  arm64: Use static call trampolines when kCFI is enabled
This commit is contained in:
Linus Torvalds 2026-04-03 08:47:13 -07:00
commit 441c63ff42
5 changed files with 57 additions and 0 deletions

View File

@ -252,6 +252,7 @@ config ARM64
select HAVE_RSEQ
select HAVE_RUST if RUSTC_SUPPORTS_ARM64
select HAVE_STACKPROTECTOR
select HAVE_STATIC_CALL if CFI
select HAVE_SYSCALL_TRACEPOINTS
select HAVE_KPROBES
select HAVE_KRETPROBES

View File

@ -0,0 +1,31 @@
/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_STATIC_CALL_H
#define _ASM_STATIC_CALL_H
/*
 * arm64 static call trampolines.
 *
 * Each trampoline lives in .static_call.text and indirect-branches through
 * a 64-bit literal kept in .rodata:
 *
 *   - "hint 34" is a BTI C landing pad, so the trampoline is a valid
 *     indirect-branch target;
 *   - adrp/ldr load the branch target from the literal at label 1;
 *   - br x16 jumps to it.
 *
 * The literal is initialised to 'target' and later rewritten by
 * arch_static_call_transform() (see static_call.c in this series), which
 * decodes the adrp/ldr pair to locate it. The instruction layout is
 * therefore part of the contract with that function: adrp at tramp+4,
 * ldr at tramp+8 — do not reorder.
 */
#define __ARCH_DEFINE_STATIC_CALL_TRAMP(name, target) \
asm(" .pushsection .static_call.text, \"ax\" \n" \
" .align 4 \n" \
" .globl " name " \n" \
name ": \n" \
" hint 34 /* BTI C */ \n" \
" adrp x16, 1f \n" \
" ldr x16, [x16, :lo12:1f] \n" \
" br x16 \n" \
" .type " name ", %function \n" \
" .size " name ", . - " name " \n" \
" .popsection \n" \
" .pushsection .rodata, \"a\" \n" \
" .align 3 \n" \
"1: .quad " target " \n" \
" .popsection \n")
/* Define the trampoline for static call 'name', initially targeting 'func'. */
#define ARCH_DEFINE_STATIC_CALL_TRAMP(name, func) \
__ARCH_DEFINE_STATIC_CALL_TRAMP(STATIC_CALL_TRAMP_STR(name), #func)
/*
 * NULL static calls start out targeting __static_call_return0, matching
 * arch_static_call_transform(), which also substitutes __static_call_return0
 * when asked to retarget a trampoline to NULL.
 */
#define ARCH_DEFINE_STATIC_CALL_NULL_TRAMP(name) \
ARCH_DEFINE_STATIC_CALL_TRAMP(name, __static_call_return0)
/* RET0 static calls always return 0 via the generic helper. */
#define ARCH_DEFINE_STATIC_CALL_RET0_TRAMP(name) \
ARCH_DEFINE_STATIC_CALL_TRAMP(name, __static_call_return0)
#endif /* _ASM_STATIC_CALL_H */

View File

@ -46,6 +46,7 @@ obj-$(CONFIG_MODULES) += module.o module-plts.o
obj-$(CONFIG_PERF_EVENTS) += perf_regs.o perf_callchain.o
obj-$(CONFIG_HARDLOCKUP_DETECTOR_PERF) += watchdog_hld.o
obj-$(CONFIG_HAVE_HW_BREAKPOINT) += hw_breakpoint.o
obj-$(CONFIG_HAVE_STATIC_CALL) += static_call.o
obj-$(CONFIG_CPU_PM) += sleep.o suspend.o
obj-$(CONFIG_KGDB) += kgdb.o
obj-$(CONFIG_EFI) += efi.o efi-rt-wrapper.o

View File

@ -0,0 +1,23 @@
// SPDX-License-Identifier: GPL-2.0
#include <linux/static_call.h>
#include <linux/memory.h>
#include <asm/text-patching.h>
/*
 * Retarget the static call trampoline @tramp to branch to @func.
 *
 * The trampoline (see asm/static_call.h) loads its branch target from a
 * 64-bit literal via an adrp (at tramp + 4) / ldr (at tramp + 8) pair.
 * Rather than patching instructions, decode that pair to recover the
 * literal's address and rewrite the literal itself.
 *
 * @site and @tail are part of the generic interface but unused here: only
 * the out-of-line trampoline form is patched.
 */
void arch_static_call_transform(void *site, void *tramp, void *func, bool tail)
{
	u64 page_base, adrp_off, ldr_off, lit_addr;
	int err;

	/* A NULL target means "return 0", via the generic helper. */
	if (!func)
		func = __static_call_return0;

	/* adrp computes from the 4K-aligned PC of the adrp instruction. */
	page_base = ALIGN_DOWN((u64)tramp + 4, SZ_4K);
	/* Page offset encoded in the adrp at tramp + 4. */
	adrp_off = aarch64_insn_adrp_get_offset(le32_to_cpup(tramp + 4));
	/* imm12 of the 64-bit ldr at tramp + 8, scaled by the access size. */
	ldr_off = 8 * aarch64_insn_decode_immediate(AARCH64_INSN_IMM_12,
						    le32_to_cpup(tramp + 8));

	lit_addr = page_base + adrp_off + ldr_off;

	err = aarch64_insn_write_literal_u64((void *)lit_addr, (u64)func);
	WARN_ON_ONCE(err);
}
EXPORT_SYMBOL_GPL(arch_static_call_transform);

View File

@ -191,6 +191,7 @@ SECTIONS
LOCK_TEXT
KPROBES_TEXT
HYPERVISOR_TEXT
STATIC_CALL_TEXT
*(.gnu.warning)
}