/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __ASM_ARM_INSN_H
#define __ASM_ARM_INSN_H

#include <linux/types.h>

/*
 * Avoid a literal load by emitting a sequence of ADD/LDR instructions with the
 * appropriate relocations. The combined sequence has a range of -/+ 256 MiB,
 * which should be sufficient for the core kernel as well as modules loaded
 * into the module region. (Not supported by LLD before release 14)
 */
#if !(defined(MODULE) && defined(CONFIG_ARM_MODULE_PLTS)) && \
    !(defined(CONFIG_LD_IS_LLD) && CONFIG_LLD_VERSION < 140000)
#define LOAD_SYM_ARMV6(reg, sym)					\
	"	.globl	" #sym "				\n\t"	\
	"	.reloc	10f, R_ARM_ALU_PC_G0_NC, " #sym "	\n\t"	\
	"	.reloc	11f, R_ARM_ALU_PC_G1_NC, " #sym "	\n\t"	\
	"	.reloc	12f, R_ARM_LDR_PC_G2, " #sym "		\n\t"	\
	"10:	sub	" #reg ", pc, #8			\n\t"	\
	"11:	sub	" #reg ", " #reg ", #4			\n\t"	\
	"12:	ldr	" #reg ", [" #reg ", #0]		\n\t"
#else
#define LOAD_SYM_ARMV6(reg, sym)					\
	"	ldr	" #reg ", =" #sym "			\n\t"	\
	"	ldr	" #reg ", [" #reg "]			\n\t"
#endif

static inline unsigned long
arm_gen_nop(void)
{
#ifdef CONFIG_THUMB2_KERNEL
	return 0xf3af8000; /* nop.w */
#else
	return 0xe1a00000; /* mov r0, r0 */
#endif
}

unsigned long
__arm_gen_branch(unsigned long pc, unsigned long addr, bool link, bool warn);

static inline unsigned long
arm_gen_branch(unsigned long pc, unsigned long addr)
{
	return __arm_gen_branch(pc, addr, false, true);
}

static inline unsigned long
arm_gen_branch_link(unsigned long pc, unsigned long addr, bool warn)
{
	return __arm_gen_branch(pc, addr, true, warn);
}

#endif
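
/*
 * Illustrative usage sketch (not part of the original header; the symbol and
 * helper names below are hypothetical). LOAD_SYM_ARMV6() expands to a quoted
 * assembler fragment, so it is intended to be pasted straight into an asm()
 * statement, with the destination register supplied as an output operand and
 * the symbol name written literally:
 *
 *	extern unsigned long example_sym;
 *
 *	static inline unsigned long load_example_sym(void)
 *	{
 *		unsigned long val;
 *
 *		asm(LOAD_SYM_ARMV6(%0, example_sym) : "=r" (val));
 *		return val;
 *	}
 *
 * With the relocation-based variant this loads the value stored at
 * example_sym without emitting a literal pool entry, provided the symbol is
 * within the +/- 256 MiB range described above.
 */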
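
/*
 * Sketch of how the branch encoders are typically consumed (the helper below
 * is hypothetical, loosely modeled on instruction-patching callers such as
 * ftrace). arm_gen_branch_link() returns the 32-bit encoding of a
 * branch-and-link from @pc to @addr, or 0 when the target cannot be reached,
 * so a caller only has to check the result and write it over the existing
 * instruction:
 *
 *	static bool patch_call_site(unsigned long pc, unsigned long target)
 *	{
 *		unsigned long insn = arm_gen_branch_link(pc, target, true);
 *
 *		if (!insn)
 *			return false;	// target out of branch range
 *		patch_text((void *)pc, insn);
 *		return true;
 *	}
 *
 * patch_text() here stands in for whatever text-patching primitive the caller
 * uses; it is named only for illustration.
 */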