#include <common.h>
#include <exports.h>

#ifndef GCC_VERSION
#define GCC_VERSION (__GNUC__ * 1000 + __GNUC_MINOR__)
#endif /* GCC_VERSION */

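/*
 * Note on the stubs below: each EXPORT_FUNC(x) emits a small assembly
 * trampoline with the global symbol name x.  The trampoline locates
 * the jump table through the global data, fetches entry XF_x and jumps
 * to it, leaving the argument registers and the stack untouched.  As a
 * rough, illustrative C sketch (not what is actually compiled; ret_t
 * and args stand in for the exported function's real prototype):
 *
 *	return ((ret_t (*)(args))gd->jt[XF_x])(unchanged arguments);
 *
 * Only the way gd is located differs between the architectures.
 */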
#if defined(CONFIG_X86)
/*
 * x86 does not have a dedicated register to store the pointer to
 * the global_data. Thus the jump table address is stored in a
 * global variable, but such an approach does not allow for execution
 * from flash memory. The global_data address is passed as argv[-1]
 * to the application program.
 */
static void **jt;
gd_t *global_data;

#define EXPORT_FUNC(x) \
	asm volatile ( \
"	.globl " #x "\n" \
#x ":\n" \
"	movl	%0, %%eax\n" \
"	movl	jt, %%ecx\n" \
"	jmp	*(%%ecx, %%eax)\n" \
	: : "i"(XF_ ## x * sizeof(void *)) : "eax", "ecx");
#elif defined(CONFIG_PPC)
/*
 * r2 holds the pointer to the global_data, r11 is a call-clobbered
 * register
 */
#define EXPORT_FUNC(x) \
	asm volatile ( \
"	.globl " #x "\n" \
#x ":\n" \
"	lwz	%%r11, %0(%%r2)\n" \
"	lwz	%%r11, %1(%%r11)\n" \
"	mtctr	%%r11\n" \
"	bctr\n" \
	: : "i"(offsetof(gd_t, jt)), "i"(XF_ ## x * sizeof(void *)) : "r11");
#elif defined(CONFIG_ARM)
#ifdef CONFIG_ARM64
/*
 * x18 holds the pointer to the global_data, x9 is a call-clobbered
 * register
 */
#define EXPORT_FUNC(x) \
	asm volatile ( \
"	.globl " #x "\n" \
#x ":\n" \
"	ldr	x9, [x18, %0]\n" \
"	ldr	x9, [x9, %1]\n" \
"	br	x9\n" \
	: : "i"(offsetof(gd_t, jt)), "i"(XF_ ## x * sizeof(void *)) : "x9");
#else
/*
 * r9 holds the pointer to the global_data, ip is a call-clobbered
 * register
 */
#define EXPORT_FUNC(x) \
	asm volatile ( \
"	.globl " #x "\n" \
#x ":\n" \
"	ldr	ip, [r9, %0]\n" \
"	ldr	pc, [ip, %1]\n" \
	: : "i"(offsetof(gd_t, jt)), "i"(XF_ ## x * sizeof(void *)) : "ip");
#endif
#elif defined(CONFIG_MIPS)
/*
 * k0 ($26) holds the pointer to the global_data; t9 ($25) is a call-
 * clobbered register that is also used to set gp ($28). Note that the
 * jr instruction also executes the instruction immediately following
 * it; however, GCC/mips generates an additional `nop' after each asm
 * statement
 */
#define EXPORT_FUNC(x) \
	asm volatile ( \
"	.globl " #x "\n" \
#x ":\n" \
"	lw	$25, %0($26)\n" \
"	lw	$25, %1($25)\n" \
"	jr	$25\n" \
	: : "i"(offsetof(gd_t, jt)), "i"(XF_ ## x * sizeof(void *)) : "t9");
#elif defined(CONFIG_NIOS2)
/*
 * gp holds the pointer to the global_data, r8 is call-clobbered
 */
#define EXPORT_FUNC(x) \
	asm volatile ( \
"	.globl " #x "\n" \
#x ":\n" \
"	movhi	r8, %%hi(%0)\n" \
"	ori	r8, r0, %%lo(%0)\n" \
"	add	r8, r8, gp\n" \
"	ldw	r8, 0(r8)\n" \
"	ldw	r8, %1(r8)\n" \
"	jmp	r8\n" \
	: : "i"(offsetof(gd_t, jt)), "i"(XF_ ## x * sizeof(void *)) : "gp");
#elif defined(CONFIG_M68K)
/*
 * d7 holds the pointer to the global_data, a0 is a call-clobbered
 * register
 */
#define EXPORT_FUNC(x) \
	asm volatile ( \
"	.globl " #x "\n" \
#x ":\n" \
"	move.l	%%d7, %%a0\n" \
"	adda.l	%0, %%a0\n" \
"	move.l	(%%a0), %%a0\n" \
"	adda.l	%1, %%a0\n" \
"	move.l	(%%a0), %%a0\n" \
"	jmp	(%%a0)\n" \
	: : "i"(offsetof(gd_t, jt)), "i"(XF_ ## x * sizeof(void *)) : "a0");
#elif defined(CONFIG_MICROBLAZE)
/*
 * r31 holds the pointer to the global_data, r5 is a call-clobbered
 * register
 */
#define EXPORT_FUNC(x) \
	asm volatile ( \
"	.globl " #x "\n" \
#x ":\n" \
"	lwi	r5, r31, %0\n" \
"	lwi	r5, r5, %1\n" \
"	bra	r5\n" \
	: : "i"(offsetof(gd_t, jt)), "i"(XF_ ## x * sizeof(void *)) : "r5");
#elif defined(CONFIG_BLACKFIN)
/*
 * P3 holds the pointer to the global_data, P0 is a call-clobbered
 * register
 */
#define EXPORT_FUNC(x) \
	asm volatile ( \
"	.globl _" #x "\n_" \
#x ":\n" \
"	P0 = [P3 + %0]\n" \
"	P0 = [P0 + %1]\n" \
"	JUMP (P0)\n" \
	: : "i"(offsetof(gd_t, jt)), "i"(XF_ ## x * sizeof(void *)) : "P0");
#elif defined(CONFIG_AVR32)
/*
 * r6 holds the pointer to the global_data. r8 is call-clobbered.
 */
#define EXPORT_FUNC(x) \
	asm volatile( \
"	.globl\t" #x "\n" \
#x ":\n" \
"	ld.w	r8, r6[%0]\n" \
"	ld.w	pc, r8[%1]\n" \
	: \
	: "i"(offsetof(gd_t, jt)), "i"(XF_ ##x) \
	: "r8");
#elif defined(CONFIG_SH)
/*
 * r13 holds the pointer to the global_data. r1 and r2 are
 * call-clobbered registers.
 */
#define EXPORT_FUNC(x) \
	asm volatile ( \
"	.align	2\n" \
"	.globl " #x "\n" \
#x ":\n" \
"	mov	r13, r1\n" \
"	add	%0, r1\n" \
"	mov.l	@r1, r2\n" \
"	add	%1, r2\n" \
"	mov.l	@r2, r1\n" \
"	jmp	@r1\n" \
"	nop\n" \
"	nop\n" \
	: : "i"(offsetof(gd_t, jt)), "i"(XF_ ## x * sizeof(void *)) : "r1", "r2");
#elif defined(CONFIG_SPARC)
/*
 * g7 holds the pointer to the global_data. g1 is call-clobbered.
 */
#define EXPORT_FUNC(x) \
	asm volatile( \
"	.globl\t" #x "\n" \
#x ":\n" \
"	set	%0, %%g1\n" \
"	or	%%g1, %%g7, %%g1\n" \
"	ld	[%%g1], %%g1\n" \
"	ld	[%%g1 + %1], %%g1\n" \
"	jmp	%%g1\n" \
"	nop\n" \
	: : "i"(offsetof(gd_t, jt)), "i"(XF_ ## x * sizeof(void *)) : "g1");
#elif defined(CONFIG_NDS32)
/*
 * gp holds the pointer to the global_data, r16 is a call-clobbered
 * register. Reduced register configurations (16 GPRs) are not supported.
 */
#define EXPORT_FUNC(x) \
	asm volatile ( \
"	.globl " #x "\n" \
#x ":\n" \
"	lwi	$r16, [$gp + (%0)]\n" \
"	lwi	$r16, [$r16 + (%1)]\n" \
"	jr	$r16\n" \
	: : "i"(offsetof(gd_t, jt)), "i"(XF_ ## x * sizeof(void *)) : "$r16");
#elif defined(CONFIG_OPENRISC)
/*
 * r10 holds the pointer to the global_data, r13 is a call-clobbered
 * register
 */
#define EXPORT_FUNC(x) \
	asm volatile ( \
"	.globl " #x "\n" \
#x ":\n" \
"	l.lwz	r13, %0(r10)\n" \
"	l.lwz	r13, %1(r13)\n" \
"	l.jr	r13\n" \
"	l.nop\n" \
	: : "i"(offsetof(gd_t, jt)), "i"(XF_ ## x * sizeof(void *)) : "r13");
#elif defined(CONFIG_ARC)
/*
 * r25 holds the pointer to the global_data. r10 is call-clobbered.
 */
#define EXPORT_FUNC(x) \
	asm volatile( \
"	.align	4\n" \
"	.globl " #x "\n" \
#x ":\n" \
"	ld	r10, [r25, %0]\n" \
"	ld	r10, [r10, %1]\n" \
"	j	[r10]\n" \
	: : "i"(offsetof(gd_t, jt)), "i"(XF_ ## x * sizeof(void *)) : "r10");
#else
#error stubs definition missing for this architecture
#endif

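/*
 * The set of exported functions lives in <_exports.h> as a plain list
 * of EXPORT_FUNC(...) entries (getc, putc, printf and friends in a
 * typical U-Boot tree), so including that header inside dummy() below
 * expands to one trampoline per exported function.  The XF_* indices
 * used above are generated from the same list in <exports.h>.
 */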
/* This function is necessary to prevent the compiler from
 * generating prologue/epilogue, preparing stack frame etc.
 * The stub functions are special; they do not use the stack
 * frame passed to them, but pass it intact to the actual
 * implementation. On the other hand, asm() statements with
 * arguments can only be used inside functions (a gcc limitation).
 */
#if GCC_VERSION < 3004
static
#endif /* GCC_VERSION */
void __attribute__((unused)) dummy(void)
{
#include <_exports.h>
}

#include <asm/sections.h>

void app_startup(char * const *argv)
{
	char *cp = __bss_start;

	/* Zero out BSS */
	while (cp < _end)
		*cp++ = 0;

#if defined(CONFIG_X86)
	/* x86 does not have a dedicated register for passing global_data */
	global_data = (gd_t *)argv[-1];
	jt = global_data->jt;
#endif
}

#undef EXPORT_FUNC
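/*
 * Illustrative usage (a minimal sketch, not part of this file): a
 * standalone application linked against these stubs typically calls
 * app_startup() to clear its BSS and pick up the jump table before
 * using any exported function, e.g.
 *
 *	int my_app(int argc, char * const argv[])
 *	{
 *		app_startup(argv);
 *		printf("Hello from a standalone application\n");
 *		return 0;
 *	}
 *
 * Here my_app is a hypothetical entry point; printf() resolves to the
 * trampoline emitted above, which forwards to U-Boot's implementation
 * through the jump table.
 */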