#include <exports.h>

#ifndef GCC_VERSION
#define GCC_VERSION (__GNUC__ * 1000 + __GNUC_MINOR__)
#endif /* GCC_VERSION */

#if defined(CONFIG_I386)
/*
 * x86 does not have a dedicated register to store the pointer to
 * the global_data. Thus the jump table address is stored in a
 * global variable, but such an approach does not allow for execution
 * from flash memory. The global_data address is passed as argv[-1]
 * to the application program.
 */
static void **jt;
gd_t *global_data;

#define EXPORT_FUNC(x) \
	asm volatile ( \
"	.globl " #x "\n" \
#x ":\n" \
"	movl	%0, %%eax\n" \
"	movl	jt, %%ecx\n" \
"	jmp	*(%%ecx, %%eax)\n" \
	: : "i"(XF_ ## x * sizeof(void *)) : "eax", "ecx");
#elif defined(CONFIG_PPC)
/*
 * r2 holds the pointer to the global_data, r11 is a call-clobbered
 * register
 */
#define EXPORT_FUNC(x) \
	asm volatile ( \
"	.globl " #x "\n" \
#x ":\n" \
"	lwz	%%r11, %0(%%r2)\n" \
"	lwz	%%r11, %1(%%r11)\n" \
"	mtctr	%%r11\n" \
"	bctr\n" \
	: : "i"(offsetof(gd_t, jt)), "i"(XF_ ## x * sizeof(void *)) : "r11");
#elif defined(CONFIG_ARM)
/*
 * r8 holds the pointer to the global_data, ip is a call-clobbered
 * register
 */
#define EXPORT_FUNC(x) \
	asm volatile ( \
"	.globl " #x "\n" \
#x ":\n" \
"	ldr	ip, [r8, %0]\n" \
"	ldr	pc, [ip, %1]\n" \
	: : "i"(offsetof(gd_t, jt)), "i"(XF_ ## x * sizeof(void *)) : "ip");
#elif defined(CONFIG_MIPS)
/*
 * k0 ($26) holds the pointer to the global_data; t9 ($25) is a call-
 * clobbered register that is also used to set gp ($28). Note that the
 * jr instruction also executes the instruction immediately following
 * it; however, GCC/mips generates an additional `nop' after each asm
 * statement
 */
#define EXPORT_FUNC(x) \
	asm volatile ( \
"	.globl " #x "\n" \
#x ":\n" \
"	lw	$25, %0($26)\n" \
"	lw	$25, %1($25)\n" \
"	jr	$25\n" \
	: : "i"(offsetof(gd_t, jt)), "i"(XF_ ## x * sizeof(void *)) : "t9");
#elif defined(CONFIG_NIOS2)
/*
 * gp holds the pointer to the global_data, r8 is call-clobbered
 */
#define EXPORT_FUNC(x) \
	asm volatile ( \
"	.globl " #x "\n" \
#x ":\n" \
"	movhi	r8, %%hi(%0)\n" \
"	ori	r8, r0, %%lo(%0)\n" \
"	add	r8, r8, gp\n" \
"	ldw	r8, 0(r8)\n" \
"	ldw	r8, %1(r8)\n" \
"	jmp	r8\n" \
	: : "i"(offsetof(gd_t, jt)), "i"(XF_ ## x * sizeof(void *)) : "gp");
#elif defined(CONFIG_M68K)
/*
 * d7 holds the pointer to the global_data, a0 is a call-clobbered
 * register
 */
#define EXPORT_FUNC(x) \
	asm volatile ( \
"	.globl " #x "\n" \
#x ":\n" \
"	move.l	%%d7, %%a0\n" \
"	adda.l	%0, %%a0\n" \
"	move.l	(%%a0), %%a0\n" \
"	adda.l	%1, %%a0\n" \
"	move.l	(%%a0), %%a0\n" \
"	jmp	(%%a0)\n" \
	: : "i"(offsetof(gd_t, jt)), "i"(XF_ ## x * sizeof(void *)) : "a0");
#elif defined(CONFIG_MICROBLAZE)
/*
 * r31 holds the pointer to the global_data. r5 is a call-clobbered
 * register.
 */
#define EXPORT_FUNC(x) \
	asm volatile ( \
"	.globl " #x "\n" \
#x ":\n" \
"	lwi	r5, r31, %0\n" \
"	lwi	r5, r5, %1\n" \
"	bra	r5\n" \
	: : "i"(offsetof(gd_t, jt)), "i"(XF_ ## x * sizeof(void *)) : "r5");
#elif defined(CONFIG_BLACKFIN)
/*
 * P3 holds the pointer to the global_data, P0 is a call-clobbered
 * register
 */
#define EXPORT_FUNC(x) \
	asm volatile ( \
"	.globl _" #x "\n_" \
#x ":\n" \
"	P0 = [P3 + %0]\n" \
"	P0 = [P0 + %1]\n" \
"	JUMP (P0)\n" \
	: : "i"(offsetof(gd_t, jt)), "i"(XF_ ## x * sizeof(void *)) : "P0");
#elif defined(CONFIG_AVR32)
/*
 * r6 holds the pointer to the global_data. r8 is call-clobbered.
 */
#define EXPORT_FUNC(x) \
	asm volatile( \
"	.globl\t" #x "\n" \
#x ":\n" \
"	ld.w	r8, r6[%0]\n" \
"	ld.w	pc, r8[%1]\n" \
	: \
	: "i"(offsetof(gd_t, jt)), "i"(XF_ ##x) \
	: "r8");
#elif defined(CONFIG_SH)
/*
 * r13 holds the pointer to the global_data. r1 is a call-clobbered
 * register.
 */
#define EXPORT_FUNC(x) \
	asm volatile ( \
"	.align	2\n" \
"	.globl " #x "\n" \
#x ":\n" \
"	mov	r13, r1\n" \
"	add	%0, r1\n" \
"	mov.l	@r1, r2\n" \
"	add	%1, r2\n" \
"	mov.l	@r2, r1\n" \
"	jmp	@r1\n" \
"	nop\n" \
"	nop\n" \
	: : "i"(offsetof(gd_t, jt)), "i"(XF_ ## x * sizeof(void *)) : "r1", "r2");
#elif defined(CONFIG_SPARC)
/*
 * g7 holds the pointer to the global_data. g1 is call-clobbered.
 */
#define EXPORT_FUNC(x) \
	asm volatile( \
"	.globl\t" #x "\n" \
#x ":\n" \
"	set	%0, %%g1\n" \
"	or	%%g1, %%g7, %%g1\n" \
"	ld	[%%g1], %%g1\n" \
"	ld	[%%g1 + %1], %%g1\n" \
"	jmp	%%g1\n" \
"	nop\n" \
	: : "i"(offsetof(gd_t, jt)), "i"(XF_ ## x * sizeof(void *)) : "g1");

#else
#error stubs definition missing for this architecture
#endif

/* This function is necessary to prevent the compiler from
 * generating prologue/epilogue, preparing stack frame etc.
 * The stub functions are special: they do not use the stack
 * frame passed to them, but pass it intact to the actual
 * implementation. On the other hand, asm() statements with
 * arguments can be used only inside functions (gcc limitation)
 */
#if GCC_VERSION < 3004
static
#endif /* GCC_VERSION */
void __attribute__((unused)) dummy(void)
{
#include <_exports.h>
}

extern unsigned long __bss_start, _end;

void app_startup(char **argv)
{
	unsigned char *cp = (unsigned char *)&__bss_start;

	/* Zero out BSS */
	while (cp < (unsigned char *)&_end) {
		*cp++ = 0;
	}

#if defined(CONFIG_I386)
	/* x86 does not have a dedicated register for passing global_data */
	global_data = (gd_t *)argv[-1];
	jt = global_data->jt;
#endif
}

#undef EXPORT_FUNC
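
/*
 * Usage sketch (illustrative only, not part of the original file):
 * a standalone application linked against these stubs is expected to
 * call app_startup() once before invoking any exported function, so
 * that BSS is cleared and, on x86, the jump table pointer is picked
 * up from argv[-1]. Assuming printf is among the functions listed in
 * <_exports.h>, an application entry point (hypothetical name
 * my_app) might look like:
 *
 *	int my_app(int argc, char *argv[])
 *	{
 *		app_startup(argv);
 *		printf("Hello from a standalone application\n");
 *		return 0;
 *	}
 *
 * Here printf resolves to the stub emitted by EXPORT_FUNC(printf)
 * inside dummy() above, which loads gd->jt and jumps through the
 * entry at index XF_printf, landing in the firmware's implementation.
 */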