stubs.c

#include <common.h>
#include <exports.h>

#ifndef GCC_VERSION
#define GCC_VERSION (__GNUC__ * 1000 + __GNUC_MINOR__)
#endif /* GCC_VERSION */
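
/*
 * Each EXPORT_FUNC(x) defined below emits a small assembly stub named x:
 * it loads the address of U-Boot's jump table (gd->jt, or the local jt
 * copy on x86), picks the entry for XF_x and jumps to the real
 * implementation inside U-Boot.  The stubs are instantiated by including
 * <_exports.h> from dummy() near the end of this file.
 */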
#if defined(CONFIG_X86)
/*
 * x86 does not have a dedicated register to store the pointer to
 * the global_data.  Thus the jump table address is stored in a
 * global variable, but such an approach does not allow for execution
 * from flash memory.  The global_data address is passed as argv[-1]
 * to the application program.
 */
static void **jt;
gd_t *global_data;

#define EXPORT_FUNC(x) \
        asm volatile ( \
                " .globl " #x "\n" \
                #x ":\n" \
                " movl %0, %%eax\n" \
                " movl jt, %%ecx\n" \
                " jmp *(%%ecx, %%eax)\n" \
                : : "i"(XF_ ## x * sizeof(void *)) : "eax", "ecx");
#elif defined(CONFIG_PPC)
/*
 * r2 holds the pointer to the global_data, r11 is a call-clobbered
 * register
 */
#define EXPORT_FUNC(x) \
        asm volatile ( \
                " .globl " #x "\n" \
                #x ":\n" \
                " lwz %%r11, %0(%%r2)\n" \
                " lwz %%r11, %1(%%r11)\n" \
                " mtctr %%r11\n" \
                " bctr\n" \
                : : "i"(offsetof(gd_t, jt)), "i"(XF_ ## x * sizeof(void *)) : "r11");
#elif defined(CONFIG_ARM)
/*
 * r8 holds the pointer to the global_data, ip is a call-clobbered
 * register
 */
#define EXPORT_FUNC(x) \
        asm volatile ( \
                " .globl " #x "\n" \
                #x ":\n" \
                " ldr ip, [r8, %0]\n" \
                " ldr pc, [ip, %1]\n" \
                : : "i"(offsetof(gd_t, jt)), "i"(XF_ ## x * sizeof(void *)) : "ip");
#elif defined(CONFIG_MIPS)
/*
 * k0 ($26) holds the pointer to the global_data; t9 ($25) is a
 * call-clobbered register that is also used to set gp ($28).  Note
 * that the jr instruction also executes the instruction in its delay
 * slot; however, GCC/mips generates an additional `nop' after each
 * asm statement.
 */
#define EXPORT_FUNC(x) \
        asm volatile ( \
                " .globl " #x "\n" \
                #x ":\n" \
                " lw $25, %0($26)\n" \
                " lw $25, %1($25)\n" \
                " jr $25\n" \
                : : "i"(offsetof(gd_t, jt)), "i"(XF_ ## x * sizeof(void *)) : "t9");
#elif defined(CONFIG_NIOS2)
/*
 * gp holds the pointer to the global_data, r8 is call-clobbered
 */
#define EXPORT_FUNC(x) \
        asm volatile ( \
                " .globl " #x "\n" \
                #x ":\n" \
                " movhi r8, %%hi(%0)\n" \
                " ori r8, r0, %%lo(%0)\n" \
                " add r8, r8, gp\n" \
                " ldw r8, 0(r8)\n" \
                " ldw r8, %1(r8)\n" \
                " jmp r8\n" \
                : : "i"(offsetof(gd_t, jt)), "i"(XF_ ## x * sizeof(void *)) : "gp");
#elif defined(CONFIG_M68K)
/*
 * d7 holds the pointer to the global_data, a0 is a call-clobbered
 * register
 */
#define EXPORT_FUNC(x) \
        asm volatile ( \
                " .globl " #x "\n" \
                #x ":\n" \
                " move.l %%d7, %%a0\n" \
                " adda.l %0, %%a0\n" \
                " move.l (%%a0), %%a0\n" \
                " adda.l %1, %%a0\n" \
                " move.l (%%a0), %%a0\n" \
                " jmp (%%a0)\n" \
                : : "i"(offsetof(gd_t, jt)), "i"(XF_ ## x * sizeof(void *)) : "a0");
#elif defined(CONFIG_MICROBLAZE)
/*
 * r31 holds the pointer to the global_data, r5 is a call-clobbered
 * register
 */
#define EXPORT_FUNC(x) \
        asm volatile ( \
                " .globl " #x "\n" \
                #x ":\n" \
                " lwi r5, r31, %0\n" \
                " lwi r5, r5, %1\n" \
                " bra r5\n" \
                : : "i"(offsetof(gd_t, jt)), "i"(XF_ ## x * sizeof(void *)) : "r5");
#elif defined(CONFIG_BLACKFIN)
/*
 * P3 holds the pointer to the global_data, P0 is a call-clobbered
 * register
 */
#define EXPORT_FUNC(x) \
        asm volatile ( \
                " .globl _" #x "\n_" \
                #x ":\n" \
                " P0 = [P3 + %0]\n" \
                " P0 = [P0 + %1]\n" \
                " JUMP (P0)\n" \
                : : "i"(offsetof(gd_t, jt)), "i"(XF_ ## x * sizeof(void *)) : "P0");
#elif defined(CONFIG_AVR32)
/*
 * r6 holds the pointer to the global_data. r8 is call clobbered.
 */
#define EXPORT_FUNC(x) \
        asm volatile ( \
                " .globl\t" #x "\n" \
                #x ":\n" \
                " ld.w r8, r6[%0]\n" \
                " ld.w pc, r8[%1]\n" \
                : \
                : "i"(offsetof(gd_t, jt)), "i"(XF_ ##x) \
                : "r8");
#elif defined(CONFIG_SH)
/*
 * r13 holds the pointer to the global_data; r1 and r2 are
 * call-clobbered registers
 */
#define EXPORT_FUNC(x) \
        asm volatile ( \
                " .align 2\n" \
                " .globl " #x "\n" \
                #x ":\n" \
                " mov r13, r1\n" \
                " add %0, r1\n" \
                " mov.l @r1, r2\n" \
                " add %1, r2\n" \
                " mov.l @r2, r1\n" \
                " jmp @r1\n" \
                " nop\n" \
                " nop\n" \
                : : "i"(offsetof(gd_t, jt)), "i"(XF_ ## x * sizeof(void *)) : "r1", "r2");
#elif defined(CONFIG_SPARC)
/*
 * g7 holds the pointer to the global_data. g1 is call clobbered.
 */
#define EXPORT_FUNC(x) \
        asm volatile ( \
                " .globl\t" #x "\n" \
                #x ":\n" \
                " set %0, %%g1\n" \
                " or %%g1, %%g7, %%g1\n" \
                " ld [%%g1], %%g1\n" \
                " ld [%%g1 + %1], %%g1\n" \
                " jmp %%g1\n" \
                " nop\n" \
                : : "i"(offsetof(gd_t, jt)), "i"(XF_ ## x * sizeof(void *)) : "g1");
#elif defined(CONFIG_NDS32)
/*
 * gp holds the pointer to the global_data, r16 is a call-clobbered
 * register.  The reduced register set (16 GPRs) is not supported.
 */
#define EXPORT_FUNC(x) \
        asm volatile ( \
                " .globl " #x "\n" \
                #x ":\n" \
                " lwi $r16, [$gp + (%0)]\n" \
                " lwi $r16, [$r16 + (%1)]\n" \
                " jr $r16\n" \
                : : "i"(offsetof(gd_t, jt)), "i"(XF_ ## x * sizeof(void *)) : "$r16");
#elif defined(CONFIG_OPENRISC)
/*
 * r10 holds the pointer to the global_data, r13 is a call-clobbered
 * register
 */
#define EXPORT_FUNC(x) \
        asm volatile ( \
                " .globl " #x "\n" \
                #x ":\n" \
                " l.lwz r13, %0(r10)\n" \
                " l.lwz r13, %1(r13)\n" \
                " l.jr r13\n" \
                " l.nop\n" \
                : : "i"(offsetof(gd_t, jt)), "i"(XF_ ## x * sizeof(void *)) : "r13");
#else
#error stubs definition missing for this architecture
#endif

/*
 * This function is necessary to prevent the compiler from generating a
 * prologue/epilogue and setting up a stack frame.  The stub functions
 * are special: they do not use the stack frame passed to them, but pass
 * it intact to the actual implementation.  On the other hand, asm()
 * statements with arguments can only be used inside functions (a gcc
 * limitation).
 */
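/*
 * Note: dummy() is only made static for older compilers (see the
 * GCC_VERSION check below); with gcc >= 3.4, unit-at-a-time compilation
 * could discard an unused static function, and the asm stubs emitted
 * inside it would be lost with it.
 */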
#if GCC_VERSION < 3004
static
#endif /* GCC_VERSION */
void __attribute__((unused)) dummy(void)
{
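        /*
         * _exports.h contains one EXPORT_FUNC(fn) line per exported
         * function; including it here instantiates the corresponding
         * assembly stubs using the per-architecture macro defined above.
         */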
#include <_exports.h>
}

extern unsigned long __bss_start, _end;
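
/*
 * A standalone application is expected to call app_startup(argv) before
 * using any of the exported functions: it clears the BSS segment and, on
 * x86, picks up the global_data pointer passed in argv[-1].
 */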
void app_startup(char * const *argv)
{
        unsigned char *cp = (unsigned char *)&__bss_start;

        /* Zero out BSS */
        while (cp < (unsigned char *)&_end) {
                *cp++ = 0;
        }

#if defined(CONFIG_X86)
        /* x86 does not have a dedicated register for passing global_data */
        global_data = (gd_t *)argv[-1];
        jt = global_data->jt;
#endif
}

#undef EXPORT_FUNC