/* bpf_jit_asm.S: skb data access helpers for the sparc BPF JIT. */

#include <asm/ptrace.h>

#include "bpf_jit.h"

#ifdef CONFIG_SPARC64
#define SAVE_SZ		176
#define SCRATCH_OFF	STACK_BIAS + 128
#define BE_PTR(label)	be,pn %xcc, label
#else
#define SAVE_SZ		96
#define SCRATCH_OFF	72
#define BE_PTR(label)	be label
#endif

#define SKF_MAX_NEG_OFF	(-0x200000) /* SKF_LL_OFF from filter.h */
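/* Calling convention, per the register aliases in bpf_jit.h: the JITed
 * program keeps the skb in r_SKB, skb->data in r_SKB_DATA and the length
 * of the linear data area in r_HEADLEN, and passes the requested offset
 * in r_OFF.  Results come back in r_A (r_X for the "msh" helper).  The
 * fast paths below are leaf routines, returning with "retl" through the
 * %o7 set up by the JITed program's "call".
 */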
	.text
	.globl	bpf_jit_load_word
bpf_jit_load_word:
	cmp	r_OFF, 0
	bl	bpf_slow_path_word_neg
	 nop
	.globl	bpf_jit_load_word_positive_offset
bpf_jit_load_word_positive_offset:
	sub	r_HEADLEN, r_OFF, r_TMP
	cmp	r_TMP, 3
	ble	bpf_slow_path_word	/* Fewer than 4 linear bytes left. */
	 add	r_SKB_DATA, r_OFF, r_TMP
	andcc	r_TMP, 3, %g0
	bne	load_word_unaligned
	 nop
	retl
	 ld	[r_TMP], r_A
load_word_unaligned:
	/* Assemble the big-endian word one byte at a time. */
	ldub	[r_TMP + 0x0], r_OFF
	ldub	[r_TMP + 0x1], r_TMP2
	sll	r_OFF, 8, r_OFF
	or	r_OFF, r_TMP2, r_OFF
	ldub	[r_TMP + 0x2], r_TMP2
	sll	r_OFF, 8, r_OFF
	or	r_OFF, r_TMP2, r_OFF
	ldub	[r_TMP + 0x3], r_TMP2
	sll	r_OFF, 8, r_OFF
	retl
	 or	r_OFF, r_TMP2, r_A
	.globl	bpf_jit_load_half
bpf_jit_load_half:
	cmp	r_OFF, 0
	bl	bpf_slow_path_half_neg
	 nop
	.globl	bpf_jit_load_half_positive_offset
bpf_jit_load_half_positive_offset:
	sub	r_HEADLEN, r_OFF, r_TMP
	cmp	r_TMP, 1
	ble	bpf_slow_path_half	/* Fewer than 2 linear bytes left. */
	 add	r_SKB_DATA, r_OFF, r_TMP
	andcc	r_TMP, 1, %g0
	bne	load_half_unaligned
	 nop
	retl
	 lduh	[r_TMP], r_A
load_half_unaligned:
	ldub	[r_TMP + 0x0], r_OFF
	ldub	[r_TMP + 0x1], r_TMP2
	sll	r_OFF, 8, r_OFF
	retl
	 or	r_OFF, r_TMP2, r_A
	.globl	bpf_jit_load_byte
bpf_jit_load_byte:
	cmp	r_OFF, 0
	bl	bpf_slow_path_byte_neg
	 nop
	.globl	bpf_jit_load_byte_positive_offset
bpf_jit_load_byte_positive_offset:
	cmp	r_OFF, r_HEADLEN
	bge	bpf_slow_path_byte
	 nop
	retl
	 ldub	[r_SKB_DATA + r_OFF], r_A
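/* BPF_LDX | BPF_MSH | BPF_B: load the low nibble of the byte at r_OFF
 * and scale it by four, i.e. X = 4 * (P[k] & 0xf).  Classic BPF uses
 * this to pull an IP header length out of the IHL field.
 */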
	.globl	bpf_jit_load_byte_msh
bpf_jit_load_byte_msh:
	cmp	r_OFF, 0
	bl	bpf_slow_path_byte_msh_neg
	 nop
	.globl	bpf_jit_load_byte_msh_positive_offset
bpf_jit_load_byte_msh_positive_offset:
	cmp	r_OFF, r_HEADLEN
	bge	bpf_slow_path_byte_msh
	 nop
	ldub	[r_SKB_DATA + r_OFF], r_OFF
	and	r_OFF, 0xf, r_OFF
	retl
	 sll	r_OFF, 2, r_X
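/* Slow path: the bytes are not (all) in the linear area, so have
 * skb_copy_bits() gather LEN of them into a scratch slot in our stack
 * frame.  The "cmp" is done deliberately inside the register window:
 * condition codes survive the "restore", so the caller can branch to
 * bpf_error on a negative return from skb_copy_bits().
 */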
#define bpf_slow_path_common(LEN)	\
	save	%sp, -SAVE_SZ, %sp;	\
	mov	%i0, %o0;		\
	mov	r_OFF, %o1;		\
	add	%fp, SCRATCH_OFF, %o2;	\
	call	skb_copy_bits;		\
	 mov	(LEN), %o3;		\
	cmp	%o0, 0;			\
	restore;
bpf_slow_path_word:
	bpf_slow_path_common(4)
	bl	bpf_error
	 ld	[%sp + SCRATCH_OFF], r_A
	retl
	 nop
bpf_slow_path_half:
	bpf_slow_path_common(2)
	bl	bpf_error
	 lduh	[%sp + SCRATCH_OFF], r_A
	retl
	 nop
bpf_slow_path_byte:
	bpf_slow_path_common(1)
	bl	bpf_error
	 ldub	[%sp + SCRATCH_OFF], r_A
	retl
	 nop
bpf_slow_path_byte_msh:
	bpf_slow_path_common(1)
	bl	bpf_error
	 ldub	[%sp + SCRATCH_OFF], r_OFF	/* Into r_OFF, not r_A: MSH must not clobber A. */
	and	r_OFF, 0xf, r_OFF
	retl
	 sll	r_OFF, 2, r_X
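/* Negative offsets encode the classic BPF SKF_LL_OFF / SKF_NET_OFF
 * pseudo-offsets from filter.h.  bpf_internal_load_pointer_neg_helper()
 * resolves them to a data pointer, or NULL on failure.  That pointer is
 * copied into r_TMP, a global register, before the "restore" (which
 * sits in the branch delay slot and executes either way) so it survives
 * the window switch.
 */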
#define bpf_negative_common(LEN)			\
	save	%sp, -SAVE_SZ, %sp;			\
	mov	%i0, %o0;				\
	mov	r_OFF, %o1;				\
	call	bpf_internal_load_pointer_neg_helper;	\
	 mov	(LEN), %o2;				\
	mov	%o0, r_TMP;				\
	cmp	%o0, 0;					\
	BE_PTR(bpf_error);				\
	 restore;
bpf_slow_path_word_neg:
	sethi	%hi(SKF_MAX_NEG_OFF), r_TMP
	cmp	r_OFF, r_TMP	/* Offsets below SKF_LL_OFF are invalid. */
	bl	bpf_error
	 nop
	.globl	bpf_jit_load_word_negative_offset
bpf_jit_load_word_negative_offset:
	bpf_negative_common(4)
	andcc	r_TMP, 3, %g0
	bne	load_word_unaligned
	 nop
	retl
	 ld	[r_TMP], r_A
bpf_slow_path_half_neg:
	sethi	%hi(SKF_MAX_NEG_OFF), r_TMP
	cmp	r_OFF, r_TMP
	bl	bpf_error
	 nop
	.globl	bpf_jit_load_half_negative_offset
bpf_jit_load_half_negative_offset:
	bpf_negative_common(2)
	andcc	r_TMP, 1, %g0
	bne	load_half_unaligned
	 nop
	retl
	 lduh	[r_TMP], r_A
bpf_slow_path_byte_neg:
	sethi	%hi(SKF_MAX_NEG_OFF), r_TMP
	cmp	r_OFF, r_TMP
	bl	bpf_error
	 nop
	.globl	bpf_jit_load_byte_negative_offset
bpf_jit_load_byte_negative_offset:
	bpf_negative_common(1)
	retl
	 ldub	[r_TMP], r_A
bpf_slow_path_byte_msh_neg:
	sethi	%hi(SKF_MAX_NEG_OFF), r_TMP
	cmp	r_OFF, r_TMP
	bl	bpf_error
	 nop
	.globl	bpf_jit_load_byte_msh_negative_offset
bpf_jit_load_byte_msh_negative_offset:
	bpf_negative_common(1)
	ldub	[r_TMP], r_OFF
	and	r_OFF, 0xf, r_OFF
	retl
	 sll	r_OFF, 2, r_X
bpf_error:
	/* Make the JIT program return zero.  The JIT prologue
	 * stores away the original %o7 into r_saved_O7.  The
	 * normal leaf function return is to use "retl", which
	 * would evaluate to "jmpl %o7 + 8, %g0", but we want to
	 * use the saved value, thus the sequence you see here.
	 */
	jmpl	r_saved_O7 + 8, %g0
	 clr	%o0
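/* A note on r_saved_O7: the copy of %o7 has to be made before the first
 * helper call can happen, i.e. in the generated prologue (something
 * like "mov %o7, r_saved_O7"), since bpf_error jumps through it without
 * ever reaching the normal epilogue.
 */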