/* -*- linux-c -*- ------------------------------------------------------- *
 *
 *   Copyright (C) 2012 Intel Corporation
 *   Author: Yuanhan Liu <yuanhan.liu@linux.intel.com>
 *
 *   Based on sse2.c: Copyright 2002 H. Peter Anvin - All Rights Reserved
 *
 *   This program is free software; you can redistribute it and/or modify
 *   it under the terms of the GNU General Public License as published by
 *   the Free Software Foundation, Inc., 53 Temple Place Ste 330,
 *   Boston MA 02111-1307, USA; either version 2 of the License, or
 *   (at your option) any later version; incorporated herein by reference.
 *
 * ----------------------------------------------------------------------- */

/*
 * AVX2 implementation of RAID-6 syndrome functions
 */
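
/*
 * Background (a summary of the standard RAID-6 math, not in the original
 * header): for data disks D_0 .. D_{n-1}, the two syndromes are
 *
 *	P = D_0 ^ D_1 ^ ... ^ D_{n-1}
 *	Q = g^0*D_0 ^ g^1*D_1 ^ ... ^ g^{n-1}*D_{n-1}
 *
 * computed bytewise over GF(2^8) with generator g = {02}.  The loops
 * below evaluate Q by Horner's rule, walking from the highest data disk
 * down:  Q = (...((D_{n-1}*g ^ D_{n-2})*g ^ ...)*g ^ D_0.
 */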
#ifdef CONFIG_AS_AVX2

#include <linux/raid/pq.h>
#include "x86.h"
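
/*
 * {1d} replicated across a 32-byte lane: the low byte of the GF(2^8)
 * reduction polynomial x^8 + x^4 + x^3 + x^2 + 1 (0x11d).  Multiplying
 * a byte by {02} is a left shift by one; whenever the shifted-out high
 * bit was set, the result is reduced by XORing in 0x1d.
 */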
static const struct raid6_avx2_constants {
	u64 x1d[4];
} raid6_avx2_constants __aligned(32) = {
	{ 0x1d1d1d1d1d1d1d1dULL, 0x1d1d1d1d1d1d1d1dULL,
	  0x1d1d1d1d1d1d1d1dULL, 0x1d1d1d1d1d1d1d1dULL,},
};

static int raid6_have_avx2(void)
{
	return boot_cpu_has(X86_FEATURE_AVX2) && boot_cpu_has(X86_FEATURE_AVX);
}

/*
 * Plain AVX2 implementation
 */
static void raid6_avx21_gen_syndrome(int disks, size_t bytes, void **ptrs)
{
	u8 **dptr = (u8 **)ptrs;
	u8 *p, *q;
	int d, z, z0;

	z0 = disks - 3;		/* Highest data disk */
	p = dptr[z0+1];		/* XOR parity */
	q = dptr[z0+2];		/* RS syndrome */

	kernel_fpu_begin();

	asm volatile("vmovdqa %0,%%ymm0" : : "m" (raid6_avx2_constants.x1d[0]));
	asm volatile("vpxor %ymm3,%ymm3,%ymm3");	/* Zero temp */

	for (d = 0; d < bytes; d += 32) {
		asm volatile("prefetchnta %0" : : "m" (dptr[z0][d]));
		asm volatile("vmovdqa %0,%%ymm2" : : "m" (dptr[z0][d]));/* P[0] */
		asm volatile("prefetchnta %0" : : "m" (dptr[z0-1][d]));
		asm volatile("vmovdqa %ymm2,%ymm4");	/* Q[0] */
		asm volatile("vmovdqa %0,%%ymm6" : : "m" (dptr[z0-1][d]));
		for (z = z0-2; z >= 0; z--) {
			asm volatile("prefetchnta %0" : : "m" (dptr[z][d]));
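			/*
			 * Multiply Q (ymm4) by {02}: vpcmpgtb against the
			 * zero register yields 0xff in every byte whose top
			 * bit is set (the byte compares negative), vpaddb
			 * doubles each byte (there is no byte-wide shift
			 * instruction), and the masked XOR folds in the
			 * 0x1d reduction.
			 */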
			asm volatile("vpcmpgtb %ymm4,%ymm3,%ymm5");
			asm volatile("vpaddb %ymm4,%ymm4,%ymm4");
			asm volatile("vpand %ymm0,%ymm5,%ymm5");
			asm volatile("vpxor %ymm5,%ymm4,%ymm4");
			asm volatile("vpxor %ymm6,%ymm2,%ymm2");
			asm volatile("vpxor %ymm6,%ymm4,%ymm4");
			asm volatile("vmovdqa %0,%%ymm6" : : "m" (dptr[z][d]));
		}
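		/*
		 * Final round: multiply Q by {02} once more and fold in
		 * D_0, which the last loop pass left in ymm6.
		 */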
		asm volatile("vpcmpgtb %ymm4,%ymm3,%ymm5");
		asm volatile("vpaddb %ymm4,%ymm4,%ymm4");
		asm volatile("vpand %ymm0,%ymm5,%ymm5");
		asm volatile("vpxor %ymm5,%ymm4,%ymm4");
		asm volatile("vpxor %ymm6,%ymm2,%ymm2");
		asm volatile("vpxor %ymm6,%ymm4,%ymm4");

		asm volatile("vmovntdq %%ymm2,%0" : "=m" (p[d]));
		asm volatile("vpxor %ymm2,%ymm2,%ymm2");
		asm volatile("vmovntdq %%ymm4,%0" : "=m" (q[d]));
		asm volatile("vpxor %ymm4,%ymm4,%ymm4");
	}

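	/*
	 * vmovntdq stores are non-temporal (write-combining) and bypass
	 * the cache; sfence orders them so they are globally visible
	 * before any subsequent stores.
	 */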
	asm volatile("sfence" : : : "memory");
	kernel_fpu_end();
}

const struct raid6_calls raid6_avx2x1 = {
	raid6_avx21_gen_syndrome,
	raid6_have_avx2,
	"avx2x1",
	1			/* Has cache hints */
};
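
/*
 * Minimal usage sketch (hypothetical caller, not part of this file;
 * NDISKS, data[], p and q are placeholders).  gen_syndrome() takes the
 * total disk count including P and Q, and an array of that many page
 * pointers: data pages first, then P, then Q.  Every page must be
 * 32-byte aligned (vmovdqa faults on unaligned addresses), and bytes
 * must be a multiple of the unroll width (32/64/128 here):
 *
 *	void *ptrs[NDISKS + 2];
 *	memcpy(ptrs, data, NDISKS * sizeof(void *));
 *	ptrs[NDISKS]     = p;
 *	ptrs[NDISKS + 1] = q;
 *	if (raid6_avx2x1.valid())
 *		raid6_avx2x1.gen_syndrome(NDISKS + 2, PAGE_SIZE, ptrs);
 *
 * In-kernel these raid6_calls structs are not called directly;
 * lib/raid6/algos.c benchmarks each valid implementation at init time
 * and installs the fastest.
 */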

/*
 * Unrolled-by-2 AVX2 implementation
 */
static void raid6_avx22_gen_syndrome(int disks, size_t bytes, void **ptrs)
{
	u8 **dptr = (u8 **)ptrs;
	u8 *p, *q;
	int d, z, z0;

	z0 = disks - 3;		/* Highest data disk */
	p = dptr[z0+1];		/* XOR parity */
	q = dptr[z0+2];		/* RS syndrome */

	kernel_fpu_begin();

	asm volatile("vmovdqa %0,%%ymm0" : : "m" (raid6_avx2_constants.x1d[0]));
	asm volatile("vpxor %ymm1,%ymm1,%ymm1");	/* Zero temp */

	/* We uniformly assume a single prefetch covers at least 32 bytes */
	for (d = 0; d < bytes; d += 64) {
		asm volatile("prefetchnta %0" : : "m" (dptr[z0][d]));
		asm volatile("prefetchnta %0" : : "m" (dptr[z0][d+32]));
		asm volatile("vmovdqa %0,%%ymm2" : : "m" (dptr[z0][d]));/* P[0] */
		asm volatile("vmovdqa %0,%%ymm3" : : "m" (dptr[z0][d+32]));/* P[1] */
		asm volatile("vmovdqa %ymm2,%ymm4");	/* Q[0] */
		asm volatile("vmovdqa %ymm3,%ymm6");	/* Q[1] */
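		/*
		 * Two independent 32-byte lanes per pass: the {02}
		 * multiply is a four-instruction dependent chain, and
		 * keeping two Q accumulators (ymm4, ymm6) in flight lets
		 * the chains overlap instead of serializing.
		 */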
		for (z = z0-1; z >= 0; z--) {
			asm volatile("prefetchnta %0" : : "m" (dptr[z][d]));
			asm volatile("prefetchnta %0" : : "m" (dptr[z][d+32]));
			asm volatile("vpcmpgtb %ymm4,%ymm1,%ymm5");
			asm volatile("vpcmpgtb %ymm6,%ymm1,%ymm7");
			asm volatile("vpaddb %ymm4,%ymm4,%ymm4");
			asm volatile("vpaddb %ymm6,%ymm6,%ymm6");
			asm volatile("vpand %ymm0,%ymm5,%ymm5");
			asm volatile("vpand %ymm0,%ymm7,%ymm7");
			asm volatile("vpxor %ymm5,%ymm4,%ymm4");
			asm volatile("vpxor %ymm7,%ymm6,%ymm6");
			asm volatile("vmovdqa %0,%%ymm5" : : "m" (dptr[z][d]));
			asm volatile("vmovdqa %0,%%ymm7" : : "m" (dptr[z][d+32]));
			asm volatile("vpxor %ymm5,%ymm2,%ymm2");
			asm volatile("vpxor %ymm7,%ymm3,%ymm3");
			asm volatile("vpxor %ymm5,%ymm4,%ymm4");
			asm volatile("vpxor %ymm7,%ymm6,%ymm6");
		}
		asm volatile("vmovntdq %%ymm2,%0" : "=m" (p[d]));
		asm volatile("vmovntdq %%ymm3,%0" : "=m" (p[d+32]));
		asm volatile("vmovntdq %%ymm4,%0" : "=m" (q[d]));
		asm volatile("vmovntdq %%ymm6,%0" : "=m" (q[d+32]));
	}

	asm volatile("sfence" : : : "memory");
	kernel_fpu_end();
}

const struct raid6_calls raid6_avx2x2 = {
	raid6_avx22_gen_syndrome,
	raid6_have_avx2,
	"avx2x2",
	1			/* Has cache hints */
};

#ifdef CONFIG_X86_64

/*
 * Unrolled-by-4 AVX2 implementation
 */
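/*
 * This variant uses ymm8-ymm15, which are addressable only in 64-bit
 * mode, hence the CONFIG_X86_64 guard.
 */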
static void raid6_avx24_gen_syndrome(int disks, size_t bytes, void **ptrs)
{
	u8 **dptr = (u8 **)ptrs;
	u8 *p, *q;
	int d, z, z0;

	z0 = disks - 3;		/* Highest data disk */
	p = dptr[z0+1];		/* XOR parity */
	q = dptr[z0+2];		/* RS syndrome */

	kernel_fpu_begin();

	asm volatile("vmovdqa %0,%%ymm0" : : "m" (raid6_avx2_constants.x1d[0]));
	asm volatile("vpxor %ymm1,%ymm1,%ymm1");	/* Zero temp */
	asm volatile("vpxor %ymm2,%ymm2,%ymm2");	/* P[0] */
	asm volatile("vpxor %ymm3,%ymm3,%ymm3");	/* P[1] */
	asm volatile("vpxor %ymm4,%ymm4,%ymm4");	/* Q[0] */
	asm volatile("vpxor %ymm6,%ymm6,%ymm6");	/* Q[1] */
	asm volatile("vpxor %ymm10,%ymm10,%ymm10");	/* P[2] */
	asm volatile("vpxor %ymm11,%ymm11,%ymm11");	/* P[3] */
	asm volatile("vpxor %ymm12,%ymm12,%ymm12");	/* Q[2] */
	asm volatile("vpxor %ymm14,%ymm14,%ymm14");	/* Q[3] */
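
	/*
	 * Unlike the x1/x2 variants, which seed P and Q from the highest
	 * data disk's block, all eight accumulators start at zero here and
	 * the inner loop visits every data disk; the first pass multiplies
	 * an all-zero Q, a no-op that keeps the loop body uniform.  The
	 * accumulators are re-zeroed after each non-temporal store below.
	 */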
	for (d = 0; d < bytes; d += 128) {
		for (z = z0; z >= 0; z--) {
			asm volatile("prefetchnta %0" : : "m" (dptr[z][d]));
			asm volatile("prefetchnta %0" : : "m" (dptr[z][d+32]));
			asm volatile("prefetchnta %0" : : "m" (dptr[z][d+64]));
			asm volatile("prefetchnta %0" : : "m" (dptr[z][d+96]));
			asm volatile("vpcmpgtb %ymm4,%ymm1,%ymm5");
			asm volatile("vpcmpgtb %ymm6,%ymm1,%ymm7");
			asm volatile("vpcmpgtb %ymm12,%ymm1,%ymm13");
			asm volatile("vpcmpgtb %ymm14,%ymm1,%ymm15");
			asm volatile("vpaddb %ymm4,%ymm4,%ymm4");
			asm volatile("vpaddb %ymm6,%ymm6,%ymm6");
			asm volatile("vpaddb %ymm12,%ymm12,%ymm12");
			asm volatile("vpaddb %ymm14,%ymm14,%ymm14");
			asm volatile("vpand %ymm0,%ymm5,%ymm5");
			asm volatile("vpand %ymm0,%ymm7,%ymm7");
			asm volatile("vpand %ymm0,%ymm13,%ymm13");
			asm volatile("vpand %ymm0,%ymm15,%ymm15");
			asm volatile("vpxor %ymm5,%ymm4,%ymm4");
			asm volatile("vpxor %ymm7,%ymm6,%ymm6");
			asm volatile("vpxor %ymm13,%ymm12,%ymm12");
			asm volatile("vpxor %ymm15,%ymm14,%ymm14");
			asm volatile("vmovdqa %0,%%ymm5" : : "m" (dptr[z][d]));
			asm volatile("vmovdqa %0,%%ymm7" : : "m" (dptr[z][d+32]));
			asm volatile("vmovdqa %0,%%ymm13" : : "m" (dptr[z][d+64]));
			asm volatile("vmovdqa %0,%%ymm15" : : "m" (dptr[z][d+96]));
			asm volatile("vpxor %ymm5,%ymm2,%ymm2");
			asm volatile("vpxor %ymm7,%ymm3,%ymm3");
			asm volatile("vpxor %ymm13,%ymm10,%ymm10");
			asm volatile("vpxor %ymm15,%ymm11,%ymm11");
			asm volatile("vpxor %ymm5,%ymm4,%ymm4");
			asm volatile("vpxor %ymm7,%ymm6,%ymm6");
			asm volatile("vpxor %ymm13,%ymm12,%ymm12");
			asm volatile("vpxor %ymm15,%ymm14,%ymm14");
		}
		asm volatile("vmovntdq %%ymm2,%0" : "=m" (p[d]));
		asm volatile("vpxor %ymm2,%ymm2,%ymm2");
		asm volatile("vmovntdq %%ymm3,%0" : "=m" (p[d+32]));
		asm volatile("vpxor %ymm3,%ymm3,%ymm3");
		asm volatile("vmovntdq %%ymm10,%0" : "=m" (p[d+64]));
		asm volatile("vpxor %ymm10,%ymm10,%ymm10");
		asm volatile("vmovntdq %%ymm11,%0" : "=m" (p[d+96]));
		asm volatile("vpxor %ymm11,%ymm11,%ymm11");
		asm volatile("vmovntdq %%ymm4,%0" : "=m" (q[d]));
		asm volatile("vpxor %ymm4,%ymm4,%ymm4");
		asm volatile("vmovntdq %%ymm6,%0" : "=m" (q[d+32]));
		asm volatile("vpxor %ymm6,%ymm6,%ymm6");
		asm volatile("vmovntdq %%ymm12,%0" : "=m" (q[d+64]));
		asm volatile("vpxor %ymm12,%ymm12,%ymm12");
		asm volatile("vmovntdq %%ymm14,%0" : "=m" (q[d+96]));
		asm volatile("vpxor %ymm14,%ymm14,%ymm14");
	}

	asm volatile("sfence" : : : "memory");
	kernel_fpu_end();
}

const struct raid6_calls raid6_avx2x4 = {
	raid6_avx24_gen_syndrome,
	raid6_have_avx2,
	"avx2x4",
	1			/* Has cache hints */
};
#endif

#endif /* CONFIG_AS_AVX2 */