/*
 * futex.h - MIPS futex atomic operations (ll/sc based user-space
 * read-modify-write and compare-and-exchange primitives).
 */
#ifndef _ASM_FUTEX_H
#define _ASM_FUTEX_H

#ifdef __KERNEL__

#include <linux/futex.h>
#include <asm/errno.h>
#include <asm/uaccess.h>
#include <asm/war.h>

/*
 * On SMP a "sync" barrier is emitted after the ll/sc sequence so the
 * futex update becomes visible to other CPUs; on UP builds the macro
 * expands to nothing.
 */
#ifdef CONFIG_SMP
#define __FUTEX_SMP_SYNC " sync \n"
#else
#define __FUTEX_SMP_SYNC
#endif
  13. #define __futex_atomic_op(insn, ret, oldval, uaddr, oparg) \
  14. { \
  15. if (cpu_has_llsc && R10000_LLSC_WAR) { \
  16. __asm__ __volatile__( \
  17. " .set push \n" \
  18. " .set noat \n" \
  19. " .set mips3 \n" \
  20. "1: ll %1, %4 # __futex_atomic_op \n" \
  21. " .set mips0 \n" \
  22. " " insn " \n" \
  23. " .set mips3 \n" \
  24. "2: sc $1, %2 \n" \
  25. " beqzl $1, 1b \n" \
  26. __FUTEX_SMP_SYNC \
  27. "3: \n" \
  28. " .set pop \n" \
  29. " .set mips0 \n" \
  30. " .section .fixup,\"ax\" \n" \
  31. "4: li %0, %6 \n" \
  32. " j 2b \n" \
  33. " .previous \n" \
  34. " .section __ex_table,\"a\" \n" \
  35. " "__UA_ADDR "\t1b, 4b \n" \
  36. " "__UA_ADDR "\t2b, 4b \n" \
  37. " .previous \n" \
  38. : "=r" (ret), "=&r" (oldval), "=R" (*uaddr) \
  39. : "0" (0), "R" (*uaddr), "Jr" (oparg), "i" (-EFAULT) \
  40. : "memory"); \
  41. } else if (cpu_has_llsc) { \
  42. __asm__ __volatile__( \
  43. " .set push \n" \
  44. " .set noat \n" \
  45. " .set mips3 \n" \
  46. "1: ll %1, %4 # __futex_atomic_op \n" \
  47. " .set mips0 \n" \
  48. " " insn " \n" \
  49. " .set mips3 \n" \
  50. "2: sc $1, %2 \n" \
  51. " beqz $1, 1b \n" \
  52. __FUTEX_SMP_SYNC \
  53. "3: \n" \
  54. " .set pop \n" \
  55. " .set mips0 \n" \
  56. " .section .fixup,\"ax\" \n" \
  57. "4: li %0, %6 \n" \
  58. " j 2b \n" \
  59. " .previous \n" \
  60. " .section __ex_table,\"a\" \n" \
  61. " "__UA_ADDR "\t1b, 4b \n" \
  62. " "__UA_ADDR "\t2b, 4b \n" \
  63. " .previous \n" \
  64. : "=r" (ret), "=&r" (oldval), "=R" (*uaddr) \
  65. : "0" (0), "R" (*uaddr), "Jr" (oparg), "i" (-EFAULT) \
  66. : "memory"); \
  67. } else \
  68. ret = -ENOSYS; \
  69. }
/*
 * Execute the arithmetic half of a FUTEX_WAKE_OP request: atomically
 * fetch the old value of the user word at uaddr, apply the operation
 * encoded in encoded_op, then evaluate the encoded comparison against
 * cmparg.
 *
 * Returns the 0/1 comparison result on success, -EFAULT if uaddr is
 * not writable user memory, or -ENOSYS for an unknown op/cmp code or a
 * CPU without ll/sc.
 */
static inline int
futex_atomic_op_inuser (int encoded_op, int __user *uaddr)
{
	int op = (encoded_op >> 28) & 7;	/* operation selector */
	int cmp = (encoded_op >> 24) & 15;	/* comparison selector */
	/*
	 * Sign-extend the two 12-bit immediates by shifting left then
	 * arithmetically right (relies on implementation-defined signed
	 * right-shift behaviour, as the futex code does on all arches).
	 */
	int oparg = (encoded_op << 8) >> 20;
	int cmparg = (encoded_op << 20) >> 20;
	int oldval = 0, ret;
	if (encoded_op & (FUTEX_OP_OPARG_SHIFT << 28))
		oparg = 1 << oparg;	/* operand is a shift count */
	if (! access_ok (VERIFY_WRITE, uaddr, sizeof(int)))
		return -EFAULT;
	/* Bump the preempt count around the in-atomic user access. */
	inc_preempt_count();
	switch (op) {
	case FUTEX_OP_SET:
		__futex_atomic_op("move $1, %z5", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_ADD:
		__futex_atomic_op("addu $1, %1, %z5",
				ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_OR:
		__futex_atomic_op("or $1, %1, %z5",
				ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_ANDN:
		/* "and" with the complement implements *uaddr &= ~oparg */
		__futex_atomic_op("and $1, %1, %z5",
				ret, oldval, uaddr, ~oparg);
		break;
	case FUTEX_OP_XOR:
		__futex_atomic_op("xor $1, %1, %z5",
				ret, oldval, uaddr, oparg);
		break;
	default:
		ret = -ENOSYS;
	}
	dec_preempt_count();
	/* On success, replace ret with the comparison result (0 or 1). */
	if (!ret) {
		switch (cmp) {
		case FUTEX_OP_CMP_EQ: ret = (oldval == cmparg); break;
		case FUTEX_OP_CMP_NE: ret = (oldval != cmparg); break;
		case FUTEX_OP_CMP_LT: ret = (oldval < cmparg); break;
		case FUTEX_OP_CMP_GE: ret = (oldval >= cmparg); break;
		case FUTEX_OP_CMP_LE: ret = (oldval <= cmparg); break;
		case FUTEX_OP_CMP_GT: ret = (oldval > cmparg); break;
		default: ret = -ENOSYS;
		}
	}
	return ret;
}
/*
 * Atomically compare-and-exchange the user word at uaddr: if *uaddr ==
 * oldval, store newval.  Returns the value read from *uaddr (the caller
 * detects success by comparing it against oldval), -EFAULT for an
 * invalid user pointer, or -ENOSYS when the CPU lacks ll/sc.
 *
 * Assembler labels: 1 = ll, 2 = sc, 3 = done, 4 = fixup.  A fault on
 * the ll or sc lands in the fixup, which sets -EFAULT and exits via 3b.
 * Note the "move" sits in the delay slot of the bne; it only sets $1,
 * so executing it on the mismatch path is harmless.
 */
static inline int
futex_atomic_cmpxchg_inatomic(int __user *uaddr, int oldval, int newval)
{
	int retval;
	if (!access_ok(VERIFY_WRITE, uaddr, sizeof(int)))
		return -EFAULT;
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		/* R10000 errata variant: retry the sc with branch-likely */
		__asm__ __volatile__(
		"# futex_atomic_cmpxchg_inatomic \n"
		" .set push \n"
		" .set noat \n"
		" .set mips3 \n"
		"1: ll %0, %2 \n"
		" bne %0, %z3, 3f \n"
		" .set mips0 \n"
		" move $1, %z4 \n"
		" .set mips3 \n"
		"2: sc $1, %1 \n"
		" beqzl $1, 1b \n"
		__FUTEX_SMP_SYNC
		"3: \n"
		" .set pop \n"
		" .section .fixup,\"ax\" \n"
		"4: li %0, %5 \n"
		" j 3b \n"
		" .previous \n"
		" .section __ex_table,\"a\" \n"
		" "__UA_ADDR "\t1b, 4b \n"
		" "__UA_ADDR "\t2b, 4b \n"
		" .previous \n"
		: "=&r" (retval), "=R" (*uaddr)
		: "R" (*uaddr), "Jr" (oldval), "Jr" (newval), "i" (-EFAULT)
		: "memory");
	} else if (cpu_has_llsc) {
		__asm__ __volatile__(
		"# futex_atomic_cmpxchg_inatomic \n"
		" .set push \n"
		" .set noat \n"
		" .set mips3 \n"
		"1: ll %0, %2 \n"
		" bne %0, %z3, 3f \n"
		" .set mips0 \n"
		" move $1, %z4 \n"
		" .set mips3 \n"
		"2: sc $1, %1 \n"
		" beqz $1, 1b \n"
		__FUTEX_SMP_SYNC
		"3: \n"
		" .set pop \n"
		" .section .fixup,\"ax\" \n"
		"4: li %0, %5 \n"
		" j 3b \n"
		" .previous \n"
		" .section __ex_table,\"a\" \n"
		" "__UA_ADDR "\t1b, 4b \n"
		" "__UA_ADDR "\t2b, 4b \n"
		" .previous \n"
		: "=&r" (retval), "=R" (*uaddr)
		: "R" (*uaddr), "Jr" (oldval), "Jr" (newval), "i" (-EFAULT)
		: "memory");
	} else
		return -ENOSYS;
	return retval;
}
  184. #endif
  185. #endif