/*
 * MIPS futex operations (ll/sc based) — historically include/asm-mips/futex.h.
 */
  1. #ifndef _ASM_FUTEX_H
  2. #define _ASM_FUTEX_H
  3. #ifdef __KERNEL__
  4. #include <linux/config.h>
  5. #include <linux/futex.h>
  6. #include <asm/errno.h>
  7. #include <asm/uaccess.h>
  8. #include <asm/war.h>
#ifdef CONFIG_SMP
/*
 * On SMP, order the ll/sc sequence against memory accesses on other
 * CPUs with a "sync" barrier appended to the asm body.
 */
#define __FUTEX_SMP_SYNC " sync \n"
#else
/* Uniprocessor: nothing to order against, barrier expands to nothing. */
#define __FUTEX_SMP_SYNC
#endif
  14. #define __futex_atomic_op(insn, ret, oldval, uaddr, oparg) \
  15. { \
  16. if (cpu_has_llsc && R10000_LLSC_WAR) { \
  17. __asm__ __volatile__( \
  18. " .set push \n" \
  19. " .set noat \n" \
  20. " .set mips3 \n" \
  21. "1: ll %1, (%3) # __futex_atomic_op \n" \
  22. " .set mips0 \n" \
  23. " " insn " \n" \
  24. " .set mips3 \n" \
  25. "2: sc $1, (%3) \n" \
  26. " beqzl $1, 1b \n" \
  27. __FUTEX_SMP_SYNC \
  28. "3: \n" \
  29. " .set pop \n" \
  30. " .set mips0 \n" \
  31. " .section .fixup,\"ax\" \n" \
  32. "4: li %0, %5 \n" \
  33. " j 2b \n" \
  34. " .previous \n" \
  35. " .section __ex_table,\"a\" \n" \
  36. " "__UA_ADDR "\t1b, 4b \n" \
  37. " "__UA_ADDR "\t2b, 4b \n" \
  38. " .previous \n" \
  39. : "=r" (ret), "=r" (oldval) \
  40. : "0" (0), "r" (uaddr), "Jr" (oparg), "i" (-EFAULT)); \
  41. } else if (cpu_has_llsc) { \
  42. __asm__ __volatile__( \
  43. " .set push \n" \
  44. " .set noat \n" \
  45. " .set mips3 \n" \
  46. "1: ll %1, (%3) # __futex_atomic_op \n" \
  47. " .set mips0 \n" \
  48. " " insn " \n" \
  49. " .set mips3 \n" \
  50. "2: sc $1, (%3) \n" \
  51. " beqz $1, 1b \n" \
  52. __FUTEX_SMP_SYNC \
  53. "3: \n" \
  54. " .set pop \n" \
  55. " .set mips0 \n" \
  56. " .section .fixup,\"ax\" \n" \
  57. "4: li %0, %5 \n" \
  58. " j 2b \n" \
  59. " .previous \n" \
  60. " .section __ex_table,\"a\" \n" \
  61. " "__UA_ADDR "\t1b, 4b \n" \
  62. " "__UA_ADDR "\t2b, 4b \n" \
  63. " .previous \n" \
  64. : "=r" (ret), "=r" (oldval) \
  65. : "0" (0), "r" (uaddr), "Jr" (oparg), "i" (-EFAULT)); \
  66. } else \
  67. ret = -ENOSYS; \
  68. }
  69. static inline int
  70. futex_atomic_op_inuser (int encoded_op, int __user *uaddr)
  71. {
  72. int op = (encoded_op >> 28) & 7;
  73. int cmp = (encoded_op >> 24) & 15;
  74. int oparg = (encoded_op << 8) >> 20;
  75. int cmparg = (encoded_op << 20) >> 20;
  76. int oldval = 0, ret;
  77. if (encoded_op & (FUTEX_OP_OPARG_SHIFT << 28))
  78. oparg = 1 << oparg;
  79. if (! access_ok (VERIFY_WRITE, uaddr, sizeof(int)))
  80. return -EFAULT;
  81. inc_preempt_count();
  82. switch (op) {
  83. case FUTEX_OP_SET:
  84. __futex_atomic_op("move $1, %z4", ret, oldval, uaddr, oparg);
  85. break;
  86. case FUTEX_OP_ADD:
  87. __futex_atomic_op("addu $1, %1, %z4",
  88. ret, oldval, uaddr, oparg);
  89. break;
  90. case FUTEX_OP_OR:
  91. __futex_atomic_op("or $1, %1, %z4",
  92. ret, oldval, uaddr, oparg);
  93. break;
  94. case FUTEX_OP_ANDN:
  95. __futex_atomic_op("and $1, %1, %z4",
  96. ret, oldval, uaddr, ~oparg);
  97. break;
  98. case FUTEX_OP_XOR:
  99. __futex_atomic_op("xor $1, %1, %z4",
  100. ret, oldval, uaddr, oparg);
  101. break;
  102. default:
  103. ret = -ENOSYS;
  104. }
  105. dec_preempt_count();
  106. if (!ret) {
  107. switch (cmp) {
  108. case FUTEX_OP_CMP_EQ: ret = (oldval == cmparg); break;
  109. case FUTEX_OP_CMP_NE: ret = (oldval != cmparg); break;
  110. case FUTEX_OP_CMP_LT: ret = (oldval < cmparg); break;
  111. case FUTEX_OP_CMP_GE: ret = (oldval >= cmparg); break;
  112. case FUTEX_OP_CMP_LE: ret = (oldval <= cmparg); break;
  113. case FUTEX_OP_CMP_GT: ret = (oldval > cmparg); break;
  114. default: ret = -ENOSYS;
  115. }
  116. }
  117. return ret;
  118. }
/*
 * futex_atomic_cmpxchg_inatomic - compare-and-exchange on a user futex word.
 *
 * If *uaddr equals oldval, atomically store newval there.  In either
 * case, return the value the ll instruction fetched from *uaddr, so the
 * caller can tell whether the exchange took place.  Returns -EFAULT if
 * the user address fails access_ok(), or -ENOSYS on CPUs without ll/sc.
 *
 * On a faulting user access, the .fixup stub overwrites retval with
 * -EFAULT and jumps past the sc (label 3), so the fault is reported
 * rather than retried.
 */
static inline int
futex_atomic_cmpxchg_inatomic(int __user *uaddr, int oldval, int newval)
{
	int retval;

	if (!access_ok(VERIFY_WRITE, uaddr, sizeof(int)))
		return -EFAULT;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		/*
		 * R10000 errata variant: the branch-likely beqzl is
		 * required for correct ll/sc on affected CPUs.
		 */
		__asm__ __volatile__(
		"# futex_atomic_cmpxchg_inatomic \n"
		" .set push \n"
		" .set noat \n"			/* $at ($1) used as scratch */
		" .set mips3 \n"		/* ll/sc need a MIPS III ISA level */
		"1: ll %0, %2 \n"		/* retval = *uaddr (load-linked) */
		" bne %0, %z3, 3f \n"		/* no store if value != oldval */
		" .set mips0 \n"
		" move $1, %z4 \n"		/* $1 = newval */
		" .set mips3 \n"
		"2: sc %1 \n"			/* conditional store; $1 = success */
		" beqzl $1, 1b \n"		/* retry whole sequence on failure */
		__FUTEX_SMP_SYNC
		"3: \n"
		" .set pop \n"
		" .section .fixup,\"ax\" \n"
		"4: li %0, %5 \n"		/* fault: retval = -EFAULT */
		" j 3b \n"
		" .previous \n"
		" .section __ex_table,\"a\" \n"
		" "__UA_ADDR "\t1b, 4b \n"	/* ll may fault */
		" "__UA_ADDR "\t2b, 4b \n"	/* sc may fault */
		" .previous \n"
		: "=&r" (retval), "=R" (*uaddr)
		: "R" (*uaddr), "Jr" (oldval), "Jr" (newval), "i" (-EFAULT)
		: "memory");
	} else if (cpu_has_llsc) {
		/* Plain ll/sc variant: ordinary beqz retry loop. */
		__asm__ __volatile__(
		"# futex_atomic_cmpxchg_inatomic \n"
		" .set push \n"
		" .set noat \n"			/* $at ($1) used as scratch */
		" .set mips3 \n"		/* ll/sc need a MIPS III ISA level */
		"1: ll %0, %2 \n"		/* retval = *uaddr (load-linked) */
		" bne %0, %z3, 3f \n"		/* no store if value != oldval */
		" .set mips0 \n"
		" move $1, %z4 \n"		/* $1 = newval */
		" .set mips3 \n"
		"2: sc %1 \n"			/* conditional store; $1 = success */
		" beqz $1, 1b \n"		/* retry whole sequence on failure */
		__FUTEX_SMP_SYNC
		"3: \n"
		" .set pop \n"
		" .section .fixup,\"ax\" \n"
		"4: li %0, %5 \n"		/* fault: retval = -EFAULT */
		" j 3b \n"
		" .previous \n"
		" .section __ex_table,\"a\" \n"
		" "__UA_ADDR "\t1b, 4b \n"	/* ll may fault */
		" "__UA_ADDR "\t2b, 4b \n"	/* sc may fault */
		" .previous \n"
		: "=&r" (retval), "=R" (*uaddr)
		: "R" (*uaddr), "Jr" (oldval), "Jr" (newval), "i" (-EFAULT)
		: "memory");
	} else
		return -ENOSYS;

	return retval;
}
  183. #endif
  184. #endif