/*
 * include/asm-s390/futex.h — s390 futex atomic operations
 */
#ifndef _ASM_S390_FUTEX_H
#define _ASM_S390_FUTEX_H

#ifdef __KERNEL__

#include <linux/futex.h>
#include <asm/errno.h>
#include <asm/uaccess.h>

/*
 * Exception-table fragment shared by __futex_atomic_op below.
 * Entries are (faulting insn, fixup target) address pairs: 32-bit
 * (.long) on 31-bit s390, 64-bit (.quad) on 64-bit s390x.  A fault at
 * label 0, 2 or 3 of __futex_atomic_op resumes at label 4 (the
 * "sacf 0" that switches back to the primary address space), leaving
 * the preloaded -EFAULT in the result register.
 */
#ifndef __s390x__
#define __futex_atomic_fixup \
	".section __ex_table,\"a\"\n" \
	" .align 4\n" \
	" .long 0b,4b,2b,4b,3b,4b\n" \
	".previous"
#else /* __s390x__ */
#define __futex_atomic_fixup \
	".section __ex_table,\"a\"\n" \
	" .align 8\n" \
	" .quad 0b,4b,2b,4b,3b,4b\n" \
	".previous"
#endif /* __s390x__ */
/*
 * Atomically apply "insn" to the user-space int at "uaddr":
 *   0: load the current value into oldval (%1),
 *   1: "insn" computes newval (%2) from oldval (%1) and/or oparg (%5),
 *   2: compare-and-swap newval back, 3: retry from 1 if another CPU
 *      modified the word in between (cs reloads %1 on mismatch).
 * "sacf 256" switches to the secondary (user) address space for the
 * user accesses; "4: sacf 0" switches back.  ret (%0) is preloaded
 * with -EFAULT and cleared (lhi %0,0) only after the cs succeeds; a
 * fault at label 0, 2 or 3 branches to label 4 via
 * __futex_atomic_fixup, so ret stays -EFAULT.
 * NOTE: "insn" must use operand numbers %1/%2/%5 matching this
 * operand list — the callers in futex_atomic_op_inuser rely on it.
 */
#define __futex_atomic_op(insn, ret, oldval, newval, uaddr, oparg) \
	asm volatile(" sacf 256\n" \
		     "0: l %1,0(%6)\n" \
		     "1: " insn \
		     "2: cs %1,%2,0(%6)\n" \
		     "3: jl 1b\n" \
		     " lhi %0,0\n" \
		     "4: sacf 0\n" \
		     __futex_atomic_fixup \
		     : "=d" (ret), "=&d" (oldval), "=&d" (newval), \
		       "=m" (*uaddr) \
		     : "0" (-EFAULT), "d" (oparg), "a" (uaddr), \
		       "m" (*uaddr) : "cc" );
/*
 * Decode and execute a packed futex operation (FUTEX_WAKE_OP style) on
 * the user-space int at uaddr.
 *
 * encoded_op layout: op in bits 28-31, cmp in bits 24-27, oparg in
 * bits 12-23 and cmparg in bits 0-11 (both sign-extended via the
 * shift pairs below).
 *
 * Returns a negative errno (-EFAULT on a bad user address, -ENOSYS for
 * an unknown op or cmp code); otherwise the boolean result of
 * comparing the fetched old value against cmparg with "cmp".
 */
static inline int futex_atomic_op_inuser (int encoded_op, int __user *uaddr)
{
	int op = (encoded_op >> 28) & 7;
	int cmp = (encoded_op >> 24) & 15;
	int oparg = (encoded_op << 8) >> 20;	/* sign-extend 12-bit field */
	int cmparg = (encoded_op << 20) >> 20;	/* sign-extend 12-bit field */
	int oldval = 0, newval, ret;

	/* OPARG_SHIFT flag: operand is a shift count, use 1 << oparg */
	if (encoded_op & (FUTEX_OP_OPARG_SHIFT << 28))
		oparg = 1 << oparg;

	if (! access_ok (VERIFY_WRITE, uaddr, sizeof(int)))
		return -EFAULT;

	/* keep preemption off across the load/op/cs retry loop */
	inc_preempt_count();
	switch (op) {
	case FUTEX_OP_SET:
		/* *uaddr = oparg */
		__futex_atomic_op("lr %2,%5\n",
				  ret, oldval, newval, uaddr, oparg);
		break;
	case FUTEX_OP_ADD:
		/* *uaddr += oparg */
		__futex_atomic_op("lr %2,%1\nar %2,%5\n",
				  ret, oldval, newval, uaddr, oparg);
		break;
	case FUTEX_OP_OR:
		/* *uaddr |= oparg */
		__futex_atomic_op("lr %2,%1\nor %2,%5\n",
				  ret, oldval, newval, uaddr, oparg);
		break;
	case FUTEX_OP_ANDN:
		/*
		 * NOTE(review): "nr" is a plain AND, so this computes
		 * *uaddr &= oparg; the generic FUTEX_OP_ANDN is
		 * *uaddr &= ~oparg — looks like the complement is
		 * missing here, confirm against other arch versions.
		 */
		__futex_atomic_op("lr %2,%1\nnr %2,%5\n",
				  ret, oldval, newval, uaddr, oparg);
		break;
	case FUTEX_OP_XOR:
		/* *uaddr ^= oparg */
		__futex_atomic_op("lr %2,%1\nxr %2,%5\n",
				  ret, oldval, newval, uaddr, oparg);
		break;
	default:
		ret = -ENOSYS;
	}
	dec_preempt_count();

	/* on success, compare the fetched old value against cmparg */
	if (!ret) {
		switch (cmp) {
		case FUTEX_OP_CMP_EQ: ret = (oldval == cmparg); break;
		case FUTEX_OP_CMP_NE: ret = (oldval != cmparg); break;
		case FUTEX_OP_CMP_LT: ret = (oldval < cmparg); break;
		case FUTEX_OP_CMP_GE: ret = (oldval >= cmparg); break;
		case FUTEX_OP_CMP_LE: ret = (oldval <= cmparg); break;
		case FUTEX_OP_CMP_GT: ret = (oldval > cmparg); break;
		default: ret = -ENOSYS;
		}
	}
	return ret;
}
  83. static inline int
  84. futex_atomic_cmpxchg_inatomic(int __user *uaddr, int oldval, int newval)
  85. {
  86. int ret;
  87. if (! access_ok (VERIFY_WRITE, uaddr, sizeof(int)))
  88. return -EFAULT;
  89. asm volatile(" sacf 256\n"
  90. " cs %1,%4,0(%5)\n"
  91. "0: lr %0,%1\n"
  92. "1: sacf 0\n"
  93. #ifndef __s390x__
  94. ".section __ex_table,\"a\"\n"
  95. " .align 4\n"
  96. " .long 0b,1b\n"
  97. ".previous"
  98. #else /* __s390x__ */
  99. ".section __ex_table,\"a\"\n"
  100. " .align 8\n"
  101. " .quad 0b,1b\n"
  102. ".previous"
  103. #endif /* __s390x__ */
  104. : "=d" (ret), "+d" (oldval), "=m" (*uaddr)
  105. : "0" (-EFAULT), "d" (newval), "a" (uaddr), "m" (*uaddr)
  106. : "cc", "memory" );
  107. return oldval;
  108. }
#endif /* __KERNEL__ */
#endif /* _ASM_S390_FUTEX_H */