/* include/asm-s390/futex.h — s390 futex atomic user-space operations */
  1. #ifndef _ASM_S390_FUTEX_H
  2. #define _ASM_S390_FUTEX_H
  3. #ifdef __KERNEL__
  4. #include <linux/futex.h>
  5. #include <asm/errno.h>
  6. #include <asm/uaccess.h>
  7. #ifndef __s390x__
  8. #define __futex_atomic_fixup \
  9. ".section __ex_table,\"a\"\n" \
  10. " .align 4\n" \
  11. " .long 0b,2b,1b,2b\n" \
  12. ".previous"
  13. #else /* __s390x__ */
  14. #define __futex_atomic_fixup \
  15. ".section __ex_table,\"a\"\n" \
  16. " .align 8\n" \
  17. " .quad 0b,2b,1b,2b\n" \
  18. ".previous"
  19. #endif /* __s390x__ */
  20. #define __futex_atomic_op(insn, ret, oldval, newval, uaddr, oparg) \
  21. asm volatile(" l %1,0(%6)\n" \
  22. "0: " insn \
  23. " cs %1,%2,0(%6)\n" \
  24. "1: jl 0b\n" \
  25. " lhi %0,0\n" \
  26. "2:\n" \
  27. __futex_atomic_fixup \
  28. : "=d" (ret), "=&d" (oldval), "=&d" (newval), \
  29. "=m" (*uaddr) \
  30. : "0" (-EFAULT), "d" (oparg), "a" (uaddr), \
  31. "m" (*uaddr) : "cc" );
  32. static inline int futex_atomic_op_inuser (int encoded_op, int __user *uaddr)
  33. {
  34. int op = (encoded_op >> 28) & 7;
  35. int cmp = (encoded_op >> 24) & 15;
  36. int oparg = (encoded_op << 8) >> 20;
  37. int cmparg = (encoded_op << 20) >> 20;
  38. int oldval = 0, newval, ret;
  39. if (encoded_op & (FUTEX_OP_OPARG_SHIFT << 28))
  40. oparg = 1 << oparg;
  41. if (! access_ok (VERIFY_WRITE, uaddr, sizeof(int)))
  42. return -EFAULT;
  43. inc_preempt_count();
  44. switch (op) {
  45. case FUTEX_OP_SET:
  46. __futex_atomic_op("lr %2,%5\n",
  47. ret, oldval, newval, uaddr, oparg);
  48. break;
  49. case FUTEX_OP_ADD:
  50. __futex_atomic_op("lr %2,%1\nar %2,%5\n",
  51. ret, oldval, newval, uaddr, oparg);
  52. break;
  53. case FUTEX_OP_OR:
  54. __futex_atomic_op("lr %2,%1\nor %2,%5\n",
  55. ret, oldval, newval, uaddr, oparg);
  56. break;
  57. case FUTEX_OP_ANDN:
  58. __futex_atomic_op("lr %2,%1\nnr %2,%5\n",
  59. ret, oldval, newval, uaddr, oparg);
  60. break;
  61. case FUTEX_OP_XOR:
  62. __futex_atomic_op("lr %2,%1\nxr %2,%5\n",
  63. ret, oldval, newval, uaddr, oparg);
  64. break;
  65. default:
  66. ret = -ENOSYS;
  67. }
  68. dec_preempt_count();
  69. if (!ret) {
  70. switch (cmp) {
  71. case FUTEX_OP_CMP_EQ: ret = (oldval == cmparg); break;
  72. case FUTEX_OP_CMP_NE: ret = (oldval != cmparg); break;
  73. case FUTEX_OP_CMP_LT: ret = (oldval < cmparg); break;
  74. case FUTEX_OP_CMP_GE: ret = (oldval >= cmparg); break;
  75. case FUTEX_OP_CMP_LE: ret = (oldval <= cmparg); break;
  76. case FUTEX_OP_CMP_GT: ret = (oldval > cmparg); break;
  77. default: ret = -ENOSYS;
  78. }
  79. }
  80. return ret;
  81. }
  82. static inline int
  83. futex_atomic_cmpxchg_inatomic(int __user *uaddr, int oldval, int newval)
  84. {
  85. int ret;
  86. if (! access_ok (VERIFY_WRITE, uaddr, sizeof(int)))
  87. return -EFAULT;
  88. asm volatile(" cs %1,%4,0(%5)\n"
  89. "0: lr %0,%1\n"
  90. "1:\n"
  91. #ifndef __s390x__
  92. ".section __ex_table,\"a\"\n"
  93. " .align 4\n"
  94. " .long 0b,1b\n"
  95. ".previous"
  96. #else /* __s390x__ */
  97. ".section __ex_table,\"a\"\n"
  98. " .align 8\n"
  99. " .quad 0b,1b\n"
  100. ".previous"
  101. #endif /* __s390x__ */
  102. : "=d" (ret), "+d" (oldval), "=m" (*uaddr)
  103. : "0" (-EFAULT), "d" (newval), "a" (uaddr), "m" (*uaddr)
  104. : "cc", "memory" );
  105. return oldval;
  106. }
  107. #endif /* __KERNEL__ */
  108. #endif /* _ASM_S390_FUTEX_H */