/*
 *  include/asm-s390/spinlock.h
 *
 *  S390 version
 *    Copyright (C) 1999 IBM Deutschland Entwicklung GmbH, IBM Corporation
 *    Author(s): Martin Schwidefsky (schwidefsky@de.ibm.com)
 *
 *  Derived from "include/asm-i386/spinlock.h"
 */
#ifndef __ASM_SPINLOCK_H
#define __ASM_SPINLOCK_H

#ifdef __s390x__
/*
 * Grmph, take care of %&#! user space programs that include
 * asm/spinlock.h. The diagnose is only available in kernel
 * context.
 */
#ifdef __KERNEL__
#include <asm/lowcore.h>
/*
 * In-kernel build: the busy-wait loops below use "ex" to execute the
 * diagnose-0x44 opcode stored at __LC_DIAG44_OPCODE in the lowcore.
 * NOTE(review): presumably a voluntary time-slice yield to the
 * hypervisor while spinning — confirm against asm/lowcore.h.
 */
#define __DIAG44_INSN "ex"
#define __DIAG44_OPERAND __LC_DIAG44_OPCODE
#else
/* User space build: "#" turns the would-be yield into an assembler comment. */
#define __DIAG44_INSN "#"
#define __DIAG44_OPERAND 0
#endif
#endif /* __s390x__ */
/*
 * Simple spin lock operations. There are two variants, one clears IRQ's
 * on the local processor, one does not.
 *
 * We make no fairness assumptions. They have a cost.
 */
typedef struct {
	/* 0 = unlocked; holders store a non-zero value (see _raw_spin_lock) */
	volatile unsigned int lock;
#ifdef CONFIG_PREEMPT
	/* declared for the generic preemption code; not touched in this file */
	unsigned int break_lock;
#endif
} __attribute__ ((aligned (4))) spinlock_t;	/* COMPARE AND SWAP needs word alignment */

#define SPIN_LOCK_UNLOCKED (spinlock_t) { 0 }
#define spin_lock_init(lp) do { (lp)->lock = 0; } while(0)
/* Busy-wait (without acquiring) until the lock is observed free. */
#define spin_unlock_wait(lp) do { barrier(); } while(((volatile spinlock_t *)(lp))->lock)
#define spin_is_locked(x) ((x)->lock != 0)
/* The flags argument is ignored on s390. */
#define _raw_spin_lock_flags(lock, flags) _raw_spin_lock(lock)
/*
 * Acquire @lp, spinning until it is free.
 *
 * Trick used by both variants: "bras reg,1f" simultaneously jumps over the
 * wait instruction and leaves the address of label 1 in a register; that
 * address doubles as the non-zero "locked" value.  COMPARE AND SWAP then
 * atomically replaces 0 with it.  If the lock is held, CS fails (condition
 * code "low") and we branch back to the wait instruction before retrying.
 * The lock word is 32-bit, so 32-bit slr/cs are used even on 64-bit.
 */
extern inline void _raw_spin_lock(spinlock_t *lp)
{
#ifndef __s390x__
	unsigned int reg1, reg2;
	__asm__ __volatile__(" bras %0,1f\n"	/* reg1 = &1f; skip the diag */
		"0: diag 0,0,68\n"		/* wait before retrying */
		"1: slr %1,%1\n"		/* reg2 = 0 (expected old value) */
		" cs %1,%0,0(%3)\n"		/* lock: 0 -> reg1, atomically */
		" jl 0b\n"			/* CS lost: lock was held */
		: "=&d" (reg1), "=&d" (reg2), "=m" (lp->lock)
		: "a" (&lp->lock), "m" (lp->lock)
		: "cc", "memory" );
#else /* __s390x__ */
	unsigned long reg1, reg2;
	__asm__ __volatile__(" bras %1,1f\n"	/* reg2 = &1f; skip the wait insn */
		"0: " __DIAG44_INSN " 0,%4\n"	/* wait before retrying */
		"1: slr %0,%0\n"		/* reg1 = 0 (expected old value) */
		" cs %0,%1,0(%3)\n"		/* lock: 0 -> reg2, atomically */
		" jl 0b\n"			/* CS lost: lock was held */
		: "=&d" (reg1), "=&d" (reg2), "=m" (lp->lock)
		: "a" (&lp->lock), "i" (__DIAG44_OPERAND),
		"m" (lp->lock) : "cc", "memory" );
#endif /* __s390x__ */
}
/*
 * Try once to acquire @lp.  Returns non-zero on success, 0 if the lock
 * was already held.
 *
 * "basr %1,0" loads the current address into reg — a convenient non-zero
 * "locked" value (no branch is taken with register operand 0).  result is
 * preset to 0, the expected unlocked value; when CS fails it is replaced
 * with the current non-zero lock word, so "!result" signals success.
 * Single attempt only: there is no retry loop.
 */
extern inline int _raw_spin_trylock(spinlock_t *lp)
{
	unsigned long reg;
	unsigned int result;
	__asm__ __volatile__(" basr %1,0\n"
		"0: cs %0,%1,0(%3)"	/* lock: 0 -> reg, atomically */
		: "=d" (result), "=&d" (reg), "=m" (lp->lock)
		: "a" (&lp->lock), "m" (lp->lock), "0" (0)
		: "cc", "memory" );
	return !result;
}
/*
 * Release @lp by atomically storing 0.
 *
 * The expected old value is a plain read of the lock word ("0" (lp->lock));
 * there is no retry loop — presumably safe because only the holder ever
 * releases, so the word cannot change underneath us.  Note the base
 * address is the spinlock_t itself ("a" (lp)): lock is its first member,
 * so the addresses coincide.
 */
extern inline void _raw_spin_unlock(spinlock_t *lp)
{
	unsigned int old;
	__asm__ __volatile__("cs %0,%3,0(%4)"	/* lock: old -> 0 */
		: "=d" (old), "=m" (lp->lock)
		: "0" (lp->lock), "d" (0), "a" (lp)
		: "cc", "memory" );
}
  87. /*
  88. * Read-write spinlocks, allowing multiple readers
  89. * but only one writer.
  90. *
  91. * NOTE! it is quite common to have readers in interrupts
  92. * but no interrupt writers. For those circumstances we
  93. * can "mix" irq-safe locks - any writer needs to get a
  94. * irq-safe write-lock, but readers can get non-irqsafe
  95. * read-locks.
  96. */
  97. typedef struct {
  98. volatile unsigned long lock;
  99. volatile unsigned long owner_pc;
  100. #ifdef CONFIG_PREEMPT
  101. unsigned int break_lock;
  102. #endif
  103. } rwlock_t;
  104. #define RW_LOCK_UNLOCKED (rwlock_t) { 0, 0 }
  105. #define rwlock_init(x) do { *(x) = RW_LOCK_UNLOCKED; } while(0)
  106. /**
  107. * read_can_lock - would read_trylock() succeed?
  108. * @lock: the rwlock in question.
  109. */
  110. #define read_can_lock(x) ((int)(x)->lock >= 0)
  111. /**
  112. * write_can_lock - would write_trylock() succeed?
  113. * @lock: the rwlock in question.
  114. */
  115. #define write_can_lock(x) ((x)->lock == 0)
#ifndef __s390x__
/*
 * Acquire @rw for reading: wait until the write bit is clear, then
 * atomically increment the reader count.  When the CS loses (writer
 * present or concurrent update) execute diagnose 0x44 and retry.
 * Registers 2 and 3 are used outright — hence the explicit clobbers.
 */
#define _raw_read_lock(rw) \
	asm volatile(" l 2,0(%1)\n" \
		" j 1f\n" \
		"0: diag 0,0,68\n" \
		"1: la 2,0(2)\n" /* clear high (=write) bit */ \
		" la 3,1(2)\n" /* one more reader */ \
		" cs 2,3,0(%1)\n" /* try to write new value */ \
		" jl 0b" \
		: "=m" ((rw)->lock) : "a" (&(rw)->lock), \
		"m" ((rw)->lock) : "2", "3", "cc", "memory" )
#else /* __s390x__ */
/*
 * 64-bit variant: "la" cannot clear bit 0 of a 64-bit value, so the
 * write bit is masked with "nihh" instead, and csg operates on the
 * full 64-bit lock word.
 */
#define _raw_read_lock(rw) \
	asm volatile(" lg 2,0(%1)\n" \
		" j 1f\n" \
		"0: " __DIAG44_INSN " 0,%2\n" \
		"1: nihh 2,0x7fff\n" /* clear high (=write) bit */ \
		" la 3,1(2)\n" /* one more reader */ \
		" csg 2,3,0(%1)\n" /* try to write new value */ \
		" jl 0b" \
		: "=m" ((rw)->lock) \
		: "a" (&(rw)->lock), "i" (__DIAG44_OPERAND), \
		"m" ((rw)->lock) : "2", "3", "cc", "memory" )
#endif /* __s390x__ */
#ifndef __s390x__
/*
 * Drop one reader from @rw: atomically decrement the lock word,
 * retrying after a diagnose-0x44 wait if the CS loses a race with
 * another reader.  Clobbers registers 2 and 3.
 */
#define _raw_read_unlock(rw) \
	asm volatile(" l 2,0(%1)\n" \
		" j 1f\n" \
		"0: diag 0,0,68\n" \
		"1: lr 3,2\n" \
		" ahi 3,-1\n" /* one less reader */ \
		" cs 2,3,0(%1)\n" \
		" jl 0b" \
		: "=m" ((rw)->lock) : "a" (&(rw)->lock), \
		"m" ((rw)->lock) : "2", "3", "cc", "memory" )
#else /* __s390x__ */
/*
 * 64-bit variant: "bctgr 3,0" decrements register 3 by one without
 * branching (branch register 0 means "no branch").
 */
#define _raw_read_unlock(rw) \
	asm volatile(" lg 2,0(%1)\n" \
		" j 1f\n" \
		"0: " __DIAG44_INSN " 0,%2\n" \
		"1: lgr 3,2\n" \
		" bctgr 3,0\n" /* one less reader */ \
		" csg 2,3,0(%1)\n" \
		" jl 0b" \
		: "=m" ((rw)->lock) \
		: "a" (&(rw)->lock), "i" (__DIAG44_OPERAND), \
		"m" ((rw)->lock) : "2", "3", "cc", "memory" )
#endif /* __s390x__ */
#ifndef __s390x__
/*
 * Acquire @rw for writing: spin until the whole lock word is 0 (no
 * writer, no readers), then atomically store the write bit
 * (0x80000000).  Waits with diagnose 0x44 between attempts.
 * Clobbers registers 2 and 3.
 */
#define _raw_write_lock(rw) \
	asm volatile(" lhi 3,1\n" \
		" sll 3,31\n" /* new lock value = 0x80000000 */ \
		" j 1f\n" \
		"0: diag 0,0,68\n" \
		"1: slr 2,2\n" /* old lock value must be 0 */ \
		" cs 2,3,0(%1)\n" \
		" jl 0b" \
		: "=m" ((rw)->lock) : "a" (&(rw)->lock), \
		"m" ((rw)->lock) : "2", "3", "cc", "memory" )
#else /* __s390x__ */
/* 64-bit variant: the write bit is bit 63 (0x8000000000000000). */
#define _raw_write_lock(rw) \
	asm volatile(" llihh 3,0x8000\n" /* new lock value = 0x80...0 */ \
		" j 1f\n" \
		"0: " __DIAG44_INSN " 0,%2\n" \
		"1: slgr 2,2\n" /* old lock value must be 0 */ \
		" csg 2,3,0(%1)\n" \
		" jl 0b" \
		: "=m" ((rw)->lock) \
		: "a" (&(rw)->lock), "i" (__DIAG44_OPERAND), \
		"m" ((rw)->lock) : "2", "3", "cc", "memory" )
#endif /* __s390x__ */
#ifndef __s390x__
/*
 * Release the write lock on @rw: atomically swap the write-bit value
 * (0x80000000) back to 0.  The retry loop mirrors the other rwlock ops;
 * NOTE(review): with only one writer possible, the CS presumably
 * succeeds on the first try.  Clobbers registers 2 and 3.
 */
#define _raw_write_unlock(rw) \
	asm volatile(" slr 3,3\n" /* new lock value = 0 */ \
		" j 1f\n" \
		"0: diag 0,0,68\n" \
		"1: lhi 2,1\n" \
		" sll 2,31\n" /* old lock value must be 0x80000000 */ \
		" cs 2,3,0(%1)\n" \
		" jl 0b" \
		: "=m" ((rw)->lock) : "a" (&(rw)->lock), \
		"m" ((rw)->lock) : "2", "3", "cc", "memory" )
#else /* __s390x__ */
/* 64-bit variant: expected old value is the 64-bit write bit. */
#define _raw_write_unlock(rw) \
	asm volatile(" slgr 3,3\n" /* new lock value = 0 */ \
		" j 1f\n" \
		"0: " __DIAG44_INSN " 0,%2\n" \
		"1: llihh 2,0x8000\n" /* old lock value must be 0x8..0 */\
		" csg 2,3,0(%1)\n" \
		" jl 0b" \
		: "=m" ((rw)->lock) \
		: "a" (&(rw)->lock), "i" (__DIAG44_OPERAND), \
		"m" ((rw)->lock) : "2", "3", "cc", "memory" )
#endif /* __s390x__ */
/* No fast read-trylock here; fall back to the generic helper. */
#define _raw_read_trylock(lock) generic_raw_read_trylock(lock)

/*
 * Try once to acquire @rw for writing.  Returns non-zero on success.
 *
 * result is preset to 0 ("0" (0UL)) — the only old value that admits a
 * writer.  On success the CS stores the write bit; on failure result
 * receives the current non-zero lock word.
 */
extern inline int _raw_write_trylock(rwlock_t *rw)
{
	unsigned long result, reg;
	__asm__ __volatile__(
#ifndef __s390x__
		" lhi %1,1\n"
		" sll %1,31\n"		/* reg = 0x80000000 (write bit) */
		" cs %0,%1,0(%3)"	/* single attempt, no retry */
#else /* __s390x__ */
		" llihh %1,0x8000\n"	/* reg = 0x8000...0 (write bit) */
		"0: csg %0,%1,0(%3)\n"	/* NOTE(review): label 0 is unreferenced */
#endif /* __s390x__ */
		: "=d" (result), "=&d" (reg), "=m" (rw->lock)
		: "a" (&rw->lock), "m" (rw->lock), "0" (0UL)
		: "cc", "memory" );
	return result == 0;
}

#endif /* __ASM_SPINLOCK_H */