/* $Id: dec_and_lock.S,v 1.5 2001/11/18 00:12:56 davem Exp $
 * dec_and_lock.S: Sparc64 version of "atomic_dec_and_lock()"
 *                 using cas and ldstub instructions.
 *
 * Copyright (C) 2000 David S. Miller (davem@redhat.com)
 */
#include <linux/config.h>
#include <asm/thread_info.h>

	.text
	.align	64
	/* CAS basically works like this:
	 *
	 * void CAS(MEM, REG1, REG2)
	 * {
	 *	START_ATOMIC();
	 *	if (*(MEM) == REG1) {
	 *		TMP = *(MEM);
	 *		*(MEM) = REG2;
	 *		REG2 = TMP;
	 *	} else
	 *		REG2 = *(MEM);
	 *	END_ATOMIC();
	 * }
	 */
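
	/* The routine below behaves roughly like this C sketch (illustrative
	 * only, assuming the usual atomic_t / spinlock_t helpers; it is not
	 * a copy of the generic lib/dec_and_lock.c code):
	 *
	 * int _atomic_dec_and_lock(atomic_t *counter, spinlock_t *lock)
	 * {
	 *	int old, new;
	 * again:
	 *	old = atomic_read(counter);
	 *	new = old - 1;
	 *	if (new == 0)
	 *		goto slow_path;
	 *	if (cmpxchg(&counter->counter, old, new) != old)
	 *		goto again;
	 *	return 0;
	 * slow_path:
	 *	spin_lock(lock);
	 *	if (atomic_dec_and_test(counter))
	 *		return 1;
	 *	spin_unlock(lock);
	 *	goto again;
	 * }
	 *
	 * i.e. the caller gets back 1 with the lock held exactly when the
	 * count dropped to zero, and 0 with the lock untouched otherwise.
	 */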
	.globl	_atomic_dec_and_lock
_atomic_dec_and_lock:	/* %o0 = counter, %o1 = lock */
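	/* Fast path: read the counter into %g2 and compute the decremented
	 * value in %g7.  If the decrement would reach zero, take the locked
	 * slow path at start_to_zero; otherwise nzero publishes the new
	 * value with cas, retrying from loop1 if another CPU raced us, and
	 * we return 0 (in %g1) through out without touching the lock.
	 */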
loop1:	lduw	[%o0], %g2
	subcc	%g2, 1, %g7
	be,pn	%icc, start_to_zero
	 nop
nzero:	cas	[%o0], %g2, %g7
	cmp	%g2, %g7
	bne,pn	%icc, loop1
	 mov	0, %g1
out:
	membar	#StoreLoad | #StoreStore
	retl
	 mov	%g1, %o0
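
	/* Slow path: the decrement is about to hit zero.  With CONFIG_PREEMPT
	 * the preempt count of the current thread (thread_info lives in %g6
	 * on sparc64) is bumped by hand, since we are about to spin on and
	 * then hold the lock.
	 */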
start_to_zero:
#ifdef CONFIG_PREEMPT
	ldsw	[%g6 + TI_PRE_COUNT], %g3
	add	%g3, 1, %g3
	stw	%g3, [%g6 + TI_PRE_COUNT]
#endif
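
	/* Grab the spinlock with ldstub (atomically set the lock byte and
	 * fetch its old value into %g3); a nonzero old value means someone
	 * else holds it, so branch to the read-only spin loop.  Once the
	 * lock is ours, loop2 tries to cas the counter from the value we
	 * saw (%g2) down to zero (%g7).
	 */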
to_zero:
	ldstub	[%o1], %g3
	membar	#StoreLoad | #StoreStore
	brnz,pn	%g3, spin_on_lock
	 nop
loop2:	cas	[%o0], %g2, %g7	/* ASSERT(g7 == 0) */
	cmp	%g2, %g7
	be,pt	%icc, out
	 mov	1, %g1
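	/* The cas failed: somebody changed the counter while we were taking
	 * the lock.  Re-read it; if it still decrements to zero, retry the
	 * cas under the lock, otherwise release the lock and fall back to
	 * the lock-free path.
	 */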
	lduw	[%o0], %g2
	subcc	%g2, 1, %g7
	be,pn	%icc, loop2
	 nop
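	/* Counter no longer drops to zero: release the lock byte (plain
	 * store after a release-style barrier), undo the preempt count
	 * bump, and retry the decrement on the lock-free path at nzero.
	 */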
	membar	#StoreStore | #LoadStore
	stb	%g0, [%o1]
#ifdef CONFIG_PREEMPT
	ldsw	[%g6 + TI_PRE_COUNT], %g3
	sub	%g3, 1, %g3
	stw	%g3, [%g6 + TI_PRE_COUNT]
#endif
	b,pt	%xcc, nzero
	 nop
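
	/* Somebody else holds the lock: spin with plain loads until the
	 * lock byte clears, then go back and retry the ldstub at to_zero.
	 */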
spin_on_lock:
	ldub	[%o1], %g3
	membar	#LoadLoad
	brnz,pt	%g3, spin_on_lock
	 nop
	ba,pt	%xcc, to_zero
	 nop
	nop