dec_and_lock.S

/* $Id: dec_and_lock.S,v 1.5 2001/11/18 00:12:56 davem Exp $
 * dec_and_lock.S: Sparc64 version of "atomic_dec_and_lock()"
 *                 using cas and ldstub instructions.
 *
 * Copyright (C) 2000 David S. Miller (davem@redhat.com)
 */
#include <linux/config.h>
#include <asm/thread_info.h>

	.text
	.align	64
	/* CAS basically works like this:
	 *
	 * void CAS(MEM, REG1, REG2)
	 * {
	 *	START_ATOMIC();
	 *	if (*(MEM) == REG1) {
	 *		TMP = *(MEM);
	 *		*(MEM) = REG2;
	 *		REG2 = TMP;
	 *	} else
	 *		REG2 = *(MEM);
	 *	END_ATOMIC();
	 * }
	 */
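	/* The routine below implements the following flow, shown here
	 * as an illustrative C sketch.  cas_bool() is a hypothetical
	 * boolean wrapper around the CAS primitive above (true when
	 * the swap happened), not a real kernel interface, and the
	 * CONFIG_PREEMPT bookkeeping is elided:
	 *
	 *	int _atomic_dec_and_lock(int *counter, char *lock)
	 *	{
	 *		int old;
	 *	again:
	 *		old = *counter;
	 *		if (old - 1 == 0) {
	 *			spin_lock(lock);
	 *			while (old - 1 == 0) {
	 *				if (cas_bool(counter, old, 0))
	 *					return 1;	// lock stays held
	 *				old = *counter;
	 *			}
	 *			spin_unlock(lock);
	 *		}
	 *		if (cas_bool(counter, old, old - 1))
	 *			return 0;		// lock never touched
	 *		goto again;
	 *	}
	 */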
	.globl	_atomic_dec_and_lock
_atomic_dec_and_lock:	/* %o0 = counter, %o1 = lock */
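	/* Fast path: load the counter, and if the decrement would not
	 * reach zero, try to commit it with cas.  On success we return
	 * zero (via %g1) without ever touching the lock; on cas
	 * failure somebody else changed the counter, so reload and
	 * retry.
	 */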
loop1:	lduw	[%o0], %g2
	subcc	%g2, 1, %g7
	be,pn	%icc, start_to_zero
	 nop
nzero:	cas	[%o0], %g2, %g7
	cmp	%g2, %g7
	bne,pn	%icc, loop1
	 mov	0, %g1

out:
	membar	#StoreLoad | #StoreStore
	retl
	 mov	%g1, %o0
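	/* Slow path: the decrement is about to take the counter to
	 * zero, so the spinlock must be acquired first.  Because the
	 * lock is taken with a raw ldstub rather than spin_lock(),
	 * CONFIG_PREEMPT kernels bump the preempt count by hand
	 * (TI_PRE_COUNT off of %g6, the thread_info pointer).
	 */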
start_to_zero:
#ifdef CONFIG_PREEMPT
	ldsw	[%g6 + TI_PRE_COUNT], %g3
	add	%g3, 1, %g3
	stw	%g3, [%g6 + TI_PRE_COUNT]
#endif
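	/* Grab the lock byte: ldstub atomically sets it to 0xff and
	 * returns the old value, so anything non-zero means another
	 * cpu holds the lock and we go spin.  The membar in the delay
	 * slot keeps the locking store ordered before the accesses of
	 * the critical section.  With the lock held, loop2 retries
	 * the final decrement; once the cas succeeds the counter is
	 * zero and we return 1 with the lock still held.
	 */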
to_zero:
	ldstub	[%o1], %g3
	brnz,pn	%g3, spin_on_lock
	 membar	#StoreLoad | #StoreStore
loop2:	cas	[%o0], %g2, %g7	/* ASSERT(g7 == 0) */
	cmp	%g2, %g7
	be,pt	%icc, out
	 mov	1, %g1
	lduw	[%o0], %g2
	subcc	%g2, 1, %g7
	be,pn	%icc, loop2
	 nop
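	/* The counter changed under us and no longer decrements to
	 * zero: release the lock (the membar orders the critical
	 * section's accesses before the unlocking store), undo the
	 * preempt count bump, and retry the non-zero decrement with
	 * the freshly loaded value.
	 */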
	membar	#StoreStore | #LoadStore
	stb	%g0, [%o1]
#ifdef CONFIG_PREEMPT
	ldsw	[%g6 + TI_PRE_COUNT], %g3
	sub	%g3, 1, %g3
	stw	%g3, [%g6 + TI_PRE_COUNT]
#endif
	b,pt	%xcc, nzero
	 nop
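	/* Contended lock: spin with plain loads, which do not bounce
	 * the lock's cacheline between cpus the way repeated ldstubs
	 * would, and retry the ldstub only once the byte reads back
	 * as zero.
	 */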
spin_on_lock:
	ldub	[%o1], %g3
	brnz,pt	%g3, spin_on_lock
	 membar	#LoadLoad
	ba,pt	%xcc, to_zero
	 nop
	nop