/* atomic.S */
  1. /* atomic.S: Move this stuff here for better ICACHE hit rates.
  2. *
  3. * Copyright (C) 1996 David S. Miller (davem@caipfs.rutgers.edu)
  4. */
  5. #include <linux/config.h>
  6. #include <asm/ptrace.h>
  7. #include <asm/psr.h>
  8. .text
  9. .align 4
  10. .globl __atomic_begin
  11. __atomic_begin:
  12. #ifndef CONFIG_SMP
  13. .globl ___xchg32_sun4c
  14. ___xchg32_sun4c:
  15. rd %psr, %g3
  16. andcc %g3, PSR_PIL, %g0
  17. bne 1f
  18. nop
  19. wr %g3, PSR_PIL, %psr
  20. nop; nop; nop
  21. 1:
  22. andcc %g3, PSR_PIL, %g0
  23. ld [%g1], %g7
  24. bne 1f
  25. st %g2, [%g1]
  26. wr %g3, 0x0, %psr
  27. nop; nop; nop
  28. 1:
  29. mov %g7, %g2
  30. jmpl %o7 + 8, %g0
  31. mov %g4, %o7
  32. .globl ___xchg32_sun4md
  33. ___xchg32_sun4md:
  34. swap [%g1], %g2
  35. jmpl %o7 + 8, %g0
  36. mov %g4, %o7
  37. #endif
  38. /* Read asm-sparc/atomic.h carefully to understand how this works for SMP.
  39. * Really, some things here for SMP are overly clever, go read the header.
  40. */
  41. .globl ___atomic24_add
  42. ___atomic24_add:
  43. rd %psr, %g3 ! Keep the code small, old way was stupid
  44. nop; nop; nop; ! Let the bits set
  45. or %g3, PSR_PIL, %g7 ! Disable interrupts
  46. wr %g7, 0x0, %psr ! Set %psr
  47. nop; nop; nop; ! Let the bits set
  48. #ifdef CONFIG_SMP
  49. 1: ldstub [%g1 + 3], %g7 ! Spin on the byte lock for SMP.
  50. orcc %g7, 0x0, %g0 ! Did we get it?
  51. bne 1b ! Nope...
  52. ld [%g1], %g7 ! Load locked atomic24_t
  53. sra %g7, 8, %g7 ! Get signed 24-bit integer
  54. add %g7, %g2, %g2 ! Add in argument
  55. sll %g2, 8, %g7 ! Transpose back to atomic24_t
  56. st %g7, [%g1] ! Clever: This releases the lock as well.
  57. #else
  58. ld [%g1], %g7 ! Load locked atomic24_t
  59. add %g7, %g2, %g2 ! Add in argument
  60. st %g2, [%g1] ! Store it back
  61. #endif
  62. wr %g3, 0x0, %psr ! Restore original PSR_PIL
  63. nop; nop; nop; ! Let the bits set
  64. jmpl %o7, %g0 ! NOTE: not + 8, see callers in atomic.h
  65. mov %g4, %o7 ! Restore %o7
  66. .globl ___atomic24_sub
  67. ___atomic24_sub:
  68. rd %psr, %g3 ! Keep the code small, old way was stupid
  69. nop; nop; nop; ! Let the bits set
  70. or %g3, PSR_PIL, %g7 ! Disable interrupts
  71. wr %g7, 0x0, %psr ! Set %psr
  72. nop; nop; nop; ! Let the bits set
  73. #ifdef CONFIG_SMP
  74. 1: ldstub [%g1 + 3], %g7 ! Spin on the byte lock for SMP.
  75. orcc %g7, 0x0, %g0 ! Did we get it?
  76. bne 1b ! Nope...
  77. ld [%g1], %g7 ! Load locked atomic24_t
  78. sra %g7, 8, %g7 ! Get signed 24-bit integer
  79. sub %g7, %g2, %g2 ! Subtract argument
  80. sll %g2, 8, %g7 ! Transpose back to atomic24_t
  81. st %g7, [%g1] ! Clever: This releases the lock as well
  82. #else
  83. ld [%g1], %g7 ! Load locked atomic24_t
  84. sub %g7, %g2, %g2 ! Subtract argument
  85. st %g2, [%g1] ! Store it back
  86. #endif
  87. wr %g3, 0x0, %psr ! Restore original PSR_PIL
  88. nop; nop; nop; ! Let the bits set
  89. jmpl %o7, %g0 ! NOTE: not + 8, see callers in atomic.h
  90. mov %g4, %o7 ! Restore %o7
  91. .globl __atomic_end
  92. __atomic_end: