/* bitops.S */
  1. /* bitops.S: Low level assembler bit operations.
  2. *
  3. * Copyright (C) 1996 David S. Miller (davem@caip.rutgers.edu)
  4. */
  5. #include <linux/config.h>
  6. #include <asm/ptrace.h>
  7. #include <asm/psr.h>
	.text
	.align	4

	/* Start marker for the bitops region (paired with __bitops_end
	 * below); presumably used elsewhere to identify PCs inside these
	 * routines — TODO confirm against the rest of the kernel. */
	.globl	__bitops_begin
__bitops_begin:

/* Take bits in %g2 and set them in word at %g1,
 * return whether bits were set in original value
 * in %g2. %g4 holds value to restore into %o7
 * in delay slot of jmpl return, %g3 + %g5 + %g7 can be
 * used as temporaries and thus is considered clobbered
 * by all callers.
 */
/* ___set_bit: atomically OR the mask in %g2 into the word at [%g1].
 * In:   %g1 = word address, %g2 = bit mask, %g4 = caller's saved %o7
 * Out:  %g2 = old_word & mask (non-zero iff any requested bit was set)
 * Clob: %g3, %g5, %g7 (per the convention comment above)
 * Atomicity: disables interrupts via PSR_PIL; on SMP additionally
 * takes the global bitops_spinlock byte lock.
 */
	.globl	___set_bit
___set_bit:
	rd	%psr, %g3		! save PSR so interrupt level can be restored
	nop; nop; nop;			! rd/wr %psr hazard: 3-instruction delay
	or	%g3, PSR_PIL, %g5	! raise PIL to mask maskable interrupts
	wr	%g5, 0x0, %psr
	nop; nop; nop			! WRPSR takes effect after 3 instructions
#ifdef CONFIG_SMP
	set	bitops_spinlock, %g5
2:	ldstub	[%g5], %g7		! Spin on the byte lock for SMP.
	orcc	%g7, 0x0, %g0		! Did we get it?
	bne	2b			! Nope... (ld below sits in the delay slot; harmless to replay)
#endif
	ld	[%g1], %g7		! %g7 = old word value
	or	%g7, %g2, %g5		! %g5 = old | mask (new value)
	and	%g7, %g2, %g2		! return value: old & mask
#ifdef CONFIG_SMP
	st	%g5, [%g1]		! store new value while still holding the lock
	set	bitops_spinlock, %g5
	stb	%g0, [%g5]		! release the byte lock
#else
	st	%g5, [%g1]
#endif
	wr	%g3, 0x0, %psr		! restore original interrupt level
	nop; nop; nop			! WRPSR hazard again
	jmpl	%o7, %g0		! return to caller
	mov	%g4, %o7		! delay slot: restore caller's %o7 from %g4
/* Same as above, but clears the bits from %g2 instead. */
/* ___clear_bit: atomically AND-NOT the mask in %g2 from the word at [%g1].
 * In:   %g1 = word address, %g2 = bit mask, %g4 = caller's saved %o7
 * Out:  %g2 = old_word & mask (non-zero iff any requested bit was set)
 * Clob: %g3, %g5, %g7
 */
	.globl	___clear_bit
___clear_bit:
	rd	%psr, %g3		! save PSR for restore on exit
	nop; nop; nop			! rd/wr %psr hazard: 3-instruction delay
	or	%g3, PSR_PIL, %g5	! raise PIL to mask maskable interrupts
	wr	%g5, 0x0, %psr
	nop; nop; nop			! WRPSR takes effect after 3 instructions
#ifdef CONFIG_SMP
	set	bitops_spinlock, %g5
2:	ldstub	[%g5], %g7		! Spin on the byte lock for SMP.
	orcc	%g7, 0x0, %g0		! Did we get it?
	bne	2b			! Nope... (ld below sits in the delay slot; harmless to replay)
#endif
	ld	[%g1], %g7		! %g7 = old word value
	andn	%g7, %g2, %g5		! %g5 = old & ~mask (new value)
	and	%g7, %g2, %g2		! return value: old & mask
#ifdef CONFIG_SMP
	st	%g5, [%g1]		! store new value while still holding the lock
	set	bitops_spinlock, %g5
	stb	%g0, [%g5]		! release the byte lock
#else
	st	%g5, [%g1]
#endif
	wr	%g3, 0x0, %psr		! restore original interrupt level
	nop; nop; nop			! WRPSR hazard again
	jmpl	%o7, %g0		! return to caller
	mov	%g4, %o7		! delay slot: restore caller's %o7 from %g4
/* Same thing again, but this time toggles the bits from %g2. */
/* ___change_bit: atomically XOR the mask in %g2 into the word at [%g1].
 * In:   %g1 = word address, %g2 = bit mask, %g4 = caller's saved %o7
 * Out:  %g2 = old_word & mask (non-zero iff any requested bit was set)
 * Clob: %g3, %g5, %g7
 */
	.globl	___change_bit
___change_bit:
	rd	%psr, %g3		! save PSR for restore on exit
	nop; nop; nop			! rd/wr %psr hazard: 3-instruction delay
	or	%g3, PSR_PIL, %g5	! raise PIL to mask maskable interrupts
	wr	%g5, 0x0, %psr
	nop; nop; nop			! WRPSR takes effect after 3 instructions
#ifdef CONFIG_SMP
	set	bitops_spinlock, %g5
2:	ldstub	[%g5], %g7		! Spin on the byte lock for SMP.
	orcc	%g7, 0x0, %g0		! Did we get it?
	bne	2b			! Nope... (ld below sits in the delay slot; harmless to replay)
#endif
	ld	[%g1], %g7		! %g7 = old word value
	xor	%g7, %g2, %g5		! %g5 = old ^ mask (new value)
	and	%g7, %g2, %g2		! return value: old & mask
#ifdef CONFIG_SMP
	st	%g5, [%g1]		! store new value while still holding the lock
	set	bitops_spinlock, %g5
	stb	%g0, [%g5]		! release the byte lock
#else
	st	%g5, [%g1]
#endif
	wr	%g3, 0x0, %psr		! restore original interrupt level
	nop; nop; nop			! WRPSR hazard again
	jmpl	%o7, %g0		! return to caller
	mov	%g4, %o7		! delay slot: restore caller's %o7 from %g4
	/* End marker for the bitops region (paired with __bitops_begin). */
	.globl	__bitops_end
__bitops_end: