/* bitops.S */
/* bitops.S: Low level assembler bit operations.
 *
 * Copyright (C) 1996 David S. Miller (davem@caip.rutgers.edu)
 */
#include <asm/ptrace.h>
#include <asm/psr.h>
	.text
	.align	4
	.globl	__bitops_begin
__bitops_begin:
/* Take bits in %g2 and set them in the word at %g1,
 * returning in %g2 which of those bits were already set
 * in the original value.  %g4 holds the value to restore
 * into %o7 in the delay slot of the jmpl return; %g3, %g5
 * and %g7 can be used as temporaries and thus are considered
 * clobbered by all callers.
 */
	/* ___set_bit: atomically set the bits of mask %g2 in the word at
	 * [%g1].  Returns old & mask in %g2 (i.e. which requested bits
	 * were already set).  Atomicity: interrupts are blocked by raising
	 * PSR_PIL; on SMP a global byte spinlock additionally serializes
	 * all bitops across CPUs.
	 */
	.globl	___set_bit
___set_bit:
	rd	%psr, %g3		! save current PSR so it can be restored
	nop; nop; nop;			! rd %psr: allow for the register read delay
	or	%g3, PSR_PIL, %g5	! raise PIL to mask all maskable interrupts
	wr	%g5, 0x0, %psr
	nop; nop; nop			! wr %psr has a 3-instruction write delay
#ifdef CONFIG_SMP
	set	bitops_spinlock, %g5
2:	ldstub	[%g5], %g7		! Spin on the byte lock for SMP.
	orcc	%g7, 0x0, %g0		! Did we get it?
	bne	2b			! Nope...
#endif
	ld	[%g1], %g7		! %g7 = old word (fills bne delay slot on SMP)
	or	%g7, %g2, %g5		! %g5 = old | mask -> new value
	and	%g7, %g2, %g2		! %g2 = old & mask -> return value
#ifdef CONFIG_SMP
	st	%g5, [%g1]		! store new value while lock is still held...
	set	bitops_spinlock, %g5
	stb	%g0, [%g5]		! ...then release the byte lock
#else
	st	%g5, [%g1]
#endif
	wr	%g3, 0x0, %psr		! restore saved PSR (drops PIL again)
	nop; nop; nop			! wr %psr write delay, as above
	jmpl	%o7, %g0		! return to caller
	 mov	%g4, %o7		! delay slot: restore caller's %o7 from %g4
	/* ___clear_bit: same protocol as ___set_bit, but clears the bits
	 * of mask %g2 in the word at [%g1] (new = old & ~mask).  Returns
	 * old & mask in %g2 (which requested bits were previously set).
	 */
	.globl	___clear_bit
___clear_bit:
	rd	%psr, %g3		! save current PSR so it can be restored
	nop; nop; nop			! rd %psr: allow for the register read delay
	or	%g3, PSR_PIL, %g5	! raise PIL to mask all maskable interrupts
	wr	%g5, 0x0, %psr
	nop; nop; nop			! wr %psr has a 3-instruction write delay
#ifdef CONFIG_SMP
	set	bitops_spinlock, %g5
2:	ldstub	[%g5], %g7		! Spin on the byte lock for SMP.
	orcc	%g7, 0x0, %g0		! Did we get it?
	bne	2b			! Nope...
#endif
	ld	[%g1], %g7		! %g7 = old word (fills bne delay slot on SMP)
	andn	%g7, %g2, %g5		! %g5 = old & ~mask -> new value
	and	%g7, %g2, %g2		! %g2 = old & mask -> return value
#ifdef CONFIG_SMP
	st	%g5, [%g1]		! store new value while lock is still held...
	set	bitops_spinlock, %g5
	stb	%g0, [%g5]		! ...then release the byte lock
#else
	st	%g5, [%g1]
#endif
	wr	%g3, 0x0, %psr		! restore saved PSR (drops PIL again)
	nop; nop; nop			! wr %psr write delay, as above
	jmpl	%o7, %g0		! return to caller
	 mov	%g4, %o7		! delay slot: restore caller's %o7 from %g4
	/* ___change_bit: same protocol as ___set_bit, but toggles the bits
	 * of mask %g2 in the word at [%g1] (new = old ^ mask).  Returns
	 * old & mask in %g2 (which requested bits were previously set).
	 */
	.globl	___change_bit
___change_bit:
	rd	%psr, %g3		! save current PSR so it can be restored
	nop; nop; nop			! rd %psr: allow for the register read delay
	or	%g3, PSR_PIL, %g5	! raise PIL to mask all maskable interrupts
	wr	%g5, 0x0, %psr
	nop; nop; nop			! wr %psr has a 3-instruction write delay
#ifdef CONFIG_SMP
	set	bitops_spinlock, %g5
2:	ldstub	[%g5], %g7		! Spin on the byte lock for SMP.
	orcc	%g7, 0x0, %g0		! Did we get it?
	bne	2b			! Nope...
#endif
	ld	[%g1], %g7		! %g7 = old word (fills bne delay slot on SMP)
	xor	%g7, %g2, %g5		! %g5 = old ^ mask -> new value
	and	%g7, %g2, %g2		! %g2 = old & mask -> return value
#ifdef CONFIG_SMP
	st	%g5, [%g1]		! store new value while lock is still held...
	set	bitops_spinlock, %g5
	stb	%g0, [%g5]		! ...then release the byte lock
#else
	st	%g5, [%g1]
#endif
	wr	%g3, 0x0, %psr		! restore saved PSR (drops PIL again)
	nop; nop; nop			! wr %psr write delay, as above
	jmpl	%o7, %g0		! return to caller
	 mov	%g4, %o7		! delay slot: restore caller's %o7 from %g4
	.globl	__bitops_end
__bitops_end: