/*
 * Copyright 2011 Tilera Corporation. All Rights Reserved.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation, version 2.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE, GOOD TITLE or
 * NON INFRINGEMENT.  See the GNU General Public License for
 * more details.
 */

#ifndef _ASM_TILE_BITOPS_64_H
#define _ASM_TILE_BITOPS_64_H

#include <linux/compiler.h>
#include <asm/cmpxchg.h>

/* See <asm/bitops.h> for API comments. */
  19. static inline void set_bit(unsigned nr, volatile unsigned long *addr)
  20. {
  21. unsigned long mask = (1UL << (nr % BITS_PER_LONG));
  22. __insn_fetchor((void *)(addr + nr / BITS_PER_LONG), mask);
  23. }
  24. static inline void clear_bit(unsigned nr, volatile unsigned long *addr)
  25. {
  26. unsigned long mask = (1UL << (nr % BITS_PER_LONG));
  27. __insn_fetchand((void *)(addr + nr / BITS_PER_LONG), ~mask);
  28. }
  29. #define smp_mb__before_clear_bit() smp_mb()
  30. #define smp_mb__after_clear_bit() smp_mb()
  31. static inline void change_bit(unsigned nr, volatile unsigned long *addr)
  32. {
  33. unsigned long mask = (1UL << (nr % BITS_PER_LONG));
  34. unsigned long guess, oldval;
  35. addr += nr / BITS_PER_LONG;
  36. oldval = *addr;
  37. do {
  38. guess = oldval;
  39. oldval = cmpxchg(addr, guess, guess ^ mask);
  40. } while (guess != oldval);
  41. }
  42. /*
  43. * The test_and_xxx_bit() routines require a memory fence before we
  44. * start the operation, and after the operation completes. We use
  45. * smp_mb() before, and rely on the "!= 0" comparison, plus a compiler
  46. * barrier(), to block until the atomic op is complete.
  47. */
  48. static inline int test_and_set_bit(unsigned nr, volatile unsigned long *addr)
  49. {
  50. int val;
  51. unsigned long mask = (1UL << (nr % BITS_PER_LONG));
  52. smp_mb(); /* barrier for proper semantics */
  53. val = (__insn_fetchor((void *)(addr + nr / BITS_PER_LONG), mask)
  54. & mask) != 0;
  55. barrier();
  56. return val;
  57. }
  58. static inline int test_and_clear_bit(unsigned nr, volatile unsigned long *addr)
  59. {
  60. int val;
  61. unsigned long mask = (1UL << (nr % BITS_PER_LONG));
  62. smp_mb(); /* barrier for proper semantics */
  63. val = (__insn_fetchand((void *)(addr + nr / BITS_PER_LONG), ~mask)
  64. & mask) != 0;
  65. barrier();
  66. return val;
  67. }
  68. static inline int test_and_change_bit(unsigned nr,
  69. volatile unsigned long *addr)
  70. {
  71. unsigned long mask = (1UL << (nr % BITS_PER_LONG));
  72. unsigned long guess, oldval;
  73. addr += nr / BITS_PER_LONG;
  74. oldval = *addr;
  75. do {
  76. guess = oldval;
  77. oldval = cmpxchg(addr, guess, guess ^ mask);
  78. } while (guess != oldval);
  79. return (oldval & mask) != 0;
  80. }
#include <asm-generic/bitops/ext2-atomic-setbit.h>

#endif /* _ASM_TILE_BITOPS_64_H */