  1. /*
  2. * bitops.c: atomic operations which got too long to be inlined all over
  3. * the place.
  4. *
  5. * Copyright 1999 Philipp Rumpf (prumpf@tux.org)
  6. * Copyright 2000 Grant Grundler (grundler@cup.hp.com)
  7. */
  8. #include <linux/config.h>
  9. #include <linux/kernel.h>
  10. #include <linux/spinlock.h>
  11. #include <asm/system.h>
  12. #include <asm/atomic.h>
#ifdef CONFIG_SMP
/*
 * Pool of spinlocks used to serialize the emulated atomic operations
 * below on SMP.  The lock for a given operation is presumably selected
 * by hashing the target address (via _atomic_spin_lock_irqsave() in
 * <asm/atomic.h>) so unrelated addresses rarely contend -- confirm
 * against the macro definition.
 */
raw_spinlock_t __atomic_hash[ATOMIC_HASH_SIZE] __lock_aligned = {
	[0 ... (ATOMIC_HASH_SIZE-1)] = __RAW_SPIN_LOCK_UNLOCKED
};
#endif
  18. #ifdef __LP64__
  19. unsigned long __xchg64(unsigned long x, unsigned long *ptr)
  20. {
  21. unsigned long temp, flags;
  22. _atomic_spin_lock_irqsave(ptr, flags);
  23. temp = *ptr;
  24. *ptr = x;
  25. _atomic_spin_unlock_irqrestore(ptr, flags);
  26. return temp;
  27. }
  28. #endif
  29. unsigned long __xchg32(int x, int *ptr)
  30. {
  31. unsigned long flags;
  32. long temp;
  33. _atomic_spin_lock_irqsave(ptr, flags);
  34. temp = (long) *ptr; /* XXX - sign extension wanted? */
  35. *ptr = x;
  36. _atomic_spin_unlock_irqrestore(ptr, flags);
  37. return (unsigned long)temp;
  38. }
  39. unsigned long __xchg8(char x, char *ptr)
  40. {
  41. unsigned long flags;
  42. long temp;
  43. _atomic_spin_lock_irqsave(ptr, flags);
  44. temp = (long) *ptr; /* XXX - sign extension wanted? */
  45. *ptr = x;
  46. _atomic_spin_unlock_irqrestore(ptr, flags);
  47. return (unsigned long)temp;
  48. }
  49. #ifdef __LP64__
  50. unsigned long __cmpxchg_u64(volatile unsigned long *ptr, unsigned long old, unsigned long new)
  51. {
  52. unsigned long flags;
  53. unsigned long prev;
  54. _atomic_spin_lock_irqsave(ptr, flags);
  55. if ((prev = *ptr) == old)
  56. *ptr = new;
  57. _atomic_spin_unlock_irqrestore(ptr, flags);
  58. return prev;
  59. }
  60. #endif
  61. unsigned long __cmpxchg_u32(volatile unsigned int *ptr, unsigned int old, unsigned int new)
  62. {
  63. unsigned long flags;
  64. unsigned int prev;
  65. _atomic_spin_lock_irqsave(ptr, flags);
  66. if ((prev = *ptr) == old)
  67. *ptr = new;
  68. _atomic_spin_unlock_irqrestore(ptr, flags);
  69. return (unsigned long)prev;
  70. }