/* dec_and_lock.c */
/*
 * x86 version of "atomic_dec_and_lock()" using
 * the atomic "cmpxchg" instruction.
 *
 * (For CPUs lacking cmpxchg, we use the slow
 * generic version, and this one never even gets
 * compiled).
 */
  9. #include <linux/spinlock.h>
  10. #include <linux/module.h>
  11. #include <asm/atomic.h>
  12. int _atomic_dec_and_lock(atomic_t *atomic, spinlock_t *lock)
  13. {
  14. int counter;
  15. int newcount;
  16. repeat:
  17. counter = atomic_read(atomic);
  18. newcount = counter-1;
  19. if (!newcount)
  20. goto slow_path;
  21. asm volatile("lock; cmpxchgl %1,%2"
  22. :"=a" (newcount)
  23. :"r" (newcount), "m" (atomic->counter), "0" (counter));
  24. /* If the above failed, "eax" will have changed */
  25. if (newcount != counter)
  26. goto repeat;
  27. return 0;
  28. slow_path:
  29. spin_lock(lock);
  30. if (atomic_dec_and_test(atomic))
  31. return 1;
  32. spin_unlock(lock);
  33. return 0;
  34. }
  35. EXPORT_SYMBOL(_atomic_dec_and_lock);