/* dec_and_lock.c */
/*
 * x86 version of "atomic_dec_and_lock()" using
 * the atomic "cmpxchg" instruction.
 *
 * (For CPUs lacking cmpxchg, we use the slow
 * generic version, and this one never even gets
 * compiled.)
 */
#include <linux/spinlock.h>
#include <asm/atomic.h>

/*
 * _atomic_dec_and_lock - atomically decrement @atomic and, if the new
 * value is zero, acquire @lock.
 *
 * Returns 1 with @lock held when the counter reached zero; the caller
 * must release the lock.  Returns 0 (lock untouched) when the counter
 * was merely decremented.
 */
int _atomic_dec_and_lock(atomic_t *atomic, spinlock_t *lock)
{
	int counter;
	int newcount;

repeat:
	/* Snapshot the counter and compute the decremented value. */
	counter = atomic_read(atomic);
	newcount = counter-1;
	if (!newcount)
		goto slow_path;
	/*
	 * Fast path: the result is nonzero, so no lock is needed.
	 * cmpxchg stores newcount only if the counter still equals the
	 * snapshot in eax ("0" ties the input to the "=a" output).
	 */
	asm volatile("lock; cmpxchgl %1,%2"
		:"=a" (newcount)
		:"r" (newcount), "m" (atomic->counter), "0" (counter));
	/* If the above failed, "eax" will have changed */
	if (newcount != counter)
		goto repeat;	/* counter moved under us; retry */
	return 0;
slow_path:
	/*
	 * The decrement would hit zero: take the lock first, then
	 * re-check under it so the 1 -> 0 transition is serialized
	 * against other users of @lock.
	 */
	spin_lock(lock);
	if (atomic_dec_and_test(atomic))
		return 1;	/* reached zero: return with lock held */
	spin_unlock(lock);	/* raced: someone else changed the count */
	return 0;
}