2874c5fd28426 (Thomas Gleixner 2019-05-27 08:55:01 +0200 1) // SPDX-License-Identifier: GPL-2.0-or-later
09d4e0edd4614 (Paul Mackerras 2009-06-12 21:10:05 +0000 2) /*
09d4e0edd4614 (Paul Mackerras 2009-06-12 21:10:05 +0000 3) * Generic implementation of 64-bit atomics using spinlocks,
09d4e0edd4614 (Paul Mackerras 2009-06-12 21:10:05 +0000 4) * useful on processors that don't have 64-bit atomic instructions.
09d4e0edd4614 (Paul Mackerras 2009-06-12 21:10:05 +0000 5) *
09d4e0edd4614 (Paul Mackerras 2009-06-12 21:10:05 +0000 6) * Copyright © 2009 Paul Mackerras, IBM Corp. <paulus@au1.ibm.com>
09d4e0edd4614 (Paul Mackerras 2009-06-12 21:10:05 +0000 7) */
09d4e0edd4614 (Paul Mackerras 2009-06-12 21:10:05 +0000 8) #include <linux/types.h>
09d4e0edd4614 (Paul Mackerras 2009-06-12 21:10:05 +0000 9) #include <linux/cache.h>
09d4e0edd4614 (Paul Mackerras 2009-06-12 21:10:05 +0000 10) #include <linux/spinlock.h>
09d4e0edd4614 (Paul Mackerras 2009-06-12 21:10:05 +0000 11) #include <linux/init.h>
8bc3bcc93a2b4 (Paul Gortmaker 2011-11-16 21:29:17 -0500 12) #include <linux/export.h>
60063497a95e7 (Arun Sharma 2011-07-26 16:09:06 -0700 13) #include <linux/atomic.h>
09d4e0edd4614 (Paul Mackerras 2009-06-12 21:10:05 +0000 14)
09d4e0edd4614 (Paul Mackerras 2009-06-12 21:10:05 +0000 15) /*
09d4e0edd4614 (Paul Mackerras 2009-06-12 21:10:05 +0000 16) * We use a hashed array of spinlocks to provide exclusive access
 * to each atomic64_t variable.  Since this is expected to be used on
09d4e0edd4614 (Paul Mackerras 2009-06-12 21:10:05 +0000 18) * systems with small numbers of CPUs (<= 4 or so), we use a
09d4e0edd4614 (Paul Mackerras 2009-06-12 21:10:05 +0000 19) * relatively small array of 16 spinlocks to avoid wasting too much
09d4e0edd4614 (Paul Mackerras 2009-06-12 21:10:05 +0000 20) * memory on the spinlock array.
09d4e0edd4614 (Paul Mackerras 2009-06-12 21:10:05 +0000 21) */
09d4e0edd4614 (Paul Mackerras 2009-06-12 21:10:05 +0000 22) #define NR_LOCKS 16
09d4e0edd4614 (Paul Mackerras 2009-06-12 21:10:05 +0000 23)
09d4e0edd4614 (Paul Mackerras 2009-06-12 21:10:05 +0000 24) /*
09d4e0edd4614 (Paul Mackerras 2009-06-12 21:10:05 +0000 25) * Ensure each lock is in a separate cacheline.
09d4e0edd4614 (Paul Mackerras 2009-06-12 21:10:05 +0000 26) */
static union {
	raw_spinlock_t lock;
	char pad[L1_CACHE_BYTES];	/* pads each entry out to a full cacheline */
} atomic64_lock[NR_LOCKS] __cacheline_aligned_in_smp = {
	/* Statically initialize all NR_LOCKS locks to the unlocked state. */
	[0 ... (NR_LOCKS - 1)] = {
		.lock =  __RAW_SPIN_LOCK_UNLOCKED(atomic64_lock.lock),
	},
};
09d4e0edd4614 (Paul Mackerras 2009-06-12 21:10:05 +0000 35)
cb475de3d12df (Yong Zhang 2011-09-14 15:49:24 +0800 36) static inline raw_spinlock_t *lock_addr(const atomic64_t *v)
09d4e0edd4614 (Paul Mackerras 2009-06-12 21:10:05 +0000 37) {
09d4e0edd4614 (Paul Mackerras 2009-06-12 21:10:05 +0000 38) unsigned long addr = (unsigned long) v;
09d4e0edd4614 (Paul Mackerras 2009-06-12 21:10:05 +0000 39)
09d4e0edd4614 (Paul Mackerras 2009-06-12 21:10:05 +0000 40) addr >>= L1_CACHE_SHIFT;
09d4e0edd4614 (Paul Mackerras 2009-06-12 21:10:05 +0000 41) addr ^= (addr >> 8) ^ (addr >> 16);
09d4e0edd4614 (Paul Mackerras 2009-06-12 21:10:05 +0000 42) return &atomic64_lock[addr & (NR_LOCKS - 1)].lock;
09d4e0edd4614 (Paul Mackerras 2009-06-12 21:10:05 +0000 43) }
09d4e0edd4614 (Paul Mackerras 2009-06-12 21:10:05 +0000 44)
9255813d5841e (Mark Rutland 2019-05-22 14:22:35 +0100 45) s64 atomic64_read(const atomic64_t *v)
09d4e0edd4614 (Paul Mackerras 2009-06-12 21:10:05 +0000 46) {
09d4e0edd4614 (Paul Mackerras 2009-06-12 21:10:05 +0000 47) unsigned long flags;
cb475de3d12df (Yong Zhang 2011-09-14 15:49:24 +0800 48) raw_spinlock_t *lock = lock_addr(v);
9255813d5841e (Mark Rutland 2019-05-22 14:22:35 +0100 49) s64 val;
09d4e0edd4614 (Paul Mackerras 2009-06-12 21:10:05 +0000 50)
f59ca05871a05 (Shan Hai 2011-09-01 11:32:03 +0800 51) raw_spin_lock_irqsave(lock, flags);
09d4e0edd4614 (Paul Mackerras 2009-06-12 21:10:05 +0000 52) val = v->counter;
f59ca05871a05 (Shan Hai 2011-09-01 11:32:03 +0800 53) raw_spin_unlock_irqrestore(lock, flags);
09d4e0edd4614 (Paul Mackerras 2009-06-12 21:10:05 +0000 54) return val;
09d4e0edd4614 (Paul Mackerras 2009-06-12 21:10:05 +0000 55) }
3fc7b4b220c7e (Roland Dreier 2009-07-29 15:04:02 -0700 56) EXPORT_SYMBOL(atomic64_read);
09d4e0edd4614 (Paul Mackerras 2009-06-12 21:10:05 +0000 57)
9255813d5841e (Mark Rutland 2019-05-22 14:22:35 +0100 58) void atomic64_set(atomic64_t *v, s64 i)
09d4e0edd4614 (Paul Mackerras 2009-06-12 21:10:05 +0000 59) {
09d4e0edd4614 (Paul Mackerras 2009-06-12 21:10:05 +0000 60) unsigned long flags;
cb475de3d12df (Yong Zhang 2011-09-14 15:49:24 +0800 61) raw_spinlock_t *lock = lock_addr(v);
09d4e0edd4614 (Paul Mackerras 2009-06-12 21:10:05 +0000 62)
f59ca05871a05 (Shan Hai 2011-09-01 11:32:03 +0800 63) raw_spin_lock_irqsave(lock, flags);
09d4e0edd4614 (Paul Mackerras 2009-06-12 21:10:05 +0000 64) v->counter = i;
f59ca05871a05 (Shan Hai 2011-09-01 11:32:03 +0800 65) raw_spin_unlock_irqrestore(lock, flags);
09d4e0edd4614 (Paul Mackerras 2009-06-12 21:10:05 +0000 66) }
3fc7b4b220c7e (Roland Dreier 2009-07-29 15:04:02 -0700 67) EXPORT_SYMBOL(atomic64_set);
09d4e0edd4614 (Paul Mackerras 2009-06-12 21:10:05 +0000 68)
/*
 * Generate "void atomic64_<op>(s64 a, atomic64_t *v)": apply
 * "v->counter <c_op> a" while holding the spinlock hashed from v.
 * No value is returned.
 */
#define ATOMIC64_OP(op, c_op)						\
void atomic64_##op(s64 a, atomic64_t *v)				\
{									\
	unsigned long flags;						\
	raw_spinlock_t *lock = lock_addr(v);				\
									\
	raw_spin_lock_irqsave(lock, flags);				\
	v->counter c_op a;						\
	raw_spin_unlock_irqrestore(lock, flags);			\
}									\
EXPORT_SYMBOL(atomic64_##op);
560cb12a4080a (Peter Zijlstra 2014-04-23 16:12:30 +0200 80)
/*
 * Generate "s64 atomic64_<op>_return(s64 a, atomic64_t *v)": apply
 * "v->counter <c_op> a" under the hashed lock and return the *new*
 * value of the counter.
 */
#define ATOMIC64_OP_RETURN(op, c_op)					\
s64 atomic64_##op##_return(s64 a, atomic64_t *v)			\
{									\
	unsigned long flags;						\
	raw_spinlock_t *lock = lock_addr(v);				\
	s64 val;							\
									\
	raw_spin_lock_irqsave(lock, flags);				\
	val = (v->counter c_op a);					\
	raw_spin_unlock_irqrestore(lock, flags);			\
	return val;							\
}									\
EXPORT_SYMBOL(atomic64_##op##_return);
560cb12a4080a (Peter Zijlstra 2014-04-23 16:12:30 +0200 94)
/*
 * Generate "s64 atomic64_fetch_<op>(s64 a, atomic64_t *v)": apply
 * "v->counter <c_op> a" under the hashed lock and return the *old*
 * value of the counter (fetched before the update).
 */
#define ATOMIC64_FETCH_OP(op, c_op)					\
s64 atomic64_fetch_##op(s64 a, atomic64_t *v)				\
{									\
	unsigned long flags;						\
	raw_spinlock_t *lock = lock_addr(v);				\
	s64 val;							\
									\
	raw_spin_lock_irqsave(lock, flags);				\
	val = v->counter;						\
	v->counter c_op a;						\
	raw_spin_unlock_irqrestore(lock, flags);			\
	return val;							\
}									\
EXPORT_SYMBOL(atomic64_fetch_##op);
28aa2bda2211f (Peter Zijlstra 2016-04-18 00:54:38 +0200 109)
560cb12a4080a (Peter Zijlstra 2014-04-23 16:12:30 +0200 110) #define ATOMIC64_OPS(op, c_op) \
560cb12a4080a (Peter Zijlstra 2014-04-23 16:12:30 +0200 111) ATOMIC64_OP(op, c_op) \
28aa2bda2211f (Peter Zijlstra 2016-04-18 00:54:38 +0200 112) ATOMIC64_OP_RETURN(op, c_op) \
28aa2bda2211f (Peter Zijlstra 2016-04-18 00:54:38 +0200 113) ATOMIC64_FETCH_OP(op, c_op)
560cb12a4080a (Peter Zijlstra 2014-04-23 16:12:30 +0200 114)
560cb12a4080a (Peter Zijlstra 2014-04-23 16:12:30 +0200 115) ATOMIC64_OPS(add, +=)
560cb12a4080a (Peter Zijlstra 2014-04-23 16:12:30 +0200 116) ATOMIC64_OPS(sub, -=)
560cb12a4080a (Peter Zijlstra 2014-04-23 16:12:30 +0200 117)
560cb12a4080a (Peter Zijlstra 2014-04-23 16:12:30 +0200 118) #undef ATOMIC64_OPS
28aa2bda2211f (Peter Zijlstra 2016-04-18 00:54:38 +0200 119) #define ATOMIC64_OPS(op, c_op) \
28aa2bda2211f (Peter Zijlstra 2016-04-18 00:54:38 +0200 120) ATOMIC64_OP(op, c_op) \
28aa2bda2211f (Peter Zijlstra 2016-04-18 00:54:38 +0200 121) ATOMIC64_OP_RETURN(op, c_op) \
28aa2bda2211f (Peter Zijlstra 2016-04-18 00:54:38 +0200 122) ATOMIC64_FETCH_OP(op, c_op)
28aa2bda2211f (Peter Zijlstra 2016-04-18 00:54:38 +0200 123)
28aa2bda2211f (Peter Zijlstra 2016-04-18 00:54:38 +0200 124) ATOMIC64_OPS(and, &=)
28aa2bda2211f (Peter Zijlstra 2016-04-18 00:54:38 +0200 125) ATOMIC64_OPS(or, |=)
28aa2bda2211f (Peter Zijlstra 2016-04-18 00:54:38 +0200 126) ATOMIC64_OPS(xor, ^=)
28aa2bda2211f (Peter Zijlstra 2016-04-18 00:54:38 +0200 127)
28aa2bda2211f (Peter Zijlstra 2016-04-18 00:54:38 +0200 128) #undef ATOMIC64_OPS
28aa2bda2211f (Peter Zijlstra 2016-04-18 00:54:38 +0200 129) #undef ATOMIC64_FETCH_OP
560cb12a4080a (Peter Zijlstra 2014-04-23 16:12:30 +0200 130) #undef ATOMIC64_OP_RETURN
560cb12a4080a (Peter Zijlstra 2014-04-23 16:12:30 +0200 131) #undef ATOMIC64_OP
09d4e0edd4614 (Paul Mackerras 2009-06-12 21:10:05 +0000 132)
9255813d5841e (Mark Rutland 2019-05-22 14:22:35 +0100 133) s64 atomic64_dec_if_positive(atomic64_t *v)
09d4e0edd4614 (Paul Mackerras 2009-06-12 21:10:05 +0000 134) {
09d4e0edd4614 (Paul Mackerras 2009-06-12 21:10:05 +0000 135) unsigned long flags;
cb475de3d12df (Yong Zhang 2011-09-14 15:49:24 +0800 136) raw_spinlock_t *lock = lock_addr(v);
9255813d5841e (Mark Rutland 2019-05-22 14:22:35 +0100 137) s64 val;
09d4e0edd4614 (Paul Mackerras 2009-06-12 21:10:05 +0000 138)
f59ca05871a05 (Shan Hai 2011-09-01 11:32:03 +0800 139) raw_spin_lock_irqsave(lock, flags);
09d4e0edd4614 (Paul Mackerras 2009-06-12 21:10:05 +0000 140) val = v->counter - 1;
09d4e0edd4614 (Paul Mackerras 2009-06-12 21:10:05 +0000 141) if (val >= 0)
09d4e0edd4614 (Paul Mackerras 2009-06-12 21:10:05 +0000 142) v->counter = val;
f59ca05871a05 (Shan Hai 2011-09-01 11:32:03 +0800 143) raw_spin_unlock_irqrestore(lock, flags);
09d4e0edd4614 (Paul Mackerras 2009-06-12 21:10:05 +0000 144) return val;
09d4e0edd4614 (Paul Mackerras 2009-06-12 21:10:05 +0000 145) }
3fc7b4b220c7e (Roland Dreier 2009-07-29 15:04:02 -0700 146) EXPORT_SYMBOL(atomic64_dec_if_positive);
09d4e0edd4614 (Paul Mackerras 2009-06-12 21:10:05 +0000 147)
9255813d5841e (Mark Rutland 2019-05-22 14:22:35 +0100 148) s64 atomic64_cmpxchg(atomic64_t *v, s64 o, s64 n)
09d4e0edd4614 (Paul Mackerras 2009-06-12 21:10:05 +0000 149) {
09d4e0edd4614 (Paul Mackerras 2009-06-12 21:10:05 +0000 150) unsigned long flags;
cb475de3d12df (Yong Zhang 2011-09-14 15:49:24 +0800 151) raw_spinlock_t *lock = lock_addr(v);
9255813d5841e (Mark Rutland 2019-05-22 14:22:35 +0100 152) s64 val;
09d4e0edd4614 (Paul Mackerras 2009-06-12 21:10:05 +0000 153)
f59ca05871a05 (Shan Hai 2011-09-01 11:32:03 +0800 154) raw_spin_lock_irqsave(lock, flags);
09d4e0edd4614 (Paul Mackerras 2009-06-12 21:10:05 +0000 155) val = v->counter;
09d4e0edd4614 (Paul Mackerras 2009-06-12 21:10:05 +0000 156) if (val == o)
09d4e0edd4614 (Paul Mackerras 2009-06-12 21:10:05 +0000 157) v->counter = n;
f59ca05871a05 (Shan Hai 2011-09-01 11:32:03 +0800 158) raw_spin_unlock_irqrestore(lock, flags);
09d4e0edd4614 (Paul Mackerras 2009-06-12 21:10:05 +0000 159) return val;
09d4e0edd4614 (Paul Mackerras 2009-06-12 21:10:05 +0000 160) }
3fc7b4b220c7e (Roland Dreier 2009-07-29 15:04:02 -0700 161) EXPORT_SYMBOL(atomic64_cmpxchg);
09d4e0edd4614 (Paul Mackerras 2009-06-12 21:10:05 +0000 162)
9255813d5841e (Mark Rutland 2019-05-22 14:22:35 +0100 163) s64 atomic64_xchg(atomic64_t *v, s64 new)
09d4e0edd4614 (Paul Mackerras 2009-06-12 21:10:05 +0000 164) {
09d4e0edd4614 (Paul Mackerras 2009-06-12 21:10:05 +0000 165) unsigned long flags;
cb475de3d12df (Yong Zhang 2011-09-14 15:49:24 +0800 166) raw_spinlock_t *lock = lock_addr(v);
9255813d5841e (Mark Rutland 2019-05-22 14:22:35 +0100 167) s64 val;
09d4e0edd4614 (Paul Mackerras 2009-06-12 21:10:05 +0000 168)
f59ca05871a05 (Shan Hai 2011-09-01 11:32:03 +0800 169) raw_spin_lock_irqsave(lock, flags);
09d4e0edd4614 (Paul Mackerras 2009-06-12 21:10:05 +0000 170) val = v->counter;
09d4e0edd4614 (Paul Mackerras 2009-06-12 21:10:05 +0000 171) v->counter = new;
f59ca05871a05 (Shan Hai 2011-09-01 11:32:03 +0800 172) raw_spin_unlock_irqrestore(lock, flags);
09d4e0edd4614 (Paul Mackerras 2009-06-12 21:10:05 +0000 173) return val;
09d4e0edd4614 (Paul Mackerras 2009-06-12 21:10:05 +0000 174) }
3fc7b4b220c7e (Roland Dreier 2009-07-29 15:04:02 -0700 175) EXPORT_SYMBOL(atomic64_xchg);
09d4e0edd4614 (Paul Mackerras 2009-06-12 21:10:05 +0000 176)
9255813d5841e (Mark Rutland 2019-05-22 14:22:35 +0100 177) s64 atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u)
09d4e0edd4614 (Paul Mackerras 2009-06-12 21:10:05 +0000 178) {
09d4e0edd4614 (Paul Mackerras 2009-06-12 21:10:05 +0000 179) unsigned long flags;
cb475de3d12df (Yong Zhang 2011-09-14 15:49:24 +0800 180) raw_spinlock_t *lock = lock_addr(v);
9255813d5841e (Mark Rutland 2019-05-22 14:22:35 +0100 181) s64 val;
09d4e0edd4614 (Paul Mackerras 2009-06-12 21:10:05 +0000 182)
f59ca05871a05 (Shan Hai 2011-09-01 11:32:03 +0800 183) raw_spin_lock_irqsave(lock, flags);
00b808ab79ead (Mark Rutland 2018-06-21 13:13:11 +0100 184) val = v->counter;
00b808ab79ead (Mark Rutland 2018-06-21 13:13:11 +0100 185) if (val != u)
09d4e0edd4614 (Paul Mackerras 2009-06-12 21:10:05 +0000 186) v->counter += a;
f59ca05871a05 (Shan Hai 2011-09-01 11:32:03 +0800 187) raw_spin_unlock_irqrestore(lock, flags);
00b808ab79ead (Mark Rutland 2018-06-21 13:13:11 +0100 188)
00b808ab79ead (Mark Rutland 2018-06-21 13:13:11 +0100 189) return val;
09d4e0edd4614 (Paul Mackerras 2009-06-12 21:10:05 +0000 190) }
00b808ab79ead (Mark Rutland 2018-06-21 13:13:11 +0100 191) EXPORT_SYMBOL(atomic64_fetch_add_unless);