Diffstat (limited to 'arch/loongarch/include/asm/atomic.h')
-rw-r--r--  arch/loongarch/include/asm/atomic.h | 197
1 file changed, 10 insertions(+), 187 deletions(-)
diff --git a/arch/loongarch/include/asm/atomic.h b/arch/loongarch/include/asm/atomic.h
index c86f0ab922ec..444b9ddcd004 100644
--- a/arch/loongarch/include/asm/atomic.h
+++ b/arch/loongarch/include/asm/atomic.h
@@ -11,6 +11,16 @@
#include <asm/barrier.h>
#include <asm/cmpxchg.h>
+#ifdef CONFIG_CPU_HAS_AMO
+#include <asm/atomic-amo.h>
+#else
+#include <asm/atomic-llsc.h>
+#endif
+
+#ifdef CONFIG_GENERIC_ATOMIC64
+#include <asm-generic/atomic64.h>
+#endif
+
#if __SIZEOF_LONG__ == 4
#define __LL "ll.w "
#define __SC "sc.w "
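
The hunks below delete the AMO-based implementations that now live behind the CONFIG_CPU_HAS_AMO switch added above. On cores without the AM* instructions, atomic-llsc.h presumably builds the same operations out of LL/SC retry loops. A minimal, hypothetical sketch of such a fallback follows; the header's actual contents are not part of this diff, and the constraint choices here are only illustrative:

/*
 * Hypothetical sketch of an LL/SC-based arch_atomic_add(), the kind of
 * fallback atomic-llsc.h would provide when CONFIG_CPU_HAS_AMO is unset.
 */
static inline void arch_atomic_add(int i, atomic_t *v)
{
	int temp;

	__asm__ __volatile__(
	"1:	ll.w	%0, %1		\n"	/* load-linked the counter */
	"	add.w	%0, %0, %2	\n"	/* compute the new value */
	"	sc.w	%0, %1		\n"	/* store-conditional: %0 becomes 0 on failure */
	"	beqz	%0, 1b		\n"	/* reservation lost: reload and retry */
	: "=&r" (temp), "+ZB" (v->counter)
	: "r" (i)
	: "memory");
}

The loop reloads and recomputes on every failed store-conditional, whereas the AM* forms removed below perform the whole read-modify-write in a single instruction.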
@@ -34,100 +44,6 @@
#define arch_atomic_read(v) READ_ONCE((v)->counter)
#define arch_atomic_set(v, i) WRITE_ONCE((v)->counter, (i))
-#define ATOMIC_OP(op, I, asm_op) \
-static inline void arch_atomic_##op(int i, atomic_t *v) \
-{ \
- __asm__ __volatile__( \
- "am"#asm_op".w" " $zero, %1, %0 \n" \
- : "+ZB" (v->counter) \
- : "r" (I) \
- : "memory"); \
-}
-
-#define ATOMIC_OP_RETURN(op, I, asm_op, c_op, mb, suffix) \
-static inline int arch_atomic_##op##_return##suffix(int i, atomic_t *v) \
-{ \
- int result; \
- \
- __asm__ __volatile__( \
- "am"#asm_op#mb".w" " %1, %2, %0 \n" \
- : "+ZB" (v->counter), "=&r" (result) \
- : "r" (I) \
- : "memory"); \
- \
- return result c_op I; \
-}
-
-#define ATOMIC_FETCH_OP(op, I, asm_op, mb, suffix) \
-static inline int arch_atomic_fetch_##op##suffix(int i, atomic_t *v) \
-{ \
- int result; \
- \
- __asm__ __volatile__( \
- "am"#asm_op#mb".w" " %1, %2, %0 \n" \
- : "+ZB" (v->counter), "=&r" (result) \
- : "r" (I) \
- : "memory"); \
- \
- return result; \
-}
-
-#define ATOMIC_OPS(op, I, asm_op, c_op) \
- ATOMIC_OP(op, I, asm_op) \
- ATOMIC_OP_RETURN(op, I, asm_op, c_op, _db, ) \
- ATOMIC_OP_RETURN(op, I, asm_op, c_op, , _relaxed) \
- ATOMIC_FETCH_OP(op, I, asm_op, _db, ) \
- ATOMIC_FETCH_OP(op, I, asm_op, , _relaxed)
-
-ATOMIC_OPS(add, i, add, +)
-ATOMIC_OPS(sub, -i, add, +)
-
-#define arch_atomic_add_return arch_atomic_add_return
-#define arch_atomic_add_return_acquire arch_atomic_add_return
-#define arch_atomic_add_return_release arch_atomic_add_return
-#define arch_atomic_add_return_relaxed arch_atomic_add_return_relaxed
-#define arch_atomic_sub_return arch_atomic_sub_return
-#define arch_atomic_sub_return_acquire arch_atomic_sub_return
-#define arch_atomic_sub_return_release arch_atomic_sub_return
-#define arch_atomic_sub_return_relaxed arch_atomic_sub_return_relaxed
-#define arch_atomic_fetch_add arch_atomic_fetch_add
-#define arch_atomic_fetch_add_acquire arch_atomic_fetch_add
-#define arch_atomic_fetch_add_release arch_atomic_fetch_add
-#define arch_atomic_fetch_add_relaxed arch_atomic_fetch_add_relaxed
-#define arch_atomic_fetch_sub arch_atomic_fetch_sub
-#define arch_atomic_fetch_sub_acquire arch_atomic_fetch_sub
-#define arch_atomic_fetch_sub_release arch_atomic_fetch_sub
-#define arch_atomic_fetch_sub_relaxed arch_atomic_fetch_sub_relaxed
-
-#undef ATOMIC_OPS
-
-#define ATOMIC_OPS(op, I, asm_op) \
- ATOMIC_OP(op, I, asm_op) \
- ATOMIC_FETCH_OP(op, I, asm_op, _db, ) \
- ATOMIC_FETCH_OP(op, I, asm_op, , _relaxed)
-
-ATOMIC_OPS(and, i, and)
-ATOMIC_OPS(or, i, or)
-ATOMIC_OPS(xor, i, xor)
-
-#define arch_atomic_fetch_and arch_atomic_fetch_and
-#define arch_atomic_fetch_and_acquire arch_atomic_fetch_and
-#define arch_atomic_fetch_and_release arch_atomic_fetch_and
-#define arch_atomic_fetch_and_relaxed arch_atomic_fetch_and_relaxed
-#define arch_atomic_fetch_or arch_atomic_fetch_or
-#define arch_atomic_fetch_or_acquire arch_atomic_fetch_or
-#define arch_atomic_fetch_or_release arch_atomic_fetch_or
-#define arch_atomic_fetch_or_relaxed arch_atomic_fetch_or_relaxed
-#define arch_atomic_fetch_xor arch_atomic_fetch_xor
-#define arch_atomic_fetch_xor_acquire arch_atomic_fetch_xor
-#define arch_atomic_fetch_xor_release arch_atomic_fetch_xor
-#define arch_atomic_fetch_xor_relaxed arch_atomic_fetch_xor_relaxed
-
-#undef ATOMIC_OPS
-#undef ATOMIC_FETCH_OP
-#undef ATOMIC_OP_RETURN
-#undef ATOMIC_OP
-
static inline int arch_atomic_fetch_add_unless(atomic_t *v, int a, int u)
{
int prev, rc;
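
For reference, the block deleted above generated one function per operation by macro expansion. ATOMIC_OP(add, i, add) produced the void form, which discards the old value into $zero, and ATOMIC_OP_RETURN(add, i, add, +, _db, ) produced the fully ordered returning form, which reconstructs the new value in C. Expanded, they read roughly as follows:

/* Expansion of the deleted ATOMIC_OP(add, i, add). */
static inline void arch_atomic_add(int i, atomic_t *v)
{
	__asm__ __volatile__(
	"amadd.w $zero, %1, %0 \n"	/* old value discarded into $zero */
	: "+ZB" (v->counter)
	: "r" (i)
	: "memory");
}

/* Expansion of the deleted ATOMIC_OP_RETURN(add, i, add, +, _db, ). */
static inline int arch_atomic_add_return(int i, atomic_t *v)
{
	int result;

	__asm__ __volatile__(
	"amadd_db.w %1, %2, %0 \n"	/* _db form: fully ordered */
	: "+ZB" (v->counter), "=&r" (result)
	: "r" (i)
	: "memory");

	return result + i;	/* amadd.w returns the old value; recompute the new one */
}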
@@ -194,99 +110,6 @@ static inline int arch_atomic_sub_if_positive(int i, atomic_t *v)
#define arch_atomic64_read(v) READ_ONCE((v)->counter)
#define arch_atomic64_set(v, i) WRITE_ONCE((v)->counter, (i))
-#define ATOMIC64_OP(op, I, asm_op) \
-static inline void arch_atomic64_##op(long i, atomic64_t *v) \
-{ \
- __asm__ __volatile__( \
- "am"#asm_op".d " " $zero, %1, %0 \n" \
- : "+ZB" (v->counter) \
- : "r" (I) \
- : "memory"); \
-}
-
-#define ATOMIC64_OP_RETURN(op, I, asm_op, c_op, mb, suffix) \
-static inline long arch_atomic64_##op##_return##suffix(long i, atomic64_t *v) \
-{ \
- long result; \
- __asm__ __volatile__( \
- "am"#asm_op#mb".d " " %1, %2, %0 \n" \
- : "+ZB" (v->counter), "=&r" (result) \
- : "r" (I) \
- : "memory"); \
- \
- return result c_op I; \
-}
-
-#define ATOMIC64_FETCH_OP(op, I, asm_op, mb, suffix) \
-static inline long arch_atomic64_fetch_##op##suffix(long i, atomic64_t *v) \
-{ \
- long result; \
- \
- __asm__ __volatile__( \
- "am"#asm_op#mb".d " " %1, %2, %0 \n" \
- : "+ZB" (v->counter), "=&r" (result) \
- : "r" (I) \
- : "memory"); \
- \
- return result; \
-}
-
-#define ATOMIC64_OPS(op, I, asm_op, c_op) \
- ATOMIC64_OP(op, I, asm_op) \
- ATOMIC64_OP_RETURN(op, I, asm_op, c_op, _db, ) \
- ATOMIC64_OP_RETURN(op, I, asm_op, c_op, , _relaxed) \
- ATOMIC64_FETCH_OP(op, I, asm_op, _db, ) \
- ATOMIC64_FETCH_OP(op, I, asm_op, , _relaxed)
-
-ATOMIC64_OPS(add, i, add, +)
-ATOMIC64_OPS(sub, -i, add, +)
-
-#define arch_atomic64_add_return arch_atomic64_add_return
-#define arch_atomic64_add_return_acquire arch_atomic64_add_return
-#define arch_atomic64_add_return_release arch_atomic64_add_return
-#define arch_atomic64_add_return_relaxed arch_atomic64_add_return_relaxed
-#define arch_atomic64_sub_return arch_atomic64_sub_return
-#define arch_atomic64_sub_return_acquire arch_atomic64_sub_return
-#define arch_atomic64_sub_return_release arch_atomic64_sub_return
-#define arch_atomic64_sub_return_relaxed arch_atomic64_sub_return_relaxed
-#define arch_atomic64_fetch_add arch_atomic64_fetch_add
-#define arch_atomic64_fetch_add_acquire arch_atomic64_fetch_add
-#define arch_atomic64_fetch_add_release arch_atomic64_fetch_add
-#define arch_atomic64_fetch_add_relaxed arch_atomic64_fetch_add_relaxed
-#define arch_atomic64_fetch_sub arch_atomic64_fetch_sub
-#define arch_atomic64_fetch_sub_acquire arch_atomic64_fetch_sub
-#define arch_atomic64_fetch_sub_release arch_atomic64_fetch_sub
-#define arch_atomic64_fetch_sub_relaxed arch_atomic64_fetch_sub_relaxed
-
-#undef ATOMIC64_OPS
-
-#define ATOMIC64_OPS(op, I, asm_op) \
- ATOMIC64_OP(op, I, asm_op) \
- ATOMIC64_FETCH_OP(op, I, asm_op, _db, ) \
- ATOMIC64_FETCH_OP(op, I, asm_op, , _relaxed)
-
-ATOMIC64_OPS(and, i, and)
-ATOMIC64_OPS(or, i, or)
-ATOMIC64_OPS(xor, i, xor)
-
-#define arch_atomic64_fetch_and arch_atomic64_fetch_and
-#define arch_atomic64_fetch_and_acquire arch_atomic64_fetch_and
-#define arch_atomic64_fetch_and_release arch_atomic64_fetch_and
-#define arch_atomic64_fetch_and_relaxed arch_atomic64_fetch_and_relaxed
-#define arch_atomic64_fetch_or arch_atomic64_fetch_or
-#define arch_atomic64_fetch_or_acquire arch_atomic64_fetch_or
-#define arch_atomic64_fetch_or_release arch_atomic64_fetch_or
-#define arch_atomic64_fetch_or_relaxed arch_atomic64_fetch_or_relaxed
-#define arch_atomic64_fetch_xor arch_atomic64_fetch_xor
-#define arch_atomic64_fetch_xor_acquire arch_atomic64_fetch_xor
-#define arch_atomic64_fetch_xor_release arch_atomic64_fetch_xor
-#define arch_atomic64_fetch_xor_relaxed arch_atomic64_fetch_xor_relaxed
-
-#undef ATOMIC64_OPS
-#undef ATOMIC64_FETCH_OP
-#undef ATOMIC64_OP_RETURN
-#undef ATOMIC64_OP
-
static inline long arch_atomic64_fetch_add_unless(atomic64_t *v, long a, long u)
{
long prev, rc;
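
One detail worth noting in the deleted 64-bit block: LoongArch has no amsub instruction, so ATOMIC64_OPS(sub, -i, add, +) implemented subtraction by negating the operand and reusing amadd, exactly as the 32-bit block did. Expanding ATOMIC64_OP_RETURN(sub, -i, add, +, _db, ) gives roughly:

/* Expansion of the deleted ATOMIC64_OP_RETURN(sub, -i, add, +, _db, ). */
static inline long arch_atomic64_sub_return(long i, atomic64_t *v)
{
	long result;
	__asm__ __volatile__(
	"amadd_db.d  %1, %2, %0 \n"	/* add the negated operand, fully ordered */
	: "+ZB" (v->counter), "=&r" (result)
	: "r" (-i)
	: "memory");

	return result + -i;	/* i.e. old value minus i */
}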