From 4abab5ad6c8465a7528ccdd5f49367da05f78bbd Mon Sep 17 00:00:00 2001
From: Vladimir Azarov
Date: Tue, 1 Oct 2024 15:47:05 +0200
Subject: Initial version

---
 arch/loongarch64/atomic_arch.h | 53 ++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 53 insertions(+)
 create mode 100644 arch/loongarch64/atomic_arch.h

(limited to 'arch/loongarch64/atomic_arch.h')

diff --git a/arch/loongarch64/atomic_arch.h b/arch/loongarch64/atomic_arch.h
new file mode 100644
index 0000000..2225d02
--- /dev/null
+++ b/arch/loongarch64/atomic_arch.h
@@ -0,0 +1,53 @@
+#define a_ll a_ll
+static inline int a_ll(volatile int *p)
+{
+	int v;
+	__asm__ __volatile__ (
+		"ll.w %0, %1"
+		: "=r"(v)
+		: "ZC"(*p));
+	return v;
+}
+
+#define a_sc a_sc
+static inline int a_sc(volatile int *p, int v)
+{
+	int r;
+	__asm__ __volatile__ (
+		"sc.w %0, %1"
+		: "=r"(r), "=ZC"(*p)
+		: "0"(v) : "memory");
+	return r;
+}
+
+#define a_ll_p a_ll_p
+static inline void *a_ll_p(volatile void *p)
+{
+	void *v;
+	__asm__ __volatile__ (
+		"ll.d %0, %1"
+		: "=r"(v)
+		: "ZC"(*(void *volatile *)p));
+	return v;
+}
+
+#define a_sc_p a_sc_p
+static inline int a_sc_p(volatile void *p, void *v)
+{
+	long r;
+	__asm__ __volatile__ (
+		"sc.d %0, %1"
+		: "=r"(r), "=ZC"(*(void *volatile *)p)
+		: "0"(v)
+		: "memory");
+	return r;
+}
+
+#define a_barrier a_barrier
+static inline void a_barrier()
+{
+	__asm__ __volatile__ ("dbar 0" : : : "memory");
+}
+
+#define a_pre_llsc a_barrier
+#define a_post_llsc a_barrier
--
cgit v1.2.3
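
For context (not part of the patch): the a_ll/a_sc pairs and the dbar-based barrier above are the hooks that musl's generic atomic layer composes into higher-level operations. A minimal sketch of that composition for a 32-bit compare-and-swap, assuming the usual musl-style LL/SC retry loop in src/internal/atomic.h (illustrative only, not the literal upstream code):

/* Sketch: compare-and-swap built from the primitives above. a_pre_llsc and
 * a_post_llsc expand to a_barrier (dbar 0) in this patch, so the operation
 * acts as a full barrier on both sides. */
static inline int a_cas(volatile int *p, int t, int s)
{
	int old;
	a_pre_llsc();
	do old = a_ll(p);               /* ll.w loads the value and starts the reservation */
	while (old==t && !a_sc(p, s));  /* sc.w fails if another write intervened; retry */
	a_post_llsc();
	return old;
}

The loop keeps re-reading with ll.w until either the loaded value no longer equals the expected value t (the CAS fails and old is returned) or sc.w succeeds in storing s.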