From 3774b37a724bbdd3811f677b991d66b204328257 Mon Sep 17 00:00:00 2001
From: Will Deacon
Date: Fri, 2 May 2014 16:24:11 +0100
Subject: arm64: barriers: wire up new barrier options

Now that all callers of the barrier macros are updated to pass the
mandatory options, update the macros so the option is actually used.

Acked-by: Catalin Marinas
Signed-off-by: Will Deacon
Signed-off-by: Catalin Marinas
(cherry picked from commit 493e68747e07b69da3d746352525a1ebd6b61d82)
Signed-off-by: Mark Brown

Conflicts:
	arch/arm64/include/asm/barrier.h
---
 arch/arm64/include/asm/barrier.h | 48 ++++++++++++++++++++++++++++++++++------
 1 file changed, 41 insertions(+), 7 deletions(-)

diff --git a/arch/arm64/include/asm/barrier.h b/arch/arm64/include/asm/barrier.h
index c98d0a88916a..9a4c3d5b402e 100644
--- a/arch/arm64/include/asm/barrier.h
+++ b/arch/arm64/include/asm/barrier.h
@@ -25,21 +25,55 @@
 #define wfi()		asm volatile("wfi" : : : "memory")
 
 #define isb()		asm volatile("isb" : : : "memory")
-#define dmb(opt)	asm volatile("dmb sy" : : : "memory")
-#define dsb(opt)	asm volatile("dsb sy" : : : "memory")
+#define dmb(opt)	asm volatile("dmb " #opt : : : "memory")
+#define dsb(opt)	asm volatile("dsb " #opt : : : "memory")
 
 #define mb()		dsb(sy)
-#define rmb()		asm volatile("dsb ld" : : : "memory")
-#define wmb()		asm volatile("dsb st" : : : "memory")
+#define rmb()		dsb(ld)
+#define wmb()		dsb(st)
 
 #ifndef CONFIG_SMP
 #define smp_mb()	barrier()
 #define smp_rmb()	barrier()
 #define smp_wmb()	barrier()
 #else
-#define smp_mb()	asm volatile("dmb ish" : : : "memory")
-#define smp_rmb()	asm volatile("dmb ishld" : : : "memory")
-#define smp_wmb()	asm volatile("dmb ishst" : : : "memory")
+
+#define smp_mb()	dmb(ish)
+#define smp_rmb()	dmb(ishld)
+#define smp_wmb()	dmb(ishst)
+
+#define smp_store_release(p, v)						\
+do {									\
+	compiletime_assert_atomic_type(*p);				\
+	switch (sizeof(*p)) {						\
+	case 4:								\
+		asm volatile ("stlr %w1, %0"				\
+				: "=Q" (*p) : "r" (v) : "memory");	\
+		break;							\
+	case 8:								\
+		asm volatile ("stlr %1, %0"				\
+				: "=Q" (*p) : "r" (v) : "memory");	\
+		break;							\
+	}								\
+} while (0)
+
+#define smp_load_acquire(p)						\
+({									\
+	typeof(*p) ___p1;						\
+	compiletime_assert_atomic_type(*p);				\
+	switch (sizeof(*p)) {						\
+	case 4:								\
+		asm volatile ("ldar %w0, %1"				\
+			: "=r" (___p1) : "Q" (*p) : "memory");		\
+		break;							\
+	case 8:								\
+		asm volatile ("ldar %0, %1"				\
+			: "=r" (___p1) : "Q" (*p) : "memory");		\
+		break;							\
+	}								\
+	___p1;								\
+})
+
 #endif
 
 #define read_barrier_depends()		do { } while(0)
--
cgit v1.2.3
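For illustration only, a minimal standalone sketch (not part of the patch; the
demo function name and build command are assumptions) of how the reworked
dmb()/dsb() macros stringize their option argument. Built for AArch64, e.g.
with aarch64-linux-gnu-gcc -O2 -S, each invocation now emits the requested
barrier variant instead of an unconditional "dmb sy"/"dsb sy":

/* Mirrors the macros added by the patch above; no kernel headers needed. */
#define dmb(opt)	asm volatile("dmb " #opt : : : "memory")
#define dsb(opt)	asm volatile("dsb " #opt : : : "memory")

void barrier_demo(void)
{
	dmb(ish);	/* "dmb ish":   full barrier, inner-shareable domain */
	dmb(ishld);	/* "dmb ishld": orders prior loads against later accesses */
	dmb(ishst);	/* "dmb ishst": orders stores against later stores */
	dsb(sy);	/* "dsb sy":    full-system data synchronization barrier */
}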
From b817da4cf03a33c8c40e3bef04d3f3ed64277d47 Mon Sep 17 00:00:00 2001
From: Will Deacon
Date: Fri, 2 May 2014 16:24:12 +0100
Subject: arm64: barriers: use barrier() instead of smp_mb() when !SMP

The recently introduced acquire/release accessors refer to smp_mb()
in the !CONFIG_SMP case. This is confusing when reading the code, so
use barrier() directly when we know we're UP.

Acked-by: Catalin Marinas
Signed-off-by: Will Deacon
Signed-off-by: Catalin Marinas
(cherry picked from commit be6209a6107e0f63544e3e7d00fd5c95434ec80a)
Signed-off-by: Mark Brown

Conflicts:
	arch/arm64/include/asm/barrier.h
---
 arch/arm64/include/asm/barrier.h | 16 ++++++++++++++++
 1 file changed, 16 insertions(+)

diff --git a/arch/arm64/include/asm/barrier.h b/arch/arm64/include/asm/barrier.h
index 9a4c3d5b402e..709f1f6d6bbd 100644
--- a/arch/arm64/include/asm/barrier.h
+++ b/arch/arm64/include/asm/barrier.h
@@ -36,6 +36,22 @@
 #define smp_mb()	barrier()
 #define smp_rmb()	barrier()
 #define smp_wmb()	barrier()
+
+#define smp_store_release(p, v)						\
+do {									\
+	compiletime_assert_atomic_type(*p);				\
+	barrier();							\
+	ACCESS_ONCE(*p) = (v);						\
+} while (0)
+
+#define smp_load_acquire(p)						\
+({									\
+	typeof(*p) ___p1 = ACCESS_ONCE(*p);				\
+	compiletime_assert_atomic_type(*p);				\
+	barrier();							\
+	___p1;								\
+})
+
 #else
 
 #define smp_mb()	dmb(ish)
--
cgit v1.2.3
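To show the intended use of these accessors, a short kernel-style
message-passing sketch follows (the struct and function names are made up for
the example, not taken from the patches): on SMP kernels the release/acquire
pair maps to the STLR/LDAR forms added in the first patch, while on !SMP it
collapses to the compiler-barrier forms added here.

#include <asm/barrier.h>

struct message {
	int payload;
	int ready;
};

static struct message msg;

/* Producer: write the payload, then release-store the flag. */
static void publish(int value)
{
	msg.payload = value;
	smp_store_release(&msg.ready, 1);
}

/* Consumer: acquire-load the flag; if it is set, the payload write is visible. */
static int consume(int *value)
{
	if (!smp_load_acquire(&msg.ready))
		return 0;

	*value = msg.payload;
	return 1;
}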