author     Linus Torvalds <torvalds@linux-foundation.org>  2016-09-09 10:54:29 -0700
committer  Linus Torvalds <torvalds@linux-foundation.org>  2016-09-09 10:54:29 -0700
commit     e45eeb43e67669f130c270a325383769d5fd73e2 (patch)
tree       020de8523c77f7f4ff1455a04e0f33da50d2630b
parent     2771fc8ed60e43a0148a2f2c672a55b8f565f418 (diff)
parent     2b9743441a312e0b0a2d87deae363eccbe9d0f00 (diff)
Merge tag 'arm64-fixes' of git://git.kernel.org/pub/scm/linux/kernel/git/arm64/linux
Pull arm64 fixes from Catalin Marinas:
- smp_mb__before_spinlock() changed to smp_mb() on arm64, since the
  generic definition as a plain smp_wmb() is not sufficient (a userspace
  sketch of the problem follows this list)
- avoid a recursive loop with the graph tracer by using
  preempt_(enable|disable)_notrace in _percpu_(read|write) (a sketch of
  the recursion follows the commit list below)
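As a rough userspace illustration of the first fix: the C11 sketch below
(lock_taken, cond and state are names invented for the example; this is
an analogue, not kernel code) shows why an acquire-only lock cannot
order a store issued before the lock against a load performed inside
the critical section, and why a store-store barrier such as smp_wmb()
does not help either.

#include <stdatomic.h>
#include <stdbool.h>

static atomic_bool lock_taken;
static atomic_int cond;   /* "wake condition", written before locking */
static atomic_int state;  /* stands in for the sleeper's task state */

/* test-and-set lock with acquire semantics only, like arch_spin_lock */
static void lock_acquire(void)
{
	while (atomic_exchange_explicit(&lock_taken, true,
					memory_order_acquire))
		;
}

static void unlock_release(void)
{
	atomic_store_explicit(&lock_taken, false, memory_order_release);
}

static void waker(void)
{
	atomic_store_explicit(&cond, 1, memory_order_relaxed);

	/*
	 * The acquire on lock_taken orders later accesses after the lock
	 * is taken, but it does not order the earlier store to cond
	 * against the later load of state. A store-store barrier
	 * (smp_wmb) would not either; only a full fence does, which is
	 * the role smp_mb__before_spinlock() plays in the kernel.
	 */
	atomic_thread_fence(memory_order_seq_cst);

	lock_acquire();
	if (atomic_load_explicit(&state, memory_order_relaxed) != 0) {
		/* sleeper still blocked: issue the wakeup here */
	}
	unlock_release();
}

int main(void)
{
	waker();
	return 0;
}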
* tag 'arm64-fixes' of git://git.kernel.org/pub/scm/linux/kernel/git/arm64/linux:
arm64: use preempt_disable_notrace in _percpu_read/write
arm64: spinlocks: implement smp_mb__before_spinlock() as smp_mb()
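The second fix guards against tracer re-entry. Below is a minimal
sketch of the recursion it breaks, using hypothetical userspace
stand-ins (trace_hook, traced_percpu_read) in place of the real ftrace
machinery: if preempt_disable() is itself traced and the graph tracer's
entry hook performs a per-CPU read, the hook keeps re-entering itself;
the _notrace variants do the same preempt-count update without being
visible to the tracer.

#include <stdio.h>

static _Thread_local int hook_depth;

static int traced_percpu_read(void);

/* entry hook the tracer runs on entry to every traced function */
static void trace_hook(void)
{
	if (hook_depth >= 3) {	/* guard so this demo terminates */
		puts("hook re-entered itself: unbounded recursion");
		return;
	}
	/* BUG: the hook itself uses a traced primitive */
	traced_percpu_read();
}

/* traced variant: instrumentation fires on every call */
static int traced_percpu_read(void)
{
	hook_depth++;
	trace_hook();
	hook_depth--;
	return 0;
}

/* notrace variant: same work, no instrumentation, safe inside hooks */
static int notrace_percpu_read(void)
{
	return 0;
}

int main(void)
{
	traced_percpu_read();	/* recurses until the depth guard trips */
	return notrace_percpu_read();
}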
-rw-r--r--  arch/arm64/include/asm/percpu.h    |  8 ++++----
-rw-r--r--  arch/arm64/include/asm/spinlock.h  | 10 ++++++++++
2 files changed, 14 insertions(+), 4 deletions(-)
diff --git a/arch/arm64/include/asm/percpu.h b/arch/arm64/include/asm/percpu.h
index 0a456bef8c79..2fee2f59288c 100644
--- a/arch/arm64/include/asm/percpu.h
+++ b/arch/arm64/include/asm/percpu.h
@@ -199,19 +199,19 @@ static inline unsigned long __percpu_xchg(void *ptr, unsigned long val,
 #define _percpu_read(pcp)						\
 ({									\
 	typeof(pcp) __retval;						\
-	preempt_disable();						\
+	preempt_disable_notrace();					\
 	__retval = (typeof(pcp))__percpu_read(raw_cpu_ptr(&(pcp)),	\
 					      sizeof(pcp));		\
-	preempt_enable();						\
+	preempt_enable_notrace();					\
 	__retval;							\
 })
 
 #define _percpu_write(pcp, val)						\
 do {									\
-	preempt_disable();						\
+	preempt_disable_notrace();					\
 	__percpu_write(raw_cpu_ptr(&(pcp)), (unsigned long)(val),	\
 				sizeof(pcp));				\
-	preempt_enable();						\
+	preempt_enable_notrace();					\
 } while(0)								\
 
 #define _pcp_protect(operation, pcp, val)			\
diff --git a/arch/arm64/include/asm/spinlock.h b/arch/arm64/include/asm/spinlock.h
index e875a5a551d7..89206b568cd4 100644
--- a/arch/arm64/include/asm/spinlock.h
+++ b/arch/arm64/include/asm/spinlock.h
@@ -363,4 +363,14 @@ static inline int arch_read_trylock(arch_rwlock_t *rw)
 #define arch_read_relax(lock)	cpu_relax()
 #define arch_write_relax(lock)	cpu_relax()
 
+/*
+ * Accesses appearing in program order before a spin_lock() operation
+ * can be reordered with accesses inside the critical section, by virtue
+ * of arch_spin_lock being constructed using acquire semantics.
+ *
+ * In cases where this is problematic (e.g. try_to_wake_up), an
+ * smp_mb__before_spinlock() can restore the required ordering.
+ */
+#define smp_mb__before_spinlock()	smp_mb()
+
 #endif /* __ASM_SPINLOCK_H */
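For completeness, a call-site sketch (publish_then_inspect, shared_flag
and waiter_sleeping are hypothetical names; the shape is modeled on the
try_to_wake_up() pattern the new in-tree comment cites, not copied from
the scheduler) showing how the barrier pairs with spin_lock() so the
preceding store cannot drift into the critical section:

#include <linux/spinlock.h>
#include <linux/compiler.h>

static DEFINE_SPINLOCK(wait_lock);
static int shared_flag;
static int waiter_sleeping;

static void publish_then_inspect(void)
{
	WRITE_ONCE(shared_flag, 1);	/* store before taking the lock */

	smp_mb__before_spinlock();	/* now a full smp_mb() on arm64 */
	spin_lock(&wait_lock);
	if (READ_ONCE(waiter_sleeping)) {
		/* waiter observed asleep: issue the wakeup here */
	}
	spin_unlock(&wait_lock);
}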