MIPS: cmpxchg: Omit redundant barriers for Loongson3

When building a kernel configured to support Loongson3 LL/SC workarounds
(ie. CONFIG_CPU_LOONGSON3_WORKAROUNDS=y) the inline assembly in
__xchg_asm() & __cmpxchg_asm() already emits completion barriers, and as
such we don't need to emit extra barriers from the xchg() or cmpxchg()
macros. Add compile-time constant checks causing us to omit the
redundant memory barriers.

Signed-off-by: Paul Burton <paul.burton@mips.com>
Cc: linux-mips@vger.kernel.org
Cc: Huacai Chen <chenhc@lemote.com>
Cc: Jiaxun Yang <jiaxun.yang@flygoat.com>
Cc: linux-kernel@vger.kernel.org
parent 6a57d2d1e7
commit a91f2a1dba

1 changed file with 23 additions and 3 deletions
--- a/arch/mips/include/asm/cmpxchg.h
+++ b/arch/mips/include/asm/cmpxchg.h
@@ -94,7 +94,13 @@ static inline unsigned long __xchg(volatile void *ptr, unsigned long x,
 ({									\
 	__typeof__(*(ptr)) __res;					\
 									\
-	smp_mb__before_llsc();						\
+	/*								\
+	 * In the Loongson3 workaround case __xchg_asm() already	\
+	 * contains a completion barrier prior to the LL, so we don't	\
+	 * need to emit an extra one here.				\
+	 */								\
+	if (!__SYNC_loongson3_war)					\
+		smp_mb__before_llsc();					\
 									\
 	__res = (__typeof__(*(ptr)))					\
 		__xchg((ptr), (unsigned long)(x), sizeof(*(ptr)));	\
@@ -179,9 +185,23 @@ static inline unsigned long __cmpxchg(volatile void *ptr, unsigned long old,
 ({									\
 	__typeof__(*(ptr)) __res;					\
 									\
-	smp_mb__before_llsc();						\
+	/*								\
+	 * In the Loongson3 workaround case __cmpxchg_asm() already	\
+	 * contains a completion barrier prior to the LL, so we don't	\
+	 * need to emit an extra one here.				\
+	 */								\
+	if (!__SYNC_loongson3_war)					\
+		smp_mb__before_llsc();					\
 									\
 	__res = cmpxchg_local((ptr), (old), (new));			\
-	smp_llsc_mb();							\
+									\
+	/*								\
+	 * In the Loongson3 workaround case __cmpxchg_asm() already	\
+	 * contains a completion barrier after the SC, so we don't	\
+	 * need to emit an extra one here.				\
+	 */								\
+	if (!__SYNC_loongson3_war)					\
+		smp_llsc_mb();						\
 									\
 	__res;								\
 })
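For readers who want to see why the added check costs nothing at runtime, here is a standalone sketch of the same gating pattern. It is not kernel code: LOONGSON3_WAR, sync_barrier() and xchg_sketch() are illustrative stand-ins for __SYNC_loongson3_war, smp_mb__before_llsc() and the kernel's xchg() macro, and a GCC/Clang atomic builtin stands in for the LL/SC inline assembly.

#include <stdio.h>

#ifndef LOONGSON3_WAR
#define LOONGSON3_WAR 0	/* set to 1 to model CONFIG_CPU_LOONGSON3_WORKAROUNDS=y */
#endif

/* Placeholder for a real completion barrier such as smp_mb__before_llsc(). */
static inline void sync_barrier(void)
{
	__sync_synchronize();
}

/*
 * Same shape as the kernel macro in the diff above: the barrier is only
 * emitted when the workaround constant is 0.  Because LOONGSON3_WAR is a
 * compile-time constant, the compiler deletes the dead branch entirely.
 */
#define xchg_sketch(ptr, x)						\
({									\
	__typeof__(*(ptr)) __res;					\
									\
	if (!LOONGSON3_WAR)						\
		sync_barrier();						\
									\
	__res = __atomic_exchange_n((ptr), (x), __ATOMIC_RELAXED);	\
	__res;								\
})

int main(void)
{
	int v = 1;
	int old = xchg_sketch(&v, 2);

	printf("old=%d new=%d\n", old, v);
	return 0;
}

Building this with, say, gcc -O2 -DLOONGSON3_WAR=1 lets the compiler fold the constant and drop the branch together with the barrier call, which mirrors how __SYNC_loongson3_war removes smp_mb__before_llsc() and smp_llsc_mb() from the macro expansions when the Loongson3 workaround is configured in, leaving only the barriers already emitted by __xchg_asm() / __cmpxchg_asm().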