mirror of https://github.com/torvalds/linux.git

	MIPS: Cleanup R10000_LLSC_WAR logic in atomic.h

This patch reduces the conditionals in the MIPS atomic code that deal
with a silicon bug in early R10000 CPUs which required a workaround of
a branch-likely instruction following a store-conditional in order to
guarantee the whole ll/sc sequence is atomic.  As the only real
difference is a branch-likely instruction (beqzl) over a standard
branch (beqz), the conditional is reduced to a single preprocessor
check at the top to pick the required instruction.

This requires writing the uses in assembler, thus we discard the
non-R10000 case that uses a mixture of a C do...while loop and
embedded assembler that was added back in commit 7837314d14 ("MIPS:
Get rid of branches to .subsections.").  A note found in the git log
for commit 5999eca25c1f ("[MIPS] Improve branch prediction in ll/sc
atomic operations.") is also addressed.

The macro definition for the branch instruction and the code comment
derive from a patch sent in earlier by Paul Burton for various cmpxchg
cleanups.
[paul.burton@mips.com:
  - Minor whitespace fix for checkpatch.]
Signed-off-by: Joshua Kinard <kumba@gentoo.org>
Signed-off-by: Paul Burton <paul.burton@mips.com>
Patchwork: https://patchwork.linux-mips.org/patch/17736/
Cc: Ralf Baechle <ralf@linux-mips.org>
Cc: James Hogan <james.hogan@mips.com>
Cc: "Maciej W. Rozycki" <macro@mips.com>
Cc: linux-mips@linux-mips.org
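
As background for the preprocessor trick the message describes: because
__scbeqz expands to a string literal, C's adjacent-string-literal
concatenation pastes the chosen mnemonic directly into each __asm__
template at compile time, leaving no runtime conditional behind. A
minimal host-compilable sketch (the forced R10000_LLSC_WAR value and
the main() harness are illustrative only, not kernel code):

#include <stdio.h>

#define R10000_LLSC_WAR 1		/* assumed value, for this demo only */

#if R10000_LLSC_WAR
# define __scbeqz "beqzl"		/* branch-likely workaround */
#else
# define __scbeqz "beqz"		/* standard branch */
#endif

int main(void)
{
	/* Adjacent string literals concatenate at translation time,
	 * exactly as they do inside the kernel's asm templates. */
	puts("\t" __scbeqz "\t%0, 1b");
	return 0;
}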
			
			
This commit is contained in:
		
							parent
							
								
									a0a5ac3ce8
								
							
						
					
					
						commit
						4936084c2e
					
				
					 1 changed files with 32 additions and 147 deletions
				
			
arch/mips/include/asm/atomic.h

@@ -22,6 +22,17 @@
 #include <asm/cmpxchg.h>
 #include <asm/war.h>
 
+/*
+ * Using a branch-likely instruction to check the result of an sc instruction
+ * works around a bug present in R10000 CPUs prior to revision 3.0 that could
+ * cause ll-sc sequences to execute non-atomically.
+ */
+#if R10000_LLSC_WAR
+# define __scbeqz "beqzl"
+#else
+# define __scbeqz "beqz"
+#endif
+
 #define ATOMIC_INIT(i)	  { (i) }
 
 /*
@@ -44,31 +55,18 @@
 #define ATOMIC_OP(op, c_op, asm_op)					      \
 static __inline__ void atomic_##op(int i, atomic_t * v)			      \
 {									      \
-	if (kernel_uses_llsc && R10000_LLSC_WAR) {			      \
+	if (kernel_uses_llsc) {						      \
 		int temp;						      \
 									      \
 		__asm__ __volatile__(					      \
-		"	.set	arch=r4000				\n"   \
+		"	.set	"MIPS_ISA_LEVEL"			\n"   \
 		"1:	ll	%0, %1		# atomic_" #op "	\n"   \
 		"	" #asm_op " %0, %2				\n"   \
 		"	sc	%0, %1					\n"   \
-		"	beqzl	%0, 1b					\n"   \
+		"\t" __scbeqz "	%0, 1b					\n"   \
 		"	.set	mips0					\n"   \
 		: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter)	      \
 		: "Ir" (i));						      \
-	} else if (kernel_uses_llsc) {					      \
-		int temp;						      \
-									      \
-		do {							      \
-			__asm__ __volatile__(				      \
-			"	.set	"MIPS_ISA_LEVEL"		\n"   \
-			"	ll	%0, %1		# atomic_" #op "\n"   \
-			"	" #asm_op " %0, %2			\n"   \
-			"	sc	%0, %1				\n"   \
-			"	.set	mips0				\n"   \
-			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter)  \
-			: "Ir" (i));					      \
-		} while (unlikely(!temp));				      \
 	} else {							      \
 		unsigned long flags;					      \
 									      \
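To see what the new single ll/sc path generates, here is a hand
expansion of ATOMIC_OP for the add case; atomic.h instantiates the
macro along the lines of ATOMIC_OPS(add, +=, addu). A sketch of the
post-patch result, not verbatim kernel output:

static __inline__ void atomic_add(int i, atomic_t *v)
{
	if (kernel_uses_llsc) {
		int temp;

		__asm__ __volatile__(
		"	.set	"MIPS_ISA_LEVEL"			\n"
		"1:	ll	%0, %1		# atomic_add		\n"
		"	addu	%0, %2					\n"
		"	sc	%0, %1					\n"
		"\t" __scbeqz "	%0, 1b		# retry if sc failed	\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter)
		: "Ir" (i));
	} else {
		unsigned long flags;

		/* no ll/sc on this CPU: fall back to disabling interrupts */
		raw_local_irq_save(flags);
		v->counter += i;
		raw_local_irq_restore(flags);
	}
}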
@@ -83,36 +81,20 @@ static __inline__ int atomic_##op##_return_relaxed(int i, atomic_t * v)	      \
 {									      \
 	int result;							      \
 									      \
-	if (kernel_uses_llsc && R10000_LLSC_WAR) {			      \
+	if (kernel_uses_llsc) {						      \
 		int temp;						      \
 									      \
 		__asm__ __volatile__(					      \
-		"	.set	arch=r4000				\n"   \
+		"	.set	"MIPS_ISA_LEVEL"			\n"   \
 		"1:	ll	%1, %2		# atomic_" #op "_return	\n"   \
 		"	" #asm_op " %0, %1, %3				\n"   \
 		"	sc	%0, %2					\n"   \
-		"	beqzl	%0, 1b					\n"   \
+		"\t" __scbeqz "	%0, 1b					\n"   \
 		"	" #asm_op " %0, %1, %3				\n"   \
 		"	.set	mips0					\n"   \
 		: "=&r" (result), "=&r" (temp),				      \
 		  "+" GCC_OFF_SMALL_ASM() (v->counter)			      \
 		: "Ir" (i));						      \
-	} else if (kernel_uses_llsc) {					      \
-		int temp;						      \
-									      \
-		do {							      \
-			__asm__ __volatile__(				      \
-			"	.set	"MIPS_ISA_LEVEL"		\n"   \
-			"	ll	%1, %2	# atomic_" #op "_return	\n"   \
-			"	" #asm_op " %0, %1, %3			\n"   \
-			"	sc	%0, %2				\n"   \
-			"	.set	mips0				\n"   \
-			: "=&r" (result), "=&r" (temp),			      \
-			  "+" GCC_OFF_SMALL_ASM() (v->counter)		      \
-			: "Ir" (i));					      \
-		} while (unlikely(!result));				      \
-									      \
-		result = temp; result c_op i;				      \
 	} else {							      \
 		unsigned long flags;					      \
 									      \
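One subtlety in the _return variant above: sc consumes the register
holding the freshly computed result, overwriting it with a 0/1 success
flag. That is why #asm_op appears a second time after the branch: once
the sc has succeeded, the operation is redone against the value still
held from the ll (in %1) to regenerate the return value. A hand
expansion for the add case, a sketch under the same assumptions as the
previous example:

static __inline__ int atomic_add_return_relaxed(int i, atomic_t *v)
{
	int result;

	if (kernel_uses_llsc) {
		int temp;

		__asm__ __volatile__(
		"	.set	"MIPS_ISA_LEVEL"			\n"
		"1:	ll	%1, %2		# temp = v->counter	\n"
		"	addu	%0, %1, %3	# result = temp + i	\n"
		"	sc	%0, %2		# %0 becomes 0/1 flag	\n"
		"\t" __scbeqz "	%0, 1b		# retry if sc failed	\n"
		"	addu	%0, %1, %3	# recompute result	\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp),
		  "+" GCC_OFF_SMALL_ASM() (v->counter)
		: "Ir" (i));
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		result = v->counter;
		result += i;
		v->counter = result;
		raw_local_irq_restore(flags);
	}

	return result;
}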
@@ -131,36 +113,20 @@ static __inline__ int atomic_fetch_##op##_relaxed(int i, atomic_t * v)	      \
 {									      \
 	int result;							      \
 									      \
-	if (kernel_uses_llsc && R10000_LLSC_WAR) {			      \
+	if (kernel_uses_llsc) {						      \
 		int temp;						      \
 									      \
 		__asm__ __volatile__(					      \
-		"	.set	arch=r4000				\n"   \
+		"	.set	"MIPS_ISA_LEVEL"			\n"   \
 		"1:	ll	%1, %2		# atomic_fetch_" #op "	\n"   \
 		"	" #asm_op " %0, %1, %3				\n"   \
 		"	sc	%0, %2					\n"   \
-		"	beqzl	%0, 1b					\n"   \
+		"\t" __scbeqz "	%0, 1b					\n"   \
 		"	move	%0, %1					\n"   \
 		"	.set	mips0					\n"   \
 		: "=&r" (result), "=&r" (temp),				      \
 		  "+" GCC_OFF_SMALL_ASM() (v->counter)			      \
 		: "Ir" (i));						      \
-	} else if (kernel_uses_llsc) {					      \
-		int temp;						      \
-									      \
-		do {							      \
-			__asm__ __volatile__(				      \
-			"	.set	"MIPS_ISA_LEVEL"		\n"   \
-			"	ll	%1, %2	# atomic_fetch_" #op "	\n"   \
-			"	" #asm_op " %0, %1, %3			\n"   \
-			"	sc	%0, %2				\n"   \
-			"	.set	mips0				\n"   \
-			: "=&r" (result), "=&r" (temp),			      \
-			  "+" GCC_OFF_SMALL_ASM() (v->counter)		      \
-			: "Ir" (i));					      \
-		} while (unlikely(!result));				      \
-									      \
-		result = temp;						      \
 	} else {							      \
 		unsigned long flags;					      \
 									      \
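The _fetch_ variant differs from _return_ only in what it hands back:
the "move %0, %1" after the loop copies the unmodified value loaded by
ll, so the old value is returned rather than the new one. A quick
illustration with made-up values:

atomic_t v = ATOMIC_INIT(5);

int new = atomic_add_return(3, &v);	/* new == 8, v.counter == 8 */
int old = atomic_fetch_add(3, &v);	/* old == 8, v.counter == 11 */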
@@ -218,24 +184,7 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
 
 	smp_mb__before_llsc();
 
-	if (kernel_uses_llsc && R10000_LLSC_WAR) {
-		int temp;
-
-		__asm__ __volatile__(
-		"	.set	arch=r4000				\n"
-		"1:	ll	%1, %2		# atomic_sub_if_positive\n"
-		"	subu	%0, %1, %3				\n"
-		"	move	%1, %0					\n"
-		"	bltz	%0, 1f					\n"
-		"	sc	%1, %2					\n"
-		"	beqzl	%1, 1b					\n"
-		"1:							\n"
-		"	.set	mips0					\n"
-		: "=&r" (result), "=&r" (temp),
-		  "+" GCC_OFF_SMALL_ASM() (v->counter)
-		: "Ir" (i), GCC_OFF_SMALL_ASM() (v->counter)
-		: "memory");
-	} else if (kernel_uses_llsc) {
+	if (kernel_uses_llsc) {
 		int temp;
 
 		__asm__ __volatile__(
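The control flow in atomic_sub_if_positive() is worth spelling out: the
forward branch "bltz %0, 1f" skips the sc entirely when the subtraction
would go negative, leaving the counter untouched, while the reused
local label 1 serves both as the backward retry target (1b) and the
forward exit (1f). An annotated sketch of the post-patch loop body; the
function name is hypothetical and the comments are added, so this is
not verbatim kernel source:

static __inline__ int example_sub_if_positive(int i, atomic_t *v)
{
	int result, temp;

	__asm__ __volatile__(
	"	.set	"MIPS_ISA_LEVEL"			\n"
	"1:	ll	%1, %2		# temp = v->counter	\n"
	"	subu	%0, %1, %3	# result = temp - i	\n"
	"	move	%1, %0		# temp = result		\n"
	"	bltz	%0, 1f		# negative? skip store	\n"
	"	sc	%1, %2		# try to commit		\n"
	"\t" __scbeqz "	%1, 1b		# lost race: retry	\n"
	"1:							\n"
	"	.set	mips0					\n"
	: "=&r" (result), "=&r" (temp),
	  "+" GCC_OFF_SMALL_ASM() (v->counter)
	: "Ir" (i)
	: "memory");

	return result;
}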
@@ -245,7 +194,7 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
 		"	move	%1, %0					\n"
 		"	bltz	%0, 1f					\n"
 		"	sc	%1, %2					\n"
-		"	beqz	%1, 1b					\n"
+		"\t" __scbeqz "	%1, 1b					\n"
 		"1:							\n"
 		"	.set	mips0					\n"
 		: "=&r" (result), "=&r" (temp),
@@ -382,31 +331,18 @@ static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
 #define ATOMIC64_OP(op, c_op, asm_op)					      \
 static __inline__ void atomic64_##op(long i, atomic64_t * v)		      \
 {									      \
-	if (kernel_uses_llsc && R10000_LLSC_WAR) {			      \
+	if (kernel_uses_llsc) {						      \
 		long temp;						      \
 									      \
 		__asm__ __volatile__(					      \
-		"	.set	arch=r4000				\n"   \
+		"	.set	"MIPS_ISA_LEVEL"			\n"   \
 		"1:	lld	%0, %1		# atomic64_" #op "	\n"   \
 		"	" #asm_op " %0, %2				\n"   \
 		"	scd	%0, %1					\n"   \
-		"	beqzl	%0, 1b					\n"   \
+		"\t" __scbeqz "	%0, 1b					\n"   \
 		"	.set	mips0					\n"   \
 		: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter)	      \
 		: "Ir" (i));						      \
-	} else if (kernel_uses_llsc) {					      \
-		long temp;						      \
-									      \
-		do {							      \
-			__asm__ __volatile__(				      \
-			"	.set	"MIPS_ISA_LEVEL"		\n"   \
-			"	lld	%0, %1		# atomic64_" #op "\n" \
-			"	" #asm_op " %0, %2			\n"   \
-			"	scd	%0, %1				\n"   \
-			"	.set	mips0				\n"   \
-			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter)      \
-			: "Ir" (i));					      \
-		} while (unlikely(!temp));				      \
 	} else {							      \
 		unsigned long flags;					      \
 									      \
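The atomic64_* hunks below mirror the 32-bit changes one for one, with
the doubleword instructions lld/scd (and dsubu in
atomic64_sub_if_positive) standing in for ll/sc/subu. Usage is
symmetrical; a trivial illustration with made-up values, assuming a
64-bit kernel where the atomic64_* API is available:

atomic64_t v64 = ATOMIC64_INIT(5);

atomic64_add(3, &v64);			/* v64.counter == 8 */
long old = atomic64_fetch_sub(2, &v64);	/* old == 8, v64.counter == 6 */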
@@ -421,37 +357,20 @@ static __inline__ long atomic64_##op##_return_relaxed(long i, atomic64_t * v) \
 {									      \
 	long result;							      \
 									      \
-	if (kernel_uses_llsc && R10000_LLSC_WAR) {			      \
+	if (kernel_uses_llsc) {						      \
 		long temp;						      \
 									      \
 		__asm__ __volatile__(					      \
-		"	.set	arch=r4000				\n"   \
+		"	.set	"MIPS_ISA_LEVEL"			\n"   \
 		"1:	lld	%1, %2		# atomic64_" #op "_return\n"  \
 		"	" #asm_op " %0, %1, %3				\n"   \
 		"	scd	%0, %2					\n"   \
-		"	beqzl	%0, 1b					\n"   \
+		"\t" __scbeqz "	%0, 1b					\n"   \
 		"	" #asm_op " %0, %1, %3				\n"   \
 		"	.set	mips0					\n"   \
 		: "=&r" (result), "=&r" (temp),				      \
 		  "+" GCC_OFF_SMALL_ASM() (v->counter)			      \
 		: "Ir" (i));						      \
-	} else if (kernel_uses_llsc) {					      \
-		long temp;						      \
-									      \
-		do {							      \
-			__asm__ __volatile__(				      \
-			"	.set	"MIPS_ISA_LEVEL"		\n"   \
-			"	lld	%1, %2	# atomic64_" #op "_return\n"  \
-			"	" #asm_op " %0, %1, %3			\n"   \
-			"	scd	%0, %2				\n"   \
-			"	.set	mips0				\n"   \
-			: "=&r" (result), "=&r" (temp),			      \
-			  "=" GCC_OFF_SMALL_ASM() (v->counter)		      \
-			: "Ir" (i), GCC_OFF_SMALL_ASM() (v->counter)	      \
-			: "memory");					      \
-		} while (unlikely(!result));				      \
-									      \
-		result = temp; result c_op i;				      \
 	} else {							      \
 		unsigned long flags;					      \
 									      \
@@ -474,33 +393,16 @@ static __inline__ long atomic64_fetch_##op##_relaxed(long i, atomic64_t * v)  \
 		long temp;						      \
 									      \
 		__asm__ __volatile__(					      \
-		"	.set	arch=r4000				\n"   \
+		"	.set	"MIPS_ISA_LEVEL"			\n"   \
 		"1:	lld	%1, %2		# atomic64_fetch_" #op "\n"   \
 		"	" #asm_op " %0, %1, %3				\n"   \
 		"	scd	%0, %2					\n"   \
-		"	beqzl	%0, 1b					\n"   \
+		"\t" __scbeqz "	%0, 1b					\n"   \
 		"	move	%0, %1					\n"   \
 		"	.set	mips0					\n"   \
 		: "=&r" (result), "=&r" (temp),				      \
 		  "+" GCC_OFF_SMALL_ASM() (v->counter)			      \
 		: "Ir" (i));						      \
-	} else if (kernel_uses_llsc) {					      \
-		long temp;						      \
-									      \
-		do {							      \
-			__asm__ __volatile__(				      \
-			"	.set	"MIPS_ISA_LEVEL"		\n"   \
-			"	lld	%1, %2	# atomic64_fetch_" #op "\n"   \
-			"	" #asm_op " %0, %1, %3			\n"   \
-			"	scd	%0, %2				\n"   \
-			"	.set	mips0				\n"   \
-			: "=&r" (result), "=&r" (temp),			      \
-			  "=" GCC_OFF_SMALL_ASM() (v->counter)		      \
-			: "Ir" (i), GCC_OFF_SMALL_ASM() (v->counter)	      \
-			: "memory");					      \
-		} while (unlikely(!result));				      \
-									      \
-		result = temp;						      \
 	} else {							      \
 		unsigned long flags;					      \
 									      \
@@ -559,24 +461,7 @@ static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
 
 	smp_mb__before_llsc();
 
-	if (kernel_uses_llsc && R10000_LLSC_WAR) {
-		long temp;
-
-		__asm__ __volatile__(
-		"	.set	arch=r4000				\n"
-		"1:	lld	%1, %2		# atomic64_sub_if_positive\n"
-		"	dsubu	%0, %1, %3				\n"
-		"	move	%1, %0					\n"
-		"	bltz	%0, 1f					\n"
-		"	scd	%1, %2					\n"
-		"	beqzl	%1, 1b					\n"
-		"1:							\n"
-		"	.set	mips0					\n"
-		: "=&r" (result), "=&r" (temp),
-		  "=" GCC_OFF_SMALL_ASM() (v->counter)
-		: "Ir" (i), GCC_OFF_SMALL_ASM() (v->counter)
-		: "memory");
-	} else if (kernel_uses_llsc) {
+	if (kernel_uses_llsc) {
 		long temp;
 
 		__asm__ __volatile__(
@@ -586,7 +471,7 @@ static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
 		"	move	%1, %0					\n"
 		"	bltz	%0, 1f					\n"
 		"	scd	%1, %2					\n"
-		"	beqz	%1, 1b					\n"
+		"\t" __scbeqz "	%1, 1b					\n"
 		"1:							\n"
 		"	.set	mips0					\n"
 		: "=&r" (result), "=&r" (temp),
			