Diffstat (limited to 'arch/powerpc/include/asm/bitops.h')
-rw-r--r--	arch/powerpc/include/asm/bitops.h	89
1 file changed, 81 insertions, 8 deletions
diff --git a/arch/powerpc/include/asm/bitops.h b/arch/powerpc/include/asm/bitops.h
index 11847b6a244e..a05d8c62cbea 100644
--- a/arch/powerpc/include/asm/bitops.h
+++ b/arch/powerpc/include/asm/bitops.h
@@ -71,19 +71,61 @@ static inline void fn(unsigned long mask,	\
 	__asm__ __volatile__ (			\
 	prefix					\
 "1:"	PPC_LLARX "%0,0,%3,0\n"			\
-	stringify_in_c(op) "%0,%0,%2\n"		\
+	#op "%I2 %0,%0,%2\n"			\
 	PPC_STLCX "%0,0,%3\n"			\
 	"bne- 1b\n"				\
 	: "=&r" (old), "+m" (*p)		\
-	: "r" (mask), "r" (p)			\
+	: "rK" (mask), "r" (p)			\
 	: "cc", "memory");			\
 }
 
 DEFINE_BITOP(set_bits, or, "")
-DEFINE_BITOP(clear_bits, andc, "")
-DEFINE_BITOP(clear_bits_unlock, andc, PPC_RELEASE_BARRIER)
 DEFINE_BITOP(change_bits, xor, "")
 
+static __always_inline bool is_rlwinm_mask_valid(unsigned long x)
+{
+	if (!x)
+		return false;
+	if (x & 1)
+		x = ~x;	// make the mask non-wrapping
+	x += x & -x;	// adding the low set bit results in at most one bit set
+
+	return !(x & (x - 1));
+}
+
+#define DEFINE_CLROP(fn, prefix)					\
+static inline void fn(unsigned long mask, volatile unsigned long *_p)	\
+{									\
+	unsigned long old;						\
+	unsigned long *p = (unsigned long *)_p;				\
+									\
+	if (IS_ENABLED(CONFIG_PPC32) &&					\
+	    __builtin_constant_p(mask) && is_rlwinm_mask_valid(~mask)) {\
+		asm volatile (						\
+			prefix						\
+		"1:"	"lwarx	%0,0,%3\n"				\
+			"rlwinm	%0,%0,0,%2\n"				\
+			"stwcx.	%0,0,%3\n"				\
+			"bne- 1b\n"					\
+			: "=&r" (old), "+m" (*p)			\
+			: "n" (~mask), "r" (p)				\
+			: "cc", "memory");				\
+	} else {							\
+		asm volatile (						\
+			prefix						\
+		"1:"	PPC_LLARX "%0,0,%3,0\n"				\
+			"andc %0,%0,%2\n"				\
+			PPC_STLCX "%0,0,%3\n"				\
+			"bne- 1b\n"					\
+			: "=&r" (old), "+m" (*p)			\
+			: "r" (mask), "r" (p)				\
+			: "cc", "memory");				\
+	}								\
+}
+
+DEFINE_CLROP(clear_bits, "")
+DEFINE_CLROP(clear_bits_unlock, PPC_RELEASE_BARRIER)
+
 static inline void arch_set_bit(int nr, volatile unsigned long *addr)
 {
 	set_bits(BIT_MASK(nr), addr + BIT_WORD(nr));
@@ -116,12 +158,12 @@ static inline unsigned long fn(			\
 	__asm__ __volatile__ (				\
 	prefix						\
 "1:"	PPC_LLARX "%0,0,%3,%4\n"			\
-	stringify_in_c(op) "%1,%0,%2\n"			\
+	#op "%I2 %1,%0,%2\n"				\
 	PPC_STLCX "%1,0,%3\n"				\
 	"bne- 1b\n"					\
 	postfix						\
 	: "=&r" (old), "=&r" (t)			\
-	: "r" (mask), "r" (p), "i" (IS_ENABLED(CONFIG_PPC64) ? eh : 0)	\
+	: "rK" (mask), "r" (p), "i" (IS_ENABLED(CONFIG_PPC64) ? eh : 0)	\
 	: "cc", "memory");				\
 	return (old & mask);				\
 }
@@ -130,11 +172,42 @@ DEFINE_TESTOP(test_and_set_bits, or, PPC_ATOMIC_ENTRY_BARRIER,
 	      PPC_ATOMIC_EXIT_BARRIER, 0)
 DEFINE_TESTOP(test_and_set_bits_lock, or, "",
 	      PPC_ACQUIRE_BARRIER, 1)
-DEFINE_TESTOP(test_and_clear_bits, andc, PPC_ATOMIC_ENTRY_BARRIER,
-	      PPC_ATOMIC_EXIT_BARRIER, 0)
 DEFINE_TESTOP(test_and_change_bits, xor, PPC_ATOMIC_ENTRY_BARRIER,
 	      PPC_ATOMIC_EXIT_BARRIER, 0)
 
+static inline unsigned long test_and_clear_bits(unsigned long mask, volatile unsigned long *_p)
+{
+	unsigned long old, t;
+	unsigned long *p = (unsigned long *)_p;
+
+	if (IS_ENABLED(CONFIG_PPC32) &&
+	    __builtin_constant_p(mask) && is_rlwinm_mask_valid(~mask)) {
+		asm volatile (
+			PPC_ATOMIC_ENTRY_BARRIER
+		"1:"	"lwarx %0,0,%3\n"
+			"rlwinm	%1,%0,0,%2\n"
+			"stwcx. %1,0,%3\n"
+			"bne- 1b\n"
+			PPC_ATOMIC_EXIT_BARRIER
+			: "=&r" (old), "=&r" (t)
+			: "n" (~mask), "r" (p)
+			: "cc", "memory");
+	} else {
+		asm volatile (
+			PPC_ATOMIC_ENTRY_BARRIER
+		"1:"	PPC_LLARX "%0,0,%3,0\n"
+			"andc	%1,%0,%2\n"
+			PPC_STLCX "%1,0,%3\n"
+			"bne- 1b\n"
+			PPC_ATOMIC_EXIT_BARRIER
+			: "=&r" (old), "=&r" (t)
+			: "r" (mask), "r" (p)
+			: "cc", "memory");
+	}
+
+	return (old & mask);
+}
+
 static inline int arch_test_and_set_bit(unsigned long nr,
 					volatile unsigned long *addr)
 {
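Note on the added helpers: rlwinm rA,rS,SH,mask rotates rS left by SH and ANDs the result with mask, so with SH=0 and a constant mask it clears bits in a single instruction instead of a mask load plus andc. The assembler only encodes masks that form one contiguous run of ones, possibly wrapping past bit 0, which is what is_rlwinm_mask_valid() tests with its carry trick. Below is a minimal host-side sketch (not part of the patch; plain userspace C with 32-bit masks, matching the PPC32-only use in the diff) that cross-checks the predicate against every MB/ME-encodable mask:

#include <assert.h>
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Copied from the patch, specialized to 32-bit for a host-side test. */
static bool is_rlwinm_mask_valid(uint32_t x)
{
	if (!x)
		return false;
	if (x & 1)
		x = ~x;		/* turn a wrapping run into a plain run */
	x += x & -x;		/* a low run of ones collapses into one carry bit */

	return !(x & (x - 1));	/* 0 or a single bit left => one run */
}

/* rlwinm rA,rS,SH,mask == rotate rS left by SH, then AND with mask.
 * The "& 31" keeps the SH=0 case free of an undefined 32-bit shift. */
static uint32_t rlwinm(uint32_t rs, unsigned int sh, uint32_t mask)
{
	return ((rs << sh) | (rs >> ((32 - sh) & 31))) & mask;
}

/* Build the mask that MB..ME encodes (IBM numbering: bit 0 is the MSB);
 * MB > ME denotes a mask that wraps around bit 31/bit 0. */
static uint32_t mb_me_mask(int mb, int me)
{
	uint32_t head = 0xffffffffu >> mb;		/* ones from IBM bit mb to the LSB */
	uint32_t tail = 0xffffffffu << (31 - me);	/* ones from the MSB to IBM bit me */

	return mb <= me ? head & tail : head | tail;
}

int main(void)
{
	/* Positive direction: every MB/ME-encodable mask must pass. */
	for (int mb = 0; mb < 32; mb++) {
		for (int me = 0; me < 32; me++) {
			uint32_t m = mb_me_mask(mb, me);

			assert(is_rlwinm_mask_valid(m));
			/* rlwinm with SH=0 is exactly an AND with the mask,
			 * which is what the patched clear path relies on. */
			assert(rlwinm(0xdeadbeef, 0, m) == (0xdeadbeefu & m));
		}
	}

	/* Negative samples: more than one run of ones must be rejected. */
	assert(!is_rlwinm_mask_valid(0x00000000));	/* empty mask */
	assert(!is_rlwinm_mask_valid(0x00000005));	/* 101b: two runs */
	assert(!is_rlwinm_mask_valid(0xffff0002));	/* gap at bit 0: not a wrap */

	puts("is_rlwinm_mask_valid() matches MB/ME enumeration");
	return 0;
}

The carry trick works because adding the lowest set bit to a contiguous run of ones collapses the whole run into a single carry-out bit; any second run of ones survives the addition and fails the final power-of-two test.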
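Note on the constraint change in DEFINE_BITOP/DEFINE_TESTOP: "rK" lets gcc satisfy the mask operand either with a register or, via the PowerPC "K" constraint, with an unsigned 16-bit constant, and the "%I2" output modifier prints an "i" when operand 2 ends up as a constant, so "or" becomes "ori" and no instructions are spent loading the mask. A standalone sketch of the same trick under the same assumptions (PowerPC target only; the function names here are illustrative, not from the patch):

/* Build with a powerpc gcc, e.g. powerpc-linux-gnu-gcc -O2 -S. */
static inline unsigned long or_mask(unsigned long val, unsigned long mask)
{
	/* "rK": register, or an unsigned 16-bit constant.  "%I2" makes gcc
	 * emit an "i" when operand 2 is a constant, so the mnemonic becomes
	 * "ori" and the mask is encoded as an immediate. */
	asm ("or%I2 %0,%1,%2" : "=r" (val) : "r" (val), "rK" (mask));
	return val;
}

unsigned long constant_mask(unsigned long v)
{
	return or_mask(v, 0x80);	/* expected: ori 3,3,128 */
}

unsigned long runtime_mask(unsigned long v, unsigned long m)
{
	return or_mask(v, m);		/* expected: or 3,3,4 */
}

Built at -O2, constant_mask() should assemble to a single ori while runtime_mask() keeps the two-register or; a constant that does not fit "K" simply falls back to the register alternative, which is why the generic DEFINE_BITOP path stays correct for any mask.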