diff options
Diffstat (limited to 'arch/arc/include/asm/bitops.h')
| -rw-r--r-- | arch/arc/include/asm/bitops.h | 60 | 
1 files changed, 57 insertions, 3 deletions
diff --git a/arch/arc/include/asm/bitops.h b/arch/arc/include/asm/bitops.h index 0352fb8d21b9..8da87feec59a 100644 --- a/arch/arc/include/asm/bitops.h +++ b/arch/arc/include/asm/bitops.h @@ -22,7 +22,7 @@  #include <asm/smp.h>  #endif -#if defined(CONFIG_ARC_HAS_LLSC) +#ifdef CONFIG_ARC_HAS_LLSC  /*   * Hardware assisted Atomic-R-M-W @@ -88,7 +88,7 @@ static inline int test_and_##op##_bit(unsigned long nr, volatile unsigned long *  	return (old & (1 << nr)) != 0;					\  } -#else	/* !CONFIG_ARC_HAS_LLSC */ +#elif !defined(CONFIG_ARC_PLAT_EZNPS)  /*   * Non hardware assisted Atomic-R-M-W @@ -139,7 +139,55 @@ static inline int test_and_##op##_bit(unsigned long nr, volatile unsigned long *  	return (old & (1UL << (nr & 0x1f))) != 0;			\  } -#endif /* CONFIG_ARC_HAS_LLSC */ +#else /* CONFIG_ARC_PLAT_EZNPS */ + +#define BIT_OP(op, c_op, asm_op)					\ +static inline void op##_bit(unsigned long nr, volatile unsigned long *m)\ +{									\ +	m += nr >> 5;							\ +									\ +	nr = (1UL << (nr & 0x1f));					\ +	if (asm_op == CTOP_INST_AAND_DI_R2_R2_R3)			\ +		nr = ~nr;						\ +									\ +	__asm__ __volatile__(						\ +	"	mov r2, %0\n"						\ +	"	mov r3, %1\n"						\ +	"	.word %2\n"						\ +	:								\ +	: "r"(nr), "r"(m), "i"(asm_op)					\ +	: "r2", "r3", "memory");					\ +} + +#define TEST_N_BIT_OP(op, c_op, asm_op)					\ +static inline int test_and_##op##_bit(unsigned long nr, volatile unsigned long *m)\ +{									\ +	unsigned long old;						\ +									\ +	m += nr >> 5;							\ +									\ +	nr = old = (1UL << (nr & 0x1f));				\ +	if (asm_op == CTOP_INST_AAND_DI_R2_R2_R3)			\ +		old = ~old;						\ +									\ +	/* Explicit full memory barrier needed before/after */		\ +	smp_mb();							\ +									\ +	__asm__ __volatile__(						\ +	"	mov r2, %0\n"						\ +	"	mov r3, %1\n"						\ +	"       .word %2\n"						\ +	"	mov %0, r2"						\ +	: "+r"(old)							\ +	: "r"(m), "i"(asm_op)						\ +	: "r2", "r3", "memory");					\ +									\ +	smp_mb();							\ +									\ +	return (old & nr) != 0;					\ +} + +#endif /* CONFIG_ARC_PLAT_EZNPS */  /***************************************   * Non atomic variants @@ -181,9 +229,15 @@ static inline int __test_and_##op##_bit(unsigned long nr, volatile unsigned long  	/* __test_and_set_bit(), __test_and_clear_bit(), __test_and_change_bit() */\  	__TEST_N_BIT_OP(op, c_op, asm_op) +#ifndef CONFIG_ARC_PLAT_EZNPS  BIT_OPS(set, |, bset)  BIT_OPS(clear, & ~, bclr)  BIT_OPS(change, ^, bxor) +#else +BIT_OPS(set, |, CTOP_INST_AOR_DI_R2_R2_R3) +BIT_OPS(clear, & ~, CTOP_INST_AAND_DI_R2_R2_R3) +BIT_OPS(change, ^, CTOP_INST_AXOR_DI_R2_R2_R3) +#endif  /*   * This routine doesn't need to be atomic.  |