author     Matthew Wilcox (Oracle) <[email protected]>    2023-10-04 17:53:10 +0100
committer  Andrew Morton <[email protected]>              2023-10-18 14:34:17 -0700
commit     8da36b26e3d8640364a9e60e0b5c3fa3f55d298b (patch)
tree       5ca1eae6df34fbfac5981355c23522981a36b2b7
parent     ea845e3173f7552aae539aeb943cd19ebe90ba38 (diff)
mips: implement xor_unlock_is_negative_byte
Inspired by the mips test_and_change_bit(), this will surely be more
efficient than the generic one defined in filemap.c.

Link: https://lkml.kernel.org/r/[email protected]
Signed-off-by: Matthew Wilcox (Oracle) <[email protected]>
Cc: Albert Ou <[email protected]>
Cc: Alexander Gordeev <[email protected]>
Cc: Andreas Dilger <[email protected]>
Cc: Christian Borntraeger <[email protected]>
Cc: Christophe Leroy <[email protected]>
Cc: Geert Uytterhoeven <[email protected]>
Cc: Heiko Carstens <[email protected]>
Cc: Ivan Kokshaysky <[email protected]>
Cc: Matt Turner <[email protected]>
Cc: Michael Ellerman <[email protected]>
Cc: Nicholas Piggin <[email protected]>
Cc: Palmer Dabbelt <[email protected]>
Cc: Paul Walmsley <[email protected]>
Cc: Richard Henderson <[email protected]>
Cc: Sven Schnelle <[email protected]>
Cc: "Theodore Ts'o" <[email protected]>
Cc: Thomas Bogendoerfer <[email protected]>
Cc: Vasily Gorbik <[email protected]>
Signed-off-by: Andrew Morton <[email protected]>
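For reference, the generic fallback referred to above amounts to an atomic
fetch-xor with release ordering followed by a test of bit 7. A minimal
user-space model of that contract, assuming GCC/Clang __atomic builtins
rather than the kernel's atomic API (an illustration, not the upstream code):

#include <stdbool.h>
#include <stdio.h>

/* Model only: mirrors the helper's contract, not the kernel implementation. */
static bool model_xor_unlock_is_negative_byte(unsigned long mask,
					      unsigned long *p)
{
	/* Atomically XOR the mask in with release ordering and report
	 * whether bit 7 of the old value was set. */
	unsigned long old = __atomic_fetch_xor(p, mask, __ATOMIC_RELEASE);

	return (old & (1UL << 7)) != 0;
}

int main(void)
{
	unsigned long flags = (1UL << 0) | (1UL << 7);	/* "locked" + "waiters" */

	/* Clearing bit 0 reports that bit 7 was set at the time. */
	printf("%d\n", model_xor_unlock_is_negative_byte(1UL << 0, &flags));
	printf("%#lx\n", flags);	/* bit 0 cleared, bit 7 still set */
	return 0;
}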
-rw-r--r--   arch/mips/include/asm/bitops.h   26
-rw-r--r--   arch/mips/lib/bitops.c           14
2 files changed, 39 insertions(+), 1 deletion(-)
diff --git a/arch/mips/include/asm/bitops.h b/arch/mips/include/asm/bitops.h
index b4bf754f7db3..d98a05c478f4 100644
--- a/arch/mips/include/asm/bitops.h
+++ b/arch/mips/include/asm/bitops.h
@@ -73,7 +73,8 @@ int __mips_test_and_clear_bit(unsigned long nr,
volatile unsigned long *addr);
int __mips_test_and_change_bit(unsigned long nr,
volatile unsigned long *addr);
-
+bool __mips_xor_is_negative_byte(unsigned long mask,
+ volatile unsigned long *addr);
/*
* set_bit - Atomically set a bit in memory
@@ -279,6 +280,29 @@ static inline int test_and_change_bit(unsigned long nr,
return res;
}
+static inline bool xor_unlock_is_negative_byte(unsigned long mask,
+ volatile unsigned long *p)
+{
+ unsigned long orig;
+ bool res;
+
+ smp_mb__before_atomic();
+
+ if (!kernel_uses_llsc) {
+ res = __mips_xor_is_negative_byte(mask, p);
+ } else {
+ orig = __test_bit_op(*p, "%0",
+ "xor\t%1, %0, %3",
+ "ir"(mask));
+ res = (orig & BIT(7)) != 0;
+ }
+
+ smp_llsc_mb();
+
+ return res;
+}
+#define xor_unlock_is_negative_byte xor_unlock_is_negative_byte
+
#undef __bit_op
#undef __test_bit_op
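The __test_bit_op(*p, "%0", "xor\t%1, %0, %3", "ir"(mask)) line above expands
to a MIPS ll/xor/sc retry loop. A user-space C model of that retry structure,
with a compare-and-swap standing in for sc (illustrative only, not the macro's
real expansion; ordering in the kernel version comes from the surrounding
smp_mb__before_atomic()/smp_llsc_mb() pair):

#include <stdbool.h>

/* Illustration of the ll/sc retry loop; a CAS stands in for sc. */
static bool llsc_model_xor_is_negative_byte(unsigned long mask,
					    unsigned long *p)
{
	unsigned long old, new;

	do {
		old = __atomic_load_n(p, __ATOMIC_RELAXED);		/* "ll" */
		new = old ^ mask;
	} while (!__atomic_compare_exchange_n(p, &old, new, true,	/* "sc" */
					      __ATOMIC_RELAXED,
					      __ATOMIC_RELAXED));

	return (old & (1UL << 7)) != 0;	/* sign bit of the low byte */
}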
diff --git a/arch/mips/lib/bitops.c b/arch/mips/lib/bitops.c
index 116d0bd8b2ae..00aee98e9d54 100644
--- a/arch/mips/lib/bitops.c
+++ b/arch/mips/lib/bitops.c
@@ -146,3 +146,17 @@ int __mips_test_and_change_bit(unsigned long nr, volatile unsigned long *addr)
return res;
}
EXPORT_SYMBOL(__mips_test_and_change_bit);
+
+bool __mips_xor_is_negative_byte(unsigned long mask,
+ volatile unsigned long *addr)
+{
+ unsigned long flags;
+ unsigned long data;
+
+ raw_local_irq_save(flags);
+ data = *addr;
+ *addr = data ^ mask;
+ raw_local_irq_restore(flags);
+
+ return (data & BIT(7)) != 0;
+}
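For context on the calling convention: the point of the bit-7 test is that the
page-cache unlock path clears PG_locked and learns, in the same atomic
operation, whether PG_waiters (bit 7 by construction) is set. A sketch of that
caller, modeled on folio_unlock() in mm/filemap.c from this series (the
identifiers are the kernel's; the snippet is illustrative and not part of this
patch):

void folio_unlock(struct folio *folio)
{
	/* Bit 7 allows x86 to check the byte's sign bit */
	BUILD_BUG_ON(PG_waiters != 7);
	BUILD_BUG_ON(PG_locked > 7);
	VM_BUG_ON_FOLIO(!folio_test_locked(folio), folio);
	if (xor_unlock_is_negative_byte(1 << PG_locked,
					folio_flags(folio, 0)))
		folio_wake_bit(folio, PG_locked);
}

The interrupt-disabled fallback added above is only reached when
kernel_uses_llsc is false, matching the pattern of the other __mips_*
helpers in this file.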