author     Max Filippov <jcmvbkbc@gmail.com>  2019-10-15 22:04:13 -0700
committer  Max Filippov <jcmvbkbc@gmail.com>  2019-11-26 11:33:38 -0800
commit     b387dc044efaa07cd8a47316c83fe2a5c08f9650 (patch)
tree       24aa130e27a872a6cb1527a4403f7c093a5f5ff9 /arch/xtensa
parent     cbc6e28703c44a321e9d8a8894ec11bc6e7e473d (diff)
download   linux-b387dc044efaa07cd8a47316c83fe2a5c08f9650.tar.gz
xtensa: use macros to generate *_bit and test_and_*_bit functions
Parameterize macros with function name, opcode and inversion pattern.
This reduces code duplication, removing two thirds of the definitions.
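
For example, on cores with XCHAL_HAVE_EXCLUSIVE, BIT_OP(set, "or", )
expands to the equivalent of the hand-written definition it replaces
(sketch of the expansion; the empty third argument means the mask is
not inverted):

	static inline void set_bit(unsigned int bit, volatile unsigned long *p)
	{
		unsigned long tmp;
		unsigned long mask = 1UL << (bit & 31);

		p += bit >> 5;

		__asm__ __volatile__(
				"1:     l32ex   %0, %2\n"
				"       or      %0, %0, %1\n"	/* insn = "or" */
				"       s32ex   %0, %2\n"
				"       getex   %0\n"
				"       beqz    %0, 1b\n"
				: "=&a" (tmp)
				: "a" (mask), "a" (p)		/* inv is empty */
				: "memory");
	}

BIT_OPS(clear, "and", ~) likewise generates clear_bit() and
test_and_clear_bit() with the mask inverted ("a" (~mask)).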

Signed-off-by: Max Filippov <jcmvbkbc@gmail.com>
Diffstat (limited to 'arch/xtensa')
-rw-r--r--  arch/xtensa/include/asm/bitops.h  321
1 file changed, 92 insertions(+), 229 deletions(-)
diff --git a/arch/xtensa/include/asm/bitops.h b/arch/xtensa/include/asm/bitops.h
index be8b2be5a98b..bfaad56870f6 100644
--- a/arch/xtensa/include/asm/bitops.h
+++ b/arch/xtensa/include/asm/bitops.h
@@ -98,247 +98,110 @@ static inline unsigned long __fls(unsigned long word)
 
 #if XCHAL_HAVE_EXCLUSIVE
 
-static inline void set_bit(unsigned int bit, volatile unsigned long *p)
-{
-	unsigned long tmp;
-	unsigned long mask = 1UL << (bit & 31);
-
-	p += bit >> 5;
-
-	__asm__ __volatile__(
-			"1:     l32ex   %0, %2\n"
-			"       or      %0, %0, %1\n"
-			"       s32ex   %0, %2\n"
-			"       getex   %0\n"
-			"       beqz    %0, 1b\n"
-			: "=&a" (tmp)
-			: "a" (mask), "a" (p)
-			: "memory");
-}
-
-static inline void clear_bit(unsigned int bit, volatile unsigned long *p)
-{
-	unsigned long tmp;
-	unsigned long mask = 1UL << (bit & 31);
-
-	p += bit >> 5;
-
-	__asm__ __volatile__(
-			"1:     l32ex   %0, %2\n"
-			"       and     %0, %0, %1\n"
-			"       s32ex   %0, %2\n"
-			"       getex   %0\n"
-			"       beqz    %0, 1b\n"
-			: "=&a" (tmp)
-			: "a" (~mask), "a" (p)
-			: "memory");
-}
-
-static inline void change_bit(unsigned int bit, volatile unsigned long *p)
-{
-	unsigned long tmp;
-	unsigned long mask = 1UL << (bit & 31);
-
-	p += bit >> 5;
-
-	__asm__ __volatile__(
-			"1:     l32ex   %0, %2\n"
-			"       xor     %0, %0, %1\n"
-			"       s32ex   %0, %2\n"
-			"       getex   %0\n"
-			"       beqz    %0, 1b\n"
-			: "=&a" (tmp)
-			: "a" (mask), "a" (p)
-			: "memory");
-}
-
-static inline int
-test_and_set_bit(unsigned int bit, volatile unsigned long *p)
-{
-	unsigned long tmp, value;
-	unsigned long mask = 1UL << (bit & 31);
-
-	p += bit >> 5;
-
-	__asm__ __volatile__(
-			"1:     l32ex   %1, %3\n"
-			"       or      %0, %1, %2\n"
-			"       s32ex   %0, %3\n"
-			"       getex   %0\n"
-			"       beqz    %0, 1b\n"
-			: "=&a" (tmp), "=&a" (value)
-			: "a" (mask), "a" (p)
-			: "memory");
-
-	return value & mask;
-}
-
-static inline int
-test_and_clear_bit(unsigned int bit, volatile unsigned long *p)
-{
-	unsigned long tmp, value;
-	unsigned long mask = 1UL << (bit & 31);
-
-	p += bit >> 5;
-
-	__asm__ __volatile__(
-			"1:     l32ex   %1, %3\n"
-			"       and     %0, %1, %2\n"
-			"       s32ex   %0, %3\n"
-			"       getex   %0\n"
-			"       beqz    %0, 1b\n"
-			: "=&a" (tmp), "=&a" (value)
-			: "a" (~mask), "a" (p)
-			: "memory");
-
-	return value & mask;
-}
-
-static inline int
-test_and_change_bit(unsigned int bit, volatile unsigned long *p)
-{
-	unsigned long tmp, value;
-	unsigned long mask = 1UL << (bit & 31);
-
-	p += bit >> 5;
-
-	__asm__ __volatile__(
-			"1:     l32ex   %1, %3\n"
-			"       xor     %0, %1, %2\n"
-			"       s32ex   %0, %3\n"
-			"       getex   %0\n"
-			"       beqz    %0, 1b\n"
-			: "=&a" (tmp), "=&a" (value)
-			: "a" (mask), "a" (p)
-			: "memory");
-
-	return value & mask;
+#define BIT_OP(op, insn, inv)						\
+static inline void op##_bit(unsigned int bit, volatile unsigned long *p)\
+{									\
+	unsigned long tmp;						\
+	unsigned long mask = 1UL << (bit & 31);				\
+									\
+	p += bit >> 5;							\
+									\
+	__asm__ __volatile__(						\
+			"1:     l32ex   %0, %2\n"			\
+			"      "insn"   %0, %0, %1\n"			\
+			"       s32ex   %0, %2\n"			\
+			"       getex   %0\n"				\
+			"       beqz    %0, 1b\n"			\
+			: "=&a" (tmp)					\
+			: "a" (inv mask), "a" (p)			\
+			: "memory");					\
+}
+
+#define TEST_AND_BIT_OP(op, insn, inv)					\
+static inline int							\
+test_and_##op##_bit(unsigned int bit, volatile unsigned long *p)	\
+{									\
+	unsigned long tmp, value;					\
+	unsigned long mask = 1UL << (bit & 31);				\
+									\
+	p += bit >> 5;							\
+									\
+	__asm__ __volatile__(						\
+			"1:     l32ex   %1, %3\n"			\
+			"      "insn"   %0, %1, %2\n"			\
+			"       s32ex   %0, %3\n"			\
+			"       getex   %0\n"				\
+			"       beqz    %0, 1b\n"			\
+			: "=&a" (tmp), "=&a" (value)			\
+			: "a" (inv mask), "a" (p)			\
+			: "memory");					\
+									\
+	return value & mask;						\
 }
 
 #elif XCHAL_HAVE_S32C1I
 
-static inline void set_bit(unsigned int bit, volatile unsigned long *p)
-{
-	unsigned long tmp, value;
-	unsigned long mask = 1UL << (bit & 31);
-
-	p += bit >> 5;
-
-	__asm__ __volatile__(
-			"1:     l32i    %1, %3, 0\n"
-			"       wsr     %1, scompare1\n"
-			"       or      %0, %1, %2\n"
-			"       s32c1i  %0, %3, 0\n"
-			"       bne     %0, %1, 1b\n"
-			: "=&a" (tmp), "=&a" (value)
-			: "a" (mask), "a" (p)
-			: "memory");
-}
-
-static inline void clear_bit(unsigned int bit, volatile unsigned long *p)
-{
-	unsigned long tmp, value;
-	unsigned long mask = 1UL << (bit & 31);
-
-	p += bit >> 5;
-
-	__asm__ __volatile__(
-			"1:     l32i    %1, %3, 0\n"
-			"       wsr     %1, scompare1\n"
-			"       and     %0, %1, %2\n"
-			"       s32c1i  %0, %3, 0\n"
-			"       bne     %0, %1, 1b\n"
-			: "=&a" (tmp), "=&a" (value)
-			: "a" (~mask), "a" (p)
-			: "memory");
+#define BIT_OP(op, insn, inv)						\
+static inline void op##_bit(unsigned int bit, volatile unsigned long *p)\
+{									\
+	unsigned long tmp, value;					\
+	unsigned long mask = 1UL << (bit & 31);				\
+									\
+	p += bit >> 5;							\
+									\
+	__asm__ __volatile__(						\
+			"1:     l32i    %1, %3, 0\n"			\
+			"       wsr     %1, scompare1\n"		\
+			"      "insn"   %0, %1, %2\n"			\
+			"       s32c1i  %0, %3, 0\n"			\
+			"       bne     %0, %1, 1b\n"			\
+			: "=&a" (tmp), "=&a" (value)			\
+			: "a" (inv mask), "a" (p)			\
+			: "memory");					\
+}
+
+#define TEST_AND_BIT_OP(op, insn, inv)					\
+static inline int							\
+test_and_##op##_bit(unsigned int bit, volatile unsigned long *p)	\
+{									\
+	unsigned long tmp, value;					\
+	unsigned long mask = 1UL << (bit & 31);				\
+									\
+	p += bit >> 5;							\
+									\
+	__asm__ __volatile__(						\
+			"1:     l32i    %1, %3, 0\n"			\
+			"       wsr     %1, scompare1\n"		\
+			"      "insn"   %0, %1, %2\n"			\
+			"       s32c1i  %0, %3, 0\n"			\
+			"       bne     %0, %1, 1b\n"			\
+			: "=&a" (tmp), "=&a" (value)			\
+			: "a" (inv mask), "a" (p)			\
+			: "memory");					\
+									\
+	return tmp & mask;						\
 }
 
-static inline void change_bit(unsigned int bit, volatile unsigned long *p)
-{
-	unsigned long tmp, value;
-	unsigned long mask = 1UL << (bit & 31);
-
-	p += bit >> 5;
-
-	__asm__ __volatile__(
-			"1:     l32i    %1, %3, 0\n"
-			"       wsr     %1, scompare1\n"
-			"       xor     %0, %1, %2\n"
-			"       s32c1i  %0, %3, 0\n"
-			"       bne     %0, %1, 1b\n"
-			: "=&a" (tmp), "=&a" (value)
-			: "a" (mask), "a" (p)
-			: "memory");
-}
+#else
 
-static inline int
-test_and_set_bit(unsigned int bit, volatile unsigned long *p)
-{
-	unsigned long tmp, value;
-	unsigned long mask = 1UL << (bit & 31);
-
-	p += bit >> 5;
-
-	__asm__ __volatile__(
-			"1:     l32i    %1, %3, 0\n"
-			"       wsr     %1, scompare1\n"
-			"       or      %0, %1, %2\n"
-			"       s32c1i  %0, %3, 0\n"
-			"       bne     %0, %1, 1b\n"
-			: "=&a" (tmp), "=&a" (value)
-			: "a" (mask), "a" (p)
-			: "memory");
-
-	return tmp & mask;
-}
+#define BIT_OP(op, insn, inv)
+#define TEST_AND_BIT_OP(op, insn, inv)
 
-static inline int
-test_and_clear_bit(unsigned int bit, volatile unsigned long *p)
-{
-	unsigned long tmp, value;
-	unsigned long mask = 1UL << (bit & 31);
-
-	p += bit >> 5;
-
-	__asm__ __volatile__(
-			"1:     l32i    %1, %3, 0\n"
-			"       wsr     %1, scompare1\n"
-			"       and     %0, %1, %2\n"
-			"       s32c1i  %0, %3, 0\n"
-			"       bne     %0, %1, 1b\n"
-			: "=&a" (tmp), "=&a" (value)
-			: "a" (~mask), "a" (p)
-			: "memory");
-
-	return tmp & mask;
-}
+#include <asm-generic/bitops/atomic.h>
 
-static inline int
-test_and_change_bit(unsigned int bit, volatile unsigned long *p)
-{
-	unsigned long tmp, value;
-	unsigned long mask = 1UL << (bit & 31);
-
-	p += bit >> 5;
-
-	__asm__ __volatile__(
-			"1:     l32i    %1, %3, 0\n"
-			"       wsr     %1, scompare1\n"
-			"       xor     %0, %1, %2\n"
-			"       s32c1i  %0, %3, 0\n"
-			"       bne     %0, %1, 1b\n"
-			: "=&a" (tmp), "=&a" (value)
-			: "a" (mask), "a" (p)
-			: "memory");
-
-	return tmp & mask;
-}
+#endif /* XCHAL_HAVE_S32C1I */
 
-#else
+#define BIT_OPS(op, insn, inv)		\
+	BIT_OP(op, insn, inv)		\
+	TEST_AND_BIT_OP(op, insn, inv)
 
-#include <asm-generic/bitops/atomic.h>
+BIT_OPS(set, "or", )
+BIT_OPS(clear, "and", ~)
+BIT_OPS(change, "xor", )
 
-#endif /* XCHAL_HAVE_S32C1I */
+#undef BIT_OPS
+#undef BIT_OP
+#undef TEST_AND_BIT_OP
 
 #include <asm-generic/bitops/find.h>
 #include <asm-generic/bitops/le.h>
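
For reference, a minimal caller sketch (hypothetical example, not part of
this patch): the generated functions keep the standard Linux atomic bitops
interface, operating on an array of unsigned long words.

	/* Hypothetical usage sketch, not from this patch. */
	static unsigned long flags[2];

	static void example(void)
	{
		int was_set;

		set_bit(5, flags);		/* atomically set bit 5 of flags[0] */
		change_bit(36, flags);		/* toggle bit 4 of flags[1], since 36 >> 5 == 1 */
		was_set = test_and_clear_bit(5, flags);	/* nonzero if bit 5 was set before */
	}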