x86/asm: 'Simplify' GEN_*_RMWcc() macros
author Peter Zijlstra <peterz@infradead.org>
Wed, 3 Oct 2018 10:34:10 +0000 (12:34 +0200)
committer Ingo Molnar <mingo@kernel.org>
Tue, 16 Oct 2018 15:33:54 +0000 (17:33 +0200)
Currently the GEN_*_RMWcc() macros include a return statement, which
pretty much mandates we directly wrap them in an (inline) function.

Macros with return statements are tricky and, as per the above, limit
use, so remove the return statement and make them
statement-expressions. This allows them to be used more widely.
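
For illustration, a minimal sketch of the difference (hypothetical
macro and function names, not the kernel ones):

/* Old style: the macro contains a return, so it can only be used as
 * the entire body of a wrapping function. */
#define DEC_AND_TEST_OLD(v)	do { return --(v) == 0; } while (0)

static inline bool dec_and_test(int *v)
{
	DEC_AND_TEST_OLD(*v);		/* the only legal way to use it */
}

/* New style: a GNU C statement-expression evaluates to its last
 * expression, so the result can feed any larger expression. */
#define DEC_AND_TEST_NEW(v)	({ bool c = (--(v) == 0); c; })

static inline void put_object(int *refs)
{
	if (DEC_AND_TEST_NEW(*refs))
		;			/* last reference gone: free the object */
}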

Also, shuffle the arguments a bit. Place the @cc argument 3rd; this
makes it consistent between UNARY and BINARY and, more importantly,
makes the @arg0 argument last.
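
For example, arch_atomic_sub_and_test() (see the atomic.h hunk below)
goes from the old calling convention to the new one:

	/* old: value constraint and value first, then @arg0 ("%0"), @cc last */
	GEN_BINARY_RMWcc(LOCK_PREFIX "subl", v->counter, "er", i, "%0", e);

	/* new: @cc 3rd, value last, @arg0 omitted (it defaults to "%[var]") */
	return GEN_BINARY_RMWcc(LOCK_PREFIX "subl", v->counter, e, "er", i);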

Since the @arg0 argument is now last, we can do CPP trickery and make
it an optional argument, simplifying the users; 17 out of 18
occurrences do not need this argument.
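
The trick, as implemented in the new rmwcc.h below: RMWcc_ARGS()
expands to the number of arguments it was given, and RMWcc_CONCAT()
pastes that count onto the macro name, so each call dispatches to the
variant matching its argument count:

#define __RMWcc_ARGS(_0, _1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _n, X...) _n
#define RMWcc_ARGS(X...) __RMWcc_ARGS(, ##X, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0)

#define __RMWcc_CONCAT(a, b) a ## b
#define RMWcc_CONCAT(a, b) __RMWcc_CONCAT(a, b)

#define GEN_UNARY_RMWcc(X...) RMWcc_CONCAT(GEN_UNARY_RMWcc_, RMWcc_ARGS(X))(X)

/*
 * GEN_UNARY_RMWcc(op, var, cc)       -> GEN_UNARY_RMWcc_3(op, var, cc)
 * GEN_UNARY_RMWcc(op, var, cc, arg0) -> GEN_UNARY_RMWcc_4(op, var, cc, arg0)
 *
 * GEN_UNARY_RMWcc_3() then simply supplies "%[var]" as the default @arg0.
 */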

Finally, change to asm symbolic operand names instead of numeric
operand ordering, which allows us to get rid of __BINARY_RMWcc_ARG and
gives cleaner code overall.
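
A minimal standalone sketch of the two operand-naming styles (x86 GCC
inline asm assumed; these helper functions are illustrative, not part
of the patch):

static inline void add_numeric(int *v, int i)
{
	/* "%0"/"%1" refer to operands by position, which is why the old
	 * macros needed __BINARY_RMWcc_ARG to emit " %1, " or " %2, "
	 * depending on how many output operands the variant had. */
	asm volatile ("addl %1, %0" : "+m" (*v) : "er" (i));
}

static inline void add_symbolic(int *v, int i)
{
	/* Symbolic names make the template independent of operand
	 * position, so the same "%[val], %[var]" string works in every
	 * variant. */
	asm volatile ("addl %[val], %[var]"
		      : [var] "+m" (*v)
		      : [val] "er" (i));
}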

Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Cc: JBeulich@suse.com
Cc: Linus Torvalds <torvalds@linux-foundation.org>
Cc: Andy Lutomirski <luto@amacapital.net>
Cc: Borislav Petkov <bp@alien8.de>
Cc: Brian Gerst <brgerst@gmail.com>
Cc: Denys Vlasenko <dvlasenk@redhat.com>
Cc: H. Peter Anvin <hpa@zytor.com>
Cc: Peter Zijlstra <peterz@infradead.org>
Cc: Thomas Gleixner <tglx@linutronix.de>
Cc: bp@alien8.de
Cc: hpa@linux.intel.com
Link: https://lkml.kernel.org/r/20181003130957.108960094@infradead.org
Signed-off-by: Ingo Molnar <mingo@kernel.org>
arch/x86/include/asm/atomic.h
arch/x86/include/asm/atomic64_64.h
arch/x86/include/asm/bitops.h
arch/x86/include/asm/local.h
arch/x86/include/asm/preempt.h
arch/x86/include/asm/refcount.h
arch/x86/include/asm/rmwcc.h

arch/x86/include/asm/atomic.h
index ce84388e540c918a01e31599bc78881a5b9d2954..ea3d95275b43580ff48ed96d1e7376455d20746e 100644
@@ -82,7 +82,7 @@ static __always_inline void arch_atomic_sub(int i, atomic_t *v)
  */
 static __always_inline bool arch_atomic_sub_and_test(int i, atomic_t *v)
 {
-       GEN_BINARY_RMWcc(LOCK_PREFIX "subl", v->counter, "er", i, "%0", e);
+       return GEN_BINARY_RMWcc(LOCK_PREFIX "subl", v->counter, e, "er", i);
 }
 #define arch_atomic_sub_and_test arch_atomic_sub_and_test
 
@@ -122,7 +122,7 @@ static __always_inline void arch_atomic_dec(atomic_t *v)
  */
 static __always_inline bool arch_atomic_dec_and_test(atomic_t *v)
 {
-       GEN_UNARY_RMWcc(LOCK_PREFIX "decl", v->counter, "%0", e);
+       return GEN_UNARY_RMWcc(LOCK_PREFIX "decl", v->counter, e);
 }
 #define arch_atomic_dec_and_test arch_atomic_dec_and_test
 
@@ -136,7 +136,7 @@ static __always_inline bool arch_atomic_dec_and_test(atomic_t *v)
  */
 static __always_inline bool arch_atomic_inc_and_test(atomic_t *v)
 {
-       GEN_UNARY_RMWcc(LOCK_PREFIX "incl", v->counter, "%0", e);
+       return GEN_UNARY_RMWcc(LOCK_PREFIX "incl", v->counter, e);
 }
 #define arch_atomic_inc_and_test arch_atomic_inc_and_test
 
@@ -151,7 +151,7 @@ static __always_inline bool arch_atomic_inc_and_test(atomic_t *v)
  */
 static __always_inline bool arch_atomic_add_negative(int i, atomic_t *v)
 {
-       GEN_BINARY_RMWcc(LOCK_PREFIX "addl", v->counter, "er", i, "%0", s);
+       return GEN_BINARY_RMWcc(LOCK_PREFIX "addl", v->counter, s, "er", i);
 }
 #define arch_atomic_add_negative arch_atomic_add_negative
 

arch/x86/include/asm/atomic64_64.h
index 5f851d92eecd9ee8eaad86c6b002937633e9144f..dadc20adba211bc962ec9874d81756615ee495c6 100644
@@ -73,7 +73,7 @@ static inline void arch_atomic64_sub(long i, atomic64_t *v)
  */
 static inline bool arch_atomic64_sub_and_test(long i, atomic64_t *v)
 {
-       GEN_BINARY_RMWcc(LOCK_PREFIX "subq", v->counter, "er", i, "%0", e);
+       return GEN_BINARY_RMWcc(LOCK_PREFIX "subq", v->counter, e, "er", i);
 }
 #define arch_atomic64_sub_and_test arch_atomic64_sub_and_test
 
@@ -115,7 +115,7 @@ static __always_inline void arch_atomic64_dec(atomic64_t *v)
  */
 static inline bool arch_atomic64_dec_and_test(atomic64_t *v)
 {
-       GEN_UNARY_RMWcc(LOCK_PREFIX "decq", v->counter, "%0", e);
+       return GEN_UNARY_RMWcc(LOCK_PREFIX "decq", v->counter, e);
 }
 #define arch_atomic64_dec_and_test arch_atomic64_dec_and_test
 
@@ -129,7 +129,7 @@ static inline bool arch_atomic64_dec_and_test(atomic64_t *v)
  */
 static inline bool arch_atomic64_inc_and_test(atomic64_t *v)
 {
-       GEN_UNARY_RMWcc(LOCK_PREFIX "incq", v->counter, "%0", e);
+       return GEN_UNARY_RMWcc(LOCK_PREFIX "incq", v->counter, e);
 }
 #define arch_atomic64_inc_and_test arch_atomic64_inc_and_test
 
@@ -144,7 +144,7 @@ static inline bool arch_atomic64_inc_and_test(atomic64_t *v)
  */
 static inline bool arch_atomic64_add_negative(long i, atomic64_t *v)
 {
-       GEN_BINARY_RMWcc(LOCK_PREFIX "addq", v->counter, "er", i, "%0", s);
+       return GEN_BINARY_RMWcc(LOCK_PREFIX "addq", v->counter, s, "er", i);
 }
 #define arch_atomic64_add_negative arch_atomic64_add_negative
 

arch/x86/include/asm/bitops.h
index 9f645ba57dbb263822600aae5d82138316c8f6e3..124f9195eb3ef5545f96fbb111ee4a111de5f0d0 100644
@@ -217,8 +217,7 @@ static __always_inline void change_bit(long nr, volatile unsigned long *addr)
  */
 static __always_inline bool test_and_set_bit(long nr, volatile unsigned long *addr)
 {
-       GEN_BINARY_RMWcc(LOCK_PREFIX __ASM_SIZE(bts),
-                        *addr, "Ir", nr, "%0", c);
+       return GEN_BINARY_RMWcc(LOCK_PREFIX __ASM_SIZE(bts), *addr, c, "Ir", nr);
 }
 
 /**
@@ -264,8 +263,7 @@ static __always_inline bool __test_and_set_bit(long nr, volatile unsigned long *
  */
 static __always_inline bool test_and_clear_bit(long nr, volatile unsigned long *addr)
 {
-       GEN_BINARY_RMWcc(LOCK_PREFIX __ASM_SIZE(btr),
-                        *addr, "Ir", nr, "%0", c);
+       return GEN_BINARY_RMWcc(LOCK_PREFIX __ASM_SIZE(btr), *addr, c, "Ir", nr);
 }
 
 /**
@@ -318,8 +316,7 @@ static __always_inline bool __test_and_change_bit(long nr, volatile unsigned lon
  */
 static __always_inline bool test_and_change_bit(long nr, volatile unsigned long *addr)
 {
-       GEN_BINARY_RMWcc(LOCK_PREFIX __ASM_SIZE(btc),
-                        *addr, "Ir", nr, "%0", c);
+       return GEN_BINARY_RMWcc(LOCK_PREFIX __ASM_SIZE(btc), *addr, c, "Ir", nr);
 }
 
 static __always_inline bool constant_test_bit(long nr, const volatile unsigned long *addr)

arch/x86/include/asm/local.h
index c91083c598457140925d1f0928a6c8fa4811004a..349a47acaa4a3524e25f056152cd701bd2c26224 100644
@@ -53,7 +53,7 @@ static inline void local_sub(long i, local_t *l)
  */
 static inline bool local_sub_and_test(long i, local_t *l)
 {
-       GEN_BINARY_RMWcc(_ASM_SUB, l->a.counter, "er", i, "%0", e);
+       return GEN_BINARY_RMWcc(_ASM_SUB, l->a.counter, e, "er", i);
 }
 
 /**
@@ -66,7 +66,7 @@ static inline bool local_sub_and_test(long i, local_t *l)
  */
 static inline bool local_dec_and_test(local_t *l)
 {
-       GEN_UNARY_RMWcc(_ASM_DEC, l->a.counter, "%0", e);
+       return GEN_UNARY_RMWcc(_ASM_DEC, l->a.counter, e);
 }
 
 /**
@@ -79,7 +79,7 @@ static inline bool local_dec_and_test(local_t *l)
  */
 static inline bool local_inc_and_test(local_t *l)
 {
-       GEN_UNARY_RMWcc(_ASM_INC, l->a.counter, "%0", e);
+       return GEN_UNARY_RMWcc(_ASM_INC, l->a.counter, e);
 }
 
 /**
@@ -93,7 +93,7 @@ static inline bool local_inc_and_test(local_t *l)
  */
 static inline bool local_add_negative(long i, local_t *l)
 {
-       GEN_BINARY_RMWcc(_ASM_ADD, l->a.counter, "er", i, "%0", s);
+       return GEN_BINARY_RMWcc(_ASM_ADD, l->a.counter, s, "er", i);
 }
 
 /**

arch/x86/include/asm/preempt.h
index 7f2dbd91fc743a516a0ad8e094680b13832f61ef..90cb2f36c042fec25dc765b4241d68ccb48a3eba 100644
@@ -88,7 +88,7 @@ static __always_inline void __preempt_count_sub(int val)
  */
 static __always_inline bool __preempt_count_dec_and_test(void)
 {
-       GEN_UNARY_RMWcc("decl", __preempt_count, __percpu_arg(0), e);
+       return GEN_UNARY_RMWcc("decl", __preempt_count, e, __percpu_arg([var]));
 }
 
 /*

arch/x86/include/asm/refcount.h
index c92909da0686e33e543f664dee086eda385f9c32..a8b5e1e133190ff1b8acf6d38795485d11fd061d 100644
@@ -79,16 +79,17 @@ static __always_inline void refcount_dec(refcount_t *r)
 static __always_inline __must_check
 bool refcount_sub_and_test(unsigned int i, refcount_t *r)
 {
-       GEN_BINARY_SUFFIXED_RMWcc(LOCK_PREFIX "subl",
-                                 "REFCOUNT_CHECK_LT_ZERO counter=\"%0\"",
-                                 r->refs.counter, "er", i, "%0", e, "cx");
+
+       return GEN_BINARY_SUFFIXED_RMWcc(LOCK_PREFIX "subl",
+                                        "REFCOUNT_CHECK_LT_ZERO counter=\"%[var]\"",
+                                        r->refs.counter, e, "er", i, "cx");
 }
 
 static __always_inline __must_check bool refcount_dec_and_test(refcount_t *r)
 {
-       GEN_UNARY_SUFFIXED_RMWcc(LOCK_PREFIX "decl",
-                                "REFCOUNT_CHECK_LT_ZERO counter=\"%0\"",
-                                r->refs.counter, "%0", e, "cx");
+       return GEN_UNARY_SUFFIXED_RMWcc(LOCK_PREFIX "decl",
+                                       "REFCOUNT_CHECK_LT_ZERO counter=\"%[var]\"",
+                                       r->refs.counter, e, "cx");
 }
 
 static __always_inline __must_check

arch/x86/include/asm/rmwcc.h
index 4914a3e7c8035538a167c0dc23a2a33afd4a1ca2..46ac84b506f5f92d39ef40f351b3d64e3fe00861 100644
@@ -2,56 +2,69 @@
 #ifndef _ASM_X86_RMWcc
 #define _ASM_X86_RMWcc
 
+/* This counts to 12. Any more, it will return 13th argument. */
+#define __RMWcc_ARGS(_0, _1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _n, X...) _n
+#define RMWcc_ARGS(X...) __RMWcc_ARGS(, ##X, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0)
+
+#define __RMWcc_CONCAT(a, b) a ## b
+#define RMWcc_CONCAT(a, b) __RMWcc_CONCAT(a, b)
+
 #define __CLOBBERS_MEM(clb...) "memory", ## clb
 
 #if !defined(__GCC_ASM_FLAG_OUTPUTS__) && defined(CC_HAVE_ASM_GOTO)
 
 /* Use asm goto */
 
-#define __GEN_RMWcc(fullop, var, cc, clobbers, ...)                    \
-do {                                                                   \
+#define __GEN_RMWcc(fullop, _var, cc, clobbers, ...)                   \
+({                                                                     \
+       bool c = false;                                                 \
        asm_volatile_goto (fullop "; j" #cc " %l[cc_label]"             \
-                       : : [counter] "m" (var), ## __VA_ARGS__         \
+                       : : [var] "m" (_var), ## __VA_ARGS__            \
                        : clobbers : cc_label);                         \
-       return 0;                                                       \
-cc_label:                                                              \
-       return 1;                                                       \
-} while (0)
-
-#define __BINARY_RMWcc_ARG     " %1, "
-
+       if (0) {                                                        \
+cc_label:      c = true;                                               \
+       }                                                               \
+       c;                                                              \
+})
 
 #else /* defined(__GCC_ASM_FLAG_OUTPUTS__) || !defined(CC_HAVE_ASM_GOTO) */
 
 /* Use flags output or a set instruction */
 
-#define __GEN_RMWcc(fullop, var, cc, clobbers, ...)                    \
-do {                                                                   \
+#define __GEN_RMWcc(fullop, _var, cc, clobbers, ...)                   \
+({                                                                     \
        bool c;                                                         \
        asm volatile (fullop CC_SET(cc)                                 \
-                       : [counter] "+m" (var), CC_OUT(cc) (c)          \
+                       : [var] "+m" (_var), CC_OUT(cc) (c)             \
                        : __VA_ARGS__ : clobbers);                      \
-       return c;                                                       \
-} while (0)
-
-#define __BINARY_RMWcc_ARG     " %2, "
+       c;                                                              \
+})
 
 #endif /* defined(__GCC_ASM_FLAG_OUTPUTS__) || !defined(CC_HAVE_ASM_GOTO) */
 
-#define GEN_UNARY_RMWcc(op, var, arg0, cc)                             \
+#define GEN_UNARY_RMWcc_4(op, var, cc, arg0)                           \
        __GEN_RMWcc(op " " arg0, var, cc, __CLOBBERS_MEM())
 
-#define GEN_UNARY_SUFFIXED_RMWcc(op, suffix, var, arg0, cc, clobbers...)\
-       __GEN_RMWcc(op " " arg0 "\n\t" suffix, var, cc,                 \
-                   __CLOBBERS_MEM(clobbers))
+#define GEN_UNARY_RMWcc_3(op, var, cc)                                 \
+       GEN_UNARY_RMWcc_4(op, var, cc, "%[var]")
 
-#define GEN_BINARY_RMWcc(op, var, vcon, val, arg0, cc)                 \
-       __GEN_RMWcc(op __BINARY_RMWcc_ARG arg0, var, cc,                \
-                   __CLOBBERS_MEM(), vcon (val))
+#define GEN_UNARY_RMWcc(X...) RMWcc_CONCAT(GEN_UNARY_RMWcc_, RMWcc_ARGS(X))(X)
+
+#define GEN_BINARY_RMWcc_6(op, var, cc, vcon, _val, arg0)              \
+       __GEN_RMWcc(op " %[val], " arg0, var, cc,                       \
+                   __CLOBBERS_MEM(), [val] vcon (_val))
+
+#define GEN_BINARY_RMWcc_5(op, var, cc, vcon, val)                     \
+       GEN_BINARY_RMWcc_6(op, var, cc, vcon, val, "%[var]")
+
+#define GEN_BINARY_RMWcc(X...) RMWcc_CONCAT(GEN_BINARY_RMWcc_, RMWcc_ARGS(X))(X)
+
+#define GEN_UNARY_SUFFIXED_RMWcc(op, suffix, var, cc, clobbers...)     \
+       __GEN_RMWcc(op " %[var]\n\t" suffix, var, cc,                   \
+                   __CLOBBERS_MEM(clobbers))
 
-#define GEN_BINARY_SUFFIXED_RMWcc(op, suffix, var, vcon, val, arg0, cc,        \
-                                 clobbers...)                          \
-       __GEN_RMWcc(op __BINARY_RMWcc_ARG arg0 "\n\t" suffix, var, cc,  \
-                   __CLOBBERS_MEM(clobbers), vcon (val))
+#define GEN_BINARY_SUFFIXED_RMWcc(op, suffix, var, cc, vcon, _val, clobbers...)\
+       __GEN_RMWcc(op " %[val], %[var]\n\t" suffix, var, cc,           \
+                   __CLOBBERS_MEM(clobbers), [val] vcon (_val))
 
 #endif /* _ASM_X86_RMWcc */