author	Uros Bizjak <ubizjak@gmail.com>	2025-09-07 20:33:38 +0200
committer	Borislav Petkov (AMD) <bp@alien8.de>	2025-09-08 15:38:06 +0200
commit	c6c973dbfa5e34b1572bcd1852adcad1b5d08fab (patch)
tree	d7f0dfbf593ee77c79d6d3efe69c5877c446de98 /arch/x86/include/asm/bitops.h
parent	13bdfb53aa04eeb8022af87288c5bc0a5d13a834 (diff)
x86/asm: Remove code depending on __GCC_ASM_FLAG_OUTPUTS__
The minimum supported GCC version is 8.1, which supports flag output operands and always defines the __GCC_ASM_FLAG_OUTPUTS__ macro.

Remove the code that depends on __GCC_ASM_FLAG_OUTPUTS__ and use the "=@ccCOND" flag output operands directly.

Use the equivalent "=@ccz" flag output operand instead of "=@cce" for the CMPXCHG8B and CMPXCHG16B instructions. These instructions set a single flag bit - the Zero flag - and "=@ccz" distinguishes this use from comparison instructions, where a set Zero flag indeed means that the compared values are equal.

Signed-off-by: Uros Bizjak <ubizjak@gmail.com>
Signed-off-by: Borislav Petkov (AMD) <bp@alien8.de>
Link: https://lore.kernel.org/r/20250905121723.GCaLrU04lP2A50PT-B@fat_crate.local
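For readers unfamiliar with flag output operands, the standalone sketch below (an illustration only, not part of the patch; the test_and_set_bit32() name and the 32-bit width are invented for the example) shows the pattern this file now uses unconditionally: BTS copies the bit's previous value into the Carry flag, and "=@ccc" hands that flag to the compiler as a bool, with no SETcc-emitting CC_SET()/CC_OUT() fallback needed on GCC >= 8.1 (or a Clang that supports flag outputs). x86 only, of course:

#include <stdbool.h>

static inline bool test_and_set_bit32(unsigned int nr, volatile unsigned int *word)
{
	bool oldbit;

	/* CF after BTS holds the old bit value; "=@ccc" captures it directly. */
	asm volatile("btsl %2,%1"
		     : "=@ccc" (oldbit), "+m" (*word)
		     : "Ir" (nr)
		     : "memory");
	return oldbit;
}

int main(void)
{
	unsigned int w = 0;

	bool first  = test_and_set_bit32(3, &w);	/* false, w becomes 0x8 */
	bool second = test_and_set_bit32(3, &w);	/* true,  w stays 0x8  */

	return (first || !second) ? 1 : 0;		/* 0 on success */
}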
Diffstat (limited to 'arch/x86/include/asm/bitops.h')
-rw-r--r--  arch/x86/include/asm/bitops.h | 18
1 file changed, 6 insertions(+), 12 deletions(-)
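The "=@ccz" half of the change lands in other files (this diffstat covers only bitops.h), but the idea can be sketched with plain CMPXCHG rather than CMPXCHG8B/CMPXCHG16B. The cmpxchg_u64() helper below is a hypothetical, x86-64-only illustration, not the kernel's try_cmpxchg():

#include <stdbool.h>

static inline bool cmpxchg_u64(volatile unsigned long long *ptr,
			       unsigned long long old,
			       unsigned long long new)
{
	bool success;

	/*
	 * Success is reported via ZF, so "=@ccz" names exactly the flag
	 * being tested; the commit prefers it over the comparison-flavoured
	 * "=@cce", especially for CMPXCHG8B/16B, which set only ZF.
	 */
	asm volatile("lock; cmpxchgq %3,%1"
		     : "=@ccz" (success), "+m" (*ptr), "+a" (old)
		     : "r" (new)
		     : "memory");
	return success;
}

On success *ptr now holds new; a real helper would also hand the observed old value back to the caller, as the kernel's try_cmpxchg() does.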
diff --git a/arch/x86/include/asm/bitops.h b/arch/x86/include/asm/bitops.h
index eebbc8889e70..33153dcd119c 100644
--- a/arch/x86/include/asm/bitops.h
+++ b/arch/x86/include/asm/bitops.h
@@ -99,8 +99,7 @@ static __always_inline bool arch_xor_unlock_is_negative_byte(unsigned long mask,
{
bool negative;
asm_inline volatile(LOCK_PREFIX "xorb %2,%1"
- CC_SET(s)
- : CC_OUT(s) (negative), WBYTE_ADDR(addr)
+ : "=@ccs" (negative), WBYTE_ADDR(addr)
: "iq" ((char)mask) : "memory");
return negative;
}
@@ -149,8 +148,7 @@ arch___test_and_set_bit(unsigned long nr, volatile unsigned long *addr)
bool oldbit;
asm(__ASM_SIZE(bts) " %2,%1"
- CC_SET(c)
- : CC_OUT(c) (oldbit)
+ : "=@ccc" (oldbit)
: ADDR, "Ir" (nr) : "memory");
return oldbit;
}
@@ -175,8 +173,7 @@ arch___test_and_clear_bit(unsigned long nr, volatile unsigned long *addr)
bool oldbit;
asm volatile(__ASM_SIZE(btr) " %2,%1"
- CC_SET(c)
- : CC_OUT(c) (oldbit)
+ : "=@ccc" (oldbit)
: ADDR, "Ir" (nr) : "memory");
return oldbit;
}
@@ -187,8 +184,7 @@ arch___test_and_change_bit(unsigned long nr, volatile unsigned long *addr)
bool oldbit;
asm volatile(__ASM_SIZE(btc) " %2,%1"
- CC_SET(c)
- : CC_OUT(c) (oldbit)
+ : "=@ccc" (oldbit)
: ADDR, "Ir" (nr) : "memory");
return oldbit;
@@ -211,8 +207,7 @@ static __always_inline bool constant_test_bit_acquire(long nr, const volatile un
bool oldbit;
asm volatile("testb %2,%1"
- CC_SET(nz)
- : CC_OUT(nz) (oldbit)
+ : "=@ccnz" (oldbit)
: "m" (((unsigned char *)addr)[nr >> 3]),
"i" (1 << (nr & 7))
:"memory");
@@ -225,8 +220,7 @@ static __always_inline bool variable_test_bit(long nr, volatile const unsigned l
bool oldbit;
asm volatile(__ASM_SIZE(bt) " %2,%1"
- CC_SET(c)
- : CC_OUT(c) (oldbit)
+ : "=@ccc" (oldbit)
: "m" (*(unsigned long *)addr), "Ir" (nr) : "memory");
return oldbit;