x86, bitops: remove use of "sbb" to return CF
Use SETC instead of SBB to return the value of CF from assembly. Using SETcc enables uniformity with other flags-returning pieces of assembly code.

Signed-off-by: H. Peter Anvin <hpa@zytor.com>
Link: http://lkml.kernel.org/r/1465414726-197858-2-git-send-email-hpa@linux.intel.com
Reviewed-by: Andy Lutomirski <luto@kernel.org>
Reviewed-by: Borislav Petkov <bp@suse.de>
Acked-by: Peter Zijlstra (Intel) <peterz@infradead.org>
This commit is contained in:
parent commit: f5967101e9
this commit:   2823d4da5d
@ -230,11 +230,11 @@ test_and_set_bit_lock(long nr, volatile unsigned long *addr)
|
|||||||
*/
|
*/
|
||||||
static __always_inline int __test_and_set_bit(long nr, volatile unsigned long *addr)
|
static __always_inline int __test_and_set_bit(long nr, volatile unsigned long *addr)
|
||||||
{
|
{
|
||||||
int oldbit;
|
unsigned char oldbit;
|
||||||
|
|
||||||
asm("bts %2,%1\n\t"
|
asm("bts %2,%1\n\t"
|
||||||
"sbb %0,%0"
|
"setc %0"
|
||||||
: "=r" (oldbit), ADDR
|
: "=qm" (oldbit), ADDR
|
||||||
: "Ir" (nr));
|
: "Ir" (nr));
|
||||||
return oldbit;
|
return oldbit;
|
||||||
}
|
}
|
||||||
@ -270,11 +270,11 @@ static __always_inline int test_and_clear_bit(long nr, volatile unsigned long *a
|
|||||||
*/
|
*/
|
||||||
static __always_inline int __test_and_clear_bit(long nr, volatile unsigned long *addr)
|
static __always_inline int __test_and_clear_bit(long nr, volatile unsigned long *addr)
|
||||||
{
|
{
|
||||||
int oldbit;
|
unsigned char oldbit;
|
||||||
|
|
||||||
asm volatile("btr %2,%1\n\t"
|
asm volatile("btr %2,%1\n\t"
|
||||||
"sbb %0,%0"
|
"setc %0"
|
||||||
: "=r" (oldbit), ADDR
|
: "=qm" (oldbit), ADDR
|
||||||
: "Ir" (nr));
|
: "Ir" (nr));
|
||||||
return oldbit;
|
return oldbit;
|
||||||
}
|
}
|
||||||
@ -282,11 +282,11 @@ static __always_inline int __test_and_clear_bit(long nr, volatile unsigned long
|
|||||||
/* WARNING: non atomic and it can be reordered! */
|
/* WARNING: non atomic and it can be reordered! */
|
||||||
static __always_inline int __test_and_change_bit(long nr, volatile unsigned long *addr)
|
static __always_inline int __test_and_change_bit(long nr, volatile unsigned long *addr)
|
||||||
{
|
{
|
||||||
int oldbit;
|
unsigned char oldbit;
|
||||||
|
|
||||||
asm volatile("btc %2,%1\n\t"
|
asm volatile("btc %2,%1\n\t"
|
||||||
"sbb %0,%0"
|
"setc %0"
|
||||||
: "=r" (oldbit), ADDR
|
: "=qm" (oldbit), ADDR
|
||||||
: "Ir" (nr) : "memory");
|
: "Ir" (nr) : "memory");
|
||||||
|
|
||||||
return oldbit;
|
return oldbit;
|
||||||
@ -313,11 +313,11 @@ static __always_inline int constant_test_bit(long nr, const volatile unsigned lo
|
|||||||
|
|
||||||
static __always_inline int variable_test_bit(long nr, volatile const unsigned long *addr)
|
static __always_inline int variable_test_bit(long nr, volatile const unsigned long *addr)
|
||||||
{
|
{
|
||||||
int oldbit;
|
unsigned char oldbit;
|
||||||
|
|
||||||
asm volatile("bt %2,%1\n\t"
|
asm volatile("bt %2,%1\n\t"
|
||||||
"sbb %0,%0"
|
"setc %0"
|
||||||
: "=r" (oldbit)
|
: "=qm" (oldbit)
|
||||||
: "m" (*(unsigned long *)addr), "Ir" (nr));
|
: "m" (*(unsigned long *)addr), "Ir" (nr));
|
||||||
|
|
||||||
return oldbit;
|
return oldbit;
|
||||||
|
@ -510,9 +510,9 @@ do { \
|
|||||||
/* This is not atomic against other CPUs -- CPU preemption needs to be off */
/*
 * x86_test_and_clear_bit_percpu - test-and-clear a bit in a per-CPU variable
 * @bit: bit number to clear
 * @var: per-CPU variable (lvalue)
 *
 * BTR copies the old bit into CF before clearing it; SETC turns CF
 * into a 0/1 byte, hence old__ is an unsigned char with a "=qm"
 * (byte register or memory) output.  __percpu_arg() is the kernel's
 * per-CPU segment-override operand macro.
 */
#define x86_test_and_clear_bit_percpu(bit, var)				\
({									\
	unsigned char old__;						\
	asm volatile("btr %2,"__percpu_arg(1)"\n\tsetc %0"		\
		     : "=qm" (old__), "+m" (var)			\
		     : "dIr" (bit));					\
	old__;								\
})
||||||
@ -532,11 +532,11 @@ static __always_inline int x86_this_cpu_constant_test_bit(unsigned int nr,
|
|||||||
static inline int x86_this_cpu_variable_test_bit(int nr,
|
static inline int x86_this_cpu_variable_test_bit(int nr,
|
||||||
const unsigned long __percpu *addr)
|
const unsigned long __percpu *addr)
|
||||||
{
|
{
|
||||||
int oldbit;
|
unsigned char oldbit;
|
||||||
|
|
||||||
asm volatile("bt "__percpu_arg(2)",%1\n\t"
|
asm volatile("bt "__percpu_arg(2)",%1\n\t"
|
||||||
"sbb %0,%0"
|
"setc %0"
|
||||||
: "=r" (oldbit)
|
: "=qm" (oldbit)
|
||||||
: "m" (*(unsigned long *)addr), "Ir" (nr));
|
: "m" (*(unsigned long *)addr), "Ir" (nr));
|
||||||
|
|
||||||
return oldbit;
|
return oldbit;
|
||||||
|
@ -81,9 +81,9 @@ static inline int __const_sigismember(sigset_t *set, int _sig)
|
|||||||
|
|
||||||
/*
 * __gen_sigismember - test whether @_sig is a member of @set
 * @set:  signal set (bitmap of signals, bit _sig-1 represents _sig)
 * @_sig: signal number (1-based)
 *
 * BTL copies bit (_sig - 1) of the set into CF; SETC materializes CF
 * as 0/1 in a byte output ("=qm": byte register or memory), replacing
 * the older SBB all-ones/zero idiom.  __asm__ (rather than the bare
 * `asm` extension keyword) keeps this valid under strict ISO modes.
 */
static inline int __gen_sigismember(sigset_t *set, int _sig)
{
	unsigned char ret;

	__asm__("btl %2,%1\n\tsetc %0"
		: "=qm"(ret) : "m"(*set), "Ir"(_sig - 1) : "cc");
	return ret;
}
|
||||||
|
|
||||||
|
@ -79,10 +79,10 @@ static inline void sync_change_bit(long nr, volatile unsigned long *addr)
|
|||||||
*/
|
*/
|
||||||
static inline int sync_test_and_set_bit(long nr, volatile unsigned long *addr)
|
static inline int sync_test_and_set_bit(long nr, volatile unsigned long *addr)
|
||||||
{
|
{
|
||||||
int oldbit;
|
unsigned char oldbit;
|
||||||
|
|
||||||
asm volatile("lock; bts %2,%1\n\tsbbl %0,%0"
|
asm volatile("lock; bts %2,%1\n\tsetc %0"
|
||||||
: "=r" (oldbit), "+m" (ADDR)
|
: "=qm" (oldbit), "+m" (ADDR)
|
||||||
: "Ir" (nr) : "memory");
|
: "Ir" (nr) : "memory");
|
||||||
return oldbit;
|
return oldbit;
|
||||||
}
|
}
|
||||||
@ -97,10 +97,10 @@ static inline int sync_test_and_set_bit(long nr, volatile unsigned long *addr)
|
|||||||
*/
|
*/
|
||||||
static inline int sync_test_and_clear_bit(long nr, volatile unsigned long *addr)
|
static inline int sync_test_and_clear_bit(long nr, volatile unsigned long *addr)
|
||||||
{
|
{
|
||||||
int oldbit;
|
unsigned char oldbit;
|
||||||
|
|
||||||
asm volatile("lock; btr %2,%1\n\tsbbl %0,%0"
|
asm volatile("lock; btr %2,%1\n\tsetc %0"
|
||||||
: "=r" (oldbit), "+m" (ADDR)
|
: "=qm" (oldbit), "+m" (ADDR)
|
||||||
: "Ir" (nr) : "memory");
|
: "Ir" (nr) : "memory");
|
||||||
return oldbit;
|
return oldbit;
|
||||||
}
|
}
|
||||||
@ -115,10 +115,10 @@ static inline int sync_test_and_clear_bit(long nr, volatile unsigned long *addr)
|
|||||||
*/
|
*/
|
||||||
static inline int sync_test_and_change_bit(long nr, volatile unsigned long *addr)
|
static inline int sync_test_and_change_bit(long nr, volatile unsigned long *addr)
|
||||||
{
|
{
|
||||||
int oldbit;
|
unsigned char oldbit;
|
||||||
|
|
||||||
asm volatile("lock; btc %2,%1\n\tsbbl %0,%0"
|
asm volatile("lock; btc %2,%1\n\tsetc %0"
|
||||||
: "=r" (oldbit), "+m" (ADDR)
|
: "=qm" (oldbit), "+m" (ADDR)
|
||||||
: "Ir" (nr) : "memory");
|
: "Ir" (nr) : "memory");
|
||||||
return oldbit;
|
return oldbit;
|
||||||
}
|
}
|
||||||
|
@ -440,10 +440,7 @@ static inline unsigned long get_vflags(struct kernel_vm86_regs *regs)
|
|||||||
|
|
||||||
static inline int is_revectored(int nr, struct revectored_struct *bitmap)
|
static inline int is_revectored(int nr, struct revectored_struct *bitmap)
|
||||||
{
|
{
|
||||||
__asm__ __volatile__("btl %2,%1\n\tsbbl %0,%0"
|
return test_bit(nr, bitmap->__map);
|
||||||
:"=r" (nr)
|
|
||||||
:"m" (*bitmap), "r" (nr));
|
|
||||||
return nr;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#define val_byte(val, n) (((__u8 *)&val)[n])
|
#define val_byte(val, n) (((__u8 *)&val)[n])
|
||||||
|
Loading…
Reference in New Issue
Block a user