author		Linus Torvalds <torvalds@linux-foundation.org>	2020-05-23 11:21:47 -0700
committer	Linus Torvalds <torvalds@linux-foundation.org>	2020-05-23 11:21:47 -0700
commit		423b8baf18a8c03f2d6fa99aa447ed0da189bb95
tree		68d166214b4cde3da55a1159df6fdab6a4a4a568 /arch/x86/include/asm/bitops.h
parent		23f0dac848412dafd197566b62d831d5a68b5b6b
parent		ca6edee6c6e3fe4ec143ee1964593d755ddbee62
Merge branch 'akpm' (patches from Andrew)
Merge misc fixes from Andrew Morton:
"11 fixes"
* emailed patches from Andrew Morton <akpm@linux-foundation.org>:
MAINTAINERS: add files related to kdump
z3fold: fix use-after-free when freeing handles
sparc32: use PUD rather than PGD to get PMD in srmmu_nocache_init()
MAINTAINERS: update email address for Naoya Horiguchi
sh: include linux/time_types.h for sockios
kasan: disable branch tracing for core runtime
selftests/vm/write_to_hugetlbfs.c: fix unused variable warning
selftests/vm/.gitignore: add mremap_dontunmap
rapidio: fix an error in get_user_pages_fast() error handling
x86: bitops: fix build regression
device-dax: don't leak kernel memory to user space after unloading kmem
Diffstat (limited to 'arch/x86/include/asm/bitops.h')
-rw-r--r--	arch/x86/include/asm/bitops.h	12
1 file changed, 6 insertions(+), 6 deletions(-)
diff --git a/arch/x86/include/asm/bitops.h b/arch/x86/include/asm/bitops.h
index 53f246e9df5a..0367efdc5b7a 100644
--- a/arch/x86/include/asm/bitops.h
+++ b/arch/x86/include/asm/bitops.h
@@ -52,9 +52,9 @@ static __always_inline void
 arch_set_bit(long nr, volatile unsigned long *addr)
 {
 	if (__builtin_constant_p(nr)) {
-		asm volatile(LOCK_PREFIX "orb %1,%0"
+		asm volatile(LOCK_PREFIX "orb %b1,%0"
 			: CONST_MASK_ADDR(nr, addr)
-			: "iq" (CONST_MASK(nr) & 0xff)
+			: "iq" (CONST_MASK(nr))
 			: "memory");
 	} else {
 		asm volatile(LOCK_PREFIX __ASM_SIZE(bts) " %1,%0"
@@ -72,9 +72,9 @@ static __always_inline void
 arch_clear_bit(long nr, volatile unsigned long *addr)
 {
 	if (__builtin_constant_p(nr)) {
-		asm volatile(LOCK_PREFIX "andb %1,%0"
+		asm volatile(LOCK_PREFIX "andb %b1,%0"
 			: CONST_MASK_ADDR(nr, addr)
-			: "iq" (CONST_MASK(nr) ^ 0xff));
+			: "iq" (~CONST_MASK(nr)));
 	} else {
 		asm volatile(LOCK_PREFIX __ASM_SIZE(btr) " %1,%0"
 			: : RLONG_ADDR(addr), "Ir" (nr) : "memory");
@@ -123,9 +123,9 @@ static __always_inline void
 arch_change_bit(long nr, volatile unsigned long *addr)
 {
 	if (__builtin_constant_p(nr)) {
-		asm volatile(LOCK_PREFIX "xorb %1,%0"
+		asm volatile(LOCK_PREFIX "xorb %b1,%0"
 			: CONST_MASK_ADDR(nr, addr)
-			: "iq" ((u8)CONST_MASK(nr)));
+			: "iq" (CONST_MASK(nr)));
 	} else {
 		asm volatile(LOCK_PREFIX __ASM_SIZE(btc) " %1,%0"
 			: : RLONG_ADDR(addr), "Ir" (nr) : "memory");
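
Background on the "x86: bitops: fix build regression" hunks above: the "%b1" operand modifier tells the compiler to print the byte form of operand 1, i.e. an 8-bit register name such as %al when a "q" register is chosen, or the bare immediate when "i" is chosen, so the operand always matches the byte-sized orb/andb/xorb instructions. Because only the low byte of the operand is ever emitted, the explicit narrowing in C ("& 0xff", "^ 0xff", "(u8)") is no longer needed; ~CONST_MASK(nr) and CONST_MASK(nr) ^ 0xff agree in the low 8 bits since CONST_MASK(nr) sets exactly one of bits 0..7. The following is a minimal user-space sketch of the same pattern, not kernel code: CONST_MASK() follows what I believe is the kernel's definition, while BYTE_ADDR() is a made-up, simplified stand-in for the kernel's CONST_MASK_ADDR()/WBYTE_ADDR() machinery.

	/* Stand-alone x86 sketch (GNU C, illustration only). */
	#include <stdio.h>

	#define CONST_MASK(nr)		(1 << ((nr) & 7))
	#define BYTE_ADDR(addr, nr)	(((volatile unsigned char *)(addr))[(nr) >> 3])

	static inline void set_bit_const(long nr, volatile unsigned long *addr)
	{
		/*
		 * "%b1" prints the byte name of operand 1 (e.g. %al) or the
		 * plain immediate, so it always assembles against "orb".
		 */
		asm volatile("lock orb %b1,%0"
			     : "+m" (BYTE_ADDR(addr, nr))
			     : "iq" (CONST_MASK(nr))
			     : "memory");
	}

	int main(void)
	{
		unsigned long word = 0;

		set_bit_const(11, &word);	/* byte 1, bit 3 */
		printf("%#lx\n", word);		/* prints 0x800 */
		return 0;
	}

Built with gcc or clang on x86-64 at -O2, the constant argument typically folds to an immediate and the store becomes a single locked byte OR, mirroring the __builtin_constant_p() fast path in the kernel's arch_set_bit().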