path: root/arch/mips/include/asm/cmpxchg.h
author    Linus Torvalds <torvalds@linux-foundation.org>  2019-10-12 14:16:51 -0700
committer Linus Torvalds <torvalds@linux-foundation.org>  2019-10-12 14:16:51 -0700
commit    63f9bff56beb718ac0a2eb8398a98220b1e119dc (patch)
tree      89f78c11dc48d745849baa75cabfa72107519795 /arch/mips/include/asm/cmpxchg.h
parent    db60a5a035aa8692dc7cee293356bdcc078fa7b7 (diff)
parent    2f2b4fd674cadd8c6b40eb629e140a14db4068fd (diff)
Merge tag 'mips_fixes_5.4_2' of git://git.kernel.org/pub/scm/linux/kernel/git/mips/linux
Pull MIPS fixes from Paul Burton:

 - Build fixes for CONFIG_OPTIMIZE_INLINING=y builds in which the compiler
   may choose not to inline __xchg() & __cmpxchg().

 - A build fix for Loongson configurations with GCC 9.x.

 - Expose some extra HWCAP bits to indicate support for various instruction
   set extensions to userland.

 - Fix bad stack access in firmware handling code for old SNI
   RM200/300/400 machines.

* tag 'mips_fixes_5.4_2' of git://git.kernel.org/pub/scm/linux/kernel/git/mips/linux:
  MIPS: Disable Loongson MMI instructions for kernel build
  MIPS: elf_hwcap: Export userspace ASEs
  MIPS: fw: sni: Fix out of bounds init of o32 stack
  MIPS: include: Mark __xchg as __always_inline
  MIPS: include: Mark __cmpxchg as __always_inline
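The first bullet is the change shown in the diff below. __xchg() and __cmpxchg() dispatch on a size argument and, for unsupported sizes, fall through to a call to a deliberately undefined function (the hunk header below mentions __xchg_called_with_bad_pointer()), so that branch must be optimized away. That only happens reliably when the helper is inlined and the constant size is propagated into it; with CONFIG_OPTIMIZE_INLINING=y a plain "inline" is merely a hint, hence __always_inline. The stand-alone C sketch below illustrates the pattern only; the names sketch_xchg() and __xchg_bad_size() and the use of GCC __sync builtins are illustrative assumptions, not the kernel's code.

#include <stdio.h>

#ifndef __always_inline
#define __always_inline inline __attribute__((__always_inline__))
#endif

/* Deliberately never defined: if a call to it survives into the object
 * file, the link fails, turning a bad size into a build-time error. */
extern unsigned long __xchg_bad_size(void);

static __always_inline unsigned long
sketch_xchg(volatile void *ptr, unsigned long x, int size)
{
	switch (size) {
	case 4:
		return __sync_lock_test_and_set((volatile unsigned int *)ptr,
						(unsigned int)x);
	case 8:
		return __sync_lock_test_and_set((volatile unsigned long long *)ptr,
						(unsigned long long)x);
	default:
		/* Reached only for unsupported sizes.  If the compiler
		 * declines to inline sketch_xchg() (the
		 * CONFIG_OPTIMIZE_INLINING=y case this merge fixes), an
		 * out-of-line copy keeps this call and the undefined
		 * symbol breaks the link. */
		return __xchg_bad_size();
	}
}

/* Callers pass sizeof(*(ptr)), so "size" is a compile-time constant. */
#define sketch_xchg_ptr(ptr, x) \
	sketch_xchg((ptr), (unsigned long)(x), sizeof(*(ptr)))

int main(void)
{
	volatile unsigned long v = 1;

	/* Build with optimization, e.g. "gcc -O2": the dead default branch
	 * is removed only after inlining plus constant propagation. */
	printf("old value: %lu\n", sketch_xchg_ptr(&v, 2));
	printf("new value: %lu\n", (unsigned long)v);
	return 0;
}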
Diffstat (limited to 'arch/mips/include/asm/cmpxchg.h')
-rw-r--r--  arch/mips/include/asm/cmpxchg.h | 9 +++++----
1 file changed, 5 insertions(+), 4 deletions(-)
diff --git a/arch/mips/include/asm/cmpxchg.h b/arch/mips/include/asm/cmpxchg.h
index 79bf34efbc04..f6136871561d 100644
--- a/arch/mips/include/asm/cmpxchg.h
+++ b/arch/mips/include/asm/cmpxchg.h
@@ -77,8 +77,8 @@ extern unsigned long __xchg_called_with_bad_pointer(void)
 extern unsigned long __xchg_small(volatile void *ptr, unsigned long val,
 				  unsigned int size);
 
-static inline unsigned long __xchg(volatile void *ptr, unsigned long x,
-				   int size)
+static __always_inline
+unsigned long __xchg(volatile void *ptr, unsigned long x, int size)
 {
 	switch (size) {
 	case 1:
@@ -153,8 +153,9 @@ static inline unsigned long __xchg(volatile void *ptr, unsigned long x,
 extern unsigned long __cmpxchg_small(volatile void *ptr, unsigned long old,
 				     unsigned long new, unsigned int size);
 
-static inline unsigned long __cmpxchg(volatile void *ptr, unsigned long old,
-				      unsigned long new, unsigned int size)
+static __always_inline
+unsigned long __cmpxchg(volatile void *ptr, unsigned long old,
+			unsigned long new, unsigned int size)
 {
 	switch (size) {
 	case 1:
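For context on how the size parameter above becomes a compile-time constant: the public cmpxchg()/xchg() wrappers pass sizeof(*(ptr)), so once the helper is inlined the switch collapses to a single case. Below is a user-space sketch of the usual compare-and-swap retry loop built on such a primitive; GCC's __sync_val_compare_and_swap stands in for the kernel's cmpxchg(), and sketch_cmpxchg()/sketch_add() are hypothetical names, not kernel APIs.

#include <stdio.h>

/* Hypothetical stand-in for the kernel's cmpxchg(): returns the value found
 * in *ptr, so the caller can tell whether its swap actually happened. */
static inline unsigned long sketch_cmpxchg(volatile unsigned long *ptr,
					   unsigned long old, unsigned long new)
{
	return __sync_val_compare_and_swap(ptr, old, new);
}

/* Classic compare-and-swap retry loop: re-read and retry until no other
 * writer slipped in between the read and the swap. */
static unsigned long sketch_add(volatile unsigned long *ctr, unsigned long n)
{
	unsigned long cur, seen;

	do {
		cur = *ctr;
		seen = sketch_cmpxchg(ctr, cur, cur + n);
	} while (seen != cur);

	return cur + n;
}

int main(void)
{
	volatile unsigned long counter = 0;

	printf("counter = %lu\n", sketch_add(&counter, 5));
	return 0;
}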
