author     Arnd Bergmann <arnd@arndb.de>      2023-06-28 11:49:18 +0200
committer  Kees Cook <keescook@chromium.org>  2023-07-13 09:54:32 -0700
commit     ec7633de404e7ce704d8f79081b97bca5b616c23
tree       b58cf80ca2018004682bd14a1ec9e06b60804876
parent     e0b7b2081233ac7fe55838ff68cbc7ca9887a91f
sparc: mark __arch_xchg() as __always_inline
An otherwise correct change to the atomic operations uncovered an existing bug in the sparc __arch_xchg() function, which calls __xchg_called_with_bad_pointer() when its arguments are unknown at compile time:

ERROR: modpost: "__xchg_called_with_bad_pointer" [lib/atomic64_test.ko] undefined!

This now happens because gcc determines that it's better not to inline the function. Avoid this by marking the function as __always_inline to force the compiler to do the right thing here.

Reported-by: Guenter Roeck <linux@roeck-us.net>
Link: https://lore.kernel.org/all/c525adc9-6623-4660-8718-e0c9311563b8@roeck-us.net/
Fixes: d12157efc8e08 ("locking/atomic: make atomic*_{cmp,}xchg optional")
Signed-off-by: Arnd Bergmann <arnd@arndb.de>
Acked-by: Palmer Dabbelt <palmer@rivosinc.com>
Acked-by: Mark Rutland <mark.rutland@arm.com>
Reviewed-by: Sam Ravnborg <sam@ravnborg.org>
Acked-by: Guenter Roeck <linux@roeck-us.net>
Acked-by: Andi Shyti <andi.shyti@linux.intel.com>
Link: https://lore.kernel.org/r/20230628094938.2318171-1-arnd@kernel.org
Signed-off-by: Kees Cook <keescook@chromium.org>
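For readers unfamiliar with the trick, the following is a minimal userspace sketch of the pattern the sparc header relies on. Only the shape of __arch_xchg() and the never-defined __xchg_called_with_bad_pointer() declaration mirror the kernel code; the __xchg_u32() body and the main() harness are hypothetical stand-ins added for illustration.

#include <stdio.h>

/* The kernel defines __always_inline in include/linux/compiler_attributes.h;
 * it is repeated here so the sketch builds on its own. */
#define __always_inline	inline __attribute__((__always_inline__))

/* Deliberately declared but never defined: if a call to it survives into the
 * generated object code, the final link fails, turning an unsupported xchg()
 * size into a build-time error instead of a silent runtime bug. */
void __xchg_called_with_bad_pointer(void);

/* Hypothetical stand-in for the real assembly-backed 32-bit exchange. */
static unsigned long __xchg_u32(volatile unsigned int *m, unsigned int val)
{
	unsigned int old = *m;

	*m = val;
	return old;
}

/* With plain "static inline" the compiler may emit an out-of-line copy (as
 * gcc did for lib/atomic64_test.ko), and that copy keeps the call to the
 * undefined symbol. __always_inline forces the body into every call site,
 * where "size" is a compile-time constant and the dead branch is removed. */
static __always_inline unsigned long
__arch_xchg(unsigned long x, volatile void *ptr, int size)
{
	switch (size) {
	case 4:
		return __xchg_u32(ptr, x);
	}
	__xchg_called_with_bad_pointer();
	return x;
}

int main(void)
{
	unsigned int v = 1;
	unsigned long old;

	/* sizeof(v) is a constant, so with optimization enabled (the kernel
	 * builds with -O2) only the "case 4" path is emitted and no reference
	 * to __xchg_called_with_bad_pointer remains. */
	old = __arch_xchg(2, &v, sizeof(v));
	printf("old=%lu new=%u\n", old, v);
	return 0;
}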
 arch/sparc/include/asm/cmpxchg_32.h | 2 +-
 arch/sparc/include/asm/cmpxchg_64.h | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/arch/sparc/include/asm/cmpxchg_32.h b/arch/sparc/include/asm/cmpxchg_32.h
index 7a1339533d1d..d0af82c240b7 100644
--- a/arch/sparc/include/asm/cmpxchg_32.h
+++ b/arch/sparc/include/asm/cmpxchg_32.h
@@ -15,7 +15,7 @@
unsigned long __xchg_u32(volatile u32 *m, u32 new);
void __xchg_called_with_bad_pointer(void);
-static inline unsigned long __arch_xchg(unsigned long x, __volatile__ void * ptr, int size)
+static __always_inline unsigned long __arch_xchg(unsigned long x, __volatile__ void * ptr, int size)
{
	switch (size) {
	case 4:
diff --git a/arch/sparc/include/asm/cmpxchg_64.h b/arch/sparc/include/asm/cmpxchg_64.h
index 66cd61dde9ec..3de25262c411 100644
--- a/arch/sparc/include/asm/cmpxchg_64.h
+++ b/arch/sparc/include/asm/cmpxchg_64.h
@@ -87,7 +87,7 @@ xchg16(__volatile__ unsigned short *m, unsigned short val)
	return (load32 & mask) >> bit_shift;
}
-static inline unsigned long
+static __always_inline unsigned long
__arch_xchg(unsigned long x, __volatile__ void * ptr, int size)
{
	switch (size) {
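The attribute, rather than the optimizer's own judgement, is what the fix leans on: building the sketch above with gcc -O2 should link cleanly, because the only call to __xchg_called_with_bad_pointer() sits on a branch the compiler can prove dead once the function is inlined with a constant size. Plain inline is only a hint, and when gcc chose to emit an out-of-line copy for lib/atomic64_test.ko that copy still referenced the undefined symbol, producing the modpost error quoted in the commit message. __always_inline removes that discretion, which is the whole extent of the two one-line changes above.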