Rename the __cmpxchg() macro to __cmpxchg_32() to emphasize its
explicit support of the 32-bit data size, and add a BUILD_BUG_ON()
to catch any possible misuse with unsupported data types.
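
For example, with the new check in place a mistaken 64-bit use fails
at build time instead of silently operating on the wrong width (a
hypothetical caller, not part of this patch):

	u64 val = 0;

	/* sizeof(*(&val)) == 8, so BUILD_BUG_ON(sizeof(*(ptr)) != 4)
	 * fires and the build aborts here */
	__cmpxchg_32(&val, 0, 1);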
In case CONFIG_ARC_HAS_LLSC is undefined, arch_cmpxchg() uses a
spinlock to achieve SMP safety, so the BUILD_BUG_ON() check there
is unnecessary and is removed.
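
For reference, the !LLSC fallback is the pre-existing spinlock-based
path, roughly (a simplified sketch of the code already in
arch/arc/include/asm/cmpxchg.h, not something this patch adds):

	/* atomic_ops_lock()/unlock() imply the needed smp_mb() */
	atomic_ops_lock(__flags);
	_prev_ = *_p_;
	if (_prev_ == _o_)
		*_p_ = _n_;
	atomic_ops_unlock(__flags);
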
Signed-off-by: wuqiang.matt <wuqiang.matt@bytedance.com>
---
arch/arc/include/asm/cmpxchg.h | 12 ++++++------
1 file changed, 6 insertions(+), 6 deletions(-)
diff --git a/arch/arc/include/asm/cmpxchg.h b/arch/arc/include/asm/cmpxchg.h
index e138fde067de..bf46514f6f12 100644
--- a/arch/arc/include/asm/cmpxchg.h
+++ b/arch/arc/include/asm/cmpxchg.h
@@ -18,14 +18,16 @@
* if (*ptr == @old)
* *ptr = @new
*/
-#define __cmpxchg(ptr, old, new) \
+#define __cmpxchg_32(ptr, old, new) \
({ \
__typeof__(*(ptr)) _prev; \
\
+ BUILD_BUG_ON(sizeof(*(ptr)) != 4); \
+ \
__asm__ __volatile__( \
- "1: llock %0, [%1] \n" \
+ "1: llock %0, [%1] \n" \
" brne %0, %2, 2f \n" \
- " scond %3, [%1] \n" \
+ " scond %3, [%1] \n" \
" bnz 1b \n" \
"2: \n" \
: "=&r"(_prev) /* Early clobber prevent reg reuse */ \
@@ -47,7 +49,7 @@
\
switch(sizeof((_p_))) { \
case 4: \
- _prev_ = __cmpxchg(_p_, _o_, _n_); \
+ _prev_ = __cmpxchg_32(_p_, _o_, _n_); \
break; \
default: \
BUILD_BUG(); \
@@ -65,8 +67,6 @@
__typeof__(*(ptr)) _prev_; \
unsigned long __flags; \
\
- BUILD_BUG_ON(sizeof(_p_) != 4); \
- \
/* \
* spin lock/unlock provide the needed smp_mb() before/after \
*/ \
--
2.40.1
On Sat, 4 Nov 2023 17:16:12 +0800
"wuqiang.matt" <wuqiang.matt@bytedance.com> wrote:
> Rename the __cmpxchg() macro to __cmpxchg_32() to emphasize its
> explicit support of the 32-bit data size, and add a BUILD_BUG_ON()
> to catch any possible misuse with unsupported data types.
>
> In case CONFIG_ARC_HAS_LLSC is undefined, arch_cmpxchg() uses a
> spinlock to achieve SMP safety, so the BUILD_BUG_ON() check there
> is unnecessary and is removed.
>
Looks good to me.
Reviewed-by: Masami Hiramatsu (Google) <mhiramat@kernel.org>
--
Masami Hiramatsu (Google) <mhiramat@kernel.org>