[PATCH v6 81/90] x86/percpu: Add offset argument to x86_this_cpu_test_bit()
From: Ahmed S. Darwish
Date: Thu Mar 26 2026 - 22:46:51 EST
x86_this_cpu_test_bit() assumes that the queried bitmap starts at the base
address of the percpu object. For X86_FEATURE bitops, this matches the
current cpuinfo_x86::x86_capability[] layout.
Upcoming changes, though, will route all X86_FEATURE queries to the CPUID
tables, where the bitmap resides at an offset from the per-CPU CPUID table base.
Add an offset argument to x86_this_cpu_test_bit().
Signed-off-by: Ahmed S. Darwish <darwi@xxxxxxxxxxxxx>
---
arch/x86/include/asm/cpufeature.h | 2 +-
arch/x86/include/asm/percpu.h | 34 ++++++++++++++++++-------------
2 files changed, 21 insertions(+), 15 deletions(-)
diff --git a/arch/x86/include/asm/cpufeature.h b/arch/x86/include/asm/cpufeature.h
index 520949560138..b12bde4986b5 100644
--- a/arch/x86/include/asm/cpufeature.h
+++ b/arch/x86/include/asm/cpufeature.h
@@ -56,7 +56,7 @@ extern const char * const x86_bug_flags[NBUGINTS*32];
#define this_cpu_has(bit) \
(__builtin_constant_p(bit) && REQUIRED_MASK_BIT_SET(bit) ? 1 : \
- x86_this_cpu_test_bit(bit, cpu_info.x86_capability))
+ x86_this_cpu_test_bit(bit, cpu_info.x86_capability, 0))
/*
* This is the default CPU features testing macro to use in code.
diff --git a/arch/x86/include/asm/percpu.h b/arch/x86/include/asm/percpu.h
index 409981468cba..8a36f0bb979d 100644
--- a/arch/x86/include/asm/percpu.h
+++ b/arch/x86/include/asm/percpu.h
@@ -89,15 +89,17 @@
#endif /* CONFIG_SMP */
#if defined(CONFIG_USE_X86_SEG_SUPPORT) && defined(USE_TYPEOF_UNQUAL)
-# define __my_cpu_type(var) typeof(var)
-# define __my_cpu_ptr(ptr) (ptr)
-# define __my_cpu_var(var) (var)
+# define __my_cpu_type(var) typeof(var)
+# define __my_cpu_ptr(ptr) (ptr)
+# define __my_cpu_ptr_off(ptr, off) (typeof(ptr))((uintptr_t)(ptr) + (off))
+# define __my_cpu_var(var) (var)
-# define __percpu_qual __percpu_seg_override
+# define __percpu_qual __percpu_seg_override
#else
-# define __my_cpu_type(var) typeof(var) __percpu_seg_override
-# define __my_cpu_ptr(ptr) (__my_cpu_type(*(ptr))*)(__force uintptr_t)(ptr)
-# define __my_cpu_var(var) (*__my_cpu_ptr(&(var)))
+# define __my_cpu_type(var) typeof(var) __percpu_seg_override
+# define __my_cpu_ptr(ptr) (__my_cpu_type(*(ptr))*)(__force uintptr_t)(ptr)
+# define __my_cpu_ptr_off(ptr, off) (__my_cpu_type(*(ptr))*)((__force uintptr_t)(ptr) + (off))
+# define __my_cpu_var(var) (*__my_cpu_ptr(&(var)))
#endif
#define __force_percpu_arg(x) __force_percpu_prefix "%" #x
@@ -570,29 +572,33 @@ do { \
*/
#define this_cpu_read_stable(pcp) __pcpu_size_call_return(this_cpu_read_stable_, pcp)
-#define x86_this_cpu_constant_test_bit(_nr, _var) \
+#define x86_this_cpu_constant_test_bit(_nr, _var, _offset) \
({ \
unsigned long __percpu *addr__ = \
- (unsigned long __percpu *)&(_var) + BIT_WORD(_nr); \
+ (unsigned long __percpu *)((u8 __percpu *)&(_var) + (_offset)) +\
+ BIT_WORD(_nr); \
+ \
+ /* Ensure bitops safety */ \
+ BUILD_BUG_ON(!IS_ALIGNED((unsigned long)(_offset), sizeof(unsigned long)));\
\
!!(BIT_MASK(_nr) & raw_cpu_read(*addr__)); \
})
-#define x86_this_cpu_variable_test_bit(_nr, _var) \
+#define x86_this_cpu_variable_test_bit(_nr, _var, _offset) \
({ \
bool oldbit; \
\
asm volatile("btl %[nr], " __percpu_arg([var]) \
: "=@ccc" (oldbit) \
- : [var] "m" (__my_cpu_var(_var)), \
+ : [var] "m" (*__my_cpu_ptr_off(&(_var), _offset)), \
[nr] "rI" (_nr)); \
oldbit; \
})
-#define x86_this_cpu_test_bit(_nr, _var) \
+#define x86_this_cpu_test_bit(_nr, _var, _offset) \
(__builtin_constant_p(_nr) \
- ? x86_this_cpu_constant_test_bit(_nr, _var) \
- : x86_this_cpu_variable_test_bit(_nr, _var))
+ ? x86_this_cpu_constant_test_bit(_nr, _var, _offset) \
+ : x86_this_cpu_variable_test_bit(_nr, _var, _offset))
#include <asm-generic/percpu.h>
--
2.53.0