[PATCH 32/49] x86/alternatives: Rename 'int3_refs' to 'text_poke_array_refs'
From: Ingo Molnar
Date: Fri Mar 28 2025 - 09:35:13 EST

Make it clear that these per-CPU reference counts gate access to
text_poke_array: the INT3 trap handler may only look at the array
while it holds a reference, and smp_text_poke_batch_process() waits
for all references to be dropped before the array is reused.

Signed-off-by: Ingo Molnar <mingo@xxxxxxxxxx>
---
arch/x86/kernel/alternative.c | 14 +++++++-------
1 file changed, 7 insertions(+), 7 deletions(-)
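
[ Reviewer note, not part of the commit: the sketch below shows, in
  isolated user-space C11 form, the pattern these per-CPU reference
  counts implement: publish with a release store of 1, readers take
  the array only via inc-not-zero, the updater drops its reference and
  waits for zero before reuse. All names here (reader_refs, NR_READERS,
  poke_data, try_get_poke_data, put_poke_data) are invented for the
  example; the kernel code uses DEFINE_PER_CPU, this_cpu_ptr(),
  raw_atomic_inc_not_zero(), atomic_dec_and_test() and
  atomic_cond_read_acquire() instead, with stronger ordering than the
  simplified acquire/release shown here. ]

#include <stdatomic.h>
#include <stdbool.h>
#include <stdio.h>

#define NR_READERS 4

/* Stand-ins for text_poke_array_refs and text_poke_array: */
static atomic_int reader_refs[NR_READERS];
static int poke_data;

/* Analogue of try_get_text_poke_array(): take a reference only if non-zero. */
static bool try_get_poke_data(int cpu)
{
	int old = atomic_load_explicit(&reader_refs[cpu], memory_order_relaxed);

	do {
		if (!old)
			return false;
	} while (!atomic_compare_exchange_weak_explicit(&reader_refs[cpu],
			&old, old + 1,
			memory_order_acquire, memory_order_relaxed));
	return true;
}

/* Analogue of put_text_poke_array(): drop the reference with release order. */
static void put_poke_data(int cpu)
{
	atomic_fetch_sub_explicit(&reader_refs[cpu], 1, memory_order_release);
}

int main(void)
{
	/* Updater: fill in the data, then publish one reference per reader. */
	poke_data = 42;
	for (int i = 0; i < NR_READERS; i++)
		atomic_store_explicit(&reader_refs[i], 1, memory_order_release);

	/* Reader: a successful inc-not-zero makes poke_data safe to read. */
	if (try_get_poke_data(0)) {
		printf("reader sees %d\n", poke_data);
		put_poke_data(0);
	}

	/* Updater teardown: drop the initial reference, wait for readers. */
	for (int i = 0; i < NR_READERS; i++) {
		atomic_fetch_sub_explicit(&reader_refs[i], 1, memory_order_release);
		while (atomic_load_explicit(&reader_refs[i], memory_order_acquire))
			;	/* kernel: atomic_dec_and_test() + atomic_cond_read_acquire() */
	}
	return 0;
}

[ The point of the rename is exactly that this lifecycle belongs to
  text_poke_array rather than to INT3 as such, so the per-CPU counters
  are better named after the object they protect. ]
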
diff --git a/arch/x86/kernel/alternative.c b/arch/x86/kernel/alternative.c
index 34d3c69595a0..566b857d210d 100644
--- a/arch/x86/kernel/alternative.c
+++ b/arch/x86/kernel/alternative.c
@@ -2473,11 +2473,11 @@ static struct smp_text_poke_array {
 	struct smp_text_poke_loc vec[TP_ARRAY_NR_ENTRIES_MAX];
 } text_poke_array;
 
-static DEFINE_PER_CPU(atomic_t, int3_refs);
+static DEFINE_PER_CPU(atomic_t, text_poke_array_refs);
 
 static bool try_get_text_poke_array(void)
 {
-	atomic_t *refs = this_cpu_ptr(&int3_refs);
+	atomic_t *refs = this_cpu_ptr(&text_poke_array_refs);
 
 	if (!raw_atomic_inc_not_zero(refs))
 		return false;
@@ -2487,7 +2487,7 @@ static bool try_get_text_poke_array(void)
 
 static __always_inline void put_text_poke_array(void)
 {
-	atomic_t *refs = this_cpu_ptr(&int3_refs);
+	atomic_t *refs = this_cpu_ptr(&text_poke_array_refs);
 
 	smp_mb__before_atomic();
 	raw_atomic_dec(refs);
@@ -2522,9 +2522,9 @@ noinstr int smp_text_poke_int3_trap_handler(struct pt_regs *regs)
 	 * Having observed our INT3 instruction, we now must observe
 	 * text_poke_array with non-zero refcount:
 	 *
-	 *	int3_refs = 1		INT3
+	 *	text_poke_array_refs = 1	INT3
 	 *	WMB			RMB
-	 *	write INT3		if (int3_refs != 0)
+	 *	write INT3		if (text_poke_array_refs != 0)
 	 */
 	smp_rmb();
 
@@ -2623,7 +2623,7 @@ static void smp_text_poke_batch_process(void)
 	 * ensure reading a non-zero refcount provides up to date text_poke_array data.
 	 */
 	for_each_possible_cpu(i)
-		atomic_set_release(per_cpu_ptr(&int3_refs, i), 1);
+		atomic_set_release(per_cpu_ptr(&text_poke_array_refs, i), 1);
 
 	/*
 	 * Function tracing can enable thousands of places that need to be
@@ -2745,7 +2745,7 @@ static void smp_text_poke_batch_process(void)
 	 * unused.
 	 */
 	for_each_possible_cpu(i) {
-		atomic_t *refs = per_cpu_ptr(&int3_refs, i);
+		atomic_t *refs = per_cpu_ptr(&text_poke_array_refs, i);
 
 		if (unlikely(!atomic_dec_and_test(refs)))
 			atomic_cond_read_acquire(refs, !VAL);
--
2.45.2