[PATCH v6 5/8] RISC-V: Replace RISCV_MISALIGNED with RISCV_SCALAR_MISALIGNED

From: Jesse Taube
Date: Wed Jul 24 2024 - 14:37:47 EST


Replace RISCV_MISALIGNED with RISCV_SCALAR_MISALIGNED to allow
for the addition of RISCV_VECTOR_MISALIGNED in a later patch.

Signed-off-by: Jesse Taube <jesse@xxxxxxxxxxxx>
Reviewed-by: Conor Dooley <conor.dooley@xxxxxxxxxxxxx>
Reviewed-by: Charlie Jenkins <charlie@xxxxxxxxxxxx>
---
V2 -> V3:
- New patch
V3 -> V4:
- No changes
V4 -> V5:
- No changes
V5 -> V6:
- Fix the accidental move of check_unaligned_access_emulated_all_cpus() out of the #ifdef
---
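[Not part of the commit, just an illustrative note on where the rename is
headed: once the scalar handling lives under its own hidden symbol, a
separate vector symbol can be added next to it and selected independently.
The RISCV_VECTOR_MISALIGNED name is taken from the commit message above;
everything else in the sketch below (the RISCV_ISA_V dependency and the
help text) is an assumption, not the actual follow-up patch.]

# Sketch only -- details are guesses, not the posted follow-up:
config RISCV_VECTOR_MISALIGNED
        bool
        depends on RISCV_ISA_V
        help
          Embed support for detecting and emulating misaligned
          vector loads and stores.

Keeping scalar and vector support as separate hidden bools presumably lets
the user-visible options (such as RISCV_PROBE_UNALIGNED_ACCESS) select one
without dragging in the other.
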
 arch/riscv/Kconfig                    | 6 +++---
 arch/riscv/include/asm/cpufeature.h   | 2 +-
 arch/riscv/include/asm/entry-common.h | 2 +-
 arch/riscv/kernel/Makefile            | 4 ++--
 arch/riscv/kernel/fpu.S               | 4 ++--
 5 files changed, 9 insertions(+), 9 deletions(-)

diff --git a/arch/riscv/Kconfig b/arch/riscv/Kconfig
index b94176e25be1..34d24242e37a 100644
--- a/arch/riscv/Kconfig
+++ b/arch/riscv/Kconfig
@@ -717,7 +717,7 @@ config THREAD_SIZE_ORDER
Specify the Pages of thread stack size (from 4KB to 64KB), which also
affects irq stack size, which is equal to thread stack size.

-config RISCV_MISALIGNED
+config RISCV_SCALAR_MISALIGNED
bool
select SYSCTL_ARCH_UNALIGN_ALLOW
help
@@ -734,7 +734,7 @@ choice

config RISCV_PROBE_UNALIGNED_ACCESS
bool "Probe for hardware unaligned access support"
- select RISCV_MISALIGNED
+ select RISCV_SCALAR_MISALIGNED
help
During boot, the kernel will run a series of tests to determine the
speed of unaligned accesses. This probing will dynamically determine
@@ -745,7 +745,7 @@ config RISCV_PROBE_UNALIGNED_ACCESS

config RISCV_EMULATED_UNALIGNED_ACCESS
bool "Emulate unaligned access where system support is missing"
- select RISCV_MISALIGNED
+ select RISCV_SCALAR_MISALIGNED
help
If unaligned memory accesses trap into the kernel as they are not
supported by the system, the kernel will emulate the unaligned
diff --git a/arch/riscv/include/asm/cpufeature.h b/arch/riscv/include/asm/cpufeature.h
index 3b24342c7d2a..4ade9f87fc55 100644
--- a/arch/riscv/include/asm/cpufeature.h
+++ b/arch/riscv/include/asm/cpufeature.h
@@ -34,7 +34,7 @@ extern struct riscv_isainfo hart_isa[NR_CPUS];

void riscv_user_isa_enable(void);

-#if defined(CONFIG_RISCV_MISALIGNED)
+#if defined(CONFIG_RISCV_SCALAR_MISALIGNED)
bool check_unaligned_access_emulated_all_cpus(void);
void check_unaligned_access_emulated(struct work_struct *work __always_unused);
void unaligned_emulation_finish(void);
diff --git a/arch/riscv/include/asm/entry-common.h b/arch/riscv/include/asm/entry-common.h
index 2293e535f865..0a4e3544c877 100644
--- a/arch/riscv/include/asm/entry-common.h
+++ b/arch/riscv/include/asm/entry-common.h
@@ -25,7 +25,7 @@ static inline void arch_exit_to_user_mode_prepare(struct pt_regs *regs,
void handle_page_fault(struct pt_regs *regs);
void handle_break(struct pt_regs *regs);

-#ifdef CONFIG_RISCV_MISALIGNED
+#ifdef CONFIG_RISCV_SCALAR_MISALIGNED
int handle_misaligned_load(struct pt_regs *regs);
int handle_misaligned_store(struct pt_regs *regs);
#else
diff --git a/arch/riscv/kernel/Makefile b/arch/riscv/kernel/Makefile
index 5b243d46f4b1..8d4e7d40e42f 100644
--- a/arch/riscv/kernel/Makefile
+++ b/arch/riscv/kernel/Makefile
@@ -62,8 +62,8 @@ obj-y += probes/
obj-y += tests/
obj-$(CONFIG_MMU) += vdso.o vdso/

-obj-$(CONFIG_RISCV_MISALIGNED) += traps_misaligned.o
-obj-$(CONFIG_RISCV_MISALIGNED) += unaligned_access_speed.o
+obj-$(CONFIG_RISCV_SCALAR_MISALIGNED) += traps_misaligned.o
+obj-$(CONFIG_RISCV_SCALAR_MISALIGNED) += unaligned_access_speed.o
obj-$(CONFIG_RISCV_PROBE_UNALIGNED_ACCESS) += copy-unaligned.o

obj-$(CONFIG_FPU) += fpu.o
diff --git a/arch/riscv/kernel/fpu.S b/arch/riscv/kernel/fpu.S
index 327cf527dd7e..f74f6b60e347 100644
--- a/arch/riscv/kernel/fpu.S
+++ b/arch/riscv/kernel/fpu.S
@@ -170,7 +170,7 @@ SYM_FUNC_END(__fstate_restore)
__access_func(f31)


-#ifdef CONFIG_RISCV_MISALIGNED
+#ifdef CONFIG_RISCV_SCALAR_MISALIGNED

/*
* Disable compressed instructions set to keep a constant offset between FP
@@ -224,4 +224,4 @@ SYM_FUNC_START(get_f64_reg)
fp_access_epilogue
SYM_FUNC_END(get_f64_reg)

-#endif /* CONFIG_RISCV_MISALIGNED */
+#endif /* CONFIG_RISCV_SCALAR_MISALIGNED */
--
2.45.2