[PATCH v3 2/2] arch/riscv: Add bitrev.h file to support rev8 and brev8

From: Jinjie Ruan

Date: Fri Apr 17 2026 - 05:38:11 EST


The RISC-V Bit-manipulation Extension for Cryptography (Zbkb) provides
the 'brev8' instruction, which reverses the bits within each byte.
Combined with the 'rev8' instruction (from Zbb or Zbkb), which reverses
the byte order of a register, we can efficiently implement 16-bit,
32-bit, and (on RV64) 64-bit bit reversal.

This is significantly faster than the default software table-lookup
implementation in lib/bitrev.c, as it replaces memory accesses and
multiple arithmetic operations with just one to three hardware
instructions: a single brev8 for 8-bit reversal, and rev8 + brev8
(plus a shift on RV64) for the wider forms.

Select HAVE_ARCH_BITREVERSE and provide <asm/bitrev.h> so that these
instructions are used when the Zbkb extension is detected at runtime
(via riscv_has_extension_likely() and the alternatives mechanism);
when Zbkb is absent, the generic table-lookup helpers are used as a
fallback.

Link: https://docs.riscv.org/reference/isa/unpriv/b-st-ext.html
Signed-off-by: Jinjie Ruan <ruanjinjie@xxxxxxxxxx>
---
arch/riscv/Kconfig | 1 +
arch/riscv/include/asm/bitrev.h | 55 +++++++++++++++++++++++++++++++++
2 files changed, 56 insertions(+)
create mode 100644 arch/riscv/include/asm/bitrev.h

diff --git a/arch/riscv/Kconfig b/arch/riscv/Kconfig
index 90c531e6abf5..05f2b2166a83 100644
--- a/arch/riscv/Kconfig
+++ b/arch/riscv/Kconfig
@@ -128,6 +128,7 @@ config RISCV
select HAS_IOPORT if MMU
select HAVE_ALIGNED_STRUCT_PAGE
select HAVE_ARCH_AUDITSYSCALL
+ select HAVE_ARCH_BITREVERSE if RISCV_ISA_ZBKB
select HAVE_ARCH_HUGE_VMALLOC if HAVE_ARCH_HUGE_VMAP
select HAVE_ARCH_HUGE_VMAP if MMU && 64BIT
select HAVE_ARCH_JUMP_LABEL if !XIP_KERNEL
diff --git a/arch/riscv/include/asm/bitrev.h b/arch/riscv/include/asm/bitrev.h
new file mode 100644
index 000000000000..eef263cc6655
--- /dev/null
+++ b/arch/riscv/include/asm/bitrev.h
@@ -0,0 +1,55 @@
+/* SPDX-License-Identifier: GPL-2.0 */
+#ifndef __ASM_BITREV_H
+#define __ASM_BITREV_H
+
+#include <linux/types.h>
+#include <asm/cpufeature-macros.h>
+#include <asm/hwcap.h>
+#include <asm-generic/bitops/__bitrev.h>
+
+static __always_inline __attribute_const__ u32 __arch_bitrev32(u32 x)
+{
+ unsigned long result = (unsigned long)x;
+
+ if (!riscv_has_extension_likely(RISCV_ISA_EXT_ZBKB))
+ return generic___bitrev32(x);
+
+ /* rev8 reverses the byte order; brev8 reverses the bits in each byte. */
+ asm(
+ ".option push\n"
+ ".option arch,+zbkb\n"
+ "rev8 %0, %0\n"
+ "brev8 %0, %0\n"
+ ".option pop"
+ : "+r" (result)
+ );
+ /* On RV64 the zero-extended input is reversed into the upper 32 bits. */
+#if __riscv_xlen == 64
+ return (u32)(result >> 32);
+#else
+ return (u32)result;
+#endif
+}
+
+static __always_inline __attribute_const__ u16 __arch_bitrev16(u16 x)
+{
+ return (u16)(__arch_bitrev32(x) >> 16); /* reversed bits land in the upper half */
+}
+
+static __always_inline __attribute_const__ u8 __arch_bitrev8(u8 x)
+{
+ unsigned long result = (unsigned long)x;
+
+ if (!riscv_has_extension_likely(RISCV_ISA_EXT_ZBKB))
+ return generic___bitrev8(x);
+
+ /* A single brev8 reverses the bits of the low byte in place. */
+ asm(
+ ".option push\n"
+ ".option arch,+zbkb\n"
+ "brev8 %0, %0\n"
+ ".option pop"
+ : "+r" (result)
+ );
+
+ return (u8)result;
+}
+#endif /* __ASM_BITREV_H */
--
2.34.1