[PATCH] crypto: x86/cast5: Remove unused cast5_ctr_16way
From: "Dr. David Alan Gilbert" <linux@xxxxxxxxxxx>
Date: Sat Oct 05 2024 - 19:14:49 EST
commit e2d60e2f597a ("crypto: x86/cast5 - drop CTR mode implementation")
removed all callers of cast5_ctr_16way() but left the AVX assembly
implementation in place. Remove it.
Signed-off-by: Dr. David Alan Gilbert <linux@xxxxxxxxxxx>
---
arch/x86/crypto/cast5-avx-x86_64-asm_64.S | 76 -----------------------
 1 file changed, 76 deletions(-)

diff --git a/arch/x86/crypto/cast5-avx-x86_64-asm_64.S b/arch/x86/crypto/cast5-avx-x86_64-asm_64.S
index b4e460a87f18..fb95a614249d 100644
--- a/arch/x86/crypto/cast5-avx-x86_64-asm_64.S
+++ b/arch/x86/crypto/cast5-avx-x86_64-asm_64.S
@@ -487,79 +487,3 @@ SYM_FUNC_START(cast5_cbc_dec_16way)
FRAME_END
RET;
SYM_FUNC_END(cast5_cbc_dec_16way)
-
-SYM_FUNC_START(cast5_ctr_16way)
- /* input:
- * %rdi: ctx
- * %rsi: dst
- * %rdx: src
- * %rcx: iv (big endian, 64bit)
- */
- FRAME_BEGIN
- pushq %r12;
- pushq %r15;
-
- movq %rdi, CTX;
- movq %rsi, %r11;
- movq %rdx, %r12;
-
- vpcmpeqd RTMP, RTMP, RTMP;
- vpsrldq $8, RTMP, RTMP; /* low: -1, high: 0 */
-
- vpcmpeqd RKR, RKR, RKR;
- vpaddq RKR, RKR, RKR; /* low: -2, high: -2 */
- vmovdqa .Lbswap_iv_mask(%rip), R1ST;
- vmovdqa .Lbswap128_mask(%rip), RKM;
-
- /* load IV and byteswap */
- vmovq (%rcx), RX;
- vpshufb R1ST, RX, RX;
-
- /* construct IVs */
- vpsubq RTMP, RX, RX; /* le: IV1, IV0 */
- vpshufb RKM, RX, RL1; /* be: IV0, IV1 */
- vpsubq RKR, RX, RX;
- vpshufb RKM, RX, RR1; /* be: IV2, IV3 */
- vpsubq RKR, RX, RX;
- vpshufb RKM, RX, RL2; /* be: IV4, IV5 */
- vpsubq RKR, RX, RX;
- vpshufb RKM, RX, RR2; /* be: IV6, IV7 */
- vpsubq RKR, RX, RX;
- vpshufb RKM, RX, RL3; /* be: IV8, IV9 */
- vpsubq RKR, RX, RX;
- vpshufb RKM, RX, RR3; /* be: IV10, IV11 */
- vpsubq RKR, RX, RX;
- vpshufb RKM, RX, RL4; /* be: IV12, IV13 */
- vpsubq RKR, RX, RX;
- vpshufb RKM, RX, RR4; /* be: IV14, IV15 */
-
- /* store last IV */
- vpsubq RTMP, RX, RX; /* le: IV16, IV14 */
- vpshufb R1ST, RX, RX; /* be: IV16, IV16 */
- vmovq RX, (%rcx);
-
- call __cast5_enc_blk16;
-
- /* dst = src ^ iv */
- vpxor (0*16)(%r12), RR1, RR1;
- vpxor (1*16)(%r12), RL1, RL1;
- vpxor (2*16)(%r12), RR2, RR2;
- vpxor (3*16)(%r12), RL2, RL2;
- vpxor (4*16)(%r12), RR3, RR3;
- vpxor (5*16)(%r12), RL3, RL3;
- vpxor (6*16)(%r12), RR4, RR4;
- vpxor (7*16)(%r12), RL4, RL4;
- vmovdqu RR1, (0*16)(%r11);
- vmovdqu RL1, (1*16)(%r11);
- vmovdqu RR2, (2*16)(%r11);
- vmovdqu RL2, (3*16)(%r11);
- vmovdqu RR3, (4*16)(%r11);
- vmovdqu RL3, (5*16)(%r11);
- vmovdqu RR4, (6*16)(%r11);
- vmovdqu RL4, (7*16)(%r11);
-
- popq %r15;
- popq %r12;
- FRAME_END
- RET;
-SYM_FUNC_END(cast5_ctr_16way)
--
2.46.2