[PATCH 4.20 152/183] ARCv2: Enable unaligned access in early ASM code

From: Greg Kroah-Hartman
Date: Mon Feb 25 2019 - 16:39:52 EST


4.20-stable review patch. If anyone has any objections, please let me know.

------------------

From: Eugeniy Paltsev <Eugeniy.Paltsev@xxxxxxxxxxxx>

commit 252f6e8eae909bc075a1b1e3b9efb095ae4c0b56 upstream.

Unaligned access is currently enabled in arc_init_IRQ(), which might be
too late: gcc 7.3.1 onwards (ARC GNU 2018.03) generates unaligned memory
accesses by default, so compiler-generated code running before
arc_init_IRQ() could already fault. Enable it in the early boot ASM
(CPU_EARLY_SETUP in head.S) instead.
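
As an illustration only (not part of the patch, names below are made
up): a packed struct is one typical case where a newer gcc emits a
plain word-sized load for a misaligned field instead of byte-wise
accesses; on ARCv2 such a load traps unless STATUS32.AD has been set.

  /* Hypothetical example: 'len' lives at offset 1, so reading it is an
   * unaligned access.  With unaligned access enabled by default
   * (gcc 7.3.1 / ARC GNU 2018.03 onwards) the compiler may use a
   * single 32-bit LD here rather than assembling the value byte by
   * byte. */
  struct __attribute__((packed)) hdr {
          char tag;
          int  len;
  };

  int read_len(const struct hdr *h)
  {
          return h->len;
  }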

Cc: stable@xxxxxxxxxxxxxxx #4.4+
Signed-off-by: Eugeniy Paltsev <Eugeniy.Paltsev@xxxxxxxxxxxx>
Signed-off-by: Vineet Gupta <vgupta@xxxxxxxxxxxx>
[vgupta: rewrote changelog]
Signed-off-by: Greg Kroah-Hartman <gregkh@xxxxxxxxxxxxxxxxxxx>

---
arch/arc/kernel/head.S | 10 ++++++++++
1 file changed, 10 insertions(+)

--- a/arch/arc/kernel/head.S
+++ b/arch/arc/kernel/head.S
@@ -17,6 +17,7 @@
#include <asm/entry.h>
#include <asm/arcregs.h>
#include <asm/cache.h>
+#include <asm/irqflags.h>

.macro CPU_EARLY_SETUP

@@ -47,6 +48,15 @@
sr r5, [ARC_REG_DC_CTRL]

1:
+
+#ifdef CONFIG_ISA_ARCV2
+ ; Unaligned access is disabled at reset, so re-enable early as
+ ; gcc 7.3.1 (ARC GNU 2018.03) onwards generates unaligned access
+ ; by default
+ lr r5, [status32]
+ bset r5, r5, STATUS_AD_BIT
+ kflag r5
+#endif
.endm

.section .init.text, "ax",@progbits
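
For reference, the three new instructions (lr/bset/kflag) amount to
"read STATUS32, set the AD bit, write it back via KFLAG".  Below is a
minimal C sketch of the same operation, assuming the ARC port's
read_aux_reg() helper and ARC_REG_STATUS32 definition; the function
name is made up for illustration, and the real fix has to stay in ASM
since it must run before any compiler-generated code executes.

  #include <asm/arcregs.h>      /* read_aux_reg(), ARC_REG_STATUS32 (assumed) */
  #include <asm/irqflags.h>     /* STATUS_AD_BIT, as in the patch above */

  static void arc_enable_unaligned_access(void)  /* hypothetical helper */
  {
          unsigned int s32 = read_aux_reg(ARC_REG_STATUS32);

          /* AD=1: the core no longer traps on unaligned LD/ST */
          s32 |= 1U << STATUS_AD_BIT;

          /* STATUS32 is written back with the KFLAG instruction */
          __asm__ __volatile__("kflag %0" : : "r"(s32));
  }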