[PATCH 3.12 14/56] ARC: use ASL assembler mnemonic

From: Jiri Slaby
Date: Wed Jun 15 2016 - 03:44:55 EST


From: Vineet Gupta <vgupta@xxxxxxxxxxxx>

3.12-stable review patch. If anyone has any objections, please let me know.

===============

commit a6416f57ce57fb390b6ee30b12c01c29032a26af upstream.

ARCompact and ARCv2 only have ASL, while binutils used to support LSL as
an alias mnemonic.

Newer upstream binutils no longer accept the alias, so replace it.
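
For a left shift there is no arithmetic/logical distinction: both shift
zeros in at the low end, so the rename is purely cosmetic (only right
shifts differ, ASR sign-extending where LSR zero-fills). A minimal
illustration with arbitrary registers:

	lsl	r1, r2, 3	; alias spelling, rejected by newer binutils
	asl	r1, r2, 3	; canonical ARC mnemonic, identical operation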

Signed-off-by: Vineet Gupta <vgupta@xxxxxxxxxxxx>
Signed-off-by: Jiri Slaby <jslaby@xxxxxxx>
---
arch/arc/mm/tlbex.S | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)
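
For context, the SMP hunks below touch a per-CPU scratch-save sequence:
each CPU owns one cache-line-wide slot in ex_saved_reg1, so saves from
different cores never share a cache line. The shift-then-add computes
the slot address, annotated here as a sketch:

	GET_CPU_ID r0			; r0 = this CPU's id (0, 1, ...)
	asl r0, r0, L1_CACHE_SHIFT	; r0 = id * L1 cache line size
	add r0, @ex_saved_reg1, r0	; r0 = &ex_saved_reg1 + that offset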

diff --git a/arch/arc/mm/tlbex.S b/arch/arc/mm/tlbex.S
index cf7d7d9ad695..98837a2bfd5e 100644
--- a/arch/arc/mm/tlbex.S
+++ b/arch/arc/mm/tlbex.S
@@ -89,7 +89,7 @@ ex_saved_reg1:
 #ifdef CONFIG_SMP
 	sr r0, [ARC_REG_SCRATCH_DATA0]	; freeup r0 to code with
 	GET_CPU_ID r0			; get to per cpu scratch mem,
-	lsl r0, r0, L1_CACHE_SHIFT	; cache line wide per cpu
+	asl r0, r0, L1_CACHE_SHIFT	; cache line wide per cpu
 	add r0, @ex_saved_reg1, r0
 #else
 	st r0, [@ex_saved_reg1]
@@ -108,7 +108,7 @@ ex_saved_reg1:
 .macro TLBMISS_RESTORE_REGS
 #ifdef CONFIG_SMP
 	GET_CPU_ID r0			; get to per cpu scratch mem
-	lsl r0, r0, L1_CACHE_SHIFT	; each is cache line wide
+	asl r0, r0, L1_CACHE_SHIFT	; each is cache line wide
 	add r0, @ex_saved_reg1, r0
 	ld_s r3, [r0, 12]
 	ld_s r2, [r0, 8]
@@ -220,7 +220,7 @@ ex_saved_reg1:
 
 .macro CONV_PTE_TO_TLB
 	and r3, r0, PTE_BITS_RWX	; r w x
-	lsl r2, r3, 3			; r w x 0 0 0
+	asl r2, r3, 3			; Kr Kw Kx 0 0 0 (GLOBAL, kernel only)
 	and.f 0, r0, _PAGE_GLOBAL
 	or.z r2, r2, r3			; r w x r w x
 
--
2.9.0