xref: /arm-trusted-firmware/lib/cpus/aarch64/wa_cve_2017_5715_mmu.S (revision 91f16700b400a8c0651d24a598fc48ee2997a0d7)
/*
 * Copyright (c) 2017-2022, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <context.h>
#include <services/arm_arch_svc.h>

	.globl	wa_cve_2017_5715_mmu_vbar

/* ESR_EL3 value for an SMC #0 from AArch64 state: EC = 0x17, IL = 1 */
#define ESR_EL3_A64_SMC0	0x5e000000
/* ESR_EL3 value for an SMC #0 from AArch32 state: EC = 0x13, IL = 1 */
#define ESR_EL3_A32_SMC0	0x4e000000

vector_base wa_cve_2017_5715_mmu_vbar

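	/*
	 * This vector table implements the CVE-2017-5715 (branch target
	 * injection) mitigation for cores on which disabling and
	 * re-enabling the MMU at EL3 invalidates the branch predictor
	 * (in TF-A it is installed by the reset handlers of such cores,
	 * e.g. Cortex-A57 and Cortex-A72). The ISB between the disable
	 * and the re-enable below ensures the toggle actually takes
	 * effect.
	 */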
	.macro	apply_cve_2017_5715_wa _is_sync_exception _esr_el3_val
	stp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	mrs	x1, sctlr_el3
	/* Disable MMU */
	bic	x1, x1, #SCTLR_M_BIT
	msr	sctlr_el3, x1
	isb
	/* Enable MMU */
	orr	x1, x1, #SCTLR_M_BIT
	msr	sctlr_el3, x1
	/*
	 * Defer the ISB to avoid synchronizing twice: if we hit the
	 * workaround SMC call below, its ERET will synchronize
	 * implicitly.
	 */

	/*
	 * Check whether this is an SMC #0 from AArch64/AArch32 state with
	 * W0 == SMCCC_ARCH_WORKAROUND_1 or W0 == SMCCC_ARCH_WORKAROUND_3.
	 *
	 * This sequence evaluates as:
	 *    (W0 == SMCCC_ARCH_WORKAROUND_1) || (W0 == SMCCC_ARCH_WORKAROUND_3) ?
	 *        (ESR_EL3 == SMC#0) : (NE)
	 * allowing use of a single branch operation.
	 */
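	/*
	 * Reminder on CCMP: if the condition passes, the compare is
	 * performed; otherwise the flags are set to the NZCV immediate.
	 * Below, #4 (Z set) keeps the "matched" result when the first
	 * compare already succeeded, and #0 (all flags clear) forces NE
	 * when neither function ID matched.
	 */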
	.if \_is_sync_exception
		orr	w1, wzr, #SMCCC_ARCH_WORKAROUND_1
		cmp	w0, w1
		orr	w1, wzr, #SMCCC_ARCH_WORKAROUND_3
		ccmp	w0, w1, #4, ne
		mrs	x0, esr_el3
		mov_imm	w1, \_esr_el3_val
		ccmp	w0, w1, #0, eq
		/* Static predictor will predict a fall through */
		bne	1f
		exception_return
1:
	.endif

	/*
	 * Synchronize now to enable the MMU.  This is required
	 * to ensure the load pair below reads the data stored earlier.
	 */
	isb
	ldp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	.endm
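
	/*
	 * For reference, a CPU reset handler selects this table by
	 * programming VBAR_EL3. A minimal sketch (the register used
	 * here is illustrative):
	 *
	 *	adr	x0, wa_cve_2017_5715_mmu_vbar
	 *	msr	vbar_el3, x0
	 *	isb
	 */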

	/* ---------------------------------------------------------------------
	 * Current EL with SP_EL0 : 0x0 - 0x200
	 * ---------------------------------------------------------------------
	 */
vector_entry mmu_sync_exception_sp_el0
	b	sync_exception_sp_el0
end_vector_entry mmu_sync_exception_sp_el0

vector_entry mmu_irq_sp_el0
	b	irq_sp_el0
end_vector_entry mmu_irq_sp_el0

vector_entry mmu_fiq_sp_el0
	b	fiq_sp_el0
end_vector_entry mmu_fiq_sp_el0

vector_entry mmu_serror_sp_el0
	b	serror_sp_el0
end_vector_entry mmu_serror_sp_el0

	/* ---------------------------------------------------------------------
	 * Current EL with SP_ELx : 0x200 - 0x400
	 * ---------------------------------------------------------------------
	 */
vector_entry mmu_sync_exception_sp_elx
	b	sync_exception_sp_elx
end_vector_entry mmu_sync_exception_sp_elx

vector_entry mmu_irq_sp_elx
	b	irq_sp_elx
end_vector_entry mmu_irq_sp_elx

vector_entry mmu_fiq_sp_elx
	b	fiq_sp_elx
end_vector_entry mmu_fiq_sp_elx

vector_entry mmu_serror_sp_elx
	b	serror_sp_elx
end_vector_entry mmu_serror_sp_elx

	/* ---------------------------------------------------------------------
	 * Lower EL using AArch64 : 0x400 - 0x600
	 * ---------------------------------------------------------------------
	 */
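	/*
	 * The workaround is only applied on entry from a lower EL, as that
	 * is the path on which branch predictor state trained by less
	 * privileged software could be consumed at EL3; the current-EL
	 * vectors above simply forward to the runtime handlers.
	 */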
vector_entry mmu_sync_exception_aarch64
	apply_cve_2017_5715_wa _is_sync_exception=1 _esr_el3_val=ESR_EL3_A64_SMC0
	b	sync_exception_aarch64
end_vector_entry mmu_sync_exception_aarch64

vector_entry mmu_irq_aarch64
	apply_cve_2017_5715_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
	b	irq_aarch64
end_vector_entry mmu_irq_aarch64

vector_entry mmu_fiq_aarch64
	apply_cve_2017_5715_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
	b	fiq_aarch64
end_vector_entry mmu_fiq_aarch64

vector_entry mmu_serror_aarch64
	apply_cve_2017_5715_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
	b	serror_aarch64
end_vector_entry mmu_serror_aarch64

	/* ---------------------------------------------------------------------
	 * Lower EL using AArch32 : 0x600 - 0x800
	 * ---------------------------------------------------------------------
	 */
vector_entry mmu_sync_exception_aarch32
	apply_cve_2017_5715_wa _is_sync_exception=1 _esr_el3_val=ESR_EL3_A32_SMC0
	b	sync_exception_aarch32
end_vector_entry mmu_sync_exception_aarch32

vector_entry mmu_irq_aarch32
	apply_cve_2017_5715_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
	b	irq_aarch32
end_vector_entry mmu_irq_aarch32

vector_entry mmu_fiq_aarch32
	apply_cve_2017_5715_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
	b	fiq_aarch32
end_vector_entry mmu_fiq_aarch32

vector_entry mmu_serror_aarch32
	apply_cve_2017_5715_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
	b	serror_aarch32
end_vector_entry mmu_serror_aarch32
153