/*
 * Copyright (c) 2017-2022, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <context.h>
#include <services/arm_arch_svc.h>

	.globl	wa_cve_2017_5715_bpiall_vbar

#define EMIT_BPIALL		0xee070fd5
#define EMIT_SMC		0xe1600070
#define ESR_EL3_A64_SMC0	0x5e000000
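
/*
 * EMIT_BPIALL is the A32 encoding of "mcr p15, 0, r0, c7, c5, 6" (BPIALL,
 * invalidate all branch predictors) and EMIT_SMC is the A32 encoding of
 * "smc #0".  ESR_EL3_A64_SMC0 is the syndrome for an SMC taken from
 * AArch64 state with immediate 0 (EC = 0x17, IL bit set, ISS = 0).
 */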

	.macro	apply_cve_2017_5715_wa _from_vector
	/*
	 * Save register state to enable a call to AArch32 S-EL1 and return
	 * Identify the original calling vector in w2 (==_from_vector)
	 * Use w3-w6 for additional register state preservation while in S-EL1
	 */

	/* Save GP regs */
	stp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	stp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	stp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	stp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	stp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	stp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	stp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	stp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	stp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	stp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	stp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	stp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	stp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	stp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
	stp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]

	/* Identify the original exception vector */
	mov	w2, \_from_vector

	/* Preserve 32-bit system registers in GP registers through the workaround */
	mrs	x3, esr_el3
	mrs	x4, spsr_el3
	mrs	x5, scr_el3
	mrs	x6, sctlr_el1

	/*
	 * Preserve LR and ELR_EL3 registers in the GP regs context.
	 * Temporarily use the CTX_GPREG_SP_EL0 slot to preserve ELR_EL3
	 * through the workaround. This is OK because at this point the
	 * current state for this context's SP_EL0 is in the live system
	 * register, which is unmodified by the workaround.
	 */
	mrs	x7, elr_el3
	stp	x30, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]

	/*
	 * Load system registers for entry to S-EL1.
	 */

	/* Mask all interrupts and set AArch32 Supervisor mode */
	movz	w8, SPSR_MODE32(MODE32_svc, SPSR_T_ARM, SPSR_E_LITTLE, SPSR_AIF_MASK)

	/* Switch EL3 exception vectors while the workaround is executing. */
	adr	x9, wa_cve_2017_5715_bpiall_ret_vbar

	/* Setup SCTLR_EL1 with MMU off and I$ on */
	ldr	x10, stub_sel1_sctlr

	/* Land at the S-EL1 workaround stub */
	adr	x11, aarch32_stub

	/*
	 * Setting SCR_EL3 to all zeroes means that the NS, RW
	 * and SMD bits are configured as expected.
	 */
	msr	scr_el3, xzr
	msr	spsr_el3, x8
	msr	vbar_el3, x9
	msr	sctlr_el1, x10
	msr	elr_el3, x11
	eret	/* Enter the AArch32 stub at S-EL1 */
	.endm
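
/*
 * Workaround flow: the macro above ERETs to the AArch32 stub below at
 * S-EL1.  The stub executes BPIALL to invalidate the branch predictor,
 * then issues SMC #0 to trap back to EL3 through
 * wa_cve_2017_5715_bpiall_ret_vbar, which restores the saved state and
 * resumes normal exception handling.
 */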

	/* ---------------------------------------------------------------------
	 * This vector table is used at runtime to enter the workaround at
	 * AArch32 S-EL1 for Sync/IRQ/FIQ/SError exceptions.  If the workaround
	 * is not enabled, the existing runtime exception vector table is used.
	 * ---------------------------------------------------------------------
	 */
vector_base wa_cve_2017_5715_bpiall_vbar

	/* ---------------------------------------------------------------------
	 * Current EL with SP_EL0 : 0x0 - 0x200
	 * ---------------------------------------------------------------------
	 */
vector_entry bpiall_sync_exception_sp_el0
	b	sync_exception_sp_el0
	nop	/* to force 8 byte alignment for the following stub */

	/*
	 * Since each vector table entry is 128 bytes, we can store the
	 * stub context in the unused space to minimize memory footprint.
	 */
stub_sel1_sctlr:
	.quad	SCTLR_AARCH32_EL1_RES1 | SCTLR_I_BIT

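	/*
	 * The stub below is AArch32 code, which cannot be assembled directly
	 * in this AArch64 file, so its two instructions are emitted as raw
	 * opcodes via .word.
	 */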
aarch32_stub:
	.word	EMIT_BPIALL
	.word	EMIT_SMC

end_vector_entry bpiall_sync_exception_sp_el0

vector_entry bpiall_irq_sp_el0
	b	irq_sp_el0
end_vector_entry bpiall_irq_sp_el0

vector_entry bpiall_fiq_sp_el0
	b	fiq_sp_el0
end_vector_entry bpiall_fiq_sp_el0

vector_entry bpiall_serror_sp_el0
	b	serror_sp_el0
end_vector_entry bpiall_serror_sp_el0

	/* ---------------------------------------------------------------------
	 * Current EL with SP_ELx: 0x200 - 0x400
	 * ---------------------------------------------------------------------
	 */
vector_entry bpiall_sync_exception_sp_elx
	b	sync_exception_sp_elx
end_vector_entry bpiall_sync_exception_sp_elx

vector_entry bpiall_irq_sp_elx
	b	irq_sp_elx
end_vector_entry bpiall_irq_sp_elx

vector_entry bpiall_fiq_sp_elx
	b	fiq_sp_elx
end_vector_entry bpiall_fiq_sp_elx

vector_entry bpiall_serror_sp_elx
	b	serror_sp_elx
end_vector_entry bpiall_serror_sp_elx

	/* ---------------------------------------------------------------------
	 * Lower EL using AArch64 : 0x400 - 0x600
	 * ---------------------------------------------------------------------
	 */
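	/*
	 * The _from_vector argument below is a one-hot encoding of the
	 * original vector: bit 0 = Sync, bit 1 = IRQ, bit 2 = FIQ,
	 * bit 3 = SError.  This lets the return path classify the original
	 * exception with single-bit tbz/tbnz tests.
	 */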
vector_entry bpiall_sync_exception_aarch64
	apply_cve_2017_5715_wa 1
end_vector_entry bpiall_sync_exception_aarch64

vector_entry bpiall_irq_aarch64
	apply_cve_2017_5715_wa 2
end_vector_entry bpiall_irq_aarch64

vector_entry bpiall_fiq_aarch64
	apply_cve_2017_5715_wa 4
end_vector_entry bpiall_fiq_aarch64

vector_entry bpiall_serror_aarch64
	apply_cve_2017_5715_wa 8
end_vector_entry bpiall_serror_aarch64

	/* ---------------------------------------------------------------------
	 * Lower EL using AArch32 : 0x600 - 0x800
	 * ---------------------------------------------------------------------
	 */
vector_entry bpiall_sync_exception_aarch32
	apply_cve_2017_5715_wa 1
end_vector_entry bpiall_sync_exception_aarch32

vector_entry bpiall_irq_aarch32
	apply_cve_2017_5715_wa 2
end_vector_entry bpiall_irq_aarch32

vector_entry bpiall_fiq_aarch32
	apply_cve_2017_5715_wa 4
end_vector_entry bpiall_fiq_aarch32

vector_entry bpiall_serror_aarch32
	apply_cve_2017_5715_wa 8
end_vector_entry bpiall_serror_aarch32

	/* ---------------------------------------------------------------------
	 * This vector table is used while the workaround is executing.  It
	 * installs a simple SMC handler to allow the Sync/IRQ/FIQ/SError
	 * workaround stubs to enter EL3 from S-EL1.  It restores the previous
	 * EL3 state before proceeding with the normal runtime exception vector.
	 * ---------------------------------------------------------------------
	 */
vector_base wa_cve_2017_5715_bpiall_ret_vbar

	/* ---------------------------------------------------------------------
	 * Current EL with SP_EL0 : 0x0 - 0x200 (UNUSED)
	 * ---------------------------------------------------------------------
	 */
vector_entry bpiall_ret_sync_exception_sp_el0
	b	report_unhandled_exception
end_vector_entry bpiall_ret_sync_exception_sp_el0

vector_entry bpiall_ret_irq_sp_el0
	b	report_unhandled_interrupt
end_vector_entry bpiall_ret_irq_sp_el0

vector_entry bpiall_ret_fiq_sp_el0
	b	report_unhandled_interrupt
end_vector_entry bpiall_ret_fiq_sp_el0

vector_entry bpiall_ret_serror_sp_el0
	b	report_unhandled_exception
end_vector_entry bpiall_ret_serror_sp_el0

	/* ---------------------------------------------------------------------
	 * Current EL with SP_ELx: 0x200 - 0x400 (UNUSED)
	 * ---------------------------------------------------------------------
	 */
vector_entry bpiall_ret_sync_exception_sp_elx
	b	report_unhandled_exception
end_vector_entry bpiall_ret_sync_exception_sp_elx

vector_entry bpiall_ret_irq_sp_elx
	b	report_unhandled_interrupt
end_vector_entry bpiall_ret_irq_sp_elx

vector_entry bpiall_ret_fiq_sp_elx
	b	report_unhandled_interrupt
end_vector_entry bpiall_ret_fiq_sp_elx

vector_entry bpiall_ret_serror_sp_elx
	b	report_unhandled_exception
end_vector_entry bpiall_ret_serror_sp_elx

	/* ---------------------------------------------------------------------
	 * Lower EL using AArch64 : 0x400 - 0x600 (UNUSED)
	 * ---------------------------------------------------------------------
	 */
vector_entry bpiall_ret_sync_exception_aarch64
	b	report_unhandled_exception
end_vector_entry bpiall_ret_sync_exception_aarch64

vector_entry bpiall_ret_irq_aarch64
	b	report_unhandled_interrupt
end_vector_entry bpiall_ret_irq_aarch64

vector_entry bpiall_ret_fiq_aarch64
	b	report_unhandled_interrupt
end_vector_entry bpiall_ret_fiq_aarch64

vector_entry bpiall_ret_serror_aarch64
	b	report_unhandled_exception
end_vector_entry bpiall_ret_serror_aarch64

	/* ---------------------------------------------------------------------
	 * Lower EL using AArch32 : 0x600 - 0x800
	 * ---------------------------------------------------------------------
	 */
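	/*
	 * The SMC #0 issued by the S-EL1 stub is taken from AArch32 state at
	 * a lower EL, so it arrives at this vector.  This is the only entry
	 * of this table expected to be taken while the workaround executes.
	 */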
vector_entry bpiall_ret_sync_exception_aarch32
	/*
	 * w2 indicates which S-EL1 stub was run and thus which original vector was used
	 * w3-w6 contain saved system register state (esr_el3 in w3)
	 * Restore LR and ELR_EL3 register state from the GP regs context
	 */
	ldp	x30, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]

	/* Apply the restored system register state */
	msr	esr_el3, x3
	msr	spsr_el3, x4
	msr	scr_el3, x5
	msr	sctlr_el1, x6
	msr	elr_el3, x7

	/*
	 * Workaround is complete, so swap VBAR_EL3 to point
	 * to workaround entry table in preparation for subsequent
	 * Sync/IRQ/FIQ/SError exceptions.
	 */
	adr	x0, wa_cve_2017_5715_bpiall_vbar
	msr	vbar_el3, x0

	/*
	 * Restore all GP regs except x2 and x3 (esr).  The value in x2
	 * indicates the type of the original exception.
	 */
	ldp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	ldp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	ldp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	ldp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	ldp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	ldp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	ldp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	ldp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	ldp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	ldp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	ldp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	ldp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	ldp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
	ldp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]

	/* Fast path Sync exceptions.  Static predictor will fall through. */
	tbz	w2, #0, workaround_not_sync

	/*
	 * Check if the SMC came from AArch64 state with immediate #0
	 * and W0 = SMCCC_ARCH_WORKAROUND_1 or W0 = SMCCC_ARCH_WORKAROUND_3
	 *
	 * This sequence evaluates as:
	 *    (W0==SMCCC_ARCH_WORKAROUND_1) || (W0==SMCCC_ARCH_WORKAROUND_3) ?
	 *    (ESR_EL3==SMC#0) : (NE)
	 * allowing use of a single branch operation
	 */
	orr	w2, wzr, #SMCCC_ARCH_WORKAROUND_1
	cmp	w0, w2			/* EQ if W0 == SMCCC_ARCH_WORKAROUND_1 */
	orr	w2, wzr, #SMCCC_ARCH_WORKAROUND_3
	ccmp	w0, w2, #4, ne		/* if NE, test W0 == SMCCC_ARCH_WORKAROUND_3; else force EQ (nzcv = 0b0100) */
	mov_imm	w2, ESR_EL3_A64_SMC0
	ccmp	w3, w2, #0, eq		/* if EQ, test saved ESR == SMC#0 from AArch64; else force NE */
	/* Static predictor will predict a fall through */
	bne	1f
	eret				/* Fast return for the workaround SMCs */
1:
	/* restore x2 and x3 and continue sync exception handling */
	b	bpiall_ret_sync_exception_aarch32_tail
end_vector_entry bpiall_ret_sync_exception_aarch32

vector_entry bpiall_ret_irq_aarch32
	b	report_unhandled_interrupt

	/*
	 * Post-workaround fan-out for non-sync exceptions
	 */
workaround_not_sync:
	tbnz	w2, #3, bpiall_ret_serror	/* bit 3: original vector was SError */
	tbnz	w2, #2, bpiall_ret_fiq		/* bit 2: original vector was FIQ */
	/* IRQ */
	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	b	irq_aarch64

bpiall_ret_fiq:
	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	b	fiq_aarch64

bpiall_ret_serror:
	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	b	serror_aarch64
end_vector_entry bpiall_ret_irq_aarch32

vector_entry bpiall_ret_fiq_aarch32
	b	report_unhandled_interrupt
end_vector_entry bpiall_ret_fiq_aarch32

vector_entry bpiall_ret_serror_aarch32
	b	report_unhandled_exception
end_vector_entry bpiall_ret_serror_aarch32

	/*
	 * Part of bpiall_ret_sync_exception_aarch32 to save vector space
	 */
func bpiall_ret_sync_exception_aarch32_tail
	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	b	sync_exception_aarch64
endfunc bpiall_ret_sync_exception_aarch32_tail