xref: /arm-trusted-firmware/lib/el3_runtime/aarch64/context.S (revision 91f16700b400a8c0651d24a598fc48ee2997a0d7)
/*
 * Copyright (c) 2013-2023, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <assert_macros.S>
#include <context.h>
#include <el3_common_macros.S>

	.global	el1_sysregs_context_save
	.global	el1_sysregs_context_restore
#if CTX_INCLUDE_FPREGS
	.global	fpregs_context_save
	.global	fpregs_context_restore
#endif /* CTX_INCLUDE_FPREGS */
	.global	prepare_el3_entry
	.global	restore_gp_pmcr_pauth_regs
	.global	save_and_update_ptw_el1_sys_regs
	.global	el3_exit
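
/*
 * For reference, the context save/restore routines in this file are
 * reached from C through prototypes along the following lines (a
 * sketch; the authoritative declarations and the context types live
 * in context.h):
 *
 *	void el1_sysregs_context_save(el1_sysregs_t *regs);
 *	void el1_sysregs_context_restore(el1_sysregs_t *regs);
 *	void fpregs_context_save(fp_regs_t *regs);
 *	void fpregs_context_restore(fp_regs_t *regs);
 */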
/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS, using
 * x9-x17 (caller-saved temporary registers) to save the EL1 system
 * register context. It assumes that 'x0' points to an 'el1_sys_regs'
 * structure where the register context will be saved.
 * ------------------------------------------------------------------
 */
func el1_sysregs_context_save

	mrs	x9, spsr_el1
	mrs	x10, elr_el1
	stp	x9, x10, [x0, #CTX_SPSR_EL1]

#if !ERRATA_SPECULATIVE_AT
	mrs	x15, sctlr_el1
	mrs	x16, tcr_el1
	stp	x15, x16, [x0, #CTX_SCTLR_EL1]
#endif /* ERRATA_SPECULATIVE_AT */
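	/*
	 * With ERRATA_SPECULATIVE_AT, SCTLR_EL1 and TCR_EL1 are instead
	 * saved by save_and_update_ptw_el1_sys_regs() below and restored
	 * via the restore_ptw_el1_sys_regs macro on el3_exit().
	 */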

	mrs	x17, cpacr_el1
	mrs	x9, csselr_el1
	stp	x17, x9, [x0, #CTX_CPACR_EL1]

	mrs	x10, sp_el1
	mrs	x11, esr_el1
	stp	x10, x11, [x0, #CTX_SP_EL1]

	mrs	x12, ttbr0_el1
	mrs	x13, ttbr1_el1
	stp	x12, x13, [x0, #CTX_TTBR0_EL1]

	mrs	x14, mair_el1
	mrs	x15, amair_el1
	stp	x14, x15, [x0, #CTX_MAIR_EL1]

	mrs	x16, actlr_el1
	mrs	x17, tpidr_el1
	stp	x16, x17, [x0, #CTX_ACTLR_EL1]

	mrs	x9, tpidr_el0
	mrs	x10, tpidrro_el0
	stp	x9, x10, [x0, #CTX_TPIDR_EL0]

	mrs	x13, par_el1
	mrs	x14, far_el1
	stp	x13, x14, [x0, #CTX_PAR_EL1]

	mrs	x15, afsr0_el1
	mrs	x16, afsr1_el1
	stp	x15, x16, [x0, #CTX_AFSR0_EL1]

	mrs	x17, contextidr_el1
	mrs	x9, vbar_el1
	stp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]

	/* Save AArch32 system registers if the build has instructed so */
#if CTX_INCLUDE_AARCH32_REGS
	mrs	x11, spsr_abt
	mrs	x12, spsr_und
	stp	x11, x12, [x0, #CTX_SPSR_ABT]

	mrs	x13, spsr_irq
	mrs	x14, spsr_fiq
	stp	x13, x14, [x0, #CTX_SPSR_IRQ]

	mrs	x15, dacr32_el2
	mrs	x16, ifsr32_el2
	stp	x15, x16, [x0, #CTX_DACR32_EL2]
#endif /* CTX_INCLUDE_AARCH32_REGS */

	/* Save NS timer registers if the build has instructed so */
#if NS_TIMER_SWITCH
	mrs	x10, cntp_ctl_el0
	mrs	x11, cntp_cval_el0
	stp	x10, x11, [x0, #CTX_CNTP_CTL_EL0]

	mrs	x12, cntv_ctl_el0
	mrs	x13, cntv_cval_el0
	stp	x12, x13, [x0, #CTX_CNTV_CTL_EL0]

	mrs	x14, cntkctl_el1
	str	x14, [x0, #CTX_CNTKCTL_EL1]
#endif /* NS_TIMER_SWITCH */

	/* Save MTE system registers if the build has instructed so */
#if CTX_INCLUDE_MTE_REGS
	mrs	x15, TFSRE0_EL1
	mrs	x16, TFSR_EL1
	stp	x15, x16, [x0, #CTX_TFSRE0_EL1]

	mrs	x9, RGSR_EL1
	mrs	x10, GCR_EL1
	stp	x9, x10, [x0, #CTX_RGSR_EL1]
#endif /* CTX_INCLUDE_MTE_REGS */

	ret
endfunc el1_sysregs_context_save

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS, using
 * x9-x17 (caller-saved temporary registers) to restore the EL1
 * system register context. It assumes that 'x0' points to an
 * 'el1_sys_regs' structure from which the register context will be
 * restored.
 * ------------------------------------------------------------------
 */
func el1_sysregs_context_restore

	ldp	x9, x10, [x0, #CTX_SPSR_EL1]
	msr	spsr_el1, x9
	msr	elr_el1, x10

#if !ERRATA_SPECULATIVE_AT
	ldp	x15, x16, [x0, #CTX_SCTLR_EL1]
	msr	sctlr_el1, x15
	msr	tcr_el1, x16
#endif /* ERRATA_SPECULATIVE_AT */

	ldp	x17, x9, [x0, #CTX_CPACR_EL1]
	msr	cpacr_el1, x17
	msr	csselr_el1, x9

	ldp	x10, x11, [x0, #CTX_SP_EL1]
	msr	sp_el1, x10
	msr	esr_el1, x11

	ldp	x12, x13, [x0, #CTX_TTBR0_EL1]
	msr	ttbr0_el1, x12
	msr	ttbr1_el1, x13

	ldp	x14, x15, [x0, #CTX_MAIR_EL1]
	msr	mair_el1, x14
	msr	amair_el1, x15

	ldp	x16, x17, [x0, #CTX_ACTLR_EL1]
	msr	actlr_el1, x16
	msr	tpidr_el1, x17

	ldp	x9, x10, [x0, #CTX_TPIDR_EL0]
	msr	tpidr_el0, x9
	msr	tpidrro_el0, x10

	ldp	x13, x14, [x0, #CTX_PAR_EL1]
	msr	par_el1, x13
	msr	far_el1, x14

	ldp	x15, x16, [x0, #CTX_AFSR0_EL1]
	msr	afsr0_el1, x15
	msr	afsr1_el1, x16

	ldp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]
	msr	contextidr_el1, x17
	msr	vbar_el1, x9

	/* Restore AArch32 system registers if the build has instructed so */
#if CTX_INCLUDE_AARCH32_REGS
	ldp	x11, x12, [x0, #CTX_SPSR_ABT]
	msr	spsr_abt, x11
	msr	spsr_und, x12

	ldp	x13, x14, [x0, #CTX_SPSR_IRQ]
	msr	spsr_irq, x13
	msr	spsr_fiq, x14

	ldp	x15, x16, [x0, #CTX_DACR32_EL2]
	msr	dacr32_el2, x15
	msr	ifsr32_el2, x16
#endif /* CTX_INCLUDE_AARCH32_REGS */

	/* Restore NS timer registers if the build has instructed so */
#if NS_TIMER_SWITCH
	ldp	x10, x11, [x0, #CTX_CNTP_CTL_EL0]
	msr	cntp_ctl_el0, x10
	msr	cntp_cval_el0, x11

	ldp	x12, x13, [x0, #CTX_CNTV_CTL_EL0]
	msr	cntv_ctl_el0, x12
	msr	cntv_cval_el0, x13

	ldr	x14, [x0, #CTX_CNTKCTL_EL1]
	msr	cntkctl_el1, x14
#endif /* NS_TIMER_SWITCH */

	/* Restore MTE system registers if the build has instructed so */
#if CTX_INCLUDE_MTE_REGS
	ldp	x11, x12, [x0, #CTX_TFSRE0_EL1]
	msr	TFSRE0_EL1, x11
	msr	TFSR_EL1, x12

	ldp	x13, x14, [x0, #CTX_RGSR_EL1]
	msr	RGSR_EL1, x13
	msr	GCR_EL1, x14
#endif /* CTX_INCLUDE_MTE_REGS */

	/* No explicit ISB required here as ERET covers it */
	ret
endfunc el1_sysregs_context_restore

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS, using
 * x9-x17 (caller-saved temporary registers) to save the floating
 * point register context. It assumes that 'x0' points to a 'fp_regs'
 * structure where the register context will be saved.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However, we currently neither use VFP registers nor set traps in
 * Trusted Firmware, so the bit is assumed to be clear.
 *
 * TODO: Revisit when VFP is used in secure world
 * ------------------------------------------------------------------
 */
#if CTX_INCLUDE_FPREGS
func fpregs_context_save
	stp	q0, q1, [x0, #CTX_FP_Q0]
	stp	q2, q3, [x0, #CTX_FP_Q2]
	stp	q4, q5, [x0, #CTX_FP_Q4]
	stp	q6, q7, [x0, #CTX_FP_Q6]
	stp	q8, q9, [x0, #CTX_FP_Q8]
	stp	q10, q11, [x0, #CTX_FP_Q10]
	stp	q12, q13, [x0, #CTX_FP_Q12]
	stp	q14, q15, [x0, #CTX_FP_Q14]
	stp	q16, q17, [x0, #CTX_FP_Q16]
	stp	q18, q19, [x0, #CTX_FP_Q18]
	stp	q20, q21, [x0, #CTX_FP_Q20]
	stp	q22, q23, [x0, #CTX_FP_Q22]
	stp	q24, q25, [x0, #CTX_FP_Q24]
	stp	q26, q27, [x0, #CTX_FP_Q26]
	stp	q28, q29, [x0, #CTX_FP_Q28]
	stp	q30, q31, [x0, #CTX_FP_Q30]

	mrs	x9, fpsr
	str	x9, [x0, #CTX_FP_FPSR]

	mrs	x10, fpcr
	str	x10, [x0, #CTX_FP_FPCR]

#if CTX_INCLUDE_AARCH32_REGS
	mrs	x11, fpexc32_el2
	str	x11, [x0, #CTX_FP_FPEXC32_EL2]
#endif /* CTX_INCLUDE_AARCH32_REGS */
	ret
endfunc fpregs_context_save

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS, using
 * x9-x17 (caller-saved temporary registers) to restore the floating
 * point register context. It assumes that 'x0' points to a 'fp_regs'
 * structure from which the register context will be restored.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However, we currently neither use VFP registers nor set traps in
 * Trusted Firmware, so the bit is assumed to be clear.
 *
 * TODO: Revisit when VFP is used in secure world
 * ------------------------------------------------------------------
 */
func fpregs_context_restore
	ldp	q0, q1, [x0, #CTX_FP_Q0]
	ldp	q2, q3, [x0, #CTX_FP_Q2]
	ldp	q4, q5, [x0, #CTX_FP_Q4]
	ldp	q6, q7, [x0, #CTX_FP_Q6]
	ldp	q8, q9, [x0, #CTX_FP_Q8]
	ldp	q10, q11, [x0, #CTX_FP_Q10]
	ldp	q12, q13, [x0, #CTX_FP_Q12]
	ldp	q14, q15, [x0, #CTX_FP_Q14]
	ldp	q16, q17, [x0, #CTX_FP_Q16]
	ldp	q18, q19, [x0, #CTX_FP_Q18]
	ldp	q20, q21, [x0, #CTX_FP_Q20]
	ldp	q22, q23, [x0, #CTX_FP_Q22]
	ldp	q24, q25, [x0, #CTX_FP_Q24]
	ldp	q26, q27, [x0, #CTX_FP_Q26]
	ldp	q28, q29, [x0, #CTX_FP_Q28]
	ldp	q30, q31, [x0, #CTX_FP_Q30]

	ldr	x9, [x0, #CTX_FP_FPSR]
	msr	fpsr, x9

	ldr	x10, [x0, #CTX_FP_FPCR]
	msr	fpcr, x10

#if CTX_INCLUDE_AARCH32_REGS
	ldr	x11, [x0, #CTX_FP_FPEXC32_EL2]
	msr	fpexc32_el2, x11
#endif /* CTX_INCLUDE_AARCH32_REGS */

	/*
	 * No explicit ISB is required here, as the ERET used to
	 * switch to secure EL1 or the non-secure world covers it
	 */

	ret
endfunc fpregs_context_restore
#endif /* CTX_INCLUDE_FPREGS */

	/*
	 * Set SCR_EL3.EA bit to enable SErrors at EL3
	 */
	.macro enable_serror_at_el3
	mrs     x8, scr_el3
	orr     x8, x8, #SCR_EA_BIT
	msr     scr_el3, x8
	.endm

	/*
	 * Set to a default value those PSTATE bits that are not set by
	 * hardware when an exception is taken, as described in the
	 * AArch64.TakeException() pseudocode function in Arm DDI
	 * 0487F.c, page J1-7635.
	 */
	.macro set_unset_pstate_bits
	/*
	 * If Data Independent Timing (DIT) functionality is implemented,
	 * always enable DIT in EL3.
	 */
#if ENABLE_FEAT_DIT
#if ENABLE_FEAT_DIT == 2
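	/*
	 * ENABLE_FEAT_DIT == 2 selects runtime detection: read the ID
	 * register and skip setting DIT if the feature is not present.
	 */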
	mrs	x8, id_aa64pfr0_el1
	and	x8, x8, #(ID_AA64PFR0_DIT_MASK << ID_AA64PFR0_DIT_SHIFT)
	cbz	x8, 1f
#endif
	mov     x8, #DIT_BIT
	msr     DIT, x8
1:
#endif /* ENABLE_FEAT_DIT */
	.endm /* set_unset_pstate_bits */

/*-------------------------------------------------------------------------
 * This macro checks the ENABLE_FEAT_MPAM state, performs an ID register
 * check to see if the platform supports the MPAM extension, and restores
 * the MPAM3 register value if it is FEAT_STATE_ENABLED/FEAT_STATE_CHECKED.
 *
 * This is more complicated than for other extensions because we cannot
 * check whether the platform supports MPAM by looking at the status of a
 * particular bit in the MDCR_EL3 or CPTR_EL3 register.
 * ------------------------------------------------------------------------
 */

	.macro	restore_mpam3_el3
#if ENABLE_FEAT_MPAM
#if ENABLE_FEAT_MPAM == 2

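	/*
	 * Runtime detection: MPAM support is advertised in
	 * ID_AA64PFR0_EL1.MPAM (major version) and
	 * ID_AA64PFR1_EL1.MPAM_frac (minor version). Only when both
	 * fields are zero is the feature absent.
	 */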
	mrs	x8, id_aa64pfr0_el1
	lsr	x8, x8, #(ID_AA64PFR0_MPAM_SHIFT)
	and	x8, x8, #(ID_AA64PFR0_MPAM_MASK)
	mrs	x7, id_aa64pfr1_el1
	lsr	x7, x7, #(ID_AA64PFR1_MPAM_FRAC_SHIFT)
	and	x7, x7, #(ID_AA64PFR1_MPAM_FRAC_MASK)
	orr	x7, x7, x8
	cbz	x7, no_mpam
#endif
	/* -----------------------------------------------------------
	 * Restore the MPAM3_EL3 register as per the context state.
	 * Currently we only enable MPAM for the NS world and trap to
	 * EL3 on MPAM accesses from lower ELs of the Secure and Realm
	 * worlds.
	 * -----------------------------------------------------------
	 */
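	/*
	 * MPAM3_EL3 is written below via its generic S3_6_C10_C5_0
	 * encoding, presumably so that assemblers without MPAM support
	 * can still build this file.
	 */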
	ldr	x17, [sp, #CTX_EL3STATE_OFFSET + CTX_MPAM3_EL3]
	msr	S3_6_C10_C5_0, x17 /* mpam3_el3 */

no_mpam:
#endif
	.endm /* restore_mpam3_el3 */

/* ------------------------------------------------------------------
 * The following macro is used to save and restore all the general
 * purpose and ARMv8.3-PAuth (if enabled) registers.
 * It also checks if the Secure Cycle Counter (PMCCNTR_EL0) is
 * disabled in EL3/Secure (ARMv8.5-PMU), in which case PMCCNTR_EL0
 * need not be saved/restored during a world switch.
 *
 * Ideally we would only save and restore the callee-saved registers
 * when a world switch occurs, but that type of implementation is
 * more complex. So currently we will always save and restore these
 * registers on entry to and exit from EL3.
 * clobbers: x18
 * ------------------------------------------------------------------
 */
	.macro save_gp_pmcr_pauth_regs
	stp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	stp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	stp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	stp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	stp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	stp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	stp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	stp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	stp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	stp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	stp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	stp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	stp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	stp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
	stp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
	mrs	x18, sp_el0
	str	x18, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]

	/* PMUv3 is presumed to be always present */
	mrs	x9, pmcr_el0
	str	x9, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]
	/* Disable cycle counter when event counting is prohibited */
	orr	x9, x9, #PMCR_EL0_DP_BIT
	msr	pmcr_el0, x9
	isb
#if CTX_INCLUDE_PAUTH_REGS
	/* ----------------------------------------------------------
	 * Save the ARMv8.3-PAuth keys as they are not banked
	 * by exception level
	 * ----------------------------------------------------------
	 */
	add	x19, sp, #CTX_PAUTH_REGS_OFFSET

	mrs	x20, APIAKeyLo_EL1	/* x21:x20 = APIAKey */
	mrs	x21, APIAKeyHi_EL1
	mrs	x22, APIBKeyLo_EL1	/* x23:x22 = APIBKey */
	mrs	x23, APIBKeyHi_EL1
	mrs	x24, APDAKeyLo_EL1	/* x25:x24 = APDAKey */
	mrs	x25, APDAKeyHi_EL1
	mrs	x26, APDBKeyLo_EL1	/* x27:x26 = APDBKey */
	mrs	x27, APDBKeyHi_EL1
	mrs	x28, APGAKeyLo_EL1	/* x29:x28 = APGAKey */
	mrs	x29, APGAKeyHi_EL1

	stp	x20, x21, [x19, #CTX_PACIAKEY_LO]
	stp	x22, x23, [x19, #CTX_PACIBKEY_LO]
	stp	x24, x25, [x19, #CTX_PACDAKEY_LO]
	stp	x26, x27, [x19, #CTX_PACDBKEY_LO]
	stp	x28, x29, [x19, #CTX_PACGAKEY_LO]
#endif /* CTX_INCLUDE_PAUTH_REGS */
	.endm /* save_gp_pmcr_pauth_regs */

/* -----------------------------------------------------------------
 * This function saves the context and sets the PSTATE to a known
 * state, preparing entry to EL3.
 * Save all the general purpose and ARMv8.3-PAuth (if enabled)
 * registers.
 * Then set to a default value for EL3 any PSTATE bits that are not
 * set by hardware, according to the AArch64.TakeException pseudocode
 * in the Arm Architecture Reference Manual.
 * clobbers: x17
 * -----------------------------------------------------------------
 */
func prepare_el3_entry
	save_gp_pmcr_pauth_regs
	enable_serror_at_el3
	/*
	 * Set the PSTATE bits not described in the AArch64.TakeException
	 * pseudocode to their default values.
	 */
	set_unset_pstate_bits
	ret
endfunc prepare_el3_entry

/* ------------------------------------------------------------------
 * This function restores ARMv8.3-PAuth (if enabled) and all general
 * purpose registers except x30 from the CPU context.
 * The x30 register must be explicitly restored by the caller.
 * ------------------------------------------------------------------
 */
func restore_gp_pmcr_pauth_regs
#if CTX_INCLUDE_PAUTH_REGS
	/* Restore the ARMv8.3 PAuth keys */
	add	x10, sp, #CTX_PAUTH_REGS_OFFSET

	ldp	x0, x1, [x10, #CTX_PACIAKEY_LO]	/* x1:x0 = APIAKey */
	ldp	x2, x3, [x10, #CTX_PACIBKEY_LO]	/* x3:x2 = APIBKey */
	ldp	x4, x5, [x10, #CTX_PACDAKEY_LO]	/* x5:x4 = APDAKey */
	ldp	x6, x7, [x10, #CTX_PACDBKEY_LO]	/* x7:x6 = APDBKey */
	ldp	x8, x9, [x10, #CTX_PACGAKEY_LO]	/* x9:x8 = APGAKey */

	msr	APIAKeyLo_EL1, x0
	msr	APIAKeyHi_EL1, x1
	msr	APIBKeyLo_EL1, x2
	msr	APIBKeyHi_EL1, x3
	msr	APDAKeyLo_EL1, x4
	msr	APDAKeyHi_EL1, x5
	msr	APDBKeyLo_EL1, x6
	msr	APDBKeyHi_EL1, x7
	msr	APGAKeyLo_EL1, x8
	msr	APGAKeyHi_EL1, x9
#endif /* CTX_INCLUDE_PAUTH_REGS */

	/* PMUv3 is presumed to be always present */
	ldr	x0, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]
	msr	pmcr_el0, x0
	ldp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	ldp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	ldp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	ldp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	ldp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	ldp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	ldp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	ldp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	ldp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	ldp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	ldp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	ldp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	ldp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
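	/*
	 * x28 is used as scratch to restore sp_el0 first; x28 and x29
	 * themselves are reloaded from the context afterwards.
	 */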
	ldr	x28, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]
	msr	sp_el0, x28
	ldp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
	ret
endfunc restore_gp_pmcr_pauth_regs

/*
 * In case of ERRATA_SPECULATIVE_AT, save the SCTLR_EL1 and TCR_EL1
 * registers and update the EL1 registers so that stage 1 and stage 2
 * page table walks are disabled.
 */
func save_and_update_ptw_el1_sys_regs
	/* ----------------------------------------------------------
	 * Save only the sctlr_el1 and tcr_el1 registers
	 * ----------------------------------------------------------
	 */
	mrs	x29, sctlr_el1
	str	x29, [sp, #(CTX_EL1_SYSREGS_OFFSET + CTX_SCTLR_EL1)]
	mrs	x29, tcr_el1
	str	x29, [sp, #(CTX_EL1_SYSREGS_OFFSET + CTX_TCR_EL1)]

	/* ------------------------------------------------------------
	 * The steps below must be performed in this order to disable
	 * page table walks for lower ELs (EL1 and EL0). The first step
	 * disables stage 1 page table walks; the second ensures that
	 * the page table walker uses the TCR_EL1.EPDx bits for address
	 * translation. The ISB ensures the CPU performs the two steps
	 * in order.
	 *
	 * 1. Update the TCR_EL1.EPDx bits to disable stage 1 page
	 *    table walks.
	 * 2. Enable the MMU bit to avoid identity mapping via stage 2
	 *    and force the TCR_EL1.EPDx bits to be used by the page
	 *    table walker.
	 * ------------------------------------------------------------
	 */
	orr	x29, x29, #(TCR_EPD0_BIT)
	orr	x29, x29, #(TCR_EPD1_BIT)
	msr	tcr_el1, x29
	isb
	mrs	x29, sctlr_el1
	orr	x29, x29, #SCTLR_M_BIT
	msr	sctlr_el1, x29
	isb

	ret
endfunc save_and_update_ptw_el1_sys_regs

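/*
 * Its counterpart, the restore_ptw_el1_sys_regs macro invoked from
 * el3_exit() below, restores these two registers on the way out of EL3
 * when the errata workaround is enabled.
 */
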
/* -----------------------------------------------------------------
 * The macro below returns the address of the per_world context for
 * the security state, retrieved through the "get_security_state"
 * macro. The per_world context address is returned in the register
 * argument.
 * Clobbers: x9, x10
 * ------------------------------------------------------------------
 */

.macro get_per_world_context _reg:req
	ldr	x10, [sp, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3]
	get_security_state x9, x10
	mov_imm	x10, (CTX_GLOBAL_EL3STATE_END - CTX_CPTR_EL3)
	mul	x9, x9, x10
	adrp	x10, per_world_context
	add	x10, x10, :lo12:per_world_context
	add	x9, x9, x10
	mov	\_reg, x9
.endm
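
/*
 * Equivalently, in C-like pseudocode (a sketch; the element size is
 * the per-world context size, CTX_GLOBAL_EL3STATE_END - CTX_CPTR_EL3,
 * and 'security_state' is the index produced by get_security_state):
 *
 *	_reg = (uintptr_t)per_world_context
 *		+ security_state * sizeof(per_world_context_t);
 */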

/* ------------------------------------------------------------------
 * This routine assumes that SP_EL3 points to a valid context
 * structure from which the GP regs and other special registers can
 * be retrieved.
 * ------------------------------------------------------------------
 */
func el3_exit
#if ENABLE_ASSERTIONS
	/* el3_exit assumes SP_EL0 on entry */
	mrs	x17, spsel
	cmp	x17, #MODE_SP_EL0
	ASM_ASSERT(eq)
#endif /* ENABLE_ASSERTIONS */

	/* ----------------------------------------------------------
	 * Save the current SP_EL0, i.e. the EL3 runtime stack, which
	 * will be used for handling the next SMC.
	 * Then switch to SP_EL3.
	 * ----------------------------------------------------------
	 */
	mov	x17, sp
	msr	spsel, #MODE_SP_ELX
	str	x17, [sp, #CTX_EL3STATE_OFFSET + CTX_RUNTIME_SP]

	/* ----------------------------------------------------------
	 * Restore CPTR_EL3.
	 * ZCR is only restored if SVE is supported and enabled.
	 * Synchronization is required before zcr_el3 is addressed.
	 * ----------------------------------------------------------
	 */

	/* The address of the per_world context is stored in x9 */
	get_per_world_context x9

	ldp	x19, x20, [x9, #CTX_CPTR_EL3]
	msr	cptr_el3, x19

#if IMAGE_BL31
	ands	x19, x19, #CPTR_EZ_BIT
	beq	sve_not_enabled

	isb
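	/*
	 * ZCR_EL3 is written via its generic S3_6_C1_C2_0 encoding,
	 * presumably so that assemblers without SVE support can still
	 * build this file.
	 */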
	msr	S3_6_C1_C2_0, x20 /* zcr_el3 */
sve_not_enabled:

	restore_mpam3_el3

#endif /* IMAGE_BL31 */

#if IMAGE_BL31 && DYNAMIC_WORKAROUND_CVE_2018_3639
	/* ----------------------------------------------------------
	 * Restore mitigation state as it was on entry to EL3
	 * ----------------------------------------------------------
	 */
	ldr	x17, [sp, #CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_DISABLE]
	cbz	x17, 1f
	blr	x17
1:
#endif /* IMAGE_BL31 && DYNAMIC_WORKAROUND_CVE_2018_3639 */

#if IMAGE_BL31
	synchronize_errors
#endif /* IMAGE_BL31 */

	/* ----------------------------------------------------------
	 * Restore SPSR_EL3, ELR_EL3 and SCR_EL3 prior to ERET
	 * ----------------------------------------------------------
	 */
	ldr	x18, [sp, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3]
	ldp	x16, x17, [sp, #CTX_EL3STATE_OFFSET + CTX_SPSR_EL3]
	msr	scr_el3, x18
	msr	spsr_el3, x16
	msr	elr_el3, x17

	restore_ptw_el1_sys_regs

	/* ----------------------------------------------------------
	 * Restore general purpose (including x30), PMCR_EL0 and
	 * ARMv8.3-PAuth registers.
	 * Exit EL3 via ERET to a lower exception level.
	 * ----------------------------------------------------------
	 */
	bl	restore_gp_pmcr_pauth_regs
	ldr	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]

#ifdef IMAGE_BL31
	/* Clear the EL3 flag as we are exiting EL3 */
	str	xzr, [sp, #CTX_EL3STATE_OFFSET + CTX_NESTED_EA_FLAG]
#endif /* IMAGE_BL31 */

	exception_return

endfunc el3_exit