/*
 * Copyright (c) 2017-2023, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <common/bl_common.h>
#include <cortex_a76.h>
#include <cpu_macros.S>
#include <plat_macros.S>
#include <services/arm_arch_svc.h>
#include "wa_cve_2022_23960_bhb.S"

/* Hardware handled coherency */
#if HW_ASSISTED_COHERENCY == 0
#error "Cortex-A76 must be compiled with HW_ASSISTED_COHERENCY enabled"
#endif
	.globl cortex_a76_reset_func
	.globl cortex_a76_core_pwr_dwn
	.globl cortex_a76_disable_wa_cve_2018_3639

/* 64-bit only core */
#if CTX_INCLUDE_AARCH32_REGS == 1
#error "Cortex-A76 supports only AArch64. Compile with CTX_INCLUDE_AARCH32_REGS=0"
#endif

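/*
 * ESR_EL3 values corresponding to an SMC #0 trapped from AArch64 (EC = 0x17)
 * and from AArch32 (EC = 0x13), with the IL bit set and a zero ISS.
 */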
#define ESR_EL3_A64_SMC0	0x5e000000
#define ESR_EL3_A32_SMC0	0x4e000000

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	/*
	 * This macro applies the mitigation for CVE-2018-3639.
	 * It implements a fast path where `SMCCC_ARCH_WORKAROUND_2`
	 * SMC calls from a lower EL running in AArch32 or AArch64
	 * will take the fast path and return early.
	 *
	 * The macro saves x2-x3 to the context. In the fast path
	 * the x0-x3 registers do not need to be restored as the calling
	 * context will have saved them. The macro also saves
	 * x29-x30 to the context in the sync_exception path.
	 */
	.macro apply_cve_2018_3639_wa _is_sync_exception _esr_el3_val
	stp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	.if \_is_sync_exception
	stp	x29, x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X29]
	mov_imm	w2, \_esr_el3_val
	bl	apply_cve_2018_3639_sync_wa
	ldp	x29, x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X29]
	.endif
	/*
	 * Always enable v4 mitigation during EL3 execution. This is not
	 * required for the fast path above because it does not perform any
	 * memory loads.
	 */
	mrs	x2, CORTEX_A76_CPUACTLR2_EL1
	orr	x2, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	msr	CORTEX_A76_CPUACTLR2_EL1, x2
	isb

	/*
	 * The caller may have passed arguments to EL3 via x2-x3.
	 * Restore these registers from the context before jumping to the
	 * main runtime vector table entry.
	 */
	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	.endm
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639 || WORKAROUND_CVE_2022_23960
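/*
 * Dedicated exception vectors used when the CVE mitigations are enabled.
 * Each entry applies the compiled-in workarounds and then branches to the
 * corresponding handler in the default runtime exception vectors.
 */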
vector_base cortex_a76_wa_cve_vbar

	/* ---------------------------------------------------------------------
	 * Current EL with SP_EL0: 0x0 - 0x200
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_sp_el0
	b	sync_exception_sp_el0
end_vector_entry cortex_a76_sync_exception_sp_el0

vector_entry cortex_a76_irq_sp_el0
	b	irq_sp_el0
end_vector_entry cortex_a76_irq_sp_el0

vector_entry cortex_a76_fiq_sp_el0
	b	fiq_sp_el0
end_vector_entry cortex_a76_fiq_sp_el0

vector_entry cortex_a76_serror_sp_el0
	b	serror_sp_el0
end_vector_entry cortex_a76_serror_sp_el0

	/* ---------------------------------------------------------------------
	 * Current EL with SP_ELx: 0x200 - 0x400
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_sp_elx
	b	sync_exception_sp_elx
end_vector_entry cortex_a76_sync_exception_sp_elx

vector_entry cortex_a76_irq_sp_elx
	b	irq_sp_elx
end_vector_entry cortex_a76_irq_sp_elx

vector_entry cortex_a76_fiq_sp_elx
	b	fiq_sp_elx
end_vector_entry cortex_a76_fiq_sp_elx

vector_entry cortex_a76_serror_sp_elx
	b	serror_sp_elx
end_vector_entry cortex_a76_serror_sp_elx

	/* ---------------------------------------------------------------------
	 * Lower EL using AArch64: 0x400 - 0x600
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_aarch64

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=1 _esr_el3_val=ESR_EL3_A64_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	sync_exception_aarch64
end_vector_entry cortex_a76_sync_exception_aarch64

vector_entry cortex_a76_irq_aarch64

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	irq_aarch64
end_vector_entry cortex_a76_irq_aarch64

vector_entry cortex_a76_fiq_aarch64

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	fiq_aarch64
end_vector_entry cortex_a76_fiq_aarch64

vector_entry cortex_a76_serror_aarch64

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	serror_aarch64
end_vector_entry cortex_a76_serror_aarch64

	/* ---------------------------------------------------------------------
	 * Lower EL using AArch32: 0x600 - 0x800
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_aarch32

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=1 _esr_el3_val=ESR_EL3_A32_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	sync_exception_aarch32
end_vector_entry cortex_a76_sync_exception_aarch32

vector_entry cortex_a76_irq_aarch32

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	irq_aarch32
end_vector_entry cortex_a76_irq_aarch32

vector_entry cortex_a76_fiq_aarch32

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	fiq_aarch32
end_vector_entry cortex_a76_fiq_aarch32

vector_entry cortex_a76_serror_aarch32

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	serror_aarch32
end_vector_entry cortex_a76_serror_aarch32
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 || WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	/*
	 * -----------------------------------------------------------------
	 * This function applies the mitigation for CVE-2018-3639
	 * specifically for sync exceptions. It implements a fast path
	 * where `SMCCC_ARCH_WORKAROUND_2` SMC calls from a lower EL
	 * running in AArch64 will take the fast path and return early.
	 *
	 * In the fast path the x0-x3 registers do not need to be restored
	 * as the calling context will have saved them.
	 *
	 * The caller must pass the esr_el3 value to compare against in x2.
	 * x2-x3 are saved to and restored from the context outside of this
	 * function, before jumping to the main runtime vector table entry.
	 *
	 * Shall clobber: x0-x3, x30
	 * -----------------------------------------------------------------
	 */
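	/*
	 * Illustrative lower-EL caller (an assumption of typical usage, not
	 * part of this file): a kernel toggles the mitigation for its own
	 * execution with
	 *     mov_imm	w0, 0x80007fff		// SMCCC_ARCH_WORKAROUND_2
	 *     mov	w1, wzr			// 0 = disable, non-zero = enable
	 *     smc	#0
	 * and that is the request the fast path below recognises and
	 * completes without entering the full runtime exception handler.
	 */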
func apply_cve_2018_3639_sync_wa
	/*
	 * Ensure the SMC came from A64/A32 state with immediate #0
	 * and with W0 = SMCCC_ARCH_WORKAROUND_2.
	 *
	 * This sequence evaluates as:
	 *    (W0==SMCCC_ARCH_WORKAROUND_2) ? (ESR_EL3==SMC#0) : (NE)
	 * allowing use of a single branch operation.
	 * X2 is populated outside this function with the expected ESR_EL3
	 * value for an SMC #0 from the lower EL.
	 */
	orr	w3, wzr, #SMCCC_ARCH_WORKAROUND_2
	cmp	x0, x3
	mrs	x3, esr_el3

	ccmp	w2, w3, #0, eq
	/*
	 * Static predictor will predict a fall-through, optimizing
	 * the `SMCCC_ARCH_WORKAROUND_2` fast path.
	 */
	bne	1f

	/*
	 * The sequence below implements the `SMCCC_ARCH_WORKAROUND_2`
	 * fast path.
	 */
	cmp	x1, xzr /* enable/disable check */

	/*
	 * When the calling context wants the mitigation disabled,
	 * we program the mitigation disable function in the
	 * CPU context, which gets invoked on subsequent exits from
	 * EL3 via the `el3_exit` function. Otherwise NULL is
	 * programmed in the CPU context, which results in the caller
	 * inheriting the EL3 mitigation state (enabled) on subsequent
	 * `el3_exit`.
	 */
	mov	x0, xzr
	adr	x1, cortex_a76_disable_wa_cve_2018_3639
	csel	x1, x1, x0, eq
	str	x1, [sp, #CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_DISABLE]

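	/*
	 * EQ here means x1 was zero, i.e. the caller asked for the mitigation
	 * to be disabled: clear the Disable-Load-Pass-Store bit in
	 * CPUACTLR2_EL1. Otherwise set it so the mitigation stays enabled.
	 */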
	mrs	x2, CORTEX_A76_CPUACTLR2_EL1
	orr	x1, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	bic	x3, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	csel	x3, x3, x1, eq
	msr	CORTEX_A76_CPUACTLR2_EL1, x3
	ldp	x29, x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X29]
	/*
	 * `SMCCC_ARCH_WORKAROUND_2` fast path return to lower EL.
	 */
	exception_return /* exception_return contains ISB */
1:
	ret
endfunc apply_cve_2018_3639_sync_wa
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

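/*
 * Erratum workarounds. Each workaround_reset_start/workaround_reset_end pair
 * registers a workaround that is applied during the reset sequence, and the
 * matching check_erratum_* macro reports the revisions it applies to.
 */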
workaround_reset_start cortex_a76, ERRATUM(1073348), ERRATA_A76_1073348
	sysreg_bit_set CORTEX_A76_CPUACTLR_EL1, CORTEX_A76_CPUACTLR_EL1_DISABLE_STATIC_PREDICTION
workaround_reset_end cortex_a76, ERRATUM(1073348)

check_erratum_ls cortex_a76, ERRATUM(1073348), CPU_REV(1, 0)

workaround_reset_start cortex_a76, ERRATUM(1130799), ERRATA_A76_1130799
	sysreg_bit_set CORTEX_A76_CPUACTLR2_EL1, CORTEX_A76_CPUACTLR2_EL1_BIT_59
	msr	CORTEX_A76_CPUACTLR2_EL1, x1
workaround_reset_end cortex_a76, ERRATUM(1130799)

check_erratum_ls cortex_a76, ERRATUM(1130799), CPU_REV(2, 0)

workaround_reset_start cortex_a76, ERRATUM(1220197), ERRATA_A76_1220197
	sysreg_bit_set CORTEX_A76_CPUECTLR_EL1, CORTEX_A76_CPUECTLR_EL1_WS_THR_L2
workaround_reset_end cortex_a76, ERRATUM(1220197)

check_erratum_ls cortex_a76, ERRATUM(1220197), CPU_REV(2, 0)

workaround_reset_start cortex_a76, ERRATUM(1257314), ERRATA_A76_1257314
	sysreg_bit_set CORTEX_A76_CPUACTLR3_EL1, CORTEX_A76_CPUACTLR3_EL1_BIT_10
workaround_reset_end cortex_a76, ERRATUM(1257314)

check_erratum_ls cortex_a76, ERRATUM(1257314), CPU_REV(3, 0)

workaround_reset_start cortex_a76, ERRATUM(1262606), ERRATA_A76_1262606
	sysreg_bit_set CORTEX_A76_CPUACTLR_EL1, CORTEX_A76_CPUACTLR_EL1_BIT_13
workaround_reset_end cortex_a76, ERRATUM(1262606)

check_erratum_ls cortex_a76, ERRATUM(1262606), CPU_REV(3, 0)

workaround_reset_start cortex_a76, ERRATUM(1262888), ERRATA_A76_1262888
	sysreg_bit_set CORTEX_A76_CPUECTLR_EL1, CORTEX_A76_CPUECTLR_EL1_BIT_51
workaround_reset_end cortex_a76, ERRATUM(1262888)

check_erratum_ls cortex_a76, ERRATUM(1262888), CPU_REV(3, 0)

workaround_reset_start cortex_a76, ERRATUM(1275112), ERRATA_A76_1275112
	sysreg_bit_set CORTEX_A76_CPUACTLR_EL1, CORTEX_A76_CPUACTLR_EL1_BIT_13
workaround_reset_end cortex_a76, ERRATUM(1275112)

check_erratum_ls cortex_a76, ERRATUM(1275112), CPU_REV(3, 0)

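/* --------------------------------------------------------------
 * Erratum 1286807 applies only to revisions <= r3p0 of Cortex-A76.
 * Due to the nature of the erratum it is applied unconditionally
 * when built in, so it is reported as applicable in that case.
 * --------------------------------------------------------------
 */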
check_erratum_custom_start cortex_a76, ERRATUM(1286807)
#if ERRATA_A76_1286807
	mov	x0, #ERRATA_APPLIES
	ret
#else
	mov	x1, #0x30
	b	cpu_rev_var_ls
#endif
check_erratum_custom_end cortex_a76, ERRATUM(1286807)

workaround_reset_start cortex_a76, ERRATUM(1791580), ERRATA_A76_1791580
	sysreg_bit_set CORTEX_A76_CPUACTLR2_EL1, CORTEX_A76_CPUACTLR2_EL1_BIT_2
workaround_reset_end cortex_a76, ERRATUM(1791580)

check_erratum_ls cortex_a76, ERRATUM(1791580), CPU_REV(4, 0)

workaround_reset_start cortex_a76, ERRATUM(1868343), ERRATA_A76_1868343
	sysreg_bit_set CORTEX_A76_CPUACTLR_EL1, CORTEX_A76_CPUACTLR_EL1_BIT_13
workaround_reset_end cortex_a76, ERRATUM(1868343)

check_erratum_ls cortex_a76, ERRATUM(1868343), CPU_REV(4, 0)

workaround_reset_start cortex_a76, ERRATUM(1946160), ERRATA_A76_1946160
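	/*
	 * Apply the instruction patching sequence: S3_6_C15_C8_0 selects a
	 * patch slot (3, 4 and 5 below) and the following writes program the
	 * patch opcode, mask and control values from the erratum notice
	 * (these are the implementation-defined CPUPSELR_EL3, CPUPOR_EL3,
	 * CPUPMR_EL3 and CPUPCR_EL3 registers).
	 */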
	mov	x0, #3
	msr	S3_6_C15_C8_0, x0
	ldr	x0, =0x10E3900002
	msr	S3_6_C15_C8_2, x0
	ldr	x0, =0x10FFF00083
	msr	S3_6_C15_C8_3, x0
	ldr	x0, =0x2001003FF
	msr	S3_6_C15_C8_1, x0

	mov	x0, #4
	msr	S3_6_C15_C8_0, x0
	ldr	x0, =0x10E3800082
	msr	S3_6_C15_C8_2, x0
	ldr	x0, =0x10FFF00083
	msr	S3_6_C15_C8_3, x0
	ldr	x0, =0x2001003FF
	msr	S3_6_C15_C8_1, x0

	mov	x0, #5
	msr	S3_6_C15_C8_0, x0
	ldr	x0, =0x10E3800200
	msr	S3_6_C15_C8_2, x0
	ldr	x0, =0x10FFF003E0
	msr	S3_6_C15_C8_3, x0
	ldr	x0, =0x2001003FF
	msr	S3_6_C15_C8_1, x0
workaround_reset_end cortex_a76, ERRATUM(1946160)

check_erratum_range cortex_a76, ERRATUM(1946160), CPU_REV(3, 0), CPU_REV(4, 1)

workaround_runtime_start cortex_a76, ERRATUM(2743102), ERRATA_A76_2743102
	/* dsb before isb of power down sequence */
	dsb	sy
workaround_runtime_end cortex_a76, ERRATUM(2743102)

check_erratum_ls cortex_a76, ERRATUM(2743102), CPU_REV(4, 1)

check_erratum_chosen cortex_a76, CVE(2018, 3639), WORKAROUND_CVE_2018_3639

func cortex_a76_disable_wa_cve_2018_3639
	sysreg_bit_clear CORTEX_A76_CPUACTLR2_EL1, CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	isb
	ret
endfunc cortex_a76_disable_wa_cve_2018_3639

/* --------------------------------------------------------------
 * Erratum workaround for Cortex-A76 erratum #1165522.
 * This applies only to revisions <= r3p0 of Cortex-A76.
 * Due to the nature of the erratum it is applied unconditionally
 * when built in, so report it as applicable in that case.
 * --------------------------------------------------------------
 */
check_erratum_custom_start cortex_a76, ERRATUM(1165522)
#if ERRATA_A76_1165522
	mov	x0, #ERRATA_APPLIES
	ret
#else
	mov	x1, #0x30
	b	cpu_rev_var_ls
#endif
check_erratum_custom_end cortex_a76, ERRATUM(1165522)

check_erratum_chosen cortex_a76, CVE(2022, 23960), WORKAROUND_CVE_2022_23960

/* This erratum has no workaround in the CPU; generic code must take care of it. */
add_erratum_entry cortex_a76, CVE(2022, 23960), WORKAROUND_CVE_2022_23960, NO_APPLY_AT_RESET

/* ERRATA_DSU_798953:
 * The erratum is defined in dsu_helpers.S but applies to cortex_a76
 * as well. Hence, create symbolic names for the already existing erratum
 * workaround functions to get them registered under the Errata Framework.
 */
.equ check_erratum_cortex_a76_798953, check_errata_dsu_798953
.equ erratum_cortex_a76_798953_wa, errata_dsu_798953_wa
add_erratum_entry cortex_a76, ERRATUM(798953), ERRATA_DSU_798953, APPLY_AT_RESET

/* ERRATA_DSU_936184:
 * The erratum is defined in dsu_helpers.S but applies to cortex_a76
 * as well. Hence, create symbolic names for the already existing erratum
 * workaround functions to get them registered under the Errata Framework.
 */
.equ check_erratum_cortex_a76_936184, check_errata_dsu_936184
.equ erratum_cortex_a76_936184_wa, errata_dsu_936184_wa
add_erratum_entry cortex_a76, ERRATUM(936184), ERRATA_DSU_936184, APPLY_AT_RESET

cpu_reset_func_start cortex_a76

#if WORKAROUND_CVE_2018_3639
	/* If the PE implements SSBS, we don't need the dynamic workaround */
	mrs	x0, id_aa64pfr1_el1
	lsr	x0, x0, #ID_AA64PFR1_EL1_SSBS_SHIFT
	and	x0, x0, #ID_AA64PFR1_EL1_SSBS_MASK
#if !DYNAMIC_WORKAROUND_CVE_2018_3639 && ENABLE_ASSERTIONS
	cmp	x0, 0
	ASM_ASSERT(ne)
#endif
#if DYNAMIC_WORKAROUND_CVE_2018_3639
	cbnz	x0, 1f
	sysreg_bit_set CORTEX_A76_CPUACTLR2_EL1, CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	isb

#ifdef IMAGE_BL31
	/*
	 * The Cortex-A76 generic vectors are overwritten to use the vectors
	 * defined above. This is required in order to apply the mitigation
	 * for CVE-2018-3639 on exception entry from lower ELs.
	 * If this vector table is used, skip overriding it again for
	 * CVE-2022-23960, as both use the same vbar.
	 */
	override_vector_table cortex_a76_wa_cve_vbar
	isb
	b	2f
#endif /* IMAGE_BL31 */

1:
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */
#endif /* WORKAROUND_CVE_2018_3639 */

#if IMAGE_BL31 && WORKAROUND_CVE_2022_23960
	/*
	 * The Cortex-A76 generic vectors are overridden to apply the errata
	 * mitigation on exception entry from lower ELs. This will be bypassed
	 * if DYNAMIC_WORKAROUND_CVE_2018_3639 has already overridden the
	 * vectors.
	 */
	override_vector_table cortex_a76_wa_cve_vbar
	isb
#endif /* IMAGE_BL31 && WORKAROUND_CVE_2022_23960 */
2:
cpu_reset_func_end cortex_a76

	/* ---------------------------------------------
	 * HW will do the cache maintenance while powering down
	 * ---------------------------------------------
	 */
func cortex_a76_core_pwr_dwn
	/* ---------------------------------------------
	 * Enable CPU power down bit in power control register
	 * ---------------------------------------------
	 */
	sysreg_bit_set CORTEX_A76_CPUPWRCTLR_EL1, CORTEX_A76_CORE_PWRDN_EN_MASK

	apply_erratum cortex_a76, ERRATUM(2743102), ERRATA_A76_2743102

	isb
	ret
endfunc cortex_a76_core_pwr_dwn

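/* Generate the errata reporting function from the errata entries registered above. */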
errata_report_shim cortex_a76

	/* ---------------------------------------------
	 * This function provides cortex_a76 specific
	 * register information for crash reporting.
	 * It needs to return with x6 pointing to
	 * a list of register names in ASCII and
	 * x8 - x15 having the values of the registers
	 * to be reported.
	 * ---------------------------------------------
	 */
.section .rodata.cortex_a76_regs, "aS"
cortex_a76_regs:  /* The ASCII list of register names to be reported */
	.asciz	"cpuectlr_el1", ""

func cortex_a76_cpu_reg_dump
	adr	x6, cortex_a76_regs
	mrs	x8, CORTEX_A76_CPUECTLR_EL1
	ret
endfunc cortex_a76_cpu_reg_dump

declare_cpu_ops_wa cortex_a76, CORTEX_A76_MIDR, \
	cortex_a76_reset_func, \
	CPU_NO_EXTRA1_FUNC, \
	cortex_a76_disable_wa_cve_2018_3639, \
	CPU_NO_EXTRA3_FUNC, \
	cortex_a76_core_pwr_dwn