xref: /arm-trusted-firmware/lib/cpus/aarch64/cortex_a72.S (revision 91f16700b400a8c0651d24a598fc48ee2997a0d7)
1*91f16700Schasinglulu/*
2*91f16700Schasinglulu * Copyright (c) 2015-2023, Arm Limited and Contributors. All rights reserved.
3*91f16700Schasinglulu *
4*91f16700Schasinglulu * SPDX-License-Identifier: BSD-3-Clause
5*91f16700Schasinglulu */
6*91f16700Schasinglulu#include <arch.h>
7*91f16700Schasinglulu#include <asm_macros.S>
8*91f16700Schasinglulu#include <assert_macros.S>
9*91f16700Schasinglulu#include <cortex_a72.h>
10*91f16700Schasinglulu#include <cpu_macros.S>
11*91f16700Schasinglulu#include <plat_macros.S>
12*91f16700Schasinglulu#include "wa_cve_2022_23960_bhb_vector.S"
13*91f16700Schasinglulu
14*91f16700Schasinglulu#if WORKAROUND_CVE_2022_23960
15*91f16700Schasinglulu	wa_cve_2022_23960_bhb_vector_table CORTEX_A72_BHB_LOOP_COUNT, cortex_a72
16*91f16700Schasinglulu#endif /* WORKAROUND_CVE_2022_23960 */
17*91f16700Schasinglulu
	/* ---------------------------------------------
	 * Disable L1 data cache and unified L2 cache.
	 * Clears SCTLR_EL3.C so no further cacheable
	 * allocations are made at EL3 before the caches
	 * are cleaned/invalidated on power down.
	 * Clobbers: x1
	 * ---------------------------------------------
	 */
func cortex_a72_disable_dcache
	mrs	x1, sctlr_el3
	bic	x1, x1, #SCTLR_C_BIT		/* clear the EL3 cache-enable bit */
	msr	sctlr_el3, x1
	isb					/* make the disable visible before returning */
	ret
endfunc cortex_a72_disable_dcache
29*91f16700Schasinglulu
	/* ---------------------------------------------
	 * Disable all types of L2 prefetches.
	 * Sets the table-walk descriptor access
	 * prefetch disable bit and clears both the L2
	 * instruction and data prefetch distance fields
	 * in CPUECTLR_EL1.
	 * Clobbers: x0, x1
	 * ---------------------------------------------
	 */
func cortex_a72_disable_l2_prefetch
	mrs	x0, CORTEX_A72_ECTLR_EL1
	orr	x0, x0, #CORTEX_A72_ECTLR_DIS_TWD_ACC_PFTCH_BIT
	mov	x1, #CORTEX_A72_ECTLR_L2_IPFTCH_DIST_MASK
	orr	x1, x1, #CORTEX_A72_ECTLR_L2_DPFTCH_DIST_MASK
	bic	x0, x0, x1			/* zero both prefetch distance fields */
	msr	CORTEX_A72_ECTLR_EL1, x0
	isb
	ret
endfunc cortex_a72_disable_l2_prefetch
44*91f16700Schasinglulu
	/* ---------------------------------------------
	 * Disable the load-store hardware prefetcher
	 * by setting the disable bit in CPUACTLR_EL1.
	 * NOTE(review): sysreg_bit_set uses a scratch
	 * register internally (see cpu_macros.S) —
	 * confirm clobbers if callers care.
	 * ---------------------------------------------
	 */
func cortex_a72_disable_hw_prefetcher
	sysreg_bit_set CORTEX_A72_CPUACTLR_EL1, CORTEX_A72_CPUACTLR_EL1_DISABLE_L1_DCACHE_HW_PFTCH
	isb
	dsb	ish				/* drain outstanding memory transactions */
	ret
endfunc cortex_a72_disable_hw_prefetcher
55*91f16700Schasinglulu
	/* ---------------------------------------------
	 * Disable intra-cluster coherency by clearing
	 * the SMP bit in CPUECTLR_EL1, taking this core
	 * out of coherency before power down.
	 * ---------------------------------------------
	 */
func cortex_a72_disable_smp
	sysreg_bit_clear CORTEX_A72_ECTLR_EL1, CORTEX_A72_ECTLR_SMP_BIT
	ret
endfunc cortex_a72_disable_smp
64*91f16700Schasinglulu
	/* ---------------------------------------------
	 * Disable debug interfaces: set the OS Double
	 * Lock (OSDLR_EL1) so the external debug
	 * interface is quiescent before the core is
	 * powered off.
	 * Clobbers: x0
	 * ---------------------------------------------
	 */
func cortex_a72_disable_ext_debug
	mov	x0, #1
	msr	osdlr_el1, x0			/* engage the OS Double Lock */
	isb
	dsb	sy				/* ensure completion system-wide */
	ret
endfunc cortex_a72_disable_ext_debug
76*91f16700Schasinglulu
77*91f16700Schasinglulufunc check_smccc_arch_workaround_3
78*91f16700Schasinglulu	cpu_check_csv2	x0, 1f
79*91f16700Schasinglulu	mov	x0, #ERRATA_APPLIES
80*91f16700Schasinglulu	ret
81*91f16700Schasinglulu1:
82*91f16700Schasinglulu	mov	x0, #ERRATA_NOT_APPLIES
83*91f16700Schasinglulu	ret
84*91f16700Schasingluluendfunc check_smccc_arch_workaround_3
85*91f16700Schasinglulu
/*
 * Erratum 859971: worked around at reset by disabling instruction
 * prefetch via CPUACTLR_EL1 when ERRATA_A72_859971 is enabled.
 */
workaround_reset_start cortex_a72, ERRATUM(859971), ERRATA_A72_859971
	sysreg_bit_set CORTEX_A72_CPUACTLR_EL1, CORTEX_A72_CPUACTLR_EL1_DIS_INSTR_PREFETCH
workaround_reset_end cortex_a72, ERRATUM(859971)

/* Erratum 859971 applies to revisions <= r0p3 */
check_erratum_ls cortex_a72, ERRATUM(859971), CPU_REV(0, 3)
91*91f16700Schasinglulu
/*
 * Erratum 1319367: no reset-time workaround here; the fix lives in
 * shared code paths, so only report/bookkeeping entries are emitted.
 */
/* Due to the nature of the errata it is applied unconditionally when chosen */
check_erratum_chosen cortex_a72, ERRATUM(1319367), ERRATA_A72_1319367
/* erratum workaround is interleaved with generic code */
add_erratum_entry cortex_a72, ERRATUM(1319367), ERRATA_A72_1319367, NO_APPLY_AT_RESET
96*91f16700Schasinglulu
/*
 * CVE-2017-5715 (Spectre v2): in BL31, install the mitigation
 * exception vectors (wa_cve_2017_5715_mmu_vbar) at reset.
 */
workaround_reset_start cortex_a72, CVE(2017, 5715), WORKAROUND_CVE_2017_5715
#if IMAGE_BL31
	override_vector_table wa_cve_2017_5715_mmu_vbar
#endif
workaround_reset_end cortex_a72, CVE(2017, 5715)

/*
 * A core with CSV2 implemented is not affected; otherwise report
 * APPLIES or MISSING depending on whether the workaround was built in.
 */
check_erratum_custom_start cortex_a72, CVE(2017, 5715)
	cpu_check_csv2	x0, 1f
#if WORKAROUND_CVE_2017_5715
	mov	x0, #ERRATA_APPLIES
#else
	mov	x0, #ERRATA_MISSING
#endif
	ret
1:
	mov	x0, #ERRATA_NOT_APPLIES		/* CSV2 implemented: not vulnerable */
	ret
check_erratum_custom_end cortex_a72, CVE(2017, 5715)
115*91f16700Schasinglulu
/*
 * CVE-2018-3639 (speculative store bypass): statically disable
 * load-pass-store speculation via CPUACTLR_EL1 at reset.
 */
workaround_reset_start cortex_a72, CVE(2018, 3639), WORKAROUND_CVE_2018_3639
	sysreg_bit_set CORTEX_A72_CPUACTLR_EL1, CORTEX_A72_CPUACTLR_EL1_DIS_LOAD_PASS_STORE
	isb
	dsb	sy
workaround_reset_end cortex_a72, CVE(2018, 3639)
/* Applied whenever the workaround is compiled in */
check_erratum_chosen cortex_a72, CVE(2018, 3639), WORKAROUND_CVE_2018_3639
122*91f16700Schasinglulu
/*
 * CVE-2022-23960 (Spectre-BHB): install the BHB-loop mitigation
 * vectors (instantiated at the top of this file via
 * wa_cve_2022_23960_bhb_vector_table) unless VBAR_EL3 already points
 * at them.
 */
workaround_reset_start cortex_a72, CVE(2022, 23960), WORKAROUND_CVE_2022_23960
#if IMAGE_BL31
	/* Skip installing vector table again if already done for CVE(2017, 5715) */
	/*
	 * The Cortex-A72 generic vectors are overridden to apply the
	 * mitigation on exception entry from lower ELs for revisions >= r1p0
	 * which has CSV2 implemented.
	 */
	adr	x0, wa_cve_vbar_cortex_a72
	mrs	x1, vbar_el3
	cmp	x0, x1				/* already the active vector table? */
	b.eq	1f
	msr	vbar_el3, x0			/* install the mitigation vectors */
1:
#endif /* IMAGE_BL31 */
workaround_reset_end cortex_a72, CVE(2022, 23960)
139*91f16700Schasinglulu
/*
 * CVE-2022-23960 status: the CPU is considered mitigated when either
 * the CVE-2017-5715 vectors (non-CSV2 parts) or the BHB workaround
 * (CSV2 parts) is compiled in; otherwise report MISSING.
 */
check_erratum_custom_start cortex_a72, CVE(2022, 23960)
#if WORKAROUND_CVE_2017_5715 || WORKAROUND_CVE_2022_23960
	cpu_check_csv2	x0, 1f
	mov	x0, #ERRATA_APPLIES		/* no CSV2: covered by built-in workaround(s) */
	ret
1:
#if WORKAROUND_CVE_2022_23960
	mov	x0, #ERRATA_APPLIES		/* CSV2 part: BHB workaround built in */
#else
	mov	x0, #ERRATA_MISSING
#endif /* WORKAROUND_CVE_2022_23960 */
	ret
#endif /* WORKAROUND_CVE_2017_5715 || WORKAROUND_CVE_2022_23960 */
	mov	x0, #ERRATA_MISSING		/* neither workaround compiled in */
	ret
check_erratum_custom_end cortex_a72, CVE(2022, 23960)
156*91f16700Schasinglulu
/*
 * Cortex-A72 reset handler. Erratum/CVE reset workarounds declared
 * above are applied by the cpu_reset_func_* macro framework; the body
 * only needs the CPU-specific setup.
 */
cpu_reset_func_start cortex_a72

	/* ---------------------------------------------
	 * Enable the SMP bit so this core takes part
	 * in intra-cluster coherency.
	 * ---------------------------------------------
	 */
	sysreg_bit_set CORTEX_A72_ECTLR_EL1, CORTEX_A72_ECTLR_SMP_BIT

cpu_reset_func_end cortex_a72
166*91f16700Schasinglulu
	/* ----------------------------------------------------
	 * The CPU Ops core power down function for Cortex-A72.
	 * Sequence: disable caches and prefetchers, flush L1,
	 * exit coherency, then quiesce debug. The return
	 * address is parked in x18 across the helper calls
	 * (none of the local helpers touch x18).
	 * ----------------------------------------------------
	 */
func cortex_a72_core_pwr_dwn
	mov	x18, x30			/* save LR; helpers clobber x30 via bl */

	/* ---------------------------------------------
	 * Turn off caches.
	 * ---------------------------------------------
	 */
	bl	cortex_a72_disable_dcache

	/* ---------------------------------------------
	 * Disable the L2 prefetches.
	 * ---------------------------------------------
	 */
	bl	cortex_a72_disable_l2_prefetch

	/* ---------------------------------------------
	 * Disable the load-store hardware prefetcher.
	 * ---------------------------------------------
	 */
	bl	cortex_a72_disable_hw_prefetcher

	/* ---------------------------------------------
	 * Flush L1 caches (clean+invalidate by set/way).
	 * ---------------------------------------------
	 */
	mov	x0, #DCCISW
	bl	dcsw_op_level1

	/* ---------------------------------------------
	 * Come out of intra cluster coherency
	 * ---------------------------------------------
	 */
	bl	cortex_a72_disable_smp

	/* ---------------------------------------------
	 * Force the debug interfaces to be quiescent.
	 * Tail-call: restore LR and let
	 * cortex_a72_disable_ext_debug return to caller.
	 * ---------------------------------------------
	 */
	mov	x30, x18
	b	cortex_a72_disable_ext_debug
endfunc cortex_a72_core_pwr_dwn
212*91f16700Schasinglulu
	/* -------------------------------------------------------
	 * The CPU Ops cluster power down function for Cortex-A72.
	 * Same as the core power down sequence, plus disabling
	 * the optional ACP and flushing the shared L2. The return
	 * address is parked in x18 across the helper calls.
	 * -------------------------------------------------------
	 */
func cortex_a72_cluster_pwr_dwn
	mov	x18, x30			/* save LR; helpers clobber x30 via bl */

	/* ---------------------------------------------
	 * Turn off caches.
	 * ---------------------------------------------
	 */
	bl	cortex_a72_disable_dcache

	/* ---------------------------------------------
	 * Disable the L2 prefetches.
	 * ---------------------------------------------
	 */
	bl	cortex_a72_disable_l2_prefetch

	/* ---------------------------------------------
	 * Disable the load-store hardware prefetcher.
	 * ---------------------------------------------
	 */
	bl	cortex_a72_disable_hw_prefetcher

#if !SKIP_A72_L1_FLUSH_PWR_DWN
	/* ---------------------------------------------
	 * Flush L1 caches (clean+invalidate by set/way).
	 * Platforms may build with
	 * SKIP_A72_L1_FLUSH_PWR_DWN to omit this step.
	 * ---------------------------------------------
	 */
	mov	x0, #DCCISW
	bl	dcsw_op_level1
#endif

	/* ---------------------------------------------
	 * Disable the optional ACP (platform hook).
	 * ---------------------------------------------
	 */
	bl	plat_disable_acp

	/* -------------------------------------------------
	 * Flush the L2 caches (clean+invalidate by set/way).
	 * -------------------------------------------------
	 */
	mov	x0, #DCCISW
	bl	dcsw_op_level2

	/* ---------------------------------------------
	 * Come out of intra cluster coherency
	 * ---------------------------------------------
	 */
	bl	cortex_a72_disable_smp

	/* ---------------------------------------------
	 * Force the debug interfaces to be quiescent.
	 * Tail-call: restore LR and let
	 * cortex_a72_disable_ext_debug return to caller.
	 * ---------------------------------------------
	 */
	mov	x30, x18
	b	cortex_a72_disable_ext_debug
endfunc cortex_a72_cluster_pwr_dwn
273*91f16700Schasinglulu
274*91f16700Schasingluluerrata_report_shim cortex_a72
275*91f16700Schasinglulu
	/* ---------------------------------------------
	 * This function provides cortex_a72 specific
	 * register information for crash reporting.
	 * It needs to return with x6 pointing to
	 * a list of register names in ascii and
	 * x8 - x15 having values of registers to be
	 * reported.
	 * ---------------------------------------------
	 */
.section .rodata.cortex_a72_regs, "aS"
cortex_a72_regs:  /* The ascii list of register names to be reported */
	/* Empty string terminates the list */
	.asciz	"cpuectlr_el1", "cpumerrsr_el1", "l2merrsr_el1", ""

func cortex_a72_cpu_reg_dump
	adr	x6, cortex_a72_regs		/* x6 = name list for the crash reporter */
	mrs	x8, CORTEX_A72_ECTLR_EL1	/* cpuectlr_el1 */
	mrs	x9, CORTEX_A72_MERRSR_EL1	/* cpumerrsr_el1 */
	mrs	x10, CORTEX_A72_L2MERRSR_EL1	/* l2merrsr_el1 */
	ret
endfunc cortex_a72_cpu_reg_dump
296*91f16700Schasinglulu
/*
 * Register the Cortex-A72 cpu_ops, including the SMCCC workaround
 * query hooks: extra1 = CVE-2017-5715 check (name generated by the
 * check_erratum_custom_* macros above), extra2 = none,
 * extra3 = SMCCC_ARCH_WORKAROUND_3 check.
 */
declare_cpu_ops_wa cortex_a72, CORTEX_A72_MIDR, \
	cortex_a72_reset_func, \
	check_erratum_cortex_a72_5715, \
	CPU_NO_EXTRA2_FUNC, \
	check_smccc_arch_workaround_3, \
	cortex_a72_core_pwr_dwn, \
	cortex_a72_cluster_pwr_dwn
304