/*
 * Copyright (c) 2017-2023, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <assert_macros.S>
#include <cortex_a17.h>
#include <cpu_macros.S>

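	/* ----------------------------------------------------
	 * When assertions are enabled, check that the SCTLR.C
	 * bit is clear, i.e. that the data cache has already
	 * been disabled on entry to the power-down handlers
	 * below.
	 * ----------------------------------------------------
	 */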
	.macro assert_cache_enabled
#if ENABLE_ASSERTIONS
		ldcopr	r0, SCTLR
		tst	r0, #SCTLR_C_BIT
		ASM_ASSERT(eq)
#endif
	.endm

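	/* ----------------------------------------------------
	 * Clear the ACTLR.SMP bit to take this core out of the
	 * cluster coherency domain.
	 * ----------------------------------------------------
	 */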
func cortex_a17_disable_smp
	ldcopr	r0, ACTLR
	bic	r0, #CORTEX_A17_ACTLR_SMP_BIT
	stcopr	r0, ACTLR
	isb
	dsb	sy
	bx	lr
endfunc cortex_a17_disable_smp

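	/* ----------------------------------------------------
	 * Set the ACTLR.SMP bit so that this core participates
	 * in cluster coherency.
	 * ----------------------------------------------------
	 */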
func cortex_a17_enable_smp
	ldcopr	r0, ACTLR
	orr	r0, #CORTEX_A17_ACTLR_SMP_BIT
	stcopr	r0, ACTLR
	isb
	bx	lr
endfunc cortex_a17_enable_smp

	/* ----------------------------------------------------
	 * Errata workaround for Cortex-A17 erratum 852421.
	 * This applies only to revisions <= r1p2 of Cortex-A17.
	 * Inputs:
	 * r0: variant[7:4] and revision[3:0] of the current CPU.
	 * Shall clobber: r0-r3
	 * ----------------------------------------------------
	 */
func errata_a17_852421_wa
	/*
	 * Compare r0 against revision r1p2
	 */
	mov	r2, lr
	bl	check_errata_852421
	cmp	r0, #ERRATA_NOT_APPLIES
	beq	1f
	ldcopr	r0, CORTEX_A17_IMP_DEF_REG1
	orr	r0, r0, #(1<<24)
	stcopr	r0, CORTEX_A17_IMP_DEF_REG1
1:
	bx	r2
endfunc errata_a17_852421_wa

func check_errata_852421
	mov	r1, #0x12
	b	cpu_rev_var_ls
endfunc check_errata_852421

add_erratum_entry cortex_a17, ERRATUM(852421), ERRATA_A17_852421

	/* ----------------------------------------------------
	 * Errata workaround for Cortex-A17 erratum 852423.
	 * This applies only to revisions <= r1p2 of Cortex-A17.
	 * Inputs:
	 * r0: variant[7:4] and revision[3:0] of the current CPU.
	 * Shall clobber: r0-r3
	 * ----------------------------------------------------
	 */
func errata_a17_852423_wa
	/*
	 * Compare r0 against revision r1p2
	 */
	mov	r2, lr
	bl	check_errata_852423
	cmp	r0, #ERRATA_NOT_APPLIES
	beq	1f
	ldcopr	r0, CORTEX_A17_IMP_DEF_REG1
	orr	r0, r0, #(1<<12)
	stcopr	r0, CORTEX_A17_IMP_DEF_REG1
1:
	bx	r2
endfunc errata_a17_852423_wa

func check_errata_852423
	mov	r1, #0x12
	b	cpu_rev_var_ls
endfunc check_errata_852423

add_erratum_entry cortex_a17, ERRATUM(852423), ERRATA_A17_852423

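	/* ----------------------------------------------------
	 * Report whether the CVE-2017-5715 (Spectre variant 2)
	 * mitigation is compiled into this build.
	 * ----------------------------------------------------
	 */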
func check_errata_cve_2017_5715
#if WORKAROUND_CVE_2017_5715
	mov	r0, #ERRATA_APPLIES
#else
	mov	r0, #ERRATA_MISSING
#endif
	bx	lr
endfunc check_errata_cve_2017_5715

add_erratum_entry cortex_a17, CVE(2017, 5715), WORKAROUND_CVE_2017_5715

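	/* ----------------------------------------------------
	 * Generate the errata status reporting function for
	 * Cortex-A17.
	 * ----------------------------------------------------
	 */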
errata_report_shim cortex_a17

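	/* ----------------------------------------------------
	 * Reset handler: apply the enabled erratum workarounds,
	 * install the CVE-2017-5715 (BPIALL) vector table when
	 * built into BL32, then set the ACTLR.SMP bit to enter
	 * coherency.
	 * ----------------------------------------------------
	 */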
func cortex_a17_reset_func
	mov	r5, lr
	bl	cpu_get_rev_var
	mov	r4, r0

#if ERRATA_A17_852421
	mov	r0, r4
	bl	errata_a17_852421_wa
#endif

#if ERRATA_A17_852423
	mov	r0, r4
	bl	errata_a17_852423_wa
#endif

#if IMAGE_BL32 && WORKAROUND_CVE_2017_5715
	ldr	r0, =wa_cve_2017_5715_bpiall_vbar
	stcopr	r0, VBAR
	stcopr	r0, MVBAR
	/* isb will be applied in the course of the reset func */
#endif

	mov	lr, r5
	b	cortex_a17_enable_smp
endfunc cortex_a17_reset_func

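	/* ----------------------------------------------------
	 * Per-core power-down: clean and invalidate the L1
	 * data cache, then exit cluster coherency.
	 * ----------------------------------------------------
	 */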
func cortex_a17_core_pwr_dwn
	push	{r12, lr}

	assert_cache_enabled

	/* Flush L1 cache */
	mov	r0, #DC_OP_CISW
	bl	dcsw_op_level1

	/* Exit cluster coherency */
	pop	{r12, lr}
	b	cortex_a17_disable_smp
endfunc cortex_a17_core_pwr_dwn

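	/* ----------------------------------------------------
	 * Cluster power-down: clean and invalidate the L1 data
	 * cache, disable the ACP via the platform hook, clean
	 * and invalidate the L2 cache, then exit cluster
	 * coherency.
	 * ----------------------------------------------------
	 */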
func cortex_a17_cluster_pwr_dwn
	push	{r12, lr}

	assert_cache_enabled

	/* Flush L1 caches */
	mov	r0, #DC_OP_CISW
	bl	dcsw_op_level1

	bl	plat_disable_acp

	/* Flush L2 caches */
	mov	r0, #DC_OP_CISW
	bl	dcsw_op_level2

	/* Exit cluster coherency */
	pop	{r12, lr}
	b	cortex_a17_disable_smp
endfunc cortex_a17_cluster_pwr_dwn

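	/* ----------------------------------------------------
	 * Register the Cortex-A17 cpu_ops: the MIDR used for
	 * matching, the reset handler and the core and cluster
	 * power-down handlers.
	 * ----------------------------------------------------
	 */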
declare_cpu_ops cortex_a17, CORTEX_A17_MIDR, \
	cortex_a17_reset_func, \
	cortex_a17_core_pwr_dwn, \
	cortex_a17_cluster_pwr_dwn