xref: /arm-trusted-firmware/lib/cpus/aarch32/cortex_a9.S (revision 91f16700b400a8c0651d24a598fc48ee2997a0d7)
/*
 * Copyright (c) 2016-2023, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

7*91f16700Schasinglulu#include <arch.h>
8*91f16700Schasinglulu#include <asm_macros.S>
9*91f16700Schasinglulu#include <assert_macros.S>
10*91f16700Schasinglulu#include <cortex_a9.h>
11*91f16700Schasinglulu#include <cpu_macros.S>
12*91f16700Schasinglulu
13*91f16700Schasinglulu	.macro assert_cache_enabled
14*91f16700Schasinglulu#if ENABLE_ASSERTIONS
15*91f16700Schasinglulu		ldcopr	r0, SCTLR
16*91f16700Schasinglulu		tst	r0, #SCTLR_C_BIT
17*91f16700Schasinglulu		ASM_ASSERT(eq)
18*91f16700Schasinglulu#endif
19*91f16700Schasinglulu	.endm
20*91f16700Schasinglulu
21*91f16700Schasinglulufunc cortex_a9_disable_smp
22*91f16700Schasinglulu	ldcopr	r0, ACTLR
23*91f16700Schasinglulu	bic	r0, #CORTEX_A9_ACTLR_SMP_BIT
24*91f16700Schasinglulu	stcopr	r0, ACTLR
25*91f16700Schasinglulu	isb
26*91f16700Schasinglulu	dsb	sy
27*91f16700Schasinglulu	bx	lr
28*91f16700Schasingluluendfunc cortex_a9_disable_smp
29*91f16700Schasinglulu
30*91f16700Schasinglulufunc cortex_a9_enable_smp
31*91f16700Schasinglulu	ldcopr	r0, ACTLR
32*91f16700Schasinglulu	orr	r0, #CORTEX_A9_ACTLR_SMP_BIT
33*91f16700Schasinglulu	stcopr	r0, ACTLR
34*91f16700Schasinglulu	isb
35*91f16700Schasinglulu	bx	lr
36*91f16700Schasingluluendfunc cortex_a9_enable_smp
37*91f16700Schasinglulu
38*91f16700Schasinglulufunc check_errata_794073
39*91f16700Schasinglulu#if ERRATA_A9_794073
40*91f16700Schasinglulu	mov	r0, #ERRATA_APPLIES
41*91f16700Schasinglulu#else
42*91f16700Schasinglulu	mov	r0, #ERRATA_MISSING
43*91f16700Schasinglulu#endif
44*91f16700Schasinglulu	bx	lr
45*91f16700Schasingluluendfunc check_errata_794073
46*91f16700Schasinglulu
47*91f16700Schasingluluadd_erratum_entry cortex_a9, ERRATUM(794073), ERRATA_A9_794073
48*91f16700Schasinglulu
49*91f16700Schasinglulufunc check_errata_cve_2017_5715
50*91f16700Schasinglulu#if WORKAROUND_CVE_2017_5715
51*91f16700Schasinglulu	mov	r0, #ERRATA_APPLIES
52*91f16700Schasinglulu#else
53*91f16700Schasinglulu	mov	r0, #ERRATA_MISSING
54*91f16700Schasinglulu#endif
55*91f16700Schasinglulu	bx	lr
56*91f16700Schasingluluendfunc check_errata_cve_2017_5715
57*91f16700Schasinglulu
58*91f16700Schasingluluadd_erratum_entry cortex_a9, CVE(2017, 5715), WORKAROUND_CVE_2017_5715
59*91f16700Schasinglulu
60*91f16700Schasingluluerrata_report_shim cortex_a9
61*91f16700Schasinglulu
62*91f16700Schasinglulufunc cortex_a9_reset_func
63*91f16700Schasinglulu#if IMAGE_BL32 && WORKAROUND_CVE_2017_5715
64*91f16700Schasinglulu	ldr	r0, =wa_cve_2017_5715_bpiall_vbar
65*91f16700Schasinglulu	stcopr	r0, VBAR
66*91f16700Schasinglulu	stcopr	r0, MVBAR
67*91f16700Schasinglulu	/* isb will be applied in the course of the reset func */
68*91f16700Schasinglulu#endif
69*91f16700Schasinglulu	b	cortex_a9_enable_smp
70*91f16700Schasingluluendfunc cortex_a9_reset_func
71*91f16700Schasinglulu
72*91f16700Schasinglulufunc cortex_a9_core_pwr_dwn
73*91f16700Schasinglulu	push	{r12, lr}
74*91f16700Schasinglulu
75*91f16700Schasinglulu	assert_cache_enabled
76*91f16700Schasinglulu
77*91f16700Schasinglulu	/* Flush L1 cache */
78*91f16700Schasinglulu	mov	r0, #DC_OP_CISW
79*91f16700Schasinglulu	bl	dcsw_op_level1
80*91f16700Schasinglulu
81*91f16700Schasinglulu	/* Exit cluster coherency */
82*91f16700Schasinglulu	pop	{r12, lr}
83*91f16700Schasinglulu	b	cortex_a9_disable_smp
84*91f16700Schasingluluendfunc cortex_a9_core_pwr_dwn
85*91f16700Schasinglulu
86*91f16700Schasinglulufunc cortex_a9_cluster_pwr_dwn
87*91f16700Schasinglulu	push	{r12, lr}
88*91f16700Schasinglulu
89*91f16700Schasinglulu	assert_cache_enabled
90*91f16700Schasinglulu
91*91f16700Schasinglulu	/* Flush L1 caches */
92*91f16700Schasinglulu	mov	r0, #DC_OP_CISW
93*91f16700Schasinglulu	bl	dcsw_op_level1
94*91f16700Schasinglulu
95*91f16700Schasinglulu	bl	plat_disable_acp
96*91f16700Schasinglulu
97*91f16700Schasinglulu	/* Exit cluster coherency */
98*91f16700Schasinglulu	pop	{r12, lr}
99*91f16700Schasinglulu	b	cortex_a9_disable_smp
100*91f16700Schasingluluendfunc cortex_a9_cluster_pwr_dwn
101*91f16700Schasinglulu
102*91f16700Schasingluludeclare_cpu_ops cortex_a9, CORTEX_A9_MIDR, \
103*91f16700Schasinglulu	cortex_a9_reset_func, \
104*91f16700Schasinglulu	cortex_a9_core_pwr_dwn, \
105*91f16700Schasinglulu	cortex_a9_cluster_pwr_dwn
106