/*
 * Copyright (c) 2016-2023, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <assert_macros.S>
#include <cortex_a15.h>
#include <cpu_macros.S>

/*
 * Cortex-A15 supports the Large Physical Address Extension (LPAE) and the
 * Virtualization Extensions (VE) regardless of whether the configuration
 * uses them, so there is no need to check ARCH_IS_ARMV7_WITH_LPAE/VE here.
 */

	/* Assert that the data cache is disabled (SCTLR.C bit is clear). */
	.macro assert_cache_enabled
#if ENABLE_ASSERTIONS
	ldcopr	r0, SCTLR
	tst	r0, #SCTLR_C_BIT
	ASM_ASSERT(eq)
#endif
	.endm

func cortex_a15_disable_smp
	ldcopr	r0, ACTLR
	bic	r0, #CORTEX_A15_ACTLR_SMP_BIT
	stcopr	r0, ACTLR
	isb
#if ERRATA_A15_816470
	/*
	 * Invalidate any TLB address
	 */
	mov	r0, #0
	stcopr	r0, TLBIMVA
#endif
	dsb	sy
	bx	lr
endfunc cortex_a15_disable_smp

func cortex_a15_enable_smp
	ldcopr	r0, ACTLR
	orr	r0, #CORTEX_A15_ACTLR_SMP_BIT
	stcopr	r0, ACTLR
	isb
	bx	lr
endfunc cortex_a15_enable_smp

	/* ----------------------------------------------------
	 * Errata workaround for Cortex-A15 Erratum 816470.
	 * This applies only to revisions >= r3p0 of Cortex-A15.
	 * ----------------------------------------------------
	 */
func check_errata_816470
	/*
	 * Even though this is only needed for revisions >= r3p0, it is
	 * always applied because of the low cost of the workaround.
	 */
	mov	r0, #ERRATA_APPLIES
	bx	lr
endfunc check_errata_816470

add_erratum_entry cortex_a15, ERRATUM(816470), ERRATA_A15_816470

	/* ----------------------------------------------------
	 * Errata workaround for Cortex-A15 Erratum 827671.
	 * This applies only to revisions >= r3p0 of Cortex-A15.
	 * Inputs:
	 * r0: variant[4:7] and revision[0:3] of current cpu.
	 * Shall clobber: r0-r3
	 * ----------------------------------------------------
	 */
func errata_a15_827671_wa
	/*
	 * Compare r0 against revision r3p0
	 */
	mov	r2, lr
	bl	check_errata_827671
	cmp	r0, #ERRATA_NOT_APPLIES
	beq	1f
	ldcopr	r0, CORTEX_A15_ACTLR2
	orr	r0, #CORTEX_A15_ACTLR2_INV_DCC_BIT
	stcopr	r0, CORTEX_A15_ACTLR2
	isb
1:
	bx	r2
endfunc errata_a15_827671_wa

func check_errata_827671
	/* Applies to revisions >= r3p0 (variant/revision value 0x30) */
	mov	r1, #0x30
	b	cpu_rev_var_hs
endfunc check_errata_827671

add_erratum_entry cortex_a15, ERRATUM(827671), ERRATA_A15_827671

func check_errata_cve_2017_5715
#if WORKAROUND_CVE_2017_5715
	mov	r0, #ERRATA_APPLIES
#else
	mov	r0, #ERRATA_MISSING
#endif
	bx	lr
endfunc check_errata_cve_2017_5715

add_erratum_entry cortex_a15, CVE(2017, 5715), WORKAROUND_CVE_2017_5715

func check_errata_cve_2022_23960
#if WORKAROUND_CVE_2022_23960
	mov	r0, #ERRATA_APPLIES
#else
	mov	r0, #ERRATA_MISSING
#endif
	bx	lr
endfunc check_errata_cve_2022_23960

add_erratum_entry cortex_a15, CVE(2022, 23960), WORKAROUND_CVE_2022_23960

func cortex_a15_reset_func
	mov	r5, lr
	bl	cpu_get_rev_var

#if ERRATA_A15_827671
	bl	errata_a15_827671_wa
#endif

#if IMAGE_BL32 && (WORKAROUND_CVE_2017_5715 || WORKAROUND_CVE_2022_23960)
	/*
	 * Mitigate CVE-2017-5715 and CVE-2022-23960: enable BTB
	 * invalidation and install the vector table that invalidates
	 * the I-cache on exception entry.
	 */
	ldcopr	r0, ACTLR
	orr	r0, #CORTEX_A15_ACTLR_INV_BTB_BIT
	stcopr	r0, ACTLR
	ldr	r0, =wa_cve_2017_5715_icache_inv_vbar
	stcopr	r0, VBAR
	stcopr	r0, MVBAR
	/* isb will be applied in the course of the reset func */
#endif

	mov	lr, r5
	b	cortex_a15_enable_smp
endfunc cortex_a15_reset_func

func cortex_a15_core_pwr_dwn
	push	{r12, lr}

	assert_cache_enabled

	/* Flush L1 cache */
	mov	r0, #DC_OP_CISW
	bl	dcsw_op_level1

	/* Exit cluster coherency */
	pop	{r12, lr}
	b	cortex_a15_disable_smp
endfunc cortex_a15_core_pwr_dwn

func cortex_a15_cluster_pwr_dwn
	push	{r12, lr}

	assert_cache_enabled

	/* Flush L1 caches */
	mov	r0, #DC_OP_CISW
	bl	dcsw_op_level1

	bl	plat_disable_acp

	/* Flush L2 caches */
	mov	r0, #DC_OP_CISW
	bl	dcsw_op_level2

	/* Exit cluster coherency */
	pop	{r12, lr}
	b	cortex_a15_disable_smp
endfunc cortex_a15_cluster_pwr_dwn

errata_report_shim cortex_a15

declare_cpu_ops cortex_a15, CORTEX_A15_MIDR, \
	cortex_a15_reset_func, \
	cortex_a15_core_pwr_dwn, \
	cortex_a15_cluster_pwr_dwn