/*
 * Copyright (c) 2017-2023, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

/*
 * AArch32 CPU support library for the Arm Cortex-A72.
 * CPUECTLR/CPUACTLR are 64-bit implementation-defined registers accessed
 * here as register pairs via the ldcopr16/stcopr16 and orr64_imm/bic64_imm
 * helper macros (r0 = low word, r1 = high word).
 */
#include <arch.h>
#include <asm_macros.S>
#include <assert_macros.S>
#include <common/debug.h>
#include <cortex_a72.h>
#include <cpu_macros.S>

	/* ---------------------------------------------
	 * Disable all types of L2 prefetches.
	 * Sets the CPUECTLR bit named by
	 * CORTEX_A72_ECTLR_DIS_TWD_ACC_PFTCH_BIT and clears the
	 * L2 instruction/data prefetch distance fields.
	 * Clobbers: r0-r1
	 * ---------------------------------------------
	 */
func cortex_a72_disable_l2_prefetch
	ldcopr16	r0, r1, CORTEX_A72_ECTLR
	orr64_imm	r0, r1, CORTEX_A72_ECTLR_DIS_TWD_ACC_PFTCH_BIT
	bic64_imm	r0, r1, (CORTEX_A72_ECTLR_L2_IPFTCH_DIST_MASK | \
			 CORTEX_A72_ECTLR_L2_DPFTCH_DIST_MASK)
	stcopr16	r0, r1, CORTEX_A72_ECTLR
	isb
	bx	lr
endfunc cortex_a72_disable_l2_prefetch

	/* ---------------------------------------------
	 * Disable the load-store hardware prefetcher.
	 * Sets CORTEX_A72_CPUACTLR_DISABLE_L1_DCACHE_HW_PFTCH in
	 * CPUACTLR, then synchronises with isb + dsb ish.
	 * Clobbers: r0-r1
	 * ---------------------------------------------
	 */
func cortex_a72_disable_hw_prefetcher
	ldcopr16	r0, r1, CORTEX_A72_CPUACTLR
	orr64_imm	r0, r1, CORTEX_A72_CPUACTLR_DISABLE_L1_DCACHE_HW_PFTCH
	stcopr16	r0, r1, CORTEX_A72_CPUACTLR
	isb
	dsb	ish
	bx	lr
endfunc cortex_a72_disable_hw_prefetcher

	/* ---------------------------------------------
	 * Disable intra-cluster coherency by clearing the SMP
	 * bit in CPUECTLR. No barrier is issued here; callers
	 * are expected to synchronise afterwards.
	 * Clobbers: r0-r1
	 * ---------------------------------------------
	 */
func cortex_a72_disable_smp
	ldcopr16	r0, r1, CORTEX_A72_ECTLR
	bic64_imm	r0, r1, CORTEX_A72_ECTLR_SMP_BIT
	stcopr16	r0, r1, CORTEX_A72_ECTLR
	bx	lr
endfunc cortex_a72_disable_smp

	/* ---------------------------------------------
	 * Disable debug interfaces.
	 * Writes 1 to DBGOSDLR (OS Double Lock) to force the
	 * external debug interfaces quiescent, then isb + dsb sy.
	 * Clobbers: r0
	 * ---------------------------------------------
	 */
func cortex_a72_disable_ext_debug
	mov	r0, #1
	stcopr	r0, DBGOSDLR
	isb
	dsb	sy
	bx	lr
endfunc cortex_a72_disable_ext_debug

	/* ---------------------------------------------------
	 * Errata Workaround for Cortex A72 Errata #859971.
	 * This applies only to revision <= r0p3 of Cortex A72.
	 * Inputs:
	 * r0: variant[4:7] and revision[0:3] of current cpu.
	 * Shall clobber: r0-r3
	 * ---------------------------------------------------
	 */
func errata_a72_859971_wa
	mov	r2,lr			/* preserve return address across the check call */
	bl	check_errata_859971
	mov	lr, r2
	cmp	r0, #ERRATA_NOT_APPLIES
	beq	1f
	ldcopr16	r0, r1, CORTEX_A72_CPUACTLR
	/*
	 * NOTE(review): orr64_imm ors imm[31:0] into its first operand and
	 * imm[63:32] into its second. Passing r1 (the CPUACTLR high word)
	 * as both operands works only because the low half of
	 * DIS_INSTR_PREFETCH is zero (it is an upper-word bit) -- confirm
	 * against the macro definition in asm_macros.S.
	 */
	orr64_imm	r1, r1, CORTEX_A72_CPUACTLR_DIS_INSTR_PREFETCH
	stcopr16	r0, r1, CORTEX_A72_CPUACTLR
1:
	bx	lr
endfunc errata_a72_859971_wa

	/*
	 * Report whether erratum 859971 applies to this core.
	 * In: r0 = variant/revision word. Tail-calls the common
	 * cpu_rev_var_ls helper with r1 = 0x03 (i.e. revisions up to r0p3,
	 * matching the "<= r0p3" note above); result returned in r0.
	 */
func check_errata_859971
	mov	r1, #0x03
	b	cpu_rev_var_ls
endfunc check_errata_859971

add_erratum_entry cortex_a72, ERRATUM(859971), ERRATA_A72_859971

	/*
	 * CVE-2017-5715 (branch target injection): no workaround is
	 * implemented in this file, so it is always reported as missing.
	 */
func check_errata_cve_2017_5715
	mov	r0, #ERRATA_MISSING
	bx	lr
endfunc check_errata_cve_2017_5715

add_erratum_entry cortex_a72, CVE(2017, 5715), WORKAROUND_CVE_2017_5715

	/*
	 * CVE-2018-3639 (speculative store bypass): reported as applied
	 * when the build enables WORKAROUND_CVE_2018_3639 (the actual
	 * mitigation is done in the reset function), otherwise missing.
	 */
func check_errata_cve_2018_3639
#if WORKAROUND_CVE_2018_3639
	mov	r0, #ERRATA_APPLIES
#else
	mov	r0, #ERRATA_MISSING
#endif
	bx	lr
endfunc check_errata_cve_2018_3639

add_erratum_entry cortex_a72, CVE(2018, 3639), WORKAROUND_CVE_2018_3639

	/*
	 * CVE-2022-23960 (Spectre-BHB): no workaround is implemented in
	 * this file, so it is always reported as missing.
	 */
func check_errata_cve_2022_23960
	mov	r0, #ERRATA_MISSING
	bx	lr
endfunc check_errata_cve_2022_23960

add_erratum_entry cortex_a72, CVE(2022, 23960), WORKAROUND_CVE_2022_23960

	/* -------------------------------------------------
	 * The CPU Ops reset function for Cortex-A72.
	 * Applies the build-selected errata workarounds and
	 * enables intra-cluster coherency (SMP bit in CPUECTLR).
	 * Clobbers: r0-r5 (r4 = saved cpu revision, r5 = saved
	 * lr; presumed safe to use in the reset path -- confirm
	 * against the cpu_ops reset calling convention).
	 * -------------------------------------------------
	 */
func cortex_a72_reset_func
	mov	r5, lr			/* save return address */
	bl	cpu_get_rev_var
	mov	r4, r0			/* r4 = variant/revision of current cpu */

#if ERRATA_A72_859971
	mov	r0, r4
	bl	errata_a72_859971_wa
#endif

#if WORKAROUND_CVE_2018_3639
	/* CVE-2018-3639: disallow loads passing older stores (CPUACTLR). */
	ldcopr16	r0, r1, CORTEX_A72_CPUACTLR
	orr64_imm	r0, r1, CORTEX_A72_CPUACTLR_DIS_LOAD_PASS_STORE
	stcopr16	r0, r1, CORTEX_A72_CPUACTLR
	isb
	dsb	sy
#endif

	/* ---------------------------------------------
	 * Enable the SMP bit.
	 * ---------------------------------------------
	 */
	ldcopr16	r0, r1, CORTEX_A72_ECTLR
	orr64_imm	r0, r1, CORTEX_A72_ECTLR_SMP_BIT
	stcopr16	r0, r1, CORTEX_A72_ECTLR
	isb
	bx	r5			/* return via saved lr */
endfunc cortex_a72_reset_func

	/* ----------------------------------------------------
	 * The CPU Ops core power down function for Cortex-A72.
	 * Expects the data cache to already be disabled
	 * (asserted when ENABLE_ASSERTIONS): disables the
	 * prefetchers, flushes L1, exits coherency and
	 * quiesces the debug interfaces.
	 * ----------------------------------------------------
	 */
func cortex_a72_core_pwr_dwn
	/* r12 is pushed together with lr, presumably to keep the
	 * stack 8-byte aligned per AAPCS; r12 itself is unused here. */
	push	{r12, lr}

	/* Assert if cache is enabled */
#if ENABLE_ASSERTIONS
	ldcopr	r0, SCTLR
	tst	r0, #SCTLR_C_BIT
	ASM_ASSERT(eq)
#endif

	/* ---------------------------------------------
	 * Disable the L2 prefetches.
	 * ---------------------------------------------
	 */
	bl	cortex_a72_disable_l2_prefetch

	/* ---------------------------------------------
	 * Disable the load-store hardware prefetcher.
	 * ---------------------------------------------
	 */
	bl	cortex_a72_disable_hw_prefetcher

	/* ---------------------------------------------
	 * Flush L1 caches (clean + invalidate by set/way).
	 * ---------------------------------------------
	 */
	mov	r0, #DC_OP_CISW
	bl	dcsw_op_level1

	/* ---------------------------------------------
	 * Come out of intra cluster coherency
	 * ---------------------------------------------
	 */
	bl	cortex_a72_disable_smp

	/* ---------------------------------------------
	 * Force the debug interfaces to be quiescent.
	 * Tail call: lr is restored first so the callee's
	 * "bx lr" returns directly to our caller.
	 * ---------------------------------------------
	 */
	pop	{r12, lr}
	b	cortex_a72_disable_ext_debug
endfunc cortex_a72_core_pwr_dwn

	/* -------------------------------------------------------
	 * The CPU Ops cluster power down function for Cortex-A72.
	 * Like the core power down sequence, but additionally
	 * disables the (optional, platform-controlled) ACP and
	 * flushes the L2 cache before exiting coherency. The L1
	 * flush can be compiled out via SKIP_A72_L1_FLUSH_PWR_DWN.
	 * -------------------------------------------------------
	 */
func cortex_a72_cluster_pwr_dwn
	push	{r12, lr}		/* see alignment note in core_pwr_dwn */

	/* Assert if cache is enabled */
#if ENABLE_ASSERTIONS
	ldcopr	r0, SCTLR
	tst	r0, #SCTLR_C_BIT
	ASM_ASSERT(eq)
#endif

	/* ---------------------------------------------
	 * Disable the L2 prefetches.
	 * ---------------------------------------------
	 */
	bl	cortex_a72_disable_l2_prefetch

	/* ---------------------------------------------
	 * Disable the load-store hardware prefetcher.
	 * ---------------------------------------------
	 */
	bl	cortex_a72_disable_hw_prefetcher

#if !SKIP_A72_L1_FLUSH_PWR_DWN
	/* ---------------------------------------------
	 * Flush L1 caches.
	 * ---------------------------------------------
	 */
	mov	r0, #DC_OP_CISW
	bl	dcsw_op_level1
#endif

	/* ---------------------------------------------
	 * Disable the optional ACP.
	 * ---------------------------------------------
	 */
	bl	plat_disable_acp

	/* -------------------------------------------------
	 * Flush the L2 caches.
	 * -------------------------------------------------
	 */
	mov	r0, #DC_OP_CISW
	bl	dcsw_op_level2

	/* ---------------------------------------------
	 * Come out of intra cluster coherency
	 * ---------------------------------------------
	 */
	bl	cortex_a72_disable_smp

	/* ---------------------------------------------
	 * Force the debug interfaces to be quiescent
	 * (tail call; lr restored above).
	 * ---------------------------------------------
	 */
	pop	{r12, lr}
	b	cortex_a72_disable_ext_debug
endfunc cortex_a72_cluster_pwr_dwn

errata_report_shim cortex_a72

declare_cpu_ops cortex_a72, CORTEX_A72_MIDR, \
	cortex_a72_reset_func, \
	cortex_a72_core_pwr_dwn, \
	cortex_a72_cluster_pwr_dwn