/*
 * Copyright (c) 2016-2023, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */
#include <arch.h>
#include <asm_macros.S>
#include <assert_macros.S>
#include <cortex_a32.h>
#include <cpu_macros.S>


	/* ---------------------------------------------
	 * Disable intra-cluster coherency by clearing
	 * the SMPEN bit in CPUECTLR.
	 * Clobbers: r0-r1
	 * ---------------------------------------------
	 */
func cortex_a32_disable_smp
	ldcopr16	r0, r1, CORTEX_A32_CPUECTLR_EL1
	bic	r0, r0, #CORTEX_A32_CPUECTLR_SMPEN_BIT
	stcopr16	r0, r1, CORTEX_A32_CPUECTLR_EL1
	/* isb: make the CPUECTLR write take effect;
	 * dsb sy: drain outstanding memory accesses before
	 * the core leaves coherency. */
	isb
	dsb	sy
	bx	lr
endfunc cortex_a32_disable_smp

	/* -------------------------------------------------
	 * The CPU Ops reset function for Cortex-A32.
	 * Clobbers: r0-r1
	 * -------------------------------------------------
	 */
func cortex_a32_reset_func
	/* ---------------------------------------------
	 * Enable the SMP bit so the core participates
	 * in intra-cluster coherency.
	 * ---------------------------------------------
	 */
	ldcopr16	r0, r1, CORTEX_A32_CPUECTLR_EL1
	orr	r0, r0, #CORTEX_A32_CPUECTLR_SMPEN_BIT
	stcopr16	r0, r1, CORTEX_A32_CPUECTLR_EL1
	isb
	bx	lr
endfunc cortex_a32_reset_func

	/* ----------------------------------------------------
	 * The CPU Ops core power down function for Cortex-A32.
	 * Clobbers: r0-r3
	 * ----------------------------------------------------
	 */
func cortex_a32_core_pwr_dwn
	/* r12 is pushed to meet the 8 byte stack alignment requirement */
	push	{r12, lr}

	/* Assert if cache is enabled (data cache must already be
	 * disabled before flushing by set/way). */
#if ENABLE_ASSERTIONS
	ldcopr	r0, SCTLR
	tst	r0, #SCTLR_C_BIT
	ASM_ASSERT(eq)
#endif

	/* ---------------------------------------------
	 * Flush L1 caches.
	 * ---------------------------------------------
	 */
	mov	r0, #DC_OP_CISW
	bl	dcsw_op_level1

	/* ---------------------------------------------
	 * Come out of intra cluster coherency
	 * (tail call: cortex_a32_disable_smp returns
	 * via the restored lr).
	 * ---------------------------------------------
	 */
	pop	{r12, lr}
	b	cortex_a32_disable_smp
endfunc cortex_a32_core_pwr_dwn

	/* -------------------------------------------------------
	 * The CPU Ops cluster power down function for Cortex-A32.
	 * Clobbers: r0-r3
	 * -------------------------------------------------------
	 */
func cortex_a32_cluster_pwr_dwn
	/* r12 is pushed to meet the 8 byte stack alignment requirement */
	push	{r12, lr}

	/* Assert if cache is enabled (data cache must already be
	 * disabled before flushing by set/way). */
#if ENABLE_ASSERTIONS
	ldcopr	r0, SCTLR
	tst	r0, #SCTLR_C_BIT
	ASM_ASSERT(eq)
#endif

	/* ---------------------------------------------
	 * Flush L1 cache.
	 * ---------------------------------------------
	 */
	mov	r0, #DC_OP_CISW
	bl	dcsw_op_level1

	/* ---------------------------------------------
	 * Disable the optional ACP (platform hook).
	 * ---------------------------------------------
	 */
	bl	plat_disable_acp

	/* ---------------------------------------------
	 * Flush L2 cache.
	 * ---------------------------------------------
	 */
	mov	r0, #DC_OP_CISW
	bl	dcsw_op_level2

	/* ---------------------------------------------
	 * Come out of intra cluster coherency
	 * (tail call: cortex_a32_disable_smp returns
	 * via the restored lr).
	 * ---------------------------------------------
	 */
	pop	{r12, lr}
	b	cortex_a32_disable_smp
endfunc cortex_a32_cluster_pwr_dwn

/* No errata workarounds are implemented for Cortex-A32; provide the
 * stub errata-report function expected by the framework. */
errata_report_shim cortex_a32

declare_cpu_ops cortex_a32, CORTEX_A32_MIDR, \
	cortex_a32_reset_func, \
	cortex_a32_core_pwr_dwn, \
	cortex_a32_cluster_pwr_dwn