/*
 * Copyright (c) 2018, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <asm_macros.S>
#include <assert_macros.S>
#include <lib/xlat_tables/xlat_tables_v2.h>

	.global	enable_mmu_direct_svc_mon
	.global	enable_mmu_direct_hyp

	/*
	 * void enable_mmu_direct_svc_mon(unsigned int flags)
	 *
	 * Enable the MMU in Secure/Non-secure SVC or Monitor mode, using the
	 * translation-regime parameters previously stored in mmu_cfg_params
	 * by the xlat tables library.
	 *
	 * In:      r0 = flags (only DISABLE_DCACHE is honored here)
	 * Clobbers: r0-r3, flags
	 */
func enable_mmu_direct_svc_mon
	/* Assert that MMU is turned off */
#if ENABLE_ASSERTIONS
	ldcopr	r1, SCTLR
	tst	r1, #SCTLR_M_BIT
	ASM_ASSERT(eq)
#endif

	/* Invalidate TLB entries */
	TLB_INVALIDATE(r0, TLBIALL)

	/* Preserve flags; r0 becomes the mmu_cfg_params base pointer */
	mov	r3, r0
	ldr	r0, =mmu_cfg_params

	/* MAIR0. Only the lower 32 bits are used. */
	ldr	r1, [r0, #(MMU_CFG_MAIR << 3)]
	stcopr	r1, MAIR0

	/* TTBCR. Only the lower 32 bits are used. */
	ldr	r2, [r0, #(MMU_CFG_TCR << 3)]
	stcopr	r2, TTBCR

	/* TTBR0: 64-bit write (low word in r1, high word in r2) */
	ldr	r1, [r0, #(MMU_CFG_TTBR0 << 3)]
	ldr	r2, [r0, #((MMU_CFG_TTBR0 << 3) + 4)]
	stcopr16	r1, r2, TTBR0_64

	/* TTBR1 is unused right now; set it to 0. */
	mov	r1, #0
	mov	r2, #0
	stcopr16	r1, r2, TTBR1_64

	/*
	 * Ensure all translation table writes have drained into memory, the
	 * TLB invalidation is complete, and translation register writes are
	 * committed before enabling the MMU
	 */
	dsb	ish
	isb

	/* Enable MMU by honoring flags */
	ldcopr	r1, SCTLR
	ldr	r2, =(SCTLR_WXN_BIT | SCTLR_C_BIT | SCTLR_M_BIT)
	orr	r1, r1, r2

	/* Clear C bit if requested */
	tst	r3, #DISABLE_DCACHE
	bicne	r1, r1, #SCTLR_C_BIT

	stcopr	r1, SCTLR
	isb

	bx	lr
endfunc enable_mmu_direct_svc_mon


	/*
	 * void enable_mmu_direct_hyp(unsigned int flags)
	 *
	 * Enable the MMU in Hyp mode, using the translation-regime parameters
	 * previously stored in mmu_cfg_params by the xlat tables library.
	 *
	 * In:      r0 = flags (only DISABLE_DCACHE is honored here)
	 * Clobbers: r0-r3, flags
	 */
func enable_mmu_direct_hyp
	/* Assert that MMU is turned off */
#if ENABLE_ASSERTIONS
	ldcopr	r1, HSCTLR
	tst	r1, #HSCTLR_M_BIT
	ASM_ASSERT(eq)
#endif

	/* Invalidate TLB entries */
	TLB_INVALIDATE(r0, TLBIALL)

	/* Preserve flags; r0 becomes the mmu_cfg_params base pointer */
	mov	r3, r0
	ldr	r0, =mmu_cfg_params

	/* HMAIR0 */
	ldr	r1, [r0, #(MMU_CFG_MAIR << 3)]
	stcopr	r1, HMAIR0

	/* HTCR */
	ldr	r2, [r0, #(MMU_CFG_TCR << 3)]
	stcopr	r2, HTCR

	/* HTTBR: 64-bit write (low word in r1, high word in r2) */
	ldr	r1, [r0, #(MMU_CFG_TTBR0 << 3)]
	ldr	r2, [r0, #((MMU_CFG_TTBR0 << 3) + 4)]
	stcopr16	r1, r2, HTTBR_64

	/*
	 * Ensure all translation table writes have drained into memory, the
	 * TLB invalidation is complete, and translation register writes are
	 * committed before enabling the MMU
	 */
	dsb	ish
	isb

	/* Enable MMU by honoring flags */
	ldcopr	r1, HSCTLR
	ldr	r2, =(HSCTLR_WXN_BIT | HSCTLR_C_BIT | HSCTLR_M_BIT)
	orr	r1, r1, r2

	/* Clear C bit if requested */
	tst	r3, #DISABLE_DCACHE
	bicne	r1, r1, #HSCTLR_C_BIT

	stcopr	r1, HSCTLR
	isb

	bx	lr
endfunc enable_mmu_direct_hyp