/*
 * Copyright (c) 2018-2022, ARM Limited and Contributors. All rights reserved.
 * Copyright (c) 2022, NVIDIA Corporation. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */


#include <asm_macros.S>
#include <assert_macros.S>
#include <bl31/ea_handle.h>
#include <context.h>
#include <cpu_macros.S>
#include <lib/extensions/ras_arch.h>

	.globl	handle_lower_el_sync_ea
	.globl	handle_lower_el_async_ea
	.globl	handle_pending_async_ea
/*
 * This function handles Synchronous External Aborts from lower EL.
 *
 * It delegates the handling of the EA to platform handler, and upon
 * successfully handling the EA, exits EL3; otherwise panics.
 *
 * This function assumes x30 has been saved.
 */
func handle_lower_el_sync_ea
	mrs	x30, esr_el3
	ubfx	x30, x30, #ESR_EC_SHIFT, #ESR_EC_LENGTH

	/* Check for I/D aborts from lower EL */
	cmp	x30, #EC_IABORT_LOWER_EL
	b.eq	1f

	cmp	x30, #EC_DABORT_LOWER_EL
	b.eq	1f

	/* EA other than above are unhandled exceptions */
	no_ret	report_unhandled_exception
1:
	/*
	 * Save general purpose and ARMv8.3-PAuth registers (if enabled).
	 * Also save PMCR_EL0 and set the PSTATE to a known state.
	 */
	bl	prepare_el3_entry

#if ENABLE_PAUTH
	/* Load and program APIAKey firmware key */
	bl	pauth_load_bl31_apiakey
#endif

	/* Setup exception class and syndrome arguments for platform handler */
	mov	x0, #ERROR_EA_SYNC
	mrs	x1, esr_el3
	bl	delegate_sync_ea

	/* el3_exit assumes SP_EL0 on entry */
	msr	spsel, #MODE_SP_EL0
	b	el3_exit
endfunc handle_lower_el_sync_ea


/*
 * This function handles SErrors from lower ELs.
 *
 * It delegates the handling of the EA to platform handler, and upon
 * successfully handling the EA, exits EL3; otherwise panics.
70*91f16700Schasinglulu * 71*91f16700Schasinglulu * This function assumes x30 has been saved. 72*91f16700Schasinglulu */ 73*91f16700Schasinglulufunc handle_lower_el_async_ea 74*91f16700Schasinglulu 75*91f16700Schasinglulu /* 76*91f16700Schasinglulu * Save general purpose and ARMv8.3-PAuth registers (if enabled). 77*91f16700Schasinglulu * Also save PMCR_EL0 and set the PSTATE to a known state. 78*91f16700Schasinglulu */ 79*91f16700Schasinglulu bl prepare_el3_entry 80*91f16700Schasinglulu 81*91f16700Schasinglulu#if ENABLE_PAUTH 82*91f16700Schasinglulu /* Load and program APIAKey firmware key */ 83*91f16700Schasinglulu bl pauth_load_bl31_apiakey 84*91f16700Schasinglulu#endif 85*91f16700Schasinglulu 86*91f16700Schasinglulu /* Setup exception class and syndrome arguments for platform handler */ 87*91f16700Schasinglulu mov x0, #ERROR_EA_ASYNC 88*91f16700Schasinglulu mrs x1, esr_el3 89*91f16700Schasinglulu bl delegate_async_ea 90*91f16700Schasinglulu 91*91f16700Schasinglulu /* el3_exit assumes SP_EL0 on entry */ 92*91f16700Schasinglulu msr spsel, #MODE_SP_EL0 93*91f16700Schasinglulu b el3_exit 94*91f16700Schasingluluendfunc handle_lower_el_async_ea 95*91f16700Schasinglulu 96*91f16700Schasinglulu/* 97*91f16700Schasinglulu * Handler for async EA from lower EL synchronized at EL3 entry in FFH mode. 98*91f16700Schasinglulu * 99*91f16700Schasinglulu * This scenario may arise when there is an error (EA) in the system which is not 100*91f16700Schasinglulu * yet signaled to PE while executing in lower EL. During entry into EL3, the errors 101*91f16700Schasinglulu * are synchronized either implicitly or explicitly causing async EA to pend at EL3. 102*91f16700Schasinglulu * 103*91f16700Schasinglulu * On detecting the pending EA (via ISR_EL1.A), if the EA routing model is Firmware 104*91f16700Schasinglulu * First handling (FFH, SCR_EL3.EA = 1) this handler first handles the pending EA 105*91f16700Schasinglulu * and then handles the original exception. 
106*91f16700Schasinglulu * 107*91f16700Schasinglulu * This function assumes x30 has been saved. 108*91f16700Schasinglulu */ 109*91f16700Schasinglulufunc handle_pending_async_ea 110*91f16700Schasinglulu /* 111*91f16700Schasinglulu * Prepare for nested handling of EA. Stash sysregs clobbered by nested 112*91f16700Schasinglulu * exception and handler 113*91f16700Schasinglulu */ 114*91f16700Schasinglulu str x30, [sp, #CTX_EL3STATE_OFFSET + CTX_SAVED_GPREG_LR] 115*91f16700Schasinglulu mrs x30, esr_el3 116*91f16700Schasinglulu str x30, [sp, #CTX_EL3STATE_OFFSET + CTX_SAVED_ESR_EL3] 117*91f16700Schasinglulu mrs x30, spsr_el3 118*91f16700Schasinglulu str x30, [sp, #CTX_EL3STATE_OFFSET + CTX_SAVED_SPSR_EL3] 119*91f16700Schasinglulu mrs x30, elr_el3 120*91f16700Schasinglulu str x30, [sp, #CTX_EL3STATE_OFFSET + CTX_SAVED_ELR_EL3] 121*91f16700Schasinglulu 122*91f16700Schasinglulu mov x30, #1 123*91f16700Schasinglulu str x30, [sp, #CTX_EL3STATE_OFFSET + CTX_NESTED_EA_FLAG] 124*91f16700Schasinglulu /* 125*91f16700Schasinglulu * Restore the original x30 saved as part of entering EL3. This is not 126*91f16700Schasinglulu * required for the current function but for EL3 SError vector entry 127*91f16700Schasinglulu * once PSTATE.A bit is unmasked. We restore x30 and then the same 128*91f16700Schasinglulu * value is stored in EL3 SError vector entry. 129*91f16700Schasinglulu */ 130*91f16700Schasinglulu ldr x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR] 131*91f16700Schasinglulu 132*91f16700Schasinglulu /* 133*91f16700Schasinglulu * After clearing PSTATE.A bit pending SError will trigger at current EL. 134*91f16700Schasinglulu * Put explicit synchronization event to ensure newly unmasked interrupt 135*91f16700Schasinglulu * is taken immediately. 
136*91f16700Schasinglulu */ 137*91f16700Schasinglulu unmask_async_ea 138*91f16700Schasinglulu 139*91f16700Schasinglulu /* Restore the original exception information along with zeroing the storage */ 140*91f16700Schasinglulu ldr x30, [sp, #CTX_EL3STATE_OFFSET + CTX_SAVED_ELR_EL3] 141*91f16700Schasinglulu msr elr_el3, x30 142*91f16700Schasinglulu str xzr, [sp, #CTX_EL3STATE_OFFSET + CTX_SAVED_ELR_EL3] 143*91f16700Schasinglulu ldr x30, [sp, #CTX_EL3STATE_OFFSET + CTX_SAVED_SPSR_EL3] 144*91f16700Schasinglulu msr spsr_el3, x30 145*91f16700Schasinglulu str xzr, [sp, #CTX_EL3STATE_OFFSET + CTX_SAVED_SPSR_EL3] 146*91f16700Schasinglulu ldr x30, [sp, #CTX_EL3STATE_OFFSET + CTX_SAVED_ESR_EL3] 147*91f16700Schasinglulu msr esr_el3, x30 148*91f16700Schasinglulu str xzr, [sp, #CTX_EL3STATE_OFFSET + CTX_SAVED_ESR_EL3] 149*91f16700Schasinglulu 150*91f16700Schasinglulu /* 151*91f16700Schasinglulu * If the original exception corresponds to SError from lower El, eret back 152*91f16700Schasinglulu * to lower EL, otherwise return to vector table for original exception handling. 153*91f16700Schasinglulu */ 154*91f16700Schasinglulu ubfx x30, x30, #ESR_EC_SHIFT, #ESR_EC_LENGTH 155*91f16700Schasinglulu cmp x30, #EC_SERROR 156*91f16700Schasinglulu ldr x30, [sp, #CTX_EL3STATE_OFFSET + CTX_SAVED_GPREG_LR] 157*91f16700Schasinglulu str xzr, [sp, #CTX_EL3STATE_OFFSET + CTX_SAVED_GPREG_LR] 158*91f16700Schasinglulu b.eq 1f 159*91f16700Schasinglulu ret 160*91f16700Schasinglulu1: 161*91f16700Schasinglulu exception_return 162*91f16700Schasingluluendfunc handle_pending_async_ea 163*91f16700Schasinglulu 164*91f16700Schasinglulu/* 165*91f16700Schasinglulu * Prelude for Synchronous External Abort handling. This function assumes that 166*91f16700Schasinglulu * all GP registers have been saved by the caller. 
167*91f16700Schasinglulu * 168*91f16700Schasinglulu * x0: EA reason 169*91f16700Schasinglulu * x1: EA syndrome 170*91f16700Schasinglulu */ 171*91f16700Schasinglulufunc delegate_sync_ea 172*91f16700Schasinglulu#if ENABLE_FEAT_RAS 173*91f16700Schasinglulu /* 174*91f16700Schasinglulu * Check for Uncontainable error type. If so, route to the platform 175*91f16700Schasinglulu * fatal error handler rather than the generic EA one. 176*91f16700Schasinglulu */ 177*91f16700Schasinglulu ubfx x2, x1, #EABORT_SET_SHIFT, #EABORT_SET_WIDTH 178*91f16700Schasinglulu cmp x2, #ERROR_STATUS_SET_UC 179*91f16700Schasinglulu b.ne 1f 180*91f16700Schasinglulu 181*91f16700Schasinglulu /* Check fault status code */ 182*91f16700Schasinglulu ubfx x3, x1, #EABORT_DFSC_SHIFT, #EABORT_DFSC_WIDTH 183*91f16700Schasinglulu cmp x3, #SYNC_EA_FSC 184*91f16700Schasinglulu b.ne 1f 185*91f16700Schasinglulu 186*91f16700Schasinglulu no_ret plat_handle_uncontainable_ea 187*91f16700Schasinglulu1: 188*91f16700Schasinglulu#endif 189*91f16700Schasinglulu 190*91f16700Schasinglulu b ea_proceed 191*91f16700Schasingluluendfunc delegate_sync_ea 192*91f16700Schasinglulu 193*91f16700Schasinglulu 194*91f16700Schasinglulu/* 195*91f16700Schasinglulu * Prelude for Asynchronous External Abort handling. This function assumes that 196*91f16700Schasinglulu * all GP registers have been saved by the caller. 197*91f16700Schasinglulu * 198*91f16700Schasinglulu * x0: EA reason 199*91f16700Schasinglulu * x1: EA syndrome 200*91f16700Schasinglulu */ 201*91f16700Schasinglulufunc delegate_async_ea 202*91f16700Schasinglulu#if ENABLE_FEAT_RAS 203*91f16700Schasinglulu /* Check Exception Class to ensure SError, as this function should 204*91f16700Schasinglulu * only be invoked for SError. If that is not the case, which implies 205*91f16700Schasinglulu * either an HW error or programming error, panic. 
206*91f16700Schasinglulu */ 207*91f16700Schasinglulu ubfx x2, x1, #ESR_EC_SHIFT, #ESR_EC_LENGTH 208*91f16700Schasinglulu cmp x2, EC_SERROR 209*91f16700Schasinglulu b.ne el3_panic 210*91f16700Schasinglulu /* 211*91f16700Schasinglulu * Check for Implementation Defined Syndrome. If so, skip checking 212*91f16700Schasinglulu * Uncontainable error type from the syndrome as the format is unknown. 213*91f16700Schasinglulu */ 214*91f16700Schasinglulu tbnz x1, #SERROR_IDS_BIT, 1f 215*91f16700Schasinglulu 216*91f16700Schasinglulu /* AET only valid when DFSC is 0x11 */ 217*91f16700Schasinglulu ubfx x2, x1, #EABORT_DFSC_SHIFT, #EABORT_DFSC_WIDTH 218*91f16700Schasinglulu cmp x2, #DFSC_SERROR 219*91f16700Schasinglulu b.ne 1f 220*91f16700Schasinglulu 221*91f16700Schasinglulu /* 222*91f16700Schasinglulu * Check for Uncontainable error type. If so, route to the platform 223*91f16700Schasinglulu * fatal error handler rather than the generic EA one. 224*91f16700Schasinglulu */ 225*91f16700Schasinglulu ubfx x3, x1, #EABORT_AET_SHIFT, #EABORT_AET_WIDTH 226*91f16700Schasinglulu cmp x3, #ERROR_STATUS_UET_UC 227*91f16700Schasinglulu b.ne 1f 228*91f16700Schasinglulu 229*91f16700Schasinglulu no_ret plat_handle_uncontainable_ea 230*91f16700Schasinglulu1: 231*91f16700Schasinglulu#endif 232*91f16700Schasinglulu 233*91f16700Schasinglulu b ea_proceed 234*91f16700Schasingluluendfunc delegate_async_ea 235*91f16700Schasinglulu 236*91f16700Schasinglulu 237*91f16700Schasinglulu/* 238*91f16700Schasinglulu * Delegate External Abort handling to platform's EA handler. This function 239*91f16700Schasinglulu * assumes that all GP registers have been saved by the caller. 
240*91f16700Schasinglulu * 241*91f16700Schasinglulu * x0: EA reason 242*91f16700Schasinglulu * x1: EA syndrome 243*91f16700Schasinglulu */ 244*91f16700Schasinglulufunc ea_proceed 245*91f16700Schasinglulu /* 246*91f16700Schasinglulu * If the ESR loaded earlier is not zero, we were processing an EA 247*91f16700Schasinglulu * already, and this is a double fault. 248*91f16700Schasinglulu */ 249*91f16700Schasinglulu ldr x5, [sp, #CTX_EL3STATE_OFFSET + CTX_ESR_EL3] 250*91f16700Schasinglulu cbz x5, 1f 251*91f16700Schasinglulu no_ret plat_handle_double_fault 252*91f16700Schasinglulu 253*91f16700Schasinglulu1: 254*91f16700Schasinglulu /* Save EL3 state */ 255*91f16700Schasinglulu mrs x2, spsr_el3 256*91f16700Schasinglulu mrs x3, elr_el3 257*91f16700Schasinglulu stp x2, x3, [sp, #CTX_EL3STATE_OFFSET + CTX_SPSR_EL3] 258*91f16700Schasinglulu 259*91f16700Schasinglulu /* 260*91f16700Schasinglulu * Save ESR as handling might involve lower ELs, and returning back to 261*91f16700Schasinglulu * EL3 from there would trample the original ESR. 262*91f16700Schasinglulu */ 263*91f16700Schasinglulu mrs x4, scr_el3 264*91f16700Schasinglulu mrs x5, esr_el3 265*91f16700Schasinglulu stp x4, x5, [sp, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3] 266*91f16700Schasinglulu 267*91f16700Schasinglulu /* 268*91f16700Schasinglulu * Setup rest of arguments, and call platform External Abort handler. 269*91f16700Schasinglulu * 270*91f16700Schasinglulu * x0: EA reason (already in place) 271*91f16700Schasinglulu * x1: Exception syndrome (already in place). 272*91f16700Schasinglulu * x2: Cookie (unused for now). 273*91f16700Schasinglulu * x3: Context pointer. 274*91f16700Schasinglulu * x4: Flags (security state from SCR for now). 
275*91f16700Schasinglulu */ 276*91f16700Schasinglulu mov x2, xzr 277*91f16700Schasinglulu mov x3, sp 278*91f16700Schasinglulu ubfx x4, x4, #0, #1 279*91f16700Schasinglulu 280*91f16700Schasinglulu /* Switch to runtime stack */ 281*91f16700Schasinglulu ldr x5, [sp, #CTX_EL3STATE_OFFSET + CTX_RUNTIME_SP] 282*91f16700Schasinglulu msr spsel, #MODE_SP_EL0 283*91f16700Schasinglulu mov sp, x5 284*91f16700Schasinglulu 285*91f16700Schasinglulu mov x29, x30 286*91f16700Schasinglulu#if ENABLE_ASSERTIONS 287*91f16700Schasinglulu /* Stash the stack pointer */ 288*91f16700Schasinglulu mov x28, sp 289*91f16700Schasinglulu#endif 290*91f16700Schasinglulu bl plat_ea_handler 291*91f16700Schasinglulu 292*91f16700Schasinglulu#if ENABLE_ASSERTIONS 293*91f16700Schasinglulu /* 294*91f16700Schasinglulu * Error handling flows might involve long jumps; so upon returning from 295*91f16700Schasinglulu * the platform error handler, validate that the we've completely 296*91f16700Schasinglulu * unwound the stack. 297*91f16700Schasinglulu */ 298*91f16700Schasinglulu mov x27, sp 299*91f16700Schasinglulu cmp x28, x27 300*91f16700Schasinglulu ASM_ASSERT(eq) 301*91f16700Schasinglulu#endif 302*91f16700Schasinglulu 303*91f16700Schasinglulu /* Make SP point to context */ 304*91f16700Schasinglulu msr spsel, #MODE_SP_ELX 305*91f16700Schasinglulu 306*91f16700Schasinglulu /* Restore EL3 state and ESR */ 307*91f16700Schasinglulu ldp x1, x2, [sp, #CTX_EL3STATE_OFFSET + CTX_SPSR_EL3] 308*91f16700Schasinglulu msr spsr_el3, x1 309*91f16700Schasinglulu msr elr_el3, x2 310*91f16700Schasinglulu 311*91f16700Schasinglulu /* Restore ESR_EL3 and SCR_EL3 */ 312*91f16700Schasinglulu ldp x3, x4, [sp, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3] 313*91f16700Schasinglulu msr scr_el3, x3 314*91f16700Schasinglulu msr esr_el3, x4 315*91f16700Schasinglulu 316*91f16700Schasinglulu#if ENABLE_ASSERTIONS 317*91f16700Schasinglulu cmp x4, xzr 318*91f16700Schasinglulu ASM_ASSERT(ne) 319*91f16700Schasinglulu#endif 320*91f16700Schasinglulu 
321*91f16700Schasinglulu /* Clear ESR storage */ 322*91f16700Schasinglulu str xzr, [sp, #CTX_EL3STATE_OFFSET + CTX_ESR_EL3] 323*91f16700Schasinglulu 324*91f16700Schasinglulu ret x29 325*91f16700Schasingluluendfunc ea_proceed 326