/*
 * Copyright (c) 2018-2023, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <common/debug.h>
#include <common/runtime_svc.h>
#include <lib/cpus/errata.h>
#include <lib/cpus/wa_cve_2017_5715.h>
#include <lib/cpus/wa_cve_2018_3639.h>
#include <lib/cpus/wa_cve_2022_23960.h>
#include <lib/smccc.h>
#include <services/arm_arch_svc.h>
#include <smccc_helpers.h>
#include <plat/common/platform.h>

static int32_t smccc_version(void)
{
	return MAKE_SMCCC_VERSION(SMCCC_MAJOR_VERSION, SMCCC_MINOR_VERSION);
}

static int32_t smccc_arch_features(u_register_t arg1)
{
	switch (arg1) {
	case SMCCC_VERSION:
	case SMCCC_ARCH_FEATURES:
		return SMC_ARCH_CALL_SUCCESS;
	case SMCCC_ARCH_SOC_ID:
		return plat_is_smccc_feature_available(arg1);
#ifdef __aarch64__
	/* Workaround checks are currently only implemented for aarch64 */
#if WORKAROUND_CVE_2017_5715
	case SMCCC_ARCH_WORKAROUND_1:
		if (check_wa_cve_2017_5715() == ERRATA_NOT_APPLIES)
			return 1;
		return 0; /* ERRATA_APPLIES || ERRATA_MISSING */
#endif

#if WORKAROUND_CVE_2018_3639
	case SMCCC_ARCH_WORKAROUND_2: {
#if DYNAMIC_WORKAROUND_CVE_2018_3639
		unsigned long long ssbs;

		/*
		 * Firmware doesn't have to carry out dynamic workaround if the
		 * PE implements architectural Speculation Store Bypass Safe
		 * (SSBS) feature.
		 */
		ssbs = (read_id_aa64pfr1_el1() >> ID_AA64PFR1_EL1_SSBS_SHIFT) &
			ID_AA64PFR1_EL1_SSBS_MASK;

		/*
		 * If architectural SSBS is available on this PE, no firmware
		 * mitigation via SMCCC_ARCH_WORKAROUND_2 is required.
		 */
		if (ssbs != SSBS_UNAVAILABLE)
			return 1;

		/*
		 * On a platform where at least one CPU requires
		 * dynamic mitigation but others are either unaffected
		 * or permanently mitigated, report the latter as not
		 * needing dynamic mitigation.
		 */
		if (wa_cve_2018_3639_get_disable_ptr() == NULL)
			return 1;
		/*
		 * If we get here, this CPU requires dynamic mitigation
		 * so report it as such.
		 */
		return 0;
#else
		/* Either the CPUs are unaffected or permanently mitigated */
		return SMC_ARCH_CALL_NOT_REQUIRED;
#endif
	}
#endif

#if (WORKAROUND_CVE_2022_23960 || WORKAROUND_CVE_2017_5715)
	case SMCCC_ARCH_WORKAROUND_3:
		/*
		 * SMCCC_ARCH_WORKAROUND_3 should also take into account
		 * CVE-2017-5715 since this SMC can be used instead of
		 * SMCCC_ARCH_WORKAROUND_1.
		 */
		if ((check_smccc_arch_wa3_applies() == ERRATA_NOT_APPLIES) &&
		    (check_wa_cve_2017_5715() == ERRATA_NOT_APPLIES)) {
			return 1;
		}
		return 0; /* ERRATA_APPLIES || ERRATA_MISSING */
#endif
#endif /* __aarch64__ */

	/* Fallthrough */

	default:
		return SMC_UNK;
	}
}

/*
 * Return the SoC revision or SoC version on success, otherwise return
 * SMC_ARCH_CALL_INVAL_PARAM.
 */
static int32_t smccc_arch_id(u_register_t arg1)
{
	if (arg1 == SMCCC_GET_SOC_REVISION) {
		return plat_get_soc_revision();
	}
	if (arg1 == SMCCC_GET_SOC_VERSION) {
		return plat_get_soc_version();
	}
	return SMC_ARCH_CALL_INVAL_PARAM;
}

/*
 * Top-level Arm Architectural Service SMC handler.
 */
static uintptr_t arm_arch_svc_smc_handler(uint32_t smc_fid,
	u_register_t x1,
	u_register_t x2,
	u_register_t x3,
	u_register_t x4,
	void *cookie,
	void *handle,
	u_register_t flags)
{
	switch (smc_fid) {
	case SMCCC_VERSION:
		SMC_RET1(handle, smccc_version());
	case SMCCC_ARCH_FEATURES:
		SMC_RET1(handle, smccc_arch_features(x1));
	case SMCCC_ARCH_SOC_ID:
		SMC_RET1(handle, smccc_arch_id(x1));
#ifdef __aarch64__
#if WORKAROUND_CVE_2017_5715
	case SMCCC_ARCH_WORKAROUND_1:
		/*
		 * The workaround has already been applied on affected PEs
		 * during entry to EL3. On unaffected PEs, this function
		 * has no effect.
		 */
		SMC_RET0(handle);
#endif
#if WORKAROUND_CVE_2018_3639
	case SMCCC_ARCH_WORKAROUND_2:
		/*
		 * The workaround has already been applied on affected PEs
		 * requiring dynamic mitigation during entry to EL3.
		 * On unaffected or statically mitigated PEs, this function
		 * has no effect.
		 */
		SMC_RET0(handle);
#endif
#if (WORKAROUND_CVE_2022_23960 || WORKAROUND_CVE_2017_5715)
	case SMCCC_ARCH_WORKAROUND_3:
		/*
		 * The workaround has already been applied on affected PEs
		 * during entry to EL3. On unaffected PEs, this function
		 * has no effect.
		 */
		SMC_RET0(handle);
#endif
#endif /* __aarch64__ */
	default:
		WARN("Unimplemented Arm Architecture Service Call: 0x%x\n",
			smc_fid);
		SMC_RET1(handle, SMC_UNK);
	}
}

/* Register Arm Architectural Service Calls as a runtime service */
DECLARE_RT_SVC(
	arm_arch_svc,
	OEN_ARM_START,
	OEN_ARM_END,
	SMC_TYPE_FAST,
	NULL,
	arm_arch_svc_smc_handler
);
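
/*
 * Illustrative sketch only, not part of this service: a minimal example of
 * how a lower-EL client might consume the discovery flow implemented above.
 * It checks SMCCC_VERSION, probes SMCCC_ARCH_FEATURES for
 * SMCCC_ARCH_WORKAROUND_1, and only issues the workaround SMC when the probe
 * reports that the mitigation is required (return value 0). The smc_call()
 * helper and its 'res' structure are hypothetical placeholders for whatever
 * SMC conduit the caller's environment provides; they are not TF-A APIs.
 *
 *	res = smc_call(SMCCC_VERSION, 0, 0, 0);
 *	if ((int32_t)res.a0 < 0)
 *		return;				// SMCCC v1.0: no discovery
 *
 *	res = smc_call(SMCCC_ARCH_FEATURES, SMCCC_ARCH_WORKAROUND_1, 0, 0);
 *	if ((int32_t)res.a0 == 0)		// 0: mitigation required
 *		(void)smc_call(SMCCC_ARCH_WORKAROUND_1, 0, 0, 0);
 */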