/*
 * Copyright 2018-2020 NXP
 *
 * SPDX-License-Identifier: BSD-3-Clause
 *
 */

#include <asm_macros.S>
#include <bl31_data.h>

.global el2_2_aarch32
.global prefetch_disable

#define  SPSR_EL3_M4     0x10
#define  SPSR_EL_MASK    0xC
#define  SPSR_EL2        0x8
#define  SCR_EL3_4_EL2_AARCH32  0x131
#define  SPSR32_EL2_LE          0x1DA
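/*
 * SCR_EL3_4_EL2_AARCH32: NS=1, RES1 bits [5:4], HCE=1, with RW=0 so the
 * lower ELs run in AArch32.
 * SPSR32_EL2_LE: AArch32 Hyp mode, A/I/F exceptions masked, little-endian.
 */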

#define  MIDR_PARTNUM_START      4
#define  MIDR_PARTNUM_WIDTH      12
#define  MIDR_PARTNUM_A53        0xD03
#define  MIDR_PARTNUM_A57        0xD07
#define  MIDR_PARTNUM_A72        0xD08

/*
 * uint64_t el2_2_aarch32(u_register_t smc_id,
 *                        u_register_t start_addr,
 *                        u_register_t parm1,
 *                        u_register_t parm2)
 * This function switches the execution state of EL2 from AArch64 to
 * AArch32.
 * Note: it MUST be called from EL2 @ AArch64.
 * in:  x0 = SMC function id
 *      x1 = start address for EL2 @ AArch32
 *      x2 = first parameter to pass to EL2 @ AArch32
 *      x3 = second parameter to pass to EL2 @ AArch32
 * out: x0 = 0,  on success (execution resumes at start_addr in AArch32
 *               EL2 rather than returning to the caller)
 *      x0 = -1, on failure
 * uses x0, x1, x2, x3
 */
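
/*
 * Illustrative caller-side sketch (hypothetical, not part of this file):
 * from EL2 @ AArch64, an SMC along these lines would reach this service,
 * where SIP_EL2_2_AARCH32_ID stands in for the SiP function id and
 * aarch32_entry for the 32-bit entry point:
 *
 *	ldr  x0, =SIP_EL2_2_AARCH32_ID	// hypothetical SMC function id
 *	ldr  x1, =aarch32_entry		// EL2 @ AArch32 start address
 *	mov  x2, #0			// parm1
 *	mov  x3, #0			// parm2
 *	smc  #0				// on success, execution resumes at
 *					// aarch32_entry in AArch32 Hyp mode
 */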
func el2_2_aarch32

	/* check that the caller is EL2 @ AArch64 - error return if not */
	mrs  x0, spsr_el3
	/* see if we were called from AArch32 */
	tst  x0, #SPSR_EL3_M4
	b.ne 2f

	/* see if we were called from EL2 */
	and   x0, x0, SPSR_EL_MASK
	cmp   x0, SPSR_EL2
	b.ne  2f

	/* set ELR_EL3 */
	msr  elr_el3, x1

	/* set scr_el3 */
	mov  x0, #SCR_EL3_4_EL2_AARCH32
	msr  scr_el3, x0

	/* set sctlr_el2 */
	ldr   x1, =SCTLR_EL2_RES1
	msr  sctlr_el2, x1

	/* set spsr_el3 */
	ldr  x0, =SPSR32_EL2_LE
	msr  spsr_el3, x0

	/* x2 = parm1
	 * x3 = parm2
	 */

	/* set the parameters to be passed through to EL2 @ AArch32 */
	mov  x1, x2
	mov  x2, x3

	/* x1 = parm1
	 * x2 = parm2
	 */

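	/*
	 * x0 = 0 is the success status; after the eret below it shows up
	 * as r0 = 0 at the AArch32 entry point.
	 */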
	mov  x0, xzr
	/* invalidate the icache */
	ic iallu
	dsb sy
	isb
	b  1f
2:
	/* error return */
	mvn  x0, xzr
	ret
1:
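	/*
	 * Exception return: with SCR_EL3, SPSR_EL3 and ELR_EL3 programmed
	 * above, execution continues at the start address in AArch32 Hyp
	 * (EL2) mode.
	 */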
	eret
endfunc el2_2_aarch32

/*
 * int prefetch_disable(u_register_t smc_id, u_register_t mask)
 * This function marks the cores that need to have prefetch disabled.
 * Secondary cores have prefetch disabled when they are released from
 * reset; the boot core has prefetch disabled when this call is made.
 * in:  x0 = SMC function id
 *      x1 = core mask, where bit[0]=core0, bit[1]=core1, etc.
 *           If a bit in the mask is set, prefetch is disabled for that
 *           core.
 * out: x0 = SMC_SUCCESS
 */
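
/*
 * For example (illustrative only): passing a mask of 0x6 in x1 requests
 * that prefetch be disabled on core1 and core2 (bit[1] and bit[2] set).
 */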
func prefetch_disable
	stp  x4, x30, [sp, #-16]!

	mov   x3, x1

	/* x1 = core prefetch disable mask */
	/* x3 = core prefetch disable mask */

	/* store the mask */
	mov   x0, #PREFETCH_DIS_OFFSET
	bl   _set_global_data

	/* x3 = core prefetch disable mask */

	/* see if we need to disable prefetch on THIS core */
	bl   plat_my_core_mask

	/* x0 = core mask lsb */
	/* x3 = core prefetch disable mask */

	tst   x3, x0
	b.eq  1f

	/* read midr_el1 */
	mrs   x1, midr_el1

	/* x1 = midr_el1 */

	mov   x0, xzr
	bfxil x0, x1, #MIDR_PARTNUM_START, #MIDR_PARTNUM_WIDTH

	/* x0 = part number (a53, a57, a72, etc) */

	/* branch on cpu-specific */
	cmp   x0, #MIDR_PARTNUM_A57
	b.eq  1f
	cmp   x0, #MIDR_PARTNUM_A72
	b.ne  1f
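	/*
	 * Only the A72 has a load-store prefetch disable sequence in this
	 * path; other part numbers branch straight to the exit and return
	 * SMC_SUCCESS.
	 */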

	bl    _disable_ldstr_pfetch_A72
	b     1f
1:
	ldp   x4, x30, [sp], #16
	mov   x0, xzr
	ret
endfunc prefetch_disable