/*
 * Copyright (c) 2013-2019, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <asm_macros.S>

	.globl	spin_lock
	.globl	spin_unlock

#if USE_SPINLOCK_CAS
#if !ARM_ARCH_AT_LEAST(8, 1)
#error USE_SPINLOCK_CAS option requires at least an ARMv8.1 platform
#endif

/*
 * When compiled for ARMv8.1 or later, implement spin locks using the Compare
 * and Swap instruction.
 */

/*
 * Acquire lock using Compare and Swap instruction.
 *
 * Compare the lock word against 0 with acquire semantics and, if it matches,
 * swap in 1. If the lock could not be acquired, use load-exclusive semantics
 * to monitor the address and enter WFE.
 *
 * void spin_lock(spinlock_t *lock);
 */
func spin_lock
	mov	w2, #1
1:	mov	w1, wzr
2:	casa	w1, w2, [x0]	/* Compare against 0, swap in 1, acquire */
	cbz	w1, 3f		/* Old value was 0: lock is now ours */
	ldxr	w1, [x0]	/* Monitor the lock word exclusively */
	cbz	w1, 2b		/* Freed in the meantime: retry the CAS */
	wfe			/* Wait until the monitored word changes */
	b	1b
3:
	ret
endfunc spin_lock
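
/*
 * For illustration only, a rough C11 sketch of the loop above, assuming the
 * lock word is a single 32-bit variable (spinlock_t wraps one) and assuming
 * a wfe() helper; the names are hypothetical and the assembly above remains
 * the authoritative implementation:
 *
 *	void spin_lock_sketch(_Atomic uint32_t *lock)
 *	{
 *		for (;;) {
 *			uint32_t expected = 0U;
 *
 *			// casa: compare against 0, swap in 1, acquire
 *			if (atomic_compare_exchange_strong_explicit(
 *					lock, &expected, 1U,
 *					memory_order_acquire,
 *					memory_order_relaxed))
 *				return;
 *
 *			// ldxr/wfe: wait until the lock word is observed
 *			// clear, then retry the CAS
 *			while (atomic_load_explicit(lock,
 *					memory_order_relaxed) != 0U)
 *				wfe();	// hypothetical WFE helper
 *		}
 *	}
 */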

#else /* !USE_SPINLOCK_CAS */

/*
 * Acquire lock using load-/store-exclusive instruction pair.
 *
 * void spin_lock(spinlock_t *lock);
 */
func spin_lock
	mov	w2, #1
	sevl			/* Set event so the first WFE drops through */
l1:	wfe			/* Wait for an event before re-checking */
l2:	ldaxr	w1, [x0]	/* Load lock word, acquire + exclusive */
	cbnz	w1, l1		/* Lock held: go back to sleep */
	stxr	w1, w2, [x0]	/* Try to claim the lock by storing 1 */
	cbnz	w1, l2		/* Exclusive store failed: reload and retry */
	ret
endfunc spin_lock
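
/*
 * A comparable C11 sketch of the exclusive-pair loop above (illustration
 * only, names hypothetical): atomic_compare_exchange_weak_explicit is the
 * closest C analogue, since without LSE atomics compilers typically lower
 * it to a load-/store-exclusive pair that, like STXR, may fail spuriously:
 *
 *	void spin_lock_sketch(_Atomic uint32_t *lock)
 *	{
 *		uint32_t expected = 0U;
 *
 *		while (!atomic_compare_exchange_weak_explicit(
 *				lock, &expected, 1U,
 *				memory_order_acquire,
 *				memory_order_relaxed)) {
 *			if (expected != 0U)
 *				wfe();	// lock held: sleep until an event
 *			expected = 0U;	// reset for the next attempt
 *		}
 *	}
 */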

#endif /* USE_SPINLOCK_CAS */

/*
 * Release lock previously acquired by spin_lock.
 *
 * Use a store-release to unconditionally clear the spinlock variable.
 * The store also generates an event, waking up any cores waiting in WFE
 * while the address is monitored by the global monitor.
 *
 * void spin_unlock(spinlock_t *lock);
 */
func spin_unlock
	stlr	wzr, [x0]	/* Clear the lock word with release semantics */
	ret
endfunc spin_unlock
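
/*
 * Typical usage from C (sketch only; the lock name and function below are
 * illustrative, and the lock word is assumed to start zeroed, i.e.
 * unlocked):
 *
 *	#include <lib/spinlock.h>
 *
 *	static spinlock_t state_lock;
 *
 *	void update_shared_state(void)
 *	{
 *		spin_lock(&state_lock);
 *		// ... critical section: access shared state ...
 *		spin_unlock(&state_lock);
 *	}
 */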