xref: /arm-trusted-firmware/include/lib/utils_def.h (revision 91f16700b400a8c0651d24a598fc48ee2997a0d7)
1*91f16700Schasinglulu /*
2*91f16700Schasinglulu  * Copyright (c) 2016-2023, Arm Limited and Contributors. All rights reserved.
3*91f16700Schasinglulu  * Copyright (c) 2020, NVIDIA Corporation. All rights reserved.
4*91f16700Schasinglulu  *
5*91f16700Schasinglulu  * SPDX-License-Identifier: BSD-3-Clause
6*91f16700Schasinglulu  */
7*91f16700Schasinglulu 
8*91f16700Schasinglulu #ifndef UTILS_DEF_H
9*91f16700Schasinglulu #define UTILS_DEF_H
10*91f16700Schasinglulu 
11*91f16700Schasinglulu #include <export/lib/utils_def_exp.h>
12*91f16700Schasinglulu 
/*
 * Number of elements in a statically-sized array. Only valid on a true
 * array object: a pointer (including an array function parameter, which
 * decays to a pointer) silently yields a wrong answer.
 */
#define ARRAY_SIZE(a)				\
	(sizeof(a) / sizeof(*(a)))
16*91f16700Schasinglulu 
/*
 * True when @x has at most one bit set: clearing the lowest set bit
 * (x & (x - 1)) leaves zero. Note that 0 also satisfies this test.
 */
#define IS_POWER_OF_TWO(x)			\
	((((x) - 1) & (x)) == 0)
19*91f16700Schasinglulu 
/* Size in bytes of 2^n 32-bit words, i.e. 4 << n. */
#define SIZE_FROM_LOG2_WORDS(n)		(U(4) << (n))
21*91f16700Schasinglulu 
/*
 * Single-bit masks: bit @nr set in a 32-bit (BIT_32) or 64-bit (BIT_64)
 * unsigned constant. @nr must be smaller than the type's width, otherwise
 * the shift is undefined behaviour.
 */
#define BIT_32(nr)			(U(1) << (nr))
#define BIT_64(nr)			(ULL(1) << (nr))

/* BIT() follows the native register width of the target architecture. */
#ifdef __aarch64__
#define BIT				BIT_64
#else
#define BIT				BIT_32
#endif
30*91f16700Schasinglulu 
/*
 * Create a contiguous bitmask starting at bit position @l and ending at
 * position @h. For example
 * GENMASK_64(39, 21) gives us the 64bit vector 0x000000ffffe00000.
 */
#if defined(__LINKER__) || defined(__ASSEMBLER__)
/* Plain integer constants: linker scripts and assembly cannot use the
 * UINT32_C()/UINT64_C() suffixing macros of the C variants below. */
#define GENMASK_32(h, l) \
	(((0xFFFFFFFF) << (l)) & (0xFFFFFFFF >> (32 - 1 - (h))))

/* NOTE(review): `~0` relies on the assembler/linker evaluating `>>` on an
 * all-ones value as a logical (not sign-extending) shift; if the tool
 * performs an arithmetic shift, bits above @h would not be cleared.
 * Worth confirming against the binutils expression rules. */
#define GENMASK_64(h, l) \
	((~0 << (l)) & (~0 >> (64 - 1 - (h))))
#else
#define GENMASK_32(h, l) \
	(((~UINT32_C(0)) << (l)) & (~UINT32_C(0) >> (32 - 1 - (h))))

#define GENMASK_64(h, l) \
	(((~UINT64_C(0)) << (l)) & (~UINT64_C(0) >> (64 - 1 - (h))))
#endif

/* GENMASK() follows the native register width of the target architecture. */
#ifdef __aarch64__
#define GENMASK				GENMASK_64
#else
#define GENMASK				GENMASK_32
#endif
55*91f16700Schasinglulu 
/*
 * This variant of div_round_up can be used in macro definition but should not
 * be used in C code as the `div` parameter is evaluated twice.
 * (@n is evaluated once, @d twice; keep both free of side effects.)
 */
#define DIV_ROUND_UP_2EVAL(n, d)	(((n) + (d) - 1) / (d))
61*91f16700Schasinglulu 
/*
 * Integer division of @val by @div, rounded towards positive infinity.
 * @div is captured in a local so it is evaluated exactly once; the
 * constant 1 is cast to @div's type to keep the arithmetic type-safe.
 */
#define div_round_up(val, div) __extension__ ({		\
	__typeof__(div) _d = (div);			\
	((val) + _d - (__typeof__(div)) 1) / _d;	\
})
66*91f16700Schasinglulu 
/*
 * Type-safe minimum of two values. Each argument is evaluated exactly
 * once; the pointer comparison of the two locals is discarded but makes
 * the compiler warn when the argument types are incompatible.
 */
#define MIN(x, y) __extension__ ({	\
	__typeof__(x) _a = (x);		\
	__typeof__(y) _b = (y);		\
	(void)(&_a == &_b);		\
	(_a < _b) ? _a : _b;		\
})

/* Type-safe maximum of two values; see MIN() for the mechanism. */
#define MAX(x, y) __extension__ ({	\
	__typeof__(x) _a = (x);		\
	__typeof__(y) _b = (y);		\
	(void)(&_a == &_b);		\
	(_a > _b) ? _a : _b;		\
})
80*91f16700Schasinglulu 
/*
 * Clamp @x into the inclusive range [@min, @max]. Each argument is
 * evaluated exactly once; the discarded pointer comparisons provoke a
 * compiler warning if the three argument types are incompatible.
 * The upper bound is tested first, so the result for a (misused)
 * min > max range matches the historical behaviour.
 */
#define CLAMP(x, min, max) __extension__ ({ \
	__typeof__(x) _val = (x); \
	__typeof__(min) _lo = (min); \
	__typeof__(max) _hi = (max); \
	(void)(&_val == &_lo); \
	(void)(&_val == &_hi); \
	((_val > _hi) ? _hi : ((_val < _lo) ? _lo : _val)); \
})
89*91f16700Schasinglulu 
/*
 * The round_up() macro rounds up a value to the given boundary in a
 * type-agnostic yet type-safe manner. The boundary must be a power of two.
 * In other words, it computes the smallest multiple of boundary which is
 * greater than or equal to value.
 *
 * round_down() is similar but rounds the value down instead.
 */

/* Low-bit mask for a power-of-two boundary, cast to the value's type. */
#define round_boundary(value, boundary)		\
	((__typeof__(value))((boundary) - 1))

/* Set every bit below the boundary in (value - 1), then add 1 to land on
 * the next multiple. An already-aligned value is returned unchanged
 * (value == 0 round-trips through the wrap back to 0). */
#define round_up(value, boundary)		\
	((((value) - 1) | round_boundary(value, boundary)) + 1)

/* Clear every bit below the boundary. */
#define round_down(value, boundary)		\
	((value) & ~round_boundary(value, boundary))
106*91f16700Schasinglulu 
/* add operation together with checking whether the operation overflowed
 * The result is '*res',
 * return 0 on success and 1 on overflow
 * (thin wrapper over the GCC/Clang checked-arithmetic builtin, which
 * performs the addition in infinite precision before truncating to *res)
 */
#define add_overflow(a, b, res) __builtin_add_overflow((a), (b), (res))
112*91f16700Schasinglulu 
/*
 * Round up a value to align with a given size and
 * check whether overflow happens.
 * The rounduped value is '*res',
 * return 0 on success and 1 on overflow
 *
 * NOTE(review): this macro spells `typeof` while the rest of the file uses
 * `__typeof__`; the plain spelling is rejected under strict -std= modes
 * before C23 — consider unifying.
 * NOTE(review): in the final expression the `?:` third operand binds before
 * the trailing `, 0`, i.e. it parses as `(cond ? 1 : (void)(...)), 0`.
 * Confirm with a unit test that the expression really yields 1 when
 * add_overflow() reports overflow, rather than the comma operand 0.
 */
#define round_up_overflow(v, size, res) (__extension__({ \
	typeof(res) __res = res; \
	typeof(*(__res)) __roundup_tmp = 0; \
	typeof(v) __roundup_mask = (typeof(v))(size) - 1; \
	\
	add_overflow((v), __roundup_mask, &__roundup_tmp) ? 1 : \
		(void)(*(__res) = __roundup_tmp & ~__roundup_mask), 0; \
}))
127*91f16700Schasinglulu 
/*
 * Add a with b, then round up the result to align with a given size and
 * check whether overflow happens.
 * The rounduped value is '*res',
 * return 0 on success and 1 on overflow
 *
 * NOTE(review): uses `typeof` rather than the `__typeof__` spelling used
 * elsewhere in this file (rejected by strict pre-C23 -std= modes), and
 * inherits round_up_overflow()'s `?:`/comma precedence caveat — verify the
 * overflow path really returns 1.
 */
#define add_with_round_up_overflow(a, b, size, res) (__extension__({ \
	typeof(a) __a = (a); \
	typeof(__a) __add_res = 0; \
	\
	add_overflow((__a), (b), &__add_res) ? 1 : \
		round_up_overflow(__add_res, (size), (res)) ? 1 : 0; \
}))
141*91f16700Schasinglulu 
/**
 * Helper macro to ensure a value lies on a given boundary.
 *
 * Evaluates to true when @value is already a multiple of the power-of-two
 * @boundary: rounding up and rounding down agree only for aligned values.
 *
 * Fix: the cast operand is now fully parenthesised. Previously
 * `(uintptr_t) value` bound only to the first token of an expression
 * argument, e.g. is_aligned(p + 1, 8) expanded to `(uintptr_t)p + 1`,
 * turning pointer arithmetic into byte arithmetic.
 */
#define is_aligned(value, boundary)			\
	(round_up((uintptr_t)(value), boundary) ==	\
	 round_down((uintptr_t)(value), boundary))
148*91f16700Schasinglulu 
/*
 * Evaluates to 1 if (ptr + inc) overflows, 0 otherwise.
 * Both arguments must be unsigned pointer values (i.e. uintptr_t).
 * The addition is rephrased as a comparison against the headroom left
 * below UINTPTR_MAX, so no overflowing operation is ever performed.
 */
#define check_uptr_overflow(ptr, inc)		\
	((ptr) > (UINTPTR_MAX - (inc)))

/*
 * Evaluates to 1 if (val + inc) overflows, 0 otherwise.
 * Both arguments must be 32-bit unsigned integers (i.e. effectively uint32_t).
 */
#define check_u32_overflow(val, inc) \
	((val) > (UINT32_MAX - (inc)))
162*91f16700Schasinglulu 
/* Register size of the current architecture, in bytes. */
#ifdef __aarch64__
#define REGSZ		U(8)	/* AArch64: 64-bit general-purpose registers */
#else
#define REGSZ		U(4)	/* AArch32: 32-bit general-purpose registers */
#endif

/*
 * Test for the current architecture version to be at least the version
 * expected.
 * ARM_ARCH_MAJOR/ARM_ARCH_MINOR are not defined in this header; they are
 * expected to be provided by the build system.
 */
#define ARM_ARCH_AT_LEAST(_maj, _min) \
	((ARM_ARCH_MAJOR > (_maj)) || \
	 ((ARM_ARCH_MAJOR == (_maj)) && (ARM_ARCH_MINOR >= (_min))))
177*91f16700Schasinglulu 
/*
 * Import an assembly or linker symbol as a C expression with the specified
 * type.
 * The symbol is declared as a char array so that its address — i.e. the
 * link-time value of the symbol — can be taken and cast; a per-translation-
 * unit static constant `name` is then defined with that value.
 */
#define IMPORT_SYM(type, sym, name) \
	extern char sym[];\
	static const __attribute__((unused)) type name = (type) sym;

/*
 * When the symbol is used to hold a pointer, its alignment can be asserted
 * with this macro. For example, if there is a linker symbol that is going to
 * be used as a 64-bit pointer, the value of the linker symbol must also be
 * aligned to 64 bit. This macro makes sure this is the case.
 * (Uses assert(); the including file must provide <assert.h>.)
 */
#define ASSERT_SYM_PTR_ALIGN(sym) assert(((size_t)(sym) % __alignof__(*(sym))) == 0)

/* Compiler-level reordering barrier only — emits no instruction and is not
 * a hardware memory barrier. */
#define COMPILER_BARRIER() __asm__ volatile ("" ::: "memory")
195*91f16700Schasinglulu 
/* Compiler builtin of GCC >= 9 and planned in llvm.
 * When available, the builtin blocks speculative use of `var`
 * (Spectre-style mitigation); otherwise this is a transparent pass-through.
 */
#ifdef __HAVE_SPECULATION_SAFE_VALUE
# define SPECULATION_SAFE_VALUE(var) __builtin_speculation_safe_value(var)
#else
# define SPECULATION_SAFE_VALUE(var) var
#endif

/*
 * Ticks elapsed in one second with a signal of 1 MHz
 */
#define MHZ_TICKS_PER_SEC	U(1000000)

/*
 * Ticks elapsed in one second with a signal of 1 KHz
 */
#define KHZ_TICKS_PER_SEC U(1000)
212*91f16700Schasinglulu 
213*91f16700Schasinglulu #endif /* UTILS_DEF_H */
214