xref: /arm-trusted-firmware/include/lib/libc/aarch32/endian_.h (revision 91f16700b400a8c0651d24a598fc48ee2997a0d7)
1*91f16700Schasinglulu /*-
2*91f16700Schasinglulu  * SPDX-License-Identifier: BSD-3-Clause
3*91f16700Schasinglulu  *
4*91f16700Schasinglulu  * Copyright (c) 2001 David E. O'Brien
5*91f16700Schasinglulu  *
6*91f16700Schasinglulu  * Redistribution and use in source and binary forms, with or without
7*91f16700Schasinglulu  * modification, are permitted provided that the following conditions
8*91f16700Schasinglulu  * are met:
9*91f16700Schasinglulu  * 1. Redistributions of source code must retain the above copyright
10*91f16700Schasinglulu  *    notice, this list of conditions and the following disclaimer.
11*91f16700Schasinglulu  * 2. Redistributions in binary form must reproduce the above copyright
12*91f16700Schasinglulu  *    notice, this list of conditions and the following disclaimer in the
13*91f16700Schasinglulu  *    documentation and/or other materials provided with the distribution.
14*91f16700Schasinglulu  * 3. Neither the name of the University nor the names of its contributors
15*91f16700Schasinglulu  *    may be used to endorse or promote products derived from this software
16*91f16700Schasinglulu  *    without specific prior written permission.
17*91f16700Schasinglulu  *
18*91f16700Schasinglulu  * THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND
19*91f16700Schasinglulu  * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
20*91f16700Schasinglulu  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
21*91f16700Schasinglulu  * ARE DISCLAIMED.  IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE
22*91f16700Schasinglulu  * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
23*91f16700Schasinglulu  * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
24*91f16700Schasinglulu  * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
25*91f16700Schasinglulu  * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
26*91f16700Schasinglulu  * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
27*91f16700Schasinglulu  * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
28*91f16700Schasinglulu  * SUCH DAMAGE.
29*91f16700Schasinglulu  *
30*91f16700Schasinglulu  *	@(#)endian.h	8.1 (Berkeley) 6/10/93
31*91f16700Schasinglulu  * $NetBSD: endian.h,v 1.7 1999/08/21 05:53:51 simonb Exp $
32*91f16700Schasinglulu  * $FreeBSD$
33*91f16700Schasinglulu  */
34*91f16700Schasinglulu /*
35*91f16700Schasinglulu  * Portions copyright (c) 2018, Arm Limited and Contributors.
36*91f16700Schasinglulu  * All rights reserved.
37*91f16700Schasinglulu  */
38*91f16700Schasinglulu 
39*91f16700Schasinglulu #ifndef ENDIAN__H
40*91f16700Schasinglulu #define ENDIAN__H
41*91f16700Schasinglulu 
42*91f16700Schasinglulu #include <stdint.h>
43*91f16700Schasinglulu 
/*
 * Definitions for byte order, according to byte significance from low
 * address to high.
 */
#define _LITTLE_ENDIAN  1234    /* LSB first: i386, vax */
#define _BIG_ENDIAN     4321    /* MSB first: 68000, ibm, net */
#define _PDP_ENDIAN     3412    /* LSB first in word, MSW first in long */

/* The ARM toolchain predefines __ARMEB__ when targeting big-endian. */
#ifdef __ARMEB__
#define _BYTE_ORDER	_BIG_ENDIAN
#else
#define	_BYTE_ORDER	_LITTLE_ENDIAN
#endif /* __ARMEB__ */

/* Traditional unprefixed BSD names, exposed only when BSD-visible. */
#if __BSD_VISIBLE
#define LITTLE_ENDIAN   _LITTLE_ENDIAN
#define BIG_ENDIAN      _BIG_ENDIAN
#define PDP_ENDIAN      _PDP_ENDIAN
#define BYTE_ORDER      _BYTE_ORDER
#endif

/*
 * Index of the high/low 32-bit word inside a 64-bit (quad) value, plus
 * the network byte-order conversions: a big-endian host already matches
 * network order, so hton/ntoh reduce to casts; a little-endian host
 * must byte-swap via __bswap16()/__bswap32() (defined below).
 */
#ifdef __ARMEB__
#define _QUAD_HIGHWORD 0
#define _QUAD_LOWWORD 1
#define __ntohl(x)	((uint32_t)(x))
#define __ntohs(x)	((uint16_t)(x))
#define __htonl(x)	((uint32_t)(x))
#define __htons(x)	((uint16_t)(x))
#else
#define _QUAD_HIGHWORD  1
#define _QUAD_LOWWORD 0
#define __ntohl(x)        (__bswap32(x))
#define __ntohs(x)        (__bswap16(x))
#define __htonl(x)        (__bswap32(x))
#define __htons(x)        (__bswap16(x))
#endif /* __ARMEB__ */
80*91f16700Schasinglulu 
/*
 * Reverse the byte order of a 64-bit value.  Implemented with portable
 * shifts and masks only (no inline asm), so it works regardless of the
 * target byte order.
 */
static __inline uint64_t
__bswap64(uint64_t _x)
{
	uint64_t _r = 0;
	int _i;

	/*
	 * Peel bytes off the low end of the input and append each one to
	 * the low end of the result; after eight rounds the original low
	 * byte sits in the most-significant position.
	 */
	for (_i = 0; _i < 8; _i++) {
		_r = (_r << 8) | (_x & 0xff);
		_x >>= 8;
	}

	return (_r);
}
90*91f16700Schasinglulu 
/*
 * Reverse the byte order of a 32-bit value at run time, using the
 * classic pre-ARMv6 three-operation sequence (no "rev" instruction
 * required):
 *   t1 = v ^ ror(v, 16)   -- equal byte pairs cancel to zero
 *   t1 &= ~0x00ff0000     -- drop the byte the plain rotate handles
 *   v  = ror(v, 8)        -- rotate every byte one position right
 *   v ^= t1 >> 8          -- correct the two bytes the rotate misplaced
 * The exact instruction order matters, so the asm is left untouched.
 */
static __inline uint32_t
__bswap32_var(uint32_t v)
{
	uint32_t t1;	/* scratch register for the swap sequence */

	__asm __volatile("eor %1, %0, %0, ror #16\n"
	    		"bic %1, %1, #0x00ff0000\n"
			"mov %0, %0, ror #8\n"
			"eor %0, %0, %1, lsr #8\n"
			 : "+r" (v), "=r" (t1));

	return (v);
}
104*91f16700Schasinglulu 
/*
 * Reverse the byte order of a 16-bit value at run time, working in a
 * full 32-bit register: "ror #8" sends the low byte to bits 31:24 and
 * the high byte to bits 7:0; "orr ... lsr #16" copies the displaced
 * byte back down into bits 15:8; "bic ... lsl #16" then clears the
 * leftover bits above bit 15, leaving the swapped halfword.
 */
static __inline uint16_t
__bswap16_var(uint16_t v)
{
	uint32_t ret = v & 0xffff;	/* widen to a 32-bit work register */

	__asm __volatile(
	    "mov    %0, %0, ror #8\n"
	    "orr    %0, %0, %0, lsr #16\n"
	    "bic    %0, %0, %0, lsl #16"
	    : "+r" (ret));

	return ((uint16_t)ret);
}
118*91f16700Schasinglulu 
#ifdef __OPTIMIZE__

/*
 * Byte-swap a 32-bit value with masks and shifts so the compiler can
 * fold it entirely at compile time.  The macro evaluates its argument
 * several times, which is safe only because __bswap32() below routes
 * just compile-time constants (side-effect free) through it.
 */
#define __bswap32_constant(x)	\
    ((((x) & 0xff000000U) >> 24) |	\
     (((x) & 0x00ff0000U) >>  8) |	\
     (((x) & 0x0000ff00U) <<  8) |	\
     (((x) & 0x000000ffU) << 24))

/* Byte-swap a 16-bit value; same multiple-evaluation caveat as above. */
#define __bswap16_constant(x)	\
    ((((x) & 0xff00) >> 8) |		\
     (((x) & 0x00ff) << 8))

/*
 * When optimizing, dispatch on __builtin_constant_p(): constant
 * arguments are swapped at compile time, anything else falls back to
 * the inline-asm run-time helpers.
 */
#define __bswap16(x)	\
    ((uint16_t)(__builtin_constant_p(x) ?	\
     __bswap16_constant(x) :			\
     __bswap16_var(x)))

#define __bswap32(x)	\
    ((uint32_t)(__builtin_constant_p(x) ? 	\
     __bswap32_constant(x) :			\
     __bswap32_var(x)))

#else
/* Unoptimized build: constant folding buys nothing, always call the
 * run-time helpers. */
#define __bswap16(x)	__bswap16_var(x)
#define __bswap32(x)	__bswap32_var(x)

#endif /* __OPTIMIZE__ */
146*91f16700Schasinglulu #endif /* ENDIAN__H */
147