blob: 9677530e8de320bf3f8d00e9bd0e1567996a2377 [file] [log] [blame]
Austin Schuh36244a12019-09-21 17:52:38 -07001// Copyright 2017 The Abseil Authors.
2//
3// Licensed under the Apache License, Version 2.0 (the "License");
4// you may not use this file except in compliance with the License.
5// You may obtain a copy of the License at
6//
7// https://www.apache.org/licenses/LICENSE-2.0
8//
9// Unless required by applicable law or agreed to in writing, software
10// distributed under the License is distributed on an "AS IS" BASIS,
11// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12// See the License for the specific language governing permissions and
13// limitations under the License.
14//
15
16#ifndef ABSL_BASE_INTERNAL_ENDIAN_H_
17#define ABSL_BASE_INTERNAL_ENDIAN_H_
18
19// The following guarantees declaration of the byte swap functions
20#ifdef _MSC_VER
21#include <stdlib.h> // NOLINT(build/include)
Austin Schuh36244a12019-09-21 17:52:38 -070022#elif defined(__FreeBSD__)
23#include <sys/endian.h>
24#elif defined(__GLIBC__)
25#include <byteswap.h> // IWYU pragma: export
26#endif
27
28#include <cstdint>
29#include "absl/base/config.h"
30#include "absl/base/internal/unaligned_access.h"
31#include "absl/base/port.h"
32
33namespace absl {
Austin Schuhb4691e92020-12-31 12:37:18 -080034ABSL_NAMESPACE_BEGIN
Austin Schuh36244a12019-09-21 17:52:38 -070035
36// Use compiler byte-swapping intrinsics if they are available. 32-bit
37// and 64-bit versions are available in Clang and GCC as of GCC 4.3.0.
38// The 16-bit version is available in Clang and GCC only as of GCC 4.8.0.
39// For simplicity, we enable them all only for GCC 4.8.0 or later.
40#if defined(__clang__) || \
41 (defined(__GNUC__) && \
42 ((__GNUC__ == 4 && __GNUC_MINOR__ >= 8) || __GNUC__ >= 5))
// Byte-order reversal helpers built on the compiler intrinsics, which lower
// to a single bswap/rev instruction on common targets.
inline uint16_t gbswap_16(uint16_t value) {
  return __builtin_bswap16(value);
}
inline uint32_t gbswap_32(uint32_t value) {
  return __builtin_bswap32(value);
}
inline uint64_t gbswap_64(uint64_t value) {
  return __builtin_bswap64(value);
}
52
53#elif defined(_MSC_VER)
54inline uint64_t gbswap_64(uint64_t host_int) {
55 return _byteswap_uint64(host_int);
56}
57inline uint32_t gbswap_32(uint32_t host_int) {
58 return _byteswap_ulong(host_int);
59}
60inline uint16_t gbswap_16(uint16_t host_int) {
61 return _byteswap_ushort(host_int);
62}
63
Austin Schuh36244a12019-09-21 17:52:38 -070064#else
// Returns host_int with its eight bytes reversed.
inline uint64_t gbswap_64(uint64_t host_int) {
#if defined(__GNUC__) && defined(__x86_64__) && !defined(__APPLE__)
  // Adapted from /usr/include/byteswap.h.  Not available on Mac.
  // For compile-time constants use glibc's constant-folding macro so the
  // swap itself can fold to a constant; otherwise emit one x86-64 `bswap`
  // instruction (the "0" constraint ties the input to the output register).
  if (__builtin_constant_p(host_int)) {
    return __bswap_constant_64(host_int);
  } else {
    uint64_t result;
    __asm__("bswap %0" : "=r"(result) : "0"(host_int));
    return result;
  }
#elif defined(__GLIBC__)
  // glibc provides bswap_64 via <byteswap.h>, included at the top of file.
  return bswap_64(host_int);
#else
  // Portable fallback: isolate each byte and shift it to the mirror-image
  // position.
  return (((host_int & uint64_t{0xFF}) << 56) |
          ((host_int & uint64_t{0xFF00}) << 40) |
          ((host_int & uint64_t{0xFF0000}) << 24) |
          ((host_int & uint64_t{0xFF000000}) << 8) |
          ((host_int & uint64_t{0xFF00000000}) >> 8) |
          ((host_int & uint64_t{0xFF0000000000}) >> 24) |
          ((host_int & uint64_t{0xFF000000000000}) >> 40) |
          ((host_int & uint64_t{0xFF00000000000000}) >> 56));
#endif  // bswap_64
}
88
// Returns x with its four bytes reversed.
inline uint32_t gbswap_32(uint32_t x) {
#if defined(__GLIBC__)
  return bswap_32(x);
#else
  // Portable fallback: move each byte to its mirrored position.
  return (x >> 24) | ((x >> 8) & uint32_t{0xFF00}) |
         ((x << 8) & uint32_t{0xFF0000}) | (x << 24);
#endif
}
99
// Returns x with its two bytes exchanged.
inline uint16_t gbswap_16(uint16_t x) {
#if defined(__GLIBC__)
  return bswap_16(x);
#else
  // Portable fallback; the cast discards bits promoted above bit 15.
  return static_cast<uint16_t>((x >> 8) | (x << 8));
#endif
}
108
Austin Schuhb4691e92020-12-31 12:37:18 -0800109#endif // intrinsics available
Austin Schuh36244a12019-09-21 17:52:38 -0700110
111#ifdef ABSL_IS_LITTLE_ENDIAN
112
113// Definitions for ntohl etc. that don't require us to include
114// netinet/in.h. We wrap gbswap_32 and gbswap_16 in functions rather
115// than just #defining them because in debug mode, gcc doesn't
116// correctly handle the (rather involved) definitions of bswap_32.
117// gcc guarantees that inline functions are as fast as macros, so
118// this isn't a performance hit.
119inline uint16_t ghtons(uint16_t x) { return gbswap_16(x); }
120inline uint32_t ghtonl(uint32_t x) { return gbswap_32(x); }
121inline uint64_t ghtonll(uint64_t x) { return gbswap_64(x); }
122
123#elif defined ABSL_IS_BIG_ENDIAN
124
125// These definitions are simpler on big-endian machines
126// These are functions instead of macros to avoid self-assignment warnings
127// on calls such as "i = ghtnol(i);". This also provides type checking.
// On a big-endian host native order already matches network order, so these
// conversions are the identity.  They are functions rather than macros to
// get type checking and to avoid self-assignment warnings.
inline uint16_t ghtons(uint16_t n) { return n; }
inline uint32_t ghtonl(uint32_t n) { return n; }
inline uint64_t ghtonll(uint64_t n) { return n; }
131
132#else
133#error \
134 "Unsupported byte order: Either ABSL_IS_BIG_ENDIAN or " \
135 "ABSL_IS_LITTLE_ENDIAN must be defined"
136#endif // byte order
137
138inline uint16_t gntohs(uint16_t x) { return ghtons(x); }
139inline uint32_t gntohl(uint32_t x) { return ghtonl(x); }
140inline uint64_t gntohll(uint64_t x) { return ghtonll(x); }
141
// Utilities to convert numbers between the current host's native byte
// order and little-endian byte order
//
// Load/Store methods are alignment safe
146namespace little_endian {
147// Conversion functions.
148#ifdef ABSL_IS_LITTLE_ENDIAN
149
150inline uint16_t FromHost16(uint16_t x) { return x; }
151inline uint16_t ToHost16(uint16_t x) { return x; }
152
153inline uint32_t FromHost32(uint32_t x) { return x; }
154inline uint32_t ToHost32(uint32_t x) { return x; }
155
156inline uint64_t FromHost64(uint64_t x) { return x; }
157inline uint64_t ToHost64(uint64_t x) { return x; }
158
159inline constexpr bool IsLittleEndian() { return true; }
160
161#elif defined ABSL_IS_BIG_ENDIAN
162
163inline uint16_t FromHost16(uint16_t x) { return gbswap_16(x); }
164inline uint16_t ToHost16(uint16_t x) { return gbswap_16(x); }
165
166inline uint32_t FromHost32(uint32_t x) { return gbswap_32(x); }
167inline uint32_t ToHost32(uint32_t x) { return gbswap_32(x); }
168
169inline uint64_t FromHost64(uint64_t x) { return gbswap_64(x); }
170inline uint64_t ToHost64(uint64_t x) { return gbswap_64(x); }
171
172inline constexpr bool IsLittleEndian() { return false; }
173
174#endif /* ENDIAN */
175
176// Functions to do unaligned loads and stores in little-endian order.
177inline uint16_t Load16(const void *p) {
178 return ToHost16(ABSL_INTERNAL_UNALIGNED_LOAD16(p));
179}
180
181inline void Store16(void *p, uint16_t v) {
182 ABSL_INTERNAL_UNALIGNED_STORE16(p, FromHost16(v));
183}
184
185inline uint32_t Load32(const void *p) {
186 return ToHost32(ABSL_INTERNAL_UNALIGNED_LOAD32(p));
187}
188
189inline void Store32(void *p, uint32_t v) {
190 ABSL_INTERNAL_UNALIGNED_STORE32(p, FromHost32(v));
191}
192
193inline uint64_t Load64(const void *p) {
194 return ToHost64(ABSL_INTERNAL_UNALIGNED_LOAD64(p));
195}
196
197inline void Store64(void *p, uint64_t v) {
198 ABSL_INTERNAL_UNALIGNED_STORE64(p, FromHost64(v));
199}
200
201} // namespace little_endian
202
// Utilities to convert numbers between the current host's native byte
// order and big-endian byte order (same as network byte order)
//
// Load/Store methods are alignment safe
207namespace big_endian {
208#ifdef ABSL_IS_LITTLE_ENDIAN
209
210inline uint16_t FromHost16(uint16_t x) { return gbswap_16(x); }
211inline uint16_t ToHost16(uint16_t x) { return gbswap_16(x); }
212
213inline uint32_t FromHost32(uint32_t x) { return gbswap_32(x); }
214inline uint32_t ToHost32(uint32_t x) { return gbswap_32(x); }
215
216inline uint64_t FromHost64(uint64_t x) { return gbswap_64(x); }
217inline uint64_t ToHost64(uint64_t x) { return gbswap_64(x); }
218
219inline constexpr bool IsLittleEndian() { return true; }
220
221#elif defined ABSL_IS_BIG_ENDIAN
222
223inline uint16_t FromHost16(uint16_t x) { return x; }
224inline uint16_t ToHost16(uint16_t x) { return x; }
225
226inline uint32_t FromHost32(uint32_t x) { return x; }
227inline uint32_t ToHost32(uint32_t x) { return x; }
228
229inline uint64_t FromHost64(uint64_t x) { return x; }
230inline uint64_t ToHost64(uint64_t x) { return x; }
231
232inline constexpr bool IsLittleEndian() { return false; }
233
234#endif /* ENDIAN */
235
236// Functions to do unaligned loads and stores in big-endian order.
237inline uint16_t Load16(const void *p) {
238 return ToHost16(ABSL_INTERNAL_UNALIGNED_LOAD16(p));
239}
240
241inline void Store16(void *p, uint16_t v) {
242 ABSL_INTERNAL_UNALIGNED_STORE16(p, FromHost16(v));
243}
244
245inline uint32_t Load32(const void *p) {
246 return ToHost32(ABSL_INTERNAL_UNALIGNED_LOAD32(p));
247}
248
249inline void Store32(void *p, uint32_t v) {
250 ABSL_INTERNAL_UNALIGNED_STORE32(p, FromHost32(v));
251}
252
253inline uint64_t Load64(const void *p) {
254 return ToHost64(ABSL_INTERNAL_UNALIGNED_LOAD64(p));
255}
256
257inline void Store64(void *p, uint64_t v) {
258 ABSL_INTERNAL_UNALIGNED_STORE64(p, FromHost64(v));
259}
260
261} // namespace big_endian
262
Austin Schuhb4691e92020-12-31 12:37:18 -0800263ABSL_NAMESPACE_END
Austin Schuh36244a12019-09-21 17:52:38 -0700264} // namespace absl
265
266#endif // ABSL_BASE_INTERNAL_ENDIAN_H_