// Copyright 2018 The Abseil Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

| 15 | #ifndef ABSL_BASE_INTERNAL_BITS_H_ |
| 16 | #define ABSL_BASE_INTERNAL_BITS_H_ |
| 17 | |
| 18 | // This file contains bitwise ops which are implementation details of various |
| 19 | // absl libraries. |
| 20 | |
| 21 | #include <cstdint> |
| 22 | |
| 23 | // Clang on Windows has __builtin_clzll; otherwise we need to use the |
| 24 | // windows intrinsic functions. |
| 25 | #if defined(_MSC_VER) |
| 26 | #include <intrin.h> |
| 27 | #if defined(_M_X64) |
| 28 | #pragma intrinsic(_BitScanReverse64) |
| 29 | #pragma intrinsic(_BitScanForward64) |
| 30 | #endif |
| 31 | #pragma intrinsic(_BitScanReverse) |
| 32 | #pragma intrinsic(_BitScanForward) |
| 33 | #endif |
| 34 | |
| 35 | #include "absl/base/attributes.h" |
| 36 | |
| 37 | #if defined(_MSC_VER) |
| 38 | // We can achieve something similar to attribute((always_inline)) with MSVC by |
| 39 | // using the __forceinline keyword, however this is not perfect. MSVC is |
| 40 | // much less aggressive about inlining, and even with the __forceinline keyword. |
| 41 | #define ABSL_BASE_INTERNAL_FORCEINLINE __forceinline |
| 42 | #else |
| 43 | // Use default attribute inline. |
| 44 | #define ABSL_BASE_INTERNAL_FORCEINLINE inline ABSL_ATTRIBUTE_ALWAYS_INLINE |
| 45 | #endif |
| 46 | |
| 47 | |
| 48 | namespace absl { |
| 49 | namespace base_internal { |
| 50 | |
| 51 | ABSL_BASE_INTERNAL_FORCEINLINE int CountLeadingZeros64Slow(uint64_t n) { |
| 52 | int zeroes = 60; |
| 53 | if (n >> 32) zeroes -= 32, n >>= 32; |
| 54 | if (n >> 16) zeroes -= 16, n >>= 16; |
| 55 | if (n >> 8) zeroes -= 8, n >>= 8; |
| 56 | if (n >> 4) zeroes -= 4, n >>= 4; |
| 57 | return "\4\3\2\2\1\1\1\1\0\0\0\0\0\0\0"[n] + zeroes; |
| 58 | } |
| 59 | |
// Returns the number of leading zero bits in n; returns 64 when n == 0.
// Dispatches at preprocessing time to the best available implementation.
ABSL_BASE_INTERNAL_FORCEINLINE int CountLeadingZeros64(uint64_t n) {
#if defined(_MSC_VER) && defined(_M_X64)
  // MSVC does not have __builtin_clzll. Use _BitScanReverse64, which sets
  // result to the index of the highest set bit (and returns 0 when n == 0).
  unsigned long result = 0;  // NOLINT(runtime/int)
  if (_BitScanReverse64(&result, n)) {
    return 63 - result;
  }
  return 64;
#elif defined(_MSC_VER)
  // MSVC does not have __builtin_clzll. Compose two calls to the 32-bit
  // _BitScanReverse: scan the high word first, then the low word.
  unsigned long result = 0;  // NOLINT(runtime/int)
  if ((n >> 32) && _BitScanReverse(&result, n >> 32)) {
    // result indexes within the high word, so the true bit index is
    // result + 32 and the leading-zero count is 63 - (result + 32).
    return 31 - result;
  }
  if (_BitScanReverse(&result, n)) {
    // High word was zero, so the bit index is result and there are an
    // additional 32 leading zeros above the low word: 63 - result total.
    return 63 - result;
  }
  return 64;
#elif defined(__GNUC__)
  // Use __builtin_clzll, which uses the following instructions:
  //  x86: bsr
  //  ARM64: clz
  //  PPC: cntlzd
  static_assert(sizeof(unsigned long long) == sizeof(n),  // NOLINT(runtime/int)
                "__builtin_clzll does not take 64-bit arg");

  // Handle 0 as a special case because __builtin_clzll(0) is undefined.
  if (n == 0) {
    return 64;
  }
  return __builtin_clzll(n);
#else
  // No intrinsic available; use the portable bit-twiddling fallback.
  return CountLeadingZeros64Slow(n);
#endif
}
| 95 | |
| 96 | ABSL_BASE_INTERNAL_FORCEINLINE int CountLeadingZeros32Slow(uint64_t n) { |
| 97 | int zeroes = 28; |
| 98 | if (n >> 16) zeroes -= 16, n >>= 16; |
| 99 | if (n >> 8) zeroes -= 8, n >>= 8; |
| 100 | if (n >> 4) zeroes -= 4, n >>= 4; |
| 101 | return "\4\3\2\2\1\1\1\1\0\0\0\0\0\0\0"[n] + zeroes; |
| 102 | } |
| 103 | |
// Returns the number of leading zero bits in the 32-bit value n; returns 32
// when n == 0. Dispatches at preprocessing time to the best implementation.
ABSL_BASE_INTERNAL_FORCEINLINE int CountLeadingZeros32(uint32_t n) {
#if defined(_MSC_VER)
  // MSVC does not have __builtin_clz. _BitScanReverse sets result to the
  // index of the highest set bit and returns 0 when n == 0.
  unsigned long result = 0;  // NOLINT(runtime/int)
  if (_BitScanReverse(&result, n)) {
    return 31 - result;
  }
  return 32;
#elif defined(__GNUC__)
  // Use __builtin_clz, which uses the following instructions:
  //  x86: bsr
  //  ARM64: clz
  //  PPC: cntlzd
  static_assert(sizeof(int) == sizeof(n),
                "__builtin_clz does not take 32-bit arg");

  // Handle 0 as a special case because __builtin_clz(0) is undefined.
  if (n == 0) {
    return 32;
  }
  return __builtin_clz(n);
#else
  // No intrinsic available; use the portable bit-twiddling fallback.
  return CountLeadingZeros32Slow(n);
#endif
}
| 128 | |
| 129 | ABSL_BASE_INTERNAL_FORCEINLINE int CountTrailingZerosNonZero64Slow(uint64_t n) { |
| 130 | int c = 63; |
| 131 | n &= ~n + 1; |
| 132 | if (n & 0x00000000FFFFFFFF) c -= 32; |
| 133 | if (n & 0x0000FFFF0000FFFF) c -= 16; |
| 134 | if (n & 0x00FF00FF00FF00FF) c -= 8; |
| 135 | if (n & 0x0F0F0F0F0F0F0F0F) c -= 4; |
| 136 | if (n & 0x3333333333333333) c -= 2; |
| 137 | if (n & 0x5555555555555555) c -= 1; |
| 138 | return c; |
| 139 | } |
| 140 | |
// Returns the number of trailing zero bits in n. Precondition: n != 0 (the
// result is unspecified/undefined for 0 on every code path below).
ABSL_BASE_INTERNAL_FORCEINLINE int CountTrailingZerosNonZero64(uint64_t n) {
#if defined(_MSC_VER) && defined(_M_X64)
  // MSVC does not have __builtin_ctzll. Use _BitScanForward64.
  unsigned long result = 0;  // NOLINT(runtime/int)
  _BitScanForward64(&result, n);
  return result;
#elif defined(_MSC_VER)
  // 32-bit MSVC: compose two calls to the 32-bit _BitScanForward.
  unsigned long result = 0;  // NOLINT(runtime/int)
  if (static_cast<uint32_t>(n) == 0) {
    // The low word is all zeros, so the first set bit is in the high word;
    // its index within n is offset by 32.
    _BitScanForward(&result, n >> 32);
    return result + 32;
  }
  _BitScanForward(&result, n);
  return result;
#elif defined(__GNUC__)
  static_assert(sizeof(unsigned long long) == sizeof(n),  // NOLINT(runtime/int)
                "__builtin_ctzll does not take 64-bit arg");
  // __builtin_ctzll(0) is undefined, consistent with this function's
  // non-zero precondition.
  return __builtin_ctzll(n);
#else
  // No intrinsic available; use the portable bit-twiddling fallback.
  return CountTrailingZerosNonZero64Slow(n);
#endif
}
| 162 | |
| 163 | ABSL_BASE_INTERNAL_FORCEINLINE int CountTrailingZerosNonZero32Slow(uint32_t n) { |
| 164 | int c = 31; |
| 165 | n &= ~n + 1; |
| 166 | if (n & 0x0000FFFF) c -= 16; |
| 167 | if (n & 0x00FF00FF) c -= 8; |
| 168 | if (n & 0x0F0F0F0F) c -= 4; |
| 169 | if (n & 0x33333333) c -= 2; |
| 170 | if (n & 0x55555555) c -= 1; |
| 171 | return c; |
| 172 | } |
| 173 | |
// Returns the number of trailing zero bits in the 32-bit value n.
// Precondition: n != 0 (the result is unspecified/undefined for 0 on every
// code path below).
ABSL_BASE_INTERNAL_FORCEINLINE int CountTrailingZerosNonZero32(uint32_t n) {
#if defined(_MSC_VER)
  // MSVC does not have __builtin_ctz. Use _BitScanForward.
  unsigned long result = 0;  // NOLINT(runtime/int)
  _BitScanForward(&result, n);
  return result;
#elif defined(__GNUC__)
  static_assert(sizeof(int) == sizeof(n),
                "__builtin_ctz does not take 32-bit arg");
  // __builtin_ctz(0) is undefined, consistent with this function's non-zero
  // precondition.
  return __builtin_ctz(n);
#else
  // No intrinsic available; use the portable bit-twiddling fallback.
  return CountTrailingZerosNonZero32Slow(n);
#endif
}
| 187 | |
| 188 | #undef ABSL_BASE_INTERNAL_FORCEINLINE |
| 189 | |
| 190 | } // namespace base_internal |
| 191 | } // namespace absl |
| 192 | |
| 193 | #endif // ABSL_BASE_INTERNAL_BITS_H_ |