path: root/contrib/restricted/abseil-cpp-tstring/y_absl/base/internal/endian.h
author      heretic <heretic@yandex-team.ru>  2022-02-10 16:45:46 +0300
committer   Daniil Cherednik <dcherednik@yandex-team.ru>  2022-02-10 16:45:46 +0300
commit      81eddc8c0b55990194e112b02d127b87d54164a9 (patch)
tree        9142afc54d335ea52910662635b898e79e192e49 /contrib/restricted/abseil-cpp-tstring/y_absl/base/internal/endian.h
parent      397cbe258b9e064f49c4ca575279f02f39fef76e (diff)
download    ydb-81eddc8c0b55990194e112b02d127b87d54164a9.tar.gz
Restoring authorship annotation for <heretic@yandex-team.ru>. Commit 2 of 2.
Diffstat (limited to 'contrib/restricted/abseil-cpp-tstring/y_absl/base/internal/endian.h')
-rw-r--r--  contrib/restricted/abseil-cpp-tstring/y_absl/base/internal/endian.h  122
1 file changed, 61 insertions(+), 61 deletions(-)
diff --git a/contrib/restricted/abseil-cpp-tstring/y_absl/base/internal/endian.h b/contrib/restricted/abseil-cpp-tstring/y_absl/base/internal/endian.h
index d11a9a5ae8..0f7adb8bf6 100644
--- a/contrib/restricted/abseil-cpp-tstring/y_absl/base/internal/endian.h
+++ b/contrib/restricted/abseil-cpp-tstring/y_absl/base/internal/endian.h
@@ -26,7 +26,7 @@
#endif
#include <cstdint>
-#include "y_absl/base/casts.h"
+#include "y_absl/base/casts.h"
#include "y_absl/base/config.h"
#include "y_absl/base/internal/unaligned_access.h"
#include "y_absl/base/port.h"
@@ -174,36 +174,36 @@ inline constexpr bool IsLittleEndian() { return false; }
#endif /* ENDIAN */
-inline uint8_t FromHost(uint8_t x) { return x; }
-inline uint16_t FromHost(uint16_t x) { return FromHost16(x); }
-inline uint32_t FromHost(uint32_t x) { return FromHost32(x); }
-inline uint64_t FromHost(uint64_t x) { return FromHost64(x); }
-inline uint8_t ToHost(uint8_t x) { return x; }
-inline uint16_t ToHost(uint16_t x) { return ToHost16(x); }
-inline uint32_t ToHost(uint32_t x) { return ToHost32(x); }
-inline uint64_t ToHost(uint64_t x) { return ToHost64(x); }
-
-inline int8_t FromHost(int8_t x) { return x; }
-inline int16_t FromHost(int16_t x) {
- return bit_cast<int16_t>(FromHost16(bit_cast<uint16_t>(x)));
-}
-inline int32_t FromHost(int32_t x) {
- return bit_cast<int32_t>(FromHost32(bit_cast<uint32_t>(x)));
-}
-inline int64_t FromHost(int64_t x) {
- return bit_cast<int64_t>(FromHost64(bit_cast<uint64_t>(x)));
-}
-inline int8_t ToHost(int8_t x) { return x; }
-inline int16_t ToHost(int16_t x) {
- return bit_cast<int16_t>(ToHost16(bit_cast<uint16_t>(x)));
-}
-inline int32_t ToHost(int32_t x) {
- return bit_cast<int32_t>(ToHost32(bit_cast<uint32_t>(x)));
-}
-inline int64_t ToHost(int64_t x) {
- return bit_cast<int64_t>(ToHost64(bit_cast<uint64_t>(x)));
-}
-
+inline uint8_t FromHost(uint8_t x) { return x; }
+inline uint16_t FromHost(uint16_t x) { return FromHost16(x); }
+inline uint32_t FromHost(uint32_t x) { return FromHost32(x); }
+inline uint64_t FromHost(uint64_t x) { return FromHost64(x); }
+inline uint8_t ToHost(uint8_t x) { return x; }
+inline uint16_t ToHost(uint16_t x) { return ToHost16(x); }
+inline uint32_t ToHost(uint32_t x) { return ToHost32(x); }
+inline uint64_t ToHost(uint64_t x) { return ToHost64(x); }
+
+inline int8_t FromHost(int8_t x) { return x; }
+inline int16_t FromHost(int16_t x) {
+ return bit_cast<int16_t>(FromHost16(bit_cast<uint16_t>(x)));
+}
+inline int32_t FromHost(int32_t x) {
+ return bit_cast<int32_t>(FromHost32(bit_cast<uint32_t>(x)));
+}
+inline int64_t FromHost(int64_t x) {
+ return bit_cast<int64_t>(FromHost64(bit_cast<uint64_t>(x)));
+}
+inline int8_t ToHost(int8_t x) { return x; }
+inline int16_t ToHost(int16_t x) {
+ return bit_cast<int16_t>(ToHost16(bit_cast<uint16_t>(x)));
+}
+inline int32_t ToHost(int32_t x) {
+ return bit_cast<int32_t>(ToHost32(bit_cast<uint32_t>(x)));
+}
+inline int64_t ToHost(int64_t x) {
+ return bit_cast<int64_t>(ToHost64(bit_cast<uint64_t>(x)));
+}
+
// Functions to do unaligned loads and stores in little-endian order.
inline uint16_t Load16(const void *p) {
return ToHost16(ABSL_INTERNAL_UNALIGNED_LOAD16(p));
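The block restored with a `+` prefix above is the FromHost/ToHost overload set for the little-endian helpers: the unsigned overloads forward to the width-suffixed functions (FromHost16/32/64, ToHost16/32/64), while the signed overloads round-trip through their unsigned counterpart with bit_cast so the byte swap only ever operates on an unsigned value. A minimal standalone sketch of that pattern, using C++20's std::bit_cast in place of y_absl's bit_cast and assuming a little-endian host where the 16-bit helper is the identity (names here are illustrative, not the header's):

#include <bit>
#include <cstdint>

// Assumed little-endian host: host order already matches little-endian
// wire order, so the width-suffixed helper is the identity here.
inline uint16_t FromHost16(uint16_t x) { return x; }

// Unsigned overload simply forwards to the width-suffixed helper.
inline uint16_t FromHost(uint16_t x) { return FromHost16(x); }

// Signed overload: view the bits as unsigned, convert, then view them back
// as signed, so any byte swap never runs on a signed type.
inline int16_t FromHost(int16_t x) {
  return std::bit_cast<int16_t>(FromHost16(std::bit_cast<uint16_t>(x)));
}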
@@ -264,36 +264,36 @@ inline constexpr bool IsLittleEndian() { return false; }
#endif /* ENDIAN */
-inline uint8_t FromHost(uint8_t x) { return x; }
-inline uint16_t FromHost(uint16_t x) { return FromHost16(x); }
-inline uint32_t FromHost(uint32_t x) { return FromHost32(x); }
-inline uint64_t FromHost(uint64_t x) { return FromHost64(x); }
-inline uint8_t ToHost(uint8_t x) { return x; }
-inline uint16_t ToHost(uint16_t x) { return ToHost16(x); }
-inline uint32_t ToHost(uint32_t x) { return ToHost32(x); }
-inline uint64_t ToHost(uint64_t x) { return ToHost64(x); }
-
-inline int8_t FromHost(int8_t x) { return x; }
-inline int16_t FromHost(int16_t x) {
- return bit_cast<int16_t>(FromHost16(bit_cast<uint16_t>(x)));
-}
-inline int32_t FromHost(int32_t x) {
- return bit_cast<int32_t>(FromHost32(bit_cast<uint32_t>(x)));
-}
-inline int64_t FromHost(int64_t x) {
- return bit_cast<int64_t>(FromHost64(bit_cast<uint64_t>(x)));
-}
-inline int8_t ToHost(int8_t x) { return x; }
-inline int16_t ToHost(int16_t x) {
- return bit_cast<int16_t>(ToHost16(bit_cast<uint16_t>(x)));
-}
-inline int32_t ToHost(int32_t x) {
- return bit_cast<int32_t>(ToHost32(bit_cast<uint32_t>(x)));
-}
-inline int64_t ToHost(int64_t x) {
- return bit_cast<int64_t>(ToHost64(bit_cast<uint64_t>(x)));
-}
-
+inline uint8_t FromHost(uint8_t x) { return x; }
+inline uint16_t FromHost(uint16_t x) { return FromHost16(x); }
+inline uint32_t FromHost(uint32_t x) { return FromHost32(x); }
+inline uint64_t FromHost(uint64_t x) { return FromHost64(x); }
+inline uint8_t ToHost(uint8_t x) { return x; }
+inline uint16_t ToHost(uint16_t x) { return ToHost16(x); }
+inline uint32_t ToHost(uint32_t x) { return ToHost32(x); }
+inline uint64_t ToHost(uint64_t x) { return ToHost64(x); }
+
+inline int8_t FromHost(int8_t x) { return x; }
+inline int16_t FromHost(int16_t x) {
+ return bit_cast<int16_t>(FromHost16(bit_cast<uint16_t>(x)));
+}
+inline int32_t FromHost(int32_t x) {
+ return bit_cast<int32_t>(FromHost32(bit_cast<uint32_t>(x)));
+}
+inline int64_t FromHost(int64_t x) {
+ return bit_cast<int64_t>(FromHost64(bit_cast<uint64_t>(x)));
+}
+inline int8_t ToHost(int8_t x) { return x; }
+inline int16_t ToHost(int16_t x) {
+ return bit_cast<int16_t>(ToHost16(bit_cast<uint16_t>(x)));
+}
+inline int32_t ToHost(int32_t x) {
+ return bit_cast<int32_t>(ToHost32(bit_cast<uint32_t>(x)));
+}
+inline int64_t ToHost(int64_t x) {
+ return bit_cast<int64_t>(ToHost64(bit_cast<uint64_t>(x)));
+}
+
// Functions to do unaligned loads and stores in big-endian order.
inline uint16_t Load16(const void *p) {
return ToHost16(ABSL_INTERNAL_UNALIGNED_LOAD16(p));
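As in the little-endian hunk above, the big-endian helpers pair the restored FromHost/ToHost overloads with unaligned Load/Store functions; Load16 reads two bytes at any alignment and converts them from big-endian wire order to host order. A standalone sketch of what that amounts to on a little-endian host (an illustration of the idea, not the header's implementation):

#include <cstdint>
#include <cstring>

// Read a 16-bit big-endian value from a possibly unaligned pointer.
inline uint16_t LoadBigEndian16(const void* p) {
  uint16_t v;
  std::memcpy(&v, p, sizeof(v));  // memcpy stands in for the unaligned-load macro
#if defined(__BYTE_ORDER__) && __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__
  // On a little-endian host, ToHost16 amounts to a byte swap.
  v = static_cast<uint16_t>((v << 8) | (v >> 8));
#endif
  return v;
}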