95 lines
2.7 KiB
Diff
--- src/s2/third_party/absl/base/internal/unaligned_access.h.orig	2019-12-25 11:13:05.290000000 +0800
+++ src/s2/third_party/absl/base/internal/unaligned_access.h	2019-12-25 11:15:06.650000000 +0800
@@ -80,7 +80,7 @@
   return __sanitizer_unaligned_load32(p);
 }
 
-inline uint64 UnalignedLoad64(const void *p) {
+inline uint64_t UnalignedLoad64(const void *p) {
   return __sanitizer_unaligned_load64(p);
 }
@@ -92,7 +92,7 @@
   __sanitizer_unaligned_store32(p, v);
 }
 
-inline void UnalignedStore64(void *p, uint64 v) {
+inline void UnalignedStore64(void *p, uint64_t v) {
   __sanitizer_unaligned_store64(p, v);
 }
@@ -130,8 +130,8 @@
   return t;
 }
 
-inline uint64 UnalignedLoad64(const void *p) {
-  uint64 t;
+inline uint64_t UnalignedLoad64(const void *p) {
+  uint64_t t;
   memcpy(&t, p, sizeof t);
   return t;
 }
@@ -140,7 +140,7 @@
 
 inline void UnalignedStore32(void *p, uint32_t v) { memcpy(p, &v, sizeof v); }
 
-inline void UnalignedStore64(void *p, uint64 v) { memcpy(p, &v, sizeof v); }
+inline void UnalignedStore64(void *p, uint64_t v) { memcpy(p, &v, sizeof v); }
 
 }  // namespace base_internal
 }  // namespace absl
@@ -172,14 +172,14 @@
 #define ABSL_INTERNAL_UNALIGNED_LOAD32(_p) \
   (*reinterpret_cast<const uint32_t *>(_p))
 #define ABSL_INTERNAL_UNALIGNED_LOAD64(_p) \
-  (*reinterpret_cast<const uint64 *>(_p))
+  (*reinterpret_cast<const uint64_t *>(_p))
 
 #define ABSL_INTERNAL_UNALIGNED_STORE16(_p, _val) \
   (*reinterpret_cast<uint16_t *>(_p) = (_val))
 #define ABSL_INTERNAL_UNALIGNED_STORE32(_p, _val) \
   (*reinterpret_cast<uint32_t *>(_p) = (_val))
 #define ABSL_INTERNAL_UNALIGNED_STORE64(_p, _val) \
-  (*reinterpret_cast<uint64 *>(_p) = (_val))
+  (*reinterpret_cast<uint64_t *>(_p) = (_val))
 
 #elif defined(__arm__) && \
     !defined(__ARM_ARCH_5__) && \
@@ -246,13 +246,13 @@
 namespace absl {
 namespace base_internal {
 
-inline uint64 UnalignedLoad64(const void *p) {
-  uint64 t;
+inline uint64_t UnalignedLoad64(const void *p) {
+  uint64_t t;
   memcpy(&t, p, sizeof t);
   return t;
 }
 
-inline void UnalignedStore64(void *p, uint64 v) { memcpy(p, &v, sizeof v); }
+inline void UnalignedStore64(void *p, uint64_t v) { memcpy(p, &v, sizeof v); }
 
 }  // namespace base_internal
 }  // namespace absl
@@ -286,8 +286,8 @@
   return t;
 }
 
-inline uint64 UnalignedLoad64(const void *p) {
-  uint64 t;
+inline uint64_t UnalignedLoad64(const void *p) {
+  uint64_t t;
   memcpy(&t, p, sizeof t);
   return t;
 }
@@ -296,7 +296,7 @@
 
 inline void UnalignedStore32(void *p, uint32_t v) { memcpy(p, &v, sizeof v); }
 
-inline void UnalignedStore64(void *p, uint64 v) { memcpy(p, &v, sizeof v); }
+inline void UnalignedStore64(void *p, uint64_t v) { memcpy(p, &v, sizeof v); }
 
 }  // namespace base_internal
 }  // namespace absl