 #else
 #include <pthread.h>
 
-#ifndef __cplusplus
-#include <stdatomic.h>
-#else /* __cplusplus */
-#include <atomic>
-#define _Atomic(X) std::atomic<X>
-
-using std::memory_order_acq_rel;
-using std::memory_order_acquire;
-using std::memory_order_relaxed;
-using std::memory_order_release;
-
-#endif /* __cplusplus */
-
 #endif /* _WIN32 */
 
 #include "utils_common.h"
@@ -118,14 +105,6 @@ static __inline void utils_atomic_load_acquire_ptr(void **ptr, void **out) {
     *(uintptr_t *)out = ret;
 }
 
-static __inline void utils_atomic_store_release_u64(uint64_t *ptr,
-                                                    uint64_t *val) {
-    ASSERT_IS_ALIGNED((uintptr_t)ptr, 8);
-    ASSERT_IS_ALIGNED((uintptr_t)val, 8);
-    utils_annotate_release(ptr);
-    InterlockedExchange64((LONG64 volatile *)ptr, *(LONG64 *)val);
-}
-
 static __inline void utils_atomic_store_release_ptr(void **ptr, void *val) {
     ASSERT_IS_ALIGNED((uintptr_t)ptr, 8);
     utils_annotate_release(ptr);
@@ -146,14 +125,12 @@ static __inline uint64_t utils_atomic_decrement_u64(uint64_t *ptr) {
 
 static __inline uint64_t utils_fetch_and_add_u64(uint64_t *ptr, uint64_t val) {
     ASSERT_IS_ALIGNED((uintptr_t)ptr, 8);
-    ASSERT_IS_ALIGNED((uintptr_t)&val, 8);
     // return the value that had previously been in *ptr
     return InterlockedExchangeAdd64((LONG64 volatile *)(ptr), val);
 }
 
 static __inline uint64_t utils_fetch_and_sub_u64(uint64_t *ptr, uint64_t val) {
     ASSERT_IS_ALIGNED((uintptr_t)ptr, 8);
-    ASSERT_IS_ALIGNED((uintptr_t)&val, 8);
     // return the value that had previously been in *ptr
     // NOTE: on Windows there is no *Sub* version of InterlockedExchange
     return InterlockedExchangeAdd64((LONG64 volatile *)(ptr), -(LONG64)val);
@@ -203,14 +180,6 @@ static inline void utils_atomic_load_acquire_ptr(void **ptr, void **out) {
     utils_annotate_acquire((void *)ptr);
 }
 
-static inline void utils_atomic_store_release_u64(uint64_t *ptr,
-                                                  uint64_t *val) {
-    ASSERT_IS_ALIGNED((uintptr_t)ptr, 8);
-    ASSERT_IS_ALIGNED((uintptr_t)val, 8);
-    utils_annotate_release(ptr);
-    __atomic_store(ptr, val, memory_order_release);
-}
-
 static inline void utils_atomic_store_release_ptr(void **ptr, void *val) {
     ASSERT_IS_ALIGNED((uintptr_t)ptr, 8);
     utils_annotate_release(ptr);
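
For reference, the 64-bit release store provided by the removed utils_atomic_store_release_u64 helpers can still be expressed with the primitives this header already uses on each platform. The sketch below is illustrative only, not part of this patch; store_release_u64_sketch is a hypothetical name, and the non-Windows branch assumes a GCC/Clang-compatible compiler.

#include <stdint.h>
#ifdef _WIN32
#include <windows.h>
#endif

/* Hypothetical sketch (not part of this change): store a 64-bit value with
 * release semantics, mirroring what utils_atomic_store_release_u64 did. */
static inline void store_release_u64_sketch(uint64_t *ptr, uint64_t val) {
#ifdef _WIN32
    /* InterlockedExchange64 is a full-barrier exchange, which also provides
     * release ordering for the store. */
    InterlockedExchange64((LONG64 volatile *)ptr, (LONG64)val);
#else
    /* GCC/Clang builtin: a plain release store, no read-modify-write needed. */
    __atomic_store_n(ptr, val, __ATOMIC_RELEASE);
#endif
}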