about summary refs log tree commit diff stats
path: root/library/cpp/deprecated
diff options
context:
space:
mode:
author: eeight <eeight@yandex-team.ru> 2022-06-15 23:20:13 +0300
committer: eeight <eeight@yandex-team.ru> 2022-06-15 23:20:13 +0300
commit: 9321bf2b0be068375c8771de6edebbde8a0b41c4 (patch)
tree: 6b5853712f5cec51b1bb5b90b3046db5a2ae5e75 /library/cpp/deprecated
parent: 816bd0a9d59c9bdffcc2ca7fc652b016486eac2d (diff)
download: ydb-9321bf2b0be068375c8771de6edebbde8a0b41c4.tar.gz
IGNIETFERRO-1922 Move atomics from util to library/cpp/deprecated
ref:0c3b0edd3284567c45ec77e57166274f3cfed8c7
Diffstat (limited to 'library/cpp/deprecated')
-rw-r--r--library/cpp/deprecated/atomic/atomic.h50
-rw-r--r--library/cpp/deprecated/atomic/atomic_gcc.h89
-rw-r--r--library/cpp/deprecated/atomic/atomic_ops.h188
-rw-r--r--library/cpp/deprecated/atomic/atomic_ut.cpp227
-rw-r--r--library/cpp/deprecated/atomic/atomic_win.h113
5 files changed, 663 insertions, 4 deletions
diff --git a/library/cpp/deprecated/atomic/atomic.h b/library/cpp/deprecated/atomic/atomic.h
index d104ff834d..b376f34efa 100644
--- a/library/cpp/deprecated/atomic/atomic.h
+++ b/library/cpp/deprecated/atomic/atomic.h
@@ -1,3 +1,51 @@
#pragma once
-#include <util/system/atomic.h>
+#include <util/system/defaults.h>
+
+using TAtomicBase = intptr_t;
+using TAtomic = volatile TAtomicBase;
+
+#if defined(__GNUC__)
+ #include "atomic_gcc.h"
+#elif defined(_MSC_VER)
+ #include "atomic_win.h"
+#else
+ #error unsupported platform
+#endif
+
+#if !defined(ATOMIC_COMPILER_BARRIER)
+ #define ATOMIC_COMPILER_BARRIER()
+#endif
+
+static inline TAtomicBase AtomicSub(TAtomic& a, TAtomicBase v) {
+ return AtomicAdd(a, -v);
+}
+
+static inline TAtomicBase AtomicGetAndSub(TAtomic& a, TAtomicBase v) {
+ return AtomicGetAndAdd(a, -v);
+}
+
#if defined(USE_GENERIC_SETGET)
// Fallback load/store, used only when the platform header requests it
// (atomic_win.h defines USE_GENERIC_SETGET). These rely solely on TAtomic's
// volatile qualifier. NOTE(review): volatile alone gives no cross-thread
// ordering guarantees in standard C++; this presumably leans on MSVC's
// historical acquire/release volatile semantics on x86 — confirm per
// toolchain before reusing elsewhere.
static inline TAtomicBase AtomicGet(const TAtomic& a) {
    return a;
}

// Plain volatile store counterpart of the AtomicGet fallback above.
static inline void AtomicSet(TAtomic& a, TAtomicBase v) {
    a = v;
}
#endif
+
+static inline bool AtomicTryLock(TAtomic* a) {
+ return AtomicCas(a, 1, 0);
+}
+
+static inline bool AtomicTryAndTryLock(TAtomic* a) {
+ return (AtomicGet(*a) == 0) && AtomicTryLock(a);
+}
+
// Releases a lock taken via AtomicTryLock by storing 0.
// The compiler barrier must precede the store: it stops the compiler from
// sinking critical-section writes past the unlock. Hardware release ordering
// comes from AtomicSet where the platform header implements it that way
// (e.g. __ATOMIC_RELEASE in atomic_gcc.h).
static inline void AtomicUnlock(TAtomic* a) {
    ATOMIC_COMPILER_BARRIER();
    AtomicSet(*a, 0);
}
+
+#include "atomic_ops.h"
diff --git a/library/cpp/deprecated/atomic/atomic_gcc.h b/library/cpp/deprecated/atomic/atomic_gcc.h
index db332d2584..ed8dc2bdc5 100644
--- a/library/cpp/deprecated/atomic/atomic_gcc.h
+++ b/library/cpp/deprecated/atomic/atomic_gcc.h
@@ -1,3 +1,90 @@
#pragma once
-#include <util/system/atomic_gcc.h>
+#define ATOMIC_COMPILER_BARRIER() __asm__ __volatile__("" \
+ : \
+ : \
+ : "memory")
+
// Acquire-load of a.
// On aarch64 an explicit LDAR is emitted via inline asm instead of the
// __atomic_load builtin. NOTE(review): presumably to pin the exact
// instruction independently of compiler version — confirm before replacing
// with the builtin.
static inline TAtomicBase AtomicGet(const TAtomic& a) {
    TAtomicBase tmp;
#if defined(_arm64_)
    __asm__ __volatile__(
        "ldar %x[value], %[ptr] \n\t"
        : [value] "=r"(tmp)
        : [ptr] "Q"(a)
        : "memory");
#else
    __atomic_load(&a, &tmp, __ATOMIC_ACQUIRE);
#endif
    return tmp;
}
+
// Release-store of v into a.
// aarch64 uses an explicit STLR (store-release) via inline asm; all other
// GCC-family targets use the __ATOMIC_RELEASE builtin store.
static inline void AtomicSet(TAtomic& a, TAtomicBase v) {
#if defined(_arm64_)
    __asm__ __volatile__(
        "stlr %x[value], %[ptr] \n\t"
        : [ptr] "=Q"(a)
        : [value] "r"(v)
        : "memory");
#else
    __atomic_store(&a, &v, __ATOMIC_RELEASE);
#endif
}
+
// Arithmetic RMW family. Every operation below is a single sequentially
// consistent (__ATOMIC_SEQ_CST) atomic read-modify-write.

// Adds 1; returns the NEW value.
static inline intptr_t AtomicIncrement(TAtomic& p) {
    return __atomic_add_fetch(&p, 1, __ATOMIC_SEQ_CST);
}

// Adds 1; returns the PREVIOUS value.
static inline intptr_t AtomicGetAndIncrement(TAtomic& p) {
    return __atomic_fetch_add(&p, 1, __ATOMIC_SEQ_CST);
}

// Subtracts 1; returns the NEW value.
static inline intptr_t AtomicDecrement(TAtomic& p) {
    return __atomic_sub_fetch(&p, 1, __ATOMIC_SEQ_CST);
}

// Subtracts 1; returns the PREVIOUS value.
static inline intptr_t AtomicGetAndDecrement(TAtomic& p) {
    return __atomic_fetch_sub(&p, 1, __ATOMIC_SEQ_CST);
}

// Adds v; returns the NEW value.
static inline intptr_t AtomicAdd(TAtomic& p, intptr_t v) {
    return __atomic_add_fetch(&p, v, __ATOMIC_SEQ_CST);
}

// Adds v; returns the PREVIOUS value.
static inline intptr_t AtomicGetAndAdd(TAtomic& p, intptr_t v) {
    return __atomic_fetch_add(&p, v, __ATOMIC_SEQ_CST);
}
+
+static inline intptr_t AtomicSwap(TAtomic* p, intptr_t v) {
+ (void)p; // disable strange 'parameter set but not used' warning on gcc
+ intptr_t ret;
+ __atomic_exchange(p, &v, &ret, __ATOMIC_SEQ_CST);
+ return ret;
+}
+
+static inline bool AtomicCas(TAtomic* a, intptr_t exchange, intptr_t compare) {
+ (void)a; // disable strange 'parameter set but not used' warning on gcc
+ return __atomic_compare_exchange(a, &compare, &exchange, false, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
+}
+
+static inline intptr_t AtomicGetAndCas(TAtomic* a, intptr_t exchange, intptr_t compare) {
+ (void)a; // disable strange 'parameter set but not used' warning on gcc
+ __atomic_compare_exchange(a, &compare, &exchange, false, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
+ return compare;
+}
+
// Bitwise RMW family; each returns the NEW value of a (seq-cst).
static inline intptr_t AtomicOr(TAtomic& a, intptr_t b) {
    return __atomic_or_fetch(&a, b, __ATOMIC_SEQ_CST);
}

static inline intptr_t AtomicXor(TAtomic& a, intptr_t b) {
    return __atomic_xor_fetch(&a, b, __ATOMIC_SEQ_CST);
}

static inline intptr_t AtomicAnd(TAtomic& a, intptr_t b) {
    return __atomic_and_fetch(&a, b, __ATOMIC_SEQ_CST);
}

// Full (two-way) memory fence.
static inline void AtomicBarrier() {
    __sync_synchronize();
}
diff --git a/library/cpp/deprecated/atomic/atomic_ops.h b/library/cpp/deprecated/atomic/atomic_ops.h
index 58b04b6713..d705cd9b1d 100644
--- a/library/cpp/deprecated/atomic/atomic_ops.h
+++ b/library/cpp/deprecated/atomic/atomic_ops.h
@@ -2,4 +2,190 @@
#include "atomic.h"
-#include <util/system/atomic_ops.h>
+#include <util/generic/typetraits.h>
+
// Reinterprets a pointer to a TAtomic-sized integral object as TAtomic*.
// NOTE(review): this is a type-punning cast; every caller in this header is
// gated on TAtomicTraits<T>::Castable (integral, same size, non-const),
// which is what keeps the reinterpretation usable in practice.
template <typename T>
inline TAtomic* AsAtomicPtr(T volatile* target) {
    return reinterpret_cast<TAtomic*>(target);
}

// Const overload of AsAtomicPtr for read-only access paths.
template <typename T>
inline const TAtomic* AsAtomicPtr(T const volatile* target) {
    return reinterpret_cast<const TAtomic*>(target);
}
+
+// integral types
+
// A type T may be funneled through the TAtomic primitives iff it is a
// non-const integral type of exactly the same size as TAtomicBase.
template <typename T>
struct TAtomicTraits {
    enum {
        Castable = std::is_integral<T>::value && sizeof(T) == sizeof(TAtomicBase) && !std::is_const<T>::value,
    };
};

// SFINAE gate: the overload exists (with result type TT) only for castable T.
template <typename T, typename TT>
using TEnableIfCastable = std::enable_if_t<TAtomicTraits<T>::Castable, TT>;
+
// Typed wrappers over the raw TAtomic primitives for any castable integral T
// (see TAtomicTraits). Each forwards through AsAtomicPtr and casts the
// result back to T; semantics (new-value vs previous-value, lock protocol)
// are exactly those of the underlying TAtomic function of the same name.

template <typename T>
inline TEnableIfCastable<T, T> AtomicGet(T const volatile& target) {
    return static_cast<T>(AtomicGet(*AsAtomicPtr(&target)));
}

template <typename T>
inline TEnableIfCastable<T, void> AtomicSet(T volatile& target, TAtomicBase value) {
    AtomicSet(*AsAtomicPtr(&target), value);
}

// Returns the NEW value.
template <typename T>
inline TEnableIfCastable<T, T> AtomicIncrement(T volatile& target) {
    return static_cast<T>(AtomicIncrement(*AsAtomicPtr(&target)));
}

// Returns the PREVIOUS value.
template <typename T>
inline TEnableIfCastable<T, T> AtomicGetAndIncrement(T volatile& target) {
    return static_cast<T>(AtomicGetAndIncrement(*AsAtomicPtr(&target)));
}

// Returns the NEW value.
template <typename T>
inline TEnableIfCastable<T, T> AtomicDecrement(T volatile& target) {
    return static_cast<T>(AtomicDecrement(*AsAtomicPtr(&target)));
}

// Returns the PREVIOUS value.
template <typename T>
inline TEnableIfCastable<T, T> AtomicGetAndDecrement(T volatile& target) {
    return static_cast<T>(AtomicGetAndDecrement(*AsAtomicPtr(&target)));
}

// Returns the NEW value.
template <typename T>
inline TEnableIfCastable<T, T> AtomicAdd(T volatile& target, TAtomicBase value) {
    return static_cast<T>(AtomicAdd(*AsAtomicPtr(&target), value));
}

// Returns the PREVIOUS value.
template <typename T>
inline TEnableIfCastable<T, T> AtomicGetAndAdd(T volatile& target, TAtomicBase value) {
    return static_cast<T>(AtomicGetAndAdd(*AsAtomicPtr(&target), value));
}

// Returns the NEW value.
template <typename T>
inline TEnableIfCastable<T, T> AtomicSub(T volatile& target, TAtomicBase value) {
    return static_cast<T>(AtomicSub(*AsAtomicPtr(&target), value));
}

// Returns the PREVIOUS value.
template <typename T>
inline TEnableIfCastable<T, T> AtomicGetAndSub(T volatile& target, TAtomicBase value) {
    return static_cast<T>(AtomicGetAndSub(*AsAtomicPtr(&target), value));
}

// Stores `exchange`; returns the previous value.
template <typename T>
inline TEnableIfCastable<T, T> AtomicSwap(T volatile* target, TAtomicBase exchange) {
    return static_cast<T>(AtomicSwap(AsAtomicPtr(target), exchange));
}

// Strong CAS; true iff the swap happened.
template <typename T>
inline TEnableIfCastable<T, bool> AtomicCas(T volatile* target, TAtomicBase exchange, TAtomicBase compare) {
    return AtomicCas(AsAtomicPtr(target), exchange, compare);
}

// Strong CAS; returns the value observed before the attempt.
template <typename T>
inline TEnableIfCastable<T, T> AtomicGetAndCas(T volatile* target, TAtomicBase exchange, TAtomicBase compare) {
    return static_cast<T>(AtomicGetAndCas(AsAtomicPtr(target), exchange, compare));
}

// Spinlock protocol over an integral word (0 = free, 1 = held).
template <typename T>
inline TEnableIfCastable<T, bool> AtomicTryLock(T volatile* target) {
    return AtomicTryLock(AsAtomicPtr(target));
}

template <typename T>
inline TEnableIfCastable<T, bool> AtomicTryAndTryLock(T volatile* target) {
    return AtomicTryAndTryLock(AsAtomicPtr(target));
}

template <typename T>
inline TEnableIfCastable<T, void> AtomicUnlock(T volatile* target) {
    AtomicUnlock(AsAtomicPtr(target));
}

// Bitwise RMW; each returns the NEW value.
template <typename T>
inline TEnableIfCastable<T, T> AtomicOr(T volatile& target, TAtomicBase value) {
    return static_cast<T>(AtomicOr(*AsAtomicPtr(&target), value));
}

template <typename T>
inline TEnableIfCastable<T, T> AtomicAnd(T volatile& target, TAtomicBase value) {
    return static_cast<T>(AtomicAnd(*AsAtomicPtr(&target), value));
}

template <typename T>
inline TEnableIfCastable<T, T> AtomicXor(T volatile& target, TAtomicBase value) {
    return static_cast<T>(AtomicXor(*AsAtomicPtr(&target), value));
}
+
+// pointer types
+
// Pointer-typed overloads: a T* is shuttled through TAtomicBase via
// reinterpret_cast (intptr_t round-trip), so pointers get the same atomic
// operations as integral words. TNullPtr overloads exist because nullptr
// cannot be reinterpret_cast to an integer directly.

template <typename T>
inline T* AtomicGet(T* const volatile& target) {
    return reinterpret_cast<T*>(AtomicGet(*AsAtomicPtr(&target)));
}

template <typename T>
inline void AtomicSet(T* volatile& target, T* value) {
    AtomicSet(*AsAtomicPtr(&target), reinterpret_cast<TAtomicBase>(value));
}

using TNullPtr = decltype(nullptr);

// nullptr is stored as the integral 0.
template <typename T>
inline void AtomicSet(T* volatile& target, TNullPtr) {
    AtomicSet(*AsAtomicPtr(&target), 0);
}

// Stores `exchange`; returns the previous pointer.
template <typename T>
inline T* AtomicSwap(T* volatile* target, T* exchange) {
    return reinterpret_cast<T*>(AtomicSwap(AsAtomicPtr(target), reinterpret_cast<TAtomicBase>(exchange)));
}

template <typename T>
inline T* AtomicSwap(T* volatile* target, TNullPtr) {
    return reinterpret_cast<T*>(AtomicSwap(AsAtomicPtr(target), 0));
}

// Strong CAS on a pointer slot; true iff the swap happened.
template <typename T>
inline bool AtomicCas(T* volatile* target, T* exchange, T* compare) {
    return AtomicCas(AsAtomicPtr(target), reinterpret_cast<TAtomicBase>(exchange), reinterpret_cast<TAtomicBase>(compare));
}

// Strong CAS returning the pointer observed before the attempt.
template <typename T>
inline T* AtomicGetAndCas(T* volatile* target, T* exchange, T* compare) {
    return reinterpret_cast<T*>(AtomicGetAndCas(AsAtomicPtr(target), reinterpret_cast<TAtomicBase>(exchange), reinterpret_cast<TAtomicBase>(compare)));
}

// The remaining overloads cover every nullptr/real-pointer argument mix.
template <typename T>
inline bool AtomicCas(T* volatile* target, T* exchange, TNullPtr) {
    return AtomicCas(AsAtomicPtr(target), reinterpret_cast<TAtomicBase>(exchange), 0);
}

template <typename T>
inline T* AtomicGetAndCas(T* volatile* target, T* exchange, TNullPtr) {
    return reinterpret_cast<T*>(AtomicGetAndCas(AsAtomicPtr(target), reinterpret_cast<TAtomicBase>(exchange), 0));
}

template <typename T>
inline bool AtomicCas(T* volatile* target, TNullPtr, T* compare) {
    return AtomicCas(AsAtomicPtr(target), 0, reinterpret_cast<TAtomicBase>(compare));
}

template <typename T>
inline T* AtomicGetAndCas(T* volatile* target, TNullPtr, T* compare) {
    return reinterpret_cast<T*>(AtomicGetAndCas(AsAtomicPtr(target), 0, reinterpret_cast<TAtomicBase>(compare)));
}

template <typename T>
inline bool AtomicCas(T* volatile* target, TNullPtr, TNullPtr) {
    return AtomicCas(AsAtomicPtr(target), 0, 0);
}

template <typename T>
inline T* AtomicGetAndCas(T* volatile* target, TNullPtr, TNullPtr) {
    return reinterpret_cast<T*>(AtomicGetAndCas(AsAtomicPtr(target), 0, 0));
}
diff --git a/library/cpp/deprecated/atomic/atomic_ut.cpp b/library/cpp/deprecated/atomic/atomic_ut.cpp
new file mode 100644
index 0000000000..07211ffba7
--- /dev/null
+++ b/library/cpp/deprecated/atomic/atomic_ut.cpp
@@ -0,0 +1,227 @@
+#include "atomic.h"
+
+#include <library/cpp/testing/unittest/registar.h>
+
+#include <util/generic/ylimits.h>
+
// Unit-test suite for the TAtomic primitive set, templated on the atomic
// word type so the same assertions can be run against both TAtomic and an
// alternative same-sized integer type (see TAltAtomic below in this file).
//
// NOTE(review): TestAtomicPtr is defined at the bottom of the class but is
// NOT listed in the UNIT_TEST_SUITE registration, so it never runs — and,
// being a member of a class template, is never even instantiated. Confirm
// whether this omission is intentional before registering it.
template <typename TAtomic>
class TAtomicTest
    : public TTestBase {
    UNIT_TEST_SUITE(TAtomicTest);
    UNIT_TEST(TestAtomicInc1)
    UNIT_TEST(TestAtomicInc2)
    UNIT_TEST(TestAtomicGetAndInc)
    UNIT_TEST(TestAtomicDec)
    UNIT_TEST(TestAtomicGetAndDec)
    UNIT_TEST(TestAtomicAdd)
    UNIT_TEST(TestAtomicGetAndAdd)
    UNIT_TEST(TestAtomicSub)
    UNIT_TEST(TestAtomicGetAndSub)
    UNIT_TEST(TestAtomicSwap)
    UNIT_TEST(TestAtomicOr)
    UNIT_TEST(TestAtomicAnd)
    UNIT_TEST(TestAtomicXor)
    UNIT_TEST(TestCAS)
    UNIT_TEST(TestGetAndCAS)
    UNIT_TEST(TestLockUnlock)
    UNIT_TEST_SUITE_END();

private:
    // TryLock must succeed exactly once; Unlock restores the word to 0.
    inline void TestLockUnlock() {
        TAtomic v = 0;

        UNIT_ASSERT(AtomicTryLock(&v));
        UNIT_ASSERT(!AtomicTryLock(&v));
        UNIT_ASSERT_VALUES_EQUAL(v, 1);
        AtomicUnlock(&v);
        UNIT_ASSERT_VALUES_EQUAL(v, 0);
    }

    // CAS succeeds only when the expected value matches; Max<intptr_t>()
    // exercises the full word width.
    inline void TestCAS() {
        TAtomic v = 0;

        UNIT_ASSERT(AtomicCas(&v, 1, 0));
        UNIT_ASSERT(!AtomicCas(&v, 1, 0));
        UNIT_ASSERT_VALUES_EQUAL(v, 1);
        UNIT_ASSERT(AtomicCas(&v, 0, 1));
        UNIT_ASSERT_VALUES_EQUAL(v, 0);
        UNIT_ASSERT(AtomicCas(&v, Max<intptr_t>(), 0));
        UNIT_ASSERT_VALUES_EQUAL(v, Max<intptr_t>());
    }

    // GetAndCas always reports the pre-operation value, success or failure.
    inline void TestGetAndCAS() {
        TAtomic v = 0;

        UNIT_ASSERT_VALUES_EQUAL(AtomicGetAndCas(&v, 1, 0), 0);
        UNIT_ASSERT_VALUES_EQUAL(AtomicGetAndCas(&v, 2, 0), 1);
        UNIT_ASSERT_VALUES_EQUAL(v, 1);
        UNIT_ASSERT_VALUES_EQUAL(AtomicGetAndCas(&v, 0, 1), 1);
        UNIT_ASSERT_VALUES_EQUAL(v, 0);
        UNIT_ASSERT_VALUES_EQUAL(AtomicGetAndCas(&v, Max<intptr_t>(), 0), 0);
        UNIT_ASSERT_VALUES_EQUAL(v, Max<intptr_t>());
    }

    inline void TestAtomicInc1() {
        TAtomic v = 0;

        UNIT_ASSERT(AtomicAdd(v, 1));
        UNIT_ASSERT_VALUES_EQUAL(v, 1);
        UNIT_ASSERT(AtomicAdd(v, 10));
        UNIT_ASSERT_VALUES_EQUAL(v, 11);
    }

    inline void TestAtomicInc2() {
        TAtomic v = 0;

        UNIT_ASSERT(AtomicIncrement(v));
        UNIT_ASSERT_VALUES_EQUAL(v, 1);
        UNIT_ASSERT(AtomicIncrement(v));
        UNIT_ASSERT_VALUES_EQUAL(v, 2);
    }

    // GetAndIncrement returns the value before the increment.
    inline void TestAtomicGetAndInc() {
        TAtomic v = 0;

        UNIT_ASSERT_EQUAL(AtomicGetAndIncrement(v), 0);
        UNIT_ASSERT_VALUES_EQUAL(v, 1);
        UNIT_ASSERT_EQUAL(AtomicGetAndIncrement(v), 1);
        UNIT_ASSERT_VALUES_EQUAL(v, 2);
    }

    // Decrement returns the new value: 1 (truthy), then 0 (falsy).
    inline void TestAtomicDec() {
        TAtomic v = 2;

        UNIT_ASSERT(AtomicDecrement(v));
        UNIT_ASSERT_VALUES_EQUAL(v, 1);
        UNIT_ASSERT(!AtomicDecrement(v));
        UNIT_ASSERT_VALUES_EQUAL(v, 0);
    }

    inline void TestAtomicGetAndDec() {
        TAtomic v = 2;

        UNIT_ASSERT_VALUES_EQUAL(AtomicGetAndDecrement(v), 2);
        UNIT_ASSERT_VALUES_EQUAL(v, 1);
        UNIT_ASSERT_VALUES_EQUAL(AtomicGetAndDecrement(v), 1);
        UNIT_ASSERT_VALUES_EQUAL(v, 0);
    }

    // Add returns the new value; negative deltas are exercised too.
    inline void TestAtomicAdd() {
        TAtomic v = 0;

        UNIT_ASSERT_VALUES_EQUAL(AtomicAdd(v, 1), 1);
        UNIT_ASSERT_VALUES_EQUAL(AtomicAdd(v, 2), 3);
        UNIT_ASSERT_VALUES_EQUAL(AtomicAdd(v, -4), -1);
        UNIT_ASSERT_VALUES_EQUAL(v, -1);
    }

    inline void TestAtomicGetAndAdd() {
        TAtomic v = 0;

        UNIT_ASSERT_VALUES_EQUAL(AtomicGetAndAdd(v, 1), 0);
        UNIT_ASSERT_VALUES_EQUAL(AtomicGetAndAdd(v, 2), 1);
        UNIT_ASSERT_VALUES_EQUAL(AtomicGetAndAdd(v, -4), 3);
        UNIT_ASSERT_VALUES_EQUAL(v, -1);
    }

    inline void TestAtomicSub() {
        TAtomic v = 4;

        UNIT_ASSERT_VALUES_EQUAL(AtomicSub(v, 1), 3);
        UNIT_ASSERT_VALUES_EQUAL(AtomicSub(v, 2), 1);
        UNIT_ASSERT_VALUES_EQUAL(AtomicSub(v, 3), -2);
        UNIT_ASSERT_VALUES_EQUAL(v, -2);
    }

    inline void TestAtomicGetAndSub() {
        TAtomic v = 4;

        UNIT_ASSERT_VALUES_EQUAL(AtomicGetAndSub(v, 1), 4);
        UNIT_ASSERT_VALUES_EQUAL(AtomicGetAndSub(v, 2), 3);
        UNIT_ASSERT_VALUES_EQUAL(AtomicGetAndSub(v, 3), 1);
        UNIT_ASSERT_VALUES_EQUAL(v, -2);
    }

    // Swap returns the previous value each time.
    inline void TestAtomicSwap() {
        TAtomic v = 0;

        UNIT_ASSERT_VALUES_EQUAL(AtomicSwap(&v, 3), 0);
        UNIT_ASSERT_VALUES_EQUAL(AtomicSwap(&v, 5), 3);
        UNIT_ASSERT_VALUES_EQUAL(AtomicSwap(&v, -7), 5);
        UNIT_ASSERT_VALUES_EQUAL(AtomicSwap(&v, Max<intptr_t>()), -7);
        UNIT_ASSERT_VALUES_EQUAL(v, Max<intptr_t>());
    }

    // Bitwise ops return the new value.
    inline void TestAtomicOr() {
        TAtomic v = 0xf0;

        UNIT_ASSERT_VALUES_EQUAL(AtomicOr(v, 0x0f), 0xff);
        UNIT_ASSERT_VALUES_EQUAL(v, 0xff);
    }

    inline void TestAtomicAnd() {
        TAtomic v = 0xff;

        UNIT_ASSERT_VALUES_EQUAL(AtomicAnd(v, 0xf0), 0xf0);
        UNIT_ASSERT_VALUES_EQUAL(v, 0xf0);
    }

    inline void TestAtomicXor() {
        TAtomic v = 0x00;

        UNIT_ASSERT_VALUES_EQUAL(AtomicXor(v, 0xff), 0xff);
        UNIT_ASSERT_VALUES_EQUAL(AtomicXor(v, 0xff), 0x00);
    }

    // Exercises the pointer overloads from atomic_ops.h.
    // NOTE(review): not registered in the suite above — see class comment.
    inline void TestAtomicPtr() {
        int* p;
        AtomicSet(p, nullptr);

        UNIT_ASSERT_VALUES_EQUAL(AtomicGet(p), 0);

        int i;
        AtomicSet(p, &i);

        UNIT_ASSERT_VALUES_EQUAL(AtomicGet(p), &i);
        UNIT_ASSERT_VALUES_EQUAL(AtomicSwap(&p, nullptr), &i);
        UNIT_ASSERT(AtomicCas(&p, &i, nullptr));
    }
};
+
+UNIT_TEST_SUITE_REGISTRATION(TAtomicTest<TAtomic>);
+
#ifndef _MSC_VER
// Second suite instantiation: run all the same tests against an integer
// type that is the same width as TAtomicBase but a *different* type, to
// verify the template overloads in atomic_ops.h resolve for it.

// chooses type *other than* T1
template <typename T1, typename T2, typename T3>
struct TChooser {
    using TdType = T2;
};

template <typename T1, typename T2>
struct TChooser<T1, T1, T2> {
    using TdType = T2;
};

// No alternative exists when all three candidates collapse to one type.
template <typename T1>
struct TChooser<T1, T1, T1> {};

    #if defined(__IOS__) && defined(_32_)
using TAltAtomic = int;
    #else
// Pick long or long long — whichever is not already TAtomicBase.
using TAltAtomic = volatile TChooser<TAtomicBase, long, long long>::TdType;
    #endif

// Named wrapper so the registered suite has a distinct, readable name.
class TTTest: public TAtomicTest<TAltAtomic> {
public:
    TString Name() const noexcept override {
        return "TAtomicTest<TAltAtomic>";
    }

    static TString StaticName() noexcept {
        return "TAtomicTest<TAltAtomic>";
    }
};

UNIT_TEST_SUITE_REGISTRATION(TTTest);

#endif
diff --git a/library/cpp/deprecated/atomic/atomic_win.h b/library/cpp/deprecated/atomic/atomic_win.h
index 0b787a0014..65c290e6cc 100644
--- a/library/cpp/deprecated/atomic/atomic_win.h
+++ b/library/cpp/deprecated/atomic/atomic_win.h
@@ -1,3 +1,114 @@
#pragma once
-#include <util/system/atomic_win.h>
+#include <intrin.h>
+
+#define USE_GENERIC_SETGET
+
+#if defined(_i386_)
+
+ #pragma intrinsic(_InterlockedIncrement)
+ #pragma intrinsic(_InterlockedDecrement)
+ #pragma intrinsic(_InterlockedExchangeAdd)
+ #pragma intrinsic(_InterlockedExchange)
+ #pragma intrinsic(_InterlockedCompareExchange)
+
+static inline intptr_t AtomicIncrement(TAtomic& a) {
+ return _InterlockedIncrement((volatile long*)&a);
+}
+
+static inline intptr_t AtomicGetAndIncrement(TAtomic& a) {
+ return _InterlockedIncrement((volatile long*)&a) - 1;
+}
+
+static inline intptr_t AtomicDecrement(TAtomic& a) {
+ return _InterlockedDecrement((volatile long*)&a);
+}
+
+static inline intptr_t AtomicGetAndDecrement(TAtomic& a) {
+ return _InterlockedDecrement((volatile long*)&a) + 1;
+}
+
+static inline intptr_t AtomicAdd(TAtomic& a, intptr_t b) {
+ return _InterlockedExchangeAdd((volatile long*)&a, b) + b;
+}
+
+static inline intptr_t AtomicGetAndAdd(TAtomic& a, intptr_t b) {
+ return _InterlockedExchangeAdd((volatile long*)&a, b);
+}
+
+static inline intptr_t AtomicSwap(TAtomic* a, intptr_t b) {
+ return _InterlockedExchange((volatile long*)a, b);
+}
+
+static inline bool AtomicCas(TAtomic* a, intptr_t exchange, intptr_t compare) {
+ return _InterlockedCompareExchange((volatile long*)a, exchange, compare) == compare;
+}
+
+static inline intptr_t AtomicGetAndCas(TAtomic* a, intptr_t exchange, intptr_t compare) {
+ return _InterlockedCompareExchange((volatile long*)a, exchange, compare);
+}
+
+#else // _x86_64_
+
    #pragma intrinsic(_InterlockedIncrement64)
    #pragma intrinsic(_InterlockedDecrement64)
    #pragma intrinsic(_InterlockedExchangeAdd64)
    #pragma intrinsic(_InterlockedExchange64)
    #pragma intrinsic(_InterlockedCompareExchange64)

// 64-bit implementations: TAtomic is intptr_t == __int64 here, so the
// 64-bit Interlocked* intrinsics apply. Semantics mirror the i386 branch.

// Adds 1; returns the NEW value.
static inline intptr_t AtomicIncrement(TAtomic& a) {
    return _InterlockedIncrement64((volatile __int64*)&a);
}

// Adds 1; returns the PREVIOUS value.
static inline intptr_t AtomicGetAndIncrement(TAtomic& a) {
    return _InterlockedIncrement64((volatile __int64*)&a) - 1;
}

// Subtracts 1; returns the NEW value.
static inline intptr_t AtomicDecrement(TAtomic& a) {
    return _InterlockedDecrement64((volatile __int64*)&a);
}

// Subtracts 1; returns the PREVIOUS value.
static inline intptr_t AtomicGetAndDecrement(TAtomic& a) {
    return _InterlockedDecrement64((volatile __int64*)&a) + 1;
}

// Adds b; returns the NEW value (ExchangeAdd returns the old one).
static inline intptr_t AtomicAdd(TAtomic& a, intptr_t b) {
    return _InterlockedExchangeAdd64((volatile __int64*)&a, b) + b;
}

// Adds b; returns the PREVIOUS value.
static inline intptr_t AtomicGetAndAdd(TAtomic& a, intptr_t b) {
    return _InterlockedExchangeAdd64((volatile __int64*)&a, b);
}

// Stores b; returns the previous value.
static inline intptr_t AtomicSwap(TAtomic* a, intptr_t b) {
    return _InterlockedExchange64((volatile __int64*)a, b);
}

// Strong CAS; true iff the swap happened.
static inline bool AtomicCas(TAtomic* a, intptr_t exchange, intptr_t compare) {
    return _InterlockedCompareExchange64((volatile __int64*)a, exchange, compare) == compare;
}

// Strong CAS; returns the value observed before the attempt.
static inline intptr_t AtomicGetAndCas(TAtomic* a, intptr_t exchange, intptr_t compare) {
    return _InterlockedCompareExchange64((volatile __int64*)a, exchange, compare);
}

// Bitwise RMW ops. The intrinsics return the OLD value; or/and/xor-ing b
// back in reconstructs the NEW value to match the gcc header's contract.
static inline intptr_t AtomicOr(TAtomic& a, intptr_t b) {
    return _InterlockedOr64(&a, b) | b;
}

static inline intptr_t AtomicAnd(TAtomic& a, intptr_t b) {
    return _InterlockedAnd64(&a, b) & b;
}

static inline intptr_t AtomicXor(TAtomic& a, intptr_t b) {
    return _InterlockedXor64(&a, b) ^ b;
}
+
+#endif // _x86_
+
//TODO: replace with a dedicated fence intrinsic (e.g. MemoryBarrier()).
// Stand-in full memory barrier: a dummy interlocked exchange on a local.
// Every Interlocked* operation implies a full fence on x86/x64, so this is
// correct, just clumsier than a direct fence.
static inline void AtomicBarrier() {
    TAtomic val = 0;

    AtomicSwap(&val, 0);
}