bool is_locked() const noexcept
{
    return semaphore.load(std::memory_order::relaxed) != 0;
}

void lock() noexcept
{
    if constexpr (UseDeadLockDetector) {
        // `other` comes from the dead-lock detector's registration of this lock;
        // that call is elided in this excerpt.
        tt_axiom(other != this, "Mutex already locked.");
        tt_axiom(other == nullptr, "Potential dead-lock.");
    }

    tt_axiom(holds_invariant());

    // Fast path: switch the semaphore from 0 (unlocked) to 1 (locked, no waiters).
    semaphore_value_type expected = 0;
    if (!semaphore.compare_exchange_strong(expected, 1, std::memory_order::acquire)) {
        [[unlikely]] lock_contended(expected);
    }

    tt_axiom(holds_invariant());
}

bool try_lock() noexcept
{
    if constexpr (UseDeadLockDetector) {
        // `other` comes from the dead-lock detector's registration of this lock;
        // that call is elided in this excerpt.
        tt_axiom(other != this, "Mutex already locked.");
        tt_axiom(other == nullptr, "Potential dead-lock.");
    }

    tt_axiom(holds_invariant());

    // Only try the fast path: switch the semaphore from 0 (unlocked) to 1 (locked, no waiters).
    semaphore_value_type expected = 0;
    if (!semaphore.compare_exchange_strong(expected, 1, std::memory_order::acquire)) {
        tt_axiom(holds_invariant());

        if constexpr (UseDeadLockDetector) {
            // The dead-lock detector's unregistration call is elided in this excerpt.
        }

        [[unlikely]] return false;
    }

    tt_axiom(holds_invariant());
    return true;
}

void unlock() noexcept
{
    if constexpr (UseDeadLockDetector) {
        // The dead-lock detector's unregistration call is elided in this excerpt.
    }

    tt_axiom(holds_invariant());

    // fetch_sub() returning anything other than 1 means the semaphore was 2: there may be waiters.
    if (semaphore.fetch_sub(1, std::memory_order::relaxed) != 1) {
        [[unlikely]] semaphore.store(0, std::memory_order::release);

        semaphore.notify_one();
    } else {
        std::atomic_thread_fence(std::memory_order::release);
    }

    tt_axiom(holds_invariant());
}
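
/* Usage sketch: lock(), try_lock() and unlock() above satisfy the standard
 * Lockable requirements, so the mutex composes with the standard lock guards.
 * The enclosing class name `unfair_mutex` and the guarded data below are
 * assumptions made for illustration only.
 *
 *     unfair_mutex mutex;
 *     std::vector<int> values;
 *
 *     void append(int x)
 *     {
 *         std::scoped_lock lock(mutex);  // lock() on entry, unlock() on scope exit.
 *         values.push_back(x);
 *     }
 *
 *     bool try_append(int x)
 *     {
 *         std::unique_lock lock(mutex, std::try_to_lock);  // calls try_lock().
 *         if (not lock.owns_lock()) {
 *             return false;
 *         }
 *         values.push_back(x);
 *         return true;
 *     }
 */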

private:
/** The semaphore encodes the lock state:
 *  0 - Unlocked.
 *  1 - Locked, no other thread is waiting.
 *  2 - Locked, zero or more threads may be waiting.
 */
std::atomic_unsigned_lock_free semaphore = 0;
using semaphore_value_type = typename decltype(semaphore)::value_type;

bool holds_invariant() const noexcept
{
    return semaphore.load(std::memory_order::relaxed) <= 2;
}
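
/* Note on the member choice: std::atomic_unsigned_lock_free aliases the
 * unsigned integral atomic type that is guaranteed lock-free and for which
 * wait()/notify_one() are most efficient, which is what lock_contended() and
 * unlock() rely on. A compile-time check of that guarantee could look like
 * this (a sketch, not part of the original):
 *
 *     static_assert(std::atomic_unsigned_lock_free::is_always_lock_free);
 */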

tt_no_inline void lock_contended(semaphore_value_type expected) noexcept
{
    tt_axiom(holds_invariant());

    do {
        // If the semaphore was already 2, another thread is (or was) waiting; wait as well.
        ttlet should_wait = expected == 2;

        // Otherwise try to upgrade the semaphore from 1 (locked, no waiters) to 2 (locked, waiters).
        expected = 1;
        if (should_wait || semaphore.compare_exchange_strong(expected, 2)) {
            tt_axiom(holds_invariant());

            // Block until the semaphore changes away from 2.
            semaphore.wait(2);
        }

        tt_axiom(holds_invariant());

        // Try to acquire the lock; set it to 2 so that unlock() will wake any other waiters.
        expected = 0;
    } while (!semaphore.compare_exchange_strong(expected, 2));
}
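
/* Protocol sketch (derived from the code above): the semaphore encodes three
 * states, 0 = unlocked, 1 = locked without waiters, 2 = locked with possible
 * waiters. A contended acquire/release interleaves roughly like this, with
 * threads A and B assumed for illustration:
 *
 *     A: lock()     CAS 0 -> 1 succeeds, A owns the mutex.
 *     B: lock()     CAS 0 -> 1 fails (expected becomes 1);
 *                   lock_contended(1): CAS 1 -> 2 succeeds, wait(2) blocks.
 *     A: unlock()   fetch_sub() returns 2 (!= 1), so store(0) and notify_one().
 *     B:            wakes, CAS 0 -> 2 succeeds, B owns the mutex; because the
 *                   value is now 2, B's own unlock() will notify again.
 */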