template <template <typename> class Atom>
class hazptr_holder {
 public:
  /* Constructor (excerpt): acquire a hazard pointer, trying the
     thread-local cache before falling back to the domain. */
  FOLLY_ALWAYS_INLINE hazptr_holder(
      hazptr_domain<Atom>& domain = default_hazptr_domain<Atom>()) {
#if FOLLY_HAZPTR_THR_LOCAL
    if (LIKELY(&domain == &default_hazptr_domain<Atom>())) {
      auto hprec = hazptr_tc_tls<Atom>().try_get();
      // ... on a cache hit, adopt hprec and return ...
    }
#endif
    hprec_ = domain.hprec_acquire();
  }
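For orientation, a minimal construction sketch, assuming the API version shown in these excerpts (where default construction acquires a hazard pointer immediately); the function name is hypothetical:

#include <folly/synchronization/Hazptr.h>

void construction_modes() {
  folly::hazptr_holder<> h;           // acquires a hazard pointer now,
                                      // hitting the thread cache on the fast path
  folly::hazptr_holder<> e{nullptr};  // empty holder: owns no hazptr_rec, so
                                      // construction and destruction cost nothing
}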
  /* Destructor (excerpt): return the hazard pointer to the thread
     cache if possible, otherwise release it to its domain. */
  FOLLY_ALWAYS_INLINE ~hazptr_holder() {
    if (LIKELY(hprec_ != nullptr)) {
      auto domain = hprec_->domain();
#if FOLLY_HAZPTR_THR_LOCAL
      if (LIKELY(domain == &default_hazptr_domain<Atom>())) {
        if (LIKELY(hazptr_tc_tls<Atom>().try_put(hprec_))) {
          return;
        }
      }
#endif
      domain->hprec_release(hprec_);
    }
  }

  /* Move assignment (excerpt): the moved-from holder is left empty. */
  FOLLY_ALWAYS_INLINE hazptr_holder& operator=(hazptr_holder&& rhs) noexcept {
    // ...
    rhs.hprec_ = nullptr;
    // ...
  }
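A hedged sketch of the move semantics implied by the excerpt above; the function name is hypothetical:

#include <utility>
#include <folly/synchronization/Hazptr.h>

void move_demo() {
  folly::hazptr_holder<> a;           // owns a hazard pointer
  folly::hazptr_holder<> b{nullptr};  // empty
  b = std::move(a);                   // b takes ownership; a's hprec_ is nulled
  // a's destructor is now a no-op; b's destructor returns the
  // hazptr_rec to the thread cache or releases it to the domain.
}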
  /* try_protect (excerpt): publish the hazard pointer for ptr, then
     re-read src to verify that it still points to the same object. */
  template <typename T>
  FOLLY_ALWAYS_INLINE bool try_protect(T*& ptr, const Atom<T*>& src) noexcept;

  template <typename T, typename Func>
  FOLLY_ALWAYS_INLINE bool try_protect(T*& ptr, const Atom<T*>& src, Func f) noexcept {
    // ... publish f(ptr) to the hazard pointer, then fence ...
    ptr = src.load(std::memory_order_acquire);
    // ... return false (and leave ptr updated) if src changed ...
  }
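To illustrate the verify-after-publish protocol, a sketch of a manual retry loop; `Node` and `head` are hypothetical, and nodes are assumed to be retired only after being unlinked:

#include <atomic>
#include <folly/synchronization/Hazptr.h>

struct Node {
  int value;
  std::atomic<Node*> next{nullptr};
};
std::atomic<Node*> head{nullptr};  // hypothetical shared pointer

int read_head_value() {
  folly::hazptr_holder<> h;
  Node* p = head.load(std::memory_order_relaxed);
  // On failure, try_protect reloads head into p, so the loop
  // converges on a pointer that is both current and protected.
  while (p && !h.try_protect(p, head)) {
  }
  return p ? p->value : -1;
}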
  /* get_protected (excerpt): load src and protect the result,
     retrying until a consistent protected pointer is obtained. */
  template <typename T>
  FOLLY_ALWAYS_INLINE T* get_protected(const Atom<T*>& src) noexcept;

  template <typename T, typename Func>
  FOLLY_ALWAYS_INLINE T* get_protected(const Atom<T*>& src, Func f) noexcept {
    T* ptr = src.load(std::memory_order_relaxed);
    while (!try_protect(ptr, src, f)) {
    }
    return ptr;
  }
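get_protected wraps that loop, so the same read becomes one call (reusing the hypothetical `Node` and `head` from the previous sketch):

int read_head_value_2() {
  folly::hazptr_holder<> h;
  Node* p = h.get_protected(head);  // load + protect + validate, retried internally
  return p ? p->value : -1;
}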
  /* reset (excerpt): point the owned hazard pointer at ptr. */
  template <typename T>
  FOLLY_ALWAYS_INLINE void reset(const T* ptr) noexcept;
}; // hazptr_holder (excerpts)
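One holder can be repositioned repeatedly instead of constructing a new holder per read; a sketch, assuming the reset overloads above (`Node` as in the earlier sketch):

void poll(std::atomic<Node*>& src) {
  folly::hazptr_holder<> h;
  for (int i = 0; i < 3; ++i) {
    Node* p = h.get_protected(src);  // repositions the hazard pointer safely
    if (p) { /* read *p under protection */ }
    h.reset();  // stop protecting between polls; keeps the hazptr_rec
  }
}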
/* Free-function swap of hazptr_holder-s. */
template <template <typename> class Atom>
FOLLY_ALWAYS_INLINE void swap(
    hazptr_holder<Atom>& lhs, hazptr_holder<Atom>& rhs) noexcept {
  lhs.swap(rhs);
}

/* Aligned storage for holders, used by hazptr_array and hazptr_local. */
template <template <typename> class Atom>
using aligned_hazptr_holder = typename std::aligned_storage<
    sizeof(hazptr_holder<Atom>), alignof(hazptr_holder<Atom>)>::type;
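swap leaves both hazard pointers publishing whatever they published before, which enables hand-over-hand traversal with two holders. A sketch under the assumption that nodes are unlinked before being retired (`Node` as above, function name hypothetical):

bool contains(std::atomic<Node*>& head_, int key) {
  folly::hazptr_holder<> hcurr;
  folly::hazptr_holder<> hnext;
  Node* p = hcurr.get_protected(head_);
  while (p) {
    if (p->value == key) {
      return true;  // p was protected by hcurr for the whole comparison
    }
    Node* next = p->next.load(std::memory_order_acquire);
    // Validate next against p->next while p is still protected.
    if (!hnext.try_protect(next, p->next)) {
      p = hcurr.get_protected(head_);  // link changed; restart from head
      continue;
    }
    hcurr.swap(hnext);  // hcurr now guards next; hnext guards the old p
    p = next;
  }
  return false;
}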
/* Optimized Holders: the template hazptr_array<M> provides most of
   the functionality of M hazptr_holder-s but with faster
   construction/destruction for M > 1. */
template <uint8_t M, template <typename> class Atom>
class hazptr_array {
  static_assert(M > 0, "M must be a positive integer.");

 public:
  /* Constructor (excerpt): take M hazard pointers from the thread
     cache in one transaction, refilling the cache if needed. */
  FOLLY_ALWAYS_INLINE hazptr_array() {
#if FOLLY_HAZPTR_THR_LOCAL
    static_assert(
        M <= hazptr_tc<Atom>::capacity(),
        "M must be within the thread cache capacity.");
    auto& tc = hazptr_tc_tls<Atom>();
    // ... for each of the M slots taken: DCHECK(hprec != nullptr); ...
    tc.set_count(offset);  // offset = cache count minus M
#endif
  }
  /* Move constructor (excerpt): transfers the source's hazard
     pointers along with its empty_ flag: empty_ = other.empty_; */

  /* Destructor (excerpt): return the M hazard pointers to the thread
     cache, evicting entries if the cache would overflow. */
  FOLLY_ALWAYS_INLINE ~hazptr_array() {
#if FOLLY_HAZPTR_THR_LOCAL
    auto& tc = hazptr_tc_tls<Atom>();
    auto count = tc.count();
    // ... if M + count exceeds the cache capacity cap:
    tc.evict((M + count) - cap);
    // ...
    tc.set_count(count + M);
#else
    // ... destroy each holder in place: h[i].~hazptr_holder();
#endif
  }

  /* Move assignment (excerpt): likewise transfers empty_:
     empty_ = other.empty_; */
}; // hazptr_array (excerpts)
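A usage sketch: hazptr_array<2> pays for one thread-cache transaction instead of two, on construction and again on destruction (`Node` as above, function name hypothetical):

void read_pair(std::atomic<Node*>& a, std::atomic<Node*>& b) {
  folly::hazptr_array<2> h;          // both hazard pointers acquired at once
  Node* pa = h[0].get_protected(a);
  Node* pb = h[1].get_protected(b);
  // ... read *pa and *pb under protection ...
  (void)pa;
  (void)pb;
}                                    // both returned to the cache at once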
/* hazptr_local<M>: like hazptr_array<M> but cheaper still; usable
   only when its lifetime cannot overlap other hazptr_local uses in
   the same thread. */
template <uint8_t M, template <typename> class Atom>
class hazptr_local {
  static_assert(M > 0, "M must be a positive integer.");

 public:
  /* Constructor (excerpt): borrow the first M thread-cache slots
     in place. */
  FOLLY_ALWAYS_INLINE hazptr_local() {
#if FOLLY_HAZPTR_THR_LOCAL
    static_assert(
        M <= hazptr_tc<Atom>::capacity(),
        "M must be <= hazptr_tc::capacity().");
    auto& tc = hazptr_tc_tls<Atom>();
    // ... for each borrowed slot: DCHECK(hprec != nullptr); ...
#endif
  }
  FOLLY_ALWAYS_INLINE ~hazptr_local() {  // destructor (excerpt)
#if FOLLY_HAZPTR_THR_LOCAL
    auto& tc = hazptr_tc_tls<Atom>();  // clear the borrowed cache slots
#else
    // ... destroy each holder: h[i].~hazptr_holder();
#endif
  }
}; // hazptr_local (excerpts)
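A usage sketch for this fastest, most restricted variant; safe only if no other hazptr_local overlaps this scope in the same thread (`Node` as above, function name hypothetical):

int read_once(std::atomic<Node*>& src) {
  folly::hazptr_local<1> h;          // borrows a thread-cache slot in place
  Node* p = h[0].get_protected(src);
  return p ? p->value : -1;
}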
**Member summary**

hazptr_holder<Atom>:
FOLLY_ALWAYS_INLINE hazptr_holder(hazptr_domain<Atom>& domain = default_hazptr_domain<Atom>())
FOLLY_ALWAYS_INLINE hazptr_holder(std::nullptr_t) noexcept
FOLLY_ALWAYS_INLINE hazptr_holder(hazptr_holder&& rhs) noexcept
hazptr_holder& operator=(const hazptr_holder&) = delete
FOLLY_ALWAYS_INLINE hazptr_holder& operator=(hazptr_holder&& rhs) noexcept
FOLLY_ALWAYS_INLINE ~hazptr_holder()
FOLLY_ALWAYS_INLINE bool try_protect(T*& ptr, const Atom<T*>& src) noexcept
FOLLY_ALWAYS_INLINE bool try_protect(T*& ptr, const Atom<T*>& src, Func f) noexcept
FOLLY_ALWAYS_INLINE T* get_protected(const Atom<T*>& src) noexcept
FOLLY_ALWAYS_INLINE T* get_protected(const Atom<T*>& src, Func f) noexcept
FOLLY_ALWAYS_INLINE void reset(const T* ptr) noexcept
FOLLY_ALWAYS_INLINE void reset(std::nullptr_t = nullptr) noexcept
FOLLY_ALWAYS_INLINE void swap(hazptr_holder<Atom>& rhs) noexcept
FOLLY_ALWAYS_INLINE hazptr_rec<Atom>* hprec() const noexcept
FOLLY_ALWAYS_INLINE void set_hprec(hazptr_rec<Atom>* hprec) noexcept
hazptr_rec<Atom>* hprec_ (private data member)

hazptr_rec<Atom> (referenced by the holder):
FOLLY_ALWAYS_INLINE hazptr_domain<Atom>* domain()
FOLLY_ALWAYS_INLINE void reset_hazptr(const void* p = nullptr) noexcept

Free functions and aliases:
FOLLY_ALWAYS_INLINE void swap(hazptr_holder<Atom>& lhs, hazptr_holder<Atom>& rhs) noexcept
aligned_hazptr_holder = typename std::aligned_storage<sizeof(hazptr_holder<Atom>), alignof(hazptr_holder<Atom>)>::type

hazptr_array<M, Atom>:
FOLLY_ALWAYS_INLINE hazptr_array()
FOLLY_ALWAYS_INLINE hazptr_array(std::nullptr_t) noexcept
FOLLY_ALWAYS_INLINE hazptr_array(hazptr_array&& other) noexcept
FOLLY_ALWAYS_INLINE hazptr_array& operator=(hazptr_array&& other) noexcept
FOLLY_ALWAYS_INLINE ~hazptr_array()
FOLLY_ALWAYS_INLINE hazptr_holder<Atom>& operator[](uint8_t i) noexcept

hazptr_local<M, Atom>:
FOLLY_ALWAYS_INLINE hazptr_local()
FOLLY_ALWAYS_INLINE ~hazptr_local()
FOLLY_ALWAYS_INLINE hazptr_holder<Atom>& operator[](uint8_t i) noexcept
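Finally, the Atom template-template parameter (defaulting to std::atomic) exists so an instrumented atomic can be substituted, e.g. in deterministic concurrency tests. A sketch, assuming folly::test::DeterministicAtomic from <folly/test/DeterministicSchedule.h> is available; the function name is hypothetical:

#include <atomic>
#include <folly/synchronization/Hazptr.h>

template <template <typename> class Atom = std::atomic>
int read_with(Atom<int*>& src) {
  folly::hazptr_holder<Atom> h;  // every hazptr type is parameterized on Atom
  int* p = h.get_protected(src);
  return p ? *p : -1;
}
// read_with(src) uses std::atomic; in tests,
// read_with<folly::test::DeterministicAtomic>(dsrc) runs the same
// code under folly's deterministic scheduler.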