#ifndef FOLLY_ATOMICHASHARRAY_H_
#error "This should only be included by AtomicHashArray.h"
#endif

#include <type_traits>

// AtomicHashArray private constructor
AtomicHashArray(size_t capacity, KeyT emptyKey, KeyT lockedKey, KeyT erasedKey,
                double _maxLoadFactor, uint32_t cacheSize)
    : capacity_(capacity),
      maxEntries_(size_t(_maxLoadFactor * capacity_ + 0.5)),
      kEmptyKey_(emptyKey), kLockedKey_(lockedKey), kErasedKey_(erasedKey),
      numEntries_(0, cacheSize), numPendingEntries_(0, cacheSize),
      isFull_(0), numErases_(0) {}
// findInternal (excerpt): probes from the key's anchor index until either the
// key or an empty cell is found.
template <class LookupKeyT, class LookupHashFcn, class LookupEqualFcn>
SimpleRetT findInternal(const LookupKeyT key_in) {
  checkLegalKeyIfKey<LookupKeyT>(key_in);
  for (size_t idx = keyToAnchorIdx<LookupKeyT, LookupHashFcn>(key_in),
              numProbes = 0;
       ;
       idx = ProbeFcn()(idx, numProbes, capacity_)) {
    const KeyT key = acquireLoadKey(cells_[idx]);
    if (LIKELY(LookupEqualFcn()(key, key_in))) {
      return SimpleRetT(idx, true); // found
    } else if (UNLIKELY(key == kEmptyKey_)) {
      return SimpleRetT(capacity_, false); // empty cell: key not present
    }
    if (UNLIKELY(++numProbes >= capacity_)) {
      return SimpleRetT(capacity_, false); // probed every cell
    }
  }
}
// insertInternal (excerpt): returns (idx, true) on a successful insert,
// (idx, false) if the key already exists, (capacity_, false) if full.
template <typename LookupKeyT, typename LookupHashFcn, typename LookupEqualFcn,
          typename LookupKeyToKeyFcn, typename... ArgTs>
SimpleRetT insertInternal(LookupKeyT key_in, ArgTs&&... vCtorArgs) {
  const short NO_NEW_INSERTS = 1;
  const short NO_PENDING_INSERTS = 2;
  checkLegalKeyIfKey<LookupKeyT>(key_in);
  size_t idx = keyToAnchorIdx<LookupKeyT, LookupHashFcn>(key_in);
  size_t numProbes = 0;
  for (;;) {
    value_type* cell = &cells_[idx];
    if (relaxedLoadKey(*cell) == kEmptyKey_) {
      ++numPendingEntries_;
      if (isFull_.load(std::memory_order_acquire)) {
        // Map looks full: wait for in-flight inserts to settle, then fail.
        --numPendingEntries_;
        detail::atomic_hash_spin_wait([&] {
          return (isFull_.load(std::memory_order_acquire) !=
                  NO_PENDING_INSERTS) &&
              (numPendingEntries_.readFull() != 0);
        });
        isFull_.store(NO_PENDING_INSERTS, std::memory_order_release);
        if (relaxedLoadKey(*cell) == kEmptyKey_) {
          return SimpleRetT(capacity_, false); // don't insert past load factor
        }
      } else if (tryLockCell(cell)) {
        // Cell locked: construct the value in place, then publish the key.
        KeyT key_new = LookupKeyToKeyFcn()(key_in);
        constexpr bool kAlreadyChecked =
            std::is_same<KeyT, typename std::remove_const<LookupKeyT>::type>::value;
        if (!kAlreadyChecked) { checkLegalKeyIfKey(key_new); }
        using mapped = typename std::remove_const<mapped_type>::type;
        new (const_cast<mapped*>(&cell->second))
            ValueT(std::forward<ArgTs>(vCtorArgs)...);
        unlockCell(cell, key_new); // publishes the new key
        --numPendingEntries_;
        ++numEntries_;
        if (numEntries_.readFast() >= maxEntries_) {
          isFull_.store(NO_NEW_INSERTS, std::memory_order_relaxed);
        }
        return SimpleRetT(idx, true);
      } else {
        --numPendingEntries_;
      }
    }
    const KeyT thisKey = acquireLoadKey(*cell); // (lock-wait handling elided)
    if (LookupEqualFcn()(thisKey, key_in)) {
      return SimpleRetT(idx, false); // key already present
    }
    if (UNLIKELY(++numProbes >= capacity_)) {
      return SimpleRetT(capacity_, false); // probed every cell
    }
    idx = ProbeFcn()(idx, numProbes, capacity_);
  }
}

// erase (excerpt): probe exactly as find()/insert() do; on a key match, CAS
// the cell's key to kErasedKey_ and count the erase. Erased cells are never
// reused, and any associated value is left untouched.
for (size_t idx = keyToAnchorIdx(key_in), numProbes = 0;;
     idx = ProbeFcn()(idx, numProbes, capacity_)) {
  KeyT currentKey = acquireLoadKey(cells_[idx]);
  if (EqualFcn()(currentKey, key_in)) {
    if (cellKeyPtr(cells_[idx])->compare_exchange_strong(currentKey, kErasedKey_)) {
      numErases_.fetch_add(1, std::memory_order_relaxed);
      return 1; // found and marked erased
    }
    return 0; // another thread erased it first
  }
  // empty-cell and probe-limit checks as in findInternal()
}
// create (excerpt): allocate raw storage, placement-new the array, then mark
// every cell's key as empty before handing back an owning SmartPtr.
auto const mem = Allocator().allocate(sz);
// ... placement-new of the AtomicHashArray header; on construction failure:
Allocator().deallocate(mem, sz);

SmartPtr map(static_cast<AtomicHashArray*>((void*)mem));
FOR_EACH_RANGE (i, 0, map->capacity_) {
  cellKeyPtr(map->cells_[i])
      ->store(map->kEmptyKey_, std::memory_order_relaxed);
}

// destroy (excerpt): after destroying live cells, free the raw storage.
Allocator().deallocate((char*)p, sz);

// clear (excerpt): reset counters once every cell has been re-emptied.
isFull_.store(0, std::memory_order_relaxed);
numErases_.store(0, std::memory_order_relaxed);
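A minimal usage sketch (assumed, not part of this file): the routines above are driven through the public create()/insert()/find()/erase() API. The default Config reserves -1, -2, and -3 as the empty, locked, and erased keys, so real keys must avoid those values; the function name ahaUsageSketch and the sizes below are illustrative only.

#include <folly/AtomicHashArray.h>

using AHA = folly::AtomicHashArray<int64_t, int64_t>;

void ahaUsageSketch() { // hypothetical example, not folly code
  AHA::Config config;                            // defaults: emptyKey -1, lockedKey -2, erasedKey -3
  AHA::SmartPtr aha = AHA::create(4096, config); // fixed capacity, no growth

  auto ret = aha->insert(AHA::value_type(10, 100));
  if (!ret.second) {
    // key 10 was already present; the existing value is kept
  }
  auto it = aha->find(10);                       // == aha->end() if the key is absent
  if (it != aha->end()) {
    int64_t value = it->second;
    (void)value;
  }
  aha->erase(10);                                // marks the cell erased; the slot is never reused
}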
// Iterator over constructed cells; skips empty, locked, and erased slots.
template <class ContT, class IterVal>
struct aha_iterator
    : boost::iterator_facade<
          aha_iterator<ContT, IterVal>,
          IterVal,
          boost::forward_traversal_tag> {
  // Conversion ctor for const_iterator / iterator interoperability.
  template <class OtherContT, class OtherVal>
  aha_iterator(
      const aha_iterator<OtherContT, OtherVal>& o,
      typename std::enable_if<
          std::is_convertible<OtherVal*, IterVal*>::value>::type* = nullptr)
      : aha_(o.aha_), offset_(o.offset_) {}

  explicit aha_iterator(ContT* array, size_t offset)
      : aha_(array), offset_(offset) {}

  void advancePastEmpty() {
    while (offset_ < aha_->capacity_ && !isValid()) {
      ++offset_;
    }
  }

 private:
  friend class boost::iterator_core_access;

  bool isValid() const {
    KeyT key = acquireLoadKey(aha_->cells_[offset_]);
    return key != aha_->kEmptyKey_ && key != aha_->kLockedKey_ &&
        key != aha_->kErasedKey_;
  }

  ContT* aha_;
  size_t offset_;
};
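A minimal iteration sketch (assumed usage, reusing the AHA alias from the earlier sketch): begin() calls advancePastEmpty() and operator++ does the same, so the loop below only visits cells whose key passes isValid().

// Sums the values of all live entries; empty, locked, and erased cells are
// skipped by the iterator itself.
int64_t sumValues(AHA& aha) {
  int64_t total = 0;
  for (AHA::iterator it = aha.begin(); it != aha.end(); ++it) {
    total += it->second; // it->first is the key, it->second the value
  }
  return total;
}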