#include <type_traits>

#include <boost/noncopyable.hpp>

// ... (further includes elided: <sys/mman.h> and <unistd.h> for
// mmap/sysconf, plus folly's AtomicStruct and cache-locality headers)

namespace detail {
template <typename Pool>
struct IndexedMemPoolRecycler;
} // namespace detail

/// Lifecycle traits for IndexedMemPool: by default, trivial types are
/// recycled lazily (constructed once, reused as-is) and non-trivial types
/// eagerly (constructed on allocation, destroyed on recycle).
template <
    typename T,
    bool EagerRecycleWhenTrivial = false,
    bool EagerRecycleWhenNotTrivial = true>
struct IndexedMemPoolTraits {
  static constexpr bool eagerRecycle() {
    return std::is_trivial<T>::value ? EagerRecycleWhenTrivial
                                     : EagerRecycleWhenNotTrivial;
  }

  /// Called when a slot is allocated from the pool for the first time.
  static void initialize(T* ptr) {
    if (!eagerRecycle()) {
      new (ptr) T();
    }
  }

  /// Called when a slot is freed at pool destruction time.
  static void cleanup(T* ptr) {
    if (!eagerRecycle()) {
      ptr->~T();
    }
  }

  /// Called on each allocation, with the arguments forwarded from
  /// IndexedMemPool::allocElem.
  template <typename... Args>
  static void onAllocate(T* ptr, Args&&... args) {
    static_assert(
        sizeof...(Args) == 0 || eagerRecycle(),
        "emplace-style allocation requires eager recycle, "
        "which is defaulted only for non-trivial types");
    if (eagerRecycle()) {
      new (ptr) T(std::forward<Args>(args)...);
    }
  }

  /// Called when the element is recycled.
  static void onRecycle(T* ptr) {
    if (eagerRecycle()) {
      ptr->~T();
    }
  }
};
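// Usage sketch (illustrative, not part of the header): selecting eager
// recycle for a trivial type so that emplace-style allocation is legal.
// The alias name TrivialEagerTraits is invented for this example.
//
//   using TrivialEagerTraits = IndexedMemPoolTraits<
//       int,
//       /*EagerRecycleWhenTrivial=*/true,
//       /*EagerRecycleWhenNotTrivial=*/true>;
//   static_assert(TrivialEagerTraits::eagerRecycle(),
//                 "ints now construct on alloc and destruct on recycle");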
/// Instances of IndexedMemPool dynamically allocate and then pool their
/// element type (T), returning 4-byte integer indices that can be passed
/// to operator[] to access or obtain pointers to the elements. Recycled
/// elements pass through NumLocalLists sharded local free lists that
/// overflow into a single global free list.
template <
    typename T,
    uint32_t NumLocalLists_ = 32,
    uint32_t LocalListLimit_ = 200,
    template <typename> class Atom = std::atomic,
    typename Traits = IndexedMemPoolTraits<T>>
struct IndexedMemPool : boost::noncopyable {
  typedef T value_type;

  typedef std::unique_ptr<T, detail::IndexedMemPoolRecycler<IndexedMemPool>>
      UniquePtr;

  static_assert(LocalListLimit_ <= 255, "LocalListLimit must fit in 8 bits");
  enum {
    NumLocalLists = NumLocalLists_,
    LocalListLimit = LocalListLimit_,
  };

  static_assert(
      std::is_nothrow_default_constructible<Atom<uint32_t>>::value,
      "Atom must be nothrow default constructible");

  /// The index std::numeric_limits<uint32_t>::max() is reserved for
  /// isAllocated tracking.
  static constexpr uint32_t maxIndexForCapacity(uint32_t capacity) {
    return uint32_t(std::min(
        uint64_t(capacity) + (NumLocalLists - 1) * LocalListLimit,
        uint64_t(std::numeric_limits<uint32_t>::max() - 1)));
  }

  static constexpr uint32_t capacityForMaxIndex(uint32_t maxIndex) {
    return maxIndex - (NumLocalLists - 1) * LocalListLimit;
  }
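  // Worked example (with the template defaults NumLocalLists = 32 and
  // LocalListLimit = 200): maxIndexForCapacity(1000) returns
  // 1000 + 31 * 200 = 7200, padding the request so that 1000 elements stay
  // allocatable even if every other stripe's local free list is full;
  // capacityForMaxIndex(7200) inverts the mapping back to 1000.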
  /// Constructs a pool that can allocate at least capacity elements,
  /// even if all the local lists are full.
  explicit IndexedMemPool(uint32_t capacity)
      : actualCapacity_(maxIndexForCapacity(capacity)),
        size_(0),
        globalHead_(TaggedPtr{}) {
    const size_t needed = sizeof(Slot) * (actualCapacity_ + 1);
    size_t pagesize = size_t(sysconf(_SC_PAGESIZE));
    mmapLength_ = ((needed - 1) & ~(pagesize - 1)) + pagesize;
    assert(needed <= mmapLength_ && mmapLength_ < needed + pagesize);
    assert((mmapLength_ % pagesize) == 0);
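    // Worked example of the rounding above (illustrative numbers): with
    // pagesize = 4096 and needed = 100000, mmapLength_ becomes
    // (99999 & ~4095) + 4096 = 98304 + 4096 = 102400, the smallest page
    // multiple that is >= needed.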
    slots_ = static_cast<Slot*>(mmap(
        nullptr,
        mmapLength_,
        PROT_READ | PROT_WRITE,
        MAP_PRIVATE | MAP_ANONYMOUS,
        -1,
        0));
    if (slots_ == MAP_FAILED) {
      assert(errno == ENOMEM);
      throw std::bad_alloc();
    }
    // explicitly construct the slot link atomics if Atom is not trivial
    if (!std::is_trivial<Atom<uint32_t>>::value) {
      for (size_t i = 1; i < actualCapacity_ + 1; i++) {
        new (&slots_[i].localNext) Atom<uint32_t>;
        new (&slots_[i].globalNext) Atom<uint32_t>;
      }
    }
  }
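  // Design note: for the common Atom = std::atomic case, default
  // construction writes nothing, so the constructor leaves the zero-filled
  // MAP_ANONYMOUS pages untouched; a link value of 0 already means "null"
  // because slot 0 is never used, and pages are only faulted in as slots
  // are actually handed out.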
  /// Destroys all of the contained elements.
  ~IndexedMemPool() {
    using A = Atom<uint32_t>;
    for (uint32_t i = maxAllocatedIndex(); i > 0; --i) {
      Traits::cleanup(&slots_[i].elem);
    }
    for (size_t i = 1; i < actualCapacity_ + 1; i++) {
      slots_[i].localNext.~A();
      slots_[i].globalNext.~A();
    }
    munmap(slots_, mmapLength_);
  }

  /// Returns a lower bound on the number of elements that may be
  /// simultaneously allocated and not yet recycled; because of the local
  /// lists it is possible that more elements than this are returned
  /// successfully.
  uint32_t capacity() { return capacityForMaxIndex(actualCapacity_); }

  /// Returns the maximum index of elements ever allocated in this pool,
  /// including elements that have been recycled.
  uint32_t maxAllocatedIndex() const {
    // take the minimum because size_ may briefly race past actualCapacity_
    return std::min(uint32_t(size_), uint32_t(actualCapacity_));
  }
  /// Finds a slot with a non-zero index, passes its element to
  /// Traits::onAllocate, marks it allocated, and returns the index, or
  /// returns 0 if no slot is available.
  template <typename... Args>
  uint32_t allocIndex(Args&&... args) {
    auto idx = localPop(localHead());
    if (idx != 0) {
      Slot& s = slot(idx);
      Traits::onAllocate(&s.elem, std::forward<Args>(args)...);
      markAllocated(s);
    }
    return idx;
  }
  /// If an element is available, returns a std::unique_ptr to it that will
  /// recycle the element back to the pool when it is reclaimed; otherwise
  /// returns a null (falsy) UniquePtr.
  template <typename... Args>
  UniquePtr allocElem(Args&&... args) {
    auto idx = allocIndex(std::forward<Args>(args)...);
    T* ptr = idx == 0 ? nullptr : &slot(idx).elem;
    return UniquePtr(ptr, typename UniquePtr::deleter_type(this));
  }
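  // Usage sketch (illustrative; the element type and capacity are
  // assumptions made for this example):
  //
  //   folly::IndexedMemPool<std::string> pool(1024);
  //   if (auto ptr = pool.allocElem("pooled")) {
  //     ptr->append(" string");
  //   } // the deleter recycles the slot here instead of calling delete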
  /// Gives up ownership previously granted by alloc().
  void recycleIndex(uint32_t idx) {
    assert(isAllocated(idx));
    localPush(localHead(), idx);
  }

  /// Provides access to the pooled element referenced by idx.
  T& operator[](uint32_t idx) { return slot(idx).elem; }

  /// Provides access to the pooled element referenced by idx.
  const T& operator[](uint32_t idx) const { return slot(idx).elem; }
  /// If elem == &pool[idx], then pool.locateElem(elem) == idx; also,
  /// pool.locateElem(nullptr) == 0.
  uint32_t locateElem(const T* elem) const {
    if (!elem) {
      return 0;
    }

    static_assert(std::is_standard_layout<Slot>::value, "offsetof needs POD");

    auto slot = reinterpret_cast<const Slot*>(
        reinterpret_cast<const char*>(elem) - offsetof(Slot, elem));
    auto rv = uint32_t(slot - slots_);

    // this assert also tests that rv is in range
    assert(elem == &(*this)[rv]);
    return rv;
  }

  /// Returns true iff idx has been alloc()ed and not recycleIndex()ed.
  bool isAllocated(uint32_t idx) const {
    return slot(idx).localNext.load(std::memory_order_acquire) ==
        uint32_t(-1);
  }
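  // Index round-trip sketch (illustrative):
  //
  //   uint32_t idx = pool.allocIndex();  // 0 means the pool is exhausted
  //   if (idx != 0) {
  //     auto& elem = pool[idx];                 // access by index
  //     assert(pool.locateElem(&elem) == idx);  // pointer -> index inverse
  //     assert(pool.isAllocated(idx));
  //     pool.recycleIndex(idx);                 // hand the slot back
  //   }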
 private:
  ///////////// types

  struct Slot {
    T elem;
    Atom<uint32_t> localNext;
    Atom<uint32_t> globalNext;

    Slot() : localNext{}, globalNext{} {}
  };

  struct TaggedPtr {
    uint32_t idx;

    // size is in the bottom 8 bits, tag in the top 24
    uint32_t tagAndSize;

    enum : uint32_t {
      SizeBits = 8,
      SizeMask = (1U << SizeBits) - 1,
      TagIncr = 1U << SizeBits,
    };

    uint32_t size() const { return tagAndSize & SizeMask; }

    TaggedPtr withSize(uint32_t repl) const {
      assert(repl <= LocalListLimit);
      return TaggedPtr{idx, (tagAndSize & ~SizeMask) | repl};
    }

    TaggedPtr withSizeIncr() const {
      assert(size() < LocalListLimit);
      return TaggedPtr{idx, tagAndSize + 1};
    }

    TaggedPtr withSizeDecr() const {
      assert(size() > 0);
      return TaggedPtr{idx, tagAndSize - 1};
    }

    TaggedPtr withIdx(uint32_t repl) const {
      // the tag is bumped whenever idx changes, to defeat ABA
      return TaggedPtr{repl, tagAndSize + TagIncr};
    }

    TaggedPtr withEmpty() const { return withIdx(0).withSize(0); }
  };
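  // ABA sketch: suppose thread A loads the head {idx = 5, tag = t} and
  // stalls, while thread B pops 5, pushes 9, and pushes 5 again. Each
  // withIdx added TagIncr, so the head is now {idx = 5, tag = t + 3 *
  // TagIncr} and A's compare_exchange on the whole TaggedPtr fails rather
  // than splicing a stale localNext into the list.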
  struct alignas(hardware_destructive_interference_size) LocalList {
    AtomicStruct<TaggedPtr, Atom> head;

    LocalList() : head(TaggedPtr{}) {}
  };

  ///////////// fields

  size_t mmapLength_;
  uint32_t actualCapacity_;
  Atom<uint32_t> size_; // number of slots constructed so far
  Slot* slots_; // slots_[0] is unused; elements live in 1..actualCapacity_
  LocalList local_[NumLocalLists];
  alignas(hardware_destructive_interference_size)
      AtomicStruct<TaggedPtr, Atom> globalHead_;

  ///////////// private methods

  uint32_t slotIndex(uint32_t idx) const {
    assert(
        0 < idx && idx <= actualCapacity_ &&
        idx <= size_.load(std::memory_order_acquire));
    return idx;
  }

  Slot& slot(uint32_t idx) { return slots_[slotIndex(idx)]; }

  const Slot& slot(uint32_t idx) const { return slots_[slotIndex(idx)]; }
  // localHead references a full list chained by localNext.  s should
  // equal slot(localHead); it is passed as a micro-optimization.
  void globalPush(Slot& s, uint32_t localHead) {
    while (true) {
      TaggedPtr gh = globalHead_.load(std::memory_order_acquire);
      s.globalNext.store(gh.idx, std::memory_order_relaxed);
      if (globalHead_.compare_exchange_strong(gh, gh.withIdx(localHead))) {
        // success
        return;
      }
    }
  }
  // idx references a single node
  void localPush(AtomicStruct<TaggedPtr, Atom>& head, uint32_t idx) {
    TaggedPtr h = head.load(std::memory_order_acquire);
    bool recycled = false;
    while (true) {
      slot(idx).localNext.store(h.idx, std::memory_order_release);
      if (!recycled) {
        // recycle exactly once, even if the compare_exchange below retries
        Traits::onRecycle(&slot(idx).elem);
        recycled = true;
      }

      if (h.size() == LocalListLimit) {
        // push would overflow the local list, steal it instead
        if (head.compare_exchange_strong(h, h.withEmpty())) {
          // steal was successful, put everything in the global list
          globalPush(slot(idx), idx);
          return;
        }
      } else {
        // local list has space
        if (head.compare_exchange_strong(h, h.withIdx(idx).withSizeIncr())) {
          // success
          return;
        }
      }
      // h was updated by the failing compare_exchange
    }
  }
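  // Overflow sketch: with LocalListLimit = 200, a push onto a full local
  // list does not grow it past the limit; a single compare_exchange takes
  // the entire 200-node chain (withEmpty), and globalPush then links that
  // chain, now headed by the new node, into the global list in one step.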
  // returns 0 if the global list is empty
  uint32_t globalPop() {
    while (true) {
      TaggedPtr gh = globalHead_.load(std::memory_order_acquire);
      if (gh.idx == 0 ||
          globalHead_.compare_exchange_strong(
              gh,
              gh.withIdx(
                  slot(gh.idx).globalNext.load(std::memory_order_relaxed)))) {
        // global list is empty, or pop was successful
        return gh.idx;
      }
    }
  }
  // returns 0 if allocation failed
  uint32_t localPop(AtomicStruct<TaggedPtr, Atom>& head) {
    while (true) {
      TaggedPtr h = head.load(std::memory_order_acquire);
      // ... (pop from a non-empty local list elided)

      uint32_t idx = globalPop();
      if (idx == 0) {
        // global list is empty, allocate and construct a new slot
        if (size_.load(std::memory_order_relaxed) >= actualCapacity_ ||
            (idx = ++size_) > actualCapacity_) {
          // allocation failed
          return 0;
        }
        Traits::initialize(&slot(idx).elem);
        return idx;
      }
      // ... (splicing the rest of the popped global chain into the local
      // list elided)
    }
  }
  AtomicStruct<TaggedPtr, Atom>& localHead() {
    auto stripe = AccessSpreader<Atom>::current(NumLocalLists);
    return local_[stripe].head;
  }
  void markAllocated(Slot& slot) {
    slot.localNext.store(uint32_t(-1), std::memory_order_release);
  }

 public:
  static constexpr std::size_t kSlotSize = sizeof(Slot);
};
namespace detail {

/// This is a stateful Deleter functor, which allows std::unique_ptr
/// to track elements allocated from an IndexedMemPool by including
/// a pointer to the pool.
template <typename Pool>
struct IndexedMemPoolRecycler {
  Pool* pool;

  explicit IndexedMemPoolRecycler(Pool* pool) : pool(pool) {}

  void operator()(typename Pool::value_type* elem) const {
    pool->recycleIndex(pool->locateElem(elem));
  }
};

} // namespace detail
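// Deleter behavior sketch (illustrative):
//
//   auto ptr = pool.allocElem();  // UniquePtr carrying a recycler deleter
//   ptr.reset();                  // runs IndexedMemPoolRecycler::operator(),
//                                 // i.e. pool.recycleIndex(pool.locateElem(...))
//
// Elements obtained this way are never passed to operator delete; the
// backing mmap region is only released by ~IndexedMemPool().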