proxygen
AtomicUnorderedMap.h
Go to the documentation of this file.
1 /*
2  * Copyright 2013-present Facebook, Inc.
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  * http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
#pragma once

#include <atomic>
#include <cstdint>
#include <cstring>
#include <functional>
#include <limits>
#include <stdexcept>
#include <system_error>
#include <type_traits>

#include <boost/type_traits/has_trivial_destructor.hpp>

#include <folly/Conv.h>
#include <folly/Likely.h>
#include <folly/Random.h>
#include <folly/detail/AtomicUnorderedMapUtils.h>
#include <folly/lang/Bits.h>
#include <folly/portability/SysMman.h>
#include <folly/portability/Unistd.h>
37 namespace folly {
38 
132 template <
133  typename Key,
134  typename Value,
135  typename Hash = std::hash<Key>,
136  typename KeyEqual = std::equal_to<Key>,
137  bool SkipKeyValueDeletion =
140  template <typename> class Atom = std::atomic,
141  typename IndexType = uint32_t,
142  typename Allocator = folly::detail::MMapAlloc>
143 
145  typedef Key key_type;
146  typedef Value mapped_type;
147  typedef std::pair<Key, Value> value_type;
148  typedef std::size_t size_type;
149  typedef std::ptrdiff_t difference_type;
150  typedef Hash hasher;
151  typedef KeyEqual key_equal;
152  typedef const value_type& const_reference;
153 
154  typedef struct ConstIterator {
155  ConstIterator(const AtomicUnorderedInsertMap& owner, IndexType slot)
156  : owner_(owner), slot_(slot) {}
157 
158  ConstIterator(const ConstIterator&) = default;
159  ConstIterator& operator=(const ConstIterator&) = default;
160 
161  const value_type& operator*() const {
162  return owner_.slots_[slot_].keyValue();
163  }
164 
165  const value_type* operator->() const {
166  return &owner_.slots_[slot_].keyValue();
167  }
168 
169  // pre-increment
171  while (slot_ > 0) {
172  --slot_;
173  if (owner_.slots_[slot_].state() == LINKED) {
174  break;
175  }
176  }
177  return *this;
178  }
179 
180  // post-increment
181  ConstIterator operator++(int /* dummy */) {
182  auto prev = *this;
183  ++*this;
184  return prev;
185  }
186 
187  bool operator==(const ConstIterator& rhs) const {
188  return slot_ == rhs.slot_;
189  }
190  bool operator!=(const ConstIterator& rhs) const {
191  return !(*this == rhs);
192  }
193 
194  private:
196  IndexType slot_;
197  } const_iterator;
198 
200 
208  size_t maxSize,
209  float maxLoadFactor = 0.8f,
210  const Allocator& alloc = Allocator())
211  : allocator_(alloc) {
212  size_t capacity = size_t(maxSize / std::min(1.0f, maxLoadFactor) + 128);
213  size_t avail = size_t{1} << (8 * sizeof(IndexType) - 2);
214  if (capacity > avail && maxSize < avail) {
215  // we'll do our best
216  capacity = avail;
217  }
218  if (capacity < maxSize || capacity > avail) {
219  throw std::invalid_argument(
220  "AtomicUnorderedInsertMap capacity must fit in IndexType with 2 bits "
221  "left over");
222  }
223 
224  numSlots_ = capacity;
225  slotMask_ = folly::nextPowTwo(capacity * 4) - 1;
226  mmapRequested_ = sizeof(Slot) * capacity;
227  slots_ = reinterpret_cast<Slot*>(allocator_.allocate(mmapRequested_));
228  zeroFillSlots();
229  // mark the zero-th slot as in-use but not valid, since that happens
230  // to be our nil value
231  slots_[0].stateUpdate(EMPTY, CONSTRUCTING);
232  }
233 
235  if (!SkipKeyValueDeletion) {
236  for (size_t i = 1; i < numSlots_; ++i) {
237  slots_[i].~Slot();
238  }
239  }
240  allocator_.deallocate(reinterpret_cast<char*>(slots_), mmapRequested_);
241  }
242 
262  template <typename Func>
263  std::pair<const_iterator, bool> findOrConstruct(const Key& key, Func&& func) {
264  auto const slot = keyToSlotIdx(key);
265  auto prev = slots_[slot].headAndState_.load(std::memory_order_acquire);
266 
267  auto existing = find(key, slot);
268  if (existing != 0) {
269  return std::make_pair(ConstIterator(*this, existing), false);
270  }
271 
272  auto idx = allocateNear(slot);
273  new (&slots_[idx].keyValue().first) Key(key);
274  func(static_cast<void*>(&slots_[idx].keyValue().second));
275 
276  while (true) {
277  slots_[idx].next_ = prev >> 2;
278 
279  // we can merge the head update and the CONSTRUCTING -> LINKED update
280  // into a single CAS if slot == idx (which should happen often)
281  auto after = idx << 2;
282  if (slot == idx) {
283  after += LINKED;
284  } else {
285  after += (prev & 3);
286  }
287 
288  if (slots_[slot].headAndState_.compare_exchange_strong(prev, after)) {
289  // success
290  if (idx != slot) {
291  slots_[idx].stateUpdate(CONSTRUCTING, LINKED);
292  }
293  return std::make_pair(ConstIterator(*this, idx), true);
294  }
295  // compare_exchange_strong updates its first arg on failure, so
296  // there is no need to reread prev
297 
298  existing = find(key, slot);
299  if (existing != 0) {
300  // our allocated key and value are no longer needed
301  slots_[idx].keyValue().first.~Key();
302  slots_[idx].keyValue().second.~Value();
303  slots_[idx].stateUpdate(CONSTRUCTING, EMPTY);
304 
305  return std::make_pair(ConstIterator(*this, existing), false);
306  }
307  }
308  }
309 
314  template <class K, class V>
315  std::pair<const_iterator, bool> emplace(const K& key, V&& value) {
316  return findOrConstruct(
317  key, [&](void* raw) { new (raw) Value(std::forward<V>(value)); });
318  }
319 
320  const_iterator find(const Key& key) const {
321  return ConstIterator(*this, find(key, keyToSlotIdx(key)));
322  }
323 
325  IndexType slot = numSlots_ - 1;
326  while (slot > 0 && slots_[slot].state() != LINKED) {
327  --slot;
328  }
329  return ConstIterator(*this, slot);
330  }
331 
333  return ConstIterator(*this, 0);
334  }
335 
336  private:
337  enum : IndexType {
338  kMaxAllocationTries = 1000, // after this we throw
339  };
340 
341  enum BucketState : IndexType {
342  EMPTY = 0,
343  CONSTRUCTING = 1,
344  LINKED = 2,
345  };
346 
354  struct Slot {
359  Atom<IndexType> headAndState_;
360 
362  IndexType next_;
363 
367 
368  ~Slot() {
369  auto s = state();
370  assert(s == EMPTY || s == LINKED);
371  if (s == LINKED) {
372  keyValue().first.~Key();
373  keyValue().second.~Value();
374  }
375  }
376 
377  BucketState state() const {
378  return BucketState(headAndState_.load(std::memory_order_acquire) & 3);
379  }
380 
381  void stateUpdate(BucketState before, BucketState after) {
382  assert(state() == before);
383  headAndState_ += (after - before);
384  }
385 
386  value_type& keyValue() {
387  assert(state() != EMPTY);
388  return *static_cast<value_type*>(static_cast<void*>(&raw_));
389  }
390 
391  const value_type& keyValue() const {
392  assert(state() != EMPTY);
393  return *static_cast<const value_type*>(static_cast<const void*>(&raw_));
394  }
395  };
396 
397  // We manually manage the slot memory so we can bypass initialization
398  // (by getting a zero-filled mmap chunk) and optionally destruction of
399  // the slots
400 
402  size_t numSlots_;
403 
405  size_t slotMask_;
406 
407  Allocator allocator_;
409 
410  IndexType keyToSlotIdx(const Key& key) const {
411  size_t h = hasher()(key);
412  h &= slotMask_;
413  while (h >= numSlots_) {
414  h -= numSlots_;
415  }
416  return h;
417  }
418 
419  IndexType find(const Key& key, IndexType slot) const {
420  KeyEqual ke = {};
421  auto hs = slots_[slot].headAndState_.load(std::memory_order_acquire);
422  for (slot = hs >> 2; slot != 0; slot = slots_[slot].next_) {
423  if (ke(key, slots_[slot].keyValue().first)) {
424  return slot;
425  }
426  }
427  return 0;
428  }
429 
432  IndexType allocateNear(IndexType start) {
433  for (IndexType tries = 0; tries < kMaxAllocationTries; ++tries) {
434  auto slot = allocationAttempt(start, tries);
435  auto prev = slots_[slot].headAndState_.load(std::memory_order_acquire);
436  if ((prev & 3) == EMPTY &&
437  slots_[slot].headAndState_.compare_exchange_strong(
438  prev, prev + CONSTRUCTING - EMPTY)) {
439  return slot;
440  }
441  }
442  throw std::bad_alloc();
443  }
444 
448  IndexType allocationAttempt(IndexType start, IndexType tries) const {
449  if (LIKELY(tries < 8 && start + tries < numSlots_)) {
450  return IndexType(start + tries);
451  } else {
452  IndexType rv;
453  if (sizeof(IndexType) <= 4) {
454  rv = IndexType(folly::Random::rand32(numSlots_));
455  } else {
456  rv = IndexType(folly::Random::rand64(numSlots_));
457  }
458  assert(rv < numSlots_);
459  return rv;
460  }
461  }
462 
463  void zeroFillSlots() {
466  memset(slots_, 0, mmapRequested_);
467  }
468  }
469 };
470 
475 template <
476  typename Key,
477  typename Value,
478  typename Hash = std::hash<Key>,
479  typename KeyEqual = std::equal_to<Key>,
480  bool SkipKeyValueDeletion =
483  template <typename> class Atom = std::atomic,
484  typename Allocator = folly::detail::MMapAlloc>
486  Key,
487  Value,
488  Hash,
489  KeyEqual,
490  SkipKeyValueDeletion,
491  Atom,
492  uint64_t,
493  Allocator>;
494 
/// Wrapper that exposes mutable atomic access to a value even when the
/// enclosing container only gives out const references (as this
/// insert-only map does), letting callers mutate mapped values safely.
template <typename T, template <typename> class Atom = std::atomic>
struct MutableAtom {
  mutable Atom<T> data;

  explicit MutableAtom(const T& init) : data(init) {}
};
505 
/// Wrapper that exposes plain mutable access to a value held behind a
/// const reference.  Unlike MutableAtom this provides no concurrency
/// control; the caller must provide any needed synchronization.
template <typename T>
struct MutableData {
  mutable T data;
  explicit MutableData(const T& init) : data(init) {}
};
514 
515 } // namespace folly
*than *hazptr_holder h
Definition: Hazptr.h:116
auto f
bool operator!=(const ConstIterator &rhs) const
IndexType keyToSlotIdx(const Key &key) const
constexpr T nextPowTwo(T const v)
Definition: Bits.h:149
PskType type
void stateUpdate(BucketState before, BucketState after)
internal::KeyMatcher< M > Key(M inner_matcher)
#define LIKELY(x)
Definition: Likely.h:47
folly::std T
—— Concurrent Priority Queue Implementation ——
Definition: AtomicBitSet.h:29
FOLLY_PUSH_WARNING RHS rhs
Definition: Traits.h:649
MutableData(const T &init)
void init(int *argc, char ***argv, bool removeFlags)
Definition: Init.cpp:34
MutableAtom(const T &init)
std::pair< const_iterator, bool > findOrConstruct(const Key &key, Func &&func)
LogLevel min
Definition: LogLevel.cpp:30
bool Value(const T &value, M matcher)
#define Atom
const_iterator cbegin() const
IndexType allocateNear(IndexType start)
static const char *const value
Definition: Conv.cpp:50
auto start
IndexType allocationAttempt(IndexType start, IndexType tries) const
std::aligned_storage< sizeof(value_type), alignof(value_type)>::type raw_
Key and Value.
bool operator==(const ConstIterator &rhs) const
std::pair< const_iterator, bool > emplace(const K &key, V &&value)
static set< string > s
uint64_t value(const typename LockFreeRingBuffer< T, Atom >::Cursor &rbcursor)
AtomicUnorderedInsertMap(size_t maxSize, float maxLoadFactor=0.8f, const Allocator &alloc=Allocator())
const value_type & keyValue() const
ConstIterator(const AtomicUnorderedInsertMap &owner, IndexType slot)
IndexType next_
The next bucket in the chain.
static uint32_t rand32()
Definition: Random.h:213
static uint64_t rand64()
Definition: Random.h:263
std::pair< Key, Value > value_type
IndexType find(const Key &key, IndexType slot) const
size_t slotMask_
tricky, see keyToSlodIdx
state
Definition: http_parser.c:272
const_iterator find(const Key &key) const
constexpr detail::First first
Definition: Base-inl.h:2553