Commit 754e2b4

sync bela/pugixml
1 parent 83c71a0 commit 754e2b4

40 files changed: +2109 -1232 lines

vendor/bela.lock

Lines changed: 1 addition & 1 deletion

@@ -1 +1 @@
-https://github.com/fcharlie/bela/tree/96686cfb2173c52baad010be6832e0059824f7f8
+https://github.com/fcharlie/bela/tree/5565e770ec209510945a3c46a6bdfd33e231392c

Lines changed: 1 addition & 1 deletion

@@ -1,2 +1,2 @@
 https://github.com/greg7mdp/parallel-hashmap.git
-60acfa4690303eeefde355175eaaad57c65a113d
+2d062fc53967b978cab766c76829e2fcd8817ccc

vendor/bela/include/bela/__phmap/phmap.h

Lines changed: 70 additions & 46 deletions
@@ -329,11 +329,17 @@ static_assert(kDeleted == -2,
 // A single block of empty control bytes for tables without any slots allocated.
 // This enables removing a branch in the hot path of find().
 // --------------------------------------------------------------------------
+template <class std_alloc_t>
 inline ctrl_t* EmptyGroup() {
-    alignas(16) static constexpr ctrl_t empty_group[] = {
-        kSentinel, kEmpty, kEmpty, kEmpty, kEmpty, kEmpty, kEmpty, kEmpty,
-        kEmpty, kEmpty, kEmpty, kEmpty, kEmpty, kEmpty, kEmpty, kEmpty};
-    return const_cast<ctrl_t*>(empty_group);
+    PHMAP_IF_CONSTEXPR (std_alloc_t::value) {
+        alignas(16) static constexpr ctrl_t empty_group[] = {
+            kSentinel, kEmpty, kEmpty, kEmpty, kEmpty, kEmpty, kEmpty, kEmpty,
+            kEmpty, kEmpty, kEmpty, kEmpty, kEmpty, kEmpty, kEmpty, kEmpty};
+
+        return const_cast<ctrl_t*>(empty_group);
+    } else {
+        return nullptr;
+    }
 }
 
 // --------------------------------------------------------------------------
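
The new template parameter makes the shared empty control block conditional on the allocator: with phmap's default allocator the function still hands back the static `empty_group`, while any other allocator (for example one using fancy pointers) now gets `nullptr`, and the probing paths patched further down have to tolerate that. A self-contained sketch of the same dispatch, with illustrative names (`is_plain_alloc`, `empty_group_or_null`) and plain `if constexpr` standing in for `PHMAP_IF_CONSTEXPR`:

```cpp
#include <cstdint>
#include <memory>
#include <type_traits>

using ctrl_t = std::int8_t;
constexpr ctrl_t kSentinel = -1;    // local stand-ins for the table's control constants
constexpr ctrl_t kEmpty    = -128;

// Stand-in for the new std_alloc_t trait: true only for the "plain" allocator.
template <class Alloc, class T>
using is_plain_alloc = std::is_same<typename std::decay<Alloc>::type, std::allocator<T>>;

// Same shape as the patched EmptyGroup(): a shared static control block for
// the common case, nullptr once a custom allocator is in play, so the
// probing code has to check for ctrl_ == nullptr.
template <class std_alloc_t>
ctrl_t* empty_group_or_null() {
    if constexpr (std_alloc_t::value) {
        alignas(16) static constexpr ctrl_t empty_group[] = {
            kSentinel, kEmpty, kEmpty, kEmpty, kEmpty, kEmpty, kEmpty, kEmpty,
            kEmpty,    kEmpty, kEmpty, kEmpty, kEmpty, kEmpty, kEmpty, kEmpty};
        return const_cast<ctrl_t*>(empty_group);
    } else {
        return nullptr;
    }
}

template <class T>
struct offset_like_alloc : std::allocator<T> {};    // pretend "fancy" allocator

int main() {
    ctrl_t* shared = empty_group_or_null<is_plain_alloc<std::allocator<int>, int>>();
    ctrl_t* none   = empty_group_or_null<is_plain_alloc<offset_like_alloc<int>, int>>();
    return (shared != nullptr && none == nullptr) ? 0 : 1;
}
```

The trade-off is that tables built on a non-default allocator pay one extra null check on the hot paths instead of relying on the sentinel-terminated static block.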
@@ -869,6 +875,8 @@ class raw_hash_set
     template <class K>
     using key_arg = typename KeyArgImpl::template type<K, key_type>;
 
+    using std_alloc_t = std::is_same<typename std::decay<Alloc>::type, phmap::priv::Allocator<value_type>>;
+
 private:
     // Give an early error when key_type is not hashable/eq.
     auto KeyTypeCanBeHashed(const Hash& h, const key_type& k) -> decltype(h(k));
@@ -918,12 +926,7 @@ class raw_hash_set
     using IsDecomposable = IsDecomposable<void, PolicyTraits, Hash, Eq, Ts...>;
 
 public:
-    static_assert(std::is_same<pointer, value_type*>::value,
-                  "Allocators with custom pointer types are not supported");
-    static_assert(std::is_same<const_pointer, const value_type*>::value,
-                  "Allocators with custom pointer types are not supported");
-
-    class iterator
+    class iterator
     {
         friend class raw_hash_set;
 
@@ -989,6 +992,11 @@ class raw_hash_set
         iterator(ctrl_t* ctrl, slot_type* slot) : ctrl_(ctrl), slot_(slot) {}
 
         void skip_empty_or_deleted() {
+            PHMAP_IF_CONSTEXPR (!std_alloc_t::value) {
+                // ctrl_ could be nullptr
+                if (!ctrl_)
+                    return;
+            }
             while (IsEmptyOrDeleted(*ctrl_)) {
                 // ctrl is not necessarily aligned to Group::kWidth. It is also likely
                 // to read past the space for ctrl bytes and into slots. This is ok
@@ -1057,7 +1065,7 @@ class raw_hash_set
     explicit raw_hash_set(size_t bucket_cnt, const hasher& hashfn = hasher(),
                           const key_equal& eq = key_equal(),
                           const allocator_type& alloc = allocator_type())
-        : ctrl_(EmptyGroup()), settings_(0, hashfn, eq, alloc) {
+        : ctrl_(EmptyGroup<std_alloc_t>()), settings_(0, hashfn, eq, alloc) {
         if (bucket_cnt) {
             size_t new_capacity = NormalizeCapacity(bucket_cnt);
             reset_growth_left(new_capacity);
@@ -1180,7 +1188,7 @@ class raw_hash_set
         std::is_nothrow_copy_constructible<hasher>::value&&
         std::is_nothrow_copy_constructible<key_equal>::value&&
         std::is_nothrow_copy_constructible<allocator_type>::value)
-        : ctrl_(phmap::exchange(that.ctrl_, EmptyGroup())),
+        : ctrl_(phmap::exchange(that.ctrl_, EmptyGroup<std_alloc_t>())),
           slots_(phmap::exchange(that.slots_, nullptr)),
           size_(phmap::exchange(that.size_, 0)),
           capacity_(phmap::exchange(that.capacity_, 0)),
@@ -1194,7 +1202,7 @@ class raw_hash_set
     }
 
     raw_hash_set(raw_hash_set&& that, const allocator_type& a)
-        : ctrl_(EmptyGroup()),
+        : ctrl_(EmptyGroup<std_alloc_t>()),
           slots_(nullptr),
           size_(0),
           capacity_(0),
@@ -1615,6 +1623,7 @@ class raw_hash_set
     // This overload is necessary because otherwise erase<K>(const K&) would be
     // a better match if non-const iterator is passed as an argument.
     iterator erase(iterator it) {
+        assert(it != end());
        auto res = it;
        ++res;
        _erase(it);
@@ -1738,7 +1747,8 @@ class raw_hash_set
 
     template <class K = key_type>
     void prefetch(const key_arg<K>& key) const {
-        prefetch_hash(this->hash(key));
+        PHMAP_IF_CONSTEXPR (std_alloc_t::value)
+            prefetch_hash(this->hash(key));
     }
 
     // The API of find() has two extensions.
@@ -1848,6 +1858,11 @@ class raw_hash_set
 
     template <class K = key_type>
     bool find_impl(const key_arg<K>& key, size_t hashval, size_t& offset) {
+        PHMAP_IF_CONSTEXPR (!std_alloc_t::value) {
+            // ctrl_ could be nullptr
+            if (!ctrl_)
+                return false;
+        }
         auto seq = probe(hashval);
         while (true) {
             Group g{ ctrl_ + seq.offset() };
@@ -1877,7 +1892,11 @@ class raw_hash_set
     {
         template <class K, class... Args>
         size_t operator()(const K& key, Args&&...) const {
+#if PHMAP_DISABLE_MIX
+            return h(key);
+#else
             return phmap_mix<sizeof(size_t)>()(h(key));
+#endif
         }
         const hasher& h;
     };
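
`PHMAP_DISABLE_MIX` is a new compile-time switch: by default the value returned by the user's hasher is still post-mixed through `phmap_mix<sizeof(size_t)>`, but defining the macro (to a nonzero value) makes the table trust the hash as-is. A rough sketch of the same toggle, with an illustrative `example_mix` standing in for the library's mixer:

```cpp
#include <cstddef>
#include <cstdint>
#include <functional>
#include <string>

// Illustrative stand-in for phmap_mix<sizeof(size_t)>: any cheap post-mixer
// that spreads entropy into the high bits; the library's mixer differs.
inline std::size_t example_mix(std::size_t h) {
    std::uint64_t x = static_cast<std::uint64_t>(h) * 0x9E3779B97F4A7C15ull;
    return static_cast<std::size_t>(x ^ (x >> 32));
}

// Same shape as the patched HashElement functor.
template <class Hasher>
struct hash_element_like {
    template <class K>
    std::size_t operator()(const K& key) const {
#if PHMAP_DISABLE_MIX
        return h(key);                 // trust the caller's hash unchanged
#else
        return example_mix(h(key));    // defensive mixing (the default)
#endif
    }
    const Hasher& h;
};

int main() {
    std::hash<std::string> sh;
    hash_element_like<std::hash<std::string>> he{sh};
    (void)he(std::string("phmap"));    // exercises whichever branch was compiled in
    return 0;
}
```

Skipping the mix saves a multiply per hash, which only makes sense when the supplied hasher is already well distributed.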
@@ -2025,7 +2044,7 @@ class raw_hash_set
         // Unpoison before returning the memory to the allocator.
         SanitizerUnpoisonMemoryRegion(slots_, sizeof(slot_type) * capacity_);
         Deallocate<Layout::Alignment()>(&alloc_ref(), ctrl_, layout.AllocSize());
-        ctrl_ = EmptyGroup();
+        ctrl_ = EmptyGroup<std_alloc_t>();
         slots_ = nullptr;
         size_ = 0;
         capacity_ = 0;
@@ -2135,6 +2154,11 @@ class raw_hash_set
     }
 
     bool has_element(const value_type& elem, size_t hashval) const {
+        PHMAP_IF_CONSTEXPR (!std_alloc_t::value) {
+            // ctrl_ could be nullptr
+            if (!ctrl_)
+                return false;
+        }
         auto seq = probe(hashval);
         while (true) {
             Group g{ctrl_ + seq.offset()};
@@ -2197,6 +2221,11 @@ class raw_hash_set
 protected:
     template <class K>
     size_t _find_key(const K& key, size_t hashval) {
+        PHMAP_IF_CONSTEXPR (!std_alloc_t::value) {
+            // ctrl_ could be nullptr
+            if (!ctrl_)
+                return (size_t)-1;
+        }
         auto seq = probe(hashval);
         while (true) {
             Group g{ctrl_ + seq.offset()};
@@ -2221,7 +2250,12 @@ class raw_hash_set
     }
 
     size_t prepare_insert(size_t hashval) PHMAP_ATTRIBUTE_NOINLINE {
-        auto target = find_first_non_full(hashval);
+        PHMAP_IF_CONSTEXPR (!std_alloc_t::value) {
+            // ctrl_ could be nullptr
+            if (!ctrl_)
+                rehash_and_grow_if_necessary();
+        }
+        FindInfo target = find_first_non_full(hashval);
         if (PHMAP_PREDICT_FALSE(growth_left() == 0 &&
                                 !IsDeleted(ctrl_[target.offset]))) {
             rehash_and_grow_if_necessary();
@@ -2335,10 +2369,10 @@ class raw_hash_set
     // TODO(alkis): Investigate removing some of these fields:
     // - ctrl/slots can be derived from each other
    // - size can be moved into the slot array
-    ctrl_t* ctrl_ = EmptyGroup();                 // [(capacity + 1) * ctrl_t]
-    slot_type* slots_ = nullptr;                  // [capacity * slot_type]
-    size_t size_ = 0;                             // number of full slots
-    size_t capacity_ = 0;                         // total number of slots
+    ctrl_t* ctrl_ = EmptyGroup<std_alloc_t>();    // [(capacity + 1) * ctrl_t]
+    slot_type* slots_ = nullptr;                  // [capacity * slot_type]
+    size_t size_ = 0;                             // number of full slots
+    size_t capacity_ = 0;                         // total number of slots
     HashtablezInfoHandle infoz_;
     std::tuple<size_t /* growth_left */, hasher, key_equal, allocator_type>
         settings_{0, hasher{}, key_equal{}, allocator_type{}};
@@ -2582,7 +2616,6 @@ class parallel_hash_set
     using UniqueLock = typename Lockable::UniqueLock;
     using SharedLock = typename Lockable::SharedLock;
     using ReadWriteLock = typename Lockable::ReadWriteLock;
-
 
     // --------------------------------------------------------------------
     struct Inner : public Lockable
@@ -3144,14 +3177,9 @@ class parallel_hash_set
     {
         Inner& inner = sets_[subidx(hashval)];
         auto& set = inner.set_;
-        ReadWriteLock m(inner);
+        UniqueLock m(inner);
 
         size_t offset = set._find_key(key, hashval);
-        if (offset == (size_t)-1 && m.switch_to_unique()) {
-            // we did an unlock/lock, and another thread could have inserted the same key, so we need to
-            // do a find() again.
-            offset = set._find_key(key, hashval);
-        }
         if (offset == (size_t)-1) {
             offset = set.prepare_insert(hashval);
             set.emplace_at(offset, std::forward<Args>(args)...);
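
This hunk and the ones that follow all make the same change: the optimistic `ReadWriteLock` that started shared and called `switch_to_unique()` after a miss (which forced a second `_find_key`, because another thread could have inserted the key during the upgrade gap) is replaced by taking the submap's exclusive lock before the first lookup. A sketch of the resulting shape, using standard types instead of phmap's `Lockable` aliases and an `unordered_map` instead of the inner `raw_hash_set`:

```cpp
#include <mutex>
#include <shared_mutex>
#include <string>
#include <unordered_map>

// std::unique_lock here plays the role of typename Lockable::UniqueLock.
struct submap_like {
    std::shared_mutex mtx;                      // the Inner's Lockable
    std::unordered_map<std::string, int> set;   // stand-in for inner.set_

    bool emplace_once(const std::string& key, int value) {
        std::unique_lock<std::shared_mutex> m(mtx);  // exclusive up front, no upgrade step
        auto it = set.find(key);                     // a single lookup now suffices
        if (it != set.end()) return false;           // key already present
        set.emplace(key, value);
        return true;
    }
};

int main() {
    submap_like s;
    bool first = s.emplace_once("key", 1);
    bool again = s.emplace_once("key", 2);
    return (first && !again) ? 0 : 1;
}
```

One lookup under one exclusive lock is simpler and closes the upgrade race, at the cost of not letting readers of the same submap proceed while an insert probe is in flight.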
@@ -3234,13 +3262,8 @@ class parallel_hash_set
     iterator lazy_emplace_with_hash(const key_arg<K>& key, size_t hashval, F&& f) {
         Inner& inner = sets_[subidx(hashval)];
         auto& set = inner.set_;
-        ReadWriteLock m(inner);
+        UniqueLock m(inner);
         size_t offset = set._find_key(key, hashval);
-        if (offset == (size_t)-1 && m.switch_to_unique()) {
-            // we did an unlock/lock, and another thread could have inserted the same key, so we need to
-            // do a find() again.
-            offset = set._find_key(key, hashval);
-        }
         if (offset == (size_t)-1) {
             offset = set.prepare_insert(hashval);
             set.lazy_emplace_at(offset, std::forward<F>(f));
@@ -3355,7 +3378,7 @@ class parallel_hash_set
     template <class K = key_type, class FExists, class FEmplace>
     bool lazy_emplace_l(const key_arg<K>& key, FExists&& fExists, FEmplace&& fEmplace) {
         size_t hashval = this->hash(key);
-        ReadWriteLock m;
+        UniqueLock m;
         auto res = this->find_or_prepare_insert_with_hash(hashval, key, m);
         Inner* inner = std::get<0>(res);
         if (std::get<2>(res)) {
@@ -3730,7 +3753,11 @@ class parallel_hash_set
     {
         template <class K, class... Args>
         size_t operator()(const K& key, Args&&...) const {
+#if PHMAP_DISABLE_MIX
+            return h(key);
+#else
             return phmap_mix<sizeof(size_t)>()(h(key));
+#endif
         }
         const hasher& h;
     };
@@ -3809,16 +3836,11 @@ class parallel_hash_set
 
     template <class K>
     std::tuple<Inner*, size_t, bool>
-    find_or_prepare_insert_with_hash(size_t hashval, const K& key, ReadWriteLock &mutexlock) {
+    find_or_prepare_insert_with_hash(size_t hashval, const K& key, UniqueLock &mutexlock) {
         Inner& inner = sets_[subidx(hashval)];
         auto& set = inner.set_;
-        mutexlock = std::move(ReadWriteLock(inner));
+        mutexlock = std::move(UniqueLock(inner));
         size_t offset = set._find_key(key, hashval);
-        if (offset == (size_t)-1 && mutexlock.switch_to_unique()) {
-            // we did an unlock/lock, and another thread could have inserted the same key, so we need to
-            // do a find() again.
-            offset = set._find_key(key, hashval);
-        }
         if (offset == (size_t)-1) {
             offset = set.prepare_insert(hashval);
             return std::make_tuple(&inner, offset, true);
@@ -3828,7 +3850,7 @@ class parallel_hash_set
 
     template <class K>
     std::tuple<Inner*, size_t, bool>
-    find_or_prepare_insert(const K& key, ReadWriteLock &mutexlock) {
+    find_or_prepare_insert(const K& key, UniqueLock &mutexlock) {
         return find_or_prepare_insert_with_hash<K>(this->hash(key), key, mutexlock);
     }
 
@@ -4050,7 +4072,7 @@ class parallel_hash_map : public parallel_hash_set<N, RefSet, Mtx_, Policy, Hash
     template <class K = key_type, class F, class... Args>
     bool try_emplace_l(K&& k, F&& f, Args&&... args) {
         size_t hashval = this->hash(k);
-        ReadWriteLock m;
+        UniqueLock m;
         auto res = this->find_or_prepare_insert_with_hash(hashval, k, m);
         typename Base::Inner *inner = std::get<0>(res);
         if (std::get<2>(res)) {
@@ -4071,7 +4093,7 @@ class parallel_hash_map : public parallel_hash_set<N, RefSet, Mtx_, Policy, Hash
     template <class K = key_type, class... Args>
     std::pair<typename parallel_hash_map::parallel_hash_set::pointer, bool> try_emplace_p(K&& k, Args&&... args) {
         size_t hashval = this->hash(k);
-        ReadWriteLock m;
+        UniqueLock m;
         auto res = this->find_or_prepare_insert_with_hash(hashval, k, m);
         typename Base::Inner *inner = std::get<0>(res);
         if (std::get<2>(res)) {
@@ -4101,7 +4123,7 @@ class parallel_hash_map : public parallel_hash_set<N, RefSet, Mtx_, Policy, Hash
     template <class K, class V>
     std::pair<iterator, bool> insert_or_assign_impl(K&& k, V&& v) {
         size_t hashval = this->hash(k);
-        ReadWriteLock m;
+        UniqueLock m;
         auto res = this->find_or_prepare_insert_with_hash(hashval, k, m);
         typename Base::Inner *inner = std::get<0>(res);
         if (std::get<2>(res)) {
@@ -4121,7 +4143,7 @@ class parallel_hash_map : public parallel_hash_set<N, RefSet, Mtx_, Policy, Hash
 
     template <class K = key_type, class... Args>
     std::pair<iterator, bool> try_emplace_impl_with_hash(size_t hashval, K&& k, Args&&... args) {
-        ReadWriteLock m;
+        UniqueLock m;
         auto res = this->find_or_prepare_insert_with_hash(hashval, k, m);
         typename Base::Inner *inner = std::get<0>(res);
         if (std::get<2>(res)) {
@@ -4576,6 +4598,8 @@ struct HashtableDebugAccess<Set, typename std::enable_if<has_member_type_raw_hash_set<Set>::value>::type>
 
     static size_t GetNumProbes(const Set& set,
                                const typename Set::key_type& key) {
+        if (!set.ctrl_)
+            return 0;
         size_t num_probes = 0;
         size_t hashval = set.hash(key);
         auto seq = set.probe(hashval);

vendor/bela/include/bela/__phmap/phmap_base.h

Lines changed: 4 additions & 2 deletions
@@ -2774,7 +2774,9 @@ class node_handle_base
     node_handle_base& operator=(node_handle_base&& other) noexcept {
         destroy();
         if (!other.empty()) {
-            alloc_ = other.alloc_;
+            if (other.alloc_) {
+                alloc_.emplace(other.alloc_.value());
+            }
             PolicyTraits::transfer(alloc(), slot(), other.slot());
             other.reset();
         }
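
`alloc_` here is an optional-wrapped allocator, and the assignment `alloc_ = other.alloc_` becomes a guarded `emplace` of the contained value, presumably so that allocators which are copy-constructible but not copy-assignable still work through node handles. A small sketch of that distinction with a hypothetical `ref_alloc` (not a phmap type):

```cpp
#include <optional>

// A stand-in allocator that is copy-constructible but not copy-assignable,
// e.g. because it holds a reference. optional::operator= would need the
// assignment operator; emplace() only needs the copy constructor.
struct ref_alloc {
    int& counter;
    explicit ref_alloc(int& c) : counter(c) {}
    ref_alloc(const ref_alloc&) = default;
    ref_alloc& operator=(const ref_alloc&) = delete;   // not assignable
};

struct handle_like {
    std::optional<ref_alloc> alloc_;

    void adopt_alloc_from(const handle_like& other) {
        if (other.alloc_) {
            alloc_.emplace(other.alloc_.value());      // construct in place
        }
        // 'alloc_ = other.alloc_;' would not compile for this allocator
    }
};

int main() {
    int uses = 0;
    handle_like a, b;
    a.alloc_.emplace(uses);
    b.adopt_alloc_from(a);
    return b.alloc_.has_value() ? 0 : 1;
}
```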
@@ -4189,7 +4191,7 @@ void* Allocate(Alloc* alloc, size_t n) {
     using A = typename phmap::allocator_traits<Alloc>::template rebind_alloc<M>;
     using AT = typename phmap::allocator_traits<Alloc>::template rebind_traits<M>;
     A mem_alloc(*alloc);
-    void* p = AT::allocate(mem_alloc, (n + sizeof(M) - 1) / sizeof(M));
+    void* p = &*AT::allocate(mem_alloc, (n + sizeof(M) - 1) / sizeof(M)); // `&*` to support custom pointers such as boost offset_ptr.
     assert(reinterpret_cast<uintptr_t>(p) % Alignment == 0 &&
            "allocator does not respect alignment");
     return p;
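
`AT::allocate` returns the allocator's `pointer` type, which for fancy-pointer allocators (the `boost::offset_ptr` case the new comment mentions) is not a raw `T*`; dereferencing and re-taking the address is the pre-C++20 way to recover the raw address for a non-empty allocation (C++20's `std::to_address` expresses the same intent). A minimal illustration with a made-up fancy pointer type:

```cpp
#include <cstddef>
#include <memory>
#include <new>

template <class T>
struct tagged_ptr {                        // stand-in for an offset_ptr-like type
    T* raw;
    T& operator*() const { return *raw; }
};

template <class T>
struct fancy_alloc {
    using value_type = T;
    using pointer = tagged_ptr<T>;
    pointer allocate(std::size_t n) {
        return pointer{static_cast<T*>(::operator new(n * sizeof(T)))};
    }
    void deallocate(pointer p, std::size_t) { ::operator delete(p.raw); }
};

int main() {
    fancy_alloc<long> a;
    using AT = std::allocator_traits<fancy_alloc<long>>;
    AT::pointer p = AT::allocate(a, 4);
    void* raw = &*p;                       // what the patched Allocate() does
    bool ok = (raw == static_cast<void*>(p.raw));
    AT::deallocate(a, p, 4);
    return ok ? 0 : 1;
}
```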

vendor/bela/include/bela/__phmap/phmap_config.h

Lines changed: 1 addition & 1 deletion
@@ -36,7 +36,7 @@
 
 #define PHMAP_VERSION_MAJOR 1
 #define PHMAP_VERSION_MINOR 4
-#define PHMAP_VERSION_PATCH 0
+#define PHMAP_VERSION_PATCH 1
 
 // Included for the __GLIBC__ macro (or similar macros on other systems).
 #include <limits.h>

vendor/bela/include/bela/__phmap/phmap_dump.h

Lines changed: 2 additions & 0 deletions
@@ -94,6 +94,8 @@ bool raw_hash_set<Policy, Hash, Eq, Alloc>::phmap_load(InputArchive& ar) {
     if (version >= s_version_base) {
         // growth_left should be restored after calling initialize_slots() which resets it.
         ar.loadBinary(&growth_left(), sizeof(size_t));
+    } else {
+        drop_deletes_without_resize();
     }
     return true;
 }

vendor/bela/include/bela/__strings/int128_have_intrinsic.inc

Lines changed: 6 additions & 4 deletions
@@ -173,27 +173,29 @@ inline int128::operator float() const {
   // complement overwhelms the precision of the mantissa.
   //
   // Also check to make sure we don't negate Int128Min()
+  constexpr float pow_2_64 = 18446744073709551616.0f;
   return v_ < 0 && *this != Int128Min()
              ? -static_cast<float>(-*this)
              : static_cast<float>(Int128Low64(*this)) +
-                   std::ldexp(static_cast<float>(Int128High64(*this)), 64);
+                   static_cast<float>(Int128High64(*this)) * pow_2_64;
 }
 
 inline int128::operator double() const {
   // See comment in int128::operator float() above.
+  constexpr double pow_2_64 = 18446744073709551616.0;
   return v_ < 0 && *this != Int128Min()
              ? -static_cast<double>(-*this)
              : static_cast<double>(Int128Low64(*this)) +
-                   std::ldexp(static_cast<double>(Int128High64(*this)), 64);
+                   static_cast<double>(Int128High64(*this)) * pow_2_64;
 }
 
 inline int128::operator long double() const {
   // See comment in int128::operator float() above.
+  constexpr long double pow_2_64 = 18446744073709551616.0L;
   return v_ < 0 && *this != Int128Min()
              ? -static_cast<long double>(-*this)
              : static_cast<long double>(Int128Low64(*this)) +
-                   std::ldexp(static_cast<long double>(Int128High64(*this)),
-                              64);
+                   static_cast<long double>(Int128High64(*this)) * pow_2_64;
 }
 #endif // Clang on PowerPC
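
The three conversions now scale the high 64 bits by the literal 2^64 (18446744073709551616.0) instead of calling `std::ldexp(high, 64)`. Scaling by an exact power of two does not change the rounded result, and the multiply avoids a call into libm on toolchains that do not inline `ldexp`. A quick check of the identity the operators rely on, value = low + high * 2^64:

```cpp
#include <cassert>
#include <cmath>
#include <cstdint>

int main() {
    constexpr double pow_2_64 = 18446744073709551616.0;   // 2^64, exactly representable

    std::uint64_t high = 3;   // arbitrary sample halves of a 128-bit value
    std::uint64_t low  = 7;

    double via_multiply = static_cast<double>(low) + static_cast<double>(high) * pow_2_64;
    double via_ldexp    = static_cast<double>(low) + std::ldexp(static_cast<double>(high), 64);

    assert(via_multiply == via_ldexp);   // same rounding, same result
    return 0;
}
```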