#ifndef GENERIC_EPOCH_BASED_IMPL
  #error "This is an impl file and must not be included directly!"
#endif

#include <xenium/detail/port.hpp>

#include <algorithm>
#include <array>
#include <atomic>
#include <cassert>

#ifdef _MSC_VER
  #pragma warning(push)
  #pragma warning(disable : 4127) // conditional expression is constant
#endif

namespace xenium {
namespace reclamation {
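// A scan strategy determines how many threads are inspected when the reclaimer
// tries to advance the global epoch: the epoch may only be advanced once every
// thread that is currently in a critical region has observed the current epoch.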
namespace scan {
  // Scan strategy that checks all threads in a single pass.
  struct all_threads {
    template <class Reclaimer>
    struct type {
      bool scan(typename Reclaimer::epoch_t epoch) {
        auto prevents_update = [epoch](const typename Reclaimer::thread_control_block& data) -> bool {
          // TSan does not support explicit fences, so perform acquire-loads
          // here instead of relaxed loads to avoid false positives.
          constexpr auto memory_order = TSAN_MEMORY_ORDER(std::memory_order_acquire, std::memory_order_relaxed);
          return data.is_in_critical_region.load(memory_order) &&
                 data.local_epoch.load(memory_order) != epoch;
        };

        return !std::any_of(Reclaimer::global_thread_block_list.begin(),
                            Reclaimer::global_thread_block_list.end(),
                            prevents_update);
      }

      void reset() {}
    };
  };
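  // Scan strategy that inspects up to N threads per call, remembering its
  // position in the global thread list between calls; it only signals success
  // once the iterator has passed the last thread.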
  template <unsigned N>
  struct n_threads {
    template <class Reclaimer>
    struct type {
      bool scan(typename Reclaimer::epoch_t epoch) {
        for (unsigned i = 0; i < N; ++i) {
          // TSan does not support explicit fences, so perform acquire-loads
          // here instead of relaxed loads to avoid false positives.
          constexpr auto memory_order = TSAN_MEMORY_ORDER(std::memory_order_acquire, std::memory_order_relaxed);
          if (!thread_iterator->is_in_critical_region.load(memory_order) ||
              thread_iterator->local_epoch.load(memory_order) == epoch) {
            if (++thread_iterator == Reclaimer::global_thread_block_list.end()) {
              return true;
            }
          }
        }
        return false;
      }

      void reset() { thread_iterator = Reclaimer::global_thread_block_list.begin(); }

    private:
      typename detail::thread_block_list<typename Reclaimer::thread_control_block>::iterator thread_iterator;
    };
  };
  // Scan strategy that inspects only a single thread per call; equivalent to
  // n_threads<1>.
  struct one_thread {
    template <class Reclaimer>
    using type = typename n_threads<1>::template type<Reclaimer>;
  };
} // namespace scan

namespace abandon {
  // Never abandon retired nodes (except when the thread terminates).
  struct never {
    using retire_list = detail::retire_list<>;
    static void apply(retire_list&, detail::orphan_list<>&) {}
  };
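  // Abandon the remaining retired nodes every time the thread leaves its
  // critical region, making them available for adoption by other threads.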
  struct always {
    using retire_list = detail::retire_list<>;
    static void apply(retire_list& retire_list, detail::orphan_list<>& orphans) {
      if (!retire_list.empty()) {
        orphans.add(retire_list.steal());
      }
    }
  };
  // Abandon the retired nodes upon leaving the critical region once the
  // number of retired nodes exceeds the given threshold.
  template <size_t Threshold>
  struct when_exceeds_threshold {
    using retire_list = detail::counting_retire_list<>;
    static void apply(retire_list& retire_list, detail::orphan_list<>& orphans) {
      if (retire_list.size() >= Threshold) {
        orphans.add(retire_list.steal());
      }
    }
  };
} // namespace abandon
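// The scan and abandon strategies are selected via the reclaimer's policies.
// For illustration only (assuming the policy names from the accompanying
// header), a DEBRA-style configuration could look like this:
//
//   using reclaimer = xenium::reclamation::generic_epoch_based<>::with<
//     xenium::policy::scan<xenium::reclamation::scan::one_thread>,
//     xenium::policy::abandon<xenium::reclamation::abandon::when_exceeds_threshold<20>>>;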
template <class Traits>
generic_epoch_based<Traits>::region_guard::region_guard() noexcept {
  local_thread_data.enter_region();
}
template <class Traits>
generic_epoch_based<Traits>::region_guard::~region_guard() noexcept {
  local_thread_data.leave_region();
}
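// A region_guard keeps the thread's region active across a whole sequence of
// operations; with the eager or lazy region extensions this avoids toggling
// the critical-region flag (and issuing the associated fence) for every
// individual guard_ptr.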
template <class Traits>
template <class T, class MarkedPtr>
generic_epoch_based<Traits>::guard_ptr<T, MarkedPtr>::guard_ptr(const MarkedPtr& p) noexcept : base(p) {
  if (this->ptr) {
    local_thread_data.enter_critical();
  }
}
template <class Traits>
template <class T, class MarkedPtr>
generic_epoch_based<Traits>::guard_ptr<T, MarkedPtr>::guard_ptr(const guard_ptr& p) noexcept :
  guard_ptr(MarkedPtr(p)) {}
template <class Traits>
template <class T, class MarkedPtr>
generic_epoch_based<Traits>::guard_ptr<T, MarkedPtr>::guard_ptr(guard_ptr&& p) noexcept : base(p.ptr) {
  p.ptr.reset();
}
template <class Traits>
template <class T, class MarkedPtr>
auto generic_epoch_based<Traits>::guard_ptr<T, MarkedPtr>::operator=(const guard_ptr& p) noexcept -> guard_ptr& {
  if (&p == this) {
    return *this;
  }

  reset();
  this->ptr = p.ptr;
  if (this->ptr) {
    local_thread_data.enter_critical();
  }

  return *this;
}
template <class Traits>
template <class T, class MarkedPtr>
auto generic_epoch_based<Traits>::guard_ptr<T, MarkedPtr>::operator=(guard_ptr&& p) noexcept -> guard_ptr& {
  if (&p == this) {
    return *this;
  }

  reset();
  this->ptr = std::move(p.ptr);
  p.ptr.reset();

  return *this;
}
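// acquire() enters the critical region *before* loading the pointer, so the
// loaded node cannot be reclaimed while the guard holds it; if the pointer is
// null, the critical region is left again right away.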
template <class Traits>
template <class T, class MarkedPtr>
void generic_epoch_based<Traits>::guard_ptr<T, MarkedPtr>::acquire(const concurrent_ptr<T>& p,
                                                                   std::memory_order order) noexcept {
  if (p.load(std::memory_order_relaxed) == nullptr) {
    reset();
    return;
  }

  if (!this->ptr) {
    local_thread_data.enter_critical();
  }
  this->ptr = p.load(order);
  if (!this->ptr) {
    local_thread_data.leave_critical();
  }
}
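// acquire_if_equal() works like acquire(), but keeps the protection only if
// the freshly loaded value matches `expected`.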
template <class Traits>
template <class T, class MarkedPtr>
bool generic_epoch_based<Traits>::guard_ptr<T, MarkedPtr>::acquire_if_equal(const concurrent_ptr<T>& p,
                                                                            const MarkedPtr& expected,
                                                                            std::memory_order order) noexcept {
  auto actual = p.load(std::memory_order_relaxed);
  if (actual == nullptr || actual != expected) {
    reset();
    return actual == expected;
  }

  if (!this->ptr) {
    local_thread_data.enter_critical();
  }
  this->ptr = p.load(order);
  if (!this->ptr || this->ptr != expected) {
    local_thread_data.leave_critical();
    this->ptr.reset();
  }

  return this->ptr == expected;
}
template <class Traits>
template <class T, class MarkedPtr>
void generic_epoch_based<Traits>::guard_ptr<T, MarkedPtr>::reset() noexcept {
  if (this->ptr) {
    local_thread_data.leave_critical();
  }
  this->ptr.reset();
}
template <class Traits>
template <class T, class MarkedPtr>
void generic_epoch_based<Traits>::guard_ptr<T, MarkedPtr>::reclaim(Deleter d) noexcept {
  this->ptr->set_deleter(std::move(d));
  local_thread_data.add_retired_node(this->ptr.get());
  reset();
}
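// Every thread publishes its reclamation state through a control block in the
// global thread list: whether it is currently inside a critical region, and
// the epoch it has most recently observed.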
template <class Traits>
struct generic_epoch_based<Traits>::thread_control_block : detail::thread_block_list<thread_control_block>::entry {
  thread_control_block() : is_in_critical_region(false), local_epoch(number_epochs) {}

  std::atomic<bool> is_in_critical_region;
  std::atomic<epoch_t> local_epoch;
};
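// thread_data bundles the thread-local state: the per-epoch retire lists, the
// nesting counters for regions and critical sections, and the scan strategy
// instance. On destruction it abandons any remaining retired nodes and returns
// its control block to the global list for reuse by future threads.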
template <class Traits>
struct generic_epoch_based<Traits>::thread_data {
  ~thread_data() {
    if (control_block == nullptr) {
      return; // no control_block - nothing to do
    }

    for (unsigned i = 0; i < number_epochs; ++i) {
      if (!retire_lists[i].empty()) {
        orphans[i].add(retire_lists[i].steal());
      }
    }

    assert(control_block->is_in_critical_region.load(std::memory_order_relaxed) == false);
    global_thread_block_list.release_entry(control_block);
    control_block = nullptr;
  }
  void enter_region() {
    ensure_has_control_block();
    if (Traits::region_extension_type != region_extension::none && ++region_entries == 1) {
      if (Traits::region_extension_type == region_extension::eager) {
        set_critical_region_flag();
      }
    }
  }

  void leave_region() {
    if (Traits::region_extension_type != region_extension::none && --region_entries == 0) {
      clear_critical_region_flag();
    }
  }
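  // enter_critical/leave_critical maintain a nesting counter so that only the
  // outermost guard performs the actual epoch bookkeeping.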
  void enter_critical() {
    enter_region();
    if (++nested_critical_entries == 1) {
      do_enter_critical();
    }
  }

  void leave_critical() {
    if (--nested_critical_entries == 0 && Traits::region_extension_type == region_extension::none) {
      clear_critical_region_flag();
    }
    leave_region();
  }
private:
  void ensure_has_control_block() {
    if (XENIUM_UNLIKELY(control_block == nullptr)) {
      acquire_control_block();
    }
  }
  XENIUM_NOINLINE void acquire_control_block() {
    assert(control_block == nullptr);
    control_block = global_thread_block_list.acquire_entry();
    assert(control_block->is_in_critical_region.load() == false);
    auto epoch = global_epoch.load(std::memory_order_relaxed);
    control_block->local_epoch.store(epoch, std::memory_order_relaxed);
    local_epoch_idx = epoch % number_epochs;
    scan_strategy.reset();
  }
  void set_critical_region_flag() {
    assert(!control_block->is_in_critical_region.load(std::memory_order_relaxed));
    control_block->is_in_critical_region.store(true, std::memory_order_relaxed);
    // This seq-cst fence ensures the store to is_in_critical_region is ordered
    // before the subsequent load of global_epoch in do_enter_critical.
    std::atomic_thread_fence(std::memory_order_seq_cst);
  }
  void clear_critical_region_flag() {
    assert(control_block != nullptr);
    assert(control_block->is_in_critical_region.load(std::memory_order_relaxed));

    // Release-store so that all preceding accesses to protected nodes
    // happen-before a scanning thread observes the cleared flag.
    control_block->is_in_critical_region.store(false, std::memory_order_release);
    for (unsigned i = 0; i < number_epochs; ++i) {
      Traits::abandon_strategy::apply(retire_lists[i], orphans[i]);
    }
  }
  void do_enter_critical() {
    if (Traits::region_extension_type == region_extension::none) {
      set_critical_region_flag();
    } else if (Traits::region_extension_type == region_extension::lazy) {
      if (!control_block->is_in_critical_region.load(std::memory_order_relaxed)) {
        set_critical_region_flag();
      }
    }
    assert(control_block->is_in_critical_region.load(std::memory_order_relaxed));

    // This acquire-load synchronizes-with the release-CAS in update_global_epoch.
    auto epoch = global_epoch.load(std::memory_order_acquire);
    if (control_block->local_epoch.load(std::memory_order_relaxed) != epoch) {
      // Some other thread has advanced the global epoch - adopt it.
      critical_entries_since_update = 0;
      update_local_epoch(epoch);
    } else if (critical_entries_since_update++ == Traits::scan_frequency) {
      critical_entries_since_update = 0;
      if (scan_strategy.scan(epoch)) {
        epoch = update_global_epoch(epoch, epoch + 1);
        update_local_epoch(epoch);
      }
    }
  }
  void update_local_epoch(epoch_t new_epoch) {
    // We either just advanced the global epoch ourselves or we are observing
    // an update by some other thread. Either way, we can reclaim the objects
    // retired in the previous incarnation of this epoch slot, as well as in
    // any slots we may have skipped while not in a critical region.
    const auto old_epoch = control_block->local_epoch.load(std::memory_order_relaxed);
    assert(new_epoch > old_epoch);
    // TSan does not support explicit fences, so use a release-store here
    // instead of a relaxed one.
    constexpr auto memory_order = TSAN_MEMORY_ORDER(std::memory_order_release, std::memory_order_relaxed);
    control_block->local_epoch.store(new_epoch, memory_order);

    auto diff = std::min<int>(static_cast<int>(number_epochs), static_cast<int>(new_epoch - old_epoch));
    epoch_t epoch_idx = local_epoch_idx;
    for (int i = diff - 1; i >= 0; --i) {
      epoch_idx = (new_epoch - i) % number_epochs;
      auto nodes = retire_lists[epoch_idx].steal();
      detail::delete_objects(nodes.first);
    }
    local_epoch_idx = epoch_idx;

    scan_strategy.reset();
  }
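  // Try to advance the global epoch from curr_epoch to new_epoch. Several
  // threads may attempt this concurrently; the CAS may therefore fail
  // benignly, in which case some other thread has already advanced the epoch.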
  epoch_t update_global_epoch(epoch_t curr_epoch, epoch_t new_epoch) {
    if (global_epoch.load(std::memory_order_relaxed) == curr_epoch) {
      // This acquire-fence synchronizes-with the release-stores of the
      // is_in_critical_region flags observed by the preceding scan.
      std::atomic_thread_fence(std::memory_order_acquire);

      // This release-CAS synchronizes-with the acquire-load in do_enter_critical.
      bool success = global_epoch.compare_exchange_strong(
        curr_epoch, new_epoch, std::memory_order_release, std::memory_order_relaxed);
      if (XENIUM_LIKELY(success)) {
        reclaim_orphans(new_epoch);
      }
    }
    return new_epoch;
  }
  void add_retired_node(detail::deletable_object* p) { retire_lists[local_epoch_idx].push(p); }
  void reclaim_orphans(epoch_t epoch) {
    auto idx = epoch % number_epochs;
    auto nodes = orphans[idx].adopt();
    detail::delete_objects(nodes);
  }
  unsigned critical_entries_since_update = 0;
  unsigned nested_critical_entries = 0;
  unsigned region_entries = 0;
  typename Traits::scan_strategy::template type<generic_epoch_based> scan_strategy;
  thread_control_block* control_block = nullptr;
  epoch_t local_epoch_idx = 0;
  std::array<typename Traits::abandon_strategy::retire_list, number_epochs> retire_lists = {};

  friend class generic_epoch_based;
  ALLOCATION_COUNTER(generic_epoch_based);
};
#ifdef TRACK_ALLOCATIONS
template <class Traits>
inline void generic_epoch_based<Traits>::count_allocation()
{ local_thread_data.allocation_counter.count_allocation(); }

template <class Traits>
inline void generic_epoch_based<Traits>::count_reclamation()
{ local_thread_data.allocation_counter.count_reclamation(); }
#endif

} // namespace reclamation
} // namespace xenium

#ifdef _MSC_VER
  #pragma warning(pop)
#endif