#ifndef __W_GC_POOL_FOREST_H
#define __W_GC_POOL_FOREST_H

/* ... type definitions and class declarations elided from this listing ... */

// GcPointer<T>: wrapper for gc_pointer_raw, a logical pointer carrying a
// mark-for-death bit, an ABA counter, and generation/segment/offset fields.
bool operator==(const GcPointer& other) const { return _raw == other._raw; }
bool operator!=(const GcPointer& other) const { return _raw != other._raw; }

void set_mark(bool on) {
    _raw.components.status = get_aba() | (on ? 0x80000000 : 0);
}
bool is_marked() const { return _raw.components.status < 0; }
gc_aba get_aba() const {
    return static_cast<gc_aba>(_raw.components.status & 0x7FFFFFFF);
}
void set_aba(gc_aba stamp) {
    _raw.components.status = (_raw.components.status & 0x80000000)
        | static_cast<gc_status>(stamp);
}
gc_generation get_generation() const { return _raw.components.generation; }
gc_segment get_segment() const { return _raw.components.segment; }
gc_offset get_offset() const { return _raw.components.offset; }
bool is_null() const { return get_generation() == 0; }

// [Atomic] compare-and-swap of pointer, mark, and ABA counter altogether.
bool atomic_cas(const GcPointer& expected, const GcPointer& desired) {
    gc_pointer_raw expected_tmp = expected._raw;  // CAS needs a mutable expected value
    return lintel::unsafe::atomic_compare_exchange_strong<gc_pointer_raw>(
        &_raw, &expected_tmp, desired._raw);
}
// [Atomic] atomic swap.
GcPointer atomic_swap(const GcPointer& new_ptr) {
    return GcPointer(lintel::unsafe::atomic_exchange<gc_pointer_raw>(&_raw, new_ptr._raw));
}
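// Illustrative usage sketch, not part of the original header: combining the
// mark-for-death bit with atomic_cas(). The helper name and "ExampleNode" are
// hypothetical; the pattern is snapshot, modify a copy, publish via CAS.
template <class ExampleNode>
inline bool example_mark_for_death(GcPointer<ExampleNode>& slot) {
    GcPointer<ExampleNode> expected = slot;   // snapshot pointer value + ABA counter
    GcPointer<ExampleNode> desired = expected;
    desired.set_mark(true);                   // set the logical-deletion bit only
    // Succeeds only if the pointer (including its ABA stamp) is still unchanged.
    return slot.atomic_cas(expected, desired);
}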
// GcSegment<T>: constructing a segment allocates its object array; recycle()
// simply resets the allocation cursor.
GcSegment(gc_offset size) : /* ... */ allocated_objects(0) {
    objects = new T[size];
}
void recycle() {
    /* ... */
    allocated_objects = 0;
}
GcGeneration(uint32_t generation_nowrap_arg)
    : retire_suggested(false),
      /* ... */
      allocated_segments(0),
      generation_nowrap(generation_nowrap_arg) {}

~GcGeneration() {
    DBGOUT1(<< "Destroying a GC Generation " << generation_nowrap
        << ". total_segments=" << total_segments
        << ", allocated_segments=" << allocated_segments);
    for (gc_segment i = 0; i < total_segments; ++i) {
        /* ... */
    }
}

uint32_t get_free_count() const { return total_segments - allocated_segments; }
bool preallocate_segments(size_t segment_count, gc_offset segment_size);

// Reuse this generation under a new (non-wrapped) generation number.
void recycle(uint32_t generation_nowrap_arg) {
    retire_suggested = false;
    generation_nowrap = generation_nowrap_arg;
    for (uint32_t seg = 0; seg < total_segments; ++seg) {
        if (segments[seg]->owner != 0) {
            segments[seg]->recycle();
        }
    }
    allocated_segments = 0;
}
// GcWakeupFunctor: callback the pool invokes (via gc_wakeup_functor) to wake up
// the GC maintenance thread when allocations outpace segment pre-allocation.
virtual void wakeup() = 0;
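// Illustrative sketch, not part of the original header: a minimal GcWakeupFunctor
// implementation. A pool exposes a gc_wakeup_functor pointer that allocate() calls
// when it falls behind; this example merely raises an atomic flag that a
// hypothetical GC maintenance thread would poll.
#include <atomic>
struct ExampleFlagWakeup : public GcWakeupFunctor {
    std::atomic<bool> wake_requested;
    ExampleFlagWakeup() : wake_requested(false) {}
    virtual void wakeup() {
        wake_requested.store(true, std::memory_order_release);
    }
};
// Hook-up (hypothetical): forest.gc_wakeup_functor = new ExampleFlagWakeup();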
GcPoolForest(const char* debug_name, uint32_t desired_gens,
             size_t initial_segment_count, gc_offset initial_segment_size)
    : /* ... */
      desired_generations(desired_gens),
      gc_wakeup_functor(nullptr) {
    /* ... */
    generations[0] = nullptr;   // generation 0 is reserved as the null generation
    /* ... */
    if (initial_segment_count > 0) {
        /* ... */
    }
}

~GcPoolForest() {
    DBGOUT1(<< name << ": Destroying a GC Pool Forest. head_nowrap=" << head_nowrap
        << ", tail_nowrap=" << tail_nowrap);
    for (size_t i = head_nowrap; i < tail_nowrap; ++i) {
        delete generations[wrap(i)];
    }
}
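// Illustrative construction sketch, not part of the original header. "PooledEntry"
// is a hypothetical object type; the pool requires it to carry the gc_pointer_raw
// member named gc_pointer that allocate()/deallocate() use. The numeric arguments
// are assumed tuning values, not defaults from this header.
struct PooledEntry {
    gc_pointer_raw gc_pointer;   // written by the pool on allocation
    uint64_t       payload;      // example user data
};
inline GcPoolForest<PooledEntry>* example_make_forest() {
    // debug name, desired generation count, initial segments, objects per segment
    return new GcPoolForest<PooledEntry>("example-pool", 4, 2, 1024);
}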
// GcPoolForest<T>: pointer/segment resolution and generation bookkeeping.
// (Some bodies are only partially shown in this listing.)
// resolve_pointer(const GcPointer<T>&) -- body only partially shown:
//     return ... pointer.raw() ...;
GcGeneration<T>* resolve_generation(gc_generation gen) { return generations[gen]; }
// resolve_segment(gc_generation gen, gc_segment seg) -- body only partially shown:
//     w_assert1(seg < generation->total_segments);
void deallocate(T* pointer) { deallocate(pointer->gc_pointer); }

gc_generation head() const { return wrap(head_nowrap); }
gc_generation tail() const { return wrap(tail_nowrap); }
gc_generation curr() const { return wrap(curr_nowrap); }
size_t active_generations() const { return tail_nowrap - head_nowrap; }
GcGeneration<T>* head_generation() { return generations[head()]; }
GcGeneration<T>* curr_generation() { return generations[curr()]; }

bool is_valid_generation(gc_generation generation) const {
    if (generation == 0) {
        return false;   // generation 0 is the null generation
    }
    if (head() < tail()) {
        return generation >= head() && generation < tail();
    } else if (tail() < head()) {
        // the generation range has wrapped around
        return generation >= head()
            || generation < tail();
    }
    /* ... */
}
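// Worked example of the wrap-around case above (hypothetical numbers, assuming
// wrap() reduces the no-wrap counters modulo GC_MAX_GENERATIONS): with head() == 6
// and tail() == 2, the live generations are 6, 7, ..., GC_MAX_GENERATIONS - 1,
// then 1; so a generation is valid when it is >= head() OR < tail(), while
// generation 0 is always rejected because it denotes the null pointer.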
bool advance_generation(lsn_t low_water_mark, lsn_t now,
                        size_t initial_segment_count, gc_offset segment_size);
// Resolving a raw pointer to an object (fragments only): the null generation is
// handled first, then the generation, segment, and offset are validated.
if (generation == 0) {
    /* ... */
}
/* ... */
w_assert1(is_valid_generation(generation));
/* ... */
w_assert1(offset < segment->total_objects);
w_assert1(offset < segment->allocated_objects);
template <class T>
T* GcPoolForest<T>::allocate(gc_pointer_raw& next, gc_thread_id self) {
    // Fragments only: when the current segment is exhausted, a fresh segment is
    // occupied; the chosen slot's logical pointer is recorded in the object.
    /* ... */
    next = occupy_segment(self);
    /* ... */
    next = occupy_segment(self);
    /* ... */
    ret->gc_pointer = next;
    /* ... */
}
template <class T>
gc_pointer_raw GcPoolForest<T>::occupy_segment(gc_thread_id self) {
    /* ... */
    // If no pre-allocated segment is available, ask the GC thread for help and
    // sleep briefly before retrying.
    DBGOUT0(<< name << ": GC Thread is not catching up. have to sleep. me=" << self);
    if (gc_wakeup_functor != nullptr) {
        gc_wakeup_functor->wakeup();
    }
    const uint32_t SLEEP_MICROSEC = 10000;
    ::usleep(SLEEP_MICROSEC);
    /* ... */
    // Claim the next pre-allocated segment index with a CAS, wait until the
    // segment object is published by the pre-allocating thread, then own it.
    if (lintel::unsafe::atomic_compare_exchange_strong<uint32_t>(
            /* ... */)) {
        /* ... */
        while (segment == nullptr) {
            /* ... */
            DBGOUT3(<< name << ": Waiting for segment " << alloc_segment
                << " in generation " /* ... */);
            segment = gen->segments[alloc_segment];
        }
        /* ... */
        DBGOUT1(<< name << ": Occupied a pre-allocated Segment " << alloc_segment
            /* ... */);
        segment->owner = self;
        /* ... */
    }
    /* ... */
    DBGOUT1(<< "Oops, CAS failed");
    /* ... */
}
// advance_generation: retire old generations when possible, then try to install
// a new tail generation with pre-allocated segments.
template <class T>
bool GcPoolForest<T>::advance_generation(lsn_t low_water_mark, lsn_t now,
        size_t segment_count, gc_offset segment_size) {
    /* ... */
    if (low_water_mark != lsn_t::null && active_generations() >= desired_generations) {
        /* ... */
        uint32_t old_tail_nowrap = tail_nowrap;
        retire_generations(low_water_mark, now);
        if (old_tail_nowrap < tail_nowrap) {
            /* ... */
        }
        /* ... */
    }
    /* ... */
    ERROUT(<< name << ": Too many generations!");
    /* ... */
    uint32_t new_generation_nowrap = tail_nowrap;
    uint32_t new_tail = new_generation_nowrap + 1;
    if (wrap(new_tail) == 0) {
        DBGOUT1(<< name << ": Generation wrapped!");
        /* ... */
    }
    /* ... */
    if (lintel::unsafe::atomic_compare_exchange_strong<uint32_t>(
            &tail_nowrap, &new_generation_nowrap, new_tail)) {
        /* ... */
        generations[new_generation] = new GcGeneration<T>(new_generation_nowrap);
        epochs[new_generation] = now;
        generations[new_generation]->preallocate_segments(segment_count, segment_size);
        DBGOUT1(<< name << ": Generation " << new_generation_nowrap
            << " created. epoch=" << now.data());
        curr_nowrap = new_generation_nowrap;
        /* ... */
    }
    /* ... */
}
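// Illustrative sketch, not part of the original header: one round of a GC
// maintenance thread driving generation turnover. The low-water-mark source and
// the tuning constants are assumptions for this example.
template <class T>
inline void example_gc_round(GcPoolForest<T>& forest, lsn_t low_water_mark, lsn_t now) {
    const size_t SEGMENTS_PER_GENERATION = 4;    // assumed tuning knob
    const gc_offset OBJECTS_PER_SEGMENT = 1024;  // assumed tuning knob
    // Retires generations the low water mark has passed and, when possible,
    // opens a fresh tail generation with pre-allocated segments.
    forest.advance_generation(low_water_mark, now,
                              SEGMENTS_PER_GENERATION, OBJECTS_PER_SEGMENT);
}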
template <class T>
bool GcGeneration<T>::preallocate_segments(size_t segment_count, gc_offset segment_size) {
    /* ... */
    while (segment_count > 0) {
        /* ... */
        uint32_t new_segment = total_segments;
        if (lintel::unsafe::atomic_compare_exchange_strong<uint32_t>(
                &total_segments, &new_segment, new_segment + 1)) {
            /* ... */
            segments[new_segment] = seg;
            /* ... */
            DBGOUT1(<< "Pre-allocated Segment " << new_segment << " in generation "
                << generation_nowrap << ". segment size=" << segment_size << ".");
            /* ... */
        }
        /* ... */
    }
    /* ... */
}
template <class T>
void GcPoolForest<T>::retire_generations(lsn_t low_water_mark, lsn_t recycle_now) {
    /* ... */
    uint32_t oldest_nowrap = head_nowrap;
    uint32_t next_oldest_nowrap = oldest_nowrap + 1;
    if (wrap(next_oldest_nowrap) == 0) {
        ++next_oldest_nowrap;   // skip the reserved null generation
    }
    /* ... */
    const uint32_t MIN_HEALTHY_GENERATIONS = 2;
    if (tail_nowrap <= next_oldest_nowrap + MIN_HEALTHY_GENERATIONS) {
        /* ... */
    }
    /* ... */
    // The oldest generation can go only when the low water mark has passed the
    // epoch at which the following generation was opened.
    if (low_water_mark >= epochs[wrap(next_oldest_nowrap)]) {
        /* ... */
        if (lintel::unsafe::atomic_compare_exchange_strong<uint32_t>(
                &head_nowrap, &oldest_nowrap, next_oldest_nowrap)) {
            /* ... */
            generations[wrap(oldest_nowrap)] = nullptr;
            epochs[wrap(oldest_nowrap)].set(0);
            /* ... */
            DBGOUT1(<< name << ": Successfully retired generation " << oldest_nowrap);
            if (recycle_now != lsn_t::null && active_generations() <= desired_generations) {
                DBGOUT1(<< "Now recycling it as new generation ...");
                /* ... */
                uint32_t new_generation_nowrap = tail_nowrap;
                uint32_t new_tail = new_generation_nowrap + 1;
                if (wrap(new_tail) == 0) {
                    DBGOUT1(<< name << ": Generation wrapped!");
                    /* ... */
                }
                /* ... */
                if (lintel::unsafe::atomic_compare_exchange_strong<uint32_t>(
                        &tail_nowrap, &new_generation_nowrap, new_tail)) {
                    oldest->recycle(new_generation_nowrap);
                    generations[wrap(new_generation_nowrap)] = oldest;
                    epochs[wrap(new_generation_nowrap)].set(recycle_now.data());
                    curr_nowrap = new_generation_nowrap;
                    DBGOUT1(<< name << ": Successfully recycled as gen " << new_generation_nowrap);
                } else {
                    DBGOUT1(<< name << ": Oops, others incremented generation. couldn't "
                        << " reuse the retired generation");
                }
            }
        } else {
            DBGOUT1(<< name << ": Oops, CAS failed, someone has retired it?");
        }
    }
    /* ... */
}
#endif // __W_GC_POOL_FOREST_H