8358340: Support CDS heap archive with Generational Shenandoah #25597

Draft · wants to merge 4 commits into master
25 changes: 19 additions & 6 deletions src/hotspot/share/gc/shenandoah/shenandoahHeap.cpp
@@ -2779,8 +2779,13 @@ HeapWord* ShenandoahHeap::allocate_loaded_archive_space(size_t size) {
   // Flip humongous -> regular.
   {
     ShenandoahHeapLocker locker(lock(), false);
+    // The 'waste' in the last region is no longer wasted at this point,
+    // so we must stop treating it as such.
+    ShenandoahHeapRegion* last = get_region(start_idx + num_regions - 1);
+    last->decrement_humongous_waste();
     for (size_t c = start_idx; c < start_idx + num_regions; c++) {
-      get_region(c)->make_regular_bypass();
+      ShenandoahHeapRegion* r = get_region(c);
+      r->make_regular_bypass();
     }
   }
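For context on the waste adjustment above: CDS asks the heap for one contiguous chunk, Shenandoah satisfies it as a humongous allocation and books the unused tail of the last region as humongous waste, and once the regions are flipped to regular that tail becomes ordinary allocatable space again, so the counter has to be decremented. A minimal, self-contained sketch of that accounting (ToyHeap, region_words and the word counts below are illustrative stand-ins, not HotSpot's types):

// Illustrative model only: region_words and the waste counter are simplified
// stand-ins for ShenandoahHeapRegion's real bookkeeping.
#include <cassert>
#include <cstddef>

struct ToyHeap {
  static constexpr size_t region_words = 1024;
  size_t humongous_waste_words = 0;

  // Humongous allocation: 'size_words' spread over whole regions; the unused
  // tail of the last region is recorded as waste.
  size_t allocate_humongous(size_t size_words) {
    size_t num_regions = (size_words + region_words - 1) / region_words;
    humongous_waste_words += num_regions * region_words - size_words;
    return num_regions;
  }

  // Flip to regular regions after the chunk has been filled with objects:
  // the tail of the last region is ordinary free space again, not waste.
  void flip_to_regular(size_t size_words, size_t num_regions) {
    size_t tail = num_regions * region_words - size_words;
    assert(humongous_waste_words >= tail && "cannot give back more than was recorded");
    humongous_waste_words -= tail;   // plays the role of decrement_humongous_waste()
  }
};

int main() {
  ToyHeap heap;
  size_t request = 2500;                        // not a multiple of region_words
  size_t n = heap.allocate_humongous(request);  // 3 regions, 572 words of waste
  heap.flip_to_regular(request, n);
  assert(heap.humongous_waste_words == 0);      // waste fully given back
  return 0;
}

In the PR itself the decrement now happens here, under the heap lock, rather than inside make_regular_bypass() (see the shenandoahHeapRegion.cpp hunk below).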

@@ -2802,7 +2807,7 @@ void ShenandoahHeap::complete_loaded_archive_space(MemRegion archive_space) {
   HeapWord* cur = start;
   while (cur < end) {
     oop oop = cast_to_oop(cur);
-    shenandoah_assert_in_correct_region(nullptr, oop);
+    shenandoah_assert_correct(nullptr, oop);
     cur += oop->size();
   }

@@ -2811,11 +2816,19 @@ void ShenandoahHeap::complete_loaded_archive_space(MemRegion archive_space) {
          "Archive space should be fully used: " PTR_FORMAT " " PTR_FORMAT,
          p2i(cur), p2i(end));

+  size_t begin_reg_idx = heap_region_index_containing(start);
+  size_t end_reg_idx = heap_region_index_containing(end);
+  for (size_t idx = begin_reg_idx; idx <= end_reg_idx; idx++) {
+    ShenandoahHeapRegion* r = get_region(idx);
+    assert(r->is_regular(), "Must be");
+    assert(r->get_update_watermark() == r->bottom(), "Must be");
+    assert(idx == end_reg_idx || r->top() == r->end(), "Must be");
+    assert(r->affiliation() == YOUNG_GENERATION, "Should be young");
+  }
+
   // Region bounds are good.
-  ShenandoahHeapRegion* begin_reg = heap_region_containing(start);
-  ShenandoahHeapRegion* end_reg = heap_region_containing(end);
-  assert(begin_reg->is_regular(), "Must be");
-  assert(end_reg->is_regular(), "Must be");
+  ShenandoahHeapRegion* begin_reg = get_region(begin_reg_idx);
+  ShenandoahHeapRegion* end_reg = get_region(end_reg_idx);
   assert(begin_reg->bottom() == start,
          "Must agree: archive-space-start: " PTR_FORMAT ", begin-region-bottom: " PTR_FORMAT,
          p2i(start), p2i(begin_reg->bottom()));
2 changes: 1 addition & 1 deletion src/hotspot/share/gc/shenandoah/shenandoahHeap.hpp
@@ -681,7 +681,7 @@ class ShenandoahHeap : public CollectedHeap {

   // ---------- CDS archive support

-  bool can_load_archived_objects() const override { return !ShenandoahCardBarrier; }
+  bool can_load_archived_objects() const override { return true; }
   HeapWord* allocate_loaded_archive_space(size_t size) override;
   void complete_loaded_archive_space(MemRegion archive_space) override;

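The three overrides above are the GC's side of the CDS heap-archive handshake: the loader checks can_load_archived_objects(), asks for a contiguous buffer via allocate_loaded_archive_space(), copies the archived objects into it, and then calls complete_loaded_archive_space() so the GC can fix up per-region state. A rough sketch of that calling sequence under simplified, hypothetical types (ToyCollectedHeap, load_archived_heap and HeapWord-as-size_t are illustrative, not the real ArchiveHeapLoader code):

// Hypothetical stand-ins for the loader-facing side of heap-archive loading;
// the real flow lives in ArchiveHeapLoader and the concrete CollectedHeap.
#include <cstddef>
#include <cstring>
#include <vector>

using HeapWord = size_t;   // toy stand-in for HotSpot's HeapWord

struct ToyCollectedHeap {
  std::vector<HeapWord> space;

  bool can_load_archived_objects() const { return true; }

  // Hand the loader a contiguous, GC-visible buffer of 'size' words.
  HeapWord* allocate_loaded_archive_space(size_t size) {
    space.resize(size);
    return space.data();
  }

  // Called once the objects are in place; a real implementation verifies and
  // fixes up region state (affiliation, watermarks, top) for the range.
  void complete_loaded_archive_space(HeapWord* start, size_t size) {
    (void)start;
    (void)size;
  }
};

// Loader side of the handshake: check, allocate, copy, complete.
bool load_archived_heap(ToyCollectedHeap& heap, const HeapWord* archived, size_t size) {
  if (!heap.can_load_archived_objects()) {
    return false;                    // caller falls back to normal object creation
  }
  HeapWord* buffer = heap.allocate_loaded_archive_space(size);
  std::memcpy(buffer, archived, size * sizeof(HeapWord));
  heap.complete_loaded_archive_space(buffer, size);
  return true;
}

int main() {
  ToyCollectedHeap heap;
  std::vector<HeapWord> archive(128, 0);   // pretend archived image
  return load_archived_heap(heap, archive.data(), archive.size()) ? 0 : 1;
}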
14 changes: 4 additions & 10 deletions src/hotspot/share/gc/shenandoah/shenandoahHeapRegion.cpp
@@ -147,22 +147,13 @@ void ShenandoahHeapRegion::make_regular_bypass() {
          ShenandoahHeap::heap()->is_degenerated_gc_in_progress(),
          "Only for STW GC or when Universe is initializing (CDS)");
   reset_age();
-  auto cur_state = state();
-  switch (cur_state) {
+  switch (state()) {
     case _empty_uncommitted:
       do_commit();
     case _empty_committed:
     case _cset:
     case _humongous_start:
     case _humongous_cont:
-      if (cur_state == _humongous_start || cur_state == _humongous_cont) {
-        // CDS allocates chunks of the heap to fill with regular objects. The allocator
-        // will dutifully track any waste in the unused portion of the last region. Once
-        // CDS has finished initializing the objects, it will convert these regions to
-        // regular regions. The 'waste' in the last region is no longer wasted at this point,
-        // so we must stop treating it as such.
-        decrement_humongous_waste();
-      }
       set_state(_regular);
       return;
     case _pinned_cset:
@@ -813,6 +804,9 @@ void ShenandoahHeapRegion::do_commit() {
   if (AlwaysPreTouch) {
     os::pretouch_memory(bottom(), end(), heap->pretouch_heap_page_size());
   }
+  if (ZapUnusedHeapArea) {
+    SpaceMangler::mangle_region(MemRegion(_bottom, _end));
+  }
   heap->increase_committed(ShenandoahHeapRegion::region_size_bytes());
 }

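On the ZapUnusedHeapArea hunk above: in debug builds this flag makes the VM fill newly committed but not-yet-used heap memory with a poison pattern, so reads of uninitialized heap words fail loudly rather than silently; the change applies that mangling when a Shenandoah region is committed. A toy sketch of the idea (toy_mangle_region and the pattern value are illustrative; SpaceMangler has its own constants and checks):

// Toy illustration only: fills a committed range with a poison pattern the way
// a debug VM marks unused heap; the pattern and helpers are made up here.
#include <cassert>
#include <cstdint>
#include <vector>

constexpr uint64_t kToyBadHeapWord = 0xBAADBABEBAADBABEull;   // hypothetical pattern

// Fill [start, end) with the poison pattern, like mangling a freshly
// committed region.
void toy_mangle_region(uint64_t* start, uint64_t* end) {
  for (uint64_t* p = start; p < end; ++p) {
    *p = kToyBadHeapWord;
  }
}

// A debug check can later assert that memory handed out for a new object
// still carries the pattern, i.e. nothing scribbled on it while "unused".
bool toy_is_mangled(const uint64_t* start, const uint64_t* end) {
  for (const uint64_t* p = start; p < end; ++p) {
    if (*p != kToyBadHeapWord) {
      return false;
    }
  }
  return true;
}

int main() {
  std::vector<uint64_t> region(512);    // stands in for one committed region
  toy_mangle_region(region.data(), region.data() + region.size());
  assert(toy_is_mangled(region.data(), region.data() + region.size()));
  return 0;
}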
3 changes: 2 additions & 1 deletion src/hotspot/share/gc/shenandoah/shenandoahHeapRegion.hpp
@@ -491,8 +491,9 @@ class ShenandoahHeapRegion {
     _needs_bitmap_reset = false;
   }

- private:
   void decrement_humongous_waste() const;

+ private:
   void do_commit();
   void do_uncommit();
3 changes: 3 additions & 0 deletions src/hotspot/share/oops/instanceMirrorKlass.hpp
@@ -52,6 +52,9 @@ class InstanceMirrorKlass: public InstanceKlass {

   InstanceMirrorKlass(const ClassFileParser& parser) : InstanceKlass(parser, Kind) {}

+  template <class OopClosureType>
+  void do_metadata(oop obj, OopClosureType* closure);
+
  public:
   InstanceMirrorKlass();

58 changes: 26 additions & 32 deletions src/hotspot/share/oops/instanceMirrorKlass.inline.hpp
@@ -46,38 +46,36 @@ void InstanceMirrorKlass::oop_oop_iterate_statics(oop obj, OopClosureType* closu
     }
   }

+template <class OopClosureType>
+void InstanceMirrorKlass::do_metadata(oop obj, OopClosureType* closure) {
+  Klass* klass = java_lang_Class::as_Klass(obj);
+  if (klass != nullptr) {
+    if (klass->class_loader_data() == nullptr) {
+      // This is a mirror that belongs to a shared class that has not been loaded yet.
+      assert(klass->is_shared(), "must be");
+    } else if (klass->is_instance_klass() && klass->class_loader_data()->has_class_mirror_holder()) {
+      // A non-strong hidden class doesn't have its own class loader,
+      // so when handling the java mirror for the class we need to make sure its class
+      // loader data is claimed, this is done by calling do_cld explicitly.
+      // For non-strong hidden classes the call to do_cld is made when the class
+      // loader itself is handled.
+      Devirtualizer::do_cld(closure, klass->class_loader_data());
+    } else {
+      Devirtualizer::do_klass(closure, klass);
+    }
+  } else {
+    // Klass is null means this has been a mirror for a primitive type
+    // that we do not need to follow as they are always strong roots.
+    assert(java_lang_Class::is_primitive(obj), "Sanity check");
+  }
+}
+
 template <typename T, class OopClosureType>
 void InstanceMirrorKlass::oop_oop_iterate(oop obj, OopClosureType* closure) {
   InstanceKlass::oop_oop_iterate<T>(obj, closure);

   if (Devirtualizer::do_metadata(closure)) {
-    Klass* klass = java_lang_Class::as_Klass(obj);
-    // We'll get null for primitive mirrors.
-    if (klass != nullptr) {
-      if (klass->class_loader_data() == nullptr) {
-        // This is a mirror that belongs to a shared class that has not be loaded yet.
-        assert(klass->is_shared(), "must be");
-      } else if (klass->is_instance_klass() && klass->class_loader_data()->has_class_mirror_holder()) {
-        // A non-strong hidden class doesn't have its own class loader,
-        // so when handling the java mirror for the class we need to make sure its class
-        // loader data is claimed, this is done by calling do_cld explicitly.
-        // For non-strong hidden classes the call to do_cld is made when the class
-        // loader itself is handled.
-        Devirtualizer::do_cld(closure, klass->class_loader_data());
-      } else {
-        Devirtualizer::do_klass(closure, klass);
-      }
-    } else {
-      // We would like to assert here (as below) that if klass has been null, then
-      // this has been a mirror for a primitive type that we do not need to follow
-      // as they are always strong roots.
-      // However, we might get across a klass that just changed during CMS concurrent
-      // marking if allocation occurred in the old generation.
-      // This is benign here, as we keep alive all CLDs that were loaded during the
-      // CMS concurrent phase in the class loading, i.e. they will be iterated over
-      // and kept alive during remark.
-      // assert(java_lang_Class::is_primitive(obj), "Sanity check");
-    }
+    do_metadata<OopClosureType>(obj, closure);
   }

   oop_oop_iterate_statics<T>(obj, closure);
@@ -121,11 +119,7 @@ void InstanceMirrorKlass::oop_oop_iterate_bounded(oop obj, OopClosureType* closu

   if (Devirtualizer::do_metadata(closure)) {
     if (mr.contains(obj)) {
-      Klass* klass = java_lang_Class::as_Klass(obj);
-      // We'll get null for primitive mirrors.
-      if (klass != nullptr) {
-        Devirtualizer::do_klass(closure, klass);
-      }
+      do_metadata<OopClosureType>(obj, closure);
     }
   }

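The instanceMirrorKlass change is essentially a refactor: the metadata-visiting logic that oop_oop_iterate carried inline, and that oop_oop_iterate_bounded duplicated in reduced form, moves into a single do_metadata template used by both paths. One visible consequence is that the bounded path now performs the same shared-class and hidden-class (do_cld) handling as the unbounded path instead of only calling do_klass. The shape of the refactor, reduced to a self-contained toy (ToyMirrorVisitor, Mirror and PrintClosure are illustrative, not the HotSpot types):

// Toy illustration of the refactoring pattern: one templated helper holds the
// shared per-object metadata logic and is reused by the bounded and unbounded
// iteration paths.
#include <cstdio>

struct Mirror {
  const char* name;
  bool has_metadata;
};

class ToyMirrorVisitor {
 public:
  // Shared logic, written once (plays the role of do_metadata).
  template <class Closure>
  static void do_metadata(const Mirror& m, Closure* cl) {
    if (m.has_metadata) {
      cl->visit(m.name);
    }
    // else: nothing to follow (e.g. a primitive-type mirror)
  }

  // Unbounded iteration path.
  template <class Closure>
  static void iterate(const Mirror& m, Closure* cl) {
    do_metadata(m, cl);
  }

  // Bounded iteration path: same helper, extra range check first.
  template <class Closure>
  static void iterate_bounded(const Mirror& m, Closure* cl, bool in_range) {
    if (in_range) {
      do_metadata(m, cl);
    }
  }
};

struct PrintClosure {
  void visit(const char* name) { std::printf("visit %s\n", name); }
};

int main() {
  Mirror mirrors[] = { {"java/lang/String", true}, {"int (primitive)", false} };
  PrintClosure cl;
  for (const Mirror& m : mirrors) {
    ToyMirrorVisitor::iterate(m, &cl);
    ToyMirrorVisitor::iterate_bounded(m, &cl, /*in_range=*/true);
  }
  return 0;
}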