
Commit 1d2f595

Yumin Qi authored and committed
8255917: runtime/cds/SharedBaseAddress.java failed "assert(reserved_rgn != 0LL) failed: No reserved region"
1 parent 29a09c8 commit 1d2f595

1 file changed (+42 −25)

src/hotspot/share/memory/metaspaceShared.cpp

Lines changed: 42 additions & 25 deletions
@@ -1472,6 +1472,7 @@ MapArchiveResult MetaspaceShared::map_archives(FileMapInfo* static_mapinfo, File
           // cover both archive and class space.
           address cds_base = (address)static_mapinfo->mapped_base();
           address ccs_end = (address)class_space_rs.end();
+          assert(ccs_end > cds_base, "Sanity check");
           CompressedKlassPointers::initialize(cds_base, ccs_end - cds_base);

           // map_heap_regions() compares the current narrow oop and klass encodings
@@ -1612,34 +1613,50 @@ char* MetaspaceShared::reserve_address_space_for_archives(FileMapInfo* static_ma
       align_up(archive_space_size + gap_size + class_space_size,
                os::vm_allocation_granularity());

-  ReservedSpace total_rs;
-  if (base_address != NULL) {
-    // Reserve at the given archive base address, or not at all.
-    total_rs = ReservedSpace(total_range_size, archive_space_alignment,
-                             false /* bool large */, (char*) base_address);
+  assert(total_range_size > ccs_begin_offset, "must be");
+  if (use_windows_memory_mapping() && use_archive_base_addr) {
+    if (base_address != nullptr) {
+      address ccs_base = base_address + archive_space_size + gap_size;
+      archive_space_rs = ReservedSpace(archive_space_size, archive_space_alignment,
+                                       false /* large */, (char*)base_address);
+      class_space_rs = ReservedSpace(class_space_size, class_space_alignment,
+                                     false /* large */, (char*)ccs_base);
+    }
+    if (!archive_space_rs.is_reserved() || !class_space_rs.is_reserved()) {
+      release_reserved_spaces(archive_space_rs, class_space_rs);
+      return NULL;
+    }
   } else {
-    // Reserve at any address, but leave it up to the platform to choose a good one.
-    total_rs = Metaspace::reserve_address_space_for_compressed_classes(total_range_size);
-  }
-
-  if (!total_rs.is_reserved()) {
-    return NULL;
-  }
-
-  // Paranoid checks:
-  assert(base_address == NULL || (address)total_rs.base() == base_address,
-         "Sanity (" PTR_FORMAT " vs " PTR_FORMAT ")", p2i(base_address), p2i(total_rs.base()));
-  assert(is_aligned(total_rs.base(), archive_space_alignment), "Sanity");
-  assert(total_rs.size() == total_range_size, "Sanity");
-  assert(CompressedKlassPointers::is_valid_base((address)total_rs.base()), "Sanity");
+    ReservedSpace total_rs;
+    if (base_address != NULL) {
+      // Reserve at the given archive base address, or not at all.
+      total_rs = ReservedSpace(total_range_size, archive_space_alignment,
+                               false /* bool large */, (char*) base_address);
+    } else {
+      // Reserve at any address, but leave it up to the platform to choose a good one.
+      total_rs = Metaspace::reserve_address_space_for_compressed_classes(total_range_size);
+    }

-  // Now split up the space into ccs and cds archive. For simplicity, just leave
-  // the gap reserved at the end of the archive space.
-  archive_space_rs = total_rs.first_part(ccs_begin_offset,
-                                         (size_t)os::vm_allocation_granularity(),
-                                         /*split=*/true);
-  class_space_rs = total_rs.last_part(ccs_begin_offset);
+    if (!total_rs.is_reserved()) {
+      return NULL;
+    }

+    // Paranoid checks:
+    assert(base_address == NULL || (address)total_rs.base() == base_address,
+           "Sanity (" PTR_FORMAT " vs " PTR_FORMAT ")", p2i(base_address), p2i(total_rs.base()));
+    assert(is_aligned(total_rs.base(), archive_space_alignment), "Sanity");
+    assert(total_rs.size() == total_range_size, "Sanity");
+    assert(CompressedKlassPointers::is_valid_base((address)total_rs.base()), "Sanity");
+
+    // Now split up the space into ccs and cds archive. For simplicity, just leave
+    // the gap reserved at the end of the archive space.
+    archive_space_rs = total_rs.first_part(ccs_begin_offset,
+                                           (size_t)os::vm_allocation_granularity(),
+                                           /*split=*/false);
+    class_space_rs = total_rs.last_part(ccs_begin_offset);
+    MemTracker::record_virtual_memory_split_reserved(total_rs.base(), total_rs.size(),
+                                                     ccs_begin_offset);
+  }
   assert(is_aligned(archive_space_rs.base(), archive_space_alignment), "Sanity");
   assert(is_aligned(archive_space_rs.size(), archive_space_alignment), "Sanity");
   assert(is_aligned(class_space_rs.base(), class_space_alignment), "Sanity");
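
Note on the failure mode: the "No reserved region" assert in the bug title comes from the VM's virtual-memory bookkeeping. The old code reserved one combined range for the archive and the class space and then carved it in two with first_part()/last_part(); judging from the assert message, that can leave the tracker without a record matching the sub-range that is later released. The following is a minimal standalone sketch, not HotSpot code, of that failure mode and of the remedy the patch applies via MemTracker::record_virtual_memory_split_reserved. The RegionTracker class and its reserve/split_reserved/release helpers are hypothetical stand-ins for illustration only.

// Minimal sketch (assumption: not HotSpot code). A toy tracker that, like a
// native-memory tracker, only knows about regions it was explicitly told about.
// RegionTracker, reserve(), split_reserved() and release() are hypothetical names.
#include <cassert>
#include <cstddef>
#include <cstdint>
#include <vector>

struct Region {
  uintptr_t base;
  size_t    size;
};

class RegionTracker {
  std::vector<Region> _regions;
public:
  // Record a freshly reserved range.
  void reserve(uintptr_t base, size_t size) {
    _regions.push_back({base, size});
  }

  // Loosely analogous to MemTracker::record_virtual_memory_split_reserved in the
  // patch: replace one tracked range with its two parts at split_offset.
  void split_reserved(uintptr_t base, size_t size, size_t split_offset) {
    for (size_t i = 0; i < _regions.size(); i++) {
      if (_regions[i].base == base && _regions[i].size == size) {
        _regions[i].size = split_offset;                                // first part
        _regions.push_back({base + split_offset, size - split_offset}); // second part
        return;
      }
    }
    assert(false && "No reserved region to split");
  }

  // Releasing requires an exactly matching tracked range; otherwise we fail in
  // the same way as assert(reserved_rgn != NULL) "No reserved region".
  void release(uintptr_t base, size_t size) {
    for (size_t i = 0; i < _regions.size(); i++) {
      if (_regions[i].base == base && _regions[i].size == size) {
        _regions.erase(_regions.begin() + (ptrdiff_t)i);
        return;
      }
    }
    assert(false && "No reserved region");
  }
};

int main() {
  RegionTracker tracker;
  const uintptr_t base         = 0x10000000;  // hypothetical base address
  const size_t    total_size   = 0x01000000;  // one combined reservation
  const size_t    split_offset = 0x00c00000;  // where the second part begins

  tracker.reserve(base, total_size);

  // Without this call, releasing just one part below would trip
  // "No reserved region": the tracker only knows the whole range.
  tracker.split_reserved(base, total_size, split_offset);

  tracker.release(base + split_offset, total_size - split_offset); // ok after the split
  tracker.release(base, split_offset);                             // ok after the split
  return 0;
}

In the patch itself, the combined-reservation path now passes /*split=*/false to first_part() and records the split explicitly via MemTracker::record_virtual_memory_split_reserved, while the Windows mapping path with a fixed base address avoids the split altogether by reserving the archive space and the class space as two separate mappings.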
