diff --git a/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/AddressRangeCommittedMemoryProvider.java b/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/AddressRangeCommittedMemoryProvider.java index 6db8a46a6fb9..f9c06ee7c8d2 100644 --- a/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/AddressRangeCommittedMemoryProvider.java +++ b/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/AddressRangeCommittedMemoryProvider.java @@ -108,6 +108,7 @@ public class AddressRangeCommittedMemoryProvider extends ChunkBasedCommittedMemo protected static final int COMMIT_FAILED = 2; private static final OutOfMemoryError NODE_ALLOCATION_FAILED = new OutOfMemoryError("Could not allocate node for free list, OS may be out of memory."); + private static final OutOfMemoryError OUT_OF_METASPACE = new OutOfMemoryError("Could not allocate a metaspace chunk because the metaspace is exhausted."); private static final OutOfMemoryError ALIGNED_OUT_OF_ADDRESS_SPACE = new OutOfMemoryError("Could not allocate an aligned heap chunk because the heap address space is exhausted. " + "Consider increasing the address space size (see option -XX:ReservedAddressSpaceSize)."); private static final OutOfMemoryError UNALIGNED_OUT_OF_ADDRESS_SPACE = new OutOfMemoryError("Could not allocate an unaligned heap chunk because the heap address space is exhausted. " + @@ -129,7 +130,8 @@ public class AddressRangeCommittedMemoryProvider extends ChunkBasedCommittedMemo protected FreeListNode unusedListHead; protected long unusedListCount; - protected UnsignedWord reservedSpaceSize; + protected UnsignedWord reservedAddressSpaceSize; + protected UnsignedWord reservedMetaspaceSize; protected Pointer collectedHeapBegin; protected UnsignedWord collectedHeapSize; @@ -212,7 +214,7 @@ private static int initialize(Pointer spaceBegin, UnsignedWord spaceSize, Pointe @SuppressWarnings("hiding") @Uninterruptible(reason = CALLED_FROM_UNINTERRUPTIBLE_CODE, mayBeInlined = true) protected int initializeFields(Pointer spaceBegin, UnsignedWord reservedSpaceSize, Pointer collectedHeapBegin) { - this.reservedSpaceSize = reservedSpaceSize; + this.reservedAddressSpaceSize = reservedSpaceSize; this.collectedHeapBegin = collectedHeapBegin; this.collectedHeapSize = spaceBegin.add(reservedSpaceSize).subtract(collectedHeapBegin); @@ -320,12 +322,38 @@ public int tearDown() { @Uninterruptible(reason = "Tear-down in progress.") protected int unmapAddressSpace(PointerBase heapBase) { - if (VirtualMemoryProvider.get().free(heapBase, reservedSpaceSize) != 0) { + if (VirtualMemoryProvider.get().free(heapBase, reservedAddressSpaceSize) != 0) { return CEntryPointErrors.FREE_ADDRESS_SPACE_FAILED; } return CEntryPointErrors.NO_ERROR; } + @Override + @Uninterruptible(reason = CALLED_FROM_UNINTERRUPTIBLE_CODE, mayBeInlined = true) + public Pointer allocateMetaspaceChunk(UnsignedWord nbytes, UnsignedWord alignment) { + WordPointer allocOut = UnsafeStackValue.get(WordPointer.class); + int error = allocateInHeapAddressSpace(nbytes, alignment, allocOut); + if (error == NO_ERROR) { + if (VMInspectionOptions.hasNativeMemoryTrackingSupport()) { + NativeMemoryTracking.singleton().trackCommit(nbytes, NmtCategory.Metaspace); + } + return allocOut.read(); + } + throw reportMetaspaceChunkAllocationFailed(error); + } + + @Uninterruptible(reason = CALLED_FROM_UNINTERRUPTIBLE_CODE, mayBeInlined = true) + protected OutOfMemoryError 
reportMetaspaceChunkAllocationFailed(int error) { + /* Explicitly don't use OutOfMemoryUtil as the metaspace is not part of the Java heap. */ + if (error == OUT_OF_ADDRESS_SPACE) { + throw OUT_OF_METASPACE; + } else if (error == COMMIT_FAILED) { + throw METASPACE_CHUNK_COMMIT_FAILED; + } else { + throw VMError.shouldNotReachHereAtRuntime(); + } + } + @Override @Uninterruptible(reason = CALLED_FROM_UNINTERRUPTIBLE_CODE, mayBeInlined = true) public Pointer allocateAlignedChunk(UnsignedWord nbytes, UnsignedWord alignment) { @@ -698,7 +726,7 @@ protected void mergeNodes(FreeListNode target, FreeListNode obsolete) { private void increaseBounds(FreeListNode node, Pointer otherStart, UnsignedWord otherSize) { assert getNodeEnd(node).equal(otherStart) || otherStart.add(otherSize).equal(node.getStart()) : "must be adjacent"; assert UnsignedUtils.isAMultiple(otherSize, getGranularity()); - assert otherSize.belowOrEqual(reservedSpaceSize); + assert otherSize.belowOrEqual(reservedAddressSpaceSize); Pointer newStart = PointerUtils.min(node.getStart(), otherStart); UnsignedWord newSize = node.getSize().add(otherSize); @@ -707,7 +735,7 @@ private void increaseBounds(FreeListNode node, Pointer otherStart, UnsignedWord @Uninterruptible(reason = CALLED_FROM_UNINTERRUPTIBLE_CODE, mayBeInlined = true) protected void trimBounds(FreeListNode fit, Pointer newStart, UnsignedWord newSize) { - assert newSize.belowOrEqual(reservedSpaceSize); + assert newSize.belowOrEqual(reservedAddressSpaceSize); assert fit.getStart().equal(newStart) && newSize.belowThan(fit.getSize()) || fit.getStart().belowThan(newStart) && getNodeEnd(fit).equal(newStart.add(newSize)); @@ -790,7 +818,12 @@ private boolean isInAllocList(FreeListNode node) { @Override public UnsignedWord getReservedAddressSpaceSize() { - return reservedSpaceSize; + return reservedAddressSpaceSize; + } + + @Override + public UnsignedWord getReservedMetaspaceSize() { + return reservedMetaspaceSize; } /** Keeps track of unused memory. */ diff --git a/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/AlignedHeapChunk.java b/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/AlignedHeapChunk.java index a2dbdeda8c2e..0e27ae7c9671 100644 --- a/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/AlignedHeapChunk.java +++ b/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/AlignedHeapChunk.java @@ -24,6 +24,8 @@ */ package com.oracle.svm.core.genscavenge; +import static com.oracle.svm.core.Uninterruptible.CALLED_FROM_UNINTERRUPTIBLE_CODE; + import org.graalvm.nativeimage.c.struct.RawField; import org.graalvm.nativeimage.c.struct.RawStructure; import org.graalvm.word.Pointer; @@ -112,19 +114,21 @@ public static boolean isEmpty(AlignedHeader that) { /** Allocate uninitialized memory within this AlignedHeapChunk. 
*/ @Uninterruptible(reason = "Called from uninterruptible code.", mayBeInlined = true) - static Pointer allocateMemory(AlignedHeader that, UnsignedWord size) { - Pointer result = Word.nullPointer(); + public static Pointer tryAllocateMemory(AlignedHeader that, UnsignedWord size) { UnsignedWord available = HeapChunk.availableObjectMemory(that); - if (size.belowOrEqual(available)) { - result = HeapChunk.getTopPointer(that); - Pointer newTop = result.add(size); - HeapChunk.setTopPointerCarefully(that, newTop); + if (size.aboveThan(available)) { + return Word.nullPointer(); } + + Pointer result = HeapChunk.getTopPointer(that); + Pointer newTop = result.add(size); + HeapChunk.setTopPointerCarefully(that, newTop); return result; } @Uninterruptible(reason = "Called from uninterruptible code.", mayBeInlined = true) public static AlignedHeader getEnclosingChunk(Object obj) { + assert ObjectHeaderImpl.isAlignedObject(obj); Pointer ptr = Word.objectToUntrackedPointer(obj); return getEnclosingChunkFromObjectPointer(ptr); } @@ -144,7 +148,8 @@ public static UnsignedWord getObjectOffset(AlignedHeader that, Pointer objectPoi return objectPointer.subtract(objectsStart); } - static void walkObjects(AlignedHeader that, ObjectVisitor visitor) { + @Uninterruptible(reason = CALLED_FROM_UNINTERRUPTIBLE_CODE, mayBeInlined = true) + public static void walkObjects(AlignedHeader that, ObjectVisitor visitor) { HeapChunk.walkObjectsFrom(that, getObjectsStart(that), visitor); } diff --git a/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/CompactingOldGeneration.java b/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/CompactingOldGeneration.java index 3803769e8341..8522d4e5c069 100644 --- a/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/CompactingOldGeneration.java +++ b/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/CompactingOldGeneration.java @@ -45,6 +45,7 @@ import com.oracle.svm.core.genscavenge.compacting.PlanningVisitor; import com.oracle.svm.core.genscavenge.compacting.RuntimeCodeCacheFixupWalker; import com.oracle.svm.core.genscavenge.compacting.SweepingVisitor; +import com.oracle.svm.core.genscavenge.metaspace.MetaspaceImpl; import com.oracle.svm.core.genscavenge.remset.BrickTable; import com.oracle.svm.core.genscavenge.remset.RememberedSet; import com.oracle.svm.core.graal.RuntimeCompilation; @@ -107,7 +108,7 @@ */ final class CompactingOldGeneration extends OldGeneration { - private final Space space = new Space("Old", "O", false, HeapParameters.getMaxSurvivorSpaces() + 1); + private final Space space = new Space("Old", "O", false, getAge()); private final MarkStack markStack = new MarkStack(); private final GreyObjectsWalker toGreyObjectsWalker = new GreyObjectsWalker(); @@ -141,11 +142,6 @@ void absorb(YoungGeneration youngGen) { } } - @Override - void appendChunk(AlignedHeapChunk.AlignedHeader hdr) { - space.appendAlignedHeapChunk(hdr); - } - @Override @Uninterruptible(reason = "Called from uninterruptible code.", mayBeInlined = true) void blackenDirtyCardRoots(GreyToBlackObjectVisitor visitor, GreyToBlackObjRefVisitor refVisitor) { @@ -178,7 +174,7 @@ boolean scanGreyObjects(boolean incrementalGc) { public Object promoteAlignedObject(Object original, AlignedHeapChunk.AlignedHeader originalChunk, Space originalSpace) { if (!GCImpl.getGCImpl().isCompleteCollection()) { assert originalSpace.isFromSpace(); - return space.copyAlignedObject(original, originalSpace); 
+ return ObjectPromoter.copyAlignedObject(original, originalSpace, space); } assert originalSpace == space; ObjectHeader oh = Heap.getHeap().getObjectHeader(); @@ -195,7 +191,7 @@ public Object promoteAlignedObject(Object original, AlignedHeapChunk.AlignedHead * change during compaction, so we must add a field to store it, which increases the * object's size. The easiest way to handle this is to copy the object. */ - result = space.copyAlignedObject(original, originalSpace); + result = ObjectPromoter.copyAlignedObject(original, originalSpace, space); assert !ObjectHeaderImpl.hasIdentityHashFromAddressInline(oh.readHeaderFromObject(result)); } ObjectHeaderImpl.setMarked(result); @@ -209,7 +205,7 @@ public Object promoteAlignedObject(Object original, AlignedHeapChunk.AlignedHead protected Object promoteUnalignedObject(Object original, UnalignedHeapChunk.UnalignedHeader originalChunk, Space originalSpace) { if (!GCImpl.getGCImpl().isCompleteCollection()) { assert originalSpace.isFromSpace(); - space.promoteUnalignedHeapChunk(originalChunk, originalSpace); + ObjectPromoter.promoteUnalignedHeapChunk(originalChunk, originalSpace, space); return original; } assert originalSpace == space; @@ -226,9 +222,9 @@ protected boolean promotePinnedObject(Object obj, HeapChunk.Header originalCh if (!GCImpl.getGCImpl().isCompleteCollection()) { assert originalSpace != space && originalSpace.isFromSpace(); if (isAligned) { - space.promoteAlignedHeapChunk((AlignedHeapChunk.AlignedHeader) originalChunk, originalSpace); + ObjectPromoter.promoteAlignedHeapChunk((AlignedHeapChunk.AlignedHeader) originalChunk, originalSpace, space); } else { - space.promoteUnalignedHeapChunk((UnalignedHeapChunk.UnalignedHeader) originalChunk, originalSpace); + ObjectPromoter.promoteUnalignedHeapChunk((UnalignedHeapChunk.UnalignedHeader) originalChunk, originalSpace, space); } return true; } @@ -309,6 +305,13 @@ private void fixupReferencesBeforeCompaction(ChunkReleaser chunkReleaser, Timers oldFixupImageHeapTimer.stop(); } + Timer oldFixupMetaspaceTimer = timers.oldFixupMetaspace.start(); + try { + fixupMetaspace(); + } finally { + oldFixupMetaspaceTimer.stop(); + } + Timer oldFixupThreadLocalsTimer = timers.oldFixupThreadLocals.start(); try { for (IsolateThread isolateThread = VMThreads.firstThread(); isolateThread.isNonNull(); isolateThread = VMThreads.nextThread(isolateThread)) { @@ -356,6 +359,20 @@ private void fixupImageHeapRoots(ImageHeapInfo info) { } } + @Uninterruptible(reason = "Avoid unnecessary safepoint checks in GC for performance.") + private void fixupMetaspace() { + if (!MetaspaceImpl.isSupported()) { + return; + } + + if (SerialGCOptions.useRememberedSet()) { + /* Cards have been cleaned and roots re-marked during the initial scan. 
*/ + MetaspaceImpl.singleton().walkDirtyObjects(fixupVisitor, refFixupVisitor, false); + } else { + MetaspaceImpl.singleton().walkObjects(fixupVisitor); + } + } + @Uninterruptible(reason = "Avoid unnecessary safepoint checks in GC for performance.") private void fixupUnalignedChunkReferences(ChunkReleaser chunkReleaser) { UnalignedHeapChunk.UnalignedHeader uChunk = space.getFirstUnalignedHeapChunk(); diff --git a/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/CopyingOldGeneration.java b/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/CopyingOldGeneration.java index fe45dc236f35..2805217831d7 100644 --- a/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/CopyingOldGeneration.java +++ b/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/CopyingOldGeneration.java @@ -52,9 +52,8 @@ final class CopyingOldGeneration extends OldGeneration { @Platforms(Platform.HOSTED_ONLY.class) CopyingOldGeneration(String name) { super(name); - int age = HeapParameters.getMaxSurvivorSpaces() + 1; - this.fromSpace = new Space("Old", "O", false, age); - this.toSpace = new Space("Old To", "O", true, age); + this.fromSpace = new Space("Old", "O", false, getAge()); + this.toSpace = new Space("Old To", "O", true, getAge()); } @Override @@ -76,7 +75,7 @@ public void walkObjects(ObjectVisitor visitor) { @Override public Object promoteAlignedObject(Object original, AlignedHeapChunk.AlignedHeader originalChunk, Space originalSpace) { assert originalSpace.isFromSpace(); - return getToSpace().copyAlignedObject(original, originalSpace); + return ObjectPromoter.copyAlignedObject(original, originalSpace, getToSpace()); } @AlwaysInline("GC performance") @@ -84,7 +83,7 @@ public Object promoteAlignedObject(Object original, AlignedHeapChunk.AlignedHead @Override protected Object promoteUnalignedObject(Object original, UnalignedHeapChunk.UnalignedHeader originalChunk, Space originalSpace) { assert originalSpace.isFromSpace(); - getToSpace().promoteUnalignedHeapChunk(originalChunk, originalSpace); + ObjectPromoter.promoteUnalignedHeapChunk(originalChunk, originalSpace, getToSpace()); return original; } @@ -93,9 +92,9 @@ protected Object promoteUnalignedObject(Object original, UnalignedHeapChunk.Unal protected boolean promotePinnedObject(Object obj, HeapChunk.Header originalChunk, boolean isAligned, Space originalSpace) { assert originalSpace.isFromSpace(); if (isAligned) { - getToSpace().promoteAlignedHeapChunk((AlignedHeapChunk.AlignedHeader) originalChunk, originalSpace); + ObjectPromoter.promoteAlignedHeapChunk((AlignedHeapChunk.AlignedHeader) originalChunk, originalSpace, getToSpace()); } else { - getToSpace().promoteUnalignedHeapChunk((UnalignedHeapChunk.UnalignedHeader) originalChunk, originalSpace); + ObjectPromoter.promoteUnalignedHeapChunk((UnalignedHeapChunk.UnalignedHeader) originalChunk, originalSpace, getToSpace()); } return true; } @@ -146,11 +145,6 @@ Space getToSpace() { return toSpace; } - @Override - void appendChunk(AlignedHeapChunk.AlignedHeader hdr) { - getToSpace().appendAlignedHeapChunk(hdr); - } - @Override void swapSpaces() { assert getFromSpace().isEmpty() : "fromSpace should be empty."; diff --git a/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/GCImpl.java b/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/GCImpl.java index 43bdc8ca0f50..a638857be6f5 100644 --- 
a/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/GCImpl.java +++ b/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/GCImpl.java @@ -64,6 +64,7 @@ import com.oracle.svm.core.genscavenge.HeapAccounting.HeapSizes; import com.oracle.svm.core.genscavenge.HeapChunk.Header; import com.oracle.svm.core.genscavenge.UnalignedHeapChunk.UnalignedHeader; +import com.oracle.svm.core.genscavenge.metaspace.MetaspaceImpl; import com.oracle.svm.core.genscavenge.remset.RememberedSet; import com.oracle.svm.core.graal.RuntimeCompilation; import com.oracle.svm.core.heap.AbstractPinnedObjectSupport; @@ -71,7 +72,6 @@ import com.oracle.svm.core.heap.CodeReferenceMapDecoder; import com.oracle.svm.core.heap.GC; import com.oracle.svm.core.heap.GCCause; -import com.oracle.svm.core.heap.Heap; import com.oracle.svm.core.heap.NoAllocationVerifier; import com.oracle.svm.core.heap.ObjectReferenceVisitor; import com.oracle.svm.core.heap.ObjectVisitor; @@ -658,20 +658,10 @@ private void scanFromRoots() { startTicks = JfrGCEvents.startGCPhasePause(); try { - /* - * Stack references are grey at the beginning of a collection, so I need to blacken - * them. - */ blackenStackRoots(); - - /* Custom memory regions which contain object references. */ - walkThreadLocals(); - - /* - * Native image Objects are grey at the beginning of a collection, so I need to - * blacken them. - */ + blackenThreadLocals(); blackenImageHeapRoots(); + blackenMetaspace(); } finally { JfrGCEvents.emitGCPhasePauseEvent(getCollectionEpoch(), "Scan Roots", startTicks); } @@ -732,21 +722,10 @@ private void scanFromDirtyRoots() { * will be visited by the grey object scanner. */ blackenDirtyCardRoots(); - - /* - * Stack references are grey at the beginning of a collection, so I need to blacken - * them. - */ blackenStackRoots(); - - /* Custom memory regions which contain object references. */ - walkThreadLocals(); - - /* - * Native image Objects are grey at the beginning of a collection, so I need to - * blacken them. - */ + blackenThreadLocals(); blackenDirtyImageHeapRoots(); + blackenMetaspace(); } finally { JfrGCEvents.emitGCPhasePauseEvent(getCollectionEpoch(), "Scan Roots", startTicks); } @@ -884,7 +863,7 @@ private static void walkStack(IsolateThread thread, JavaStackWalk walk, ObjectRe } @Uninterruptible(reason = "Called from uninterruptible code.", mayBeInlined = true) - private void walkThreadLocals() { + private void blackenThreadLocals() { Timer walkThreadLocalsTimer = timers.walkThreadLocals.start(); try { for (IsolateThread isolateThread = VMThreads.firstThread(); isolateThread.isNonNull(); isolateThread = VMThreads.nextThread(isolateThread)) { @@ -987,6 +966,25 @@ private void blackenDirtyCardRoots() { } } + @Uninterruptible(reason = CALLED_FROM_UNINTERRUPTIBLE_CODE, mayBeInlined = true) + private void blackenMetaspace() { + if (!MetaspaceImpl.isSupported()) { + return; + } + + if (SerialGCOptions.useRememberedSet()) { + /* + * If we have a remembered set, only walk the dirty objects. Also, only clean and remark + * cards during complete collections (similar to the writable part of the image heap). 
+ */ + boolean clean = completeCollection; + MetaspaceImpl.singleton().walkDirtyObjects(greyToBlackObjectVisitor, greyToBlackObjRefVisitor, clean); + } else { + /* Scan all metaspace objects */ + MetaspaceImpl.singleton().walkObjects(greyToBlackObjectVisitor); + } + } + @Uninterruptible(reason = "Called from uninterruptible code.", mayBeInlined = true) private static void beginPromotion(boolean isIncremental) { HeapImpl heap = HeapImpl.getHeapImpl(); @@ -1031,7 +1029,7 @@ Object promoteObject(Object original, UnsignedWord header) { Header originalChunk = getChunk(original, isAligned); Space originalSpace = HeapChunk.getSpace(originalChunk); if (originalSpace.isToSpace()) { - assert !SerialGCOptions.useCompactingOldGen() || !completeCollection; + assert !SerialGCOptions.useCompactingOldGen() || !completeCollection || originalSpace.isMetaspace(); return original; } @@ -1070,7 +1068,7 @@ private static Header getChunk(Object obj, boolean isAligned) { @Uninterruptible(reason = "Called from uninterruptible code.", mayBeInlined = true) private void promotePinnedObject(Object pinned) { assert pinned != null; - assert !Heap.getHeap().isInImageHeap(pinned); + assert AbstractPinnedObjectSupport.needsPinning(pinned); assert HeapChunk.getEnclosingHeapChunk(pinned).getPinnedObjectCount() > 0; HeapImpl heap = HeapImpl.getHeapImpl(); diff --git a/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/HeapAllocation.java b/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/HeapAllocation.java index 7e8efcf7aed5..591dc81ad28f 100644 --- a/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/HeapAllocation.java +++ b/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/HeapAllocation.java @@ -185,7 +185,7 @@ private Pointer newAllocChunkAndAllocate(UnsignedWord requestedSize) { AlignedHeader newChunk = requestNewAlignedChunk(); if (newChunk.isNonNull()) { - Pointer result = AlignedHeapChunk.allocateMemory(newChunk, requestedSize); + Pointer result = AlignedHeapChunk.tryAllocateMemory(newChunk, requestedSize); assert result.isNonNull(); HeapChunk.setNext(newChunk, currentChunk); diff --git a/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/HeapChunkLogging.java b/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/HeapChunkLogging.java index 11250c4e4ee5..2f8e5e4b8cfb 100644 --- a/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/HeapChunkLogging.java +++ b/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/HeapChunkLogging.java @@ -35,44 +35,40 @@ class HeapChunkLogging { private static final int MAX_CHUNKS_TO_PRINT = 64 * 1024; public static void logChunks(Log log, AlignedHeapChunk.AlignedHeader firstChunk, String shortSpaceName, boolean isToSpace) { - if (firstChunk.isNonNull()) { - int i = 0; - AlignedHeapChunk.AlignedHeader chunk = firstChunk; - while (chunk.isNonNull() && i < MAX_CHUNKS_TO_PRINT) { - Pointer bottom = AlignedHeapChunk.getObjectsStart(chunk); - Pointer top = HeapChunk.getTopPointer(chunk); - Pointer end = AlignedHeapChunk.getObjectsEnd(chunk); + int i = 0; + AlignedHeapChunk.AlignedHeader chunk = firstChunk; + while (chunk.isNonNull() && i < MAX_CHUNKS_TO_PRINT) { + Pointer bottom = AlignedHeapChunk.getObjectsStart(chunk); + Pointer top = HeapChunk.getTopPointer(chunk); + Pointer end = AlignedHeapChunk.getObjectsEnd(chunk); - 
logChunk(log, chunk, bottom, top, end, true, shortSpaceName, isToSpace); + logChunk(log, chunk, bottom, top, end, true, shortSpaceName, isToSpace); - chunk = HeapChunk.getNext(chunk); - i++; - } - if (chunk.isNonNull()) { - assert i == MAX_CHUNKS_TO_PRINT; - log.newline().string("... (truncated)"); - } + chunk = HeapChunk.getNext(chunk); + i++; + } + if (chunk.isNonNull()) { + assert i == MAX_CHUNKS_TO_PRINT; + log.newline().string("... (truncated)"); } } public static void logChunks(Log log, UnalignedHeapChunk.UnalignedHeader firstChunk, String shortSpaceName, boolean isToSpace) { - if (firstChunk.isNonNull()) { - int i = 0; - UnalignedHeapChunk.UnalignedHeader chunk = firstChunk; - while (chunk.isNonNull() && i < MAX_CHUNKS_TO_PRINT) { - Pointer bottom = UnalignedHeapChunk.getObjectStart(chunk); - Pointer top = HeapChunk.getTopPointer(chunk); - Pointer end = UnalignedHeapChunk.getObjectEnd(chunk); + int i = 0; + UnalignedHeapChunk.UnalignedHeader chunk = firstChunk; + while (chunk.isNonNull() && i < MAX_CHUNKS_TO_PRINT) { + Pointer bottom = UnalignedHeapChunk.getObjectStart(chunk); + Pointer top = HeapChunk.getTopPointer(chunk); + Pointer end = UnalignedHeapChunk.getObjectEnd(chunk); - logChunk(log, chunk, bottom, top, end, false, shortSpaceName, isToSpace); + logChunk(log, chunk, bottom, top, end, false, shortSpaceName, isToSpace); - chunk = HeapChunk.getNext(chunk); - i++; - } - if (chunk.isNonNull()) { - assert i == MAX_CHUNKS_TO_PRINT; - log.newline().string("... (truncated)"); - } + chunk = HeapChunk.getNext(chunk); + i++; + } + if (chunk.isNonNull()) { + assert i == MAX_CHUNKS_TO_PRINT; + log.newline().string("... (truncated)"); } } diff --git a/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/HeapImpl.java b/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/HeapImpl.java index 987fc9e56321..4507a339cf19 100644 --- a/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/HeapImpl.java +++ b/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/HeapImpl.java @@ -49,8 +49,7 @@ import com.oracle.svm.core.Uninterruptible; import com.oracle.svm.core.annotate.Substitute; import com.oracle.svm.core.annotate.TargetClass; -import com.oracle.svm.core.genscavenge.AlignedHeapChunk.AlignedHeader; -import com.oracle.svm.core.genscavenge.graal.nodes.FormatArrayNode; +import com.oracle.svm.core.genscavenge.metaspace.MetaspaceImpl; import com.oracle.svm.core.genscavenge.remset.RememberedSet; import com.oracle.svm.core.graal.snippets.SubstrateAllocationSnippets; import com.oracle.svm.core.heap.GC; @@ -64,9 +63,7 @@ import com.oracle.svm.core.heap.ReferenceInternals; import com.oracle.svm.core.heap.RestrictHeapAccess; import com.oracle.svm.core.heap.RuntimeCodeInfoGCSupport; -import com.oracle.svm.core.heap.VMOperationInfos; import com.oracle.svm.core.hub.DynamicHub; -import com.oracle.svm.core.hub.LayoutEncoding; import com.oracle.svm.core.imagelayer.ImageLayerBuildingSupport; import com.oracle.svm.core.jfr.JfrTicks; import com.oracle.svm.core.jfr.events.SystemGCEvent; @@ -74,12 +71,12 @@ import com.oracle.svm.core.locks.VMCondition; import com.oracle.svm.core.locks.VMMutex; import com.oracle.svm.core.log.Log; +import com.oracle.svm.core.metaspace.Metaspace; import com.oracle.svm.core.nodes.CFunctionEpilogueNode; import com.oracle.svm.core.nodes.CFunctionPrologueNode; import com.oracle.svm.core.option.RuntimeOptionKey; import com.oracle.svm.core.os.ImageHeapProvider; import 
com.oracle.svm.core.snippets.KnownIntrinsics; -import com.oracle.svm.core.thread.JavaVMOperation; import com.oracle.svm.core.thread.PlatformThreads; import com.oracle.svm.core.thread.ThreadStatus; import com.oracle.svm.core.thread.VMOperation; @@ -94,7 +91,6 @@ import jdk.graal.compiler.core.common.NumUtil; import jdk.graal.compiler.core.common.SuppressFBWarnings; import jdk.graal.compiler.nodes.extended.MembarNode; -import jdk.graal.compiler.replacements.AllocationSnippets; import jdk.graal.compiler.word.Word; public final class HeapImpl extends Heap { @@ -111,8 +107,6 @@ public final class HeapImpl extends Heap { private final RuntimeCodeInfoGCSupportImpl runtimeCodeInfoGcSupport; private final HeapAccounting accounting = new HeapAccounting(); - private AlignedHeader lastDynamicHubChunk; - /** Head of the linked list of currently pending (ready to be enqueued) {@link Reference}s. */ private Reference refPendingList; /** Total number of times when a new pending reference list became available. */ @@ -216,6 +210,10 @@ public boolean tearDown() { youngGeneration.tearDown(); oldGeneration.tearDown(); getChunkProvider().tearDown(); + + if (MetaspaceImpl.isSupported()) { + MetaspaceImpl.singleton().tearDown(); + } return true; } @@ -276,11 +274,17 @@ public OldGeneration getOldGeneration() { } void logUsage(Log log) { + if (MetaspaceImpl.isSupported()) { + MetaspaceImpl.singleton().logUsage(log); + } youngGeneration.logUsage(log); oldGeneration.logUsage(log); } void logChunks(Log log, boolean allowUnsafe) { + if (MetaspaceImpl.isSupported()) { + MetaspaceImpl.singleton().logChunks(log); + } getYoungGeneration().logChunks(log, allowUnsafe); getOldGeneration().logChunks(log); getChunkProvider().logFreeChunks(log); @@ -799,6 +803,11 @@ private boolean printLocationInfo(Log log, Pointer ptr, boolean allowJavaHeapAcc } if (allowUnsafeOperations || VMOperation.isInProgressAtSafepoint()) { + /* Accessing the metaspace is unsafe due to possible concurrent modifications. */ + if (MetaspaceImpl.isSupported() && MetaspaceImpl.singleton().printLocationInfo(log, ptr)) { + return true; + } + /* * If we are not at a safepoint, then it is unsafe to access thread locals of another * thread as the IsolateThread could be freed at any time. 
@@ -818,8 +827,8 @@ private boolean printLocationInfo(Log log, Pointer ptr, boolean allowJavaHeapAcc return false; } - boolean isInHeap(Pointer ptr) { - return isInImageHeap(ptr) || youngGeneration.isInSpace(ptr) || oldGeneration.isInSpace(ptr); + boolean isInHeapSlow(Pointer ptr) { + return isInImageHeap(ptr) || youngGeneration.isInSpace(ptr) || oldGeneration.isInSpace(ptr) || Metaspace.singleton().isInAllocatedMemory(ptr); } @Override @@ -956,83 +965,6 @@ public void printDiagnostics(Log log, ErrorContext context, int maxDiagnosticLev log.indent(false); } } - - public static DynamicHub allocateDynamicHub(int vTableSlots) { - AllocateDynamicHubOp vmOp = new AllocateDynamicHubOp(vTableSlots); - vmOp.enqueue(); - return vmOp.result; - } - - private static class AllocateDynamicHubOp extends JavaVMOperation { - int vTableSlots; - DynamicHub result; - - AllocateDynamicHubOp(int vTableSlots) { - super(VMOperationInfos.get(AllocateDynamicHubOp.class, "Allocate DynamicHub", SystemEffect.SAFEPOINT)); - this.vTableSlots = vTableSlots; - } - - @Override - @Uninterruptible(reason = "Called from uninterruptible code.", mayBeInlined = true) - public boolean isGC() { - /* needs to append chunks into oldGen */ - return true; - } - - @Override - protected void operate() { - DynamicHub hubOfDynamicHub = DynamicHub.fromClass(Class.class); - /* - * Note that layoutEncoding already encodes the size of a DynamicHub and it is aware of - * its hybrid nature, including the size required for a VTable slot. - * - * Also note that inlined fields like `closedTypeWorldTypeCheckSlots` are not relevant - * here, as they are not available in the open type world configuration. - */ - UnsignedWord size = LayoutEncoding.getArrayAllocationSize(hubOfDynamicHub.getLayoutEncoding(), vTableSlots); - - Pointer memory = Word.nullPointer(); - if (getHeapImpl().lastDynamicHubChunk.isNonNull()) { - /* - * GR-57355: move this fast-path out of vmOp. Needs some locking (it's not - * thread-local) - */ - memory = AlignedHeapChunk.allocateMemory(getHeapImpl().lastDynamicHubChunk, size); - } - - if (memory.isNull()) { - /* Either no storage for DynamicHubs yet or we are out of memory */ - allocateNewDynamicHubChunk(); - - memory = AlignedHeapChunk.allocateMemory(getHeapImpl().lastDynamicHubChunk, size); - } - - VMError.guarantee(memory.isNonNull(), "failed to allocate DynamicHub"); - - /* DynamicHubs live allocated on aligned heap chunks */ - boolean unaligned = false; - result = (DynamicHub) FormatArrayNode.formatArray(memory, DynamicHub.class, vTableSlots, true, unaligned, AllocationSnippets.FillContent.WITH_ZEROES, true); - } - - private static void allocateNewDynamicHubChunk() { - /* - * GR-60085: Should be a dedicated generation. Make sure that those chunks are close to - * the heap base. The hub is stored as offset relative to the heap base. There are 5 - * status bits in the header and in addition, compressed references use a three-bit - * shift that word-aligns objects. This results in a 35-bit address range of 32 GB, of - * which DynamicHubs must reside in the lowest 1 GB. - */ - OldGeneration oldGeneration = getHeapImpl().getOldGeneration(); - - /* - * GR-60085: DynamicHub objects must never be be moved. Pin them either by (1) pinning - * each DynamicHub, or (2) mark the whole chunk as pinned (not supported yet). 
- */ - getHeapImpl().lastDynamicHubChunk = oldGeneration.requestAlignedChunk(); - - oldGeneration.appendChunk(getHeapImpl().lastDynamicHubChunk); - } - } } @TargetClass(value = java.lang.Runtime.class, onlyWith = UseSerialOrEpsilonGC.class) diff --git a/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/HeapParameters.java b/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/HeapParameters.java index 52a9e376aa74..7f20d385982f 100644 --- a/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/HeapParameters.java +++ b/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/HeapParameters.java @@ -132,7 +132,7 @@ public static UnsignedWord getAlignedHeapChunkSize() { } @Fold - static UnsignedWord getAlignedHeapChunkAlignment() { + public static UnsignedWord getAlignedHeapChunkAlignment() { return getAlignedHeapChunkSize(); } diff --git a/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/HeapVerifier.java b/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/HeapVerifier.java index 24c3b3f26f92..d716e0b91f46 100644 --- a/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/HeapVerifier.java +++ b/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/HeapVerifier.java @@ -35,6 +35,7 @@ import com.oracle.svm.core.genscavenge.AlignedHeapChunk.AlignedHeader; import com.oracle.svm.core.genscavenge.StackVerifier.VerifyFrameReferencesVisitor; import com.oracle.svm.core.genscavenge.UnalignedHeapChunk.UnalignedHeader; +import com.oracle.svm.core.genscavenge.metaspace.MetaspaceImpl; import com.oracle.svm.core.genscavenge.remset.RememberedSet; import com.oracle.svm.core.heap.Heap; import com.oracle.svm.core.heap.ObjectHeader; @@ -66,6 +67,7 @@ public static HeapVerifier singleton() { public boolean verify(Occasion occasion) { boolean success = true; success &= verifyImageHeap(); + success &= verifyMetaspace(); success &= verifyYoungGeneration(occasion); success &= verifyOldGeneration(); success &= verifyRememberedSets(); @@ -81,6 +83,13 @@ protected boolean verifyImageHeap() { return success; } + private static boolean verifyMetaspace() { + if (!MetaspaceImpl.isSupported()) { + return true; + } + return MetaspaceImpl.singleton().verify(); + } + private static boolean verifyYoungGeneration(Occasion occasion) { boolean success = true; YoungGeneration youngGeneration = HeapImpl.getHeapImpl().getYoungGeneration(); @@ -135,18 +144,22 @@ private static boolean verifyRememberedSets() { success &= rememberedSet.verify(info.getFirstWritableUnalignedChunk(), info.getLastWritableUnalignedChunk()); } + if (MetaspaceImpl.isSupported()) { + success &= MetaspaceImpl.singleton().verifyRememberedSets(); + } + success &= HeapImpl.getHeapImpl().getOldGeneration().verifyRememberedSets(); return success; } - static boolean verifyRememberedSet(Space space) { + public static boolean verifyRememberedSet(Space space) { boolean success = true; success &= RememberedSet.get().verify(space.getFirstAlignedHeapChunk()); success &= RememberedSet.get().verify(space.getFirstUnalignedHeapChunk()); return success; } - static boolean verifySpace(Space space) { + public static boolean verifySpace(Space space) { boolean success = true; success &= verifyChunkList(space, "aligned", space.getFirstAlignedHeapChunk(), space.getLastAlignedHeapChunk()); success &= verifyChunkList(space, "unaligned", 
space.getFirstUnalignedHeapChunk(), space.getLastUnalignedHeapChunk()); @@ -349,7 +362,7 @@ private static boolean verifyReference(Object parentObject, Pointer reference, P return true; } - if (SerialGCOptions.VerifyReferencesPointIntoValidChunk.getValue() && !HeapImpl.getHeapImpl().isInHeap(referencedObject)) { + if (SerialGCOptions.VerifyReferencesPointIntoValidChunk.getValue() && !HeapImpl.getHeapImpl().isInHeapSlow(referencedObject)) { Log.log().string("Object reference at ").zhex(reference).string(" points outside the Java heap: ").zhex(referencedObject).string(". "); printParent(parentObject); return false; diff --git a/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/ObjectHeaderImpl.java b/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/ObjectHeaderImpl.java index 3c645750ccf1..eb2167e7c84c 100644 --- a/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/ObjectHeaderImpl.java +++ b/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/ObjectHeaderImpl.java @@ -24,6 +24,8 @@ */ package com.oracle.svm.core.genscavenge; +import static com.oracle.svm.core.Uninterruptible.CALLED_FROM_UNINTERRUPTIBLE_CODE; + import org.graalvm.nativeimage.Platform; import org.graalvm.nativeimage.Platforms; import org.graalvm.word.Pointer; @@ -345,12 +347,13 @@ public long encodeHubPointerForImageHeap(ImageHeapObject obj, long hubOffsetFrom } @Override - public void verifyDynamicHubOffsetInImageHeap(long offsetFromHeapBase) { + @Uninterruptible(reason = CALLED_FROM_UNINTERRUPTIBLE_CODE, mayBeInlined = true) + public void verifyDynamicHubOffset(long offsetFromHeapBase) { long referenceSizeMask = getReferenceSize() == Integer.BYTES ? 0xFFFF_FFFFL : -1L; long encoded = (offsetFromHeapBase << numReservedExtraHubBits) & referenceSizeMask; boolean shiftLosesInformation = (encoded >>> numReservedExtraHubBits != offsetFromHeapBase); if (shiftLosesInformation) { - throw VMError.shouldNotReachHere("Hub is too far from heap base for encoding in object header: " + offsetFromHeapBase); + throw VMError.shouldNotReachHere("Hub is too far from heap base for encoding in object header"); } } diff --git a/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/ObjectPromoter.java b/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/ObjectPromoter.java new file mode 100644 index 000000000000..5dc1cb66ac96 --- /dev/null +++ b/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/ObjectPromoter.java @@ -0,0 +1,201 @@ +/* + * Copyright (c) 2013, 2025, Oracle and/or its affiliates. All rights reserved. + * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. + * + * This code is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License version 2 only, as + * published by the Free Software Foundation. Oracle designates this + * particular file as subject to the "Classpath" exception as provided + * by Oracle in the LICENSE file that accompanied this code. + * + * This code is distributed in the hope that it will be useful, but WITHOUT + * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or + * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License + * version 2 for more details (a copy is included in the LICENSE file that + * accompanied this code). 
+ * + * You should have received a copy of the GNU General Public License version + * 2 along with this work; if not, write to the Free Software Foundation, + * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. + * + * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA + * or visit www.oracle.com if you need additional information or have any + * questions. + */ +package com.oracle.svm.core.genscavenge; + +import static com.oracle.svm.core.Uninterruptible.CALLED_FROM_UNINTERRUPTIBLE_CODE; +import static jdk.graal.compiler.nodes.extended.BranchProbabilityNode.SLOW_PATH_PROBABILITY; +import static jdk.graal.compiler.nodes.extended.BranchProbabilityNode.VERY_SLOW_PATH_PROBABILITY; +import static jdk.graal.compiler.nodes.extended.BranchProbabilityNode.probability; + +import org.graalvm.word.Pointer; +import org.graalvm.word.UnsignedWord; + +import com.oracle.svm.core.AlwaysInline; +import com.oracle.svm.core.Uninterruptible; +import com.oracle.svm.core.UnmanagedMemoryUtil; +import com.oracle.svm.core.config.ConfigurationValues; +import com.oracle.svm.core.genscavenge.remset.RememberedSet; +import com.oracle.svm.core.heap.Heap; +import com.oracle.svm.core.heap.ObjectHeader; +import com.oracle.svm.core.hub.LayoutEncoding; +import com.oracle.svm.core.identityhashcode.IdentityHashCodeSupport; +import com.oracle.svm.core.thread.VMOperation; + +import jdk.graal.compiler.word.ObjectAccess; +import jdk.graal.compiler.word.Word; + +/** Promotes individual objects or whole heap chunks to a target {@link Space}. */ +public class ObjectPromoter { + /** Promote an aligned Object to the target Space. */ + @AlwaysInline("GC performance") + @Uninterruptible(reason = CALLED_FROM_UNINTERRUPTIBLE_CODE, mayBeInlined = true) + static Object copyAlignedObject(Object original, Space originalSpace, Space targetSpace) { + assert ObjectHeaderImpl.isAlignedObject(original); + assert originalSpace.isFromSpace() || (originalSpace == targetSpace && targetSpace.isCompactingOldSpace()); + + Object copy = copyAlignedObject(original, targetSpace); + if (copy != null) { + ObjectHeaderImpl.getObjectHeaderImpl().installForwardingPointer(original, copy); + } + return copy; + } + + @AlwaysInline("GC performance") + @Uninterruptible(reason = CALLED_FROM_UNINTERRUPTIBLE_CODE, mayBeInlined = true) + private static Object copyAlignedObject(Object originalObj, Space targetSpace) { + assert VMOperation.isGCInProgress(); + assert ObjectHeaderImpl.isAlignedObject(originalObj); + + UnsignedWord originalSize = LayoutEncoding.getSizeFromObjectInlineInGC(originalObj, false); + UnsignedWord copySize = originalSize; + boolean addIdentityHashField = false; + if (ConfigurationValues.getObjectLayout().isIdentityHashFieldOptional()) { + ObjectHeader oh = Heap.getHeap().getObjectHeader(); + Word header = oh.readHeaderFromObject(originalObj); + if (probability(SLOW_PATH_PROBABILITY, ObjectHeaderImpl.hasIdentityHashFromAddressInline(header))) { + addIdentityHashField = true; + copySize = LayoutEncoding.getSizeFromObjectInlineInGC(originalObj, true); + assert copySize.aboveOrEqual(originalSize); + } + } + + Pointer copyMemory = allocateMemory(copySize, targetSpace); + if (probability(VERY_SLOW_PATH_PROBABILITY, copyMemory.isNull())) { + return null; + } + + /* + * This does a direct memory copy, without regard to whether the copied data contains object + * references. 
That's okay, because all references in the copy are visited and overwritten + * later on anyways (the card table is also updated at that point if necessary). + */ + Pointer originalMemory = Word.objectToUntrackedPointer(originalObj); + UnmanagedMemoryUtil.copyLongsForward(originalMemory, copyMemory, originalSize); + + Object copy = copyMemory.toObjectNonNull(); + if (probability(SLOW_PATH_PROBABILITY, addIdentityHashField)) { + // Must do first: ensures correct object size below and in other places + int value = IdentityHashCodeSupport.computeHashCodeFromAddress(originalObj); + int offset = LayoutEncoding.getIdentityHashOffset(copy); + ObjectAccess.writeInt(copy, offset, value, IdentityHashCodeSupport.IDENTITY_HASHCODE_LOCATION); + ObjectHeaderImpl.getObjectHeaderImpl().setIdentityHashInField(copy); + } + if (targetSpace.isOldSpace()) { + if (SerialGCOptions.useCompactingOldGen() && GCImpl.getGCImpl().isCompleteCollection()) { + /* + * In a compacting complete collection, the remembered set bit is set already during + * marking and the first object table is built later during fix-up. + */ + } else { + /* + * If an object was copied to the old generation, its remembered set bit must be set + * and the first object table must be updated (even when copying from old to old). + */ + AlignedHeapChunk.AlignedHeader copyChunk = AlignedHeapChunk.getEnclosingChunk(copy); + RememberedSet.get().enableRememberedSetForObject(copyChunk, copy, copySize); + } + } + return copy; + } + + /** Promote an AlignedHeapChunk by moving it to the target space. */ + @Uninterruptible(reason = CALLED_FROM_UNINTERRUPTIBLE_CODE, mayBeInlined = true) + static void promoteAlignedHeapChunk(AlignedHeapChunk.AlignedHeader chunk, Space originalSpace, Space targetSpace) { + assert targetSpace != originalSpace && originalSpace.isFromSpace(); + + originalSpace.extractAlignedHeapChunk(chunk); + targetSpace.appendAlignedHeapChunk(chunk); + + if (targetSpace.isOldSpace()) { + if (originalSpace.isYoungSpace()) { + RememberedSet.get().enableRememberedSetForChunk(chunk); + } else { + assert originalSpace.isOldSpace(); + RememberedSet.get().clearRememberedSet(chunk); + } + } + } + + /** Promote an UnalignedHeapChunk by moving it to the target Space. */ + @Uninterruptible(reason = CALLED_FROM_UNINTERRUPTIBLE_CODE, mayBeInlined = true) + static void promoteUnalignedHeapChunk(UnalignedHeapChunk.UnalignedHeader chunk, Space originalSpace, Space targetSpace) { + assert targetSpace != originalSpace && originalSpace.isFromSpace(); + + originalSpace.extractUnalignedHeapChunk(chunk); + targetSpace.appendUnalignedHeapChunk(chunk); + + if (targetSpace.isOldSpace()) { + if (originalSpace.isYoungSpace()) { + RememberedSet.get().enableRememberedSetForChunk(chunk); + } else { + assert originalSpace.isOldSpace(); + RememberedSet.get().clearRememberedSet(chunk); + } + } + } + + /** + * Allocate memory from an AlignedHeapChunk in the target Space. + */ + @AlwaysInline("GC performance") + @Uninterruptible(reason = CALLED_FROM_UNINTERRUPTIBLE_CODE, mayBeInlined = true) + private static Pointer allocateMemory(UnsignedWord objectSize, Space targetSpace) { + Pointer result = Word.nullPointer(); + /* Fast-path: try allocating in the last chunk. */ + AlignedHeapChunk.AlignedHeader oldChunk = targetSpace.getLastAlignedHeapChunk(); + if (oldChunk.isNonNull()) { + result = AlignedHeapChunk.tryAllocateMemory(oldChunk, objectSize); + } + if (result.isNonNull()) { + return result; + } + /* Slow-path: try allocating a new chunk for the requested memory. 
*/ + return allocateInNewChunk(objectSize, targetSpace); + } + + @Uninterruptible(reason = CALLED_FROM_UNINTERRUPTIBLE_CODE, mayBeInlined = true) + private static Pointer allocateInNewChunk(UnsignedWord objectSize, Space targetSpace) { + AlignedHeapChunk.AlignedHeader newChunk = requestAlignedHeapChunk(targetSpace); + if (newChunk.isNonNull()) { + return AlignedHeapChunk.tryAllocateMemory(newChunk, objectSize); + } + return Word.nullPointer(); + } + + @Uninterruptible(reason = CALLED_FROM_UNINTERRUPTIBLE_CODE, mayBeInlined = true) + private static AlignedHeapChunk.AlignedHeader requestAlignedHeapChunk(Space targetSpace) { + AlignedHeapChunk.AlignedHeader chunk; + if (targetSpace.isYoungSpace()) { + assert targetSpace.isSurvivorSpace(); + chunk = HeapImpl.getHeapImpl().getYoungGeneration().requestAlignedSurvivorChunk(); + } else { + chunk = HeapImpl.getHeapImpl().getOldGeneration().requestAlignedChunk(); + } + if (chunk.isNonNull()) { + targetSpace.appendAlignedHeapChunk(chunk); + } + return chunk; + } +} diff --git a/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/OldGeneration.java b/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/OldGeneration.java index ee76c1f5e691..f7190cc0f526 100644 --- a/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/OldGeneration.java +++ b/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/OldGeneration.java @@ -24,8 +24,7 @@ */ package com.oracle.svm.core.genscavenge; -import static jdk.graal.compiler.nodes.extended.BranchProbabilityNode.EXTREMELY_SLOW_PATH_PROBABILITY; -import static jdk.graal.compiler.nodes.extended.BranchProbabilityNode.probability; +import static com.oracle.svm.core.Uninterruptible.CALLED_FROM_UNINTERRUPTIBLE_CODE; import org.graalvm.word.Pointer; import org.graalvm.word.UnsignedWord; @@ -35,7 +34,6 @@ import com.oracle.svm.core.genscavenge.remset.RememberedSet; import com.oracle.svm.core.log.Log; import com.oracle.svm.core.thread.VMOperation; -import com.oracle.svm.core.util.VMError; public abstract class OldGeneration extends Generation { OldGeneration(String name) { @@ -68,8 +66,6 @@ public abstract class OldGeneration extends Generation { abstract void logChunks(Log log); - abstract void appendChunk(AlignedHeapChunk.AlignedHeader hdr); - abstract boolean verifyRememberedSets(); abstract boolean verifySpaces(); @@ -81,10 +77,13 @@ public abstract class OldGeneration extends Generation { AlignedHeapChunk.AlignedHeader requestAlignedChunk() { assert VMOperation.isGCInProgress() : "Should only be called from the collector."; AlignedHeapChunk.AlignedHeader chunk = HeapImpl.getChunkProvider().produceAlignedChunk(); - if (probability(EXTREMELY_SLOW_PATH_PROBABILITY, chunk.isNull())) { - throw VMError.shouldNotReachHere("OldGeneration.requestAlignedChunk: failure to allocate aligned chunk"); - } + assert chunk.isNonNull() : "OldGeneration.requestAlignedChunk: failed to allocate aligned chunk"; RememberedSet.get().enableRememberedSetForChunk(chunk); return chunk; } + + @Uninterruptible(reason = CALLED_FROM_UNINTERRUPTIBLE_CODE, mayBeInlined = true) + public static int getAge() { + return HeapParameters.getMaxSurvivorSpaces() + 1; + } } diff --git a/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/Space.java b/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/Space.java index 5c7adca1a575..769a977ce4c5 100644 --- 
a/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/Space.java +++ b/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/Space.java @@ -24,9 +24,7 @@ */ package com.oracle.svm.core.genscavenge; -import static jdk.graal.compiler.nodes.extended.BranchProbabilityNode.SLOW_PATH_PROBABILITY; -import static jdk.graal.compiler.nodes.extended.BranchProbabilityNode.VERY_SLOW_PATH_PROBABILITY; -import static jdk.graal.compiler.nodes.extended.BranchProbabilityNode.probability; +import static com.oracle.svm.core.Uninterruptible.CALLED_FROM_UNINTERRUPTIBLE_CODE; import org.graalvm.nativeimage.IsolateThread; import org.graalvm.nativeimage.Platform; @@ -36,21 +34,14 @@ import com.oracle.svm.core.AlwaysInline; import com.oracle.svm.core.Uninterruptible; -import com.oracle.svm.core.UnmanagedMemoryUtil; -import com.oracle.svm.core.config.ConfigurationValues; import com.oracle.svm.core.genscavenge.GCImpl.ChunkReleaser; -import com.oracle.svm.core.genscavenge.remset.RememberedSet; +import com.oracle.svm.core.genscavenge.metaspace.MetaspaceImpl; import com.oracle.svm.core.graal.snippets.SubstrateAllocationSnippets; -import com.oracle.svm.core.heap.Heap; -import com.oracle.svm.core.heap.ObjectHeader; import com.oracle.svm.core.heap.ObjectVisitor; -import com.oracle.svm.core.hub.LayoutEncoding; -import com.oracle.svm.core.identityhashcode.IdentityHashCodeSupport; import com.oracle.svm.core.log.Log; import com.oracle.svm.core.thread.VMOperation; import com.oracle.svm.core.thread.VMThreads; -import jdk.graal.compiler.word.ObjectAccess; import jdk.graal.compiler.word.Word; /** @@ -78,7 +69,7 @@ public final class Space { * collections so they should not move. */ @Platforms(Platform.HOSTED_ONLY.class) - Space(String name, String shortName, boolean isToSpace, int age) { + public Space(String name, String shortName, boolean isToSpace, int age) { this(name, shortName, isToSpace, age, null); } @@ -108,7 +99,7 @@ public boolean isEmpty() { } @Uninterruptible(reason = "Called from uninterruptible code.", mayBeInlined = true) - void tearDown() { + public void tearDown() { HeapChunkProvider.freeAlignedChunkList(getFirstAlignedHeapChunk()); HeapChunkProvider.freeUnalignedChunkList(getFirstUnalignedHeapChunk()); } @@ -130,7 +121,12 @@ boolean isSurvivorSpace() { @Uninterruptible(reason = "Called from uninterruptible code.", mayBeInlined = true) public boolean isOldSpace() { - return age == (HeapParameters.getMaxSurvivorSpaces() + 1); + return age == OldGeneration.getAge(); + } + + @Uninterruptible(reason = CALLED_FROM_UNINTERRUPTIBLE_CODE, mayBeInlined = true) + public boolean isMetaspace() { + return age == MetaspaceImpl.getAge(); } @AlwaysInline("GC performance.") @@ -146,6 +142,7 @@ int getAge() { @Uninterruptible(reason = "Called from uninterruptible code.", mayBeInlined = true) int getNextAgeForPromotion() { + assert age < OldGeneration.getAge(); return age + 1; } @@ -160,6 +157,7 @@ boolean isToSpace() { return isToSpace; } + @Uninterruptible(reason = CALLED_FROM_UNINTERRUPTIBLE_CODE, mayBeInlined = true) public void walkObjects(ObjectVisitor visitor) { AlignedHeapChunk.AlignedHeader aChunk = getFirstAlignedHeapChunk(); while (aChunk.isNonNull()) { @@ -201,34 +199,6 @@ public void logChunks(Log log) { HeapChunkLogging.logChunks(log, getFirstUnalignedHeapChunk(), shortName, isToSpace()); } - /** - * Allocate memory from an AlignedHeapChunk in this Space. 
- */ - @AlwaysInline("GC performance") - @Uninterruptible(reason = "Called from uninterruptible code.", mayBeInlined = true) - private Pointer allocateMemory(UnsignedWord objectSize) { - Pointer result = Word.nullPointer(); - /* Fast-path: try allocating in the last chunk. */ - AlignedHeapChunk.AlignedHeader oldChunk = getLastAlignedHeapChunk(); - if (oldChunk.isNonNull()) { - result = AlignedHeapChunk.allocateMemory(oldChunk, objectSize); - } - if (result.isNonNull()) { - return result; - } - /* Slow-path: try allocating a new chunk for the requested memory. */ - return allocateInNewChunk(objectSize); - } - - @Uninterruptible(reason = "Called from uninterruptible code.", mayBeInlined = true) - private Pointer allocateInNewChunk(UnsignedWord objectSize) { - AlignedHeapChunk.AlignedHeader newChunk = requestAlignedHeapChunk(); - if (newChunk.isNonNull()) { - return AlignedHeapChunk.allocateMemory(newChunk, objectSize); - } - return Word.nullPointer(); - } - @Uninterruptible(reason = "Called from uninterruptible code.", mayBeInlined = true) public void releaseChunks(ChunkReleaser chunkReleaser) { chunkReleaser.add(firstAlignedHeapChunk); @@ -241,19 +211,22 @@ public void releaseChunks(ChunkReleaser chunkReleaser) { accounting.reset(); } - @Uninterruptible(reason = "Called from uninterruptible code.", mayBeInlined = true) + @Uninterruptible(reason = CALLED_FROM_UNINTERRUPTIBLE_CODE, mayBeInlined = true) void appendAlignedHeapChunk(AlignedHeapChunk.AlignedHeader aChunk) { /* - * This method is used while detaching a thread, so it cannot guarantee that it is inside a - * VMOperation, only that there is some mutual exclusion. + * This method is called either during a GC or when detaching threads. So, it cannot + * guarantee that it is inside a VMOperation, only that there is some mutual exclusion. */ VMThreads.guaranteeOwnsThreadMutex("Trying to append an aligned heap chunk but no mutual exclusion.", true); - appendAlignedHeapChunkUninterruptibly(aChunk); - accounting.noteAlignedHeapChunk(); + appendAlignedHeapChunkUnsafe(aChunk); } - @Uninterruptible(reason = "Must not interact with garbage collections.") - private void appendAlignedHeapChunkUninterruptibly(AlignedHeapChunk.AlignedHeader aChunk) { + /** + * Callers need to ensure that there is some mutual exclusion in place that prevents races + * between threads. If possible, please use {@link #appendAlignedHeapChunk} instead. 
+ */ + @Uninterruptible(reason = "GC must see a consistent state.") + public void appendAlignedHeapChunkUnsafe(AlignedHeapChunk.AlignedHeader aChunk) { AlignedHeapChunk.AlignedHeader oldLast = getLastAlignedHeapChunk(); HeapChunk.setSpace(aChunk, this); HeapChunk.setPrevious(aChunk, oldLast); @@ -265,17 +238,14 @@ private void appendAlignedHeapChunkUninterruptibly(AlignedHeapChunk.AlignedHeade if (getFirstAlignedHeapChunk().isNull()) { setFirstAlignedHeapChunk(aChunk); } + + accounting.noteAlignedHeapChunk(); } - @Uninterruptible(reason = "Called from uninterruptible code.", mayBeInlined = true) + @Uninterruptible(reason = "GC must see a consistent state.") void extractAlignedHeapChunk(AlignedHeapChunk.AlignedHeader aChunk) { assert VMOperation.isGCInProgress() : "Should only be called by the collector."; - extractAlignedHeapChunkUninterruptibly(aChunk); - accounting.unnoteAlignedHeapChunk(); - } - @Uninterruptible(reason = "Must not interact with garbage collections.") - private void extractAlignedHeapChunkUninterruptibly(AlignedHeapChunk.AlignedHeader aChunk) { AlignedHeapChunk.AlignedHeader chunkNext = HeapChunk.getNext(aChunk); AlignedHeapChunk.AlignedHeader chunkPrev = HeapChunk.getPrevious(aChunk); if (chunkPrev.isNonNull()) { @@ -291,21 +261,18 @@ private void extractAlignedHeapChunkUninterruptibly(AlignedHeapChunk.AlignedHead HeapChunk.setNext(aChunk, Word.nullPointer()); HeapChunk.setPrevious(aChunk, Word.nullPointer()); HeapChunk.setSpace(aChunk, null); + + accounting.unnoteAlignedHeapChunk(); } - @Uninterruptible(reason = "Called from uninterruptible code.", mayBeInlined = true) + @Uninterruptible(reason = "GC must see a consistent state.") void appendUnalignedHeapChunk(UnalignedHeapChunk.UnalignedHeader uChunk) { /* * This method is used while detaching a thread, so it cannot guarantee that it is inside a * VMOperation, only that there is some mutual exclusion. 
*/ VMThreads.guaranteeOwnsThreadMutex("Trying to append an unaligned chunk but no mutual exclusion.", true); - appendUnalignedHeapChunkUninterruptibly(uChunk); - accounting.noteUnalignedHeapChunk(uChunk); - } - @Uninterruptible(reason = "Must not interact with garbage collections.") - private void appendUnalignedHeapChunkUninterruptibly(UnalignedHeapChunk.UnalignedHeader uChunk) { UnalignedHeapChunk.UnalignedHeader oldLast = getLastUnalignedHeapChunk(); HeapChunk.setSpace(uChunk, this); HeapChunk.setPrevious(uChunk, oldLast); @@ -317,17 +284,14 @@ private void appendUnalignedHeapChunkUninterruptibly(UnalignedHeapChunk.Unaligne if (getFirstUnalignedHeapChunk().isNull()) { setFirstUnalignedHeapChunk(uChunk); } + + accounting.noteUnalignedHeapChunk(uChunk); } - @Uninterruptible(reason = "Called from uninterruptible code.", mayBeInlined = true) + @Uninterruptible(reason = "GC must see a consistent state.") void extractUnalignedHeapChunk(UnalignedHeapChunk.UnalignedHeader uChunk) { assert VMOperation.isGCInProgress() : "Trying to extract an unaligned chunk but not in a VMOperation."; - extractUnalignedHeapChunkUninterruptibly(uChunk); - accounting.unnoteUnalignedHeapChunk(uChunk); - } - @Uninterruptible(reason = "Must not interact with garbage collections.") - private void extractUnalignedHeapChunkUninterruptibly(UnalignedHeapChunk.UnalignedHeader uChunk) { UnalignedHeapChunk.UnalignedHeader chunkNext = HeapChunk.getNext(uChunk); UnalignedHeapChunk.UnalignedHeader chunkPrev = HeapChunk.getPrevious(uChunk); if (chunkPrev.isNonNull()) { @@ -344,6 +308,8 @@ private void extractUnalignedHeapChunkUninterruptibly(UnalignedHeapChunk.Unalign HeapChunk.setNext(uChunk, Word.nullPointer()); HeapChunk.setPrevious(uChunk, Word.nullPointer()); HeapChunk.setSpace(uChunk, null); + + accounting.unnoteUnalignedHeapChunk(uChunk); } @Uninterruptible(reason = "Called from uninterruptible code.", mayBeInlined = true) @@ -386,129 +352,6 @@ private void setLastUnalignedHeapChunk(UnalignedHeapChunk.UnalignedHeader chunk) lastUnalignedHeapChunk = chunk; } - /** Promote an aligned Object to this Space. 
*/ - @AlwaysInline("GC performance") - @Uninterruptible(reason = "Called from uninterruptible code.", mayBeInlined = true) - Object copyAlignedObject(Object original, Space originalSpace) { - assert ObjectHeaderImpl.isAlignedObject(original); - assert originalSpace.isFromSpace() || (originalSpace == this && isCompactingOldSpace()); - - Object copy = copyAlignedObject(original); - if (copy != null) { - ObjectHeaderImpl.getObjectHeaderImpl().installForwardingPointer(original, copy); - } - return copy; - } - - @AlwaysInline("GC performance") - @Uninterruptible(reason = "Called from uninterruptible code.", mayBeInlined = true) - private Object copyAlignedObject(Object originalObj) { - assert VMOperation.isGCInProgress(); - assert ObjectHeaderImpl.isAlignedObject(originalObj); - - UnsignedWord originalSize = LayoutEncoding.getSizeFromObjectInlineInGC(originalObj, false); - UnsignedWord copySize = originalSize; - boolean addIdentityHashField = false; - if (ConfigurationValues.getObjectLayout().isIdentityHashFieldOptional()) { - ObjectHeader oh = Heap.getHeap().getObjectHeader(); - Word header = oh.readHeaderFromObject(originalObj); - if (probability(SLOW_PATH_PROBABILITY, ObjectHeaderImpl.hasIdentityHashFromAddressInline(header))) { - addIdentityHashField = true; - copySize = LayoutEncoding.getSizeFromObjectInlineInGC(originalObj, true); - assert copySize.aboveOrEqual(originalSize); - } - } - - Pointer copyMemory = allocateMemory(copySize); - if (probability(VERY_SLOW_PATH_PROBABILITY, copyMemory.isNull())) { - return null; - } - - /* - * This does a direct memory copy, without regard to whether the copied data contains object - * references. That's okay, because all references in the copy are visited and overwritten - * later on anyways (the card table is also updated at that point if necessary). - */ - Pointer originalMemory = Word.objectToUntrackedPointer(originalObj); - UnmanagedMemoryUtil.copyLongsForward(originalMemory, copyMemory, originalSize); - - Object copy = copyMemory.toObjectNonNull(); - if (probability(SLOW_PATH_PROBABILITY, addIdentityHashField)) { - // Must do first: ensures correct object size below and in other places - int value = IdentityHashCodeSupport.computeHashCodeFromAddress(originalObj); - int offset = LayoutEncoding.getIdentityHashOffset(copy); - ObjectAccess.writeInt(copy, offset, value, IdentityHashCodeSupport.IDENTITY_HASHCODE_LOCATION); - ObjectHeaderImpl.getObjectHeaderImpl().setIdentityHashInField(copy); - } - if (isOldSpace()) { - if (SerialGCOptions.useCompactingOldGen() && GCImpl.getGCImpl().isCompleteCollection()) { - /* - * In a compacting complete collection, the remembered set bit is set already during - * marking and the first object table is built later during fix-up. - */ - } else { - /* - * If an object was copied to the old generation, its remembered set bit must be set - * and the first object table must be updated (even when copying from old to old). - */ - AlignedHeapChunk.AlignedHeader copyChunk = AlignedHeapChunk.getEnclosingChunk(copy); - RememberedSet.get().enableRememberedSetForObject(copyChunk, copy, copySize); - } - } - return copy; - } - - /** Promote an AlignedHeapChunk by moving it to this space. 
*/ - @Uninterruptible(reason = "Called from uninterruptible code.", mayBeInlined = true) - void promoteAlignedHeapChunk(AlignedHeapChunk.AlignedHeader chunk, Space originalSpace) { - assert this != originalSpace && originalSpace.isFromSpace(); - - originalSpace.extractAlignedHeapChunk(chunk); - appendAlignedHeapChunk(chunk); - - if (this.isOldSpace()) { - if (originalSpace.isYoungSpace()) { - RememberedSet.get().enableRememberedSetForChunk(chunk); - } else { - assert originalSpace.isOldSpace(); - RememberedSet.get().clearRememberedSet(chunk); - } - } - } - - /** Promote an UnalignedHeapChunk by moving it to this Space. */ - @Uninterruptible(reason = "Called from uninterruptible code.", mayBeInlined = true) - void promoteUnalignedHeapChunk(UnalignedHeapChunk.UnalignedHeader chunk, Space originalSpace) { - assert this != originalSpace && originalSpace.isFromSpace(); - - originalSpace.extractUnalignedHeapChunk(chunk); - appendUnalignedHeapChunk(chunk); - - if (this.isOldSpace()) { - if (originalSpace.isYoungSpace()) { - RememberedSet.get().enableRememberedSetForChunk(chunk); - } else { - assert originalSpace.isOldSpace(); - RememberedSet.get().clearRememberedSet(chunk); - } - } - } - - @Uninterruptible(reason = "Called from uninterruptible code.", mayBeInlined = true) - private AlignedHeapChunk.AlignedHeader requestAlignedHeapChunk() { - AlignedHeapChunk.AlignedHeader chunk; - if (isYoungSpace()) { - assert isSurvivorSpace(); - chunk = HeapImpl.getHeapImpl().getYoungGeneration().requestAlignedSurvivorChunk(); - } else { - chunk = HeapImpl.getHeapImpl().getOldGeneration().requestAlignedChunk(); - } - if (chunk.isNonNull()) { - appendAlignedHeapChunk(chunk); - } - return chunk; - } - @Uninterruptible(reason = "Called from uninterruptible code.", mayBeInlined = true) void absorb(Space src) { /* @@ -592,7 +435,8 @@ private UnsignedWord computeUnalignedObjectBytes() { return result; } - boolean contains(Pointer p) { + @Uninterruptible(reason = CALLED_FROM_UNINTERRUPTIBLE_CODE, mayBeInlined = true) + public boolean contains(Pointer p) { AlignedHeapChunk.AlignedHeader aChunk = getFirstAlignedHeapChunk(); while (aChunk.isNonNull()) { Pointer start = AlignedHeapChunk.getObjectsStart(aChunk); diff --git a/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/Timers.java b/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/Timers.java index a9067d8116fd..3a52a4fa5f57 100644 --- a/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/Timers.java +++ b/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/Timers.java @@ -44,6 +44,7 @@ final class Timers { final Timer oldFixup = new Timer("oldFixup"); final Timer oldFixupAlignedChunks = new Timer("oldFixupAlignedChunks"); final Timer oldFixupImageHeap = new Timer("oldFixupImageHeap"); + final Timer oldFixupMetaspace = new Timer("oldFixupMetaspace"); final Timer oldFixupThreadLocals = new Timer("oldFixupThreadLocals"); final Timer oldFixupRuntimeCodeCache = new Timer("oldFixupRuntimeCodeCache"); final Timer oldFixupStack = new Timer("oldFixupStack"); @@ -77,6 +78,7 @@ void resetAllExceptMutator() { oldFixup.reset(); oldFixupAlignedChunks.reset(); oldFixupImageHeap.reset(); + oldFixupMetaspace.reset(); oldFixupThreadLocals.reset(); oldFixupRuntimeCodeCache.reset(); oldFixupStack.reset(); @@ -111,6 +113,7 @@ void logAfterCollection(Log log) { logOneTimer(log, " ", oldFixup); logOneTimer(log, " ", oldFixupAlignedChunks); logOneTimer(log, " 
", oldFixupImageHeap); + logOneTimer(log, " ", oldFixupMetaspace); logOneTimer(log, " ", oldFixupThreadLocals); logOneTimer(log, " ", oldFixupRuntimeCodeCache); logOneTimer(log, " ", oldFixupStack); diff --git a/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/UnalignedHeapChunk.java b/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/UnalignedHeapChunk.java index cfc23a4594fb..84a8ffd3ae25 100644 --- a/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/UnalignedHeapChunk.java +++ b/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/UnalignedHeapChunk.java @@ -177,6 +177,7 @@ public static UnsignedWord getOffsetForObject(Pointer objPtr) { return RememberedSet.get().getOffsetForObjectInUnalignedChunk(objPtr); } + @Uninterruptible(reason = CALLED_FROM_UNINTERRUPTIBLE_CODE, mayBeInlined = true) public static void walkObjects(UnalignedHeader that, ObjectVisitor visitor) { HeapChunk.walkObjectsFrom(that, getObjectStart(that), visitor); } diff --git a/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/YoungGeneration.java b/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/YoungGeneration.java index 0a1a47726a67..a552134db7c0 100644 --- a/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/YoungGeneration.java +++ b/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/YoungGeneration.java @@ -274,7 +274,7 @@ protected Object promoteAlignedObject(Object original, AlignedHeapChunk.AlignedH // survivor space. If it does not, we return null here to tell the caller. int age = originalSpace.getNextAgeForPromotion(); Space toSpace = getSurvivorToSpaceAt(age - 1); - return toSpace.copyAlignedObject(original, originalSpace); + return ObjectPromoter.copyAlignedObject(original, originalSpace, toSpace); } @AlwaysInline("GC performance") @@ -289,7 +289,7 @@ protected Object promoteUnalignedObject(Object original, UnalignedHeapChunk.Unal int age = originalSpace.getNextAgeForPromotion(); Space toSpace = getSurvivorToSpaceAt(age - 1); - toSpace.promoteUnalignedHeapChunk(originalChunk, originalSpace); + ObjectPromoter.promoteUnalignedHeapChunk(originalChunk, originalSpace, toSpace); return original; } @@ -305,9 +305,9 @@ protected boolean promotePinnedObject(Object obj, HeapChunk.Header originalCh int age = originalSpace.getNextAgeForPromotion(); Space toSpace = getSurvivorToSpaceAt(age - 1); if (isAligned) { - toSpace.promoteAlignedHeapChunk((AlignedHeapChunk.AlignedHeader) originalChunk, originalSpace); + ObjectPromoter.promoteAlignedHeapChunk((AlignedHeapChunk.AlignedHeader) originalChunk, originalSpace, toSpace); } else { - toSpace.promoteUnalignedHeapChunk((UnalignedHeapChunk.UnalignedHeader) originalChunk, originalSpace); + ObjectPromoter.promoteUnalignedHeapChunk((UnalignedHeapChunk.UnalignedHeader) originalChunk, originalSpace, toSpace); } return true; } diff --git a/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/compacting/ObjectRefFixupVisitor.java b/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/compacting/ObjectRefFixupVisitor.java index 54a63d54f766..d4dfcd9b81e1 100644 --- a/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/compacting/ObjectRefFixupVisitor.java +++ 
b/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/compacting/ObjectRefFixupVisitor.java @@ -36,6 +36,7 @@ import com.oracle.svm.core.genscavenge.ObjectHeaderImpl; import com.oracle.svm.core.heap.ReferenceAccess; import com.oracle.svm.core.heap.UninterruptibleObjectReferenceVisitor; +import com.oracle.svm.core.metaspace.Metaspace; import jdk.graal.compiler.word.Word; @@ -60,7 +61,7 @@ public void visitObjectReferences(Pointer firstObjRef, boolean compressed, int r @Uninterruptible(reason = CALLED_FROM_UNINTERRUPTIBLE_CODE, mayBeInlined = true) private static void visitObjectReference(Pointer objRef, boolean compressed, Object holderObject) { Pointer p = ReferenceAccess.singleton().readObjectAsUntrackedPointer(objRef, compressed); - if (p.isNull() || HeapImpl.getHeapImpl().isInImageHeap(p)) { + if (p.isNull() || HeapImpl.getHeapImpl().isInImageHeap(p) || Metaspace.singleton().isInAddressSpace(p)) { return; } diff --git a/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/graal/GenScavengeAllocationSnippets.java b/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/graal/GenScavengeAllocationSnippets.java index fe0bd294fd84..e1bccedc7750 100644 --- a/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/graal/GenScavengeAllocationSnippets.java +++ b/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/graal/GenScavengeAllocationSnippets.java @@ -129,11 +129,7 @@ public Templates(OptionValues options, Providers providers, SubstrateAllocationS formatObject = snippet(providers, GenScavengeAllocationSnippets.class, "formatObjectSnippet"); formatArray = snippet(providers, GenScavengeAllocationSnippets.class, "formatArraySnippet"); formatStoredContinuation = ContinuationSupport.isSupported() ? snippet(providers, GenScavengeAllocationSnippets.class, "formatStoredContinuation") : null; - formatPod = Pod.RuntimeSupport.isPresent() ? snippet(providers, - GenScavengeAllocationSnippets.class, - "formatPodSnippet", - NamedLocationIdentity.getArrayLocation(JavaKind.Byte)) - : null; + formatPod = Pod.RuntimeSupport.isPresent() ? 
snippet(providers, GenScavengeAllocationSnippets.class, "formatPodSnippet", NamedLocationIdentity.getArrayLocation(JavaKind.Byte)) : null; } public void registerLowering(Map, NodeLoweringProvider> lowerings) { diff --git a/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/graal/GenScavengeAllocationSupport.java b/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/graal/GenScavengeAllocationSupport.java index e28c8f812c72..11c450b4f00e 100644 --- a/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/graal/GenScavengeAllocationSupport.java +++ b/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/graal/GenScavengeAllocationSupport.java @@ -37,7 +37,6 @@ import com.oracle.svm.core.graal.meta.SubstrateForeignCallsProvider; import com.oracle.svm.core.graal.snippets.GCAllocationSupport; import com.oracle.svm.core.heap.Pod; -import com.oracle.svm.core.hub.RuntimeClassLoading; import com.oracle.svm.core.snippets.SnippetRuntime; import com.oracle.svm.core.snippets.SnippetRuntime.SubstrateForeignCallDescriptor; import com.oracle.svm.core.snippets.SubstrateForeignCallTarget; @@ -58,7 +57,6 @@ public class GenScavengeAllocationSupport implements GCAllocationSupport { private static final SubstrateForeignCallDescriptor SLOW_NEW_ARRAY = SnippetRuntime.findForeignCall(GenScavengeAllocationSupport.class, "slowNewArray", NO_SIDE_EFFECT); private static final SubstrateForeignCallDescriptor SLOW_NEW_STORED_CONTINUATION = SnippetRuntime.findForeignCall(GenScavengeAllocationSupport.class, "slowNewStoredContinuation", NO_SIDE_EFFECT); private static final SubstrateForeignCallDescriptor SLOW_NEW_POD_INSTANCE = SnippetRuntime.findForeignCall(GenScavengeAllocationSupport.class, "slowNewPodInstance", NO_SIDE_EFFECT); - private static final SubstrateForeignCallDescriptor NEW_DYNAMICHUB = SnippetRuntime.findForeignCall(GenScavengeAllocationSupport.class, "newDynamicHub", NO_SIDE_EFFECT); private static final SubstrateForeignCallDescriptor[] UNCONDITIONAL_FOREIGN_CALLS = new SubstrateForeignCallDescriptor[]{SLOW_NEW_INSTANCE, SLOW_NEW_ARRAY}; public static void registerForeignCalls(SubstrateForeignCallsProvider foreignCalls) { @@ -69,9 +67,6 @@ public static void registerForeignCalls(SubstrateForeignCallsProvider foreignCal if (Pod.RuntimeSupport.isPresent()) { foreignCalls.register(SLOW_NEW_POD_INSTANCE); } - if (RuntimeClassLoading.isSupported()) { - foreignCalls.register(NEW_DYNAMICHUB); - } } @Override @@ -94,11 +89,6 @@ public ForeignCallDescriptor getNewPodInstanceStub() { return SLOW_NEW_POD_INSTANCE; } - @Override - public SubstrateForeignCallDescriptor getNewDynamicHub() { - return NEW_DYNAMICHUB; - } - @Override public boolean useTLAB() { return SubstrateGCOptions.TlabOptions.UseTLAB.getValue(); @@ -180,11 +170,6 @@ private static Object slowNewStoredContinuation(Word objectHeader, int length) { } } - @SubstrateForeignCallTarget(stubCallingConvention = false) - private static Object newDynamicHub(int vTableSlots) { - return HeapImpl.allocateDynamicHub(vTableSlots); - } - @Uninterruptible(reason = "Switch from uninterruptible to interruptible code.", calleeMustBe = false) private static Object slowNewInstanceInterruptibly(Word objectHeader) { return ThreadLocalAllocation.slowPathNewInstance(objectHeader); diff --git a/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/graal/GenScavengeGCFeature.java 
b/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/graal/GenScavengeGCFeature.java index b73da342d527..79020418b0fb 100644 --- a/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/graal/GenScavengeGCFeature.java +++ b/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/graal/GenScavengeGCFeature.java @@ -48,6 +48,7 @@ import com.oracle.svm.core.genscavenge.TlabOptionCache; import com.oracle.svm.core.genscavenge.jvmstat.EpsilonGCPerfData; import com.oracle.svm.core.genscavenge.jvmstat.SerialGCPerfData; +import com.oracle.svm.core.genscavenge.metaspace.MetaspaceImpl; import com.oracle.svm.core.genscavenge.remset.CardTableBasedRememberedSet; import com.oracle.svm.core.genscavenge.remset.NoRememberedSet; import com.oracle.svm.core.genscavenge.remset.RememberedSet; @@ -59,6 +60,7 @@ import com.oracle.svm.core.heap.AllocationFeature; import com.oracle.svm.core.heap.BarrierSetProvider; import com.oracle.svm.core.heap.Heap; +import com.oracle.svm.core.hub.RuntimeClassLoading; import com.oracle.svm.core.image.ImageHeapLayouter; import com.oracle.svm.core.imagelayer.ImageLayerBuildingSupport; import com.oracle.svm.core.jdk.RuntimeSupportFeature; @@ -67,6 +69,7 @@ import com.oracle.svm.core.jvmstat.PerfDataHolder; import com.oracle.svm.core.jvmstat.PerfManager; import com.oracle.svm.core.layeredimagesingleton.LayeredImageSingletonSupport; +import com.oracle.svm.core.metaspace.Metaspace; import com.oracle.svm.core.os.CommittedMemoryProvider; import com.oracle.svm.core.os.OSCommittedMemoryProvider; @@ -95,6 +98,12 @@ public void afterRegistration(AfterRegistrationAccess access) { GenScavengeMemoryPoolMXBeans memoryPoolMXBeans = new GenScavengeMemoryPoolMXBeans(); ImageSingletons.add(GenScavengeMemoryPoolMXBeans.class, memoryPoolMXBeans); ImageSingletons.add(GCRelatedMXBeans.class, new GenScavengeRelatedMXBeans(memoryPoolMXBeans)); + + if (RuntimeClassLoading.isSupported()) { + MetaspaceImpl metaspace = new MetaspaceImpl(); + ImageSingletons.add(Metaspace.class, metaspace); + ImageSingletons.add(MetaspaceImpl.class, metaspace); + } } @Override diff --git a/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/metaspace/ChunkedMetaspaceMemory.java b/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/metaspace/ChunkedMetaspaceMemory.java new file mode 100644 index 000000000000..b028a11bdc6c --- /dev/null +++ b/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/metaspace/ChunkedMetaspaceMemory.java @@ -0,0 +1,140 @@ +/* + * Copyright (c) 2025, 2025, Oracle and/or its affiliates. All rights reserved. + * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. + * + * This code is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License version 2 only, as + * published by the Free Software Foundation. Oracle designates this + * particular file as subject to the "Classpath" exception as provided + * by Oracle in the LICENSE file that accompanied this code. + * + * This code is distributed in the hope that it will be useful, but WITHOUT + * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or + * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License + * version 2 for more details (a copy is included in the LICENSE file that + * accompanied this code). 
+ * + * You should have received a copy of the GNU General Public License version + * 2 along with this work; if not, write to the Free Software Foundation, + * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. + * + * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA + * or visit www.oracle.com if you need additional information or have any + * questions. + */ +package com.oracle.svm.core.genscavenge.metaspace; + +import static com.oracle.svm.core.genscavenge.HeapChunk.CHUNK_HEADER_TOP_IDENTITY; +import static com.oracle.svm.core.genscavenge.HeapChunk.asPointer; +import static jdk.graal.compiler.nodes.extended.MembarNode.FenceKind.STORE_STORE; + +import org.graalvm.nativeimage.Platform; +import org.graalvm.nativeimage.Platforms; +import org.graalvm.word.Pointer; +import org.graalvm.word.UnsignedWord; + +import com.oracle.svm.core.Uninterruptible; +import com.oracle.svm.core.genscavenge.AlignedHeapChunk; +import com.oracle.svm.core.genscavenge.AlignedHeapChunk.AlignedHeader; +import com.oracle.svm.core.genscavenge.HeapChunk.Header; +import com.oracle.svm.core.genscavenge.HeapParameters; +import com.oracle.svm.core.genscavenge.Space; +import com.oracle.svm.core.genscavenge.remset.RememberedSet; +import com.oracle.svm.core.metaspace.Metaspace; +import com.oracle.svm.core.os.ChunkBasedCommittedMemoryProvider; +import com.oracle.svm.core.thread.JavaSpinLockUtils; +import com.oracle.svm.core.thread.VMOperation; +import com.oracle.svm.core.util.VMError; + +import jdk.graal.compiler.nodes.extended.MembarNode; +import jdk.graal.compiler.word.Word; +import jdk.internal.misc.Unsafe; + +/** Uses {@link AlignedHeapChunk}s to manage the raw {@link Metaspace} memory. */ +class ChunkedMetaspaceMemory { + private static final Unsafe U = Unsafe.getUnsafe(); + private static final long LOCK_OFFSET = U.objectFieldOffset(ChunkedMetaspaceMemory.class, "lock"); + + private final Space space; + + private AlignedHeader currentChunk; + @SuppressWarnings("unused") // + private volatile int lock; + + @Platforms(Platform.HOSTED_ONLY.class) + ChunkedMetaspaceMemory(Space space) { + this.space = space; + } + + @Uninterruptible(reason = "Returns uninitialized memory.", callerMustBe = true) + public Pointer allocate(UnsignedWord size) { + assert !VMOperation.isGCInProgress(); + + AlignedHeader existingChunk = currentChunk; + if (existingChunk.isNonNull()) { + Pointer result = tryAllocateAtomically(existingChunk, size); + if (result.isNonNull()) { + return result; + } + } + return allocateLocked(size, existingChunk); + } + + @Uninterruptible(reason = "Returns uninitialized memory. Acquires a lock without a thread state transition.", callerMustBe = true) + private Pointer allocateLocked(UnsignedWord size, AlignedHeader existingChunk) { + JavaSpinLockUtils.lockNoTransition(this, LOCK_OFFSET); + try { + /* Another thread might have allocated a new chunk in the meanwhile. */ + AlignedHeader curChunk = currentChunk; + if (curChunk != existingChunk) { + Pointer result = tryAllocateAtomically(curChunk, size); + if (result.isNonNull()) { + return result; + } + } + + /* Request a new chunk and allocate memory there. */ + AlignedHeader newChunk = requestNewChunk(); + Pointer result = AlignedHeapChunk.tryAllocateMemory(newChunk, size); + VMError.guarantee(result.isNonNull(), "Metaspace allocation did not fit into aligned chunk"); + + /* Ensures that other threads see a fully initialized chunk. 
*/ + MembarNode.memoryBarrier(STORE_STORE); + currentChunk = newChunk; + return result; + } finally { + JavaSpinLockUtils.unlock(this, LOCK_OFFSET); + } + } + + @Uninterruptible(reason = "Returns uninitialized memory.", callerMustBe = true) + private static Pointer tryAllocateAtomically(AlignedHeader chunk, UnsignedWord size) { + assert chunk.isNonNull(); + do { + UnsignedWord top = chunk.getTopOffset(CHUNK_HEADER_TOP_IDENTITY); + UnsignedWord available = chunk.getEndOffset().subtract(top); + if (available.belowThan(size)) { + return Word.nullPointer(); + } + + UnsignedWord newTop = top.add(size); + if (((Pointer) chunk).logicCompareAndSwapWord(Header.offsetOfTopOffset(), top, newTop, CHUNK_HEADER_TOP_IDENTITY)) { + return asPointer(chunk).add(top); + } + } while (true); + } + + @Uninterruptible(reason = "Prevent GCs.") + private AlignedHeader requestNewChunk() { + assert JavaSpinLockUtils.isLocked(this, LOCK_OFFSET); + + UnsignedWord chunkSize = HeapParameters.getAlignedHeapChunkAlignment(); + AlignedHeader newChunk = (AlignedHeader) ChunkBasedCommittedMemoryProvider.get().allocateMetaspaceChunk(HeapParameters.getAlignedHeapChunkSize(), chunkSize); + assert newChunk.isNonNull(); + + AlignedHeapChunk.initialize(newChunk, chunkSize); + RememberedSet.get().enableRememberedSetForChunk(newChunk); + space.appendAlignedHeapChunkUnsafe(newChunk); + return newChunk; + } +} diff --git a/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/metaspace/MetaspaceImpl.java b/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/metaspace/MetaspaceImpl.java new file mode 100644 index 000000000000..be663e403553 --- /dev/null +++ b/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/metaspace/MetaspaceImpl.java @@ -0,0 +1,153 @@ +/* + * Copyright (c) 2025, 2025, Oracle and/or its affiliates. All rights reserved. + * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. + * + * This code is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License version 2 only, as + * published by the Free Software Foundation. Oracle designates this + * particular file as subject to the "Classpath" exception as provided + * by Oracle in the LICENSE file that accompanied this code. + * + * This code is distributed in the hope that it will be useful, but WITHOUT + * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or + * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License + * version 2 for more details (a copy is included in the LICENSE file that + * accompanied this code). + * + * You should have received a copy of the GNU General Public License version + * 2 along with this work; if not, write to the Free Software Foundation, + * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. + * + * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA + * or visit www.oracle.com if you need additional information or have any + * questions. 
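The allocation scheme in ChunkedMetaspaceMemory above combines a lock-free fast path (a compare-and-swap bump of the current chunk's top offset) with a spin-locked slow path that installs a fresh chunk and publishes it behind a store-store barrier. The following self-contained sketch illustrates the same pattern with plain JDK primitives; it is an illustration of the idea, not the Word/Pointer-based SVM code: all names are invented, AtomicLong stands in for the chunk's top-offset field, ReentrantLock for JavaSpinLockUtils, and the volatile write for the explicit STORE_STORE barrier.

    import java.util.concurrent.atomic.AtomicLong;
    import java.util.concurrent.locks.ReentrantLock;

    final class BumpPointerArena {
        static final class Chunk {
            final long end;             // exclusive end of the chunk's payload
            final AtomicLong top;       // next free position, analogous to the chunk's top offset

            Chunk(long base, long size) {
                this.end = base + size;
                this.top = new AtomicLong(base);
            }
        }

        private final long chunkSize;
        private final ReentrantLock lock = new ReentrantLock();
        private volatile Chunk currentChunk;    // volatile write publishes a fully initialized chunk
        private long nextChunkBase;

        BumpPointerArena(long chunkSize) {
            this.chunkSize = chunkSize;
        }

        /** Fast path: CAS-bump the current chunk; fall back to the locked slow path. */
        long allocate(long size) {
            Chunk existing = currentChunk;
            if (existing != null) {
                long result = tryAllocateAtomically(existing, size);
                if (result >= 0) {
                    return result;
                }
            }
            return allocateLocked(size, existing);
        }

        private static long tryAllocateAtomically(Chunk chunk, long size) {
            while (true) {
                long top = chunk.top.get();
                if (chunk.end - top < size) {
                    return -1;          // does not fit into this chunk
                }
                if (chunk.top.compareAndSet(top, top + size)) {
                    return top;
                }
            }
        }

        private long allocateLocked(long size, Chunk existing) {
            lock.lock();
            try {
                // Another thread might have installed a new chunk in the meanwhile.
                Chunk cur = currentChunk;
                if (cur != existing && cur != null) {
                    long result = tryAllocateAtomically(cur, size);
                    if (result >= 0) {
                        return result;
                    }
                }
                Chunk fresh = new Chunk(nextChunkBase, chunkSize);
                nextChunkBase += chunkSize;
                long result = tryAllocateAtomically(fresh, size);
                assert result >= 0 : "request must fit into an empty chunk";
                currentChunk = fresh;   // publish only after the chunk is fully set up
                return result;
            } finally {
                lock.unlock();
            }
        }
    }

As in the real code, the only ordering that matters is that a new chunk is completely initialized before it becomes reachable through the shared field; losers of the CAS race simply retry or take the locked path.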
+ */ +package com.oracle.svm.core.genscavenge.metaspace; + +import static com.oracle.svm.core.Uninterruptible.CALLED_FROM_UNINTERRUPTIBLE_CODE; + +import org.graalvm.nativeimage.ImageSingletons; +import org.graalvm.nativeimage.Platform; +import org.graalvm.nativeimage.Platforms; +import org.graalvm.word.Pointer; + +import com.oracle.svm.core.Uninterruptible; +import com.oracle.svm.core.genscavenge.HeapVerifier; +import com.oracle.svm.core.genscavenge.OldGeneration; +import com.oracle.svm.core.genscavenge.Space; +import com.oracle.svm.core.genscavenge.remset.FirstObjectTable; +import com.oracle.svm.core.genscavenge.remset.RememberedSet; +import com.oracle.svm.core.heap.UninterruptibleObjectReferenceVisitor; +import com.oracle.svm.core.heap.UninterruptibleObjectVisitor; +import com.oracle.svm.core.hub.DynamicHub; +import com.oracle.svm.core.log.Log; +import com.oracle.svm.core.metaspace.Metaspace; +import com.oracle.svm.core.thread.VMOperation; + +import jdk.graal.compiler.api.replacements.Fold; +import jdk.graal.compiler.word.Word; + +/** + * {@link Metaspace} implementation for serial and epsilon GC. The metaspace uses the same address + * space as the Java heap, but it only consists of aligned heap chunks (see + * {@link ChunkedMetaspaceMemory}). Each chunk needs a {@link RememberedSet} and an up-to-date + * {@link FirstObjectTable}, similar to the writable part of the image heap. The chunks are managed + * in a single "To"-{@link Space}, which ensures that the GC doesn't try to move or promote the + * objects. + */ +public class MetaspaceImpl implements Metaspace { + private final Space space = new Space("Metaspace", "M", true, getAge()); + private final ChunkedMetaspaceMemory memory = new ChunkedMetaspaceMemory(space); + private final MetaspaceObjectAllocator allocator = new MetaspaceObjectAllocator(memory); + + @Platforms(Platform.HOSTED_ONLY.class) + public MetaspaceImpl() { + } + + @Fold + public static boolean isSupported() { + return ImageSingletons.contains(MetaspaceImpl.class); + } + + @Fold + public static MetaspaceImpl singleton() { + return ImageSingletons.lookup(MetaspaceImpl.class); + } + + @Uninterruptible(reason = CALLED_FROM_UNINTERRUPTIBLE_CODE, mayBeInlined = true) + public static int getAge() { + return OldGeneration.getAge() + 1; + } + + @Override + @Uninterruptible(reason = CALLED_FROM_UNINTERRUPTIBLE_CODE, mayBeInlined = true) + public boolean isInAllocatedMemory(Object obj) { + return isInAllocatedMemory(Word.objectToTrackedPointer(obj)); + } + + @Override + @Uninterruptible(reason = CALLED_FROM_UNINTERRUPTIBLE_CODE, mayBeInlined = true) + public boolean isInAllocatedMemory(Pointer ptr) { + return space.contains(ptr); + } + + @Override + @Uninterruptible(reason = CALLED_FROM_UNINTERRUPTIBLE_CODE, mayBeInlined = true) + public boolean isInAddressSpace(Object obj) { + return isInAddressSpace(Word.objectToTrackedPointer(obj)); + } + + @Override + @Uninterruptible(reason = CALLED_FROM_UNINTERRUPTIBLE_CODE, mayBeInlined = true) + public boolean isInAddressSpace(Pointer ptr) { + /* Replace with address range check once GR-60085 is implemented. 
*/ + return isInAllocatedMemory(ptr); + } + + @Override + public DynamicHub allocateDynamicHub(int numVTableEntries) { + return allocator.allocateDynamicHub(numVTableEntries); + } + + @Override + public byte[] allocateByteArray(int length) { + return allocator.allocateByteArray(length); + } + + @Uninterruptible(reason = CALLED_FROM_UNINTERRUPTIBLE_CODE, mayBeInlined = true) + public void walkObjects(UninterruptibleObjectVisitor objectVisitor) { + assert VMOperation.isInProgress() : "prevent other threads from manipulating the metaspace"; + space.walkObjects(objectVisitor); + } + + @Uninterruptible(reason = CALLED_FROM_UNINTERRUPTIBLE_CODE, mayBeInlined = true) + public void walkDirtyObjects(UninterruptibleObjectVisitor objectVisitor, UninterruptibleObjectReferenceVisitor refVisitor, boolean clean) { + assert VMOperation.isInProgress() : "prevent other threads from manipulating the metaspace"; + RememberedSet.get().walkDirtyObjects(space.getFirstAlignedHeapChunk(), space.getFirstUnalignedHeapChunk(), Word.nullPointer(), objectVisitor, refVisitor, clean); + } + + public void logChunks(Log log) { + space.logChunks(log); + } + + public void logUsage(Log log) { + space.logUsage(log, true); + } + + public boolean printLocationInfo(Log log, Pointer ptr) { + return space.printLocationInfo(log, ptr); + } + + public boolean verify() { + return HeapVerifier.verifySpace(space); + } + + public boolean verifyRememberedSets() { + return HeapVerifier.verifyRememberedSet(space); + } + + @Uninterruptible(reason = CALLED_FROM_UNINTERRUPTIBLE_CODE, mayBeInlined = true) + public void tearDown() { + space.tearDown(); + } +} diff --git a/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/metaspace/MetaspaceObjectAllocator.java b/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/metaspace/MetaspaceObjectAllocator.java new file mode 100644 index 000000000000..8f7ce1aafce4 --- /dev/null +++ b/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/metaspace/MetaspaceObjectAllocator.java @@ -0,0 +1,94 @@ +/* + * Copyright (c) 2025, 2025, Oracle and/or its affiliates. All rights reserved. + * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. + * + * This code is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License version 2 only, as + * published by the Free Software Foundation. Oracle designates this + * particular file as subject to the "Classpath" exception as provided + * by Oracle in the LICENSE file that accompanied this code. + * + * This code is distributed in the hope that it will be useful, but WITHOUT + * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or + * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License + * version 2 for more details (a copy is included in the LICENSE file that + * accompanied this code). + * + * You should have received a copy of the GNU General Public License version + * 2 along with this work; if not, write to the Free Software Foundation, + * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. + * + * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA + * or visit www.oracle.com if you need additional information or have any + * questions. 
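For context, MetaspaceImpl's allocation entry points above are intended to be reached through the Metaspace interface rather than called directly. A hypothetical runtime-class-loading caller might look like the following sketch; vTableEntries and encodedMetadataLength are assumed inputs, and only Metaspace.singleton(), allocateDynamicHub, and allocateByteArray come from this change.

    // Hypothetical caller: vTableEntries and encodedMetadataLength are illustrative inputs.
    DynamicHub hub = Metaspace.singleton().allocateDynamicHub(vTableEntries);
    byte[] metadata = Metaspace.singleton().allocateByteArray(encodedMetadataLength);
    // Both objects live in metaspace chunks: the GC neither moves nor reclaims them,
    // and they do not count towards the Java heap size.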
+ */ +package com.oracle.svm.core.genscavenge.metaspace; + +import org.graalvm.nativeimage.Platform; +import org.graalvm.nativeimage.Platforms; +import org.graalvm.word.Pointer; +import org.graalvm.word.UnsignedWord; + +import com.oracle.svm.core.SubstrateOptions; +import com.oracle.svm.core.Uninterruptible; +import com.oracle.svm.core.genscavenge.AlignedHeapChunk; +import com.oracle.svm.core.genscavenge.graal.nodes.FormatArrayNode; +import com.oracle.svm.core.genscavenge.remset.RememberedSet; +import com.oracle.svm.core.graal.meta.KnownOffsets; +import com.oracle.svm.core.heap.Heap; +import com.oracle.svm.core.hub.DynamicHub; +import com.oracle.svm.core.hub.LayoutEncoding; +import com.oracle.svm.core.metaspace.Metaspace; + +import jdk.graal.compiler.replacements.AllocationSnippets; + +/** Allocates Java objects in the {@link Metaspace}. */ +class MetaspaceObjectAllocator { + private final ChunkedMetaspaceMemory memory; + + @Platforms(Platform.HOSTED_ONLY.class) + MetaspaceObjectAllocator(ChunkedMetaspaceMemory memory) { + this.memory = memory; + } + + /** + * {@link DynamicHub}s can be allocated like normal hybrid (and therefore array-like) objects. + * The number of vtable entries is used as the array length. Note that inlined fields like + * {@code closedTypeWorldTypeCheckSlots} are not relevant here, as they are not available in the + * open type world configuration. + */ + public DynamicHub allocateDynamicHub(int vTableEntries) { + assert !SubstrateOptions.useClosedTypeWorldHubLayout(); + + DynamicHub hub = DynamicHub.fromClass(DynamicHub.class); + assert LayoutEncoding.getArrayBaseOffsetAsInt(hub.getLayoutEncoding()) == KnownOffsets.singleton().getVTableBaseOffset(); + + DynamicHub result = (DynamicHub) allocateArrayLikeObject(hub, vTableEntries); + assert Heap.getHeap().getObjectHeader().verifyDynamicHubOffset(result); + return result; + } + + public byte[] allocateByteArray(int length) { + DynamicHub hub = DynamicHub.fromClass(byte[].class); + return (byte[]) allocateArrayLikeObject(hub, length); + } + + @Uninterruptible(reason = "Holds uninitialized memory.") + private Object allocateArrayLikeObject(DynamicHub hub, int arrayLength) { + UnsignedWord size = LayoutEncoding.getArrayAllocationSize(hub.getLayoutEncoding(), arrayLength); + + Pointer ptr = memory.allocate(size); + Object result = FormatArrayNode.formatArray(ptr, DynamicHub.toClass(hub), arrayLength, true, false, AllocationSnippets.FillContent.WITH_ZEROES, true); + assert size == LayoutEncoding.getSizeFromObject(result); + + enableRememberedSetTracking(result, size); + return result; + } + + @Uninterruptible(reason = "Prevent GCs until first object table is updated.") + private static void enableRememberedSetTracking(Object result, UnsignedWord size) { + AlignedHeapChunk.AlignedHeader chunk = AlignedHeapChunk.getEnclosingChunk(result); + /* This updates the first object table as well. 
*/ + RememberedSet.get().enableRememberedSetForObject(chunk, result, size); + } +} diff --git a/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/remset/CardTableBasedRememberedSet.java b/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/remset/CardTableBasedRememberedSet.java index 82f3f81639b6..367f0260745a 100644 --- a/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/remset/CardTableBasedRememberedSet.java +++ b/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/remset/CardTableBasedRememberedSet.java @@ -41,6 +41,7 @@ import com.oracle.svm.core.genscavenge.HeapImpl; import com.oracle.svm.core.genscavenge.HeapParameters; import com.oracle.svm.core.genscavenge.ObjectHeaderImpl; +import com.oracle.svm.core.genscavenge.SerialGCOptions; import com.oracle.svm.core.genscavenge.UnalignedHeapChunk.UnalignedHeader; import com.oracle.svm.core.genscavenge.graal.SubstrateCardTableBarrierSet; import com.oracle.svm.core.heap.Heap; @@ -52,6 +53,7 @@ import com.oracle.svm.core.hub.DynamicHubSupport; import com.oracle.svm.core.hub.HubType; import com.oracle.svm.core.image.ImageHeapObject; +import com.oracle.svm.core.metaspace.Metaspace; import com.oracle.svm.core.snippets.KnownIntrinsics; import com.oracle.svm.core.util.HostedByteBufferPointer; import com.oracle.svm.core.util.VMError; @@ -189,25 +191,10 @@ public void dirtyCardIfNecessary(Object holderObject, Object object, Pointer obj if (holderObject == null || object == null) { return; } - // We dirty the cards of ... - if (HeapParameters.getMaxSurvivorSpaces() != 0 && !GCImpl.getGCImpl().isCompleteCollection() && HeapImpl.getHeapImpl().getYoungGeneration().contains(object)) { - /* - * ...references from the old generation to the young generation, unless there cannot be - * any such references if we do not use survivor spaces, or if we do but are doing a - * complete collection: in both cases, all objects are promoted to the old generation. - * (We avoid an extra old generation check and might remark a few image heap cards, too) - */ - } else if (HeapImpl.usesImageHeapCardMarking() && GCImpl.getGCImpl().isCompleteCollection() && HeapImpl.getHeapImpl().isInImageHeap(holderObject)) { - // ...references from the image heap to the runtime heap, but we clean and remark those - // only during complete collections. - assert !HeapImpl.getHeapImpl().isInImageHeap(object) : "should never be called for references to image heap objects"; - } else { - return; - } - ObjectHeader oh = Heap.getHeap().getObjectHeader(); - UnsignedWord objectHeader = oh.readHeaderFromObject(holderObject); - if (hasRememberedSet(objectHeader)) { + assert !HeapImpl.getHeapImpl().isInImageHeap(object) : "should never be called for references to image heap objects"; + + if (cardNeedsDirtying(holderObject, object)) { if (ObjectHeaderImpl.isAlignedObject(holderObject)) { AlignedChunkRememberedSet.dirtyCardForObject(holderObject, false); } else { @@ -217,6 +204,42 @@ public void dirtyCardIfNecessary(Object holderObject, Object object, Pointer obj } } + @AlwaysInline("GC performance") + @Uninterruptible(reason = CALLED_FROM_UNINTERRUPTIBLE_CODE, mayBeInlined = true) + private boolean cardNeedsDirtying(Object holderObject, Object object) { + assert holderObject != null && object != null; + + if (GCImpl.getGCImpl().isCompleteCollection()) { + /* + * After a full GC, the young generation is empty. So, there would be nothing to do. 
+ * However, we want to keep track of all the references that the image heap or metaspace + * have to the runtime heap (regardless of the runtime heap generation). We clean and + * remark those only during complete collections. + */ + return HeapImpl.usesImageHeapCardMarking() && HeapImpl.getHeapImpl().isInImageHeap(holderObject) || + SerialGCOptions.useRememberedSet() && Metaspace.singleton().isInAddressSpace(holderObject); + } + + /* + * If we don't have any survivor spaces, then there is nothing to do because the young + * generation will be empty after a young GC. + */ + if (HeapParameters.getMaxSurvivorSpaces() == 0) { + return false; + } + + /* + * Dirty the card table for references from the image heap, metaspace, or old generation to + * the young generation. + */ + if (HeapImpl.getHeapImpl().getYoungGeneration().contains(object)) { + ObjectHeader oh = Heap.getHeap().getObjectHeader(); + UnsignedWord objectHeader = oh.readHeaderFromObject(holderObject); + return hasRememberedSet(objectHeader); + } + return false; + } + @Override @Uninterruptible(reason = CALLED_FROM_UNINTERRUPTIBLE_CODE, mayBeInlined = true) public void dirtyAllReferencesIfNecessary(Object obj) { diff --git a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/graal/nodes/SubstrateNewDynamicHubNode.java b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/graal/nodes/SubstrateNewDynamicHubNode.java deleted file mode 100644 index 28b7a4aabff4..000000000000 --- a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/graal/nodes/SubstrateNewDynamicHubNode.java +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Copyright (c) 2024, 2024, Oracle and/or its affiliates. All rights reserved. - * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. - * - * This code is free software; you can redistribute it and/or modify it - * under the terms of the GNU General Public License version 2 only, as - * published by the Free Software Foundation. Oracle designates this - * particular file as subject to the "Classpath" exception as provided - * by Oracle in the LICENSE file that accompanied this code. - * - * This code is distributed in the hope that it will be useful, but WITHOUT - * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or - * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License - * version 2 for more details (a copy is included in the LICENSE file that - * accompanied this code). - * - * You should have received a copy of the GNU General Public License version - * 2 along with this work; if not, write to the Free Software Foundation, - * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. - * - * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA - * or visit www.oracle.com if you need additional information or have any - * questions. - */ -package com.oracle.svm.core.graal.nodes; - -import com.oracle.svm.core.hub.DynamicHub; -import jdk.graal.compiler.core.common.type.StampFactory; -import jdk.graal.compiler.core.common.type.TypeReference; -import jdk.graal.compiler.graph.NodeClass; -import jdk.graal.compiler.nodeinfo.NodeInfo; -import jdk.graal.compiler.nodes.ValueNode; -import jdk.graal.compiler.nodes.java.AbstractNewObjectNode; -import jdk.vm.ci.meta.ResolvedJavaType; - -/** - * The {@link SubstrateNewDynamicHubNode} represents the allocation of a new - * {@link com.oracle.svm.core.hub.DynamicHub} at execution time. 
- */ -@NodeInfo(nameTemplate = "SubstrateNewDynamicHubNode") -public final class SubstrateNewDynamicHubNode extends AbstractNewObjectNode { - public static final NodeClass TYPE = NodeClass.create(SubstrateNewDynamicHubNode.class); - - @Input ValueNode vTableEntries; - - public SubstrateNewDynamicHubNode(ResolvedJavaType dynamicHubType, ValueNode vTableEntries) { - super(TYPE, StampFactory.objectNonNull(TypeReference.createExactTrusted(dynamicHubType)), true, null); - this.vTableEntries = vTableEntries; - } - - public ValueNode getVTableEntries() { - return vTableEntries; - } - - @NodeIntrinsic - public static native DynamicHub allocate(@ConstantNodeParameter Class dynamicHubType, int vTableEntries); -} diff --git a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/graal/snippets/GCAllocationSupport.java b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/graal/snippets/GCAllocationSupport.java index 6ba29e4f40ea..689938ac099b 100644 --- a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/graal/snippets/GCAllocationSupport.java +++ b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/graal/snippets/GCAllocationSupport.java @@ -24,9 +24,10 @@ */ package com.oracle.svm.core.graal.snippets; +import org.graalvm.word.UnsignedWord; + import jdk.graal.compiler.core.common.spi.ForeignCallDescriptor; import jdk.graal.compiler.word.Word; -import org.graalvm.word.UnsignedWord; /** * Used to abstract the GC-specific part of the allocation functionality, e.g., how does the TLAB @@ -41,8 +42,6 @@ public interface GCAllocationSupport { ForeignCallDescriptor getNewPodInstanceStub(); - ForeignCallDescriptor getNewDynamicHub(); - boolean useTLAB(); boolean shouldAllocateInTLAB(UnsignedWord size, boolean isArray); diff --git a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/graal/snippets/SubstrateAllocationSnippets.java b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/graal/snippets/SubstrateAllocationSnippets.java index 30ecda160974..f5c51c40e4ba 100644 --- a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/graal/snippets/SubstrateAllocationSnippets.java +++ b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/graal/snippets/SubstrateAllocationSnippets.java @@ -46,17 +46,14 @@ import com.oracle.svm.core.allocationprofile.AllocationCounter; import com.oracle.svm.core.allocationprofile.AllocationSite; import com.oracle.svm.core.config.ConfigurationValues; -import com.oracle.svm.core.graal.meta.KnownOffsets; import com.oracle.svm.core.graal.meta.SubstrateForeignCallsProvider; import com.oracle.svm.core.graal.nodes.NewPodInstanceNode; import com.oracle.svm.core.graal.nodes.NewStoredContinuationNode; -import com.oracle.svm.core.graal.nodes.SubstrateNewDynamicHubNode; import com.oracle.svm.core.graal.nodes.SubstrateNewHybridInstanceNode; import com.oracle.svm.core.heap.Heap; import com.oracle.svm.core.heap.Pod; import com.oracle.svm.core.hub.DynamicHub; import com.oracle.svm.core.hub.LayoutEncoding; -import com.oracle.svm.core.hub.RuntimeClassLoading; import com.oracle.svm.core.identityhashcode.IdentityHashCodeSupport; import com.oracle.svm.core.meta.SharedType; import com.oracle.svm.core.option.HostedOptionValues; @@ -74,7 +71,6 @@ import jdk.graal.compiler.api.replacements.Snippet.NonNullParameter; import jdk.graal.compiler.api.replacements.Snippet.VarargsParameter; import jdk.graal.compiler.core.common.GraalOptions; -import jdk.graal.compiler.core.common.NumUtil; import jdk.graal.compiler.core.common.spi.ForeignCallDescriptor; 
import jdk.graal.compiler.core.common.spi.MetaAccessExtensionProvider; import jdk.graal.compiler.core.common.type.StampFactory; @@ -262,22 +258,6 @@ public Object allocatePod(@NonNullParameter DynamicHub hub, return piArrayCastToSnippetReplaceeStamp(verifyOop(result), arrayLength); } - @Snippet - public Object allocateDynamicHub(int vTableEntries, - @ConstantParameter int vTableBaseOffset, - @ConstantParameter int log2VTableEntrySize, - @ConstantParameter AllocationProfilingData profilingData) { - profilingData.snippetCounters.stub.inc(); - - // always slow path, because DynamicHubs are allocated into dedicated chunks - Object result = callNewDynamicHub(gcAllocationSupport().getNewDynamicHub(), vTableEntries); - - UnsignedWord allocationSize = arrayAllocationSize(vTableEntries, vTableBaseOffset, log2VTableEntrySize); - profileAllocation(profilingData, allocationSize); - - return piArrayCastToSnippetReplaceeStamp(verifyOop(result), vTableEntries); - } - @Snippet public Object allocateInstanceDynamic(@NonNullParameter DynamicHub hub, @ConstantParameter boolean forceSlowPath, @@ -572,9 +552,6 @@ protected final Object callNewMultiArrayStub(Word objectHeader, int rank, Word d @NodeIntrinsic(value = ForeignCallNode.class) private static native Object callSlowNewStoredContinuation(@ConstantNodeParameter ForeignCallDescriptor descriptor, Word hub, int length); - @NodeIntrinsic(value = ForeignCallNode.class) - private static native Object callNewDynamicHub(@ConstantNodeParameter ForeignCallDescriptor descriptor, int vTableEntries); - @NodeIntrinsic(value = ForeignCallNode.class) private static native Object callNewMultiArray(@ConstantNodeParameter ForeignCallDescriptor descriptor, Word hub, int rank, Word dimensions); @@ -647,7 +624,6 @@ public static class Templates extends SubstrateTemplates { private final SnippetInfo allocateStoredContinuation; private final SnippetInfo allocatePod; - private final SnippetInfo allocateDynamicHub; @SuppressWarnings("this-escape") public Templates(OptionValues options, Providers providers, SubstrateAllocationSnippets receiver) { @@ -728,17 +704,6 @@ public Templates(OptionValues options, Providers providers, SubstrateAllocationS podLocations); } allocatePod = allocatePodSnippet; - - SnippetInfo allocateDynamicHubSnippet = null; - if (RuntimeClassLoading.isSupported()) { - allocateDynamicHubSnippet = snippet(providers, - SubstrateAllocationSnippets.class, - "allocateDynamicHub", - null, - receiver, - ALLOCATION_LOCATIONS); - } - allocateDynamicHub = allocateDynamicHubSnippet; } public void registerLowering(Map, NodeLoweringProvider> lowerings) { @@ -767,9 +732,6 @@ public void registerLowering(Map, NodeLoweringProvider> if (Pod.RuntimeSupport.isPresent()) { lowerings.put(NewPodInstanceNode.class, new NewPodInstanceLowering()); } - if (RuntimeClassLoading.isSupported()) { - lowerings.put(SubstrateNewDynamicHubNode.class, new NewDynamicHubLowering()); - } } public AllocationSnippetCounters getSnippetCounters() { @@ -1170,39 +1132,6 @@ public void lower(NewPodInstanceNode node, LoweringTool tool) { template(tool, node, args).instantiate(tool.getMetaAccess(), node, SnippetTemplate.DEFAULT_REPLACER, args); } } - - private final class NewDynamicHubLowering implements NodeLoweringProvider { - @Override - public void lower(SubstrateNewDynamicHubNode node, LoweringTool tool) { - StructuredGraph graph = node.graph(); - if (graph.getGuardsStage().areFrameStatesAtSideEffects()) { - return; - } - - assert node.getVTableEntries() != null; - assert node.fillContents() : 
"fillContents must be true for DynamicHub allocations"; - - ValueNode vTableEntries = node.getVTableEntries(); - SharedType type = (SharedType) tool.getMetaAccess().lookupJavaType(Class.class); - DynamicHub hubOfDynamicHub = type.getHub(); - - int layoutEncoding = hubOfDynamicHub.getLayoutEncoding(); - - int vTableBaseOffset = getArrayBaseOffset(layoutEncoding); - assert vTableBaseOffset == KnownOffsets.singleton().getVTableBaseOffset(); - - int log2VTableEntrySize = LayoutEncoding.getArrayIndexShift(layoutEncoding); - assert log2VTableEntrySize == NumUtil.unsignedLog2(KnownOffsets.singleton().getVTableEntrySize()); - - Arguments args = new Arguments(allocateDynamicHub, graph.getGuardsStage(), tool.getLoweringStage()); - args.add("vTableEntries", vTableEntries); - args.add("vTableBaseOffset", vTableBaseOffset); - args.add("log2VTableEntrySize", log2VTableEntrySize); - args.add("profilingData", getProfilingData(node, type)); - - template(tool, node, args).instantiate(tool.getMetaAccess(), node, SnippetTemplate.DEFAULT_REPLACER, args); - } - } } /** diff --git a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/heap/AbstractPinnedObjectSupport.java b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/heap/AbstractPinnedObjectSupport.java index 89e069965848..9278b3378387 100644 --- a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/heap/AbstractPinnedObjectSupport.java +++ b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/heap/AbstractPinnedObjectSupport.java @@ -40,6 +40,7 @@ import com.oracle.svm.core.hub.DynamicHub; import com.oracle.svm.core.hub.LayoutEncoding; import com.oracle.svm.core.jdk.UninterruptibleUtils.AtomicReference; +import com.oracle.svm.core.metaspace.Metaspace; import com.oracle.svm.core.thread.VMOperation; import jdk.graal.compiler.api.replacements.Fold; @@ -131,8 +132,13 @@ public boolean isPinnedSlow(Object object) { } @Uninterruptible(reason = CALLED_FROM_UNINTERRUPTIBLE_CODE, mayBeInlined = true) - private static boolean needsPinning(Object object) { - return !SubstrateOptions.useEpsilonGC() && object != null && !Heap.getHeap().isInImageHeap(object); + public static boolean needsPinning(Object object) { + return !isImplicitlyPinned(object); + } + + @Uninterruptible(reason = CALLED_FROM_UNINTERRUPTIBLE_CODE, mayBeInlined = true) + private static boolean isImplicitlyPinned(Object object) { + return SubstrateOptions.useEpsilonGC() || object == null || Heap.getHeap().isInImageHeap(object) || Metaspace.singleton().isInAddressSpace(object); } public static class PinnedObjectImpl implements PinnedObject { diff --git a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/heap/ObjectHeader.java b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/heap/ObjectHeader.java index fd5ab179c1d2..eb98c5790494 100644 --- a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/heap/ObjectHeader.java +++ b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/heap/ObjectHeader.java @@ -24,6 +24,8 @@ */ package com.oracle.svm.core.heap; +import static com.oracle.svm.core.Uninterruptible.CALLED_FROM_UNINTERRUPTIBLE_CODE; + import org.graalvm.nativeimage.Platform; import org.graalvm.nativeimage.Platforms; import org.graalvm.word.LocationIdentity; @@ -34,6 +36,7 @@ import com.oracle.svm.core.config.ConfigurationValues; import com.oracle.svm.core.hub.DynamicHub; import com.oracle.svm.core.image.ImageHeapObject; +import com.oracle.svm.core.metaspace.Metaspace; import com.oracle.svm.core.snippets.KnownIntrinsics; import 
jdk.graal.compiler.api.replacements.Fold; @@ -86,7 +89,15 @@ protected ObjectHeader() { public abstract Word encodeAsUnmanagedObjectHeader(DynamicHub hub); - public abstract void verifyDynamicHubOffsetInImageHeap(long offsetFromHeapBase); + @Uninterruptible(reason = CALLED_FROM_UNINTERRUPTIBLE_CODE, mayBeInlined = true) + public abstract void verifyDynamicHubOffset(long offsetFromHeapBase); + + @Uninterruptible(reason = CALLED_FROM_UNINTERRUPTIBLE_CODE, mayBeInlined = true) + public boolean verifyDynamicHubOffset(DynamicHub hub) { + long offsetFromHeapBase = Word.objectToUntrackedPointer(hub).subtract(KnownIntrinsics.heapBase()).rawValue(); + verifyDynamicHubOffset(offsetFromHeapBase); + return true; + } @Uninterruptible(reason = "Called from uninterruptible code.", mayBeInlined = true) public DynamicHub dynamicHubFromObjectHeader(Word header) { @@ -165,7 +176,7 @@ public boolean isEncodedObjectHeader(Word potentialHeader) { @Uninterruptible(reason = "Called from uninterruptible code.", mayBeInlined = true) private boolean isDynamicHub(Pointer potentialDynamicHub) { - if (Heap.getHeap().isInImageHeap(potentialDynamicHub)) { + if (Heap.getHeap().isInImageHeap(potentialDynamicHub) || Metaspace.singleton().isInAllocatedMemory(potentialDynamicHub)) { Pointer potentialHubOfDynamicHub = readPotentialDynamicHubFromPointer(potentialDynamicHub); return potentialHubOfDynamicHub.equal(Word.objectToUntrackedPointer(DynamicHub.class)); } diff --git a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/hub/DynamicHub.java b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/hub/DynamicHub.java index c06ef55d3906..7ba90e3338cb 100644 --- a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/hub/DynamicHub.java +++ b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/hub/DynamicHub.java @@ -114,7 +114,6 @@ import com.oracle.svm.core.config.ObjectLayout; import com.oracle.svm.core.configure.RuntimeConditionSet; import com.oracle.svm.core.graal.meta.DynamicHubOffsets; -import com.oracle.svm.core.graal.nodes.SubstrateNewDynamicHubNode; import com.oracle.svm.core.heap.UnknownObjectField; import com.oracle.svm.core.heap.UnknownPrimitiveField; import com.oracle.svm.core.hub.registry.ClassRegistries; @@ -125,6 +124,7 @@ import com.oracle.svm.core.meta.MethodRef; import com.oracle.svm.core.meta.SharedType; import com.oracle.svm.core.metadata.MetadataTracer; +import com.oracle.svm.core.metaspace.Metaspace; import com.oracle.svm.core.reflect.MissingReflectionRegistrationUtils; import com.oracle.svm.core.reflect.RuntimeMetadataDecoder; import com.oracle.svm.core.reflect.RuntimeMetadataDecoder.ConstructorDescriptor; @@ -545,11 +545,7 @@ public static DynamicHub allocate(String name, DynamicHub superHub, Object inter companion.hubMetadata = null; companion.reflectionMetadata = null; - /* - * We cannot do the allocation via {@code new DynamicHub(...)} because we need to inject the - * length for its vtable. - */ - DynamicHub hub = SubstrateNewDynamicHubNode.allocate(DynamicHub.class, vTableEntries); + DynamicHub hub = Metaspace.singleton().allocateDynamicHub(vTableEntries); DynamicHubOffsets dynamicHubOffsets = DynamicHubOffsets.singleton(); /* Write fields in defining order. 
*/ diff --git a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/hub/DynamicHubSupport.java b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/hub/DynamicHubSupport.java index 361ad4b837d8..0c26844a42ca 100644 --- a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/hub/DynamicHubSupport.java +++ b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/hub/DynamicHubSupport.java @@ -41,6 +41,7 @@ import com.oracle.svm.core.heap.InstanceReferenceMapDecoder.InstanceReferenceMap; import com.oracle.svm.core.heap.UnknownObjectField; import com.oracle.svm.core.heap.UnknownPrimitiveField; +import com.oracle.svm.core.imagelayer.DynamicImageLayerInfo; import com.oracle.svm.core.layeredimagesingleton.LayeredImageSingletonBuilderFlags; import com.oracle.svm.core.layeredimagesingleton.LayeredImageSingletonSupport; import com.oracle.svm.core.layeredimagesingleton.MultiLayeredImageSingleton; @@ -65,7 +66,13 @@ public static DynamicHubSupport forLayer(int layerId) { @Uninterruptible(reason = CALLED_FROM_UNINTERRUPTIBLE_CODE, mayBeInlined = true) public static InstanceReferenceMap getInstanceReferenceMap(DynamicHub hub) { - NonmovableArray referenceMapEncoding = forLayer(hub.getLayerId()).getReferenceMapEncoding(); + int layerId = hub.getLayerId(); + if (RuntimeClassLoading.isSupported() && layerId == DynamicImageLayerInfo.CREMA_LAYER_ID) { + /* Assume that the reference map is in the base layer until GR-60080 is implemented. */ + layerId = 0; + } + + NonmovableArray referenceMapEncoding = forLayer(layerId).getReferenceMapEncoding(); return InstanceReferenceMapDecoder.getReferenceMap(referenceMapEncoding, hub.getReferenceMapIndex()); } diff --git a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/metaspace/Metaspace.java b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/metaspace/Metaspace.java new file mode 100644 index 000000000000..8b626395f342 --- /dev/null +++ b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/metaspace/Metaspace.java @@ -0,0 +1,85 @@ +/* + * Copyright (c) 2025, 2025, Oracle and/or its affiliates. All rights reserved. + * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. + * + * This code is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License version 2 only, as + * published by the Free Software Foundation. Oracle designates this + * particular file as subject to the "Classpath" exception as provided + * by Oracle in the LICENSE file that accompanied this code. + * + * This code is distributed in the hope that it will be useful, but WITHOUT + * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or + * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License + * version 2 for more details (a copy is included in the LICENSE file that + * accompanied this code). + * + * You should have received a copy of the GNU General Public License version + * 2 along with this work; if not, write to the Free Software Foundation, + * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. + * + * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA + * or visit www.oracle.com if you need additional information or have any + * questions. 
+ */ +package com.oracle.svm.core.metaspace; + +import static com.oracle.svm.core.Uninterruptible.CALLED_FROM_UNINTERRUPTIBLE_CODE; + +import org.graalvm.nativeimage.ImageSingletons; +import org.graalvm.word.Pointer; + +import com.oracle.svm.core.Uninterruptible; +import com.oracle.svm.core.hub.DynamicHub; + +import jdk.graal.compiler.api.replacements.Fold; + +/** + * The metaspace is used for VM-internal objects (such as runtime-allocated {@link DynamicHub}s) + * that have a different lifecycle than normal Java heap objects. Objects in the metaspace are + * neither collected nor moved by the GC. Objects in the metaspace also do not count towards the + * Java heap size. + */ +public interface Metaspace { + @Fold + static Metaspace singleton() { + return ImageSingletons.lookup(Metaspace.class); + } + + /** + * Returns {@code true} if the {@link Object} reference points to a location within the address + * range of the metaspace. Usually faster than {@link #isInAllocatedMemory}. + */ + @Uninterruptible(reason = CALLED_FROM_UNINTERRUPTIBLE_CODE, mayBeInlined = true) + boolean isInAddressSpace(Object obj); + + /** + * Returns {@code true} if the {@link Pointer} points to a location within the address range of + * the metaspace. Usually faster than {@link #isInAllocatedMemory}. + */ + @Uninterruptible(reason = CALLED_FROM_UNINTERRUPTIBLE_CODE, mayBeInlined = true) + boolean isInAddressSpace(Pointer obj); + + /** + * Returns {@code true} if the {@link Object} reference points to allocated memory that is + * located in the address range of the metaspace. Usually slower than {@link #isInAddressSpace}. + */ + @Uninterruptible(reason = CALLED_FROM_UNINTERRUPTIBLE_CODE, mayBeInlined = true) + boolean isInAllocatedMemory(Object obj); + + /** + * Returns {@code true} if the {@link Pointer} points to allocated memory that is located in the + * address range of the metaspace. Usually slower than {@link #isInAddressSpace}. + */ + @Uninterruptible(reason = CALLED_FROM_UNINTERRUPTIBLE_CODE, mayBeInlined = true) + boolean isInAllocatedMemory(Pointer ptr); + + /** + * Allocates a {@link DynamicHub} and zeroes all its fields. Typically, there should be no need + * to call this method directly and {@link DynamicHub#allocate} should be used instead. + */ + DynamicHub allocateDynamicHub(int numVTableEntries); + + /** Allocates a byte array. */ + byte[] allocateByteArray(int length); +} diff --git a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/metaspace/MetaspaceFeature.java b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/metaspace/MetaspaceFeature.java new file mode 100644 index 000000000000..4d09175af70f --- /dev/null +++ b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/metaspace/MetaspaceFeature.java @@ -0,0 +1,45 @@ +/* + * Copyright (c) 2025, 2025, Oracle and/or its affiliates. All rights reserved. + * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. + * + * This code is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License version 2 only, as + * published by the Free Software Foundation. Oracle designates this + * particular file as subject to the "Classpath" exception as provided + * by Oracle in the LICENSE file that accompanied this code. + * + * This code is distributed in the hope that it will be useful, but WITHOUT + * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or + * FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU General Public License + * version 2 for more details (a copy is included in the LICENSE file that + * accompanied this code). + * + * You should have received a copy of the GNU General Public License version + * 2 along with this work; if not, write to the Free Software Foundation, + * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. + * + * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA + * or visit www.oracle.com if you need additional information or have any + * questions. + */ +package com.oracle.svm.core.metaspace; + +import org.graalvm.nativeimage.ImageSingletons; + +import com.oracle.svm.core.feature.AutomaticallyRegisteredFeature; +import com.oracle.svm.core.feature.InternalFeature; +import com.oracle.svm.core.hub.RuntimeClassLoading; + +/** + * A {@link Metaspace} is only needed if {@link RuntimeClassLoading} is supported (which is not the + * default). + */ +@AutomaticallyRegisteredFeature +public class MetaspaceFeature implements InternalFeature { + @Override + public void beforeAnalysis(BeforeAnalysisAccess access) { + if (!ImageSingletons.contains(Metaspace.class)) { + ImageSingletons.add(Metaspace.class, new NoMetaspace()); + } + } +} diff --git a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/metaspace/NoMetaspace.java b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/metaspace/NoMetaspace.java new file mode 100644 index 000000000000..e854b11b4116 --- /dev/null +++ b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/metaspace/NoMetaspace.java @@ -0,0 +1,69 @@ +/* + * Copyright (c) 2025, 2025, Oracle and/or its affiliates. All rights reserved. + * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. + * + * This code is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License version 2 only, as + * published by the Free Software Foundation. Oracle designates this + * particular file as subject to the "Classpath" exception as provided + * by Oracle in the LICENSE file that accompanied this code. + * + * This code is distributed in the hope that it will be useful, but WITHOUT + * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or + * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License + * version 2 for more details (a copy is included in the LICENSE file that + * accompanied this code). + * + * You should have received a copy of the GNU General Public License version + * 2 along with this work; if not, write to the Free Software Foundation, + * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. + * + * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA + * or visit www.oracle.com if you need additional information or have any + * questions. 
+ */ +package com.oracle.svm.core.metaspace; + +import static com.oracle.svm.core.Uninterruptible.CALLED_FROM_UNINTERRUPTIBLE_CODE; + +import org.graalvm.word.Pointer; + +import com.oracle.svm.core.Uninterruptible; +import com.oracle.svm.core.hub.DynamicHub; +import com.oracle.svm.core.util.VMError; + +public final class NoMetaspace implements Metaspace { + @Override + @Uninterruptible(reason = CALLED_FROM_UNINTERRUPTIBLE_CODE, mayBeInlined = true) + public boolean isInAllocatedMemory(Pointer ptr) { + return false; + } + + @Override + @Uninterruptible(reason = CALLED_FROM_UNINTERRUPTIBLE_CODE, mayBeInlined = true) + public boolean isInAllocatedMemory(Object obj) { + return false; + } + + @Override + @Uninterruptible(reason = CALLED_FROM_UNINTERRUPTIBLE_CODE, mayBeInlined = true) + public boolean isInAddressSpace(Pointer ptr) { + return false; + } + + @Override + @Uninterruptible(reason = CALLED_FROM_UNINTERRUPTIBLE_CODE, mayBeInlined = true) + public boolean isInAddressSpace(Object obj) { + return false; + } + + @Override + public DynamicHub allocateDynamicHub(int numVTableEntries) { + throw VMError.shouldNotReachHere("Must not be called if metaspace support is not available."); + } + + @Override + public byte[] allocateByteArray(int length) { + throw VMError.shouldNotReachHere("Must not be called if metaspace support is not available."); + } +} diff --git a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/metaspace/OWNERS.toml b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/metaspace/OWNERS.toml new file mode 100644 index 000000000000..f8d0e94a102a --- /dev/null +++ b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/metaspace/OWNERS.toml @@ -0,0 +1,7 @@ +[[rule]] +files = "*" +all = [ + "christian.haeubl@oracle.com" +] +any = [ +] diff --git a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/nmt/NmtCategory.java b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/nmt/NmtCategory.java index f0598a0e4655..e0830ea80d83 100644 --- a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/nmt/NmtCategory.java +++ b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/nmt/NmtCategory.java @@ -49,6 +49,8 @@ public enum NmtCategory { JvmStat("jvmstat"), /** Java Virtual Machine Tool Interface. */ JVMTI("JVMTI"), + /** Metaspace objects. */ + Metaspace("Metaspace"), /** NMT itself. */ NMT("Native Memory Tracking"), /** Profile-guided optimizations. */ diff --git a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/os/AbstractCommittedMemoryProvider.java b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/os/AbstractCommittedMemoryProvider.java index 194bfcef00ba..e244b2fb7dc8 100644 --- a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/os/AbstractCommittedMemoryProvider.java +++ b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/os/AbstractCommittedMemoryProvider.java @@ -128,11 +128,15 @@ protected static void free(PointerBase start, UnsignedWord nbytes, NmtCategory n @Override public UnsignedWord getCollectedHeapAddressSpaceSize() { /* Only a part of the address space is available for the collected Java heap. 
*/ - UnsignedWord reservedAddressSpace = getReservedAddressSpaceSize(); + UnsignedWord reservedForJavaHeap = getReservedAddressSpaceSize().subtract(getReservedMetaspaceSize()); UnsignedWord imageHeapSize = Heap.getHeap().getImageHeapReservedBytes(); - assert reservedAddressSpace.aboveThan(imageHeapSize); - return reservedAddressSpace.subtract(imageHeapSize); + assert reservedForJavaHeap.aboveThan(imageHeapSize); + return reservedForJavaHeap.subtract(imageHeapSize); } + /** The total number of bytes reserved for the whole address space. */ protected abstract UnsignedWord getReservedAddressSpaceSize(); + + /** The number of address space bytes that are reserved for the metaspace. */ + protected abstract UnsignedWord getReservedMetaspaceSize(); } diff --git a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/os/ChunkBasedCommittedMemoryProvider.java b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/os/ChunkBasedCommittedMemoryProvider.java index f13272568908..6cd5f2731e0e 100644 --- a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/os/ChunkBasedCommittedMemoryProvider.java +++ b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/os/ChunkBasedCommittedMemoryProvider.java @@ -42,10 +42,10 @@ import jdk.graal.compiler.word.Word; public abstract class ChunkBasedCommittedMemoryProvider extends AbstractCommittedMemoryProvider { - protected static final OutOfMemoryError ALIGNED_CHUNK_COMMIT_FAILED = new OutOfMemoryError("Could not commit an aligned heap chunk. " + - "Either the OS/container is out of memory or another system-level resource limit was reached (such as the number of memory mappings)."); - protected static final OutOfMemoryError UNALIGNED_CHUNK_COMMIT_FAILED = new OutOfMemoryError("Could not commit an unaligned heap chunk. " + - "Either the OS/container is out of memory or another system-level resource limit was reached (such as the number of memory mappings)."); + private static final String SYSTEM_OUT_OF_MEMORY_MSG = "Either the OS/container is out of memory or another system-level resource limit was reached (such as the number of memory mappings)."; + protected static final OutOfMemoryError ALIGNED_CHUNK_COMMIT_FAILED = new OutOfMemoryError("Could not commit an aligned heap chunk. " + SYSTEM_OUT_OF_MEMORY_MSG); + protected static final OutOfMemoryError UNALIGNED_CHUNK_COMMIT_FAILED = new OutOfMemoryError("Could not commit an unaligned heap chunk. " + SYSTEM_OUT_OF_MEMORY_MSG); + protected static final OutOfMemoryError METASPACE_CHUNK_COMMIT_FAILED = new OutOfMemoryError("Could not commit a metaspace chunk. " + SYSTEM_OUT_OF_MEMORY_MSG); @Fold public static ChunkBasedCommittedMemoryProvider get() { @@ -53,7 +53,17 @@ public static ChunkBasedCommittedMemoryProvider get() { } /** Returns a non-null value or throws a pre-allocated exception. */ - @Uninterruptible(reason = "Called from uninterruptible code.", mayBeInlined = true) + @Uninterruptible(reason = CALLED_FROM_UNINTERRUPTIBLE_CODE, mayBeInlined = true) + public Pointer allocateMetaspaceChunk(UnsignedWord nbytes, UnsignedWord alignment) { + Pointer result = allocate(nbytes, alignment, false, NmtCategory.Metaspace); + if (result.isNull()) { + throw METASPACE_CHUNK_COMMIT_FAILED; + } + return result; + } + + /** Returns a non-null value or throws a pre-allocated exception. 
*/ + @Uninterruptible(reason = CALLED_FROM_UNINTERRUPTIBLE_CODE, mayBeInlined = true) public Pointer allocateAlignedChunk(UnsignedWord nbytes, UnsignedWord alignment) { Pointer result = allocate(nbytes, alignment, false, NmtCategory.JavaHeap); if (result.isNull()) { diff --git a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/os/OSCommittedMemoryProvider.java b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/os/OSCommittedMemoryProvider.java index c3d171cced8e..843426a5291f 100644 --- a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/os/OSCommittedMemoryProvider.java +++ b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/os/OSCommittedMemoryProvider.java @@ -80,4 +80,9 @@ public UnsignedWord getReservedAddressSpaceSize() { } return maxAddressSpaceSize; } + + @Override + protected UnsignedWord getReservedMetaspaceSize() { + return Word.zero(); + } } diff --git a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/thread/JavaLangThreadGroupSubstitutions.java b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/thread/JavaLangThreadGroupSubstitutions.java index 60da2ca7d22e..177d26340246 100644 --- a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/thread/JavaLangThreadGroupSubstitutions.java +++ b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/thread/JavaLangThreadGroupSubstitutions.java @@ -25,7 +25,6 @@ package com.oracle.svm.core.thread; import java.lang.ref.WeakReference; -import java.util.Arrays; import org.graalvm.nativeimage.Platform; import org.graalvm.nativeimage.Platforms; @@ -39,7 +38,6 @@ import com.oracle.svm.core.annotate.InjectAccessors; import com.oracle.svm.core.annotate.RecomputeFieldValue; import com.oracle.svm.core.annotate.TargetClass; -import com.oracle.svm.core.heap.Heap; import com.oracle.svm.core.jdk.UninterruptibleUtils; import com.oracle.svm.core.jfr.JfrThreadRepository; import com.oracle.svm.util.ReflectionUtil; @@ -151,22 +149,6 @@ public Object transform(Object receiver, Object originalValue) { } } -final class ThreadGroupThreadsAccessor { - static Thread[] get(Target_java_lang_ThreadGroup that) { - return that.injectedThreads; - } - - static void set(Target_java_lang_ThreadGroup that, Thread[] value) { - if (that.injectedThreads != null && Heap.getHeap().isInImageHeap(that.injectedThreads)) { - Arrays.fill(that.injectedThreads, null); - } - that.injectedThreads = value; - } - - private ThreadGroupThreadsAccessor() { - } -} - public class JavaLangThreadGroupSubstitutions { @Uninterruptible(reason = "Called from uninterruptible code.", mayBeInlined = true) diff --git a/substratevm/src/com.oracle.svm.hosted/src/com/oracle/svm/hosted/image/NativeImageHeap.java b/substratevm/src/com.oracle.svm.hosted/src/com/oracle/svm/hosted/image/NativeImageHeap.java index d90daa6b2c8c..603e57f690a0 100644 --- a/substratevm/src/com.oracle.svm.hosted/src/com/oracle/svm/hosted/image/NativeImageHeap.java +++ b/substratevm/src/com.oracle.svm.hosted/src/com/oracle/svm/hosted/image/NativeImageHeap.java @@ -432,7 +432,7 @@ public int countAndVerifyDynamicHubs() { int count = 0; for (ObjectInfo o : getObjects()) { if (!o.constant.isWrittenInPreviousLayer() && hMetaAccess.isInstanceOf(o.getConstant(), DynamicHub.class)) { - objHeader.verifyDynamicHubOffsetInImageHeap(o.getOffset()); + objHeader.verifyDynamicHubOffset(o.getOffset()); count++; } } diff --git a/web-image/src/com.oracle.svm.hosted.webimage/src/com/oracle/svm/hosted/webimage/wasm/gc/WasmLMAllocationSupport.java 
b/web-image/src/com.oracle.svm.hosted.webimage/src/com/oracle/svm/hosted/webimage/wasm/gc/WasmLMAllocationSupport.java index 050dc0b154a5..c060b6c19156 100644 --- a/web-image/src/com.oracle.svm.hosted.webimage/src/com/oracle/svm/hosted/webimage/wasm/gc/WasmLMAllocationSupport.java +++ b/web-image/src/com.oracle.svm.hosted.webimage/src/com/oracle/svm/hosted/webimage/wasm/gc/WasmLMAllocationSupport.java @@ -68,11 +68,6 @@ public ForeignCallDescriptor getNewPodInstanceStub() { throw GraalError.unimplementedOverride(); // ExcludeFromJacocoGeneratedReport } - @Override - public ForeignCallDescriptor getNewDynamicHub() { - throw GraalError.unimplementedOverride(); // ExcludeFromJacocoGeneratedReport - } - @Override public boolean useTLAB() { return false; diff --git a/web-image/src/com.oracle.svm.hosted.webimage/src/com/oracle/svm/hosted/webimage/wasm/gc/WasmObjectHeader.java b/web-image/src/com.oracle.svm.hosted.webimage/src/com/oracle/svm/hosted/webimage/wasm/gc/WasmObjectHeader.java index 56e987693ee9..68e7c9d24578 100644 --- a/web-image/src/com.oracle.svm.hosted.webimage/src/com/oracle/svm/hosted/webimage/wasm/gc/WasmObjectHeader.java +++ b/web-image/src/com.oracle.svm.hosted.webimage/src/com/oracle/svm/hosted/webimage/wasm/gc/WasmObjectHeader.java @@ -25,6 +25,8 @@ package com.oracle.svm.hosted.webimage.wasm.gc; +import static com.oracle.svm.core.Uninterruptible.CALLED_FROM_UNINTERRUPTIBLE_CODE; + import org.graalvm.word.Pointer; import org.graalvm.word.UnsignedWord; import org.graalvm.word.WordBase; @@ -196,7 +198,8 @@ public long encodeHubPointerForImageHeap(ImageHeapObject obj, long hubOffsetFrom } @Override - public void verifyDynamicHubOffsetInImageHeap(long offsetFromHeapBase) { + @Uninterruptible(reason = CALLED_FROM_UNINTERRUPTIBLE_CODE, mayBeInlined = true) + public void verifyDynamicHubOffset(long offsetFromHeapBase) { /* Nothing to do. */ } diff --git a/web-image/src/com.oracle.svm.webimage/src/com/oracle/svm/webimage/heap/WebImageNopAllocationSupport.java b/web-image/src/com.oracle.svm.webimage/src/com/oracle/svm/webimage/heap/WebImageNopAllocationSupport.java index e7de7ea16c43..ef111321caf1 100644 --- a/web-image/src/com.oracle.svm.webimage/src/com/oracle/svm/webimage/heap/WebImageNopAllocationSupport.java +++ b/web-image/src/com.oracle.svm.webimage/src/com/oracle/svm/webimage/heap/WebImageNopAllocationSupport.java @@ -63,11 +63,6 @@ public ForeignCallDescriptor getNewPodInstanceStub() { throw GraalError.unimplementedOverride(); } - @Override - public ForeignCallDescriptor getNewDynamicHub() { - throw GraalError.unimplementedOverride(); - } - @Override public boolean useTLAB() { throw GraalError.unimplementedOverride();
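
A few usage sketches follow; none of the code below is part of the patch itself. First, the renamed ObjectHeader.verifyDynamicHubOffset: the boolean-returning overload always returns true so that the check can sit inside an assert and disappear entirely when assertions are disabled. A minimal, hypothetical call site:

    import com.oracle.svm.core.heap.ObjectHeader;
    import com.oracle.svm.core.hub.DynamicHub;

    final class VerifySketch {
        // Hypothetical helper; 'header' and 'hub' come from whoever performs the verification.
        static void checkHub(ObjectHeader header, DynamicHub hub) {
            // The overload always returns true, so the whole call vanishes when assertions are off.
            assert header.verifyDynamicHubOffset(hub);
        }
    }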
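
The Metaspace interface documents two membership checks with different cost: isInAddressSpace is a cheap range check, isInAllocatedMemory a precise but slower one. A minimal sketch of how a caller might combine them and allocate metaspace memory; the class and method names are hypothetical, only the Metaspace entry points are taken from the patch:

    import org.graalvm.word.Pointer;

    import com.oracle.svm.core.metaspace.Metaspace;

    final class MetaspaceUsageSketch {
        static boolean pointsIntoMetaspaceObject(Pointer ptr) {
            Metaspace metaspace = Metaspace.singleton();
            // Cheap range check first; only fall back to the slower precise check
            // when the pointer is at least inside the reserved metaspace range.
            return metaspace.isInAddressSpace(ptr) && metaspace.isInAllocatedMemory(ptr);
        }

        static byte[] allocateMetadataBlob(int length) {
            // Metaspace objects are neither moved nor collected by the GC, so the returned
            // array may be referenced from GC-invisible data structures.
            return Metaspace.singleton().allocateByteArray(length);
        }
    }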
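
The change to getCollectedHeapAddressSpaceSize reduces to two subtractions: the metaspace reservation is carved out of the reserved address space first, then the image heap reservation. A worked example with made-up sizes (none of these numbers appear in the patch):

    // Made-up sizes, only to illustrate the subtraction order.
    long reservedAddressSpaceSize = 32L << 30;   // 32 GB reserved in total
    long reservedMetaspaceSize = 1L << 30;       // 1 GB set aside for the metaspace
    long imageHeapReservedBytes = 512L << 20;    // 512 MB reserved for the image heap

    long reservedForJavaHeap = reservedAddressSpaceSize - reservedMetaspaceSize;   // 31 GB
    long collectedHeapAddressSpace = reservedForJavaHeap - imageHeapReservedBytes; // 30.5 GB for the collected heap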
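
allocateMetaspaceChunk follows the same failure convention as the existing chunk allocators: the OutOfMemoryError is created once, up front, so nothing has to be allocated on the failure path. A standalone sketch of that convention with hypothetical names (osCommit merely stands in for the platform-specific commit call):

    final class PreallocatedOomSketch {
        private static final OutOfMemoryError CHUNK_COMMIT_FAILED = new OutOfMemoryError(
                        "Could not commit a chunk. Either the OS/container is out of memory " +
                                        "or another system-level resource limit was reached.");

        static long allocateChunk(long nbytes) {
            long address = osCommit(nbytes);
            if (address == 0L) {
                // Throwing the shared, pre-allocated error avoids allocating while out of memory.
                throw CHUNK_COMMIT_FAILED;
            }
            return address;
        }

        private static long osCommit(long nbytes) {
            return 0L; // placeholder: pretend the commit always fails
        }
    }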