From c12090709d2936455cf9d2e135f2312cd0099df7 Mon Sep 17 00:00:00 2001 From: Eric Wu Date: Thu, 6 Feb 2025 09:56:42 -0800 Subject: [PATCH 1/2] support lazy deoptimization --- .../oracle/svm/core/genscavenge/GCImpl.java | 2 +- .../genscavenge/RuntimeCodeCacheWalker.java | 22 +- .../aarch64/SubstrateAArch64Backend.java | 30 +- .../SubstrateAArch64RegisterConfig.java | 5 +- .../graal/amd64/SubstrateAMD64Backend.java | 32 +- .../amd64/SubstrateAMD64RegisterConfig.java | 5 +- .../SubstrateRISCV64RegisterConfig.java | 5 +- .../oracle/svm/core/SubstrateDiagnostics.java | 7 +- .../{jvmti/headers => c}/BooleanPointer.java | 2 +- .../com/oracle/svm/core/code/CodeInfo.java | 31 +- .../oracle/svm/core/code/CodeInfoAccess.java | 12 +- .../oracle/svm/core/code/CodeInfoDecoder.java | 32 +- .../oracle/svm/core/code/CodeInfoEncoder.java | 17 + .../svm/core/code/CodeInfoQueryResult.java | 13 +- .../oracle/svm/core/code/CodeInfoTable.java | 9 +- .../svm/core/code/RuntimeCodeCache.java | 45 +- .../svm/core/code/RuntimeCodeInfoAccess.java | 6 +- .../com/oracle/svm/core/deopt/DeoptState.java | 6 +- .../svm/core/deopt/DeoptimizationSupport.java | 54 +- .../svm/core/deopt/DeoptimizedFrame.java | 28 +- .../oracle/svm/core/deopt/Deoptimizer.java | 605 +++++++++++++++--- .../core/deopt/SubstrateInstalledCode.java | 2 +- .../core/heap/RuntimeCodeCacheCleaner.java | 6 +- .../core/heap/StoredContinuationAccess.java | 4 +- .../jdk/Target_java_lang_StackWalker.java | 4 +- .../oracle/svm/core/jfr/JfrStackWalker.java | 2 +- .../oracle/svm/core/jvmti/JvmtiFunctions.java | 2 +- .../svm/core/snippets/ExceptionUnwind.java | 35 +- .../com/oracle/svm/core/stack/JavaFrame.java | 6 + .../com/oracle/svm/core/stack/JavaFrames.java | 22 +- .../svm/core/stack/JavaStackWalker.java | 3 +- .../stack/SubstrateStackIntrospection.java | 6 +- .../graal/hosted/DeoptimizationFeature.java | 27 +- .../meta/SubstrateInstalledCodeImpl.java | 4 +- .../api/SubstrateOptimizedCallTarget.java | 2 +- 
...trateOptimizedCallTargetInstalledCode.java | 14 +- 36 files changed, 858 insertions(+), 249 deletions(-) rename substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/{jvmti/headers => c}/BooleanPointer.java (97%) diff --git a/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/GCImpl.java b/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/GCImpl.java index a15dddabfee6..cb1b06eb987d 100644 --- a/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/GCImpl.java +++ b/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/GCImpl.java @@ -847,7 +847,7 @@ private static void walkStack(IsolateThread thread, JavaStackWalk walk, ObjectRe VMError.guarantee(!JavaFrames.isUnknownFrame(frame), "GC must not encounter unknown frames"); /* We are during a GC, so tethering of the CodeInfo is not necessary. */ - DeoptimizedFrame deoptFrame = Deoptimizer.checkDeoptimized(frame); + DeoptimizedFrame deoptFrame = Deoptimizer.checkEagerDeoptimized(frame); if (deoptFrame == null) { Pointer sp = frame.getSP(); CodeInfo codeInfo = CodeInfoAccess.unsafeConvert(frame.getIPCodeInfo()); diff --git a/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/RuntimeCodeCacheWalker.java b/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/RuntimeCodeCacheWalker.java index 848ee08019d2..1a2c7270fdc3 100644 --- a/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/RuntimeCodeCacheWalker.java +++ b/substratevm/src/com.oracle.svm.core.genscavenge/src/com/oracle/svm/core/genscavenge/RuntimeCodeCacheWalker.java @@ -76,26 +76,28 @@ public boolean visitCode(CodeInfo codeInfo) { Object tether = UntetheredCodeInfoAccess.getTetherUnsafe(codeInfo); if (tether != null && !isReachable(tether)) { int state = CodeInfoAccess.getState(codeInfo); - if (state == CodeInfo.STATE_INVALIDATED) { + 
if (state == CodeInfo.STATE_REMOVED_FROM_CODE_CACHE) { /* - * The tether object is not reachable and the CodeInfo was already invalidated, so - * we only need to visit references that will be accessed before the unmanaged - * memory is freed during this garbage collection. + * The tether object is not reachable and the CodeInfo was already removed from the + * code cache, so we only need to visit references that will be accessed before the + * unmanaged memory is freed during this garbage collection. */ RuntimeCodeInfoAccess.walkObjectFields(codeInfo, greyToBlackObjectVisitor); - CodeInfoAccess.setState(codeInfo, CodeInfo.STATE_UNREACHABLE); + CodeInfoAccess.setState(codeInfo, CodeInfo.STATE_PENDING_FREE); return true; } /* - * We don't want to keep heap objects unnecessarily alive, so invalidate and free the - * CodeInfo if it has weak references to otherwise unreachable objects. However, we need - * to make sure that all the objects that are accessed during the invalidation remain - * reachable. Those objects can only be collected in a subsequent garbage collection. + * We don't want to keep heap objects unnecessarily alive. So, we check if the CodeInfo + * has weak references to otherwise unreachable objects. If so, we remove the CodeInfo + * from the code cache and free the CodeInfo during the current safepoint (see + * RuntimeCodeCacheCleaner). However, we need to make sure that all the objects that are + * accessed while doing so remain reachable. Those objects can only be collected in a + * subsequent garbage collection. 
*/ if (state == CodeInfo.STATE_NON_ENTRANT || invalidateCodeThatReferencesUnreachableObjects && state == CodeInfo.STATE_CODE_CONSTANTS_LIVE && hasWeakReferenceToUnreachableObject(codeInfo)) { RuntimeCodeInfoAccess.walkObjectFields(codeInfo, greyToBlackObjectVisitor); - CodeInfoAccess.setState(codeInfo, CodeInfo.STATE_READY_FOR_INVALIDATION); + CodeInfoAccess.setState(codeInfo, CodeInfo.STATE_PENDING_REMOVAL_FROM_CODE_CACHE); return true; } } diff --git a/substratevm/src/com.oracle.svm.core.graal.aarch64/src/com/oracle/svm/core/graal/aarch64/SubstrateAArch64Backend.java b/substratevm/src/com.oracle.svm.core.graal.aarch64/src/com/oracle/svm/core/graal/aarch64/SubstrateAArch64Backend.java index 37663a29b2c1..90b3a7c81c04 100755 --- a/substratevm/src/com.oracle.svm.core.graal.aarch64/src/com/oracle/svm/core/graal/aarch64/SubstrateAArch64Backend.java +++ b/substratevm/src/com.oracle.svm.core.graal.aarch64/src/com/oracle/svm/core/graal/aarch64/SubstrateAArch64Backend.java @@ -469,19 +469,27 @@ public SubstrateLIRGenerationResult(CompilationIdentifier compilationId, LIR lir super(compilationId, lir, frameMapBuilder, registerAllocationConfig, callingConvention); this.method = method; - if (method.hasCalleeSavedRegisters()) { + /* + * Besides for methods with callee saved registers, we reserve additional stack space + * for lazyDeoptStub too. This is necessary because the lazy deopt stub might read + * callee-saved register values in the callee of the function to be deoptimized, thus + * that stack space must not be overwritten by the lazy deopt stub. 
+ */ + if (method.hasCalleeSavedRegisters() || method.getDeoptStubType() == Deoptimizer.StubType.LazyEntryStub) { AArch64CalleeSavedRegisters calleeSavedRegisters = AArch64CalleeSavedRegisters.singleton(); FrameMap frameMap = ((FrameMapBuilderTool) frameMapBuilder).getFrameMap(); int registerSaveAreaSizeInBytes = calleeSavedRegisters.getSaveAreaSize(); StackSlot calleeSaveArea = frameMap.allocateStackMemory(registerSaveAreaSizeInBytes, frameMap.getTarget().wordSize); - /* - * The offset of the callee save area must be fixed early during image generation. - * It is accessed when compiling methods that have a call with callee-saved calling - * convention. Here we verify that offset computed earlier is the same as the offset - * actually reserved. - */ - calleeSavedRegisters.verifySaveAreaOffsetInFrame(calleeSaveArea.getRawOffset()); + if (method.hasCalleeSavedRegisters()) { + /* + * The offset of the callee save area must be fixed early during image + * generation. It is accessed when compiling methods that have a call with + * callee-saved calling convention. Here we verify that offset computed earlier + * is the same as the offset actually reserved. + */ + calleeSavedRegisters.verifySaveAreaOffsetInFrame(calleeSaveArea.getRawOffset()); + } } if (method.canDeoptimize() || method.isDeoptTarget()) { @@ -951,8 +959,8 @@ public void returned(CompilationResultBuilder crb) { } /** - * Generates the prolog of a {@link com.oracle.svm.core.deopt.Deoptimizer.StubType#EntryStub} - * method. + * Generates the prolog of a + * {@link com.oracle.svm.core.deopt.Deoptimizer.StubType#EagerEntryStub} method. 
*/ protected static class DeoptEntryStubContext extends SubstrateAArch64FrameContext { protected final CallingConvention callingConvention; @@ -1241,7 +1249,7 @@ public CompilationResultBuilder newCompilationResultBuilder(LIRGenerationResult } protected FrameContext createFrameContext(SharedMethod method, Deoptimizer.StubType stubType, CallingConvention callingConvention) { - if (stubType == Deoptimizer.StubType.EntryStub) { + if (stubType == Deoptimizer.StubType.EagerEntryStub || stubType == Deoptimizer.StubType.LazyEntryStub) { return new DeoptEntryStubContext(method, callingConvention); } else if (stubType == Deoptimizer.StubType.ExitStub) { return new DeoptExitStubContext(method, callingConvention); diff --git a/substratevm/src/com.oracle.svm.core.graal.aarch64/src/com/oracle/svm/core/graal/aarch64/SubstrateAArch64RegisterConfig.java b/substratevm/src/com.oracle.svm.core.graal.aarch64/src/com/oracle/svm/core/graal/aarch64/SubstrateAArch64RegisterConfig.java index 5afcab795630..8b5bd5f46992 100755 --- a/substratevm/src/com.oracle.svm.core.graal.aarch64/src/com/oracle/svm/core/graal/aarch64/SubstrateAArch64RegisterConfig.java +++ b/substratevm/src/com.oracle.svm.core.graal.aarch64/src/com/oracle/svm/core/graal/aarch64/SubstrateAArch64RegisterConfig.java @@ -289,8 +289,9 @@ public CallingConvention getCallingConvention(Type t, JavaType returnType, JavaT int currentFP = 0; /* - * We have to reserve a slot between return address and outgoing parameters for the deopt - * frame handle. Exception: calls to native methods. + * We have to reserve a slot between return address and outgoing parameters for the + * deoptimized frame (eager deoptimization), or the original return address (lazy + * deoptimization). Exception: calls to native methods. */ int currentStackOffset = (type.nativeABI() ? 
nativeParamsStackOffset : target.wordSize); diff --git a/substratevm/src/com.oracle.svm.core.graal.amd64/src/com/oracle/svm/core/graal/amd64/SubstrateAMD64Backend.java b/substratevm/src/com.oracle.svm.core.graal.amd64/src/com/oracle/svm/core/graal/amd64/SubstrateAMD64Backend.java index 7e330886f8f6..7085c7fe8fbf 100644 --- a/substratevm/src/com.oracle.svm.core.graal.amd64/src/com/oracle/svm/core/graal/amd64/SubstrateAMD64Backend.java +++ b/substratevm/src/com.oracle.svm.core.graal.amd64/src/com/oracle/svm/core/graal/amd64/SubstrateAMD64Backend.java @@ -554,19 +554,27 @@ public SubstrateLIRGenerationResult(CompilationIdentifier compilationId, LIR lir super(compilationId, lir, frameMapBuilder, registerAllocationConfig, callingConvention); this.method = method; - if (method.hasCalleeSavedRegisters()) { + /* + * Besides for methods with callee saved registers, we reserve additional stack space + * for lazyDeoptStub too. This is necessary because the lazy deopt stub might read + * callee-saved register values in the callee of the function to be deoptimized, thus + * that stack space must not be overwritten by the lazy deopt stub. + */ + if (method.hasCalleeSavedRegisters() || method.getDeoptStubType() == Deoptimizer.StubType.LazyEntryStub) { AMD64CalleeSavedRegisters calleeSavedRegisters = AMD64CalleeSavedRegisters.singleton(); FrameMap frameMap = ((FrameMapBuilderTool) frameMapBuilder).getFrameMap(); int registerSaveAreaSizeInBytes = calleeSavedRegisters.getSaveAreaSize(); StackSlot calleeSaveArea = frameMap.allocateStackMemory(registerSaveAreaSizeInBytes, frameMap.getTarget().wordSize); - /* - * The offset of the callee save area must be fixed early during image generation. - * It is accessed when compiling methods that have a call with callee-saved calling - * convention. Here we verify that offset computed earlier is the same as the offset - * actually reserved. 
- */ - calleeSavedRegisters.verifySaveAreaOffsetInFrame(calleeSaveArea.getRawOffset()); + if (method.hasCalleeSavedRegisters()) { + /* + * The offset of the callee save area must be fixed early during image + * generation. It is accessed when compiling methods that have a call with + * callee-saved calling convention. Here we verify that offset computed earlier + * is the same as the offset actually reserved. + */ + calleeSavedRegisters.verifySaveAreaOffsetInFrame(calleeSaveArea.getRawOffset()); + } } if (method.canDeoptimize() || method.isDeoptTarget()) { @@ -1326,8 +1334,8 @@ public void returned(CompilationResultBuilder crb) { } /** - * Generates the prolog of a {@link com.oracle.svm.core.deopt.Deoptimizer.StubType#EntryStub} - * method. + * Generates the prolog of a + * {@link com.oracle.svm.core.deopt.Deoptimizer.StubType#EagerEntryStub} method. */ protected static class DeoptEntryStubContext extends SubstrateAMD64FrameContext { protected DeoptEntryStubContext(SharedMethod method, CallingConvention callingConvention) { @@ -1366,7 +1374,7 @@ public void enter(CompilationResultBuilder tasm) { * method. * * Note no special handling is necessary for CFI as this will be a direct call from the - * {@link com.oracle.svm.core.deopt.Deoptimizer.StubType#EntryStub}. + * {@link com.oracle.svm.core.deopt.Deoptimizer.StubType#EagerEntryStub}. 
*/ protected static class DeoptExitStubContext extends SubstrateAMD64FrameContext { protected DeoptExitStubContext(SharedMethod method, CallingConvention callingConvention) { @@ -1810,7 +1818,7 @@ protected AMD64MacroAssembler createAssembler(OptionValues options) { } protected FrameContext createFrameContext(SharedMethod method, Deoptimizer.StubType stubType, CallingConvention callingConvention) { - if (stubType == Deoptimizer.StubType.EntryStub) { + if (stubType == Deoptimizer.StubType.EagerEntryStub || stubType == Deoptimizer.StubType.LazyEntryStub) { return new DeoptEntryStubContext(method, callingConvention); } else if (stubType == Deoptimizer.StubType.ExitStub) { return new DeoptExitStubContext(method, callingConvention); diff --git a/substratevm/src/com.oracle.svm.core.graal.amd64/src/com/oracle/svm/core/graal/amd64/SubstrateAMD64RegisterConfig.java b/substratevm/src/com.oracle.svm.core.graal.amd64/src/com/oracle/svm/core/graal/amd64/SubstrateAMD64RegisterConfig.java index 9d2caca80527..66d1df2a37bb 100644 --- a/substratevm/src/com.oracle.svm.core.graal.amd64/src/com/oracle/svm/core/graal/amd64/SubstrateAMD64RegisterConfig.java +++ b/substratevm/src/com.oracle.svm.core.graal.amd64/src/com/oracle/svm/core/graal/amd64/SubstrateAMD64RegisterConfig.java @@ -264,8 +264,9 @@ public CallingConvention getCallingConvention(Type t, JavaType returnType, JavaT boolean isEntryPoint = type.nativeABI() && !type.outgoing; /* - * We have to reserve a slot between return address and outgoing parameters for the deopt - * frame handle. Exception: calls to native methods. + * We have to reserve a slot between return address and outgoing parameters for the + * deoptimized frame (eager deoptimization), or the original return address (lazy + * deoptimization). Exception: calls to native methods. */ int currentStackOffset = type.nativeABI() ? 
nativeParamsStackOffset : target.wordSize; diff --git a/substratevm/src/com.oracle.svm.core.graal.riscv64/src/com/oracle/svm/core/graal/riscv64/SubstrateRISCV64RegisterConfig.java b/substratevm/src/com.oracle.svm.core.graal.riscv64/src/com/oracle/svm/core/graal/riscv64/SubstrateRISCV64RegisterConfig.java index 987e7892a7dc..6cdf3162df80 100644 --- a/substratevm/src/com.oracle.svm.core.graal.riscv64/src/com/oracle/svm/core/graal/riscv64/SubstrateRISCV64RegisterConfig.java +++ b/substratevm/src/com.oracle.svm.core.graal.riscv64/src/com/oracle/svm/core/graal/riscv64/SubstrateRISCV64RegisterConfig.java @@ -230,8 +230,9 @@ public CallingConvention getCallingConvention(Type t, JavaType returnType, JavaT int currentFP = 0; /* - * We have to reserve a slot between return address and outgoing parameters for the deopt - * frame handle. Exception: calls to native methods. + * We have to reserve a slot between return address and outgoing parameters for the + * deoptimized frame (eager deoptimization), or the original return address (lazy + * deoptimization). Exception: calls to native methods. */ int currentStackOffset = (type.nativeABI() ? 
nativeParamsStackOffset : target.wordSize); diff --git a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/SubstrateDiagnostics.java b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/SubstrateDiagnostics.java index f6c6809c52e0..118a5de0dc32 100644 --- a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/SubstrateDiagnostics.java +++ b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/SubstrateDiagnostics.java @@ -695,7 +695,12 @@ public int maxInvocationCount() { @Override @RestrictHeapAccess(access = RestrictHeapAccess.Access.NO_ALLOCATION, reason = "Must not allocate while printing diagnostics.") public void printDiagnostics(Log log, ErrorContext context, int maxDiagnosticLevel, int invocationCount) { - log.string("DeoptStubPointer address: ").zhex(DeoptimizationSupport.getDeoptStubPointer()).newline().newline(); + log.string("EagerDeoptStub address: ").zhex(DeoptimizationSupport.getEagerDeoptStubPointer()).newline(); + if (Deoptimizer.Options.UseLazyDeopt.getValue()) { + log.string("LazyDeoptStubPrimitiveReturn address: ").zhex(DeoptimizationSupport.getLazyDeoptStubPrimitiveReturnPointer()).newline(); + log.string("LazyDeoptStubObjectReturn address: ").zhex(DeoptimizationSupport.getLazyDeoptStubObjectReturnPointer()).newline(); + } + log.newline(); } } diff --git a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/jvmti/headers/BooleanPointer.java b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/c/BooleanPointer.java similarity index 97% rename from substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/jvmti/headers/BooleanPointer.java rename to substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/c/BooleanPointer.java index b1764ce043b3..2a6de2f88516 100644 --- a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/jvmti/headers/BooleanPointer.java +++ b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/c/BooleanPointer.java @@ -22,7 +22,7 @@ * or visit www.oracle.com 
if you need additional information or have any * questions. */ -package com.oracle.svm.core.jvmti.headers; +package com.oracle.svm.core.c; import org.graalvm.nativeimage.c.struct.CPointerTo; import org.graalvm.word.PointerBase; diff --git a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/code/CodeInfo.java b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/code/CodeInfo.java index 4094497c4893..dd6fa9f7b678 100644 --- a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/code/CodeInfo.java +++ b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/code/CodeInfo.java @@ -66,36 +66,37 @@ public interface CodeInfo extends UntetheredCodeInfo { /** * This state is only a temporary state when the VM is at a safepoint. It indicates that no * activations are remaining and that the code is no longer needed (code is non-entrant) or no - * longer wanted (code has references to otherwise unreachable objects). The GC will invalidate - * and free this {@link CodeInfo} object during the current safepoint. It is crucial that the GC - * still visits all heap references that may be accessed while invalidating and freeing the - * {@link CodeInfo} object (i.e., all object fields). + * longer wanted (code has references to otherwise unreachable objects). The GC will remove this + * {@link CodeInfo} object from the code cache and free it during the current safepoint. It is + * crucial that the GC still visits all heap references that may be accessed while removing and + * freeing the {@link CodeInfo} object (i.e., all object fields). */ @DuplicatedInNativeCode // - int STATE_READY_FOR_INVALIDATION = STATE_NON_ENTRANT + 1; + int STATE_PENDING_REMOVAL_FROM_CODE_CACHE = STATE_NON_ENTRANT + 1; /** - * Indicates that this {@link CodeInfo} object was invalidated. The data will be freed by the GC - * once the tether object becomes unreachable. 
Until then, the GC must continue visiting all - * heap references, including code constants that are directly embedded into the machine code. + * Indicates that this {@link CodeInfo} object was removed from the code cache. The data will be + * freed by the GC once the tether object becomes unreachable. Until then, the GC must continue + * visiting all heap references, including code constants that are directly embedded into the + * machine code. */ @DuplicatedInNativeCode // - int STATE_INVALIDATED = STATE_READY_FOR_INVALIDATION + 1; + int STATE_REMOVED_FROM_CODE_CACHE = STATE_PENDING_REMOVAL_FROM_CODE_CACHE + 1; /** * This state is only a temporary state when the VM is at a safepoint. It indicates that a - * previously invalidated {@link CodeInfo} object is no longer reachable from the GC point of - * view. The GC will free the {@link CodeInfo} object during the current safepoint. It is - * crucial that the GC still visits all heap references that may be accessed while freeing the - * {@link CodeInfo} object (i.e., all object fields). + * {@link CodeInfo} object which has already been removed from the code cache is no longer + * reachable from the GC point of view. The GC will free the {@link CodeInfo} object during the + * current safepoint. It is crucial that the GC still visits all heap references that may be + * accessed while freeing the {@link CodeInfo} object (i.e., all object fields). */ @DuplicatedInNativeCode // - int STATE_UNREACHABLE = STATE_INVALIDATED + 1; + int STATE_PENDING_FREE = STATE_REMOVED_FROM_CODE_CACHE + 1; /** * Indicates that the {@link CodeInfo} object was already freed. This state should never be * seen. 
*/ @DuplicatedInNativeCode // - int STATE_FREED = STATE_UNREACHABLE + 1; + int STATE_FREED = STATE_PENDING_FREE + 1; } diff --git a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/code/CodeInfoAccess.java b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/code/CodeInfoAccess.java index d3eab8589ab2..db9ca4965620 100644 --- a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/code/CodeInfoAccess.java +++ b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/code/CodeInfoAccess.java @@ -155,12 +155,12 @@ public static String stateToString(int codeInfoState) { return "code constants live"; case CodeInfo.STATE_NON_ENTRANT: return "non-entrant"; - case CodeInfo.STATE_READY_FOR_INVALIDATION: - return "ready for invalidation"; - case CodeInfo.STATE_INVALIDATED: - return "invalidated"; - case CodeInfo.STATE_UNREACHABLE: - return "unreachable"; + case CodeInfo.STATE_PENDING_REMOVAL_FROM_CODE_CACHE: + return "pending removal from code cache"; + case CodeInfo.STATE_REMOVED_FROM_CODE_CACHE: + return "removed from code cache"; + case CodeInfo.STATE_PENDING_FREE: + return "pending free"; case CodeInfo.STATE_FREED: return "invalid (freed)"; default: diff --git a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/code/CodeInfoDecoder.java b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/code/CodeInfoDecoder.java index 4a6e99458239..b57e854d33ff 100644 --- a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/code/CodeInfoDecoder.java +++ b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/code/CodeInfoDecoder.java @@ -26,7 +26,9 @@ import static com.oracle.svm.core.Uninterruptible.CALLED_FROM_UNINTERRUPTIBLE_CODE; import static com.oracle.svm.core.util.VMError.shouldNotReachHereUnexpectedInput; +import static com.oracle.svm.core.deopt.Deoptimizer.Options.UseLazyDeopt; +import com.oracle.svm.core.deopt.DeoptimizationSupport; import org.graalvm.nativeimage.ImageSingletons; import 
org.graalvm.nativeimage.Platform; import org.graalvm.nativeimage.Platforms; @@ -224,6 +226,9 @@ static long lookupDeoptimizationEntrypoint(CodeInfo info, long method, long enco codeInfo.exceptionOffset = loadExceptionOffset(info, entryOffset, entryFlags); codeInfo.referenceMapIndex = loadReferenceMapIndex(info, entryOffset, entryFlags); codeInfo.frameInfo = loadFrameInfo(info, entryOffset, entryFlags, constantAccess); + if (UseLazyDeopt.getValue()) { + codeInfo.deoptReturnValueIsObject = loadDeoptReturnValueIsObject(info, entryOffset, entryFlags) != 0; + } assert codeInfo.frameInfo.isDeoptEntry() && codeInfo.frameInfo.getCaller() == null : "Deoptimization entry must not have inlined frames"; return entryIP; } @@ -276,6 +281,16 @@ static int loadEntryFlags(CodeInfo info, long curOffset) { return NonmovableByteArrayReader.getU1(CodeInfoAccess.getCodeInfoEncodings(info), curOffset); } + private static int loadDeoptReturnValueIsObject(CodeInfo info, long entryOffset, int entryFlags) { + /* + * The byte which encodes whether a return value is an object is stored at the end of the + * codeInfo and is only present for deopt entry points if lazy deoptimization is enabled. 
+ */ + assert UseLazyDeopt.getValue() : "must have lazy deoptimization enabled to have this information in the codeinfo"; + long rvoOffset = getU1(AFTER_FI_OFFSET, entryFlags); + return NonmovableByteArrayReader.getU1(CodeInfoAccess.getCodeInfoEncodings(info), entryOffset + rvoOffset); + } + public static final int INVALID_SIZE_ENCODING = 0; private static final int INVALID_FRAME_INFO_ENTRY_OFFSET = -1; @@ -548,23 +563,28 @@ private static boolean endOfTable(long entryIP) { private static final byte[] EX_OFFSET; private static final byte[] RM_OFFSET; private static final byte[] FI_OFFSET; - private static final byte[] MEM_SIZE; + private static final byte[] AFTER_FI_OFFSET; static { assert TOTAL_BITS <= Byte.SIZE; int maxFlag = 1 << TOTAL_BITS; + /* + * When we enable useLazyDeopt, we have an extra byte in the codeinfo, which keeps track of + * whether each infopoint is at a call that returns an object. This byte is stored after the + * FI (frameInfo) section. It is accounted for by the advanceOffset() method. 
+ */ IP_OFFSET = 1; FS_OFFSET = 2; EX_OFFSET = new byte[maxFlag]; RM_OFFSET = new byte[maxFlag]; FI_OFFSET = new byte[maxFlag]; - MEM_SIZE = new byte[maxFlag]; + AFTER_FI_OFFSET = new byte[maxFlag]; for (int i = 0; i < maxFlag; i++) { EX_OFFSET[i] = TypeConversion.asU1(FS_OFFSET + FS_MEM_SIZE[extractFS(i)]); RM_OFFSET[i] = TypeConversion.asU1(EX_OFFSET[i] + EX_MEM_SIZE[extractEX(i)]); FI_OFFSET[i] = TypeConversion.asU1(RM_OFFSET[i] + RM_MEM_SIZE[extractRM(i)]); - MEM_SIZE[i] = TypeConversion.asU1(FI_OFFSET[i] + FI_MEM_SIZE[extractFI(i)]); + AFTER_FI_OFFSET[i] = TypeConversion.asU1(FI_OFFSET[i] + FI_MEM_SIZE[extractFI(i)]); } } @@ -626,7 +646,11 @@ private static long offsetFI(long entryOffset, int entryFlags) { @Uninterruptible(reason = "Called from uninterruptible code.", mayBeInlined = true) private static long advanceOffset(long entryOffset, int entryFlags) { counters().advanceOffset.inc(); - return entryOffset + getU1(MEM_SIZE, entryFlags); + long returnValueIsObjectSize = 0; + if (DeoptimizationSupport.enabled() && UseLazyDeopt.getValue() && extractFI(entryFlags) == FI_DEOPT_ENTRY_INDEX_S4) { + returnValueIsObjectSize = Byte.BYTES; + } + return entryOffset + getU1(AFTER_FI_OFFSET, entryFlags) + returnValueIsObjectSize; } @Fold diff --git a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/code/CodeInfoEncoder.java b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/code/CodeInfoEncoder.java index cc9874230e81..8ec3d193d7a0 100644 --- a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/code/CodeInfoEncoder.java +++ b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/code/CodeInfoEncoder.java @@ -25,6 +25,7 @@ package com.oracle.svm.core.code; import static com.oracle.svm.core.util.VMError.shouldNotReachHereUnexpectedInput; +import static com.oracle.svm.core.deopt.Deoptimizer.Options.UseLazyDeopt; import java.util.BitSet; import java.util.EnumSet; @@ -34,6 +35,8 @@ import java.util.function.IntFunction; import 
java.util.stream.Stream; +import com.oracle.svm.core.deopt.DeoptimizationSupport; +import jdk.vm.ci.meta.JavaType; import jdk.graal.compiler.word.Word; import org.graalvm.collections.EconomicSet; import org.graalvm.collections.Equivalence; @@ -306,6 +309,7 @@ static class IPData { protected long referenceMapIndex; protected FrameInfoEncoder.FrameData frameData; protected IPData next; + protected boolean deoptReturnValueIsObject; } private final TreeMap entries; @@ -392,6 +396,11 @@ public void addMethod(SharedMethod method, CompilationResult compilation, int co assert entry.referenceMap == null && (entry.frameData == null || entry.frameData.isDefaultFrameData) : entry; entry.referenceMap = (ReferenceMapEncoder.Input) debugInfo.getReferenceMap(); entry.frameData = frameInfoEncoder.addDebugInfo(method, compilation, infopoint, totalFrameSize); + if (DeoptimizationSupport.enabled() && UseLazyDeopt.getValue() && entry.frameData.frame.isDeoptEntry && infopoint instanceof Call call && call.target != null) { + ResolvedJavaMethod invokeTarget = (ResolvedJavaMethod) call.target; + JavaType returnType = invokeTarget.getSignature().getReturnType(null); + entry.deoptReturnValueIsObject = ((SharedType) returnType).getStorageKind().isObject(); + } if (entry.frameData != null && entry.frameData.frame.isDeoptEntry) { BytecodeFrame frame = debugInfo.frame(); long encodedBci = FrameInfoEncoder.encodeBci(frame.getBCI(), FrameState.StackState.of(frame)); @@ -505,6 +514,14 @@ private void encodeIPData() { writeExceptionOffset(encodingBuffer, data, entryFlags); writeReferenceMapIndex(encodingBuffer, data, entryFlags); writeEncodedFrameInfo(encodingBuffer, data, entryFlags); + + if (DeoptimizationSupport.enabled() && UseLazyDeopt.getValue() && data.frameData != null && data.frameData.frame.isDeoptEntry) { + /* + * When we enable useLazyDeopt, we have an extra byte in the codeinfo, which keeps + * track for each deopt entry point whether it is at a call that returns an object. 
+ */ + encodingBuffer.putU1(data.deoptReturnValueIsObject ? 1 : 0); + } } codeInfoIndex = NonmovableArrays.createByteArray(TypeConversion.asU4(indexBuffer.getBytesWritten()), NmtCategory.Code); diff --git a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/code/CodeInfoQueryResult.java b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/code/CodeInfoQueryResult.java index 0dcbbc897d95..b9f663023ed2 100644 --- a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/code/CodeInfoQueryResult.java +++ b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/code/CodeInfoQueryResult.java @@ -24,8 +24,9 @@ */ package com.oracle.svm.core.code; -import org.graalvm.nativeimage.c.function.CodePointer; +import static com.oracle.svm.core.deopt.Deoptimizer.Options.UseLazyDeopt; +import org.graalvm.nativeimage.c.function.CodePointer; import com.oracle.svm.core.Uninterruptible; import com.oracle.svm.core.heap.CodeReferenceMapDecoder; import com.oracle.svm.core.heap.CodeReferenceMapEncoder; @@ -56,6 +57,11 @@ public class CodeInfoQueryResult { protected long encodedFrameSize; protected long exceptionOffset; protected long referenceMapIndex; + /** + * deoptReturnValueIsObject is only set for deopt entry points and only if + * {@link com.oracle.svm.core.deopt.Deoptimizer.Options#UseLazyDeopt UseLazyDeopt} is enabled. + */ + protected boolean deoptReturnValueIsObject; protected FrameInfoQueryResult frameInfo; /** @@ -122,6 +128,11 @@ public long getReferenceMapIndex() { return referenceMapIndex; } + public boolean getDeoptReturnValueIsObject() { + assert UseLazyDeopt.getValue(); + return deoptReturnValueIsObject; + } + /** * Stack frame information used, e.g., for deoptimization and printing of stack frames in debug * builds. 
diff --git a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/code/CodeInfoTable.java b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/code/CodeInfoTable.java index 458a250a6bfd..92b934b81847 100644 --- a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/code/CodeInfoTable.java +++ b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/code/CodeInfoTable.java @@ -59,6 +59,8 @@ import jdk.graal.compiler.options.Option; import jdk.vm.ci.code.InstalledCode; +import static com.oracle.svm.core.deopt.Deoptimizer.Options.UseLazyDeopt; + /** * Provides the main entry points to look up metadata for code, either {@link #getImageCodeCache() * ahead-of-time compiled code in the native image} or {@link CodeInfoTable#getRuntimeCodeCache() @@ -224,7 +226,12 @@ private static void invalidateInstalledCodeAtSafepoint(SubstrateInstalledCode in if (CodeInfoAccess.isAlive(info)) { invalidateCodeAtSafepoint0(info); } - assert CodeInfoAccess.getState(info) == CodeInfo.STATE_INVALIDATED; + // If lazy deoptimization is enabled, the CodeInfo will not be removed immediately. 
+ if (UseLazyDeopt.getValue()) { + assert CodeInfoAccess.getState(info) == CodeInfo.STATE_NON_ENTRANT; + } else { + assert CodeInfoAccess.getState(info) == CodeInfo.STATE_REMOVED_FROM_CODE_CACHE; + } } finally { CodeInfoAccess.releaseTether(untetheredInfo, tether); } diff --git a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/code/RuntimeCodeCache.java b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/code/RuntimeCodeCache.java index e9d28c101910..4f5145b7486a 100644 --- a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/code/RuntimeCodeCache.java +++ b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/code/RuntimeCodeCache.java @@ -26,6 +26,7 @@ import static com.oracle.svm.core.option.RuntimeOptionKey.RuntimeOptionKeyFlag.RelevantForCompilationIsolates; import static com.oracle.svm.core.snippets.KnownIntrinsics.readCallerStackPointer; +import static com.oracle.svm.core.deopt.Deoptimizer.Options.UseLazyDeopt; import jdk.graal.compiler.word.Word; import org.graalvm.collections.EconomicMap; @@ -211,14 +212,20 @@ protected void invalidateMethod(CodeInfo info) { */ Deoptimizer.deoptimizeInRange(CodeInfoAccess.getCodeStart(info), CodeInfoAccess.getCodeEnd(info), false); - finishInvalidation(info); + boolean removeNow = !UseLazyDeopt.getValue(); + continueInvalidation(info, removeNow); } protected void invalidateNonStackMethod(CodeInfo info) { assert VMOperation.isGCInProgress() : "may only be called by the GC"; prepareInvalidation(info); assert codeNotOnStackVerifier.verify(info); - finishInvalidation(info); + + /* + * This method is called by the GC, so we must call continueInvalidation with removeNow + * being true, so that code is actually removed from the code cache. 
+ */ + continueInvalidation(info, true); } private void prepareInvalidation(CodeInfo info) { @@ -237,27 +244,39 @@ private void prepareInvalidation(CodeInfo info) { } } - private void finishInvalidation(CodeInfo info) { + private void continueInvalidation(CodeInfo info, boolean removeNow) { InstalledCodeObserverSupport.removeObservers(RuntimeCodeInfoAccess.getCodeObserverHandles(info)); - finishInvalidation0(info); - RuntimeCodeInfoHistory.singleton().logInvalidate(info); + if (removeNow) { + /* If removeNow, then the CodeInfo is immediately removed from the code cache. */ + removeFromCodeCache(info); + RuntimeCodeInfoHistory.singleton().logInvalidate(info); + } else { + /* + * Otherwise, we leave the CodeInfo to be collected by GC after no stack activations are + * remaining by marking it as non-entrant. Note that the corresponding InstalledCode + * object is fully invalidated at that point (this is a major difference to normal + * non-entrant code, where the InstalledCode object remains valid). + */ + if (CodeInfoAccess.getState(info) < CodeInfo.STATE_NON_ENTRANT) { + CodeInfoAccess.setState(info, CodeInfo.STATE_NON_ENTRANT); + RuntimeCodeInfoHistory.singleton().logInvalidate(info); + } + } } + /** + * Remove info entry from our table. This should only be called when the CodeInfo is no longer + * on the stack and cannot be invoked anymore + */ @Uninterruptible(reason = "Modifying code tables that are used by the GC") - private void finishInvalidation0(CodeInfo info) { - /* - * Now it is guaranteed that the InstalledCode is not on the stack and cannot be invoked - * anymore, so we can free the code and all metadata. - */ - - /* Remove info entry from our table. 
*/ + private void removeFromCodeCache(CodeInfo info) { int idx = binarySearch(codeInfos, 0, numCodeInfos, CodeInfoAccess.getCodeStart(info)); assert idx >= 0 : "info must be in table"; NonmovableArrays.arraycopy(codeInfos, idx + 1, codeInfos, idx, numCodeInfos - (idx + 1)); numCodeInfos--; NonmovableArrays.setWord(codeInfos, numCodeInfos, Word.nullPointer()); - RuntimeCodeInfoAccess.markAsInvalidated(info); + RuntimeCodeInfoAccess.markAsRemovedFromCodeCache(info); assert verifyTable(); } diff --git a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/code/RuntimeCodeInfoAccess.java b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/code/RuntimeCodeInfoAccess.java index 1268cc368d53..693a33614d11 100644 --- a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/code/RuntimeCodeInfoAccess.java +++ b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/code/RuntimeCodeInfoAccess.java @@ -219,11 +219,11 @@ public static CodeInfo allocateMethodInfo(NonmovableObjectArray objectDa } @Uninterruptible(reason = "Prevent the GC from running - otherwise, it could accidentally visit the freed memory.") - static void markAsInvalidated(CodeInfo info) { + static void markAsRemovedFromCodeCache(CodeInfo info) { CodeInfoImpl impl = cast(info); - assert CodeInfoAccess.isAliveState(impl.getState()) || impl.getState() == CodeInfo.STATE_READY_FOR_INVALIDATION : "unexpected state (probably already released)"; + assert CodeInfoAccess.isAliveState(impl.getState()) || impl.getState() == CodeInfo.STATE_PENDING_REMOVAL_FROM_CODE_CACHE : "unexpected state (probably already released)"; /* We can't free any data because only the GC is allowed to free CodeInfo data. 
*/ - CodeInfoAccess.setState(info, CodeInfo.STATE_INVALIDATED); + CodeInfoAccess.setState(info, CodeInfo.STATE_REMOVED_FROM_CODE_CACHE); } public static CodePointer allocateCodeMemory(UnsignedWord size) { diff --git a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/deopt/DeoptState.java b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/deopt/DeoptState.java index 3c78353adb54..3fd9665bb982 100644 --- a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/deopt/DeoptState.java +++ b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/deopt/DeoptState.java @@ -28,8 +28,6 @@ import com.oracle.svm.core.code.FrameInfoQueryResult; import com.oracle.svm.core.config.ConfigurationValues; import com.oracle.svm.core.config.ObjectLayout; -import com.oracle.svm.core.heap.GCCause; -import com.oracle.svm.core.heap.Heap; import com.oracle.svm.core.heap.ReferenceAccess; import com.oracle.svm.core.hub.DynamicHub; import com.oracle.svm.core.hub.LayoutEncoding; @@ -162,9 +160,7 @@ private Object materializeObject(int virtualObjectId, FrameInfoQueryResult sourc } materializedObjects[virtualObjectId] = obj; - if (Deoptimizer.testGCinDeoptimizer) { - Heap.getHeap().getGC().collect(GCCause.TestGCInDeoptimizer); - } + Deoptimizer.maybeTestGC(); while (curIdx < encodings.length) { FrameInfoQueryResult.ValueInfo value = encodings[curIdx]; diff --git a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/deopt/DeoptimizationSupport.java b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/deopt/DeoptimizationSupport.java index 7b4f2c199c83..1d4651f62948 100644 --- a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/deopt/DeoptimizationSupport.java +++ b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/deopt/DeoptimizationSupport.java @@ -35,9 +35,13 @@ import com.oracle.svm.core.Uninterruptible; import com.oracle.svm.core.heap.UnknownPrimitiveField; +import static 
com.oracle.svm.core.Uninterruptible.CALLED_FROM_UNINTERRUPTIBLE_CODE; + public class DeoptimizationSupport { - @UnknownPrimitiveField(availability = ReadyForCompilation.class) private CFunctionPointer deoptStubPointer; + @UnknownPrimitiveField(availability = ReadyForCompilation.class) private CFunctionPointer eagerDeoptStubPointer; + @UnknownPrimitiveField(availability = ReadyForCompilation.class) private CFunctionPointer lazyDeoptStubPrimitiveReturnPointer; + @UnknownPrimitiveField(availability = ReadyForCompilation.class) private CFunctionPointer lazyDeoptStubObjectReturnPointer; @Platforms(Platform.HOSTED_ONLY.class) public DeoptimizationSupport() { @@ -61,21 +65,45 @@ static DeoptimizationSupport get() { return ImageSingletons.lookup(DeoptimizationSupport.class); } - /** - * Initializes the pointer to the code of {@link Deoptimizer#deoptStub}. - */ @Platforms(Platform.HOSTED_ONLY.class) - public static void setDeoptStubPointer(CFunctionPointer deoptStub) { - assert get().deoptStubPointer == null : "multiple deopt stub methods registered"; - get().deoptStubPointer = deoptStub; + public static void setEagerDeoptStubPointer(CFunctionPointer ptr) { + assert get().eagerDeoptStubPointer == null : "multiple eagerDeoptStub methods registered"; + get().eagerDeoptStubPointer = ptr; } - /** - * Returns a pointer to the code of {@link Deoptimizer#deoptStub}. 
- */ - @Uninterruptible(reason = "Called from uninterruptible code.", mayBeInlined = true) - public static CFunctionPointer getDeoptStubPointer() { - CFunctionPointer ptr = get().deoptStubPointer; + @Platforms(Platform.HOSTED_ONLY.class) + public static void setLazyDeoptStubPrimitiveReturnPointer(CFunctionPointer ptr) { + assert get().lazyDeoptStubPrimitiveReturnPointer == null : "multiple lazyDeoptStubPrimitiveReturn methods registered"; + assert Deoptimizer.Options.UseLazyDeopt.getValue() : "lazy deoptimization not enabled"; + get().lazyDeoptStubPrimitiveReturnPointer = ptr; + } + + @Platforms(Platform.HOSTED_ONLY.class) + public static void setLazyDeoptStubObjectReturnPointer(CFunctionPointer ptr) { + assert get().lazyDeoptStubObjectReturnPointer == null : "multiple lazyDeoptStubObjectReturn methods registered"; + assert Deoptimizer.Options.UseLazyDeopt.getValue() : "lazy deoptimization not enabled"; + get().lazyDeoptStubObjectReturnPointer = ptr; + } + + @Uninterruptible(reason = CALLED_FROM_UNINTERRUPTIBLE_CODE, mayBeInlined = true) + public static CFunctionPointer getEagerDeoptStubPointer() { + CFunctionPointer ptr = get().eagerDeoptStubPointer; + assert ptr.rawValue() != 0; + return ptr; + } + + @Uninterruptible(reason = CALLED_FROM_UNINTERRUPTIBLE_CODE, mayBeInlined = true) + public static CFunctionPointer getLazyDeoptStubPrimitiveReturnPointer() { + assert Deoptimizer.Options.UseLazyDeopt.getValue() : "lazy deoptimization not enabled"; + CFunctionPointer ptr = get().lazyDeoptStubPrimitiveReturnPointer; + assert ptr.rawValue() != 0; + return ptr; + } + + @Uninterruptible(reason = CALLED_FROM_UNINTERRUPTIBLE_CODE, mayBeInlined = true) + public static CFunctionPointer getLazyDeoptStubObjectReturnPointer() { + assert Deoptimizer.Options.UseLazyDeopt.getValue() : "lazy deoptimization not enabled"; + CFunctionPointer ptr = get().lazyDeoptStubObjectReturnPointer; assert ptr.rawValue() != 0; return ptr; } diff --git 
a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/deopt/DeoptimizedFrame.java b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/deopt/DeoptimizedFrame.java index 308e5c64078c..a0ed358bd5a6 100644 --- a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/deopt/DeoptimizedFrame.java +++ b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/deopt/DeoptimizedFrame.java @@ -51,6 +51,8 @@ import jdk.vm.ci.code.InstalledCode; import jdk.vm.ci.meta.JavaConstant; +import static com.oracle.svm.core.Uninterruptible.CALLED_FROM_UNINTERRUPTIBLE_CODE; + /** * The handle to a deoptimized frame. It contains all stack entries which are written to the frame * of the deopt target method(s). For details see {@link Deoptimizer}. @@ -268,9 +270,9 @@ static class RelockObjectData { } protected static DeoptimizedFrame factory(int targetContentSize, long sourceEncodedFrameSize, SubstrateInstalledCode sourceInstalledCode, VirtualFrame topFrame, - RelockObjectData[] relockedObjects, CodePointer sourcePC, boolean rethrowException) { + RelockObjectData[] relockedObjects, CodePointer sourcePC, boolean rethrowException, boolean isEagerDeopt) { final TargetContent targetContentBuffer = new TargetContent(targetContentSize, ConfigurationValues.getTarget().arch.getByteOrder()); - return new DeoptimizedFrame(sourceEncodedFrameSize, sourceInstalledCode, topFrame, targetContentBuffer, relockedObjects, sourcePC, rethrowException); + return new DeoptimizedFrame(sourceEncodedFrameSize, sourceInstalledCode, topFrame, targetContentBuffer, relockedObjects, sourcePC, rethrowException, isEagerDeopt); } private final long sourceEncodedFrameSize; @@ -290,16 +292,17 @@ protected static DeoptimizedFrame factory(int targetContentSize, long sourceEnco private final boolean rethrowException; private DeoptimizedFrame(long sourceEncodedFrameSize, SubstrateInstalledCode sourceInstalledCode, VirtualFrame topFrame, Deoptimizer.TargetContent targetContent, - RelockObjectData[] 
relockedObjects, CodePointer sourcePC, boolean rethrowException) { + RelockObjectData[] relockedObjects, CodePointer sourcePC, boolean rethrowException, boolean isEagerDeopt) { this.sourceEncodedFrameSize = sourceEncodedFrameSize; this.topFrame = topFrame; this.targetContent = targetContent; this.relockedObjects = relockedObjects; this.sourceInstalledCode = sourceInstalledCode == null ? null : new WeakReference<>(sourceInstalledCode); this.sourcePC = sourcePC; - this.pin = PinnedObject.create(this); + // We assume that the frame will be pinned if and only if we are deoptimizing eagerly + this.pin = isEagerDeopt ? PinnedObject.create(this) : null; StringBuilderLog sbl = new StringBuilderLog(); - sbl.string("deoptStub: completed for DeoptimizedFrame at ").hex(pin.addressOfObject()).newline(); + sbl.string("deoptStub: completed ").string(isEagerDeopt ? "eagerly" : "lazily").string(" for DeoptimizedFrame at ").hex(Word.objectToUntrackedPointer(this)).newline(); this.completedMessage = sbl.getResult().toCharArray(); this.rethrowException = rethrowException; } @@ -346,12 +349,15 @@ protected Deoptimizer.TargetContent getTargetContent() { } /** - * Returns the {@link PinnedObject} that ensures that this {@link DeoptimizedFrame} is not moved - * by the GC. The {@link DeoptimizedFrame} is accessed during GC when walking the stack. + * Releases the {@link PinnedObject} that ensures that this {@link DeoptimizedFrame} is not + * moved by the GC after eager deoptimization. The {@link DeoptimizedFrame} is accessed during + * GC when walking the stack after being installed during eager deoptimization. For lazy + * deoptimization, the pin is not needed, and in that case this method must not be called. 
*/ - @Uninterruptible(reason = "Called from uninterruptible code.", mayBeInlined = true) - public PinnedObject getPin() { - return pin; + @Uninterruptible(reason = CALLED_FROM_UNINTERRUPTIBLE_CODE, mayBeInlined = true) + public void unpin() { + assert pin != null; + pin.close(); } /** @@ -361,7 +367,7 @@ public CodePointer getSourcePC() { return sourcePC; } - @Uninterruptible(reason = "Called from Deoptimizer.deoptStub.") + @Uninterruptible(reason = CALLED_FROM_UNINTERRUPTIBLE_CODE, mayBeInlined = true) char[] getCompletedMessage() { return completedMessage; } diff --git a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/deopt/Deoptimizer.java b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/deopt/Deoptimizer.java index 5399848bdd17..e5e476e0134b 100644 --- a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/deopt/Deoptimizer.java +++ b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/deopt/Deoptimizer.java @@ -33,12 +33,14 @@ import java.nio.ByteOrder; import java.util.ArrayList; +import com.oracle.svm.core.option.HostedOptionKey; +import com.oracle.svm.core.snippets.ExceptionUnwind; +import com.oracle.svm.core.stack.StackOverflowCheck; import org.graalvm.nativeimage.CurrentIsolate; import org.graalvm.nativeimage.IsolateThread; import org.graalvm.nativeimage.Platform; import org.graalvm.nativeimage.c.function.CodePointer; import org.graalvm.word.Pointer; -import org.graalvm.word.PointerBase; import org.graalvm.word.UnsignedWord; import org.graalvm.word.WordBase; @@ -102,29 +104,38 @@ * the call stack. A method is deoptimizable if {@link FrameInfoQueryResult deoptimization * information} is available. *

- * Deoptimization is done in two steps: + * Deoptimization can happen eagerly or lazily. For eager deoptimization, a {@link DeoptimizedFrame} + * is constructed immediately and pinned, whereas for lazy deoptimization, the + * {@link DeoptimizedFrame} is not constructed until immediately before it is installed (and + * therefore does not need to be pinned). + *

+ * The stack slot at SP[0] is reserved for deoptimization, and can be used freely by lazy and eager + * deoptimization. + *

+ * Eager Deoptimization is done in two steps: *

    *
  1. A call to {@link #deoptimizeInRange} walks the stack and for each method to deoptimize it * builds a {@link DeoptimizedFrame}. This handle contains all constants and materialized objects - * which are needed to build the deoptimization target frames. It is stored in the stack slot right - * above the return address. The return address (to the deoptimized method) is replaced by a pointer - * to {@link #deoptStub}. + * which are needed to build the deoptimization target frames. It is stored at SP[0] (directly above + * the return address). The return address (to the deoptimized method) is replaced by a pointer to + * {@link #eagerDeoptStub}. * *
    - *    :                                :
    - *    |                                |
    - *    |                                |
    - *    +--------------------------------+   frame of the
    - *    | outgoing stack parameters      |   deopmethod
    - *    +--------------------------------+
    - *    | pointer to DeoptimizedFrame    |
    - *    +--------------------------------+---------
    - *    | pointer to deoptStub           |   return address
    - *    +--------------------------------+---------
    - *    |                                |
    - *    |                                |   frame of {@link #deoptimizeInRange} or
    - *    |                                |   a method which called it
    - *    :     ...                        :
    + *    top of stack (lowest address)
    + *
    + *    | ...                                   |
    + *    +---------------------------------------+-------------
    + *    |                                       | frame of the
    + *    |---------------------------------------| callee of
    + *    | return address (-> eagerDeoptStub)    | deopt method
    + *    +---------------------------------------+-------------
    + *    | pointer to DeoptimizedFrame           | frame of the
    + *    |---------------------------------------| deopt method
    + *    | outgoing stack parameters             |
    + *    |---------------------------------------|
    + *    |                                       |
    + *    +---------------------------------------+-------------
    + *    | ...                                   |
      * 
    *

    * From now on, the frame of the deoptimized method is no longer valid and the GC will ignore it. @@ -133,19 +144,59 @@ * {@link DeoptimizedFrame}, which contains all objects which are needed by the deoptimization * targets. *

    - * There is one exception: outgoing parameters. Outgoing parameters of a deoptimized method may - * still be accessed by a called method, even after the first step of deoptimization is done. - * Therefore the calling convention must make sure that there is a free stack slot for the - * {@link DeoptimizedFrame} between the outgoing parameters and the return address slot. + * There is one exception: outgoing primitive parameters of a deoptimized method may still be + * accessed by a called method, even after the first step of eager deoptimization is done. Note that + * this does not apply to outgoing object parameters as those are always copied to registers at the + * beginning of the called method to avoid problems with the GC.

  2. *

    - * Exception from the exception: outgoing object parameters are always copied to registers at the - * beginning of the called method. Therefore we don't have to worry about GC these parameters. - *

    - *

  3. Now when a called method will return to a deoptimized method, the {@link #deoptStub} will be + *
  4. Now when a called method will return to a deoptimized method, the eager deopt stub will be * called instead. It reads the {@link DeoptimizedFrame} handle and replaces the deoptimized - * method's frame with the frame(s) of the deopt target method(s). Note that the deopt stub is + * method's frame with the frame(s) of the deopt target method(s). Note that the eager deopt stub is * completely allocation free.
  5. *
+ * + *

+ * Lazy Deoptimization is also done in two steps: + *

    + *
  1. During the first step, we patch the frame's return address to the return address of a lazy + * deopt stub. Depending on whether the method being deoptimized returns an object or a primitive, + * this return address either points to {@link #lazyDeoptStubObjectReturn} or + * {@link #lazyDeoptStubPrimitiveReturn}. The stack slot that is used to store the + * {@link DeoptimizedFrame} in eager deoptimization is instead used to store the original return + * address, which points somewhere into the deopt source method. + * + *
    + *    top of stack (lowest address)
    + *
    + *    | ...                                       |
    + *    +-------------------------------------------+-------------
    + *    |                                           | frame of the
    + *    |-------------------------------------------| callee of
    + *    | return address (points to lazyDeoptStub)  | deopt method
    + *    +-------------------------------------------+-------------
    + *    | original return address                   | frame of the
    + *    |-------------------------------------------| deopt method
    + *    | outgoing stack parameters                 |
    + *    |-------------------------------------------|
    + *    |                                           |
    + *    +-------------------------------------------+-------------
    + *    | ...                                       |
    + * 
    + * + * Stack walks and GC will now visit this frame that is pending lazy deoptimization as if it was a + * normal stack frame, with the only difference being that the original return address is stored in + * a different slot.
  2. + *
  3. + *

    + * When a method returns to this method pending lazy deoptimization, it instead calls one of the + * lazy deopt stubs, which leads to {@link #lazyDeoptStubCore}. This method performs all the + * necessary operations to construct a {@link DeoptimizedFrame} just like the first step of eager + * deoptimization. The process of constructing the frame is interruptible and involves allocation, + * therefore if {@code gpReturnValue} contains an object reference, its raw value must be converted + * into a tracked object reference so that the GC is aware of it. + *

    + * The frame is then copied onto the stack in {@link #rewriteStackStub}.

  4. + *
*/ public final class Deoptimizer { private static final int MAX_DEOPTIMIZATION_EVENT_PRINT_LENGTH = 1000; @@ -215,6 +266,18 @@ public static class Options { @Option(help = "Print verbose logging information for every deoptimization")// public static final RuntimeOptionKey TraceDeoptimizationDetails = new RuntimeOptionKey<>(false); + + /** + * Enables lazy deoptimization. If not enabled, then all calls to the lazy deoptimization + * methods are handled eagerly. + * + * Enabling this option adds 1 byte to the CodeInfo of each infopoint for deopt target + * methods, which is used to keep track of whether each infopoint is a call where an object + * is returned. This determines whether {@link #lazyDeoptStubCore} treats + * {@code gpReturnValue} as an object reference. + */ + @Option(help = "Enables delayed deoptimization of runtime-compiled code. This slightly enlarges code metadata.")// + public static final HostedOptionKey UseLazyDeopt = new HostedOptionKey<>(false); } /** @@ -223,19 +286,52 @@ public static class Options { */ public static boolean testGCinDeoptimizer = false; + /** + * If true, then we call eager deoptimization from within {@link #lazyDeoptStubCore}, which + * triggers a fatal error. This is only set to true for testing. + */ + public static boolean testEagerDeoptInLazyDeoptFatalError = false; + + public static void maybeTestGC() { + if (testGCinDeoptimizer) { + Heap.getHeap().getGC().collect(GCCause.TestGCInDeoptimizer); + } + } + + private static void maybeTestEagerDeoptInLazyDeoptFatalError(Deoptimizer deoptimizer, CodePointer pc) { + if (testEagerDeoptInLazyDeoptFatalError) { + deoptimizer.deoptSourceFrameEagerly(pc, false); + } + } + + /** Returns true if the frame has been eagerly or lazily deoptimized. 
*/ @Uninterruptible(reason = CALLED_FROM_UNINTERRUPTIBLE_CODE, mayBeInlined = true) - public static DeoptimizedFrame checkDeoptimized(JavaFrame frame) { + public static boolean checkIsDeoptimized(JavaFrame frame) { + return checkLazyDeoptimized(frame) || checkEagerDeoptimized(frame) != null; + } + + /** + * Returns the DeoptimizedFrame object installed during eager deoptimization, or null if the + * frame was not eagerly deoptimized. + */ + @Uninterruptible(reason = CALLED_FROM_UNINTERRUPTIBLE_CODE, mayBeInlined = true) + public static DeoptimizedFrame checkEagerDeoptimized(JavaFrame frame) { if (DeoptimizationSupport.enabled()) { - return checkDeoptimized0(frame.getSP(), frame.getIP()); + return checkEagerDeoptimized0(frame.getSP(), frame.getIP()); } return null; } + /** + * Returns the DeoptimizedFrame object installed during eager deoptimization, or null if the + * frame was not eagerly deoptimized. This method must not be called if the return address is + * stored in a native frame, since we do not control the layout of native frames. + */ @Uninterruptible(reason = CALLED_FROM_UNINTERRUPTIBLE_CODE, mayBeInlined = true) - public static DeoptimizedFrame checkDeoptimized(IsolateThread thread, Pointer sp) { + public static DeoptimizedFrame checkEagerDeoptimized(IsolateThread thread, Pointer sp) { if (DeoptimizationSupport.enabled()) { CodePointer ip = FrameAccess.singleton().readReturnAddress(thread, sp); - return checkDeoptimized0(sp, ip); + return checkEagerDeoptimized0(sp, ip); } return null; } @@ -245,10 +341,10 @@ public static DeoptimizedFrame checkDeoptimized(IsolateThread thread, Pointer sp * returns the {@link DeoptimizedFrame} in that case. */ @Uninterruptible(reason = CALLED_FROM_UNINTERRUPTIBLE_CODE, mayBeInlined = true) - private static DeoptimizedFrame checkDeoptimized0(Pointer sp, CodePointer ip) { - /* A frame is deoptimized when the return address was patched to the deoptStub. 
*/ - if (ip.equal(DeoptimizationSupport.getDeoptStubPointer())) { - /* The DeoptimizedFrame instance is stored above the return address. */ + private static DeoptimizedFrame checkEagerDeoptimized0(Pointer sp, CodePointer ip) { + /* A frame was eagerly deoptimized if the return address was patched to the deoptStub. */ + if (ip.equal(DeoptimizationSupport.getEagerDeoptStubPointer())) { + /* The DeoptimizedFrame instance is stored above the return address, at sp[0]. */ DeoptimizedFrame result = (DeoptimizedFrame) ReferenceAccess.singleton().readObjectAt(sp, true); if (result == null) { throw checkDeoptimizedError(sp); @@ -258,6 +354,51 @@ private static DeoptimizedFrame checkDeoptimized0(Pointer sp, CodePointer ip) { return null; } + /** + * Checks whether a {@link JavaFrame} is lazily deoptimized. + */ + @Uninterruptible(reason = CALLED_FROM_UNINTERRUPTIBLE_CODE, mayBeInlined = true) + public static boolean checkLazyDeoptimized(JavaFrame frame) { + if (DeoptimizationSupport.enabled() && Options.UseLazyDeopt.getValue()) { + return frame.getIsPendingLazyDeopt(); + } + return false; + } + + /** + * Checks whether a frame identified by the stack pointer is lazily deoptimized. This must not + * be called if the return address is stored in a native frame, since we do not control the + * layout of native frames. + */ + @Uninterruptible(reason = CALLED_FROM_UNINTERRUPTIBLE_CODE, mayBeInlined = true) + public static boolean checkLazyDeoptimized(IsolateThread thread, Pointer sp) { + if (DeoptimizationSupport.enabled() && Options.UseLazyDeopt.getValue()) { + CodePointer ip = FrameAccess.singleton().readReturnAddress(thread, sp); + return checkLazyDeoptimized0(ip); + } + return false; + } + + /** + * Checks whether a return address is equal to one of the lazy deopt stubs. 
+ */ + @Uninterruptible(reason = CALLED_FROM_UNINTERRUPTIBLE_CODE, mayBeInlined = true) + public static boolean checkLazyDeoptimized(CodePointer ip) { + if (DeoptimizationSupport.enabled() && Options.UseLazyDeopt.getValue()) { + return checkLazyDeoptimized0(ip); + } + return false; + } + + /** + * Checks whether a return address is equal to one of the lazy deopt stubs. + */ + @Uninterruptible(reason = CALLED_FROM_UNINTERRUPTIBLE_CODE, mayBeInlined = true) + private static boolean checkLazyDeoptimized0(CodePointer ip) { + assert Options.UseLazyDeopt.getValue(); + return ip.equal(DeoptimizationSupport.getLazyDeoptStubPrimitiveReturnPointer()) || ip.equal(DeoptimizationSupport.getLazyDeoptStubObjectReturnPointer()); + } + @Uninterruptible(reason = "Switch to interruptible code and report a fatal error.", calleeMustBe = false) private static RuntimeException checkDeoptimizedError(Pointer sp) { throw checkDeoptimizedError0(sp); @@ -272,9 +413,10 @@ private static RuntimeException checkDeoptimizedError0(Pointer sp) { @Uninterruptible(reason = "Prevent stack walks from seeing an inconsistent stack.") private void installDeoptimizedFrame(DeoptimizedFrame deoptimizedFrame) { /* - * Replace the return address to the deoptimized method with a pointer to the deoptStub. + * Replace the return address to the deoptimized method with a pointer to the + * eagerDeoptStub. 
*/ - FrameAccess.singleton().writeReturnAddress(deoptState.targetThread, deoptState.sourceSp, DeoptimizationSupport.getDeoptStubPointer()); + FrameAccess.singleton().writeReturnAddress(deoptState.targetThread, deoptState.sourceSp, DeoptimizationSupport.getEagerDeoptStubPointer()); /* * GR-54888: leaveInterpreterStub uses the deoptSlot, thus an existing value should be saved @@ -317,7 +459,6 @@ protected void operate() { @NeverInline("deoptimize must have a separate stack frame") public static void deoptimizeInRange(CodePointer fromIp, CodePointer toIp, boolean deoptAll) { VMOperation.guaranteeInProgressAtSafepoint("Deoptimization requires a safepoint."); - /* Captures "fromIp", "toIp", and "deoptAll" for the VMOperation. */ deoptimizeInRangeOperation(fromIp, toIp, deoptAll); } @@ -341,9 +482,7 @@ private static void deoptimizeInRangeOperation(CodePointer fromIp, CodePointer t StackFrameVisitor deoptVisitor = getStackFrameVisitor((Pointer) fromIp, (Pointer) toIp, deoptAll, vmThread); JavaStackWalker.walkThread(vmThread, deoptVisitor); } - if (testGCinDeoptimizer) { - Heap.getHeap().getGC().collect(GCCause.TestGCInDeoptimizer); - } + maybeTestGC(); } private static StackFrameVisitor getStackFrameVisitor(Pointer fromIp, Pointer toIp, boolean deoptAll, IsolateThread targetThread) { @@ -354,7 +493,7 @@ public boolean visitRegularFrame(Pointer frameSp, CodePointer frameIp, CodeInfo if ((ip.aboveOrEqual(fromIp) && ip.belowThan(toIp)) || deoptAll) { CodeInfoQueryResult queryResult = CodeInfoTable.lookupCodeInfoQueryResult(codeInfo, frameIp); Deoptimizer deoptimizer = new Deoptimizer(frameSp, queryResult, targetThread); - deoptimizer.deoptSourceFrame(frameIp, deoptAll); + deoptimizer.deoptSourceFrameLazily(frameIp, deoptAll); } return true; } @@ -368,26 +507,42 @@ protected boolean visitDeoptimizedFrame(Pointer originalSP, CodePointer deoptStu } /** - * Deoptimizes the given frame. + * Deoptimizes the given frame (lazily or eagerly, depending on the configuration). 
* * @param ignoreNonDeoptimizable if set to true, a frame that cannot be deoptimized is ignored - * instead of raising an error (use for deoptimzation testing only). + * instead of raising an error (use for deoptimization testing only). */ @NeverInline("Inlining of this method would require that we have deopt targets for callees of this method (SVM internals).") public static void deoptimizeFrame(Pointer sp, boolean ignoreNonDeoptimizable, SpeculationReason speculation) { + boolean deoptEagerly = !Options.UseLazyDeopt.getValue(); + deoptimizeFrame0(sp, ignoreNonDeoptimizable, speculation, deoptEagerly); + } + + /** + * Deoptimizes the given frame eagerly. + * + * @param ignoreNonDeoptimizable if set to true, a frame that cannot be deoptimized is ignored + * instead of raising an error (use for deoptimization testing only). + */ + @NeverInline("Inlining of this method would require that we have deopt targets for callees of this method (SVM internals).") + public static void deoptimizeFrameEagerly(Pointer sp, boolean ignoreNonDeoptimizable, SpeculationReason speculation) { + deoptimizeFrame0(sp, ignoreNonDeoptimizable, speculation, true); + } + + private static void deoptimizeFrame0(Pointer sp, boolean ignoreNonDeoptimizable, SpeculationReason speculation, boolean deoptEagerly) { /* * Note that the thread needs to be read outside of the VMOperation, since the operation can * run in any different thread. */ IsolateThread targetThread = CurrentIsolate.getCurrentThread(); - DeoptimizedFrame deoptFrame = Deoptimizer.checkDeoptimized(targetThread, sp); + DeoptimizedFrame deoptFrame = Deoptimizer.checkEagerDeoptimized(targetThread, sp); if (deoptFrame != null) { /* Already deoptimized, so nothing to do. 
*/ registerSpeculationFailure(deoptFrame.getSourceInstalledCode(), speculation); return; } - DeoptimizeFrameOperation vmOp = new DeoptimizeFrameOperation(sp, ignoreNonDeoptimizable, speculation, targetThread); + DeoptimizeFrameOperation vmOp = new DeoptimizeFrameOperation(sp, ignoreNonDeoptimizable, speculation, targetThread, deoptEagerly); vmOp.enqueue(); } @@ -396,52 +551,69 @@ private static class DeoptimizeFrameOperation extends JavaVMOperation { private final boolean ignoreNonDeoptimizable; private final SpeculationReason speculation; private final IsolateThread targetThread; + private final boolean deoptEagerly; - DeoptimizeFrameOperation(Pointer sourceSp, boolean ignoreNonDeoptimizable, SpeculationReason speculation, IsolateThread targetThread) { + DeoptimizeFrameOperation(Pointer sourceSp, boolean ignoreNonDeoptimizable, SpeculationReason speculation, IsolateThread targetThread, boolean deoptEagerly) { super(VMOperationInfos.get(DeoptimizeFrameOperation.class, "Deoptimize frame", SystemEffect.SAFEPOINT)); this.sourceSp = sourceSp; this.ignoreNonDeoptimizable = ignoreNonDeoptimizable; this.speculation = speculation; this.targetThread = targetThread; + this.deoptEagerly = deoptEagerly; + if (Options.UseLazyDeopt.getValue() && deoptEagerly) { + /* + * If lazy deoptimization is enabled, eager deoptimization is only used for stack + * introspection. We enforce that eager deoptimization cannot be applied to other + * threads, because we do not want an eager deoptimization operation to interrupt + * and interfere with a thread that is undergoing lazy deoptimization. 
+ */ + assert targetThread == CurrentIsolate.getCurrentThread() : "With lazy deoptimization enabled, eager deoptimization cannot be used to deoptimize other threads"; + } } @Override protected void operate() { VMOperation.guaranteeInProgress("doDeoptimizeFrame"); CodePointer ip = FrameAccess.singleton().readReturnAddress(targetThread, sourceSp); - deoptimizeFrame(targetThread, sourceSp, ip, ignoreNonDeoptimizable, speculation); + deoptimizeFrame(targetThread, sourceSp, ip, ignoreNonDeoptimizable, speculation, deoptEagerly); } } @Uninterruptible(reason = "Prevent the GC from freeing the CodeInfo object.") - private static void deoptimizeFrame(IsolateThread targetThread, Pointer sp, CodePointer ip, boolean ignoreNonDeoptimizable, SpeculationReason speculation) { + private static void deoptimizeFrame(IsolateThread targetThread, Pointer sp, CodePointer ip, boolean ignoreNonDeoptimizable, SpeculationReason speculation, boolean deoptEagerly) { UntetheredCodeInfo untetheredInfo = CodeInfoTable.lookupCodeInfo(ip); Object tether = CodeInfoAccess.acquireTether(untetheredInfo); try { CodeInfo info = CodeInfoAccess.convert(untetheredInfo, tether); - deoptimize(targetThread, sp, ip, ignoreNonDeoptimizable, speculation, info); + deoptimize(targetThread, sp, ip, ignoreNonDeoptimizable, speculation, info, deoptEagerly); } finally { CodeInfoAccess.releaseTether(untetheredInfo, tether); } } @Uninterruptible(reason = "Pass the now protected CodeInfo object to interruptible code.", calleeMustBe = false) - private static void deoptimize(IsolateThread targetThread, Pointer sp, CodePointer ip, boolean ignoreNonDeoptimizable, SpeculationReason speculation, CodeInfo info) { - deoptimize0(targetThread, sp, ip, ignoreNonDeoptimizable, speculation, info); + private static void deoptimize(IsolateThread targetThread, Pointer sp, CodePointer ip, boolean ignoreNonDeoptimizable, SpeculationReason speculation, CodeInfo info, boolean deoptEagerly) { + deoptimize0(targetThread, sp, ip, 
ignoreNonDeoptimizable, speculation, info, deoptEagerly); } - private static void deoptimize0(IsolateThread targetThread, Pointer sp, CodePointer ip, boolean ignoreNonDeoptimizable, SpeculationReason speculation, CodeInfo info) { + private static void deoptimize0(IsolateThread targetThread, Pointer sp, CodePointer ip, boolean ignoreNonDeoptimizable, SpeculationReason speculation, CodeInfo info, boolean deoptEagerly) { CodeInfoQueryResult queryResult = CodeInfoTable.lookupCodeInfoQueryResult(info, ip); Deoptimizer deoptimizer = new Deoptimizer(sp, queryResult, targetThread); - DeoptimizedFrame sourceFrame = deoptimizer.deoptSourceFrame(ip, ignoreNonDeoptimizable); - if (sourceFrame != null) { - registerSpeculationFailure(sourceFrame.getSourceInstalledCode(), speculation); + if (deoptEagerly) { + DeoptimizedFrame sourceFrame = deoptimizer.deoptSourceFrameEagerly(ip, ignoreNonDeoptimizable); + if (sourceFrame != null) { + registerSpeculationFailure(sourceFrame.getSourceInstalledCode(), speculation); + } + } else { + deoptimizer.deoptSourceFrameLazily(ip, ignoreNonDeoptimizable); + SubstrateInstalledCode installedCode = CodeInfoTable.lookupInstalledCode(ip); + registerSpeculationFailure(installedCode, speculation); } } /** * Invalidates the {@link InstalledCode} of the method of the given frame. The method must be a - * runtime compiled method, since there is not {@link InstalledCode} for native image methods. + * runtime compiled method, since there is no {@link InstalledCode} for native image methods. */ public static void invalidateMethodOfFrame(IsolateThread thread, Pointer sp, SpeculationReason speculation) { CodePointer ip = FrameAccess.singleton().readReturnAddress(thread, sp); @@ -455,19 +627,20 @@ public static void invalidateMethodOfFrame(IsolateThread thread, Pointer sp, Spe * installedCode multiple times in case of a race is not a problem because the actual * invalidation is in a VMOperation. 
*/ - DeoptimizedFrame deoptimizedFrame = checkDeoptimized(thread, sp); + DeoptimizedFrame deoptimizedFrame = checkEagerDeoptimized(thread, sp); if (deoptimizedFrame != null) { installedCode = deoptimizedFrame.getSourceInstalledCode(); - if (installedCode == null) { - /* When the method was invalidated before, all the metadata can be gone by now. */ + } + + if (installedCode == null) { + boolean alreadyDeoptimized = deoptimizedFrame != null || checkLazyDeoptimized(thread, sp); + if (alreadyDeoptimized) { + /* All the metadata might already be gone. */ return; } - } else { - if (installedCode == null) { - throw VMError.shouldNotReachHere( - "Only runtime compiled methods can be invalidated. sp = " + Long.toHexString(sp.rawValue()) + ", returnAddress = " + Long.toHexString(ip.rawValue())); - } + throw VMError.shouldNotReachHere("Only runtime compiled methods can be invalidated. sp = " + Long.toHexString(sp.rawValue()) + ", returnAddress = " + Long.toHexString(ip.rawValue())); } + registerSpeculationFailure(installedCode, speculation); VMOperation.guaranteeNotInProgress("invalidateMethodOfFrame: running user code that can block"); installedCode.invalidate(); @@ -523,7 +696,15 @@ public enum StubType { /** * Custom prologue: save all of the architecture's return registers onto the stack. */ - EntryStub, + EagerEntryStub, + + /** + * Custom prologue: same custom Prologue as the EagerEntryStub, but we also reserve some + * additional memory on the stack when this stub is entered, because the lazyDeoptStub might + * need to access callee-saved values in the frame of the callee of the method to be + * deoptimized. + */ + LazyEntryStub, /** * Custom prologue: set the stack pointer to the first method parameter. 
@@ -557,6 +738,142 @@ public boolean isInterpreterStub() { StubType stubType(); } + @Uninterruptible(reason = CALLED_FROM_UNINTERRUPTIBLE_CODE, mayBeInlined = true) + private static boolean isNonNullValue(UnsignedWord pointer) { + if (SubstrateOptions.SpawnIsolates.getValue()) { + /* + * KnownIntrinsics.heapBase() can represent null objects, but we cannot convert the heap + * base value to an object. + */ + return pointer != Word.nullPointer() && pointer != KnownIntrinsics.heapBase(); + } + /* + * With spawn isolates option disabled (which is a legacy mode), the heap base may represent + * a valid heap object. + */ + return pointer != Word.nullPointer(); + } + + @DeoptStub(stubType = StubType.LazyEntryStub) + @Uninterruptible(reason = "gpReturnValue may hold unmanaged reference") + public static UnsignedWord lazyDeoptStubObjectReturn(Pointer framePointer, UnsignedWord gpReturnValue, UnsignedWord fpReturnValue) { + assert PointerUtils.isAMultiple(KnownIntrinsics.readStackPointer(), Word.unsigned(ConfigurationValues.getTarget().stackAlignment)); + assert Options.UseLazyDeopt.getValue(); + assert VMThreads.StatusSupport.isStatusJava() : "Deopt stub execution must not be visible to other threads."; + + boolean hasException = ExceptionUnwind.getLazyDeoptStubShouldReturnToExceptionHandler(); + if (hasException) { + ExceptionUnwind.setLazyDeoptStubShouldReturnToExceptionHandler(false); + } + Object gpReturnValueObject = null; + if (isNonNullValue(gpReturnValue)) { + gpReturnValueObject = ((Pointer) gpReturnValue).toObject(); + } + + return lazyDeoptStubCore(framePointer, gpReturnValue, fpReturnValue, hasException, gpReturnValueObject); + } + + @DeoptStub(stubType = StubType.LazyEntryStub) + @Uninterruptible(reason = "gpReturnValue may hold unmanaged reference") + public static UnsignedWord lazyDeoptStubPrimitiveReturn(Pointer framePointer, UnsignedWord gpReturnValue, UnsignedWord fpReturnValue) { + /* + * If we need to return to the exception handler, then we should 
always go to + * lazyDeoptStubObjectReturn, since returning to an exception handler involves returning an + * Exception Object. + */ + assert PointerUtils.isAMultiple(KnownIntrinsics.readStackPointer(), Word.unsigned(ConfigurationValues.getTarget().stackAlignment)); + assert Options.UseLazyDeopt.getValue(); + assert VMThreads.StatusSupport.isStatusJava() : "Deopt stub execution must not be visible to other threads."; + assert !ExceptionUnwind.getLazyDeoptStubShouldReturnToExceptionHandler(); + + return lazyDeoptStubCore(framePointer, gpReturnValue, fpReturnValue, false, null); + } + + /** + * The handler for lazy deoptimization. + * + * Despite being marked Uninterruptible, this contains interruptible sections when we look up + * the codeinfo, and construct the {@link DeoptimizedFrame}. + */ + @Uninterruptible(reason = "frame will hold objects in unmanaged storage") + private static UnsignedWord lazyDeoptStubCore(Pointer framePointer, UnsignedWord gpReturnValue, UnsignedWord fpReturnValue, boolean hasException, Object gpReturnValueObject) { + DeoptimizedFrame deoptFrame; + Pointer newSp; + + StackOverflowCheck.singleton().makeYellowZoneAvailable(); + try { + /* The original return address is at offset 0 from the stack pointer */ + CodePointer originalReturnAddress = framePointer.readWord(0); + assert originalReturnAddress.isNonNull(); + + /* Clear the deoptimization slot. */ + framePointer.writeWord(0, Word.nullPointer()); + + /* + * Write the old return address to the return address slot, so that stack walks see a + * consistent stack. 
+ */ + FrameAccess.singleton().writeReturnAddress(CurrentIsolate.getCurrentThread(), framePointer, originalReturnAddress); + + UntetheredCodeInfo untetheredInfo = CodeInfoTable.lookupCodeInfo(originalReturnAddress); + Object tether = CodeInfoAccess.acquireTether(untetheredInfo); + try { + CodeInfo info = CodeInfoAccess.convert(untetheredInfo, tether); + deoptFrame = constructLazilyDeoptimizedFrameInterruptibly(framePointer, info, originalReturnAddress, hasException); + } finally { + CodeInfoAccess.releaseTether(untetheredInfo, tether); + } + + DeoptimizationCounters.counters().deoptCount.inc(); + assert deoptFrame != null : "was not able to lazily construct a deoptimized frame"; + + newSp = computeNewFramePointer(framePointer, deoptFrame); + + /* Build the content of the deopt target stack frames. */ + deoptFrame.buildContent(newSp); + + /* + * We fail fatally if eager deoptimization is invoked when the lazy deopt stub is + * executing, because eager deoptimization should only be invoked through stack + * introspection, which can only be called from the current thread. Thus, there is no + * use case for eager deoptimization to happen if the current thread is executing the + * lazy deopt stub. + */ + VMError.guarantee(framePointer.readWord(0) == Word.nullPointer(), "Eager deoptimization should not occur when lazy deoptimization is in progress"); + + recentDeoptimizationEvents.append(deoptFrame.getCompletedMessage()); + } finally { + StackOverflowCheck.singleton().protectYellowZone(); + } + // From this point on, only uninterruptible code may be executed. + UnsignedWord updatedGpReturnValue = gpReturnValue; + if (gpReturnValueObject != null) { + updatedGpReturnValue = Word.objectToUntrackedPointer(gpReturnValueObject); + } + + /* Do the stack rewriting. Return directly to the deopt target. 
*/ + return rewriteStackStub(newSp, updatedGpReturnValue, fpReturnValue, deoptFrame); + } + + @Uninterruptible(reason = "Wrapper to call interruptible methods", calleeMustBe = false) + private static DeoptimizedFrame constructLazilyDeoptimizedFrameInterruptibly(Pointer sourceSp, CodeInfo info, CodePointer ip, boolean hasException) { + return constructLazilyDeoptimizedFrameInterruptibly0(sourceSp, info, ip, hasException); + } + + private static DeoptimizedFrame constructLazilyDeoptimizedFrameInterruptibly0(Pointer sourceSp, CodeInfo info, CodePointer ip, boolean hasException) { + maybeTestGC(); + CodeInfoQueryResult sourceChunk = CodeInfoTable.lookupCodeInfoQueryResult(info, ip); + maybeTestGC(); + Deoptimizer deoptimizer = new Deoptimizer(sourceSp, sourceChunk, CurrentIsolate.getCurrentThread()); + maybeTestEagerDeoptInLazyDeoptFatalError(deoptimizer, ip); + DeoptimizedFrame deoptFrame = deoptimizer.doDeoptSourceFrame(ip, true, false); + if (hasException) { + deoptFrame.takeException(); + } + maybeTestGC(); + return deoptFrame; + } + /** * Performs the second step of deoptimization: the actual rewriting of a deoptimized method's * frame. @@ -565,7 +882,7 @@ public boolean isInterpreterStub() { * {@link #deoptimizeInRange}. Therefore the stub is "called" when a method wants to return to a * deoptimized method. *

- * When {@link #deoptStub} is "called", the stack looks like this: + * When {@link #eagerDeoptStub} is "called", the stack looks like this: * *

      *    :                                :
@@ -576,7 +893,7 @@ public boolean isInterpreterStub() {
      *    +--------------------------------+--------- no return address between the frames!
      *    |                                |
      *    |                                |   frame of
-     *    |                                |   {@link #deoptStub}
+     *    |                                |   {@link #eagerDeoptStub}
      *    :     ...                        :
      * 
* @@ -593,9 +910,9 @@ public boolean isInterpreterStub() { * when the deopt stub was reached. It must be restored to the register before * completion of the stub. */ - @DeoptStub(stubType = StubType.EntryStub) + @DeoptStub(stubType = StubType.EagerEntryStub) @Uninterruptible(reason = "Frame holds Objects in unmanaged storage.") - public static UnsignedWord deoptStub(Pointer framePointer, UnsignedWord gpReturnValue, UnsignedWord fpReturnValue) { + public static UnsignedWord eagerDeoptStub(Pointer framePointer, UnsignedWord gpReturnValue, UnsignedWord fpReturnValue) { assert PointerUtils.isAMultiple(KnownIntrinsics.readStackPointer(), Word.unsigned(ConfigurationValues.getTarget().stackAlignment)); VMError.guarantee(VMThreads.StatusSupport.isStatusJava(), "Deopt stub execution must not be visible to other threads."); DeoptimizedFrame frame = (DeoptimizedFrame) ReferenceAccess.singleton().readObjectAt(framePointer, true); @@ -605,12 +922,7 @@ public static UnsignedWord deoptStub(Pointer framePointer, UnsignedWord gpReturn DeoptimizationCounters.startTime.set(System.nanoTime()); } - /* Computation of the new stack pointer: we start with the stack pointer of this frame. */ - final Pointer newSp = framePointer - /* Remove the size of the frame that gets deoptimized. */ - .add(Word.unsigned(frame.getSourceTotalFrameSize())) - /* Add the size of the deoptimization target frames. */ - .subtract(frame.getTargetContent().getSize()); + final Pointer newSp = computeNewFramePointer(framePointer, frame); /* Build the content of the deopt target stack frames. */ frame.buildContent(newSp); @@ -619,7 +931,7 @@ public static UnsignedWord deoptStub(Pointer framePointer, UnsignedWord gpReturn * The frame was pinned to keep it from moving during construction. I can unpin it now that * I am uninterruptible. (And I have to unpin it.) 
*/ - frame.getPin().close(); + frame.unpin(); recentDeoptimizationEvents.append(frame.getCompletedMessage()); @@ -627,6 +939,16 @@ public static UnsignedWord deoptStub(Pointer framePointer, UnsignedWord gpReturn return rewriteStackStub(newSp, gpReturnValue, fpReturnValue, frame); } + @Uninterruptible(reason = CALLED_FROM_UNINTERRUPTIBLE_CODE, mayBeInlined = true) + private static Pointer computeNewFramePointer(Pointer originalFramePointer, DeoptimizedFrame deoptimizedFrame) { + /* Computation of the new stack pointer: we start with the stack pointer of this frame. */ + return originalFramePointer + /* Remove the size of the frame that gets deoptimized. */ + .add(Word.unsigned(deoptimizedFrame.getSourceTotalFrameSize())) + /* Add the size of the deoptimization target frames. */ + .subtract(deoptimizedFrame.getTargetContent().getSize()); + } + /** * Performs the actual stack rewriting. The custom prologue of this method sets the stack * pointer to the new value passed in as the first parameter. @@ -669,35 +991,109 @@ static int savedBasePointerSize() { } /** - * Deoptimizes a source frame. + * Deoptimizes a source frame lazily, or reverts to eager deoptimization if lazy deoptimization + * is not enabled. 
* * @param pc A code address inside the source method (= the method to deoptimize) */ - public DeoptimizedFrame deoptSourceFrame(CodePointer pc, boolean ignoreNonDeoptimizable) { - final DeoptSourceFrameOperation operation = new DeoptSourceFrameOperation(this, pc, ignoreNonDeoptimizable); + public void deoptSourceFrameLazily(CodePointer pc, boolean ignoreNonDeoptimizable) { + assert VMOperation.isInProgress(); + if (!Options.UseLazyDeopt.getValue()) { + deoptSourceFrameEagerly(pc, ignoreNonDeoptimizable); + return; + } + if (checkLazyDeoptimized(deoptState.targetThread, deoptState.sourceSp)) { + // already lazily deoptimized, nothing to do + return; + } else if (checkEagerDeoptimized(deoptState.targetThread, deoptState.sourceSp) != null) { + // if already eagerly deoptimized, don't lazily deoptimize. + return; + } + + if (!canBeDeoptimized(sourceChunk.getFrameInfo())) { + if (ignoreNonDeoptimizable) { + return; + } else { + throw fatalDeoptimizationError("Deoptimization: cannot lazily deoptimize a method that has no deoptimization entry point", sourceChunk.getFrameInfo(), sourceChunk.getFrameInfo()); + } + } + + FrameInfoQueryResult frameInfo = sourceChunk.getFrameInfo(); + CodeInfoQueryResult targetInfo = CodeInfoTable.lookupDeoptimizationEntrypoint(frameInfo.getDeoptMethodImageCodeInfo(), frameInfo.getDeoptMethodOffset(), frameInfo.getEncodedBci()); + installLazyDeoptStubReturnAddress(targetInfo.getDeoptReturnValueIsObject(), deoptState.sourceSp, deoptState.targetThread); + } + + /** + * Deoptimizes a source frame eagerly. 
+ */ + public DeoptimizedFrame deoptSourceFrameEagerly(CodePointer pc, boolean ignoreNonDeoptimizable) { + if (!canBeDeoptimized(sourceChunk.getFrameInfo())) { + if (ignoreNonDeoptimizable) { + return null; + } else { + throw fatalDeoptimizationError("Deoptimization: cannot eagerly deoptimize a method that has no deoptimization entry point", sourceChunk.getFrameInfo(), sourceChunk.getFrameInfo()); + } + } + + final EagerDeoptSourceFrameOperation operation = new EagerDeoptSourceFrameOperation(this, pc, ignoreNonDeoptimizable); operation.enqueue(); return operation.getResult(); } - /** A VMOperation to encapsulate deoptSourceFrame. */ - private static final class DeoptSourceFrameOperation extends JavaVMOperation { + @Uninterruptible(reason = "Prevent stack walks from seeing an inconsistent stack.") + private static void installLazyDeoptStubReturnAddress(boolean returnValueIsObject, Pointer sourceSp, IsolateThread targetThread) { + assert Options.UseLazyDeopt.getValue(); + assert VMOperation.isInProgress(); + CodePointer oldReturnAddress = FrameAccess.singleton().readReturnAddress(targetThread, sourceSp); + + // Replace the return address to the deoptimized method with a pointer to the lazyDeoptStub. + CodePointer stubAddress = returnValueIsObject ? DeoptimizationSupport.getLazyDeoptStubObjectReturnPointer() : DeoptimizationSupport.getLazyDeoptStubPrimitiveReturnPointer(); + FrameAccess.singleton().writeReturnAddress(targetThread, sourceSp, stubAddress); + /* + * Write the original return address into the slot where the Deoptimized Frame would go in + * the case of eager deoptimization.
+ */ + sourceSp.writeWord(0, oldReturnAddress); + } + + @Uninterruptible(reason = "Prevent stack walks from seeing an inconsistent stack.") + private static void uninstallLazyDeoptStubReturnAddress(Pointer sourceSp, IsolateThread thread) { + assert Options.UseLazyDeopt.getValue(); + assert VMOperation.isInProgress(); + CodePointer oldReturnAddress = sourceSp.readWord(0); + assert oldReturnAddress.isNonNull(); + // Clear the old return address from the deopt slot + sourceSp.writeWord(0, Word.nullPointer()); + // Restore the old return address on the stack + FrameAccess.singleton().writeReturnAddress(thread, sourceSp, oldReturnAddress); + } + + /** + * A VMOperation to deoptimize a frame eagerly. This involves patching the return address to + * {@link #eagerDeoptStub} and also installing a heap-allocated {@link DeoptimizedFrame} in a + * reserved stack slot. + */ + private static final class EagerDeoptSourceFrameOperation extends JavaVMOperation { private final Deoptimizer receiver; private final CodePointer pc; private final boolean ignoreNonDeoptimizable; private DeoptimizedFrame result; - DeoptSourceFrameOperation(Deoptimizer receiver, CodePointer pc, boolean ignoreNonDeoptimizable) { - super(VMOperationInfos.get(DeoptSourceFrameOperation.class, "Deoptimize source frame", SystemEffect.SAFEPOINT)); + EagerDeoptSourceFrameOperation(Deoptimizer receiver, CodePointer pc, boolean ignoreNonDeoptimizable) { + super(VMOperationInfos.get(EagerDeoptSourceFrameOperation.class, "Eagerly deoptimize source frame", SystemEffect.SAFEPOINT)); this.receiver = receiver; this.pc = pc; this.ignoreNonDeoptimizable = ignoreNonDeoptimizable; this.result = null; + if (Options.UseLazyDeopt.getValue()) { + assert receiver.deoptState.targetThread == CurrentIsolate.getCurrentThread() : "With lazy deoptimization enabled, eager deoptimization cannot be used to deoptimize other threads"; + } } @Override public void operate() { - result = receiver.deoptSourceFrameOperation(pc, 
ignoreNonDeoptimizable); + result = receiver.doDeoptSourceFrame(pc, ignoreNonDeoptimizable, true); } public DeoptimizedFrame getResult() { @@ -705,15 +1101,38 @@ public DeoptimizedFrame getResult() { } } - private DeoptimizedFrame deoptSourceFrameOperation(CodePointer pc, boolean ignoreNonDeoptimizable) { - VMOperation.guaranteeInProgress("deoptSourceFrame"); + /** + * Checks if a frame has a deoptimization target. + */ + private static boolean canBeDeoptimized(FrameInfoQueryResult frame) { + if (frame == null) { + return false; + } + FrameInfoQueryResult currFrame = frame; + while (currFrame != null) { + if (currFrame.getDeoptMethodOffset() == 0) { + return false; + } + currFrame = currFrame.getCaller(); + } + return true; + } + + private DeoptimizedFrame doDeoptSourceFrame(CodePointer pc, boolean ignoreNonDeoptimizable, boolean isEagerDeopt) { + assert !Options.UseLazyDeopt.getValue() || deoptState.targetThread == CurrentIsolate.getCurrentThread() : "with lazy deopt enabled, this method may only be called for the current thread"; + assert !isEagerDeopt || VMOperation.isInProgressAtSafepoint() : "eager deopts may only happen at a safepoint"; - DeoptimizedFrame existing = checkDeoptimized(deoptState.targetThread, deoptState.sourceSp); + DeoptimizedFrame existing = checkEagerDeoptimized(deoptState.targetThread, deoptState.sourceSp); if (existing != null) { /* Already deoptimized, so nothing to do. */ return existing; } + if (isEagerDeopt && checkLazyDeoptimized(deoptState.targetThread, deoptState.sourceSp)) { + // already pending lazy deoptimization. Fix return address, then deopt eagerly below. 
+ uninstallLazyDeoptStubReturnAddress(deoptState.sourceSp, deoptState.targetThread); + } + final FrameInfoQueryResult frameInfo = sourceChunk.getFrameInfo(); if (frameInfo == null) { if (ignoreNonDeoptimizable) { @@ -779,9 +1198,11 @@ private DeoptimizedFrame deoptSourceFrameOperation(CodePointer pc, boolean ignor boolean rethrowException = FrameInfoDecoder.decodeRethrowException(frameInfo.getEncodedBci()); /* Allocate a buffer to hold the contents of the new target frame. */ DeoptimizedFrame deoptimizedFrame = DeoptimizedFrame.factory(targetContentSize, sourceChunk.getEncodedFrameSize(), CodeInfoTable.lookupInstalledCode(pc), topFrame, relockObjectData, pc, - rethrowException); + rethrowException, isEagerDeopt); - installDeoptimizedFrame(deoptimizedFrame); + if (isEagerDeopt) { + installDeoptimizedFrame(deoptimizedFrame); + } if (Options.TraceDeoptimization.getValue()) { printDeoptimizedFrame(Log.log(), deoptState.sourceSp, deoptimizedFrame, frameInfo, false); @@ -793,7 +1214,7 @@ private DeoptimizedFrame deoptSourceFrameOperation(CodePointer pc, boolean ignor private static void logDeoptSourceFrameOperation(Pointer sp, DeoptimizedFrame deoptimizedFrame, FrameInfoQueryResult frameInfo) { StringBuilderLog log = new StringBuilderLog(); - PointerBase deoptimizedFrameAddress = deoptimizedFrame.getPin().addressOfObject(); + Pointer deoptimizedFrameAddress = Word.objectToUntrackedPointer(deoptimizedFrame); log.string("deoptSourceFrameOperation: DeoptimizedFrame at ").zhex(deoptimizedFrameAddress).string(": "); printDeoptimizedFrame(log, sp, deoptimizedFrame, frameInfo, true); recentDeoptimizationEvents.append(log.getResult().toCharArray()); diff --git a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/deopt/SubstrateInstalledCode.java b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/deopt/SubstrateInstalledCode.java index 1af19b2b1f67..8e0f12d4291f 100644 --- 
a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/deopt/SubstrateInstalledCode.java +++ b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/deopt/SubstrateInstalledCode.java @@ -102,7 +102,7 @@ public interface SubstrateInstalledCode { * {@link #isValid()} returns {@code false}, {@link #isAlive()} returns {@code true}, and * {@link #getEntryPoint()} returns 0. */ - void invalidateWithoutDeoptimization(); + void makeNonEntrant(); SubstrateSpeculationLog getSpeculationLog(); diff --git a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/heap/RuntimeCodeCacheCleaner.java b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/heap/RuntimeCodeCacheCleaner.java index d81daf130498..246cb1b7e27d 100644 --- a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/heap/RuntimeCodeCacheCleaner.java +++ b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/heap/RuntimeCodeCacheCleaner.java @@ -67,12 +67,12 @@ public boolean visitCode(CodeInfo codeInfo) { } int state = CodeInfoAccess.getState(codeInfo); - if (state == CodeInfo.STATE_UNREACHABLE) { + if (state == CodeInfo.STATE_PENDING_FREE) { freeMemory(codeInfo); - } else if (state == CodeInfo.STATE_READY_FOR_INVALIDATION) { + } else if (state == CodeInfo.STATE_PENDING_REMOVAL_FROM_CODE_CACHE) { // All objects that are accessed during invalidation must still be reachable. 
CodeInfoTable.invalidateNonStackCodeAtSafepoint(codeInfo); - assert CodeInfoAccess.getState(codeInfo) == CodeInfo.STATE_INVALIDATED; + assert CodeInfoAccess.getState(codeInfo) == CodeInfo.STATE_REMOVED_FROM_CODE_CACHE; freeMemory(codeInfo); } return true; diff --git a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/heap/StoredContinuationAccess.java b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/heap/StoredContinuationAccess.java index 701b6fb03e5c..16112c0ee9cb 100644 --- a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/heap/StoredContinuationAccess.java +++ b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/heap/StoredContinuationAccess.java @@ -181,7 +181,7 @@ public static boolean walkReferences(StoredContinuation s, ObjectReferenceVisito JavaFrame frame = JavaStackWalker.getCurrentFrame(walk); VMError.guarantee(!JavaFrames.isEntryPoint(frame), "Entry point frames are not supported"); VMError.guarantee(!JavaFrames.isUnknownFrame(frame), "Stack walk must not encounter unknown frame"); - VMError.guarantee(Deoptimizer.checkDeoptimized(frame) == null, "Deoptimized frames are not supported"); + VMError.guarantee(!Deoptimizer.checkIsDeoptimized(frame), "Deoptimized frames are not supported"); UntetheredCodeInfo untetheredCodeInfo = frame.getIPCodeInfo(); Object tether = CodeInfoAccess.acquireTether(untetheredCodeInfo); @@ -208,7 +208,7 @@ public static void walkFrames(StoredContinuation s, ContinuationStackFrameVisito JavaFrame frame = JavaStackWalker.getCurrentFrame(walk); VMError.guarantee(!JavaFrames.isEntryPoint(frame), "Entry point frames are not supported"); VMError.guarantee(!JavaFrames.isUnknownFrame(frame), "Stack walk must not encounter unknown frame"); - VMError.guarantee(Deoptimizer.checkDeoptimized(frame) == null, "Deoptimized frames are not supported"); + VMError.guarantee(!Deoptimizer.checkIsDeoptimized(frame), "Deoptimized frames are not supported"); UntetheredCodeInfo untetheredCodeInfo = 
frame.getIPCodeInfo(); Object tether = CodeInfoAccess.acquireTether(untetheredCodeInfo); diff --git a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/jdk/Target_java_lang_StackWalker.java b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/jdk/Target_java_lang_StackWalker.java index 2e87c44fb35b..bde46ce39eea 100644 --- a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/jdk/Target_java_lang_StackWalker.java +++ b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/jdk/Target_java_lang_StackWalker.java @@ -316,7 +316,7 @@ private boolean advancePhysically0() { JavaFrame frame = JavaStackWalker.getCurrentFrame(walk); VMError.guarantee(!JavaFrames.isEntryPoint(frame), "Entry point frames are not supported"); VMError.guarantee(!JavaFrames.isUnknownFrame(frame), "Stack walk must not encounter unknown frame"); - VMError.guarantee(Deoptimizer.checkDeoptimized(frame) == null, "Deoptimized frames are not supported"); + VMError.guarantee(!Deoptimizer.checkIsDeoptimized(frame), "Deoptimized frames are not supported"); UntetheredCodeInfo untetheredInfo = frame.getIPCodeInfo(); VMError.guarantee(UntetheredCodeInfoAccess.isAOTImageCode(untetheredInfo)); @@ -393,7 +393,7 @@ protected boolean advancePhysically() { JavaFrame frame = JavaStackWalker.getCurrentFrame(walk); VMError.guarantee(!JavaFrames.isUnknownFrame(frame), "Stack walk must not encounter unknown frame"); - DeoptimizedFrame deoptimizedFrame = Deoptimizer.checkDeoptimized(frame); + DeoptimizedFrame deoptimizedFrame = Deoptimizer.checkEagerDeoptimized(frame); if (deoptimizedFrame != null) { this.deoptimizedVFrame = deoptimizedFrame.getTopFrame(); } else { diff --git a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/jfr/JfrStackWalker.java b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/jfr/JfrStackWalker.java index ab264cbfd782..f3c4cbd467e6 100644 --- a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/jfr/JfrStackWalker.java +++ 
b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/jfr/JfrStackWalker.java @@ -249,7 +249,7 @@ private static int walkCurrentThread0(SamplerSampleWriterData data, Pointer star while (JavaStackWalker.advance(walk, thread)) { JavaFrame frame = JavaStackWalker.getCurrentFrame(walk); - VMError.guarantee(Deoptimizer.checkDeoptimized(frame) == null, "JIT compilation is not supported"); + VMError.guarantee(!Deoptimizer.checkIsDeoptimized(frame), "JIT compilation is not supported"); if (JavaFrames.isUnknownFrame(frame) || isAsync && !hasValidCaller(walk, frame)) { /* Most likely, the stack walk already started with a wrong SP or IP. */ diff --git a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/jvmti/JvmtiFunctions.java b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/jvmti/JvmtiFunctions.java index 6e0fd4c70286..d1780b532070 100644 --- a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/jvmti/JvmtiFunctions.java +++ b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/jvmti/JvmtiFunctions.java @@ -68,7 +68,7 @@ import com.oracle.svm.core.jni.headers.JNINativeInterface; import com.oracle.svm.core.jni.headers.JNINativeInterfacePointer; import com.oracle.svm.core.jni.headers.JNIObjectHandle; -import com.oracle.svm.core.jvmti.headers.BooleanPointer; +import com.oracle.svm.core.c.BooleanPointer; import com.oracle.svm.core.jvmti.headers.JClass; import com.oracle.svm.core.jvmti.headers.JClassPointer; import com.oracle.svm.core.jvmti.headers.JClassPointerPointer; diff --git a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/snippets/ExceptionUnwind.java b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/snippets/ExceptionUnwind.java index af76191567d2..b6565be5a656 100644 --- a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/snippets/ExceptionUnwind.java +++ b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/snippets/ExceptionUnwind.java @@ -24,14 +24,18 @@ */ package 
com.oracle.svm.core.snippets; +import static com.oracle.svm.core.Uninterruptible.CALLED_FROM_UNINTERRUPTIBLE_CODE; import static jdk.graal.compiler.core.common.spi.ForeignCallDescriptor.CallSideEffect.NO_SIDE_EFFECT; +import com.oracle.svm.core.threadlocal.FastThreadLocalBytes; import jdk.graal.compiler.word.Word; import org.graalvm.nativeimage.CurrentIsolate; import org.graalvm.nativeimage.ImageSingletons; import org.graalvm.nativeimage.IsolateThread; import org.graalvm.nativeimage.StackValue; import org.graalvm.nativeimage.c.function.CodePointer; +import com.oracle.svm.core.c.BooleanPointer; +import org.graalvm.nativeimage.c.struct.SizeOf; import org.graalvm.nativeimage.hosted.Feature; import org.graalvm.word.LocationIdentity; import org.graalvm.word.Pointer; @@ -70,8 +74,20 @@ public abstract class ExceptionUnwind { }; public static final FastThreadLocalObject currentException = FastThreadLocalFactory.createObject(Throwable.class, "ExceptionUnwind.currentException"); + public static final FastThreadLocalBytes lazyDeoptStubShouldReturnToExceptionHandler = FastThreadLocalFactory.createBytes(() -> SizeOf.get(BooleanPointer.class), + "ExceptionUnwind.lazyDeoptStubShouldReturnToExceptionHandler"); - @Uninterruptible(reason = "Called from uninterruptible code.", mayBeInlined = true) + @Uninterruptible(reason = CALLED_FROM_UNINTERRUPTIBLE_CODE, mayBeInlined = true) + public static void setLazyDeoptStubShouldReturnToExceptionHandler(boolean val) { + lazyDeoptStubShouldReturnToExceptionHandler.getAddress().write(val); + } + + @Uninterruptible(reason = CALLED_FROM_UNINTERRUPTIBLE_CODE, mayBeInlined = true) + public static boolean getLazyDeoptStubShouldReturnToExceptionHandler() { + return lazyDeoptStubShouldReturnToExceptionHandler.getAddress().read(); + } + + @Uninterruptible(reason = CALLED_FROM_UNINTERRUPTIBLE_CODE, mayBeInlined = true) static boolean exceptionsAreFatal() { /* * If an exception is thrown while the thread is not in the Java state, most likely @@ -196,13 
+212,26 @@ private static void defaultUnwindException(Pointer startSP, boolean fromMethodWi Pointer sp = frame.getSP(); if (DeoptimizationSupport.enabled()) { - DeoptimizedFrame deoptFrame = Deoptimizer.checkDeoptimized(frame); + DeoptimizedFrame deoptFrame = Deoptimizer.checkEagerDeoptimized(frame); if (deoptFrame != null) { /* Deoptimization entry points always have an exception handler. */ deoptTakeExceptionInterruptible(deoptFrame); - jumpToHandler(sp, DeoptimizationSupport.getDeoptStubPointer(), hasCalleeSavedRegisters); + jumpToHandler(sp, DeoptimizationSupport.getEagerDeoptStubPointer(), hasCalleeSavedRegisters); UnreachableNode.unreachable(); return; /* Unreachable */ + } else if (Deoptimizer.checkLazyDeoptimized(frame)) { + long exceptionOffset = frame.getExceptionOffset(); + if (exceptionOffset != CodeInfoQueryResult.NO_EXCEPTION_OFFSET) { + setLazyDeoptStubShouldReturnToExceptionHandler(true); + /* + * When handling exceptions, we always jump to the "object return" lazy + * deopt stub, because the Exception object is always passed as the return + * value. 
+ */ + jumpToHandler(sp, DeoptimizationSupport.getLazyDeoptStubObjectReturnPointer(), hasCalleeSavedRegisters); + UnreachableNode.unreachable(); + return; /* Unreachable */ + } } } diff --git a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/stack/JavaFrame.java b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/stack/JavaFrame.java index 530009eaa7e7..c5da2033636f 100644 --- a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/stack/JavaFrame.java +++ b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/stack/JavaFrame.java @@ -52,6 +52,12 @@ public interface JavaFrame extends SimpleCodeInfoQueryResult { @RawField void setIP(CodePointer ip); + @RawField + boolean getIsPendingLazyDeopt(); + + @RawField + void setIsPendingLazyDeopt(boolean isPendingLazyDeopt); + @RawField UntetheredCodeInfo getIPCodeInfo(); diff --git a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/stack/JavaFrames.java b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/stack/JavaFrames.java index 5de17c85306c..9c3405477f90 100644 --- a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/stack/JavaFrames.java +++ b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/stack/JavaFrames.java @@ -46,7 +46,7 @@ public class JavaFrames { @Uninterruptible(reason = CALLED_FROM_UNINTERRUPTIBLE_CODE, mayBeInlined = true) public static boolean isUnknownFrame(JavaFrame frame) { - return frame.getIPCodeInfo().isNull() && Deoptimizer.checkDeoptimized(frame) == null; + return frame.getIPCodeInfo().isNull() && Deoptimizer.checkEagerDeoptimized(frame) == null; } @Uninterruptible(reason = CALLED_FROM_UNINTERRUPTIBLE_CODE, mayBeInlined = true) @@ -80,6 +80,7 @@ static void clearData(JavaFrame frame) { frame.setSP(Word.nullPointer()); frame.setIP(Word.nullPointer()); frame.setIPCodeInfo(Word.nullPointer()); + frame.setIsPendingLazyDeopt(false); frame.setEncodedFrameSize(CodeInfoDecoder.INVALID_SIZE_ENCODING); 
frame.setExceptionOffset(CodeInfoQueryResult.NO_EXCEPTION_OFFSET); @@ -90,15 +91,28 @@ static void clearData(JavaFrame frame) { public static void setData(JavaFrame frame, Pointer sp, CodePointer ip) { frame.setSP(sp); frame.setIP(ip); + frame.setIsPendingLazyDeopt(false); - DeoptimizedFrame deoptimizedFrame = Deoptimizer.checkDeoptimized(frame); + DeoptimizedFrame deoptimizedFrame = Deoptimizer.checkEagerDeoptimized(frame); if (deoptimizedFrame != null) { frame.setIPCodeInfo(Word.nullPointer()); frame.setEncodedFrameSize(deoptimizedFrame.getSourceEncodedFrameSize()); frame.setExceptionOffset(CodeInfoQueryResult.NO_EXCEPTION_OFFSET); frame.setReferenceMapIndex(ReferenceMapIndex.NO_REFERENCE_MAP); } else { - UntetheredCodeInfo untetheredCodeInfo = CodeInfoTable.lookupCodeInfo(ip); + CodePointer returnAddress = ip; + if (Deoptimizer.checkLazyDeoptimized(ip)) { + /* + * For lazily deoptimized frames, the return address is stored in a reserved slot. + * See Deoptimizer.java for details. + */ + frame.setIsPendingLazyDeopt(true); + returnAddress = sp.readWord(0); + assert returnAddress.isNonNull(); + frame.setIP(returnAddress); + } + + UntetheredCodeInfo untetheredCodeInfo = CodeInfoTable.lookupCodeInfo(returnAddress); frame.setIPCodeInfo(untetheredCodeInfo); if (untetheredCodeInfo.isNull()) { @@ -111,7 +125,7 @@ public static void setData(JavaFrame frame, Pointer sp, CodePointer ip) { Object tether = CodeInfoAccess.acquireTether(untetheredCodeInfo); try { CodeInfo info = CodeInfoAccess.convert(untetheredCodeInfo, tether); - CodeInfoAccess.lookupCodeInfo(info, ip, frame); + CodeInfoAccess.lookupCodeInfo(info, returnAddress, frame); } finally { CodeInfoAccess.releaseTether(untetheredCodeInfo, tether); } diff --git a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/stack/JavaStackWalker.java b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/stack/JavaStackWalker.java index 807b1679bf04..d94441591530 100644 --- 
a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/stack/JavaStackWalker.java +++ b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/stack/JavaStackWalker.java @@ -472,12 +472,13 @@ static boolean doWalk(JavaStackWalk walk, IsolateThread thread, ParameterizedSta return visitUnknownFrame(sp, ip, visitor, data); } - DeoptimizedFrame deoptimizedFrame = Deoptimizer.checkDeoptimized(frame); + DeoptimizedFrame deoptimizedFrame = Deoptimizer.checkEagerDeoptimized(frame); if (deoptimizedFrame != null) { if (!vistDeoptimizedFrame(sp, ip, deoptimizedFrame, visitor, data)) { return false; } } else { + // Note that this code also visits frames pending lazy deoptimization. UntetheredCodeInfo untetheredInfo = frame.getIPCodeInfo(); Object tether = CodeInfoAccess.acquireTether(untetheredInfo); try { diff --git a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/stack/SubstrateStackIntrospection.java b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/stack/SubstrateStackIntrospection.java index a079b17052f7..397635e9899a 100644 --- a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/stack/SubstrateStackIntrospection.java +++ b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/stack/SubstrateStackIntrospection.java @@ -294,7 +294,7 @@ private void checkDeoptimized() { private VirtualFrame lookupVirtualFrame() { IsolateThread thread = CurrentIsolate.getCurrentThread(); - DeoptimizedFrame deoptimizedFrame = Deoptimizer.checkDeoptimized(thread, sp); + DeoptimizedFrame deoptimizedFrame = Deoptimizer.checkEagerDeoptimized(thread, sp); if (deoptimizedFrame != null) { /* * Find the matching inlined frame, by skipping over the virtual frames that were @@ -314,8 +314,8 @@ private VirtualFrame lookupVirtualFrame() { public void materializeVirtualObjects(boolean invalidateCode) { IsolateThread thread = CurrentIsolate.getCurrentThread(); if (virtualFrame == null) { - DeoptimizedFrame deoptimizedFrame = 
getDeoptimizer().deoptSourceFrame(ip, false); - assert deoptimizedFrame == Deoptimizer.checkDeoptimized(thread, sp); + DeoptimizedFrame deoptimizedFrame = getDeoptimizer().deoptSourceFrameEagerly(ip, false); + assert deoptimizedFrame == Deoptimizer.checkEagerDeoptimized(thread, sp); } if (invalidateCode) { diff --git a/substratevm/src/com.oracle.svm.graal/src/com/oracle/svm/graal/hosted/DeoptimizationFeature.java b/substratevm/src/com.oracle.svm.graal/src/com/oracle/svm/graal/hosted/DeoptimizationFeature.java index b32e68717eaf..103c81f4868a 100644 --- a/substratevm/src/com.oracle.svm.graal/src/com/oracle/svm/graal/hosted/DeoptimizationFeature.java +++ b/substratevm/src/com.oracle.svm.graal/src/com/oracle/svm/graal/hosted/DeoptimizationFeature.java @@ -28,6 +28,7 @@ import java.util.List; import java.util.Map; +import com.oracle.svm.util.ReflectionUtil; import jdk.graal.compiler.graph.Node; import jdk.graal.compiler.options.OptionValues; import jdk.graal.compiler.phases.util.Providers; @@ -52,7 +53,6 @@ import com.oracle.svm.core.graal.snippets.NodeLoweringProvider; import com.oracle.svm.core.meta.MethodPointer; import com.oracle.svm.core.util.CounterFeature; -import com.oracle.svm.core.util.VMError; import com.oracle.svm.hosted.FeatureImpl.BeforeAnalysisAccessImpl; import com.oracle.svm.hosted.FeatureImpl.CompilationAccessImpl; import com.oracle.svm.hosted.meta.HostedMetaAccess; @@ -63,15 +63,10 @@ @Platforms(InternalPlatform.NATIVE_ONLY.class) public final class DeoptimizationFeature implements InternalFeature { - private static final Method deoptStubMethod; - - static { - try { - deoptStubMethod = Deoptimizer.class.getMethod("deoptStub", Pointer.class, UnsignedWord.class, UnsignedWord.class); - } catch (NoSuchMethodException ex) { - throw VMError.shouldNotReachHere(ex); - } - } + private static final Method eagerDeoptStubMethod = ReflectionUtil.lookupMethod(Deoptimizer.class, "eagerDeoptStub", Pointer.class, UnsignedWord.class, UnsignedWord.class); + private 
static final Method lazyDeoptStubPrimitiveReturnMethod = ReflectionUtil.lookupMethod(Deoptimizer.class, "lazyDeoptStubPrimitiveReturn", Pointer.class, UnsignedWord.class, + UnsignedWord.class); + private static final Method lazyDeoptStubObjectReturnMethod = ReflectionUtil.lookupMethod(Deoptimizer.class, "lazyDeoptStubObjectReturn", Pointer.class, UnsignedWord.class, UnsignedWord.class); @Override public List> getRequiredFeatures() { @@ -93,7 +88,11 @@ public void beforeAnalysis(BeforeAnalysisAccess a) { * The deoptimization stub is never called directly. It is patched in as the new return * address during deoptimization. */ - access.registerAsRoot(deoptStubMethod, true, "Deoptimization stub, registered in " + DeoptimizationFeature.class); + access.registerAsRoot(eagerDeoptStubMethod, true, "Eager deoptimization stub, registered in " + DeoptimizationFeature.class); + if (Deoptimizer.Options.UseLazyDeopt.getValue()) { + access.registerAsRoot(lazyDeoptStubPrimitiveReturnMethod, true, "Lazy deoptimization stub for primitive return values, registered in " + DeoptimizationFeature.class); + access.registerAsRoot(lazyDeoptStubObjectReturnMethod, true, "Lazy deoptimization stub for object return values, registered in " + DeoptimizationFeature.class); + } /* * The deoptimize run time call is not used for method in the native image, but only for @@ -128,6 +127,10 @@ public void beforeCompilation(BeforeCompilationAccess a) { CompilationAccessImpl config = (CompilationAccessImpl) a; config.registerAsImmutable(ImageSingletons.lookup(DeoptimizationSupport.class)); HostedMetaAccess metaAccess = config.getMetaAccess(); - DeoptimizationSupport.setDeoptStubPointer(new MethodPointer(metaAccess.lookupJavaMethod(deoptStubMethod))); + DeoptimizationSupport.setEagerDeoptStubPointer(new MethodPointer(metaAccess.lookupJavaMethod(eagerDeoptStubMethod))); + if (Deoptimizer.Options.UseLazyDeopt.getValue()) { + DeoptimizationSupport.setLazyDeoptStubPrimitiveReturnPointer(new 
MethodPointer(metaAccess.lookupJavaMethod(lazyDeoptStubPrimitiveReturnMethod))); + DeoptimizationSupport.setLazyDeoptStubObjectReturnPointer(new MethodPointer(metaAccess.lookupJavaMethod(lazyDeoptStubObjectReturnMethod))); + } } } diff --git a/substratevm/src/com.oracle.svm.graal/src/com/oracle/svm/graal/meta/SubstrateInstalledCodeImpl.java b/substratevm/src/com.oracle.svm.graal/src/com/oracle/svm/graal/meta/SubstrateInstalledCodeImpl.java index a24be13c4f0e..1531fb92a064 100644 --- a/substratevm/src/com.oracle.svm.graal/src/com/oracle/svm/graal/meta/SubstrateInstalledCodeImpl.java +++ b/substratevm/src/com.oracle.svm.graal/src/com/oracle/svm/graal/meta/SubstrateInstalledCodeImpl.java @@ -105,9 +105,9 @@ public void invalidate() { * {@link #getEntryPoint()}), and the invocation of the entry point that was read. */ @Override - public void invalidateWithoutDeoptimization() { + public void makeNonEntrant() { assert VMOperation.isInProgressAtSafepoint(); - throw VMError.unimplemented("cannot invalidate without deoptimization"); + throw VMError.unimplemented("cannot make non-entrant"); } @Override diff --git a/substratevm/src/com.oracle.svm.truffle/src/com/oracle/svm/truffle/api/SubstrateOptimizedCallTarget.java b/substratevm/src/com.oracle.svm.truffle/src/com/oracle/svm/truffle/api/SubstrateOptimizedCallTarget.java index 5a9948f633d8..ee1ec9c65ea4 100644 --- a/substratevm/src/com.oracle.svm.truffle/src/com/oracle/svm/truffle/api/SubstrateOptimizedCallTarget.java +++ b/substratevm/src/com.oracle.svm.truffle/src/com/oracle/svm/truffle/api/SubstrateOptimizedCallTarget.java @@ -140,7 +140,7 @@ protected void onCodeInstalled(SubstrateOptimizedCallTargetInstalledCode code) { if (code == installedCode) { return; } - installedCode.invalidateWithoutDeoptimization(); + installedCode.makeNonEntrant(); installedCode = code; } diff --git a/substratevm/src/com.oracle.svm.truffle/src/com/oracle/svm/truffle/api/SubstrateOptimizedCallTargetInstalledCode.java 
b/substratevm/src/com.oracle.svm.truffle/src/com/oracle/svm/truffle/api/SubstrateOptimizedCallTargetInstalledCode.java index c56f56a1c47f..f07b3b92341a 100644 --- a/substratevm/src/com.oracle.svm.truffle/src/com/oracle/svm/truffle/api/SubstrateOptimizedCallTargetInstalledCode.java +++ b/substratevm/src/com.oracle.svm.truffle/src/com/oracle/svm/truffle/api/SubstrateOptimizedCallTargetInstalledCode.java @@ -105,10 +105,10 @@ public void onAssumptionInvalidated(Object source, CharSequence reason) { } /** - * Returns false if not valid, including if {@linkplain #invalidateWithoutDeoptimization - * previously invalidated without deoptimization} in which case there can still be - * {@linkplain #isAlive live activations}. In order to entirely invalidate code in such cases, - * {@link #invalidate} must still be called even when this method returns false. + * Returns false if not valid, including if {@linkplain #makeNonEntrant previously made + * non-entrant} in which case there can still be {@linkplain #isAlive live activations}. In + * order to entirely invalidate code in such cases, {@link #invalidate} must still be called + * even when this method returns false. */ @Override public boolean isValid() { @@ -187,15 +187,15 @@ public void clearAddress() { } @Override - public void invalidateWithoutDeoptimization() { + public void makeNonEntrant() { assert VMOperation.isInProgressAtSafepoint(); if (isValid()) { - invalidateWithoutDeoptimization0(); + makeNonEntrant0(); } } @Uninterruptible(reason = "Must tether the CodeInfo.") - private void invalidateWithoutDeoptimization0() { + private void makeNonEntrant0() { this.entryPoint = 0; UntetheredCodeInfo untetheredInfo = CodeInfoTable.lookupCodeInfo(Word.pointer(this.address)); From 2805efdfa8a1f114a9483adda9c8f19c3ad9fb2d Mon Sep 17 00:00:00 2001 From: Peter Hofer Date: Fri, 7 Feb 2025 13:25:52 +0100 Subject: [PATCH 2/2] Rename option UseLazyDeopt to LazyDeoptimization. 
--- .../oracle/svm/core/SubstrateDiagnostics.java | 2 +- .../oracle/svm/core/code/CodeInfoDecoder.java | 12 +++---- .../oracle/svm/core/code/CodeInfoEncoder.java | 14 ++++---- .../svm/core/code/CodeInfoQueryResult.java | 9 ++--- .../oracle/svm/core/code/CodeInfoTable.java | 8 ++--- .../svm/core/code/RuntimeCodeCache.java | 6 ++-- .../svm/core/deopt/DeoptimizationSupport.java | 13 +++---- .../oracle/svm/core/deopt/Deoptimizer.java | 35 ++++++++++--------- .../graal/hosted/DeoptimizationFeature.java | 13 +++---- 9 files changed, 58 insertions(+), 54 deletions(-) diff --git a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/SubstrateDiagnostics.java b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/SubstrateDiagnostics.java index 118a5de0dc32..cb1daad4913b 100644 --- a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/SubstrateDiagnostics.java +++ b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/SubstrateDiagnostics.java @@ -696,7 +696,7 @@ public int maxInvocationCount() { @RestrictHeapAccess(access = RestrictHeapAccess.Access.NO_ALLOCATION, reason = "Must not allocate while printing diagnostics.") public void printDiagnostics(Log log, ErrorContext context, int maxDiagnosticLevel, int invocationCount) { log.string("EagerDeoptStub address: ").zhex(DeoptimizationSupport.getEagerDeoptStubPointer()).newline(); - if (Deoptimizer.Options.UseLazyDeopt.getValue()) { + if (Deoptimizer.Options.LazyDeoptimization.getValue()) { log.string("LazyDeoptStubPrimitiveReturn address: ").zhex(DeoptimizationSupport.getLazyDeoptStubPrimitiveReturnPointer()).newline(); log.string("LazyDeoptStubObjectReturn address: ").zhex(DeoptimizationSupport.getLazyDeoptStubObjectReturnPointer()).newline(); } diff --git a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/code/CodeInfoDecoder.java b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/code/CodeInfoDecoder.java index b57e854d33ff..98c380b8225f 100644 --- 
a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/code/CodeInfoDecoder.java +++ b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/code/CodeInfoDecoder.java @@ -25,10 +25,9 @@ package com.oracle.svm.core.code; import static com.oracle.svm.core.Uninterruptible.CALLED_FROM_UNINTERRUPTIBLE_CODE; +import static com.oracle.svm.core.deopt.Deoptimizer.Options.LazyDeoptimization; import static com.oracle.svm.core.util.VMError.shouldNotReachHereUnexpectedInput; -import static com.oracle.svm.core.deopt.Deoptimizer.Options.UseLazyDeopt; -import com.oracle.svm.core.deopt.DeoptimizationSupport; import org.graalvm.nativeimage.ImageSingletons; import org.graalvm.nativeimage.Platform; import org.graalvm.nativeimage.Platforms; @@ -41,6 +40,7 @@ import com.oracle.svm.core.c.NonmovableArrays; import com.oracle.svm.core.c.NonmovableObjectArray; import com.oracle.svm.core.code.FrameInfoDecoder.ConstantAccess; +import com.oracle.svm.core.deopt.DeoptimizationSupport; import com.oracle.svm.core.heap.ReferenceMapIndex; import com.oracle.svm.core.jdk.UninterruptibleUtils; import com.oracle.svm.core.option.HostedOptionKey; @@ -226,7 +226,7 @@ static long lookupDeoptimizationEntrypoint(CodeInfo info, long method, long enco codeInfo.exceptionOffset = loadExceptionOffset(info, entryOffset, entryFlags); codeInfo.referenceMapIndex = loadReferenceMapIndex(info, entryOffset, entryFlags); codeInfo.frameInfo = loadFrameInfo(info, entryOffset, entryFlags, constantAccess); - if (UseLazyDeopt.getValue()) { + if (LazyDeoptimization.getValue()) { codeInfo.deoptReturnValueIsObject = loadDeoptReturnValueIsObject(info, entryOffset, entryFlags) != 0; } assert codeInfo.frameInfo.isDeoptEntry() && codeInfo.frameInfo.getCaller() == null : "Deoptimization entry must not have inlined frames"; @@ -286,7 +286,7 @@ private static int loadDeoptReturnValueIsObject(CodeInfo info, long entryOffset, * The byte which encodes whether a return value is an object is stored at the end of the * codeInfo 
and is only present for deopt entry points if lazy deoptimization is enabled. */ - assert UseLazyDeopt.getValue() : "must have lazy deoptimization enabled to have this information in the codeinfo"; + assert LazyDeoptimization.getValue() : "must have lazy deoptimization enabled to have this information in the code info"; long rvoOffset = getU1(AFTER_FI_OFFSET, entryFlags); return NonmovableByteArrayReader.getU1(CodeInfoAccess.getCodeInfoEncodings(info), entryOffset + rvoOffset); } @@ -570,7 +570,7 @@ private static boolean endOfTable(long entryIP) { int maxFlag = 1 << TOTAL_BITS; /* - * When we enable useLazyDeopt, we have an extra byte in the codeinfo, which keeps track of + * With lazy deoptimization, we have an extra byte in the code info, which keeps track of * whether each infopoint is at a call that returns an object. This byte is stored after the * FI (frameInfo) section. It is accounted for by the advanceOffset() method. */ @@ -647,7 +647,7 @@ private static long offsetFI(long entryOffset, int entryFlags) { private static long advanceOffset(long entryOffset, int entryFlags) { counters().advanceOffset.inc(); long returnValueIsObjectSize = 0; - if (DeoptimizationSupport.enabled() && UseLazyDeopt.getValue() && extractFI(entryFlags) == FI_DEOPT_ENTRY_INDEX_S4) { + if (DeoptimizationSupport.enabled() && LazyDeoptimization.getValue() && extractFI(entryFlags) == FI_DEOPT_ENTRY_INDEX_S4) { returnValueIsObjectSize = Byte.BYTES; } return entryOffset + getU1(AFTER_FI_OFFSET, entryFlags) + returnValueIsObjectSize; diff --git a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/code/CodeInfoEncoder.java b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/code/CodeInfoEncoder.java index 8ec3d193d7a0..e8a5da95df49 100644 --- a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/code/CodeInfoEncoder.java +++ b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/code/CodeInfoEncoder.java @@ -24,8 +24,8 @@ */ package 
com.oracle.svm.core.code; +import static com.oracle.svm.core.deopt.Deoptimizer.Options.LazyDeoptimization; import static com.oracle.svm.core.util.VMError.shouldNotReachHereUnexpectedInput; -import static com.oracle.svm.core.deopt.Deoptimizer.Options.UseLazyDeopt; import java.util.BitSet; import java.util.EnumSet; @@ -35,9 +35,6 @@ import java.util.function.IntFunction; import java.util.stream.Stream; -import com.oracle.svm.core.deopt.DeoptimizationSupport; -import jdk.vm.ci.meta.JavaType; -import jdk.graal.compiler.word.Word; import org.graalvm.collections.EconomicSet; import org.graalvm.collections.Equivalence; import org.graalvm.nativeimage.ImageSingletons; @@ -57,6 +54,7 @@ import com.oracle.svm.core.config.ConfigurationValues; import com.oracle.svm.core.config.ObjectLayout; import com.oracle.svm.core.deopt.DeoptEntryInfopoint; +import com.oracle.svm.core.deopt.DeoptimizationSupport; import com.oracle.svm.core.feature.AutomaticallyRegisteredImageSingleton; import com.oracle.svm.core.graal.RuntimeCompilation; import com.oracle.svm.core.heap.CodeReferenceMapDecoder; @@ -91,6 +89,7 @@ import jdk.graal.compiler.core.common.util.UnsafeArrayTypeWriter; import jdk.graal.compiler.nodes.FrameState; import jdk.graal.compiler.options.Option; +import jdk.graal.compiler.word.Word; import jdk.vm.ci.code.BytecodeFrame; import jdk.vm.ci.code.DebugInfo; import jdk.vm.ci.code.RegisterValue; @@ -103,6 +102,7 @@ import jdk.vm.ci.code.site.Infopoint; import jdk.vm.ci.meta.JavaConstant; import jdk.vm.ci.meta.JavaKind; +import jdk.vm.ci.meta.JavaType; import jdk.vm.ci.meta.JavaValue; import jdk.vm.ci.meta.ResolvedJavaMethod; @@ -396,7 +396,7 @@ public void addMethod(SharedMethod method, CompilationResult compilation, int co assert entry.referenceMap == null && (entry.frameData == null || entry.frameData.isDefaultFrameData) : entry; entry.referenceMap = (ReferenceMapEncoder.Input) debugInfo.getReferenceMap(); entry.frameData = frameInfoEncoder.addDebugInfo(method, compilation, 
infopoint, totalFrameSize); - if (DeoptimizationSupport.enabled() && UseLazyDeopt.getValue() && entry.frameData.frame.isDeoptEntry && infopoint instanceof Call call && call.target != null) { + if (DeoptimizationSupport.enabled() && LazyDeoptimization.getValue() && entry.frameData.frame.isDeoptEntry && infopoint instanceof Call call && call.target != null) { ResolvedJavaMethod invokeTarget = (ResolvedJavaMethod) call.target; JavaType returnType = invokeTarget.getSignature().getReturnType(null); entry.deoptReturnValueIsObject = ((SharedType) returnType).getStorageKind().isObject(); @@ -515,9 +515,9 @@ private void encodeIPData() { writeReferenceMapIndex(encodingBuffer, data, entryFlags); writeEncodedFrameInfo(encodingBuffer, data, entryFlags); - if (DeoptimizationSupport.enabled() && UseLazyDeopt.getValue() && data.frameData != null && data.frameData.frame.isDeoptEntry) { + if (DeoptimizationSupport.enabled() && LazyDeoptimization.getValue() && data.frameData != null && data.frameData.frame.isDeoptEntry) { /* - * When we enable useLazyDeopt, we have an extra byte in the codeinfo, which keeps + * With lazy deoptimization, we have an extra byte in the code info, which keeps * track for each deopt entry point whether it is at a call that returns an object. */ encodingBuffer.putU1(data.deoptReturnValueIsObject ? 
1 : 0); diff --git a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/code/CodeInfoQueryResult.java b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/code/CodeInfoQueryResult.java index b9f663023ed2..3972d220597d 100644 --- a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/code/CodeInfoQueryResult.java +++ b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/code/CodeInfoQueryResult.java @@ -24,10 +24,12 @@ */ package com.oracle.svm.core.code; -import static com.oracle.svm.core.deopt.Deoptimizer.Options.UseLazyDeopt; +import static com.oracle.svm.core.deopt.Deoptimizer.Options.LazyDeoptimization; import org.graalvm.nativeimage.c.function.CodePointer; + import com.oracle.svm.core.Uninterruptible; +import com.oracle.svm.core.deopt.Deoptimizer.Options; import com.oracle.svm.core.heap.CodeReferenceMapDecoder; import com.oracle.svm.core.heap.CodeReferenceMapEncoder; @@ -58,8 +60,7 @@ public class CodeInfoQueryResult { protected long exceptionOffset; protected long referenceMapIndex; /** - * deoptReturnValueIsObject is only set for deopt entry points and only if - * {@link com.oracle.svm.core.deopt.Deoptimizer.Options#UseLazyDeopt UseLazyDeopt} is enabled. + * Only set for deopt entry points and only if {@link Options#LazyDeoptimization} is enabled. 
*/ protected boolean deoptReturnValueIsObject; protected FrameInfoQueryResult frameInfo; @@ -129,7 +130,7 @@ public long getReferenceMapIndex() { } public boolean getDeoptReturnValueIsObject() { - assert UseLazyDeopt.getValue(); + assert LazyDeoptimization.getValue(); return deoptReturnValueIsObject; } diff --git a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/code/CodeInfoTable.java b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/code/CodeInfoTable.java index 92b934b81847..9e2d240606b6 100644 --- a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/code/CodeInfoTable.java +++ b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/code/CodeInfoTable.java @@ -24,10 +24,11 @@ */ package com.oracle.svm.core.code; +import static com.oracle.svm.core.deopt.Deoptimizer.Options.LazyDeoptimization; + import java.util.Arrays; import java.util.List; -import jdk.graal.compiler.word.Word; import org.graalvm.nativeimage.ImageSingletons; import org.graalvm.nativeimage.c.function.CodePointer; import org.graalvm.nativeimage.hosted.Feature; @@ -57,10 +58,9 @@ import jdk.graal.compiler.api.replacements.Fold; import jdk.graal.compiler.options.Option; +import jdk.graal.compiler.word.Word; import jdk.vm.ci.code.InstalledCode; -import static com.oracle.svm.core.deopt.Deoptimizer.Options.UseLazyDeopt; - /** * Provides the main entry points to look up metadata for code, either {@link #getImageCodeCache() * ahead-of-time compiled code in the native image} or {@link CodeInfoTable#getRuntimeCodeCache() @@ -227,7 +227,7 @@ private static void invalidateInstalledCodeAtSafepoint(SubstrateInstalledCode in invalidateCodeAtSafepoint0(info); } // If lazy deoptimization is enabled, the CodeInfo will not be removed immediately. 
- if (UseLazyDeopt.getValue()) { + if (LazyDeoptimization.getValue()) { assert CodeInfoAccess.getState(info) == CodeInfo.STATE_NON_ENTRANT; } else { assert CodeInfoAccess.getState(info) == CodeInfo.STATE_REMOVED_FROM_CODE_CACHE; diff --git a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/code/RuntimeCodeCache.java b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/code/RuntimeCodeCache.java index 4f5145b7486a..880f739536d1 100644 --- a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/code/RuntimeCodeCache.java +++ b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/code/RuntimeCodeCache.java @@ -24,11 +24,10 @@ */ package com.oracle.svm.core.code; +import static com.oracle.svm.core.deopt.Deoptimizer.Options.LazyDeoptimization; import static com.oracle.svm.core.option.RuntimeOptionKey.RuntimeOptionKeyFlag.RelevantForCompilationIsolates; import static com.oracle.svm.core.snippets.KnownIntrinsics.readCallerStackPointer; -import static com.oracle.svm.core.deopt.Deoptimizer.Options.UseLazyDeopt; -import jdk.graal.compiler.word.Word; import org.graalvm.collections.EconomicMap; import org.graalvm.nativeimage.CurrentIsolate; import org.graalvm.nativeimage.IsolateThread; @@ -58,6 +57,7 @@ import jdk.graal.compiler.options.Option; import jdk.graal.compiler.options.OptionKey; import jdk.graal.compiler.options.OptionType; +import jdk.graal.compiler.word.Word; public class RuntimeCodeCache { @@ -212,7 +212,7 @@ protected void invalidateMethod(CodeInfo info) { */ Deoptimizer.deoptimizeInRange(CodeInfoAccess.getCodeStart(info), CodeInfoAccess.getCodeEnd(info), false); - boolean removeNow = !UseLazyDeopt.getValue(); + boolean removeNow = !LazyDeoptimization.getValue(); continueInvalidation(info, removeNow); } diff --git a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/deopt/DeoptimizationSupport.java b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/deopt/DeoptimizationSupport.java index 
1d4651f62948..e45caf0b8503 100644 --- a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/deopt/DeoptimizationSupport.java +++ b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/deopt/DeoptimizationSupport.java @@ -24,7 +24,8 @@ */ package com.oracle.svm.core.deopt; -import jdk.graal.compiler.api.replacements.Fold; +import static com.oracle.svm.core.Uninterruptible.CALLED_FROM_UNINTERRUPTIBLE_CODE; + import org.graalvm.nativeimage.ImageSingletons; import org.graalvm.nativeimage.Platform; import org.graalvm.nativeimage.Platforms; @@ -35,7 +36,7 @@ import com.oracle.svm.core.Uninterruptible; import com.oracle.svm.core.heap.UnknownPrimitiveField; -import static com.oracle.svm.core.Uninterruptible.CALLED_FROM_UNINTERRUPTIBLE_CODE; +import jdk.graal.compiler.api.replacements.Fold; public class DeoptimizationSupport { @@ -74,14 +75,14 @@ public static void setEagerDeoptStubPointer(CFunctionPointer ptr) { @Platforms(Platform.HOSTED_ONLY.class) public static void setLazyDeoptStubPrimitiveReturnPointer(CFunctionPointer ptr) { assert get().lazyDeoptStubPrimitiveReturnPointer == null : "multiple lazyDeoptStubPrimitiveReturn methods registered"; - assert Deoptimizer.Options.UseLazyDeopt.getValue() : "lazy deoptimization not enabled"; + assert Deoptimizer.Options.LazyDeoptimization.getValue() : "lazy deoptimization not enabled"; get().lazyDeoptStubPrimitiveReturnPointer = ptr; } @Platforms(Platform.HOSTED_ONLY.class) public static void setLazyDeoptStubObjectReturnPointer(CFunctionPointer ptr) { assert get().lazyDeoptStubObjectReturnPointer == null : "multiple lazyDeoptStubObjectReturn methods registered"; - assert Deoptimizer.Options.UseLazyDeopt.getValue() : "lazy deoptimization not enabled"; + assert Deoptimizer.Options.LazyDeoptimization.getValue() : "lazy deoptimization not enabled"; get().lazyDeoptStubObjectReturnPointer = ptr; } @@ -94,7 +95,7 @@ public static CFunctionPointer getEagerDeoptStubPointer() { @Uninterruptible(reason = 
CALLED_FROM_UNINTERRUPTIBLE_CODE, mayBeInlined = true) public static CFunctionPointer getLazyDeoptStubPrimitiveReturnPointer() { - assert Deoptimizer.Options.UseLazyDeopt.getValue() : "lazy deoptimization not enabled"; + assert Deoptimizer.Options.LazyDeoptimization.getValue() : "lazy deoptimization not enabled"; CFunctionPointer ptr = get().lazyDeoptStubPrimitiveReturnPointer; assert ptr.rawValue() != 0; return ptr; @@ -102,7 +103,7 @@ public static CFunctionPointer getLazyDeoptStubPrimitiveReturnPointer() { @Uninterruptible(reason = CALLED_FROM_UNINTERRUPTIBLE_CODE, mayBeInlined = true) public static CFunctionPointer getLazyDeoptStubObjectReturnPointer() { - assert Deoptimizer.Options.UseLazyDeopt.getValue() : "lazy deoptimization not enabled"; + assert Deoptimizer.Options.LazyDeoptimization.getValue() : "lazy deoptimization not enabled"; CFunctionPointer ptr = get().lazyDeoptStubObjectReturnPointer; assert ptr.rawValue() != 0; return ptr; diff --git a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/deopt/Deoptimizer.java b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/deopt/Deoptimizer.java index e5e476e0134b..2d4d16f722aa 100644 --- a/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/deopt/Deoptimizer.java +++ b/substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/deopt/Deoptimizer.java @@ -33,9 +33,6 @@ import java.nio.ByteOrder; import java.util.ArrayList; -import com.oracle.svm.core.option.HostedOptionKey; -import com.oracle.svm.core.snippets.ExceptionUnwind; -import com.oracle.svm.core.stack.StackOverflowCheck; import org.graalvm.nativeimage.CurrentIsolate; import org.graalvm.nativeimage.IsolateThread; import org.graalvm.nativeimage.Platform; @@ -70,11 +67,14 @@ import com.oracle.svm.core.meta.SharedMethod; import com.oracle.svm.core.meta.SubstrateObjectConstant; import com.oracle.svm.core.monitor.MonitorSupport; +import com.oracle.svm.core.option.HostedOptionKey; import com.oracle.svm.core.option.RuntimeOptionKey; 
+import com.oracle.svm.core.snippets.ExceptionUnwind; import com.oracle.svm.core.snippets.KnownIntrinsics; import com.oracle.svm.core.stack.JavaFrame; import com.oracle.svm.core.stack.JavaStackWalker; import com.oracle.svm.core.stack.StackFrameVisitor; +import com.oracle.svm.core.stack.StackOverflowCheck; import com.oracle.svm.core.thread.JavaVMOperation; import com.oracle.svm.core.thread.VMOperation; import com.oracle.svm.core.thread.VMThreads; @@ -277,7 +277,7 @@ public static class Options { * {@code gpReturnValue} as an object reference. */ @Option(help = "Enables delayed deoptimization of runtime-compiled code. This slightly enlarges code metadata.")// - public static final HostedOptionKey UseLazyDeopt = new HostedOptionKey<>(false); + public static final HostedOptionKey LazyDeoptimization = new HostedOptionKey<>(false); } /** @@ -359,7 +359,7 @@ private static DeoptimizedFrame checkEagerDeoptimized0(Pointer sp, CodePointer i */ @Uninterruptible(reason = CALLED_FROM_UNINTERRUPTIBLE_CODE, mayBeInlined = true) public static boolean checkLazyDeoptimized(JavaFrame frame) { - if (DeoptimizationSupport.enabled() && Options.UseLazyDeopt.getValue()) { + if (DeoptimizationSupport.enabled() && Options.LazyDeoptimization.getValue()) { return frame.getIsPendingLazyDeopt(); } return false; @@ -372,7 +372,7 @@ public static boolean checkLazyDeoptimized(JavaFrame frame) { */ @Uninterruptible(reason = CALLED_FROM_UNINTERRUPTIBLE_CODE, mayBeInlined = true) public static boolean checkLazyDeoptimized(IsolateThread thread, Pointer sp) { - if (DeoptimizationSupport.enabled() && Options.UseLazyDeopt.getValue()) { + if (DeoptimizationSupport.enabled() && Options.LazyDeoptimization.getValue()) { CodePointer ip = FrameAccess.singleton().readReturnAddress(thread, sp); return checkLazyDeoptimized0(ip); } @@ -384,7 +384,7 @@ public static boolean checkLazyDeoptimized(IsolateThread thread, Pointer sp) { */ @Uninterruptible(reason = CALLED_FROM_UNINTERRUPTIBLE_CODE, mayBeInlined = true) 
public static boolean checkLazyDeoptimized(CodePointer ip) { - if (DeoptimizationSupport.enabled() && Options.UseLazyDeopt.getValue()) { + if (DeoptimizationSupport.enabled() && Options.LazyDeoptimization.getValue()) { return checkLazyDeoptimized0(ip); } return false; @@ -395,7 +395,7 @@ public static boolean checkLazyDeoptimized(CodePointer ip) { */ @Uninterruptible(reason = CALLED_FROM_UNINTERRUPTIBLE_CODE, mayBeInlined = true) private static boolean checkLazyDeoptimized0(CodePointer ip) { - assert Options.UseLazyDeopt.getValue(); + assert Options.LazyDeoptimization.getValue(); return ip.equal(DeoptimizationSupport.getLazyDeoptStubPrimitiveReturnPointer()) || ip.equal(DeoptimizationSupport.getLazyDeoptStubObjectReturnPointer()); } @@ -514,7 +514,7 @@ protected boolean visitDeoptimizedFrame(Pointer originalSP, CodePointer deoptStu */ @NeverInline("Inlining of this method would require that we have deopt targets for callees of this method (SVM internals).") public static void deoptimizeFrame(Pointer sp, boolean ignoreNonDeoptimizable, SpeculationReason speculation) { - boolean deoptEagerly = !Options.UseLazyDeopt.getValue(); + boolean deoptEagerly = !Options.LazyDeoptimization.getValue(); deoptimizeFrame0(sp, ignoreNonDeoptimizable, speculation, deoptEagerly); } @@ -560,7 +560,7 @@ private static class DeoptimizeFrameOperation extends JavaVMOperation { this.speculation = speculation; this.targetThread = targetThread; this.deoptEagerly = deoptEagerly; - if (Options.UseLazyDeopt.getValue() && deoptEagerly) { + if (Options.LazyDeoptimization.getValue() && deoptEagerly) { /* * If lazy deoptimization is enabled, eager deoptimization is only used for stack * introspection. 
We enforce that eager deoptimization cannot be applied to other @@ -758,7 +758,7 @@ private static boolean isNonNullValue(UnsignedWord pointer) { @Uninterruptible(reason = "gpReturnValue may hold unmanaged reference") public static UnsignedWord lazyDeoptStubObjectReturn(Pointer framePointer, UnsignedWord gpReturnValue, UnsignedWord fpReturnValue) { assert PointerUtils.isAMultiple(KnownIntrinsics.readStackPointer(), Word.unsigned(ConfigurationValues.getTarget().stackAlignment)); - assert Options.UseLazyDeopt.getValue(); + assert Options.LazyDeoptimization.getValue(); assert VMThreads.StatusSupport.isStatusJava() : "Deopt stub execution must not be visible to other threads."; boolean hasException = ExceptionUnwind.getLazyDeoptStubShouldReturnToExceptionHandler(); @@ -782,7 +782,7 @@ public static UnsignedWord lazyDeoptStubPrimitiveReturn(Pointer framePointer, Un * Exception Object. */ assert PointerUtils.isAMultiple(KnownIntrinsics.readStackPointer(), Word.unsigned(ConfigurationValues.getTarget().stackAlignment)); - assert Options.UseLazyDeopt.getValue(); + assert Options.LazyDeoptimization.getValue(); assert VMThreads.StatusSupport.isStatusJava() : "Deopt stub execution must not be visible to other threads."; assert !ExceptionUnwind.getLazyDeoptStubShouldReturnToExceptionHandler(); @@ -998,7 +998,7 @@ static int savedBasePointerSize() { */ public void deoptSourceFrameLazily(CodePointer pc, boolean ignoreNonDeoptimizable) { assert VMOperation.isInProgress(); - if (!Options.UseLazyDeopt.getValue()) { + if (!Options.LazyDeoptimization.getValue()) { deoptSourceFrameEagerly(pc, ignoreNonDeoptimizable); return; } @@ -1042,7 +1042,7 @@ public DeoptimizedFrame deoptSourceFrameEagerly(CodePointer pc, boolean ignoreNo @Uninterruptible(reason = "Prevent stack walks from seeing an inconsistent stack.") private static void installLazyDeoptStubReturnAddress(boolean returnValueIsObject, Pointer sourceSp, IsolateThread targetThread) { - assert Options.UseLazyDeopt.getValue(); + 
assert Options.LazyDeoptimization.getValue(); assert VMOperation.isInProgress(); CodePointer oldReturnAddress = FrameAccess.singleton().readReturnAddress(targetThread, sourceSp); @@ -1058,7 +1058,7 @@ private static void installLazyDeoptStubReturnAddress(boolean returnValueIsObjec @Uninterruptible(reason = "Prevent stack walks from seeing an inconsistent stack.") private static void uninstallLazyDeoptStubReturnAddress(Pointer sourceSp, IsolateThread thread) { - assert Options.UseLazyDeopt.getValue(); + assert Options.LazyDeoptimization.getValue(); assert VMOperation.isInProgress(); CodePointer oldReturnAddress = sourceSp.readWord(0); assert oldReturnAddress.isNonNull(); @@ -1086,7 +1086,7 @@ private static final class EagerDeoptSourceFrameOperation extends JavaVMOperatio this.pc = pc; this.ignoreNonDeoptimizable = ignoreNonDeoptimizable; this.result = null; - if (Options.UseLazyDeopt.getValue()) { + if (Options.LazyDeoptimization.getValue()) { assert receiver.deoptState.targetThread == CurrentIsolate.getCurrentThread() : "With lazy deoptimization enabled, eager deoptimization cannot be used to deoptimize other threads"; } } @@ -1119,7 +1119,8 @@ private static boolean canBeDeoptimized(FrameInfoQueryResult frame) { } private DeoptimizedFrame doDeoptSourceFrame(CodePointer pc, boolean ignoreNonDeoptimizable, boolean isEagerDeopt) { - assert !Options.UseLazyDeopt.getValue() || deoptState.targetThread == CurrentIsolate.getCurrentThread() : "with lazy deopt enabled, this method may only be called for the current thread"; + assert !Options.LazyDeoptimization.getValue() || + deoptState.targetThread == CurrentIsolate.getCurrentThread() : "with lazy deoptimization, this method may only be called for the current thread"; assert !isEagerDeopt || VMOperation.isInProgressAtSafepoint() : "eager deopts may only happen at a safepoint"; DeoptimizedFrame existing = checkEagerDeoptimized(deoptState.targetThread, deoptState.sourceSp); diff --git 
a/substratevm/src/com.oracle.svm.graal/src/com/oracle/svm/graal/hosted/DeoptimizationFeature.java b/substratevm/src/com.oracle.svm.graal/src/com/oracle/svm/graal/hosted/DeoptimizationFeature.java index 103c81f4868a..336670acd96c 100644 --- a/substratevm/src/com.oracle.svm.graal/src/com/oracle/svm/graal/hosted/DeoptimizationFeature.java +++ b/substratevm/src/com.oracle.svm.graal/src/com/oracle/svm/graal/hosted/DeoptimizationFeature.java @@ -28,10 +28,6 @@ import java.util.List; import java.util.Map; -import com.oracle.svm.util.ReflectionUtil; -import jdk.graal.compiler.graph.Node; -import jdk.graal.compiler.options.OptionValues; -import jdk.graal.compiler.phases.util.Providers; import org.graalvm.nativeimage.ImageSingletons; import org.graalvm.nativeimage.Platforms; import org.graalvm.nativeimage.hosted.Feature; @@ -56,6 +52,11 @@ import com.oracle.svm.hosted.FeatureImpl.BeforeAnalysisAccessImpl; import com.oracle.svm.hosted.FeatureImpl.CompilationAccessImpl; import com.oracle.svm.hosted.meta.HostedMetaAccess; +import com.oracle.svm.util.ReflectionUtil; + +import jdk.graal.compiler.graph.Node; +import jdk.graal.compiler.options.OptionValues; +import jdk.graal.compiler.phases.util.Providers; /** * Feature to allow deoptimization in a generated native image. @@ -89,7 +90,7 @@ public void beforeAnalysis(BeforeAnalysisAccess a) { * address during deoptimization. 
*/ access.registerAsRoot(eagerDeoptStubMethod, true, "Eager deoptimization stub, registered in " + DeoptimizationFeature.class); - if (Deoptimizer.Options.UseLazyDeopt.getValue()) { + if (Deoptimizer.Options.LazyDeoptimization.getValue()) { access.registerAsRoot(lazyDeoptStubPrimitiveReturnMethod, true, "Lazy deoptimization stub for primitive return values, registered in " + DeoptimizationFeature.class); access.registerAsRoot(lazyDeoptStubObjectReturnMethod, true, "Lazy deoptimization stub for object return values, registered in " + DeoptimizationFeature.class); } @@ -128,7 +129,7 @@ public void beforeCompilation(BeforeCompilationAccess a) { config.registerAsImmutable(ImageSingletons.lookup(DeoptimizationSupport.class)); HostedMetaAccess metaAccess = config.getMetaAccess(); DeoptimizationSupport.setEagerDeoptStubPointer(new MethodPointer(metaAccess.lookupJavaMethod(eagerDeoptStubMethod))); - if (Deoptimizer.Options.UseLazyDeopt.getValue()) { + if (Deoptimizer.Options.LazyDeoptimization.getValue()) { DeoptimizationSupport.setLazyDeoptStubPrimitiveReturnPointer(new MethodPointer(metaAccess.lookupJavaMethod(lazyDeoptStubPrimitiveReturnMethod))); DeoptimizationSupport.setLazyDeoptStubObjectReturnPointer(new MethodPointer(metaAccess.lookupJavaMethod(lazyDeoptStubObjectReturnMethod))); }