17 changes: 10 additions & 7 deletions substratevm/mx.substratevm/testhello.py
@@ -57,7 +57,7 @@ def __init__(self, name, regexps):
self.name = name
if not isinstance(regexps, list):
regexps = [regexps]
self.rexps = [re.compile(regexp) for regexp in regexps]
self.rexps = [re.compile(r) for r in regexps if r is not None]

# Check that successive lines of a gdb command's output text
# match the corresponding regexp patterns provided when this
@@ -180,6 +180,9 @@ def test():
# disable printing of address symbols
execute("set print symbol off")

hub_ref_size = int(execute("printf \"%d\", sizeof('java.lang.Object'::__hub__)"))
fixed_idhash_field = (hub_ref_size > 4)

# Print DefaultGreeter and check the modifiers of its methods and fields
exec_string = execute("ptype 'hello.Hello$DefaultGreeter'")
rexp = [r"type = class hello\.Hello\$DefaultGreeter : public hello\.Hello\$Greeter {",
@@ -241,7 +244,7 @@ def test():
r"%s<java.lang.Object> = {"%(spaces_pattern),
r"%s<_objhdr> = {"%(spaces_pattern),
r"%shub = %s,"%(spaces_pattern, address_pattern),
r"%sidHash = %s"%(spaces_pattern, address_pattern),
r"%sidHash = %s"%(spaces_pattern, address_pattern) if fixed_idhash_field else None,
r"%s}, <No data fields>}, "%(spaces_pattern),
r"%smembers of java\.lang\.String\[\]:"%(spaces_pattern),
r"%slen = 0x0,"%(spaces_pattern),
@@ -260,7 +263,7 @@ def test():
r"%s<java.lang.Object> = {"%(spaces_pattern),
r"%s<_objhdr> = {"%(spaces_pattern),
r"%shub = %s,"%(spaces_pattern, address_pattern),
r"%sidHash = %s"%(spaces_pattern, address_pattern),
r"%sidHash = %s"%(spaces_pattern, address_pattern) if fixed_idhash_field else None,
r"%s}, <No data fields>},"%(spaces_pattern),
r"%smembers of java\.lang\.Class:"%(spaces_pattern),
r"%sname = %s,"%(spaces_pattern, address_pattern),
@@ -270,7 +273,7 @@ def test():
r"%s<java.lang.Object> = {"%(spaces_pattern),
r"%s<_objhdr> = {"%(spaces_pattern),
r"%shub = %s,"%(spaces_pattern, address_pattern),
r"%sidHash = %s"%(spaces_pattern, address_pattern),
r"%sidHash = %s"%(spaces_pattern, address_pattern) if fixed_idhash_field else None,
r"%s}, <No data fields>},"%(spaces_pattern),
r"%smembers of java\.lang\.Class:"%(spaces_pattern),
r"%sname = %s,"%(spaces_pattern, address_pattern),
@@ -310,7 +313,7 @@ def test():
r"%s<java.lang.Object> = {"%(spaces_pattern),
r"%s<_objhdr> = {"%(spaces_pattern),
r"%shub = %s,"%(spaces_pattern, address_pattern),
r"%sidHash = %s"%(spaces_pattern, address_pattern),
r"%sidHash = %s"%(spaces_pattern, address_pattern) if fixed_idhash_field else None,
r"%s}, <No data fields>},"%(spaces_pattern),
r"%smembers of java\.lang\.Class:"%(spaces_pattern),
r"%sname = %s,"%(spaces_pattern, address_pattern),
@@ -426,12 +429,12 @@ def test():
if isolates:
rexp = [r"type = struct _objhdr {",
r"%s_z_\.java\.lang\.Class \*hub;"%(spaces_pattern),
r"%sint idHash;"%(spaces_pattern),
r"%sint idHash;"%(spaces_pattern) if fixed_idhash_field else None,
r"}"]
else:
rexp = [r"type = struct _objhdr {",
r"%sjava\.lang\.Class \*hub;"%(spaces_pattern),
r"%sint idHash;"%(spaces_pattern),
r"%sint idHash;"%(spaces_pattern) if fixed_idhash_field else None,
r"}"]

checker = Checker('ptype _objhdr', rexp)
@@ -263,6 +263,17 @@ public int getTenuringAge() {
return tenuringThreshold;
}

@Override
public void onCollectionBegin(boolean completeCollection, long requestingNanoTime) {
// Capture the fraction of bytes in aligned chunks at the start to include all allocated
// (also dead) objects, because we use it to reserve aligned chunks for future allocations
UnsignedWord youngChunkBytes = GCImpl.getGCImpl().getAccounting().getYoungChunkBytesBefore();
if (youngChunkBytes.notEqual(0)) {
UnsignedWord youngAlignedChunkBytes = HeapImpl.getHeapImpl().getYoungGeneration().getAlignedChunkBytes();
avgYoungGenAlignedChunkFraction.sample(UnsignedUtils.toDouble(youngAlignedChunkBytes) / UnsignedUtils.toDouble(youngChunkBytes));
}
}

@Override
public UnsignedWord getMinimumHeapSize() {
return sizes.minHeapSize;
@@ -94,11 +94,15 @@ class AdaptiveCollectionPolicy extends AbstractCollectionPolicy {
* (disabled by default) which uses linear least-square fitting without discounting.
*/
private static final boolean ADAPTIVE_SIZE_USE_COST_ESTIMATORS = true;
/** Number of space size adjustments before cost estimators are used. HotSpot default: 20. */
private static final int ADAPTIVE_SIZE_POLICY_INITIALIZING_STEPS = ADAPTIVE_SIZE_POLICY_READY_THRESHOLD;
/** The minimum increase in throughput in percent for expanding a space by 1% of its size. */
private static final double ADAPTIVE_SIZE_ESTIMATOR_MIN_SIZE_COST_TRADEOFF = 0.5;
/**
* The minimum estimated decrease in {@link #gcCost()} in percent to decide in favor of
* expanding a space by 1% of the combined size of {@link #edenSize} and {@link #promoSize}.
*/
private static final double ADAPTIVE_SIZE_ESTIMATOR_MIN_TOTAL_SIZE_COST_TRADEOFF = 0.5;
/** The effective number of most recent data points used by estimator (exponential decay). */
private static final int ADAPTIVE_SIZE_COST_ESTIMATORS_HISTORY_LENGTH = 12;
private static final double ADAPTIVE_SIZE_COST_ESTIMATORS_HISTORY_LENGTH = 12;
/** Threshold for triggering a complete collection after repeated minor collections. */
private static final int CONSECUTIVE_MINOR_TO_MAJOR_COLLECTION_PAUSE_TIME_RATIO = 2;
/**
@@ -240,7 +244,7 @@ private void computeSurvivorSpaceSizeAndThreshold(boolean isSurvivorOverflow, Un
protected void computeEdenSpaceSize(@SuppressWarnings("unused") boolean completeCollection, @SuppressWarnings("unused") GCCause cause) {
boolean expansionReducesCost = true; // general assumption
if (shouldUseEstimator(youngGenChangeForMinorThroughput, minorGcCost())) {
expansionReducesCost = expansionSignificantlyReducesCost(minorCostEstimator, edenSize);
expansionReducesCost = expansionSignificantlyReducesTotalCost(minorCostEstimator, edenSize, majorGcCost(), promoSize);
/*
* Note that if the estimator thinks expanding does not lead to significant improvement,
* shrink so to not get stuck in a supposed optimum and to keep collecting data points.
@@ -282,15 +286,17 @@ private static boolean shouldUseEstimator(long genChangeForThroughput, double co
return ADAPTIVE_SIZE_USE_COST_ESTIMATORS && genChangeForThroughput > ADAPTIVE_SIZE_POLICY_INITIALIZING_STEPS && cost <= ADAPTIVE_SIZE_COST_ESTIMATOR_GC_COST_LIMIT;
}

private static boolean expansionSignificantlyReducesCost(ReciprocalLeastSquareFit estimator, UnsignedWord size) {
private static boolean expansionSignificantlyReducesTotalCost(ReciprocalLeastSquareFit estimator, UnsignedWord size, double otherCost, UnsignedWord otherSize) {
double totalSize = UnsignedUtils.toDouble(size.add(otherSize));
double x0 = UnsignedUtils.toDouble(size);
double deltax = (1.01 - 1) * x0;
if (deltax == 0) { // division by zero below
return false;
if (deltax == 0 || totalSize == 0) { // division by zero below
return true; // general assumption for space expansion
}
double y0 = estimator.estimate(x0);
double y1 = y0 * (1 - 0.01 * ADAPTIVE_SIZE_ESTIMATOR_MIN_SIZE_COST_TRADEOFF);
double y0 = estimator.estimate(x0) + otherCost;
double y1 = y0 * (1 - deltax / totalSize * ADAPTIVE_SIZE_ESTIMATOR_MIN_TOTAL_SIZE_COST_TRADEOFF);
double minSlope = (y1 - y0) / deltax;

double estimatedSlope = estimator.getSlope(x0);
return estimatedSlope <= minSlope;
}
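
For reference, the threshold in expansionSignificantlyReducesTotalCost reduces algebraically: with deltax = 0.01 * x0 and y1 = y0 * (1 - deltax / totalSize * ADAPTIVE_SIZE_ESTIMATOR_MIN_TOTAL_SIZE_COST_TRADEOFF),

    minSlope = (y1 - y0) / deltax = -y0 * ADAPTIVE_SIZE_ESTIMATOR_MIN_TOTAL_SIZE_COST_TRADEOFF / totalSize

so estimatedSlope <= minSlope holds exactly when the fitted cost curve predicts that growing eden and promo by 1% of their combined size cuts the current total cost y0 by at least ADAPTIVE_SIZE_ESTIMATOR_MIN_TOTAL_SIZE_COST_TRADEOFF percent (0.5% here), matching the Javadoc of that constant above.
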
@@ -383,16 +389,10 @@ public void onCollectionBegin(boolean completeCollection, long requestingNanoTim
latestMinorMutatorIntervalNanos = timer.getMeasuredNanos();
}

// Capture the fraction of bytes in aligned chunks at the start to include all allocated
// (also dead) objects, because we use it to reserve aligned chunks for future allocations
UnsignedWord youngChunkBytes = GCImpl.getGCImpl().getAccounting().getYoungChunkBytesBefore();
if (youngChunkBytes.notEqual(0)) {
UnsignedWord youngAlignedChunkBytes = HeapImpl.getHeapImpl().getYoungGeneration().getAlignedChunkBytes();
avgYoungGenAlignedChunkFraction.sample(UnsignedUtils.toDouble(youngAlignedChunkBytes) / UnsignedUtils.toDouble(youngChunkBytes));
}

timer.reset();
timer.open(); // measure collection pause

super.onCollectionBegin(completeCollection, requestingNanoTime);
}

@Override
@@ -458,7 +458,7 @@ private void computeOldGenSpaceSize(UnsignedWord oldLive) { // compute_old_gen_f

boolean expansionReducesCost = true; // general assumption
if (shouldUseEstimator(oldGenChangeForMajorThroughput, majorGcCost())) {
expansionReducesCost = expansionSignificantlyReducesCost(majorCostEstimator, promoSize);
expansionReducesCost = expansionSignificantlyReducesTotalCost(majorCostEstimator, promoSize, minorGcCost(), edenSize);
/*
* Note that if the estimator thinks expanding does not lead to significant improvement,
* shrink so to not get stuck in a supposed optimum and to keep collecting data points.
@@ -39,17 +39,34 @@
class AdaptiveWeightedAverage {
static final int OLD_THRESHOLD = 100;

private final int weight;
/** @see #computeEffectiveHistoryLengthForWeight */
static double computeWeightForEffectiveHistoryLength(double length) {
assert length > 0;
return 100.0 * (1.0 - Math.pow(Math.E, -1.0 / length));
}

/**
* Computes the effective history length for the given weight, which is the number of data
* points after which the former history is discounted to 1/e, i.e., its time constant.
*/
static double computeEffectiveHistoryLengthForWeight(double weight) {
assert weight > 0 && weight <= 100;
return -1.0 / Math.log(1.0 - weight / 100.0);
}

private final double weight;

private double average;
private long sampleCount;
private boolean isOld;

AdaptiveWeightedAverage(int weight) {
/** @param weight Weight of newest sample in percent, from 0 (exclusive) to 100 (inclusive). */
AdaptiveWeightedAverage(double weight) {
this(weight, 0);
}

AdaptiveWeightedAverage(int weight, double avg) {
AdaptiveWeightedAverage(double weight, double avg) {
assert weight > 0 && weight <= 100;
this.weight = weight;
this.average = avg;
}
@@ -76,16 +93,16 @@ protected double computeAdaptiveAverage(double sample, double avg) {
* it meaningful. We'd like the first weight used to be 1, the second to be 1/2, etc until
* we have OLD_THRESHOLD/weight samples.
*/
long countWeight = 0;
double countWeight = 0;
if (!isOld) { // avoid division by zero if the counter wraps
countWeight = OLD_THRESHOLD / sampleCount;
countWeight = OLD_THRESHOLD / (double) sampleCount;
}
long adaptiveWeight = Math.max(weight, countWeight);
double adaptiveWeight = Math.max(weight, countWeight);
return expAvg(avg, sample, adaptiveWeight);
}

private static double expAvg(double avg, double sample, long adaptiveWeight) {
assert adaptiveWeight <= 100 : "weight must be a percentage";
private static double expAvg(double avg, double sample, double adaptiveWeight) {
assert adaptiveWeight > 0 && adaptiveWeight <= 100 : "weight must be a percentage";
return (100.0 - adaptiveWeight) * avg / 100.0 + adaptiveWeight * sample / 100.0;
}
}
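
As an aside, the two new helpers above are inverses of each other: a sample weight of w percent corresponds to an effective history length L = -1 / ln(1 - w/100), and w = 100 * (1 - e^(-1/L)) recovers the weight, which is presumably why the weight fields (and ADAPTIVE_SIZE_COST_ESTIMATORS_HISTORY_LENGTH) switch from int to double in this change. A minimal standalone sketch (hypothetical class, not SubstrateVM code) that checks the round trip:

    // Hypothetical demo mirroring the formulas in AdaptiveWeightedAverage above.
    public class WeightHistoryDemo {
        static double weightForEffectiveHistoryLength(double length) {
            return 100.0 * (1.0 - Math.pow(Math.E, -1.0 / length));
        }

        static double effectiveHistoryLengthForWeight(double weight) {
            return -1.0 / Math.log(1.0 - weight / 100.0);
        }

        public static void main(String[] args) {
            double w = weightForEffectiveHistoryLength(12);  // ~7.99% for a 12-sample history
            double l = effectiveHistoryLengthForWeight(w);   // recovers ~12.0
            System.out.printf("weight = %.2f%%, history length = %.2f%n", w, l);
        }
    }
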
@@ -103,7 +120,7 @@ class AdaptivePaddedAverage extends AdaptiveWeightedAverage {
private double paddedAverage;
private double deviation;

AdaptivePaddedAverage(int weight, int padding) {
AdaptivePaddedAverage(double weight, int padding) {
this(weight, padding, false);
}

@@ -112,7 +129,7 @@ class AdaptivePaddedAverage extends AdaptiveWeightedAverage {
* allowed to change. This is to prevent zero samples from drastically changing the
* padded average.
*/
AdaptivePaddedAverage(int weight, int padding, boolean noZeroDeviations) {
AdaptivePaddedAverage(double weight, int padding, boolean noZeroDeviations) {
super(weight);
this.padding = padding;
this.noZeroDeviations = noZeroDeviations;
@@ -117,11 +117,13 @@ static UnsignedWord getCommittedObjectMemory(AlignedHeader that) {
return HeapChunk.getEndOffset(that).subtract(getObjectsStartOffset());
}

@Uninterruptible(reason = "Called from uninterruptible code.", mayBeInlined = true)
public static AlignedHeader getEnclosingChunk(Object obj) {
Pointer ptr = Word.objectToUntrackedPointer(obj);
return getEnclosingChunkFromObjectPointer(ptr);
}

@Uninterruptible(reason = "Called from uninterruptible code.", mayBeInlined = true)
public static AlignedHeader getEnclosingChunkFromObjectPointer(Pointer ptr) {
return (AlignedHeader) PointerUtils.roundDown(ptr, HeapParameters.getAlignedHeapChunkAlignment());
}
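
As an illustration of the lookup above (the alignment value here is an assumption for the example, not taken from this change): aligned heap chunks start at addresses that are multiples of the aligned-chunk alignment, so the enclosing chunk of an object is its address rounded down to that power-of-two boundary, which is what PointerUtils.roundDown computes.

    // Hypothetical standalone sketch of the round-down used by getEnclosingChunkFromObjectPointer.
    public class ChunkRoundDownDemo {
        public static void main(String[] args) {
            long alignment = 1L << 20;               // assume a 1 MiB aligned-chunk alignment
            long objPtr = 0x7f32_1a34_5678L;         // arbitrary object address
            long chunkStart = objPtr & -alignment;   // round down; valid for power-of-two alignments
            System.out.printf("chunk start = 0x%x%n", chunkStart);  // prints 0x7f321a300000
        }
    }
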
@@ -179,7 +179,7 @@ public long getUnallocatedBytes() {
this.position = position;

/* Cache to prevent frequent lookups of the object layout from ImageSingletons. */
minimumObjectSize = ConfigurationValues.getObjectLayout().getMinimumObjectSize();
this.minimumObjectSize = ConfigurationValues.getObjectLayout().getMinImageHeapObjectSize();
}

public long getPosition() {
@@ -57,7 +57,7 @@ public class ChunkedImageHeapPartition extends AbstractImageHeapPartition {
this.hugeObjects = hugeObjects;

/* Cache to prevent frequent lookups of the object layout from ImageSingletons. */
minimumObjectSize = ConfigurationValues.getObjectLayout().getMinimumObjectSize();
this.minimumObjectSize = ConfigurationValues.getObjectLayout().getMinImageHeapObjectSize();
}

boolean usesUnalignedObjects() {
@@ -134,7 +134,7 @@ private static NavigableMap<Long, Queue<ImageHeapObject>> createSortedObjectsMap
for (ImageHeapObject obj : sorted) {
long objSize = obj.getSize();
if (objSize != currentObjectsSize) {
assert objSize > currentObjectsSize && objSize >= ConfigurationValues.getObjectLayout().getMinimumObjectSize();
assert objSize > currentObjectsSize && objSize >= ConfigurationValues.getObjectLayout().getMinImageHeapObjectSize();
currentObjectsSize = objSize;
currentQueue = new ArrayDeque<>();
map.put(currentObjectsSize, currentQueue);
@@ -32,6 +32,7 @@
import com.oracle.svm.core.heap.GCCause;
import com.oracle.svm.core.heap.PhysicalMemory;
import com.oracle.svm.core.util.UserError;
import com.oracle.svm.util.ReflectionUtil;

/** The interface for a garbage collection policy. All sizes are in bytes. */
public interface CollectionPolicy {
@@ -53,32 +54,38 @@ static String getInitialPolicyName() {
@Platforms(Platform.HOSTED_ONLY.class)
static CollectionPolicy getInitialPolicy() {
String name = getInitialPolicyName();
Class<? extends CollectionPolicy> clazz = getPolicyClass(name);
return ReflectionUtil.newInstance(clazz);
}

@Platforms(Platform.HOSTED_ONLY.class)
static Class<? extends CollectionPolicy> getPolicyClass(String name) {
switch (name) {
case "Adaptive":
return new AdaptiveCollectionPolicy();
return AdaptiveCollectionPolicy.class;
case "AggressiveShrink":
return new AggressiveShrinkCollectionPolicy();
return AggressiveShrinkCollectionPolicy.class;
case "Proportionate":
return new ProportionateSpacesPolicy();
return ProportionateSpacesPolicy.class;
case "BySpaceAndTime":
return new BasicCollectionPolicies.BySpaceAndTime();
return BasicCollectionPolicies.BySpaceAndTime.class;
case "OnlyCompletely":
return new BasicCollectionPolicies.OnlyCompletely();
return BasicCollectionPolicies.OnlyCompletely.class;
case "OnlyIncrementally":
return new BasicCollectionPolicies.OnlyIncrementally();
return BasicCollectionPolicies.OnlyIncrementally.class;
case "NeverCollect":
return new BasicCollectionPolicies.NeverCollect();
return BasicCollectionPolicies.NeverCollect.class;
}
throw UserError.abort("Policy %s does not exist.", name);
}

@Platforms(Platform.HOSTED_ONLY.class)
static int getMaxSurvivorSpaces(Integer userValue) {
String name = getInitialPolicyName();
if ("Adaptive".equals(name) || "Proportionate".equals(name)) {
return AbstractCollectionPolicy.getMaxSurvivorSpaces(userValue);
if (BasicCollectionPolicies.BasicPolicy.class.isAssignableFrom(getPolicyClass(name))) {
return BasicCollectionPolicies.getMaxSurvivorSpaces(userValue);
}
return BasicCollectionPolicies.getMaxSurvivorSpaces(userValue);
return AbstractCollectionPolicy.getMaxSurvivorSpaces(userValue);
}

static boolean shouldCollectYoungGenSeparately(boolean defaultValue) {
@@ -196,7 +196,6 @@ private void collectOperation(CollectionVMOperationData data) {
startCollectionOrExit();

timers.resetAllExceptMutator();
collectionEpoch = collectionEpoch.add(1);

/* Flush all TLAB chunks to eden. */
ThreadLocalAllocation.disableAndFlushForAllThreads();
@@ -358,7 +357,7 @@ private void printGCBefore(String cause) {
Log verboseGCLog = Log.log();
HeapImpl heap = HeapImpl.getHeapImpl();
sizeBefore = ((SubstrateGCOptions.PrintGC.getValue() || SerialGCOptions.PrintHeapShape.getValue()) ? getChunkBytes() : WordFactory.zero());
if (SubstrateGCOptions.VerboseGC.getValue() && getCollectionEpoch().equal(1)) {
if (SubstrateGCOptions.VerboseGC.getValue() && getCollectionEpoch().equal(0)) {
verboseGCLog.string("[Heap policy parameters: ").newline();
verboseGCLog.string(" YoungGenerationSize: ").unsigned(getPolicy().getMaximumYoungGenerationSize()).newline();
verboseGCLog.string(" MaximumHeapSize: ").unsigned(getPolicy().getMaximumHeapSize()).newline();
@@ -1151,6 +1150,7 @@ private void startCollectionOrExit() {

private void finishCollection() {
assert collectionInProgress;
collectionEpoch = collectionEpoch.add(1);
collectionInProgress = false;
}
