diff --git a/src/coreclr/nativeaot/Runtime/GCMemoryHelpers.inl b/src/coreclr/nativeaot/Runtime/GCMemoryHelpers.inl
index aff497778d99e0..8bf7eb68585cfc 100644
--- a/src/coreclr/nativeaot/Runtime/GCMemoryHelpers.inl
+++ b/src/coreclr/nativeaot/Runtime/GCMemoryHelpers.inl
@@ -229,7 +229,7 @@ FORCEINLINE void InlinedBulkWriteBarrier(void* pMemStart, size_t cbMemSize)
         // Compute the shadow heap address corresponding to the beginning of the range of heap addresses modified
         // and in the process range check it to make sure we have the shadow version allocated.
         uintptr_t* shadowSlot = (uintptr_t*)(g_GCShadow + ((uint8_t*)pMemStart - g_lowest_address));
-        if (shadowSlot <= (uintptr_t*)g_GCShadowEnd)
+        if (shadowSlot < (uintptr_t*)g_GCShadowEnd)
         {
             // Iterate over every pointer sized slot in the range, copying data from the real heap to the shadow heap.
             // As we perform each copy we need to recheck the real heap contents with an ordered read to ensure we're
@@ -239,6 +239,7 @@ FORCEINLINE void InlinedBulkWriteBarrier(void* pMemStart, size_t cbMemSize)
 
             uintptr_t* realSlot = (uintptr_t*)pMemStart;
             uintptr_t slotCount = cbMemSize / sizeof(uintptr_t);
+            ASSERT(slotCount < (uintptr_t*)g_GCShadowEnd - shadowSlot);
             do
             {
                 // Update shadow slot from real slot.
diff --git a/src/coreclr/nativeaot/Runtime/amd64/WriteBarriers.S b/src/coreclr/nativeaot/Runtime/amd64/WriteBarriers.S
index ac4c4b496252a4..4ec18c7d8864eb 100644
--- a/src/coreclr/nativeaot/Runtime/amd64/WriteBarriers.S
+++ b/src/coreclr/nativeaot/Runtime/amd64/WriteBarriers.S
@@ -27,7 +27,7 @@
         jb      LOCAL_LABEL(\BASENAME\()_UpdateShadowHeap_PopThenDone_\REFREG)
         add     \DESTREG, [C_VAR(g_GCShadow)]
         cmp     \DESTREG, [C_VAR(g_GCShadowEnd)]
-        ja      LOCAL_LABEL(\BASENAME\()_UpdateShadowHeap_PopThenDone_\REFREG)
+        jae     LOCAL_LABEL(\BASENAME\()_UpdateShadowHeap_PopThenDone_\REFREG)
 
         // Update the shadow heap.
         mov     [\DESTREG], \REFREG
diff --git a/src/coreclr/nativeaot/Runtime/amd64/WriteBarriers.asm b/src/coreclr/nativeaot/Runtime/amd64/WriteBarriers.asm
index a249f0f043d29b..5a6dcc666fec5f 100644
--- a/src/coreclr/nativeaot/Runtime/amd64/WriteBarriers.asm
+++ b/src/coreclr/nativeaot/Runtime/amd64/WriteBarriers.asm
@@ -43,7 +43,7 @@ UPDATE_GC_SHADOW macro BASENAME, REFREG, DESTREG
         jb      &BASENAME&_UpdateShadowHeap_PopThenDone_&REFREG&
         add     DESTREG, [g_GCShadow]
         cmp     DESTREG, [g_GCShadowEnd]
-        ja      &BASENAME&_UpdateShadowHeap_PopThenDone_&REFREG&
+        jae     &BASENAME&_UpdateShadowHeap_PopThenDone_&REFREG&
 
         ;; Update the shadow heap.
         mov     [DESTREG], REFREG
diff --git a/src/coreclr/nativeaot/Runtime/arm/WriteBarriers.S b/src/coreclr/nativeaot/Runtime/arm/WriteBarriers.S
index 1db0dbdc01231a..863e17cc9fdf9f 100644
--- a/src/coreclr/nativeaot/Runtime/arm/WriteBarriers.S
+++ b/src/coreclr/nativeaot/Runtime/arm/WriteBarriers.S
@@ -38,7 +38,7 @@
         ldr     r12, =C_FUNC(g_GCShadowEnd)
         ldr     r12, [r12]
         cmp     \DESTREG, r12
-        jhi     LOCAL_LABEL(\BASENAME\()_UpdateShadowHeap_PopThenDone_\REFREG)
+        bhs     LOCAL_LABEL(\BASENAME\()_UpdateShadowHeap_PopThenDone_\REFREG)
 
         // Update the shadow heap.
         str     \REFREG, [\DESTREG]
@@ -105,15 +105,15 @@ LOCAL_LABEL(\BASENAME\()_UpdateShadowHeap_Done_\REFREG):
 
         // If the reference is to an object that's not in an ephemeral generation we have no need to track it
         // (since the object won't be collected or moved by an ephemeral collection).
-        ldr     r12, =C_FUNC(g_ephemeral_low)
+        ldr     r12, =C_FUNC(g_ephemeral_low)
         ldr     r12, [r12]
         cmp     \REFREG, r12
         blo     LOCAL_LABEL(\BASENAME\()_EXIT_\REFREG)
-        ldr     r12, =C_FUNC(g_ephemeral_high)
+        ldr     r12, =C_FUNC(g_ephemeral_high)
         ldr     r12, [r12]
-        cmp     \REFREG, r12
-        bhi     LOCAL_LABEL(\BASENAME\()_EXIT_\REFREG)
+        cmp     \REFREG, r12
+        bhs     LOCAL_LABEL(\BASENAME\()_EXIT_\REFREG)
 
         // We have a location on the GC heap being updated with a reference to an ephemeral object so we must
         // track this write. The location address is translated into an offset in the card table bitmap. We set
@@ -167,11 +167,11 @@ ALTERNATE_ENTRY RhpAssignRef
         //
         // Note that none of this is relevant for single cpu machines. We may choose to implement a
         // uniprocessor specific version of this barrier if uni-proc becomes a significant scenario again.
-        dmb
+        dmb
 
         // Write the reference into the location. Note that we rely on the fact that no GC can occur between here
         // and the card table update we may perform below.
-ALTERNATE_ENTRY "RhpAssignRefAvLocation"\EXPORT_REG_NAME // WriteBarrierFunctionAvLocation
+ALTERNATE_ENTRY "RhpAssignRefAvLocation"\EXPORT_REG_NAME // WriteBarrierFunctionAvLocation
 .ifc \REFREG, r1
 ALTERNATE_ENTRY RhpAssignRefAVLocation
 .endif
@@ -198,14 +198,14 @@ DEFINE_UNCHECKED_WRITE_BARRIER r1, r1
 
         // The location being updated might not even lie in the GC heap (a handle or stack location for instance),
         // in which case no write barrier is required.
-        ldr     r12, =C_FUNC(g_lowest_address)
+        ldr     r12, =C_FUNC(g_lowest_address)
         ldr     r12, [r12]
         cmp     r0, r12
         blo     LOCAL_LABEL(\BASENAME\()_NoBarrierRequired_\REFREG)
-        ldr     r12, =C_FUNC(g_highest_address)
+        ldr     r12, =C_FUNC(g_highest_address)
         ldr     r12, [r12]
         cmp     r0, r12
-        bhi     LOCAL_LABEL(\BASENAME\()_NoBarrierRequired_\REFREG)
+        bhs     LOCAL_LABEL(\BASENAME\()_NoBarrierRequired_\REFREG)
 
         DEFINE_UNCHECKED_WRITE_BARRIER_CORE \BASENAME, \REFREG
@@ -270,7 +270,7 @@ LEAF_ENTRY RhpCheckedLockCmpXchg, _TEXT
         // barrier must occur before the object reference update, so we have to do it unconditionally even
         // though the update may fail below.
         dmb
-ALTERNATE_ENTRY RhpCheckedLockCmpXchgAVLocation
+ALTERNATE_ENTRY RhpCheckedLockCmpXchgAVLocation
 LOCAL_LABEL(RhpCheckedLockCmpXchgRetry):
         ldrex   r3, [r0]
         cmp     r2, r3
@@ -337,7 +337,7 @@ LEAF_ENTRY RhpByRefAssignRef, _TEXT
         ldr     r3, =C_FUNC(g_highest_address)
         ldr     r3, [r3]
         cmp     r0, r3
-        bhi     LOCAL_LABEL(RhpByRefAssignRef_NotInHeap)
+        bhs     LOCAL_LABEL(RhpByRefAssignRef_NotInHeap)
 
         // Update the shadow copy of the heap with the same value just written to the same heap. (A no-op unless
         // we're in a debug build and write barrier checking has been enabled).
@@ -352,7 +352,7 @@ LEAF_ENTRY RhpByRefAssignRef, _TEXT
         ldr     r3, =C_FUNC(g_ephemeral_high)
         ldr     r3, [r3]
         cmp     r2, r3
-        bhi     LOCAL_LABEL(RhpByRefAssignRef_NotInHeap)
+        bhs     LOCAL_LABEL(RhpByRefAssignRef_NotInHeap)
 
         // move current r0 value into r2 and then increment the pointers
         mov     r2, r0
diff --git a/src/coreclr/nativeaot/Runtime/arm64/WriteBarriers.S b/src/coreclr/nativeaot/Runtime/arm64/WriteBarriers.S
index b02737673caa60..3a6cce95800bcc 100644
--- a/src/coreclr/nativeaot/Runtime/arm64/WriteBarriers.S
+++ b/src/coreclr/nativeaot/Runtime/arm64/WriteBarriers.S
@@ -38,14 +38,14 @@
         // Transform destReg into the equivalent address in the shadow heap.
         PREPARE_EXTERNAL_VAR_INDIRECT g_lowest_address, X9
         subs    \destReg, \destReg, x9
-        blt     0f
+        blo     0f
 
         PREPARE_EXTERNAL_VAR_INDIRECT g_GCShadow, X9
         add     \destReg, \destReg, x9
 
         PREPARE_EXTERNAL_VAR_INDIRECT g_GCShadowEnd, X9
         cmp     \destReg, x9
-        bgt     0f
+        bhs     0f
 
         // Update the shadow heap.
         str     \refReg, [\destReg]
@@ -120,11 +120,11 @@
         // an object not on the epehemeral segment.
         PREPARE_EXTERNAL_VAR_INDIRECT g_ephemeral_low, x\trash
         cmp     \refReg, x\trash
-        blt     0f
+        blo     0f
 
         PREPARE_EXTERNAL_VAR_INDIRECT g_ephemeral_high, x\trash
         cmp     \refReg, x\trash
-        bge     0f
+        bhs     0f
 
         // Set this objects card, if it has not already been set.
@@ -172,11 +172,13 @@
 
         PREPARE_EXTERNAL_VAR_INDIRECT g_lowest_address, x\trash
         cmp     \destReg, x\trash
-        blt     0f
 
         PREPARE_EXTERNAL_VAR_INDIRECT g_highest_address, x\trash
-        cmp     \destReg, x\trash
-        bgt     0f
+
+        // If \destReg >= g_lowest_address, compare \destReg to g_highest_address.
+        // Otherwise, set the C flag (0x2) to take the next branch.
+        ccmp    \destReg, x\trash, #0x2, hs
+        bhs     0f
 
         INSERT_UNCHECKED_WRITE_BARRIER_CORE \destReg, \refReg, \trash, \trash2
diff --git a/src/coreclr/nativeaot/Runtime/arm64/WriteBarriers.asm b/src/coreclr/nativeaot/Runtime/arm64/WriteBarriers.asm
index 9aff215ffc57be..3889b08efcf02a 100644
--- a/src/coreclr/nativeaot/Runtime/arm64/WriteBarriers.asm
+++ b/src/coreclr/nativeaot/Runtime/arm64/WriteBarriers.asm
@@ -54,7 +54,7 @@ INVALIDGCVALUE EQU 0xCCCCCCCD
         adrp    x12, g_lowest_address
         ldr     x12, [x12, g_lowest_address]
         subs    $destReg, $destReg, x12
-        blt     %ft0
+        blo     %ft0
 
         adrp    x12, $g_GCShadow
         ldr     x12, [x12, $g_GCShadow]
@@ -63,7 +63,7 @@ INVALIDGCVALUE EQU 0xCCCCCCCD
         adrp    x12, $g_GCShadowEnd
         ldr     x12, [x12, $g_GCShadowEnd]
         cmp     $destReg, x12
-        bgt     %ft0
+        bhs     %ft0
 
         ;; Update the shadow heap.
         str     $refReg, [$destReg]
@@ -127,12 +127,12 @@ INVALIDGCVALUE EQU 0xCCCCCCCD
         adrp    x12, g_ephemeral_low
         ldr     x12, [x12, g_ephemeral_low]
         cmp     $refReg, x12
-        blt     %ft0
+        blo     %ft0
 
         adrp    x12, g_ephemeral_high
         ldr     x12, [x12, g_ephemeral_high]
         cmp     $refReg, x12
-        bge     %ft0
+        bhs     %ft0
 
         ;; Set this object's card, if it hasn't already been set.
         adrp    x12, g_card_table
@@ -170,12 +170,14 @@ INVALIDGCVALUE EQU 0xCCCCCCCD
 
         adrp    x12, g_lowest_address
         ldr     x12, [x12, g_lowest_address]
         cmp     $destReg, x12
-        blt     %ft0
 
         adrp    x12, g_highest_address
         ldr     x12, [x12, g_highest_address]
-        cmp     $destReg, x12
-        bgt     %ft0
+
+        ;; If $destReg >= g_lowest_address, compare $destReg to g_highest_address.
+        ;; Otherwise, set the C flag (0x2) to take the next branch.
+        ccmp    $destReg, x12, #0x2, hs
+        bhs     %ft0
 
         INSERT_UNCHECKED_WRITE_BARRIER_CORE $destReg, $refReg, $trashReg
diff --git a/src/coreclr/nativeaot/Runtime/i386/WriteBarriers.asm b/src/coreclr/nativeaot/Runtime/i386/WriteBarriers.asm
index a7038354094567..d718f7aa085f0a 100644
--- a/src/coreclr/nativeaot/Runtime/i386/WriteBarriers.asm
+++ b/src/coreclr/nativeaot/Runtime/i386/WriteBarriers.asm
@@ -48,7 +48,7 @@ UPDATE_GC_SHADOW macro BASENAME, DESTREG, REFREG
         jb      &BASENAME&_UpdateShadowHeap_PopThenDone_&DESTREG&_&REFREG&
         add     DESTREG, [g_GCShadow]
         cmp     DESTREG, [g_GCShadowEnd]
-        ja      &BASENAME&_UpdateShadowHeap_PopThenDone_&DESTREG&_&REFREG&
+        jae     &BASENAME&_UpdateShadowHeap_PopThenDone_&DESTREG&_&REFREG&
 
         ;; Update the shadow heap.
         mov     [DESTREG], REFREG
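For context on the comparison changes above: g_GCShadowEnd, g_ephemeral_high and g_highest_address are exclusive upper bounds, and the values being compared are addresses, so every check is a half-open, unsigned range test. A minimal C++ sketch of that invariant (the helper name and the sample bounds are hypothetical, chosen only for illustration):

```cpp
#include <cstdint>
#include <cstdio>

// Half-open, unsigned range check: 'highExclusive' is one past the last valid
// address, which is how g_GCShadowEnd / g_ephemeral_high / g_highest_address
// are used above. The "skip" branch is taken when addr < low (blo/jb) or when
// addr >= highExclusive (bhs/jae); the old signed forms (blt/bgt/bge) would be
// wrong for addresses in the upper half of the address space, and the old
// strict forms (bhi/ja) wrongly accepted addr == highExclusive.
static bool IsInRange(uintptr_t addr, uintptr_t low, uintptr_t highExclusive)
{
    return addr >= low && addr < highExclusive;
}

int main()
{
    // Hypothetical bounds standing in for g_lowest_address / g_highest_address.
    const uintptr_t low  = 0x1000;
    const uintptr_t high = 0x2000;

    std::printf("%d\n", IsInRange(0x0fff, low, high)); // 0: below the range
    std::printf("%d\n", IsInRange(0x1000, low, high)); // 1: first valid address
    std::printf("%d\n", IsInRange(0x1fff, low, high)); // 1: last valid address
    std::printf("%d\n", IsInRange(0x2000, low, high)); // 0: equal to the exclusive bound
    return 0;
}
```

The arm64 `ccmp` rewrite performs the same two-sided test with a single branch: when the first compare already shows the address below `g_lowest_address`, `ccmp` substitutes NZCV = #0x2 (carry set) so the following `bhs` bails out; otherwise it executes the upper-bound compare, and `bhs` bails out when the address is at or above `g_highest_address`.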