From 70113c294399495497a29fbb7870410ec9a799c4 Mon Sep 17 00:00:00 2001 From: Aleksandar Micic Date: Wed, 28 Mar 2018 23:54:26 -0400 Subject: [PATCH] Read Barrier for CompareAndSwapObject Compare-And-Swap on an object reference (like the one used in sun/misc/Unsafe, but also any other used internally by the JVM), while primarily a store operation, is also an implicit read (it reads the existing value to be compared with a provided compare value, before the store itself), hence it needs a read barrier. This change introduces a pre-read barrier in all VM/GC CompareAndSwapObject helpers; it drives the evacuating read barrier (triggered by the value that is read) used by Concurrent Scavenger. An interesting dilemma is which of the two pre operations (read and store) is to be executed first. Currently in OpenJ9, we do not have any collector that has both of them active at the same time, so the order is not important at the moment. However, in theory (and it actually may happen later in OpenJ9), they could both be active. An example, for the Gencon GC policy, is the same read barrier on a reference value (Concurrent Scavenger) combined with a Snap-Shot-At-The-Beginning store barrier (for concurrent global marking). Since the read barrier can modify the value, and SATB cares about the value being overwritten, the read barrier should go first to bring it up to date, before SATB evaluates it. On the other hand, from the SATB perspective, we do not care about the very original value of the field (even before the read barrier modifies it), since it is an object being evacuated/collected and is not part of the Snapshot (it is the copy of that object that we want preserved as part of the Snapshot). 
Signed-off-by: Aleksandar Micic --- runtime/gc_base/ObjectAccessBarrier.cpp | 59 ++++++++++--------- runtime/gc_include/ObjectAccessBarrierAPI.hpp | 4 ++ 2 files changed, 34 insertions(+), 29 deletions(-) diff --git a/runtime/gc_base/ObjectAccessBarrier.cpp b/runtime/gc_base/ObjectAccessBarrier.cpp index 4855568c525..4cbbb0c45dc 100644 --- a/runtime/gc_base/ObjectAccessBarrier.cpp +++ b/runtime/gc_base/ObjectAccessBarrier.cpp @@ -1,5 +1,5 @@ /******************************************************************************* - * Copyright (c) 1991, 2017 IBM Corp. and others + * Copyright (c) 1991, 2018 IBM Corp. and others * * This program and the accompanying materials are made available under * the terms of the Eclipse Public License 2.0 which accompanies this @@ -1501,6 +1501,7 @@ MM_ObjectAccessBarrier::compareAndSwapObject(J9VMThread *vmThread, J9Object *des fj9object_t *actualDestAddress; fj9object_t compareValue = convertTokenFromPointer(compareObject); fj9object_t swapValue = convertTokenFromPointer(swapObject); + bool result = false; /* TODO: To make this API more consistent, it should probably be split into separate * indexable and non-indexable versions. Currently, when called on an indexable object, @@ -1513,25 +1514,22 @@ MM_ObjectAccessBarrier::compareAndSwapObject(J9VMThread *vmThread, J9Object *des actualDestAddress = J9OAB_MIXEDOBJECT_EA(destObject, ((UDATA)destAddress - (UDATA)destObject), fj9object_t); } - /* Note: This is a bit of a special case -- we call preObjectStore even though the store - * may not actually occur. This is safe and correct for Metronome. 
- */ - preObjectStore(vmThread, destObject, actualDestAddress, swapObject, true); - protectIfVolatileBefore(vmThread, true, false, false); - - /* One of these conditions will evaluate to true at compile time based on whether fields are 32 or 64 bits */ - bool result = false; - if (sizeof(fj9object_t) == sizeof(UDATA)) { - result = ((UDATA)compareValue == MM_AtomicOperations::lockCompareExchange((UDATA *)actualDestAddress, (UDATA)compareValue, (UDATA)swapValue)); - } else if (sizeof(fj9object_t) == sizeof(U_32)) { + if (preObjectRead(vmThread, destObject, actualDestAddress)) { + /* Note: This is a bit of a special case -- we call preObjectStore even though the store + * may not actually occur. This is safe and correct for Metronome. + */ + preObjectStore(vmThread, destObject, actualDestAddress, swapObject, true); + protectIfVolatileBefore(vmThread, true, false, false); + +#if defined(J9VM_GC_COMPRESSED_POINTERS) result = ((U_32)(UDATA)compareValue == MM_AtomicOperations::lockCompareExchangeU32((U_32 *)actualDestAddress, (U_32)(UDATA)compareValue, (U_32)(UDATA)swapValue)); - } else { - assume(false, "fj9object_t must be either U_32 (compressed pointers / 32-bit VM) or UDATA"); - } - - protectIfVolatileAfter(vmThread, true, false, false); - if (result) { - postObjectStore(vmThread, destObject, actualDestAddress, swapObject, true); +#else + result = ((UDATA)compareValue == MM_AtomicOperations::lockCompareExchange((UDATA *)actualDestAddress, (UDATA)compareValue, (UDATA)swapValue)); +#endif + protectIfVolatileAfter(vmThread, true, false, false); + if (result) { + postObjectStore(vmThread, destObject, actualDestAddress, swapObject, true); + } } return result; @@ -1647,6 +1645,7 @@ MM_ObjectAccessBarrier::compareAndExchangeObject(J9VMThread *vmThread, J9Object fj9object_t *actualDestAddress; fj9object_t compareValue = convertTokenFromPointer(compareObject); fj9object_t swapValue = convertTokenFromPointer(swapObject); + J9Object *result = NULL; /* TODO: To make this API 
more consistent, it should probably be split into separate * indexable and non-indexable versions. Currently, when called on an indexable object, @@ -1659,21 +1658,23 @@ MM_ObjectAccessBarrier::compareAndExchangeObject(J9VMThread *vmThread, J9Object actualDestAddress = J9OAB_MIXEDOBJECT_EA(destObject, ((UDATA)destAddress - (UDATA)destObject), fj9object_t); } - /* Note: This is a bit of a special case -- we call preObjectStore even though the store - * may not actually occur. This is safe and correct for Metronome. - */ - preObjectStore(vmThread, destObject, actualDestAddress, swapObject, true); - protectIfVolatileBefore(vmThread, true, false, false); + if (preObjectRead(vmThread, destObject, actualDestAddress)) { + /* Note: This is a bit of a special case -- we call preObjectStore even though the store + * may not actually occur. This is safe and correct for Metronome. + */ + preObjectStore(vmThread, destObject, actualDestAddress, swapObject, true); + protectIfVolatileBefore(vmThread, true, false, false); #if defined(J9VM_GC_COMPRESSED_POINTERS) - J9Object *result = (J9Object *)(UDATA)MM_AtomicOperations::lockCompareExchangeU32((U_32 *)actualDestAddress, (U_32)(UDATA)compareValue, (U_32)(UDATA)swapValue); + result = (J9Object *)(UDATA)MM_AtomicOperations::lockCompareExchangeU32((U_32 *)actualDestAddress, (U_32)(UDATA)compareValue, (U_32)(UDATA)swapValue); #else - J9Object *result = (J9Object *)MM_AtomicOperations::lockCompareExchange((UDATA *)actualDestAddress, (UDATA)compareValue, (UDATA)swapValue); + result = (J9Object *)MM_AtomicOperations::lockCompareExchange((UDATA *)actualDestAddress, (UDATA)compareValue, (UDATA)swapValue); #endif - protectIfVolatileAfter(vmThread, true, false, false); - if (result) { - postObjectStore(vmThread, destObject, actualDestAddress, swapObject, true); + protectIfVolatileAfter(vmThread, true, false, false); + if (result) { + postObjectStore(vmThread, destObject, actualDestAddress, swapObject, true); + } } return 
result; diff --git a/runtime/gc_include/ObjectAccessBarrierAPI.hpp b/runtime/gc_include/ObjectAccessBarrierAPI.hpp index 4413637d049..2a3e68f6762 100644 --- a/runtime/gc_include/ObjectAccessBarrierAPI.hpp +++ b/runtime/gc_include/ObjectAccessBarrierAPI.hpp @@ -485,6 +485,7 @@ class MM_ObjectAccessBarrierAPI if (j9gc_modron_wrtbar_always == _writeBarrierType) { return (1 == vmThread->javaVM->memoryManagerFunctions->j9gc_objaccess_compareAndSwapObject(vmThread, destObject, (J9Object **)destAddress, compareObject, swapObject)); } else { + preMixedObjectReadObject(vmThread, destObject, destAddress); preMixedObjectStoreObject(vmThread, destObject, destAddress, swapObject); protectIfVolatileBefore(isVolatile, false); @@ -526,6 +527,7 @@ class MM_ObjectAccessBarrierAPI if (j9gc_modron_wrtbar_always == _writeBarrierType) { return vmThread->javaVM->memoryManagerFunctions->j9gc_objaccess_compareAndExchangeObject(vmThread, destObject, (J9Object **)destAddress, compareObject, swapObject); } else { + preMixedObjectReadObject(vmThread, destObject, destAddress); preMixedObjectStoreObject(vmThread, destObject, destAddress, swapObject); protectIfVolatileBefore(isVolatile, false); @@ -1887,6 +1889,7 @@ class MM_ObjectAccessBarrierAPI if (j9gc_modron_wrtbar_always == _writeBarrierType) { return (1 == vmThread->javaVM->memoryManagerFunctions->j9gc_objaccess_compareAndSwapObject(vmThread, destArray, (J9Object **)actualAddress, compareObject, swapObject)); } else { + preIndexableObjectReadObject(vmThread, destArray, actualAddress); preIndexableObjectStoreObject(vmThread, destArray, actualAddress, swapObject); protectIfVolatileBefore(isVolatile, false); @@ -1928,6 +1931,7 @@ class MM_ObjectAccessBarrierAPI if (j9gc_modron_wrtbar_always == _writeBarrierType) { return vmThread->javaVM->memoryManagerFunctions->j9gc_objaccess_compareAndExchangeObject(vmThread, destArray, (J9Object **)actualAddress, compareObject, swapObject); } else { + preIndexableObjectReadObject(vmThread, destArray, 
actualAddress); preIndexableObjectStoreObject(vmThread, destArray, actualAddress, swapObject); protectIfVolatileBefore(isVolatile, false);