diff --git a/src/coreclr/jit/gentree.cpp b/src/coreclr/jit/gentree.cpp
index 9b08a8fea32c5..5e05856d8d9e6 100644
--- a/src/coreclr/jit/gentree.cpp
+++ b/src/coreclr/jit/gentree.cpp
@@ -1389,6 +1389,7 @@ CallArgs::CallArgs()
     , m_hasRetBuffer(false)
     , m_isVarArgs(false)
     , m_abiInformationDetermined(false)
+    , m_newAbiInformationDetermined(false)
     , m_hasRegArgs(false)
     , m_hasStackArgs(false)
     , m_argsComplete(false)
@@ -2568,6 +2569,8 @@ bool GenTreeCall::Equals(GenTreeCall* c1, GenTreeCall* c2)
 //
 void CallArgs::ResetFinalArgsAndABIInfo()
 {
+    m_newAbiInformationDetermined = false;
+
     if (!IsAbiInformationDetermined())
     {
         return;
diff --git a/src/coreclr/jit/gentree.h b/src/coreclr/jit/gentree.h
index f705466cb50df..c7b5caa4b38ae 100644
--- a/src/coreclr/jit/gentree.h
+++ b/src/coreclr/jit/gentree.h
@@ -4824,10 +4824,11 @@ class CallArgs
     // made for this call.
     unsigned m_padStkAlign;
 #endif
-    bool m_hasThisPointer           : 1;
-    bool m_hasRetBuffer             : 1;
-    bool m_isVarArgs                : 1;
-    bool m_abiInformationDetermined : 1;
+    bool m_hasThisPointer              : 1;
+    bool m_hasRetBuffer                : 1;
+    bool m_isVarArgs                   : 1;
+    bool m_abiInformationDetermined    : 1;
+    bool m_newAbiInformationDetermined : 1;
     // True if we have one or more register arguments.
     bool m_hasRegArgs : 1;
     // True if we have one or more stack arguments.
@@ -4885,8 +4886,10 @@ class CallArgs
         PushFront(comp, arg);
     }
 
-    void ResetFinalArgsAndABIInfo();
     void AddFinalArgsAndDetermineABIInfo(Compiler* comp, GenTreeCall* call);
+    void ResetFinalArgsAndABIInfo();
+
+    void DetermineNewABIInfo(Compiler* comp, GenTreeCall* call);
 
     void ArgsComplete(Compiler* comp, GenTreeCall* call);
     void EvalArgsToTemps(Compiler* comp, GenTreeCall* call);
@@ -4902,7 +4905,15 @@ class CallArgs
     void SetIsVarArgs() { m_isVarArgs = true; }
     void ClearIsVarArgs() { m_isVarArgs = false; }
     bool IsAbiInformationDetermined() const { return m_abiInformationDetermined; }
-    bool AreArgsComplete() const { return m_argsComplete; }
+    bool IsNewAbiInformationDetermined() const { return m_newAbiInformationDetermined; }
+
+    // TODO-Remove: Workaround for bad codegen in MSVC versions < 19.41, see
+    // https://github.com/dotnet/runtime/pull/104370#issuecomment-2222910359
+#ifdef _MSC_VER
+    __declspec(noinline)
+#endif
+    bool AreArgsComplete() const { return m_argsComplete; }
+
     bool HasRegArgs() const { return m_hasRegArgs; }
     bool HasStackArgs() const { return m_hasStackArgs; }
     bool NeedsTemps() const { return m_needsTemps; }
diff --git a/src/coreclr/jit/morph.cpp b/src/coreclr/jit/morph.cpp
index 1045d96258bd6..3b971780ff232 100644
--- a/src/coreclr/jit/morph.cpp
+++ b/src/coreclr/jit/morph.cpp
@@ -1087,8 +1087,9 @@ void CallArgs::ArgsComplete(Compiler* comp, GenTreeCall* call)
         // In "fgMorphMultiRegStructArg" we will expand the arg into a GT_FIELD_LIST with multiple indirections, so
         // here we consider spilling it into a local. We also need to spill it in case we have a node that we do not
         // currently handle in multi-reg morphing.
+        // This logic can be skipped when the arg is already in the right multireg arg shape.
         //
-        if (varTypeIsStruct(argx) && !arg.m_needTmp)
+        if (varTypeIsStruct(argx) && !arg.m_needTmp && !argx->OperIs(GT_FIELD_LIST))
         {
             if ((arg.AbiInfo.NumRegs > 0) && ((arg.AbiInfo.NumRegs + arg.AbiInfo.GetStackSlotsNumber()) > 1))
             {
@@ -1650,36 +1651,64 @@ void CallArgs::EvalArgsToTemps(Compiler* comp, GenTreeCall* call)
                 noway_assert(argx->gtType != TYP_STRUCT);
 #endif
 
-                unsigned tmpVarNum = comp->lvaGrabTemp(true DEBUGARG("argument with side effect"));
-
-                setupArg = comp->gtNewTempStore(tmpVarNum, argx);
-
-                LclVarDsc* varDsc     = comp->lvaGetDesc(tmpVarNum);
-                var_types  lclVarType = genActualType(argx->gtType);
-                var_types  scalarType = TYP_UNKNOWN;
-
-                if (setupArg->OperIsCopyBlkOp())
+                if (argx->OperIs(GT_FIELD_LIST))
                 {
-                    setupArg = comp->fgMorphCopyBlock(setupArg);
-#if defined(TARGET_ARMARCH) || defined(UNIX_AMD64_ABI) || defined(TARGET_LOONGARCH64) || defined(TARGET_RISCV64)
-                    if ((lclVarType == TYP_STRUCT) && (arg.AbiInfo.ArgType != TYP_STRUCT))
+                    GenTreeFieldList* fieldList = argx->AsFieldList();
+                    fieldList->gtFlags &= ~GTF_ALL_EFFECT;
+                    for (GenTreeFieldList::Use& use : fieldList->Uses())
                     {
-                        scalarType = arg.AbiInfo.ArgType;
+                        unsigned tmpVarNum = comp->lvaGrabTemp(true DEBUGARG("argument with side effect"));
+                        GenTree* store     = comp->gtNewTempStore(tmpVarNum, use.GetNode());
+
+                        if (setupArg == nullptr)
+                        {
+                            setupArg = store;
+                        }
+                        else
+                        {
+                            setupArg = comp->gtNewOperNode(GT_COMMA, TYP_VOID, setupArg, store);
+                        }
+
+                        use.SetNode(comp->gtNewLclvNode(tmpVarNum, genActualType(use.GetNode())));
+                        fieldList->AddAllEffectsFlags(use.GetNode());
                     }
-#endif // TARGET_ARMARCH || defined (UNIX_AMD64_ABI) || defined(TARGET_LOONGARCH64) || defined(TARGET_RISCV64)
-                }
 
-                // scalarType can be set to a wider type for ARM or unix amd64 architectures: (3 => 4) or (5,6,7 =>
-                // 8)
-                if ((scalarType != TYP_UNKNOWN) && (scalarType != lclVarType))
-                {
-                    // Create a GT_LCL_FLD using the wider type to go to the late argument list
-                    defArg = comp->gtNewLclFldNode(tmpVarNum, scalarType, 0);
+                    // Keep the field list in the late list
+                    defArg = fieldList;
                 }
                 else
                 {
-                    // Create a copy of the temp to go to the late argument list
-                    defArg = comp->gtNewLclvNode(tmpVarNum, lclVarType);
+                    unsigned tmpVarNum = comp->lvaGrabTemp(true DEBUGARG("argument with side effect"));
+
+                    setupArg = comp->gtNewTempStore(tmpVarNum, argx);
+
+                    LclVarDsc* varDsc     = comp->lvaGetDesc(tmpVarNum);
+                    var_types  lclVarType = genActualType(argx->gtType);
+                    var_types  scalarType = TYP_UNKNOWN;
+
+                    if (setupArg->OperIsCopyBlkOp())
+                    {
+                        setupArg = comp->fgMorphCopyBlock(setupArg);
+#if defined(TARGET_ARMARCH) || defined(UNIX_AMD64_ABI) || defined(TARGET_LOONGARCH64) || defined(TARGET_RISCV64)
+                        if ((lclVarType == TYP_STRUCT) && (arg.AbiInfo.ArgType != TYP_STRUCT))
+                        {
+                            scalarType = arg.AbiInfo.ArgType;
+                        }
+#endif // TARGET_ARMARCH || defined (UNIX_AMD64_ABI) || defined(TARGET_LOONGARCH64) || defined(TARGET_RISCV64)
+                    }
+
+                    // scalarType can be set to a wider type for ARM or unix amd64 architectures: (3 => 4) or (5,6,7 =>
+                    // 8)
+                    if ((scalarType != TYP_UNKNOWN) && (scalarType != lclVarType))
+                    {
+                        // Create a GT_LCL_FLD using the wider type to go to the late argument list
+                        defArg = comp->gtNewLclFldNode(tmpVarNum, scalarType, 0);
+                    }
+                    else
+                    {
+                        // Create a copy of the temp to go to the late argument list
+                        defArg = comp->gtNewLclvNode(tmpVarNum, lclVarType);
+                    }
                 }
 
 #ifdef DEBUG
@@ -2270,7 +2299,54 @@ void CallArgs::AddFinalArgsAndDetermineABIInfo(Compiler* comp, GenTreeCall* call
     }
 #endif
 
-    m_abiInformationDetermined = true;
+    m_abiInformationDetermined    = true;
+    m_newAbiInformationDetermined = true;
+}
+
+//------------------------------------------------------------------------
+// DetermineNewABIInfo:
+//   Determine the new ABI info for all call args without making any IR
+//   changes.
+//
+// Parameters:
+//   comp - The compiler object.
+//   call - The call to which the CallArgs belongs.
+//
+void CallArgs::DetermineNewABIInfo(Compiler* comp, GenTreeCall* call)
+{
+    ClassifierInfo info;
+    info.CallConv = call->GetUnmanagedCallConv();
+    // X86 tailcall helper is considered varargs, but not for ABI classification purposes.
+    info.IsVarArgs  = call->IsVarargs() && !call->IsTailCallViaJitHelper();
+    info.HasThis    = call->gtArgs.HasThisPointer();
+    info.HasRetBuff = call->gtArgs.HasRetBuffer();
+    PlatformClassifier classifier(info);
+
+    for (CallArg& arg : Args())
+    {
+        const var_types            argSigType  = arg.GetSignatureType();
+        const CORINFO_CLASS_HANDLE argSigClass = arg.GetSignatureClassHandle();
+        ClassLayout* argLayout = argSigClass == NO_CLASS_HANDLE ? nullptr : comp->typGetObjLayout(argSigClass);
+
+        // Some well known args have custom register assignment.
+        // These should not affect the placement of any other args or stack space required.
+        // Example: on AMD64 R10 and R11 are used for indirect VSD (generic interface) and cookie calls.
+        // TODO-Cleanup: Integrate this into the new style ABI classifiers.
+        regNumber nonStdRegNum = GetCustomRegister(comp, call->GetUnmanagedCallConv(), arg.GetWellKnownArg());
+
+        if (nonStdRegNum == REG_NA)
+        {
+            arg.NewAbiInfo = classifier.Classify(comp, argSigType, argLayout, arg.GetWellKnownArg());
+        }
+        else
+        {
+            ABIPassingSegment segment = ABIPassingSegment::InRegister(nonStdRegNum, 0, TARGET_POINTER_SIZE);
+            arg.NewAbiInfo            = ABIPassingInformation::FromSegment(comp, segment);
+        }
+    }
+
+    m_argsStackSize               = classifier.StackSize();
+    m_newAbiInformationDetermined = true;
 }
 
 //------------------------------------------------------------------------
@@ -2436,10 +2512,10 @@ GenTreeCall* Compiler::fgMorphArgs(GenTreeCall* call)
         GenTree* argObj         = argx->gtEffectiveVal();
         bool     makeOutArgCopy = false;
 
-        if (isStructArg && !reMorphing)
+        if (isStructArg && !reMorphing && !argObj->OperIs(GT_FIELD_LIST))
         {
             unsigned originalSize;
-            if (argObj->TypeGet() == TYP_STRUCT)
+            if (argObj->TypeIs(TYP_STRUCT))
             {
                 assert(argObj->OperIs(GT_BLK, GT_LCL_VAR, GT_LCL_FLD));
                 originalSize = argObj->GetLayout(this)->GetSize();
@@ -2763,12 +2839,12 @@ void Compiler::fgMorphMultiregStructArgs(GenTreeCall* call)
     {
         if ((arg.AbiInfo.ArgType == TYP_STRUCT) && !arg.AbiInfo.PassedByRef)
         {
+            foundStructArg = true;
             GenTree*& argx = (arg.GetLateNode() != nullptr) ? arg.LateNodeRef() : arg.EarlyNodeRef();
 
             if (!argx->OperIs(GT_FIELD_LIST))
             {
-                argx           = fgMorphMultiregStructArg(&arg);
-                foundStructArg = true;
+                argx = fgMorphMultiregStructArg(&arg);
             }
         }
     }
diff --git a/src/coreclr/jit/promotion.cpp b/src/coreclr/jit/promotion.cpp
index c6a1eb0b3f4e6..7fe9bffa70e94 100644
--- a/src/coreclr/jit/promotion.cpp
+++ b/src/coreclr/jit/promotion.cpp
@@ -58,10 +58,14 @@ struct Access
     // Number of times this is passed as a call arg. We insert writebacks
     // before these.
     unsigned CountCallArgs = 0;
+    // Number of times this is passed as a register call arg. We may be able to
+    // avoid the writeback for some overlapping replacements for these.
+    unsigned CountRegCallArgs = 0;
 
     weight_t CountWtd               = 0;
     weight_t CountStoredFromCallWtd = 0;
     weight_t CountCallArgsWtd       = 0;
+    weight_t CountRegCallArgsWtd    = 0;
 
 #ifdef DEBUG
     // Number of times this access is the source of a store.
@@ -113,12 +117,13 @@ enum class AccessKindFlags : uint32_t
 {
     None             = 0,
     IsCallArg        = 1,
-    IsStoredFromCall = 2,
-    IsCallRetBuf     = 4,
+    IsRegCallArg     = 2,
+    IsStoredFromCall = 4,
+    IsCallRetBuf     = 8,
 #ifdef DEBUG
-    IsStoreSource      = 8,
-    IsStoreDestination = 16,
-    IsReturned         = 32,
+    IsStoreSource      = 16,
+    IsStoreDestination = 32,
+    IsReturned         = 64,
 #endif
 };
 
@@ -352,6 +357,12 @@ class LocalUses
         {
             access->CountCallArgs++;
             access->CountCallArgsWtd += weight;
+
+            if ((flags & AccessKindFlags::IsRegCallArg) != AccessKindFlags::None)
+            {
+                access->CountRegCallArgs++;
+                access->CountRegCallArgsWtd += weight;
+            }
         }
 
         if ((flags & (AccessKindFlags::IsStoredFromCall | AccessKindFlags::IsCallRetBuf)) != AccessKindFlags::None)
@@ -688,6 +699,44 @@ class LocalUses
 
                 countOverlappedCallArgWtd += otherAccess.CountCallArgsWtd;
                 countOverlappedStoredFromCallWtd += otherAccess.CountStoredFromCallWtd;
+
+                if (otherAccess.CountRegCallArgs > 0)
+                {
+                    auto willPassFieldInRegister = [=, &access, &otherAccess]() {
+                        if (access.Offset < otherAccess.Offset)
+                        {
+                            return false;
+                        }
+
+                        unsigned layoutOffset = access.Offset - otherAccess.Offset;
+                        if ((layoutOffset % TARGET_POINTER_SIZE) != 0)
+                        {
+                            return false;
+                        }
+
+                        unsigned accessSize = genTypeSize(access.AccessType);
+                        if (accessSize == TARGET_POINTER_SIZE)
+                        {
+                            return true;
+                        }
+
+                        const StructSegments& significantSegments = comp->GetSignificantSegments(otherAccess.Layout);
+                        if (!significantSegments.Intersects(
+                                StructSegments::Segment(layoutOffset + accessSize, layoutOffset + TARGET_POINTER_SIZE)))
+                        {
+                            return true;
+                        }
+
+                        return false;
+                    };
+                    // We may be able to decompose the call argument to require no
+                    // write-back.
+                    if (willPassFieldInRegister())
+                    {
+                        countOverlappedCallArg -= otherAccess.CountRegCallArgs;
+                        countOverlappedCallArgWtd -= otherAccess.CountRegCallArgsWtd;
+                    }
+                }
             }
 
             // We cost any normal access (which is a struct load or store) without promotion at 3 cycles.
@@ -881,6 +930,8 @@ class LocalUses
                    access.CountStoreDestinationWtd);
             printf(" # as call arg: (%u, " FMT_WT ")\n", access.CountCallArgs,
                    access.CountCallArgsWtd);
+            printf(" # as reg call arg: (%u, " FMT_WT ")\n", access.CountRegCallArgs,
+                   access.CountRegCallArgsWtd);
             printf(" # as retbuf: (%u, " FMT_WT ")\n", access.CountPassedAsRetbuf,
                    access.CountPassedAsRetbufWtd);
             printf(" # as returned value: (%u, " FMT_WT ")\n\n", access.CountReturns,
@@ -1430,13 +1481,31 @@ class LocalsUseVisitor : public GenTreeVisitor<LocalsUseVisitor>
             if (user->IsCall())
             {
-                for (CallArg& arg : user->AsCall()->gtArgs.Args())
+                GenTreeCall* call = user->AsCall();
+                for (CallArg& arg : call->gtArgs.Args())
                 {
-                    if (arg.GetNode()->gtEffectiveVal() == lcl)
+                    if (arg.GetNode()->gtEffectiveVal() != lcl)
                     {
-                        flags |= AccessKindFlags::IsCallArg;
-                        break;
+                        continue;
+                    }
+
+                    flags |= AccessKindFlags::IsCallArg;
+
+#if FEATURE_MULTIREG_ARGS
+                    if (!call->gtArgs.IsNewAbiInformationDetermined())
+                    {
+                        call->gtArgs.DetermineNewABIInfo(m_compiler, call);
+                    }
+
+                    if (!arg.NewAbiInfo.HasAnyStackSegment() && !arg.NewAbiInfo.HasExactlyOneRegisterSegment())
+                    {
+                        // TODO-CQ: Support for other register args than multireg
+                        // args as well.
+                        flags |= AccessKindFlags::IsRegCallArg;
                     }
+#endif
+
+                    break;
                 }
             }
@@ -1859,15 +1928,18 @@ void ReplaceVisitor::InsertPreStatementReadBacks()
 //   lcl  - The local
 //   offs - Start offset of the segment
 //   size - Size of the segment
-//   func - Callback
+//   func - Callback of type bool(Replacement&). If the callback returns false, the visit aborts.
+//
+// Return Value:
+//   false if the visitor aborted.
 //
 template <typename Func>
-void ReplaceVisitor::VisitOverlappingReplacements(unsigned lcl, unsigned offs, unsigned size, Func func)
+bool ReplaceVisitor::VisitOverlappingReplacements(unsigned lcl, unsigned offs, unsigned size, Func func)
 {
     AggregateInfo* agg = m_aggregates.Lookup(lcl);
     if (agg == nullptr)
     {
-        return;
+        return true;
     }
 
     jitstd::vector<Replacement>& replacements = agg->Replacements;
@@ -1886,10 +1958,15 @@ void ReplaceVisitor::VisitOverlappingReplacements(unsigned lcl, unsigned offs, u
     while ((index < replacements.size()) && (replacements[index].Offset < end))
     {
         Replacement& rep = replacements[index];
-        func(rep);
+        if (!func(rep))
+        {
+            return false;
+        }
 
         index++;
     }
+
+    return true;
 }
 
 //------------------------------------------------------------------------
@@ -1935,8 +2012,15 @@ void ReplaceVisitor::InsertPreStatementWriteBacks()
             for (CallArg& arg : call->gtArgs.Args())
             {
                 GenTree* node = arg.GetNode()->gtEffectiveVal();
-                if (!node->TypeIs(TYP_STRUCT) || !node->OperIsLocalRead())
+                if (!node->TypeIs(TYP_STRUCT) || !node->OperIsLocalRead() ||
+                    (m_replacer->m_aggregates.Lookup(node->AsLclVarCommon()->GetLclNum()) == nullptr))
+                {
+                    continue;
+                }
+
+                if (m_replacer->CanReplaceCallArgWithFieldListOfReplacements(call, &arg, node->AsLclVarCommon()))
                 {
+                    // Multi-reg arg; can decompose into FIELD_LIST.
                     continue;
                 }
@@ -2021,6 +2105,180 @@ GenTree** ReplaceVisitor::InsertMidTreeReadBacks(GenTree** use)
     return use;
 }
 
+//------------------------------------------------------------------------
+// ReplaceCallArgWithFieldList:
+//   Try to replace a struct local call argument that has replacements with
+//   a FIELD_LIST of the replacement locals.
+//
+// Parameters:
+//   call    - The call
+//   argNode - The argument node
+//
+bool ReplaceVisitor::ReplaceCallArgWithFieldList(GenTreeCall* call, GenTreeLclVarCommon* argNode)
+{
+    CallArg* callArg = call->gtArgs.FindByNode(argNode);
+    if (callArg == nullptr)
+    {
+        // TODO-CQ: Could be wrapped in a comma? Does this happen?
+        return false;
+    }
+
+    if (!CanReplaceCallArgWithFieldListOfReplacements(call, callArg, argNode))
+    {
+        return false;
+    }
+
+    AggregateInfo* agg    = m_aggregates.Lookup(argNode->GetLclNum());
+    ClassLayout*   layout = argNode->GetLayout(m_compiler);
+    assert(layout != nullptr);
+    StructDeaths      deaths    = m_liveness->GetDeathsForStructLocal(argNode);
+    GenTreeFieldList* fieldList = new (m_compiler, GT_FIELD_LIST) GenTreeFieldList;
+    for (unsigned i = 0; i < callArg->NewAbiInfo.NumSegments; i++)
+    {
+        const ABIPassingSegment& seg = callArg->NewAbiInfo.Segment(i);
+
+        Replacement* rep = nullptr;
+        if (agg->OverlappingReplacements(argNode->GetLclOffs() + seg.Offset, seg.Size, &rep, nullptr) &&
+            rep->NeedsWriteBack)
+        {
+            GenTreeLclVar* fieldValue = m_compiler->gtNewLclvNode(rep->LclNum, rep->AccessType);
+
+            if (deaths.IsReplacementDying(static_cast<size_t>(rep - agg->Replacements.data())))
+            {
+                fieldValue->gtFlags |= GTF_VAR_DEATH;
+                CheckForwardSubForLastUse(rep->LclNum);
+            }
+
+            fieldList->AddField(m_compiler, fieldValue, seg.Offset, rep->AccessType);
+        }
+        else
+        {
+            // Unpromoted part, or replacement local is not up to date.
+            var_types type;
+            if (rep != nullptr)
+            {
+                type = rep->AccessType;
+            }
+            else if (genIsValidFloatReg(seg.GetRegister()))
+            {
+                type = seg.GetRegisterType();
+            }
+            else
+            {
+                if ((seg.Offset % TARGET_POINTER_SIZE) == 0 && (seg.Size == TARGET_POINTER_SIZE))
+                {
+                    type = layout->GetGCPtrType(seg.Offset / TARGET_POINTER_SIZE);
+                }
+                else
+                {
+                    type = seg.GetRegisterType();
+                }
+            }
+
+            GenTree* fieldValue =
+                m_compiler->gtNewLclFldNode(argNode->GetLclNum(), type, argNode->GetLclOffs() + seg.Offset);
+            fieldList->AddField(m_compiler, fieldValue, seg.Offset, type);
+
+            if (!m_compiler->lvaGetDesc(argNode->GetLclNum())->lvDoNotEnregister)
+            {
+                m_compiler->lvaSetVarDoNotEnregister(argNode->GetLclNum() DEBUGARG(DoNotEnregisterReason::LocalField));
+            }
+        }
+    }
+
+    assert(callArg->GetEarlyNode() == argNode);
+    callArg->SetEarlyNode(fieldList);
+
+    m_madeChanges = true;
+    return true;
+}
+
+//------------------------------------------------------------------------
+// CanReplaceCallArgWithFieldListOfReplacements:
+//   Returns true if a struct arg is replaceable by a FIELD_LIST containing
+//   some replacement field.
+//
+// Parameters:
+//   call    - The call
+//   callArg - The call argument
+//   lcl     - The local that is the node of the call argument
+//
+// Returns:
+//   True if the arg can be replaced by a FIELD_LIST that contains at least one
+//   replacement.
+//
+bool ReplaceVisitor::CanReplaceCallArgWithFieldListOfReplacements(GenTreeCall*         call,
+                                                                  CallArg*             callArg,
+                                                                  GenTreeLclVarCommon* lcl)
+{
+#if !FEATURE_MULTIREG_ARGS
+    // TODO-CQ: We should do a similar thing for structs passed in a single
+    // register.
+    return false;
+#else
+    // We should have computed ABI information during the costing phase.
+    assert(call->gtArgs.IsNewAbiInformationDetermined());
+
+    if (callArg->NewAbiInfo.HasAnyStackSegment() || callArg->NewAbiInfo.HasExactlyOneRegisterSegment())
+    {
+        return false;
+    }
+
+    AggregateInfo* agg = m_aggregates.Lookup(lcl->GetLclNum());
+    assert(agg != nullptr);
+
+    bool anyReplacements = false;
+    for (unsigned i = 0; i < callArg->NewAbiInfo.NumSegments; i++)
+    {
+        const ABIPassingSegment& seg = callArg->NewAbiInfo.Segment(i);
+        assert(seg.IsPassedInRegister());
+
+        auto callback = [=, &anyReplacements, &seg](Replacement& rep) {
+            anyReplacements = true;
+
+            // Replacement must start at the right offset...
+            if (rep.Offset != lcl->GetLclOffs() + seg.Offset)
+            {
+                return false;
+            }
+
+            // It must not be too long..
+            unsigned repSize = genTypeSize(rep.AccessType);
+            if (repSize > seg.Size)
+            {
+                return false;
+            }
+
+            // If it is too short, the remainder that would be passed in the
+            // register should be padding. We can check that by only checking
+            // whether the remainder intersects anything unpromoted, since if
+            // the remainder is a different promotion we will return false when
+            // the replacement is visited in this callback.
+            if ((repSize < seg.Size) &&
+                agg->Unpromoted.Intersects(StructSegments::Segment(rep.Offset + repSize, rep.Offset + seg.Size)))
+            {
+                return false;
+            }
+
+            // Finally, the backend requires the register types to match.
+            if (!varTypeUsesSameRegType(rep.AccessType, seg.GetRegisterType()))
+            {
+                return false;
+            }
+
+            return true;
+        };
+
+        if (!VisitOverlappingReplacements(lcl->GetLclNum(), lcl->GetLclOffs() + seg.Offset, seg.Size, callback))
+        {
+            return false;
+        }
+    }
+
+    return anyReplacements;
+#endif
+}
+
 //------------------------------------------------------------------------
 // ReadBackAfterCall:
 //   Handle a call that may pass a struct local with replacements as the
 //   retbuf.
@@ -2187,22 +2445,27 @@ void ReplaceVisitor::ReplaceLocal(GenTree** use, GenTree* user)
                 offs + lcl->GetLayout(m_compiler)->GetSize());
 
         assert(effectiveUser->OperIs(GT_CALL, GT_RETURN, GT_SWIFT_ERROR_RET));
-        unsigned size = lcl->GetLayout(m_compiler)->GetSize();
-        WriteBackBeforeUse(use, lclNum, lcl->GetLclOffs(), size);
 
-        if (IsPromotedStructLocalDying(lcl))
+        if (!effectiveUser->IsCall() || !ReplaceCallArgWithFieldList(effectiveUser->AsCall(), lcl))
         {
-            lcl->gtFlags |= GTF_VAR_DEATH;
-            CheckForwardSubForLastUse(lclNum);
+            unsigned size = lcl->GetLayout(m_compiler)->GetSize();
+            WriteBackBeforeUse(use, lclNum, lcl->GetLclOffs(), size);
 
-            // Relying on the values in the struct local after this struct use
-            // would effectively introduce another use of the struct, so
-            // indicate that no replacements are up to date.
-            for (Replacement& rep : replacements)
+            if (IsPromotedStructLocalDying(lcl))
             {
-                SetNeedsWriteBack(rep);
+                lcl->gtFlags |= GTF_VAR_DEATH;
+                CheckForwardSubForLastUse(lclNum);
+
+                // Relying on the values in the struct local after this struct use
+                // would effectively introduce another use of the struct, so
+                // indicate that no replacements are up to date.
+                for (Replacement& rep : replacements)
+                {
+                    SetNeedsWriteBack(rep);
+                }
             }
         }
+
         return;
     }
@@ -2331,7 +2594,7 @@ void ReplaceVisitor::WriteBackBeforeCurrentStatement(unsigned lcl, unsigned offs
     VisitOverlappingReplacements(lcl, offs, size, [this, lcl](Replacement& rep) {
         if (!rep.NeedsWriteBack)
         {
-            return;
+            return true;
         }
 
         GenTree* readBack = Promotion::CreateWriteBack(m_compiler, lcl, rep);
@@ -2340,6 +2603,7 @@ void ReplaceVisitor::WriteBackBeforeCurrentStatement(unsigned lcl, unsigned offs
         DISPSTMT(stmt);
 
         m_compiler->fgInsertStmtBefore(m_currentBlock, m_currentStmt, stmt);
         ClearNeedsWriteBack(rep);
+        return true;
     });
 }
@@ -2359,7 +2623,7 @@ void ReplaceVisitor::WriteBackBeforeUse(GenTree** use, unsigned lcl, unsigned of
     VisitOverlappingReplacements(lcl, offs, size, [this, &use, lcl](Replacement& rep) {
         if (!rep.NeedsWriteBack)
        {
-            return;
+            return true;
         }
 
         GenTreeOp* comma = m_compiler->gtNewOperNode(GT_COMMA, (*use)->TypeGet(),
@@ -2369,6 +2633,7 @@ void ReplaceVisitor::WriteBackBeforeUse(GenTree** use, unsigned lcl, unsigned of
         ClearNeedsWriteBack(rep);
 
         m_madeChanges = true;
+        return true;
     });
 }
diff --git a/src/coreclr/jit/promotion.h b/src/coreclr/jit/promotion.h
index b70901ec1ae30..28481a97eaf88 100644
--- a/src/coreclr/jit/promotion.h
+++ b/src/coreclr/jit/promotion.h
@@ -287,12 +287,14 @@ class ReplaceVisitor : public GenTreeVisitor<ReplaceVisitor>
     void ClearNeedsReadBack(Replacement& rep);
 
     template <typename Func>
-    void VisitOverlappingReplacements(unsigned lcl, unsigned offs, unsigned size, Func func);
+    bool VisitOverlappingReplacements(unsigned lcl, unsigned offs, unsigned size, Func func);
     void InsertPreStatementReadBacks();
     void InsertPreStatementWriteBacks();
     GenTree** InsertMidTreeReadBacks(GenTree** use);
+    bool ReplaceCallArgWithFieldList(GenTreeCall* call, GenTreeLclVarCommon* callArg);
+    bool CanReplaceCallArgWithFieldListOfReplacements(GenTreeCall* call, CallArg* callArg, GenTreeLclVarCommon* lcl);
     void ReadBackAfterCall(GenTreeCall* call, GenTree* user);
     bool IsPromotedStructLocalDying(GenTreeLclVarCommon* structLcl);
     void ReplaceLocal(GenTree** use, GenTree* user);
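
For context on the costing change in promotion.cpp above: the willPassFieldInRegister lambda decides when a promoted field access lines up with a register-passed struct argument well enough that the usual write-back to the struct local can be skipped. The standalone sketch below mirrors that predicate outside the JIT; kPointerSize, FieldAccess, SignificantRange, and WillPassFieldInRegister are hypothetical stand-ins for TARGET_POINTER_SIZE, Access, the StructSegments significance query, and the lambda itself, so treat it as an illustration of the offset/padding rules rather than the JIT's actual types.

#include <cstdio>

// Illustrative stand-ins only; the real JIT uses Access, StructSegments and
// TARGET_POINTER_SIZE from promotion.cpp / target.h.
constexpr unsigned kPointerSize = 8;

struct FieldAccess
{
    unsigned Offset; // offset of the promoted field inside the struct
    unsigned Size;   // size of the promoted field in bytes
};

struct SignificantRange
{
    unsigned Start;
    unsigned End; // exclusive

    bool Intersects(unsigned start, unsigned end) const
    {
        return (start < End) && (Start < end);
    }
};

// Mirrors the shape of willPassFieldInRegister: the field must start at a
// register-aligned offset within the argument, and any tail of that register
// slot beyond the field must be insignificant (padding).
bool WillPassFieldInRegister(const FieldAccess& field, unsigned argOffset, const SignificantRange& significant)
{
    if (field.Offset < argOffset)
    {
        return false;
    }

    unsigned layoutOffset = field.Offset - argOffset;
    if ((layoutOffset % kPointerSize) != 0)
    {
        return false;
    }

    if (field.Size == kPointerSize)
    {
        return true;
    }

    // Field is narrower than a register: acceptable only if the remainder of
    // the slot does not overlap significant (non-padding) struct contents.
    return !significant.Intersects(layoutOffset + field.Size, layoutOffset + kPointerSize);
}

int main()
{
    SignificantRange payload{0, 12}; // bytes 0..11 hold real data, 12..15 are padding
    FieldAccess      lowField{0, 8}; // fills register slot 0 exactly
    FieldAccess      intField{8, 4}; // slot 1: 4 bytes of data, 4 bytes of padding

    printf("low field passes in a register: %d\n", WillPassFieldInRegister(lowField, 0, payload)); // 1
    printf("int field passes in a register: %d\n", WillPassFieldInRegister(intField, 0, payload)); // 1
    return 0;
}

The same alignment-and-padding reasoning reappears in CanReplaceCallArgWithFieldListOfReplacements, where a replacement shorter than its register segment is accepted only if the tail of the segment is unpromoted padding.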