diff --git a/llvm/include/llvm/IR/VPIntrinsics.def b/llvm/include/llvm/IR/VPIntrinsics.def
index 3fad00e2caf21f..e81752dc33a9ab 100644
--- a/llvm/include/llvm/IR/VPIntrinsics.def
+++ b/llvm/include/llvm/IR/VPIntrinsics.def
@@ -95,15 +95,10 @@
 #define VP_PROPERTY_FUNCTIONAL_OPC(OPC)
 #endif
 
-// Whether the intrinsic may have a rounding mode or exception behavior operand
-// bundle.
-// \p HASROUND '1' if the intrinsic can have a rounding mode operand bundle,
-// '0' otherwise.
-// \p HASEXCEPT '1' if the intrinsic can have an exception behavior operand
-// bundle, '0' otherwise.
-// \p INTRINID The constrained fp intrinsic this VP intrinsic corresponds to.
+// If operation can have rounding or fp exceptions, maps to corresponding
+// constrained fp intrinsic.
 #ifndef VP_PROPERTY_CONSTRAINEDFP
-#define VP_PROPERTY_CONSTRAINEDFP(HASROUND, HASEXCEPT, INTRINID)
+#define VP_PROPERTY_CONSTRAINEDFP(INTRINID)
 #endif
 
 // The intrinsic and/or SDNode has the same function as this ISD Opcode.
@@ -123,22 +118,11 @@
 #define VP_PROPERTY_NO_FUNCTIONAL
 #endif
 
-// This VP Intrinsic is a memory operation
-// The pointer arg is at POINTERPOS and the data arg is at DATAPOS.
-#ifndef VP_PROPERTY_MEMOP
-#define VP_PROPERTY_MEMOP(POINTERPOS, DATAPOS)
-#endif
-
 // A property to infer VP binary-op SDNode opcodes automatically.
 #ifndef VP_PROPERTY_BINARYOP
 #define VP_PROPERTY_BINARYOP
 #endif
 
-// A property to infer VP type casts automatically.
-#ifndef VP_PROPERTY_CASTOP
-#define VP_PROPERTY_CASTOP
-#endif
-
 /// } Property Macros
 
 ///// Integer Arithmetic {
@@ -327,7 +311,7 @@ END_REGISTER_VP(vp_usub_sat, VP_USUBSAT)
 #define HELPER_REGISTER_BINARY_FP_VP(OPSUFFIX, VPSD, IROPC, SDOPC)             \
   BEGIN_REGISTER_VP(vp_##OPSUFFIX, 2, 3, VPSD, -1)                             \
   VP_PROPERTY_FUNCTIONAL_OPC(IROPC)                                            \
-  VP_PROPERTY_CONSTRAINEDFP(1, 1, experimental_constrained_##OPSUFFIX)         \
+  VP_PROPERTY_CONSTRAINEDFP(experimental_constrained_##OPSUFFIX)               \
   VP_PROPERTY_FUNCTIONAL_SDOPC(SDOPC)                                          \
   VP_PROPERTY_BINARYOP                                                         \
   END_REGISTER_VP(vp_##OPSUFFIX, VPSD)
@@ -369,14 +353,14 @@ END_REGISTER_VP(vp_sqrt, VP_SQRT)
 
 // llvm.vp.fma(x,y,z,mask,vlen)
 BEGIN_REGISTER_VP(vp_fma, 3, 4, VP_FMA, -1)
-VP_PROPERTY_CONSTRAINEDFP(1, 1, experimental_constrained_fma)
+VP_PROPERTY_CONSTRAINEDFP(experimental_constrained_fma)
 VP_PROPERTY_FUNCTIONAL_INTRINSIC(fma)
 VP_PROPERTY_FUNCTIONAL_SDOPC(FMA)
 END_REGISTER_VP(vp_fma, VP_FMA)
 
 // llvm.vp.fmuladd(x,y,z,mask,vlen)
 BEGIN_REGISTER_VP(vp_fmuladd, 3, 4, VP_FMULADD, -1)
-VP_PROPERTY_CONSTRAINEDFP(1, 1, experimental_constrained_fmuladd)
+VP_PROPERTY_CONSTRAINEDFP(experimental_constrained_fmuladd)
 VP_PROPERTY_FUNCTIONAL_INTRINSIC(fmuladd)
 VP_PROPERTY_FUNCTIONAL_SDOPC(FMAD)
 END_REGISTER_VP(vp_fmuladd, VP_FMULADD)
@@ -479,31 +463,30 @@ END_REGISTER_VP(vp_llrint, VP_LLRINT)
 #error                                                                         \
     "The internal helper macro HELPER_REGISTER_FP_CAST_VP is already defined!"
 #endif
-#define HELPER_REGISTER_FP_CAST_VP(OPSUFFIX, VPSD, IROPC, SDOPC, HASROUND)     \
+#define HELPER_REGISTER_FP_CAST_VP(OPSUFFIX, VPSD, IROPC, SDOPC)               \
   BEGIN_REGISTER_VP(vp_##OPSUFFIX, 1, 2, VPSD, -1)                             \
   VP_PROPERTY_FUNCTIONAL_OPC(IROPC)                                            \
   VP_PROPERTY_FUNCTIONAL_SDOPC(SDOPC)                                          \
-  VP_PROPERTY_CONSTRAINEDFP(HASROUND, 1, experimental_constrained_##OPSUFFIX)  \
-  VP_PROPERTY_CASTOP                                                           \
+  VP_PROPERTY_CONSTRAINEDFP(experimental_constrained_##OPSUFFIX)               \
   END_REGISTER_VP(vp_##OPSUFFIX, VPSD)
 
 // llvm.vp.fptoui(x,mask,vlen)
-HELPER_REGISTER_FP_CAST_VP(fptoui, VP_FP_TO_UINT, FPToUI, FP_TO_UINT, 0)
+HELPER_REGISTER_FP_CAST_VP(fptoui, VP_FP_TO_UINT, FPToUI, FP_TO_UINT)
 
 // llvm.vp.fptosi(x,mask,vlen)
-HELPER_REGISTER_FP_CAST_VP(fptosi, VP_FP_TO_SINT, FPToSI, FP_TO_SINT, 0)
+HELPER_REGISTER_FP_CAST_VP(fptosi, VP_FP_TO_SINT, FPToSI, FP_TO_SINT)
 
 // llvm.vp.uitofp(x,mask,vlen)
-HELPER_REGISTER_FP_CAST_VP(uitofp, VP_UINT_TO_FP, UIToFP, UINT_TO_FP, 1)
+HELPER_REGISTER_FP_CAST_VP(uitofp, VP_UINT_TO_FP, UIToFP, UINT_TO_FP)
 
 // llvm.vp.sitofp(x,mask,vlen)
-HELPER_REGISTER_FP_CAST_VP(sitofp, VP_SINT_TO_FP, SIToFP, SINT_TO_FP, 1)
+HELPER_REGISTER_FP_CAST_VP(sitofp, VP_SINT_TO_FP, SIToFP, SINT_TO_FP)
 
 // llvm.vp.fptrunc(x,mask,vlen)
-HELPER_REGISTER_FP_CAST_VP(fptrunc, VP_FP_ROUND, FPTrunc, FP_ROUND, 1)
+HELPER_REGISTER_FP_CAST_VP(fptrunc, VP_FP_ROUND, FPTrunc, FP_ROUND)
 
 // llvm.vp.fpext(x,mask,vlen)
-HELPER_REGISTER_FP_CAST_VP(fpext, VP_FP_EXTEND, FPExt, FP_EXTEND, 0)
+HELPER_REGISTER_FP_CAST_VP(fpext, VP_FP_EXTEND, FPExt, FP_EXTEND)
 
 #undef HELPER_REGISTER_FP_CAST_VP
 
@@ -517,7 +500,6 @@ HELPER_REGISTER_FP_CAST_VP(fpext, VP_FP_EXTEND, FPExt, FP_EXTEND, 0)
   BEGIN_REGISTER_VP(vp_##OPSUFFIX, 1, 2, VPSD, -1)                             \
   VP_PROPERTY_FUNCTIONAL_OPC(IROPC)                                            \
   VP_PROPERTY_FUNCTIONAL_SDOPC(SDOPC)                                          \
-  VP_PROPERTY_CASTOP                                                           \
   END_REGISTER_VP(vp_##OPSUFFIX, VPSD)
 
 // llvm.vp.trunc(x,mask,vlen)
@@ -532,13 +514,11 @@ HELPER_REGISTER_INT_CAST_VP(sext, VP_SIGN_EXTEND, SExt, SIGN_EXTEND)
 // llvm.vp.ptrtoint(x,mask,vlen)
 BEGIN_REGISTER_VP(vp_ptrtoint, 1, 2, VP_PTRTOINT, -1)
 VP_PROPERTY_FUNCTIONAL_OPC(PtrToInt)
-VP_PROPERTY_CASTOP
 END_REGISTER_VP(vp_ptrtoint, VP_PTRTOINT)
 
 // llvm.vp.inttoptr(x,mask,vlen)
 BEGIN_REGISTER_VP(vp_inttoptr, 1, 2, VP_INTTOPTR, -1)
 VP_PROPERTY_FUNCTIONAL_OPC(IntToPtr)
-VP_PROPERTY_CASTOP
 END_REGISTER_VP(vp_inttoptr, VP_INTTOPTR)
 
 #undef HELPER_REGISTER_INT_CAST_VP
@@ -555,7 +535,7 @@ END_REGISTER_VP_SDNODE(VP_SETCC)
 BEGIN_REGISTER_VP_INTRINSIC(vp_fcmp, 3, 4)
 HELPER_MAP_VPID_TO_VPSD(vp_fcmp, VP_SETCC)
 VP_PROPERTY_FUNCTIONAL_OPC(FCmp)
-VP_PROPERTY_CONSTRAINEDFP(0, 1, experimental_constrained_fcmp)
+VP_PROPERTY_CONSTRAINEDFP(experimental_constrained_fcmp)
 END_REGISTER_VP_INTRINSIC(vp_fcmp)
 
 // llvm.vp.icmp(x,y,cc,mask,vlen)
@@ -579,7 +559,6 @@ BEGIN_REGISTER_VP_SDNODE(VP_STORE, 1, vp_store, 4, 5)
 HELPER_MAP_VPID_TO_VPSD(vp_store, VP_STORE)
 VP_PROPERTY_FUNCTIONAL_OPC(Store)
 VP_PROPERTY_FUNCTIONAL_INTRINSIC(masked_store)
-VP_PROPERTY_MEMOP(1, 0)
 END_REGISTER_VP(vp_store, VP_STORE)
 
 // llvm.experimental.vp.strided.store(val,ptr,stride,mask,vlen)
@@ -588,7 +567,6 @@ BEGIN_REGISTER_VP_INTRINSIC(experimental_vp_strided_store, 3, 4)
 VP_PROPERTY_NO_FUNCTIONAL
 BEGIN_REGISTER_VP_SDNODE(EXPERIMENTAL_VP_STRIDED_STORE, 1, experimental_vp_strided_store, 5, 6)
 HELPER_MAP_VPID_TO_VPSD(experimental_vp_strided_store, EXPERIMENTAL_VP_STRIDED_STORE)
-VP_PROPERTY_MEMOP(1, 0)
 END_REGISTER_VP(experimental_vp_strided_store, EXPERIMENTAL_VP_STRIDED_STORE)
 
 // llvm.vp.scatter(ptr,val,mask,vlen)
@@ -597,7 +575,6 @@ BEGIN_REGISTER_VP_INTRINSIC(vp_scatter, 2, 3)
 BEGIN_REGISTER_VP_SDNODE(VP_SCATTER, 1, vp_scatter, 5, 6)
 HELPER_MAP_VPID_TO_VPSD(vp_scatter, VP_SCATTER)
 VP_PROPERTY_FUNCTIONAL_INTRINSIC(masked_scatter)
-VP_PROPERTY_MEMOP(1, 0)
 END_REGISTER_VP(vp_scatter, VP_SCATTER)
 
 // llvm.vp.load(ptr,mask,vlen)
@@ -607,7 +584,6 @@ BEGIN_REGISTER_VP_SDNODE(VP_LOAD, -1, vp_load, 3, 4)
 HELPER_MAP_VPID_TO_VPSD(vp_load, VP_LOAD)
 VP_PROPERTY_FUNCTIONAL_OPC(Load)
 VP_PROPERTY_FUNCTIONAL_INTRINSIC(masked_load)
-VP_PROPERTY_MEMOP(0, std::nullopt)
 END_REGISTER_VP(vp_load, VP_LOAD)
 
 // llvm.experimental.vp.strided.load(ptr,stride,mask,vlen)
@@ -616,7 +592,6 @@ BEGIN_REGISTER_VP_INTRINSIC(experimental_vp_strided_load, 2, 3)
 VP_PROPERTY_NO_FUNCTIONAL
 BEGIN_REGISTER_VP_SDNODE(EXPERIMENTAL_VP_STRIDED_LOAD, -1, experimental_vp_strided_load, 4, 5)
 HELPER_MAP_VPID_TO_VPSD(experimental_vp_strided_load, EXPERIMENTAL_VP_STRIDED_LOAD)
-VP_PROPERTY_MEMOP(0, std::nullopt)
 END_REGISTER_VP(experimental_vp_strided_load, EXPERIMENTAL_VP_STRIDED_LOAD)
 
 // llvm.vp.gather(ptr,mask,vlen)
@@ -625,7 +600,6 @@ BEGIN_REGISTER_VP_INTRINSIC(vp_gather, 1, 2)
 BEGIN_REGISTER_VP_SDNODE(VP_GATHER, -1, vp_gather, 4, 5)
 HELPER_MAP_VPID_TO_VPSD(vp_gather, VP_GATHER)
 VP_PROPERTY_FUNCTIONAL_INTRINSIC(masked_gather)
-VP_PROPERTY_MEMOP(0, std::nullopt)
 END_REGISTER_VP(vp_gather, VP_GATHER)
 
 ///// } Memory Operations
@@ -778,10 +752,8 @@ END_REGISTER_VP(experimental_vp_splat, EXPERIMENTAL_VP_SPLAT)
 #undef END_REGISTER_VP_SDNODE
 #undef HELPER_MAP_VPID_TO_VPSD
 #undef VP_PROPERTY_BINARYOP
-#undef VP_PROPERTY_CASTOP
 #undef VP_PROPERTY_CONSTRAINEDFP
 #undef VP_PROPERTY_FUNCTIONAL_INTRINSIC
 #undef VP_PROPERTY_FUNCTIONAL_OPC
 #undef VP_PROPERTY_FUNCTIONAL_SDOPC
 #undef VP_PROPERTY_NO_FUNCTIONAL
-#undef VP_PROPERTY_MEMOP
diff --git a/llvm/lib/IR/IntrinsicInst.cpp b/llvm/lib/IR/IntrinsicInst.cpp
index 966fa62abd94fe..7ed82c2ece464a 100644
--- a/llvm/lib/IR/IntrinsicInst.cpp
+++ b/llvm/lib/IR/IntrinsicInst.cpp
@@ -479,13 +479,16 @@ std::optional<unsigned>
 VPIntrinsic::getMemoryPointerParamPos(Intrinsic::ID VPID) {
   switch (VPID) {
   default:
-    break;
-#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
-#define VP_PROPERTY_MEMOP(POINTERPOS, ...) return POINTERPOS;
-#define END_REGISTER_VP_INTRINSIC(VPID) break;
-#include "llvm/IR/VPIntrinsics.def"
+    return std::nullopt;
+  case Intrinsic::vp_store:
+  case Intrinsic::vp_scatter:
+  case Intrinsic::experimental_vp_strided_store:
+    return 1;
+  case Intrinsic::vp_load:
+  case Intrinsic::vp_gather:
+  case Intrinsic::experimental_vp_strided_load:
+    return 0;
   }
-  return std::nullopt;
 }
 
 /// \return The data (payload) operand of this store or scatter.
@@ -499,13 +502,12 @@ Value *VPIntrinsic::getMemoryDataParam() const {
 std::optional<unsigned> VPIntrinsic::getMemoryDataParamPos(Intrinsic::ID VPID) {
   switch (VPID) {
   default:
-    break;
-#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
-#define VP_PROPERTY_MEMOP(POINTERPOS, DATAPOS) return DATAPOS;
-#define END_REGISTER_VP_INTRINSIC(VPID) break;
-#include "llvm/IR/VPIntrinsics.def"
+    return std::nullopt;
+  case Intrinsic::vp_store:
+  case Intrinsic::vp_scatter:
+  case Intrinsic::experimental_vp_strided_store:
+    return 0;
   }
-  return std::nullopt;
 }
 
 constexpr bool isVPIntrinsic(Intrinsic::ID ID) {
@@ -589,7 +591,7 @@ VPIntrinsic::getConstrainedIntrinsicIDForVP(Intrinsic::ID ID) {
   default:
     break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
-#define VP_PROPERTY_CONSTRAINEDFP(HASRND, HASEXCEPT, CID) return Intrinsic::CID;
+#define VP_PROPERTY_CONSTRAINEDFP(CID) return Intrinsic::CID;
 #define END_REGISTER_VP_INTRINSIC(VPID) break;
 #include "llvm/IR/VPIntrinsics.def"
   }
@@ -760,14 +762,9 @@ bool VPReductionIntrinsic::isVPReduction(Intrinsic::ID ID) {
 }
 
 bool VPCastIntrinsic::isVPCast(Intrinsic::ID ID) {
-  switch (ID) {
-  default:
-    break;
-#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
-#define VP_PROPERTY_CASTOP return true;
-#define END_REGISTER_VP_INTRINSIC(VPID) break;
-#include "llvm/IR/VPIntrinsics.def"
-  }
+  // All of the vp.casts correspond to instructions
+  if (std::optional<unsigned> Opc = getFunctionalOpcodeForVP(ID))
+    return Instruction::isCast(*Opc);
   return false;
 }
 
diff --git a/llvm/unittests/IR/VPIntrinsicTest.cpp b/llvm/unittests/IR/VPIntrinsicTest.cpp
index cf0a10d1f2e959..925a69bafa07ef 100644
--- a/llvm/unittests/IR/VPIntrinsicTest.cpp
+++ b/llvm/unittests/IR/VPIntrinsicTest.cpp
@@ -454,22 +454,6 @@ TEST_F(VPIntrinsicTest, VPIntrinsicDeclarationForParams) {
   }
 }
 
-/// Check that the HANDLE_VP_TO_CONSTRAINEDFP maps to an existing intrinsic with
-/// the right amount of constrained-fp metadata args.
-TEST_F(VPIntrinsicTest, HandleToConstrainedFP) {
-#define VP_PROPERTY_CONSTRAINEDFP(HASROUND, HASEXCEPT, CFPID)                  \
-  {                                                                            \
-    SmallVector<Intrinsic::IITDescriptor, 5> T;                                \
-    Intrinsic::getIntrinsicInfoTableEntries(Intrinsic::CFPID, T);              \
-    unsigned NumMetadataArgs = 0;                                              \
-    for (auto TD : T)                                                          \
-      NumMetadataArgs += (TD.Kind == Intrinsic::IITDescriptor::Metadata);      \
-    bool IsCmp = Intrinsic::CFPID == Intrinsic::experimental_constrained_fcmp; \
-    ASSERT_EQ(NumMetadataArgs, (unsigned)(IsCmp + HASROUND + HASEXCEPT));      \
-  }
-#include "llvm/IR/VPIntrinsics.def"
-}
-
 } // end anonymous namespace
 
 /// Check various properties of VPReductionIntrinsics
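
For reference, a minimal caller-side sketch of what the rewired helpers return after this change. It is not part of the patch; the function checkVPHelpers and the assert-based harness are illustrative only, and all APIs used are the existing public helpers touched above.

#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Intrinsics.h"
#include <cassert>

using namespace llvm;

// Illustrative only: exercises the helpers this patch moves from
// VPIntrinsics.def properties to open-coded logic in IntrinsicInst.cpp.
static void checkVPHelpers() {
  // llvm.vp.store(val,ptr,mask,vlen): pointer at operand 1, data at operand 0.
  assert(VPIntrinsic::getMemoryPointerParamPos(Intrinsic::vp_store) == 1u);
  assert(VPIntrinsic::getMemoryDataParamPos(Intrinsic::vp_store) == 0u);

  // llvm.vp.load(ptr,mask,vlen): pointer at operand 0, no data operand.
  assert(VPIntrinsic::getMemoryPointerParamPos(Intrinsic::vp_load) == 0u);
  assert(!VPIntrinsic::getMemoryDataParamPos(Intrinsic::vp_load).has_value());

  // vp.sext's functional opcode is Instruction::SExt, a cast, so isVPCast
  // answers true without a dedicated VP_PROPERTY_CASTOP table entry, while
  // vp.fadd (functional opcode FAdd) is still rejected.
  assert(VPCastIntrinsic::isVPCast(Intrinsic::vp_sext));
  assert(!VPCastIntrinsic::isVPCast(Intrinsic::vp_fadd));
}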