@@ -2747,20 +2747,72 @@ bool RISCVTTIImpl::getTgtMemIntrinsic(IntrinsicInst *Inst,
   Intrinsic::ID IID = Inst->getIntrinsicID();
   LLVMContext &C = Inst->getContext();
   bool HasMask = false;
+
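+  // Segment intrinsics operate on a riscv.vector.tuple TargetExtType whose
+  // first integer parameter is the number of fields (NF); anything else is
+  // treated as a single segment.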
+  auto getSegNum = [](const IntrinsicInst *II, unsigned PtrOperandNo,
+                      bool IsWrite) -> int64_t {
+    if (auto *TarExtTy =
+            dyn_cast<TargetExtType>(II->getArgOperand(0)->getType()))
+      return TarExtTy->getIntParameter(0);
+
+    return 1;
+  };
+
   switch (IID) {
   case Intrinsic::riscv_vle_mask:
   case Intrinsic::riscv_vse_mask:
+  case Intrinsic::riscv_vlseg2_mask:
+  case Intrinsic::riscv_vlseg3_mask:
+  case Intrinsic::riscv_vlseg4_mask:
+  case Intrinsic::riscv_vlseg5_mask:
+  case Intrinsic::riscv_vlseg6_mask:
+  case Intrinsic::riscv_vlseg7_mask:
+  case Intrinsic::riscv_vlseg8_mask:
+  case Intrinsic::riscv_vsseg2_mask:
+  case Intrinsic::riscv_vsseg3_mask:
+  case Intrinsic::riscv_vsseg4_mask:
+  case Intrinsic::riscv_vsseg5_mask:
+  case Intrinsic::riscv_vsseg6_mask:
+  case Intrinsic::riscv_vsseg7_mask:
+  case Intrinsic::riscv_vsseg8_mask:
     HasMask = true;
     [[fallthrough]];
   case Intrinsic::riscv_vle:
-  case Intrinsic::riscv_vse: {
+  case Intrinsic::riscv_vse:
+  case Intrinsic::riscv_vlseg2:
+  case Intrinsic::riscv_vlseg3:
+  case Intrinsic::riscv_vlseg4:
+  case Intrinsic::riscv_vlseg5:
+  case Intrinsic::riscv_vlseg6:
+  case Intrinsic::riscv_vlseg7:
+  case Intrinsic::riscv_vlseg8:
+  case Intrinsic::riscv_vsseg2:
+  case Intrinsic::riscv_vsseg3:
+  case Intrinsic::riscv_vsseg4:
+  case Intrinsic::riscv_vsseg5:
+  case Intrinsic::riscv_vsseg6:
+  case Intrinsic::riscv_vsseg7:
+  case Intrinsic::riscv_vsseg8: {
     // Intrinsic interface:
     // riscv_vle(merge, ptr, vl)
     // riscv_vle_mask(merge, ptr, mask, vl, policy)
     // riscv_vse(val, ptr, vl)
     // riscv_vse_mask(val, ptr, mask, vl, policy)
+    // riscv_vlseg#(merge, ptr, vl, sew)
+    // riscv_vlseg#_mask(merge, ptr, mask, vl, policy, sew)
+    // riscv_vsseg#(val, ptr, vl, sew)
+    // riscv_vsseg#_mask(val, ptr, mask, vl, sew)
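+    // For the segment forms, sew is the trailing operand and holds log2 of the
+    // element width in bits; it is decoded below as 1 << sew.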
     bool IsWrite = Inst->getType()->isVoidTy();
     Type *Ty = IsWrite ? Inst->getArgOperand(0)->getType() : Inst->getType();
+    // The results of segment loads are TargetExtType.
+    if (auto *TarExtTy = dyn_cast<TargetExtType>(Ty)) {
+      unsigned SEW =
+          1 << cast<ConstantInt>(Inst->getArgOperand(Inst->arg_size() - 1))
+                   ->getZExtValue();
+      Ty = TarExtTy->getTypeParameter(0U);
+      Ty = ScalableVectorType::get(
+          IntegerType::get(C, SEW),
+          cast<ScalableVectorType>(Ty)->getMinNumElements() * 8 / SEW);
+    }
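+    // The tuple's storage vector is in bytes, so the per-field element count
+    // is MinNumElements * 8 / SEW (e.g. <vscale x 16 x i8> storage with SEW=32
+    // becomes <vscale x 4 x i32>).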
     const auto *RVVIInfo = RISCVVIntrinsicsTable::getRISCVVIntrinsicInfo(IID);
     unsigned VLIndex = RVVIInfo->VLOperand;
     unsigned PtrOperandNo = VLIndex - 1 - HasMask;
@@ -2771,23 +2823,72 @@ bool RISCVTTIImpl::getTgtMemIntrinsic(IntrinsicInst *Inst,
     if (HasMask)
       Mask = Inst->getArgOperand(VLIndex - 1);
     Value *EVL = Inst->getArgOperand(VLIndex);
+    unsigned SegNum = getSegNum(Inst, PtrOperandNo, IsWrite);
+    // RVV uses contiguous elements as a segment.
+    if (SegNum > 1) {
+      unsigned ElemSize = Ty->getScalarSizeInBits();
+      auto *SegTy = IntegerType::get(C, ElemSize * SegNum);
+      Ty = VectorType::get(SegTy, cast<VectorType>(Ty));
+    }
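+    // Widening the element type to ElemSize * SegNum bits makes one lane of
+    // the recorded access cover the whole segment.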
     Info.InterestingOperands.emplace_back(Inst, PtrOperandNo, IsWrite, Ty,
                                           Alignment, Mask, EVL);
     return true;
   }
   case Intrinsic::riscv_vlse_mask:
   case Intrinsic::riscv_vsse_mask:
+  case Intrinsic::riscv_vlsseg2_mask:
+  case Intrinsic::riscv_vlsseg3_mask:
+  case Intrinsic::riscv_vlsseg4_mask:
+  case Intrinsic::riscv_vlsseg5_mask:
+  case Intrinsic::riscv_vlsseg6_mask:
+  case Intrinsic::riscv_vlsseg7_mask:
+  case Intrinsic::riscv_vlsseg8_mask:
+  case Intrinsic::riscv_vssseg2_mask:
+  case Intrinsic::riscv_vssseg3_mask:
+  case Intrinsic::riscv_vssseg4_mask:
+  case Intrinsic::riscv_vssseg5_mask:
+  case Intrinsic::riscv_vssseg6_mask:
+  case Intrinsic::riscv_vssseg7_mask:
+  case Intrinsic::riscv_vssseg8_mask:
     HasMask = true;
     [[fallthrough]];
   case Intrinsic::riscv_vlse:
-  case Intrinsic::riscv_vsse: {
+  case Intrinsic::riscv_vsse:
+  case Intrinsic::riscv_vlsseg2:
+  case Intrinsic::riscv_vlsseg3:
+  case Intrinsic::riscv_vlsseg4:
+  case Intrinsic::riscv_vlsseg5:
+  case Intrinsic::riscv_vlsseg6:
+  case Intrinsic::riscv_vlsseg7:
+  case Intrinsic::riscv_vlsseg8:
+  case Intrinsic::riscv_vssseg2:
+  case Intrinsic::riscv_vssseg3:
+  case Intrinsic::riscv_vssseg4:
+  case Intrinsic::riscv_vssseg5:
+  case Intrinsic::riscv_vssseg6:
+  case Intrinsic::riscv_vssseg7:
+  case Intrinsic::riscv_vssseg8: {
     // Intrinsic interface:
     // riscv_vlse(merge, ptr, stride, vl)
     // riscv_vlse_mask(merge, ptr, stride, mask, vl, policy)
     // riscv_vsse(val, ptr, stride, vl)
     // riscv_vsse_mask(val, ptr, stride, mask, vl, policy)
+    // riscv_vlsseg#(merge, ptr, offset, vl, sew)
+    // riscv_vlsseg#_mask(merge, ptr, offset, mask, vl, policy, sew)
+    // riscv_vssseg#(val, ptr, offset, vl, sew)
+    // riscv_vssseg#_mask(val, ptr, offset, mask, vl, sew)
     bool IsWrite = Inst->getType()->isVoidTy();
     Type *Ty = IsWrite ? Inst->getArgOperand(0)->getType() : Inst->getType();
+    // The results of segment loads are TargetExtType.
+    if (auto *TarExtTy = dyn_cast<TargetExtType>(Ty)) {
+      unsigned SEW =
+          1 << cast<ConstantInt>(Inst->getArgOperand(Inst->arg_size() - 1))
+                   ->getZExtValue();
+      Ty = TarExtTy->getTypeParameter(0U);
+      Ty = ScalableVectorType::get(
+          IntegerType::get(C, SEW),
+          cast<ScalableVectorType>(Ty)->getMinNumElements() * 8 / SEW);
+    }
     const auto *RVVIInfo = RISCVVIntrinsicsTable::getRISCVVIntrinsicInfo(IID);
     unsigned VLIndex = RVVIInfo->VLOperand;
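+    // The strided forms carry the stride/offset between the pointer and VL,
+    // hence the pointer sits at VLIndex - 2 - HasMask here.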
     unsigned PtrOperandNo = VLIndex - 2 - HasMask;
@@ -2809,6 +2910,13 @@ bool RISCVTTIImpl::getTgtMemIntrinsic(IntrinsicInst *Inst,
     if (HasMask)
       Mask = Inst->getArgOperand(VLIndex - 1);
     Value *EVL = Inst->getArgOperand(VLIndex);
+    unsigned SegNum = getSegNum(Inst, PtrOperandNo, IsWrite);
+    // RVV uses contiguous elements as a segment.
+    if (SegNum > 1) {
+      unsigned ElemSize = Ty->getScalarSizeInBits();
+      auto *SegTy = IntegerType::get(C, ElemSize * SegNum);
+      Ty = VectorType::get(SegTy, cast<VectorType>(Ty));
+    }
     Info.InterestingOperands.emplace_back(Inst, PtrOperandNo, IsWrite, Ty,
                                           Alignment, Mask, EVL, Stride);
     return true;
@@ -2817,19 +2925,89 @@ bool RISCVTTIImpl::getTgtMemIntrinsic(IntrinsicInst *Inst,
   case Intrinsic::riscv_vluxei_mask:
   case Intrinsic::riscv_vsoxei_mask:
   case Intrinsic::riscv_vsuxei_mask:
+  case Intrinsic::riscv_vloxseg2_mask:
+  case Intrinsic::riscv_vloxseg3_mask:
+  case Intrinsic::riscv_vloxseg4_mask:
+  case Intrinsic::riscv_vloxseg5_mask:
+  case Intrinsic::riscv_vloxseg6_mask:
+  case Intrinsic::riscv_vloxseg7_mask:
+  case Intrinsic::riscv_vloxseg8_mask:
+  case Intrinsic::riscv_vluxseg2_mask:
+  case Intrinsic::riscv_vluxseg3_mask:
+  case Intrinsic::riscv_vluxseg4_mask:
+  case Intrinsic::riscv_vluxseg5_mask:
+  case Intrinsic::riscv_vluxseg6_mask:
+  case Intrinsic::riscv_vluxseg7_mask:
+  case Intrinsic::riscv_vluxseg8_mask:
+  case Intrinsic::riscv_vsoxseg2_mask:
+  case Intrinsic::riscv_vsoxseg3_mask:
+  case Intrinsic::riscv_vsoxseg4_mask:
+  case Intrinsic::riscv_vsoxseg5_mask:
+  case Intrinsic::riscv_vsoxseg6_mask:
+  case Intrinsic::riscv_vsoxseg7_mask:
+  case Intrinsic::riscv_vsoxseg8_mask:
+  case Intrinsic::riscv_vsuxseg2_mask:
+  case Intrinsic::riscv_vsuxseg3_mask:
+  case Intrinsic::riscv_vsuxseg4_mask:
+  case Intrinsic::riscv_vsuxseg5_mask:
+  case Intrinsic::riscv_vsuxseg6_mask:
+  case Intrinsic::riscv_vsuxseg7_mask:
+  case Intrinsic::riscv_vsuxseg8_mask:
     HasMask = true;
     [[fallthrough]];
   case Intrinsic::riscv_vloxei:
   case Intrinsic::riscv_vluxei:
   case Intrinsic::riscv_vsoxei:
-  case Intrinsic::riscv_vsuxei: {
+  case Intrinsic::riscv_vsuxei:
+  case Intrinsic::riscv_vloxseg2:
+  case Intrinsic::riscv_vloxseg3:
+  case Intrinsic::riscv_vloxseg4:
+  case Intrinsic::riscv_vloxseg5:
+  case Intrinsic::riscv_vloxseg6:
+  case Intrinsic::riscv_vloxseg7:
+  case Intrinsic::riscv_vloxseg8:
+  case Intrinsic::riscv_vluxseg2:
+  case Intrinsic::riscv_vluxseg3:
+  case Intrinsic::riscv_vluxseg4:
+  case Intrinsic::riscv_vluxseg5:
+  case Intrinsic::riscv_vluxseg6:
+  case Intrinsic::riscv_vluxseg7:
+  case Intrinsic::riscv_vluxseg8:
+  case Intrinsic::riscv_vsoxseg2:
+  case Intrinsic::riscv_vsoxseg3:
+  case Intrinsic::riscv_vsoxseg4:
+  case Intrinsic::riscv_vsoxseg5:
+  case Intrinsic::riscv_vsoxseg6:
+  case Intrinsic::riscv_vsoxseg7:
+  case Intrinsic::riscv_vsoxseg8:
+  case Intrinsic::riscv_vsuxseg2:
+  case Intrinsic::riscv_vsuxseg3:
+  case Intrinsic::riscv_vsuxseg4:
+  case Intrinsic::riscv_vsuxseg5:
+  case Intrinsic::riscv_vsuxseg6:
+  case Intrinsic::riscv_vsuxseg7:
+  case Intrinsic::riscv_vsuxseg8: {
     // Intrinsic interface (only listed ordered version):
     // riscv_vloxei(merge, ptr, index, vl)
     // riscv_vloxei_mask(merge, ptr, index, mask, vl, policy)
     // riscv_vsoxei(val, ptr, index, vl)
     // riscv_vsoxei_mask(val, ptr, index, mask, vl, policy)
+    // riscv_vloxseg#(merge, ptr, index, vl, sew)
+    // riscv_vloxseg#_mask(merge, ptr, index, mask, vl, policy, sew)
+    // riscv_vsoxseg#(val, ptr, index, vl, sew)
+    // riscv_vsoxseg#_mask(val, ptr, index, mask, vl, sew)
     bool IsWrite = Inst->getType()->isVoidTy();
     Type *Ty = IsWrite ? Inst->getArgOperand(0)->getType() : Inst->getType();
+    // The results of segment loads are TargetExtType.
+    if (auto *TarExtTy = dyn_cast<TargetExtType>(Ty)) {
+      unsigned SEW =
+          1 << cast<ConstantInt>(Inst->getArgOperand(Inst->arg_size() - 1))
+                   ->getZExtValue();
+      Ty = TarExtTy->getTypeParameter(0U);
+      Ty = ScalableVectorType::get(
+          IntegerType::get(C, SEW),
+          cast<ScalableVectorType>(Ty)->getMinNumElements() * 8 / SEW);
+    }
     const auto *RVVIInfo = RISCVVIntrinsicsTable::getRISCVVIntrinsicInfo(IID);
     unsigned VLIndex = RVVIInfo->VLOperand;
     unsigned PtrOperandNo = VLIndex - 2 - HasMask;
@@ -2845,6 +3023,13 @@ bool RISCVTTIImpl::getTgtMemIntrinsic(IntrinsicInst *Inst,
       Mask = ConstantInt::getTrue(MaskType);
     }
     Value *EVL = Inst->getArgOperand(VLIndex);
+    unsigned SegNum = getSegNum(Inst, PtrOperandNo, IsWrite);
+    // RVV uses contiguous elements as a segment.
+    if (SegNum > 1) {
+      unsigned ElemSize = Ty->getScalarSizeInBits();
+      auto *SegTy = IntegerType::get(C, ElemSize * SegNum);
+      Ty = VectorType::get(SegTy, cast<VectorType>(Ty));
+    }
+    Value *OffsetOp = Inst->getArgOperand(PtrOperandNo + 1);
     Info.InterestingOperands.emplace_back(Inst, PtrOperandNo, IsWrite, Ty,
                                           Align(1), Mask, EVL,