@@ -2426,7 +2426,7 @@ void InnerLoopVectorizer::vectorizeInterleaveGroup(
   // Prepare for the vector type of the interleaved load/store.
   Type *ScalarTy = getLoadStoreType(Instr);
   unsigned InterleaveFactor = Group->getFactor();
-  auto *VecTy = VectorType::get(ScalarTy, VF * InterleaveFactor);
+  auto *VecTy = VectorType::get(ScalarTy, State.VF * InterleaveFactor);

   // Prepare for the new pointers.
   SmallVector<Value *, 2> AddrParts;
@@ -2444,7 +2444,7 @@ void InnerLoopVectorizer::vectorizeInterleaveGroup(
   // uniform instructions, we're only required to generate a value for the
   // first vector lane in each unroll iteration.
   if (Group->isReverse()) {
-    Value *RuntimeVF = getRuntimeVF(Builder, Builder.getInt32Ty(), VF);
+    Value *RuntimeVF = getRuntimeVF(Builder, Builder.getInt32Ty(), State.VF);
     Idx = Builder.CreateSub(RuntimeVF, Builder.getInt32(1));
     Idx = Builder.CreateMul(Idx, Builder.getInt32(Group->getFactor()));
     Idx = Builder.CreateAdd(Idx, Builder.getInt32(Index));
@@ -2481,14 +2481,14 @@ void InnerLoopVectorizer::vectorizeInterleaveGroup(

   auto CreateGroupMask = [this, &BlockInMask, &State, &InterleaveFactor](
                              unsigned Part, Value *MaskForGaps) -> Value * {
-    if (VF.isScalable()) {
+    if (State.VF.isScalable()) {
       assert(!MaskForGaps && "Interleaved groups with gaps are not supported.");
       assert(InterleaveFactor == 2 &&
              "Unsupported deinterleave factor for scalable vectors");
       auto *BlockInMaskPart = State.get(BlockInMask, Part);
       SmallVector<Value *, 2> Ops = {BlockInMaskPart, BlockInMaskPart};
-      auto *MaskTy =
-          VectorType::get(Builder.getInt1Ty(), VF.getKnownMinValue() * 2, true);
+      auto *MaskTy = VectorType::get(Builder.getInt1Ty(),
+                                     State.VF.getKnownMinValue() * 2, true);
       return Builder.CreateIntrinsic(MaskTy, Intrinsic::vector_interleave2, Ops,
                                      /*FMFSource=*/nullptr, "interleaved.mask");
     }
@@ -2499,7 +2499,7 @@ void InnerLoopVectorizer::vectorizeInterleaveGroup(
     Value *BlockInMaskPart = State.get(BlockInMask, Part);
     Value *ShuffledMask = Builder.CreateShuffleVector(
         BlockInMaskPart,
-        createReplicatedMask(InterleaveFactor, VF.getKnownMinValue()),
+        createReplicatedMask(InterleaveFactor, State.VF.getKnownMinValue()),
         "interleaved.mask");
     return MaskForGaps ? Builder.CreateBinOp(Instruction::And, ShuffledMask,
                                              MaskForGaps)
@@ -2511,7 +2511,7 @@ void InnerLoopVectorizer::vectorizeInterleaveGroup(
     Value *MaskForGaps = nullptr;
     if (NeedsMaskForGaps) {
       MaskForGaps =
-          createBitMaskForGaps(Builder, VF.getKnownMinValue(), *Group);
+          createBitMaskForGaps(Builder, State.VF.getKnownMinValue(), *Group);
       assert(MaskForGaps && "Mask for Gaps is required but it is null");
     }

@@ -2554,7 +2554,7 @@ void InnerLoopVectorizer::vectorizeInterleaveGroup(
         Value *StridedVec = Builder.CreateExtractValue(DI, I);
         // If this member has different type, cast the result type.
         if (Member->getType() != ScalarTy) {
-          VectorType *OtherVTy = VectorType::get(Member->getType(), VF);
+          VectorType *OtherVTy = VectorType::get(Member->getType(), State.VF);
           StridedVec = createBitOrPointerCast(StridedVec, OtherVTy, DL);
         }

@@ -2580,15 +2580,15 @@ void InnerLoopVectorizer::vectorizeInterleaveGroup(
         continue;

       auto StrideMask =
-          createStrideMask(I, InterleaveFactor, VF.getKnownMinValue());
+          createStrideMask(I, InterleaveFactor, State.VF.getKnownMinValue());
       for (unsigned Part = 0; Part < State.UF; Part++) {
         Value *StridedVec = Builder.CreateShuffleVector(
             NewLoads[Part], StrideMask, "strided.vec");

         // If this member has different type, cast the result type.
         if (Member->getType() != ScalarTy) {
-          assert(!VF.isScalable() && "VF is assumed to be non scalable.");
-          VectorType *OtherVTy = VectorType::get(Member->getType(), VF);
+          assert(!State.VF.isScalable() && "VF is assumed to be non scalable.");
+          VectorType *OtherVTy = VectorType::get(Member->getType(), State.VF);
           StridedVec = createBitOrPointerCast(StridedVec, OtherVTy, DL);
         }

@@ -2603,14 +2603,14 @@ void InnerLoopVectorizer::vectorizeInterleaveGroup(
   }

   // The sub vector type for current instruction.
-  auto *SubVT = VectorType::get(ScalarTy, VF);
+  auto *SubVT = VectorType::get(ScalarTy, State.VF);

   // Vectorize the interleaved store group.
   Value *MaskForGaps =
-      createBitMaskForGaps(Builder, VF.getKnownMinValue(), *Group);
+      createBitMaskForGaps(Builder, State.VF.getKnownMinValue(), *Group);
   assert((!MaskForGaps || useMaskedInterleavedAccesses(*TTI)) &&
          "masked interleaved groups are not allowed.");
-  assert((!MaskForGaps || !VF.isScalable()) &&
+  assert((!MaskForGaps || !State.VF.isScalable()) &&
          "masking gaps for scalable vectors is not yet supported.");
   for (unsigned Part = 0; Part < State.UF; Part++) {
     // Collect the stored vector from each member.