@@ -329,7 +329,7 @@ static int64_t getArgumentStackToRestore(MachineFunction &MF,
329329static bool produceCompactUnwindFrame (const AArch64FrameLowering &,
330330 MachineFunction &MF);
331331
332- // Conservatively, returns true if the function is likely to have an SVE vectors
332+ // Conservatively, returns true if the function is likely to have SVE vectors
333333// on the stack. This function is safe to be called before callee-saves or
334334// object offsets have been determined.
335335static bool isLikelyToHaveSVEStack (const AArch64FrameLowering &AFL,
@@ -343,7 +343,7 @@ static bool isLikelyToHaveSVEStack(const AArch64FrameLowering &AFL,
343343
344344 const MachineFrameInfo &MFI = MF.getFrameInfo ();
345345 for (int FI = MFI.getObjectIndexBegin (); FI < MFI.getObjectIndexEnd (); FI++) {
346- if (MFI.getStackID (FI) == TargetStackID::ScalableVector )
346+ if (MFI.isScalableStackID (FI))
347347 return true ;
348348 }
349349
@@ -726,8 +726,7 @@ static void emitCalleeSavedRestores(MachineBasicBlock &MBB,
726726 CFIInstBuilder CFIBuilder (MBB, MBBI, MachineInstr::FrameDestroy);
727727
728728 for (const auto &Info : CSI) {
729- if (SVE !=
730- (MFI.getStackID (Info.getFrameIdx ()) == TargetStackID::ScalableVector))
729+ if (SVE != MFI.isScalableStackID (Info.getFrameIdx ()))
731730 continue ;
732731
733732 MCRegister Reg = Info.getReg ();
@@ -2141,7 +2140,7 @@ AArch64FrameLowering::getFrameIndexReferenceFromSP(const MachineFunction &MF,
21412140 const auto *AFI = MF.getInfo <AArch64FunctionInfo>();
21422141 bool FPAfterSVECalleeSaves =
21432142 isTargetWindows (MF) && AFI->getSVECalleeSavedStackSize ();
2144- if (MFI.getStackID (FI) == TargetStackID::ScalableVector ) {
2143+ if (MFI.isScalableStackID (FI)) {
21452144 if (FPAfterSVECalleeSaves &&
21462145 -ObjectOffset <= (int64_t )AFI->getSVECalleeSavedStackSize ())
21472146 return StackOffset::getScalable (ObjectOffset);
@@ -2207,7 +2206,7 @@ StackOffset AArch64FrameLowering::resolveFrameIndexReference(
22072206 const auto &MFI = MF.getFrameInfo ();
22082207 int64_t ObjectOffset = MFI.getObjectOffset (FI);
22092208 bool isFixed = MFI.isFixedObjectIndex (FI);
2210- bool isSVE = MFI.getStackID (FI) == TargetStackID::ScalableVector ;
2209+ bool isSVE = MFI.isScalableStackID (FI);
22112210 return resolveFrameOffsetReference (MF, ObjectOffset, isFixed, isSVE, FrameReg,
22122211 PreferFP, ForSimm);
22132212}
@@ -2934,10 +2933,14 @@ bool AArch64FrameLowering::spillCalleeSavedRegisters(
29342933 }
29352934 // Update the StackIDs of the SVE stack slots.
29362935 MachineFrameInfo &MFI = MF.getFrameInfo ();
2937- if (RPI.Type == RegPairInfo::ZPR || RPI. Type == RegPairInfo::PPR ) {
2936+ if (RPI.Type == RegPairInfo::ZPR) {
29382937 MFI.setStackID (FrameIdxReg1, TargetStackID::ScalableVector);
29392938 if (RPI.isPaired ())
29402939 MFI.setStackID (FrameIdxReg2, TargetStackID::ScalableVector);
2940+ } else if (RPI.Type == RegPairInfo::PPR) {
2941+ MFI.setStackID (FrameIdxReg1, TargetStackID::ScalablePredicateVector);
2942+ if (RPI.isPaired ())
2943+ MFI.setStackID (FrameIdxReg2, TargetStackID::ScalablePredicateVector);
29412944 }
29422945 }
29432946 return true ;
@@ -3145,8 +3148,7 @@ void AArch64FrameLowering::determineStackHazardSlot(
31453148 for (auto &MI : MBB) {
31463149 std::optional<int > FI = getLdStFrameID (MI, MFI);
31473150 if (FI && *FI >= 0 && *FI < (int )FrameObjects.size ()) {
3148- if (MFI.getStackID (*FI) == TargetStackID::ScalableVector ||
3149- AArch64InstrInfo::isFpOrNEON (MI))
3151+ if (MFI.isScalableStackID (*FI) || AArch64InstrInfo::isFpOrNEON (MI))
31503152 FrameObjects[*FI] |= 2 ;
31513153 else
31523154 FrameObjects[*FI] |= 1 ;
@@ -3591,7 +3593,7 @@ static int64_t determineSVEStackObjectOffsets(MachineFrameInfo &MFI,
35913593#ifndef NDEBUG
35923594 // First process all fixed stack objects.
35933595 for (int I = MFI.getObjectIndexBegin (); I != 0 ; ++I)
3594- assert (MFI.getStackID (I) != TargetStackID::ScalableVector &&
3596+ assert (! MFI.isScalableStackID (I) &&
35953597 " SVE vectors should never be passed on the stack by value, only by "
35963598 " reference." );
35973599#endif
@@ -3625,12 +3627,11 @@ static int64_t determineSVEStackObjectOffsets(MachineFrameInfo &MFI,
36253627 int StackProtectorFI = -1 ;
36263628 if (MFI.hasStackProtectorIndex ()) {
36273629 StackProtectorFI = MFI.getStackProtectorIndex ();
3628- if (MFI.getStackID (StackProtectorFI) == TargetStackID::ScalableVector )
3630+ if (MFI.isScalableStackID (StackProtectorFI))
36293631 ObjectsToAllocate.push_back (StackProtectorFI);
36303632 }
36313633 for (int I = 0 , E = MFI.getObjectIndexEnd (); I != E; ++I) {
3632- unsigned StackID = MFI.getStackID (I);
3633- if (StackID != TargetStackID::ScalableVector)
3634+ if (!MFI.isScalableStackID (I))
36343635 continue ;
36353636 if (I == StackProtectorFI)
36363637 continue ;
@@ -4634,8 +4635,7 @@ void AArch64FrameLowering::orderFrameObjects(
46344635 if (AFI.hasStackHazardSlotIndex ()) {
46354636 std::optional<int > FI = getLdStFrameID (MI, MFI);
46364637 if (FI && *FI >= 0 && *FI < (int )FrameObjects.size ()) {
4637- if (MFI.getStackID (*FI) == TargetStackID::ScalableVector ||
4638- AArch64InstrInfo::isFpOrNEON (MI))
4638+ if (MFI.isScalableStackID (*FI) || AArch64InstrInfo::isFpOrNEON (MI))
46394639 FrameObjects[*FI].Accesses |= FrameObject::AccessFPR;
46404640 else
46414641 FrameObjects[*FI].Accesses |= FrameObject::AccessGPR;
@@ -4993,7 +4993,7 @@ void AArch64FrameLowering::emitRemarks(
49934993 }
49944994
49954995 unsigned RegTy = StackAccess::AccessType::GPR;
4996- if (MFI.getStackID (FrameIdx) == TargetStackID::ScalableVector ) {
4996+ if (MFI.isScalableStackID (FrameIdx)) {
49974997 // SPILL_PPR_TO_ZPR_SLOT_PSEUDO and FILL_PPR_FROM_ZPR_SLOT_PSEUDO
49984998 // spill/fill the predicate as a data vector (so are an FPR access).
49994999 if (MI.getOpcode () != AArch64::SPILL_PPR_TO_ZPR_SLOT_PSEUDO &&