@@ -818,6 +818,16 @@ void AArch64FrameLowering::emitZeroCallUsedRegs(BitVector RegsToZero,
818818 }
819819}
820820
// Collect into \p LiveRegs every register that must be treated as unavailable
// at the entry of \p MBB: the block's live-in registers plus all callee-saved
// registers. CSRs are added because they may appear free before the prologue
// has spilled them (see the comment on findScratchNonCalleeSaveRegister).
821+ static void getLiveRegsForEntryMBB (LivePhysRegs &LiveRegs,
822+ const MachineBasicBlock &MBB) {
823+ const MachineFunction *MF = MBB.getParent ();
824+ LiveRegs.addLiveIns (MBB);
825+ // Mark callee saved registers as used so we will not choose them.
826+ const MCPhysReg *CSRegs = MF->getRegInfo ().getCalleeSavedRegs ();
// CSRegs is a null-terminated array, hence the sentinel-based loop bound.
827+ for (unsigned i = 0 ; CSRegs[i]; ++i)
828+ LiveRegs.addReg (CSRegs[i]);
829+ }
830+
821831// Find a scratch register that we can use at the start of the prologue to
822832// re-align the stack pointer. We avoid using callee-save registers since they
823833// may appear to be free when this is called from canUseAsPrologue (during
@@ -839,12 +849,7 @@ static unsigned findScratchNonCalleeSaveRegister(MachineBasicBlock *MBB) {
839849 const AArch64Subtarget &Subtarget = MF->getSubtarget <AArch64Subtarget>();
840850 const AArch64RegisterInfo &TRI = *Subtarget.getRegisterInfo ();
841851 LivePhysRegs LiveRegs (TRI);
842- LiveRegs.addLiveIns (*MBB);
843-
844- // Mark callee saved registers as used so we will not choose them.
845- const MCPhysReg *CSRegs = MF->getRegInfo ().getCalleeSavedRegs ();
846- for (unsigned i = 0 ; CSRegs[i]; ++i)
847- LiveRegs.addReg (CSRegs[i]);
852+ getLiveRegsForEntryMBB (LiveRegs, *MBB);
848853
849854 // Prefer X9 since it was historically used for the prologue scratch reg.
850855 const MachineRegisterInfo &MRI = MF->getRegInfo ();
@@ -864,6 +869,19 @@ bool AArch64FrameLowering::canUseAsPrologue(
864869 MachineBasicBlock *TmpMBB = const_cast <MachineBasicBlock *>(&MBB);
865870 const AArch64Subtarget &Subtarget = MF->getSubtarget <AArch64Subtarget>();
866871 const AArch64RegisterInfo *RegInfo = Subtarget.getRegisterInfo ();
872+ const AArch64FunctionInfo *AFI = MF->getInfo <AArch64FunctionInfo>();
873+
874+ if (AFI->hasSwiftAsyncContext ()) {
875+ const AArch64RegisterInfo &TRI = *Subtarget.getRegisterInfo ();
876+ const MachineRegisterInfo &MRI = MF->getRegInfo ();
877+ LivePhysRegs LiveRegs (TRI);
878+ getLiveRegsForEntryMBB (LiveRegs, MBB);
879+ // The StoreSwiftAsyncContext clobbers X16 and X17. Make sure they are
880+ // available.
881+ if (!LiveRegs.available (MRI, AArch64::X16) ||
882+ !LiveRegs.available (MRI, AArch64::X17))
883+ return false ;
884+ }
867885
868886 // Don't need a scratch register if we're not going to re-align the stack.
869887 if (!RegInfo->hasStackRealignment (*MF))
0 commit comments