@@ -189,7 +189,7 @@ RuntimeCheckingPtrGroup::RuntimeCheckingPtrGroup(
 }
 
 /// Returns \p A + \p B, if it is guaranteed not to unsigned wrap. Otherwise
-/// return nullptr.
+/// return nullptr. \p A and \p B must have the same type.
 static const SCEV *addSCEVOverflow(const SCEV *A, const SCEV *B,
                                    ScalarEvolution &SE) {
   if (!SE.willNotOverflow(Instruction::Add, false, A, B))
@@ -198,7 +198,7 @@ static const SCEV *addSCEVOverflow(const SCEV *A, const SCEV *B,
 }
 
 /// Returns \p A * \p B, if it is guaranteed not to unsigned wrap. Otherwise
-/// return nullptr.
+/// return nullptr. \p A and \p B must have the same type.
 static const SCEV *mulSCEVOverflow(const SCEV *A, const SCEV *B,
                                    ScalarEvolution &SE) {
   if (!SE.willNotOverflow(Instruction::Mul, false, A, B))
@@ -225,49 +225,48 @@ static bool evaluatePtrAddRecAtMaxBTCWillNotWrap(const SCEVAddRecExpr *AR,
   return false;
 
   const SCEV *Step = AR->getStepRecurrence(SE);
+  bool IsKnownNonNegative = SE.isKnownNonNegative(Step);
+  if (!IsKnownNonNegative && !SE.isKnownNegative(Step))
+    return false;
+
   Type *WiderTy = SE.getWiderType(MaxBTC->getType(), Step->getType());
   Step = SE.getNoopOrSignExtend(Step, WiderTy);
   MaxBTC = SE.getNoopOrZeroExtend(MaxBTC, WiderTy);
 
   // For the computations below, make sure they don't unsigned wrap.
   if (!SE.isKnownPredicate(CmpInst::ICMP_UGE, AR->getStart(), StartPtr))
     return false;
-  const SCEV *StartOffset = SE.getNoopOrSignExtend(
+  const SCEV *StartOffset = SE.getNoopOrZeroExtend(
       SE.getMinusSCEV(AR->getStart(), StartPtr), WiderTy);
 
   const SCEV *OffsetAtLastIter =
       mulSCEVOverflow(MaxBTC, SE.getAbsExpr(Step, false), SE);
   if (!OffsetAtLastIter)
     return false;
 
-  const SCEV *OffsetLastAccessedByte = addSCEVOverflow(
+  const SCEV *OffsetEndBytes = addSCEVOverflow(
       OffsetAtLastIter, SE.getNoopOrZeroExtend(EltSize, WiderTy), SE);
-  if (!OffsetLastAccessedByte)
+  if (!OffsetEndBytes)
     return false;
 
-  if (SE.isKnownPositive(Step)) {
+  if (IsKnownNonNegative) {
     // For positive steps, check if
     // (AR->getStart() - StartPtr) + (MaxBTC * Step) + EltSize <= DerefBytes,
     // while making sure none of the computations unsigned wrap themselves.
-    const SCEV *LastAccessedByte =
-        addSCEVOverflow(StartOffset, OffsetLastAccessedByte, SE);
-    if (!LastAccessedByte)
+    const SCEV *EndBytes = addSCEVOverflow(StartOffset, OffsetEndBytes, SE);
+    if (!EndBytes)
       return false;
-    return SE.isKnownPredicate(CmpInst::ICMP_ULE, LastAccessedByte,
+    return SE.isKnownPredicate(CmpInst::ICMP_ULE, EndBytes,
                                SE.getConstant(WiderTy, DerefBytes));
   }
 
-  if (SE.isKnownNegative(Step)) {
-    // For negative steps check if
-    //  * StartOffset >= (MaxBTC * Step + EltSize)
-    //  * StartOffset <= DerefBytes.
-    return SE
-               .isKnownPredicate(CmpInst::ICMP_SGE, StartOffset,
-                                 OffsetLastAccessedByte) &&
-           SE.isKnownPredicate(CmpInst::ICMP_ULE, StartOffset,
-                               SE.getConstant(WiderTy, DerefBytes));
-  }
-  return false;
+  // For negative steps check if
+  //  * StartOffset >= (MaxBTC * Step + EltSize)
+  //  * StartOffset <= DerefBytes.
+  assert(SE.isKnownNegative(Step) && "must be known negative");
+  return SE.isKnownPredicate(CmpInst::ICMP_SGE, StartOffset, OffsetEndBytes) &&
+         SE.isKnownPredicate(CmpInst::ICMP_ULE, StartOffset,
                              SE.getConstant(WiderTy, DerefBytes));
 }
 
 std::pair<const SCEV *, const SCEV *> llvm::getStartAndEndForAccess(
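
To make the bound being proven concrete, here is a minimal standalone sketch of the same check using plain integers in place of SCEV expressions. The helper and all names below are hypothetical illustrations, not LLVM API: the overflow bailouts stand in for mulSCEVOverflow/addSCEVOverflow returning nullptr, whereas the real code proves non-wrapping symbolically via ScalarEvolution rather than testing it at runtime.

// Sketch only: mirrors the structure of evaluatePtrAddRecAtMaxBTCWillNotWrap
// with concrete unsigned arithmetic. Assumes GCC/Clang overflow builtins.
#include <cstdint>
#include <optional>

static std::optional<bool>
accessStaysDereferenceable(uint64_t StartOffset, // AR->getStart() - StartPtr
                           int64_t Step,         // stride per iteration, bytes
                           uint64_t MaxBTC,      // max backedge-taken count
                           uint64_t EltSize,     // access size in bytes
                           uint64_t DerefBytes) {// known dereferenceable bytes
  // Compute |Step| * MaxBTC + EltSize, bailing out (like mulSCEVOverflow /
  // addSCEVOverflow returning nullptr) if the arithmetic wraps.
  uint64_t AbsStep = Step < 0 ? 0 - uint64_t(Step) : uint64_t(Step);
  uint64_t OffsetAtLastIter, OffsetEndBytes;
  if (__builtin_mul_overflow(MaxBTC, AbsStep, &OffsetAtLastIter) ||
      __builtin_add_overflow(OffsetAtLastIter, EltSize, &OffsetEndBytes))
    return std::nullopt;

  if (Step >= 0) {
    // Forward stride: the last accessed byte must lie within DerefBytes.
    uint64_t EndBytes;
    if (__builtin_add_overflow(StartOffset, OffsetEndBytes, &EndBytes))
      return std::nullopt;
    return EndBytes <= DerefBytes;
  }
  // Backward stride: walking down from StartOffset must not pass the start
  // of the object, and StartOffset itself must be within DerefBytes.
  return StartOffset >= OffsetEndBytes && StartOffset <= DerefBytes;
}

// E.g. StartOffset=0, Step=4, MaxBTC=99, EltSize=4, DerefBytes=400:
// EndBytes = 0 + 99*4 + 4 = 400 <= 400, so every access stays in bounds.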