@@ -494,20 +494,6 @@ static LinearExpression GetLinearExpression(
494494 return Val;
495495}
496496
497- /// To ensure a pointer offset fits in an integer of size IndexSize
498- /// (in bits) when that size is smaller than the maximum index size. This is
499- /// an issue, for example, in particular for 32b pointers with negative indices
500- /// that rely on two's complement wrap-arounds for precise alias information
501- /// where the maximum index size is 64b.
502- static void adjustToIndexSize(APInt &Offset, unsigned IndexSize) {
503- assert(IndexSize <= Offset.getBitWidth() && "Invalid IndexSize!");
504- unsigned ShiftBits = Offset.getBitWidth() - IndexSize;
505- if (ShiftBits != 0) {
506- Offset <<= ShiftBits;
507- Offset.ashrInPlace(ShiftBits);
508- }
509- }
510-
511497namespace {
512498// A linear transformation of a Value; this class represents
513499// ZExt(SExt(Trunc(V, TruncBits), SExtBits), ZExtBits) * Scale.
@@ -594,9 +580,9 @@ BasicAAResult::DecomposeGEPExpression(const Value *V, const DataLayout &DL,
594580 SearchTimes++;
595581 const Instruction *CxtI = dyn_cast<Instruction>(V);
596582
597- unsigned MaxIndexSize = DL.getMaxIndexSizeInBits ( );
583+ unsigned IndexSize = DL.getIndexTypeSizeInBits (V-> getType () );
598584 DecomposedGEP Decomposed;
599- Decomposed.Offset = APInt (MaxIndexSize , 0 );
585+ Decomposed.Offset = APInt (IndexSize , 0 );
600586 do {
601587 // See if this is a bitcast or GEP.
602588 const Operator *Op = dyn_cast<Operator>(V);
@@ -614,7 +600,14 @@ BasicAAResult::DecomposeGEPExpression(const Value *V, const DataLayout &DL,
614600
615601 if (Op->getOpcode () == Instruction::BitCast ||
616602 Op->getOpcode () == Instruction::AddrSpaceCast) {
617- V = Op->getOperand (0 );
603+ Value *NewV = Op->getOperand (0 );
604+ // Don't look through casts between address spaces with differing index
605+ // widths.
606+ if (DL.getIndexTypeSizeInBits (NewV->getType ()) != IndexSize) {
607+ Decomposed.Base = V;
608+ return Decomposed;
609+ }
610+ V = NewV;
618611 continue ;
619612 }
620613
@@ -651,12 +644,8 @@ BasicAAResult::DecomposeGEPExpression(const Value *V, const DataLayout &DL,
651644
652645 assert(GEPOp->getSourceElementType()->isSized() && "GEP must be sized");
653646
654- unsigned AS = GEPOp->getPointerAddressSpace ();
655647 // Walk the indices of the GEP, accumulating them into BaseOff/VarIndices.
656648 gep_type_iterator GTI = gep_type_begin (GEPOp);
657- unsigned IndexSize = DL.getIndexSizeInBits (AS);
658- // Assume all GEP operands are constants until proven otherwise.
659- bool GepHasConstantOffset = true ;
660649 for (User::const_op_iterator I = GEPOp->op_begin () + 1 , E = GEPOp->op_end ();
661650 I != E; ++I, ++GTI) {
662651 const Value *Index = *I;
@@ -684,7 +673,7 @@ BasicAAResult::DecomposeGEPExpression(const Value *V, const DataLayout &DL,
684673 }
685674
686675 Decomposed.Offset += AllocTypeSize.getFixedValue () *
687- CIdx->getValue ().sextOrTrunc (MaxIndexSize );
676+ CIdx->getValue ().sextOrTrunc (IndexSize );
688677 continue ;
689678 }
690679
@@ -694,8 +683,6 @@ BasicAAResult::DecomposeGEPExpression(const Value *V, const DataLayout &DL,
694683 return Decomposed;
695684 }
696685
697- GepHasConstantOffset = false ;
698-
699686 // If the integer type is smaller than the index size, it is implicitly
700687 // sign extended or truncated to index size.
701688 bool NUSW = GEPOp->hasNoUnsignedSignedWrap ();
@@ -710,8 +697,8 @@ BasicAAResult::DecomposeGEPExpression(const Value *V, const DataLayout &DL,
710697 // Scale by the type size.
711698 unsigned TypeSize = AllocTypeSize.getFixedValue ();
712699 LE = LE.mul (APInt (IndexSize, TypeSize), NUW, NUSW);
713- Decomposed.Offset += LE.Offset . sext (MaxIndexSize) ;
714- APInt Scale = LE.Scale . sext (MaxIndexSize) ;
700+ Decomposed.Offset += LE.Offset ;
701+ APInt Scale = LE.Scale ;
715702 if (!LE.IsNUW )
716703 Decomposed.NWFlags = Decomposed.NWFlags .withoutNoUnsignedWrap ();
717704
@@ -731,21 +718,13 @@ BasicAAResult::DecomposeGEPExpression(const Value *V, const DataLayout &DL,
731718 }
732719 }
733720
734- // Make sure that we have a scale that makes sense for this target's
735- // index size.
736- adjustToIndexSize (Scale, IndexSize);
737-
738721 if (!!Scale) {
739722 VariableGEPIndex Entry = {LE.Val , Scale, CxtI, LE.IsNSW ,
740723 /* IsNegated */ false };
741724 Decomposed.VarIndices .push_back (Entry);
742725 }
743726 }
744727
745- // Take care of wrap-arounds
746- if (GepHasConstantOffset)
747- adjustToIndexSize (Decomposed.Offset , IndexSize);
748-
749728 // Analyze the base pointer next.
750729 V = GEPOp->getOperand (0 );
751730 } while (--MaxLookup);
@@ -1084,6 +1063,14 @@ AliasResult BasicAAResult::aliasGEP(
10841063 const GEPOperator *GEP1, LocationSize V1Size,
10851064 const Value *V2, LocationSize V2Size,
10861065 const Value *UnderlyingV1, const Value *UnderlyingV2, AAQueryInfo &AAQI) {
1066+ auto BaseObjectsAlias = [&]() {
1067+ AliasResult BaseAlias =
1068+ AAQI.AAR .alias (MemoryLocation::getBeforeOrAfter (UnderlyingV1),
1069+ MemoryLocation::getBeforeOrAfter (UnderlyingV2), AAQI);
1070+ return BaseAlias == AliasResult::NoAlias ? AliasResult::NoAlias
1071+ : AliasResult::MayAlias;
1072+ };
1073+
10871074 if (!V1Size.hasValue () && !V2Size.hasValue ()) {
10881075 // TODO: This limitation exists for compile-time reasons. Relax it if we
10891076 // can avoid exponential pathological cases.
@@ -1092,11 +1079,7 @@ AliasResult BasicAAResult::aliasGEP(
10921079
10931080 // If both accesses have unknown size, we can only check whether the base
10941081 // objects don't alias.
1095- AliasResult BaseAlias =
1096- AAQI.AAR .alias (MemoryLocation::getBeforeOrAfter (UnderlyingV1),
1097- MemoryLocation::getBeforeOrAfter (UnderlyingV2), AAQI);
1098- return BaseAlias == AliasResult::NoAlias ? AliasResult::NoAlias
1099- : AliasResult::MayAlias;
1082+ return BaseObjectsAlias ();
11001083 }
11011084
11021085 DominatorTree *DT = getDT (AAQI);
@@ -1107,6 +1090,10 @@ AliasResult BasicAAResult::aliasGEP(
11071090 if (DecompGEP1.Base == GEP1 && DecompGEP2.Base == V2)
11081091 return AliasResult::MayAlias;
11091092
1093+ // Fall back to base objects if pointers have different index widths.
1094+ if (DecompGEP1.Offset .getBitWidth () != DecompGEP2.Offset .getBitWidth ())
1095+ return BaseObjectsAlias ();
1096+
11101097 // Swap GEP1 and GEP2 if GEP2 has more variable indices.
11111098 if (DecompGEP1.VarIndices .size () < DecompGEP2.VarIndices .size ()) {
11121099 std::swap (DecompGEP1, DecompGEP2);
0 commit comments