Commit 306510b

SILCombine: remove a problematic cast peephole optimization.
This optimization inserted bit casts based on structural properties of types. It caused a miscompile in the case of imported C structs, and, by inspection, checks for resilient types are also missing. As the optimization has no noticeable impact on the benchmarks, it is better to remove it entirely, together with the type-checking complexity that came with it.

rdar://problem/40074362
1 parent c2cf0e1 commit 306510b
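For orientation, the peephole being removed looked for an unchecked_addr_cast whose only non-debug uses are loads, and rewrote each such load into a load from the original address followed by a value-level bit cast (see the deleted body of visitUncheckedAddrCastInst below). A minimal SIL sketch of that rewrite, using placeholder types FromTy/ToTy and assuming createUncheckedBitCast selects the trivial bit-cast form:

  // Before: an address cast whose only (non-debug) uses are loads.
  %1 = unchecked_addr_cast %0 : $*FromTy to $*ToTy
  %2 = load %1 : $*ToTy

  // After the removed peephole: load the original value, then bit-cast it.
  // (FromTy/ToTy are placeholders; the exact cast instruction depends on the
  // types, e.g. unchecked_trivial_bit_cast for trivial ones.)
  %3 = load %0 : $*FromTy
  %4 = unchecked_trivial_bit_cast %3 : $FromTy to $ToTy

Whether such a value cast was layout-compatible was decided by the structural checks behind canPerformABICompatibleUnsafeCastValue; those checks are what miscompiled imported C structs and never considered resilient types, so the whole pattern is removed and the visitor now simply returns nullptr for this case.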

5 files changed  +9 -571 lines changed

include/swift/SIL/SILType.h

Lines changed: 0 additions & 11 deletions
@@ -367,17 +367,6 @@ class SILType {
   /// pointer.
   bool isPointerSizeAndAligned();
 
-  /// Return true if the layout of `toType` is an ABI compatible prefix of
-  /// `fromType` ignoring reference types. `fromType` may be larger than
-  /// `toType` and still be unsafe castable. `fromType` may contain references
-  /// in positions where `toType` does not contain references and still be
-  /// unsafe castable. This is used solely to determine whether an address cast
-  /// can be promoted to a cast between aggregates of scalar values without
-  /// confusing IRGen.
-  static bool canPerformABICompatibleUnsafeCastValue(SILType fromType,
-                                                     SILType toType,
-                                                     SILModule &M);
-
   /// True if `operTy` can be cast by single-reference value into `resultTy`.
   static bool canRefCast(SILType operTy, SILType resultTy, SILModule &M);

lib/SIL/SILType.cpp

Lines changed: 0 additions & 187 deletions
@@ -110,193 +110,6 @@ bool SILType::isPointerSizeAndAligned() {
   return false;
 }
 
-// Allow casting a struct by value when all elements in toType correspond to
-// an element of the same size or larger laid out in the same order in
-// fromType. The assumption is that if fromType has larger elements, or
-// additional elements, their presence cannot induce a more compact layout of
-// the overlapping elements.
-//
-// struct {A, B} -> A is castable
-// struct {A, B, C} -> struct {A, B} is castable
-// struct { struct {A, B}, C} -> struct {A, B} is castable
-// struct { A, B, C} -> struct { struct {A, B}, C} is NOT castable
-//
-// FIXME: This is unnecessarily conservative given the current ABI
-// (TypeLayout.rst). It would be simpler to flatten both `from` and `to` types,
-// exploding all structs and tuples, then trivially check if `to` is a prefix.
-static bool canUnsafeCastStruct(SILType fromType, StructDecl *fromStruct,
-                                SILType toType, SILModule &M) {
-  auto fromRange = fromStruct->getStoredProperties();
-  if (fromRange.begin() == fromRange.end())
-    return false;
-
-  // Can the first element of fromStruct be cast by value into toType?
-  SILType fromEltTy = fromType.getFieldType(*fromRange.begin(), M);
-  if (SILType::canPerformABICompatibleUnsafeCastValue(fromEltTy, toType, M))
-    return true;
-
-  // Otherwise, flatten one level of struct elements on each side.
-  StructDecl *toStruct = toType.getStructOrBoundGenericStruct();
-  if (!toStruct)
-    return false;
-
-  auto toRange = toStruct->getStoredProperties();
-  for (auto toI = toRange.begin(), toE = toRange.end(),
-            fromI = fromRange.begin(), fromE = fromRange.end();
-       toI != toE; ++toI, ++fromI) {
-
-    if (fromI == fromE)
-      return false; // fromType is a struct with fewer elements.
-
-    SILType fromEltTy = fromType.getFieldType(*fromI, M);
-    SILType toEltTy = toType.getFieldType(*toI, M);
-    if (!SILType::canPerformABICompatibleUnsafeCastValue(fromEltTy, toEltTy, M))
-      return false;
-  }
-  // fromType's overlapping elements are compatible.
-  return true;
-}
-
-// Allow casting a tuple by value when all elements in toType correspond to an
-// element of the same size or larger in fromType in the same order.
-static bool canUnsafeCastTuple(SILType fromType, CanTupleType fromTupleTy,
-                               SILType toType, SILModule &M) {
-  unsigned numFromElts = fromTupleTy->getNumElements();
-  // Can the first element of fromTupleTy be cast by value into toType?
-  if (numFromElts != 0
-      && SILType::canPerformABICompatibleUnsafeCastValue(
-           fromType.getTupleElementType(0), toType, M)) {
-    return true;
-  }
-  // Otherwise, flatten one level of tuple elements on each side.
-  auto toTupleTy = dyn_cast<TupleType>(toType.getSwiftRValueType());
-  if (!toTupleTy)
-    return false;
-
-  unsigned numToElts = toTupleTy->getNumElements();
-  if (numFromElts < numToElts)
-    return false;
-
-  for (unsigned i = 0; i != numToElts; ++i) {
-    if (!SILType::canPerformABICompatibleUnsafeCastValue(
-          fromType.getTupleElementType(i), toType.getTupleElementType(i),
-          M)) {
-      return false;
-    }
-  }
-  return true;
-}
-
-// Allow casting an enum by value when toType is an enum and each elements is
-// individually castable to toType. An enum cannot be smaller than its payload.
-static bool canUnsafeCastEnum(SILType fromType, EnumDecl *fromEnum,
-                              SILType toType, SILModule &M) {
-  unsigned numToElements = 0;
-  SILType toElementTy;
-  if (EnumDecl *toEnum = toType.getEnumOrBoundGenericEnum()) {
-    for (auto toElement : toEnum->getAllElements()) {
-      ++numToElements;
-      if (!toElement->hasAssociatedValues())
-        continue;
-      // Bail on multiple payloads.
-      if (!toElementTy.isNull())
-        return false;
-      toElementTy = toType.getEnumElementType(toElement, M);
-    }
-  } else {
-    // If toType is not an enum, handle it like a singleton
-    numToElements = 1;
-    toElementTy = toType;
-  }
-  // If toType has more elements, it may be larger.
-  auto fromElements = fromEnum->getAllElements();
-  if (static_cast<ptrdiff_t>(numToElements) >
-      std::distance(fromElements.begin(), fromElements.end()))
-    return false;
-
-  if (toElementTy.isNull())
-    return true;
-
-  // If any of the fromElements can be cast by value to the singleton toElement,
-  // then the overall enum can be cast by value.
-  for (auto fromElement : fromElements) {
-    if (!fromElement->hasAssociatedValues())
-      continue;
-
-    auto fromElementTy = fromType.getEnumElementType(fromElement, M);
-    if (SILType::canPerformABICompatibleUnsafeCastValue(fromElementTy,
-                                                        toElementTy, M))
-      return true;
-  }
-  return false;
-}
-
-static bool canUnsafeCastScalars(SILType fromType, SILType toType,
-                                 SILModule &M) {
-  CanType fromCanTy = fromType.getSwiftRValueType();
-  bool isToPointer = toType.isPointerSizeAndAligned();
-
-  unsigned LeastFromWidth = 0;
-  // Like UnsafeRefBitCast, allow class existentials to be truncated to
-  // single-pointer references. Unlike UnsafeRefBitCast, this also supports raw
-  // pointers and words.
-  if (fromType.isPointerSizeAndAligned()
-      || fromCanTy.isAnyClassReferenceType()) {
-
-    // Allow casting from a value that contains an aligned pointer into another
-    // pointer value regardless of the fixed width.
-    if (isToPointer)
-      return true;
-
-    LeastFromWidth = BuiltinIntegerWidth::pointer().getLeastWidth();
-
-  } else if (auto fromIntTy = dyn_cast<BuiltinIntegerType>(fromCanTy)) {
-    if (fromIntTy->isFixedWidth())
-      LeastFromWidth = fromIntTy->getFixedWidth();
-  }
-
-  unsigned GreatestToWidth = UINT_MAX;
-  if (isToPointer) {
-    GreatestToWidth = BuiltinIntegerWidth::pointer().getGreatestWidth();
-
-  } else if (auto toIntTy = dyn_cast<BuiltinIntegerType>(
-               toType.getSwiftRValueType())) {
-    if (toIntTy->isFixedWidth())
-      GreatestToWidth = toIntTy->getFixedWidth();
-  }
-  return LeastFromWidth >= GreatestToWidth;
-}
-
-bool SILType::canPerformABICompatibleUnsafeCastValue(SILType fromType,
-                                                     SILType toType,
-                                                     SILModule &M) {
-  if (fromType == toType)
-    return true;
-
-  // Unwrap single element structs.
-  if (StructDecl *toStruct = toType.getStructOrBoundGenericStruct()) {
-    auto toRange = toStruct->getStoredProperties();
-    if (toRange.begin() != toRange.end()
-        && std::next(toRange.begin()) == toRange.end()) {
-      toType = toType.getFieldType(*toRange.begin(), M);
-    }
-  }
-  if (canUnsafeCastScalars(fromType, toType, M))
-    return true;
-
-  if (StructDecl *fromStruct = fromType.getStructOrBoundGenericStruct())
-    return canUnsafeCastStruct(fromType, fromStruct, toType, M);
-
-  if (CanTupleType fromTupleTy =
-        dyn_cast<TupleType>(fromType.getSwiftRValueType())) {
-    return canUnsafeCastTuple(fromType, fromTupleTy, toType, M);
-  }
-  if (EnumDecl *fromEnum = fromType.getEnumOrBoundGenericEnum())
-    return canUnsafeCastEnum(fromType, fromEnum, toType, M);
-
-  return false;
-}
-
 // Reference cast from representations with single pointer low bits.
 // Only reference cast to simple single pointer representations.
 //

lib/SILOptimizer/SILCombiner/SILCombinerCastVisitors.cpp

Lines changed: 1 addition & 62 deletions
@@ -208,68 +208,7 @@ SILCombiner::visitUncheckedAddrCastInst(UncheckedAddrCastInst *UADCI) {
     return Builder.createUpcast(UADCI->getLoc(), UADCI->getOperand(),
                                 UADCI->getType());
 
-  // See if we have all loads from this unchecked_addr_cast. If we do, load the
-  // original type and create the appropriate bitcast.
-
-  // First if our UADCI has not users, bail. This will be eliminated by DCE.
-  if (UADCI->use_empty())
-    return nullptr;
-
-  SILType InputTy = UADCI->getOperand()->getType();
-  SILType OutputTy = UADCI->getType();
-
-  // If either type is address only, do not do anything here.
-  if (InputTy.isAddressOnly(Mod) || OutputTy.isAddressOnly(Mod))
-    return nullptr;
-
-  bool InputIsTrivial = InputTy.isTrivial(Mod);
-  bool OutputIsTrivial = OutputTy.isTrivial(Mod);
-
-  // If our input is trivial and our output type is not, do not do
-  // anything. This is to ensure that we do not change any types reference
-  // semantics from trivial -> reference counted.
-  if (InputIsTrivial && !OutputIsTrivial)
-    return nullptr;
-
-  // Check that the input type can be value cast to the output type. It is
-  // possible to cast the address of a smaller InputType to the address of a
-  // larger OutputType (the actual memory object must be large enough to hold
-  // both types). However, such address casts cannot be converted to value
-  // casts.
-  if (!SILType::canPerformABICompatibleUnsafeCastValue(InputTy, OutputTy,
-                                                       UADCI->getModule())) {
-    return nullptr;
-  }
-  // For each user U of the unchecked_addr_cast...
-  for (auto U : getNonDebugUses(UADCI))
-    // Check if it is load. If it is not a load, bail...
-    if (!isa<LoadInst>(U->getUser()))
-      return nullptr;
-
-  SILValue Op = UADCI->getOperand();
-  SILLocation Loc = UADCI->getLoc();
-
-  // Ok, we have all loads. Lets simplify this. Go back through the loads a
-  // second time, rewriting them into a load + bitcast from our source.
-  auto UsesRange = getNonDebugUses(UADCI);
-  for (auto UI = UsesRange.begin(), E = UsesRange.end(); UI != E;) {
-    // Grab the original load.
-    LoadInst *L = cast<LoadInst>(UI->getUser());
-    UI++;
-
-    // Insert a new load from our source and bitcast that as appropriate.
-    LoadInst *NewLoad =
-        Builder.createLoad(Loc, Op, LoadOwnershipQualifier::Unqualified);
-    auto *BitCast = Builder.createUncheckedBitCast(Loc, NewLoad,
-                                                   OutputTy.getObjectType());
-    // Replace all uses of the old load with the new bitcasted result and erase
-    // the old load.
-    replaceInstUsesWith(*L, BitCast);
-    eraseInstFromFunction(*L);
-  }
-
-  // Delete the old cast.
-  return eraseInstFromFunction(*UADCI);
+  return nullptr;
 }
 
 SILInstruction *
