From db4bfccde7b7a1bc0987cfe22b7a023e161a002f Mon Sep 17 00:00:00 2001 From: Andrew Trick Date: Tue, 18 Jan 2022 09:09:09 -0800 Subject: [PATCH 01/29] Move -enable-sil-opaque-value to SILOptions. --- include/swift/AST/SILOptions.h | 3 +++ include/swift/Basic/LangOptions.h | 5 ----- include/swift/Option/FrontendOptions.td | 6 +++--- lib/Frontend/CompilerInvocation.cpp | 2 +- lib/SIL/IR/SILType.cpp | 2 +- lib/SIL/IR/TypeLowering.cpp | 2 +- lib/SILOptimizer/Mandatory/AddressLowering.cpp | 2 +- tools/sil-opt/SILOpt.cpp | 3 +-- 8 files changed, 11 insertions(+), 14 deletions(-) diff --git a/include/swift/AST/SILOptions.h b/include/swift/AST/SILOptions.h index c9f4fade472a6..31d02389e86ee 100644 --- a/include/swift/AST/SILOptions.h +++ b/include/swift/AST/SILOptions.h @@ -140,6 +140,9 @@ class SILOptions { /// If this is disabled we do not serialize in OSSA form when optimizing. bool EnableOSSAModules = false; + /// If set to true, compile with the SIL Opaque Values enabled. + bool EnableSILOpaqueValues = false; + // The kind of function bodies to skip emitting. FunctionBodySkipping SkipFunctionBodies = FunctionBodySkipping::None; diff --git a/include/swift/Basic/LangOptions.h b/include/swift/Basic/LangOptions.h index de10fee8ec98d..4a23840854690 100644 --- a/include/swift/Basic/LangOptions.h +++ b/include/swift/Basic/LangOptions.h @@ -379,11 +379,6 @@ namespace swift { /// [TODO: Clang-type-plumbing] Turn on for feature rollout. bool UseClangFunctionTypes = false; - /// If set to true, compile with the SIL Opaque Values enabled. - /// This is for bootstrapping. It can't be in SILOptions because the - /// TypeChecker uses it to set resolve the ParameterConvention. - bool EnableSILOpaqueValues = false; - /// If set to true, the diagnosis engine can assume the emitted diagnostics /// will be used in editor. This usually leads to more aggressive fixit. bool DiagnosticsEditorMode = false; diff --git a/include/swift/Option/FrontendOptions.td b/include/swift/Option/FrontendOptions.td index 3edff91a9aa89..b991bae1d8e34 100644 --- a/include/swift/Option/FrontendOptions.td +++ b/include/swift/Option/FrontendOptions.td @@ -507,9 +507,6 @@ def disable_sil_ownership_verifier : Flag<["-"], "disable-sil-ownership-verifier def suppress_static_exclusivity_swap : Flag<["-"], "suppress-static-exclusivity-swap">, HelpText<"Suppress static violations of exclusive access with swap()">; -def enable_sil_opaque_values : Flag<["-"], "enable-sil-opaque-values">, - HelpText<"Enable SIL Opaque Values">; - def enable_experimental_static_assert : Flag<["-"], "enable-experimental-static-assert">, HelpText<"Enable experimental #assert">; @@ -1022,6 +1019,9 @@ def enable_ossa_modules : Flag<["-"], "enable-ossa-modules">, HelpText<"Always serialize SIL in ossa form. 
If this flag is not passed in, " "when optimizing ownership will be lowered before serializing SIL">; +def enable_sil_opaque_values : Flag<["-"], "enable-sil-opaque-values">, + HelpText<"Enable SIL Opaque Values">; + def new_driver_path : Separate<["-"], "new-driver-path">, MetaVarName<"">, HelpText<"Path of the new driver to be used">; diff --git a/lib/Frontend/CompilerInvocation.cpp b/lib/Frontend/CompilerInvocation.cpp index 5ea2f23bbdf33..1a9bf01906818 100644 --- a/lib/Frontend/CompilerInvocation.cpp +++ b/lib/Frontend/CompilerInvocation.cpp @@ -770,7 +770,6 @@ static bool ParseLangArgs(LangOptions &Opts, ArgList &Args, Opts.EnableObjCInterop = Args.hasFlag(OPT_enable_objc_interop, OPT_disable_objc_interop, Target.isOSDarwin()); - Opts.EnableSILOpaqueValues |= Args.hasArg(OPT_enable_sil_opaque_values); Opts.VerifyAllSubstitutionMaps |= Args.hasArg(OPT_verify_all_substitution_maps); @@ -1682,6 +1681,7 @@ static bool ParseSILArgs(SILOptions &Opts, ArgList &Args, Opts.EnableARCOptimizations &= !Args.hasArg(OPT_disable_arc_opts); Opts.EnableOSSAModules |= Args.hasArg(OPT_enable_ossa_modules); Opts.EnableOSSAOptimizations &= !Args.hasArg(OPT_disable_ossa_opts); + Opts.EnableSILOpaqueValues |= Args.hasArg(OPT_enable_sil_opaque_values); Opts.EnableSpeculativeDevirtualization |= Args.hasArg(OPT_enable_spec_devirt); Opts.EnableActorDataRaceChecks |= Args.hasFlag( OPT_enable_actor_data_race_checks, diff --git a/lib/SIL/IR/SILType.cpp b/lib/SIL/IR/SILType.cpp index da4bb30b81984..51e574c6ac198 100644 --- a/lib/SIL/IR/SILType.cpp +++ b/lib/SIL/IR/SILType.cpp @@ -507,7 +507,7 @@ SILResultInfo::getOwnershipKind(SILFunction &F, SILModuleConventions::SILModuleConventions(SILModule &M) : M(&M), - loweredAddresses(!M.getASTContext().LangOpts.EnableSILOpaqueValues + loweredAddresses(!M.getOptions().EnableSILOpaqueValues || M.getStage() == SILStage::Lowered) {} diff --git a/lib/SIL/IR/TypeLowering.cpp b/lib/SIL/IR/TypeLowering.cpp index fc6be72968979..65bf3fde1b804 100644 --- a/lib/SIL/IR/TypeLowering.cpp +++ b/lib/SIL/IR/TypeLowering.cpp @@ -1683,7 +1683,7 @@ namespace { TypeLowering *handleAddressOnly(CanType type, RecursiveProperties properties) { - if (!TC.Context.LangOpts.EnableSILOpaqueValues) { + if (!TC.Context.SILOpts.EnableSILOpaqueValues) { auto silType = SILType::getPrimitiveAddressType(type); return new (TC) AddressOnlyTypeLowering(silType, properties, Expansion); diff --git a/lib/SILOptimizer/Mandatory/AddressLowering.cpp b/lib/SILOptimizer/Mandatory/AddressLowering.cpp index bd428535e7371..1e1a3cd7539d8 100644 --- a/lib/SILOptimizer/Mandatory/AddressLowering.cpp +++ b/lib/SILOptimizer/Mandatory/AddressLowering.cpp @@ -1527,7 +1527,7 @@ void AddressLowering::runOnFunction(SILFunction *F) { /// The entry point to this function transformation. void AddressLowering::run() { - if (getModule()->getASTContext().LangOpts.EnableSILOpaqueValues) { + if (getModule()->getOptions().EnableSILOpaqueValues) { for (auto &F : *getModule()) runOnFunction(&F); } diff --git a/tools/sil-opt/SILOpt.cpp b/tools/sil-opt/SILOpt.cpp index ac01ba00642c0..9edd47cb4df1c 100644 --- a/tools/sil-opt/SILOpt.cpp +++ b/tools/sil-opt/SILOpt.cpp @@ -571,8 +571,6 @@ int main(int argc, char **argv) { EnableObjCInterop ? true : DisableObjCInterop ? 
false : llvm::Triple(Target).isOSDarwin(); - Invocation.getLangOptions().EnableSILOpaqueValues = EnableSILOpaqueValues; - Invocation.getLangOptions().OptimizationRemarkPassedPattern = createOptRemarkRegex(PassRemarksPassed); Invocation.getLangOptions().OptimizationRemarkMissedPattern = @@ -634,6 +632,7 @@ int main(int argc, char **argv) { SILOpts.EnableSpeculativeDevirtualization = EnableSpeculativeDevirtualization; SILOpts.IgnoreAlwaysInline = IgnoreAlwaysInline; SILOpts.EnableOSSAModules = EnableOSSAModules; + SILOpts.EnableSILOpaqueValues = EnableSILOpaqueValues; if (CopyPropagationState) { SILOpts.CopyPropagation = *CopyPropagationState; From 0897a6952f65f9f020025e237ec47d8f65e8cbe0 Mon Sep 17 00:00:00 2001 From: Andrew Trick Date: Tue, 18 Jan 2022 09:12:23 -0800 Subject: [PATCH 02/29] Add emitLoad/emitStore to OpaqueValue type lowering. --- lib/SIL/IR/TypeLowering.cpp | 18 ++++++++++++++---- 1 file changed, 14 insertions(+), 4 deletions(-) diff --git a/lib/SIL/IR/TypeLowering.cpp b/lib/SIL/IR/TypeLowering.cpp index 65bf3fde1b804..52b0faf8f72d0 100644 --- a/lib/SIL/IR/TypeLowering.cpp +++ b/lib/SIL/IR/TypeLowering.cpp @@ -1605,10 +1605,6 @@ namespace { }; /// Lower address only types as opaque values. - /// - /// Opaque values behave like loadable leaf types in SIL. - /// - /// FIXME: When you remove an unreachable, just delete the method. class OpaqueValueTypeLowering : public LeafLoadableTypeLowering { public: OpaqueValueTypeLowering(SILType type, RecursiveProperties properties, @@ -1622,6 +1618,20 @@ namespace { llvm_unreachable("copy into"); } + // OpaqueValue store cannot be decoupled from a destroy because it is not + // bitwise-movable. + void emitStore(SILBuilder &B, SILLocation loc, SILValue value, + SILValue addr, StoreOwnershipQualifier qual) const override { + B.createStore(loc, value, addr, qual); + } + + // OpaqueValue load cannot be decoupled from a copy because it is not + // bitwise-movable. + SILValue emitLoad(SILBuilder &B, SILLocation loc, SILValue addr, + LoadOwnershipQualifier qual) const override { + return B.createLoad(loc, addr, qual); + } + // --- Same as LeafLoadableTypeLowering. SILValue emitLoweredCopyValue(SILBuilder &B, SILLocation loc, From 4ccb2e9b2d3868bb350c5a0f5e7b54a77ea2ff4d Mon Sep 17 00:00:00 2001 From: Andrew Trick Date: Mon, 14 Feb 2022 23:23:42 -0800 Subject: [PATCH 03/29] SILModule::hasLoweredAddress --- include/swift/SIL/SILModule.h | 15 ++++++++++++--- lib/SIL/IR/SILModule.cpp | 4 ++-- lib/SIL/IR/SILType.cpp | 5 +---- lib/SIL/Parser/ParseSIL.cpp | 3 +++ lib/SILGen/SILGenFunction.cpp | 13 ++++++------- lib/SILGen/SILGenFunction.h | 2 ++ lib/SILOptimizer/Mandatory/AddressLowering.cpp | 12 +++++++----- lib/SILOptimizer/PassManager/PassPipeline.cpp | 8 +++++--- lib/SILOptimizer/PassManager/Passes.cpp | 2 +- 9 files changed, 39 insertions(+), 25 deletions(-) diff --git a/include/swift/SIL/SILModule.h b/include/swift/SIL/SILModule.h index 36fbfdcaf7ece..63ef8ade8f357 100644 --- a/include/swift/SIL/SILModule.h +++ b/include/swift/SIL/SILModule.h @@ -307,6 +307,12 @@ class SILModule { /// The stage of processing this module is at. SILStage Stage; + /// True if SIL conventions force address-only to be passed by address. + /// + /// Used for bootstrapping the AddressLowering pass. This should eventually + /// be inferred from the SIL stage to be true only when Stage == Lowered. + bool loweredAddresses; + /// The set of deserialization notification handlers. 
DeserializationNotificationHandlerSet deserializationNotificationHandlers; @@ -806,6 +812,11 @@ class SILModule { Stage = s; } + /// True if SIL conventions force address-only to be passed by address. + bool useLoweredAddresses() const { return loweredAddresses; } + + void setLoweredAddresses(bool val) { loweredAddresses = val; } + llvm::IndexedInstrProfReader *getPGOReader() const { return PGOReader.get(); } void setPGOReader(std::unique_ptr IPR) { @@ -972,15 +983,13 @@ inline bool SILOptions::supportsLexicalLifetimes(const SILModule &mod) const { // entirely. return LexicalLifetimes != LexicalLifetimesOption::Off; case SILStage::Canonical: + case SILStage::Lowered: // In Canonical SIL, lexical markers are used to ensure that object // lifetimes do not get observably shortened from the end of a lexical // scope. That behavior only occurs when lexical lifetimes is (fully) // enabled. (When only diagnostic markers are enabled, the markers are // stripped as part of lowering from raw to canonical SIL.) return LexicalLifetimes == LexicalLifetimesOption::On; - case SILStage::Lowered: - // We do not support OSSA in Lowered SIL, so this is always false. - return false; } } diff --git a/lib/SIL/IR/SILModule.cpp b/lib/SIL/IR/SILModule.cpp index 6f63f8c85ec08..8bd442a03ddd8 100644 --- a/lib/SIL/IR/SILModule.cpp +++ b/lib/SIL/IR/SILModule.cpp @@ -91,8 +91,8 @@ class SILModule::SerializationCallback final SILModule::SILModule(llvm::PointerUnion context, Lowering::TypeConverter &TC, const SILOptions &Options) - : Stage(SILStage::Raw), indexTrieRoot(new IndexTrieNode()), - Options(Options), serialized(false), + : Stage(SILStage::Raw), loweredAddresses(!Options.EnableSILOpaqueValues), + indexTrieRoot(new IndexTrieNode()), Options(Options), serialized(false), regDeserializationNotificationHandlerForNonTransparentFuncOME(false), regDeserializationNotificationHandlerForAllFuncOME(false), prespecializedFunctionDeclsImported(false), SerializeSILAction(), diff --git a/lib/SIL/IR/SILType.cpp b/lib/SIL/IR/SILType.cpp index 51e574c6ac198..9914d564b4d79 100644 --- a/lib/SIL/IR/SILType.cpp +++ b/lib/SIL/IR/SILType.cpp @@ -506,10 +506,7 @@ SILResultInfo::getOwnershipKind(SILFunction &F, } SILModuleConventions::SILModuleConventions(SILModule &M) - : M(&M), - loweredAddresses(!M.getOptions().EnableSILOpaqueValues - || M.getStage() == SILStage::Lowered) -{} + : M(&M), loweredAddresses(M.useLoweredAddresses()) {} bool SILModuleConventions::isReturnedIndirectlyInSIL(SILType type, SILModule &M) { diff --git a/lib/SIL/Parser/ParseSIL.cpp b/lib/SIL/Parser/ParseSIL.cpp index d40a298744d37..d16c45e0d9a27 100644 --- a/lib/SIL/Parser/ParseSIL.cpp +++ b/lib/SIL/Parser/ParseSIL.cpp @@ -6535,6 +6535,9 @@ bool SILParserState::parseDeclSILStage(Parser &P) { } M.setStage(stage); + if (M.getOptions().EnableSILOpaqueValues) { + M.setLoweredAddresses(stage != SILStage::Raw); + } DidParseSILStage = true; return false; } diff --git a/lib/SILGen/SILGenFunction.cpp b/lib/SILGen/SILGenFunction.cpp index 79c075920bcc4..27dc9410142a5 100644 --- a/lib/SILGen/SILGenFunction.cpp +++ b/lib/SILGen/SILGenFunction.cpp @@ -290,13 +290,12 @@ void SILGenFunction::emitCaptures(SILLocation loc, // Get an address value for a SILValue if it is address only in an type // expansion context without opaque archetype substitution. 
auto getAddressValue = [&](SILValue entryValue) -> SILValue { - if (SGM.Types - .getTypeLowering( - valueType, - TypeExpansionContext::noOpaqueTypeArchetypesSubstitution( - expansion.getResilienceExpansion())) - .isAddressOnly() && - !entryValue->getType().isAddress()) { + if (SGM.Types.getTypeLowering( + valueType, + TypeExpansionContext::noOpaqueTypeArchetypesSubstitution( + expansion.getResilienceExpansion())) + .isAddressOnly() + && !entryValue->getType().isAddress()) { auto addr = emitTemporaryAllocation(vd, entryValue->getType()); auto val = B.emitCopyValueOperation(vd, entryValue); diff --git a/lib/SILGen/SILGenFunction.h b/lib/SILGen/SILGenFunction.h index 79e0b49d0d4b5..26456ac8b58d4 100644 --- a/lib/SILGen/SILGenFunction.h +++ b/lib/SILGen/SILGenFunction.h @@ -245,6 +245,8 @@ class LLVM_LIBRARY_VISIBILITY SILGenFunction /// The SILModuleConventions for this SIL module. SILModuleConventions silConv; + bool useLoweredAddresses() const { return silConv.useLoweredAddresses(); } + /// The DeclContext corresponding to the function currently being emitted. DeclContext * const FunctionDC; diff --git a/lib/SILOptimizer/Mandatory/AddressLowering.cpp b/lib/SILOptimizer/Mandatory/AddressLowering.cpp index 1e1a3cd7539d8..de04331eb6c71 100644 --- a/lib/SILOptimizer/Mandatory/AddressLowering.cpp +++ b/lib/SILOptimizer/Mandatory/AddressLowering.cpp @@ -1527,13 +1527,15 @@ void AddressLowering::runOnFunction(SILFunction *F) { /// The entry point to this function transformation. void AddressLowering::run() { - if (getModule()->getOptions().EnableSILOpaqueValues) { - for (auto &F : *getModule()) - runOnFunction(&F); + if (getModule()->useLoweredAddresses()) + return; + + for (auto &F : *getModule()) { + runOnFunction(&F); } - // Set the SIL state before the PassManager has a chance to run + // Update the SILModule before the PassManager has a chance to run // verification. - getModule()->setStage(SILStage::Lowered); + getModule()->setLoweredAddresses(true); } SILTransform *swift::createAddressLowering() { return new AddressLowering(); } diff --git a/lib/SILOptimizer/PassManager/PassPipeline.cpp b/lib/SILOptimizer/PassManager/PassPipeline.cpp index 0401dc5c780c3..e4c384bc95edc 100644 --- a/lib/SILOptimizer/PassManager/PassPipeline.cpp +++ b/lib/SILOptimizer/PassManager/PassPipeline.cpp @@ -87,12 +87,14 @@ static void addModulePrinterPipeline(SILPassPipelinePlan &plan, static void addMandatoryDebugSerialization(SILPassPipelinePlan &P) { P.startPipeline("Mandatory Debug Serialization"); + P.addAddressLowering(); P.addOwnershipModelEliminator(); P.addMandatoryInlining(); } static void addOwnershipModelEliminatorPipeline(SILPassPipelinePlan &P) { P.startPipeline("Ownership Model Eliminator"); + P.addAddressLowering(); P.addOwnershipModelEliminator(); } @@ -113,6 +115,7 @@ static void addDefiniteInitialization(SILPassPipelinePlan &P) { static void addMandatoryDiagnosticOptPipeline(SILPassPipelinePlan &P) { P.startPipeline("Mandatory Diagnostic Passes + Enabling Optimization Passes"); P.addSILGenCleanup(); + P.addAddressLowering(); P.addDiagnoseInvalidEscapingCaptures(); P.addDiagnoseStaticExclusivity(); P.addNestedSemanticFunctionCheck(); @@ -796,11 +799,10 @@ static void addSILDebugInfoGeneratorPipeline(SILPassPipelinePlan &P) { SILPassPipelinePlan SILPassPipelinePlan::getLoweringPassPipeline(const SILOptions &Options) { SILPassPipelinePlan P(Options); - P.startPipeline("Address Lowering"); + P.startPipeline("Lowering"); P.addLowerHopToActor(); // FIXME: earlier for more opportunities? 
P.addOwnershipModelEliminator(); P.addIRGenPrepare(); - P.addAddressLowering(); return P; } @@ -913,7 +915,7 @@ SILPassPipelinePlan::getOnonePassPipeline(const SILOptions &Options) { // depend on other passes needed for diagnostics). Thus we can run them later // and avoid having SourceKit run these passes when just emitting diagnostics // in the editor. - P.startPipeline("non-Diagnostic Enabling Mandatory Optimizations"); + P.startPipeline("Non-Diagnostic Mandatory Optimizations"); P.addForEachLoopUnroll(); P.addMandatoryCombine(); diff --git a/lib/SILOptimizer/PassManager/Passes.cpp b/lib/SILOptimizer/PassManager/Passes.cpp index 04c4dc612fa90..05aba1c411abc 100644 --- a/lib/SILOptimizer/PassManager/Passes.cpp +++ b/lib/SILOptimizer/PassManager/Passes.cpp @@ -213,7 +213,7 @@ void swift::runSILLoweringPasses(SILModule &Module) { SILPassPipelinePlan::getLoweringPassPipeline(opts), /*isMandatory*/ true); - assert(Module.getStage() == SILStage::Lowered); + Module.setStage(SILStage::Lowered); } /// Registered briged pass run functions. From bd803b91a99f9bff550b2040c1c3a78a0d45a325 Mon Sep 17 00:00:00 2001 From: Andrew Trick Date: Sun, 28 Nov 2021 20:20:58 -0800 Subject: [PATCH 04/29] Update and reimplement AddressLowering pass (for SIL opaque values). Merge the AddressLowering pass from its old development branch and update it so we can begin incrementally enabling it under a flag. This has been reimplemented for simplicity. There's no point in looking at the old code. --- include/swift/SIL/SILBuilder.h | 7 +- .../Mandatory/AddressLowering.cpp | 3714 ++++++++++++----- lib/SILOptimizer/Mandatory/AddressLowering.h | 282 ++ lib/SILOptimizer/Mandatory/CMakeLists.txt | 1 + .../Mandatory/PhiStorageOptimizer.cpp | 237 ++ .../Mandatory/PhiStorageOptimizer.h | 51 + test/IRGen/opaque_values_irgen.sil | 6 +- test/SIL/Parser/opaque_values_parse.sil | 2 +- .../Serialization/opaque_values_serialize.sil | 2 +- .../opaque_use_verifier.sil | 2 +- test/SILOptimizer/address_lowering.sil | 1142 +++-- test/SILOptimizer/address_lowering_phi.sil | 440 ++ test/SILOptimizer/address_projection.sil | 444 -- test/SILOptimizer/copy_propagation_opaque.sil | 2 +- test/SILOptimizer/opaque_values_mandatory.sil | 17 +- test/SILOptimizer/opaque_values_opt.sil | 19 +- test/SILOptimizer/specialize_opaque.sil | 2 +- test/SILOptimizer/specialize_opaque_ossa.sil | 2 +- test/sil-passpipeline-dump/basic.test-sh | 2 +- 19 files changed, 4579 insertions(+), 1795 deletions(-) create mode 100644 lib/SILOptimizer/Mandatory/AddressLowering.h create mode 100644 lib/SILOptimizer/Mandatory/PhiStorageOptimizer.cpp create mode 100644 lib/SILOptimizer/Mandatory/PhiStorageOptimizer.h create mode 100644 test/SILOptimizer/address_lowering_phi.sil delete mode 100644 test/SILOptimizer/address_projection.sil diff --git a/include/swift/SIL/SILBuilder.h b/include/swift/SIL/SILBuilder.h index 9a43437c39da0..5da1ae39cae7c 100644 --- a/include/swift/SIL/SILBuilder.h +++ b/include/swift/SIL/SILBuilder.h @@ -266,11 +266,10 @@ class SILBuilder { void clearInsertionPoint() { BB = nullptr; } /// setInsertionPoint - Set the insertion point. 
- void setInsertionPoint(SILBasicBlock *BB, SILBasicBlock::iterator InsertPt) { + void setInsertionPoint(SILBasicBlock *BB, SILBasicBlock::iterator insertPt) { this->BB = BB; - this->InsertPt = InsertPt; - if (InsertPt == BB->end()) - return; + this->InsertPt = insertPt; + assert(insertPt == BB->end() || insertPt->getParent() == BB); } /// setInsertionPoint - Set the insertion point to insert before the specified diff --git a/lib/SILOptimizer/Mandatory/AddressLowering.cpp b/lib/SILOptimizer/Mandatory/AddressLowering.cpp index de04331eb6c71..81efd270a1f6c 100644 --- a/lib/SILOptimizer/Mandatory/AddressLowering.cpp +++ b/lib/SILOptimizer/Mandatory/AddressLowering.cpp @@ -2,101 +2,114 @@ // // This source file is part of the Swift.org open source project // -// Copyright (c) 2014 - 2017 Apple Inc. and the Swift project authors +// Copyright (c) 2014 - 2022 Apple Inc. and the Swift project authors // Licensed under Apache License v2.0 with Runtime Library Exception // // See https://swift.org/LICENSE.txt for license information // See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors // -// This pass lowers SILTypes. On completion, the SILType of every SILValue is -// its SIL storage type. A SIL storage type is always an address type for values -// that require indirect storage at the LLVM IR level. Consequently, this pass -// is required for IRGen. It is a mandatory IRGen preparation pass (not a -// diagnostic pass). -// -// In the following text, items marked "[REUSE]" only apply to the proposed -// storage reuse optimization, which is not currently implemented. -// -// ## State -// -// A `valueStorageMap` maps each opaque SIL value to its storage -// information containing: -// -// - An ordinal representing the position of this instruction. -// -// - [REUSE] The identifier of the storage object. An optimized storage object -// may have multiple disjoint lifetimes. A storage object may also have -// subobjects. Each subobject has its own live range. When considering -// liveness of the subobject, one must also consider liveness of the -// parent object. -// -// - If this is a subobject projection, refer back to the value whose -// storage object will be the parent that this storage address is a -// projection of. -// -// - The storage address for this subobject. -// -// ## Step #1: Map opaque values -// -// Populate `valueStorageMap` in forward order (RPO), giving each opaque value -// an ordinal position. -// -// [REUSE] Assign a storage identifier to each opaque value. Optionally optimize -// storage by assigning multiple values the same identifier. -// -// ## Step #2: Allocate storage -// -// In reverse order (PO), allocate the parent storage object for each opaque -// value. -// -// [REUSE] If storage has already been allocated for the current live range, -// then simply reuse it. -// -// If the value's use composes a parent object from this value, and use's -// storage can be projected from, then mark the value's storage as a projection -// from the use value. [REUSE] Also inherit the use's storage identifier, and -// add an interval to the live range with the current projection path. -// -// A use can be projected from if its allocation is available at (dominates) -// this value and using the same storage over the interval from this value to -// the use does not overlap with the existing live range. -// -// Checking interference requires checking all operands that have been marked as -// projections. 
In the case of block arguments, it means checking the terminator -// operands of all predecessor blocks. -// -// [REUSE] Rather than checking all value operands, each live range will contain -// a set of intervals. Each interval will be associated with a projection path. -// -// Opaque value's that are the root of all projection paths now have their -// `storageAddress` assigned to an `alloc_stack` or argument. Opaque value's -// that are projections do not yet have a `storageAddress`. -// -// ## Step #3. Rewrite opaque values -// -// In forward order (RPO), rewrite each opaque value definition, and all its -// uses. This generally involves creating a new `_addr` variant of the -// instruction and obtaining the storage address from the `valueStorageMap`. -// -// If this value's storage is a projection of the value defined by its composing -// use, then first generate instructions to materialize the projection. This is -// a recursive process starting with the root of the projection path. -// -// A projection path will be materialized once, for the leaf subobject. When -// this happens, the `storageAddress` will be assigned for any intermediate -// projection paths. When those values are rewritten, their `storageAddress` -// will already be available. -// +//===----------------------------------------------------------------------===// +/// +/// This pass removes "opaque SILValues" by translating them into addressable +/// memory locations such as a stack locations. This is mandatory for IRGen. +/// +/// Lowering to LLVM IR requires each SILValue's type to be a valid "SIL storage +/// type". Opaque SILValues have address-only types. Address-only values require +/// indirect storage in LLVM, so their SIL storage type must be an address type. +/// +/// This pass should not introduce any semantic copies. Guaranteed values always +/// reuse the borrowed value's storage. This means that we SIL cannot allow +/// guaranteed opaque uses unless they are projections of the definition. In +/// particular, borrowed structs, tuples, and enums of address-only types are +/// not allowed. +/// +/// When owned values are consumed by phis, multiple storage locations are +/// required to avoid interfering with other phi operands. However, the value +/// never needs to be live in multiple storage locations a once. When the value +/// is consumed by a phi, either it's own storage is coalesced with the phi +/// storage (they have the same address), or the value is bitwise moved into the +/// phi's storage. +/// +/// ## Step #1: Map opaque values +/// +/// Populate a map from each opaque SILValue to its ValueStorage in forward +/// order (RPO). Each opaque value is mapped to an ordinal ID representing the +/// storage. Storage locations can now be optimized by remapping the values. +/// +/// ## Step #2: Allocate storage +/// +/// In reverse order (PO), allocate the parent storage object for each opaque +/// value. +/// +/// If the value is a subobject extraction (struct_extract, tuple_extract, +/// open_existential_value, unchecked_enum_data), then mark the value's storage +/// as a projection from the def's storage. +/// +/// If the value's use composes a parent object from this value (struct, tuple, +/// enum), and the use's storage dominates this value, then mark the value's +/// storage as a projection into the use's storage. +/// +/// ValueStorage projections can be chained. A non-projection ValueStorage is +/// the root of a tree of projections. 
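+///
+/// For illustration only (this example is not from the original comment; the
+/// type $T and the struct $Pair with field #Pair.first are hypothetical):
+///
+///   %pair = struct $Pair (%a : $T, %b : $T) // %a, %b: use projections into %pair
+///   %bor = begin_borrow %pair : $Pair
+///   %first = struct_extract %bor : $Pair, #Pair.first // def projection chain
+///
+/// Storage is allocated once for %pair (the root). %a and %b are materialized
+/// directly into that allocation, and %bor and %first derive their addresses
+/// from it rather than receiving their own alloc_stack.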
+/// +/// When allocating storage, each ValueStorage root has its `storageAddress` +/// assigned to an `alloc_stack` or an argument. Opaque values that are storage +/// projections are not mapped to a `storageAddress` at this point. That happens +/// during rewriting. +/// +/// After allocating storage for all non-phi opaque values, phi storage is +/// allocated. This is handled by a PhiStorageOptimizer that checks for +/// interference among the phi operands and reuses storage allocated to other +/// values. +/// +/// ## Step #3. Rewrite opaque values +/// +/// In forward order (RPO), rewrite each opaque value definition, and all its +/// uses. This generally involves creating a new `_addr` variant of the +/// instruction and obtaining the storage address from the `valueStorageMap`. +/// +/// If this value's storage is a def-projection (the value is used to compose an +/// aggregate), then first generate instructions to materialize the +/// projection. This is a recursive process starting with the root of the +/// projection path. +/// +/// A projection path will be materialized once for the leaf subobject. When +/// this happens, the `storageAddress` will be assigned for any intermediate +/// projection paths. When those values are rewritten, their `storageAddress` +/// will already be available. +/// +//===----------------------------------------------------------------------===// +/// +/// TODO: Much of the implementation complexity, including most of the general +/// helper routines, stems from handling calls with multiple return values as +/// tuples. Once those calls are properly represented as instructions with +/// multiple results, then the implementation complexity will fall away. See the +/// code tagged "TODO: Multi-Result". +/// +/// TODO: Some complexity stems from the SILPhiArgument type/opcode being used +/// for terminator results rather than phis. +/// //===----------------------------------------------------------------------===// #define DEBUG_TYPE "address-lowering" + +#include "PhiStorageOptimizer.h" +#include "swift/Basic/BlotSetVector.h" +#include "swift/Basic/Range.h" +#include "swift/SIL/BasicBlockUtils.h" #include "swift/SIL/DebugUtils.h" +#include "swift/SIL/OwnershipUtils.h" +#include "swift/SIL/PrettyStackTrace.h" +#include "swift/SIL/PrunedLiveness.h" #include "swift/SIL/SILArgument.h" #include "swift/SIL/SILBuilder.h" #include "swift/SIL/SILVisitor.h" #include "swift/SILOptimizer/Analysis/PostOrderAnalysis.h" #include "swift/SILOptimizer/PassManager/Transforms.h" +#include "swift/SILOptimizer/Utils/BasicBlockOptUtils.h" #include "swift/SILOptimizer/Utils/InstOptUtils.h" +#include "swift/SILOptimizer/Utils/InstructionDeleter.h" +#include "swift/SILOptimizer/Utils/StackNesting.h" #include "llvm/ADT/DenseMap.h" #include "llvm/ADT/SetVector.h" #include "llvm/Support/CommandLine.h" @@ -104,169 +117,406 @@ using namespace swift; using llvm::SmallSetVector; -using llvm::PointerIntPair; - -llvm::cl::opt - OptimizeOpaqueAddressLowering("optimize-opaque-address-lowering", - llvm::cl::init(false)); - -// Visit all call results. -// Stop when the visitor returns `false`. -static void visitCallResults(ApplySite apply, - llvm::function_ref visitor) { - // FIXME: this entire implementation only really works for ApplyInst. 
- auto applyInst = cast(apply); - if (applyInst->getType().is()) { - // TODO: MultiValueInstruction - for (auto *operand : applyInst->getUses()) { - if (auto extract = dyn_cast(operand->getUser())) - if (!visitor(extract)) - break; - } - } else - visitor(applyInst); + +/// Get a function's convention for Lowered SIL, even though the SIL stage is +/// still Canonical. +static SILFunctionConventions getLoweredFnConv(SILFunction *function) { + return SILFunctionConventions( + function->getLoweredFunctionType(), + SILModuleConventions::getLoweredAddressConventions( + function->getModule())); +} + +/// Get a call's function convention for Lowered SIL even though the SIL stage +/// is still Canonical. +static SILFunctionConventions getLoweredCallConv(ApplySite call) { + return SILFunctionConventions( + call.getSubstCalleeType(), + SILModuleConventions::getLoweredAddressConventions(call.getModule())); +} + +/// Invoke \p cleanup on all paths exiting a call. +static void +cleanupAfterCall(FullApplySite apply, + llvm::function_ref cleanup) { + switch (apply.getKind()) { + case FullApplySiteKind::ApplyInst: { + cleanup(std::next(apply.getInstruction()->getIterator())); + break; + } + case FullApplySiteKind::TryApplyInst: { + auto *tryApply = cast(apply.getInstruction()); + cleanup(tryApply->getNormalBB()->begin()); + cleanup(tryApply->getErrorBB()->begin()); + break; + } + case FullApplySiteKind::BeginApplyInst: { + // FIXME: Unimplemented + // + // This should be as simple as calling cleanup for all the end_applies. + llvm::report_fatal_error("Unimplemented coroutine"); + } + } } //===----------------------------------------------------------------------===// -// ValueStorageMap: Map Opaque/Resilient SILValues to abstract storage units. +// Multi-Result +// +// TODO: These helpers all compensate for the legacy representation of return +// values as tuples. Once calls are properly represented as multi-value +// instructions, this complexity all goes away. +// +// Calls are currently SILValues, but when the result type is a tuple, the call +// value does not represent a real value with storage. This is a bad situation +// for address lowering because there's no way to tell from any given value +// whether its legal to assign storage to that value. As a result, the +// implementation of call lowering doesn't fall out naturally from the algorithm +// that lowers values to storage. //===----------------------------------------------------------------------===// -namespace { -struct ValueStorage { - enum { IsProjectionMask = 0x1, IsRewrittenMask = 0x2 }; - PointerIntPair projectionAndFlags; - - /// The final address of this storage unit after rewriting the SIL. - /// For values linked to their own storage, this is set during storage - /// allocation. For projections, it is only set after instruction rewriting. - SILValue storageAddress; +/// If \p pseudoResult has multiple results, return the destructure. +static DestructureTupleInst *getCallMultiResult(SILValue pseudoResult) { + if (pseudoResult->getType().is()) { + if (auto *use = pseudoResult->getSingleUse()) + return cast(use->getUser()); - bool isProjection() const { - return projectionAndFlags.getInt() & IsProjectionMask; + assert(pseudoResult->use_empty() && "pseudo result can't be used"); } - /// Return the operand the composes an aggregate from this value. 
- Operand *getComposedOperand() const { - assert(isProjection()); - return projectionAndFlags.getPointer(); + return nullptr; +} + +/// \p destructure is the pseudo result of a multi-result call. +/// Visit all real call results. Stop when the visitor returns `false`. +static bool visitCallMultiResults( + DestructureTupleInst *destructure, SILFunctionConventions fnConv, + llvm::function_ref visitor) { + assert(fnConv.getNumDirectSILResults() == destructure->getNumResults()); + + auto resultIter = destructure->getAllResults().begin(); + for (auto resultInfo : fnConv.getDirectSILResults()) { + if (!visitor(*resultIter++, resultInfo)) + return false; } - void setComposedOperand(Operand *oper) { - projectionAndFlags.setPointer(oper); - projectionAndFlags.setInt(projectionAndFlags.getInt() | IsProjectionMask); + return true; +} + +/// Visit all real call results. Stop when the visitor returns `false`. +static bool +visitCallResults(FullApplySite apply, + llvm::function_ref visitor) { + auto fnConv = apply.getSubstCalleeConv(); + SILValue pseudoResult = apply.getPseudoResult(); + if (auto *destructure = getCallMultiResult(pseudoResult)) { + return visitCallMultiResults(destructure, fnConv, visitor); } + return visitor(pseudoResult, *fnConv.getDirectSILResults().begin()); +} - bool isRewritten() const { - if (projectionAndFlags.getInt() & IsRewrittenMask) { - assert(storageAddress); - return true; - } +/// Return true if the given value is either a "fake" tuple that represents all +/// of a call's results or an empty tuple of no results. This may return true +/// for either tuple_inst or a block argument. +static bool isPseudoCallResult(SILValue value) { + if (isa(value)) + return value->getType().is(); + + auto *bbArg = dyn_cast(value); + if (!bbArg) return false; + + auto *term = bbArg->getTerminatorForResult(); + if (!term) + return false; + + return isa(term) && bbArg->getType().is(); +} + +/// Return true if this is a pseudo-return value. +static bool isPseudoReturnValue(SILValue value) { + if (auto *tuple = dyn_cast(value)) { + Operand *singleUse = tuple->getSingleUse(); + return singleUse && isa(singleUse->getUser()); } - void markRewritten() { - projectionAndFlags.setInt(projectionAndFlags.getInt() | IsRewrittenMask); - } -}; + return false; +} -/// Map each opaque/resilient SILValue to its abstract storage. -/// O(1) membership test. -/// O(n) iteration in RPO order. -class ValueStorageMap { - typedef std::vector> ValueVector; - // Hash of values to ValueVector indices. - typedef llvm::DenseMap ValueHashMap; +/// Return the value representing storage of an address-only or indirectly +/// returned tuple element. For real tuples, return the tuple value itself. If +/// the tuple is a pseudo-return value, return the indirect function argument +/// for the corresponding result after lowering. +/// +/// bb0(%loweredIndirectResult : $*T, ...) +/// .... +/// %tuple = tuple(..., %operand, ...) +/// return %tuple +/// +/// When called on %operand, return %loweredIndirectResult. +/// +/// Precondition: \p operand's user is a TupleInst +/// +/// Precondition: indirect function arguments have already been rewritten +/// (see insertIndirectReturnArgs()). 
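+///
+/// Worked example (hypothetical signature): if the lowered results are
+/// (Int, T, Float, U) and only the address-only T and U become indirect, then
+/// the tuple operand at resultIdx 1 (T) maps to indirect result argument 0 and
+/// the operand at resultIdx 3 (U) maps to indirect result argument 1, which is
+/// what the counting loop below computes.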
+static SILValue getTupleStorageValue(Operand *operand) { + auto *tuple = cast(operand->getUser()); + Operand *singleUse = tuple->getSingleUse(); + if (!singleUse || !isa(singleUse->getUser())) + return tuple; + + unsigned resultIdx = tuple->getElementIndex(operand); + + SILFunction *function = tuple->getFunction(); + auto loweredFnConv = getLoweredFnConv(function); + assert(loweredFnConv.getResults().size() == tuple->getElements().size()); + + unsigned indirectResultIdx = 0; + for (SILResultInfo result : loweredFnConv.getResults().slice(0, resultIdx)) { + if (loweredFnConv.isSILIndirect(result)) + ++indirectResultIdx; + } + // Cannot call F->getIndirectSILResults here because that API uses the + // function conventions before address lowering. + return function->getArguments()[indirectResultIdx]; +} - ValueVector valueVector; - ValueHashMap valueHashMap; +/// Return the value representing storage for a single return value. +/// +/// bb0(%loweredIndirectResult : $*T, ...) +/// return %oper +/// +/// For %oper, return %loweredIndirectResult +static SILValue getSingleReturnValue(Operand *operand) { + assert(!isPseudoReturnValue(operand->get())); + + auto *function = operand->getParentFunction(); + auto loweredFnConv = getLoweredFnConv(function); + assert(loweredFnConv.getNumIndirectSILResults() == 1); + (void)loweredFnConv; + + // Cannot call getIndirectSILResults here because that API uses the + // function conventions before address lowering. + return function->getArguments()[0]; +} -public: - bool empty() const { return valueVector.empty(); } +//===----------------------------------------------------------------------===// +// ValueStorageMap +// +// Map Opaque SILValues to abstract storage units. +//===----------------------------------------------------------------------===// - void clear() { - valueVector.clear(); - valueHashMap.clear(); - } +/// Check if this is a copy->store pair. If so, the copy storage will be +/// projected from the source, and the copy semantics will be handled by +/// UseRewriter::visitStoreInst. 
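+///
+/// A minimal sketch of the pattern (types hypothetical):
+///
+///   %copy = copy_value %val : $T
+///   store %copy to [init] %addr : $*T
+///
+/// Instead of allocating separate storage for %copy, its storage is treated as
+/// a projection of %val's storage, and the store is later rewritten at the
+/// address level (e.g. as a copy_addr out of %val's storage).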
+static bool isStoreCopy(SILValue value) { + auto *copyInst = dyn_cast(value); + if (!copyInst) + return false; - ValueVector::iterator begin() { return valueVector.begin(); } + if (!copyInst->hasOneUse()) + return false; - ValueVector::iterator end() { return valueVector.end(); } + auto *user = value->getSingleUse()->getUser(); + return isa(user) || isa(user); +} - ValueVector::reverse_iterator rbegin() { return valueVector.rbegin(); } +ValueStorage &ValueStorageMap::insertValue(SILValue value) { + assert(!stableStorage && "cannot grow stable storage map"); - ValueVector::reverse_iterator rend() { return valueVector.rend(); } + auto hashResult = + valueHashMap.insert(std::make_pair(value, valueVector.size())); + (void)hashResult; + assert(hashResult.second && "SILValue already mapped"); - bool contains(SILValue value) const { - return valueHashMap.find(value) != valueHashMap.end(); - } + valueVector.emplace_back(value, ValueStorage()); - unsigned getOrdinal(SILValue value) { - auto hashIter = valueHashMap.find(value); - assert(hashIter != valueHashMap.end() && "Missing SILValue"); - return hashIter->second; - } + return valueVector.back().storage; +} - ValueStorage &getStorage(SILValue value) { - return valueVector[getOrdinal(value)].second; - } +void ValueStorageMap::replaceValue(SILValue oldValue, SILValue newValue) { + auto pos = valueHashMap.find(oldValue); + assert(pos != valueHashMap.end()); + unsigned ordinal = pos->second; + valueHashMap.erase(pos); - // This must be called in RPO order. - ValueStorage &insertValue(SILValue value) { - auto hashResult = - valueHashMap.insert(std::make_pair(value, valueVector.size())); - (void)hashResult; - assert(hashResult.second && "SILValue already mapped"); + auto hashResult = valueHashMap.insert(std::make_pair(newValue, ordinal)); + (void)hashResult; + assert(hashResult.second && "SILValue already mapped"); - valueVector.emplace_back(value, ValueStorage()); + valueVector[ordinal].value = newValue; +} - return valueVector.back().second; +void ValueStorageMap::dump() { + llvm::dbgs() << "ValueStorageMap:\n"; + for (unsigned ordinal : indices(valueVector)) { + auto &valStoragePair = valueVector[ordinal]; + llvm::dbgs() << "value: "; + valStoragePair.value->dump(); + auto &storage = valStoragePair.storage; + if (storage.isUseProjection) { + llvm::dbgs() << " use projection: "; + if (!storage.isRewritten) + valueVector[storage.projectedStorageID].value->dump(); + } else if (storage.isDefProjection) { + llvm::dbgs() << " def projection: "; + if (!storage.isRewritten) + valueVector[storage.projectedStorageID].value->dump(); + } + if (storage.storageAddress) { + llvm::dbgs() << " storage: "; + storage.storageAddress->dump(); + } } -}; -} // end anonymous namespace +} //===----------------------------------------------------------------------===// -// AddressLoweringState: shared state for the pass's analysis and transforms. +// AddressLoweringState +// +// Shared state for the pass's analysis and transforms. //===----------------------------------------------------------------------===// namespace { +class PhiRewriter; + struct AddressLoweringState { - SILFunction *F; + SILFunction *function; SILFunctionConventions loweredFnConv; // Dominators remain valid throughout this pass. DominanceInfo *domInfo; - // All opaque values and associated storage. + InstructionDeleter deleter; + + // All opaque values mapped to their associated storage. ValueStorageMap valueStorageMap; + // All call sites with formally indirect SILArgument or SILResult conventions. 
- // Calls are removed from the set when rewritten. - SmallSetVector indirectApplies; + // + // Applies with indirect results are removed as they are rewritten. Applies + // with only indirect arguments are rewritten in a post-pass, only after all + // parameters are rewritten. + SmallBlotSetVector indirectApplies; + // All function-exiting terminators (return or throw instructions). - SmallVector returnInsts; - // Delete these instructions after performing transformations. - // They must not have any remaining users. - SmallSetVector instsToDelete; - - AddressLoweringState(SILFunction *F, DominanceInfo *domInfo) - : F(F), - loweredFnConv(F->getLoweredFunctionType(), - SILModuleConventions::getLoweredAddressConventions(F->getModule())), + SmallVector exitingInsts; + + // Copies from a phi's operand storage to the phi storage. These logically + // occur on the CFG edge. Keep track of them to resolve anti-dependencies. + std::unique_ptr phiRewriter; + + AddressLoweringState(SILFunction *function, DominanceInfo *domInfo) + : function(function), loweredFnConv(getLoweredFnConv(function)), domInfo(domInfo) {} - bool isDead(SILInstruction *inst) const { return instsToDelete.count(inst); } + SILModule *getModule() const { return &function->getModule(); } - void markDead(SILInstruction *inst) { -#ifndef NDEBUG - for (auto result : inst->getResults()) - for (Operand *use : result->getUses()) - assert(instsToDelete.count(use->getUser())); -#endif - instsToDelete.insert(inst); + SILLocation genLoc() const { + return RegularLocation::getAutoGeneratedLocation(); + } + + // Get a builder that uses function conventions for the Lowered SIL stage even + // though the SIL stage hasn't explicitly changed yet. + SILBuilder getBuilder(SILBasicBlock::iterator insertPt) const { + return getBuilder(insertPt, &*insertPt); + } + SILBuilder getTermBuilder(TermInst *term) const { + return getBuilder(term->getParent()->end(), term); + } + + PhiRewriter &getPhiRewriter(); + + SILValue getMaterializedAddress(SILValue origValue) const { + return valueStorageMap.getStorage(origValue).getMaterializedAddress(); + } + +protected: + SILBuilder getBuilder(SILBasicBlock::iterator insertPt, + SILInstruction *originalInst) const { + SILBuilder builder(originalInst->getParent(), insertPt); + builder.setSILConventions( + SILModuleConventions::getLoweredAddressConventions( + builder.getModule())); + builder.setCurrentDebugScope(originalInst->getDebugScope()); + return builder; } }; } // end anonymous namespace //===----------------------------------------------------------------------===// -// OpaqueValueVisitor: Map OpaqueValues to ValueStorage. +// OpaqueValueVisitor +// +// Map opaque values to ValueStorage. //===----------------------------------------------------------------------===// +/// Before populating the ValueStorageMap, replace each value-typed argument to +/// the current function with an address-typed argument by inserting a temporary +/// load instruction. +static void convertIndirectFunctionArgs(AddressLoweringState &pass) { + // Insert temporary argument loads at the top of the function. 
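+  //
+  // Sketch of the rewrite (illustrative only; $T stands for any formally
+  // indirect parameter type):
+  //
+  //   bb0(%0 : @owned $T):           // before
+  //
+  //   bb0(%0 : $*T):                 // after
+  //     %1 = load [take] %0 : $*T
+  //
+  // Uses of the old argument are redirected to the load. If $T is
+  // address-only, the load is recorded in valueStorageMap with %0 as its
+  // already-rewritten storage address.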
+ SILBuilder argBuilder = + pass.getBuilder(pass.function->getEntryBlock()->begin()); + + auto fnConv = pass.function->getConventions(); + unsigned argIdx = fnConv.getSILArgIndexOfFirstParam(); + for (SILParameterInfo param : + pass.function->getLoweredFunctionType()->getParameters()) { + + if (param.isFormalIndirect() && !fnConv.isSILIndirect(param)) { + SILArgument *arg = pass.function->getArgument(argIdx); + SILType addrType = arg->getType().getAddressType(); + LoadInst *loadArg = argBuilder.createTrivialLoadOr( + SILValue(arg).getLoc(), SILUndef::get(addrType, *pass.function), + LoadOwnershipQualifier::Take); + + arg->replaceAllUsesWith(loadArg); + assert(!pass.valueStorageMap.contains(arg)); + + arg = arg->getParent()->replaceFunctionArgument( + arg->getIndex(), addrType, OwnershipKind::None, arg->getDecl()); + + loadArg->setOperand(arg); + + // Indirect calling convention may be used for loadable types. In that + // case, generating the argument loads is sufficient. + if (addrType.isAddressOnly(*pass.function)) { + auto &storage = pass.valueStorageMap.insertValue(loadArg); + storage.storageAddress = arg; + storage.isRewritten = true; + } + } + ++argIdx; + } + assert(argIdx + == fnConv.getSILArgIndexOfFirstParam() + fnConv.getNumSILArguments()); +} + +/// Before populating the ValueStorageMap, insert function arguments for any +/// @out result type. Return the number of indirect result arguments added. +static unsigned insertIndirectReturnArgs(AddressLoweringState &pass) { + auto &astCtx = pass.getModule()->getASTContext(); + auto typeCtx = pass.function->getTypeExpansionContext(); + auto *declCtx = pass.function->getDeclContext(); + + unsigned argIdx = 0; + for (auto resultTy : pass.loweredFnConv.getIndirectSILResultTypes(typeCtx)) { + auto bodyResultTy = pass.function->mapTypeIntoContext(resultTy); + auto var = new (astCtx) ParamDecl( + SourceLoc(), SourceLoc(), astCtx.getIdentifier("$return_value"), + SourceLoc(), astCtx.getIdentifier("$return_value"), declCtx); + + SILFunctionArgument *funcArg = + pass.function->begin()->insertFunctionArgument( + argIdx, bodyResultTy.getAddressType(), OwnershipKind::None, var); + // Insert function results into valueStorageMap so that the caller storage + // can be projected onto values inside the function as use projections. + auto &storage = pass.valueStorageMap.insertValue(funcArg); + // This is the only case where a value defines its own storage. + storage.storageAddress = funcArg; + storage.isRewritten = true; + + ++argIdx; + } + assert(argIdx == pass.loweredFnConv.getNumIndirectSILResults()); + return argIdx; +} + namespace { /// Collect all opaque/resilient values, inserting them in `valueStorageMap` in /// RPO order. @@ -282,46 +532,53 @@ class OpaqueValueVisitor { public: explicit OpaqueValueVisitor(AddressLoweringState &pass) - : pass(pass), postorderInfo(pass.F) {} + : pass(pass), postorderInfo(pass.function) {} void mapValueStorage(); protected: - void visitApply(ApplySite applySite); + void checkForIndirectApply(FullApplySite applySite); void visitValue(SILValue value); + void canonicalizeReturnValues(); }; } // end anonymous namespace -/// Top-level entry: Populate `valueStorageMap`, `indirectResults`, and -/// `indirectOperands`. +/// Top-level entry. Populates AddressLoweringState's `valueStorageMap`, +/// `indirectApplies`, and `exitingInsts`. /// /// Find all Opaque/Resilient SILValues and add them /// to valueStorageMap in RPO. 
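+///
+/// For example (a hedged sketch; %f and $T are hypothetical), given
+///
+///   bb0(%0 : $*T):
+///     %1 = load [take] %0 : $*T
+///     %2 = apply %f() : $@convention(thin) () -> @out T
+///     return %2 : $T
+///
+/// the map ends up containing %1 (inserted earlier, when the indirect argument
+/// was rewritten) and %2, in RPO order; the apply is also added to
+/// `indirectApplies` and the return to `exitingInsts`.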
void OpaqueValueVisitor::mapValueStorage() { - for (auto *BB : postorderInfo.getReversePostOrder()) { - if (BB->getTerminator()->isFunctionExiting()) - pass.returnInsts.push_back(BB->getTerminator()); + for (auto *block : postorderInfo.getReversePostOrder()) { + if (block->getTerminator()->isFunctionExiting()) + pass.exitingInsts.push_back(block->getTerminator()); // Opaque function arguments have already been replaced. - if (BB != pass.F->getEntryBlock()) { - for (auto argI = BB->args_begin(), argEnd = BB->args_end(); - argI != argEnd; ++argI) { - visitValue(*argI); + if (block != pass.function->getEntryBlock()) { + for (auto *arg : block->getArguments()) { + if (isPseudoCallResult(arg)) + continue; + + visitValue(arg); } } - for (auto &II : *BB) { - if (auto apply = ApplySite::isa(&II)) - visitApply(apply); + for (auto &inst : *block) { + if (auto apply = FullApplySite::isa(&inst)) + checkForIndirectApply(apply); + + for (auto result : inst.getResults()) { + if (isPseudoCallResult(result) || isPseudoReturnValue(result)) + continue; - for (auto result : II.getResults()) visitValue(result); + } } } + canonicalizeReturnValues(); } -/// Populate `indirectApplies` and insert this apply in `valueStorageMap` if -/// the call's non-tuple result is returned indirectly. -void OpaqueValueVisitor::visitApply(ApplySite applySite) { +/// Populate `indirectApplies`. +void OpaqueValueVisitor::checkForIndirectApply(FullApplySite applySite) { auto calleeConv = applySite.getSubstCalleeConv(); unsigned calleeArgIdx = applySite.getCalleeArgIndexOfFirstAppliedArg(); for (Operand &operand : applySite.getArgumentOperands()) { @@ -329,265 +586,602 @@ void OpaqueValueVisitor::visitApply(ApplySite applySite) { auto argConv = calleeConv.getSILArgumentConvention(calleeArgIdx); if (argConv.isIndirectConvention()) { pass.indirectApplies.insert(applySite); + return; } } ++calleeArgIdx; } - - if (applySite.getSubstCalleeType()->hasIndirectFormalResults()) { + if (applySite.getSubstCalleeType()->hasIndirectFormalResults()) pass.indirectApplies.insert(applySite); - if (!applySite.getType().is()) - pass.valueStorageMap.insertValue(cast(applySite)); - - return; - } } -/// If `value` is address-only add it to the `valueStorageMap`. +/// If `value` is address-only, add it to the `valueStorageMap`. void OpaqueValueVisitor::visitValue(SILValue value) { - if (value->getType().isObject() - && value->getType().isAddressOnly(*pass.F)) { - if (pass.valueStorageMap.contains(value)) { - assert(isa( - pass.valueStorageMap.getStorage(value).storageAddress)); - return; - } - pass.valueStorageMap.insertValue(value); + if (!value->getType().isObject() + || !value->getType().isAddressOnly(*pass.function)) { + return; + } + if (pass.valueStorageMap.contains(value)) { + // Function arguments are already mapped from loads. + assert(isa( + pass.valueStorageMap.getStorage(value).storageAddress)); + return; } + pass.valueStorageMap.insertValue(value); } -//===----------------------------------------------------------------------===// -// OpaqueStorageAllocation: Generate alloc_stack and address projections for all -// abstract storage locations. -//===----------------------------------------------------------------------===// - -namespace { -/// Allocate storage on the stack for every opaque value defined in this -/// function in RPO order. If the definition is an argument of this function, -/// simply replace the function argument with an address representing the -/// caller's storage. 
-/// -/// TODO: shrink lifetimes by inserting alloc_stack at the dominance LCA and -/// finding the lifetime boundary with a simple backward walk from uses. -class OpaqueStorageAllocation { - AddressLoweringState &pass; +// Canonicalize returned values. +// +// Given: +// %t = def : $(T, T) +// use %t : $(T, T) +// return %t : $(T, T) +// +// Produce: +// %t = def +// use %t : $(T, T) +// (%e0, %e1) = destructure_tuple %t : $(T, T) +// %r = tuple (%e0 : $T, %e1 : $T) +// return %r : $(T, T) +// +// TODO: Multi-Result. This should be a standard OSSA canonicalization until +// returns are fixed to take multiple operands. +void OpaqueValueVisitor::canonicalizeReturnValues() { + auto numResults = pass.function->getConventions().getNumDirectSILResults(); + if (numResults < 2) + return; -public: - explicit OpaqueStorageAllocation(AddressLoweringState &pass) : pass(pass) {} + for (SILInstruction *termInst : pass.exitingInsts) { + auto *returnInst = dyn_cast(termInst); + if (!returnInst) { + assert(isa(termInst)); + continue; + } + SILValue oldResult = returnInst->getOperand(); + if (oldResult.getOwnershipKind() != OwnershipKind::Owned) + continue; - void allocateOpaqueStorage(); + assert(oldResult->getType().is()); + if (oldResult->hasOneUse()) { + assert(isPseudoReturnValue(oldResult)); + continue; + } + // There is another nonconsuming use of the returned tuple. + SILBuilderWithScope returnBuilder(returnInst); + auto loc = pass.genLoc(); + auto *destructure = returnBuilder.createDestructureTuple(loc, oldResult); + + SmallVector results; + results.reserve(numResults); + for (auto result : destructure->getResults()) { + // Update the value storage map for new instructions. Since they are + // created at function exits, they are naturally in RPO order. + this->visitValue(result); + results.push_back(result); + } + auto *newResult = returnBuilder.createTuple( + pass.genLoc(), oldResult->getType(), results, OwnershipKind::Owned); + returnInst->setOperand(newResult); -protected: - void convertIndirectFunctionArgs(); - unsigned insertIndirectReturnArgs(); - bool canProjectFrom(SingleValueInstruction *innerVal, - SILInstruction *composingUse); - void allocateForValue(SILValue value, ValueStorage &storage); -}; -} // end anonymous namespace + assert(isPseudoReturnValue(newResult)); + } +} -/// Top-level entry point: allocate storage for all opaque/resilient values. -void OpaqueStorageAllocation::allocateOpaqueStorage() { +/// Top-level entry point. +/// +/// Prepare the SIL by rewriting function arguments and returns. +/// Initialize the ValueStorageMap with an entry for each opaque value in the +/// function. +static void prepareValueStorage(AddressLoweringState &pass) { // Fixup this function's argument types with temporary loads. - convertIndirectFunctionArgs(); + convertIndirectFunctionArgs(pass); // Create a new function argument for each indirect result. - insertIndirectReturnArgs(); + insertIndirectReturnArgs(pass); // Populate valueStorageMap. OpaqueValueVisitor(pass).mapValueStorage(); - - // Create an AllocStack for every opaque value defined in the function. Visit - // values in post-order to create storage for aggregates before subobjects. - for (auto &valueStorageI : llvm::reverse(pass.valueStorageMap)) - allocateForValue(valueStorageI.first, valueStorageI.second); } -/// Replace each value-typed argument to the current function with an -/// address-typed argument by inserting a temporary load instruction. 
-void OpaqueStorageAllocation::convertIndirectFunctionArgs() { - // Insert temporary argument loads at the top of the function. - SILBuilder argBuilder(pass.F->getEntryBlock()->begin()); - argBuilder.setSILConventions( - SILModuleConventions::getLoweredAddressConventions(pass.F->getModule())); - - auto fnConv = pass.F->getConventions(); - unsigned argIdx = fnConv.getSILArgIndexOfFirstParam(); - for (SILParameterInfo param : - pass.F->getLoweredFunctionType()->getParameters()) { - - if (param.isFormalIndirect() && !fnConv.isSILIndirect(param)) { - SILArgument *arg = pass.F->getArgument(argIdx); - SILType addrType = arg->getType().getAddressType(); +//===----------------------------------------------------------------------===// +// Storage Projection +// +// These queries determine whether storage for a SILValue can be projected from +// its operands or into its uses. +// ===---------------------------------------------------------------------===// - LoadInst *loadArg = argBuilder.createLoad( - RegularLocation(const_cast(arg->getDecl())), - SILUndef::get(addrType, *pass.F), - LoadOwnershipQualifier::Unqualified); +/// Return the operand whose source is an aggregate value that is extracted +/// into the given subobject, \p value. Or return nullptr. +/// +/// Def-projection oracle: The answer must be consistent across both +/// OpaqueStorageAllocation and AddressMaterialization. +/// +/// Invariant: +/// `getProjectedDefOperand(value) != nullptr` +/// if-and-only-if +/// `pass.valueStorageMap.getStorage(value).isDefProjection` +/// +/// Invariant: if \p value has guaranteed ownership, this must return a nonnull +/// value. +static Operand *getProjectedDefOperand(SILValue value) { + switch (value->getKind()) { + default: + return nullptr; + + case ValueKind::BeginBorrowInst: + return &cast(value)->getOperandRef(); + + case ValueKind::CopyValueInst: + if (isStoreCopy(value)) + return &cast(value)->getOperandRef(); + + return nullptr; + + case ValueKind::MultipleValueInstructionResult: { + SILInstruction *destructure = + cast(value)->getParent(); + switch (destructure->getKind()) { + default: + return nullptr; + case SILInstructionKind::DestructureStructInst: + return &destructure->getOperandRef(0); + case SILInstructionKind::DestructureTupleInst: { + auto *oper = &destructure->getOperandRef(0); + if (isPseudoCallResult(oper->get())) + return nullptr; + + return oper; + } + } + } + case ValueKind::TupleExtractInst: { + auto *TEI = cast(value); + // TODO: Multi-Result: TupleExtract from an apply are handled specially + // until we have multi-result calls. Force them to allocate storage. + if (ApplySite::isa(TEI->getOperand())) + return nullptr; - arg->replaceAllUsesWith(loadArg); - assert(!pass.valueStorageMap.contains(arg)); + LLVM_FALLTHROUGH; + } + case ValueKind::StructExtractInst: + case ValueKind::OpenExistentialValueInst: + case ValueKind::OpenExistentialBoxValueInst: + assert(value.getOwnershipKind() == OwnershipKind::Guaranteed); + return &cast(value)->getAllOperands()[0]; + } +} - arg = arg->getParent()->replaceFunctionArgument( - arg->getIndex(), addrType, OwnershipKind::None, arg->getDecl()); +/// Return the operand of the reused storage. These operations are always +/// rewritten by the use rewriter and destructively reuse their operand's +/// storage. If the result is address-only, then the operand must be +/// address-only (otherwise, the operand would not necessarilly have storage). 
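+///
+/// For example (an illustrative sketch with a hypothetical payload type $T),
+/// the payload argument of a switch_enum reuses the enum's storage:
+///
+///   switch_enum %e : $Optional<T>, case #Optional.some!enumelt: bb1, ...
+///   bb1(%payload : @owned $T): // %payload destructively reuses %e's storage
+///
+/// After rewriting, %payload's address is typically produced by
+/// unchecked_take_enum_data_addr on %e's storage address.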
+static Operand *getReusedStorageOperand(SILValue value) {
+  switch (value->getKind()) {
+  default:
+    break;
 
-    loadArg->setOperand(arg);
+  case ValueKind::OpenExistentialValueInst:
+  case ValueKind::OpenExistentialBoxValueInst:
+  case ValueKind::UncheckedEnumDataInst:
+    return &cast<SingleValueInstruction>(value)->getOperandRef(0);
 
-    if (addrType.isAddressOnly(*pass.F))
-      pass.valueStorageMap.insertValue(loadArg).storageAddress = arg;
+  case ValueKind::SILPhiArgument: {
+    if (auto *term = cast<SILPhiArgument>(value)->getTerminatorForResult()) {
+      if (auto *switchEnum = dyn_cast<SwitchEnumInst>(term)) {
+        return &switchEnum->getAllOperands()[0];
+      }
     }
-    ++argIdx;
+    break;
   }
-  assert(argIdx
-         == fnConv.getSILArgIndexOfFirstParam() + fnConv.getNumSILArguments());
-}
-
-/// Insert function arguments for any @out result type. Return the number of
-/// indirect result arguments added.
-unsigned OpaqueStorageAllocation::insertIndirectReturnArgs() {
-  auto &ctx = pass.F->getModule().getASTContext();
-  unsigned argIdx = 0;
-  for (auto resultTy : pass.loweredFnConv.getIndirectSILResultTypes(
-           pass.F->getTypeExpansionContext())) {
-    auto bodyResultTy = pass.F->mapTypeIntoContext(resultTy);
-    auto var = new (ctx)
-        ParamDecl(SourceLoc(), SourceLoc(),
-                  ctx.getIdentifier("$return_value"), SourceLoc(),
-                  ctx.getIdentifier("$return_value"),
-                  pass.F->getDeclContext());
-    var->setSpecifier(ParamSpecifier::InOut);
-
-    pass.F->begin()->insertFunctionArgument(
-        argIdx, bodyResultTy.getAddressType(), OwnershipKind::None, var);
-    ++argIdx;
   }
-  assert(argIdx == pass.loweredFnConv.getNumIndirectSILResults());
-  return argIdx;
+  return nullptr;
 }
 
-/// Is this operand composing an aggregate from a subobject, or simply
-/// forwarding the operand's value to storage defined elsewhere?
+/// If \p operand can project into its user, return the SILValue representing
+/// the user's storage. The user may compose an aggregate from its operands or
+/// forward its operands to arguments.
 ///
-/// TODO: Handle struct.
-/// TODO: Make this a visitor.
-bool OpaqueStorageAllocation::canProjectFrom(SingleValueInstruction *innerVal,
-                                             SILInstruction *composingUse) {
-  if (!OptimizeOpaqueAddressLowering)
-    return false;
-
-  SILValue composingValue;
-  switch (composingUse->getKind()) {
+/// TODO: Handle SwitchValueInst, CheckedCastValueBranchInst.
+static SILValue getProjectedUseValue(Operand *operand) {
+  auto *user = operand->getUser();
+  switch (user->getKind()) {
   default:
-    return false;
-  case SILInstructionKind::ApplyInst:
-    // @in operands never need their own storage since they are non-mutating
-    // uses. They simply reuse the storage allocated for their operand. So it
-    // wouldn't make sense to "project" out of the apply argument.
-    return false;
-  case SILInstructionKind::EnumInst:
-    composingValue = cast<EnumInst>(composingUse);
-    break;
-  case SILInstructionKind::InitExistentialValueInst: {
-    // Ensure that all opened archetypes are available at the inner value's
-    // definition.
-    auto *initExistential = cast<InitExistentialValueInst>(composingUse);
-    for (Operand &operand : initExistential->getTypeDependentOperands()) {
-      if (!pass.domInfo->properlyDominates(operand.get(), innerVal))
-        return false;
-    }
-    composingValue = initExistential;
     break;
-  }
+
+  // Structs and enums are straightforward compositions.
+  case SILInstructionKind::StructInst:
+  case SILInstructionKind::EnumInst:
+    return cast<SingleValueInstruction>(user);
+
+  // init_existential_value composes an existential value, but may depend on
+  // opened archetypes. The caller will need to check that storage dominates
+  // the opened types.
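The struct and enum compositions above follow the same pattern. As an illustration (hand-written SIL, not verbatim output of this pass), an operand that use-projects into an enum gets its address out of the enum's storage instead of a separate allocation:

  //   %payload = apply %f() : $() -> @out T
  //   %some = enum $Optional<T>, #Optional.some!enumelt, %payload : $T
  //
  // With %payload use-projecting into %some (storage %someAddr), its address
  // is materialized as:
  //   %payloadAddr = init_enum_data_addr %someAddr : $*Optional<T>,
  //                                      #Optional.some!enumelt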
+ case SILInstructionKind::InitExistentialValueInst: + return cast(user); + + // A tuple is either a composition or forwards its element through a return + // through function argument storage. Either way, its element can be a + // use projection. + case SILInstructionKind::TupleInst: + return getTupleStorageValue(operand); + + // Return instructions can project into the return value. case SILInstructionKind::ReturnInst: - return true; - case SILInstructionKind::StoreInst: { - if (cast(composingUse)->getSrc() == innerVal - && isa(innerVal)) { - return true; - } + return getSingleReturnValue(operand); + } + return SILValue(); +} + +//===----------------------------------------------------------------------===// +// OpaqueStorageAllocation +// +// Generate alloc_stack and address projections for abstract storage locations. +// ===---------------------------------------------------------------------===// + +// Record a storage projection from the source of the given operand into its +// use (e.g. struct_extract, tuple_extract, switch_enum). +void ValueStorageMap::recordDefProjection(Operand *oper, + SILValue projectedValue) { + auto &storage = getStorage(projectedValue); + storage.projectedStorageID = getOrdinal(oper->get()); + storage.isDefProjection = true; +} + +// Mark this operand as coalesced with \p useValue storage. +void ValueStorageMap::recordComposingUseProjection(Operand *oper, + SILValue userValue) { + auto &storage = getStorage(oper->get()); + assert(!storage.isAllocated()); + storage.projectedStorageID = getOrdinal(userValue); + storage.projectedOperandNum = oper->getOperandNumber(); + storage.isUseProjection = true; + + if (EnumDecl *enumDecl = userValue->getType().getEnumOrBoundGenericEnum()) { + storage.initializesEnum = true; + } + assert(!storage.isPhiProjection()); +} + +// Mark this phi operand as coalesced with the phi storage. +void ValueStorageMap::recordPhiUseProjection(Operand *operand, + SILPhiArgument *phi) { + assert(isa(operand->getUser())); + + auto &storage = getStorage(operand->get()); + assert(!storage.isAllocated()); + assert(storage.projectedOperandNum == ValueStorage::InvalidOper); + + storage.projectedStorageID = getOrdinal(phi); + storage.isUseProjection = true; + + assert(storage.isPhiProjection()); +} + +bool ValueStorageMap::isComposingUseProjection(Operand *oper) const { + auto hashPos = valueHashMap.find(oper->get()); + if (hashPos == valueHashMap.end()) + return false; + + auto &srcStorage = valueVector[hashPos->second].storage; + if (!srcStorage.isUseProjection) return false; + + return srcStorage.projectedOperandNum == oper->getOperandNumber(); +} + +namespace { +/// Allocate storage on the stack for every opaque value defined in this +/// function in postorder. If the definition is an argument of this function, +/// simply replace the function argument with an address representing the +/// caller's storage. +/// +/// TODO: shrink lifetimes by inserting alloc_stack at the dominance LCA and +/// finding the lifetime boundary with a simple backward walk from uses. 
+class OpaqueStorageAllocation {
+  AddressLoweringState &pass;
+
+public:
+  explicit OpaqueStorageAllocation(AddressLoweringState &pass) : pass(pass) {}
+
+  void allocateOpaqueStorage();
+
+protected:
+  void allocateValue(SILValue value);
+  bool findProjectionIntoUseImpl(SILValue value,
+                                 ArrayRef<SILValue> incomingValues,
+                                 bool intoPhi);
+
+  bool findValueProjectionIntoUse(SILValue value) {
+    return findProjectionIntoUseImpl(value, ArrayRef<SILValue>(value), false);
   }
-  case SILInstructionKind::TupleInst:
-    composingValue = cast<TupleInst>(composingUse);
-    break;
+
+  bool findPhiProjectionIntoUse(SILValue value,
+                                ArrayRef<SILValue> incomingValues) {
+    return findProjectionIntoUseImpl(value, incomingValues, true);
   }
-  ValueStorage &storage = pass.valueStorageMap.getStorage(composingValue);
-  if (SILValue addr = storage.storageAddress) {
-    if (auto *stackInst = dyn_cast<AllocStackInst>(addr)) {
-      assert(pass.domInfo->properlyDominates(stackInst, innerVal));
-      return true;
-    }
-    if (isa<SILFunctionArgument>(addr)) {
-      return true;
-    }
-  } else if (storage.isProjection())
-    return canProjectFrom(innerVal, storage.getComposedOperand()->getUser());
-  return false;
+  bool checkStorageDominates(AllocStackInst *allocInst,
+                             ArrayRef<SILValue> incomingValues);
+
+  void allocatePhi(PhiValue phi);
+
+  void removeAllocation(SILValue value);
+
+  AllocStackInst *createStackAllocation(SILValue value);
+
+  void createStackAllocationStorage(SILValue value) {
+    pass.valueStorageMap.getStorage(value).storageAddress =
+        createStackAllocation(value);
+  }
+};
+} // end anonymous namespace
+
+/// Top-level entry point: allocate storage for all opaque/resilient values.
+void OpaqueStorageAllocation::allocateOpaqueStorage() {
+  // Create an AllocStack for every opaque value defined in the function. Visit
+  // values in post-order to create storage for aggregates before subobjects.
+  for (auto &valueStorageI : llvm::reverse(pass.valueStorageMap)) {
+    SILValue value = valueStorageI.value;
+    if (!PhiValue(value))
+      allocateValue(value);
+  }
+  // Only allocate phis after all SSA values have been allocated. allocateValue
+  // assumes SSA form without checking interference. At that point, multiple
+  // SILValues can share storage via projections, but the storage is still
+  // singly defined. However, allocatePhi may coalesce multiple values, or even
+  // a single value across multiple loop iterations. The burden for checking
+  // interference is entirely on allocatePhi.
+  for (auto &valueStorageI : llvm::reverse(pass.valueStorageMap)) {
+    if (auto phi = PhiValue(valueStorageI.value)) {
+      allocatePhi(phi);
+    }
+  }
 }
 
 /// Allocate storage for a single opaque/resilient value.
-void OpaqueStorageAllocation::allocateForValue(SILValue value,
-                                               ValueStorage &storage) {
+void OpaqueStorageAllocation::allocateValue(SILValue value) {
+  // Phis must be deferred.
+  assert(!PhiValue(value));
+
+  // Pseudo call results have no storage.
+  assert(!isPseudoCallResult(value));
+
+  // Pseudo return values have no storage.
+  assert(!isPseudoReturnValue(value));
+
+  auto &storage = pass.valueStorageMap.getStorage(value);
+
+  // Fake loads for incoming function arguments are already rewritten; so are
+  // outgoing function arguments.
+  if (storage.isRewritten)
+    return;
+
+  // Function arguments are preallocated to fake loads, so they aren't mapped to
+  // storage, and indirect results are already rewritten.
   assert(!isa<SILFunctionArgument>(value));
 
-  if (auto apply = ApplySite::isa(value)) {
-    // Result tuples will be canonicalized during apply rewriting so the tuple
-    // itself is unused.
- if (value->getType().is()) { - assert(apply.getSubstCalleeType()->getNumResults() > 1); - return; - } + assert(!storage.isAllocated()); + + if (getReusedStorageOperand(value)) + return; + + // Check for values that inherently project storage from their operand. + if (auto *storageOper = getProjectedDefOperand(value)) { + pass.valueStorageMap.recordDefProjection(storageOper, value); + return; + } + if (value->getOwnershipKind() == OwnershipKind::Guaranteed) { + value->dump(); + llvm::report_fatal_error("^^^ guaranteed values must reuse storage"); } - // Argument loads already have a storage address. - if (storage.storageAddress) { - assert(isa(storage.storageAddress)); + // Attempt to reuse a user's storage. + if (findValueProjectionIntoUse(value)) return; + + // Eagerly create stack allocation. This way any operands can check + // alloc_stack dominance before their storage is coalesced with this + // value. Unfortunately, this alloc_stack may be dead if we later coalesce + // this value's storage with a branch use. + createStackAllocationStorage(value); +} + +/// Find a use of \p value that can provide the value's storage. +/// +/// \p incomingValues is a Range of SILValues (e.g. ArrayRef), +/// that all need \p value's storage to be available in their scope. +bool OpaqueStorageAllocation::findProjectionIntoUseImpl( + SILValue value, ArrayRef incomingValues, bool intoPhi) { + // Def-projections take precedence. + assert(!getProjectedDefOperand(value) && !getReusedStorageOperand(value)); + + for (Operand *use : value->getUses()) { + // Get the user's value, whose storage we will project into. + SILValue userValue = getProjectedUseValue(use); + if (!userValue) + continue; + + assert(!getProjectedDefOperand(userValue) + && "storage cannot project in two directions."); + + // Recurse through all storage projections to find the uniquely allocated + // storage. Enum storage cannot be reused across multiple subobjects because + // it must be initialized via a single init_enum_data_addr instruction. + // + // TODO: fix the memory verifier to consider the actual store instructions + // to initialize an enum rather than the init_enum_data_addr to reuse enum + // storage across multiple subobjects within the payload. + auto *baseStorage = pass.valueStorageMap.getBaseStorage( + userValue, /*allowInitEnum*/ !intoPhi); + if (!baseStorage) + continue; + + if (auto *stackInst = + dyn_cast(baseStorage->storageAddress)) { + if (!checkStorageDominates(stackInst, incomingValues)) + continue; + } else + assert(isa(baseStorage->storageAddress)); + + LLVM_DEBUG(llvm::dbgs() << " PROJECT "; value->dump(); + llvm::dbgs() << " into use "; use->getUser()->dump()); + + pass.valueStorageMap.recordComposingUseProjection(use, userValue); + return true; } + return false; +} - if (value->hasOneUse()) { - // TODO: Handle block arguments. - // TODO: Handle subobjects with a single composition, and other non-mutating - // uses such as @in arguments. - if (auto *def = dyn_cast(value)) { - Operand *useOper = *value->use_begin(); - if (canProjectFrom(def, useOper->getUser())) { - storage.setComposedOperand(useOper); - return; - } +bool OpaqueStorageAllocation:: +checkStorageDominates(AllocStackInst *allocInst, + ArrayRef incomingValues) { + + for (SILValue incomingValue : incomingValues) { + if (auto *defInst = incomingValue->getDefiningInstruction()) { + if (!pass.domInfo->properlyDominates(allocInst, defInst)) + return false; + continue; + } + // Handle both phis and terminator results. 
+    auto *bbArg = cast<SILArgument>(incomingValue);
+    // The storage block must strictly dominate the phi.
+    if (!pass.domInfo->properlyDominates(
+          allocInst->getParent(), bbArg->getParent())) {
+      return false;
+    }
+  }
+  return true;
+}
+
+void OpaqueStorageAllocation::allocatePhi(PhiValue phi) {
+  // Coalesces phi operand storage with the phi storage. The algorithm processes
+  // all incoming values at once, so it is run when visiting the block
+  // argument.
+  //
+  // The phi operand projections are computed first to give them priority. Then
+  // we determine if the phi itself can share storage with one of its users.
+  CoalescedPhi coalescedPhi;
+  coalescedPhi.coalesce(phi, pass.valueStorageMap);
+
+  SmallVector<SILValue, 4> coalescedValues;
+  coalescedValues.reserve(coalescedPhi.getCoalescedOperands().size());
+  for (SILValue value : coalescedPhi.getCoalescedValues())
+    coalescedValues.push_back(value);
+
+  if (!findPhiProjectionIntoUse(phi, coalescedValues))
+    createStackAllocationStorage(phi);
+
+  // Regardless of whether we projected into a user or allocated storage,
+  // provide this storage to all the incoming values that can reuse it.
+  for (Operand *phiOper : coalescedPhi.getCoalescedOperands()) {
+    removeAllocation(phiOper->get());
+    pass.valueStorageMap.recordPhiUseProjection(phiOper,
+                                                PhiOperand(phiOper).getValue());
+  }
+}
+
+// Unfortunately, we create alloc_stack instructions for SSA values before
+// coalescing block arguments. This temporary storage now needs to be removed.
+void OpaqueStorageAllocation::removeAllocation(SILValue value) {
+  auto &storage = pass.valueStorageMap.getStorage(value);
+  auto *allocInst = cast<AllocStackInst>(storage.storageAddress);
+  storage.storageAddress = nullptr;
+
+  // Its only uses should be dealloc_stacks.
+  for (Operand *use : allocInst->getUses()) {
+    pass.deleter.forceDelete(cast<DeallocStackInst>(use->getUser()));
+  }
+  pass.deleter.forceDelete(allocInst);
+}
+
+// Create an alloc_stack that dominates an owned value \p value. Create
+// jointly-postdominating dealloc_stack instructions. Nesting will be fixed
+// later.
+//
+// Any value that may be used by a return instruction must be deallocated
+// immediately before the return. This allows the return to be rewritten by
+// loading from storage.
+AllocStackInst *OpaqueStorageAllocation::
+createStackAllocation(SILValue value) {
+  assert(value.getOwnershipKind() != OwnershipKind::Guaranteed &&
+         "creating storage for a guaranteed value implies a copy");
+
+#ifndef NDEBUG
+  // Instructions that produce an opened type never reach here because they
+  // have guaranteed ownership--they project their storage. We reach this
+  // point after the opened value has been copied.
+  if (auto *defInst = value->getDefiningInstruction()) {
+    if (auto *singleValue = dyn_cast<SingleValueInstruction>(defInst)) {
+      assert(!cast<SingleValueInstruction>(defInst)->getDefinedOpenedArchetype()
+             && "owned open_existential is unsupported");
    }
  }
+#endif
+
+  SILType allocTy = value->getType();
 
-  SILBuilder allocBuilder(pass.F->begin()->begin());
-  allocBuilder.setSILConventions(
-      SILModuleConventions::getLoweredAddressConventions(pass.F->getModule()));
-  AllocStackInst *allocInstr =
-      allocBuilder.createAllocStack(value.getLoc(), value->getType());
+  // For opened existential types, allocate stack space at the type
+  // definition. Allocating as early as possible provides more opportunity for
+  // creating use projections into value.
+ SILInstruction *firstOpeningInst = nullptr; + allocTy.getASTType().visit([&](CanType type) { + auto archetype = dyn_cast(type); + if (!archetype) + return; - storage.storageAddress = allocInstr; + if (auto openedTy = getOpenedArchetypeOf(archetype)) { + auto openingVal = + pass.getModule()->getRootOpenedArchetypeDef(openedTy, pass.function); - // Insert stack deallocations. - for (TermInst *termInst : pass.returnInsts) { - SILBuilder deallocBuilder(termInst); - deallocBuilder.setSILConventions( - SILModuleConventions::getLoweredAddressConventions(pass.F->getModule())); - deallocBuilder.createDeallocStack(allocInstr->getLoc(), allocInstr); + auto *openingInst = openingVal->getDefiningInstruction(); + assert(openingVal && "all opened archetypes should be resolved"); + if (firstOpeningInst + && pass.domInfo->dominates(firstOpeningInst, openingInst)) { + return; + } + firstOpeningInst = openingInst; + } + }); + auto allocPt = firstOpeningInst ? std::next(firstOpeningInst->getIterator()) + : pass.function->begin()->begin(); + auto allocBuilder = pass.getBuilder(allocPt); + AllocStackInst *alloc = allocBuilder.createAllocStack(pass.genLoc(), allocTy); + + auto dealloc = [&](SILBasicBlock::iterator insertPt) { + auto deallocBuilder = pass.getBuilder(insertPt); + deallocBuilder.createDeallocStack(pass.genLoc(), alloc); + }; + if (firstOpeningInst) { + // Deallocate at the dominance frontier to ensure that allocation encloses + // not only the uses of the current value, but also of any values reusing + // this storage as a use projection. + SmallVector frontier; + computeDominanceFrontier(alloc->getParent(), pass.domInfo, frontier); + for (SILBasicBlock *deallocBlock : frontier) { + dealloc(deallocBlock->getTerminator()->getIterator()); + } + } else { + for (SILInstruction *deallocPoint : pass.exitingInsts) { + dealloc(deallocPoint->getIterator()); + } } + return alloc; } //===----------------------------------------------------------------------===// -// AddressMaterialization - materialize storage addresses, generate projections. +// AddressMaterialization +// +// Materialize storage addresses, generate projections. //===----------------------------------------------------------------------===// namespace { /// Materialize the address of a value's storage. For values that are directly -/// mapped to a storage location, simply return the mapped `AllocStackInst`. -/// For subobjects emit any necessary `_addr` projections using the provided +/// mapped to a storage location, return the mapped `AllocStackInst`. For +/// subobjects emit any necessary `_addr` projections using the provided /// `SILBuilder`. /// -/// This is a common utility for ApplyRewriter, AddressOnlyDefRewriter, -/// and AddressOnlyUseRewriter. +/// This is a common utility for PhiRewriter, CallArgRewriter, ApplyRewriter, +/// ReturnRewriter, UseRewriter, and DefRewriter. class AddressMaterialization { AddressLoweringState &pass; SILBuilder &B; @@ -596,78 +1190,233 @@ class AddressMaterialization { AddressMaterialization(AddressLoweringState &pass, SILBuilder &B) : pass(pass), B(B) {} - SILValue initializeOperandMem(Operand *operand); + /// Return the address of the storage for `origValue`. This may involve + /// materializing projections. Record the materialized address as storage for + /// origValue. Called once at the definition of \p origValue. 
+ SILValue materializeAddress(SILValue origValue) { + ValueStorage &storage = pass.valueStorageMap.getStorage(origValue); + if (storage.storageAddress) + return storage.storageAddress; + + if (storage.isUseProjection) { + materializeUseProjectionStorage(storage, /*intoPhiOperand*/ false); + } else { + assert(storage.isDefProjection); + storage.storageAddress = materializeDefProjection(origValue); + } + return storage.storageAddress; + } + + void initializeOperand(Operand *operand); - SILValue materializeAddress(SILValue origValue); + SILValue materializeUseProjectionStorage(ValueStorage &storage, + bool intoPhiOperand); + + SILValue materializeDefProjection(SILValue origValue); protected: - SILValue materializeProjection(Operand *operand); + SILValue materializeStructExtract(SILInstruction *extractInst, + SILValue elementValue, unsigned fieldIdx); + + SILValue materializeTupleExtract(SILInstruction *extractInst, + SILValue elementValue, unsigned fieldIdx); + + SILValue materializeProjectionIntoUse(Operand *operand, bool intoPhiOperand); + + SILValue materializeComposingUser(SingleValueInstruction *user, + bool intoPhiOperand) { + return materializeUseProjectionStorage( + pass.valueStorageMap.getStorage(user), intoPhiOperand); + } }; } // anonymous namespace -// Materialize an address pointing to initialized memory for this operand, -// generating a projection and copy if needed. -SILValue AddressMaterialization::initializeOperandMem(Operand *operand) { +/// Given the operand of an aggregate instruction (struct, tuple, enum), ensure +/// that the in-memory subobject is initialized. Generates an address +/// projection and copy if needed. +/// +/// If the operand projects into its use, then the memory was already +/// initialized when visiting the use. +void AddressMaterialization::initializeOperand(Operand *operand) { SILValue def = operand->get(); - SILValue destAddr; - if (operand->get()->getType().isAddressOnly(*pass.F)) { + if (def->getType().isAddressOnly(*pass.function)) { ValueStorage &storage = pass.valueStorageMap.getStorage(def); - // Source value should already be rewritten. - assert(storage.isRewritten()); - if (storage.isProjection()) - destAddr = storage.storageAddress; - else { - destAddr = materializeProjection(operand); - B.createCopyAddr(operand->getUser()->getLoc(), storage.storageAddress, - destAddr, IsTake, IsInitialization); - } - } else { - destAddr = materializeProjection(operand); - B.createStore(operand->getUser()->getLoc(), operand->get(), destAddr, - StoreOwnershipQualifier::Unqualified); + assert(storage.isRewritten && "Source value should be rewritten"); + + if (storage.isUseProjection) + return; + + auto destAddr = + materializeProjectionIntoUse(operand, /*intoPhiOperand*/ false); + B.createCopyAddr(operand->getUser()->getLoc(), storage.storageAddress, + destAddr, IsTake, IsInitialization); + return; } - return destAddr; + SILValue destAddr = materializeProjectionIntoUse(operand, + /*intoPhiOperand*/ false); + B.createTrivialStoreOr(operand->getUser()->getLoc(), operand->get(), destAddr, + StoreOwnershipQualifier::Init); } -/// Return the address of the storage for `origValue`. This may involve -/// materializing projections. 
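To make the two paths in initializeOperand above concrete, this is the kind of SIL each path emits (illustrative only; the exact load/store ownership qualifiers depend on the operand's type):

  // Address-only operand %x of `%t = tuple (%x, %y)` that was not coalesced
  // with the tuple's storage:
  //   %xDest = tuple_element_addr %tAddr : $*(T, L), 0
  //   copy_addr [take] %xAddr to [init] %xDest : $*T
  //
  // Loadable operand %y of the same tuple:
  //   %yDest = tuple_element_addr %tAddr : $*(T, L), 1
  //   store %y to [init] %yDest : $*L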
-SILValue AddressMaterialization::materializeAddress(SILValue origValue) {
-  ValueStorage &storage = pass.valueStorageMap.getStorage(origValue);
-
-  if (!storage.storageAddress)
-    storage.storageAddress =
-        materializeProjection(storage.getComposedOperand());
+// Recursively materialize the address for storage at the point that a use
+// projects into it via either a composing-use (struct, tuple, enum) or phi
+// projection. This only materializes the address that the operands project
+// into. It does not materialize the storage for the result; e.g. it
+// materializes init_enum_data_addr, not inject_enum_addr.
+//
+// If \p intoPhiOperand is true, this materializes the address in the path that
+// reaches a phi operand, not the phi block itself.
+//
+// If \p intoPhiOperand is false, then the materialized address is guaranteed to
+// dominate the composing user. Map the user onto this address to avoid
+// rematerialization.
+SILValue AddressMaterialization::materializeUseProjectionStorage(
+    ValueStorage &storage, bool intoPhiOperand = false) {
+  // If this storage is already materialized, then simply return its
+  // address. This not only avoids redundant projections, but is necessary for
+  // correctness when emitting init_enum_data_addr.
+  if (!intoPhiOperand && storage.storageAddress)
+    return storage.storageAddress;
+  auto recordAddress = [&](SILValue address) {
+    if (!intoPhiOperand)
+      storage.storageAddress = address;
+    return address;
+  };
+  if (storage.isComposingUseProjection()) {
+    // Handle chains of composing users.
+    auto &useStorage = pass.valueStorageMap.getProjectedStorage(storage);
+    SILValue useVal = useStorage.value;
+    if (auto *defInst = useVal->getDefiningInstruction()) {
+      Operand *useOper =
+          &defInst->getAllOperands()[storage.projectedOperandNum];
+      return recordAddress(
+          materializeProjectionIntoUse(useOper, intoPhiOperand));
+    }
+    // For indirect function results, projectedOperandNum is the index into
+    // the tuple of opaque results, which isn't useful here.
+    assert(isa<SILFunctionArgument>(useVal) && useStorage.storage.isRewritten);
+    return recordAddress(useStorage.storage.storageAddress);
+  }
+  if (storage.isPhiProjection()) {
+    return recordAddress(materializeUseProjectionStorage(
+        pass.valueStorageMap.getProjectedStorage(storage).storage,
+        /*intoPhiOperand*/ true));
+  }
+  assert(!storage.isProjection()
+         && "a composing user may not also be a def projection");
   return storage.storageAddress;
 }
 
-SILValue AddressMaterialization::materializeProjection(Operand *operand) {
-  SILInstruction *user = operand->getUser();
+/// Materialize the address of a subobject.
+///
+/// \param origValue is a value associated with the subobject storage. It is
+/// either a SingleValueInstruction projection or a terminator result.
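As a concrete example of a materialized def projection (a sketch consistent with materializeStructExtract and materializeTupleExtract below, not verbatim output): given `%f = struct_extract %s : $S, #S.f` with %s's storage already materialized as %sAddr, the projection is emitted as

  //   %fAddr = struct_element_addr %sAddr : $*S, #S.f
  //
  // and %fAddr is then recorded as the storage address for %f.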
+SILValue AddressMaterialization::materializeDefProjection(SILValue origValue) { + switch (origValue->getKind()) { + default: + llvm_unreachable("Unexpected projection from def."); + + case ValueKind::CopyValueInst: + assert(isStoreCopy(origValue)); + return pass.getMaterializedAddress( + cast(origValue)->getOperand()); + + case ValueKind::MultipleValueInstructionResult: { + auto *result = cast(origValue); + SILInstruction *destructure = result->getParent(); + switch (destructure->getKind()) { + default: + llvm_unreachable("Unexpected projection from def."); + + case SILInstructionKind::DestructureStructInst: { + return materializeStructExtract(destructure, origValue, + result->getIndex()); + break; + } + case SILInstructionKind::DestructureTupleInst: { + return materializeTupleExtract(destructure, origValue, + result->getIndex()); + break; + } + } + } + case ValueKind::StructExtractInst: { + auto *extractInst = cast(origValue); + return materializeStructExtract(extractInst, origValue, + extractInst->getFieldIndex()); + } + case ValueKind::TupleExtractInst: { + auto *extractInst = cast(origValue); + return materializeTupleExtract(extractInst, origValue, + extractInst->getFieldIndex()); + } + case ValueKind::SILPhiArgument: { + // Handle this in the caller. unchecked_take_enum_data_addr is + // destructive. It cannot be materialized on demand. + llvm_unreachable("Unimplemented switch_enum optimization"); + } + } +} +// \p structInst is a unary instruction whose first operand is a struct. +SILValue AddressMaterialization::materializeStructExtract( + SILInstruction *extractInst, SILValue elementValue, unsigned fieldIdx) { + auto structVal = extractInst->getOperand(0); + SILValue srcAddr = pass.getMaterializedAddress(structVal); + auto *structType = structVal->getType().getStructOrBoundGenericStruct(); + auto *varDecl = structType->getStoredProperties()[fieldIdx]; + return B.createStructElementAddr(extractInst->getLoc(), srcAddr, varDecl, + elementValue->getType().getAddressType()); +} + +// \p tupleInst is a unary instruction whose first operand is a tuple. +SILValue AddressMaterialization::materializeTupleExtract( + SILInstruction *extractInst, SILValue elementValue, unsigned fieldIdx) { + SILValue srcAddr = pass.getMaterializedAddress(extractInst->getOperand(0)); + return B.createTupleElementAddr(extractInst->getLoc(), srcAddr, fieldIdx, + elementValue->getType().getAddressType()); +} + +/// Recursively materialize the address of a subobject that is a member of the +/// operand's user. The operand's user must be an aggregate struct, tuple, enum, +/// init_existential_value. 
+SILValue +AddressMaterialization::materializeProjectionIntoUse(Operand *operand, + bool intoPhiOperand) { + SILInstruction *user = operand->getUser(); switch (user->getKind()) { default: LLVM_DEBUG(user->dump()); - llvm_unreachable("Unexpected subobject composition."); + llvm_unreachable("Unexpected projection from use."); case SILInstructionKind::EnumInst: { auto *enumInst = cast(user); - SILValue enumAddr = materializeAddress(enumInst); + SILValue enumAddr = materializeComposingUser(enumInst, intoPhiOperand); return B.createInitEnumDataAddr(enumInst->getLoc(), enumAddr, enumInst->getElement(), operand->get()->getType().getAddressType()); } case SILInstructionKind::InitExistentialValueInst: { auto *initExistentialValue = cast(user); - SILValue containerAddr = materializeAddress(initExistentialValue); + SILValue containerAddr = + materializeComposingUser(initExistentialValue, intoPhiOperand); auto canTy = initExistentialValue->getFormalConcreteType(); auto opaque = Lowering::AbstractionPattern::getOpaque(); - auto &concreteTL = pass.F->getTypeLowering(opaque, canTy); + auto &concreteTL = pass.function->getTypeLowering(opaque, canTy); return B.createInitExistentialAddr( initExistentialValue->getLoc(), containerAddr, canTy, concreteTL.getLoweredType(), initExistentialValue->getConformances()); } - case SILInstructionKind::ReturnInst: { - assert(pass.loweredFnConv.hasIndirectSILResults()); - return pass.F->getArguments()[0]; + case SILInstructionKind::StructInst: { + auto *structInst = cast(user); + + auto fieldIter = structInst->getStructDecl()->getStoredProperties().begin(); + std::advance(fieldIter, operand->getOperandNumber()); + + SILValue structAddr = materializeComposingUser(structInst, intoPhiOperand); + return B.createStructElementAddr( + structInst->getLoc(), structAddr, *fieldIter, + operand->get()->getType().getAddressType()); } case SILInstructionKind::TupleInst: { auto *tupleInst = cast(user); @@ -678,854 +1427,1749 @@ SILValue AddressMaterialization::materializeProjection(Operand *operand) { assert(resultIdx < pass.loweredFnConv.getNumIndirectSILResults()); // Cannot call getIndirectSILResults here because that API uses the // original function type. - return pass.F->getArguments()[resultIdx]; + return pass.function->getArguments()[resultIdx]; } - // TODO: emit tuple_element_addr - llvm_unreachable("Unimplemented"); + SILValue tupleAddr = materializeComposingUser(tupleInst, intoPhiOperand); + return B.createTupleElementAddr(tupleInst->getLoc(), tupleAddr, + operand->getOperandNumber(), + operand->get()->getType().getAddressType()); } } } //===----------------------------------------------------------------------===// -// ApplyRewriter - rewrite call sites with indirect arguments. +// PhiRewriter +// +// Insert copies on CFG edges to break phi operand interferences. //===----------------------------------------------------------------------===// namespace { -/// Rewrite an Apply, lowering its indirect SIL arguments. -/// -/// Replace indirect parameter arguments of this function with address-type -/// arguments. -/// -/// Insert new indirect result arguments for this function to represent the -/// caller's storage. -class ApplyRewriter { + +// To materialize a phi operand in the corresponding phi predecessor block: +// +// 1. Materialize the phi address. If the phi projects into a use, this requires +// initialization of the user's storage in each predecessor. +// +// 2. If the phi operand is not coalesced, then copy the operand into the +// materialized phi address. 
+//
+// For blocks with multiple phis, all copies of phi operands semantically occur
+// in parallel on the CFG edge from the predecessor to the phi block. As these
+// copies are inserted into the predecessor's instruction list, maintain the
+// illusion of parallel copies by resolving any interference between the phi
+// copies. This is done by checking for anti-dependencies to or from other phi
+// copies. If one phi copy's source reads from another phi copy's dest, then the
+// read must occur before the write.
+//
+// Insert a second copy to break an anti-dependence cycle when both the source
+// and destination of the new phi copy interfere with other phi copies (the
+// classic phi-swap problem).
+//
+// Input:
+//   addr0 = alloc_stack // storage for val0
+//   addr1 = alloc_stack // storage for val1
+//   bb1:
+//     br bb3(val0, val1)
+//   bb2:
+//     br bb3(val1, val0)
+//   bb3(phi0, phi1):
+//
+// Output:
+//
+//   bb1:
+//     br bb3(val0, val1)
+//   bb2:
+//     temp = alloc_stack
+//     copy_addr addr0 to temp
+//     copy_addr addr1 to addr0
+//     copy_addr temp to addr1
+//     dealloc_stack temp
+//     br bb3(val1, val0)
+//   bb3(phi0, phi1):
+class PhiRewriter {
   AddressLoweringState &pass;
-  ApplySite apply;
-  SILBuilder argBuilder;
 
-  /// For now, we assume that the apply site is a normal apply.
-  ApplyInst *getApplyInst() const { return cast<ApplyInst>(apply); }
+  // A set of copies from a phi operand storage to phi storage. These logically
+  // occur on the CFG edge. Keep track of them to resolve anti-dependencies.
+  SmallPtrSet<CopyAddrInst *, 16> phiCopies;
 
 public:
-  ApplyRewriter(ApplySite origCall, AddressLoweringState &pass)
-      : pass(pass), apply(origCall), argBuilder(origCall.getInstruction()) {
-    argBuilder.setSILConventions(
-        SILModuleConventions::getLoweredAddressConventions(origCall.getModule()));
-  }
+  PhiRewriter(AddressLoweringState &pass) : pass(pass) {}
 
-  void rewriteParameters();
-  void rewriteIndirectParameter(Operand *operand);
-
-  void convertApplyWithIndirectResults();
+  void materializeOperand(PhiOperand phiOperand);
 
 protected:
-  void
-  canonicalizeResults(MutableArrayRef<SingleValueInstruction *> directResultValues,
-                      ArrayRef<Operand *> nonCanonicalUses);
-  SILValue materializeIndirectResultAddress(
-      SingleValueInstruction *origDirectResultVal,
-      SILType argTy);
+  PhiRewriter(const PhiRewriter &) = delete;
+  PhiRewriter &operator=(const PhiRewriter &) = delete;
+
+  CopyAddrInst *createPhiCopy(SILBuilder &builder, SILValue from, SILValue to) {
+    auto *copy = builder.createCopyAddr(pass.genLoc(), from, to, IsTake,
+                                        IsInitialization);
+    phiCopies.insert(copy);
+    return copy;
+  }
+
+  struct CopyPosition {
+    SILBasicBlock::iterator latestCopyPos;
+    bool foundAntiDependenceCycle = false;
+  };
+  CopyPosition findPhiCopyPosition(PhiOperand phiOper);
 };
-} // end anonymous namespace
+} // anonymous namespace
 
-/// Rewrite any indirect parameter in place.
-void ApplyRewriter::rewriteParameters() {
-  // Rewrite all incoming indirect operands.
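For contrast with the swap above, a purely acyclic interference only constrains the order of the copies and needs no temporary (illustrative, assuming addr0/addr1/addr2 hold val0/val1/val2 and the phis reuse addr0 and addr1):

  //   bb2:                        // edge carries (val1, val2) into bb3(phi0, phi1)
  //     copy_addr addr1 to addr0  // phi0 <- val1: reads addr1
  //     copy_addr addr2 to addr1  // phi1 <- val2: overwrites addr1
  //     br bb3(val1, val2)
  //
  // The copy that reads addr1 must be placed before the copy that writes
  // addr1, which is exactly what findPhiCopyPosition below enforces.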
- unsigned calleeArgIdx = apply.getCalleeArgIndexOfFirstAppliedArg(); - for (Operand &operand : apply.getArgumentOperands()) { - if (operand.get()->getType().isObject()) { - auto argConv = - apply.getSubstCalleeConv().getSILArgumentConvention(calleeArgIdx); - if (argConv.isIndirectConvention()) - rewriteIndirectParameter(&operand); +void PhiRewriter::materializeOperand(PhiOperand phiOper) { + auto &operStorage = + pass.valueStorageMap.getStorage(phiOper.getOperand()->get()); + if (operStorage.isPhiProjection()) { + if (operStorage.projectedStorageID + == pass.valueStorageMap.getOrdinal(phiOper.getValue())) { + // This operand was coalesced with this particular phi. No copy needed. + return; } - ++calleeArgIdx; } + auto phiOperAddress = operStorage.getMaterializedAddress(); + + auto copyPos = findPhiCopyPosition(phiOper); + + auto builder = pass.getBuilder(copyPos.latestCopyPos); + AddressMaterialization addrMat(pass, builder); + + auto &phiStorage = pass.valueStorageMap.getStorage(phiOper.getValue()); + SILValue phiAddress = + addrMat.materializeUseProjectionStorage(phiStorage, + /*intoPhiOperand*/ true); + + if (!copyPos.foundAntiDependenceCycle) { + createPhiCopy(builder, phiOperAddress, phiAddress); + return; + } + AllocStackInst *alloc = + builder.createAllocStack(pass.genLoc(), phiOper.getValue()->getType()); + createPhiCopy(builder, phiOperAddress, alloc); + + auto tempBuilder = pass.getBuilder(phiOper.getBranch()->getIterator()); + createPhiCopy(tempBuilder, alloc, phiAddress); + tempBuilder.createDeallocStack(pass.genLoc(), alloc); } -/// Deallocate temporary call-site stack storage. -/// -/// `argLoad` is non-null for @out args that are loaded. -static void insertStackDeallocationAtCall(AllocStackInst *allocInst, - SILInstruction *applyInst, - SILInstruction *argLoad) { - SILInstruction *lastUse = argLoad ? argLoad : applyInst; - - switch (applyInst->getKind()) { - case SILInstructionKind::ApplyInst: { - SILBuilder deallocBuilder(&*std::next(lastUse->getIterator())); - deallocBuilder.setSILConventions( - SILModuleConventions::getLoweredAddressConventions(applyInst->getModule())); - deallocBuilder.createDeallocStack(allocInst->getLoc(), allocInst); - break; +PhiRewriter &AddressLoweringState::getPhiRewriter() { + if (!this->phiRewriter) { + this->phiRewriter = std::make_unique(*this); } - case SILInstructionKind::TryApplyInst: - // TODO!!!: insert dealloc in the catch block. - llvm_unreachable("not implemented for this instruction!"); - case SILInstructionKind::PartialApplyInst: - llvm_unreachable("partial apply cannot have indirect results."); - default: - llvm_unreachable("not implemented for this instruction!"); + return *(this->phiRewriter.get()); +} + +// Return the latest position at which a copy into this phi may be emitted +// without violating an anti-dependence on another phi copy. +PhiRewriter::CopyPosition PhiRewriter::findPhiCopyPosition(PhiOperand phiOper) { + auto phiBaseAddress = + pass.valueStorageMap.getBaseStorage(phiOper.getValue()).storageAddress; + + auto operBaseAddress = + pass.valueStorageMap.getBaseStorage(phiOper.getOperand()->get()) + .storageAddress; + + auto insertPt = phiOper.getBranch()->getIterator(); + bool foundEarliestInsertPoint = false; + + CopyPosition copyPos; + copyPos.latestCopyPos = insertPt; + + // Continue scanning until all phi copies have been checked for interference. 
+ for (auto beginIter = phiOper.predBlock->begin(); insertPt != beginIter;) { + --insertPt; + + auto *phiCopy = dyn_cast(&*insertPt); + if (!phiCopy || !phiCopies.contains(phiCopy)) + break; + + if (!foundEarliestInsertPoint + && getAccessBase(phiCopy->getSrc()) == phiBaseAddress) { + // Anti-dependence from the phi copy to the phi value. Do not copy into + // the phi storage before this point. + foundEarliestInsertPoint = true; + } + if (getAccessBase(phiCopy->getDest()) == operBaseAddress) { + // Anti-dependence from the phi operand to the phi copy. Do not copy out + // of the operand storage after this point. + copyPos.latestCopyPos = insertPt; + // If the earliest and latest points conflict, allocate a temporary. + if (foundEarliestInsertPoint) { + copyPos.foundAntiDependenceCycle = true; + } + } } + return copyPos; } -/// Rewrite a formally indirect parameter in place. +//===----------------------------------------------------------------------===// +// CallArgRewriter +// +// Rewrite call arguments for indirect parameters. +//===----------------------------------------------------------------------===// + +namespace { +/// This rewrites one parameter at a time, replacing the incoming +/// object arguments with address-type arguments. +class CallArgRewriter { + AddressLoweringState &pass; + FullApplySite apply; + SILLocation callLoc; + SILBuilder argBuilder; + AddressMaterialization addrMat; + +public: + CallArgRewriter(FullApplySite apply, AddressLoweringState &pass) + : pass(pass), apply(apply), callLoc(apply.getLoc()), + argBuilder(pass.getBuilder(apply.getInstruction()->getIterator())), + addrMat(pass, argBuilder) {} + + bool rewriteArguments(); + + void rewriteIndirectArgument(Operand *operand); +}; +} // end anonymous namespace + +/// Rewrite all incoming indirect arguments in place without modifying the call. +bool CallArgRewriter::rewriteArguments() { + bool changed = false; + + auto origConv = apply.getSubstCalleeConv(); + assert(apply.getNumArguments() == origConv.getNumParameters() + && "results should not yet be rewritten"); + + for (unsigned argIdx = apply.getCalleeArgIndexOfFirstAppliedArg(), + endArgIdx = argIdx + apply.getNumArguments(); + argIdx < endArgIdx; ++argIdx) { + + Operand &operand = apply.getArgumentRef(argIdx); + // Ignore arguments that have already been rewritten with an address. + if (operand.get()->getType().isAddress()) + continue; + + auto argConv = apply.getSubstCalleeConv().getSILArgumentConvention(argIdx); + if (argConv.isIndirectConvention()) { + rewriteIndirectArgument(&operand); + changed |= true; + } + } + return changed; +} + +/// Rewrite a formally indirect argument in place. /// Update the operand to the incoming value's storage address. /// After this, the SIL argument types no longer match SIL function conventions. /// /// Temporary argument storage may be created for loadable values. -/// -/// Note: Temporary argument storage does not own its value. If the argument -/// is owned, the stored value should already have been copied. -void ApplyRewriter::rewriteIndirectParameter(Operand *operand) { +void CallArgRewriter::rewriteIndirectArgument(Operand *operand) { SILValue argValue = operand->get(); - if (argValue->getType().isAddressOnly(*pass.F)) { + if (argValue->getType().isAddressOnly(*pass.function)) { ValueStorage &storage = pass.valueStorageMap.getStorage(argValue); - // Source value should already be rewritten. 
- assert(storage.isRewritten()); + assert(storage.isRewritten && "arg source should be rewritten"); operand->set(storage.storageAddress); return; } // Allocate temporary storage for a loadable operand. - AllocStackInst *allocInstr = - argBuilder.createAllocStack(apply.getLoc(), argValue->getType()); - - argBuilder.createStore(apply.getLoc(), argValue, allocInstr, - StoreOwnershipQualifier::Unqualified); - - operand->set(allocInstr); - - insertStackDeallocationAtCall(allocInstr, apply.getInstruction(), - /*argLoad=*/nullptr); -} - -// Canonicalize call result uses. Treat each result of a multi-result call as -// an independent value. Currently, SILGen may generate tuple_extract for each -// result but generate a single destroy_value for the entire tuple of -// results. This makes it impossible to reason about each call result as an -// independent value according to the callee's function type. -// -// directResultValues has an entry for each tuple extract corresponding to -// that result if one exists. This function will add an entry to -// directResultValues whenever it needs to materialize a TupleExtractInst. -void ApplyRewriter::canonicalizeResults( - MutableArrayRef directResultValues, - ArrayRef nonCanonicalUses) { - - auto *applyInst = getApplyInst(); - - for (Operand *operand : nonCanonicalUses) { - auto *destroyInst = dyn_cast(operand->getUser()); - if (!destroyInst) - llvm::report_fatal_error("Simultaneous use of multiple call results."); - - for (unsigned resultIdx : indices(directResultValues)) { - SingleValueInstruction *result = directResultValues[resultIdx]; - if (!result) { - SILBuilder resultBuilder(std::next(SILBasicBlock::iterator(applyInst))); - resultBuilder.setSILConventions( - SILModuleConventions::getLoweredAddressConventions(applyInst->getModule())); - result = resultBuilder.createTupleExtract(applyInst->getLoc(), - applyInst, resultIdx); - directResultValues[resultIdx] = result; + AllocStackInst *allocInst = + argBuilder.createAllocStack(callLoc, argValue->getType()); + + operand->set(allocInst); + + if (apply.getArgumentConvention(*operand).isOwnedConvention()) { + argBuilder.createTrivialStoreOr(apply.getLoc(), argValue, allocInst, + StoreOwnershipQualifier::Init); + cleanupAfterCall(apply, [&](SILBasicBlock::iterator insertPt) { + auto deallocBuilder = pass.getBuilder(insertPt); + deallocBuilder.createDeallocStack(callLoc, allocInst); + }); + } else { + auto borrow = argBuilder.emitBeginBorrowOperation(callLoc, argValue); + auto *storeInst = + argBuilder.emitStoreBorrowOperation(callLoc, borrow, allocInst); + + cleanupAfterCall(apply, [&](SILBasicBlock::iterator insertPt) { + auto cleanupBuilder = pass.getBuilder(insertPt); + if (auto *storeBorrow = dyn_cast(storeInst)) { + cleanupBuilder.emitEndBorrowOperation(callLoc, storeBorrow); } - SILBuilder B(destroyInst); - B.setSILConventions(SILModuleConventions::getLoweredAddressConventions(applyInst->getModule())); - auto &TL = pass.F->getTypeLowering(result->getType()); - TL.emitDestroyValue(B, destroyInst->getLoc(), result); + cleanupBuilder.emitEndBorrowOperation(callLoc, borrow); + cleanupBuilder.createDeallocStack(callLoc, allocInst); + }); + } +} + +//===----------------------------------------------------------------------===// +// ApplyRewriter +// +// Rewrite call sites with indirect results. +// ===---------------------------------------------------------------------===// + +namespace { +/// Once any result needs to be rewritten, then the entire apply is +/// replaced. 
Creates new indirect result arguments for this function to +/// represent the caller's storage. +/// +/// TODO: Multi-Result - this is complicated because calls are not properly +/// represented as multi-value instructions. +class ApplyRewriter { + AddressLoweringState &pass; + + // This apply site mutates when the new apply instruction is generated. + FullApplySite apply; + SILLocation callLoc; + + // For building incoming arguments and materializing addresses. + SILBuilder argBuilder; + + // For loading results. + SILBuilder resultBuilder; + + AddressMaterialization addrMat; + SILFunctionConventions opaqueCalleeConv; + SILFunctionConventions loweredCalleeConv; + +public: + ApplyRewriter(FullApplySite oldCall, AddressLoweringState &pass) + : pass(pass), apply(oldCall), callLoc(oldCall.getLoc()), + argBuilder(pass.getBuilder(oldCall.getInstruction()->getIterator())), + resultBuilder(pass.getBuilder(getCallResultInsertionPoint())), + addrMat(pass, argBuilder), + opaqueCalleeConv(oldCall.getSubstCalleeConv()), + loweredCalleeConv(getLoweredCallConv(oldCall)) {} + + void convertApplyWithIndirectResults(); + +protected: + SILBasicBlock::iterator getCallResultInsertionPoint() { + if (isa(apply)) + return std::next(SILBasicBlock::iterator(apply.getInstruction())); + + auto *bb = cast(apply)->getNormalBB(); + return bb->begin(); + } + + void makeIndirectArgs(MutableArrayRef newCallArgs); + + SILBasicBlock::iterator getResultInsertionPoint(); + + SILValue materializeIndirectResultAddress(SILValue oldResult, SILType argTy); + + void rewriteApply(ArrayRef newCallArgs); + + void rewriteTryApply(ArrayRef newCallArgs); + + void replaceDirectResults(DestructureTupleInst *oldDestructure); +}; +} // end anonymous namespace + +/// Top-level entry: Allocate storage for formally indirect results at a call +/// site. Create a new apply instruction with indirect SIL arguments. The +/// original apply instruction remains in place, unless it is a try_apply. +/// +/// Input (T = address-only, L=Loadable): +/// +/// %addr = alloc_stack $T // storage for %oldResult +/// ... +/// %oldResult = apply : $() -> @out T +/// +/// Output: +/// +/// %addr = alloc_stack $T // storage for %oldResult +/// ... +/// %newCall = apply(%addr) : $() -> @out T // no uses +/// %oldResult = apply() : $() -> @out T // original apply +/// +/// Input: +/// +/// %result = apply : $() -> @out L +/// +/// Output: +/// +/// %addr = alloc_stack $L // unmapped temp storage +/// %newCall = apply(%addr) : $() -> @out L // no uses +/// %oldCall = apply() : $() -> @out L // original apply, no uses +/// %result = load %addr : $*L +/// dealloc_stack %addr +/// +/// Input: +/// +/// %addr0 = alloc_stack $T // storage for %result0 +/// ... +/// %tuple = apply : $() -> (@out T, @out L, L) +/// (%r0, %r1, %r2) = destructure_tuple %tuple : $(T, T, T) +/// +/// Output: +/// +/// %addr0 = alloc_stack $T // storage for %r0 +/// ... +/// %addr1 = alloc_stack // unmapped temp storage +/// %r2 = apply(%addr0, %addr1) : $() -> (@out T, @out L, L) +/// %oldCall = apply() : $() -> (@out T, @out L, L) +/// %r1 = load %addr1 : $*L +/// (%r0, %d1, %d2) = destructure_tuple %tuple : $(T, T, T) +/// // no uses of %d1, %d2 +/// +void ApplyRewriter::convertApplyWithIndirectResults() { + // Gather information from the old apply before rewriting it and mutating + // this->apply. + + // Avoid revisiting this apply. 
+ bool erased = pass.indirectApplies.erase(apply); + assert(erased && "all results should be rewritten at the same time"); + (void)erased; + + // List of new call arguments. + SmallVector newCallArgs(loweredCalleeConv.getNumSILArguments()); + + // Materialize and map the address of each opaque indirect result, possibly + // creating alloc_stacks. + // + // Create a load for each loadable indirect result. + // + // Populate newCallArgs. + makeIndirectArgs(newCallArgs); + + // Record the original results before potentially removing the apply + // (try_apply is removed during rewriting). + auto *destructure = getCallMultiResult(apply.getPseudoResult()); + + switch (apply.getKind()) { + case FullApplySiteKind::ApplyInst: { + // this->apply will be updated with the new apply instruction. + rewriteApply(newCallArgs); + break; + } + case FullApplySiteKind::TryApplyInst: { + // this->apply will be updated with the new try_apply instruction. + rewriteTryApply(newCallArgs); + break; + } + case FullApplySiteKind::BeginApplyInst: + // BeginApply does not need to be rewritten. It's argument list is not + // polluted with indirect results. + break; + }; + + // Replace all results of the original call that remain direct. ApplyRewriter + // is only used when at least one result is indirect. So any direct results + // require a destructure. + if (destructure) { + replaceDirectResults(destructure); + } +} + +// Populate \p newCallArgs with the new call instruction's SIL argument list. +// Materialize temporary storage for loadable indirect results. +// +// Input (T = address-only, L=Loadable): +// +// %addr = alloc_stack $T // storage for %oldResult +// ... +// %oldResult = apply : $() -> @out T +// +// Output (newCallArgs = [%addr]): +// +// Input: +// +// %result = apply : $() -> @out L +// +// Output (newCallArgs = [%addr]): +// +// %addr = alloc_stack $L // unmapped temp storage +// %oldCall = apply() : $() -> @out L // no uses +// %result = load %addr : $*L +// dealloc_stack %addr +// +// Input: +// +// %addr0 = alloc_stack $T // storage for %r0 +// ... +// %tuple = apply : $() -> (@out T, @out L, L) +// (%r0, %r1, %r2) = destructure_tuple %tuple : $(T, L, L) +// +// Output (newCallArgs = [%addr0, %addr1]): +// +// %addr0 = alloc_stack $T // storage for %r0 +// ... +// %addr1 = alloc_stack // unmapped temp storage +// %tuple = apply() : $() -> (@out T, @out L, L) +// %r1 = load %addr1 : $*L +// dealloc_stack %addr1 +// (%r0, %d1, %r2) = destructure_tuple %tuple : $(T, L, L) +// // no uses of %d1 +// +void ApplyRewriter::makeIndirectArgs(MutableArrayRef newCallArgs) { + + auto typeCtx = pass.function->getTypeExpansionContext(); + + // The index of the next indirect result argument. + unsigned newResultArgIdx = + loweredCalleeConv.getSILArgIndexOfFirstIndirectResult(); + + auto visitCallResult = [&](SILValue result, SILResultInfo resultInfo) { + assert(!opaqueCalleeConv.isSILIndirect(resultInfo) + && "canonical call results are always direct"); + + if (loweredCalleeConv.isSILIndirect(resultInfo)) { + SILValue indirectResultAddr = materializeIndirectResultAddress( + result, loweredCalleeConv.getSILType(resultInfo, typeCtx)); + // Record the new indirect call argument. + newCallArgs[newResultArgIdx++] = indirectResultAddr; } - destroyInst->eraseFromParent(); + return true; + }; + visitCallResults(apply, visitCallResult); + + // Append the existing call arguments to the SIL argument list. They were + // already lowered to addresses by CallArgRewriter. 
+  assert(newResultArgIdx == loweredCalleeConv.getSILArgIndexOfFirstParam());
+  unsigned origArgIdx = apply.getSubstCalleeConv().getSILArgIndexOfFirstParam();
+  for (unsigned endIdx = newCallArgs.size(); newResultArgIdx < endIdx;
+       ++newResultArgIdx, ++origArgIdx) {
+    newCallArgs[newResultArgIdx] = apply.getArgument(origArgIdx);
+  }
+}
+
+SILBasicBlock::iterator ApplyRewriter::getResultInsertionPoint() {
+  switch (apply.getKind()) {
+  case FullApplySiteKind::ApplyInst: {
+    return std::next(apply.getInstruction()->getIterator());
+  }
+  case FullApplySiteKind::TryApplyInst: {
+    auto *tryApply = cast<TryApplyInst>(apply.getInstruction());
+    return tryApply->getNormalBB()->begin();
+  }
+  case FullApplySiteKind::BeginApplyInst: {
+    llvm_unreachable("coroutines don't have indirect results");
+  }
+  }
+}
 
 /// Return the storage address for the indirect result corresponding to the
-/// given original result value. Allocate temporary argument storage for any
-/// indirect results that are unmapped because they are loadable or unused.
+/// \p oldResult. Allocate temporary argument storage for an
+/// indirect result that isn't mapped to storage because it is either loadable
+/// or unused.
 ///
-/// origDirectResultVal may be nullptr for unused results.
-SILValue ApplyRewriter::materializeIndirectResultAddress(
-    SingleValueInstruction *origDirectResultVal, SILType argTy) {
-
-  if (origDirectResultVal
-      && origDirectResultVal->getType().isAddressOnly(*pass.F)) {
-    auto &storage = pass.valueStorageMap.getStorage(origDirectResultVal);
+/// \p oldResult is invalid for an unused result.
+SILValue ApplyRewriter::materializeIndirectResultAddress(SILValue oldResult,
+                                                         SILType argTy) {
+  if (oldResult && oldResult->getType().isAddressOnly(*pass.function)) {
+    // Results that project into their uses have not yet been materialized.
+    addrMat.materializeAddress(oldResult);
+
+    auto &storage = pass.valueStorageMap.getStorage(oldResult);
     storage.markRewritten();
-    // Pass the local storage address as the indirect result address.
     return storage.storageAddress;
   }
   // Allocate temporary call-site storage for an unused or loadable result.
-  SILInstruction *origCallInst = apply.getInstruction();
-  SILLocation loc = origCallInst->getLoc();
-  auto *allocInst = argBuilder.createAllocStack(loc, argTy);
-  LoadInst *loadInst = nullptr;
-  if (origDirectResultVal) {
-    // TODO: Find the try_apply's result block.
-    // Build results outside-in to nest stack allocations.
-    SILBuilder resultBuilder(std::next(SILBasicBlock::iterator(origCallInst)));
-    resultBuilder.setSILConventions(
-        SILModuleConventions::getLoweredAddressConventions(origCallInst->getModule()));
+  auto *allocInst = argBuilder.createAllocStack(callLoc, argTy);
+
+  // Instead of using resultBuilder, insert dealloc immediately after the call
+  // for stack discipline across loadable indirect results.
+  cleanupAfterCall(apply, [&](SILBasicBlock::iterator insertPt) {
+    auto cleanupBuilder = pass.getBuilder(insertPt);
+    cleanupBuilder.createDeallocStack(callLoc, allocInst);
+  });
+
+  if (oldResult && !oldResult->use_empty()) {
+    // Insert reloads immediately after the call. Get the reload insertion
+    // point after emitting dealloc to ensure the reload happens first.
+    auto reloadBuilder = pass.getBuilder(getResultInsertionPoint());
+
     // This is a formally indirect argument, but is loadable.
- loadInst = resultBuilder.createLoad(loc, allocInst, - LoadOwnershipQualifier::Unqualified); - origDirectResultVal->replaceAllUsesWith(loadInst); - pass.markDead(origDirectResultVal); + auto *loadInst = reloadBuilder.createTrivialLoadOr( + callLoc, allocInst, LoadOwnershipQualifier::Take); + oldResult->replaceAllUsesWith(loadInst); } - insertStackDeallocationAtCall(allocInst, origCallInst, loadInst); return SILValue(allocInst); } -/// Allocate storage for formally indirect results at the given call site. -/// Create a new call instruction with indirect SIL arguments. -void ApplyRewriter::convertApplyWithIndirectResults() { - assert(apply.getSubstCalleeType()->hasIndirectFormalResults()); - - auto *origCallInst = getApplyInst(); - SILFunctionConventions origFnConv = apply.getSubstCalleeConv(); - - // Gather the original direct return values. - // Canonicalize results so no user uses more than one result. - SmallVector origDirectResultValues( - origFnConv.getNumDirectSILResults()); - SmallVector nonCanonicalUses; - if (origCallInst->getType().is()) { - for (Operand *operand : origCallInst->getUses()) { - if (auto *extract = dyn_cast(operand->getUser())) - origDirectResultValues[extract->getFieldIndex()] = extract; - else - nonCanonicalUses.push_back(operand); - } - if (!nonCanonicalUses.empty()) - canonicalizeResults(origDirectResultValues, nonCanonicalUses); - } else { - // This call has a single, indirect result (convertApplyWithIndirectResults - // only handles call with at least one indirect result). - // An unused result can remain unmapped. Temporary storage will be allocated - // later when fixing up the call's uses. - assert(origDirectResultValues.size() == 1); - if (!origCallInst->use_empty()) { - assert(pass.valueStorageMap.contains(origCallInst)); - origDirectResultValues[0] = origCallInst; - } +void ApplyRewriter::rewriteApply(ArrayRef newCallArgs) { + auto *oldCall = cast(apply.getInstruction()); + + auto *newCall = argBuilder.createApply( + callLoc, apply.getCallee(), apply.getSubstitutionMap(), newCallArgs, + oldCall->getApplyOptions(), oldCall->getSpecializationInfo()); + + this->apply = FullApplySite(newCall); + + // No need to delete this apply. It either has a single address-only result + // and will be deleted at the end of the pass. Or it has multiple results and + // will be deleted with its destructure_tuple. +} + +// Replace \p tryApply with a new try_apply using \p newCallArgs. +// +// If the old result was a single address-only value, then create and return a +// fake load that takes its place in the storage map. Otherwise, return an +// invalid SILValue. +// +// Update this->apply with the new call instruction. +// +// Input (T = address-only, L=Loadable): +// +// %addr = alloc_stack $T // storage for %oldResult +// ... +// try_apply : $() -> @out T +// bbNormal(%oldResult : $T): +// +// Output (return %oldResult - ApplyRewriter final)): +// +// %addr = alloc_stack $T // storage for %oldResult +// ... 
+// try_apply(%addr) : $() -> @out T +// bbNormal(%newResult : $()): +// %oldResult = load undef +// +// Input: +// +// %addr = alloc_stack $L // unmapped temp storage +// try_apply() : $() -> @out L +// bbNormal(%oldResult : $L): // no uses +// %result = load %addr : $*L +// dealloc_stack %addr +// +// Output (return invalid - ApplyRewriter final): +// +// %addr = alloc_stack $L // unmapped temp storage +// try_apply(%addr) : $() -> @out L +// bbNormal(%oldResult : $()): // no uses +// %result = load %addr : $*L +// dealloc_stack %addr +// +// Input: +// +// %addr0 = alloc_stack $T // storage for %result0 +// ... +// %addr1 = alloc_stack // unmapped temp storage +// try_apply() : $() -> (@out T, @out L, L) +// bbNormal(%tuple : $(T, L, L)): +// %r1 = load %addr1 : $*L +// dealloc_stack %addr1 +// (%r0, %d1, %r2) = destructure_tuple %tuple : $(T, T, T) +// // no uses of %d1 +// +// Output (return invalid): +// +// %addr0 = alloc_stack $T // storage for %result0 +// ... +// %addr1 = alloc_stack // unmapped temp storage +// try_apply(%addr0, %addr1) : $() -> (@out T, @out L, L) +// bbNormal(%newResult : $L): // no uses yet +// %r1 = load %addr1 : $*L +// dealloc_stack %addr1 +// (%r0, %d1, %r2) = destructure_tuple undef : $(T, T, T) +// // no uses of %d1 +// +void ApplyRewriter::rewriteTryApply(ArrayRef newCallArgs) { + auto typeCtx = pass.function->getTypeExpansionContext(); + auto *tryApply = cast(apply.getInstruction()); + + auto *newCallInst = argBuilder.createTryApply( + callLoc, apply.getCallee(), apply.getSubstitutionMap(), newCallArgs, + tryApply->getNormalBB(), tryApply->getErrorBB(), + tryApply->getApplyOptions(), tryApply->getSpecializationInfo()); + + auto *resultArg = cast(apply.getPseudoResult()); + + auto replaceTermResult = [&](SILValue newResultVal) { + SILType resultTy = loweredCalleeConv.getSILResultType(typeCtx); + auto ownership = resultTy.isTrivial(*pass.function) + ? OwnershipKind::None + : OwnershipKind::Owned; + + resultArg->replaceAllUsesWith(newResultVal); + assert(resultArg->getIndex() == 0); + resultArg->getParent()->replacePhiArgument(0, resultTy, ownership, + resultArg->getDecl()); + }; + // Immediately delete the old try_apply (old applies hang around until + // dead code removal because they directly define values). + pass.deleter.forceDelete(tryApply); + this->apply = FullApplySite(newCallInst); + + // Handle a single opaque result value. + if (pass.valueStorageMap.contains(resultArg)) { + assert(!resultArg->getType().is()); + + // Storage was materialized by materializeIndirectResultAddress. + auto &origStorage = pass.valueStorageMap.getStorage(resultArg); + assert(origStorage.isRewritten); + (void)origStorage; + + // Rewriting try_apply with a new function type requires erasing the opaque + // block argument. Create a dummy load-copy until all uses have been + // rewritten. + LoadInst *loadArg = resultBuilder.createLoad( + callLoc, origStorage.storageAddress, LoadOwnershipQualifier::Copy); + + pass.valueStorageMap.replaceValue(resultArg, loadArg); + replaceTermResult(loadArg); + return; } + // Loadable results were loaded by materializeIndirectResultAddress. + // Temporarily redirect all uses to Undef. They will be fixed in + // replaceDirectResults(). + replaceTermResult( + SILUndef::get(resultArg->getType().getAddressType(), *pass.function)); +} - // Prepare to emit a new call instruction. 
- SILLocation loc = origCallInst->getLoc(); - SILBuilder callBuilder(origCallInst); - callBuilder.setSILConventions( - SILModuleConventions::getLoweredAddressConventions(origCallInst->getModule())); - - // The new call instruction's SIL calling convention. - SILFunctionConventions loweredCalleeConv( - apply.getSubstCalleeType(), - SILModuleConventions::getLoweredAddressConventions(origCallInst->getModule())); - - // The new call instruction's SIL argument list. - SmallVector newCallArgs(loweredCalleeConv.getNumSILArguments()); - - // Map the original result indices to new result indices. - SmallVector newDirectResultIndices( - origFnConv.getNumDirectSILResults()); - // Indices used to populate newDirectResultIndices. - unsigned oldDirectResultIdx = 0, newDirectResultIdx = 0; - - // The index of the next indirect result argument. - unsigned newResultArgIdx = - loweredCalleeConv.getSILArgIndexOfFirstIndirectResult(); - - // Visit each result. Redirect results that are now indirect by calling - // materializeIndirectResultAddress. Result that remain direct will be - // redirected later. Populate newCallArgs and newDirectResultIndices. - for_each( - apply.getSubstCalleeType()->getResults(), - origDirectResultValues, - [&](SILResultInfo resultInfo, SingleValueInstruction *origDirectResultVal) { - // Assume that all original results are direct in SIL. - assert(!origFnConv.isSILIndirect(resultInfo)); - - if (loweredCalleeConv.isSILIndirect(resultInfo)) { - SILValue indirectResultAddr = materializeIndirectResultAddress( - origDirectResultVal, - loweredCalleeConv.getSILType( - resultInfo, callBuilder.getTypeExpansionContext())); - // Record the new indirect call argument. - newCallArgs[newResultArgIdx++] = indirectResultAddr; - // Leave a placeholder for indirect results. - newDirectResultIndices[oldDirectResultIdx++] = ~0; - } else { - // Record the new direct result, and advance the direct result indices. - newDirectResultIndices[oldDirectResultIdx++] = newDirectResultIdx++; - } - // replaceAllUses will be called later to handle direct results that - // remain direct results of the new call instruction. - }); +// Replace all formally direct results by rewriting the destructure_tuple. +// +// Input: +// +// %addr0 = alloc_stack $T // storage for %r0 +// ... +// %addr1 = alloc_stack // unmapped temp storage +// %newPseudoResult = apply(%addr0, %addr1) : $() -> (@out T, @out L, L) +// %tuple = apply() : $() -> (@out T, @out L, L) +// %r1 = load %addr1 : $*L +// dealloc_stack %addr1 +// (%r0, %d1, %r2) = destructure_tuple %tuple : $(T, T, T) +// // no uses of %d1 +// +// Output: +// +// %addr0 = alloc_stack $T // storage for %r0 +// ... +// %addr1 = alloc_stack // unmapped temp storage +// %r2 = apply(%addr0, %addr1) : $() -> (@out T, @out L, L) +// %tuple = apply() : $() -> (@out T, @out L, L) +// %r1 = load %addr1 : $*L +// dealloc_stack %addr1 +// (%r0, %d1, %d2) = destructure_tuple %tuple : $(T, T, T) +// // no uses of %d1, %d2 +// +void ApplyRewriter::replaceDirectResults(DestructureTupleInst *oldDestructure) { + SILValue newPseudoResult = apply.getPseudoResult(); - // Append the existing call arguments to the SIL argument list. They were - // already lowered to addresses by rewriteIncomingArgument. 
- assert(newResultArgIdx == loweredCalleeConv.getSILArgIndexOfFirstParam()); - unsigned origArgIdx = apply.getSubstCalleeConv().getSILArgIndexOfFirstParam(); - for (unsigned endIdx = newCallArgs.size(); newResultArgIdx < endIdx; - ++newResultArgIdx, ++origArgIdx) { - newCallArgs[newResultArgIdx] = apply.getArgument(origArgIdx); + DestructureTupleInst *newDestructure = nullptr; + if (loweredCalleeConv.getNumDirectSILResults() > 1) { + newDestructure = + resultBuilder.createDestructureTuple(callLoc, newPseudoResult); } + unsigned newDirectResultIdx = 0; + + auto visitOldCallResult = [&](SILValue result, SILResultInfo resultInfo) { + assert(!opaqueCalleeConv.isSILIndirect(resultInfo) + && "canonical call results are always direct"); - // Create a new apply with indirect result operands. - ApplyInst *newCallInst; - switch (origCallInst->getKind()) { - case SILInstructionKind::ApplyInst: - newCallInst = callBuilder.createApply( - loc, apply.getCallee(), apply.getSubstitutionMap(), newCallArgs, - cast(origCallInst)->getApplyOptions()); - break; - case SILInstructionKind::TryApplyInst: - // TODO: insert dealloc in the catch block. - llvm_unreachable("not implemented for this instruction!"); - case SILInstructionKind::PartialApplyInst: - // Partial apply does not have formally indirect results. - default: - llvm_unreachable("not implemented for this instruction!"); - } - - // Replace all unmapped uses of the original call with uses of the new call. - // - // TODO: handle bbargs from try_apply. - SILBuilder resultBuilder( - std::next(SILBasicBlock::iterator(origCallInst))); - resultBuilder.setSILConventions( - SILModuleConventions::getLoweredAddressConventions(apply.getModule())); - - SmallVector origUses(origCallInst->getUses()); - for (Operand *operand : origUses) { - auto *extractInst = dyn_cast(operand->getUser()); - if (!extractInst) { - assert(origFnConv.getNumDirectSILResults() == 1); - assert(pass.valueStorageMap.contains(origCallInst)); - continue; - } - unsigned origResultIdx = extractInst->getFieldIndex(); - auto resultInfo = origFnConv.getResults()[origResultIdx]; - - if (extractInst->getType().isAddressOnly(*pass.F)) { - // Uses of indirect results will be rewritten by AddressOnlyUseRewriter. - assert(loweredCalleeConv.isSILIndirect(resultInfo)); - assert(pass.valueStorageMap.contains(extractInst)); - if (extractInst->use_empty()) - pass.markDead(extractInst); - continue; - } if (loweredCalleeConv.isSILIndirect(resultInfo)) { + if (result->getType().isAddressOnly(*pass.function)) { + // Mark the extract as rewritten now so we don't attempt to convert the + // call again. + pass.valueStorageMap.getStorage(result).markRewritten(); + return true; + } // This loadable indirect use should already be redirected to a load from // the argument storage and marked dead. - assert(extractInst->use_empty()); - continue; - } - // Either the new call instruction has only a single direct result, or we - // map the original tuple field to the new tuple field. - SILValue newValue = newCallInst; - if (loweredCalleeConv.getNumDirectSILResults() > 1) { - assert(newValue->getType().is()); - newValue = resultBuilder.createTupleExtract( - extractInst->getLoc(), newValue, - newDirectResultIndices[origResultIdx]); + assert(result->use_empty()); + return true; } - extractInst->replaceAllUsesWith(newValue); - extractInst->eraseFromParent(); + auto newResult = newDestructure + ? 
newDestructure->getResult(newDirectResultIdx) + : newPseudoResult; + ++newDirectResultIdx; + result->replaceAllUsesWith(newResult); + return true; + }; + visitCallMultiResults(oldDestructure, opaqueCalleeConv, visitOldCallResult); + assert(newDirectResultIdx == loweredCalleeConv.getNumDirectSILResults()); + + // If the oldDestructure produces any address-only results, then it will still + // have uses, those results are mapped to storage, and the destructure will be + // force-deleted later during deleteRewrittenInstructions. But if there are no + // address-only results, then all of the old destructure's uses will already + // be replaced. It must be force deleted now to avoid deleting it later as + // regular dead code and emitting a bad lifetime fixup for its owned operand. + if (isInstructionTriviallyDead(oldDestructure)) { + pass.deleter.forceDelete(oldDestructure); } - if (!pass.valueStorageMap.contains(origCallInst)) - pass.markDead(origCallInst); } //===----------------------------------------------------------------------===// -// ReturnRewriter - rewrite return instructions for indirect results. +// ReturnRewriter +// +// Rewrite return instructions for indirect results. //===----------------------------------------------------------------------===// class ReturnRewriter { AddressLoweringState &pass; + SILFunctionConventions opaqueFnConv; public: - ReturnRewriter(AddressLoweringState &pass) : pass(pass) {} + ReturnRewriter(AddressLoweringState &pass) + : pass(pass), opaqueFnConv(pass.function->getConventions()) {} void rewriteReturns(); protected: void rewriteReturn(ReturnInst *returnInst); + + void rewriteElement(SILValue oldResult, SILArgument *newResultArg, + SILBuilder &returnBuilder); }; void ReturnRewriter::rewriteReturns() { - for (TermInst *termInst : pass.returnInsts) { - // TODO: handle throws - rewriteReturn(cast(termInst)); + for (SILInstruction *termInst : pass.exitingInsts) { + if (auto *returnInst = dyn_cast(termInst)) + rewriteReturn(returnInst); + else + assert(isa(termInst)); } } void ReturnRewriter::rewriteReturn(ReturnInst *returnInst) { + auto &astCtx = pass.getModule()->getASTContext(); + auto typeCtx = pass.function->getTypeExpansionContext(); + + // Find the point before allocated storage has been deallocated. auto insertPt = SILBasicBlock::iterator(returnInst); - auto bbStart = returnInst->getParent()->begin(); - while (insertPt != bbStart) { - --insertPt; - if (!isa(*insertPt)) + for (auto bbStart = returnInst->getParent()->begin(); + insertPt != bbStart; --insertPt) { + if (!isa(*std::prev(insertPt))) break; } - SILBuilder B(insertPt); - B.setSILConventions( - SILModuleConventions::getLoweredAddressConventions(returnInst->getModule())); + auto returnBuilder = pass.getBuilder(insertPt); // Gather direct function results. 
- unsigned numOrigDirectResults = - pass.F->getConventions().getNumDirectSILResults(); - SmallVector origDirectResultValues; - if (numOrigDirectResults == 1) - origDirectResultValues.push_back(returnInst->getOperand()); + unsigned numOldResults = opaqueFnConv.getNumDirectSILResults(); + SmallVector oldResults; + TupleInst *pseudoReturnVal = nullptr; + if (numOldResults == 1) + oldResults.push_back(returnInst->getOperand()); else { - auto *tupleInst = cast(returnInst->getOperand()); - origDirectResultValues.append(tupleInst->getElements().begin(), - tupleInst->getElements().end()); - assert(origDirectResultValues.size() == numOrigDirectResults); + pseudoReturnVal = cast(returnInst->getOperand()); + oldResults.append(pseudoReturnVal->getElements().begin(), + pseudoReturnVal->getElements().end()); + assert(oldResults.size() == numOldResults); } - SILFunctionConventions origFnConv(pass.F->getConventions()); - (void)origFnConv; - - // Convert each result. SmallVector newDirectResults; unsigned newResultArgIdx = pass.loweredFnConv.getSILArgIndexOfFirstIndirectResult(); + // Initialize the indirect result arguments and populate newDirectResults. for_each( - pass.F->getLoweredFunctionType()->getResults(), origDirectResultValues, - [&](SILResultInfo resultInfo, SILValue origDirectResultVal) { - // Assume that all original results are direct in SIL. - assert(!origFnConv.isSILIndirect(resultInfo)); - - if (pass.loweredFnConv.isSILIndirect(resultInfo)) { - assert(newResultArgIdx - < pass.loweredFnConv.getSILArgIndexOfFirstParam()); - - SILArgument *resultArg = B.getFunction().getArgument(newResultArgIdx); - SILType resultTy = origDirectResultVal->getType(); - if (resultTy.isAddressOnly(*pass.F)) { - ValueStorage &storage = - pass.valueStorageMap.getStorage(origDirectResultVal); - assert(storage.isRewritten()); - if (!storage.isProjection()) { - // Copy the result from local storage into the result argument. - SILValue resultAddr = storage.storageAddress; - B.createCopyAddr(returnInst->getLoc(), resultAddr, resultArg, - IsTake, IsInitialization); - } - } else { - // Store the result into the result argument. - B.createStore(returnInst->getLoc(), origDirectResultVal, resultArg, - StoreOwnershipQualifier::Unqualified); - } - ++newResultArgIdx; - } else { - // Record the direct result for populating the result tuple. - newDirectResults.push_back(origDirectResultVal); - } - }); + pass.function->getLoweredFunctionType()->getResults(), oldResults, + [&](SILResultInfo resultInfo, SILValue oldResult) { + // Assume that all original results are direct in SIL. + assert(!opaqueFnConv.isSILIndirect(resultInfo)); + if (!pass.loweredFnConv.isSILIndirect(resultInfo)) { + newDirectResults.push_back(oldResult); + return; + } + SILArgument *newResultArg = + pass.function->getArgument(newResultArgIdx); + rewriteElement(oldResult, newResultArg, returnBuilder); + ++newResultArgIdx; + }); + assert(newDirectResults.size() == pass.loweredFnConv.getNumDirectSILResults()); + assert(newResultArgIdx == pass.loweredFnConv.getSILArgIndexOfFirstParam()); + + // Generate a new return_inst for the new direct results. 
   SILValue newReturnVal;
   if (newDirectResults.empty()) {
-    SILType emptyTy = SILType::getPrimitiveObjectType(
-        B.getModule().getASTContext().TheEmptyTupleType);
-    newReturnVal = B.createTuple(returnInst->getLoc(), emptyTy, {});
+    SILType emptyTy = SILType::getPrimitiveObjectType(astCtx.TheEmptyTupleType);
+    newReturnVal = returnBuilder.createTuple(pass.genLoc(), emptyTy, {});
   } else if (newDirectResults.size() == 1) {
     newReturnVal = newDirectResults[0];
   } else {
-    newReturnVal = B.createTuple(
-        returnInst->getLoc(),
-        pass.loweredFnConv.getSILResultType(B.getTypeExpansionContext()),
-        newDirectResults);
+    newReturnVal = returnBuilder.createTuple(
+        pass.genLoc(), pass.loweredFnConv.getSILResultType(typeCtx),
+        newDirectResults);
   }
+  // Rewrite the returned value.
   SILValue origFullResult = returnInst->getOperand();
+  assert(isPseudoReturnValue(origFullResult) == (pseudoReturnVal != nullptr));
+
   returnInst->setOperand(newReturnVal);
 
-  if (auto *fullResultInst = origFullResult->getDefiningInstruction()) {
-    if (!fullResultInst->hasUsesOfAnyResult())
-      pass.markDead(fullResultInst);
+  // A pseudo return value is not deleted during deleteRewrittenInstructions
+  // because it is not mapped to ValueStorage. Delete it now since its values
+  // are all consumed by newReturnVal.
+  if (pseudoReturnVal) {
+    pass.deleter.forceDelete(pseudoReturnVal);
+  }
+}
+
+void ReturnRewriter::rewriteElement(SILValue oldResult,
+                                    SILArgument *newResultArg,
+                                    SILBuilder &returnBuilder) {
+  SILType resultTy = oldResult->getType();
+  if (resultTy.isAddressOnly(*pass.function)) {
+    ValueStorage &storage = pass.valueStorageMap.getStorage(oldResult);
+    assert(storage.isRewritten);
+    SILValue resultAddr = storage.storageAddress;
+    if (resultAddr != newResultArg) {
+      // Copy the result from local storage into the result argument.
+      returnBuilder.createCopyAddr(pass.genLoc(), resultAddr, newResultArg,
+                                   IsTake, IsInitialization);
+    }
+  } else {
+    // Store the result into the result argument.
+    returnBuilder.createTrivialStoreOr(pass.genLoc(), oldResult, newResultArg,
+                                       StoreOwnershipQualifier::Init);
+  }
 }
 
 //===----------------------------------------------------------------------===//
-// AddressOnlyUseRewriter - rewrite opaque value uses.
+// UseRewriter
+//
+// Rewrite opaque value uses in forward order--uses are rewritten before defs.
//===----------------------------------------------------------------------===// namespace { -class AddressOnlyUseRewriter - : SILInstructionVisitor { - friend SILVisitorBase; - friend SILInstructionVisitor; +class UseRewriter : SILInstructionVisitor { + friend SILVisitorBase; + friend SILInstructionVisitor; AddressLoweringState &pass; - SILBuilder B; + SILBuilder builder; AddressMaterialization addrMat; - Operand *currOper; + Operand *use = nullptr; + + explicit UseRewriter(AddressLoweringState &pass, Operand *use) + : pass(pass), builder(pass.getBuilder(use->getUser()->getIterator())), + addrMat(pass, builder), use(use) {} public: - explicit AddressOnlyUseRewriter(AddressLoweringState &pass) - : pass(pass), B(*pass.F), addrMat(pass, B) { - B.setSILConventions( - SILModuleConventions::getLoweredAddressConventions(pass.F->getModule())); - } + static void rewriteUse(Operand *use, AddressLoweringState &pass) { + // Special handling for the broken opened archetypes representation in which + // a single result represents both a value of the opened type and the + // metatype itself :/ + if (use->isTypeDependent()) + return; - void visitOperand(Operand *operand) { - currOper = operand; - visit(operand->getUser()); + UseRewriter(pass, use).visit(use->getUser()); } protected: + // If rewriting a use also rewrites the value defined by the user, then mark + // the defined value as rewritten. The defined value will not be revisited by + // DefRewriter. void markRewritten(SILValue oldValue, SILValue addr) { auto &storage = pass.valueStorageMap.getStorage(oldValue); + // getReusedStorageOperand() ensures that oldValue does not already have + // separate storage. So there's no need to delete its alloc_stack. + assert(!storage.storageAddress || storage.storageAddress == addr); storage.storageAddress = addr; storage.markRewritten(); } - void beforeVisit(SILInstruction *I) { - LLVM_DEBUG(llvm::dbgs() << " REWRITE USE "; I->dump()); - - B.setInsertionPoint(I); - B.setCurrentDebugScope(I->getDebugScope()); + void beforeVisit(SILInstruction *inst) { + LLVM_DEBUG(llvm::dbgs() << "REWRITE USE "; inst->dump()); } - void visitSILInstruction(SILInstruction *I) { - LLVM_DEBUG(I->dump()); - llvm_unreachable("Unimplemented?!"); + void visitSILInstruction(SILInstruction *inst) { + inst->dump(); + llvm::report_fatal_error("^^^ Unimplemented opaque value use."); } + // Opaque call argument. void visitApplyInst(ApplyInst *applyInst) { - ApplyRewriter(applyInst, pass).rewriteIndirectParameter(currOper); + CallArgRewriter(applyInst, pass).rewriteIndirectArgument(use); + } + + void visitAssignInst(AssignInst *assignInst); + + void visitBeginBorrowInst(BeginBorrowInst *borrow); + + void visitEndBorrowInst(EndBorrowInst *end) {} + + void visitBranchInst(BranchInst *) { + pass.getPhiRewriter().materializeOperand(use); + + use->set(SILUndef::get(use->get()->getType(), *pass.function)); + } + + // Opaque checked cast source. + void visitCheckedCastValueBranchInst( + CheckedCastValueBranchInst *checkedBranchInst) { + // FIXME: Unimplemented + llvm::report_fatal_error("Unimplemented CheckCastValueBranch use."); } + // Copy from an opaque source operand. void visitCopyValueInst(CopyValueInst *copyInst) { - ValueStorage &storage = pass.valueStorageMap.getStorage(copyInst); - // Fold a copy into a store. 
- if (storage.isProjection() - && isa(storage.getComposedOperand()->getUser())) { - return; - } SILValue srcVal = copyInst->getOperand(); SILValue srcAddr = pass.valueStorageMap.getStorage(srcVal).storageAddress; + SILValue destAddr = addrMat.materializeAddress(copyInst); - B.createCopyAddr(copyInst->getLoc(), srcAddr, destAddr, IsNotTake, - IsInitialization); + if (destAddr != srcAddr) { + builder.createCopyAddr(copyInst->getLoc(), srcAddr, destAddr, IsNotTake, + IsInitialization); + } markRewritten(copyInst, destAddr); } - + void visitDebugValueInst(DebugValueInst *debugInst) { SILValue srcVal = debugInst->getOperand(); SILValue srcAddr = pass.valueStorageMap.getStorage(srcVal).storageAddress; - B.createDebugValueAddr(debugInst->getLoc(), srcAddr, - *debugInst->getVarInfo()); - pass.markDead(debugInst); + builder.createDebugValueAddr(debugInst->getLoc(), srcAddr, + *debugInst->getVarInfo()); + pass.deleter.forceDelete(debugInst); + } + + void visitDeinitExistentialValueInst( + DeinitExistentialValueInst *deinitExistential) { + // FIXME: Unimplemented + llvm::report_fatal_error("Unimplemented DeinitExsitentialValue use."); } - - void visitDestroyValueInst(DestroyValueInst *destroyInst) { - SILValue srcVal = destroyInst->getOperand(); + + void visitDestroyValueInst(DestroyValueInst *destroy) { + SILValue srcVal = destroy->getOperand(); SILValue srcAddr = pass.valueStorageMap.getStorage(srcVal).storageAddress; - B.createDestroyAddr(destroyInst->getLoc(), srcAddr); - pass.markDead(destroyInst); + builder.createDestroyAddr(destroy->getLoc(), srcAddr); + pass.deleter.forceDelete(destroy); + } + + void rewriteDestructure(SILInstruction *destructure); + + void visitDestructureStructInst(DestructureStructInst *destructure) { + rewriteDestructure(destructure); } - // Handle EnumInst on the def side to handle both opaque and - // loadable operands. + void visitDestructureTupleInst(DestructureTupleInst *destructure) { + rewriteDestructure(destructure); + } + + // Enums are rewritten on the def side to handle both address-only and + // loadable payloads. An address-only payload implies an address-only Enum. void visitEnumInst(EnumInst *enumInst) {} - // Handle InitExistentialValue on the def side to handle both opaque and - // loadable operands. + // Handle InitExistentialValue on the def side because loadable values must + // also be copied into existential storage. void visitInitExistentialValueInst(InitExistentialValueInst *initExistential) {} + // Opening an opaque existential. Rewrite the opened existentials here on + // the use-side because it may produce either loadable or address-only + // types. + void visitOpenExistentialValueInst(OpenExistentialValueInst *openExistential); + + void visitOpenExistentialBoxValueInst( + OpenExistentialBoxValueInst *openExistentialBox) { + // FIXME: Unimplemented + llvm::report_fatal_error("Unimplemented OpenExistentialBox use."); + } + void visitReturnInst(ReturnInst *returnInst) { - // Returns are rewritten for any function with indirect results after opaque - // value rewriting. + // Returns are rewritten for any function with indirect results after + // opaque value rewriting. + } + + void visitSelectValueInst(SelectValueInst *selectInst) { + // FIXME: Unimplemented + llvm::report_fatal_error("Unimplemented SelectValue use."); } - void visitStoreInst(StoreInst *storeInst) { - SILValue srcVal = storeInst->getSrc(); - assert(currOper->get() == srcVal); + // Opaque enum operand to a switch_enum. 
+ void visitSwitchEnumInst(SwitchEnumInst *SEI); - ValueStorage &storage = pass.valueStorageMap.getStorage(srcVal); - SILValue srcAddr = storage.storageAddress; + void rewriteStore(SILValue srcVal, SILValue destAddr, + IsInitialization_t isInit); - IsTake_t isTakeFlag = IsTake; - assert(storeInst->getOwnershipQualifier() - == StoreOwnershipQualifier::Unqualified); + void visitStoreInst(StoreInst *storeInst); - if (storage.isProjection()) { - assert(!srcAddr); - auto *copyInst = cast(srcVal); - ValueStorage &srcStorage = - pass.valueStorageMap.getStorage(copyInst->getOperand()); - assert(!srcStorage.isProjection()); - srcAddr = srcStorage.storageAddress; - isTakeFlag = IsNotTake; - } - // Bitwise copy the value. Two locations now share ownership. This is - // modeled as a take-init. - B.createCopyAddr(storeInst->getLoc(), srcAddr, storeInst->getDest(), - isTakeFlag, IsInitialization); - pass.markDead(storeInst); + /// Emit end_borrows for a an incomplete BorrowedValue with only nonlifetime + /// ending uses. + void emitEndBorrows(SILValue value); + + void emitExtract(SingleValueInstruction *extractInst); + + // Extract from an opaque struct. + void visitStructExtractInst(StructExtractInst *extractInst); + + // Structs are rewritten on the def-side, where both the address-only and + // loadable elements that compose a struct can be handled. An address-only + // member implies an address-only Struct. + void visitStructInst(StructInst *structInst) {} + + // Opaque call argument. + void visitTryApplyInst(TryApplyInst *tryApplyInst) { + CallArgRewriter(tryApplyInst, pass).rewriteIndirectArgument(use); } - void visitTupleInst(TupleInst *tupleInst) { - // Tuples are rewritten on the def-side, where both direct and indirect - // elements are composed. + // Tuples are rewritten on the def-side, where both the address-only and + // loadable elements that compose a tuple can be handled. An address-only + // element implies an address-only Tuple. + void visitTupleInst(TupleInst *tupleInst) {} + + // Extract from an opaque tuple. + void visitTupleExtractInst(TupleExtractInst *extractInst); + + void visitUncheckedBitwiseCast(UncheckedBitwiseCastInst *uncheckedCastInst) { + // FIXME: Unimplemented + llvm::report_fatal_error("Unimplemented UncheckedBitwiseCast use."); } - void visitTupleExtractInst(TupleExtractInst *extractInst) { - // Apply results are rewritten when the result definition is visited. - if (ApplySite::isa(currOper->get())) - return; + void visitUncheckedEnumDataInst(UncheckedEnumDataInst *enumDataInst); - // TODO: generate tuple_element_addr. - // generate copy_addr if we can't project. 
- llvm_unreachable("unimplemented."); + void visitUnconditionalCheckedCastValueInst( + UnconditionalCheckedCastValueInst *checkedCastInst) { + + // FIXME: Unimplemented + llvm::report_fatal_error("Unimplemented UnconditionalCheckedCast use."); } }; } // end anonymous namespace +void UseRewriter::rewriteDestructure(SILInstruction *destructure) { + for (auto result : destructure->getResults()) { + SILValue extractAddr = addrMat.materializeDefProjection(result); + if (result->getType().isAddressOnly(*pass.function)) { + assert(use == getProjectedDefOperand(result)); + markRewritten(result, extractAddr); + } else { + assert(!pass.valueStorageMap.contains(result)); + SILValue loadElement = builder.createTrivialLoadOr( + destructure->getLoc(), extractAddr, LoadOwnershipQualifier::Take); + + result->replaceAllUsesWith(loadElement); + } + } +} + +void UseRewriter::visitBeginBorrowInst(BeginBorrowInst *borrow) { + assert(use == getProjectedDefOperand(borrow)); + + // Mark the value as rewritten and use the operand's storage. + auto address = pass.valueStorageMap.getStorage(use->get()).storageAddress; + markRewritten(borrow, address); + + // Borrows are irrelevant unless they are marked lexical. + if (borrow->isLexical()) { + if (auto *allocStack = dyn_cast(address)) { + allocStack->setIsLexical(); + return; + } + // Function arguments are inherently lexical. + if (isa(address)) + return; + + SWIFT_ASSERT_ONLY(address->dump()); + llvm_unreachable("^^^ unknown lexical address producer"); + } +} + +// Opening an opaque existential. Rewrite the opened existentials here on +// the use-side because it may produce either loadable or address-only +// types. +void UseRewriter::visitOpenExistentialValueInst( + OpenExistentialValueInst *openExistential) { + assert(use == getReusedStorageOperand(openExistential)); + SILValue srcAddr = pass.valueStorageMap.getStorage(use->get()).storageAddress; + + // Replace the module's openedArchetypesDef + pass.getModule()->willDeleteInstruction(openExistential); + + // Mutable access is always by address. + auto *openAddr = builder.createOpenExistentialAddr( + openExistential->getLoc(), srcAddr, + openExistential->getType().getAddressType(), + OpenedExistentialAccess::Immutable); + + SmallVector typeUses; + for (Operand *use : openExistential->getUses()) { + if (use->isTypeDependent()) { + typeUses.push_back(use); + } + } + for (Operand *use : typeUses) { + use->set(openAddr); + } + markRewritten(openExistential, openAddr); +} + +void UseRewriter::rewriteStore(SILValue srcVal, SILValue destAddr, + IsInitialization_t isInit) { + assert(use->get() == srcVal); + auto *storeInst = use->getUser(); + auto loc = storeInst->getLoc(); + + ValueStorage &storage = pass.valueStorageMap.getStorage(srcVal); + SILValue srcAddr = storage.storageAddress; + + IsTake_t isTake = IsTake; + if (auto *copy = dyn_cast(srcVal)) { + if (storage.isDefProjection) { + SILValue copySrcAddr = + pass.valueStorageMap.getStorage(copy->getOperand()).storageAddress; + assert(srcAddr == copySrcAddr && "folded copy should borrow storage"); + (void)copySrcAddr; + isTake = IsNotTake; + } + } + builder.createCopyAddr(loc, srcAddr, destAddr, isTake, isInit); + pass.deleter.forceDelete(storeInst); +} + +// If the source is a copy that projects storage from its def, then the copy +// semantics are handled here (by omitting the [take] flag from copy_addr). 
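+//
+// As an illustrative sketch only (the value names and type T below are
+// hypothetical, not taken from a test case), a folded copy such as
+//
+//   %copy = copy_value %val : $T
+//   store %copy to [init] %destAddr : $*T
+//
+// is rewritten as a non-taking copy out of the storage shared with %val:
+//
+//   copy_addr %sharedAddr to [initialization] %destAddr : $*T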
+void UseRewriter::visitStoreInst(StoreInst *storeInst) { + IsInitialization_t isInit; + auto qualifier = storeInst->getOwnershipQualifier(); + if (qualifier == StoreOwnershipQualifier::Init) + isInit = IsInitialization; + else { + assert(qualifier == StoreOwnershipQualifier::Assign); + isInit = IsNotInitialization; + } + rewriteStore(storeInst->getSrc(), storeInst->getDest(), isInit); +} + +void UseRewriter::visitAssignInst(AssignInst *assignInst) { + rewriteStore(assignInst->getSrc(), assignInst->getDest(), + IsNotInitialization); +} + +/// Emit end_borrows for a an incomplete BorrowedValue with only nonlifetime +/// ending uses. This function inserts end_borrows on the lifetime boundary. +void UseRewriter::emitEndBorrows(SILValue value) { + assert(BorrowedValue(value)); + + // Place end_borrows that cover the load_borrow uses. It is not necessary to + // cover the outer borrow scope of the extract's operand. If a lexical + // borrow scope exists for the outer value, which is now in memory, then + // its alloc_stack will be marked lexical, and the in-memory values will be + // kept alive until the end of the outer scope. + SmallVector usePoints; + findInnerTransitiveGuaranteedUses(value, &usePoints); + + SmallVector discoveredBlocks; + PrunedLiveness liveness(&discoveredBlocks); + for (auto *use : usePoints) { + assert(!use->isLifetimeEnding()); + liveness.updateForUse(use->getUser(), /*lifetimeEnding*/ false); + } + PrunedLivenessBoundary guaranteedBoundary; + guaranteedBoundary.compute(liveness); + guaranteedBoundary.visitInsertionPoints( + [&](SILBasicBlock::iterator insertPt) { + pass.getBuilder(insertPt).createEndBorrow(pass.genLoc(), value); + }); +} + +// Extract from an opaque struct or tuple. +void UseRewriter::emitExtract(SingleValueInstruction *extractInst) { + SILValue extractAddr = addrMat.materializeDefProjection(extractInst); + + if (extractInst->getType().isAddressOnly(*pass.function)) { + assert(use == getProjectedDefOperand(extractInst)); + markRewritten(extractInst, extractAddr); + return; + } + auto replaceUsesWithLoad = [&](SingleValueInstruction *oldInst, + SILValue load) { + oldInst->replaceAllUsesWith(load); + pass.deleter.forceDelete(oldInst); + }; + auto loc = extractInst->getLoc(); + if (extractInst->getType().isTrivial(*pass.function)) { + auto *load = + builder.createLoad(loc, extractAddr, LoadOwnershipQualifier::Trivial); + replaceUsesWithLoad(extractInst, load); + return; + } + if (Operand *use = extractInst->getSingleUse()) { + if (auto *copy = dyn_cast(use->getUser())) { + auto *load = + builder.createLoad(loc, extractAddr, LoadOwnershipQualifier::Copy); + replaceUsesWithLoad(copy, load); + return; + } + } + SILValue loadElement = + builder.emitLoadBorrowOperation(extractInst->getLoc(), extractAddr); + replaceUsesWithLoad(extractInst, loadElement); + emitEndBorrows(loadElement); +} + +void UseRewriter::visitStructExtractInst(StructExtractInst *extractInst) { + emitExtract(extractInst); +} + +// Extract from an opaque tuple. +void UseRewriter::visitTupleExtractInst(TupleExtractInst *extractInst) { + emitExtract(extractInst); +} + +// Rewrite switch_enum to switch_enum_addr. All associated block arguments are +// removed. 
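+//
+// Illustrative sketch only (hypothetical enum type and block names): a case
+// with an address-only payload,
+//
+//   switch_enum %e : $Optional<T>, case #Optional.some!enumelt: bbSome, ...
+// bbSome(%payload : @owned $T):
+//
+// becomes, roughly,
+//
+//   switch_enum_addr %eAddr : $*Optional<T>, case #Optional.some!enumelt: bbSome, ...
+// bbSome:
+//   %a = unchecked_take_enum_data_addr %eAddr : $*Optional<T>, #Optional.some!enumelt
+//   %payload = load [take] %a : $*T // dummy load remapped to storage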
+void UseRewriter::visitSwitchEnumInst(SwitchEnumInst *switchEnum) {
+  SILValue enumVal = switchEnum->getOperand();
+  assert(use->get() == enumVal);
+
+  SILValue enumAddr = pass.getMaterializedAddress(enumVal);
+  auto loc = switchEnum->getLoc();
+  auto rewriteCase = [&](EnumElementDecl *caseDecl, SILBasicBlock *caseBB) {
+    // Nothing to do for unused case payloads.
+    if (caseBB->getArguments().size() == 0)
+      return;
+
+    assert(caseBB->getArguments().size() == 1);
+    SILArgument *caseArg = caseBB->getArguments()[0];
+
+    assert(&switchEnum->getOperandRef(0) == getReusedStorageOperand(caseArg));
+    assert(caseDecl->hasAssociatedValues() && "caseBB has a payload argument");
+
+    SILBuilder caseBuilder = pass.getBuilder(caseBB->begin());
+    auto *caseAddr =
+        caseBuilder.createUncheckedTakeEnumDataAddr(loc, enumAddr, caseDecl);
+    auto *caseLoad = caseBuilder.createTrivialLoadOr(
+        switchEnum->getLoc(), caseAddr, LoadOwnershipQualifier::Take);
+    caseArg->replaceAllUsesWith(caseLoad);
+    if (caseArg->getType().isAddressOnly(*pass.function)) {
+      // Remap caseArg to the new dummy load which will be deleted during
+      // deleteRewrittenInstructions.
+      pass.valueStorageMap.replaceValue(caseArg, caseLoad);
+      markRewritten(caseLoad, caseAddr);
+    }
+    caseBB->eraseArgument(0);
+  };
+
+  // TODO: The case list does not change. We should be able to avoid copying.
+  SmallVector<std::pair<EnumElementDecl *, SILBasicBlock *>, 8> cases;
+  SmallVector<ProfileCounter, 8> caseCounters;
+
+  // Collect switch cases for rewriting and remove block arguments.
+  for (unsigned caseIdx : range(switchEnum->getNumCases())) {
+    auto caseDeclAndBB = switchEnum->getCase(caseIdx);
+    EnumElementDecl *caseDecl = caseDeclAndBB.first;
+    SILBasicBlock *caseBB = caseDeclAndBB.second;
+
+    cases.push_back(caseDeclAndBB);
+    caseCounters.push_back(switchEnum->getCaseCount(caseIdx));
+
+    rewriteCase(caseDecl, caseBB);
+  }
+  SILBasicBlock *defaultBB = nullptr;
+  auto defaultCounter = ProfileCounter();
+  if (switchEnum->hasDefault()) {
+    defaultBB = switchEnum->getDefaultBB();
+    defaultCounter = switchEnum->getDefaultCount();
+    if (auto defaultDecl = switchEnum->getUniqueCaseForDefault()) {
+      rewriteCase(defaultDecl.get(), defaultBB);
+    }
+  }
+  auto builder = pass.getTermBuilder(switchEnum);
+  pass.deleter.forceDelete(switchEnum);
+  builder.createSwitchEnumAddr(loc, enumAddr, defaultBB, cases,
+                               ArrayRef<ProfileCounter>(caseCounters),
+                               defaultCounter);
+}
+
+void UseRewriter::visitUncheckedEnumDataInst(
+    UncheckedEnumDataInst *enumDataInst) {
+  assert(use == getReusedStorageOperand(enumDataInst));
+
+  assert(enumDataInst->getOwnershipKind() != OwnershipKind::Guaranteed);
+
+  // unchecked_enum_data could be a def-projection. It is handled as a
+  // separate allocation to make it clear that it can't be rematerialized.
+  auto srcAddr = pass.valueStorageMap.getStorage(use->get()).storageAddress;
+
+  auto loc = enumDataInst->getLoc();
+  auto elt = enumDataInst->getElement();
+  auto destTy = enumDataInst->getType().getAddressType();
+  auto *enumAddrInst =
+      builder.createUncheckedTakeEnumDataAddr(loc, srcAddr, elt, destTy);
+
+  markRewritten(enumDataInst, enumAddrInst);
+}
+
 //===----------------------------------------------------------------------===//
-// AddressOnlyDefRewriter - rewrite opaque value definitions.
+// DefRewriter
+//
+// Rewrite opaque value definitions in forward order--defs are after uses.
//===----------------------------------------------------------------------===// namespace { -class AddressOnlyDefRewriter - : SILInstructionVisitor { - friend SILVisitorBase; - friend SILInstructionVisitor; +class DefRewriter : SILInstructionVisitor { + friend SILVisitorBase; + friend SILInstructionVisitor; AddressLoweringState &pass; - SILBuilder B; + SILBuilder builder; AddressMaterialization addrMat; - ValueStorage *storage = nullptr; + ValueStorage &storage; -public: - explicit AddressOnlyDefRewriter(AddressLoweringState &pass) - : pass(pass), B(*pass.F), addrMat(pass, B) { - B.setSILConventions( - SILModuleConventions::getLoweredAddressConventions(pass.F->getModule())); + explicit DefRewriter(AddressLoweringState &pass, SILValue value, + SILBasicBlock::iterator insertPt) + : pass(pass), builder(pass.getBuilder(insertPt)), addrMat(pass, builder), + storage(pass.valueStorageMap.getStorage(value)) { + assert(!storage.isRewritten); } - void visitInst(SILInstruction *inst) { visit(inst); } +public: + static void rewriteValue(SILValue value, AddressLoweringState &pass) { + if (auto *inst = value->getDefiningInstruction()) { + DefRewriter(pass, value, inst->getIterator()).visit(inst); + + } else { + // function args are already rewritten. + auto *blockArg = cast(value); + auto insertPt = blockArg->getParent()->begin(); + DefRewriter(pass, value, insertPt).rewriteArg(blockArg); + } + } protected: - void beforeVisit(SILInstruction *I) { - // This cast succeeds beecause only specific instructions get added to - // the value storage map. - storage = &pass.valueStorageMap.getStorage(cast(I)); + // Set the storage address for an opaque block arg and mark it rewritten. + void rewriteArg(SILPhiArgument *arg) { + LLVM_DEBUG(llvm::dbgs() << "REWRITE ARG "; arg->dump()); + if (storage.storageAddress) + LLVM_DEBUG(llvm::dbgs() << " STORAGE "; storage.storageAddress->dump()); - LLVM_DEBUG(llvm::dbgs() << "REWRITE DEF "; I->dump()); - if (storage->storageAddress) - LLVM_DEBUG(llvm::dbgs() << " STORAGE "; storage->storageAddress->dump()); + storage.storageAddress = addrMat.materializeAddress(arg); + } - B.setInsertionPoint(I); - B.setCurrentDebugScope(I->getDebugScope()); + void beforeVisit(SILInstruction *inst) { + LLVM_DEBUG(llvm::dbgs() << "REWRITE DEF "; inst->dump()); + if (storage.storageAddress) + LLVM_DEBUG(llvm::dbgs() << " STORAGE "; storage.storageAddress->dump()); } - void visitSILInstruction(SILInstruction *I) { - LLVM_DEBUG(I->dump()); - llvm_unreachable("Unimplemented?!"); + void visitSILInstruction(SILInstruction *inst) { + inst->dump(); + llvm::report_fatal_error("^^^ Unimplemented opaque value def."); } void visitApplyInst(ApplyInst *applyInst) { - assert(isa(applyInst) && - "beforeVisit assumes that ApplyInst is an SVI"); - assert(!storage->isRewritten()); // Completely rewrite the apply instruction, handling any remaining // (loadable) indirect parameters, allocating memory for indirect // results, and generating a new apply instruction. - ApplyRewriter rewriter(applyInst, pass); - rewriter.rewriteParameters(); - rewriter.convertApplyWithIndirectResults(); - } - - void visitCopyValueInst(CopyValueInst *copyInst) { - // A folded copy is not rewritten. - assert(storage->isProjection() || storage->isRewritten()); + CallArgRewriter(applyInst, pass).rewriteArguments(); + ApplyRewriter(applyInst, pass).convertApplyWithIndirectResults(); + } + + // Rewrite the apply for an indirect result. 
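+  //
+  // Sketch for illustration, in the style of the examples above (types are
+  // hypothetical): the destructure of a pseudo call result
+  //
+  //   %tuple = apply %f() : $() -> (@out T, L)
+  //   (%t, %l) = destructure_tuple %tuple : $(T, L)
+  //
+  // reaches DefRewriter before the apply has been rewritten, so rewriting the
+  // destructure rewrites the call itself.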
+ void visitDestructureTupleInst(DestructureTupleInst *destructure) { + SILValue srcVal = destructure->getOperand(); + assert(isPseudoCallResult(srcVal) && "destructure use should be rewritten"); + + FullApplySite apply; + if (auto *applyInst = dyn_cast(srcVal)) { + apply = FullApplySite::isa(applyInst); + } else { + auto *termInst = + SILArgument::isTerminatorResult(srcVal)->getTerminatorForResult(); + apply = FullApplySite::isa(termInst); + } + CallArgRewriter(apply, pass).rewriteArguments(); + ApplyRewriter(apply, pass).convertApplyWithIndirectResults(); } + // Define an opaque enum value. void visitEnumInst(EnumInst *enumInst) { - SILValue enumAddr; if (enumInst->hasOperand()) { - addrMat.initializeOperandMem(&enumInst->getOperandRef()); - - assert(storage->storageAddress); - enumAddr = storage->storageAddress; - } else - enumAddr = addrMat.materializeAddress(enumInst); - - B.createInjectEnumAddr(enumInst->getLoc(), enumAddr, - enumInst->getElement()); + // Handle operands here because loadable operands must also be copied. + addrMat.initializeOperand(&enumInst->getOperandRef()); + } + SILValue enumAddr = addrMat.materializeAddress(enumInst); - storage->markRewritten(); + builder.createInjectEnumAddr(enumInst->getLoc(), enumAddr, + enumInst->getElement()); } + // Define an existential. void visitInitExistentialValueInst( InitExistentialValueInst *initExistentialValue) { // Initialize memory for the operand which may be opaque or loadable. - addrMat.initializeOperandMem(&initExistentialValue->getOperandRef()); + addrMat.initializeOperand(&initExistentialValue->getOperandRef()); + } - assert(storage->storageAddress); - storage->markRewritten(); + // Project an opaque value out of a box-type existential. + void visitOpenExistentialBoxValueInst( + OpenExistentialBoxValueInst *openExistentialBox) { + // FIXME: Unimplemented + llvm::report_fatal_error("Unimplemented OpenExistentialBoxValue def."); } + // Load an opaque value. void visitLoadInst(LoadInst *loadInst) { - // Bitwise copy the value. Two locations now share ownership. This is - // modeled as a take-init. - SILValue addr = pass.valueStorageMap.getStorage(loadInst).storageAddress; + SILValue addr = addrMat.materializeAddress(loadInst); + IsTake_t isTake; + if (loadInst->getOwnershipQualifier() == LoadOwnershipQualifier::Take) + isTake = IsTake; + else { + assert(loadInst->getOwnershipQualifier() == LoadOwnershipQualifier::Copy); + isTake = IsNotTake; + } + // Dummy loads are already mapped to their storage address. if (addr != loadInst->getOperand()) { - B.createCopyAddr(loadInst->getLoc(), loadInst->getOperand(), addr, IsTake, - IsInitialization); + builder.createCopyAddr(loadInst->getLoc(), loadInst->getOperand(), addr, + isTake, IsInitialization); } - storage->markRewritten(); } + // Define an opaque struct. + void visitStructInst(StructInst *structInst) { + // For each element, initialize the operand's memory. Some struct elements + // may be loadable types. + for (Operand &operand : structInst->getAllOperands()) + addrMat.initializeOperand(&operand); + } + + // Define an opaque tuple. void visitTupleInst(TupleInst *tupleInst) { - ValueStorage &storage = pass.valueStorageMap.getStorage(tupleInst); - if (storage.isProjection() - && isa(storage.getComposedOperand()->getUser())) { - // For indirectly returned values, each element has its own storage. - return; - } // For each element, initialize the operand's memory. Some tuple elements // may be loadable types. 
-    SILValue tupleAddr = addrMat.materializeAddress(tupleInst);
-    unsigned eltIdx = 0;
-    for (Operand &operand : tupleInst->getAllOperands()) {
-      SILType eltTy = operand.get()->getType();
-      if (eltTy.isAddressOnly(*pass.F))
-        addrMat.initializeOperandMem(&operand);
-      else {
-        auto *elementAddr = B.createTupleElementAddr(
-            tupleInst->getLoc(), tupleAddr, eltIdx, eltTy.getAddressType());
-        B.createStore(tupleInst->getLoc(), operand.get(), elementAddr,
-                      StoreOwnershipQualifier::Unqualified);
-      }
-      ++eltIdx;
-    }
+    for (Operand &operand : tupleInst->getAllOperands())
+      addrMat.initializeOperand(&operand);
   }
+};
+} // end anonymous namespace
 
-  void visitTupleExtractInst(TupleExtractInst *extractInst) {
-    // If the source is an opaque tuple, as opposed to a call result, then the
-    // extract is rewritten on the use-side.
-    if (storage->isRewritten())
-      return;
+//===----------------------------------------------------------------------===//
+//                            Rewrite Opaque Values
+//===----------------------------------------------------------------------===//
 
-    // This must be an indirect result for an apply that has not yet been
-    // rewritten. Rewrite the apply.
-    SILValue srcVal = extractInst->getOperand();
-    ApplyRewriter(cast<ApplyInst>(srcVal), pass)
-        .convertApplyWithIndirectResults();
+// Rewrite applies with indirect parameters or results of loadable types that
+// were not visited during opaque value rewriting.
+static void rewriteIndirectApply(FullApplySite apply,
+                                 AddressLoweringState &pass) {
+  // If all indirect args were loadable, then they still need to be rewritten.
+  CallArgRewriter(apply, pass).rewriteArguments();
 
-    assert(storage->storageAddress);
+  if (!apply.getSubstCalleeType()->hasIndirectFormalResults())
+    return;
+
+  // If the call has indirect results and wasn't already rewritten, rewrite it
+  // now. This handles try_apply, which is not rewritten when DefRewriter visits
+  // block arguments. It also handles an apply with loadable indirect results.
+  ApplyRewriter(apply, pass).convertApplyWithIndirectResults();
+
+  if (!apply.getInstruction()->isDeleted()) {
+    assert(!getCallMultiResult(apply.getPseudoResult())
+           && "replaceDirectResults deletes the destructure");
+    pass.deleter.forceDelete(apply.getInstruction());
   }
-};
-} // end anonymous namespace
+}
 
 static void rewriteFunction(AddressLoweringState &pass) {
-  AddressOnlyDefRewriter defVisitor(pass);
-  AddressOnlyUseRewriter useVisitor(pass);
+  // During rewriting, storage references are stable.
+  pass.valueStorageMap.setStable();
+
+  // For each opaque value in forward order, rewrite its users and its defining
+  // instruction.
+  for (auto &valueAndStorage : pass.valueStorageMap) {
+    SILValue valueDef = valueAndStorage.value;
+    // Rewrite a def that wasn't already rewritten when handling its operands.
+    if (!valueAndStorage.storage.isRewritten) {
+      DefRewriter::rewriteValue(valueDef, pass);
+      valueAndStorage.storage.markRewritten();
+    }
+    // Rewrite a use of any non-address value mapped to storage (does not
+    // include the already rewritten uses of indirect arguments).
+    if (valueDef->getType().isAddress())
+      continue;
 
-  for (auto &valueStorageI : pass.valueStorageMap) {
-    SILValue valueDef = valueStorageI.first;
+    SmallVector<Operand *, 8> uses(valueDef->getUses());
+    for (Operand *oper : uses) {
+      UseRewriter::rewriteUse(oper, pass);
+    }
+  }
+  // Rewrite any applies with indirect parameters now that all such parameters
+  // are rewritten. If the apply had indirect results, it was already rewritten
+  // by the DefRewriter.
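+  //
+  // Sketch for illustration (hypothetical callee): a call such as
+  //
+  //   apply %f(%loadableArg) : $(@in L) -> ()
+  //
+  // defines no address-only values, so it is only visited here, where its
+  // loadable @in argument is spilled to a temporary and passed by address.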
+  for (auto optionalApply : pass.indirectApplies) {
+    if (optionalApply) {
+      rewriteIndirectApply(optionalApply.getValue(), pass);
+    }
+  }
+  // Rewrite this function's return value now that all opaque values within the
+  // function are rewritten. This still depends on valid ValueStorage
+  // projection operands.
+  if (pass.function->getLoweredFunctionType()->hasIndirectFormalResults())
+    ReturnRewriter(pass).rewriteReturns();
+}
+
+// Given an array of terminator operand values, produce an array of
+// operands with those corresponding to deadArgIndices stripped out.
+static void filterDeadArgs(OperandValueArrayRef origArgs,
+                           ArrayRef<unsigned> deadArgIndices,
+                           SmallVectorImpl<SILValue> &newArgs) {
+  auto nextDeadArgI = deadArgIndices.begin();
+  for (unsigned i : indices(origArgs)) {
+    // Stop matching once all dead argument indices have been consumed.
+    if (nextDeadArgI != deadArgIndices.end() && i == *nextDeadArgI) {
+      ++nextDeadArgI;
+      continue;
+    }
+    newArgs.push_back(origArgs[i]);
+  }
+  assert(nextDeadArgI == deadArgIndices.end());
+}
 
-    // TODO: MultiValueInstruction: ApplyInst
-    if (auto *defInst = dyn_cast<SingleValueInstruction>(valueDef))
-      defVisitor.visitInst(defInst);
+// Rewrite a BranchInst omitting dead arguments.
+static void removeBranchArgs(BranchInst *branch,
+                             SmallVectorImpl<unsigned> &deadArgIndices,
+                             AddressLoweringState &pass) {
 
-    SmallVector<Operand *, 8> uses(valueDef->getUses());
-    for (Operand *oper : uses)
-      useVisitor.visitOperand(oper);
-  }
-
-  // Rewrite any remaining (loadable) indirect parameters.
-  for (ApplySite apply : pass.indirectApplies) {
-    // Calls with indirect formal results have already been rewritten.
-    if (apply.getSubstCalleeType()->hasIndirectFormalResults()) {
-      bool isRewritten = false;
-      visitCallResults(apply, [&](SILValue result) {
-        if (result->getType().isAddressOnly(*pass.F)) {
-          assert(pass.valueStorageMap.getStorage(result).isRewritten());
-          isRewritten = true;
-          return false;
-        }
-        return true;
-      });
-      if (!isRewritten) {
-        ApplyRewriter rewriter(apply, pass);
-        rewriter.rewriteParameters();
-        rewriter.convertApplyWithIndirectResults();
-        continue;
+  llvm::SmallVector<SILValue, 8> branchArgs;
+  filterDeadArgs(branch->getArgs(), deadArgIndices, branchArgs);
+
+  pass.getBuilder(branch->getIterator())
+      .createBranch(branch->getLoc(), branch->getDestBB(), branchArgs);
+  pass.deleter.forceDelete(branch);
+}
+
+// Remove opaque phis. Their inputs have already been substituted with Undef.
+static void removeOpaquePhis(SILBasicBlock *bb, AddressLoweringState &pass) {
+  if (bb->isEntry())
+    return;
+
+  SmallVector<unsigned, 16> deadArgIndices;
+  for (auto *bbArg : bb->getArguments()) {
+    if (bbArg->getType().isAddressOnly(*pass.function))
+      deadArgIndices.push_back(bbArg->getIndex());
+  }
+  if (deadArgIndices.empty())
+    return;
+
+  // Iterate while modifying the predecessor's terminators.
+  for (auto *predecessor : bb->getPredecessorBlocks()) {
+    auto *branch = cast<BranchInst>(predecessor->getTerminator());
+    removeBranchArgs(branch, deadArgIndices, pass);
+  }
+  // Erase in reverse to avoid index invalidation.
+  while (!deadArgIndices.empty()) {
+    bb->eraseArgument(deadArgIndices.pop_back_val());
+  }
+}
+
+// Instructions that use an address-only value without producing one are
+// already deleted. The rest of the address-only definitions are now removed
+// bottom-up by visiting the valueStorageMap.
+//
+// Phis are removed here after all other instructions.
+static void deleteRewrittenInstructions(AddressLoweringState &pass) {
+  // Add the rest of the instructions to the dead list in post order.
+ for (auto &valueAndStorage : llvm::reverse(pass.valueStorageMap)) { + SILValue val = valueAndStorage.value; + ValueStorage &storage = valueAndStorage.storage; + + assert(&pass.valueStorageMap.getStorage(val) == &valueAndStorage.storage + && "invalid storage map"); + + // Returned tuples and multi-result calls are not in the + // valueStorageMap. Everything else must have been rewritten. + assert(storage.isRewritten && "opaque value has not been rewritten"); + + // If the storage was unused, e.g. because all uses were projected into + // users, then delete the allocation. + if (auto *allocInst = storage.storageAddress->getDefiningInstruction()) { + pass.deleter.deleteIfDead(allocInst); + } + auto *deadInst = val->getDefiningInstruction(); + if (!deadInst || deadInst->isDeleted()) + continue; + + if (auto *destructure = dyn_cast(deadInst)) { + auto tupleVal = destructure->getOperand(); + if (auto *applyInst = dyn_cast(tupleVal)) { + deadInst = applyInst; } } - ApplyRewriter(apply, pass).rewriteParameters(); + LLVM_DEBUG(llvm::dbgs() << "DEAD "; deadInst->dump()); + if (!isa(deadInst)) { + pass.deleter.forceDeleteWithUsers(deadInst); + continue; + } + // willDeleteInstruction was already called for open_existential_value to + // update the registered type. Carry out the remaining deletion steps. + deadInst->getParent()->remove(deadInst); + pass.getModule()->scheduleForDeletion(deadInst); } - if (pass.F->getLoweredFunctionType()->hasIndirectFormalResults()) - ReturnRewriter(pass).rewriteReturns(); + + pass.valueStorageMap.clear(); + + // Remove block args after removing all instructions that may use them. + for (auto &bb : *pass.function) + removeOpaquePhis(&bb, pass); + + pass.deleter.cleanupDeadInstructions(); } //===----------------------------------------------------------------------===// -// AddressLowering: Top-Level Function Transform. +// AddressLowering: Module Pass //===----------------------------------------------------------------------===// namespace { +// Note: the only reason this is not a FunctionTransform is to change the SIL +// stage for all functions at once. class AddressLowering : public SILModuleTransform { - /// The entry point to this function transformation. + /// The entry point to this module transformation. void run() override; void runOnFunction(SILFunction *F); }; } // end anonymous namespace -void AddressLowering::runOnFunction(SILFunction *F) { - auto *DA = PM->getAnalysis(); +void AddressLowering::runOnFunction(SILFunction *function) { + if (!function->isDefinition()) + return; + + assert(function->hasOwnership() && "SIL opaque values requires OSSA"); + + PrettyStackTraceSILFunction FuncScope("address-lowering", function); + + LLVM_DEBUG(llvm::dbgs() << "Address Lowering: " << function->getName() + << "\n"); - AddressLoweringState pass(F, DA->get(F)); + // Ensure that blocks can be processed in RPO order. + removeUnreachableBlocks(*function); - // Rewrite function args and insert alloc_stack/dealloc_stack. + auto *dominance = PM->getAnalysis(); + + AddressLoweringState pass(function, dominance->get(function)); + + // ## Step #1: Map opaque values + // + // First, rewrite this function's arguments and return values, then populate + // pass.valueStorageMap with an entry for each address-only value. 
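+  //
+  // For example (illustrative only), given address-only values %x and %y:
+  //
+  //   %x = apply %f() : $() -> @out T
+  //   %y = copy_value %x : $T
+  //
+  // both %x and %y receive valueStorageMap entries here; no storage is
+  // allocated until step #2.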
+ prepareValueStorage(pass); + + // ## Step #2: Allocate storage + // + // For each address-only value mapped in step #1, either create an + // alloc_stack/dealloc_stack pair, or mark its ValueStorage entry as a + // def-projection out of its operand's def or a use projection into its + // composing use or into a phi (branch operand). OpaqueStorageAllocation allocator(pass); allocator.allocateOpaqueStorage(); - LLVM_DEBUG(llvm::dbgs() << "\nREWRITING: " << F->getName(); F->dump()); + LLVM_DEBUG(llvm::dbgs() << "Finished allocating storage.\n"; function->dump(); + pass.valueStorageMap.dump()); - // Rewrite instructions with address-only operands or results. + // ## Step #3. Rewrite opaque values + // + // Rewrite all instructions that either define or use an address-only value. + // Creates new '_addr' variants of instructions, obtaining the storage + // address from the 'valueStorageMap'. This materializes projections in + // forward order, setting 'storageAddress' for each projection as it goes. rewriteFunction(pass); - invalidateAnalysis(F, SILAnalysis::InvalidationKind::Instructions); - - // Instructions that were explicitly marked dead should already have no - // users. - // - // Add the rest of the instructions to the dead list in post order. - // FIXME: make sure we cleaned up address-only BB arguments. - for (auto &valueStorageI : llvm::reverse(pass.valueStorageMap)) { - // TODO: MultiValueInstruction: ApplyInst - auto *deadInst = dyn_cast(valueStorageI.first); - if (!deadInst) - continue; + deleteRewrittenInstructions(pass); - LLVM_DEBUG(llvm::dbgs() << "DEAD "; deadInst->dump()); -#ifndef NDEBUG - for (auto result : deadInst->getResults()) - for (Operand *operand : result->getUses()) - assert(pass.instsToDelete.count(operand->getUser())); -#endif - pass.instsToDelete.insert(deadInst); - } - pass.valueStorageMap.clear(); + StackNesting::fixNesting(function); - // Delete instructions in postorder - recursivelyDeleteTriviallyDeadInstructions(pass.instsToDelete.takeVector(), - true); + // The CFG may change because of criticalEdge splitting during + // createStackAllocation or StackNesting. + invalidateAnalysis(function, + SILAnalysis::InvalidationKind::BranchesAndInstructions); } -/// The entry point to this function transformation. +/// The entry point to this module transformation. void AddressLowering::run() { if (getModule()->useLoweredAddresses()) return; diff --git a/lib/SILOptimizer/Mandatory/AddressLowering.h b/lib/SILOptimizer/Mandatory/AddressLowering.h new file mode 100644 index 0000000000000..e4c6ae79b0c2e --- /dev/null +++ b/lib/SILOptimizer/Mandatory/AddressLowering.h @@ -0,0 +1,282 @@ +//===--- AddressLowering.h - Lower SIL address-only types. ----------------===// +// +// This source file is part of the Swift.org open source project +// +// Copyright (c) 2014 - 2022 Apple Inc. and the Swift project authors +// Licensed under Apache License v2.0 with Runtime Library Exception +// +// See https://swift.org/LICENSE.txt for license information +// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors +// +//===----------------------------------------------------------------------===// + +#include "swift/SIL/SILArgument.h" +#include "swift/SIL/SILInstruction.h" +#include "swift/SIL/SILValue.h" +#include "llvm/ADT/DenseMap.h" + +namespace swift { + +/// Track a value's storage. Stages in the storage life-cycle: +/// +/// 1. Unallocated +/// +/// 2. Allocated. 
Either (a) 'storageAddress' is an alloc_stack, or (b)
+/// 'projectedStorageID' refers to a different ValueStorage, which recursively
+/// leads to a valid 'storageAddress'.
+///
+/// 3. Materialized. 'storageAddress' is valid. Address projections have been
+/// emitted at the point that this value is defined.
+///
+/// 4. Rewritten. The definition of this address-only value is fully translated
+/// into lowered SIL. Instructions are typically materialized and rewritten at
+/// the same time. An indirect result, however, is materialized as soon as its
+/// alloc_stack is emitted, but only rewritten once the call itself is
+/// rewritten.
+///
+/// A projection may project out of an operand's definition (def-projection).
+/// After allocation, before materialization or rewriting, we may have:
+///
+///   %result_addr = alloc_stack        // storage for %result
+///   %result = apply () -> @out T
+///   %extract = struct_extract %result // def-projection of %result
+///
+/// Or, a projection may project into a composing use (use-projection):
+///
+///   %struct_addr = alloc_stack    // storage for %struct
+///   %result = apply () -> @out T  // use-projection of %struct at operand #0
+///   %struct = struct %result
+///
+/// A phi-projection is a use projection that projects its entire value
+/// through a phi rather than into a composing use. It has an invalid
+/// 'projectedOperandNum'.
+///
+/// Operations that destructively reuse storage (open_existential_value,
+/// unchecked_enum_data, and switch_enum) are not considered storage
+/// projections. Instead, these values have no ValueStorage but are rewritten to
+/// directly reuse their operand's storage.
+///
+/// To materialize projections, address lowering follows the original def-use
+/// edges for address-only values. Consequently, values that have storage cannot
+/// be removed from SIL or from the storage map until rewriting is
+/// complete. Mapped values can, however, be substituted on-the-fly by emitting
+/// a place-holder value and updating the map entry. This works because the
+/// value storage map holds no direct references to any SIL entities, such as
+/// Operands or SILValues.
+struct ValueStorage {
+  enum : uint32_t { InvalidID = uint32_t(~0) };
+  enum : uint16_t { InvalidOper = uint16_t(~0) };
+
+  /// The final address of this storage after rewriting the SIL. For values
+  /// linked to their own storage, this is set during storage allocation to an
+  /// alloc_stack or indirect function argument. For projections, it is only set
+  /// after materialization (during instruction rewriting).
+  SILValue storageAddress;
+
+  /// When either isDefProjection or isUseProjection is set, this refers to the
+  /// storage whose "def" this value projects out of or whose operand this
+  /// storage projects into via its "use".
+  uint32_t projectedStorageID;
+
+  /// For use-projections, identifies the operand index of the composing use.
+  /// Only valid for non-phi use projections.
+  uint16_t projectedOperandNum;
+
+  /// Projection out of a storage def. e.g. this value is a destructure.
+  unsigned isDefProjection : 1;
+
+  /// Projection into a composing use or phi. e.g. this value is used by a
+  /// struct, tuple, enum, or branch.
+  unsigned isUseProjection : 1;
+
+  // The definition of this value is fully translated to lowered SIL.
+  unsigned isRewritten : 1;
+
+  // This is a use-projection into an enum. Tracked to avoid projecting enums
+  // across phis, which would result in piecewise initialization.
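+  //
+  // For example (editor's sketch, not text from this patch): given
+  //
+  //   %payload = apply %f<T>() : $@convention(thin) <τ_0_0> () -> @out τ_0_0
+  //   %opt = enum $Optional<T>, #Optional.some!enumelt, %payload : $T
+  //
+  // %payload's storage can be a use-projection into %opt's storage,
+  // materialized as an init_enum_data_addr of %opt's address (compare
+  // f080_optional in test/SILOptimizer/address_lowering.sil); this bit is set
+  // on %payload's entry to record that it initializes an enum.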
+  unsigned initializesEnum : 1;
+
+  ValueStorage() { clear(); }
+
+  void clear() {
+    storageAddress = SILValue();
+    projectedStorageID = InvalidID;
+    projectedOperandNum = InvalidOper;
+    isUseProjection = false;
+    isDefProjection = false;
+    isRewritten = false;
+    initializesEnum = false;
+  }
+
+  bool isAllocated() const {
+    return storageAddress || isUseProjection || isDefProjection;
+  }
+
+  bool isProjection() const { return isUseProjection || isDefProjection; }
+
+  bool isPhiProjection() const {
+    return isUseProjection && projectedOperandNum == InvalidOper;
+  }
+
+  bool isComposingUseProjection() const {
+    return isUseProjection && projectedOperandNum != InvalidOper;
+  }
+
+  void markRewritten() {
+    assert(storageAddress);
+    isRewritten = true;
+  }
+
+  SILValue getMaterializedAddress() const {
+    assert(isRewritten && "storage has not been materialized");
+    return storageAddress;
+  }
+};
+
+/// Map each opaque/resilient SILValue to its abstract storage.
+/// Iteration guarantees RPO order.
+///
+/// Mapped values are expected to be created in a single RPO pass. "erase" is
+/// unsupported. Values must be replaced using 'replaceValue()'.
+class ValueStorageMap {
+  struct ValueStoragePair {
+    SILValue value;
+    ValueStorage storage;
+    ValueStoragePair(SILValue v, ValueStorage s) : value(v), storage(s) {}
+  };
+  typedef std::vector<ValueStoragePair> ValueVector;
+  // Hash of values to ValueVector indices.
+  typedef llvm::DenseMap<SILValue, unsigned> ValueHashMap;
+
+  ValueVector valueVector;
+  ValueHashMap valueHashMap;
+
+  // True after valueVector is done growing, so ValueStorage references will no
+  // longer be invalidated.
+  SWIFT_ASSERT_ONLY_DECL(bool stableStorage = false);
+
+public:
+  bool empty() const { return valueVector.empty(); }
+
+  void clear() {
+    valueVector.clear();
+    valueHashMap.clear();
+  }
+
+  /// Iterate over value storage in RPO order. Once we begin erasing
+  /// instructions, some entries could become invalid. ValueStorage validity can
+  /// be checked with valueStorageMap.contains(value).
+  ValueVector::iterator begin() { return valueVector.begin(); }
+
+  ValueVector::iterator end() { return valueVector.end(); }
+
+  ValueVector::reverse_iterator rbegin() { return valueVector.rbegin(); }
+
+  ValueVector::reverse_iterator rend() { return valueVector.rend(); }
+
+  bool contains(SILValue value) const {
+    return valueHashMap.find(value) != valueHashMap.end();
+  }
+
+  unsigned getOrdinal(SILValue value) const {
+    auto hashIter = valueHashMap.find(value);
+    assert(hashIter != valueHashMap.end() && "Missing SILValue");
+    return hashIter->second;
+  }
+
+  ValueStorage &getStorage(SILValue value) {
+    return valueVector[getOrdinal(value)].storage;
+  }
+  const ValueStorage &getStorage(SILValue value) const {
+    return valueVector[getOrdinal(value)].storage;
+  }
+
+  const ValueStorage *getStorageOrNull(SILValue value) const {
+    auto iter = valueHashMap.find(value);
+    if (iter == valueHashMap.end())
+      return nullptr;
+
+    return &valueVector[iter->second].storage;
+  }
+
+  void setStable() { SWIFT_ASSERT_ONLY(stableStorage = true); }
+
+  /// Given storage for a projection, return the projected storage by following
+  /// a single level of projected storage. The returned storage may
+  /// recursively be another projection.
+  ValueStoragePair &getProjectedStorage(const ValueStorage &storage) {
+    assert(storage.isProjection());
+    return valueVector[storage.projectedStorageID];
+  }
+
+  /// Return the non-projection storage that the given storage ultimately refers
+  /// to by following all projections. After allocation, this storage always has
+  /// a valid address.
+  const ValueStorage &getBaseStorage(const ValueStorage &storage) {
+    if (storage.isDefProjection || storage.isUseProjection)
+      return getBaseStorage(getProjectedStorage(storage).storage);
+
+    return storage;
+  }
+
+  /// Return the non-projection storage that the given storage ultimately refers
+  /// to by following all projections.
+  const ValueStorage &getBaseStorage(SILValue value) {
+    return getBaseStorage(getStorage(value));
+  }
+
+  /// Return the non-projection storage that this storage refers to. Return
+  /// nullptr if this storage, or any storage that it transitively projects
+  /// into, initializes an Enum.
+  const ValueStorage *getNonEnumBaseStorage(const ValueStorage &storage) {
+    if (storage.initializesEnum)
+      return nullptr;
+
+    if (storage.isUseProjection) {
+      auto &storageAndValue = getProjectedStorage(storage);
+      return getNonEnumBaseStorage(storageAndValue.storage);
+    }
+    assert(!storage.isDefProjection && "def projections should not reach here");
+    return &storage;
+  }
+
+  /// Return the non-projection storage that this storage refers to, or nullptr
+  /// if \p allowInitEnum is false and that storage (or any intermediate
+  /// storage) initializes an Enum.
+  const ValueStorage *getBaseStorage(SILValue value, bool allowInitEnum) {
+    if (allowInitEnum)
+      return &getBaseStorage(value);
+
+    return getNonEnumBaseStorage(getStorage(value));
+  }
+
+  /// Insert a value in the map, creating a ValueStorage object for it. This
+  /// must be called in RPO order.
+  ValueStorage &insertValue(SILValue value);
+
+  /// Replace a value that is mapped to storage with another value. This allows
+  /// limited rewriting of original address-only values. For example, block
+  /// arguments can be replaced with fake loads in order to rewrite their
+  /// corresponding terminator.
+  void replaceValue(SILValue oldValue, SILValue newValue);
+
+  /// Record a storage projection from the source of the given operand into its
+  /// use (e.g. struct_extract, tuple_extract, switch_enum).
+  void recordDefProjection(Operand *oper, SILValue projectedValue);
+
+  /// Record a storage projection from the use of the given operand into the
+  /// operand's source. (e.g. any value used by a struct, tuple, or enum may
+  /// project storage into its use).
+  void recordComposingUseProjection(Operand *oper, SILValue userValue);
+
+  // Mark a phi operand value as coalesced with the phi storage.
+  void recordPhiUseProjection(Operand *oper, SILPhiArgument *phi);
+
+  /// Return true if \p oper projects into its use's aggregate storage.
+ bool isComposingUseProjection(Operand *oper) const; + +#ifndef NDEBUG + void dump(); +#endif +}; + +} // namespace swift diff --git a/lib/SILOptimizer/Mandatory/CMakeLists.txt b/lib/SILOptimizer/Mandatory/CMakeLists.txt index 927259804c56d..904874a03f74a 100644 --- a/lib/SILOptimizer/Mandatory/CMakeLists.txt +++ b/lib/SILOptimizer/Mandatory/CMakeLists.txt @@ -4,6 +4,7 @@ target_sources(swiftSILOptimizer PRIVATE AddressLowering.cpp CapturePromotion.cpp ClosureLifetimeFixup.cpp + PhiStorageOptimizer.cpp ConstantPropagation.cpp DefiniteInitialization.cpp DIMemoryUseCollector.cpp diff --git a/lib/SILOptimizer/Mandatory/PhiStorageOptimizer.cpp b/lib/SILOptimizer/Mandatory/PhiStorageOptimizer.cpp new file mode 100644 index 0000000000000..4e4af608679a3 --- /dev/null +++ b/lib/SILOptimizer/Mandatory/PhiStorageOptimizer.cpp @@ -0,0 +1,237 @@ +//===--- PhiStorageOptimizer.cpp - Phi storage optimizer ------------------===// +// +// This source file is part of the Swift.org open source project +// +// Copyright (c) 2014 - 2021 Apple Inc. and the Swift project authors +// Licensed under Apache License v2.0 with Runtime Library Exception +// +// See https://swift.org/LICENSE.txt for license information +// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors +// +//===----------------------------------------------------------------------===// +/// +/// PhiStorageOptimizer implements an analysis used by AddressLowering +/// to reuse storage across block arguments. +/// +/// TODO: This does not yet coalesce the copy_value instructions that produce a +/// phi operand. Such a copy implies that both the operand and phi value are +/// live past the phi. Nonetheleses, they could still be coalesced as +/// follows... First coalesce all direct phi operands. Then transitively +/// coalesce copies by redoing the liveness traversal from the uses of the copy. +/// +/// TODO: This approach uses on-the-fly liveness discovery for all incoming +/// values at once. It requires no storage for liveness. Hopefully this is +/// sufficient for -Onone. At -O, we could explore implementing strong phi +/// elimination. However, that depends the ability to perform interference +/// checks between arbitrary storage locations, which requires computing and +/// storing liveness per-storage location. +/// +//===----------------------------------------------------------------------===// + +#define DEBUG_TYPE "address-lowering" + +#include "PhiStorageOptimizer.h" +#include "swift/SIL/BasicBlockDatastructures.h" +#include "swift/SIL/SILBasicBlock.h" +#include "swift/SIL/SILInstruction.h" + +using namespace swift; + +namespace swift { + +/// An analysis used by AddressLowering to reuse phi storage. +/// +/// Populates CoalescedPhi::coalescedOperands with all phi operands that can +/// reuse the phi's storage. 
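+///
+/// For example (editor's sketch; the names are illustrative, see the
+/// address_lowering.sil tests for checked cases):
+///
+///   bb1:
+///     %a = apply %f<T>() : $@convention(thin) <τ_0_0> () -> @out τ_0_0
+///     br bb3(%a : $T)
+///   bb2:
+///     %b = apply %g<T>() : $@convention(thin) <τ_0_0> () -> @out τ_0_0
+///     br bb3(%b : $T)
+///   bb3(%phi : @owned $T):
+///
+/// If neither %a nor %b is used again after its branch, both operands can be
+/// initialized directly in the phi's storage instead of being copied into it.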
+class PhiStorageOptimizer {
+  PhiValue phi;
+  const ValueStorageMap &valueStorageMap;
+
+  CoalescedPhi &coalescedPhi;
+
+  BasicBlockSet occupiedBlocks;
+
+public:
+  PhiStorageOptimizer(PhiValue phi, const ValueStorageMap &valueStorageMap,
+                      CoalescedPhi &coalescedPhi)
+      : phi(phi), valueStorageMap(valueStorageMap), coalescedPhi(coalescedPhi),
+        occupiedBlocks(getFunction()) {}
+
+  SILFunction *getFunction() const { return phi.phiBlock->getParent(); }
+
+  void optimize();
+
+protected:
+  bool hasUseProjection(SILInstruction *defInst);
+  bool canCoalesceValue(SILValue incomingVal);
+  void tryCoalesceOperand(SILBasicBlock *incomingPred);
+  bool recordUseLiveness(SILValue incomingVal, BasicBlockSetVector &liveBlocks);
+};
+
+} // namespace swift
+
+void CoalescedPhi::coalesce(PhiValue phi,
+                            const ValueStorageMap &valueStorageMap) {
+  assert(empty() && "attempt to recoalesce the same phi");
+
+  PhiStorageOptimizer(phi, valueStorageMap, *this).optimize();
+}
+
+/// Optimize phi storage by coalescing phi operands.
+///
+/// Finds all non-interfering phi operands and adds them to the result's
+/// coalescedOperands. The algorithm can be described in the abstract as follows
+/// (assuming no critical edges):
+///
+/// All blocks are in one of three states at any point:
+/// - clean (not present in the live or occupied set)
+/// - live
+/// - occupied
+///
+/// All blocks start clean.
+///
+/// For each incoming value:
+///
+///   For all uses of the current incoming value:
+///
+///     Scan the CFG backward following predecessors.
+///     If the current block is:
+///
+///       Clean: mark it live and continue scanning.
+///
+///       Live: stop scanning and continue with the next use.
+///
+///       Occupied: record interference, stop scanning, continue to next use.
+///
+/// If no occupied blocks were reached, mark this phi operand coalesced. Its
+/// storage can be projected from the phi storage.
+///
+/// Mark all live blocks occupied.
+///
+/// In the end, we have a set of non-interfering incoming values that can reuse
+/// the phi's storage.
+void PhiStorageOptimizer::optimize() {
+  // The single incoming value case always projects storage.
+  if (auto *predecessor = phi.phiBlock->getSinglePredecessorBlock()) {
+    coalescedPhi.coalescedOperands.push_back(phi.getOperand(predecessor));
+    return;
+  }
+  occupiedBlocks.insert(phi.phiBlock);
+  for (auto *incomingPred : phi.phiBlock->getPredecessorBlocks()) {
+    tryCoalesceOperand(incomingPred);
+  }
+}
+
+// Return true if any of \p defInst's operands are composing use projections
+// into \p defInst's storage.
+bool PhiStorageOptimizer::hasUseProjection(SILInstruction *defInst) {
+  for (Operand &oper : defInst->getAllOperands()) {
+    if (valueStorageMap.isComposingUseProjection(&oper))
+      return true;
+  }
+  return false;
+}
+
+// Return true if \p incomingVal can be coalesced with this phi ignoring
+// possible interference. Simply determine whether storage reuse is possible.
+//
+// Precondition: \p incomingVal is an operand of this phi.
+bool PhiStorageOptimizer::canCoalesceValue(SILValue incomingVal) {
+  // A phi must not project from storage that was initialized on a path that
+  // reaches the phi because other uses of the storage may interfere with the
+  // phi. A phi may, however, be a composing use projection.
+  assert(!valueStorageMap.getStorage(phi.getValue()).isDefProjection
+         && !valueStorageMap.getStorage(phi.getValue()).isPhiProjection());
+
+  auto &incomingStorage = valueStorageMap.getStorage(incomingVal);
+
+  // If the incoming value's storage is already a projection of some other
+  // storage, it can't be coalesced.
+  // This also handles incoming values that are already coalesced with
+  // another use.
+  //
+  // Coalescing use projections from incomingVal into its other non-phi uses
+  // would require recursively following uses across projections when
+  // computing liveness.
+  if (incomingStorage.isProjection())
+    return false;
+
+  auto *defInst = incomingVal->getDefiningInstruction();
+  if (!defInst) {
+    // Indirect function arguments were replaced by loads.
+    assert(!isa<SILFunctionArgument>(incomingVal));
+    // Do not coalesce a phi with other phis. This would require liveness
+    // analysis of the whole phi web before coalescing phi operands.
+    return false;
+  }
+  assert(incomingStorage.isAllocated() && "nonphi must be allocated");
+
+  // Don't coalesce an incoming value unless its storage is from a stack
+  // allocation, which can be replaced with another alloc_stack.
+  if (!isa<AllocStackInst>(incomingStorage.storageAddress))
+    return false;
+
+  // Make sure that the incomingVal is not coalesced with any of its operands.
+  //
+  // Handling incomingValues whose operands project into them would require
+  // recursively finding the set of value definitions and their dominating defBB
+  // instead of simply incomingVal->getParentBlock().
+  if (hasUseProjection(defInst))
+    return false;
+
+  return true;
+}
+
+// Process a single incoming phi operand. Compute the value's liveness while
+// checking for interference. If no interference exists, mark it coalesced.
+void PhiStorageOptimizer::tryCoalesceOperand(SILBasicBlock *incomingPred) {
+  Operand *incomingOper = phi.getOperand(incomingPred);
+  SILValue incomingVal = incomingOper->get();
+
+  if (!canCoalesceValue(incomingVal))
+    return;
+
+  BasicBlockSetVector liveBlocks(getFunction());
+  if (!recordUseLiveness(incomingVal, liveBlocks))
+    return;
+
+  for (auto *block : liveBlocks) {
+    occupiedBlocks.insert(block);
+  }
+  assert(occupiedBlocks.contains(incomingPred));
+  coalescedPhi.coalescedOperands.push_back(incomingOper);
+}
+
+// Record liveness generated by uses of \p incomingVal.
+//
+// Return true if no interference was detected along the way.
+bool PhiStorageOptimizer::recordUseLiveness(SILValue incomingVal,
+                                            BasicBlockSetVector &liveBlocks) {
+  assert(liveBlocks.empty());
+
+  // Stop liveness traversal at defBB.
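+  // (A value is never live into the block that defines it, so the backward
+  // walk below can stop as soon as it reaches defBB.)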
+ SILBasicBlock *defBB = incomingVal->getParentBlock(); + for (auto *use : incomingVal->getUses()) { + StackList liveBBWorklist(getFunction()); + + auto visitLiveBlock = [&](SILBasicBlock *liveBB) { + if (occupiedBlocks.contains(liveBB)) + return false; + + if (liveBlocks.insert(liveBB) && liveBB != defBB) { + liveBBWorklist.push_back(liveBB); + } + return true; + }; + if (!visitLiveBlock(use->getUser()->getParent())) + return false; + + while (!liveBBWorklist.empty()) { + auto *succBB = liveBBWorklist.pop_back_val(); + for (auto *predBB : succBB->getPredecessorBlocks()) { + if (!visitLiveBlock(predBB)) + return false; + } + } + } + return true; +} diff --git a/lib/SILOptimizer/Mandatory/PhiStorageOptimizer.h b/lib/SILOptimizer/Mandatory/PhiStorageOptimizer.h new file mode 100644 index 0000000000000..07bae9c484033 --- /dev/null +++ b/lib/SILOptimizer/Mandatory/PhiStorageOptimizer.h @@ -0,0 +1,51 @@ +//===--- PhiStorageOptimizer.h - Phi storage optimizer --------------------===// +// +// This source file is part of the Swift.org open source project +// +// Copyright (c) 2014 - 2021 Apple Inc. and the Swift project authors +// Licensed under Apache License v2.0 with Runtime Library Exception +// +// See https://swift.org/LICENSE.txt for license information +// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors +// +//===----------------------------------------------------------------------===// +/// +/// This file defines PhiStorageOptimizer, a utility for use with the +/// mandatory AddressLowering pass. +/// +//===----------------------------------------------------------------------===// + +#include "AddressLowering.h" +#include "swift/SIL/SILArgument.h" +#include "swift/SIL/SILBasicBlock.h" +#include "swift/SIL/SILValue.h" +#include "llvm/ADT/SmallPtrSet.h" +#include "llvm/ADT/SmallVector.h" + +namespace swift { + +class CoalescedPhi { + friend class PhiStorageOptimizer; + + SmallVector coalescedOperands; + + CoalescedPhi(const CoalescedPhi &) = delete; + CoalescedPhi &operator=(const CoalescedPhi &) = delete; + +public: + CoalescedPhi() = default; + CoalescedPhi(CoalescedPhi &&) = default; + CoalescedPhi &operator=(CoalescedPhi &&) = default; + + void coalesce(PhiValue phi, const ValueStorageMap &valueStorageMap); + + bool empty() const { return coalescedOperands.empty(); } + + ArrayRef getCoalescedOperands() const { return coalescedOperands; } + + SILInstruction::OperandValueRange getCoalescedValues() const { + return SILInstruction::getOperandValues(getCoalescedOperands()); + } +}; + +} // namespace swift diff --git a/test/IRGen/opaque_values_irgen.sil b/test/IRGen/opaque_values_irgen.sil index 5ed4a8fd374d9..3e4d6f8d447f4 100644 --- a/test/IRGen/opaque_values_irgen.sil +++ b/test/IRGen/opaque_values_irgen.sil @@ -2,7 +2,7 @@ import Builtin -sil_stage canonical +sil_stage raw // CHECK: define hidden swiftcc void @f010_irgen_identity(%swift.opaque* noalias nocapture sret({{.*}}) %0, %swift.opaque* noalias nocapture %1, %swift.type* %T) // CHECK: entry: @@ -12,7 +12,7 @@ sil_stage canonical // CHECK: %{{.*}} = call %swift.opaque* %initializeWithTake(%swift.opaque* noalias %0, %swift.opaque* noalias %1, %swift.type* %T) // CHECK-NOT: call // CHECK: ret void -sil hidden @f010_irgen_identity : $@convention(thin) (@in T) -> @out T { -bb0(%0 : $T): +sil hidden [ossa] @f010_irgen_identity : $@convention(thin) (@in T) -> @out T { +bb0(%0 : @owned $T): return %0 : $T } diff --git a/test/SIL/Parser/opaque_values_parse.sil b/test/SIL/Parser/opaque_values_parse.sil index 
750177c5696b1..2ca772047fdae 100644 --- a/test/SIL/Parser/opaque_values_parse.sil +++ b/test/SIL/Parser/opaque_values_parse.sil @@ -3,7 +3,7 @@ import Builtin import Swift -sil_stage canonical +sil_stage raw protocol Foo { func foo() diff --git a/test/SIL/Serialization/opaque_values_serialize.sil b/test/SIL/Serialization/opaque_values_serialize.sil index 376e5202f7b2b..976dd7683b82a 100644 --- a/test/SIL/Serialization/opaque_values_serialize.sil +++ b/test/SIL/Serialization/opaque_values_serialize.sil @@ -5,7 +5,7 @@ // RUN: %target-sil-opt %t/tmp.sib -enable-sil-opaque-values -verify -o %t/tmp.2.sib -module-name opaqueval // RUN: %target-sil-opt %t/tmp.2.sib -enable-sil-opaque-values -emit-sorted-sil -verify -module-name opaqueval | %FileCheck %s -sil_stage canonical +sil_stage raw import Builtin import Swift diff --git a/test/SIL/ownership-verifier/opaque_use_verifier.sil b/test/SIL/ownership-verifier/opaque_use_verifier.sil index 8475f38cd39dd..90c4131a3bced 100644 --- a/test/SIL/ownership-verifier/opaque_use_verifier.sil +++ b/test/SIL/ownership-verifier/opaque_use_verifier.sil @@ -5,7 +5,7 @@ // incorrectly. This is important to ensure that the verifier does not // regress. It should only deal with use matching of opaque types. -sil_stage canonical +sil_stage raw import Builtin diff --git a/test/SILOptimizer/address_lowering.sil b/test/SILOptimizer/address_lowering.sil index c4338e72b28eb..afcbbfb965fad 100644 --- a/test/SILOptimizer/address_lowering.sil +++ b/test/SILOptimizer/address_lowering.sil @@ -1,133 +1,175 @@ -// RUN: %target-sil-opt -address-lowering -enable-sil-opaque-values -emit-sorted-sil %s | %FileCheck %s +// RUN: %target-sil-opt -address-lowering -enable-sil-opaque-values -emit-sorted-sil -module-name Swift -sil-verify-all %s | %FileCheck %s +// +// The module name must be Swift so that declarations like Error are parsed as the correct loadable type. 
import Builtin -import Swift -sil_stage canonical -// CHECK: sil_stage lowered +sil_stage raw +typealias AnyObject = Builtin.AnyObject typealias Int = Builtin.Int64 +typealias Bool = Builtin.Int1 -// CHECK-LABEL: sil hidden @f010_addrlower_identity : $@convention(thin) (@in T) -> @out T { +public protocol C : AnyObject {} + +sil_default_witness_table C {} + +protocol P { + func foo() +} + +enum Optional { + case none + case some(T) +} + +protocol Error {} + +struct I {} + +struct SI { + var element: T + var index: I +} + +struct SRef { + var object: AnyObject + var element: T +} + +struct Pair { + var x : T + var y : T +} + +enum Mixed { + case i(Int) + case t(T) + case o(AnyObject) +}; + +precedencegroup ComparisonPrecedence { + assignment: true + associativity: right +} +infix operator <: ComparisonPrecedence +public protocol Comparable { + static func < (lhs: Self, rhs: Self) -> Bool +} + +sil [ossa] @takeGuaranteedObject : $@convention(thin) (@guaranteed AnyObject) -> () +sil [ossa] @takeIndirectClass : $@convention(thin) (@in_guaranteed C) -> () +sil [ossa] @takeTuple : $@convention(thin) <τ_0_0> (@in_guaranteed (τ_0_0, C)) -> () + +sil [ossa] @takeIn : $@convention(thin) (@in T) -> () +sil [ossa] @takeInGuaranteed : $@convention(thin) (@in_guaranteed T) -> () + +sil [ossa] @throwsError : $@convention(thin) (@in T) -> (@out T, @error Error) +sil [ossa] @returnInt : $@convention(thin) (@in T) -> (Int, @error Error) +sil [ossa] @returnIntOut : $@convention(thin) (@in T) -> (@out Int, @error Error) +sil [ossa] @returnTuple : $@convention(thin) (@in T) -> (@out T, Int, @out Int, @out T, @error Error) + +// CHECK-LABEL: sil [ossa] @f010_addrlower_identity : $@convention(thin) (@in T) -> @out T { // CHECK: bb0(%0 : $*T, %1 : $*T): // CHECK: copy_addr [take] %1 to [initialization] %0 : $*T // CHECK: return %{{.*}} : $() // CHECK-LABEL: } // end sil function 'f010_addrlower_identity' -sil hidden @f010_addrlower_identity : $@convention(thin) (@in T) -> @out T { -bb0(%0 : $T): +sil [ossa] @f010_addrlower_identity : $@convention(thin) (@in T) -> @out T { +bb0(%0 : @owned $T): return %0 : $T } - -sil hidden [noinline] @f020_multiResult : $@convention(thin) (@in T) -> (@out T, @out T, @out T) { -bb0(%0 : $T): +// CHECK-LABEL: sil [ossa] @f020_multiResult : $@convention(thin) (@in T) -> (@out T, @out T, @out T) { +// CHECK: %0 "$return_value" +// CHECK: %1 "$return_value" +// CHECK: %2 "$return_value" +// CHECK: bb0(%0 : $*T, %1 : $*T, %2 : $*T, %3 : $*T): +// CHECK: copy_addr %3 to [initialization] %1 : $*T +// CHECK: copy_addr %3 to [initialization] %2 : $*T +// CHECK: copy_addr [take] %3 to [initialization] %0 : $*T +// CHECK: return %{{.*}} : $() +// CHECK-LABEL: } // end sil function 'f020_multiResult' +sil [ossa] @f020_multiResult : $@convention(thin) (@in T) -> (@out T, @out T, @out T) { +bb0(%0 : @owned $T): + %1 = copy_value %0 : $T %2 = copy_value %0 : $T - %3 = copy_value %0 : $T - %4 = copy_value %0 : $T - destroy_value %0 : $T - %6 = tuple (%2 : $T, %3 : $T, %4 : $T) + %6 = tuple (%0 : $T, %1 : $T, %2 : $T) return %6 : $(T, T, T) } // Test returning an opaque tuple of tuples as a concrete tuple. // The multiResult call is specialized, but the SIL result convention does not change. 
// --- -// CHECK-LABEL: sil @f021_callMultiResult : $@convention(thin) (Builtin.Int64) -> (Builtin.Int64, Builtin.Int64, Builtin.Int64) { +// CHECK-LABEL: sil [ossa] @f021_callMultiResult : $@convention(thin) (Builtin.Int64) -> (Builtin.Int64, Builtin.Int64, Builtin.Int64) { // CHECK: bb0(%0 : $Builtin.Int64): // CHECK: %[[FN:.*]] = function_ref @f020_multiResult : $@convention(thin) <τ_0_0> (@in τ_0_0) -> (@out τ_0_0, @out τ_0_0, @out τ_0_0) // CHECK: %[[IN:.*]] = alloc_stack $Builtin.Int64 -// CHECK: store %0 to %[[IN]] : $*Builtin.Int64 +// CHECK: store %0 to [trivial] %[[IN]] : $*Builtin.Int64 // CHECK: %[[OUT1:.*]] = alloc_stack $Builtin.Int64 // CHECK: %[[OUT2:.*]] = alloc_stack $Builtin.Int64 // CHECK: %[[OUT3:.*]] = alloc_stack $Builtin.Int64 // CHECK: %{{.*}} = apply %[[FN]](%[[OUT1]], %[[OUT2]], %[[OUT3]], %[[IN]]) : $@convention(thin) <τ_0_0> (@in τ_0_0) -> (@out τ_0_0, @out τ_0_0, @out τ_0_0) -// CHECK: %[[R3:.*]] = load %[[OUT3]] : $*Builtin.Int64 +// CHECK: %[[R3:.*]] = load [trivial] %[[OUT3]] : $*Builtin.Int64 // CHECK: dealloc_stack %[[OUT3]] : $*Builtin.Int64 -// CHECK: %[[R2:.*]] = load %[[OUT2]] : $*Builtin.Int64 +// CHECK: %[[R2:.*]] = load [trivial] %[[OUT2]] : $*Builtin.Int64 // CHECK: dealloc_stack %[[OUT2]] : $*Builtin.Int64 -// CHECK: %[[R1:.*]] = load %[[OUT1]] : $*Builtin.Int64 +// CHECK: %[[R1:.*]] = load [trivial] %[[OUT1]] : $*Builtin.Int64 // CHECK: dealloc_stack %[[OUT1]] : $*Builtin.Int64 // CHECK: dealloc_stack %[[IN]] : $*Builtin.Int64 // CHECK: %[[R:.*]] = tuple (%[[R1]] : $Builtin.Int64, %[[R2]] : $Builtin.Int64, %[[R3]] : $Builtin.Int64) // CHECK: return %[[R]] : $(Builtin.Int64, Builtin.Int64, Builtin.Int64) // CHECK-LABEL: } // end sil function 'f021_callMultiResult' -sil @f021_callMultiResult : $@convention(thin) (Int) -> (Int, Int, Int) { +sil [ossa] @f021_callMultiResult : $@convention(thin) (Int) -> (Int, Int, Int) { bb0(%0 : $Int): %1 = function_ref @f020_multiResult : $@convention(thin) <τ_0_0> (@in τ_0_0) -> (@out τ_0_0, @out τ_0_0, @out τ_0_0) %2 = apply %1(%0) : $@convention(thin) <τ_0_0> (@in τ_0_0) -> (@out τ_0_0, @out τ_0_0, @out τ_0_0) - %3 = tuple_extract %2 : $(Int, Int, Int), 0 - %4 = tuple_extract %2 : $(Int, Int, Int), 1 - %5 = tuple_extract %2 : $(Int, Int, Int), 2 + (%3, %4, %5) = destructure_tuple %2 : $(Int, Int, Int) %6 = tuple (%3 : $Int, %4 : $Int, %5 : $Int) return %6 : $(Int, Int, Int) } -// CHECK-LABEL: sil @f030_returnPair : $@convention(thin) (@in T) -> (@out T, @out T) { +// CHECK-LABEL: sil [ossa] @f030_returnPair : $@convention(thin) (@in T) -> (@out T, @out T) { // CHECK: bb0(%0 : $*T, %1 : $*T, %2 : $*T): -// CHECK: %[[LOCAL:.*]] = alloc_stack $T -// CHECK: copy_addr %2 to [initialization] %[[LOCAL]] : $*T -// CHECK: copy_addr [take] %[[LOCAL]] to [initialization] %0 : $*T -// CHECK: copy_addr [take] %2 to [initialization] %1 : $*T -// CHECK: %[[R:.*]] = tuple () -// CHECK: dealloc_stack %[[LOCAL]] : $*T -// CHECK: return %[[R]] : $() +// CHECK: copy_addr %2 to [initialization] %1 : $*T +// CHECK: copy_addr [take] %2 to [initialization] %0 : $*T +// CHECK: return %{{.*}} : $() // CHECK-LABEL: } // end sil function 'f030_returnPair' -sil @f030_returnPair : $@convention(thin) (@in T) -> (@out T, @out T) { -bb0(%0 : $T): - %2 = copy_value %0 : $T - %3 = tuple (%2 : $T, %0 : $T) +sil [ossa] @f030_returnPair : $@convention(thin) (@in T) -> (@out T, @out T) { +bb0(%0 : @owned $T): + %1 = copy_value %0 : $T + %3 = tuple (%0 : $T, %1 : $T) return %3 : $(T, T) } -// CHECK-LABEL: sil @f031_unusedIndirect : $@convention(thin) 
(@in T) -> @out T { +// CHECK-LABEL: sil [ossa] @f031_unusedIndirect : $@convention(thin) (@in T) -> @out T { // CHECK: bb0(%0 : $*T, %1 : $*T): // CHECK: %[[LOC0:.*]] = alloc_stack $T -// CHECK: %[[OUT1:.*]] = alloc_stack $T -// CHECK: %[[LOC1:.*]] = alloc_stack $T -// CHECK: %[[OUT2:.*]] = alloc_stack $T -// CHECK: %[[LOC2:.*]] = alloc_stack $T -// CHECK: // function_ref f030_returnPair // CHECK: %[[F:.*]] = function_ref @f030_returnPair : $@convention(thin) <τ_0_0> (@in τ_0_0) -> (@out τ_0_0, @out τ_0_0) -// CHECK: copy_addr %1 to [initialization] %[[LOC0]] : $*T -// CHECK: %[[R0:.*]] = apply %[[F]](%[[OUT1]], %[[OUT2]], %[[LOC0]]) : $@convention(thin) <τ_0_0> (@in τ_0_0) -> (@out τ_0_0, @out τ_0_0) -// CHECK: copy_addr %[[OUT1]] to [initialization] %[[LOC1]] : $*T -// CHECK: copy_addr %[[OUT2]] to [initialization] %[[LOC2]] : $*T -// CHECK: destroy_addr %[[OUT1]] : $*T -// CHECK: destroy_addr %[[OUT2]] : $*T -// CHECK: destroy_addr %[[LOC1]] : $*T -// CHECK: destroy_addr %1 : $*T -// CHECK: copy_addr [take] %[[LOC2]] to [initialization] %0 : $*T -// CHECK: %[[R:.*]] = tuple () -// CHECK: dealloc_stack %[[LOC2]] : $*T -// CHECK: dealloc_stack %[[OUT2]] : $*T -// CHECK: dealloc_stack %[[LOC1]] : $*T -// CHECK: dealloc_stack %[[OUT1]] : $*T +// CHECK: %[[R0:.*]] = apply %[[F]](%[[LOC0]], %0, %1) : $@convention(thin) <τ_0_0> (@in τ_0_0) -> (@out τ_0_0, @out τ_0_0) +// CHECK: destroy_addr %[[LOC0]] : $*T // CHECK: dealloc_stack %[[LOC0]] : $*T -// CHECK: return %[[R]] : $() +// CHECK: return %{{.*}} : $() // CHECK-LABEL: } // end sil function 'f031_unusedIndirect' -sil @f031_unusedIndirect : $@convention(thin) (@in T) -> @out T { -bb0(%0 : $T): +sil [ossa] @f031_unusedIndirect : $@convention(thin) (@in T) -> @out T { +bb0(%0 : @owned $T): %2 = function_ref @f030_returnPair : $@convention(thin) <τ_0_0> (@in τ_0_0) -> (@out τ_0_0, @out τ_0_0) - %3 = copy_value %0 : $T - %4 = apply %2(%3) : $@convention(thin) <τ_0_0> (@in τ_0_0) -> (@out τ_0_0, @out τ_0_0) - %5 = tuple_extract %4 : $(T, T), 0 - %6 = copy_value %5 : $T - %7 = tuple_extract %4 : $(T, T), 1 - %8 = copy_value %7 : $T - destroy_value %4 : $(T, T) - destroy_value %6 : $T - destroy_value %0 : $T - return %8 : $T + %4 = apply %2(%0) : $@convention(thin) <τ_0_0> (@in τ_0_0) -> (@out τ_0_0, @out τ_0_0) + (%5, %6) = destructure_tuple %4 : $(T, T) + destroy_value %5 : $T + return %6 : $T } -sil hidden @f040_consumeArg : $@convention(thin) (@in T) -> () { -bb0(%0 : $T): +// CHECK-LABEL: sil [ossa] @f040_consumeArg : $@convention(thin) (@in T) -> () { +// CHECK: destroy_addr %0 : $*T +// CHECK-LABEL: } // end sil function 'f040_consumeArg' +sil [ossa] @f040_consumeArg : $@convention(thin) (@in T) -> () { +bb0(%0 : @owned $T): destroy_value %0 : $T %3 = tuple () return %3 : $() } -// CHECK-LABEL: sil @f041_opaqueArg : $@convention(thin) (@in T) -> () { +// CHECK-LABEL: sil [ossa] @f041_opaqueArg : $@convention(thin) (@in T) -> () { // CHECK: bb0(%0 : $*T): // CHECK: %[[LOC:.*]] = alloc_stack $T // CHECK: %[[FN:.*]] = function_ref @f040_consumeArg : $@convention(thin) <τ_0_0> (@in τ_0_0) -> () @@ -138,8 +180,8 @@ bb0(%0 : $T): // CHECK: dealloc_stack %[[LOC]] : $*T // CHECK: return %[[R]] : $() // CHECK-LABEL: } // end sil function 'f041_opaqueArg' -sil @f041_opaqueArg : $@convention(thin) (@in T) -> () { -bb0(%0 : $T): +sil [ossa] @f041_opaqueArg : $@convention(thin) (@in T) -> () { +bb0(%0 : @owned $T): %2 = function_ref @f040_consumeArg : $@convention(thin) <τ_0_0> (@in τ_0_0) -> () %3 = copy_value %0 : $T %4 = apply %2(%3) : 
$@convention(thin) <τ_0_0> (@in τ_0_0) -> () @@ -148,83 +190,120 @@ bb0(%0 : $T): return %6 : $() } -// CHECK-LABEL: sil @f050_storeinout : $@convention(thin) (@inout T, @inout T, @in T) -> () { +// FIXME: Optimize transfers from indirect function args to indirect call args. +// Verify that a subsequent pass remove the temp allocation. +// +// CHECK-LABEL: sil [ossa] @f043_indirectGuaranteedArg : $@convention(thin) (@in C) -> () { +// CHECK: bb0(%0 : $*C): +// CHECK: [[LD:%.*]] = load [take] %0 : $*C +// CHECK: [[TMP:%.*]] = alloc_stack $C +// CHECK: [[B:%.*]] = begin_borrow [[LD]] : $C +// CHECK: [[SB:%.*]] = store_borrow [[B]] to [[TMP]] : $*C +// CHECK: apply %{{.*}}([[TMP]]) : $@convention(thin) (@in_guaranteed C) -> () +// CHECK: end_borrow [[B]] : $C +// CHECK: dealloc_stack [[TMP]] : $*C +// CHECK: destroy_value [[LD]] : $C +// CHECK-LABEL: } // end sil function 'f043_indirectGuaranteedArg' +sil [ossa] @f043_indirectGuaranteedArg : $@convention(thin) (@in C) -> () { +bb0(%0 : @owned $C): + %1 = function_ref @takeIndirectClass : $@convention(thin) (@in_guaranteed C) -> () + %2 = apply %1(%0) : $@convention(thin) (@in_guaranteed C) -> () + destroy_value %0 : $C + %6 = tuple () + return %6 : $() +} + +sil [ossa] @f044_indirectGuaranteed : $@convention(thin) (@in_guaranteed T) -> () + +// CHECK-LABEL: sil [ossa] @f045_indirectGuaranteedArg : $@convention(thin) (@in T) -> () { +// CHECK: bb0(%0 : $*T): +// CHECK: apply %{{.*}}(%0) : $@convention(thin) <τ_0_0> (@in_guaranteed τ_0_0) -> () +// CHECK: destroy_addr %0 : $*T +// CHECK-LABEL: } // end sil function 'f045_indirectGuaranteedArg' +sil [ossa] @f045_indirectGuaranteedArg : $@convention(thin) (@in T) -> () { +bb0(%0 : @owned $T): + %1 = function_ref @f044_indirectGuaranteed : $@convention(thin) <τ_0_0>(@in_guaranteed τ_0_0) -> () + %2 = apply %1(%0) : $@convention(thin) <τ_0_0>(@in_guaranteed τ_0_0) -> () + destroy_value %0 : $T + %6 = tuple () + return %6 : $() +} + +// CHECK-LABEL: sil [ossa] @f050_storeinout : $@convention(thin) (@inout T, @inout T, @in T) -> () { // CHECK: bb0(%0 : $*T, %1 : $*T, %2 : $*T): -// CHECK: %[[ARG1:.*]] = alloc_stack $T // CHECK: %[[PREV1:.*]] = alloc_stack $T -// CHECK: %[[ARG2:.*]] = alloc_stack $T // CHECK: %[[PREV2:.*]] = alloc_stack $T -// CHECK: debug_value %0 : $*T, var, name "t", argno 1, expr op_deref -// CHECK: debug_value %1 : $*T, var, name "u", argno 2, expr op_deref -// CHECK: debug_value %2 : $*T, {{.*}} expr op_deref -// CHECK: copy_addr %2 to [initialization] %[[ARG1]] : $*T +// CHECK: debug_value %0 : $*T, var, name "t", argno 1 +// CHECK: debug_value %1 : $*T, var, name "u", argno 2 +// CHECK: debug_value %2 : $*T // CHECK: copy_addr [take] %0 to [initialization] %[[PREV1]] : $*T -// CHECK: copy_addr [take] %[[ARG1]] to [initialization] %0 : $*T +// CHECK: copy_addr %2 to [initialization] %0 : $*T // CHECK: destroy_addr %[[PREV1]] : $*T -// CHECK: copy_addr %2 to [initialization] %[[ARG2]] : $*T -// CHECK: copy_addr [take] %1 to [initialization] %[[PREV2]] : $*T -// CHECK: copy_addr [take] %[[ARG2]] to [initialization] %1 : $*T +// CHECK: copy_addr %1 to [initialization] %[[PREV2]] : $*T +// CHECK: copy_addr %2 to %1 : $*T // CHECK: destroy_addr %[[PREV2]] : $*T // CHECK: destroy_addr %2 : $*T -// CHECK: %[[R:.*]] = tuple () // CHECK: dealloc_stack %[[PREV2]] : $*T -// CHECK: dealloc_stack %[[ARG2]] : $*T // CHECK: dealloc_stack %[[PREV1]] : $*T -// CHECK: dealloc_stack %[[ARG1]] : $*T -// CHECK: return %[[R]] : $() +// CHECK: return %{{.*}} : $() // CHECK-LABEL: } // end sil 
function 'f050_storeinout' -sil @f050_storeinout : $@convention(thin) (@inout T, @inout T, @in T) -> () { -bb0(%0 : $*T, %1 : $*T, %2 : $T): +sil [ossa] @f050_storeinout : $@convention(thin) (@inout T, @inout T, @in T) -> () { +bb0(%0 : $*T, %1 : $*T, %2 : @owned $T): debug_value %0 : $*T, var, name "t", argno 1, expr op_deref debug_value %1 : $*T, var, name "u", argno 2, expr op_deref debug_value %2 : $T, let, name "x", argno 3 + %7 = load [take] %0 : $*T %6 = copy_value %2 : $T - %7 = load %0 : $*T - store %6 to %0 : $*T + store %6 to [init] %0 : $*T destroy_value %7 : $T + %11 = load [copy] %1 : $*T %10 = copy_value %2 : $T - %11 = load %1 : $*T - store %10 to %1 : $*T + store %10 to [assign] %1 : $*T destroy_value %11 : $T destroy_value %2 : $T %15 = tuple () return %15 : $() } -sil hidden @f060_mutate : $@convention(thin) (@inout T, @in T) -> () { -bb0(%0 : $*T, %1 : $T): +// CHECK-LABEL: sil [ossa] @f060_mutate : $@convention(thin) (@inout T, @in T) -> () { +// CHECK: bb0(%0 : $*T, %1 : $*T): +// CHECK: [[A0:%.*]] = alloc_stack $T +// CHECK: copy_addr [take] %0 to [initialization] [[A0]] : $*T +// CHECK: copy_addr %1 to [initialization] %0 : $*T +// CHECK: destroy_addr [[A0]] : $*T +// CHECK: destroy_addr %1 : $*T +// CHECK: dealloc_stack [[A0]] : $*T +// CHECK-LABEL: } // end sil function 'f060_mutate' +sil [ossa] @f060_mutate : $@convention(thin) (@inout T, @in T) -> () { +bb0(%0 : $*T, %1 : @owned $T): %4 = copy_value %1 : $T - %5 = load %0 : $*T - store %4 to %0 : $*T + %5 = load [take] %0 : $*T + store %4 to [init] %0 : $*T destroy_value %5 : $T destroy_value %1 : $T %9 = tuple () return %9 : $() } -// CHECK-LABEL: sil @f061_callinout : $@convention(thin) (@in T) -> () { +// CHECK-LABEL: sil [ossa] @f061_callinout : $@convention(thin) (@in T) -> () { // CHECK: bb0(%0 : $*T): // CHECK: %[[LOC1:.*]] = alloc_stack $T -// CHECK: %[[LOC2:.*]] = alloc_stack $T // CHECK: %[[INOUT:.*]] = alloc_stack $T, var, name "u" -// CHECK: copy_addr %0 to [initialization] %[[LOC1]] : $*T -// CHECK: copy_addr [take] %[[LOC1]] to [initialization] %[[INOUT]] : $*T +// CHECK: copy_addr %0 to [initialization] %[[INOUT]] : $*T // CHECK: %[[FN:.*]] = function_ref @f060_mutate : $@convention(thin) <τ_0_0> (@inout τ_0_0, @in τ_0_0) -> () -// CHECK: copy_addr %0 to [initialization] %[[LOC2]] : $*T -// CHECK: %{{.*}} = apply %[[FN]](%[[INOUT]], %[[LOC2]]) : $@convention(thin) <τ_0_0> (@inout τ_0_0, @in τ_0_0) -> () +// CHECK: copy_addr %0 to [initialization] %[[LOC1]] : $*T +// CHECK: %{{.*}} = apply %[[FN]](%[[INOUT]], %[[LOC1]]) : $@convention(thin) <τ_0_0> (@inout τ_0_0, @in τ_0_0) -> () // CHECK: destroy_addr %[[INOUT]] : $*T // CHECK: destroy_addr %0 : $*T -// CHECK: %[[R:.*]] = tuple () // CHECK: dealloc_stack %[[INOUT]] : $*T -// CHECK: dealloc_stack %[[LOC2]] : $*T // CHECK: dealloc_stack %[[LOC1]] : $*T -// CHECK: return %[[R]] : $() +// CHECK: return %{{.*}} : $() // CHECK-LABEL: } // end sil function 'f061_callinout' -sil @f061_callinout : $@convention(thin) (@in T) -> () { -bb0(%0 : $T): +sil [ossa] @f061_callinout : $@convention(thin) (@in T) -> () { +bb0(%0 : @owned $T): %1 = alloc_stack $T, var, name "u" %3 = copy_value %0 : $T - store %3 to %1 : $*T + store %3 to [init] %1 : $*T %5 = function_ref @f060_mutate : $@convention(thin) <τ_0_0> (@inout τ_0_0, @in τ_0_0) -> () %6 = copy_value %0 : $T %7 = apply %5(%1, %6) : $@convention(thin) <τ_0_0> (@inout τ_0_0, @in τ_0_0) -> () @@ -235,242 +314,233 @@ bb0(%0 : $T): return %10 : $() } -public protocol C : class {} - -// CHECK-LABEL: sil 
@f070_mixedResult1 : $@convention(thin) (@in T, @owned C) -> (@out T, @owned C) { -// CHECK: bb0(%0 : $*T, %1 : $*T, %2 : $C): +// CHECK-LABEL: sil [ossa] @f070_mixedResult1 : $@convention(thin) (@in T, @owned C) -> (@out T, @owned C) { +// CHECK: bb0(%0 : $*T, %1 : $*T, %2 : @owned $C): // CHECK: copy_addr [take] %1 to [initialization] %0 : $*T // CHECK: return %2 : $C // CHECK-LABEL: } // end sil function 'f070_mixedResult1' -sil @f070_mixedResult1 : $@convention(thin) (@in T, @owned C) -> (@out T, @owned C) { -bb0(%0 : $T, %1 : $C): +sil [ossa] @f070_mixedResult1 : $@convention(thin) (@in T, @owned C) -> (@out T, @owned C) { +bb0(%0 : @owned $T, %1 : @owned $C): %4 = tuple (%0 : $T, %1 : $C) return %4 : $(T, C) } -// CHECK-LABEL: sil @f071_mixedResult2 : $@convention(thin) (@in T, @owned C) -> (@out T, @out T, @owned C, @owned C) { -// CHECK: bb0(%0 : $*T, %1 : $*T, %2 : $*T, %3 : $C): -// CHECK: %[[L:.*]] = alloc_stack $T -// CHECK: copy_addr %2 to [initialization] %[[L]] : $*T -// CHECK: strong_retain %3 : $C -// CHECK: copy_addr [take] %[[L]] to [initialization] %0 : $*T +// CHECK-LABEL: sil [ossa] @f071_mixedResult2 : $@convention(thin) (@in T, @owned C) -> (@out T, @out T, @owned C, @owned C) { +// CHECK: bb0(%0 : $*T, %1 : $*T, %2 : $*T, %3 : @owned $C): +// CHECK: copy_addr %2 to [initialization] %0 : $*T +// CHECK: [[C:%.*]] = copy_value %3 : $C // CHECK: copy_addr [take] %2 to [initialization] %1 : $*T -// CHECK: %[[T:.*]] = tuple (%3 : $C, %3 : $C) -// CHECK: dealloc_stack %[[L]] : $*T -// CHECK: return %[[T]] : $(C, C) +// CHECK: [[T:%.*]] = tuple ([[C]] : $C, %3 : $C) +// CHECK: return [[T]] : $(C, C) // CHECK-LABEL: } // end sil function 'f071_mixedResult2' -sil @f071_mixedResult2 : $@convention(thin) (@in T, @owned C) -> (@out T, @out T, @owned C, @owned C) { -bb0(%0 : $T, %1 : $C): +sil [ossa] @f071_mixedResult2 : $@convention(thin) (@in T, @owned C) -> (@out T, @out T, @owned C, @owned C) { +bb0(%0 : @owned $T, %1 : @owned $C): %4 = copy_value %0 : $T - strong_retain %1 : $C - %6 = tuple (%4 : $T, %0 : $T, %1 : $C, %1 : $C) + %5 = copy_value %1 : $C + %6 = tuple (%4 : $T, %0 : $T, %5 : $C, %1 : $C) return %6 : $(T, T, C, C) } -// CHECK-LABEL: sil @f072_callMixedResult1 : $@convention(thin) (@in T, @owned C) -> (@out T, @owned C) { -// CHECK: bb0(%0 : $*T, %1 : $*T, %2 : $C): -// CHECK: %[[LIN:.*]] = alloc_stack $T -// CHECK: %[[OUT:.*]] = alloc_stack $T -// CHECK: %[[LOUT:.*]] = alloc_stack $T +// CHECK-LABEL: sil [ossa] @f072_callMixedResult1 : $@convention(thin) (@in T, @owned C) -> (@out T, @owned C) { +// CHECK: bb0(%0 : $*T, %1 : $*T, %2 : @owned $C): +// CHECK: [[IN:%.*]] = alloc_stack $T // CHECK: // function_ref f070_mixedResult1 -// CHECK: %[[F:.*]] = function_ref @f070_mixedResult1 : $@convention(thin) <τ_0_0> (@in τ_0_0, @owned C) -> (@out τ_0_0, @owned C) -// CHECK: copy_addr %1 to [initialization] %[[LIN]] : $*T -// CHECK: strong_retain %2 : $C -// CHECK: %[[R:.*]] = apply %[[F]](%[[OUT]], %[[LIN]], %2) : $@convention(thin) <τ_0_0> (@in τ_0_0, @owned C) -> (@out τ_0_0, @owned C) -// CHECK: copy_addr %[[OUT]] to [initialization] %[[LOUT]] : $*T -// CHECK: strong_retain %[[R]] : $C -// CHECK: destroy_addr %[[OUT]] : $*T -// CHECK: strong_release %[[R]] : $C -// CHECK: strong_release %2 : $C +// CHECK: [[F:%.*]] = function_ref @f070_mixedResult1 : $@convention(thin) <τ_0_0> (@in τ_0_0, @owned C) -> (@out τ_0_0, @owned C) +// CHECK: copy_addr %1 to [initialization] [[IN]] : $*T +// CHECK: [[C:%.*]] = copy_value %2 : $C +// CHECK: [[R:%.*]] = apply [[F]](%0, 
[[IN]], [[C]]) : $@convention(thin) <τ_0_0> (@in τ_0_0, @owned C) -> (@out τ_0_0, @owned C) +// CHECK: destroy_value %2 : $C // CHECK: destroy_addr %1 : $*T -// CHECK: copy_addr [take] %[[LOUT]] to [initialization] %0 : $*T -// CHECK: dealloc_stack %[[LOUT]] : $*T -// CHECK: dealloc_stack %[[OUT]] : $*T -// CHECK: dealloc_stack %[[LIN]] : $*T -// CHECK: return %[[R]] : $C +// CHECK: dealloc_stack [[IN]] : $*T +// CHECK: return [[R]] : $C // CHECK-LABEL: } // end sil function 'f072_callMixedResult1' -sil @f072_callMixedResult1 : $@convention(thin) (@in T, @owned C) -> (@out T, @owned C) { -bb0(%0 : $T, %1 : $C): +sil [ossa] @f072_callMixedResult1 : $@convention(thin) (@in T, @owned C) -> (@out T, @owned C) { +bb0(%0 : @owned $T, %1 : @owned $C): %4 = function_ref @f070_mixedResult1 : $@convention(thin) <τ_0_0> (@in τ_0_0, @owned C) -> (@out τ_0_0, @owned C) %5 = copy_value %0 : $T - strong_retain %1 : $C - %7 = apply %4(%5, %1) : $@convention(thin) <τ_0_0> (@in τ_0_0, @owned C) -> (@out τ_0_0, @owned C) - %8 = tuple_extract %7 : $(T, C), 0 - %9 = copy_value %8 : $T - %10 = tuple_extract %7 : $(T, C), 1 - strong_retain %10 : $C - destroy_value %7 : $(T, C) - strong_release %1 : $C + %6 = copy_value %1 : $C + %7 = apply %4(%5, %6) : $@convention(thin) <τ_0_0> (@in τ_0_0, @owned C) -> (@out τ_0_0, @owned C) + (%8, %9) = destructure_tuple %7 : $(T, C) + destroy_value %1 : $C destroy_value %0 : $T - %15 = tuple (%9 : $T, %10 : $C) + %15 = tuple (%8 : $T, %9 : $C) return %15 : $(T, C) } -// CHECK-LABEL: sil @f073_callMixedResult2 : $@convention(thin) (@in T, @owned C) -> (@out T, @out T, @owned C, @owned C) { -// CHECK: bb0(%0 : $*T, %1 : $*T, %2 : $*T, %3 : $C): -// CHECK: %[[LOC0:.*]] = alloc_stack $T -// CHECK: %[[OUT1:.*]] = alloc_stack $T -// CHECK: %[[LOC1:.*]] = alloc_stack $T -// CHECK: %[[OUT2:.*]] = alloc_stack $T -// CHECK: %[[LOC2:.*]] = alloc_stack $T -// CHECK: %[[F:.*]] = function_ref @f071_mixedResult2 : $@convention(thin) <τ_0_0> (@in τ_0_0, @owned C) -> (@out τ_0_0, @out τ_0_0, @owned C, @owned C) -// CHECK: copy_addr %2 to [initialization] %[[LOC0]] : $*T -// CHECK: strong_retain %3 : $C -// CHECK: %[[R:.*]] = apply %[[F]](%[[OUT1]], %[[OUT2]], %[[LOC0]], %3) : $@convention(thin) <τ_0_0> (@in τ_0_0, @owned C) -> (@out τ_0_0, @out τ_0_0, @owned C, @owned C) -// CHECK: %[[T2:.*]] = tuple_extract %[[R]] : $(C, C), 1 -// CHECK: %[[T1:.*]] = tuple_extract %[[R]] : $(C, C), 0 -// CHECK: copy_addr %[[OUT1]] to [initialization] %[[LOC1]] : $*T -// CHECK: copy_addr %[[OUT2]] to [initialization] %[[LOC2]] : $*T -// CHECK: strong_retain %[[T1]] : $C -// CHECK: strong_retain %[[T2]] : $C -// CHECK: destroy_addr %[[OUT1]] : $*T -// CHECK: destroy_addr %[[OUT2]] : $*T -// CHECK: strong_release %[[T1]] : $C -// CHECK: strong_release %[[T2]] : $C -// CHECK: strong_release %3 : $C +// CHECK-LABEL: sil [ossa] @f073_callMixedResult2 : $@convention(thin) (@in T, @owned C) -> (@out T, @out T, @owned C, @owned C) { +// CHECK: bb0(%0 : $*T, %1 : $*T, %2 : $*T, %3 : @owned $C): +// CHECK: [[IN:%.*]] = alloc_stack $T +// CHECK: [[F:%.*]] = function_ref @f071_mixedResult2 : $@convention(thin) <τ_0_0> (@in τ_0_0, @owned C) -> (@out τ_0_0, @out τ_0_0, @owned C, @owned C) +// CHECK: copy_addr %2 to [initialization] [[IN]] : $*T +// CHECK: [[C:%.*]] = copy_value %3 : $C +// CHECK: [[T:%.*]] = apply [[F]](%0, %1, [[IN]], [[C]]) : $@convention(thin) <τ_0_0> (@in τ_0_0, @owned C) -> (@out τ_0_0, @out τ_0_0, @owned C, @owned C) +// CHECK: ([[OUT0:%.*]], [[OUT1:%.*]]) = destructure_tuple [[T]] : $(C, C) +// 
CHECK: destroy_value %3 : $C // CHECK: destroy_addr %2 : $*T -// CHECK: copy_addr [take] %[[LOC1]] to [initialization] %0 : $*T -// CHECK: copy_addr [take] %[[LOC2]] to [initialization] %1 : $*T -// CHECK: %[[T:.*]] = tuple (%[[T1]] : $C, %[[T2]] : $C) -// CHECK: dealloc_stack %[[LOC2]] : $*T -// CHECK: dealloc_stack %[[OUT2]] : $*T -// CHECK: dealloc_stack %[[LOC1]] : $*T -// CHECK: dealloc_stack %[[OUT1]] : $*T -// CHECK: dealloc_stack %[[LOC0]] : $*T -// CHECK: return %[[T]] : $(C, C) +// CHECK: [[R:%.*]] = tuple ([[OUT0]] : $C, [[OUT1]] : $C) +// CHECK: dealloc_stack [[IN]] : $*T +// CHECK: return [[R]] : $(C, C) // CHECK-LABEL: } // end sil function 'f073_callMixedResult2' -sil @f073_callMixedResult2 : $@convention(thin) (@in T, @owned C) -> (@out T, @out T, @owned C, @owned C) { -bb0(%0 : $T, %1 : $C): +sil [ossa] @f073_callMixedResult2 : $@convention(thin) (@in T, @owned C) -> (@out T, @out T, @owned C, @owned C) { +bb0(%0 : @owned $T, %1 : @owned $C): %4 = function_ref @f071_mixedResult2 : $@convention(thin) <τ_0_0> (@in τ_0_0, @owned C) -> (@out τ_0_0, @out τ_0_0, @owned C, @owned C) %5 = copy_value %0 : $T - strong_retain %1 : $C - %7 = apply %4(%5, %1) : $@convention(thin) <τ_0_0> (@in τ_0_0, @owned C) -> (@out τ_0_0, @out τ_0_0, @owned C, @owned C) - %8 = tuple_extract %7 : $(T, T, C, C), 0 - %9 = copy_value %8 : $T - %10 = tuple_extract %7 : $(T, T, C, C), 1 - %11 = copy_value %10 : $T - %12 = tuple_extract %7 : $(T, T, C, C), 2 - strong_retain %12 : $C - %14 = tuple_extract %7 : $(T, T, C, C), 3 - strong_retain %14 : $C - destroy_value %7 : $(T, T, C, C) - strong_release %1 : $C + %6 = copy_value %1 : $C + %7 = apply %4(%5, %6) : $@convention(thin) <τ_0_0> (@in τ_0_0, @owned C) -> (@out τ_0_0, @out τ_0_0, @owned C, @owned C) + (%8, %9, %10, %11) = destructure_tuple %7 : $(T, T, C, C) + destroy_value %1 : $C destroy_value %0 : $T - %19 = tuple (%9 : $T, %11 : $T, %12 : $C, %14 : $C) + %19 = tuple (%8 : $T, %9 : $T, %10 : $C, %11 : $C) return %19 : $(T, T, C, C) } -sil_default_witness_table C {} +sil [ossa] @returnMixedResult3 : $@convention(thin) () -> (@out T, @out C, @owned C) -enum Optional { - case none - case some(T) +// Example directly from the comments in +// ApplyRewriter::convertApplyWithIndirectResults() +// +// FIXME: verify that a subsequent pass remove the temp allocation. 
+// +// CHECK-LABEL: sil [ossa] @f074_callMixedResult3 : $@convention(thin) () -> (@out T, @out C, @owned C) { +// CHECK: bb0(%0 : $*T, %1 : $*C): +// CHECK: [[OUT1:%.*]] = alloc_stack $C +// CHECK: [[OUT2:%.*]] = apply %{{.*}}(%0, [[OUT1]]) : $@convention(thin) <τ_0_0> () -> (@out τ_0_0, @out C, @owned C) +// CHECK: [[LD:%.*]] = load [take] [[OUT1]] : $*C +// CHECK: dealloc_stack [[OUT1]] : $*C +// CHECK: store [[LD]] to [init] %1 : $*C +// CHECK: return [[OUT2]] : $C +// CHECK-LABEL: } // end sil function 'f074_callMixedResult3' +sil [ossa] @f074_callMixedResult3 : $@convention(thin) () -> (@out T, @out C, @owned C) { +bb0: + %0 = function_ref @returnMixedResult3 : $@convention(thin) () -> (@out T, @out C, @owned C) + %1 = apply %0() : $@convention(thin) () -> (@out T, @out C, @owned C) + (%2, %3, %4) = destructure_tuple %1 : $(T, C, C) + %5 = tuple (%2 : $T, %3 : $C, %4 : $C) + return %5 : $(T, C, C) +} + +// CHECK-LABEL: sil [ossa] @f075_reusedResult : $@convention(thin) (@in T, @owned C) -> (@out T, @owned C) { +// CHECK: bb0(%0 : $*T, %1 : $*T, %2 : @owned $C): +// CHECK: [[TUPLE:%.*]] = alloc_stack $(T, C) +// CHECK: [[E1:%.*]] = tuple_element_addr [[TUPLE]] : $*(T, C), 0 +// CHECK: copy_addr [take] %1 to [initialization] [[E1]] : $*T +// CHECK: [[E2:%.*]] = tuple_element_addr [[TUPLE]] : $*(T, C), 1 +// CHECK: store %2 to [init] [[E2]] : $*C +// CHECK: apply %{{.*}}([[TUPLE]]) : $@convention(thin) <τ_0_0> (@in_guaranteed (τ_0_0, C)) -> () +// CHECK: [[E1:%.*]] = tuple_element_addr [[TUPLE]] : $*(T, C), 0 +// CHECK: [[E2:%.*]] = tuple_element_addr [[TUPLE]] : $*(T, C), 1 +// CHECK: [[LD:%.*]] = load [take] [[E2]] : $*C +// CHECK: copy_addr [take] [[E1]] to [initialization] %0 : $*T +// CHECK: dealloc_stack [[TUPLE]] : $*(T, C) +// CHECK: return [[LD]] : $C +// CHECK-LABEL: } // end sil function 'f075_reusedResult' +sil [ossa] @f075_reusedResult : $@convention(thin) (@in T, @owned C) -> (@out T, @owned C) { +bb0(%0 : @owned $T, %1 : @owned $C): + %2 = tuple (%0 : $T, %1 : $C) + %f = function_ref @takeTuple : $@convention(thin) <τ_0_0> (@in_guaranteed (τ_0_0, C)) -> () + %c = apply %f(%2) : $@convention(thin) <τ_0_0> (@in_guaranteed (τ_0_0, C)) -> () + return %2 : $(T, C) } -// CHECK-LABEL: sil @f080_optional : $@convention(thin) (@in T) -> @out Optional { +// CHECK-LABEL: sil [ossa] @f080_optional : $@convention(thin) (@in T) -> @out Optional { // CHECK: bb0(%0 : $*Optional, %1 : $*T): -// CHECK: %[[L1:.*]] = alloc_stack $T -// CHECK: %[[L2:.*]] = alloc_stack $Optional -// CHECK: copy_addr %1 to [initialization] %[[L1]] : $*T -// CHECK: %[[DATA:.*]] = init_enum_data_addr %[[L2]] : $*Optional, #Optional.some!enumelt -// CHECK: copy_addr [take] %[[L1]] to [initialization] %[[DATA]] : $*T -// CHECK: inject_enum_addr %[[L2]] : $*Optional, #Optional.some!enumelt +// CHECK: [[DATA:%.*]] = init_enum_data_addr %0 : $*Optional, #Optional.some!enumelt +// CHECK: copy_addr %1 to [initialization] [[DATA]] : $*T +// CHECK: inject_enum_addr %0 : $*Optional, #Optional.some!enumelt // CHECK: destroy_addr %1 : $*T -// CHECK: copy_addr [take] %[[L2]] to [initialization] %0 : $*Optional -// CHECK: %[[T:.*]] = tuple () -// CHECK: dealloc_stack %[[L2]] : $*Optional -// CHECK: dealloc_stack %[[L1]] : $*T -// CHECK: return %[[T]] : $() +// CHECK: return %{{.*}} : $() // CHECK-LABEL: } // end sil function 'f080_optional' -sil @f080_optional : $@convention(thin) (@in T) -> @out Optional { -bb0(%0 : $T): +sil [ossa] @f080_optional : $@convention(thin) (@in T) -> @out Optional { +bb0(%0 : @owned $T): %cpy = 
copy_value %0 : $T %opt = enum $Optional, #Optional.some!enumelt, %cpy : $T destroy_value %0 : $T return %opt : $Optional } -// CHECK-LABEL: sil @f090_tupletuple : $@convention(thin) ((Builtin.Int64, Builtin.Int64), Builtin.Int64) -> (@out (Builtin.Int64, Builtin.Int64), @out (Builtin.Int64, Builtin.Int64), Builtin.Int64, Builtin.Int64) { +// CHECK-LABEL: sil [ossa] @f081_unwrap : $@convention(thin) (@in Optional) -> () { +// CHECK: bb0(%0 : $*Optional): +// CHECK: [[A:%.*]] = unchecked_take_enum_data_addr %0 : $*Optional, #Optional.some!enumelt +// CHECK: apply %{{.*}}([[A]]) : $@convention(thin) <τ_0_0> (@in τ_0_0) -> () +// CHECK-LABEL: } // end sil function 'f081_unwrap' +sil [ossa] @f081_unwrap : $@convention(thin) (@in Optional) -> () { +bb0(%0 : @owned $Optional): + %d = unchecked_enum_data %0 : $Optional, #Optional.some!enumelt + %f = function_ref @takeIn : $@convention(thin) <τ_0_0> (@in τ_0_0) -> () + %call = apply %f(%d) : $@convention(thin) <τ_0_0> (@in τ_0_0) -> () + %4 = tuple () + return %4 : $() +} + +// CHECK-LABEL: sil [ossa] @f090_tupletuple : $@convention(thin) ((Builtin.Int64, Builtin.Int64), Builtin.Int64) -> (@out (Builtin.Int64, Builtin.Int64), @out (Builtin.Int64, Builtin.Int64), Builtin.Int64, Builtin.Int64) { // CHECK: bb0(%0 : $*(Builtin.Int64, Builtin.Int64), %1 : $*(Builtin.Int64, Builtin.Int64), %2 : $(Builtin.Int64, Builtin.Int64), %3 : $Builtin.Int64): -// CHECK: store %2 to %0 : $*(Builtin.Int64, Builtin.Int64) -// CHECK: store %2 to %1 : $*(Builtin.Int64, Builtin.Int64) +// CHECK: store %2 to [trivial] %0 : $*(Builtin.Int64, Builtin.Int64) +// CHECK: store %2 to [trivial] %1 : $*(Builtin.Int64, Builtin.Int64) // CHECK: %[[T:.*]] = tuple (%3 : $Builtin.Int64, %3 : $Builtin.Int64) // CHECK: return %[[T]] : $(Builtin.Int64, Builtin.Int64) // CHECK-LABEL: } // end sil function 'f090_tupletuple' -sil @f090_tupletuple : $@convention(thin) ((Int, Int), Int) -> (@out (Int, Int), @out (Int, Int), Int, Int) { +sil [ossa] @f090_tupletuple : $@convention(thin) ((Int, Int), Int) -> (@out (Int, Int), @out (Int, Int), Int, Int) { bb0(%0 : $(Int, Int), %1 : $Int): %2 = tuple (%0 : $(Int, Int), %0 : $(Int, Int), %1 : $Int, %1 : $Int) return %2 : $((Int, Int), (Int, Int), Int, Int) } -// CHECK-LABEL: sil @f091_callTuple : $@convention(thin) (Builtin.Int64) -> (Builtin.Int64, Builtin.Int64, Builtin.Int64, Builtin.Int64, Builtin.Int64, Builtin.Int64) { +// CHECK-LABEL: sil [ossa] @f091_callTuple : $@convention(thin) (Builtin.Int64) -> (Builtin.Int64, Builtin.Int64, Builtin.Int64, Builtin.Int64, Builtin.Int64, Builtin.Int64) { // CHECK: bb0(%0 : $Builtin.Int64): -// CHECK: %[[T1:.*]] = tuple (%0 : $Builtin.Int64, %0 : $Builtin.Int64) -// CHECK: %[[F:.*]] = function_ref @f090_tupletuple : $@convention(thin) ((Builtin.Int64, Builtin.Int64), Builtin.Int64) -> (@out (Builtin.Int64, Builtin.Int64), @out (Builtin.Int64, Builtin.Int64), Builtin.Int64, Builtin.Int64) -// CHECK: %[[O1:.*]] = alloc_stack $(Builtin.Int64, Builtin.Int64) -// CHECK: %[[O2:.*]] = alloc_stack $(Builtin.Int64, Builtin.Int64) -// CHECK: %[[RT:.*]] = apply %[[F]](%[[O1]], %4, %1, %0) : $@convention(thin) ((Builtin.Int64, Builtin.Int64), Builtin.Int64) -> (@out (Builtin.Int64, Builtin.Int64), @out (Builtin.Int64, Builtin.Int64), Builtin.Int64, Builtin.Int64) -// CHECK: %[[R1:.*]] = tuple_extract %[[RT]] : $(Builtin.Int64, Builtin.Int64), 1 -// CHECK: %[[R0:.*]] = tuple_extract %[[RT]] : $(Builtin.Int64, Builtin.Int64), 0 -// CHECK: %[[L2:.*]] = load %[[O2]] : $*(Builtin.Int64, Builtin.Int64) -// CHECK: 
dealloc_stack %[[O2]] : $*(Builtin.Int64, Builtin.Int64) -// CHECK: %[[L1:.*]] = load %[[O1]] : $*(Builtin.Int64, Builtin.Int64) -// CHECK: dealloc_stack %[[O1]] : $*(Builtin.Int64, Builtin.Int64) -// CHECK: %[[E10:.*]] = tuple_extract %[[L1]] : $(Builtin.Int64, Builtin.Int64), 0 -// CHECK: %[[E11:.*]] = tuple_extract %[[L1]] : $(Builtin.Int64, Builtin.Int64), 1 -// CHECK: %[[E20:.*]] = tuple_extract %[[L2]] : $(Builtin.Int64, Builtin.Int64), 0 -// CHECK: %[[E21:.*]] = tuple_extract %[[L2]] : $(Builtin.Int64, Builtin.Int64), 1 -// CHECK: %[[RET:.*]] = tuple (%[[E10]] : $Builtin.Int64, %[[E11]] : $Builtin.Int64, %[[E20]] : $Builtin.Int64, %[[E21]] : $Builtin.Int64, %[[R0]] : $Builtin.Int64, %[[R1]] : $Builtin.Int64) -// CHECK: return %[[RET]] : $(Builtin.Int64, Builtin.Int64, Builtin.Int64, Builtin.Int64, Builtin.Int64, Builtin.Int64) +// CHECK: [[T1:%.*]] = tuple (%0 : $Builtin.Int64, %0 : $Builtin.Int64) +// CHECK: [[F:%.*]] = function_ref @f090_tupletuple : $@convention(thin) ((Builtin.Int64, Builtin.Int64), Builtin.Int64) -> (@out (Builtin.Int64, Builtin.Int64), @out (Builtin.Int64, Builtin.Int64), Builtin.Int64, Builtin.Int64) +// CHECK: [[O1:%.*]] = alloc_stack $(Builtin.Int64, Builtin.Int64) +// CHECK: [[O2:%.*]] = alloc_stack $(Builtin.Int64, Builtin.Int64) +// CHECK: [[R:%.*]] = apply [[F]]([[O1]], [[O2]], %1, %0) : $@convention(thin) ((Builtin.Int64, Builtin.Int64), Builtin.Int64) -> (@out (Builtin.Int64, Builtin.Int64), @out (Builtin.Int64, Builtin.Int64), Builtin.Int64, Builtin.Int64) +// CHECK: [[L2:%.*]] = load [trivial] [[O2]] : $*(Builtin.Int64, Builtin.Int64) +// CHECK: dealloc_stack [[O2]] : $*(Builtin.Int64, Builtin.Int64) +// CHECK: [[L1:%.*]] = load [trivial] [[O1]] : $*(Builtin.Int64, Builtin.Int64) +// CHECK: dealloc_stack [[O1]] : $*(Builtin.Int64, Builtin.Int64) +// CHECK: ([[R4:%.*]], [[R5:%.*]]) = destructure_tuple [[R]] : $(Builtin.Int64, Builtin.Int64) +// CHECK: ([[R0:%.*]], [[R1:%.*]]) = destructure_tuple [[L1]] : $(Builtin.Int64, Builtin.Int64) +// CHECK: ([[R2:%.*]], [[R3:%.*]]) = destructure_tuple [[L2]] : $(Builtin.Int64, Builtin.Int64) +// CHECK: [[RET:%.*]] = tuple ([[R0]] : $Builtin.Int64, [[R1]] : $Builtin.Int64, [[R2]] : $Builtin.Int64, [[R3]] : $Builtin.Int64, [[R4]] : $Builtin.Int64, [[R5]] : $Builtin.Int64) +// CHECK: return [[RET]] : $(Builtin.Int64, Builtin.Int64, Builtin.Int64, Builtin.Int64, Builtin.Int64, Builtin.Int64) // CHECK-LABEL: } // end sil function 'f091_callTuple' -sil @f091_callTuple : $@convention(thin) (Int) -> (Int, Int, Int, Int, Int, Int) { +sil [ossa] @f091_callTuple : $@convention(thin) (Int) -> (Int, Int, Int, Int, Int, Int) { bb0(%0: $Int): %1 = tuple (%0 : $Int, %0 : $Int) %2 = function_ref @f090_tupletuple : $@convention(thin) ((Int, Int), Int) -> (@out (Int, Int), @out (Int, Int), Int, Int) %3 = apply %2(%1, %0) : $@convention(thin) ((Int, Int), Int) -> (@out (Int, Int), @out (Int, Int), Int, Int) - %9 = tuple_extract %3 : $((Int, Int), (Int, Int), Int, Int), 0 - %10 = tuple_extract %3 : $((Int, Int), (Int, Int), Int, Int), 1 - %11 = tuple_extract %3 : $((Int, Int), (Int, Int), Int, Int), 2 - %12 = tuple_extract %3 : $((Int, Int), (Int, Int), Int, Int), 3 - %13 = tuple_extract %9 : $(Int, Int), 0 - %14 = tuple_extract %9 : $(Int, Int), 1 - %15 = tuple_extract %10 : $(Int, Int), 0 - %16 = tuple_extract %10 : $(Int, Int), 1 - %17 = tuple (%13 : $Int, %14 : $Int, %15 : $Int, %16 : $Int, %11 : $Int, %12 : $Int) - return %17 : $(Int, Int, Int, Int, Int, Int) -} - -// CHECK-LABEL: sil hidden @f100_any : $@convention(thin) (@in 
Any) -> () { + (%4, %5, %6, %7) = destructure_tuple %3 : $((Int, Int), (Int, Int), Int, Int) + (%8, %9) = destructure_tuple %4 : $(Int, Int) + (%10, %11) = destructure_tuple %5 : $(Int, Int) + %12 = tuple (%8 : $Int, %9 : $Int, %10 : $Int, %11 : $Int, %6 : $Int, %7 : $Int) + return %12 : $(Int, Int, Int, Int, Int, Int) +} + +// CHECK-LABEL: sil [ossa] @f100_any : $@convention(thin) (@in Any) -> () { // CHECK: bb0(%0 : $*Any): // CHECK: destroy_addr %0 : $*Any // CHECK: %[[T:.*]] = tuple () // CHECK: return %[[T]] : $() // CHECK-LABEL: } // end sil function 'f100_any' -sil hidden @f100_any : $@convention(thin) (@in Any) -> () { -bb0(%0 : $Any): +sil [ossa] @f100_any : $@convention(thin) (@in Any) -> () { +bb0(%0 : @owned $Any): debug_value %0 : $Any, let, name "any", argno 1 destroy_value %0 : $Any %3 = tuple () return %3 : $() } -// CHECK-LABEL: sil @f101_passAny : $@convention(thin) (@in T) -> () { +// CHECK-LABEL: sil [ossa] @f101_passAny : $@convention(thin) (@in T) -> () { // CHECK: bb0(%0 : $*T): -// CHECK: %[[T1:.*]] = alloc_stack $T // CHECK: %[[A:.*]] = alloc_stack $Any // CHECK: %[[F:.*]] = function_ref @f100_any : $@convention(thin) (@in Any) -> () -// CHECK: copy_addr %0 to [initialization] %[[T1]] : $*T // CHECK: %[[T2:.*]] = init_existential_addr %[[A]] : $*Any, $T -// CHECK: copy_addr [take] %[[T1]] to [initialization] %[[T2]] : $*T +// CHECK: copy_addr %0 to [initialization] %[[T2]] : $*T // CHECK: %{{.*}} = apply %[[F]](%[[A]]) : $@convention(thin) (@in Any) -> () // CHECK: destroy_addr %0 : $*T -// CHECK: %[[R:.*]] = tuple () // CHECK: dealloc_stack %[[A]] : $*Any -// CHECK: dealloc_stack %[[T1]] : $*T -// CHECK: return %[[R]] : $() +// CHECK: return %{{.*}} : $() // CHECK-LABEL: } // end sil function 'f101_passAny' -sil @f101_passAny : $@convention(thin) (@in T) -> () { -bb0(%0 : $T): +sil [ossa] @f101_passAny : $@convention(thin) (@in T) -> () { +bb0(%0 : @owned $T): %2 = function_ref @f100_any : $@convention(thin) (@in Any) -> () %3 = copy_value %0 : $T %4 = init_existential_value %3 : $T, $T, $Any @@ -482,27 +552,527 @@ bb0(%0 : $T): // Test convertIndirectFunctionArgs and init_existential_value on concrete // types. 
-// CHECK-LABEL: sil @f102_passAnyObjectAsAny : $@convention(thin) (@in AnyObject) -> () { +// CHECK-LABEL: sil [ossa] @f102_passAnyObjectAsAny : $@convention(thin) (@in AnyObject) -> () { // CHECK: bb0(%0 : $*AnyObject): -// CHECK: %[[A:.*]] = alloc_stack $Any -// CHECK: %[[ARG:.*]] = load %0 : $*AnyObject -// CHECK: %[[F:.*]] = function_ref @f100_any : $@convention(thin) (@in Any) -> () -// CHECK: strong_retain %[[ARG]] : $AnyObject -// CHECK: %[[VAL:.*]] = init_existential_addr %[[A]] : $*Any, $AnyObject -// CHECK: store %[[ARG]] to %[[VAL]] : $*AnyObject -// CHECK: %{{.*}} = apply %[[F]](%[[A]]) : $@convention(thin) (@in Any) -> () -// CHECK: strong_release %[[ARG]] : $AnyObject -// CHECK: %[[R:.*]] = tuple () -// CHECK: dealloc_stack %[[A]] : $*Any -// CHECK: return %[[R]] : $() +// CHECK: [[A:%.*]] = alloc_stack $Any +// CHECK: [[ARG:%.*]] = load [take] %0 : $*AnyObject +// CHECK: [[F:%.*]] = function_ref @f100_any : $@convention(thin) (@in Any) -> () +// CHECK: [[VAL:%.*]] = init_existential_addr [[A]] : $*Any, $AnyObject +// CHECK: store [[ARG]] to [init] [[VAL]] : $*AnyObject +// CHECK: %{{.*}} = apply [[F]]([[A]]) : $@convention(thin) (@in Any) -> () +// CHECK: [[R:%.*]] = tuple () +// CHECK: dealloc_stack [[A]] : $*Any +// CHECK: return [[R]] : $() // CHECK-LABEL: } // end sil function 'f102_passAnyObjectAsAny' -sil @f102_passAnyObjectAsAny : $@convention(thin) (@in AnyObject) -> () { -bb0(%0 : $AnyObject): +sil [ossa] @f102_passAnyObjectAsAny : $@convention(thin) (@in AnyObject) -> () { +bb0(%0 : @owned $AnyObject): %2 = function_ref @f100_any : $@convention(thin) (@in Any) -> () - strong_retain %0 : $AnyObject %4 = init_existential_value %0 : $AnyObject, $AnyObject, $Any %5 = apply %2(%4) : $@convention(thin) (@in Any) -> () - strong_release %0 : $AnyObject %7 = tuple () return %7 : $() } + +// Helper +sil [ossa] @f110_singleIndirectFunc : $@convention(thin) <τ_0_0> () -> @out τ_0_0 + +// Test convertApplyWithIndirectResults. 
+// CHECK-LABEL: sil [ossa] @f111_singleIndirectApply : $@convention(thin) <τ_0_0> () -> @out τ_0_0 { +// CHECK: bb0(%0 : $*τ_0_0): +// CHECK: [[F:%.*]] = function_ref @f110_singleIndirectFunc : $@convention(thin) <τ_0_0> () -> @out τ_0_0 +// CHECK: %{{.*}} = apply [[F]]<τ_0_0>(%0) : $@convention(thin) <τ_0_0> () -> @out τ_0_0 +// CHECK: return %{{.*}} : $() +// CHECK-LABEL: } // end sil function 'f111_singleIndirectApply' +sil [ossa] @f111_singleIndirectApply : $@convention(thin) <τ_0_0> () -> @out τ_0_0 { +bb0: + %2 = function_ref @f110_singleIndirectFunc : $@convention(thin) <τ_0_0> () -> @out τ_0_0 + %3 = apply %2<τ_0_0>() : $@convention(thin) <τ_0_0> () -> @out τ_0_0 + return %3 : $τ_0_0 +} + +// CHECK-LABEL: sil [ossa] @f120_testDestructure : $@convention(method) (@in SI) -> (@out Element, @out I) { +// CHECK: bb0(%0 : $*Element, %1 : $*I, %2 : $*SI): +// CHECK: [[ELT_ADR:%.*]] = struct_element_addr %2 : $*SI, #SI.element +// CHECK: [[IDX_ADR:%.*]] = struct_element_addr %2 : $*SI, #SI.index +// CHECK: [[IDX:%.*]] = load [trivial] [[IDX_ADR]] : $*I +// CHECK: copy_addr [take] [[ELT_ADR]] to [initialization] %0 : $*Element // id: %6 +// CHECK: store [[IDX]] to [trivial] %1 : $*I +// CHECK: return %{{.*}} : $() +// CHECK-LABEL: } // end sil function 'f120_testDestructure' +sil [ossa] @f120_testDestructure : $@convention(method) (@in SI) -> (@out Element, @out I) { +bb0(%0 : @owned $SI): + (%element, %index) = destructure_struct %0 : $SI + %tuple = tuple(%element : $Element, %index : $I) + return %tuple : $(Element, I) +} + +// CHECK-LABEL: sil [ossa] @f121_testStructExtract : $@convention(method) (@in SI) -> (@out AnyObject, @out I) { +// CHECK: bb0(%0 : $*AnyObject, %1 : $*I, %2 : $*SI): +// CHECK: [[IN:%.*]] = load [take] %2 : $*SI +// CHECK: [[B:%.*]] = begin_borrow [[IN]] : $SI +// CHECK: [[E0:%.*]] = struct_extract [[B]] : $SI, #SI.element +// CHECK: [[C:%.*]] = copy_value [[E0]] : $AnyObject +// CHECK: [[E1:%.*]] = struct_extract [[B]] : $SI, #SI.index +// CHECK: end_borrow [[B]] : $SI +// CHECK: destroy_value [[IN]] : $SI +// CHECK: store [[C]] to [init] %0 : $*AnyObject +// CHECK: store [[E1]] to [trivial] %1 : $*I +// CHECK-LABEL: } // end sil function 'f121_testStructExtract' +sil [ossa] @f121_testStructExtract : $@convention(method) (@in SI) -> (@out AnyObject, @out I) { +bb0(%0 : @owned $SI): + %borrow = begin_borrow %0 : $SI + %object = struct_extract %borrow : $SI, #SI.element + %copy = copy_value %object : $AnyObject + %index = struct_extract %borrow : $SI, #SI.index + end_borrow %borrow : $SI + destroy_value %0 : $SI + %tuple = tuple(%copy : $AnyObject, %index : $I) + return %tuple : $(AnyObject, I) +} + +// CHECK-LABEL: sil [ossa] @f122_testStructExtract : $@convention(method) (@in SRef) -> (@out AnyObject, @out T) { +// CHECK: bb0(%0 : $*AnyObject, %1 : $*T, %2 : $*SRef): +// CHECK-NOT: alloc_stack +// CHECK: [[E0:%.*]] = struct_element_addr %2 : $*SRef, #SRef.object +// CHECK: [[C:%.*]] = load [copy] [[E0]] : $*AnyObject +// CHECK: [[E1:%.*]] = struct_element_addr %2 : $*SRef, #SRef.element +// CHECK: copy_addr [[E1]] to [initialization] %1 : $*T +// CHECK: destroy_addr %2 : $*SRef +// CHECK: store [[C]] to [init] %0 : $*AnyObject +// CHECK-NOT: dealloc_stack +// CHECK-LABEL: } // end sil function 'f122_testStructExtract' +sil [ossa] @f122_testStructExtract : $@convention(method) (@in SRef) -> (@out AnyObject, @out T) { +bb0(%0 : @owned $SRef): + %borrow = begin_borrow %0 : $SRef + %object = struct_extract %borrow : $SRef, #SRef.object + %copy1 = copy_value %object : 
$AnyObject + %element = struct_extract %borrow : $SRef, #SRef.element + %copy2 = copy_value %element : $T + end_borrow %borrow : $SRef + destroy_value %0 : $SRef + %tuple = tuple(%copy1 : $AnyObject, %copy2 : $T) + return %tuple : $(AnyObject, T) +} + +// CHECK-LABEL: sil [ossa] @f123_testStructExtract : $@convention(method) (@in SRef) -> (@out AnyObject, @out T) { +// CHECK: bb0(%0 : $*AnyObject, %1 : $*T, %2 : $*SRef): +// CHECK-NOT: alloc_stack +// CHECK: [[E0:%.*]] = struct_element_addr %2 : $*SRef, #SRef.object +// CHECK: [[L:%.*]] = load_borrow [[E0]] : $*AnyObject +// CHECK: apply %{{.*}}([[L]]) : $@convention(thin) (@guaranteed AnyObject) -> () +// CHECK: [[C:%.*]] = copy_value [[L]] : $AnyObject +// CHECK: end_borrow [[L]] : $AnyObject +// CHECK: [[E1:%.*]] = struct_element_addr %2 : $*SRef, #SRef.element +// CHECK: copy_addr [[E1]] to [initialization] %1 : $*T +// CHECK: destroy_addr %2 : $*SRef +// CHECK: store [[C]] to [init] %0 : $*AnyObject +// CHECK-NOT: dealloc_stack +// CHECK-LABEL: } // end sil function 'f123_testStructExtract' +sil [ossa] @f123_testStructExtract : $@convention(method) (@in SRef) -> (@out AnyObject, @out T) { +bb0(%0 : @owned $SRef): + %borrow = begin_borrow %0 : $SRef + %object = struct_extract %borrow : $SRef, #SRef.object + %f = function_ref @takeGuaranteedObject : $@convention(thin) (@guaranteed AnyObject) -> () + %call = apply%f(%object) : $@convention(thin) (@guaranteed AnyObject) -> () + %copy1 = copy_value %object : $AnyObject + %element = struct_extract %borrow : $SRef, #SRef.element + %copy2 = copy_value %element : $T + end_borrow %borrow : $SRef + destroy_value %0 : $SRef + %tuple = tuple(%copy1 : $AnyObject, %copy2 : $T) + return %tuple : $(AnyObject, T) +} + +// CHECK-LABEL: sil [ossa] @f124_testTupleExtract : $@convention(method) (@in (AnyObject, T)) -> (@out AnyObject, @out T) { +// CHECK: bb0(%0 : $*AnyObject, %1 : $*T, %2 : $*(AnyObject, T)): +// CHECK-NOT: alloc_stack +// CHECK: [[E0:%.*]] = tuple_element_addr %2 : $*(AnyObject, T), 0 +// CHECK: [[C:%.*]] = load [copy] [[E0]] : $*AnyObject +// CHECK: [[E1:%.*]] = tuple_element_addr %2 : $*(AnyObject, T), 1 +// CHECK: copy_addr [[E1]] to [initialization] %1 : $*T +// CHECK: destroy_addr %2 : $*(AnyObject, T) +// CHECK: store [[C]] to [init] %0 : $*AnyObject +// CHECK-NOT: dealloc_stack +sil [ossa] @f124_testTupleExtract : $@convention(method) (@in (AnyObject, T)) -> (@out AnyObject, @out T) { +bb0(%0 : @owned $(AnyObject, T)): + %borrow = begin_borrow %0 : $(AnyObject, T) + %object = tuple_extract %borrow : $(AnyObject, T), 0 + %copy1 = copy_value %object : $AnyObject + %element = tuple_extract %borrow : $(AnyObject, T), 1 + %copy2 = copy_value %element : $T + end_borrow %borrow : $(AnyObject, T) + destroy_value %0 : $(AnyObject, T) + %tuple = tuple(%copy1 : $AnyObject, %copy2 : $T) + return %tuple : $(AnyObject, T) +} + +// CHECK-LABEL: sil [ossa] @f125_testTupleExtract : $@convention(method) (@in (AnyObject, T)) -> (@out AnyObject, @out T) { +// CHECK: bb0(%0 : $*AnyObject, %1 : $*T, %2 : $*(AnyObject, T)): +// CHECK-NOT: alloc_stack +// CHECK: [[E0:%.*]] = tuple_element_addr %2 : $*(AnyObject, T), 0 +// CHECK: [[L:%.*]] = load_borrow %3 : $*AnyObject +// CHECK: apply %{{.*}}([[L]]) : $@convention(thin) (@guaranteed AnyObject) -> () +// CHECK: [[C:%.*]] = copy_value [[L]] : $AnyObject +// CHECK: end_borrow [[L]] : $AnyObject +// CHECK: [[E1:%.*]] = tuple_element_addr %2 : $*(AnyObject, T), 1 +// CHECK: copy_addr [[E1]] to [initialization] %1 : $*T +// CHECK: destroy_addr %2 : 
$*(AnyObject, T) +// CHECK: store [[C]] to [init] %0 : $*AnyObject +// CHECK-NOT: dealloc_stack +// CHECK-LABEL: } // end sil function 'f125_testTupleExtract' +sil [ossa] @f125_testTupleExtract : $@convention(method) (@in (AnyObject, T)) -> (@out AnyObject, @out T) { +bb0(%0 : @owned $(AnyObject, T)): + %borrow = begin_borrow %0 : $(AnyObject, T) + %object = tuple_extract %borrow : $(AnyObject, T), 0 + %f = function_ref @takeGuaranteedObject : $@convention(thin) (@guaranteed AnyObject) -> () + %call = apply%f(%object) : $@convention(thin) (@guaranteed AnyObject) -> () + %copy1 = copy_value %object : $AnyObject + %element = tuple_extract %borrow : $(AnyObject, T), 1 + %copy2 = copy_value %element : $T + end_borrow %borrow : $(AnyObject, T) + destroy_value %0 : $(AnyObject, T) + %tuple = tuple(%copy1 : $AnyObject, %copy2 : $T) + return %tuple : $(AnyObject, T) +} + +// CHECK-LABEL: sil [ossa] @f130_testReleaseValue : $@convention(thin) (@in T) -> () { +// CHECK: bb0(%0 : $*T): +// CHECK: destroy_addr %0 : $*T +// CHECK: return %{{.*}} : $() +// CHECK-LABEL: } // end sil function 'f130_testReleaseValue' +sil [ossa] @f130_testReleaseValue : $@convention(thin) (@in T) -> () { +bb0(%0 : @owned $T): + destroy_value %0 : $T + %r = tuple () + return %r : $() +} + +// CHECK-LABEL: sil [ossa] @f140_testTupleProject : $@convention(thin) (@in T) -> () { +// CHECK: bb0(%0 : $*T): +// CHECK: [[LOCAL:%.*]] = alloc_stack $((T, T), T) +// CHECK: [[ELT0:%.*]] = tuple_element_addr [[LOCAL]] : $*((T, T), T), 0 +// CHECK: [[ELT0_0:%.*]] = tuple_element_addr [[ELT0]] : $*(T, T), 0 +// CHECK: copy_addr %0 to [initialization] [[ELT0_0]] : $*T +// CHECK: [[ELT1:%.*]] = tuple_element_addr [[LOCAL]] : $*((T, T), T), 1 +// CHECK: copy_addr %0 to [initialization] [[ELT1]] : $*T +// CHECK: [[ELT0_1:%.*]] = tuple_element_addr [[ELT0]] : $*(T, T), 1 +// CHECK: copy_addr [take] %0 to [initialization] [[ELT0_1]] : $*T +// CHECK: destroy_addr [[LOCAL]] : $*((T, T), T) +// CHECK: dealloc_stack [[LOCAL]] : $*((T, T), T) +// CHECK: return %{{.*}} : $() +// CHECK-LABEL: } // end sil function 'f140_testTupleProject' +sil [ossa] @f140_testTupleProject : $@convention(thin) (@in T) -> () { +bb0(%0 : @owned $T): + %copy0 = copy_value %0 : $T + %copy1 = copy_value %0 : $T + %tuple1 = tuple (%copy0 : $T, %0 : $T) + %tuple2 = tuple (%tuple1 : $(T, T), %copy1 : $T) + destroy_value %tuple2 : $((T, T), T) + %r = tuple () + return %r : $() +} + +// CHECK-LABEL: sil [ossa] @f150_testStructProject : $@convention(thin) (@in T) -> () { +// CHECK: bb0(%0 : $*T): +// CHECK: [[ALLOC:%.*]] = alloc_stack $Pair> +// CHECK: [[ELT_X:%.*]] = struct_element_addr [[ALLOC]] : $*Pair>, #Pair.x +// CHECK: [[ELT_XY:%.*]] = struct_element_addr [[ELT_X]] : $*Pair, #Pair.y +// CHECK: copy_addr %0 to [initialization] [[ELT_XY]] : $*T +// CHECK: [[ELT_XX:%.*]] = struct_element_addr [[ELT_X]] : $*Pair, #Pair.x +// CHECK: copy_addr [take] %0 to [initialization] [[ELT_XX]] : $*T +// CHECK: [[ELT_Y:%.*]] = struct_element_addr [[ALLOC]] : $*Pair>, #Pair.y +// CHECK: copy_addr [[ELT_X]] to [initialization] [[ELT_Y]] : $*Pair +// CHECK: destroy_addr [[ALLOC]] : $*Pair> +// CHECK: dealloc_stack [[ALLOC]] : $*Pair> +// CHECK: return %{{.*}} : $() +// CHECK-LABEL: } // end sil function 'f150_testStructProject' +sil [ossa] @f150_testStructProject : $@convention(thin) (@in T) -> () { +bb0(%0 : @owned $T): + %copy0 = copy_value %0 : $T + %struct1 = struct $Pair (%0 : $T, %copy0 : $T) + %struct_copy = copy_value %struct1 : $Pair + %struct2 = struct $Pair> (%struct1 : $Pair, 
%struct_copy : $Pair)
+  destroy_value %struct2 : $Pair>
+  %r = tuple ()
+  return %r : $()
+}
+
+// CHECK-LABEL: sil [ossa] @f160_testOpenedArchetype : $@convention(thin) (@in P) -> () {
+// CHECK: bb0(%0 : $*P):
+// CHECK: [[ALLOC:%.*]] = alloc_stack $P, var, name "q"
+// CHECK: copy_addr %0 to [initialization] [[ALLOC]] : $*P
+// CHECK: [[OPEN:%.*]] = open_existential_addr immutable_access %0 : $*P to $*[[ARCHETYPE:@opened(.*)]] P
+// CHECK: [[CP:%.*]] = alloc_stack $[[ARCHETYPE]] P // type-defs: [[OPEN]];
+// CHECK: [[WT:%.*]] = witness_method $[[ARCHETYPE]] P, #P.foo : (Self) -> () -> (), [[OPEN]] : $*[[ARCHETYPE]] P : $@convention(witness_method: P) <τ_0_0 where τ_0_0 : P> (@in_guaranteed τ_0_0) -> ()
+// CHECK: copy_addr [[OPEN]] to [initialization] [[CP]] : $*[[ARCHETYPE]] P
+// CHECK: %{{.*}} = apply [[WT]]<[[ARCHETYPE]] P>([[CP]]) : $@convention(witness_method: P) <τ_0_0 where τ_0_0 : P> (@in_guaranteed τ_0_0) -> ()
+// CHECK: destroy_addr [[CP]] : $*[[ARCHETYPE]] P
+// CHECK: destroy_addr [[ALLOC]] : $*P
+// CHECK: destroy_addr %0 : $*P
+// CHECK: %{{.*}} = tuple ()
+// CHECK: dealloc_stack [[CP]] : $*[[ARCHETYPE]] P
+// CHECK: dealloc_stack [[ALLOC]] : $*P
+// CHECK: return %{{.*}} : $()
+// CHECK-LABEL: } // end sil function 'f160_testOpenedArchetype'
+sil [ossa] @f160_testOpenedArchetype : $@convention(thin) (@in P) -> () {
+bb0(%0 : @owned $P):
+  %2 = alloc_stack $P, var, name "q"
+  %3 = copy_value %0 : $P
+  store %3 to [init] %2 : $*P
+  %b = begin_borrow %0 : $P
+  %8 = open_existential_value %b : $P to $@opened("EF755EF2-B636-11E7-B7B4-A45E60ECC541") P
+  %9 = witness_method $@opened("EF755EF2-B636-11E7-B7B4-A45E60ECC541") P, #P.foo, %8 : $@opened("EF755EF2-B636-11E7-B7B4-A45E60ECC541") P : $@convention(witness_method: P) <τ_0_0 where τ_0_0 : P> (@in_guaranteed τ_0_0) -> ()
+  // Test that we can handle an owned value whose type is an opened archetype.
+  %10 = copy_value %8 : $@opened("EF755EF2-B636-11E7-B7B4-A45E60ECC541") P
+  end_borrow %b : $P
+  %11 = apply %9<@opened("EF755EF2-B636-11E7-B7B4-A45E60ECC541") P>(%10) : $@convention(witness_method: P) <τ_0_0 where τ_0_0 : P> (@in_guaranteed τ_0_0) -> ()
+  destroy_value %10 : $@opened("EF755EF2-B636-11E7-B7B4-A45E60ECC541") P
+  destroy_addr %2 : $*P
+  dealloc_stack %2 : $*P
+  destroy_value %0 : $P
+  %16 = tuple ()
+  return %16 : $()
+}
+
+// CHECK-LABEL: sil [ossa] @f161_testOpenedArchetype : $@convention(thin) (@in P) -> () {
+// CHECK: bb0(%0 : $*P):
+// CHECK: [[ALLOCP:%.*]] = alloc_stack $P, var, name "q"
+// CHECK: copy_addr %0 to [initialization] [[ALLOCP]] : $*P
+// CHECK: [[OPEN:%.*]] = open_existential_addr immutable_access %0 : $*P to $*@opened("EF755EF2-B636-11E7-B7B4-A45E60ECC541") P
+// CHECK: [[OPTIONAL:%.*]] = alloc_stack $Optional<@opened("EF755EF2-B636-11E7-B7B4-A45E60ECC541") P>
+// CHECK: witness_method $@opened("EF755EF2-B636-11E7-B7B4-A45E60ECC541") P, #P.foo : (Self) -> () -> (), [[OPEN]] : $*@opened("EF755EF2-B636-11E7-B7B4-A45E60ECC541") P : $@convention(witness_method: P) <τ_0_0 where τ_0_0 : P> (@in_guaranteed τ_0_0) -> ()
+// CHECK: [[INIT:%.*]] = init_enum_data_addr [[OPTIONAL]] : $*Optional<@opened("EF755EF2-B636-11E7-B7B4-A45E60ECC541") P>, #Optional.some!enumelt
+// CHECK: copy_addr [[OPEN]] to [initialization] [[INIT]] : $*@opened("EF755EF2-B636-11E7-B7B4-A45E60ECC541") P
+// CHECK: inject_enum_addr [[OPTIONAL]] : $*Optional<@opened("EF755EF2-B636-11E7-B7B4-A45E60ECC541") P>, #Optional.some!enumelt
+// CHECK: [[DATA:%.*]] = unchecked_take_enum_data_addr [[OPTIONAL]] : $*Optional<@opened("EF755EF2-B636-11E7-B7B4-A45E60ECC541") P>, #Optional.some!enumelt
+// CHECK: %10 = apply %{{.*}}<@opened("EF755EF2-B636-11E7-B7B4-A45E60ECC541") P>([[DATA]]) : $@convention(witness_method: P) <τ_0_0 where τ_0_0 : P> (@in_guaranteed τ_0_0) -> ()
+// CHECK: destroy_addr %9 : $*@opened("EF755EF2-B636-11E7-B7B4-A45E60ECC541") P
+// CHECK: destroy_addr [[ALLOCP]] : $*P
+// CHECK: destroy_addr %0 : $*P
+// CHECK: dealloc_stack [[OPTIONAL]] : $*Optional<@opened("EF755EF2-B636-11E7-B7B4-A45E60ECC541") P>
+// CHECK: dealloc_stack [[ALLOCP]] : $*P
+// CHECK-LABEL: } // end sil function 'f161_testOpenedArchetype'
+sil [ossa] @f161_testOpenedArchetype : $@convention(thin) (@in P) -> () {
+bb0(%0 : @owned $P):
+  %2 = alloc_stack $P, var, name "q"
+  %3 = copy_value %0 : $P
+  store %3 to [init] %2 : $*P
+  %b = begin_borrow %0 : $P
+  %8 = open_existential_value %b : $P to $@opened("EF755EF2-B636-11E7-B7B4-A45E60ECC541") P
+  %9 = witness_method $@opened("EF755EF2-B636-11E7-B7B4-A45E60ECC541") P, #P.foo, %8 : $@opened("EF755EF2-B636-11E7-B7B4-A45E60ECC541") P : $@convention(witness_method: P) <τ_0_0 where τ_0_0 : P> (@in_guaranteed τ_0_0) -> ()
+  %cpy = copy_value %8 : $@opened("EF755EF2-B636-11E7-B7B4-A45E60ECC541") P
+  end_borrow %b : $P
+  // This optional is an aggregate that contains an opened existential. Make sure it's allocated after open_existential_addr.
+ %opt = enum $Optional<@opened("EF755EF2-B636-11E7-B7B4-A45E60ECC541") P>, #Optional.some!enumelt, %cpy : $@opened("EF755EF2-B636-11E7-B7B4-A45E60ECC541") P + %some = unchecked_enum_data %opt : $Optional<@opened("EF755EF2-B636-11E7-B7B4-A45E60ECC541") P>, #Optional.some!enumelt + %11 = apply %9<@opened("EF755EF2-B636-11E7-B7B4-A45E60ECC541") P>(%some) : $@convention(witness_method: P) <τ_0_0 where τ_0_0 : P> (@in_guaranteed τ_0_0) -> () + destroy_value %some : $@opened("EF755EF2-B636-11E7-B7B4-A45E60ECC541") P + destroy_addr %2 : $*P + dealloc_stack %2 : $*P + destroy_value %0 : $P + %16 = tuple () + return %16 : $() +} + +// CHECK-LABEL: sil [ossa] @f170_compare : $@convention(thin) (@in_guaranteed T, @in_guaranteed T) -> @out T { +// CHECK: bb0(%0 : $*T, %1 : $*T, %2 : $*T): +// CHECK: [[WT:%.*]] = witness_method $T, #Comparable."<" : (Self.Type) -> (Self, Self) -> Builtin.Int1 : $@convention(witness_method: Comparable) <τ_0_0 where τ_0_0 : Comparable> (@in_guaranteed τ_0_0, @in_guaranteed τ_0_0, @thick τ_0_0.Type) -> Builtin.Int1 +// CHECK: [[MT:%.*]] = metatype $@thick T.Type +// CHECK: [[COND:%.*]] = apply [[WT]](%1, %2, [[MT]]) : $@convention(witness_method: Comparable) <τ_0_0 where τ_0_0 : Comparable> (@in_guaranteed τ_0_0, @in_guaranteed τ_0_0, @thick τ_0_0.Type) -> Builtin.Int1 +// CHECK: cond_br [[COND]], bb2, bb1 +// CHECK: bb1: +// CHECK: copy_addr %1 to [initialization] %0 : $*T +// CHECK: br bb3 +// CHECK: bb2: +// CHECK: copy_addr %2 to [initialization] %0 : $*T +// CHECK: br bb3 +// CHECK-LABEL: } // end sil function 'f170_compare' +sil [ossa] @f170_compare : $@convention(thin) (@in_guaranteed T, @in_guaranteed T) -> @out T { +bb0(%0 : @guaranteed $T, %1 : @guaranteed $T): + %2 = witness_method $T, #Comparable."<" : (Self.Type) -> (Self, Self) -> Bool : $@convention(witness_method: Comparable) <τ_0_0 where τ_0_0 : Comparable> (@in_guaranteed τ_0_0, @in_guaranteed τ_0_0, @thick τ_0_0.Type) -> Bool + %3 = metatype $@thick T.Type + %4 = apply %2(%0, %1, %3) : $@convention(witness_method: Comparable) <τ_0_0 where τ_0_0 : Comparable> (@in_guaranteed τ_0_0, @in_guaranteed τ_0_0, @thick τ_0_0.Type) -> Bool + cond_br %4, bb1, bb2 + +bb1: + %6 = copy_value %1 : $T + br bb3(%6 : $T) + +bb2: + %8 = copy_value %0 : $T + br bb3(%8 : $T) + +bb3(%15 : @owned $T): + return %15 : $T +} + +// Test switching on a single opaque value. 
+// CHECK-LABEL: sil [ossa] @f210_testSwitchEnum : $@convention(method) (@in Optional, @inout T) -> () { +// CHECK: bb0(%0 : $*Optional, %1 : $*T): +// CHECK: switch_enum_addr %0 : $*Optional, case #Optional.some!enumelt: [[SOMEBB:bb[0-9]+]], case #Optional.none!enumelt: [[NONEBB:bb[0-9]+]] +// CHECK: [[NONEBB]]: +// CHECK: br [[RETBB:bb[0-9]+]] +// CHECK: [[SOMEBB]]: +// CHECK: [[CAST:%.*]] = unchecked_take_enum_data_addr %0 : $*Optional, #Optional.some!enumelt +// CHECK: copy_addr [take] [[CAST]] to [initialization] %1 : $*T +// CHECK: br [[RETBB]] +// CHECK: [[RETBB]]: +// CHECK-LABEL: } // end sil function 'f210_testSwitchEnum' +sil [ossa] @f210_testSwitchEnum : $@convention(method) (@in Optional, @inout T) -> () { +bb0(%0 : @owned $Optional, %1 : $*T): + switch_enum %0 : $Optional, case #Optional.some: bb2, case #Optional.none: bb1 + +bb1: + br bb3 + +bb2(%some : @owned $T): + destroy_addr %1 : $*T + store %some to [init] %1 : $*T + br bb3 + +bb3: + %31 = tuple () + return %31 : $() +} + +// f220_testSwitchMixed +// CHECK-LABEL: sil [ossa] @f220_testSwitchMixed : $@convention(method) (@in Mixed, @inout Builtin.Int64, @inout T) -> () { +// CHECK: bb0(%0 : $*Mixed, %1 : $*Builtin.Int64, %2 : $*T): +// CHECK: switch_enum_addr %0 : $*Mixed, case #Mixed.i!enumelt: [[IBB:bb[0-9]+]], case #Mixed.t!enumelt: [[TBB:bb[0-9]+]], default [[DBB:bb[0-9]+]] +// CHECK: [[DBB]]: +// CHECK: [[OBJADDR:%.*]] = unchecked_take_enum_data_addr %0 : $*Mixed, #Mixed.o!enumelt +// CHECK: [[LD:%.*]] = load [take] [[OBJADDR]] : $*AnyObject +// CHECK: destroy_value [[LD]] : $AnyObject +// CHECK: br [[RBB:bb[0-9]+]] +// CHECK: [[TBB]]: +// CHECK: [[CAST:%.*]] = unchecked_take_enum_data_addr %0 : $*Mixed, #Mixed.t!enumelt +// CHECK: destroy_addr %2 : $*T +// CHECK: copy_addr [take] [[CAST]] to [initialization] %2 : $*T +// CHECK: br [[RBB]] +// CHECK: [[IBB]]: +// CHECK: [[CAST:%.*]] = unchecked_take_enum_data_addr %0 : $*Mixed, #Mixed.i!enumelt +// CHECK: [[VAL:%.*]] = load [trivial] [[CAST]] : $*Builtin.Int64 +// CHECK: store [[VAL]] to [trivial] %1 : $*Builtin.Int64 +// CHECK: br [[RBB]] +// CHECK: [[RBB]]: +// CHECK-LABEL: } // end sil function 'f220_testSwitchMixed' +sil [ossa] @f220_testSwitchMixed : $@convention(method) (@in Mixed, @inout Int, @inout T) -> () { +bb0(%0 : @owned $Mixed, %1 : $*Int, %2 : $*T): + switch_enum %0 : $Mixed, case #Mixed.i: bb1, case #Mixed.t: bb2, default bb3 + +bb1(%13 : $Int): + store %13 to [trivial] %1 : $*Int + br bb4 + +bb2(%14 : @owned $T): + destroy_addr %2 : $*T + store %14 to [init] %2 : $*T + br bb4 + +bb3(%18: @owned $AnyObject): + destroy_value %18 : $AnyObject + br bb4 + +bb4: + %31 = tuple () + return %31 : $() +} + +// CHECK-LABEL: sil [ossa] @f230_testTryApply : $@convention(thin) (@in T) -> (@out T, @error Error) { +// CHECK: bb0(%0 : $*T, %1 : $*T): +// CHECK: [[F:%.*]] = function_ref @throwsError : $@convention(thin) <τ_0_0> (@in τ_0_0) -> (@out τ_0_0, @error Error) +// CHECK: try_apply [[F]](%0, %1) : $@convention(thin) <τ_0_0> (@in τ_0_0) -> (@out τ_0_0, @error Error), normal bb2, error bb1 +// CHECK: bb1([[E:%.*]] : $Error): +// CHECK: throw [[E]] : $Error +// CHECK: bb2([[NONE:%.*]] : $()): +// CHECK: %{{.*}} = tuple () +// CHECK: return %{{.*}} : $() +// CHECK-LABEL: } // end sil function 'f230_testTryApply' +sil [ossa] @f230_testTryApply : $@convention(thin) (@in T) -> (@out T, @error Error) { +bb0(%0 : @owned $T): + %3 = function_ref @throwsError : $@convention(thin) (@in T) -> (@out T, @error Error) + try_apply %3(%0) : $@convention(thin) <τ_0_0> (@in 
τ_0_0) -> (@out τ_0_0, @error Error), normal bb1, error bb2 + +bb1(%5 : @owned $T): + return %5 : $T + +bb2(%7 : $Error): + throw %7 : $Error +} + +// CHECK-LABEL: sil [ossa] @f240_testTryApplyDirect : $@convention(thin) (@in T) -> (Builtin.Int64, @error Error) { +// CHECK: bb0(%0 : $*T): +// CHECK: [[F:%.*]] = function_ref @returnInt : $@convention(thin) <τ_0_0> (@in τ_0_0) -> (Builtin.Int64, @error Error) +// CHECK: try_apply [[F]](%0) : $@convention(thin) <τ_0_0> (@in τ_0_0) -> (Builtin.Int64, @error Error), normal bb2, error bb1 +// CHECK: bb1([[E:%.*]] : $Error): +// CHECK: throw [[E]] : $Error +// CHECK: bb2([[V:%.*]] : $Builtin.Int64): +// CHECK: return [[V]] : $Builtin.Int64 +// CHECK-LABEL: } // end sil function 'f240_testTryApplyDirect' +sil [ossa] @f240_testTryApplyDirect : $@convention(thin) (@in T) -> (Int, @error Error) { +bb0(%0 : @owned $T): + %3 = function_ref @returnInt : $@convention(thin) (@in T) -> (Int, @error Error) + try_apply %3(%0) : $@convention(thin) <τ_0_0> (@in τ_0_0) -> (Int, @error Error), normal bb1, error bb2 + +bb1(%5 : $Int): + return %5 : $Int + +bb2(%7 : $Error): + throw %7 : $Error +} + +// CHECK-LABEL: sil [ossa] @f250_testTryApplyIndirect : $@convention(thin) (@in T) -> (Builtin.Int64, @error Error) { +// CHECK: bb0(%0 : $*T): +// CHECK: [[F:%.*]] = function_ref @returnIntOut : $@convention(thin) <τ_0_0> (@in τ_0_0) -> (@out Builtin.Int64, @error Error) +// CHECK: [[OUT_I:%.*]] = alloc_stack $Builtin.Int64 +// CHECK: try_apply %1([[OUT_I]], %0) : $@convention(thin) <τ_0_0> (@in τ_0_0) -> (@out Builtin.Int64, @error Error), normal bb2, error bb1 +// CHECK: bb1([[E:%.*]] : $Error): +// CHECK: dealloc_stack [[OUT_I]] : $*Builtin.Int64 +// CHECK: throw [[E]] : $Error +// CHECK: bb2(%{{.*}} : $()): +// CHECK: [[V:%.*]] = load [trivial] [[OUT_I]] : $*Builtin.Int64 +// CHECK: dealloc_stack [[OUT_I]] : $*Builtin.Int64 +// CHECK: return [[V]] : $Builtin.Int64 +// CHECK-LABEL: } // end sil function 'f250_testTryApplyIndirect' +sil [ossa] @f250_testTryApplyIndirect : $@convention(thin) (@in T) -> (Int, @error Error) { +bb0(%0 : @owned $T): + %3 = function_ref @returnIntOut : $@convention(thin) (@in T) -> (@out Int, @error Error) + try_apply %3(%0) : $@convention(thin) <τ_0_0> (@in τ_0_0) -> (@out Int, @error Error), normal bb1, error bb2 + +bb1(%5 : $Int): + return %5 : $Int + +bb2(%7 : $Error): + throw %7 : $Error +} + +// CHECK-LABEL: sil [ossa] @f260_testTryApplyTuple : $@convention(thin) (@in T) -> (@out T, @error Error) { +// CHECK: bb0(%0 : $*T, %1 : $*T): +// CHECK: [[OUT_T:%.*]] = alloc_stack $T +// CHECK: [[F:%.*]] = function_ref @returnTuple : $@convention(thin) <τ_0_0> (@in τ_0_0) -> (@out τ_0_0, Builtin.Int64, @out Builtin.Int64, @out τ_0_0, @error Error) +// CHECK: [[OUT_I:%.*]] = alloc_stack $Builtin.Int64 +// CHECK: try_apply [[F]]([[OUT_T]], [[OUT_I]], %0, %1) : $@convention(thin) <τ_0_0> (@in τ_0_0) -> (@out τ_0_0, Builtin.Int64, @out Builtin.Int64, @out τ_0_0, @error Error), normal bb2, error bb1 +// CHECK: bb1([[E:%.*]] : $Error): +// CHECK: dealloc_stack [[OUT_I]] : $*Builtin.Int64 +// CHECK: dealloc_stack [[OUT_T]] : $*T +// CHECK: throw [[E]] : $Error +// CHECK: bb2([[RESULT:%.*]] : $Builtin.Int64): +// CHECK: dealloc_stack [[OUT_I]] : $*Builtin.Int64 +// CHECK: destroy_addr [[OUT_T]] : $*T +// CHECK: %{{.*}} = tuple () +// CHECK: dealloc_stack [[OUT_T]] : $*T +// CHECK: return %{{.*}} : $() +// CHECK-LABEL: } // end sil function 'f260_testTryApplyTuple' +sil [ossa] @f260_testTryApplyTuple : $@convention(thin) (@in T) -> (@out T, @error 
Error) { +bb0(%0 : @owned $T): + %1 = function_ref @returnTuple : $@convention(thin) (@in T) -> (@out T, Int, @out Int, @out T, @error Error) + try_apply %1(%0) : $@convention(thin) (@in T) -> (@out T, Int, @out Int, @out T, @error Error), normal bb1, error bb2 + +bb1(%3 : @owned $(T, Int, Int, T)): + (%4, %5, %6, %7) = destructure_tuple %3 : $(T, Int, Int, T) + destroy_value %4 : $T + return %7 : $T + +bb2(%9 : $Error): + throw %9 : $Error +} diff --git a/test/SILOptimizer/address_lowering_phi.sil b/test/SILOptimizer/address_lowering_phi.sil new file mode 100644 index 0000000000000..a7ec7e882ac8b --- /dev/null +++ b/test/SILOptimizer/address_lowering_phi.sil @@ -0,0 +1,440 @@ +// RUN: %target-sil-opt -address-lowering -enable-sil-opaque-values -emit-sorted-sil -module-name Swift -sil-verify-all %s | %FileCheck %s +// +// Test the PhiStorageOptimizer within the AddressLowering pass. + +import Builtin + +sil_stage raw + +typealias AnyObject = Builtin.AnyObject +typealias Int = Builtin.Int64 +typealias Bool = Builtin.Int1 + +struct SRef { + @_hasStorage var object: AnyObject { get set } + @_hasStorage var element: T { get set } + init(object: AnyObject, element: T) +} + +enum InnerEnum { + case payload(T, AnyObject) +} +enum OuterEnum { + case inner(InnerEnum, AnyObject) +} + +struct InnerStruct { + var t: T + var object: AnyObject +} +struct OuterStruct { + var inner: InnerStruct + var object: AnyObject +} + +sil [ossa] @getOut : $@convention(thin) () -> @out T + +// Test BBArgs allocation. + +// No projection from incoming values. No interference. +// CHECK-LABEL: sil [ossa] @f010_testBBArgSelect : $@convention(thin) () -> @out T { +// CHECK: bb0(%0 : $*T): +// CHECK: [[F:%.*]] = function_ref @getOut : $@convention(thin) <τ_0_0> () -> @out τ_0_0 +// CHECK: cond_br undef, bb2, bb1 +// CHECK: bb1: +// CHECK: [[GET0:%.*]] = apply [[F]](%0) : $@convention(thin) <τ_0_0> () -> @out τ_0_0 +// CHECK: br bb3 +// CHECK: bb2: +// CHECK: [[GET1:%.*]] = apply [[F]](%0) : $@convention(thin) <τ_0_0> () -> @out τ_0_0 +// CHECK: br bb3 +// CHECK: bb3: +// CHECK: %{{.*}} = tuple () +// CHECK: return %{{.*}} : $() +// CHECK-LABEL: } // end sil function 'f010_testBBArgSelect' +sil [ossa] @f010_testBBArgSelect : $@convention(thin) () -> @out T { +bb0: + %get = function_ref @getOut : $@convention(thin) <τ_0_0>() -> @out τ_0_0 + cond_br undef, bb1, bb2 + +bb1: + %get0 = apply %get() : $@convention(thin) <τ_0_0>() -> @out τ_0_0 + br bb3(%get0 : $T) + +bb2: + %get1 = apply %get() : $@convention(thin) <τ_0_0>() -> @out τ_0_0 + br bb3(%get1 : $T) + +// %15 +bb3(%15 : @owned $T): + return %15 : $T +} + +// One projection from incoming values. One interference. 
+// +// CHECK-LABEL: sil [ossa] @f020_testBBArgProjectOne : $@convention(thin) () -> @out T { +// CHECK: bb0(%0 : $*T): +// CHECK: [[ALLOC:%.*]] = alloc_stack $T +// CHECK: apply %{{.*}}(%0) : $@convention(thin) <τ_0_0> () -> @out τ_0_0 +// CHECK: apply %{{.*}}([[ALLOC]]) : $@convention(thin) <τ_0_0> () -> @out τ_0_0 +// CHECK: cond_br undef, bb2, bb1 +// CHECK: bb1: +// CHECK: destroy_addr %0 : $*T +// CHECK: copy_addr [take] %1 to [initialization] %0 : $*T +// CHECK: br bb3 +// CHECK: bb2: +// CHECK: destroy_addr %1 : $*T +// CHECK: br bb3 +// CHECK: bb3: +// CHECK: dealloc_stack [[ALLOC]] : $*T +// CHECK: return %{{.*}} : $() +// CHECK-LABEL: } // end sil function 'f020_testBBArgProjectOne' +sil [ossa] @f020_testBBArgProjectOne : $@convention(thin) () -> @out T { +bb0: + %get = function_ref @getOut : $@convention(thin) <τ_0_0>() -> @out τ_0_0 + %get0 = apply %get() : $@convention(thin) <τ_0_0>() -> @out τ_0_0 + %get1 = apply %get() : $@convention(thin) <τ_0_0>() -> @out τ_0_0 + cond_br undef, bb2, bb1 + +bb1: + destroy_value %get0 : $T + br bb3(%get1 : $T) + +bb2: + destroy_value %get1 : $T + br bb3(%get0 : $T) + +bb3(%arg : @owned $T): + return %arg : $T +} + +// Projection from incoming values. No interference. +// CHECK-LABEL: sil [ossa] @f030_testBBArgProjectIn : $@convention(thin) (@in T, @in T) -> @out T { +// CHECK: bb0(%0 : $*T, %1 : $*T, %2 : $*T): +// CHECK: cond_br undef, bb2, bb1 +// CHECK: bb1: +// CHECK: destroy_addr %2 : $*T +// CHECK: copy_addr [take] %1 to [initialization] %0 : $*T +// CHECK: br bb3 +// CHECK: bb2: +// CHECK: destroy_addr %1 : $*T +// CHECK: copy_addr [take] %2 to [initialization] %0 : $*T +// CHECK: br bb3 +// CHECK: bb3: +// CHECK-LABEL: } // end sil function 'f030_testBBArgProjectIn' +sil [ossa] @f030_testBBArgProjectIn : $@convention(thin) (@in T, @in T) -> @out T { +bb0(%0 : @owned $T, %1 : @owned $T): + cond_br undef, bb1, bb2 + +bb1: + destroy_value %0 : $T + br bb3(%1 : $T) + +bb2: + destroy_value %1 : $T + br bb3(%0 : $T) + +bb3(%arg : @owned $T): + return %arg : $T +} + +// CHECK-LABEL: sil [ossa] @f040_testInSwapOut : $@convention(thin) (@in T, @in T) -> (@out T, @out T) { +// CHECK: bb0(%0 : $*T, %1 : $*T, %2 : $*T, %3 : $*T): +// CHECK: cond_br undef, bb2, bb1 +// CHECK: bb1: +// CHECK: copy_addr [take] %2 to [initialization] %1 : $*T +// CHECK: copy_addr [take] %3 to [initialization] %0 : $*T +// CHECK: br bb3 +// CHECK: bb2: +// CHECK: copy_addr [take] %2 to [initialization] %0 : $*T +// CHECK: copy_addr [take] %3 to [initialization] %1 : $*T +// CHECK: br bb3 +// CHECK-LABEL: } // end sil function 'f040_testInSwapOut' +sil [ossa] @f040_testInSwapOut : $@convention(thin) (@in T, @in T) -> (@out T, @out T) { +bb0(%0 : @owned $T, %1 : @owned $T): + cond_br undef, bb1, bb2 + +bb1: + br bb3(%0 : $T, %1 : $T) + +bb2: + br bb3(%1 : $T, %0 : $T) + +bb3(%arg0 : @owned $T, %arg1 : @owned $T): + %result = tuple (%arg0 : $T, %arg1 : $T) + return %result : $(T, T) +} + +// CHECK-LABEL: sil [ossa] @f050_testCombine : $@convention(thin) (@in T, @in T) -> (@out T, @out T) { +// CHECK: bb0(%0 : $*T, %1 : $*T, %2 : $*T, %3 : $*T): +// CHECK: cond_br undef, bb2, bb1 +// CHECK: bb1: +// CHECK: copy_addr [take] %2 to [initialization] %0 : $*T +// CHECK: copy_addr [take] %3 to [initialization] %1 : $*T +// CHECK: br bb3 +// CHECK: bb2: +// CHECK: copy_addr %2 to [initialization] %1 : $*T +// CHECK: destroy_addr %3 : $*T +// CHECK: copy_addr [take] %2 to [initialization] %0 : $*T +// CHECK: br bb3 +// CHECK: bb3: +// CHECK-LABEL: } // end sil function 
'f050_testCombine' +sil [ossa] @f050_testCombine : $@convention(thin) (@in T, @in T) -> (@out T, @out T) { +bb0(%0 : @owned $T, %1 : @owned $T): + cond_br undef, bb2, bb1 + +bb1: + br bb3(%0 : $T, %1 : $T) + +bb2: + %copy = copy_value %0 : $T + destroy_value %1 : $T + br bb3(%0 : $T, %copy : $T) + +bb3(%arg0 : @owned $T, %arg1 : @owned $T): + %result = tuple (%arg0 : $T, %arg1 : $T) + return %result : $(T, T) +} + +// Test cyclic anti-dependence on phi copies. +// +// CHECK-LABEL: sil [ossa] @f060_testInoutSwap : $@convention(thin) (@inout T, @inout T) -> () { +// CHECK: bb0(%0 : $*T, %1 : $*T): +// CHECK: [[ALLOC0:%.*]] = alloc_stack $T +// CHECK: [[ALLOC1:%.*]] = alloc_stack $T +// CHECK: copy_addr [take] %0 to [initialization] [[ALLOC1]] : $*T +// CHECK: copy_addr [take] %1 to [initialization] [[ALLOC0]] : $*T +// CHECK: cond_br undef, bb2, bb1 +// CHECK: bb1: +// CHECK: [[TMP:%.*]] = alloc_stack $T +// CHECK: copy_addr [take] [[ALLOC0]] to [initialization] [[TMP]] : $*T +// CHECK: copy_addr [take] [[ALLOC1]] to [initialization] [[ALLOC0]] : $*T +// CHECK: copy_addr [take] [[TMP]] to [initialization] [[ALLOC1]] : $*T +// CHECK: dealloc_stack [[TMP]] : $*T +// CHECK: br bb3 +// CHECK: bb2: +// CHECK: br bb3 +// CHECK: bb3: +// CHECK: copy_addr [take] [[ALLOC0]] to [initialization] %0 : $*T +// CHECK: copy_addr [take] [[ALLOC1]] to [initialization] %1 : $*T +// CHECK: dealloc_stack [[ALLOC1]] : $*T +// CHECK: dealloc_stack [[ALLOC0]] : $*T +// CHECK-LABEL: } // end sil function 'f060_testInoutSwap' +sil [ossa] @f060_testInoutSwap : $@convention(thin) (@inout T, @inout T) -> () { +bb0(%0 : $*T, %1 : $*T): + %2 = load [take] %0 : $*T + %3 = load [take] %1 : $*T + cond_br undef, bb2, bb1 + +bb1: + br bb3(%2 : $T, %3 : $T) + +bb2: + br bb3(%3 : $T, %2 : $T) + +bb3(%phi0 : @owned $T, %phi1 : @owned $T): + store %phi0 to [init] %0 : $*T + store %phi1 to [init] %1 : $*T + %99 = tuple () + return %99 : $() +} + +// Test phi copies that project into their use. 
+// +// CHECK-LABEL: sil [ossa] @f070_testInoutFieldSwap : $@convention(thin) (@inout SRef, @inout SRef) -> () { +// CHECK: bb0(%0 : $*SRef, %1 : $*SRef): +// CHECK: [[ALLOCA:%.*]] = alloc_stack $SRef +// CHECK: [[ALLOCB:%.*]] = alloc_stack $SRef +// CHECK: [[ALLOCSA:%.*]] = alloc_stack $SRef +// CHECK: [[ALLOCSB:%.*]] = alloc_stack $SRef +// CHECK: copy_addr [take] %0 to [initialization] [[ALLOCA]] : $*SRef +// CHECK: copy_addr [take] %1 to [initialization] [[ALLOCB]] : $*SRef +// CHECK: cond_br undef, bb2, bb1 +// CHECK: bb1: +// CHECK: [[A1OADR:%.*]] = struct_element_addr [[ALLOCA]] : $*SRef, #SRef.object +// CHECK: [[A1O:%.*]] = load [take] [[A1OADR]] : $*AnyObject +// CHECK: [[A1EADR:%.*]] = struct_element_addr [[ALLOCA]] : $*SRef, #SRef.element +// CHECK: [[B1OADR:%.*]] = struct_element_addr [[ALLOCB]] : $*SRef, #SRef.object +// CHECK: [[B1O:%.*]] = load [take] [[B1OADR]] : $*AnyObject +// CHECK: [[B1EADR:%.*]] = struct_element_addr [[ALLOCB]] : $*SRef, #SRef.element +// CHECK: destroy_value [[B1O]] : $AnyObject +// CHECK: [[CP1:%.*]] = copy_value [[A1O]] : $AnyObject +// CHECK: [[SA1EADR:%.*]] = struct_element_addr [[ALLOCSA]] : $*SRef, #SRef.element +// CHECK: copy_addr [take] [[A1EADR]] to [initialization] [[SA1EADR]] : $*T +// CHECK: [[SB1EADR:%.*]] = struct_element_addr [[ALLOCSB]] : $*SRef, #SRef.element +// CHECK: copy_addr [take] [[B1EADR]] to [initialization] [[SB1EADR]] : $*T +// CHECK: br bb3([[A1O]] : $AnyObject, [[CP1]] : $AnyObject) +// CHECK: bb2: +// CHECK: [[A2OADR:%.*]] = struct_element_addr [[ALLOCA]] : $*SRef, #SRef.object +// CHECK: [[A2O:%.*]] = load [take] [[A2OADR]] : $*AnyObject +// CHECK: [[A2EADR:%.*]] = struct_element_addr [[ALLOCA]] : $*SRef, #SRef.element +// CHECK: [[B2OADR:%.*]] = struct_element_addr [[ALLOCB]] : $*SRef, #SRef.object +// CHECK: [[B2O:%.*]] = load [take] [[B2OADR]] : $*AnyObject +// CHECK: [[B2EADR:%.*]] = struct_element_addr [[ALLOCB]] : $*SRef, #SRef.element +// CHECK: destroy_value [[B2O]] : $AnyObject +// CHECK: [[CP2:%.*]] = copy_value [[A2O]] : $AnyObject +// CHECK: [[SB2EADR:%.*]] = struct_element_addr [[ALLOCSB]] : $*SRef, #SRef.element +// CHECK: copy_addr [take] [[A2EADR]] to [initialization] [[SB2EADR]] : $*T +// CHECK: [[SA2EADR:%.*]] = struct_element_addr [[ALLOCSA]] : $*SRef, #SRef.element +// CHECK: copy_addr [take] [[B2EADR]] to [initialization] [[SA2EADR]] : $*T +// CHECK: br bb3([[A2O]] : $AnyObject, [[CP2]] : $AnyObject) +// CHECK: bb3([[PHI0:%.*]] : @owned $AnyObject, [[PHI1:%.*]] : @owned $AnyObject): +// CHECK: [[SA3EADR:%.*]] = struct_element_addr [[ALLOCSA]] : $*SRef, #SRef.object +// CHECK: store [[PHI0]] to [init] [[SA3EADR]] : $*AnyObject +// CHECK: [[SA3EADR:%.*]] = struct_element_addr [[ALLOCSB]] : $*SRef, #SRef.object +// CHECK: store [[PHI1]] to [init] [[SA3EADR]] : $*AnyObject +// CHECK: copy_addr [take] [[ALLOCSA]] to [initialization] %0 : $*SRef +// CHECK: copy_addr [take] [[ALLOCSB]] to [initialization] %1 : $*SRef +// CHECK-LABEL: } // end sil function 'f070_testInoutFieldSwap' +sil [ossa] @f070_testInoutFieldSwap : $@convention(thin) (@inout SRef, @inout SRef) -> () { +bb0(%0 : $*SRef, %1 : $*SRef): + %la = load [take] %0 : $*SRef + %lb = load [take] %1 : $*SRef + cond_br undef, bb2, bb1 + +bb1: + (%da1o, %da1e) = destructure_struct %la : $SRef + (%db1o, %db1e) = destructure_struct %lb : $SRef + destroy_value %db1o : $AnyObject + %ca1o = copy_value %da1o : $AnyObject + br bb3(%da1o : $AnyObject, %ca1o : $AnyObject, %da1e : $T, %db1e : $T) + +bb2: + (%da2o, %da2e) = destructure_struct %la : $SRef + 
(%db2o, %db2e) = destructure_struct %lb : $SRef + destroy_value %db2o : $AnyObject + %ca2o = copy_value %da2o : $AnyObject + br bb3(%da2o : $AnyObject, %ca2o : $AnyObject, %db2e : $T, %da2e : $T) + +bb3(%phio0 : @owned $AnyObject, %phio1 : @owned $AnyObject, %phie0 : @owned $T, %phie1 : @owned $T): + %sa = struct $SRef (%phio0 : $AnyObject, %phie0 : $T) + %sb = struct $SRef (%phio1 : $AnyObject, %phie1 : $T) + store %sa to [init] %0 : $*SRef + store %sb to [init] %1 : $*SRef + %99 = tuple () + return %99 : $() +} + +// CHECK-LABEL: sil [ossa] @f080_testNestedComposeEnumPhi : $@convention(thin) (@in T, @in T, @owned AnyObject, @owned AnyObject) -> @out OuterEnum { +// CHECK: bb0(%0 : $*OuterEnum, %1 : $*T, %2 : $*T, %3 : @owned $AnyObject, %4 : @owned $AnyObject): +// CHECK: cond_br undef, bb2, bb1 +// CHECK: bb1: +// CHECK: destroy_addr %2 : $*T +// CHECK: [[TUPLE1:%.*]] = init_enum_data_addr [[INNER1:%.*]] : $*InnerEnum, #InnerEnum.payload!enumelt +// CHECK: [[TUPLE1_0:%.*]] = tuple_element_addr [[TUPLE1]] : $*(T, AnyObject), 0 +// CHECK: copy_addr [take] %1 to [initialization] [[TUPLE1_0]] : $*T +// CHECK: [[TUPLE1_1:%.*]] = tuple_element_addr [[TUPLE1]] : $*(T, AnyObject), 1 +// CHECK: store %3 to [init] [[TUPLE1_1]] : $*AnyObject +// CHECK: inject_enum_addr [[INNER1]] : $*InnerEnum, #InnerEnum.payload!enumelt +// CHECK: copy_addr [take] [[INNER1]] to [initialization] [[PHI6:%.*]] : $*InnerEnum +// CHECK: br bb6 +// CHECK: bb2: +// CHECK: cond_br undef, bb4, bb3 +// CHECK: bb3: +// CHECK: destroy_addr %1 : $*T +// CHECK: copy_addr [take] %2 to [initialization] [[PHI5:%.*]] : $*T +// CHECK: br bb5 +// CHECK: bb4: +// CHECK: destroy_addr %2 : $*T +// CHECK: copy_addr [take] %1 to [initialization] [[PHI5]] : $*T +// CHECK: br bb5 +// CHECK: bb5: +// CHECK: [[TUPLE5:%.*]] = init_enum_data_addr [[INNER5:%.*]] : $*InnerEnum, #InnerEnum.payload!enumelt +// CHECK: [[TUPLE5_0:%.*]] = tuple_element_addr [[TUPLE5]] : $*(T, AnyObject), 0 +// CHECK: copy_addr [take] [[PHI5]] to [initialization] [[TUPLE5_0]] : $*T +// CHECK: [[TUPLE5_1:%.*]] = tuple_element_addr [[TUPLE5]] : $*(T, AnyObject), 1 +// CHECK: store %3 to [init] [[TUPLE5_1]] : $*AnyObject +// CHECK: inject_enum_addr [[INNER5]] : $*InnerEnum, #InnerEnum.payload!enumelt +// CHECK: copy_addr [take] [[INNER5]] to [initialization] [[PHI6:%.*]] : $*InnerEnum +// CHECK: br bb6 +// CHECK: bb6: +// CHECK: [[TUPLE6:%.*]] = init_enum_data_addr %0 : $*OuterEnum, #OuterEnum.inner!enumelt +// CHECK: [[TUPLE6_0:%.*]] = tuple_element_addr [[TUPLE6]] : $*(InnerEnum, AnyObject), 0 +// CHECK: copy_addr [take] [[PHI6]] to [initialization] [[TUPLE6_0]] : $*InnerEnum +// CHECK: [[TUPLE6_1:%.*]] = tuple_element_addr [[TUPLE6]] : $*(InnerEnum, AnyObject), 1 +// CHECK: store %4 to [init] [[TUPLE6_1]] : $*AnyObject +// CHECK: inject_enum_addr %0 : $*OuterEnum, #OuterEnum.inner!enumelt +// CHECK-LABEL: } // end sil function 'f080_testNestedComposeEnumPhi' +sil [ossa] @f080_testNestedComposeEnumPhi : $@convention(thin) (@in T, @in T, @owned AnyObject, @owned AnyObject) -> @out OuterEnum { +bb0(%0 : @owned $T, %1 : @owned $T, %2 : @owned $AnyObject, %3 : @owned $AnyObject): + cond_br undef, bb2, bb1 +bb1: + destroy_value %1 : $T + %tuple1 = tuple (%0 : $T, %2 : $AnyObject) + %inner1 = enum $InnerEnum, #InnerEnum.payload, %tuple1 : $(T, AnyObject) + br bb6(%inner1 : $InnerEnum) +bb2: + cond_br undef, bb4, bb3 +bb3: + destroy_value %0 : $T + br bb5(%1 : $T) +bb4: + destroy_value %1 : $T + br bb5(%0 : $T) +bb5(%phi5 : @owned $T): + %tuple5 = tuple (%phi5 : $T, %2 : 
$AnyObject) + %inner5 = enum $InnerEnum, #InnerEnum.payload, %tuple5 : $(T, AnyObject) + br bb6(%inner5 : $InnerEnum) +bb6(%phi6 : @owned $InnerEnum): + %tuple6 = tuple (%phi6 : $InnerEnum, %3 : $AnyObject) + %outer = enum $OuterEnum, #OuterEnum.inner, %tuple6 : $(InnerEnum, AnyObject) + return %outer : $OuterEnum +} + +// CHECK-LABEL: sil [ossa] @f080_testNestedComposeStructWithPhi : $@convention(thin) (@in T, @in T, @owned AnyObject, @owned AnyObject) -> @out OuterStruct { +// CHECK: bb0(%0 : $*OuterStruct, %1 : $*T, %2 : $*T, %3 : @owned $AnyObject, %4 : @owned $AnyObject): +// CHECK-NOT: alloc +// CHECK: cond_br undef, bb2, bb1 +// CHECK: bb1: +// CHECK: destroy_addr %2 : $*T +// CHECK: [[INNER1:%.*]] = struct_element_addr %0 : $*OuterStruct, #OuterStruct.inner +// CHECK: [[T2:%.*]] = struct_element_addr [[INNER1]] : $*InnerStruct, #InnerStruct.t +// CHECK: copy_addr [take] %1 to [initialization] [[T2]] : $*T +// CHECK: [[O2:%.*]] = struct_element_addr [[INNER1]] : $*InnerStruct, #InnerStruct.object +// CHECK: store %3 to [init] [[O2]] : $*AnyObject +// CHECK: br bb6 +// CHECK: bb2: +// CHECK: cond_br undef, bb4, bb3 +// CHECK: bb3: +// CHECK: destroy_addr %1 : $*T +// CHECK: [[INNER3:%.*]] = struct_element_addr %0 : $*OuterStruct, #OuterStruct.inner +// CHECK: [[T3:%.*]] = struct_element_addr [[INNER3]] : $*InnerStruct, #InnerStruct.t +// CHECK: copy_addr [take] %2 to [initialization] [[T3]] : $*T +// CHECK: br bb5 +// CHECK: bb4: +// CHECK: destroy_addr %2 : $*T +// CHECK: [[INNER4:%.*]] = struct_element_addr %0 : $*OuterStruct, #OuterStruct.inner +// CHECK: [[T4:%.*]] = struct_element_addr [[INNER4]] : $*InnerStruct, #InnerStruct.t +// CHECK: copy_addr [take] %1 to [initialization] [[T4]] : $*T +// CHECK: br bb5 +// CHECK: bb5: +// CHECK: [[INNER5:%.*]] = struct_element_addr %0 : $*OuterStruct, #OuterStruct.inner +// CHECK: [[O5:%.*]] = struct_element_addr [[INNER5]] : $*InnerStruct, #InnerStruct.object +// CHECK: store %3 to [init] [[O5]] : $*AnyObject +// CHECK: br bb6 +// CHECK: bb6: +// CHECK: [[O6:%.*]] = struct_element_addr %0 : $*OuterStruct, #OuterStruct.object +// CHECK: store %4 to [init] [[O6]] : $*AnyObject +// CHECK-NOT: dealloc +// CHECK-LABEL: } // end sil function 'f080_testNestedComposeStructWithPhi' +sil [ossa] @f080_testNestedComposeStructWithPhi : $@convention(thin) (@in T, @in T, @owned AnyObject, @owned AnyObject) -> @out OuterStruct { +bb0(%0 : @owned $T, %1 : @owned $T, %2 : @owned $AnyObject, %3 : @owned $AnyObject): + cond_br undef, bb2, bb1 +bb1: + destroy_value %1 : $T + %inner2 = struct $InnerStruct (%0 : $T, %2 : $AnyObject) + br bb6(%inner2 : $InnerStruct) +bb2: + cond_br undef, bb4, bb3 +bb3: + destroy_value %0 : $T + br bb5(%1 : $T) +bb4: + destroy_value %1 : $T + br bb5(%0 : $T) +bb5(%phi5 : @owned $T): + %inner5 = struct $InnerStruct (%phi5 : $T, %2 : $AnyObject) + br bb6(%inner5 : $InnerStruct) +bb6(%phi6 : @owned $InnerStruct): + %outer = struct $OuterStruct (%phi6 : $InnerStruct, %3 : $AnyObject) + return %outer : $OuterStruct +} diff --git a/test/SILOptimizer/address_projection.sil b/test/SILOptimizer/address_projection.sil deleted file mode 100644 index 9cd4240929a52..0000000000000 --- a/test/SILOptimizer/address_projection.sil +++ /dev/null @@ -1,444 +0,0 @@ -// RUN: %target-sil-opt -address-lowering -enable-sil-opaque-values -optimize-opaque-address-lowering -emit-sorted-sil %s | %FileCheck %s - -import Builtin - -sil_stage canonical -// CHECK: sil_stage lowered - -typealias AnyObject = Builtin.AnyObject -typealias Int = Builtin.Int64 - -// 
CHECK-LABEL: sil hidden @f010_addrlower_identity : $@convention(thin) (@in T) -> @out T { -// CHECK: bb0(%0 : $*T, %1 : $*T): -// CHECK: copy_addr [take] %1 to [initialization] %0 : $*T -// CHECK: return %{{.*}} : $() -// CHECK-LABEL: } // end sil function 'f010_addrlower_identity' -sil hidden @f010_addrlower_identity : $@convention(thin) (@in T) -> @out T { -bb0(%0 : $T): - return %0 : $T -} - -sil hidden [noinline] @f020_multiResult : $@convention(thin) (@in T) -> (@out T, @out T, @out T) { -bb0(%0 : $T): - %2 = copy_value %0 : $T - %3 = copy_value %0 : $T - %4 = copy_value %0 : $T - destroy_value %0 : $T - %6 = tuple (%2 : $T, %3 : $T, %4 : $T) - return %6 : $(T, T, T) -} - -// Test returning an opaque tuple of tuples as a concrete tuple. -// The multiResult call is specialized, but the SIL result convention does not change. -// --- -// CHECK-LABEL: sil @f021_callMultiResult : $@convention(thin) (Builtin.Int64) -> (Builtin.Int64, Builtin.Int64, Builtin.Int64) { -// CHECK: bb0(%0 : $Builtin.Int64): -// CHECK: %[[FN:.*]] = function_ref @f020_multiResult : $@convention(thin) <τ_0_0> (@in τ_0_0) -> (@out τ_0_0, @out τ_0_0, @out τ_0_0) -// CHECK: %[[IN:.*]] = alloc_stack $Builtin.Int64 -// CHECK: store %0 to %[[IN]] : $*Builtin.Int64 -// CHECK: %[[OUT1:.*]] = alloc_stack $Builtin.Int64 -// CHECK: %[[OUT2:.*]] = alloc_stack $Builtin.Int64 -// CHECK: %[[OUT3:.*]] = alloc_stack $Builtin.Int64 -// CHECK: %{{.*}} = apply %[[FN]](%[[OUT1]], %[[OUT2]], %[[OUT3]], %[[IN]]) : $@convention(thin) <τ_0_0> (@in τ_0_0) -> (@out τ_0_0, @out τ_0_0, @out τ_0_0) -// CHECK: %[[R3:.*]] = load %[[OUT3]] : $*Builtin.Int64 -// CHECK: dealloc_stack %[[OUT3]] : $*Builtin.Int64 -// CHECK: %[[R2:.*]] = load %[[OUT2]] : $*Builtin.Int64 -// CHECK: dealloc_stack %[[OUT2]] : $*Builtin.Int64 -// CHECK: %[[R1:.*]] = load %[[OUT1]] : $*Builtin.Int64 -// CHECK: dealloc_stack %[[OUT1]] : $*Builtin.Int64 -// CHECK: dealloc_stack %[[IN]] : $*Builtin.Int64 -// CHECK: %[[R:.*]] = tuple (%[[R1]] : $Builtin.Int64, %[[R2]] : $Builtin.Int64, %[[R3]] : $Builtin.Int64) -// CHECK: return %[[R]] : $(Builtin.Int64, Builtin.Int64, Builtin.Int64) -// CHECK-LABEL: } // end sil function 'f021_callMultiResult' -sil @f021_callMultiResult : $@convention(thin) (Int) -> (Int, Int, Int) { -bb0(%0 : $Int): - %1 = function_ref @f020_multiResult : $@convention(thin) <τ_0_0> (@in τ_0_0) -> (@out τ_0_0, @out τ_0_0, @out τ_0_0) - %2 = apply %1(%0) : $@convention(thin) <τ_0_0> (@in τ_0_0) -> (@out τ_0_0, @out τ_0_0, @out τ_0_0) - %3 = tuple_extract %2 : $(Int, Int, Int), 0 - %4 = tuple_extract %2 : $(Int, Int, Int), 1 - %5 = tuple_extract %2 : $(Int, Int, Int), 2 - %6 = tuple (%3 : $Int, %4 : $Int, %5 : $Int) - return %6 : $(Int, Int, Int) -} - -// CHECK-LABEL: sil @f030_returnPair : $@convention(thin) (@in T) -> (@out T, @out T) { -// CHECK: bb0(%0 : $*T, %1 : $*T, %2 : $*T): -// CHECK: copy_addr %2 to [initialization] %0 : $*T -// CHECK: copy_addr [take] %2 to [initialization] %1 : $*T -// CHECK: %[[R:.*]] = tuple () -// CHECK: return %[[R]] : $() -// CHECK-LABEL: } // end sil function 'f030_returnPair' -sil @f030_returnPair : $@convention(thin) (@in T) -> (@out T, @out T) { -bb0(%0 : $T): - %2 = copy_value %0 : $T - %3 = tuple (%2 : $T, %0 : $T) - return %3 : $(T, T) -} - -// CHECK-LABEL: sil @f031_unusedIndirect : $@convention(thin) (@in T) -> @out T { -// CHECK: bb0(%0 : $*T, %1 : $*T): -// CHECK: %[[LOC0:.*]] = alloc_stack $T -// CHECK: %[[OUT1:.*]] = alloc_stack $T -// CHECK: %[[LOC1:.*]] = alloc_stack $T -// CHECK: %[[OUT2:.*]] = alloc_stack $T -// 
CHECK: // function_ref f030_returnPair -// CHECK: %[[F:.*]] = function_ref @f030_returnPair : $@convention(thin) <τ_0_0> (@in τ_0_0) -> (@out τ_0_0, @out τ_0_0) -// CHECK: copy_addr %1 to [initialization] %[[LOC0]] : $*T -// CHECK: %[[R0:.*]] = apply %[[F]](%[[OUT1]], %[[OUT2]], %[[LOC0]]) : $@convention(thin) <τ_0_0> (@in τ_0_0) -> (@out τ_0_0, @out τ_0_0) -// CHECK: copy_addr %[[OUT1]] to [initialization] %[[LOC1]] : $*T -// CHECK: copy_addr %[[OUT2]] to [initialization] %0 : $*T -// CHECK: destroy_addr %[[OUT1]] : $*T -// CHECK: destroy_addr %[[OUT2]] : $*T -// CHECK: destroy_addr %[[LOC1]] : $*T -// CHECK: destroy_addr %1 : $*T -// CHECK: %[[R:.*]] = tuple () -// CHECK: dealloc_stack %[[OUT2]] : $*T -// CHECK: dealloc_stack %[[LOC1]] : $*T -// CHECK: dealloc_stack %[[OUT1]] : $*T -// CHECK: dealloc_stack %[[LOC0]] : $*T -// CHECK: return %[[R]] : $() -// CHECK-LABEL: } // end sil function 'f031_unusedIndirect' -sil @f031_unusedIndirect : $@convention(thin) (@in T) -> @out T { -bb0(%0 : $T): - %2 = function_ref @f030_returnPair : $@convention(thin) <τ_0_0> (@in τ_0_0) -> (@out τ_0_0, @out τ_0_0) - %3 = copy_value %0 : $T - %4 = apply %2(%3) : $@convention(thin) <τ_0_0> (@in τ_0_0) -> (@out τ_0_0, @out τ_0_0) - %5 = tuple_extract %4 : $(T, T), 0 - %6 = copy_value %5 : $T - %7 = tuple_extract %4 : $(T, T), 1 - %8 = copy_value %7 : $T - destroy_value %4 : $(T, T) - destroy_value %6 : $T - destroy_value %0 : $T - return %8 : $T -} - -sil hidden @f040_consumeArg : $@convention(thin) (@in T) -> () { -bb0(%0 : $T): - destroy_value %0 : $T - %3 = tuple () - return %3 : $() -} - -// CHECK-LABEL: sil @f041_opaqueArg : $@convention(thin) (@in T) -> () { -// CHECK: bb0(%0 : $*T): -// CHECK: %[[LOC:.*]] = alloc_stack $T -// CHECK: %[[FN:.*]] = function_ref @f040_consumeArg : $@convention(thin) <τ_0_0> (@in τ_0_0) -> () -// CHECK: copy_addr %0 to [initialization] %[[LOC]] : $*T -// CHECK: %{{.*}} = apply %[[FN]](%[[LOC]]) : $@convention(thin) <τ_0_0> (@in τ_0_0) -> () -// CHECK: destroy_addr %0 : $*T -// CHECK: %[[R:.*]] = tuple () -// CHECK: dealloc_stack %[[LOC]] : $*T -// CHECK: return %[[R]] : $() -// CHECK-LABEL: } // end sil function 'f041_opaqueArg' -sil @f041_opaqueArg : $@convention(thin) (@in T) -> () { -bb0(%0 : $T): - %2 = function_ref @f040_consumeArg : $@convention(thin) <τ_0_0> (@in τ_0_0) -> () - %3 = copy_value %0 : $T - %4 = apply %2(%3) : $@convention(thin) <τ_0_0> (@in τ_0_0) -> () - destroy_value %0 : $T - %6 = tuple () - return %6 : $() -} - -// CHECK-LABEL: sil @f050_storeinout : $@convention(thin) (@inout T, @inout T, @in T) -> () { -// CHECK: bb0(%0 : $*T, %1 : $*T, %2 : $*T): -// CHECK: %[[PREV1:.*]] = alloc_stack $T -// CHECK: %[[PREV2:.*]] = alloc_stack $T -// CHECK: debug_value %0 : $*T, var, name "t", argno 1, expr op_deref -// CHECK: debug_value %1 : $*T, var, name "u", argno 2, expr op_deref -// CHECK: debug_value %2 : $*T, {{.*}} expr op_deref -// CHECK: copy_addr [take] %0 to [initialization] %[[PREV1]] : $*T -// CHECK: copy_addr %2 to [initialization] %0 : $*T -// CHECK: destroy_addr %[[PREV1]] : $*T -// CHECK: copy_addr [take] %1 to [initialization] %[[PREV2]] : $*T -// CHECK: copy_addr %2 to [initialization] %1 : $*T -// CHECK: destroy_addr %[[PREV2]] : $*T -// CHECK: destroy_addr %2 : $*T -// CHECK: %[[R:.*]] = tuple () -// CHECK: dealloc_stack %[[PREV2]] : $*T -// CHECK: dealloc_stack %[[PREV1]] : $*T -// CHECK: return %[[R]] : $() -// CHECK-LABEL: } // end sil function 'f050_storeinout' -sil @f050_storeinout : $@convention(thin) (@inout T, @inout T, @in T) -> 
() { -bb0(%0 : $*T, %1 : $*T, %2 : $T): - debug_value %0 : $*T, var, name "t", argno 1, expr op_deref - debug_value %1 : $*T, var, name "u", argno 2, expr op_deref - debug_value %2 : $T, let, name "x", argno 3 - %6 = copy_value %2 : $T - %7 = load %0 : $*T - store %6 to %0 : $*T - destroy_value %7 : $T - %10 = copy_value %2 : $T - %11 = load %1 : $*T - store %10 to %1 : $*T - destroy_value %11 : $T - destroy_value %2 : $T - %15 = tuple () - return %15 : $() -} - -sil hidden @f060_mutate : $@convention(thin) (@inout T, @in T) -> () { -bb0(%0 : $*T, %1 : $T): - %4 = copy_value %1 : $T - %5 = load %0 : $*T - store %4 to %0 : $*T - destroy_value %5 : $T - destroy_value %1 : $T - %9 = tuple () - return %9 : $() -} - -// CHECK-LABEL: sil @f061_callinout : $@convention(thin) (@in T) -> () { -// CHECK: bb0(%0 : $*T): -// CHECK: %[[LOC2:.*]] = alloc_stack $T -// CHECK: %[[LOC1:.*]] = alloc_stack $T -// CHECK: copy_addr %0 to [initialization] %[[LOC1]] : $*T -// CHECK: %[[FN:.*]] = function_ref @f060_mutate : $@convention(thin) <τ_0_0> (@inout τ_0_0, @in τ_0_0) -> () -// CHECK: copy_addr %0 to [initialization] %[[LOC2]] : $*T -// CHECK: %{{.*}} = apply %[[FN]](%[[LOC1]], %[[LOC2]]) : $@convention(thin) <τ_0_0> (@inout τ_0_0, @in τ_0_0) -> () -// CHECK: destroy_addr %[[LOC1]] : $*T -// CHECK: destroy_addr %0 : $*T -// CHECK: %[[R:.*]] = tuple () -// CHECK: dealloc_stack %[[LOC1]] : $*T -// CHECK: dealloc_stack %[[LOC2]] : $*T -// CHECK: return %[[R]] : $() -// CHECK-LABEL: } // end sil function 'f061_callinout' -sil @f061_callinout : $@convention(thin) (@in T) -> () { -bb0(%0 : $T): - %1 = alloc_stack $T, var, name "u" - %3 = copy_value %0 : $T - store %3 to %1 : $*T - %5 = function_ref @f060_mutate : $@convention(thin) <τ_0_0> (@inout τ_0_0, @in τ_0_0) -> () - %6 = copy_value %0 : $T - %7 = apply %5(%1, %6) : $@convention(thin) <τ_0_0> (@inout τ_0_0, @in τ_0_0) -> () - destroy_addr %1 : $*T - destroy_value %0 : $T - %10 = tuple () - dealloc_stack %1 : $*T - return %10 : $() -} - -public protocol C : class {} - -// CHECK-LABEL: sil @f070_mixedResult1 : $@convention(thin) (@in T, @owned C) -> (@out T, @owned C) { -// CHECK: bb0(%0 : $*T, %1 : $*T, %2 : $C): -// CHECK: copy_addr [take] %1 to [initialization] %0 : $*T -// CHECK: return %2 : $C -// CHECK-LABEL: } // end sil function 'f070_mixedResult1' -sil @f070_mixedResult1 : $@convention(thin) (@in T, @owned C) -> (@out T, @owned C) { -bb0(%0 : $T, %1 : $C): - %4 = tuple (%0 : $T, %1 : $C) - return %4 : $(T, C) -} - -// CHECK-LABEL: sil @f071_mixedResult2 : $@convention(thin) (@in T, @owned C) -> (@out T, @out T, @owned C, @owned C) { -// CHECK: bb0(%0 : $*T, %1 : $*T, %2 : $*T, %3 : $C): -// CHECK: copy_addr %2 to [initialization] %0 : $*T -// CHECK: strong_retain %3 : $C -// CHECK: copy_addr [take] %2 to [initialization] %1 : $*T -// CHECK: %[[T:.*]] = tuple (%3 : $C, %3 : $C) -// CHECK: return %[[T]] : $(C, C) -// CHECK-LABEL: } // end sil function 'f071_mixedResult2' -sil @f071_mixedResult2 : $@convention(thin) (@in T, @owned C) -> (@out T, @out T, @owned C, @owned C) { -bb0(%0 : $T, %1 : $C): - %4 = copy_value %0 : $T - strong_retain %1 : $C - %6 = tuple (%4 : $T, %0 : $T, %1 : $C, %1 : $C) - return %6 : $(T, T, C, C) -} - -// CHECK-LABEL: sil @f072_callMixedResult1 : $@convention(thin) (@in T, @owned C) -> (@out T, @owned C) { -// CHECK: bb0(%0 : $*T, %1 : $*T, %2 : $C): -// CHECK: %[[LIN:.*]] = alloc_stack $T -// CHECK: %[[OUT:.*]] = alloc_stack $T -// CHECK: // function_ref f070_mixedResult1 -// CHECK: %[[F:.*]] = function_ref 
@f070_mixedResult1 : $@convention(thin) <τ_0_0> (@in τ_0_0, @owned C) -> (@out τ_0_0, @owned C) -// CHECK: copy_addr %1 to [initialization] %[[LIN]] : $*T -// CHECK: strong_retain %2 : $C -// CHECK: %[[R:.*]] = apply %[[F]](%[[OUT]], %[[LIN]], %2) : $@convention(thin) <τ_0_0> (@in τ_0_0, @owned C) -> (@out τ_0_0, @owned C) -// CHECK: copy_addr %[[OUT]] to [initialization] %0 : $*T -// CHECK: strong_retain %[[R]] : $C -// CHECK: destroy_addr %[[OUT]] : $*T -// CHECK: strong_release %[[R]] : $C -// CHECK: strong_release %2 : $C -// CHECK: destroy_addr %1 : $*T -// CHECK: dealloc_stack %[[OUT]] : $*T -// CHECK: dealloc_stack %[[LIN]] : $*T -// CHECK: return %[[R]] : $C -// CHECK-LABEL: } // end sil function 'f072_callMixedResult1' -sil @f072_callMixedResult1 : $@convention(thin) (@in T, @owned C) -> (@out T, @owned C) { -bb0(%0 : $T, %1 : $C): - %4 = function_ref @f070_mixedResult1 : $@convention(thin) <τ_0_0> (@in τ_0_0, @owned C) -> (@out τ_0_0, @owned C) - %5 = copy_value %0 : $T - strong_retain %1 : $C - %7 = apply %4(%5, %1) : $@convention(thin) <τ_0_0> (@in τ_0_0, @owned C) -> (@out τ_0_0, @owned C) - %8 = tuple_extract %7 : $(T, C), 0 - %9 = copy_value %8 : $T - %10 = tuple_extract %7 : $(T, C), 1 - strong_retain %10 : $C - destroy_value %7 : $(T, C) - strong_release %1 : $C - destroy_value %0 : $T - %15 = tuple (%9 : $T, %10 : $C) - return %15 : $(T, C) -} - -// CHECK-LABEL: sil @f073_callMixedResult2 : $@convention(thin) (@in T, @owned C) -> (@out T, @out T, @owned C, @owned C) { -// CHECK: bb0(%0 : $*T, %1 : $*T, %2 : $*T, %3 : $C): -// CHECK: %[[LOC0:.*]] = alloc_stack $T -// CHECK: %[[OUT1:.*]] = alloc_stack $T -// CHECK: %[[OUT2:.*]] = alloc_stack $T -// CHECK: %[[F:.*]] = function_ref @f071_mixedResult2 : $@convention(thin) <τ_0_0> (@in τ_0_0, @owned C) -> (@out τ_0_0, @out τ_0_0, @owned C, @owned C) -// CHECK: copy_addr %2 to [initialization] %[[LOC0]] : $*T -// CHECK: strong_retain %3 : $C -// CHECK: %[[R:.*]] = apply %[[F]](%[[OUT1]], %[[OUT2]], %[[LOC0]], %3) : $@convention(thin) <τ_0_0> (@in τ_0_0, @owned C) -> (@out τ_0_0, @out τ_0_0, @owned C, @owned C) -// CHECK: %[[T2:.*]] = tuple_extract %[[R]] : $(C, C), 1 -// CHECK: %[[T1:.*]] = tuple_extract %[[R]] : $(C, C), 0 -// CHECK: copy_addr %[[OUT1]] to [initialization] %0 : $*T -// CHECK: copy_addr %[[OUT2]] to [initialization] %1 : $*T -// CHECK: strong_retain %[[T1]] : $C -// CHECK: strong_retain %[[T2]] : $C -// CHECK: destroy_addr %[[OUT1]] : $*T -// CHECK: destroy_addr %[[OUT2]] : $*T -// CHECK: strong_release %[[T1]] : $C -// CHECK: strong_release %[[T2]] : $C -// CHECK: strong_release %3 : $C -// CHECK: destroy_addr %2 : $*T -// CHECK: %[[T:.*]] = tuple (%[[T1]] : $C, %[[T2]] : $C) -// CHECK: dealloc_stack %[[OUT2]] : $*T -// CHECK: dealloc_stack %[[OUT1]] : $*T -// CHECK: dealloc_stack %[[LOC0]] : $*T -// CHECK: return %[[T]] : $(C, C) -// CHECK-LABEL: } // end sil function 'f073_callMixedResult2' -sil @f073_callMixedResult2 : $@convention(thin) (@in T, @owned C) -> (@out T, @out T, @owned C, @owned C) { -bb0(%0 : $T, %1 : $C): - %4 = function_ref @f071_mixedResult2 : $@convention(thin) <τ_0_0> (@in τ_0_0, @owned C) -> (@out τ_0_0, @out τ_0_0, @owned C, @owned C) - %5 = copy_value %0 : $T - strong_retain %1 : $C - %7 = apply %4(%5, %1) : $@convention(thin) <τ_0_0> (@in τ_0_0, @owned C) -> (@out τ_0_0, @out τ_0_0, @owned C, @owned C) - %8 = tuple_extract %7 : $(T, T, C, C), 0 - %9 = copy_value %8 : $T - %10 = tuple_extract %7 : $(T, T, C, C), 1 - %11 = copy_value %10 : $T - %12 = tuple_extract %7 : $(T, T, C, C), 2 - 
strong_retain %12 : $C - %14 = tuple_extract %7 : $(T, T, C, C), 3 - strong_retain %14 : $C - destroy_value %7 : $(T, T, C, C) - strong_release %1 : $C - destroy_value %0 : $T - %19 = tuple (%9 : $T, %11 : $T, %12 : $C, %14 : $C) - return %19 : $(T, T, C, C) -} - -sil_default_witness_table C {} - -enum Optional { - case none - case some(T) -} - -// CHECK-LABEL: sil @f080_optional : $@convention(thin) (@in T) -> @out Optional { -// CHECK: bb0(%0 : $*Optional, %1 : $*T): -// CHECK: %[[DATA:.*]] = init_enum_data_addr %0 : $*Optional, #Optional.some!enumelt -// CHECK: copy_addr %1 to [initialization] %[[DATA]] : $*T -// CHECK: inject_enum_addr %0 : $*Optional, #Optional.some!enumelt -// CHECK: destroy_addr %1 : $*T -// CHECK: %[[T:.*]] = tuple () -// CHECK: return %[[T]] : $() -// CHECK-LABEL: } // end sil function 'f080_optional' -sil @f080_optional : $@convention(thin) (@in T) -> @out Optional { -bb0(%0 : $T): - %cpy = copy_value %0 : $T - %opt = enum $Optional, #Optional.some!enumelt, %cpy : $T - destroy_value %0 : $T - return %opt : $Optional -} - -// CHECK-LABEL: sil @f090_tupletuple : $@convention(thin) ((Builtin.Int64, Builtin.Int64), Builtin.Int64) -> (@out (Builtin.Int64, Builtin.Int64), @out (Builtin.Int64, Builtin.Int64), Builtin.Int64, Builtin.Int64) { -// CHECK: bb0(%0 : $*(Builtin.Int64, Builtin.Int64), %1 : $*(Builtin.Int64, Builtin.Int64), %2 : $(Builtin.Int64, Builtin.Int64), %3 : $Builtin.Int64): -// CHECK: store %2 to %0 : $*(Builtin.Int64, Builtin.Int64) -// CHECK: store %2 to %1 : $*(Builtin.Int64, Builtin.Int64) -// CHECK: %[[T:.*]] = tuple (%3 : $Builtin.Int64, %3 : $Builtin.Int64) -// CHECK: return %[[T]] : $(Builtin.Int64, Builtin.Int64) -// CHECK-LABEL: } // end sil function 'f090_tupletuple' -sil @f090_tupletuple : $@convention(thin) ((Int, Int), Int) -> (@out (Int, Int), @out (Int, Int), Int, Int) { -bb0(%0 : $(Int, Int), %1 : $Int): - %2 = tuple (%0 : $(Int, Int), %0 : $(Int, Int), %1 : $Int, %1 : $Int) - return %2 : $((Int, Int), (Int, Int), Int, Int) -} - -// CHECK-LABEL: sil @f091_callTuple : $@convention(thin) (Builtin.Int64) -> (Builtin.Int64, Builtin.Int64, Builtin.Int64, Builtin.Int64, Builtin.Int64, Builtin.Int64) { -// CHECK: bb0(%0 : $Builtin.Int64): -// CHECK: %[[T1:.*]] = tuple (%0 : $Builtin.Int64, %0 : $Builtin.Int64) -// CHECK: %[[F:.*]] = function_ref @f090_tupletuple : $@convention(thin) ((Builtin.Int64, Builtin.Int64), Builtin.Int64) -> (@out (Builtin.Int64, Builtin.Int64), @out (Builtin.Int64, Builtin.Int64), Builtin.Int64, Builtin.Int64) -// CHECK: %[[O1:.*]] = alloc_stack $(Builtin.Int64, Builtin.Int64) -// CHECK: %[[O2:.*]] = alloc_stack $(Builtin.Int64, Builtin.Int64) -// CHECK: %[[RT:.*]] = apply %[[F]](%[[O1]], %4, %1, %0) : $@convention(thin) ((Builtin.Int64, Builtin.Int64), Builtin.Int64) -> (@out (Builtin.Int64, Builtin.Int64), @out (Builtin.Int64, Builtin.Int64), Builtin.Int64, Builtin.Int64) -// CHECK: %[[R1:.*]] = tuple_extract %[[RT]] : $(Builtin.Int64, Builtin.Int64), 1 -// CHECK: %[[R0:.*]] = tuple_extract %[[RT]] : $(Builtin.Int64, Builtin.Int64), 0 -// CHECK: %[[L2:.*]] = load %[[O2]] : $*(Builtin.Int64, Builtin.Int64) -// CHECK: dealloc_stack %[[O2]] : $*(Builtin.Int64, Builtin.Int64) -// CHECK: %[[L1:.*]] = load %[[O1]] : $*(Builtin.Int64, Builtin.Int64) -// CHECK: dealloc_stack %[[O1]] : $*(Builtin.Int64, Builtin.Int64) -// CHECK: %[[E10:.*]] = tuple_extract %[[L1]] : $(Builtin.Int64, Builtin.Int64), 0 -// CHECK: %[[E11:.*]] = tuple_extract %[[L1]] : $(Builtin.Int64, Builtin.Int64), 1 -// CHECK: %[[E20:.*]] = tuple_extract 
%[[L2]] : $(Builtin.Int64, Builtin.Int64), 0 -// CHECK: %[[E21:.*]] = tuple_extract %[[L2]] : $(Builtin.Int64, Builtin.Int64), 1 -// CHECK: %[[RET:.*]] = tuple (%[[E10]] : $Builtin.Int64, %[[E11]] : $Builtin.Int64, %[[E20]] : $Builtin.Int64, %[[E21]] : $Builtin.Int64, %[[R0]] : $Builtin.Int64, %[[R1]] : $Builtin.Int64) -// CHECK: return %[[RET]] : $(Builtin.Int64, Builtin.Int64, Builtin.Int64, Builtin.Int64, Builtin.Int64, Builtin.Int64) -// CHECK-LABEL: } // end sil function 'f091_callTuple' -sil @f091_callTuple : $@convention(thin) (Int) -> (Int, Int, Int, Int, Int, Int) { -bb0(%0: $Int): - %1 = tuple (%0 : $Int, %0 : $Int) - %2 = function_ref @f090_tupletuple : $@convention(thin) ((Int, Int), Int) -> (@out (Int, Int), @out (Int, Int), Int, Int) - %3 = apply %2(%1, %0) : $@convention(thin) ((Int, Int), Int) -> (@out (Int, Int), @out (Int, Int), Int, Int) - %9 = tuple_extract %3 : $((Int, Int), (Int, Int), Int, Int), 0 - %10 = tuple_extract %3 : $((Int, Int), (Int, Int), Int, Int), 1 - %11 = tuple_extract %3 : $((Int, Int), (Int, Int), Int, Int), 2 - %12 = tuple_extract %3 : $((Int, Int), (Int, Int), Int, Int), 3 - %13 = tuple_extract %9 : $(Int, Int), 0 - %14 = tuple_extract %9 : $(Int, Int), 1 - %15 = tuple_extract %10 : $(Int, Int), 0 - %16 = tuple_extract %10 : $(Int, Int), 1 - %17 = tuple (%13 : $Int, %14 : $Int, %15 : $Int, %16 : $Int, %11 : $Int, %12 : $Int) - return %17 : $(Int, Int, Int, Int, Int, Int) -} - -// CHECK-LABEL: sil hidden @f100_any : $@convention(thin) (@in Any) -> () { -// CHECK: bb0(%0 : $*Any): -// CHECK: destroy_addr %0 : $*Any -// CHECK: %[[T:.*]] = tuple () -// CHECK: return %[[T]] : $() -// CHECK-LABEL: } // end sil function 'f100_any' -sil hidden @f100_any : $@convention(thin) (@in Any) -> () { -bb0(%0 : $Any): - debug_value %0 : $Any, let, name "any", argno 1 - destroy_value %0 : $Any - %3 = tuple () - return %3 : $() -} - -// CHECK-LABEL: sil @f101_passAny : $@convention(thin) (@in T) -> () { -// CHECK: bb0(%0 : $*T): -// CHECK: %[[A:.*]] = alloc_stack $Any -// CHECK: %[[F:.*]] = function_ref @f100_any : $@convention(thin) (@in Any) -> () -// CHECK: %[[T:.*]] = init_existential_addr %[[A]] : $*Any, $T -// CHECK: copy_addr %0 to [initialization] %[[T]] : $*T -// CHECK: %{{.*}} = apply %[[F]](%[[A]]) : $@convention(thin) (@in Any) -> () -// CHECK: destroy_addr %0 : $*T -// CHECK: %[[R:.*]] = tuple () -// CHECK: dealloc_stack %[[A]] : $*Any -// CHECK: return %[[R]] : $() -// CHECK-LABEL: } // end sil function 'f101_passAny' -sil @f101_passAny : $@convention(thin) (@in T) -> () { -bb0(%0 : $T): - %2 = function_ref @f100_any : $@convention(thin) (@in Any) -> () - %3 = copy_value %0 : $T - %4 = init_existential_value %3 : $T, $T, $Any - %5 = apply %2(%4) : $@convention(thin) (@in Any) -> () - destroy_value %0 : $T - %7 = tuple () - return %7 : $() -} diff --git a/test/SILOptimizer/copy_propagation_opaque.sil b/test/SILOptimizer/copy_propagation_opaque.sil index 25e184ff83ba2..8fda26d12dfcf 100644 --- a/test/SILOptimizer/copy_propagation_opaque.sil +++ b/test/SILOptimizer/copy_propagation_opaque.sil @@ -12,7 +12,7 @@ // // REQUIRES: asserts -sil_stage canonical +sil_stage raw import Builtin import Swift diff --git a/test/SILOptimizer/opaque_values_mandatory.sil b/test/SILOptimizer/opaque_values_mandatory.sil index 91a998b4211e7..7a5128afc550f 100644 --- a/test/SILOptimizer/opaque_values_mandatory.sil +++ b/test/SILOptimizer/opaque_values_mandatory.sil @@ -4,6 +4,9 @@ // RUN: -enable-ossa-modules -enable-copy-propagation \ // RUN: -enable-lexical-borrow-scopes | \ 
// RUN: %FileCheck %s +// +// These tests assume that opaque values are not lowered until OSSA lowering. +// REQUIRES: enable_opaque_values import Builtin @@ -33,7 +36,7 @@ bb0(%0 : @owned $T): // CHECK: destroy_value %1 : $T // CHECK: return %{{.*}} : $() // CHECK-LABEL: } // end sil function 'f020_assign_inout' -sil hidden [ossa] @f020_assign_inout : $@convention(thin) (@inout T, @in T) -> () { +sil [ossa] @f020_assign_inout : $@convention(thin) (@inout T, @in T) -> () { bb0(%0 : $*T, %1 : @owned $T): %2 = copy_value %1 : $T assign %2 to %0 : $*T @@ -51,20 +54,18 @@ bb0(%0 : $*T, %1 : @owned $T): // Note: the tuple construction is simplified away. // CHECK: return %2 : $(Builtin.Int64, Builtin.Int64, Builtin.Int64) // CHECK-LABEL: } // end sil function 'f030_callMultiResult' -sil @f030_callMultiResult : $@convention(thin) (Int) -> (Int, Int, Int) { +sil [ossa] @f030_callMultiResult : $@convention(thin) (Int) -> (Int, Int, Int) { bb0(%0 : $Int): %1 = function_ref @f040_multiResult : $@convention(thin) <τ_0_0> (@in τ_0_0) -> (@out τ_0_0, @out τ_0_0, @out τ_0_0) %2 = apply %1(%0) : $@convention(thin) <τ_0_0> (@in τ_0_0) -> (@out τ_0_0, @out τ_0_0, @out τ_0_0) - %3 = tuple_extract %2 : $(Int, Int, Int), 0 - %4 = tuple_extract %2 : $(Int, Int, Int), 1 - %5 = tuple_extract %2 : $(Int, Int, Int), 2 + (%3, %4, %5) = destructure_tuple %2 : $(Int, Int, Int) %6 = tuple (%3 : $Int, %4 : $Int, %5 : $Int) return %6 : $(Int, Int, Int) } // Test returning an opaque tuple of tuples. // --- -// CHECK-LABEL: sil hidden [noinline] @f040_multiResult : $@convention(thin) (@in T) -> (@out T, @out T, @out T) { +// CHECK-LABEL: sil [noinline] @f040_multiResult : $@convention(thin) (@in T) -> (@out T, @out T, @out T) { // CHECK: bb0(%0 : $T): // CHECK: %1 = copy_value %0 : $T // CHECK: %2 = copy_value %0 : $T @@ -73,8 +74,8 @@ bb0(%0 : $Int): // CHECK: %5 = tuple (%1 : $T, %2 : $T, %3 : $T) // CHECK: return %5 : $(T, T, T) // CHECK-LABEL: } // end sil function 'f040_multiResult' -sil hidden [noinline] @f040_multiResult : $@convention(thin) (@in T) -> (@out T, @out T, @out T) { -bb0(%0 : $T): +sil [noinline] [ossa] @f040_multiResult : $@convention(thin) (@in T) -> (@out T, @out T, @out T) { +bb0(%0 : @owned $T): %2 = copy_value %0 : $T %3 = copy_value %0 : $T %4 = copy_value %0 : $T diff --git a/test/SILOptimizer/opaque_values_opt.sil b/test/SILOptimizer/opaque_values_opt.sil index f1618f2db9d76..430f157d2ba1f 100644 --- a/test/SILOptimizer/opaque_values_opt.sil +++ b/test/SILOptimizer/opaque_values_opt.sil @@ -1,9 +1,12 @@ // RUN: %target-sil-opt -O -enable-sil-opaque-values -emit-sorted-sil %s | %FileCheck %s -// REQUIRES: atrick-to-look-at +// +// These tests assume that opaque values are lowered in the +// optimization pipeline. They are currently only lowered in raw sil. 
+// +// REQUIRES: enable_opaque_values import Builtin -// CHECK-LABEL: sil_stage canonical sil_stage canonical public typealias Int = Builtin.Int64 @@ -12,12 +15,12 @@ public typealias Int = Builtin.Int64 // CHECK: bb0(%0 : $Builtin.Int64): // CHECK: return %0 : $Builtin.Int64 // CHECK: } // end sil function '$s20f010_genericIdentityBi64__Tg5' -sil hidden [noinline] @f010_genericIdentity : $@convention(thin) (@in T) -> @out T { -bb0(%0 : $T): +sil hidden [noinline] [ossa] @f010_genericIdentity : $@convention(thin) (@in T) -> @out T { +bb0(%0 : @owned $T): return %0 : $T } -sil @f015_callGeneric : $@convention(thin) (Builtin.Int64) -> Builtin.Int64 { +sil [ossa] @f015_callGeneric : $@convention(thin) (Builtin.Int64) -> Builtin.Int64 { bb0(%0 : $Builtin.Int64): %2 = function_ref @f010_genericIdentity : $@convention(thin) <τ_0_0> (@in τ_0_0) -> @out τ_0_0 %3 = apply %2(%0) : $@convention(thin) <τ_0_0> (@in τ_0_0) -> @out τ_0_0 @@ -37,8 +40,8 @@ bb0(%0 : $Builtin.Int64): // CHECK: %5 = tuple (%1 : $T, %2 : $T, %3 : $T) // CHECK: return %5 : $(T, T, T) // CHECK-LABEL: } // end sil function 'f020_multiResult' -sil hidden [noinline] @f020_multiResult : $@convention(thin) (@in T) -> (@out T, @out T, @out T) { -bb0(%0 : $T): +sil hidden [noinline] [ossa] @f020_multiResult : $@convention(thin) (@in T) -> (@out T, @out T, @out T) { +bb0(%0 : @owned $T): %2 = copy_value %0 : $T %3 = copy_value %0 : $T %4 = copy_value %0 : $T @@ -56,7 +59,7 @@ bb0(%0 : $T): // CHECK: %2 = apply %1(%0) : $@convention(thin) (Builtin.Int64) -> (Builtin.Int64, @out Builtin.Int64, @out Builtin.Int64) // CHECK: return %2 : $(Builtin.Int64, Builtin.Int64, Builtin.Int64) // CHECK-LABEL: } // end sil function 'f030_callMultiResult' -sil @f030_callMultiResult : $@convention(thin) (Int) -> (Int, Int, Int) { +sil [ossa] @f030_callMultiResult : $@convention(thin) (Int) -> (Int, Int, Int) { bb0(%0 : $Int): %1 = function_ref @f020_multiResult : $@convention(thin) <τ_0_0> (@in τ_0_0) -> (@out τ_0_0, @out τ_0_0, @out τ_0_0) %2 = apply %1(%0) : $@convention(thin) <τ_0_0> (@in τ_0_0) -> (@out τ_0_0, @out τ_0_0, @out τ_0_0) diff --git a/test/SILOptimizer/specialize_opaque.sil b/test/SILOptimizer/specialize_opaque.sil index 97f93f58fbf47..a3ffb78d75ad0 100644 --- a/test/SILOptimizer/specialize_opaque.sil +++ b/test/SILOptimizer/specialize_opaque.sil @@ -1,6 +1,6 @@ // RUN: %target-sil-opt -enable-sil-opaque-values -enable-sil-verify-all -generic-specializer %s | %FileCheck %s -sil_stage canonical +sil_stage raw import Builtin diff --git a/test/SILOptimizer/specialize_opaque_ossa.sil b/test/SILOptimizer/specialize_opaque_ossa.sil index 447e5e1b576b5..36a6690fda7fa 100644 --- a/test/SILOptimizer/specialize_opaque_ossa.sil +++ b/test/SILOptimizer/specialize_opaque_ossa.sil @@ -1,6 +1,6 @@ // RUN: %target-sil-opt -enable-sil-opaque-values -enable-sil-verify-all -generic-specializer %s | %FileCheck %s -sil_stage canonical +sil_stage raw import Builtin diff --git a/test/sil-passpipeline-dump/basic.test-sh b/test/sil-passpipeline-dump/basic.test-sh index 1aab22a8fce32..750e697688c91 100644 --- a/test/sil-passpipeline-dump/basic.test-sh +++ b/test/sil-passpipeline-dump/basic.test-sh @@ -1,7 +1,7 @@ // RUN: %sil-passpipeline-dumper -Onone | %FileCheck %s // CHECK: --- -// CHECK: name: non-Diagnostic Enabling Mandatory Optimizations +// CHECK: name: Non-Diagnostic Mandatory Optimizations // CHECK: passes: [ "for-each-loop-unroll", "mandatory-combine", // CHECK: "mandatory-arc-opts" ] // CHECK: --- From 4ceed0ef647d070ed91575ecc75d05d6688c382a Mon 
Sep 17 00:00:00 2001 From: Andrew Trick Date: Tue, 1 Mar 2022 13:22:42 -0800 Subject: [PATCH 05/29] Add support for indirect tuple-type results. This could happen as a result of specialization or concrete address-only values. For now, it's just tested by SIL unit tests. --- include/swift/SIL/ApplySite.h | 8 +-- .../Mandatory/AddressLowering.cpp | 67 +++++++++++-------- lib/SILOptimizer/Mandatory/AddressLowering.h | 6 +- lib/SILOptimizer/Utils/Generics.cpp | 4 +- test/SILOptimizer/address_lowering.sil | 57 ++++++++++++++++ 5 files changed, 104 insertions(+), 38 deletions(-) diff --git a/include/swift/SIL/ApplySite.h b/include/swift/SIL/ApplySite.h index 5bc9b004b5084..e74c27716fa3e 100644 --- a/include/swift/SIL/ApplySite.h +++ b/include/swift/SIL/ApplySite.h @@ -585,9 +585,9 @@ class FullApplySite : public ApplySite { } /// Get the SIL value that represents all of the given call's results. For a - /// single direct result, returns the result. For multiple results, returns a - /// fake tuple value. The tuple has no storage of its own. The real results - /// must be extracted from it. + /// single direct result, returns the actual result. For multiple results, + /// returns a pseudo-result tuple. The tuple has no storage of its own. The + /// real results must be extracted from it. /// /// For ApplyInst, returns the single-value instruction itself. /// @@ -596,7 +596,7 @@ class FullApplySite : public ApplySite { /// For BeginApplyInst, returns an invalid value. For coroutines, there is no /// single value representing all results. Yielded values are generally /// handled differently since they have the convention of incoming arguments. - SILValue getPseudoResult() const { + SILValue getResult() const { switch (getKind()) { case FullApplySiteKind::ApplyInst: return SILValue(cast(getInstruction())); diff --git a/lib/SILOptimizer/Mandatory/AddressLowering.cpp b/lib/SILOptimizer/Mandatory/AddressLowering.cpp index 81efd270a1f6c..1e8065c1d9599 100644 --- a/lib/SILOptimizer/Mandatory/AddressLowering.cpp +++ b/lib/SILOptimizer/Mandatory/AddressLowering.cpp @@ -174,14 +174,19 @@ cleanupAfterCall(FullApplySite apply, // that lowers values to storage. //===----------------------------------------------------------------------===// -/// If \p pseudoResult has multiple results, return the destructure. -static DestructureTupleInst *getCallMultiResult(SILValue pseudoResult) { - if (pseudoResult->getType().is()) { - if (auto *use = pseudoResult->getSingleUse()) - return cast(use->getUser()); +/// If \p pseudoResult represents multiple results and at least one result is +/// used, then return the destructure. 
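+///
+/// A minimal sketch (the names %f, %r, %in are illustrative), assuming a
+/// callee with two opaque direct results:
+///
+///   %r = apply %f(%in) : $() -> (@out T, @out T)
+///   (%r0, %r1) = destructure_tuple %r : $(T, T)
+///
+/// Here %r is the pseudo-result with no storage of its own, and the
+/// destructure_tuple is its only use.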
+static DestructureTupleInst *getCallDestructure(FullApplySite apply) { + if (apply.getSubstCalleeConv().getNumDirectSILResults() == 1) + return nullptr; - assert(pseudoResult->use_empty() && "pseudo result can't be used"); - } + SILValue pseudoResult = apply.getResult(); + assert(pseudoResult->getType().is()); + if (auto *use = pseudoResult->getSingleUse()) + return cast(use->getUser()); + + assert(pseudoResult->use_empty() + && "pseudo result can only be used by a single destructure_tuple"); return nullptr; } @@ -205,19 +210,18 @@ static bool visitCallResults(FullApplySite apply, llvm::function_ref visitor) { auto fnConv = apply.getSubstCalleeConv(); - SILValue pseudoResult = apply.getPseudoResult(); - if (auto *destructure = getCallMultiResult(pseudoResult)) { + if (auto *destructure = getCallDestructure(apply)) { return visitCallMultiResults(destructure, fnConv, visitor); } - return visitor(pseudoResult, *fnConv.getDirectSILResults().begin()); + return visitor(apply.getResult(), *fnConv.getDirectSILResults().begin()); } /// Return true if the given value is either a "fake" tuple that represents all /// of a call's results or an empty tuple of no results. This may return true /// for either tuple_inst or a block argument. static bool isPseudoCallResult(SILValue value) { - if (isa(value)) - return value->getType().is(); + if (auto *apply = dyn_cast(value)) + return ApplySite(apply).getSubstCalleeConv().getNumDirectSILResults() > 1; auto *bbArg = dyn_cast(value); if (!bbArg) @@ -227,11 +231,18 @@ static bool isPseudoCallResult(SILValue value) { if (!term) return false; - return isa(term) && bbArg->getType().is(); + auto *tryApply = dyn_cast(term); + if (!tryApply) + return false; + + return ApplySite(tryApply).getSubstCalleeConv().getNumDirectSILResults() > 1; } /// Return true if this is a pseudo-return value. static bool isPseudoReturnValue(SILValue value) { + if (value->getFunction()->getConventions().getNumDirectSILResults() < 2) + return false; + if (auto *tuple = dyn_cast(value)) { Operand *singleUse = tuple->getSingleUse(); return singleUse && isa(singleUse->getUser()); @@ -261,9 +272,12 @@ static SILValue getTupleStorageValue(Operand *operand) { if (!singleUse || !isa(singleUse->getUser())) return tuple; + SILFunction *function = tuple->getFunction(); + if (function->getConventions().getNumDirectSILResults() < 2) + return tuple; + unsigned resultIdx = tuple->getElementIndex(operand); - SILFunction *function = tuple->getFunction(); auto loweredFnConv = getLoweredFnConv(function); assert(loweredFnConv.getResults().size() == tuple->getElements().size()); @@ -279,11 +293,11 @@ static SILValue getTupleStorageValue(Operand *operand) { /// Return the value representing storage for a single return value. /// -/// bb0(%loweredIndirectResult : $*T, ...) +/// bb0(%loweredIndirectResult : $*T, ...) // function entry /// return %oper /// /// For %oper, return %loweredIndirectResult -static SILValue getSingleReturnValue(Operand *operand) { +static SILValue getSingleReturnAddress(Operand *operand) { assert(!isPseudoReturnValue(operand->get())); auto *function = operand->getParentFunction(); @@ -612,7 +626,7 @@ void OpaqueValueVisitor::visitValue(SILValue value) { // Canonicalize returned values. // -// Given: +// Given $() -> @out (T, T): // %t = def : $(T, T) // use %t : $(T, T) // return %t : $(T, T) @@ -807,7 +821,7 @@ static SILValue getProjectedUseValue(Operand *operand) { // Return instructions can project into the return value. 
case SILInstructionKind::ReturnInst: - return getSingleReturnValue(operand); + return getSingleReturnAddress(operand); } return SILValue(); } @@ -1420,9 +1434,7 @@ AddressMaterialization::materializeProjectionIntoUse(Operand *operand, } case SILInstructionKind::TupleInst: { auto *tupleInst = cast(user); - // Function return values. - if (tupleInst->hasOneUse() - && isa(tupleInst->use_begin()->getUser())) { + if (isPseudoReturnValue(tupleInst)) { unsigned resultIdx = tupleInst->getElementIndex(operand); assert(resultIdx < pass.loweredFnConv.getNumIndirectSILResults()); // Cannot call getIndirectSILResults here because that API uses the @@ -1830,9 +1842,8 @@ void ApplyRewriter::convertApplyWithIndirectResults() { // Populate newCallArgs. makeIndirectArgs(newCallArgs); - // Record the original results before potentially removing the apply - // (try_apply is removed during rewriting). - auto *destructure = getCallMultiResult(apply.getPseudoResult()); + // Record the original result destructure before deleting a try_apply. + auto *destructure = getCallDestructure(apply); switch (apply.getKind()) { case FullApplySiteKind::ApplyInst: { @@ -2071,7 +2082,7 @@ void ApplyRewriter::rewriteTryApply(ArrayRef newCallArgs) { tryApply->getNormalBB(), tryApply->getErrorBB(), tryApply->getApplyOptions(), tryApply->getSpecializationInfo()); - auto *resultArg = cast(apply.getPseudoResult()); + auto *resultArg = cast(apply.getResult()); auto replaceTermResult = [&](SILValue newResultVal) { SILType resultTy = loweredCalleeConv.getSILResultType(typeCtx); @@ -2091,8 +2102,6 @@ void ApplyRewriter::rewriteTryApply(ArrayRef newCallArgs) { // Handle a single opaque result value. if (pass.valueStorageMap.contains(resultArg)) { - assert(!resultArg->getType().is()); - // Storage was materialized by materializeIndirectResultAddress. 
auto &origStorage = pass.valueStorageMap.getStorage(resultArg); assert(origStorage.isRewritten); @@ -2142,7 +2151,7 @@ void ApplyRewriter::rewriteTryApply(ArrayRef newCallArgs) { // // no uses of %d1, %d2 // void ApplyRewriter::replaceDirectResults(DestructureTupleInst *oldDestructure) { - SILValue newPseudoResult = apply.getPseudoResult(); + SILValue newPseudoResult = apply.getResult(); DestructureTupleInst *newDestructure = nullptr; if (loweredCalleeConv.getNumDirectSILResults() > 1) { @@ -2950,7 +2959,7 @@ static void rewriteIndirectApply(FullApplySite apply, ApplyRewriter(apply, pass).convertApplyWithIndirectResults(); if (!apply.getInstruction()->isDeleted()) { - assert(!getCallMultiResult(apply.getPseudoResult()) + assert(!getCallDestructure(apply) && "replaceDirectResults deletes the destructure"); pass.deleter.forceDelete(apply.getInstruction()); } diff --git a/lib/SILOptimizer/Mandatory/AddressLowering.h b/lib/SILOptimizer/Mandatory/AddressLowering.h index e4c6ae79b0c2e..c4f8a99bfe25f 100644 --- a/lib/SILOptimizer/Mandatory/AddressLowering.h +++ b/lib/SILOptimizer/Mandatory/AddressLowering.h @@ -38,13 +38,13 @@ namespace swift { /// After allocation, before materialization or rewriting, we may have: /// /// %result_addr = alloc_stack // storage for %result -/// %result = apply () -> @out T +/// %result = apply : $() -> @out T /// %extract = struct_extact %result // def-projection of %result /// /// Or, a projection may project into a composing use (use-projection): /// -/// %struct_addr = alloc_stack // storage for %struct -/// %result = apply () -> @out T // use-projection of %struct at operand #0 +/// %struct_addr = alloc_stack // storage for %struct +/// %result = apply : $() -> @out T // use-projection of %struct at operand #0 /// %struct = struct %result /// /// A phi-projection is a use projection that projects its entire value diff --git a/lib/SILOptimizer/Utils/Generics.cpp b/lib/SILOptimizer/Utils/Generics.cpp index 7f4d0160fce49..ecd63f31cd909 100644 --- a/lib/SILOptimizer/Utils/Generics.cpp +++ b/lib/SILOptimizer/Utils/Generics.cpp @@ -2242,7 +2242,7 @@ SILFunction *ReabstractionThunkGenerator::createThunk() { Arguments.push_back(NewArg); } FullApplySite ApplySite = createReabstractionThunkApply(Builder); - SILValue ReturnValue = ApplySite.getPseudoResult(); + SILValue ReturnValue = ApplySite.getResult(); assert(ReturnValue && "getPseudoResult out of sync with ApplySite?!"); Builder.createReturn(Loc, ReturnValue); @@ -2255,7 +2255,7 @@ SILFunction *ReabstractionThunkGenerator::createThunk() { FullApplySite ApplySite = createReabstractionThunkApply(Builder); - SILValue ReturnValue = ApplySite.getPseudoResult(); + SILValue ReturnValue = ApplySite.getResult(); assert(ReturnValue && "getPseudoResult out of sync with ApplySite?!"); if (ReturnValueAddr) { diff --git a/test/SILOptimizer/address_lowering.sil b/test/SILOptimizer/address_lowering.sil index afcbbfb965fad..5ee49ed89b145 100644 --- a/test/SILOptimizer/address_lowering.sil +++ b/test/SILOptimizer/address_lowering.sil @@ -79,6 +79,63 @@ bb0(%0 : @owned $T): return %0 : $T } +// This could happen as a result of either partial specialization from +// a single type parameter into a generic tuple, or specialization +// from a single type parameter into a tuple of concrete address-only +// types. 
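+//
+// A sketch of how that can arise (hypothetical generic identity signature):
+// specializing $@convention(thin) <U> (@in U) -> @out U with U == (T, T)
+// produces $@convention(thin) <T> (@in (T, T)) -> @out (T, T), which is the
+// shape exercised by f011_identity_tuple below.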
+// +// CHECK-LABEL: sil [ossa] @f011_identity_tuple : $@convention(thin) (@in (T, T)) -> @out (T, T) { +// CHECK: bb0(%0 : $*(T, T), %1 : $*(T, T)): +// CHECK: copy_addr [take] %1 to [initialization] %0 : $*(T, T) +// CHECK-LABEL: } // end sil function 'f011_identity_tuple' +sil [ossa] @f011_identity_tuple : $@convention(thin) (@in (T, T)) -> @out (T, T) { +bb0(%0 : @owned $(T, T)): + return %0 : $(T, T) +} + +// CHECK-LABEL: sil [ossa] @f012_decompose_tuple_arg : $@convention(thin) (@in (T, T)) -> @out (T, T) { +// CHECK: bb0(%0 : $*(T, T), %1 : $*(T, T)): +// CHECK: [[ARG0:%.*]] = tuple_element_addr %1 : $*(T, T), 0 +// CHECK: [[ARG1:%.*]] = tuple_element_addr %1 : $*(T, T), 1 +// CHECK: [[RET0:%.*]] = tuple_element_addr %0 : $*(T, T), 0 +// CHECK: apply %{{.*}}([[RET0]], [[ARG0]]) : $@convention(thin) <τ_0_0> (@in τ_0_0) -> @out τ_0_0 +// CHECK: [[RET1:%.*]] = tuple_element_addr %0 : $*(T, T), 1 +// CHECK: copy_addr [take] [[ARG1]] to [initialization] [[RET1]] : $*T +// CHECK-LABEL: } // end sil function 'f012_decompose_tuple_arg' +sil [ossa] @f012_decompose_tuple_arg : $@convention(thin) (@in (T, T)) -> @out (T, T) { +bb0(%0 : @owned $(T, T)): + (%arg0, %arg1) = destructure_tuple %0 : $(T, T) + %f = function_ref @f010_addrlower_identity : $@convention(thin) (@in T) -> @out T + %call = apply %f(%arg0) : $@convention(thin) <τ_0_0> (@in τ_0_0) -> @out τ_0_0 + %result = tuple (%call : $T, %arg1 : $T) + return %result : $(T, T) +} + +// CHECK-LABEL: sil [ossa] @f013_pass_tuple_arg : $@convention(thin) (@in T) -> @out T { +// CHECK: bb0(%0 : $*T, %1 : $*T): +// CHECK: [[IN:%.*]] = alloc_stack $(T, T) +// CHECK: [[OUT:%.*]] = alloc_stack $(T, T) +// CHECK: [[IN1:%.*]] = tuple_element_addr [[IN]] : $*(T, T), 1 +// CHECK: copy_addr %1 to [initialization] [[IN1]] : $*T +// CHECK: [[IN0:%.*]] = tuple_element_addr %2 : $*(T, T), 0 +// CHECK: copy_addr [take] %1 to [initialization] [[IN0]] : $*T +// CHECK: apply %{{.*}}([[OUT]], [[IN]]) : $@convention(thin) <τ_0_0> (@in (τ_0_0, τ_0_0)) -> @out (τ_0_0, τ_0_0) +// CHECK: [[RET:%.*]] = tuple_element_addr [[OUT]] : $*(T, T), 0 +// CHECK: [[DEAD:%.*]] = tuple_element_addr [[OUT]] : $*(T, T), 1 +// CHECK: destroy_addr [[DEAD]] : $*T +// CHECK: copy_addr [take] [[RET]] to [initialization] %0 : $*T +// CHECK-LABEL: } // end sil function 'f013_pass_tuple_arg' +sil [ossa] @f013_pass_tuple_arg : $@convention(thin) (@in T) -> @out T { +bb0(%0 : @owned $T): + %copy0 = copy_value %0 : $T + %arg = tuple (%0 : $T, %copy0 : $T) + %f = function_ref @f011_identity_tuple : $@convention(thin) (@in (T, T)) -> @out (T, T) + %call = apply %f(%arg) : $@convention(thin) (@in (T, T)) -> @out (T, T) + (%call0, %call1) = destructure_tuple %call : $(T, T) + destroy_value %call1 : $T + return %call0 : $T +} + // CHECK-LABEL: sil [ossa] @f020_multiResult : $@convention(thin) (@in T) -> (@out T, @out T, @out T) { // CHECK: %0 "$return_value" // CHECK: %1 "$return_value" From bbf6177eaeff2e1de02e65adcb6dc8bfee6381a3 Mon Sep 17 00:00:00 2001 From: Andrew Trick Date: Mon, 7 Mar 2022 23:11:19 -0800 Subject: [PATCH 06/29] [SIL-opaque] Code review suggestions Mostly documentation and typos. 
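
One point the updated comments try to make precise is the distinction between
semantic copies and moves (storage copies). A sketch, with illustrative value
names: a move consumes its source and initializes its destination,

    copy_addr [take] %src to [initialization] %dest : $*T

whereas dropping [take] would copy the source value, which is the kind of
implicit semantic copy this pass never introduces except when rewriting
explicit copies (copy_value, load [copy], store).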
--- docs/SIL.rst | 20 ++++ lib/SIL/Verifier/SILVerifier.cpp | 16 +++- .../Mandatory/AddressLowering.cpp | 91 +++++++++---------- lib/SILOptimizer/Mandatory/AddressLowering.h | 47 ++++++---- 4 files changed, 101 insertions(+), 73 deletions(-) diff --git a/docs/SIL.rst b/docs/SIL.rst index abc26102a908c..722dbc63ea7ce 100644 --- a/docs/SIL.rst +++ b/docs/SIL.rst @@ -2193,6 +2193,26 @@ parts:: return %1 : $Klass } +Forwarding Address-Only Values +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Address-only values are potentially unmovable when borrowed. This +means that they cannot be forwarded with guaranteed ownership unless +the forwarded value has the same representation as in the original +value and can reuse the same storage. Non-destructive projection is +allowed, such as `struct_extract`. Aggregation, such as `struct`, and +destructive disaggregation, such as `switch_enum` is not allowed. This +is an invariant for OSSA with opaque SIL values for these reasons: + +1. To avoid implicit semantic copies. For move-only values, this allows +complete diagnostics. And in general, it makes it impossible for SIL +passes to "accidentally" create copies. + +2. To reuse borrowed storage. This allows the optimizer to share the same +storage for multiple exclusive reads of the same variable, avoiding +copies. It may also be necessary to support native Swift atomics, which +will be unmovable-when-borrowed. + Borrowed Object based Safe Interior Pointers ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/lib/SIL/Verifier/SILVerifier.cpp b/lib/SIL/Verifier/SILVerifier.cpp index 51f1e635da8c4..8863f2ebd4c59 100644 --- a/lib/SIL/Verifier/SILVerifier.cpp +++ b/lib/SIL/Verifier/SILVerifier.cpp @@ -1008,6 +1008,13 @@ class SILVerifier : public SILVerifierBase { auto *TI = predBB->getTerminator(); if (F.hasOwnership()) { require(isa(TI), "All phi inputs must be branch operands."); + + // Address-only values are potentially unmovable when borrowed. See also + // checkOwnershipForwardingInst. A phi implies a move of its arguments + // because they can't necessarilly all reuse the same storage. + require((!arg->getType().isAddressOnly(F) + || arg->getOwnershipKind() != OwnershipKind::Guaranteed), + "Guaranteed address-only phi not allowed--implies a copy"); } else { // FIXME: when critical edges are removed and cond_br arguments are // disallowed, only allow BranchInst. @@ -1269,10 +1276,11 @@ class SILVerifier : public SILVerifierBase { checkOwnershipForwardingTermInst(term); } - // Address-only values are potentially move-only, and unmovable if they are - // borrowed. Ensure that guaranteed address-only values are forwarded with - // the same representation. Non-destructive projection is - // allowed. Aggregation and destructive disaggregation is not allowed. + // Address-only values are potentially unmovable when borrowed. Ensure that + // guaranteed address-only values are forwarded with the same + // representation. Non-destructive projection is allowed. Aggregation and + // destructive disaggregation is not allowed. See SIL.rst, Forwarding + // Addres-Only Values. 
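+  //
+  // A sketch of the distinction (illustrative names; %s and its fields are
+  // address-only):
+  //   %field = struct_extract %s : $S, #S.field  // OK: projects out of %s
+  //   %agg = struct $S (%t : $T)                 // rejected when guaranteed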
if (ownership == OwnershipKind::Guaranteed && OwnershipForwardingMixin::isAddressOnly(i)) { require(OwnershipForwardingMixin::hasSameRepresentation(i), diff --git a/lib/SILOptimizer/Mandatory/AddressLowering.cpp b/lib/SILOptimizer/Mandatory/AddressLowering.cpp index 1e8065c1d9599..983b87bd73766 100644 --- a/lib/SILOptimizer/Mandatory/AddressLowering.cpp +++ b/lib/SILOptimizer/Mandatory/AddressLowering.cpp @@ -14,21 +14,23 @@ /// memory locations such as a stack locations. This is mandatory for IRGen. /// /// Lowering to LLVM IR requires each SILValue's type to be a valid "SIL storage -/// type". Opaque SILValues have address-only types. Address-only values require -/// indirect storage in LLVM, so their SIL storage type must be an address type. +/// type". Opaque SILValues have address-only types. These require indirect +/// storage in LLVM, so their SIL storage type must be an address type. /// -/// This pass should not introduce any semantic copies. Guaranteed values always -/// reuse the borrowed value's storage. This means that we SIL cannot allow -/// guaranteed opaque uses unless they are projections of the definition. In -/// particular, borrowed structs, tuples, and enums of address-only types are -/// not allowed. +/// This pass never creates copies except to replace explicit value copies +/// (copy_value, load [copy], store). For move-only values, this allows complete +/// diagnostics. And in general, it makes it impossible for SIL passes to +/// "accidentally" create copies. /// -/// When owned values are consumed by phis, multiple storage locations are -/// required to avoid interfering with other phi operands. However, the value -/// never needs to be live in multiple storage locations a once. When the value -/// is consumed by a phi, either it's own storage is coalesced with the phi -/// storage (they have the same address), or the value is bitwise moved into the -/// phi's storage. +/// This pass inserts moves (copy_addr [take] [initialize]) of owned values to +/// - compose aggregates +/// - resolve phi interference +/// +/// For guarantee values, this pass inserts neither copies nor moves. Opaque +/// values are potentially unmovable when borrowed. This means that guaranteed +/// address-only aggregates and phis are prohibited. This SIL invariant is +/// enforced by SILVerifier::checkOwnershipForwardingInst() and +/// SILVerifier::visitSILPhiArgument(). /// /// ## Step #1: Map opaque values /// @@ -58,7 +60,8 @@ /// during rewriting. /// /// After allocating storage for all non-phi opaque values, phi storage is -/// allocated. This is handled by a PhiStorageOptimizer that checks for +/// allocated. (Phi values are block arguments in which phi's arguments are +/// branch operands). This is handled by a PhiStorageOptimizer that checks for /// interference among the phi operands and reuses storage allocated to other /// values. /// @@ -169,7 +172,7 @@ cleanupAfterCall(FullApplySite apply, // Calls are currently SILValues, but when the result type is a tuple, the call // value does not represent a real value with storage. This is a bad situation // for address lowering because there's no way to tell from any given value -// whether its legal to assign storage to that value. As a result, the +// whether it's legal to assign storage to that value. As a result, the // implementation of call lowering doesn't fall out naturally from the algorithm // that lowers values to storage. 
//===----------------------------------------------------------------------===// @@ -218,7 +221,7 @@ visitCallResults(FullApplySite apply, /// Return true if the given value is either a "fake" tuple that represents all /// of a call's results or an empty tuple of no results. This may return true -/// for either tuple_inst or a block argument. +/// for either an apply instruction or a block argument. static bool isPseudoCallResult(SILValue value) { if (auto *apply = dyn_cast(value)) return ApplySite(apply).getSubstCalleeConv().getNumDirectSILResults() > 1; @@ -255,7 +258,7 @@ static bool isPseudoReturnValue(SILValue value) { /// the tuple is a pseudo-return value, return the indirect function argument /// for the corresponding result after lowering. /// -/// bb0(%loweredIndirectResult : $*T, ...) +/// bb0(..., %loweredIndirectResult : $*T, ...) /// .... /// %tuple = tuple(..., %operand, ...) /// return %tuple @@ -268,16 +271,12 @@ static bool isPseudoReturnValue(SILValue value) { /// (see insertIndirectReturnArgs()). static SILValue getTupleStorageValue(Operand *operand) { auto *tuple = cast(operand->getUser()); - Operand *singleUse = tuple->getSingleUse(); - if (!singleUse || !isa(singleUse->getUser())) - return tuple; - - SILFunction *function = tuple->getFunction(); - if (function->getConventions().getNumDirectSILResults() < 2) + if (!isPseudoReturnValue(tuple)) return tuple; unsigned resultIdx = tuple->getElementIndex(operand); + auto *function = tuple->getFunction(); auto loweredFnConv = getLoweredFnConv(function); assert(loweredFnConv.getResults().size() == tuple->getElements().size()); @@ -286,14 +285,14 @@ static SILValue getTupleStorageValue(Operand *operand) { if (loweredFnConv.isSILIndirect(result)) ++indirectResultIdx; } - // Cannot call F->getIndirectSILResults here because that API uses the + // Cannot call function->getIndirectSILResults here because that API uses the // function conventions before address lowering. return function->getArguments()[indirectResultIdx]; } /// Return the value representing storage for a single return value. /// -/// bb0(%loweredIndirectResult : $*T, ...) // function entry +/// bb0(..., %loweredIndirectResult : $*T, ...) // function entry /// return %oper /// /// For %oper, return %loweredIndirectResult @@ -301,9 +300,7 @@ static SILValue getSingleReturnAddress(Operand *operand) { assert(!isPseudoReturnValue(operand->get())); auto *function = operand->getParentFunction(); - auto loweredFnConv = getLoweredFnConv(function); - assert(loweredFnConv.getNumIndirectSILResults() == 1); - (void)loweredFnConv; + assert(getLoweredFnConv(function).getNumIndirectSILResults() == 1); // Cannot call getIndirectSILResults here because that API uses the // function conventions before address lowering. 
@@ -331,7 +328,7 @@ static bool isStoreCopy(SILValue value) { return isa(user) || isa(user); } -ValueStorage &ValueStorageMap::insertValue(SILValue value) { +void ValueStorageMap::insertValue(SILValue value, SILValue storageAddress) { assert(!stableStorage && "cannot grow stable storage map"); auto hashResult = @@ -339,9 +336,7 @@ ValueStorage &ValueStorageMap::insertValue(SILValue value) { (void)hashResult; assert(hashResult.second && "SILValue already mapped"); - valueVector.emplace_back(value, ValueStorage()); - - return valueVector.back().storage; + valueVector.emplace_back(value, ValueStorage(storageAddress)); } void ValueStorageMap::replaceValue(SILValue oldValue, SILValue newValue) { @@ -409,7 +404,7 @@ struct AddressLoweringState { SmallBlotSetVector indirectApplies; // All function-exiting terminators (return or throw instructions). - SmallVector exitingInsts; + SmallVector exitingInsts; // Copies from a phi's operand storage to the phi storage. These logically // occur on the CFG edge. Keep track of them to resolve anti-dependencies. @@ -462,7 +457,7 @@ struct AddressLoweringState { /// Before populating the ValueStorageMap, replace each value-typed argument to /// the current function with an address-typed argument by inserting a temporary /// load instruction. -static void convertIndirectFunctionArgs(AddressLoweringState &pass) { +static void convertDirectToIndirectFunctionArgs(AddressLoweringState &pass) { // Insert temporary argument loads at the top of the function. SILBuilder argBuilder = pass.getBuilder(pass.function->getEntryBlock()->begin()); @@ -490,9 +485,7 @@ static void convertIndirectFunctionArgs(AddressLoweringState &pass) { // Indirect calling convention may be used for loadable types. In that // case, generating the argument loads is sufficient. if (addrType.isAddressOnly(*pass.function)) { - auto &storage = pass.valueStorageMap.insertValue(loadArg); - storage.storageAddress = arg; - storage.isRewritten = true; + pass.valueStorageMap.insertValue(loadArg, arg); } } ++argIdx; @@ -520,10 +513,9 @@ static unsigned insertIndirectReturnArgs(AddressLoweringState &pass) { argIdx, bodyResultTy.getAddressType(), OwnershipKind::None, var); // Insert function results into valueStorageMap so that the caller storage // can be projected onto values inside the function as use projections. - auto &storage = pass.valueStorageMap.insertValue(funcArg); + // // This is the only case where a value defines its own storage. - storage.storageAddress = funcArg; - storage.isRewritten = true; + pass.valueStorageMap.insertValue(funcArg, funcArg); ++argIdx; } @@ -621,10 +613,11 @@ void OpaqueValueVisitor::visitValue(SILValue value) { pass.valueStorageMap.getStorage(value).storageAddress)); return; } - pass.valueStorageMap.insertValue(value); + pass.valueStorageMap.insertValue(value, SILValue()); } -// Canonicalize returned values. +// Canonicalize returned values. For multiple direct results, the operand of the +// return instruction must be a tuple with no other uses. // // Given $() -> @out (T, T): // %t = def : $(T, T) @@ -688,7 +681,7 @@ void OpaqueValueVisitor::canonicalizeReturnValues() { /// function. static void prepareValueStorage(AddressLoweringState &pass) { // Fixup this function's argument types with temporary loads. - convertIndirectFunctionArgs(pass); + convertDirectToIndirectFunctionArgs(pass); // Create a new function argument for each indirect result. 
insertIndirectReturnArgs(pass); @@ -2012,7 +2005,7 @@ void ApplyRewriter::rewriteApply(ArrayRef newCallArgs) { // Replace \p tryApply with a new try_apply using \p newCallArgs. // -// If the old result was a single address-only value, then create and return a +// If the old result was a single opaque value, then create and return a // fake load that takes its place in the storage map. Otherwise, return an // invalid SILValue. // @@ -3056,8 +3049,8 @@ static void removeOpaquePhis(SILBasicBlock *bb, AddressLoweringState &pass) { } } -// Instructions that use an address-only value without producing one are already -// deleted. The rest of the address-only definitions are now removed bottom-up +// Instructions that use an opaque value without producing one are already +// deleted. The rest of the opaque definitions are now removed bottom-up // by visiting valuestorageMap. // // Phis are removed here after all other instructions. @@ -3145,12 +3138,12 @@ void AddressLowering::runOnFunction(SILFunction *function) { // ## Step #1: Map opaque values // // First, rewrite this function's arguments and return values, then populate - // pass.valueStorageMap with an entry for each address-only value. + // pass.valueStorageMap with an entry for each opaque value. prepareValueStorage(pass); // ## Step #2: Allocate storage // - // For each address-only value mapped in step #1, either create an + // For each opaque value mapped in step #1, either create an // alloc_stack/dealloc_stack pair, or mark its ValueStorage entry as a // def-projection out of its operand's def or a use projection into its // composing use or into a phi (branch operand). @@ -3162,7 +3155,7 @@ void AddressLowering::runOnFunction(SILFunction *function) { // ## Step #3. Rewrite opaque values // - // Rewrite all instructions that either define or use an address-only value. + // Rewrite all instructions that either define or use an opaque value. // Creates new '_addr' variants of instructions, obtaining the storage // address from the 'valueStorageMap'. This materializes projections in // forward order, setting 'storageAddress' for each projection as it goes. diff --git a/lib/SILOptimizer/Mandatory/AddressLowering.h b/lib/SILOptimizer/Mandatory/AddressLowering.h index c4f8a99bfe25f..c75f893385ae0 100644 --- a/lib/SILOptimizer/Mandatory/AddressLowering.h +++ b/lib/SILOptimizer/Mandatory/AddressLowering.h @@ -17,18 +17,20 @@ namespace swift { -/// Track a value's storage. Stages in the storage life-cycle: +/// Track an opaque value's storage. An opaque value is a SILValue with +/// address-only type. Stages in the storage life-cycle: /// /// 1. Unallocated /// -/// 2. Allocated. Either (a) 'storageAddress' is an alloc_stack, or (b) -/// 'projectedStorageID' refers to a different ValueStorage, which recursively -/// leads to a valid 'storageAddress'. +/// 2. Allocated. Either (a) it is a root value where 'storageAddress' is an +/// alloc_stack, or (b) it is a projection where 'projectedStorageID' refers to +/// the parent ValueStorage, which recursively leads to a root value with a +/// valid 'storageAddress'. /// /// 3. Materialized. 'storageAddress' is valid. Address projections have been /// emitted at the point that this value is defined. /// -/// 4. Rewritten. The definition of this address-only value is fully translated +/// 4. Rewritten. The definition of this opaque value is fully translated /// into lowered SIL. Instructions are typically materialized and rewritten at /// the same time. 
A indirect result, however, is materialized as soon as its /// alloc_stack is emitted, but only rewritten once the call itself is @@ -45,19 +47,23 @@ namespace swift { /// /// %struct_addr = alloc_stack // storage for %struct /// %result = apply : $() -> @out T // use-projection of %struct at operand #0 -/// %struct = struct %result +/// %struct = struct (%result) /// /// A phi-projection is a use projection that projects its entire value /// through a phi rather than into a composing use. It has an invalid -/// 'projectedOperandNum'. +/// 'projectedOperandNum': /// -/// Operations that destructively resuse storage (open_existential_value, +/// %result = apply : $() -> @out T // use-projection of %phi +/// br bb1(%result) +/// bb1(%phi : @owned $T) +/// +/// Operations that destructively reuse storage (open_existential_value, /// unchecked_enum_data, and switch_enum), are not considered storage /// projections. Instead, these values have no ValueStorage but are rewritten to /// directly reuse their operand's storage. /// /// To materialize projections, address lowering follows the original def-use -/// edges for address-only values. Consequently, values that have storage cannot +/// edges for opaque values. Consequently, values that have storage cannot /// be removed from SIL or from the storage map until rewriting is /// complete. Mapped values can, however, be substituted on-the-fly by emitting /// a place-holder value and updating the map entry. This works because the @@ -76,11 +82,11 @@ struct ValueStorage { /// When either isDefProjection or isUseProjection is set, this refers to the /// storage whose "def" this value projects out of or whose operand this /// storage projects into via its "use. - uint32_t projectedStorageID; + uint32_t projectedStorageID = InvalidID; /// For use-projections, identifies the operand index of the composing use. /// Only valid for non-phi use projections. - uint16_t projectedOperandNum; + uint16_t projectedOperandNum = InvalidOper; /// Projection out of a storage def. e.g. this value is a destructure. unsigned isDefProjection : 1; @@ -96,16 +102,17 @@ struct ValueStorage { // across phis, which would result in piecewise initialization. unsigned initializesEnum : 1; - ValueStorage() { clear(); } - - void clear() { - storageAddress = SILValue(); - projectedStorageID = InvalidID; - projectedOperandNum = InvalidOper; - isUseProjection = false; + ValueStorage(SILValue storageAddress): storageAddress(storageAddress) { isDefProjection = false; + isUseProjection = false; isRewritten = false; initializesEnum = false; + + // The initial storage address is only valid when the value is effectively + // already rewritten. + if (storageAddress) { + isRewritten = true; + } } bool isAllocated() const { @@ -251,10 +258,10 @@ class ValueStorageMap { /// Insert a value in the map, creating a ValueStorage object for it. This /// must be called in RPO order. - ValueStorage &insertValue(SILValue value); + void insertValue(SILValue value, SILValue storageAddress); /// Replace a value that is mapped to storage with another value. This allows - /// limited rewritting of original address-only values. For example, block + /// limited rewriting of original opaque values. For example, block /// arguments can be replaced with fake loads in order to rewrite their /// corresponding terminator. 
void replaceValue(SILValue oldValue, SILValue newValue); From 862c7bc34a4d7cb505853d9fa3af9ca7a40118db Mon Sep 17 00:00:00 2001 From: Andrew Trick Date: Tue, 8 Mar 2022 17:31:38 -0800 Subject: [PATCH 07/29] Fix alloc_stack placement for open_existential. Compute the latestOpeningInst, not the firstOpeningInst. --- lib/SILOptimizer/Mandatory/AddressLowering.cpp | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/lib/SILOptimizer/Mandatory/AddressLowering.cpp b/lib/SILOptimizer/Mandatory/AddressLowering.cpp index 983b87bd73766..eaff802dbf136 100644 --- a/lib/SILOptimizer/Mandatory/AddressLowering.cpp +++ b/lib/SILOptimizer/Mandatory/AddressLowering.cpp @@ -1129,8 +1129,9 @@ createStackAllocation(SILValue value) { // For opened existential types, allocate stack space at the type // definition. Allocating as early as possible provides more opportunity for - // creating use projections into value. - SILInstruction *firstOpeningInst = nullptr; + // creating use projections into value. But allocation must be no earlier then + // the latest type definition. + SILInstruction *latestOpeningInst = nullptr; allocTy.getASTType().visit([&](CanType type) { auto archetype = dyn_cast(type); if (!archetype) @@ -1142,15 +1143,15 @@ createStackAllocation(SILValue value) { auto *openingInst = openingVal->getDefiningInstruction(); assert(openingVal && "all opened archetypes should be resolved"); - if (firstOpeningInst - && pass.domInfo->dominates(firstOpeningInst, openingInst)) { + if (latestOpeningInst + && pass.domInfo->dominates(openingInst, latestOpeningInst)) { return; } - firstOpeningInst = openingInst; + latestOpeningInst = openingInst; } }); - auto allocPt = firstOpeningInst ? std::next(firstOpeningInst->getIterator()) - : pass.function->begin()->begin(); + auto allocPt = latestOpeningInst ? std::next(latestOpeningInst->getIterator()) + : pass.function->begin()->begin(); auto allocBuilder = pass.getBuilder(allocPt); AllocStackInst *alloc = allocBuilder.createAllocStack(pass.genLoc(), allocTy); @@ -1158,7 +1159,7 @@ createStackAllocation(SILValue value) { auto deallocBuilder = pass.getBuilder(insertPt); deallocBuilder.createDeallocStack(pass.genLoc(), alloc); }; - if (firstOpeningInst) { + if (latestOpeningInst) { // Deallocate at the dominance frontier to ensure that allocation encloses // not only the uses of the current value, but also of any values reusing // this storage as a use projection. From 222477a0b60de26cf5e0a4157fffffcec9224512 Mon Sep 17 00:00:00 2001 From: Andrew Trick Date: Tue, 8 Mar 2022 17:32:13 -0800 Subject: [PATCH 08/29] Rename "phi copy" to "phi move" for consistency with documentation. In classic compiler terminology, this is a "phi copy" algorithm. But the documentation now tries to clearly distinguish between "semantics copies" vs. moves, where moves are "storage copies". --- .../Mandatory/AddressLowering.cpp | 98 +++++++++---------- 1 file changed, 49 insertions(+), 49 deletions(-) diff --git a/lib/SILOptimizer/Mandatory/AddressLowering.cpp b/lib/SILOptimizer/Mandatory/AddressLowering.cpp index eaff802dbf136..086f51e477a51 100644 --- a/lib/SILOptimizer/Mandatory/AddressLowering.cpp +++ b/lib/SILOptimizer/Mandatory/AddressLowering.cpp @@ -406,8 +406,7 @@ struct AddressLoweringState { // All function-exiting terminators (return or throw instructions). SmallVector exitingInsts; - // Copies from a phi's operand storage to the phi storage. These logically - // occur on the CFG edge. Keep track of them to resolve anti-dependencies. 
+ // Handle moves from a phi's operand storage to the phi storage. std::unique_ptr phiRewriter; AddressLoweringState(SILFunction *function, DominanceInfo *domInfo) @@ -758,10 +757,11 @@ static Operand *getProjectedDefOperand(SILValue value) { } } -/// Return the operand of the reused storage. These operations are always -/// rewritten by the use rewriter and destructively reuse their operand's -/// storage. If the result is address-only, then the operand must be -/// address-only (otherwise, the operand would not necessarilly have storage). +/// If \p value is a an existential or enum, then return the existential or enum +/// operand. These operations are always rewritten by the UseRewriter and always +/// destructively reuse the same storage as their operand. Note that if the +/// operation's result is address-only, then the operand must be address-only +/// and therefore must mapped to ValueStorage. static Operand *getReusedStorageOperand(SILValue value) { switch (value->getKind()) { default: @@ -785,7 +785,7 @@ static Operand *getReusedStorageOperand(SILValue value) { } /// If \p operand can project into its user, return the SILValue representing -/// user's storage. The user may composes an aggregate from its operands or +/// user's storage. The user may compose an aggregate from its operands or /// forwards its operands to arguments. /// /// TODO: Handle SwitchValueInst, CheckedCastValueBranchInst. @@ -1446,7 +1446,7 @@ AddressMaterialization::materializeProjectionIntoUse(Operand *operand, //===----------------------------------------------------------------------===// // PhiRewriter // -// Insert copies on CFG edges to break phi operand interferences. +// Insert moves on CFG edges to break phi operand interferences. //===----------------------------------------------------------------------===// namespace { @@ -1456,18 +1456,18 @@ namespace { // 1. Materialize the phi address. If the phi projects into a use, this requires // initialization of the user's storage in each predecessor. // -// 2. If the phi operand is not coalesced, then copy the operand into the +// 2. If the phi operand is not coalesced, then move the operand into the // materialized phi address. // -// For blocks with multiple phis, all copies of phi operands semantically occur +// For blocks with multiple phis, all moves of phi operands semantically occur // in parallel on the CFG edge from the predecessor to the phi block. As these -// copies are inserted into the predecessor's intruction list, maintain the -// illusion of parallel copies by resolving any interference between the phi -// copies. This is done by checking for anti-dependencies to or from other phi -// copies. If one phi copy's source reads from another phi copy's dest, then the +// moves are inserted into the predecessor's intruction list, maintain the +// illusion of parallel moves by resolving any interference between the phi +// moves. This is done by checking for anti-dependencies to or from other phi +// moves. If one phi move's source reads from another phi move's dest, then the // read must occur before the write. // -// Insert a second copy to break an anti-dependence cycle when both the source +// Insert a second move to break an anti-dependence cycle when both the source // and destination of the new phi interferes with other phis (the classic // phi-swap problem). 
// @@ -1486,18 +1486,18 @@ namespace { // br bb3(val0, val1) // bb2: // temp = alloc_stack -// copy_addr addr0 to temp -// copy_addr addr1 to addr0 -// copy_addr temp to addr1 +// copy_addr [take] addr0 to [initialization] temp +// copy_addr [take] addr1 to [initialization] addr0 +// copy_addr [take] temp to [initialization] addr1 // dealloc_stack temp // br bb3(val1, val1) // bb3(phi0, phi1): class PhiRewriter { AddressLoweringState &pass; - // A set of copies from a phi operand storage to phi storage. These logically + // A set of moves from a phi operand storage to phi storage. These logically // occur on the CFG edge. Keep track of them to resolve anti-dependencies. - SmallPtrSet phiCopies; + SmallPtrSet phiMoves; public: PhiRewriter(AddressLoweringState &pass) : pass(pass) {} @@ -1508,18 +1508,18 @@ class PhiRewriter { PhiRewriter(const PhiRewriter &) = delete; PhiRewriter &operator=(const PhiRewriter &) = delete; - CopyAddrInst *createPhiCopy(SILBuilder &builder, SILValue from, SILValue to) { - auto *copy = builder.createCopyAddr(pass.genLoc(), from, to, IsTake, + CopyAddrInst *createPhiMove(SILBuilder &builder, SILValue from, SILValue to) { + auto *move = builder.createCopyAddr(pass.genLoc(), from, to, IsTake, IsInitialization); - phiCopies.insert(copy); - return copy; + phiMoves.insert(move); + return move; } - struct CopyPosition { - SILBasicBlock::iterator latestCopyPos; + struct MovePosition { + SILBasicBlock::iterator latestMovePos; bool foundAntiDependenceCycle = false; }; - CopyPosition findPhiCopyPosition(PhiOperand phiOper); + MovePosition findPhiMovePosition(PhiOperand phiOper); }; } // anonymous namespace @@ -1529,15 +1529,15 @@ void PhiRewriter::materializeOperand(PhiOperand phiOper) { if (operStorage.isPhiProjection()) { if (operStorage.projectedStorageID == pass.valueStorageMap.getOrdinal(phiOper.getValue())) { - // This operand was coalesced with this particular phi. No copy needed. + // This operand was coalesced with this particular phi. No move needed. return; } } auto phiOperAddress = operStorage.getMaterializedAddress(); - auto copyPos = findPhiCopyPosition(phiOper); + auto movePos = findPhiMovePosition(phiOper); - auto builder = pass.getBuilder(copyPos.latestCopyPos); + auto builder = pass.getBuilder(movePos.latestMovePos); AddressMaterialization addrMat(pass, builder); auto &phiStorage = pass.valueStorageMap.getStorage(phiOper.getValue()); @@ -1545,16 +1545,16 @@ void PhiRewriter::materializeOperand(PhiOperand phiOper) { addrMat.materializeUseProjectionStorage(phiStorage, /*intoPhiOperand*/ true); - if (!copyPos.foundAntiDependenceCycle) { - createPhiCopy(builder, phiOperAddress, phiAddress); + if (!movePos.foundAntiDependenceCycle) { + createPhiMove(builder, phiOperAddress, phiAddress); return; } AllocStackInst *alloc = builder.createAllocStack(pass.genLoc(), phiOper.getValue()->getType()); - createPhiCopy(builder, phiOperAddress, alloc); + createPhiMove(builder, phiOperAddress, alloc); auto tempBuilder = pass.getBuilder(phiOper.getBranch()->getIterator()); - createPhiCopy(tempBuilder, alloc, phiAddress); + createPhiMove(tempBuilder, alloc, phiAddress); tempBuilder.createDeallocStack(pass.genLoc(), alloc); } @@ -1565,9 +1565,9 @@ PhiRewriter &AddressLoweringState::getPhiRewriter() { return *(this->phiRewriter.get()); } -// Return the latest position at which a copy into this phi may be emitted -// without violating an anti-dependence on another phi copy. 
-PhiRewriter::CopyPosition PhiRewriter::findPhiCopyPosition(PhiOperand phiOper) { +// Return the latest position at which a move into this phi may be emitted +// without violating an anti-dependence on another phi move. +PhiRewriter::MovePosition PhiRewriter::findPhiMovePosition(PhiOperand phiOper) { auto phiBaseAddress = pass.valueStorageMap.getBaseStorage(phiOper.getValue()).storageAddress; @@ -1578,34 +1578,34 @@ PhiRewriter::CopyPosition PhiRewriter::findPhiCopyPosition(PhiOperand phiOper) { auto insertPt = phiOper.getBranch()->getIterator(); bool foundEarliestInsertPoint = false; - CopyPosition copyPos; - copyPos.latestCopyPos = insertPt; + MovePosition movePos; + movePos.latestMovePos = insertPt; - // Continue scanning until all phi copies have been checked for interference. + // Continue scanning until all phi moves have been checked for interference. for (auto beginIter = phiOper.predBlock->begin(); insertPt != beginIter;) { --insertPt; - auto *phiCopy = dyn_cast(&*insertPt); - if (!phiCopy || !phiCopies.contains(phiCopy)) + auto *phiMove = dyn_cast(&*insertPt); + if (!phiMove || !phiMoves.contains(phiMove)) break; if (!foundEarliestInsertPoint - && getAccessBase(phiCopy->getSrc()) == phiBaseAddress) { - // Anti-dependence from the phi copy to the phi value. Do not copy into + && getAccessBase(phiMove->getSrc()) == phiBaseAddress) { + // Anti-dependence from the phi move to the phi value. Do not move into // the phi storage before this point. foundEarliestInsertPoint = true; } - if (getAccessBase(phiCopy->getDest()) == operBaseAddress) { - // Anti-dependence from the phi operand to the phi copy. Do not copy out + if (getAccessBase(phiMove->getDest()) == operBaseAddress) { + // Anti-dependence from the phi operand to the phi move. Do not move out // of the operand storage after this point. - copyPos.latestCopyPos = insertPt; + movePos.latestMovePos = insertPt; // If the earliest and latest points conflict, allocate a temporary. if (foundEarliestInsertPoint) { - copyPos.foundAntiDependenceCycle = true; + movePos.foundAntiDependenceCycle = true; } } } - return copyPos; + return movePos; } //===----------------------------------------------------------------------===// From 2a9c3053a84b9963aef052518f64d478a7a85c7f Mon Sep 17 00:00:00 2001 From: Andrew Trick Date: Wed, 9 Mar 2022 10:04:52 -0800 Subject: [PATCH 09/29] [SIL-opaque] avoid handling operands past 64k --- lib/SILOptimizer/Mandatory/AddressLowering.cpp | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/lib/SILOptimizer/Mandatory/AddressLowering.cpp b/lib/SILOptimizer/Mandatory/AddressLowering.cpp index 086f51e477a51..6c7325b6e007d 100644 --- a/lib/SILOptimizer/Mandatory/AddressLowering.cpp +++ b/lib/SILOptimizer/Mandatory/AddressLowering.cpp @@ -840,7 +840,11 @@ void ValueStorageMap::recordComposingUseProjection(Operand *oper, auto &storage = getStorage(oper->get()); assert(!storage.isAllocated()); storage.projectedStorageID = getOrdinal(userValue); + storage.projectedOperandNum = oper->getOperandNumber(); + assert(storage.projectedOperandNum == oper->getOperandNumber() && + "operand overflow"); + storage.isUseProjection = true; if (EnumDecl *enumDecl = userValue->getType().getEnumOrBoundGenericEnum()) { @@ -1011,6 +1015,10 @@ bool OpaqueStorageAllocation::findProjectionIntoUseImpl( assert(!getProjectedDefOperand(userValue) && "storage cannot project in two directions."); + // Avoid handling preposterous types. 
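+  // projectedOperandNum is a 16-bit field, so an operand index that does not
+  // fit in 16 bits cannot be recorded as a use projection; skip such uses and
+  // let the value be given separate storage instead.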
+ if (use->getOperandNumber() > UINT16_MAX) + continue; + // Recurse through all storage projections to find the uniquely allocated // storage. Enum storage cannot be reused across multiple subobjects because // it must be initialized via a single init_enum_data_addr instruction. From 7c663a464ead9651a61e33c4480354a1b39bd293 Mon Sep 17 00:00:00 2001 From: Andrew Trick Date: Wed, 9 Mar 2022 11:50:13 -0800 Subject: [PATCH 10/29] [SIL-opaque] More file-level documentation Explain high-level objectives and terminology with more precision. --- .../Mandatory/AddressLowering.cpp | 64 +++++++++++++++---- 1 file changed, 51 insertions(+), 13 deletions(-) diff --git a/lib/SILOptimizer/Mandatory/AddressLowering.cpp b/lib/SILOptimizer/Mandatory/AddressLowering.cpp index 6c7325b6e007d..e8f8c90d2f060 100644 --- a/lib/SILOptimizer/Mandatory/AddressLowering.cpp +++ b/lib/SILOptimizer/Mandatory/AddressLowering.cpp @@ -19,37 +19,75 @@ /// /// This pass never creates copies except to replace explicit value copies /// (copy_value, load [copy], store). For move-only values, this allows complete -/// diagnostics. And in general, it makes it impossible for SIL passes to +/// diagnostics. And in general, this makes it impossible for SIL passes to /// "accidentally" create copies. /// /// This pass inserts moves (copy_addr [take] [initialize]) of owned values to /// - compose aggregates /// - resolve phi interference /// -/// For guarantee values, this pass inserts neither copies nor moves. Opaque +/// For guaranteed values, this pass inserts neither copies nor moves. Opaque /// values are potentially unmovable when borrowed. This means that guaranteed /// address-only aggregates and phis are prohibited. This SIL invariant is /// enforced by SILVerifier::checkOwnershipForwardingInst() and /// SILVerifier::visitSILPhiArgument(). /// +/// The simplest approach to address lowering is to map each opaque SILValue to +/// a separate alloc_stack. This pass avoids doing that in the following cases: +/// +/// 1. Reused-storage: Some operations are guaranteed to reuse their operand's +/// storage. This includes extracting an enum payload and opening an existential +/// value. This is required avoid introducing new copies or moves. +/// +/// // %data's storage must reuse storage allocated for %enum +/// %data = unchecked_enum_data %enum : $Optional, #Optional.some!enumelt +/// +/// 2. Def-projection: Some operations are guaranteed to directly project out of +/// their operand's storage. This is also required to avoid introducing new +/// copies or moves. Unlike reused-storage, such projections are non-destructive +/// and repeatable. +/// +/// // %field's storage is part of the storage allocated for %struct +/// %field = struct_extract %struct, #field +/// +/// 3. Use-projection: Operations that compose aggregates may optionally allow +/// their operands to project into the storage allocated for their result. This +/// is only an optimization but is essential for reasonable code generation. +/// +/// // %field's storage may be part of the storage allocated for %struct +/// %struct = struct(..., %field, ...) +/// +/// 4. Phi-projection: Phi's may optionally allow their (branch) operands to +/// reuse the storage allocated for their result (block argument). 
This is only +/// an optimization, but is important to avoid many useless moves: +/// +/// // %arg's storage may be part of the storage allocated for %phi +/// br bb(%arg) +/// bb(%phi : @owned $T) +/// +/// The algorithm proceeds as follows: +/// /// ## Step #1: Map opaque values /// /// Populate a map from each opaque SILValue to its ValueStorage in forward /// order (RPO). Each opaque value is mapped to an ordinal ID representing the /// storage. Storage locations can now be optimized by remapping the values. /// +/// Reused-storage operations are not mapped to ValueStorage. +/// /// ## Step #2: Allocate storage /// /// In reverse order (PO), allocate the parent storage object for each opaque /// value. /// -/// If the value is a subobject extraction (struct_extract, tuple_extract, -/// open_existential_value, unchecked_enum_data), then mark the value's storage -/// as a projection from the def's storage. +/// Handle def-projection: If the value is a subobject extraction +/// (struct_extract, tuple_extract, open_existential_value, +/// unchecked_enum_data), then mark the value's storage as a projection from the +/// def's storage. /// -/// If the value's use composes a parent object from this value (struct, tuple, -/// enum), and the use's storage dominates this value, then mark the value's -/// storage as a projection into the use's storage. +/// Handle use-projection: If the value's use composes a parent object from this +/// value (struct, tuple, enum), and the use's storage dominates this value, +/// then mark the value's storage as a projection into the use's storage. /// /// ValueStorage projections can be chained. A non-projection ValueStorage is /// the root of a tree of projections. @@ -59,11 +97,11 @@ /// projections are not mapped to a `storageAddress` at this point. That happens /// during rewriting. /// -/// After allocating storage for all non-phi opaque values, phi storage is -/// allocated. (Phi values are block arguments in which phi's arguments are -/// branch operands). This is handled by a PhiStorageOptimizer that checks for -/// interference among the phi operands and reuses storage allocated to other -/// values. +/// Handle phi-projection: After allocating storage for all non-phi opaque +/// values, phi storage is allocated. (Phi values are block arguments in which +/// phi's arguments are branch operands). This is handled by a +/// PhiStorageOptimizer that checks for interference among the phi operands and +/// reuses storage allocated to other values. /// /// ## Step #3. Rewrite opaque values /// From 5181705f17471be64a9569f606d3904b8a850267 Mon Sep 17 00:00:00 2001 From: Andrew Trick Date: Wed, 9 Mar 2022 11:52:51 -0800 Subject: [PATCH 11/29] [SIL-opaque] Add section-level comment explaining storage allocation --- lib/SILOptimizer/Mandatory/AddressLowering.cpp | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/lib/SILOptimizer/Mandatory/AddressLowering.cpp b/lib/SILOptimizer/Mandatory/AddressLowering.cpp index e8f8c90d2f060..ba2c6c6aee025 100644 --- a/lib/SILOptimizer/Mandatory/AddressLowering.cpp +++ b/lib/SILOptimizer/Mandatory/AddressLowering.cpp @@ -860,7 +860,9 @@ static SILValue getProjectedUseValue(Operand *operand) { //===----------------------------------------------------------------------===// // OpaqueStorageAllocation // -// Generate alloc_stack and address projections for abstract storage locations. +// For each ValueStorage, first determine whether it can project out of its +// definition's storage or into the storage of a use. 
If so, record the +// projection information. Otherwise emit an alloc_stack for this storage root. // ===---------------------------------------------------------------------===// // Record a storage projection from the source of the given operand into its From f164c379436e1033813f230c8c0a3bcd83d9c263 Mon Sep 17 00:00:00 2001 From: Andrew Trick Date: Wed, 9 Mar 2022 11:53:34 -0800 Subject: [PATCH 12/29] [SIL-opaque] remove cleanupAfterCall helper --- .../Mandatory/AddressLowering.cpp | 49 ++++++------------- 1 file changed, 14 insertions(+), 35 deletions(-) diff --git a/lib/SILOptimizer/Mandatory/AddressLowering.cpp b/lib/SILOptimizer/Mandatory/AddressLowering.cpp index ba2c6c6aee025..d80978aae9d99 100644 --- a/lib/SILOptimizer/Mandatory/AddressLowering.cpp +++ b/lib/SILOptimizer/Mandatory/AddressLowering.cpp @@ -176,30 +176,6 @@ static SILFunctionConventions getLoweredCallConv(ApplySite call) { SILModuleConventions::getLoweredAddressConventions(call.getModule())); } -/// Invoke \p cleanup on all paths exiting a call. -static void -cleanupAfterCall(FullApplySite apply, - llvm::function_ref cleanup) { - switch (apply.getKind()) { - case FullApplySiteKind::ApplyInst: { - cleanup(std::next(apply.getInstruction()->getIterator())); - break; - } - case FullApplySiteKind::TryApplyInst: { - auto *tryApply = cast(apply.getInstruction()); - cleanup(tryApply->getNormalBB()->begin()); - cleanup(tryApply->getErrorBB()->begin()); - break; - } - case FullApplySiteKind::BeginApplyInst: { - // FIXME: Unimplemented - // - // This should be as simple as calling cleanup for all the end_applies. - llvm::report_fatal_error("Unimplemented coroutine"); - } - } -} - //===----------------------------------------------------------------------===// // Multi-Result // @@ -482,6 +458,12 @@ struct AddressLoweringState { builder.setCurrentDebugScope(originalInst->getDebugScope()); return builder; } + + void prepareBuilder(SILBuilder &builder) { + builder.setSILConventions( + SILModuleConventions::getLoweredAddressConventions( + builder.getModule())); + }; }; } // end anonymous namespace @@ -1733,22 +1715,20 @@ void CallArgRewriter::rewriteIndirectArgument(Operand *operand) { if (apply.getArgumentConvention(*operand).isOwnedConvention()) { argBuilder.createTrivialStoreOr(apply.getLoc(), argValue, allocInst, StoreOwnershipQualifier::Init); - cleanupAfterCall(apply, [&](SILBasicBlock::iterator insertPt) { - auto deallocBuilder = pass.getBuilder(insertPt); - deallocBuilder.createDeallocStack(callLoc, allocInst); + apply.insertAfterFullEvaluation([&](SILBuilder &callBuilder) { + callBuilder.createDeallocStack(callLoc, allocInst); }); } else { auto borrow = argBuilder.emitBeginBorrowOperation(callLoc, argValue); auto *storeInst = argBuilder.emitStoreBorrowOperation(callLoc, borrow, allocInst); - cleanupAfterCall(apply, [&](SILBasicBlock::iterator insertPt) { - auto cleanupBuilder = pass.getBuilder(insertPt); + apply.insertAfterFullEvaluation([&](SILBuilder &callBuilder) { if (auto *storeBorrow = dyn_cast(storeInst)) { - cleanupBuilder.emitEndBorrowOperation(callLoc, storeBorrow); + callBuilder.emitEndBorrowOperation(callLoc, storeBorrow); } - cleanupBuilder.emitEndBorrowOperation(callLoc, borrow); - cleanupBuilder.createDeallocStack(callLoc, allocInst); + callBuilder.emitEndBorrowOperation(callLoc, borrow); + callBuilder.createDeallocStack(callLoc, allocInst); }); } } @@ -2020,9 +2000,8 @@ SILValue ApplyRewriter::materializeIndirectResultAddress(SILValue oldResult, // Instead of using resultBuilder, insert dealloc immediately 
after the call // for stack discpline across loadable indirect results. - cleanupAfterCall(apply, [&](SILBasicBlock::iterator insertPt) { - auto cleanupBuilder = pass.getBuilder(insertPt); - cleanupBuilder.createDeallocStack(callLoc, allocInst); + apply.insertAfterFullEvaluation([&](SILBuilder &callBuilder) { + callBuilder.createDeallocStack(callLoc, allocInst); }); if (oldResult && !oldResult->use_empty()) { From bb0dbf37a63555d99708b2a50a26adf535d73790 Mon Sep 17 00:00:00 2001 From: Andrew Trick Date: Wed, 9 Mar 2022 21:22:07 -0800 Subject: [PATCH 13/29] [SIL-opaque] add a test case for phi coalescing Avoid attempting to coalesce enum payloads. --- .../Mandatory/PhiStorageOptimizer.cpp | 19 ++++++----- test/SILOptimizer/address_lowering_phi.sil | 32 +++++++++++++++++++ 2 files changed, 43 insertions(+), 8 deletions(-) diff --git a/lib/SILOptimizer/Mandatory/PhiStorageOptimizer.cpp b/lib/SILOptimizer/Mandatory/PhiStorageOptimizer.cpp index 4e4af608679a3..545daab64d090 100644 --- a/lib/SILOptimizer/Mandatory/PhiStorageOptimizer.cpp +++ b/lib/SILOptimizer/Mandatory/PhiStorageOptimizer.cpp @@ -116,7 +116,6 @@ void PhiStorageOptimizer::optimize() { coalescedPhi.coalescedOperands.push_back(phi.getOperand(predecessor)); return; } - occupiedBlocks.insert(phi.phiBlock); for (auto *incomingPred : phi.phiBlock->getPredecessorBlocks()) { tryCoalesceOperand(incomingPred); } @@ -145,14 +144,15 @@ bool PhiStorageOptimizer::canCoalesceValue(SILValue incomingVal) { auto &incomingStorage = valueStorageMap.getStorage(incomingVal); - // If the incoming use is pre-allocated it can't be coalesced. - // This also handles incoming values that are already coalesced with - // another use. + // If the incoming use directly reuses its def storage, projects out of its + // def storage, or is pre-allocated, then it can't be coalesced. When incoming + // storage is directly reused, isAllocated() is false. isProjection() covers + // the other cases. // // Coalescing use projections from incomingVal into its other non-phi uses - // would require by recursively following uses across projections when - // computing liveness. - if (incomingStorage.isProjection()) + // could be handled, but would require by recursively following uses across + // projections when computing liveness. + if (!incomingStorage.isAllocated() || incomingStorage.isProjection()) return false; auto *defInst = incomingVal->getDefiningInstruction(); @@ -163,7 +163,6 @@ bool PhiStorageOptimizer::canCoalesceValue(SILValue incomingVal) { // analysis of the whole phi web before coalescing phi operands. return false; } - assert(incomingStorage.isAllocated() && "nonphi must be allocated"); // Don't coalesce an incoming value unless it's storage is from a stack // allocation, which can be replaced with another alloc_stack. @@ -213,7 +212,11 @@ bool PhiStorageOptimizer::recordUseLiveness(SILValue incomingVal, for (auto *use : incomingVal->getUses()) { StackList liveBBWorklist(getFunction()); + // If \p liveBB is already occupied by another value, return + // false. Otherwise, mark \p liveBB live and push it onto liveBBWorklist. 
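+    // (A block counts as "occupied" once it has been recorded as live for a
+    // previously coalesced incoming value.)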
auto visitLiveBlock = [&](SILBasicBlock *liveBB) { + assert(liveBB != phi.phiBlock && "phi operands are consumed"); + if (occupiedBlocks.contains(liveBB)) return false; diff --git a/test/SILOptimizer/address_lowering_phi.sil b/test/SILOptimizer/address_lowering_phi.sil index a7ec7e882ac8b..5df8244fc7f4a 100644 --- a/test/SILOptimizer/address_lowering_phi.sil +++ b/test/SILOptimizer/address_lowering_phi.sil @@ -10,6 +10,11 @@ typealias AnyObject = Builtin.AnyObject typealias Int = Builtin.Int64 typealias Bool = Builtin.Int1 +enum Optional { + case none + case some(T) +} + struct SRef { @_hasStorage var object: AnyObject { get set } @_hasStorage var element: T { get set } @@ -438,3 +443,30 @@ bb6(%phi6 : @owned $InnerStruct): %outer = struct $OuterStruct (%phi6 : $InnerStruct, %3 : $AnyObject) return %outer : $OuterStruct } + +// CHECK-LABEL: sil [ossa] @f090_payloadPhiOperand : $@convention(thin) (@in Optional, @in T) -> @out T { +// CHECK: bb0(%0 : $*T, %1 : $*Optional, %2 : $*T): +// CHECK: cond_br undef, bb2, bb1 +// CHECK: bb1: +// CHECK: destroy_addr %2 : $*T +// CHECK: [[P:%.*]] = unchecked_take_enum_data_addr %1 : $*Optional, #Optional.some!enumelt +// CHECK: copy_addr [take] [[P]] to [initialization] %0 : $*T +// CHECK: br bb3 +// CHECK: bb2: +// CHECK: destroy_addr %1 : $*Optional +// CHECK: copy_addr [take] %2 to [initialization] %0 : $*T +// CHECK: br bb3 +// CHECK-LABEL: } // end sil function 'f090_payloadPhiOperand' +sil [ossa] @f090_payloadPhiOperand : $@convention(thin) (@in Optional, @in T) -> @out T { +bb0(%0 : @owned $Optional, %1 : @owned $T): + cond_br undef, bb2, bb1 +bb1: + destroy_value %1 : $T + %payload = unchecked_enum_data %0 : $Optional, #Optional.some!enumelt + br bb3(%payload : $T) +bb2: + destroy_value %0 : $Optional + br bb3(%1 : $T) +bb3(%phi : @owned $T): + return %phi : $T +} From ba3e613a9efac8ddf18cb1ef072aa6690af9fcef Mon Sep 17 00:00:00 2001 From: Andrew Trick Date: Wed, 9 Mar 2022 21:24:37 -0800 Subject: [PATCH 14/29] [SIL-opaque] in-depth top-level documentation for phi coalescing. --- .../Mandatory/PhiStorageOptimizer.cpp | 57 ++++++++++++++++++- 1 file changed, 56 insertions(+), 1 deletion(-) diff --git a/lib/SILOptimizer/Mandatory/PhiStorageOptimizer.cpp b/lib/SILOptimizer/Mandatory/PhiStorageOptimizer.cpp index 545daab64d090..bd3e3defd644f 100644 --- a/lib/SILOptimizer/Mandatory/PhiStorageOptimizer.cpp +++ b/lib/SILOptimizer/Mandatory/PhiStorageOptimizer.cpp @@ -13,11 +13,66 @@ /// PhiStorageOptimizer implements an analysis used by AddressLowering /// to reuse storage across block arguments. /// +/// In OSSA, phi operands can often be coalesced because they are +/// consuming--they end the lifetime of their operand. This optimization may +/// fail to coalesce an operand for two major reasons: +/// +/// 1. This phi operand is already coalesced with other storage, possibly of a +/// different type: +/// +/// %field = struct_extract %struct : $Struct, #field +/// br bb(%field : $T) +/// +/// bb(%phi : @owned $T): +/// ... +/// +/// 2. This phi operand interferes with another coalesced phi operand. +/// +/// Only one of the call results below, either %get0 or %get1, can be coalesced +/// with %phi. The %phi will itself be coalesced with this function's indirect +/// @out argument. 
+/// +/// sil [ossa] @function : $@convention(thin) () -> @out T { +/// bb0: +/// %get0 = apply %get() : $@convention(thin) <τ_0_0>() -> @out τ_0_0 +/// %get1 = apply %get() : $@convention(thin) <τ_0_0>() -> @out τ_0_0 +/// cond_br undef, bb2, bb1 +/// +/// bb1: +/// destroy_value %get0 : $T +/// br bb3(%get1 : $T) +/// +/// bb2: +/// destroy_value %get1 : $T +/// br bb3(%get0 : $T) +/// +/// bb3(%phi : @owned $T): +/// return %phi : $T +/// +/// TODO: Liveness is currently recorded at the block level. This could be +/// extended to handle operand with nonoverlapping liveness in the same +/// block. In this case, %get0 and %get1 could both be coalesced with a bit of +/// extra book-keeping: +/// +/// bb0: +/// %get0 = apply %get() : $@convention(thin) <τ_0_0>() -> @out τ_0_0 +/// +/// bb1: +/// destroy_value %get0 : $T +/// %get1 = apply %get() : $@convention(thin) <τ_0_0>() -> @out τ_0_0 +/// br bb3(%get1 : $T) +/// +/// bb2: +/// br bb3(%get0 : $T) +/// +/// bb3(%phi : @owned $T): +/// /// TODO: This does not yet coalesce the copy_value instructions that produce a /// phi operand. Such a copy implies that both the operand and phi value are /// live past the phi. Nonetheleses, they could still be coalesced as /// follows... First coalesce all direct phi operands. Then transitively -/// coalesce copies by redoing the liveness traversal from the uses of the copy. +/// coalesce copies by checking if the copy's source is coalescable, then +/// redoing the liveness traversal from the uses of the copy. /// /// TODO: This approach uses on-the-fly liveness discovery for all incoming /// values at once. It requires no storage for liveness. Hopefully this is From 752173c1af2a24c5324f818b5d75a07e41c8f574 Mon Sep 17 00:00:00 2001 From: Andrew Trick Date: Wed, 9 Mar 2022 22:00:22 -0800 Subject: [PATCH 15/29] [SIL-opaque] minor NFC review feedback --- lib/SILOptimizer/Mandatory/AddressLowering.cpp | 14 ++++---------- 1 file changed, 4 insertions(+), 10 deletions(-) diff --git a/lib/SILOptimizer/Mandatory/AddressLowering.cpp b/lib/SILOptimizer/Mandatory/AddressLowering.cpp index d80978aae9d99..8f4db85ea42f3 100644 --- a/lib/SILOptimizer/Mandatory/AddressLowering.cpp +++ b/lib/SILOptimizer/Mandatory/AddressLowering.cpp @@ -1092,8 +1092,7 @@ checkStorageDominates(AllocStackInst *allocInst, void OpaqueStorageAllocation::allocatePhi(PhiValue phi) { // Coalesces phi operand storage with the phi storage. The algorithm processes - // all incoming values at once, so it is is run when visiting the block - // argument. + // all incoming values at once, so it is run when visiting the block argument. // // The phi operand projections are computed first to give them priority. Then // we determine if the phi itself can share storage with one of its users. @@ -1143,17 +1142,12 @@ createStackAllocation(SILValue value) { assert(value.getOwnershipKind() != OwnershipKind::Guaranteed && "creating storage for a guaranteed value implies a copy"); -#ifndef NDEBUG // Instructions that produce an opened type never reach here because they // have guaranteed ownership--they project their storage. We reach this // point after the opened value has been copied. 
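  // For example (a sketch; the opened-archetype UUID is elided):
  //
  //   %opened = open_existential_value %exis : $P to $@opened("...") P
  //   %copy = copy_value %opened : $@opened("...") P
  //
  // %opened is guaranteed and projects the storage of %exis; only the owned
  // %copy reaches this point and is given its own allocation.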
- if (auto *defInst = value->getDefiningInstruction()) { - if (auto *singleValue = dyn_cast(defInst)) { - assert(!cast(defInst)->getDefinedOpenedArchetype() - && "owned open_existential is unsupported"); - } - } -#endif + assert((!isa(value) + || !cast(value)->getDefinedOpenedArchetype()) + && "owned open_existential is unsupported"); SILType allocTy = value->getType(); From 04f4bcd2187685863d3362bfd3fa499b845c15d8 Mon Sep 17 00:00:00 2001 From: Andrew Trick Date: Wed, 9 Mar 2022 22:16:08 -0800 Subject: [PATCH 16/29] [SIL-opaque] rename materialization functions. --- .../Mandatory/AddressLowering.cpp | 37 +++++++++++-------- 1 file changed, 22 insertions(+), 15 deletions(-) diff --git a/lib/SILOptimizer/Mandatory/AddressLowering.cpp b/lib/SILOptimizer/Mandatory/AddressLowering.cpp index 8f4db85ea42f3..99fc3f8bcef1f 100644 --- a/lib/SILOptimizer/Mandatory/AddressLowering.cpp +++ b/lib/SILOptimizer/Mandatory/AddressLowering.cpp @@ -1231,7 +1231,7 @@ class AddressMaterialization { return storage.storageAddress; if (storage.isUseProjection) { - materializeUseProjectionStorage(storage, /*intoPhiOperand*/ false); + recursivelyMaterializeStorage(storage, /*intoPhiOperand*/ false); } else { assert(storage.isDefProjection); storage.storageAddress = materializeDefProjection(origValue); @@ -1241,8 +1241,8 @@ class AddressMaterialization { void initializeOperand(Operand *operand); - SILValue materializeUseProjectionStorage(ValueStorage &storage, - bool intoPhiOperand); + SILValue recursivelyMaterializeStorage(ValueStorage &storage, + bool intoPhiOperand); SILValue materializeDefProjection(SILValue origValue); @@ -1257,8 +1257,8 @@ class AddressMaterialization { SILValue materializeComposingUser(SingleValueInstruction *user, bool intoPhiOperand) { - return materializeUseProjectionStorage( - pass.valueStorageMap.getStorage(user), intoPhiOperand); + return recursivelyMaterializeStorage(pass.valueStorageMap.getStorage(user), + intoPhiOperand); } }; } // anonymous namespace @@ -1290,19 +1290,26 @@ void AddressMaterialization::initializeOperand(Operand *operand) { StoreOwnershipQualifier::Init); } -// Recursively materialize the address for storage at the point that a use -// projects into it via either a composing-use (struct, tuple, enum) or phi -// projection. This only materializes the address that the operands project -// into. It does not materialize the storage for the result. e.g. it -// materializes init_enum_data_addr, not inject_enum_addr. +// Recursively materialize the address for storage at the point that an operand +// may project into it via either a composing-use (struct, tuple, enum) or phi +// projection. +// +// Precondition: \p storage is not a def-projection. // // If \p intoPhiOperand is true, this materializes the address in the path that -// reaches a phi operand, not the phi block itself. +// reaches a phi operand, not the phi block itself. Do not map the storage onto +// the materialized address. // // If \p intoPhiOperand is false, then the materialized address is guaranteed to // domaninate the composing user. Map the user onto this address to avoid // rematerialization. -SILValue AddressMaterialization::materializeUseProjectionStorage( +// +// Note: This only materializes the address for the purpose of projection an +// operand into the storage. It does not materialize the final address of +// storage after materializing the result. In particular, it materializes +// init_enum_data_addr, but not inject_enum_addr. 
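+//
+// For example, for an enum with an address-only payload (a sketch, reusing the
+// Optional<T> declared in the tests; %enum_addr names the storage materialized
+// for the enum value):
+//
+//   %payload_addr = init_enum_data_addr %enum_addr : $*Optional<T>, #Optional.some!enumelt
+//   // ... the payload operand projects into %payload_addr ...
+//   inject_enum_addr %enum_addr : $*Optional<T>, #Optional.some!enumelt
+//
+// Only the init_enum_data_addr belongs to this step; the inject_enum_addr is
+// emitted when the enum definition itself is rewritten.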
+// +SILValue AddressMaterialization::recursivelyMaterializeStorage( ValueStorage &storage, bool intoPhiOperand = false) { // If this storage is already materialized, then simply return its // address. This not only avoids redundant projections, but is necessary for @@ -1331,7 +1338,7 @@ SILValue AddressMaterialization::materializeUseProjectionStorage( return recordAddress(useStorage.storage.storageAddress); } if (storage.isPhiProjection()) { - return recordAddress(materializeUseProjectionStorage( + return recordAddress(recursivelyMaterializeStorage( pass.valueStorageMap.getProjectedStorage(storage).storage, /*intoPhiOperand*/ true)); } @@ -1566,8 +1573,8 @@ void PhiRewriter::materializeOperand(PhiOperand phiOper) { auto &phiStorage = pass.valueStorageMap.getStorage(phiOper.getValue()); SILValue phiAddress = - addrMat.materializeUseProjectionStorage(phiStorage, - /*intoPhiOperand*/ true); + addrMat.recursivelyMaterializeStorage(phiStorage, + /*intoPhiOperand*/ true); if (!movePos.foundAntiDependenceCycle) { createPhiMove(builder, phiOperAddress, phiAddress); From 906bee318ea1a0261040adbbeb109fd893883f92 Mon Sep 17 00:00:00 2001 From: Andrew Trick Date: Wed, 9 Mar 2022 22:27:11 -0800 Subject: [PATCH 17/29] [SIL-opaque] rename initializeComposingUse --- lib/SILOptimizer/Mandatory/AddressLowering.cpp | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/lib/SILOptimizer/Mandatory/AddressLowering.cpp b/lib/SILOptimizer/Mandatory/AddressLowering.cpp index 99fc3f8bcef1f..f82a66fce22da 100644 --- a/lib/SILOptimizer/Mandatory/AddressLowering.cpp +++ b/lib/SILOptimizer/Mandatory/AddressLowering.cpp @@ -1239,7 +1239,7 @@ class AddressMaterialization { return storage.storageAddress; } - void initializeOperand(Operand *operand); + void initializeComposingUse(Operand *operand); SILValue recursivelyMaterializeStorage(ValueStorage &storage, bool intoPhiOperand); @@ -1269,7 +1269,7 @@ class AddressMaterialization { /// /// If the operand projects into its use, then the memory was already /// initialized when visiting the use. -void AddressMaterialization::initializeOperand(Operand *operand) { +void AddressMaterialization::initializeComposingUse(Operand *operand) { SILValue def = operand->get(); if (def->getType().isAddressOnly(*pass.function)) { ValueStorage &storage = pass.valueStorageMap.getStorage(def); @@ -2903,7 +2903,7 @@ class DefRewriter : SILInstructionVisitor { void visitEnumInst(EnumInst *enumInst) { if (enumInst->hasOperand()) { // Handle operands here because loadable operands must also be copied. - addrMat.initializeOperand(&enumInst->getOperandRef()); + addrMat.initializeComposingUse(&enumInst->getOperandRef()); } SILValue enumAddr = addrMat.materializeAddress(enumInst); @@ -2916,7 +2916,7 @@ class DefRewriter : SILInstructionVisitor { InitExistentialValueInst *initExistentialValue) { // Initialize memory for the operand which may be opaque or loadable. - addrMat.initializeOperand(&initExistentialValue->getOperandRef()); + addrMat.initializeComposingUse(&initExistentialValue->getOperandRef()); } // Project an opaque value out of a box-type existential. @@ -2948,7 +2948,7 @@ class DefRewriter : SILInstructionVisitor { // For each element, initialize the operand's memory. Some struct elements // may be loadable types. for (Operand &operand : structInst->getAllOperands()) - addrMat.initializeOperand(&operand); + addrMat.initializeComposingUse(&operand); } // Define an opaque tuple. 
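To illustrate the effect of these rewrites, a sketch of lowering a composed
aggregate (assuming a tuple with one address-only and one trivial element;
%tup_addr names the storage materialized for the tuple and is illustrative):

  // Before lowering:
  %elt = apply %f() : $@convention(thin) () -> @out T
  %tup = tuple (%elt : $T, %i : $Builtin.Int64)

  // After lowering, the operands initialize the tuple's storage directly:
  %elt_addr = tuple_element_addr %tup_addr : $*(T, Builtin.Int64), 0
  %void = apply %f(%elt_addr) : $@convention(thin) () -> @out T
  %i_addr = tuple_element_addr %tup_addr : $*(T, Builtin.Int64), 1
  store %i to [trivial] %i_addr : $*Builtin.Int64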
@@ -2956,7 +2956,7 @@ class DefRewriter : SILInstructionVisitor { // For each element, initialize the operand's memory. Some tuple elements // may be loadable types. for (Operand &operand : tupleInst->getAllOperands()) - addrMat.initializeOperand(&operand); + addrMat.initializeComposingUse(&operand); } }; } // end anonymous namespace From 97eec75932cda1d33dfd2d4d3575623f1896fa3c Mon Sep 17 00:00:00 2001 From: Andrew Trick Date: Sat, 5 Mar 2022 17:45:02 -0800 Subject: [PATCH 18/29] [SIL-opaque] Removed [Unconditional]CheckedCastValue --- include/swift/SIL/DynamicCasts.h | 54 ------------ include/swift/SIL/SILBuilder.h | 22 ----- include/swift/SIL/SILCloner.h | 34 -------- include/swift/SIL/SILInstruction.h | 73 ---------------- include/swift/SIL/SILNode.h | 2 - include/swift/SIL/SILNodes.def | 8 +- .../swift/SILOptimizer/Utils/CastOptimizer.h | 8 -- include/swift/SILOptimizer/Utils/SCCVisitor.h | 1 - lib/IRGen/IRGenSIL.cpp | 13 --- lib/SIL/IR/OperandOwnership.cpp | 2 - lib/SIL/IR/SILArgument.cpp | 2 - lib/SIL/IR/SILFunction.cpp | 3 - lib/SIL/IR/SILInstruction.cpp | 2 - lib/SIL/IR/SILInstructions.cpp | 36 -------- lib/SIL/IR/SILPrinter.cpp | 13 --- lib/SIL/IR/ValueOwnership.cpp | 1 - lib/SIL/Parser/ParseSIL.cpp | 28 ------ lib/SIL/Utils/BasicBlockUtils.cpp | 10 --- lib/SIL/Utils/DynamicCasts.cpp | 5 ++ lib/SIL/Utils/InstructionUtils.cpp | 5 -- lib/SIL/Utils/MemAccessUtils.cpp | 3 - lib/SIL/Verifier/SILVerifier.cpp | 26 ------ lib/SILGen/SILGenBuilder.cpp | 25 +----- lib/SILGen/SILGenBuilder.h | 16 ---- lib/SILGen/SILGenDynamicCast.cpp | 45 +++------- lib/SILGen/SILGenLazyConformance.cpp | 7 -- lib/SILOptimizer/ARC/ARCSequenceOptUtils.cpp | 1 - .../Differentiation/VJPCloner.cpp | 14 --- .../Mandatory/AddressLowering.cpp | 16 +--- .../Mandatory/DiagnoseInfiniteRecursion.cpp | 3 +- .../Mandatory/Differentiation.cpp | 1 - .../Mandatory/PredictableMemOpt.cpp | 3 +- .../Transforms/DeadCodeElimination.cpp | 2 - lib/SILOptimizer/Transforms/SimplifyCFG.cpp | 48 ---------- .../UtilityPasses/SerializeSILPass.cpp | 2 - lib/SILOptimizer/Utils/CFGOptUtils.cpp | 17 ---- lib/SILOptimizer/Utils/CastOptimizer.cpp | 87 ------------------- lib/SILOptimizer/Utils/SILInliner.cpp | 2 - lib/Serialization/DeserializeSIL.cpp | 30 ------- lib/Serialization/SerializeSIL.cpp | 37 -------- test/SIL/Parser/opaque_values_parse.sil | 30 ------- .../Serialization/opaque_values_serialize.sil | 30 ------- .../opaque_use_verifier.sil | 20 ----- test/SILOptimizer/latecodemotion.sil | 16 ---- test/SILOptimizer/side-effect.sil | 9 -- 45 files changed, 24 insertions(+), 788 deletions(-) diff --git a/include/swift/SIL/DynamicCasts.h b/include/swift/SIL/DynamicCasts.h index 8b9e0a562f3b6..161011565abde 100644 --- a/include/swift/SIL/DynamicCasts.h +++ b/include/swift/SIL/DynamicCasts.h @@ -187,14 +187,10 @@ struct SILDynamicCastInst { // checked_cast_value_br yet. Should we ever support it, please // review this code. 
case SILDynamicCastKind::CheckedCastBranchInst: - case SILDynamicCastKind::CheckedCastValueBranchInst: - return CastConsumptionKind::CopyOnSuccess; case SILDynamicCastKind::UnconditionalCheckedCastAddrInst: return CastConsumptionKind::TakeAlways; case SILDynamicCastKind::UnconditionalCheckedCastInst: return CastConsumptionKind::CopyOnSuccess; - case SILDynamicCastKind::UnconditionalCheckedCastValueInst: - llvm_unreachable("unsupported"); } llvm_unreachable("covered switch"); } @@ -203,10 +199,8 @@ struct SILDynamicCastInst { switch (getKind()) { case SILDynamicCastKind::CheckedCastAddrBranchInst: case SILDynamicCastKind::CheckedCastBranchInst: - case SILDynamicCastKind::CheckedCastValueBranchInst: case SILDynamicCastKind::UnconditionalCheckedCastAddrInst: case SILDynamicCastKind::UnconditionalCheckedCastInst: - case SILDynamicCastKind::UnconditionalCheckedCastValueInst: llvm_unreachable("unsupported"); } } @@ -217,13 +211,9 @@ struct SILDynamicCastInst { return cast(inst)->getSuccessBB(); case SILDynamicCastKind::CheckedCastBranchInst: return cast(inst)->getSuccessBB(); - case SILDynamicCastKind::CheckedCastValueBranchInst: - return cast(inst)->getSuccessBB(); case SILDynamicCastKind::UnconditionalCheckedCastAddrInst: case SILDynamicCastKind::UnconditionalCheckedCastInst: return nullptr; - case SILDynamicCastKind::UnconditionalCheckedCastValueInst: - llvm_unreachable("unsupported"); } llvm_unreachable("covered switch"); } @@ -234,13 +224,9 @@ struct SILDynamicCastInst { llvm_unreachable("unsupported"); case SILDynamicCastKind::CheckedCastBranchInst: return cast(inst)->getTrueBBCount(); - case SILDynamicCastKind::CheckedCastValueBranchInst: - llvm_unreachable("unsupported"); case SILDynamicCastKind::UnconditionalCheckedCastAddrInst: case SILDynamicCastKind::UnconditionalCheckedCastInst: return None; - case SILDynamicCastKind::UnconditionalCheckedCastValueInst: - llvm_unreachable("unsupported"); } llvm_unreachable("covered switch"); } @@ -255,13 +241,9 @@ struct SILDynamicCastInst { return cast(inst)->getFailureBB(); case SILDynamicCastKind::CheckedCastBranchInst: return cast(inst)->getFailureBB(); - case SILDynamicCastKind::CheckedCastValueBranchInst: - return cast(inst)->getFailureBB(); case SILDynamicCastKind::UnconditionalCheckedCastAddrInst: case SILDynamicCastKind::UnconditionalCheckedCastInst: return nullptr; - case SILDynamicCastKind::UnconditionalCheckedCastValueInst: - llvm_unreachable("unsupported"); } llvm_unreachable("covered switch"); } @@ -272,13 +254,9 @@ struct SILDynamicCastInst { llvm_unreachable("unsupported"); case SILDynamicCastKind::CheckedCastBranchInst: return cast(inst)->getFalseBBCount(); - case SILDynamicCastKind::CheckedCastValueBranchInst: - llvm_unreachable("unsupported"); case SILDynamicCastKind::UnconditionalCheckedCastAddrInst: case SILDynamicCastKind::UnconditionalCheckedCastInst: return None; - case SILDynamicCastKind::UnconditionalCheckedCastValueInst: - llvm_unreachable("unsupported"); } llvm_unreachable("covered switch"); } @@ -293,14 +271,10 @@ struct SILDynamicCastInst { return cast(inst)->getSrc(); case SILDynamicCastKind::CheckedCastBranchInst: return cast(inst)->getOperand(); - case SILDynamicCastKind::CheckedCastValueBranchInst: - return cast(inst)->getOperand(); case SILDynamicCastKind::UnconditionalCheckedCastAddrInst: return cast(inst)->getSrc(); case SILDynamicCastKind::UnconditionalCheckedCastInst: return cast(inst)->getOperand(); - case SILDynamicCastKind::UnconditionalCheckedCastValueInst: - llvm_unreachable("unsupported"); } 
llvm_unreachable("covered switch"); } @@ -311,7 +285,6 @@ struct SILDynamicCastInst { case SILDynamicCastKind::CheckedCastAddrBranchInst: return cast(inst)->getDest(); case SILDynamicCastKind::CheckedCastBranchInst: - case SILDynamicCastKind::CheckedCastValueBranchInst: // TODO: Shouldn't this return getSuccessBlock()->getArgument(0)? return SILValue(); case SILDynamicCastKind::UnconditionalCheckedCastAddrInst: @@ -321,8 +294,6 @@ struct SILDynamicCastInst { // // return cast(inst); return SILValue(); - case SILDynamicCastKind::UnconditionalCheckedCastValueInst: - llvm_unreachable("unimplemented"); } llvm_unreachable("covered switch"); } @@ -333,14 +304,10 @@ struct SILDynamicCastInst { return cast(inst)->getSourceFormalType(); case SILDynamicCastKind::CheckedCastBranchInst: return cast(inst)->getSourceFormalType(); - case SILDynamicCastKind::CheckedCastValueBranchInst: - return cast(inst)->getSourceFormalType(); case SILDynamicCastKind::UnconditionalCheckedCastAddrInst: return cast(inst)->getSourceFormalType(); case SILDynamicCastKind::UnconditionalCheckedCastInst: return cast(inst)->getSourceFormalType(); - case SILDynamicCastKind::UnconditionalCheckedCastValueInst: - return cast(inst)->getSourceFormalType(); } llvm_unreachable("covered switch"); } @@ -351,14 +318,10 @@ struct SILDynamicCastInst { return cast(inst)->getSourceLoweredType(); case SILDynamicCastKind::CheckedCastBranchInst: return cast(inst)->getSourceLoweredType(); - case SILDynamicCastKind::CheckedCastValueBranchInst: - return cast(inst)->getSourceLoweredType(); case SILDynamicCastKind::UnconditionalCheckedCastAddrInst: return cast(inst)->getSourceLoweredType(); case SILDynamicCastKind::UnconditionalCheckedCastInst: return cast(inst)->getSourceLoweredType(); - case SILDynamicCastKind::UnconditionalCheckedCastValueInst: - return cast(inst)->getSourceLoweredType(); } llvm_unreachable("covered switch"); } @@ -369,14 +332,10 @@ struct SILDynamicCastInst { return cast(inst)->getTargetFormalType(); case SILDynamicCastKind::CheckedCastBranchInst: return cast(inst)->getTargetFormalType(); - case SILDynamicCastKind::CheckedCastValueBranchInst: - return cast(inst)->getTargetFormalType(); case SILDynamicCastKind::UnconditionalCheckedCastAddrInst: return cast(inst)->getTargetFormalType(); case SILDynamicCastKind::UnconditionalCheckedCastInst: return cast(inst)->getTargetFormalType(); - case SILDynamicCastKind::UnconditionalCheckedCastValueInst: - return cast(inst)->getTargetFormalType(); } llvm_unreachable("covered switch"); } @@ -387,28 +346,21 @@ struct SILDynamicCastInst { return cast(inst)->getDest()->getType(); case SILDynamicCastKind::CheckedCastBranchInst: return cast(inst)->getTargetLoweredType(); - case SILDynamicCastKind::CheckedCastValueBranchInst: - return cast(inst)->getTargetLoweredType(); case SILDynamicCastKind::UnconditionalCheckedCastAddrInst: return cast(inst)->getDest()->getType(); case SILDynamicCastKind::UnconditionalCheckedCastInst: return cast(inst)->getTargetLoweredType(); - case SILDynamicCastKind::UnconditionalCheckedCastValueInst: - return cast(inst)->getTargetLoweredType(); } llvm_unreachable("covered switch"); } bool isSourceTypeExact() const { switch (getKind()) { - case SILDynamicCastKind::CheckedCastValueBranchInst: case SILDynamicCastKind::CheckedCastBranchInst: case SILDynamicCastKind::CheckedCastAddrBranchInst: case SILDynamicCastKind::UnconditionalCheckedCastAddrInst: case SILDynamicCastKind::UnconditionalCheckedCastInst: return isa(getSource()); - case 
SILDynamicCastKind::UnconditionalCheckedCastValueInst: - llvm_unreachable("unsupported"); } llvm_unreachable("covered switch"); } @@ -476,15 +428,9 @@ struct SILDynamicCastInst { auto f = classifyFeasibility(false /*allow wmo*/); return f == DynamicCastFeasibility::MaySucceed; } - case SILDynamicCastKind::CheckedCastValueBranchInst: { - auto f = classifyFeasibility(false /*allow wmo opts*/); - return f == DynamicCastFeasibility::MaySucceed; - } case SILDynamicCastKind::UnconditionalCheckedCastAddrInst: case SILDynamicCastKind::UnconditionalCheckedCastInst: return false; - case SILDynamicCastKind::UnconditionalCheckedCastValueInst: - llvm_unreachable("unsupported"); } llvm_unreachable("covered switch"); } diff --git a/include/swift/SIL/SILBuilder.h b/include/swift/SIL/SILBuilder.h index 5da1ae39cae7c..ae594dac9fc08 100644 --- a/include/swift/SIL/SILBuilder.h +++ b/include/swift/SIL/SILBuilder.h @@ -1314,16 +1314,6 @@ class SILBuilder { dest, targetFormalType, getFunction())); } - UnconditionalCheckedCastValueInst * - createUnconditionalCheckedCastValue(SILLocation Loc, - SILValue op, CanType srcFormalTy, - SILType destLoweredTy, - CanType destFormalTy) { - return insert(UnconditionalCheckedCastValueInst::create( - getSILDebugLocation(Loc), op, srcFormalTy, - destLoweredTy, destFormalTy, getFunction())); - } - RetainValueInst *createRetainValue(SILLocation Loc, SILValue operand, Atomicity atomicity) { assert(!hasOwnership()); @@ -2324,18 +2314,6 @@ class SILBuilder { ProfileCounter Target1Count = ProfileCounter(), ProfileCounter Target2Count = ProfileCounter()); - CheckedCastValueBranchInst * - createCheckedCastValueBranch(SILLocation Loc, - SILValue op, CanType srcFormalTy, - SILType destLoweredTy, - CanType destFormalTy, - SILBasicBlock *successBB, - SILBasicBlock *failureBB) { - return insertTerminator(CheckedCastValueBranchInst::create( - getSILDebugLocation(Loc), op, srcFormalTy, - destLoweredTy, destFormalTy, successBB, failureBB, getFunction())); - } - CheckedCastAddrBranchInst * createCheckedCastAddrBranch(SILLocation Loc, CastConsumptionKind consumption, SILValue src, CanType sourceFormalType, diff --git a/include/swift/SIL/SILCloner.h b/include/swift/SIL/SILCloner.h index 5f43f2de11d42..aa64fb4aa4f3a 100644 --- a/include/swift/SIL/SILCloner.h +++ b/include/swift/SIL/SILCloner.h @@ -1706,24 +1706,6 @@ SILCloner::visitUnconditionalCheckedCastAddrInst( OpLoc, SrcValue, SrcType, DestValue, TargetType)); } -template -void SILCloner::visitUnconditionalCheckedCastValueInst( - UnconditionalCheckedCastValueInst *Inst) { - SILLocation OpLoc = getOpLocation(Inst->getLoc()); - SILValue OpValue = getOpValue(Inst->getOperand()); - CanType SrcFormalType = getOpASTType(Inst->getSourceFormalType()); - SILType OpLoweredType = getOpType(Inst->getTargetLoweredType()); - CanType OpFormalType = getOpASTType(Inst->getTargetFormalType()); - getBuilder().setCurrentDebugScope(getOpScope(Inst->getDebugScope())); - recordClonedInstruction( - Inst, - getBuilder().createUnconditionalCheckedCastValue(OpLoc, - OpValue, - SrcFormalType, - OpLoweredType, - OpFormalType)); -} - template void SILCloner::visitRetainValueInst(RetainValueInst *Inst) { getBuilder().setCurrentDebugScope(getOpScope(Inst->getDebugScope())); @@ -2737,22 +2719,6 @@ SILCloner::visitCheckedCastBranchInst(CheckedCastBranchInst *Inst) { Inst->getForwardingOwnershipKind(), TrueCount, FalseCount)); } -template -void SILCloner::visitCheckedCastValueBranchInst( - CheckedCastValueBranchInst *Inst) { - SILBasicBlock *OpSuccBB = 
getOpBasicBlock(Inst->getSuccessBB()); - SILBasicBlock *OpFailBB = getOpBasicBlock(Inst->getFailureBB()); - getBuilder().setCurrentDebugScope(getOpScope(Inst->getDebugScope())); - recordClonedInstruction( - Inst, getBuilder().createCheckedCastValueBranch( - getOpLocation(Inst->getLoc()), - getOpValue(Inst->getOperand()), - getOpASTType(Inst->getSourceFormalType()), - getOpType(Inst->getTargetLoweredType()), - getOpASTType(Inst->getTargetFormalType()), - OpSuccBB, OpFailBB)); -} - template void SILCloner::visitCheckedCastAddrBranchInst( CheckedCastAddrBranchInst *Inst) { diff --git a/include/swift/SIL/SILInstruction.h b/include/swift/SIL/SILInstruction.h index ab4fc6fe38176..81c148e031b40 100644 --- a/include/swift/SIL/SILInstruction.h +++ b/include/swift/SIL/SILInstruction.h @@ -5664,39 +5664,6 @@ class UnconditionalCheckedCastInst final SILType getTargetLoweredType() const { return getType(); } }; -/// Perform an unconditional checked cast that aborts if the cast fails. -/// The result of the checked cast is left in the destination. -class UnconditionalCheckedCastValueInst final - : public UnaryInstructionWithTypeDependentOperandsBase< - SILInstructionKind::UnconditionalCheckedCastValueInst, - UnconditionalCheckedCastValueInst, ConversionInst> { - CanType SourceFormalTy; - CanType DestFormalTy; - friend SILBuilder; - - UnconditionalCheckedCastValueInst(SILDebugLocation DebugLoc, - SILValue Operand, CanType SourceFormalTy, - ArrayRef TypeDependentOperands, - SILType DestLoweredTy, CanType DestFormalTy) - : UnaryInstructionWithTypeDependentOperandsBase( - DebugLoc, Operand, TypeDependentOperands, - DestLoweredTy), - SourceFormalTy(SourceFormalTy), - DestFormalTy(DestFormalTy) {} - - static UnconditionalCheckedCastValueInst * - create(SILDebugLocation DebugLoc, - SILValue Operand, CanType SourceFormalTy, - SILType DestLoweredTy, CanType DestFormalTy, SILFunction &F); - -public: - SILType getSourceLoweredType() const { return getOperand()->getType(); } - CanType getSourceFormalType() const { return SourceFormalTy; } - - SILType getTargetLoweredType() const { return getType(); } - CanType getTargetFormalType() const { return DestFormalTy; } -}; - /// StructInst - Represents a constructed loadable struct. class StructInst final : public InstructionBaseWithTrailingOperands< SILInstructionKind::StructInst, StructInst, @@ -8132,7 +8099,6 @@ class TermInst : public NonValueInstruction { case TermKind::SwitchEnumAddrInst: case TermKind::DynamicMethodBranchInst: case TermKind::CheckedCastAddrBranchInst: - case TermKind::CheckedCastValueBranchInst: case TermKind::AwaitAsyncContinuationInst: return false; case TermKind::SwitchEnumInst: @@ -9159,45 +9125,6 @@ class CheckedCastBranchInst final CanType getTargetFormalType() const { return DestFormalTy; } }; -/// Perform a checked cast operation and branch on whether the cast succeeds. -/// The success branch destination block receives the cast result as a BB -/// argument. 
-class CheckedCastValueBranchInst final - : public UnaryInstructionWithTypeDependentOperandsBase< - SILInstructionKind::CheckedCastValueBranchInst, - CheckedCastValueBranchInst, CastBranchInstBase> { - friend SILBuilder; - - CanType SourceFormalTy; - SILType DestLoweredTy; - CanType DestFormalTy; - - CheckedCastValueBranchInst(SILDebugLocation DebugLoc, SILValue Operand, - CanType SourceFormalTy, - ArrayRef TypeDependentOperands, - SILType DestLoweredTy, CanType DestFormalTy, - SILBasicBlock *SuccessBB, SILBasicBlock *FailureBB) - : UnaryInstructionWithTypeDependentOperandsBase( - DebugLoc, Operand, TypeDependentOperands, SuccessBB, FailureBB, - ProfileCounter(), ProfileCounter()), - SourceFormalTy(SourceFormalTy), DestLoweredTy(DestLoweredTy), - DestFormalTy(DestFormalTy) {} - - static CheckedCastValueBranchInst * - create(SILDebugLocation DebugLoc, - SILValue Operand, CanType SourceFormalTy, - SILType DestLoweredTy, CanType DestFormalTy, - SILBasicBlock *SuccessBB, SILBasicBlock *FailureBB, - SILFunction &F); - -public: - SILType getSourceLoweredType() const { return getOperand()->getType(); } - CanType getSourceFormalType() const { return SourceFormalTy; } - - SILType getTargetLoweredType() const { return DestLoweredTy; } - CanType getTargetFormalType() const { return DestFormalTy; } -}; - /// Perform a checked cast operation and branch on whether the cast succeeds. /// The result of the checked cast is left in the destination address. class CheckedCastAddrBranchInst final diff --git a/include/swift/SIL/SILNode.h b/include/swift/SIL/SILNode.h index 86439cc099afa..d9a1ca77c609d 100644 --- a/include/swift/SIL/SILNode.h +++ b/include/swift/SIL/SILNode.h @@ -375,7 +375,6 @@ class alignas(8) SILNode : UIWTDOB_BITFIELD_EMPTY(UncheckedTrivialBitCastInst, ConversionInst); UIWTDOB_BITFIELD_EMPTY(UncheckedBitwiseCastInst, ConversionInst); UIWTDOB_BITFIELD_EMPTY(ThinToThickFunctionInst, ConversionInst); - UIWTDOB_BITFIELD_EMPTY(UnconditionalCheckedCastValueInst, ConversionInst); UIWTDOB_BITFIELD_EMPTY(InitExistentialAddrInst, SingleValueInstruction); UIWTDOB_BITFIELD_EMPTY(InitExistentialValueInst, SingleValueInstruction); UIWTDOB_BITFIELD_EMPTY(InitExistentialRefInst, SingleValueInstruction); @@ -383,7 +382,6 @@ class alignas(8) SILNode : SWIFT_INLINE_BITFIELD_EMPTY(TermInst, SILInstruction); UIWTDOB_BITFIELD_EMPTY(CheckedCastBranchInst, SingleValueInstruction); - UIWTDOB_BITFIELD_EMPTY(CheckedCastValueBranchInst, SingleValueInstruction); // Ensure that BranchInst bitfield does not overflow. IBWTO_BITFIELD_EMPTY(BranchInst, TermInst); diff --git a/include/swift/SIL/SILNodes.def b/include/swift/SIL/SILNodes.def index 8cf56e480a301..2c37b0bdfe471 100644 --- a/include/swift/SIL/SILNodes.def +++ b/include/swift/SIL/SILNodes.def @@ -425,10 +425,6 @@ ABSTRACT_VALUE_AND_INST(SingleValueInstruction, ValueBase, SILInstruction) ConversionInst, None, DoesNotRelease) BRIDGED_SINGLE_VALUE_INST(ObjCExistentialMetatypeToObjectInst, objc_existential_metatype_to_object, ConversionInst, None, DoesNotRelease) - // unconditional_checked_cast_value reads the source value and produces - // a new value with a potentially different representation. 
- DYNAMICCAST_SINGLE_VALUE_INST(UnconditionalCheckedCastValueInst, unconditional_checked_cast_value, - ConversionInst, MayRead, MayRelease) // unconditional_checked_cast_inst is only MayRead to prevent a subsequent // release of the cast's source from being hoisted above the cast: // retain X @@ -687,9 +683,7 @@ ABSTRACT_INST(TermInst, SILInstruction) TermInst, None, DoesNotRelease) DYNAMICCAST_TERMINATOR(CheckedCastAddrBranchInst, checked_cast_addr_br, TermInst, MayHaveSideEffects, MayRelease) - DYNAMICCAST_TERMINATOR(CheckedCastValueBranchInst, checked_cast_value_br, - TermInst, None, DoesNotRelease) - INST_RANGE(TermInst, UnreachableInst, CheckedCastValueBranchInst) + INST_RANGE(TermInst, UnreachableInst, CheckedCastAddrBranchInst) // Deallocation instructions. ABSTRACT_INST(DeallocationInst, SILInstruction) diff --git a/include/swift/SILOptimizer/Utils/CastOptimizer.h b/include/swift/SILOptimizer/Utils/CastOptimizer.h index b80938b972ac1..26b50e8125ed4 100644 --- a/include/swift/SILOptimizer/Utils/CastOptimizer.h +++ b/include/swift/SILOptimizer/Utils/CastOptimizer.h @@ -97,10 +97,6 @@ class CastOptimizer { /// Simplify checked_cast_br. It may change the control flow. SILInstruction *simplifyCheckedCastBranchInst(CheckedCastBranchInst *Inst); - /// Simplify checked_cast_value_br. It may change the control flow. - SILInstruction * - simplifyCheckedCastValueBranchInst(CheckedCastValueBranchInst *Inst); - /// Simplify checked_cast_addr_br. It may change the control flow. SILInstruction * simplifyCheckedCastAddrBranchInst(CheckedCastAddrBranchInst *Inst); @@ -108,10 +104,6 @@ class CastOptimizer { /// Optimize checked_cast_br. This cannot change the control flow. SILInstruction *optimizeCheckedCastBranchInst(CheckedCastBranchInst *Inst); - /// Optimize checked_cast_value_br. This cannot change the control flow. - SILInstruction * - optimizeCheckedCastValueBranchInst(CheckedCastValueBranchInst *Inst); - /// Optimize checked_cast_addr_br. This cannot change the control flow. 
SILInstruction * optimizeCheckedCastAddrBranchInst(CheckedCastAddrBranchInst *Inst); diff --git a/include/swift/SILOptimizer/Utils/SCCVisitor.h b/include/swift/SILOptimizer/Utils/SCCVisitor.h index 2bc658529e355..e2b4417429cdd 100644 --- a/include/swift/SILOptimizer/Utils/SCCVisitor.h +++ b/include/swift/SILOptimizer/Utils/SCCVisitor.h @@ -123,7 +123,6 @@ class SCCVisitor { case TermKind::SwitchEnumInst: case TermKind::SwitchEnumAddrInst: case TermKind::CheckedCastBranchInst: - case TermKind::CheckedCastValueBranchInst: case TermKind::CheckedCastAddrBranchInst: case TermKind::DynamicMethodBranchInst: assert(Index == 0 && "Expected argument index to always be zero!"); diff --git a/lib/IRGen/IRGenSIL.cpp b/lib/IRGen/IRGenSIL.cpp index dbcd1ece22177..5394c2b40a43e 100644 --- a/lib/IRGen/IRGenSIL.cpp +++ b/lib/IRGen/IRGenSIL.cpp @@ -1331,8 +1331,6 @@ class IRGenSILFunction : void visitObjCToThickMetatypeInst(ObjCToThickMetatypeInst *i); void visitUnconditionalCheckedCastInst(UnconditionalCheckedCastInst *i); void visitUnconditionalCheckedCastAddrInst(UnconditionalCheckedCastAddrInst *i); - void - visitUnconditionalCheckedCastValueInst(UnconditionalCheckedCastValueInst *i); void visitObjCMetatypeToObjectInst(ObjCMetatypeToObjectInst *i); void visitObjCExistentialMetatypeToObjectInst( ObjCExistentialMetatypeToObjectInst *i); @@ -1363,7 +1361,6 @@ class IRGenSILFunction : void visitSwitchEnumAddrInst(SwitchEnumAddrInst *i); void visitDynamicMethodBranchInst(DynamicMethodBranchInst *i); void visitCheckedCastBranchInst(CheckedCastBranchInst *i); - void visitCheckedCastValueBranchInst(CheckedCastValueBranchInst *i); void visitCheckedCastAddrBranchInst(CheckedCastAddrBranchInst *i); void visitGetAsyncContinuationInst(GetAsyncContinuationInst *i); @@ -6370,16 +6367,6 @@ void IRGenSILFunction::visitUnconditionalCheckedCastAddrInst( CheckedCastMode::Unconditional); } -void IRGenSILFunction::visitUnconditionalCheckedCastValueInst( - swift::UnconditionalCheckedCastValueInst *i) { - llvm_unreachable("unsupported instruction during IRGen"); -} - -void IRGenSILFunction::visitCheckedCastValueBranchInst( - swift::CheckedCastValueBranchInst *i) { - llvm_unreachable("unsupported instruction during IRGen"); -} - void IRGenSILFunction::visitCheckedCastBranchInst( swift::CheckedCastBranchInst *i) { FailableCastResult castResult; diff --git a/lib/SIL/IR/OperandOwnership.cpp b/lib/SIL/IR/OperandOwnership.cpp index 786669d0b9a98..2d9d3eab3d098 100644 --- a/lib/SIL/IR/OperandOwnership.cpp +++ b/lib/SIL/IR/OperandOwnership.cpp @@ -260,8 +260,6 @@ OPERAND_OWNERSHIP(DestroyingConsume, EndCOWMutation) OPERAND_OWNERSHIP(DestroyingConsume, MoveValue) // Instructions that move an owned value. 
-OPERAND_OWNERSHIP(ForwardingConsume, CheckedCastValueBranch)
-OPERAND_OWNERSHIP(ForwardingConsume, UnconditionalCheckedCastValue)
 OPERAND_OWNERSHIP(ForwardingConsume, InitExistentialValue)
 OPERAND_OWNERSHIP(ForwardingConsume, DeinitExistentialValue)
 OPERAND_OWNERSHIP(ForwardingConsume, MarkUninitialized)
diff --git a/lib/SIL/IR/SILArgument.cpp b/lib/SIL/IR/SILArgument.cpp
index efedffe41d178..c6c2a675feba3 100644
--- a/lib/SIL/IR/SILArgument.cpp
+++ b/lib/SIL/IR/SILArgument.cpp
@@ -253,8 +253,6 @@ getSingleTerminatorOperandForPred(const SILBasicBlock *parentBlock,
         ->getArgForDestBB(parentBlock, argIndex);
   case TermKind::CheckedCastBranchInst:
     return cast<CheckedCastBranchInst>(predTermInst)->getOperand();
-  case TermKind::CheckedCastValueBranchInst:
-    return cast<CheckedCastValueBranchInst>(predTermInst)->getOperand();
   case TermKind::SwitchEnumInst:
     return cast<SwitchEnumInst>(predTermInst)->getOperand();
   }
diff --git a/lib/SIL/IR/SILFunction.cpp b/lib/SIL/IR/SILFunction.cpp
index cfe9aa1b4e11f..d5d2072758e54 100644
--- a/lib/SIL/IR/SILFunction.cpp
+++ b/lib/SIL/IR/SILFunction.cpp
@@ -602,9 +602,6 @@ struct DOTGraphTraits<SILFunction *> : public DefaultDOTGraphTraits {
     if (auto *CCBI = dyn_cast<CheckedCastBranchInst>(Term))
       return (Succ == CCBI->getSuccessBB()) ? "T" : "F";
 
-    if (auto *CCBI = dyn_cast<CheckedCastValueBranchInst>(Term))
-      return (Succ == CCBI->getSuccessBB()) ? "T" : "F";
-
     if (auto *CCBI = dyn_cast<CheckedCastAddrBranchInst>(Term))
       return (Succ == CCBI->getSuccessBB()) ? "T" : "F";
 
diff --git a/lib/SIL/IR/SILInstruction.cpp b/lib/SIL/IR/SILInstruction.cpp
index 0b05765e085e0..5605df2dd87e3 100644
--- a/lib/SIL/IR/SILInstruction.cpp
+++ b/lib/SIL/IR/SILInstruction.cpp
@@ -1162,7 +1162,6 @@ bool SILInstruction::mayRelease() const {
     return true;
 
   case SILInstructionKind::UnconditionalCheckedCastAddrInst:
-  case SILInstructionKind::UnconditionalCheckedCastValueInst:
   case SILInstructionKind::UncheckedOwnershipConversionInst:
     return true;
 
@@ -1368,7 +1367,6 @@ bool SILInstruction::mayTrap() const {
   case SILInstructionKind::CondFailInst:
   case SILInstructionKind::UnconditionalCheckedCastInst:
   case SILInstructionKind::UnconditionalCheckedCastAddrInst:
-  case SILInstructionKind::UnconditionalCheckedCastValueInst:
     return true;
   default:
     return false;
diff --git a/lib/SIL/IR/SILInstructions.cpp b/lib/SIL/IR/SILInstructions.cpp
index 29319d6f01692..634e41901fb11 100644
--- a/lib/SIL/IR/SILInstructions.cpp
+++ b/lib/SIL/IR/SILInstructions.cpp
@@ -1546,7 +1546,6 @@ bool TermInst::isFunctionExiting() const {
   case TermKind::SwitchEnumAddrInst:
   case TermKind::DynamicMethodBranchInst:
   case TermKind::CheckedCastBranchInst:
-  case TermKind::CheckedCastValueBranchInst:
   case TermKind::CheckedCastAddrBranchInst:
   case TermKind::UnreachableInst:
   case TermKind::TryApplyInst:
@@ -1571,7 +1570,6 @@ bool TermInst::isProgramTerminating() const {
   case TermKind::SwitchEnumAddrInst:
   case TermKind::DynamicMethodBranchInst:
   case TermKind::CheckedCastBranchInst:
-  case TermKind::CheckedCastValueBranchInst:
   case TermKind::CheckedCastAddrBranchInst:
   case TermKind::ReturnInst:
   case TermKind::ThrowInst:
@@ -2279,22 +2277,6 @@ UnconditionalCheckedCastInst *UnconditionalCheckedCastInst::create(
       forwardingOwnershipKind);
 }
 
-UnconditionalCheckedCastValueInst *UnconditionalCheckedCastValueInst::create(
-    SILDebugLocation DebugLoc,
-    SILValue Operand, CanType SrcFormalTy,
-    SILType DestLoweredTy, CanType DestFormalTy, SILFunction &F) {
-  SILModule &Mod = F.getModule();
-  SmallVector<SILValue, 8> TypeDependentOperands;
-  collectTypeDependentOperands(TypeDependentOperands, F, DestFormalTy);
-  unsigned size =
-      totalSizeToAlloc<swift::Operand>(1 + TypeDependentOperands.size());
-  void *Buffer =
-      Mod.allocateInst(size, alignof(UnconditionalCheckedCastValueInst));
-  return ::new (Buffer) UnconditionalCheckedCastValueInst(
-      DebugLoc, Operand, SrcFormalTy, TypeDependentOperands,
-      DestLoweredTy, DestFormalTy);
-}
-
 CheckedCastBranchInst *CheckedCastBranchInst::create(
     SILDebugLocation DebugLoc, bool IsExact, SILValue Operand,
     SILType DestLoweredTy, CanType DestFormalTy, SILBasicBlock *SuccessBB,
@@ -2313,24 +2295,6 @@ CheckedCastBranchInst *CheckedCastBranchInst::create(
       forwardingOwnershipKind);
 }
 
-CheckedCastValueBranchInst *
-CheckedCastValueBranchInst::create(SILDebugLocation DebugLoc,
-                                   SILValue Operand, CanType SrcFormalTy,
-                                   SILType DestLoweredTy, CanType DestFormalTy,
-                                   SILBasicBlock *SuccessBB, SILBasicBlock *FailureBB,
-                                   SILFunction &F) {
-  SILModule &Mod = F.getModule();
-  SmallVector<SILValue, 8> TypeDependentOperands;
-  collectTypeDependentOperands(TypeDependentOperands, F, DestFormalTy);
-  unsigned size =
-      totalSizeToAlloc<swift::Operand>(1 + TypeDependentOperands.size());
-  void *Buffer = Mod.allocateInst(size, alignof(CheckedCastValueBranchInst));
-  return ::new (Buffer) CheckedCastValueBranchInst(
-      DebugLoc, Operand, SrcFormalTy, TypeDependentOperands,
-      DestLoweredTy, DestFormalTy,
-      SuccessBB, FailureBB);
-}
-
 MetatypeInst *MetatypeInst::create(SILDebugLocation Loc, SILType Ty,
                                    SILFunction *F) {
   SILModule &Mod = F->getModule();
diff --git a/lib/SIL/IR/SILPrinter.cpp b/lib/SIL/IR/SILPrinter.cpp
index eab187d728645..a1e497a8f778b 100644
--- a/lib/SIL/IR/SILPrinter.cpp
+++ b/lib/SIL/IR/SILPrinter.cpp
@@ -1747,25 +1747,12 @@ class SILPrinter : public SILInstructionVisitor<SILPrinter> {
     printForwardingOwnershipKind(CI, CI->getOperand());
   }
 
-  void visitCheckedCastValueBranchInst(CheckedCastValueBranchInst *CI) {
-    *this << CI->getSourceFormalType() << " in "
-          << getIDAndType(CI->getOperand()) << " to " << CI->getTargetFormalType()
-          << ", " << Ctx.getID(CI->getSuccessBB()) << ", "
-          << Ctx.getID(CI->getFailureBB());
-  }
-
   void visitUnconditionalCheckedCastAddrInst(UnconditionalCheckedCastAddrInst *CI) {
     *this << CI->getSourceFormalType() << " in " << getIDAndType(CI->getSrc())
           << " to " << CI->getTargetFormalType() << " in "
           << getIDAndType(CI->getDest());
   }
 
-  void visitUnconditionalCheckedCastValueInst(
-      UnconditionalCheckedCastValueInst *CI) {
-    *this << CI->getSourceFormalType() << " in " << getIDAndType(CI->getOperand())
-          << " to " << CI->getTargetFormalType();
-  }
-
   void visitCheckedCastAddrBranchInst(CheckedCastAddrBranchInst *CI) {
     *this << getCastConsumptionKindName(CI->getConsumptionKind()) << ' '
           << CI->getSourceFormalType() << " in " << getIDAndType(CI->getSrc())
diff --git a/lib/SIL/IR/ValueOwnership.cpp b/lib/SIL/IR/ValueOwnership.cpp
index 42c11d4f00ac4..ac7e4ae7b58d8 100644
--- a/lib/SIL/IR/ValueOwnership.cpp
+++ b/lib/SIL/IR/ValueOwnership.cpp
@@ -179,7 +179,6 @@ CONSTANT_OR_NONE_OWNERSHIP_INST(Guaranteed, LinearFunctionExtract)
 // be borrowed sub-objects of the parent CoW box.
CONSTANT_OR_NONE_OWNERSHIP_INST(Guaranteed, OpenExistentialValue) CONSTANT_OR_NONE_OWNERSHIP_INST(Guaranteed, OpenExistentialBoxValue) -CONSTANT_OR_NONE_OWNERSHIP_INST(Owned, UnconditionalCheckedCastValue) // Given an owned value, mark_uninitialized always forwards an owned value since // we want to make sure that all destroys of that value must come through the diff --git a/lib/SIL/Parser/ParseSIL.cpp b/lib/SIL/Parser/ParseSIL.cpp index d16c45e0d9a27..74df229ce9076 100644 --- a/lib/SIL/Parser/ParseSIL.cpp +++ b/lib/SIL/Parser/ParseSIL.cpp @@ -3849,19 +3849,6 @@ bool SILParser::parseSpecificSILInstruction(SILBuilder &B, InstLoc, SourceAddr, SourceType, DestAddr, TargetType); break; - case SILInstructionKind::UnconditionalCheckedCastValueInst: { - if (parseASTType(SourceType) || parseVerbatim("in") || - parseTypedValueRef(Val, B) || parseVerbatim("to") || - parseASTType(TargetType) || parseSILDebugLocation(InstLoc, B)) - return true; - - auto opaque = Lowering::AbstractionPattern::getOpaque(); - ResultVal = B.createUnconditionalCheckedCastValue( - InstLoc, Val, SourceType, F->getLoweredType(opaque, TargetType), - TargetType); - break; - } - case SILInstructionKind::UnconditionalCheckedCastInst: { if (parseTypedValueRef(Val, B) || parseVerbatim("to") || parseASTType(TargetType)) @@ -3902,21 +3889,6 @@ bool SILParser::parseSpecificSILInstruction(SILBuilder &B, getBBForReference(FailureBBName, FailureBBLoc), forwardingOwnership); break; } - case SILInstructionKind::CheckedCastValueBranchInst: { - if (parseASTType(SourceType) || parseVerbatim("in") - || parseTypedValueRef(Val, B) || parseVerbatim("to") - || parseASTType(TargetType) || parseConditionalBranchDestinations() - || parseSILDebugLocation(InstLoc, B)) - return true; - - auto opaque = Lowering::AbstractionPattern::getOpaque(); - ResultVal = B.createCheckedCastValueBranch( - InstLoc, Val, SourceType, F->getLoweredType(opaque, TargetType), - TargetType, getBBForReference(SuccessBBName, SuccessBBLoc), - getBBForReference(FailureBBName, FailureBBLoc)); - break; - } - case SILInstructionKind::MarkUninitializedInst: { if (P.parseToken(tok::l_square, diag::expected_tok_in_sil_instr, "[")) return true; diff --git a/lib/SIL/Utils/BasicBlockUtils.cpp b/lib/SIL/Utils/BasicBlockUtils.cpp index 3a1f46bdd532f..a0a522270cb6a 100644 --- a/lib/SIL/Utils/BasicBlockUtils.cpp +++ b/lib/SIL/Utils/BasicBlockUtils.cpp @@ -214,16 +214,6 @@ void swift::getEdgeArgs(TermInst *T, unsigned edgeIdx, SILBasicBlock *newEdgeBB, succBB->getArgument(0)->getOwnershipKind())); return; } - case SILInstructionKind::CheckedCastValueBranchInst: { - auto CBI = cast(T); - auto succBB = edgeIdx == 0 ? CBI->getSuccessBB() : CBI->getFailureBB(); - if (!succBB->getNumArguments()) - return; - args.push_back(newEdgeBB->createPhiArgument( - succBB->getArgument(0)->getType(), - succBB->getArgument(0)->getOwnershipKind())); - return; - } case SILInstructionKind::TryApplyInst: { auto *TAI = cast(T); diff --git a/lib/SIL/Utils/DynamicCasts.cpp b/lib/SIL/Utils/DynamicCasts.cpp index ae167f9c9ec57..bb8640d9988ad 100644 --- a/lib/SIL/Utils/DynamicCasts.cpp +++ b/lib/SIL/Utils/DynamicCasts.cpp @@ -1188,6 +1188,11 @@ bool swift::emitSuccessfulIndirectUnconditionalCast( /// Can the given cast be performed by the scalar checked-cast /// instructions? +/// +/// TODO: in OSSA-with-opaque-values SIL, all casts could be modeled using +/// scalar casts by setting 'OwnershipForwardingMixin::directlyForwards = +/// false'. This would simplify SIL analysis. 
Temporaries would be emitted +/// during address lowering. bool swift::canUseScalarCheckedCastInstructions(SILModule &M, CanType sourceFormalType, CanType targetFormalType) { diff --git a/lib/SIL/Utils/InstructionUtils.cpp b/lib/SIL/Utils/InstructionUtils.cpp index d445966c27e37..25733595d0e68 100644 --- a/lib/SIL/Utils/InstructionUtils.cpp +++ b/lib/SIL/Utils/InstructionUtils.cpp @@ -569,7 +569,6 @@ RuntimeEffect swift::getRuntimeEffect(SILInstruction *inst, SILType &impactType) //return RuntimeEffect::NoEffect; } - case SILInstructionKind::UnconditionalCheckedCastValueInst: case SILInstructionKind::UnconditionalCheckedCastInst: impactType = inst->getOperand(0)->getType(); return RuntimeEffect::Casting | metadataEffect(impactType) | @@ -584,10 +583,6 @@ RuntimeEffect swift::getRuntimeEffect(SILInstruction *inst, SILType &impactType) impactType = inst->getOperand(0)->getType(); return RuntimeEffect::Casting | metadataEffect(impactType) | metadataEffect(cast(inst)->getTargetLoweredType()); - case SILInstructionKind::CheckedCastValueBranchInst: - impactType = inst->getOperand(0)->getType(); - return RuntimeEffect::Casting | metadataEffect(impactType) | - metadataEffect(cast(inst)->getTargetLoweredType()); case SILInstructionKind::AllocStackInst: case SILInstructionKind::ProjectBoxInst: diff --git a/lib/SIL/Utils/MemAccessUtils.cpp b/lib/SIL/Utils/MemAccessUtils.cpp index 03b908f9c989e..dd315a49c4034 100644 --- a/lib/SIL/Utils/MemAccessUtils.cpp +++ b/lib/SIL/Utils/MemAccessUtils.cpp @@ -759,7 +759,6 @@ bool swift::isIdentityAndOwnershipPreservingRefCast( case SILInstructionKind::BridgeObjectToRefInst: return true; case SILInstructionKind::UnconditionalCheckedCastInst: - case SILInstructionKind::UnconditionalCheckedCastValueInst: return SILDynamicCastInst(svi).isRCIdentityPreserving(); // Ignore markers case SILInstructionKind::MarkUninitializedInst: @@ -2627,7 +2626,6 @@ void swift::visitAccessedAddress(SILInstruction *I, case SILInstructionKind::BeginUnpairedAccessInst: case SILInstructionKind::BindMemoryInst: case SILInstructionKind::RebindMemoryInst: - case SILInstructionKind::CheckedCastValueBranchInst: case SILInstructionKind::CondFailInst: case SILInstructionKind::CopyBlockInst: case SILInstructionKind::CopyBlockWithoutEscapingInst: @@ -2659,7 +2657,6 @@ void swift::visitAccessedAddress(SILInstruction *I, case SILInstructionKind::UncheckedOwnershipConversionInst: case SILInstructionKind::UncheckedRefCastAddrInst: case SILInstructionKind::UnconditionalCheckedCastAddrInst: - case SILInstructionKind::UnconditionalCheckedCastValueInst: case SILInstructionKind::ValueMetatypeInst: // TODO: Is this correct? case SILInstructionKind::GetAsyncContinuationInst: diff --git a/lib/SIL/Verifier/SILVerifier.cpp b/lib/SIL/Verifier/SILVerifier.cpp index 8863f2ebd4c59..d36cc258727d0 100644 --- a/lib/SIL/Verifier/SILVerifier.cpp +++ b/lib/SIL/Verifier/SILVerifier.cpp @@ -4025,13 +4025,6 @@ class SILVerifier : public SILVerifierBase { verifyOpenedArchetype(CI, CI->getType().getASTType()); } - void checkUnconditionalCheckedCastValueInst( - UnconditionalCheckedCastValueInst *CI) { - verifyCheckedCast(/*exact*/ false, CI->getOperand()->getType(), - CI->getType(), true); - verifyOpenedArchetype(CI, CI->getType().getASTType()); - } - // Make sure that opcodes handled by isRCIdentityPreservingCast cannot cast // from a trivial to a reference type. Such a cast may dynamically // instantiate a new reference-counted object. 
@@ -4135,25 +4128,6 @@ class SILVerifier : public SILVerifierBase { } } - void checkCheckedCastValueBranchInst(CheckedCastValueBranchInst *CBI) { - verifyCheckedCast(false, - CBI->getSourceLoweredType(), - CBI->getTargetLoweredType(), - true); - verifyOpenedArchetype(CBI, CBI->getTargetFormalType()); - - require(CBI->getSuccessBB()->args_size() == 1, - "success dest of checked_cast_value_br must take one argument"); - requireSameType( - CBI->getSuccessBB()->args_begin()[0]->getType(), - CBI->getTargetLoweredType(), - "success dest block argument of checked_cast_value_br must match " - "type of cast"); - require(F.hasOwnership() || CBI->getFailureBB()->args_empty(), - "failure dest of checked_cast_value_br in unqualified ownership " - "sil must take no arguments"); - } - void checkCheckedCastAddrBranchInst(CheckedCastAddrBranchInst *CCABI) { require(CCABI->getSrc()->getType().isAddress(), "checked_cast_addr_br src must be an address"); diff --git a/lib/SILGen/SILGenBuilder.cpp b/lib/SILGen/SILGenBuilder.cpp index 780e9e55033df..81ed188bdfee7 100644 --- a/lib/SILGen/SILGenBuilder.cpp +++ b/lib/SILGen/SILGenBuilder.cpp @@ -408,7 +408,8 @@ ManagedValue SILGenBuilder::createLoadTake(SILLocation loc, ManagedValue v, lowering.emitLoadOfCopy(*this, loc, v.forward(SGF), IsTake); if (lowering.isTrivial()) return ManagedValue::forUnmanaged(result); - assert(!lowering.isAddressOnly() && "cannot retain an unloadable type"); + assert((!lowering.isAddressOnly() || !SGF.silConv.useLoweredAddresses()) && + "cannot retain an unloadable type"); return SGF.emitManagedRValueWithCleanup(result, lowering); } @@ -509,16 +510,6 @@ ManagedValue SILGenBuilder::createEnum(SILLocation loc, ManagedValue payload, return SGF.emitManagedRValueWithCleanup(result); } -ManagedValue SILGenBuilder::createUnconditionalCheckedCastValue( - SILLocation loc, ManagedValue op, CanType srcFormalTy, - SILType destLoweredTy, CanType destFormalTy) { - SILValue result = - createUnconditionalCheckedCastValue(loc, op.forward(SGF), - srcFormalTy, destLoweredTy, - destFormalTy); - return SGF.emitManagedRValueWithCleanup(result); -} - ManagedValue SILGenBuilder::createUnconditionalCheckedCast( SILLocation loc, ManagedValue op, SILType destLoweredTy, CanType destFormalTy) { @@ -548,18 +539,6 @@ void SILGenBuilder::createCheckedCastBranch(SILLocation loc, bool isExact, Target1Count, Target2Count); } -void SILGenBuilder::createCheckedCastValueBranch(SILLocation loc, - ManagedValue op, - CanType srcFormalTy, - SILType destLoweredTy, - CanType destFormalTy, - SILBasicBlock *trueBlock, - SILBasicBlock *falseBlock) { - createCheckedCastValueBranch(loc, op.forward(SGF), srcFormalTy, - destLoweredTy, destFormalTy, - trueBlock, falseBlock); -} - ManagedValue SILGenBuilder::createUpcast(SILLocation loc, ManagedValue original, SILType type) { CleanupCloner cloner(*this, original); diff --git a/lib/SILGen/SILGenBuilder.h b/lib/SILGen/SILGenBuilder.h index 9b1dc72fa0a3b..6c2c255a7decd 100644 --- a/lib/SILGen/SILGenBuilder.h +++ b/lib/SILGen/SILGenBuilder.h @@ -248,13 +248,6 @@ class SILGenBuilder : public SILBuilder { const TypeLowering &lowering, SGFContext context, llvm::function_ref rvalueEmitter); - using SILBuilder::createUnconditionalCheckedCastValue; - ManagedValue - createUnconditionalCheckedCastValue(SILLocation loc, - ManagedValue op, - CanType srcFormalTy, - SILType destLoweredTy, - CanType destFormalTy); using SILBuilder::createUnconditionalCheckedCast; ManagedValue createUnconditionalCheckedCast(SILLocation loc, ManagedValue op, @@ -271,15 
+264,6 @@ class SILGenBuilder : public SILBuilder { ProfileCounter Target1Count, ProfileCounter Target2Count); - using SILBuilder::createCheckedCastValueBranch; - void createCheckedCastValueBranch(SILLocation loc, - ManagedValue op, - CanType srcFormalTy, - SILType destLoweredTy, - CanType destFormalTy, - SILBasicBlock *trueBlock, - SILBasicBlock *falseBlock); - using SILBuilder::createUpcast; ManagedValue createUpcast(SILLocation loc, ManagedValue original, SILType type); diff --git a/lib/SILGen/SILGenDynamicCast.cpp b/lib/SILGen/SILGenDynamicCast.cpp index 1db750a48cff6..13bf42c10ce9f 100644 --- a/lib/SILGen/SILGenDynamicCast.cpp +++ b/lib/SILGen/SILGenDynamicCast.cpp @@ -55,7 +55,7 @@ namespace { SGFContext ctx; std::unique_ptr temporary; - if (isOperandIndirect() && SGF.silConv.useLoweredAddresses()) { + if (isOperandIndirect()) { temporary = SGF.emitTemporary(Loc, origSourceTL); ctx = SGFContext(temporary.get()); } @@ -63,7 +63,7 @@ namespace { auto result = SGF.emitRValueAsOrig(operand, mostGeneral, origSourceTL, ctx); - if (isOperandIndirect() && SGF.silConv.useLoweredAddresses()) { + if (isOperandIndirect()) { // Force the result into the temporary if it's not already there. if (!result.isInContext()) { result.forwardInto(SGF, Loc, temporary->getAddress()); @@ -86,8 +86,7 @@ namespace { // If we're using checked_cast_addr, take the operand (which // should be an address) and build into the destination buffer. - if (Strategy == CastStrategy::Address && - SGF.silConv.useLoweredAddresses()) { + if (Strategy == CastStrategy::Address) { SILValue resultBuffer = createAbstractResultBuffer(hasAbstraction, origTargetTL, ctx); SGF.B.createUnconditionalCheckedCastAddr(Loc, @@ -98,17 +97,10 @@ namespace { abstraction, origTargetTL, ctx)); } - ManagedValue result; - if (Strategy == CastStrategy::Address) { - result = SGF.B.createUnconditionalCheckedCastValue( - Loc, operand, SourceType, - origTargetTL.getLoweredType(), TargetType); - } else { - result = SGF.B.createUnconditionalCheckedCast( - Loc, operand, - origTargetTL.getLoweredType(), TargetType); - } - + ManagedValue result = + SGF.B.createUnconditionalCheckedCast(Loc, operand, + origTargetTL.getLoweredType(), + TargetType); return RValue(SGF, Loc, TargetType, finishFromResultScalar(hasAbstraction, result, CastConsumptionKind::TakeAlways, @@ -137,21 +129,13 @@ namespace { // Emit the branch. ManagedValue operandValue; SILValue resultBuffer; - if (Strategy == CastStrategy::Address && - SGF.silConv.useLoweredAddresses()) { + if (Strategy == CastStrategy::Address) { assert(operand.getType().isAddress()); resultBuffer = createAbstractResultBuffer(hasAbstraction, origTargetTL, ctx); SGF.B.createCheckedCastAddrBranch( Loc, consumption, operand.forward(SGF), SourceType, resultBuffer, TargetType, trueBB, falseBB, TrueCount, FalseCount); - } else if (Strategy == CastStrategy::Address) { - // Opaque value mode - operandValue = std::move(operand); - SGF.B.createCheckedCastValueBranch( - Loc, operandValue, SourceType, - origTargetTL.getLoweredType(), TargetType, - trueBB, falseBB); } else { // Tolerate being passed an address here. It comes up during switch // emission. 
@@ -176,8 +160,7 @@ namespace { FullExpr scope(SGF.Cleanups, CleanupLocation(Loc)); ManagedValue result; - if (Strategy == CastStrategy::Address && - SGF.silConv.useLoweredAddresses()) { + if (Strategy == CastStrategy::Address) { result = finishFromResultBuffer(hasAbstraction, resultBuffer, abstraction, origTargetTL, ctx); } else { @@ -386,11 +369,8 @@ adjustForConditionalCheckedCastOperand(SILLocation loc, ManagedValue src, bool hasAbstraction = (src.getType() != srcAbstractTL.getLoweredType()); // Fast path: no re-abstraction required. - if (!hasAbstraction && - (!requiresAddress || - (src.getType().isAddress() || !SGF.silConv.useLoweredAddresses()))) { + if (!hasAbstraction && (!requiresAddress || src.getType().isAddress())) return src; - } std::unique_ptr init; if (requiresAddress) { @@ -470,8 +450,9 @@ RValue Lowering::emitConditionalCheckedCast( SILValue resultObjectBuffer; Optional resultObjectTemp; SGFContext resultObjectCtx; - if ((resultTL.isAddressOnly() && SGF.silConv.useLoweredAddresses()) || - (C.getEmitInto() && C.getEmitInto()->canPerformInPlaceInitialization())) { + if ((resultTL.isAddressOnly()) + || (C.getEmitInto() + && C.getEmitInto()->canPerformInPlaceInitialization())) { SILType resultTy = resultTL.getLoweredType(); resultBuffer = SGF.getBufferForExprResult(loc, resultTy, C); resultObjectBuffer = SGF.B.createInitEnumDataAddr( diff --git a/lib/SILGen/SILGenLazyConformance.cpp b/lib/SILGen/SILGenLazyConformance.cpp index 227672b578d95..a50be17715fa9 100644 --- a/lib/SILGen/SILGenLazyConformance.cpp +++ b/lib/SILGen/SILGenLazyConformance.cpp @@ -183,13 +183,6 @@ class LazyConformanceEmitter : public SILInstructionVisitorgetTargetFormalType()); } - void visitCheckedCastValueBranchInst(CheckedCastValueBranchInst *CCVBI) { - SGM.useConformancesFromType(CCVBI->getSourceFormalType()); - SGM.useConformancesFromType(CCVBI->getTargetFormalType()); - SGM.useConformancesFromObjectiveCType(CCVBI->getSourceFormalType()); - SGM.useConformancesFromObjectiveCType(CCVBI->getTargetFormalType()); - } - void visitCopyAddrInst(CopyAddrInst *CAI) { SGM.useConformancesFromType(CAI->getSrc()->getType().getASTType()); SGM.useConformancesFromType(CAI->getDest()->getType().getASTType()); diff --git a/lib/SILOptimizer/ARC/ARCSequenceOptUtils.cpp b/lib/SILOptimizer/ARC/ARCSequenceOptUtils.cpp index 62c501669051b..e29e1ea47ac16 100644 --- a/lib/SILOptimizer/ARC/ARCSequenceOptUtils.cpp +++ b/lib/SILOptimizer/ARC/ARCSequenceOptUtils.cpp @@ -38,7 +38,6 @@ bool isARCSignificantTerminator(TermInst *TI) { case TermKind::SwitchEnumAddrInst: case TermKind::DynamicMethodBranchInst: case TermKind::CheckedCastBranchInst: - case TermKind::CheckedCastValueBranchInst: case TermKind::CheckedCastAddrBranchInst: return true; } diff --git a/lib/SILOptimizer/Differentiation/VJPCloner.cpp b/lib/SILOptimizer/Differentiation/VJPCloner.cpp index 2e07f82ef1767..5a274a32b4c3f 100644 --- a/lib/SILOptimizer/Differentiation/VJPCloner.cpp +++ b/lib/SILOptimizer/Differentiation/VJPCloner.cpp @@ -377,20 +377,6 @@ class VJPCloner::Implementation final ccbi->getTrueBBCount(), ccbi->getFalseBBCount()); } - void visitCheckedCastValueBranchInst(CheckedCastValueBranchInst *ccvbi) { - Builder.setCurrentDebugScope(getOpScope(ccvbi->getDebugScope())); - // Build pullback struct value for original block. - auto *pbStructVal = buildPullbackValueStructValue(ccvbi); - // Create a new `checked_cast_value_branch` instruction. 
- getBuilder().createCheckedCastValueBranch( - ccvbi->getLoc(), getOpValue(ccvbi->getOperand()), - getOpASTType(ccvbi->getSourceFormalType()), - getOpType(ccvbi->getTargetLoweredType()), - getOpASTType(ccvbi->getTargetFormalType()), - createTrampolineBasicBlock(ccvbi, pbStructVal, ccvbi->getSuccessBB()), - createTrampolineBasicBlock(ccvbi, pbStructVal, ccvbi->getFailureBB())); - } - void visitCheckedCastAddrBranchInst(CheckedCastAddrBranchInst *ccabi) { Builder.setCurrentDebugScope(getOpScope(ccabi->getDebugScope())); // Build pullback struct value for original block. diff --git a/lib/SILOptimizer/Mandatory/AddressLowering.cpp b/lib/SILOptimizer/Mandatory/AddressLowering.cpp index f82a66fce22da..1c72876d762d9 100644 --- a/lib/SILOptimizer/Mandatory/AddressLowering.cpp +++ b/lib/SILOptimizer/Mandatory/AddressLowering.cpp @@ -808,7 +808,7 @@ static Operand *getReusedStorageOperand(SILValue value) { /// user's storage. The user may compose an aggregate from its operands or /// forwards its operands to arguments. /// -/// TODO: Handle SwitchValueInst, CheckedCastValueBranchInst. +/// TODO: Handle SwitchValueInst static SILValue getProjectedUseValue(Operand *operand) { auto *user = operand->getUser(); switch (user->getKind()) { @@ -2418,13 +2418,6 @@ class UseRewriter : SILInstructionVisitor { use->set(SILUndef::get(use->get()->getType(), *pass.function)); } - // Opaque checked cast source. - void visitCheckedCastValueBranchInst( - CheckedCastValueBranchInst *checkedBranchInst) { - // FIXME: Unimplemented - llvm::report_fatal_error("Unimplemented CheckCastValueBranch use."); - } - // Copy from an opaque source operand. void visitCopyValueInst(CopyValueInst *copyInst) { SILValue srcVal = copyInst->getOperand(); @@ -2540,13 +2533,6 @@ class UseRewriter : SILInstructionVisitor { } void visitUncheckedEnumDataInst(UncheckedEnumDataInst *enumDataInst); - - void visitUnconditionalCheckedCastValueInst( - UnconditionalCheckedCastValueInst *checkedCastInst) { - - // FIXME: Unimplemented - llvm::report_fatal_error("Unimplemented UnconditionalCheckedCast use."); - } }; } // end anonymous namespace diff --git a/lib/SILOptimizer/Mandatory/DiagnoseInfiniteRecursion.cpp b/lib/SILOptimizer/Mandatory/DiagnoseInfiniteRecursion.cpp index d1985258f5d44..2c853ee9e98b2 100644 --- a/lib/SILOptimizer/Mandatory/DiagnoseInfiniteRecursion.cpp +++ b/lib/SILOptimizer/Mandatory/DiagnoseInfiniteRecursion.cpp @@ -225,8 +225,7 @@ class Invariants { case TermKind::CondBranchInst: case TermKind::SwitchValueInst: case TermKind::SwitchEnumInst: - case TermKind::CheckedCastBranchInst: - case TermKind::CheckedCastValueBranchInst: { + case TermKind::CheckedCastBranchInst: { SmallPtrSet visited; return isInvariantValue(term->getOperand(0), visited); } diff --git a/lib/SILOptimizer/Mandatory/Differentiation.cpp b/lib/SILOptimizer/Mandatory/Differentiation.cpp index f7f6579f9404a..572659e6e2b81 100644 --- a/lib/SILOptimizer/Mandatory/Differentiation.cpp +++ b/lib/SILOptimizer/Mandatory/Differentiation.cpp @@ -171,7 +171,6 @@ static bool diagnoseUnsupportedControlFlow(ADContext &context, if (isa(term) || isa(term) || isa(term) || isa(term) || isa(term) || - isa(term) || isa(term) || isa(term)) continue; // If terminator is an unsupported branching terminator, emit an error. 
diff --git a/lib/SILOptimizer/Mandatory/PredictableMemOpt.cpp b/lib/SILOptimizer/Mandatory/PredictableMemOpt.cpp index 52f236bb01adf..51b4fc66e6d36 100644 --- a/lib/SILOptimizer/Mandatory/PredictableMemOpt.cpp +++ b/lib/SILOptimizer/Mandatory/PredictableMemOpt.cpp @@ -2768,8 +2768,7 @@ bool AllocOptimize::tryToRemoveDeadAllocation() { case TermKind::DynamicMethodBranchInst: case TermKind::AwaitAsyncContinuationInst: case TermKind::CheckedCastBranchInst: - case TermKind::CheckedCastAddrBranchInst: - case TermKind::CheckedCastValueBranchInst: { + case TermKind::CheckedCastAddrBranchInst: { // Otherwise, we insert the destroy_addr /after/ the // terminator. All of these are guaranteed to have each successor // to have the block as its only predecessor block. diff --git a/lib/SILOptimizer/Transforms/DeadCodeElimination.cpp b/lib/SILOptimizer/Transforms/DeadCodeElimination.cpp index 55b0eb266747b..7fafb8b91be9c 100644 --- a/lib/SILOptimizer/Transforms/DeadCodeElimination.cpp +++ b/lib/SILOptimizer/Transforms/DeadCodeElimination.cpp @@ -367,7 +367,6 @@ void DCE::markTerminatorArgsLive(SILBasicBlock *Pred, case TermKind::DynamicMethodBranchInst: case TermKind::SwitchEnumInst: case TermKind::CheckedCastBranchInst: - case TermKind::CheckedCastValueBranchInst: assert(ArgIndex == 0 && "Expected a single argument!"); // We do not need to do anything with these. If the resulting @@ -472,7 +471,6 @@ void DCE::propagateLiveness(SILInstruction *I) { case TermKind::AwaitAsyncContinuationInst: case TermKind::CheckedCastBranchInst: - case TermKind::CheckedCastValueBranchInst: case TermKind::CheckedCastAddrBranchInst: case TermKind::TryApplyInst: case TermKind::SwitchValueInst: diff --git a/lib/SILOptimizer/Transforms/SimplifyCFG.cpp b/lib/SILOptimizer/Transforms/SimplifyCFG.cpp index 7f88de028524f..5d2ad690911c8 100644 --- a/lib/SILOptimizer/Transforms/SimplifyCFG.cpp +++ b/lib/SILOptimizer/Transforms/SimplifyCFG.cpp @@ -239,7 +239,6 @@ class SimplifyCFG { bool simplifyBranchBlock(BranchInst *BI); bool simplifyCondBrBlock(CondBranchInst *BI); bool simplifyCheckedCastBranchBlock(CheckedCastBranchInst *CCBI); - bool simplifyCheckedCastValueBranchBlock(CheckedCastValueBranchInst *CCBI); bool simplifyCheckedCastAddrBranchBlock(CheckedCastAddrBranchInst *CCABI); bool simplifyTryApplyBlock(TryApplyInst *TAI); bool simplifySwitchValueBlock(SwitchValueInst *SVI); @@ -2471,49 +2470,6 @@ bool SimplifyCFG::simplifyCheckedCastBranchBlock(CheckedCastBranchInst *CCBI) { return MadeChange; } -bool SimplifyCFG::simplifyCheckedCastValueBranchBlock( - CheckedCastValueBranchInst *CCBI) { - // TODO: OSSA; handle cleanups for opaque cases (simplify_cfg_opaque.sil). 
-  if (!EnableOSSARewriteTerminator && Fn.hasOwnership()) {
-    return false;
-  }
-
-  auto SuccessBB = CCBI->getSuccessBB();
-  auto FailureBB = CCBI->getFailureBB();
-  auto ThisBB = CCBI->getParent();
-
-  bool MadeChange = false;
-  CastOptimizer CastOpt(
-      FuncBuilder, nullptr /*SILBuilderContext*/,
-      /* replaceValueUsesAction */
-      [&MadeChange](SILValue oldValue, SILValue newValue) {
-        MadeChange = true;
-      },
-      /* replaceInstUsesAction */
-      [&MadeChange](SILInstruction *I, ValueBase *V) { MadeChange = true; },
-      /* eraseInstAction */
-      [&MadeChange](SILInstruction *I) {
-        MadeChange = true;
-        I->eraseFromParent();
-      },
-      /* willSucceedAction */
-      [&]() {
-        MadeChange |= removeIfDead(FailureBB);
-        addToWorklist(ThisBB);
-      },
-      /* willFailAction */
-      [&]() {
-        MadeChange |= removeIfDead(SuccessBB);
-        addToWorklist(ThisBB);
-      });
-
-  MadeChange |= bool(CastOpt.simplifyCheckedCastValueBranchInst(CCBI));
-
-  LLVM_DEBUG(if (MadeChange)
-                 llvm::dbgs() << "simplify checked_cast_value block\n");
-  return MadeChange;
-}
-
 bool SimplifyCFG::
 simplifyCheckedCastAddrBranchBlock(CheckedCastAddrBranchInst *CCABI) {
 
@@ -2908,10 +2864,6 @@ bool SimplifyCFG::simplifyBlocks() {
     case TermKind::CheckedCastBranchInst:
       Changed |= simplifyCheckedCastBranchBlock(cast<CheckedCastBranchInst>(TI));
       break;
-    case TermKind::CheckedCastValueBranchInst:
-      Changed |= simplifyCheckedCastValueBranchBlock(
-          cast<CheckedCastValueBranchInst>(TI));
-      break;
     case TermKind::CheckedCastAddrBranchInst:
       Changed |= simplifyCheckedCastAddrBranchBlock(cast<CheckedCastAddrBranchInst>(TI));
       break;
diff --git a/lib/SILOptimizer/UtilityPasses/SerializeSILPass.cpp b/lib/SILOptimizer/UtilityPasses/SerializeSILPass.cpp
index 6bfcc4fb7943e..67e6d47204b0e 100644
--- a/lib/SILOptimizer/UtilityPasses/SerializeSILPass.cpp
+++ b/lib/SILOptimizer/UtilityPasses/SerializeSILPass.cpp
@@ -195,7 +195,6 @@ static bool hasOpaqueArchetype(TypeExpansionContext context,
   case SILInstructionKind::ObjCToThickMetatypeInst:
   case SILInstructionKind::ObjCMetatypeToObjectInst:
   case SILInstructionKind::ObjCExistentialMetatypeToObjectInst:
-  case SILInstructionKind::UnconditionalCheckedCastValueInst:
   case SILInstructionKind::UnconditionalCheckedCastInst:
   case SILInstructionKind::ClassifyBridgeObjectInst:
   case SILInstructionKind::ValueToBridgeObjectInst:
@@ -275,7 +274,6 @@ static bool hasOpaqueArchetype(TypeExpansionContext context,
   case SILInstructionKind::DynamicMethodBranchInst:
   case SILInstructionKind::CheckedCastBranchInst:
   case SILInstructionKind::CheckedCastAddrBranchInst:
-  case SILInstructionKind::CheckedCastValueBranchInst:
   case SILInstructionKind::DeallocStackInst:
   case SILInstructionKind::DeallocStackRefInst:
   case SILInstructionKind::DeallocRefInst:
diff --git a/lib/SILOptimizer/Utils/CFGOptUtils.cpp b/lib/SILOptimizer/Utils/CFGOptUtils.cpp
index 3a01c94a3a572..aaca172bc27dc 100644
--- a/lib/SILOptimizer/Utils/CFGOptUtils.cpp
+++ b/lib/SILOptimizer/Utils/CFGOptUtils.cpp
@@ -380,22 +380,6 @@ void swift::replaceBranchTarget(TermInst *t, SILBasicBlock *oldDest,
     return;
   }
 
-  case TermKind::CheckedCastValueBranchInst: {
-    auto cbi = cast<CheckedCastValueBranchInst>(t);
-    assert(oldDest == cbi->getSuccessBB()
-           || oldDest == cbi->getFailureBB() && "Invalid edge index");
-    auto successBB =
-        oldDest == cbi->getSuccessBB() ? newDest : cbi->getSuccessBB();
-    auto failureBB =
-        oldDest == cbi->getFailureBB() ? 
newDest : cbi->getFailureBB(); - builder.createCheckedCastValueBranch( - cbi->getLoc(), cbi->getOperand(), cbi->getSourceFormalType(), - cbi->getTargetLoweredType(), cbi->getTargetFormalType(), - successBB, failureBB); - cbi->eraseFromParent(); - return; - } - case TermKind::CheckedCastAddrBranchInst: { auto cbi = cast(t); assert(oldDest == cbi->getSuccessBB() @@ -738,7 +722,6 @@ static bool isSafeNonExitTerminator(TermInst *ti) { case TermKind::SwitchEnumAddrInst: case TermKind::DynamicMethodBranchInst: case TermKind::CheckedCastBranchInst: - case TermKind::CheckedCastValueBranchInst: case TermKind::CheckedCastAddrBranchInst: return true; case TermKind::UnreachableInst: diff --git a/lib/SILOptimizer/Utils/CastOptimizer.cpp b/lib/SILOptimizer/Utils/CastOptimizer.cpp index 1d8fbc5c5732e..0f2109a0023a7 100644 --- a/lib/SILOptimizer/Utils/CastOptimizer.cpp +++ b/lib/SILOptimizer/Utils/CastOptimizer.cpp @@ -1085,87 +1085,6 @@ CastOptimizer::simplifyCheckedCastBranchInst(CheckedCastBranchInst *Inst) { return NewI; } -SILInstruction *CastOptimizer::simplifyCheckedCastValueBranchInst( - CheckedCastValueBranchInst *Inst) { - if (auto *I = optimizeCheckedCastValueBranchInst(Inst)) - Inst = dyn_cast(I); - - if (!Inst) - return nullptr; - - SILDynamicCastInst dynamicCast(Inst); - auto SourceFormalType = dynamicCast.getSourceFormalType(); - auto TargetLoweredType = dynamicCast.getTargetLoweredType(); - auto TargetFormalType = dynamicCast.getTargetFormalType(); - auto Loc = dynamicCast.getLocation(); - auto *SuccessBB = dynamicCast.getSuccessBlock(); - auto *FailureBB = dynamicCast.getFailureBlock(); - auto Op = dynamicCast.getSource(); - auto *F = dynamicCast.getFunction(); - - // Check if we can statically predict the outcome of the cast. - auto Feasibility = dynamicCast.classifyFeasibility(false /*allow wmo opts*/); - - SILBuilderWithScope Builder(Inst, builderContext); - - if (Feasibility == DynamicCastFeasibility::WillFail) { - auto *NewI = Builder.createBranch(Loc, FailureBB); - eraseInstAction(Inst); - willFailAction(); - return NewI; - } - - // Casting will succeed. - - bool ResultNotUsed = SuccessBB->getArgument(0)->use_empty(); - SILValue CastedValue; - if (Op->getType() != TargetLoweredType) { - // Apply the bridged cast optimizations. - // TODO: Bridged casts cannot be expressed by checked_cast_value_br yet. - // Once the support for opaque values has landed, please review this - // code. - auto *BridgedI = optimizeBridgedCasts(dynamicCast); - if (BridgedI) { - llvm_unreachable( - "Bridged casts cannot be expressed by checked_cast_value_br yet"); - } else { - // If the cast may succeed or fail and can't be turned into a bridging - // call, then let it be. - if (Feasibility == DynamicCastFeasibility::MaySucceed) { - return nullptr; - } - - assert(Feasibility == DynamicCastFeasibility::WillSucceed); - - // Replace by unconditional_cast, followed by a branch. - // The unconditional_cast can be skipped, if the result of a cast - // is not used afterwards. - - if (!dynamicCast.canUseScalarCheckedCastInstructions()) - return nullptr; - - if (!ResultNotUsed) { - CastedValue = - emitSuccessfulScalarUnconditionalCast(Builder, Loc, dynamicCast); - } else { - CastedValue = SILUndef::get(TargetLoweredType, *F); - } - } - if (!CastedValue) - CastedValue = Builder.createUnconditionalCheckedCastValue( - Loc, Op, SourceFormalType, - TargetLoweredType, TargetFormalType); - } else { - // No need to cast. 
- CastedValue = Op; - } - - auto *NewI = Builder.createBranch(Loc, SuccessBB, CastedValue); - eraseInstAction(Inst); - willSucceedAction(); - return NewI; -} - SILInstruction *CastOptimizer::optimizeCheckedCastAddrBranchInst( CheckedCastAddrBranchInst *Inst) { auto Loc = Inst->getLoc(); @@ -1251,12 +1170,6 @@ SILInstruction *CastOptimizer::optimizeCheckedCastAddrBranchInst( return nullptr; } -SILInstruction *CastOptimizer::optimizeCheckedCastValueBranchInst( - CheckedCastValueBranchInst *Inst) { - // TODO - return nullptr; -} - SILInstruction * CastOptimizer::optimizeCheckedCastBranchInst(CheckedCastBranchInst *Inst) { if (Inst->isExact()) diff --git a/lib/SILOptimizer/Utils/SILInliner.cpp b/lib/SILOptimizer/Utils/SILInliner.cpp index 01e9bc5497aaa..4e674246d7906 100644 --- a/lib/SILOptimizer/Utils/SILInliner.cpp +++ b/lib/SILOptimizer/Utils/SILInliner.cpp @@ -876,7 +876,6 @@ InlineCost swift::instructionInlineCost(SILInstruction &I) { case SILInstructionKind::AssignInst: case SILInstructionKind::AssignByWrapperInst: case SILInstructionKind::CheckedCastBranchInst: - case SILInstructionKind::CheckedCastValueBranchInst: case SILInstructionKind::CheckedCastAddrBranchInst: case SILInstructionKind::ClassMethodInst: case SILInstructionKind::ObjCMethodInst: @@ -946,7 +945,6 @@ InlineCost swift::instructionInlineCost(SILInstruction &I) { case SILInstructionKind::UncheckedTakeEnumDataAddrInst: case SILInstructionKind::UnconditionalCheckedCastInst: case SILInstructionKind::UnconditionalCheckedCastAddrInst: - case SILInstructionKind::UnconditionalCheckedCastValueInst: case SILInstructionKind::IsEscapingClosureInst: case SILInstructionKind::IsUniqueInst: case SILInstructionKind::BeginCOWMutationInst: diff --git a/lib/Serialization/DeserializeSIL.cpp b/lib/Serialization/DeserializeSIL.cpp index 5a9f105096954..f3771114907f0 100644 --- a/lib/Serialization/DeserializeSIL.cpp +++ b/lib/Serialization/DeserializeSIL.cpp @@ -2623,36 +2623,6 @@ bool SILDeserializer::readSILInstruction(SILFunction *Fn, forwardingOwnership); break; } - case SILInstructionKind::CheckedCastValueBranchInst: { - CanType srcFormalType = MF->getType(ListOfValues[0])->getCanonicalType(); - SILType srcLoweredType = getSILType(MF->getType(ListOfValues[2]), - (SILValueCategory)ListOfValues[3], Fn); - SILValue op = getLocalValue(ListOfValues[1], srcLoweredType); - SILType targetLoweredType = - getSILType(MF->getType(TyID), (SILValueCategory)TyCategory, Fn); - CanType targetFormalType = - MF->getType(ListOfValues[4])->getCanonicalType(); - auto *successBB = getBBForReference(Fn, ListOfValues[5]); - auto *failureBB = getBBForReference(Fn, ListOfValues[6]); - - ResultInst = Builder.createCheckedCastValueBranch( - Loc, op, srcFormalType, targetLoweredType, targetFormalType, successBB, - failureBB); - break; - } - case SILInstructionKind::UnconditionalCheckedCastValueInst: { - CanType srcFormalType = MF->getType(ListOfValues[0])->getCanonicalType(); - SILType srcLoweredType = getSILType(MF->getType(ListOfValues[2]), - (SILValueCategory)ListOfValues[3], Fn); - SILValue src = getLocalValue(ListOfValues[1], srcLoweredType); - - SILType targetLoweredType = - getSILType(MF->getType(TyID), (SILValueCategory)TyCategory, Fn); - CanType targetFormalType = MF->getType(ListOfValues[4])->getCanonicalType(); - ResultInst = Builder.createUnconditionalCheckedCastValue( - Loc, src, srcFormalType, targetLoweredType, targetFormalType); - break; - } case SILInstructionKind::UnconditionalCheckedCastAddrInst: { // ignore attr. 
CanType srcFormalType = MF->getType(ListOfValues[0])->getCanonicalType(); diff --git a/lib/Serialization/SerializeSIL.cpp b/lib/Serialization/SerializeSIL.cpp index 6dd3be56ef52a..2da03dcca8e26 100644 --- a/lib/Serialization/SerializeSIL.cpp +++ b/lib/Serialization/SerializeSIL.cpp @@ -1810,22 +1810,6 @@ void SILSerializer::writeSILInstruction(const SILInstruction &SI) { llvm::makeArrayRef(listOfValues)); break; } - case SILInstructionKind::UnconditionalCheckedCastValueInst: { - auto CI = cast(&SI); - ValueID listOfValues[] = { - S.addTypeRef(CI->getSourceFormalType()), - addValueRef(CI->getOperand()), - S.addTypeRef(CI->getSourceLoweredType().getASTType()), - (unsigned)CI->getSourceLoweredType().getCategory(), - S.addTypeRef(CI->getTargetFormalType()) - }; - SILOneTypeValuesLayout::emitRecord(Out, ScratchRecord, - SILAbbrCodes[SILOneTypeValuesLayout::Code], (unsigned)SI.getKind(), - S.addTypeRef(CI->getTargetLoweredType().getASTType()), - (unsigned)CI->getTargetLoweredType().getCategory(), - llvm::makeArrayRef(listOfValues)); - break; - } case SILInstructionKind::UncheckedRefCastAddrInst: { auto CI = cast(&SI); ValueID listOfValues[] = { @@ -2262,27 +2246,6 @@ void SILSerializer::writeSILInstruction(const SILInstruction &SI) { llvm::makeArrayRef(listOfValues)); break; } - case SILInstructionKind::CheckedCastValueBranchInst: { - const CheckedCastValueBranchInst *CBI = - cast(&SI); - ValueID listOfValues[] = { - S.addTypeRef(CBI->getSourceFormalType()), - addValueRef(CBI->getOperand()), - S.addTypeRef(CBI->getSourceLoweredType().getASTType()), - (unsigned)CBI->getSourceLoweredType().getCategory(), - S.addTypeRef(CBI->getTargetFormalType()), - BasicBlockMap[CBI->getSuccessBB()], - BasicBlockMap[CBI->getFailureBB()] - }; - - SILOneTypeValuesLayout::emitRecord( - Out, ScratchRecord, SILAbbrCodes[SILOneTypeValuesLayout::Code], - (unsigned)SI.getKind(), - S.addTypeRef(CBI->getTargetLoweredType().getASTType()), - (unsigned)CBI->getTargetLoweredType().getCategory(), - llvm::makeArrayRef(listOfValues)); - break; - } case SILInstructionKind::CheckedCastAddrBranchInst: { auto CBI = cast(&SI); ValueID listOfValues[] = { diff --git a/test/SIL/Parser/opaque_values_parse.sil b/test/SIL/Parser/opaque_values_parse.sil index 2ca772047fdae..ff31edd08c819 100644 --- a/test/SIL/Parser/opaque_values_parse.sil +++ b/test/SIL/Parser/opaque_values_parse.sil @@ -14,36 +14,6 @@ struct S : Foo { init() } -// CHECK-LABEL: sil @castOpaque : $@convention(thin) (Int) -> () { -// CHECK: bb0([[ARG:%.*]] : $Int): -// CHECK: unconditional_checked_cast_value Int in [[ARG]] : $Int to Foo -// CHECK-LABEL: } // end sil function 'castOpaque' -sil @castOpaque : $@convention(thin) (Int) -> () { -bb0(%0 : $Int): - %c = unconditional_checked_cast_value Int in %0 : $Int to Foo - %t = tuple () - return %t : $() -} - -// CHECK-LABEL: sil @condCastOpaque : $@convention(thin) (Int) -> () { -// CHECK: bb0([[ARG:%.*]] : $Int): -// CHECK: checked_cast_value_br Int in [[ARG]] : $Int to Int -// CHECK-LABEL: } // end sil function 'condCastOpaque' -sil @condCastOpaque : $@convention(thin) (Int) -> () { -bb0(%0 : $Int): - checked_cast_value_br Int in %0 : $Int to Int, bb2, bb1 - -bb1: - br bb3 - -bb2(%i : $Int): - br bb3 - -bb3: - %t = tuple () - return %t : $() -} - // CHECK-LABEL: sil @initDeinitExistentialValue : $@convention(thin) (@in T) -> () { // CHECK: bb0([[ARG:%.*]] : $T): // CHECK: [[IE:%.*]] = init_existential_value [[ARG]] : $T, $T, $Any diff --git a/test/SIL/Serialization/opaque_values_serialize.sil 
b/test/SIL/Serialization/opaque_values_serialize.sil index 976dd7683b82a..73f62f5e60c57 100644 --- a/test/SIL/Serialization/opaque_values_serialize.sil +++ b/test/SIL/Serialization/opaque_values_serialize.sil @@ -19,36 +19,6 @@ struct S : Foo { init() } -// CHECK-LABEL: sil [serialized] @castOpaque : $@convention(thin) (Int) -> () { -// CHECK: bb0([[ARG:%.*]] : $Int): -// CHECK: unconditional_checked_cast_value Int in [[ARG]] : $Int to Foo -// CHECK-LABEL: } // end sil function 'castOpaque' -sil [serialized] @castOpaque : $@convention(thin) (Int) -> () { -bb0(%0 : $Int): - %c = unconditional_checked_cast_value Int in %0 : $Int to Foo - %t = tuple () - return %t : $() -} - -// CHECK-LABEL: sil [serialized] @condCastOpaque : $@convention(thin) (Int) -> () { -// CHECK: bb0([[ARG:%.*]] : $Int): -// CHECK: checked_cast_value_br Int in [[ARG]] : $Int to Int -// CHECK-LABEL: } // end sil function 'condCastOpaque' -sil [serialized] @condCastOpaque : $@convention(thin) (Int) -> () { -bb0(%0 : $Int): - checked_cast_value_br Int in %0 : $Int to Int, bb2, bb1 - -bb1: - br bb3 - -bb2(%i : $Int): - br bb3 - -bb3: - %t = tuple () - return %t : $() -} - // CHECK-LABEL: sil [serialized] @initDeinitExistentialValue : $@convention(thin) (@in T) -> () { // CHECK: bb0([[ARG:%.*]] : $T): // CHECK: [[IE:%.*]] = init_existential_value [[ARG]] : $T, $T, $Any diff --git a/test/SIL/ownership-verifier/opaque_use_verifier.sil b/test/SIL/ownership-verifier/opaque_use_verifier.sil index 90c4131a3bced..64f71496d0bdd 100644 --- a/test/SIL/ownership-verifier/opaque_use_verifier.sil +++ b/test/SIL/ownership-verifier/opaque_use_verifier.sil @@ -9,12 +9,6 @@ sil_stage raw import Builtin -sil [ossa] @unconditional_checked_cast_value_test : $@convention(thin) (Builtin.Int32) -> @out T { -bb0(%0 : $Builtin.Int32): - %1 = unconditional_checked_cast_value Builtin.Int32 in %0 : $Builtin.Int32 to T - return %1 : $T -} - sil [ossa] @opaque_identity : $@convention(thin) (@in T) -> @out T { bb0(%0 : @owned $T): return %0 : $T @@ -66,20 +60,6 @@ bb0(%0 : $@thick AnyObject.Type): return %18 : $() } -// Test an unconditional cast from an owned value to a trivial value. 
-sil [ossa] @castToTrivial : $@convention(thin) (@owned AnyObject) -> () { -bb0(%0 : @owned $AnyObject): - %6 = function_ref @takeType : $@convention(thin) (@thick AnyObject.Type) -> () - %8 = begin_borrow %0 : $AnyObject - %9 = copy_value %8 : $AnyObject - %10 = unconditional_checked_cast_value AnyObject in %9 : $AnyObject to @thick AnyObject.Type - %11 = apply %6(%10) : $@convention(thin) (@thick AnyObject.Type) -> () - end_borrow %8 : $AnyObject - destroy_value %0 : $AnyObject - %18 = tuple () - return %18 : $() -} - sil [ossa] @passTrivialAsOpaqueValue : $@convention(thin) (Builtin.Int64) -> () { bb0(%0 : $Builtin.Int64): %1 = function_ref @opaque_copy : $@convention(thin) (@in_guaranteed T) -> @out T diff --git a/test/SILOptimizer/latecodemotion.sil b/test/SILOptimizer/latecodemotion.sil index e364e93472961..d9e8843a4c623 100644 --- a/test/SILOptimizer/latecodemotion.sil +++ b/test/SILOptimizer/latecodemotion.sil @@ -1386,19 +1386,3 @@ bb0(%0 : $Builtin.NativeObject, %1: $Builtin.NativeObject): %5 = tuple() return %5 : $() } - -// CHECK: sil @dont_hoist_release_accross_cast_value -// CHECK: retain -// CHECK: apply -// CHECK: unconditional_checked_cast -// CHECK: release -sil @dont_hoist_release_accross_cast_value : $@convention(thin) (Builtin.NativeObject, Builtin.NativeObject) -> () { -bb0(%0 : $Builtin.NativeObject, %1: $Builtin.NativeObject): - strong_retain %0: $Builtin.NativeObject - %2 = function_ref @blocker : $@convention(thin) () -> () - apply %2() : $@convention(thin) () -> () - %c = unconditional_checked_cast_value Builtin.NativeObject in %0 : $Builtin.NativeObject to B - strong_release %0: $Builtin.NativeObject - %5 = tuple() - return %5 : $() -} diff --git a/test/SILOptimizer/side-effect.sil b/test/SILOptimizer/side-effect.sil index 91ef07e40f407..f1472a8d2bec2 100644 --- a/test/SILOptimizer/side-effect.sil +++ b/test/SILOptimizer/side-effect.sil @@ -219,15 +219,6 @@ bb0(%0 : $Builtin.NativeObject, %1 : $X): return %r : $() } -// CHECK-LABEL: sil @checkedcastvalue -// CHECK: -sil @checkedcastvalue : $@convention(thin) (Builtin.NativeObject) -> () { -bb0(%0 : $Builtin.NativeObject): - %1 = unconditional_checked_cast_value Builtin.NativeObject in %0 : $Builtin.NativeObject to X - %r = tuple () - return %r : $() -} - sil_global public @sil_global1 : $Int32 // Test the propagation of side-effects through the call graph. 
From 1a0a92ab79fe2b459c57dd1c65509aeec978738e Mon Sep 17 00:00:00 2001 From: Andrew Trick Date: Tue, 1 Mar 2022 23:02:57 -0800 Subject: [PATCH 19/29] [SIL-opaque] Cleanup and reenable SILGen unit tests CHECK lines still need to be updated for OSSA --- test/SILGen/opaque_ownership.swift | 277 ---- test/SILGen/opaque_values_silgen.swift | 1400 +++-------------- test/SILGen/opaque_values_silgen_lib.swift | 1094 ++++++++++++- test/SILGen/opaque_values_silgen_todo.swift | 100 ++ test/SILGen/opaque_values_silgen_vtable.swift | 53 + 5 files changed, 1463 insertions(+), 1461 deletions(-) delete mode 100644 test/SILGen/opaque_ownership.swift create mode 100644 test/SILGen/opaque_values_silgen_vtable.swift diff --git a/test/SILGen/opaque_ownership.swift b/test/SILGen/opaque_ownership.swift deleted file mode 100644 index 7ee241a5572f2..0000000000000 --- a/test/SILGen/opaque_ownership.swift +++ /dev/null @@ -1,277 +0,0 @@ - -// RUN: %target-swift-emit-silgen -enable-sil-opaque-values -emit-sorted-sil -Xllvm -sil-full-demangle -parse-stdlib -parse-as-library -module-name Swift %s | %FileCheck %s -// RUN: %target-swift-emit-silgen -target x86_64-apple-macosx10.9 -enable-sil-opaque-values -emit-sorted-sil -Xllvm -sil-full-demangle -parse-stdlib -parse-as-library -module-name Swift %s | %FileCheck --check-prefix=CHECK-OSX %s - -public typealias AnyObject = Builtin.AnyObject - -precedencegroup AssignmentPrecedence {} -precedencegroup CastingPrecedence {} -precedencegroup ComparisonPrecedence {} - -public protocol _ObjectiveCBridgeable {} - -public protocol UnkeyedDecodingContainer { - var isAtEnd: Builtin.Int1 { get } -} - -public protocol Decoder { - func unkeyedContainer() throws -> UnkeyedDecodingContainer -} - -// Test open_existential_value ownership -// --- -// CHECK-LABEL: sil [ossa] @$ss11takeDecoder4fromBi1_s0B0_p_tKF : $@convention(thin) (@in_guaranteed Decoder) -> (Builtin.Int1, @error Error) { -// CHECK: bb0(%0 : @guaranteed $Decoder): -// CHECK: [[OPENED:%.*]] = open_existential_value %0 : $Decoder to $@opened("{{.*}}") Decoder -// CHECK: [[WT:%.*]] = witness_method $@opened("{{.*}}") Decoder, #Decoder.unkeyedContainer : (Self) -> () throws -> UnkeyedDecodingContainer, %3 : $@opened("{{.*}}") Decoder : $@convention(witness_method: Decoder) <τ_0_0 where τ_0_0 : Decoder> (@in_guaranteed τ_0_0) -> (@out UnkeyedDecodingContainer, @error Error) -// CHECK: try_apply [[WT]]<@opened("{{.*}}") Decoder>([[OPENED]]) : $@convention(witness_method: Decoder) <τ_0_0 where τ_0_0 : Decoder> (@in_guaranteed τ_0_0) -> (@out UnkeyedDecodingContainer, @error Error), normal bb2, error bb1 -// -// CHECK:bb{{.*}}([[RET1:%.*]] : @owned $UnkeyedDecodingContainer): -// CHECK: [[BORROW2:%.*]] = begin_borrow [lexical] [[RET1]] : $UnkeyedDecodingContainer -// CHECK: [[OPENED2:%.*]] = open_existential_value [[BORROW2]] : $UnkeyedDecodingContainer to $@opened("{{.*}}") UnkeyedDecodingContainer -// CHECK: [[WT2:%.*]] = witness_method $@opened("{{.*}}") UnkeyedDecodingContainer, #UnkeyedDecodingContainer.isAtEnd!getter : (Self) -> () -> Builtin.Int1, [[OPENED2]] : $@opened("{{.*}}") UnkeyedDecodingContainer : $@convention(witness_method: UnkeyedDecodingContainer) <τ_0_0 where τ_0_0 : UnkeyedDecodingContainer> (@in_guaranteed τ_0_0) -> Builtin.Int1 -// CHECK: [[RET2:%.*]] = apply [[WT2]]<@opened("{{.*}}") UnkeyedDecodingContainer>([[OPENED2]]) : $@convention(witness_method: UnkeyedDecodingContainer) <τ_0_0 where τ_0_0 : UnkeyedDecodingContainer> (@in_guaranteed τ_0_0) -> Builtin.Int1 -// CHECK: end_borrow [[BORROW2]] : 
$UnkeyedDecodingContainer -// CHECK: destroy_value [[RET1]] : $UnkeyedDecodingContainer -// CHECK-NOT: destroy_value %0 : $Decoder -// CHECK: return [[RET2]] : $Builtin.Int1 -// CHECK-LABEL: } // end sil function '$ss11takeDecoder4fromBi1_s0B0_p_tKF' -public func takeDecoder(from decoder: Decoder) throws -> Builtin.Int1 { - let container = try decoder.unkeyedContainer() - return container.isAtEnd -} - -// Test unsafe_bitwise_cast nontrivial ownership. -// --- -// CHECK-LABEL: sil [ossa] @$ss13unsafeBitCast_2toq_x_q_mtr0_lF : $@convention(thin) (@in_guaranteed T, @thick U.Type) -> @out U { -// CHECK: bb0([[ARG0:%.*]] : @guaranteed $T, [[ARG1:%.*]] : $@thick U.Type): -// CHECK: [[ARG_COPY:%.*]] = copy_value [[ARG0]] : $T -// CHECK: [[RESULT:%.*]] = unchecked_bitwise_cast [[ARG_COPY]] : $T to $U -// CHECK: [[RESULT_COPY:%.*]] = copy_value [[RESULT]] : $U -// CHECK: destroy_value [[ARG_COPY]] : $T -// CHECK: return [[RESULT_COPY]] : $U -// CHECK-LABEL: } // end sil function '$ss13unsafeBitCast_2toq_x_q_mtr0_lF' -public func unsafeBitCast(_ x: T, to type: U.Type) -> U { - return Builtin.reinterpretCast(x) -} - -// A lot of standard library support is necessary to support raw enums. -// -------------------------------------------------------------------- - -infix operator == : ComparisonPrecedence -infix operator ~= : ComparisonPrecedence - -public struct Bool { - var _value: Builtin.Int1 - - public init() { - let zero: Int64 = 0 - self._value = Builtin.trunc_Int64_Int1(zero._value) - } - - internal init(_ v: Builtin.Int1) { self._value = v } - - public init(_ value: Bool) { - self = value - } -} - -extension Bool { - public func _getBuiltinLogicValue() -> Builtin.Int1 { - return _value - } -} - -public protocol Equatable { - /// Returns a Boolean value indicating whether two values are equal. - /// - /// Equality is the inverse of inequality. For any values `a` and `b`, - /// `a == b` implies that `a != b` is `false`. - /// - /// - Parameters: - /// - lhs: A value to compare. - /// - rhs: Another value to compare. 
- static func == (lhs: Self, rhs: Self) -> Bool -} - -public func ~= (a: T, b: T) -> Bool { - return a == b -} - -public protocol RawRepresentable { - associatedtype RawValue - - init?(rawValue: RawValue) - - var rawValue: RawValue { get } -} - -public func == (lhs: T, rhs: T) -> Bool - where T.RawValue : Equatable { - return lhs.rawValue == rhs.rawValue -} - -public typealias _MaxBuiltinIntegerType = Builtin.IntLiteral - -public protocol _ExpressibleByBuiltinIntegerLiteral { - init(_builtinIntegerLiteral value: _MaxBuiltinIntegerType) -} - -public protocol ExpressibleByIntegerLiteral { - associatedtype IntegerLiteralType : _ExpressibleByBuiltinIntegerLiteral - - init(integerLiteral value: IntegerLiteralType) -} - -extension ExpressibleByIntegerLiteral - where Self : _ExpressibleByBuiltinIntegerLiteral { - @_transparent - public init(integerLiteral value: Self) { - self = value - } -} - -public protocol ExpressibleByStringLiteral {} -public protocol ExpressibleByFloatLiteral {} -public protocol ExpressibleByUnicodeScalarLiteral {} -public protocol ExpressibleByExtendedGraphemeClusterLiteral {} - -public struct Int64 : ExpressibleByIntegerLiteral, _ExpressibleByBuiltinIntegerLiteral, Equatable { - public var _value: Builtin.Int64 - public init(_builtinIntegerLiteral x: _MaxBuiltinIntegerType) { - _value = Builtin.s_to_s_checked_trunc_IntLiteral_Int64(x).0 - } - public typealias IntegerLiteralType = Int64 - public init(integerLiteral value: Int64) { - self = value - } - public static func ==(_ lhs: Int64, rhs: Int64) -> Bool { - return Bool(Builtin.cmp_eq_Int64(lhs._value, rhs._value)) - } -} - -public struct Int : _ExpressibleByBuiltinIntegerLiteral, ExpressibleByIntegerLiteral, Equatable { - var _value: Builtin.Int64 - public init() { - self = 0 - } - public typealias IntegerLiteralType = Int - public init(_builtinIntegerLiteral x: _MaxBuiltinIntegerType) { - _value = Builtin.s_to_s_checked_trunc_IntLiteral_Int64(x).0 - } - - public init(integerLiteral value: Int) { - self = value - } - - public static func ==(_ lhs: Int, rhs: Int) -> Bool { - return Bool(Builtin.cmp_eq_Int64(lhs._value, rhs._value)) - } -} - -// Test ownership of multi-case Enum values in the context of to @in thunks. -// --- -// CHECK-LABEL: sil shared [transparent] [serialized] [thunk] [ossa] @$ss17FloatingPointSignOSQsSQ2eeoiySbx_xtFZTW : -// CHECK: bb0(%0 : $FloatingPointSign, %1 : $FloatingPointSign, %2 : $@thick FloatingPointSign.Type): -// CHECK: %3 = metatype $@thin FloatingPointSign.Type // user: %5 -// CHECK: %4 = function_ref @$ss17FloatingPointSignO21__derived_enum_equalsySbAB_ABtFZ : $@convention(method) (FloatingPointSign, FloatingPointSign, @thin FloatingPointSign.Type) -> Bool // user: %5 -// CHECK: %5 = apply %4(%0, %1, %3) : $@convention(method) (FloatingPointSign, FloatingPointSign, @thin FloatingPointSign.Type) -> Bool // user: %6 -// CHECK: return %5 : $Bool -// CHECK-LABEL: } // end sil function '$ss17FloatingPointSignOSQsSQ2eeoiySbx_xtFZTW' -public enum FloatingPointSign { - /// The sign for a positive value. - case plus - - /// The sign for a negative value. - case minus -} - -#if os(macOS) -// Test open_existential_value used in a conversion context. -// (the actual bridging call is dropped because we don't import Swift). 
-// --- -// CHECK-OSX-LABEL: sil [ossa] @$ss26_unsafeDowncastToAnyObject04fromD0yXlyp_tF : $@convention(thin) (@in_guaranteed Any) -> @owned AnyObject { -// CHECK-OSX: bb0(%0 : @guaranteed $Any): -// CHECK-OSX: [[COPY:%.*]] = copy_value %0 : $Any -// CHECK-OSX: [[BORROW2:%.*]] = begin_borrow [[COPY]] : $Any -// CHECK-OSX: [[VAL:%.*]] = open_existential_value [[BORROW2]] : $Any to $@opened -// CHECK-OSX: [[COPY2:%.*]] = copy_value [[VAL]] : $@opened -// CHECK-OSX: end_borrow [[BORROW2]] : $Any -// CHECK-OSX: destroy_value [[COPY2]] : $@opened -// CHECK-OSX: destroy_value [[COPY]] : $Any -// CHECK-OSX-NOT: destroy_value %0 : $Any -// CHECK-OSX: return undef : $AnyObject -// CHECK-OSX-LABEL: } // end sil function '$ss26_unsafeDowncastToAnyObject04fromD0yXlyp_tF' -public func _unsafeDowncastToAnyObject(fromAny any: Any) -> AnyObject { - return any as AnyObject -} -#endif - -public protocol Error {} - -#if os(macOS) -// Test open_existential_box_value in a conversion context. -// --- -// CHECK-OSX-LABEL: sil [ossa] @$ss3foo1eys5Error_pSg_tF : $@convention(thin) (@guaranteed Optional) -> () { -// CHECK-OSX: [[BORROW:%.*]] = begin_borrow [lexical] %{{.*}} : $Error -// CHECK-OSX: [[VAL:%.*]] = open_existential_box_value [[BORROW]] : $Error to $@opened -// CHECK-OSX: [[COPY:%.*]] = copy_value [[VAL]] : $@opened -// CHECK-OSX: [[ANY:%.*]] = init_existential_value [[COPY]] : $@opened -// CHECK-OSX: end_borrow [[BORROW]] : $Error -// CHECK-OSX-LABEL: } // end sil function '$ss3foo1eys5Error_pSg_tF' -public func foo(e: Error?) { - if let u = e { - let a: Any = u - _ = a - } -} -#endif - -public enum Optional { - case none - case some(Wrapped) -} - -public protocol IP {} - -public protocol Seq { - associatedtype Iterator : IP - - func makeIterator() -> Iterator -} - -extension Seq where Self.Iterator == Self { - public func makeIterator() -> Self { - return self - } -} - -public struct EnumIter : IP, Seq { - internal var _base: Base - - public typealias Iterator = EnumIter -} - -// Test passing a +1 RValue to @in_guaranteed. 
-// --- -// CHECK-LABEL: sil [ossa] @$ss7EnumSeqV12makeIterators0A4IterVy0D0QzGyF : $@convention(method) (@in_guaranteed EnumSeq) -> @out EnumIter { -// CHECK: bb0(%0 : @guaranteed $EnumSeq): -// CHECK: [[MT:%.*]] = metatype $@thin EnumIter.Type -// CHECK: [[FIELD:%.*]] = struct_extract %0 : $EnumSeq, #EnumSeq._base -// CHECK: [[COPY:%.*]] = copy_value [[FIELD]] : $Base -// CHECK: [[WT:%.*]] = witness_method $Base, #Seq.makeIterator : (Self) -> () -> Self.Iterator : $@convention(witness_method: Seq) <τ_0_0 where τ_0_0 : Seq> (@in_guaranteed τ_0_0) -> @out τ_0_0.Iterator -// CHECK: [[ITER:%.*]] = apply [[WT]]([[COPY]]) : $@convention(witness_method: Seq) <τ_0_0 where τ_0_0 : Seq> (@in_guaranteed τ_0_0) -> @out τ_0_0.Iterator -// CHECK: destroy_value [[COPY]] : $Base -// CHECK: [[FN:%.*]] = function_ref @$ss8EnumIterV5_baseAByxGx_tcfC : $@convention(method) <τ_0_0 where τ_0_0 : IP> (@in τ_0_0, @thin EnumIter<τ_0_0>.Type) -> @out EnumIter<τ_0_0> -// CHECK: [[RET:%.*]] = apply [[FN]]([[ITER]], [[MT]]) : $@convention(method) <τ_0_0 where τ_0_0 : IP> (@in τ_0_0, @thin EnumIter<τ_0_0>.Type) -> @out EnumIter<τ_0_0> -// CHECK: return [[RET]] : $EnumIter -// CHECK-LABEL: } // end sil function '$ss7EnumSeqV12makeIterators0A4IterVy0D0QzGyF' -public struct EnumSeq : Seq { - public typealias Iterator = EnumIter - - internal var _base: Base - - public func makeIterator() -> Iterator { - return EnumIter(_base: _base.makeIterator()) - } -} diff --git a/test/SILGen/opaque_values_silgen.swift b/test/SILGen/opaque_values_silgen.swift index 3f95233c63f53..1192630ee8414 100644 --- a/test/SILGen/opaque_values_silgen.swift +++ b/test/SILGen/opaque_values_silgen.swift @@ -1,915 +1,158 @@ -// XFAIL: * +// RUN: %target-swift-emit-silgen -enable-sil-opaque-values -Xllvm -sil-full-demangle %s | %FileCheck %s --check-prefix=CHECK --check-prefix=CHECK-%target-runtime +// RUN: %target-swift-emit-silgen -enable-sil-opaque-values -Xllvm -sil-full-demangle -target x86_64-apple-macosx10.9 %s | %FileCheck --check-prefix=CHECK-OSX %s -// RUN: %target-swift-emit-silgen -enable-sil-opaque-values -emit-sorted-sil -Xllvm -sil-full-demangle %s | %FileCheck %s --check-prefix=CHECK --check-prefix=CHECK-%target-runtime +// Test SILGen -enable-sil-opaque-values with tests that depend on the stdlib. -struct TrivialStruct { - var x: Int -} - -protocol Foo { - func foo() -} - -protocol P { - var x : Int { get } -} - -protocol P2 : P {} - -extension TrivialStruct : P2 {} - -struct Box { - let t: T -} - -protocol EmptyP {} - -struct AddressOnlyStruct : EmptyP {} - -struct AnyStruct { - let a: Any -} - -protocol Clonable { - func maybeClone() -> Self? -} - -indirect enum IndirectEnum { - case Nil - case Node(T) -} - -protocol SubscriptableGet { - subscript(a : Int) -> Int { get } -} - -protocol SubscriptableGetSet { - subscript(a : Int) -> Int { get set } -} - -var subscriptableGet : SubscriptableGet -var subscriptableGetSet : SubscriptableGetSet - -class OpaqueClass { - typealias ObnoxiousTuple = (T, (T.Type, (T) -> T)) - - func inAndOut(x: T) -> T { return x } - func variantOptionalityTuples(x: ObnoxiousTuple) -> ObnoxiousTuple? { return x } -} - -class StillOpaqueClass: OpaqueClass { - override func variantOptionalityTuples(x: ObnoxiousTuple?) -> ObnoxiousTuple { return x! } -} - -class OpaqueTupleClass: OpaqueClass<(U, U)> { - override func inAndOut(x: (U, U)) -> (U, U) { return x } -} - -func unreachableF() -> (Int, T)? { } - -func s010_hasVarArg(_ args: Any...) 
{} - -// Tests Address only enums's construction -// CHECK-LABEL: sil shared [transparent] @$s20opaque_values_silgen15AddressOnlyEnumO4mereyAcA6EmptyP_pcACmF : $@convention(method) (@in EmptyP, @thin AddressOnlyEnum.Type) -> @out AddressOnlyEnum { -// CHECK: bb0([[ARG0:%.*]] : $EmptyP, [[ARG1:%.*]] : $@thin AddressOnlyEnum.Type): -// CHECK: [[RETVAL:%.*]] = enum $AddressOnlyEnum, #AddressOnlyEnum.mere!enumelt, [[ARG0]] : $EmptyP -// CHECK: return [[RETVAL]] : $AddressOnlyEnum -// CHECK-LABEL: } // end sil function '$s20opaque_values_silgen15AddressOnlyEnumO4mereyAcA6EmptyP_pcACmF' - -// CHECK-LABEL: sil shared [transparent] [thunk] @$s20opaque_values_silgen15AddressOnlyEnumO4mereyAcA6EmptyP_pcACmFTc : $@convention(thin) (@thin AddressOnlyEnum.Type) -> @owned @callee_guaranteed (@in_guaranteed EmptyP) -> @out AddressOnlyEnum { -// CHECK: bb0([[ARG:%.*]] : $@thin AddressOnlyEnum.Type): -// CHECK: [[RETVAL:%.*]] = partial_apply {{.*}}([[ARG]]) : $@convention(method) (@in EmptyP, @thin AddressOnlyEnum.Type) -> @out AddressOnlyEnum -// CHECK: [[CANONICAL_THUNK_FN:%.*]] = function_ref @$s20opaque_values_silgen6EmptyP_pAA15AddressOnlyEnumOIegir_AaB_pADIegnr_TR : $@convention(thin) (@in_guaranteed EmptyP, @guaranteed @callee_guaranteed (@in EmptyP) -> @out AddressOnlyEnum) -> @out AddressOnlyEnum -// CHECK: [[CANONICAL_THUNK:%.*]] = partial_apply [callee_guaranteed] [[CANONICAL_THUNK_FN]]([[RETVAL]]) -// CHECK: return [[CANONICAL_THUNK]] : $@callee_guaranteed (@in_guaranteed EmptyP) -> @out AddressOnlyEnum -// CHECK-LABEL: } // end sil function '$s20opaque_values_silgen15AddressOnlyEnumO4mereyAcA6EmptyP_pcACmFTc' -enum AddressOnlyEnum { - case nought - case mere(EmptyP) - case phantom(AddressOnlyStruct) -} - -// Test vtables - OpaqueTupleClass -// --- -// CHECK-LABEL: sil private @$s20opaque_values_silgen16OpaqueTupleClassC8inAndOut1xx_xtx_xt_tFAA0dF0CAdExx_tFTV : $@convention(method) (@in_guaranteed (U, U), @guaranteed OpaqueTupleClass) -> @out (U, U) { -// CHECK: bb0([[ARG0:%.*]] : $(U, U), [[ARG1:%.*]] : $OpaqueTupleClass): -// CHECK: ([[TELEM0:%.*]], [[TELEM1:%.*]]) = destructure_tuple [[ARG0]] : $(U, U) -// CHECK: [[APPLY:%.*]] = apply {{.*}}([[TELEM0]], [[TELEM1]], [[ARG1]]) : $@convention(method) <τ_0_0> (@in_guaranteed τ_0_0, @in_guaranteed τ_0_0, @guaranteed OpaqueTupleClass<τ_0_0>) -> (@out τ_0_0, @out τ_0_0) -// CHECK: [[BORROWED_CALL:%.*]] = begin_borrow [[APPLY]] -// CHECK: [[BORROWED_CALL_EXT0:%.*]] = tuple_extract [[BORROWED_CALL]] : $(U, U), 0 -// CHECK: [[RETVAL0:%.*]] = copy_value [[BORROWED_CALL_EXT0]] : $U -// CHECK: [[BORROWED_CALL_EXT1:%.*]] = tuple_extract [[BORROWED_CALL]] : $(U, U), 1 -// CHECK: [[RETVAL1:%.*]] = copy_value [[BORROWED_CALL_EXT1]] : $U -// CHECK: end_borrow [[BORROWED_CALL]] -// CHECK: [[RETVAL:%.*]] = tuple ([[RETVAL0]] : $U, [[RETVAL1]] : $U) -// CHECK: return [[RETVAL]] -// CHECK-LABEL: } // end sil function '$s20opaque_values_silgen16OpaqueTupleClassC8inAndOut1xx_xtx_xt_tFAA0dF0CAdExx_tFTV' +// FIXME: "HECK" lines all need to be updated for OSSA. 
-// Test vtables - StillOpaqueClass -// --- -// CHECK-LABEL: sil private @$s20opaque_values_silgen16StillOpaqueClassC24variantOptionalityTuples1xx_xm_xxcttx_xm_xxcttSg_tFAA0eF0CAdeFx_xm_xxctt_tFTV : $@convention(method) (@in_guaranteed T, @thick T.Type, @guaranteed @callee_guaranteed (@in_guaranteed T) -> @out T, @guaranteed StillOpaqueClass) -> @out Optional<(T, (@thick T.Type, @callee_guaranteed (@in_guaranteed T) -> @out T))> { -// CHECK: bb0([[ARG0:%.*]] : $T, [[ARG1:%.*]] : $@thick T.Type, [[ARG2:%.*]] : $@callee_guaranteed (@in_guaranteed T) -> @out T, [[ARG3:%.*]] : $StillOpaqueClass): -// CHECK: [[TELEM0:%.*]] = tuple ([[ARG1]] : $@thick T.Type, [[ARG2]] : $@callee_guaranteed (@in_guaranteed T) -> @out T) -// CHECK: [[TELEM1:%.*]] = tuple ([[ARG0]] : $T, [[TELEM0]] : $(@thick T.Type, @callee_guaranteed (@in_guaranteed T) -> @out T)) -// CHECK: [[ENUMOPT0:%.*]] = enum $Optional<(T, (@thick T.Type, @callee_guaranteed (@in_guaranteed T) -> @out T))>, #Optional.some!enumelt, [[TELEM1]] : $(T, (@thick T.Type, @callee_guaranteed (@in_guaranteed T) -> @out T)) -// CHECK: [[APPLY:%.*]] = apply {{.*}}([[ENUMOPT0]], [[ARG3]]) : $@convention(method) <τ_0_0> (@in_guaranteed Optional<(τ_0_0, (@thick τ_0_0.Type, @callee_guaranteed (@in_guaranteed τ_0_0) -> @out τ_0_0))>, @guaranteed StillOpaqueClass<τ_0_0>) -> (@out τ_0_0, @thick τ_0_0.Type, @owned @callee_guaranteed (@in_guaranteed τ_0_0) -> @out τ_0_0) -// CHECK: [[BORROWED_T:%.*]] = begin_borrow [[APPLY]] -// CHECK: [[BORROWED_T_EXT0:%.*]] = tuple_extract [[BORROWED_T]] : $(T, @thick T.Type, @callee_guaranteed (@in_guaranteed T) -> @out T), 0 -// CHECK: [[RETVAL0:%.*]] = copy_value [[BORROWED_T_EXT0]] -// CHECK: [[BORROWED_T_EXT1:%.*]] = tuple_extract [[BORROWED_T]] : $(T, @thick T.Type, @callee_guaranteed (@in_guaranteed T) -> @out T), 1 -// CHECK: [[BORROWED_T_EXT2:%.*]] = tuple_extract [[BORROWED_T]] : $(T, @thick T.Type, @callee_guaranteed (@in_guaranteed T) -> @out T), 2 -// CHECK: [[RETVAL1:%.*]] = copy_value [[BORROWED_T_EXT2]] -// CHECK: end_borrow [[BORROWED_T]] -// CHECK: [[RETTUPLE0:%.*]] = tuple ([[BORROWED_T_EXT1]] : $@thick T.Type, [[RETVAL1]] : $@callee_guaranteed (@in_guaranteed T) -> @out T) -// CHECK: [[RETTUPLE1:%.*]] = tuple ([[RETVAL0]] : $T, [[RETTUPLE0]] : $(@thick T.Type, @callee_guaranteed (@in_guaranteed T) -> @out T)) -// CHECK: [[RETVAL:%.*]] = enum $Optional<(T, (@thick T.Type, @callee_guaranteed (@in_guaranteed T) -> @out T))>, #Optional.some!enumelt, [[RETTUPLE1]] : $(T, (@thick T.Type, @callee_guaranteed (@in_guaranteed T) -> @out T)) -// CHECK: return [[RETVAL]] -// CHECK-LABEL: } // end sil function '$s20opaque_values_silgen16StillOpaqueClassC24variantOptionalityTuples1xx_xm_xxcttx_xm_xxcttSg_tFAA0eF0CAdeFx_xm_xxctt_tFTV' - - -// part of s280_convExistTrivial: conversion between existential types - reabstraction thunk -// --- -// CHECK-LABEL: sil shared [transparent] [serialized] [reabstraction_thunk] @$s20opaque_values_silgen1P_pAA13TrivialStructVIegnd_AA2P2_pAaE_pIegnr_TR : $@convention(thin) (@in_guaranteed P2, @guaranteed @callee_guaranteed (@in_guaranteed P) -> TrivialStruct) -> @out P2 { -// CHECK: bb0([[ARG0:%.*]] : $P2, [[ARG1:%.*]] : $@callee_guaranteed (@in_guaranteed P) -> TrivialStruct): -// CHECK: [[OPENED_ARG:%.*]] = open_existential_value [[ARG]] : $P2 to $@opened({{.*}}) P2 -// CHECK: [[COPIED_VAL:%.*]] = copy_value [[OPENED_ARG]] -// CHECK: [[INIT_P:%.*]] = init_existential_value [[COPIED_VAL]] : $@opened({{.*}}) P2, $@opened({{.*}}) P2, $P -// CHECK: [[BORROWED_INIT_P:%.*]] = begin_borrow 
[[INIT_P]] -// CHECK: [[APPLY_P:%.*]] = apply [[ARG1]]([[BORROWED_INIT_P]]) : $@callee_guaranteed (@in_guaranteed P) -> TrivialStruct -// CHECK: [[RETVAL:%.*]] = init_existential_value [[APPLY_P]] : $TrivialStruct, $TrivialStruct, $P2 -// CHECK: end_borrow [[BORROWED_INIT_P]] -// CHECK-NOT: destroy_value [[ARG0]] -// CHECK: return [[RETVAL]] : $P2 -// CHECK-LABEL: } // end sil function '$s20opaque_values_silgen1P_pAA13TrivialStructVIegnd_AA2P2_pAaE_pIegnr_TR' +func genericInout(_: inout T) {} -// part of s290_convOptExistTriv: conversion between existential types - reabstraction thunk - optionals case -// --- -// CHECK-LABEL: sil shared [transparent] [serialized] [reabstraction_thunk] @$s20opaque_values_silgen1P_pSgAA13TrivialStructVIegnd_AESgAA2P2_pIegyr_TR : $@convention(thin) (Optional, @guaranteed @callee_guaranteed (@in_guaranteed Optional
<P>
) -> TrivialStruct) -> @out P2 {
-// CHECK: bb0([[ARG0:%.*]] : $Optional<TrivialStruct>, [[ARG1:%.*]] : $@callee_guaranteed (@in_guaranteed Optional<P>
) -> TrivialStruct):
-// CHECK: switch_enum [[ARG0]] : $Optional<TrivialStruct>, case #Optional.some!enumelt: bb2, case #Optional.none!enumelt: bb1
-// CHECK: bb1:
-// CHECK: [[ONONE:%.*]] = enum $Optional<P>
, #Optional.none!enumelt
-// CHECK: br bb3([[ONONE]] : $Optional<P>
)
-// CHECK: bb2([[OSOME:%.*]] : $TrivialStruct):
-// CHECK: [[INIT_S:%.*]] = init_existential_value [[OSOME]] : $TrivialStruct, $TrivialStruct, $P
-// CHECK: [[ENUM_S:%.*]] = enum $Optional<P>
, #Optional.some!enumelt, [[INIT_S]] : $P
-// CHECK: br bb3([[ENUM_S]] : $Optional<P>
)
-// CHECK: bb3([[OPT_S:%.*]] : $Optional<P>
):
-// CHECK: [[BORROWED_OPT_S:%.*]] = begin_borrow [[OPT_S]]
-// CHECK: [[APPLY_P:%.*]] = apply [[ARG1]]([[BORROWED_OPT_S]]) : $@callee_guaranteed (@in_guaranteed Optional<P>
) -> TrivialStruct -// CHECK: [[RETVAL:%.*]] = init_existential_value [[APPLY_P]] : $TrivialStruct, $TrivialStruct, $P2 -// CHECK: return [[RETVAL]] : $P2 -// CHECK-LABEL: } // end sil function '$s20opaque_values_silgen1P_pSgAA13TrivialStructVIegnd_AESgAA2P2_pIegyr_TR' +func hasVarArg(_ args: Any...) {} // Test array initialization - we are still (somewhat) using addresses // --- -// CHECK-LABEL: sil @$s20opaque_values_silgen21s020_______callVarArgyyF : $@convention(thin) () -> () { -// CHECK: %[[APY:.*]] = apply %{{.*}}(%{{.*}}) : $@convention(thin) <τ_0_0> (Builtin.Word) -> (@owned Array<τ_0_0>, Builtin.RawPointer) -// CHECK: %[[BRW:.*]] = begin_borrow %[[APY]] -// CHECK: %[[TPL:.*]] = tuple_extract %[[BRW]] : $(Array, Builtin.RawPointer), 1 -// CHECK: end_borrow %[[BRW]] : $(Array, Builtin.RawPointer) -// CHECK: destroy_value %[[APY]] -// CHECK: %[[PTR:.*]] = pointer_to_address %[[TPL]] : $Builtin.RawPointer to [strict] $*Any -// CHECK: [[IOPAQUE:%.*]] = init_existential_value %{{.*}} : $Int, $Int, $Any -// CHECK: store [[IOPAQUE]] to [init] %[[PTR]] : $*Any -// CHECK: return %{{.*}} : $() -// CHECK-LABEL: } // end sil function '$s20opaque_values_silgen21s020_______callVarArgyyF' -public func s020_______callVarArg() { - s010_hasVarArg(3) -} - -// Test emitSemanticStore. -// --- -// CHECK-LABEL: sil hidden @$s20opaque_values_silgen21s030______assigninoutyyxz_xtlF : $@convention(thin) (@inout T, @in_guaranteed T) -> () { -// CHECK: bb0([[ARG0:%.*]] : $*T, [[ARG1:%.*]] : $T): -// CHECK: [[CPY:%.*]] = copy_value [[ARG1]] : $T -// CHECK: [[READ:%.*]] = begin_access [modify] [unknown] [[ARG0]] : $*T -// CHECK: assign [[CPY]] to [[READ]] : $*T -// CHECK-NOT: destroy_value [[ARG1]] : $T -// CHECK: return %{{.*}} : $() -// CHECK-LABEL: } // end sil function '$s20opaque_values_silgen21s030______assigninoutyyxz_xtlF' -func s030______assigninout(_ a: inout T, _ b: T) { - a = b -} - -// Test that we no longer use copy_addr or tuple_element_addr when copy by value is possible -// --- -// CHECK-LABEL: sil hidden @$s20opaque_values_silgen21s040___tupleReturnIntyS2i_xt_tlF : $@convention(thin) (Int, @in_guaranteed T) -> Int { -// CHECK: bb0([[ARG0:%.*]] : $Int, [[ARG1:%.*]] : $T): -// CHECK: [[ARG1_COPY:%.*]] = copy_value [[ARG1]] -// CHECK: [[TPL:%.*]] = tuple ([[ARG0]] : $Int, [[ARG1_COPY]] : $T) -// CHECK: [[BORROWED_ARG1:%.*]] = begin_borrow [[TPL]] : $(Int, T) -// CHECK: [[CPY:%.*]] = copy_value [[BORROWED_ARG1]] : $(Int, T) -// CHECK: [[BORROWED_CPY:%.*]] = begin_borrow [[CPY]] -// CHECK: [[INT:%.*]] = tuple_extract [[BORROWED_CPY]] : $(Int, T), 0 -// CHECK: [[GEN:%.*]] = tuple_extract [[BORROWED_CPY]] : $(Int, T), 1 -// CHECK: [[COPY_GEN:%.*]] = copy_value [[GEN]] -// CHECK: destroy_value [[COPY_GEN]] -// CHECK: end_borrow [[BORROWED_CPY]] -// CHECK: destroy_value [[CPY]] -// CHECK: end_borrow [[BORROWED_ARG1]] : $(Int, T) -// CHECK: destroy_value [[TPL]] : $(Int, T) -// CHECK: return [[INT]] -// CHECK-LABEL: } // end sil function '$s20opaque_values_silgen21s040___tupleReturnIntyS2i_xt_tlF' -func s040___tupleReturnInt(_ x: (Int, T)) -> Int { - let y = x.0 - return y -} - -// Test returning an opaque tuple of tuples. 
-// --- -// CHECK-LABEL: sil hidden [noinline] @$s20opaque_values_silgen21s050______multiResultyx_x_xttxlF : $@convention(thin) (@in_guaranteed T) -> (@out T, @out T, @out T) { -// CHECK: bb0(%0 : $T): -// CHECK: %[[CP1:.*]] = copy_value %{{.*}} : $T -// CHECK: %[[CP2:.*]] = copy_value %{{.*}} : $T -// CHECK: %[[CP3:.*]] = copy_value %{{.*}} : $T -// CHECK-NOT: destroy_value %0 : $T -// CHECK: %[[TPL:.*]] = tuple (%[[CP1]] : $T, %[[CP2]] : $T, %[[CP3]] : $T) -// CHECK: return %[[TPL]] : $(T, T, T) -// CHECK-LABEL: } // end sil function '$s20opaque_values_silgen21s050______multiResultyx_x_xttxlF' -@inline(never) -func s050______multiResult(_ t: T) -> (T, (T, T)) { - return (t, (t, t)) -} - -// Test returning an opaque tuple of tuples as a concrete tuple. -// --- -// CHECK-LABEL: sil @$s20opaque_values_silgen21s060__callMultiResult1iSi_Si_SittSi_tF : $@convention(thin) (Int) -> (Int, Int, Int) { -// CHECK: bb0(%0 : $Int): -// CHECK: %[[FN:.*]] = function_ref @$s20opaque_values_silgen21s050______multiResultyx_x_xttxlF : $@convention(thin) <τ_0_0> (@in_guaranteed τ_0_0) -> (@out τ_0_0, @out τ_0_0, @out τ_0_0) -// CHECK: %[[TPL:.*]] = apply %[[FN]](%0) : $@convention(thin) <τ_0_0> (@in_guaranteed τ_0_0) -> (@out τ_0_0, @out τ_0_0, @out τ_0_0) -// CHECK: %[[I1:.*]] = tuple_extract %[[TPL]] : $(Int, Int, Int), 0 -// CHECK: %[[I2:.*]] = tuple_extract %[[TPL]] : $(Int, Int, Int), 1 -// CHECK: %[[I3:.*]] = tuple_extract %[[TPL]] : $(Int, Int, Int), 2 -// CHECK: %[[R:.*]] = tuple (%[[I1]] : $Int, %[[I2]] : $Int, %[[I3]] : $Int) -// CHECK: return %[[R]] : $(Int, Int, Int) -// CHECK-LABEL: } // end sil function '$s20opaque_values_silgen21s060__callMultiResult1iSi_Si_SittSi_tF' -public func s060__callMultiResult(i: Int) -> (Int, (Int, Int)) { - return s050______multiResult(i) -} - -// SILGen, prepareArchetypeCallee. Materialize a -// non-class-constrainted self from a class-constrained archetype. -// --- -// CHECK-LABEL: sil hidden @$s20opaque_values_silgen21s070__materializeSelf1tyx_tRlzCAA3FooRzlF : $@convention(thin) (@guaranteed T) -> () { -// CHECK: bb0([[ARG:%.*]] : $T): -// CHECK: [[WITNESS_METHOD:%.*]] = witness_method $T, #Foo.foo : (Self) -> () -> () : $@convention(witness_method: Foo) <τ_0_0 where τ_0_0 : Foo> (@in_guaranteed τ_0_0) -> () -// CHECK: apply [[WITNESS_METHOD]]([[ARG]]) : $@convention(witness_method: Foo) <τ_0_0 where τ_0_0 : Foo> (@in_guaranteed τ_0_0) -> () -// CHECK-NOT: destroy_value [[ARG]] : $T -// CHECK: return %{{[0-9]+}} : $() -// CHECK-LABEL: } // end sil function '$s20opaque_values_silgen21s070__materializeSelf1tyx_tRlzCAA3FooRzlF' -func s070__materializeSelf(t: T) where T: AnyObject { - t.foo() -} - -// Test open existential with opaque values -// --- -// CHECK-LABEL: sil hidden @$s20opaque_values_silgen21s080______________bar1pSiAA1P_p_tF : $@convention(thin) (@in_guaranteed P) -> Int { -// CHECK: bb0([[ARG:%.*]] : $P): -// CHECK: [[OPENED_ARG:%.*]] = open_existential_value [[ARG]] : $P to $@opened -// CHECK: [[WITNESS_FUNC:%.*]] = witness_method $@opened -// CHECK: [[RESULT:%.*]] = apply [[WITNESS_FUNC]]<{{.*}}>([[OPENED_ARG]]) : $@convention(witness_method: P) <τ_0_0 where τ_0_0 : P> (@in_guaranteed τ_0_0) -> Int -// CHECK-NOT: destroy_value [[ARG]] : $P -// CHECK: return [[RESULT]] : $Int -func s080______________bar(p: P) -> Int { - return p.x -} - -// Test OpaqueTypeLowering copyValue and destroyValue. 
-// --- -// CHECK-LABEL: sil hidden @$s20opaque_values_silgen21s090___________calleryxxlF : $@convention(thin) (@in_guaranteed T) -> @out T { -// CHECK: bb0([[ARG:%.*]] : $T): -// CHECK-NOT: copy_value -// CHECK: [[RESULT:%.*]] = apply {{%.*}}([[ARG]]) : $@convention(thin) <τ_0_0> (@in_guaranteed τ_0_0) -> @out τ_0_0 -// CHECK-NOT: destroy_value [[ARG]] : $T -// CHECK: return %{{.*}} : $T -// CHECK-LABEL: } // end sil function '$s20opaque_values_silgen21s090___________calleryxxlF' -func s090___________caller(_ t: T) -> T { - return s090___________caller(t) -} - -// Test a simple opaque parameter and return value. -// --- -// CHECK-LABEL: sil hidden @$s20opaque_values_silgen21s100_________identityyxxlF : $@convention(thin) (@in_guaranteed T) -> @out T { -// CHECK: bb0([[ARG:%.*]] : $T): -// CHECK: [[COPY_ARG:%.*]] = copy_value [[ARG]] : $T -// CHECK-NOT: destroy_value [[ARG]] : $T -// CHECK: return [[COPY_ARG]] : $T -// CHECK-LABEL: } // end sil function '$s20opaque_values_silgen21s100_________identityyxxlF' -func s100_________identity(_ t: T) -> T { - return t -} - -// Test a guaranteed opaque parameter. -// --- -// CHECK-LABEL: sil private [transparent] [thunk] @$s20opaque_values_silgen21s110___GuaranteedSelfVAA3FooA2aDP3fooyyFTW : $@convention(witness_method: Foo) (@in_guaranteed s110___GuaranteedSelf) -> () { -// CHECK: bb0(%0 : $s110___GuaranteedSelf): -// CHECK: %[[F:.*]] = function_ref @$s20opaque_values_silgen21s110___GuaranteedSelfV3fooyyF : $@convention(method) (s110___GuaranteedSelf) -> () -// CHECK: apply %[[F]](%0) : $@convention(method) (s110___GuaranteedSelf) -> () -// CHECK: return -// CHECK-LABEL: } // end sil function '$s20opaque_values_silgen21s110___GuaranteedSelfVAA3FooA2aDP3fooyyFTW' -struct s110___GuaranteedSelf : Foo { - func foo() {} -} - -// Tests a corner case wherein we used to do a temporary and return a pointer to T instead of T -// --- -// CHECK-LABEL: sil hidden @$s20opaque_values_silgen21s120______returnValueyxxlF : $@convention(thin) (@in_guaranteed T) -> @out T { -// CHECK: bb0([[ARG:%.*]] : $T): -// CHECK: [[COPY_ARG1:%.*]] = copy_value [[ARG]] : $T -// CHECK: [[BORROWED_ARG2:%.*]] = begin_borrow [[COPY_ARG1]] -// CHECK: [[COPY_ARG2:%.*]] = copy_value [[BORROWED_ARG2]] : $T -// CHECK: end_borrow [[BORROWED_ARG2]] -// CHECK: return [[COPY_ARG2]] : $T -// CHECK-LABEL: } // end sil function '$s20opaque_values_silgen21s120______returnValueyxxlF' -func s120______returnValue(_ x: T) -> T { - let y = x - return y -} - -// Tests Optional initialization by value -// --- -// CHECK-LABEL: sil hidden @$s20opaque_values_silgen21s130_____________wrapyxSgxlF : $@convention(thin) (@in_guaranteed T) -> @out Optional { -// CHECK: bb0([[ARG:%.*]] : $T): -// CHECK: [[COPY_ARG:%.*]] = copy_value [[ARG]] : $T -// CHECK: [[OPTIONAL_ARG:%.*]] = enum $Optional, #Optional.some!enumelt, [[COPY_ARG]] : $T -// CHECK-NOT: destroy_value [[ARG]] : $T -// CHECK: return [[OPTIONAL_ARG]] : $Optional -// CHECK-LABEL: } // end sil function '$s20opaque_values_silgen21s130_____________wrapyxSgxlF' -func s130_____________wrap(_ x: T) -> T? 
{ - return x +// CHECK-LABEL: sil [ossa] @$s20opaque_values_silgen10callVarArgyyF : $@convention(thin) () -> () { +// HECK: %[[APY:.*]] = apply %{{.*}}(%{{.*}}) : $@convention(thin) <τ_0_0> (Builtin.Word) -> (@owned Array<τ_0_0>, Builtin.RawPointer) +// HECK: %[[BRW:.*]] = begin_borrow %[[APY]] +// HECK: %[[TPL:.*]] = tuple_extract %[[BRW]] : $(Array, Builtin.RawPointer), 1 +// HECK: end_borrow %[[BRW]] : $(Array, Builtin.RawPointer) +// HECK: destroy_value %[[APY]] +// HECK: %[[PTR:.*]] = pointer_to_address %[[TPL]] : $Builtin.RawPointer to [strict] $*Any +// HECK: [[IOPAQUE:%.*]] = init_existential_value %{{.*}} : $Int, $Int, $Any +// HECK: store [[IOPAQUE]] to [init] %[[PTR]] : $*Any +// HECK: return %{{.*}} : $() +// CHECK-LABEL: } // end sil function '$s20opaque_values_silgen10callVarArgyyF' +public func callVarArg() { + hasVarArg(3) } // Tests For-each statements // --- -// CHECK-LABEL: sil hidden @$s20opaque_values_silgen21s140______forEachStmtyyF : $@convention(thin) () -> () { -// CHECK: bb0: -// CHECK: [[PROJ_BOX_ARG:%.*]] = project_box %{{.*}} : ${ var IndexingIterator> } -// CHECK: [[APPLY_ARG1:%.*]] = apply -// CHECK-NOT: alloc_stack $Int -// CHECK-NOT: store [[APPLY_ARG1]] to [trivial] -// CHECK-NOT: alloc_stack $Range -// CHECK-NOT: dealloc_stack -// CHECK: [[APPLY_ARG2:%.*]] = apply %{{.*}}> -// CHECK: store [[APPLY_ARG2]] to [trivial] [[PROJ_BOX_ARG]] -// CHECK: br bb1 -// CHECK: bb1: +// CHECK-LABEL: sil hidden [ossa] @$s20opaque_values_silgen11forEachStmtyyF : $@convention(thin) () -> () { +// HECK: bb0: +// HECK: [[PROJ_BOX_ARG:%.*]] = project_box %{{.*}} : ${ var IndexingIterator> } +// HECK: [[APPLY_ARG1:%.*]] = apply +// HECK-NOT: alloc_stack $Int +// HECK-NOT: store [[APPLY_ARG1]] to [trivial] +// HECK-NOT: alloc_stack $Range +// HECK-NOT: dealloc_stack +// HECK: [[APPLY_ARG2:%.*]] = apply %{{.*}}> +// HECK: store [[APPLY_ARG2]] to [trivial] [[PROJ_BOX_ARG]] +// HECK: br bb1 +// HECK: bb1: // CHECK-NOT: alloc_stack $Optional -// CHECK: [[APPLY_ARG3:%.*]] = apply %{{.*}}> +// HECK: [[APPLY_ARG3:%.*]] = apply %{{.*}}> // CHECK-NOT: dealloc_stack -// CHECK: switch_enum [[APPLY_ARG3]] -// CHECK: bb2: -// CHECK: br bb3 -// CHECK: bb3: -// CHECK: return %{{.*}} : $() -// CHECK: bb4([[ENUM_ARG:%.*]] : $Int): +// HECK: switch_enum [[APPLY_ARG3]] +// HECK: bb2: +// HECK: br bb3 +// HECK: bb3: +// HECK: return %{{.*}} : $() +// HECK: bb4([[ENUM_ARG:%.*]] : $Int): // CHECK-NOT: unchecked_enum_data -// CHECK: br bb1 -// CHECK-LABEL: } // end sil function '$s20opaque_values_silgen21s140______forEachStmtyyF' -func s140______forEachStmt() { +// HECK: br bb1 +// CHECK-LABEL: } // end sil function '$s20opaque_values_silgen11forEachStmtyyF' +func forEachStmt() { for _ in 1..<42 { } } -func s150___________anyArg(_: Any) {} - -// Tests init of opaque existentials -// --- -// CHECK-LABEL: sil hidden @$s20opaque_values_silgen21s160_______callAnyArgyyF : $@convention(thin) () -> () { -// CHECK: bb0: -// CHECK: [[INT_TYPE:%.*]] = metatype $@thin Int.Type -// CHECK: [[INT_LIT:%.*]] = integer_literal $Builtin.IntLiteral, 42 -// CHECK: [[INT_ARG:%.*]] = apply %{{.*}}([[INT_LIT]], [[INT_TYPE]]) : $@convention(method) (Builtin.IntLiteral, @thin Int.Type) -> Int -// CHECK: [[INIT_OPAQUE:%.*]] = init_existential_value [[INT_ARG]] : $Int, $Int, $Any -// CHECK: apply %{{.*}}([[INIT_OPAQUE]]) : $@convention(thin) (@in_guaranteed Any) -> () -// CHECK: return %{{.*}} : $() -// CHECK-LABEL: } // end sil function '$s20opaque_values_silgen21s160_______callAnyArgyyF' -func s160_______callAnyArg() { - 
s150___________anyArg(42) -} - -// Tests unconditional_checked_cast for opaque values -// --- -// CHECK-LABEL: sil hidden @$s20opaque_values_silgen21s170____force_convertxylF : $@convention(thin) () -> @out T { -// CHECK: bb0: -// CHECK-NOT: alloc_stack -// CHECK: [[INT_TYPE:%.*]] = metatype $@thin Int.Type -// CHECK: [[INT_LIT:%.*]] = integer_literal $Builtin.IntLiteral, 42 -// CHECK: [[INT_ARG:%.*]] = apply %{{.*}}([[INT_LIT]], [[INT_TYPE]]) : $@convention(method) (Builtin.IntLiteral, @thin Int.Type) -> Int -// CHECK: [[INT_CAST:%.*]] = unconditional_checked_cast_value [[INT_ARG]] : $Int to $T -// CHECK: [[CAST_BORROW:%.*]] = begin_borrow [[INT_CAST]] : $T -// CHECK: [[RETURN_VAL:%.*]] = copy_value [[CAST_BORROW]] : $T -// CHECK: end_borrow [[CAST_BORROW]] : $T -// CHECK: destroy_value [[INT_CAST]] : $T -// CHECK: return [[RETURN_VAL]] : $T -// CHECK-LABEL: } // end sil function '$s20opaque_values_silgen21s170____force_convertxylF' -func s170____force_convert() -> T { - let x : T = 42 as! T - return x -} - -// Tests supporting function for s190___return_foo_var - cast and return of protocol -// --- -// CHECK-LABEL: sil hidden @$s20opaque_values_silgen21s180_______return_fooAA3Foo_pyF : $@convention(thin) () -> @out Foo { -// CHECK: bb0: -// CHECK: [[INT_LIT:%.*]] = integer_literal $Builtin.IntLiteral, 42 -// CHECK: [[INT_ARG:%.*]] = apply %{{.*}}([[INT_LIT]], [[INT_TYPE]]) : $@convention(method) (Builtin.IntLiteral, @thin Int.Type) -> Int -// CHECK: [[INT_CAST:%.*]] = unconditional_checked_cast_value [[INT_ARG]] : $Int to $Foo -// CHECK: return [[INT_CAST]] : $Foo -// CHECK-LABEL: } // end sil function '$s20opaque_values_silgen21s180_______return_fooAA3Foo_pyF' -func s180_______return_foo() -> Foo { - return 42 as! Foo -} -var foo_var : Foo = s180_______return_foo() - -// Tests return of global variables by doing a load of copy -// --- -// CHECK-LABEL: sil hidden @$s20opaque_values_silgen21s190___return_foo_varAA3Foo_pyF : $@convention(thin) () -> @out Foo { -// CHECK: bb0: -// CHECK: [[GLOBAL:%.*]] = global_addr {{.*}} : $*Foo -// CHECK: [[READ:%.*]] = begin_access [read] [dynamic] [[GLOBAL]] : $*Foo -// CHECK: [[LOAD_GLOBAL:%.*]] = load [copy] [[READ]] : $*Foo -// CHECK: return [[LOAD_GLOBAL]] : $Foo -// CHECK-LABEL: } // end sil function '$s20opaque_values_silgen21s190___return_foo_varAA3Foo_pyF' -func s190___return_foo_var() -> Foo { - return foo_var -} - -// Tests deinit of opaque existentials -// --- -// CHECK-LABEL: sil hidden @$s20opaque_values_silgen21s200______use_foo_varyyF : $@convention(thin) () -> () { -// CHECK: bb0: -// CHECK: [[GLOBAL:%.*]] = global_addr {{.*}} : $*Foo -// CHECK: [[READ:%.*]] = begin_access [read] [dynamic] [[GLOBAL]] : $*Foo -// CHECK: [[LOAD_GLOBAL:%.*]] = load [copy] [[READ]] : $*Foo -// CHECK: [[BORROW:%.*]] = begin_borrow [[LOAD_GLOBAL]] : $Foo -// CHECK: [[OPEN_VAR:%.*]] = open_existential_value [[BORROW]] : $Foo -// CHECK: [[WITNESS:%.*]] = witness_method $@opened -// CHECK: apply [[WITNESS]] -// CHECK: end_borrow [[BORROW]] -// CHECK: destroy_value [[LOAD_GLOBAL]] -// CHECK: return %{{.*}} : $() -// CHECK-LABEL: } // end sil function '$s20opaque_values_silgen21s200______use_foo_varyyF' -func s200______use_foo_var() { - foo_var.foo() -} - -// Tests composition erasure of opaque existentials + copy into of opaques -// --- -// CHECK-LABEL: sil hidden @$s20opaque_values_silgen21s210______compErasureys5Error_psAC_AA3FoopF : $@convention(thin) (@in_guaranteed Error & Foo) -> @owned Error { -// CHECK: bb0([[ARG:%.*]] : $Error & Foo): -// CHECK: 
[[OPAQUE_ARG:%.*]] = open_existential_value [[ARG]] : $Error & Foo to $@opened({{.*}}) Error & Foo -// CHECK: [[EXIST_BOX:%.*]] = alloc_existential_box $Error, $@opened({{.*}}) Error & Foo -// CHECK: [[PROJ_BOX:%.*]] = project_existential_box $@opened({{.*}}) Error & Foo in [[EXIST_BOX]] -// CHECK: [[COPY_OPAQUE:%.*]] = copy_value [[OPAQUE_ARG]] : $@opened({{.*}}) Error & Foo -// CHECK: store [[COPY_OPAQUE]] to [init] [[PROJ_BOX]] : $*@opened({{.*}}) Error & Foo -// CHECK-NOT: destroy_value [[ARG]] : $Error & Foo -// CHECK: return [[EXIST_BOX]] : $Error -// CHECK-LABEL: } // end sil function '$s20opaque_values_silgen21s210______compErasureys5Error_psAC_AA3FoopF' -func s210______compErasure(_ x: Foo & Error) -> Error { - return x -} - // Tests that existential boxes can contain opaque types // --- -// CHECK-LABEL: sil hidden @$s20opaque_values_silgen21s220_____openExistBoxySSs5Error_pF : $@convention(thin) (@guaranteed Error) -> @owned String { -// CHECK: bb0([[ARG:%.*]] : $Error): -// CHECK: [[OPAQUE_ARG:%.*]] = open_existential_box_value [[ARG]] : $Error to $@opened({{.*}}) Error -// CHECK: [[ALLOC_OPEN:%.*]] = alloc_stack $@opened({{.*}}) Error -// CHECK: store_borrow [[OPAQUE_ARG]] to [[ALLOC_OPEN]] -// CHECK: dealloc_stack [[ALLOC_OPEN]] +// CHECK-LABEL: sil hidden [ossa] @$s20opaque_values_silgen12openExistBoxySSs5Error_pF : $@convention(thin) (@guaranteed Error) -> @owned String { +// CHECK: bb0([[ARG:%.*]] : @guaranteed $Error): +// HECK: [[OPAQUE_ARG:%.*]] = open_existential_box_value [[ARG]] : $Error to $@opened({{.*}}) Error +// HECK: [[ALLOC_OPEN:%.*]] = alloc_stack $@opened({{.*}}) Error +// HECK: store_borrow [[OPAQUE_ARG]] to [[ALLOC_OPEN]] +// HECK: dealloc_stack [[ALLOC_OPEN]] // CHECK-NOT: destroy_value [[ARG]] : $Error -// CHECK: return {{.*}} : $String -// CHECK-LABEL: } // end sil function '$s20opaque_values_silgen21s220_____openExistBoxySSs5Error_pF' -func s220_____openExistBox(_ x: Error) -> String { +// HECK: return {{.*}} : $String +// CHECK-LABEL: } // end sil function '$s20opaque_values_silgen12openExistBoxySSs5Error_pF' +func openExistBox(_ x: Error) -> String { return x._domain } // Tests conditional value casts and correspondingly generated reabstraction thunk // --- -// CHECK-LABEL: sil hidden @$s20opaque_values_silgen21s230______condFromAnyyyypF : $@convention(thin) (@in_guaranteed Any) -> () { -// CHECK: bb0([[ARG:%.*]] : $Any): -// CHECK: [[COPY__ARG:%.*]] = copy_value [[ARG]] -// CHECK: checked_cast_value_br [[COPY__ARG]] : $Any to $@callee_guaranteed (@in_guaranteed (Int, (Int, (Int, Int)), Int)) -> @out (Int, (Int, (Int, Int)), Int), bb2, bb1 -// CHECK: bb2([[THUNK_PARAM:%.*]] : $@callee_guaranteed (@in_guaranteed (Int, (Int, (Int, Int)), Int)) -> @out (Int, (Int, (Int, Int)), Int)): -// CHECK: [[THUNK_REF:%.*]] = function_ref @{{.*}} : $@convention(thin) (Int, Int, Int, Int, Int, @guaranteed @callee_guaranteed (@in_guaranteed (Int, (Int, (Int, Int)), Int)) -> @out (Int, (Int, (Int, Int)), Int)) -> (Int, Int, Int, Int, Int) -// CHECK: partial_apply [callee_guaranteed] [[THUNK_REF]]([[THUNK_PARAM]]) -// CHECK: bb6: -// CHECK: return %{{.*}} : $() -// CHECK-LABEL: } // end sil function '$s20opaque_values_silgen21s230______condFromAnyyyypF' -func s230______condFromAny(_ x: Any) { +// CHECK-LABEL: sil hidden [ossa] @$s20opaque_values_silgen11condFromAnyyyypF : $@convention(thin) (@in_guaranteed Any) -> () { +// HECK: bb0([[ARG:%.*]] : $Any): +// HECK: [[COPY_ARG:%.*]] = copy_value [[ARG]] +// HECK: checked_cast_br [[COPY_ARG]] : $Any to $@callee_guaranteed 
(@in_guaranteed (Int, (Int, (Int, Int)), Int)) -> @out (Int, (Int, (Int, Int)), Int), bb2, bb1 +// HECK: bb2([[THUNK_PARAM:%.*]] : $@callee_guaranteed (@in_guaranteed (Int, (Int, (Int, Int)), Int)) -> @out (Int, (Int, (Int, Int)), Int)): +// HECK: [[THUNK_REF:%.*]] = function_ref @{{.*}} : $@convention(thin) (Int, Int, Int, Int, Int, @guaranteed @callee_guaranteed (@in_guaranteed (Int, (Int, (Int, Int)), Int)) -> @out (Int, (Int, (Int, Int)), Int)) -> (Int, Int, Int, Int, Int) +// HECK: partial_apply [callee_guaranteed] [[THUNK_REF]]([[THUNK_PARAM]]) +// HECK: bb6: +// HECK: return %{{.*}} : $() +// CHECK-LABEL: } // end sil function '$s20opaque_values_silgen11condFromAnyyyypF' +func condFromAny(_ x: Any) { if let f = x as? (Int, (Int, (Int, Int)), Int) -> (Int, (Int, (Int, Int)), Int) { _ = f(24, (4,(2, 42)), 42) } } -// Tests LValue of error types / existential boxes -// --- -// CHECK-LABEL: sil hidden @$s20opaque_values_silgen21s240_____propOfLValueySSs5Error_pF : $@convention(thin) (@guaranteed Error) -> @owned String { -// CHECK: bb0([[ARG:%.*]] : $Error): -// CHECK: [[ALLOC_OF_BOX:%.*]] = alloc_box ${ var Error } -// CHECK: [[PROJ_BOX:%.*]] = project_box [[ALLOC_OF_BOX]] -// CHECK: [[COPY_ARG:%.*]] = copy_value [[ARG]] -// CHECK: store [[COPY_ARG]] to [init] [[PROJ_BOX]] -// CHECK: [[READ:%.*]] = begin_access [read] [unknown] [[PROJ_BOX]] : $*Error -// CHECK: [[LOAD_BOX:%.*]] = load [copy] [[READ]] -// CHECK: [[OPAQUE_ARG:%.*]] = open_existential_box [[LOAD_BOX]] : $Error to $*@opened({{.*}}) Error -// CHECK: [[LOAD_OPAQUE:%.*]] = load [copy] [[OPAQUE_ARG]] -// CHECK: [[ALLOC_OPEN:%.*]] = alloc_stack $@opened({{.*}}) Error -// CHECK: store [[LOAD_OPAQUE]] to [init] [[ALLOC_OPEN]] -// CHECK: [[RET_VAL:%.*]] = apply {{.*}}<@opened({{.*}}) Error>([[ALLOC_OPEN]]) -// CHECK: return [[RET_VAL]] : $String -// CHECK-LABEL: } // end sil function '$s20opaque_values_silgen21s240_____propOfLValueySSs5Error_pF' -func s240_____propOfLValue(_ x: Error) -> String { - var x = x - return x._domain -} - -// Tests Implicit Value Construction under Opaque value mode -// --- -// CHECK-LABEL: sil hidden @$s20opaque_values_silgen21s250_________testBoxTyyF : $@convention(thin) () -> () { -// CHECK: bb0: -// CHECK: [[BOX_MTYPE:%.*]] = metatype $@thin Box.Type -// CHECK: [[MTYPE:%.*]] = metatype $@thin Int.Type -// CHECK: [[INTLIT:%.*]] = integer_literal $Builtin.IntLiteral, 42 -// CHECK: [[AINT:%.*]] = apply {{.*}}([[INTLIT]], [[MTYPE]]) : $@convention(method) (Builtin.IntLiteral, @thin Int.Type) -> Int -// CHECK: apply {{.*}}([[AINT]], [[BOX_MTYPE]]) : $@convention(method) <τ_0_0> (@in τ_0_0, @thin Box<τ_0_0>.Type) -> @out Box<τ_0_0> -// CHECK: return %{{.*}} : $() -// CHECK-LABEL: } // end sil function '$s20opaque_values_silgen21s250_________testBoxTyyF' -func s250_________testBoxT() { - let _ = Box(t: 42) -} - -// Tests Address only enums -// --- -// CHECK-LABEL: sil hidden @$s20opaque_values_silgen21s260_______AOnly_enumyyAA17AddressOnlyStructVF : $@convention(thin) (AddressOnlyStruct) -> () { -// CHECK: bb0([[ARG:%.*]] : $AddressOnlyStruct): -// CHECK: [[MTYPE1:%.*]] = metatype $@thin AddressOnlyEnum.Type -// CHECK: [[APPLY1:%.*]] = apply {{.*}}([[MTYPE1]]) : $@convention(thin) (@thin AddressOnlyEnum.Type) -> @owned @callee_guaranteed (@in_guaranteed EmptyP) -> @out AddressOnlyEnum -// CHECK: destroy_value [[APPLY1]] -// CHECK: [[MTYPE2:%.*]] = metatype $@thin AddressOnlyEnum.Type -// CHECK: [[ENUM1:%.*]] = enum $AddressOnlyEnum, #AddressOnlyEnum.nought!enumelt -// CHECK: [[MTYPE3:%.*]] = metatype $@thin 
AddressOnlyEnum.Type -// CHECK: [[INIT_OPAQUE:%.*]] = init_existential_value [[ARG]] : $AddressOnlyStruct, $AddressOnlyStruct, $EmptyP -// CHECK: [[ENUM2:%.*]] = enum $AddressOnlyEnum, #AddressOnlyEnum.mere!enumelt, [[INIT_OPAQUE]] : $EmptyP -// CHECK: destroy_value [[ENUM2]] -// CHECK: [[MTYPE4:%.*]] = metatype $@thin AddressOnlyEnum.Type -// CHECK: [[ENUM3:%.*]] = enum $AddressOnlyEnum, #AddressOnlyEnum.phantom!enumelt, [[ARG]] : $AddressOnlyStruct -// CHECK: return %{{.*}} : $() -// CHECK-LABEL: } // end sil function '$s20opaque_values_silgen21s260_______AOnly_enumyyAA17AddressOnlyStructVF' -func s260_______AOnly_enum(_ s: AddressOnlyStruct) { - _ = AddressOnlyEnum.mere - - _ = AddressOnlyEnum.nought - - _ = AddressOnlyEnum.mere(s) - - _ = AddressOnlyEnum.phantom(s) -} - -// Tests InjectOptional for opaque value types + conversion of opaque structs -// --- -// CHECK-LABEL: sil hidden @$s20opaque_values_silgen21s270_convOptAnyStructyyAA0gH0VADSgcF : $@convention(thin) (@guaranteed @callee_guaranteed (@in_guaranteed Optional) -> @out AnyStruct) -> () { -// CHECK: bb0([[ARG:%.*]] : $@callee_guaranteed (@in_guaranteed Optional) -> @out AnyStruct): -// CHECK: [[COPY_ARG:%.*]] = copy_value [[ARG]] -// CHECK: [[PAPPLY:%.*]] = partial_apply [callee_guaranteed] %{{.*}}([[COPY_ARG]]) : $@convention(thin) (@in_guaranteed Optional, @guaranteed @callee_guaranteed (@in_guaranteed Optional) -> @out AnyStruct) -> @out Optional -// CHECK: destroy_value [[PAPPLY]] : $@callee_guaranteed (@in_guaranteed Optional) -> @out Optional -// CHECK-NOT: destroy_value [[ARG]] : $@callee_guaranteed (@in_guaranteed Optional) -> @out AnyStruct -// CHECK: return %{{.*}} : $() -// CHECK-LABEL: } // end sil function '$s20opaque_values_silgen21s270_convOptAnyStructyyAA0gH0VADSgcF' -func s270_convOptAnyStruct(_ a1: @escaping (AnyStruct?) -> AnyStruct) { - let _: (AnyStruct?) -> AnyStruct? = a1 -} - -// Tests conversion between existential types -// --- -// CHECK-LABEL: sil hidden @$s20opaque_values_silgen21s280_convExistTrivialyyAA0G6StructVAA1P_pcF : $@convention(thin) (@guaranteed @callee_guaranteed (@in_guaranteed P) -> TrivialStruct) -> () { -// CHECK: bb0([[ARG:%.*]] : $@callee_guaranteed (@in_guaranteed P) -> TrivialStruct): -// CHECK: [[COPY_ARG:%.*]] = copy_value [[ARG]] -// CHECK: [[PAPPLY:%.*]] = partial_apply [callee_guaranteed] %{{.*}}([[COPY_ARG]]) : $@convention(thin) (@in_guaranteed P2, @guaranteed @callee_guaranteed (@in_guaranteed P) -> TrivialStruct) -> @out P2 -// CHECK: destroy_value [[PAPPLY]] : $@callee_guaranteed (@in_guaranteed P2) -> @out P2 -// CHECK-NOT: destroy_value [[ARG]] -// CHECK: return %{{.*}} : $() -// CHECK-LABEL: } // end sil function '$s20opaque_values_silgen21s280_convExistTrivialyyAA0G6StructVAA1P_pcF' -func s280_convExistTrivial(_ s: @escaping (P) -> TrivialStruct) { - let _: (P2) -> P2 = s -} - -// Tests conversion between existential types - optionals case -// --- -// CHECK-LABEL: sil hidden @$s20opaque_values_silgen21s290_convOptExistTrivyyAA13TrivialStructVAA1P_pSgcF : $@convention(thin) (@guaranteed @callee_guaranteed (@in_guaranteed Optional
<P>
) -> TrivialStruct) -> () {
-// CHECK: bb0([[ARG:%.*]] : $@callee_guaranteed (@in_guaranteed Optional<P>
) -> TrivialStruct):
-// CHECK: [[COPY_ARG:%.*]] = copy_value [[ARG]]
-// CHECK: [[PAPPLY:%.*]] = partial_apply [callee_guaranteed] %{{.*}}([[COPY_ARG]]) : $@convention(thin) (Optional<TrivialStruct>, @guaranteed @callee_guaranteed (@in_guaranteed Optional<P>
) -> TrivialStruct) -> @out P2 -// CHECK: destroy_value [[PAPPLY]] : $@callee_guaranteed (Optional) -> @out P2 -// CHECK-NOT: destroy_value [[ARG]] -// CHECK: return %{{.*}} : $() -// CHECK-LABEL: } // end sil function '$s20opaque_values_silgen21s290_convOptExistTrivyyAA13TrivialStructVAA1P_pSgcF' -func s290_convOptExistTriv(_ s: @escaping (P?) -> TrivialStruct) { - let _: (TrivialStruct?) -> P2 = s -} - -// Tests corner-case: reabstraction of an empty tuple to any -// --- -// CHECK-LABEL: sil hidden @$s20opaque_values_silgen21s300__convETupleToAnyyyyycF : $@convention(thin) (@guaranteed @callee_guaranteed () -> ()) -> () { -// CHECK: bb0([[ARG:%.*]] : $@callee_guaranteed () -> ()): -// CHECK: [[COPY_ARG:%.*]] = copy_value [[ARG]] -// CHECK: [[PAPPLY:%.*]] = partial_apply [callee_guaranteed] %{{.*}}([[COPY_ARG]]) : $@convention(thin) (@guaranteed @callee_guaranteed () -> ()) -> @out Any -// CHECK: destroy_value [[PAPPLY]] : $@callee_guaranteed () -> @out Any -// CHECK-NOT: destroy_value [[ARG]] -// CHECK: return %{{.*}} : $() -// CHECK-LABEL: } // end sil function '$s20opaque_values_silgen21s300__convETupleToAnyyyyycF' -func s300__convETupleToAny(_ t: @escaping () -> ()) { - let _: () -> Any = t -} - -// Tests corner-case: reabstraction of a non-empty tuple to any -// --- -// CHECK-LABEL: sil hidden @$s20opaque_values_silgen21s310__convIntTupleAnyyySi_SitycF : $@convention(thin) (@guaranteed @callee_guaranteed () -> (Int, Int)) -> () { -// CHECK: bb0([[ARG:%.*]] : $@callee_guaranteed () -> (Int, Int)): -// CHECK: [[COPY_ARG:%.*]] = copy_value [[ARG]] -// CHECK: [[PAPPLY:%.*]] = partial_apply [callee_guaranteed] %{{.*}}([[COPY_ARG]]) : $@convention(thin) (@guaranteed @callee_guaranteed () -> (Int, Int)) -> @out Any -// CHECK: destroy_value [[PAPPLY]] : $@callee_guaranteed () -> @out Any -// CHECK-NOT: destroy_value [[ARG]] -// CHECK: return %{{.*}} : $() -// CHECK-LABEL: } // end sil function '$s20opaque_values_silgen21s310__convIntTupleAnyyySi_SitycF' -func s310__convIntTupleAny(_ t: @escaping () -> (Int, Int)) { - let _: () -> Any = t -} - -// Tests translating and imploding into Any under opaque value mode -// --- -// CHECK-LABEL: sil hidden @$s20opaque_values_silgen21s320__transImplodeAnyyyyypcF : $@convention(thin) (@guaranteed @callee_guaranteed (@in_guaranteed Any) -> ()) -> () { -// CHECK: bb0([[ARG:%.*]] : $@callee_guaranteed (@in_guaranteed Any) -> ()): -// CHECK: [[COPY_ARG:%.*]] = copy_value [[ARG]] -// CHECK: [[PAPPLY:%.*]] = partial_apply [callee_guaranteed] %{{.*}}([[COPY_ARG]]) : $@convention(thin) (Int, Int, @guaranteed @callee_guaranteed (@in_guaranteed Any) -> ()) -> () -// CHECK: destroy_value [[PAPPLY]] : $@callee_guaranteed (Int, Int) -> () -// CHECK-NOT: destroy_value [[ARG]] -// CHECK: return %{{.*}} : $() -// CHECK-LABEL: } // end sil function '$s20opaque_values_silgen21s320__transImplodeAnyyyyypcF' -func s320__transImplodeAny(_ t: @escaping (Any) -> ()) { - let _: ((Int, Int)) -> () = t -} - -// Tests support for address only let closures under opaque value mode - they are not by-address anymore -// --- -// CHECK-LABEL: sil private @$s20opaque_values_silgen21s330___addrLetClosureyxxlFxyXEfU_xyXEfU_ : $@convention(thin) (@in_guaranteed T) -> @out T { -// CHECK: bb0([[ARG:%.*]] : $T): -// CHECK: [[COPY_ARG:%.*]] = copy_value [[ARG]] : $T -// CHECK: return [[COPY_ARG]] : $T -// CHECK-LABEL: } // end sil function '$s20opaque_values_silgen21s330___addrLetClosureyxxlFxyXEfU_xyXEfU_' -func s330___addrLetClosure(_ x:T) -> T { - return { { x }() }() -} - -// Tests support 
for capture of a mutable opaque value type +// Tests support for if statements for opaque value(s) under new mode // --- -// CHECK-LABEL: sil hidden @$s20opaque_values_silgen21s340_______captureBoxyyF : $@convention(thin) () -> () { -// CHECK: bb0: -// CHECK: [[ALLOC_OF_BOX:%.*]] = alloc_box ${ var EmptyP }, var, name "mutableAddressOnly" -// CHECK: [[PROJ_BOX:%.*]] = project_box [[ALLOC_OF_BOX]] -// CHECK: [[APPLY_FOR_BOX:%.*]] = apply %{{.*}}(%{{.*}}) : $@convention(method) (@thin AddressOnlyStruct.Type) -> AddressOnlyStruct -// CHECK: [[INIT_OPAQUE:%.*]] = init_existential_value [[APPLY_FOR_BOX]] : $AddressOnlyStruct, $AddressOnlyStruct, $EmptyP -// CHECK: store [[INIT_OPAQUE]] to [init] [[PROJ_BOX]] : $*EmptyP -// CHECK: [[BORROW_BOX:%.*]] = begin_borrow [[ALLOC_OF_BOX]] : ${ var EmptyP } -// CHECK: mark_function_escape [[PROJ_BOX]] : $*EmptyP -// CHECK: apply %{{.*}}([[BORROW_BOX]]) : $@convention(thin) (@guaranteed { var EmptyP }) -> () -// CHECK: return %{{.*}} : $() -// CHECK-LABEL: } // end sil function '$s20opaque_values_silgen21s340_______captureBoxyyF' -func s340_______captureBox() { - var mutableAddressOnly: EmptyP = AddressOnlyStruct() - - func captureEverything() { - _ = s100_________identity((mutableAddressOnly)) - } +protocol EmptyP {} - captureEverything() -} +struct AddressOnlyStruct : EmptyP {} -// Tests support for if statements for opaque value(s) under new mode -// --- -// CHECK-LABEL: sil hidden @$s20opaque_values_silgen21s350_______addrOnlyIf1xAA6EmptyP_pSb_tF : $@convention(thin) (Bool) -> @out EmptyP { -// CHECK: bb0([[ARG:%.*]] : $Bool): -// CHECK: [[ALLOC_OF_BOX:%.*]] = alloc_box ${ var EmptyP }, var -// CHECK: [[PROJ_BOX:%.*]] = project_box [[ALLOC_OF_BOX]] -// CHECK: [[APPLY_FOR_BOX:%.*]] = apply %{{.*}}(%{{.*}}) : $@convention(method) (@thin AddressOnlyStruct.Type) -> AddressOnlyStruct -// CHECK: [[INIT_OPAQUE:%.*]] = init_existential_value [[APPLY_FOR_BOX]] : $AddressOnlyStruct, $AddressOnlyStruct, $EmptyP -// CHECK: store [[INIT_OPAQUE]] to [init] [[PROJ_BOX]] : $*EmptyP -// CHECK: [[APPLY_FOR_BRANCH:%.*]] = apply %{{.*}}([[ARG]]) : $@convention(method) (Bool) -> Builtin.Int1 -// CHECK: cond_br [[APPLY_FOR_BRANCH]], bb2, bb1 -// CHECK: bb1: -// CHECK: [[READ:%.*]] = begin_access [read] [unknown] [[PROJ_BOX]] : $*EmptyP -// CHECK: [[RETVAL1:%.*]] = load [copy] [[READ]] : $*EmptyP -// CHECK: br bb3([[RETVAL1]] : $EmptyP) -// CHECK: bb2: -// CHECK: [[READ:%.*]] = begin_access [read] [unknown] [[PROJ_BOX]] : $*EmptyP -// CHECK: [[RETVAL2:%.*]] = load [copy] [[READ]] : $*EmptyP -// CHECK: br bb3([[RETVAL2]] : $EmptyP) -// CHECK: bb3([[RETVAL:%.*]] : $EmptyP): -// CHECK: destroy_value [[ALLOC_OF_BOX]] -// CHECK: return [[RETVAL]] : $EmptyP -// CHECK-LABEL: } // end sil function '$s20opaque_values_silgen21s350_______addrOnlyIf1xAA6EmptyP_pSb_tF' -func s350_______addrOnlyIf(x: Bool) -> EmptyP { +// CHECK-LABEL: sil hidden [ossa] @$s20opaque_values_silgen10addrOnlyIf1xAA6EmptyP_pSb_tF : $@convention(thin) (Bool) -> @out EmptyP { +// HECK: bb0([[ARG:%.*]] : $Bool): +// HECK: [[ALLOC_OF_BOX:%.*]] = alloc_box ${ var EmptyP }, var +// HECK: [[PROJ_BOX:%.*]] = project_box [[ALLOC_OF_BOX]] +// HECK: [[APPLY_FOR_BOX:%.*]] = apply %{{.*}}(%{{.*}}) : $@convention(method) (@thin AddressOnlyStruct.Type) -> AddressOnlyStruct +// HECK: [[INIT_OPAQUE:%.*]] = init_existential_value [[APPLY_FOR_BOX]] : $AddressOnlyStruct, $AddressOnlyStruct, $EmptyP +// HECK: store [[INIT_OPAQUE]] to [init] [[PROJ_BOX]] : $*EmptyP +// HECK: [[APPLY_FOR_BRANCH:%.*]] = apply %{{.*}}([[ARG]]) : 
$@convention(method) (Bool) -> Builtin.Int1 +// HECK: cond_br [[APPLY_FOR_BRANCH]], bb2, bb1 +// HECK: bb1: +// HECK: [[READ:%.*]] = begin_access [read] [unknown] [[PROJ_BOX]] : $*EmptyP +// HECK: [[RETVAL1:%.*]] = load [copy] [[READ]] : $*EmptyP +// HECK: br bb3([[RETVAL1]] : $EmptyP) +// HECK: bb2: +// HECK: [[READ:%.*]] = begin_access [read] [unknown] [[PROJ_BOX]] : $*EmptyP +// HECK: [[RETVAL2:%.*]] = load [copy] [[READ]] : $*EmptyP +// HECK: br bb3([[RETVAL2]] : $EmptyP) +// HECK: bb3([[RETVAL:%.*]] : $EmptyP): +// HECK: destroy_value [[ALLOC_OF_BOX]] +// HECK: return [[RETVAL]] : $EmptyP +// CHECK-LABEL: } // end sil function '$s20opaque_values_silgen10addrOnlyIf1xAA6EmptyP_pSb_tF' +func addrOnlyIf(x: Bool) -> EmptyP { var a : EmptyP = AddressOnlyStruct() - + genericInout(&a) return x ? a : a } -// Tests support for guards and indirect enums for opaque values -// --- -// CHECK-LABEL: sil hidden @$s20opaque_values_silgen21s360________guardEnumyyAA08IndirectF0OyxGlF : $@convention(thin) (@guaranteed IndirectEnum) -> () { -// CHECK: bb0([[ARG:%.*]] : $IndirectEnum): -// CHECK: [[COPY__ARG:%.*]] = copy_value [[ARG]] -// CHECK: switch_enum [[COPY__ARG]] : $IndirectEnum, case #IndirectEnum.Node!enumelt: [[NODE_BB:bb[0-9]+]], case #IndirectEnum.Nil!enumelt: [[NIL_BB:bb[0-9]+]] -// -// CHECK: [[NIL_BB]]: -// CHECK: br [[NIL_TRAMPOLINE:bb[0-9]+]] -// -// CHECK: [[NIL_TRAMPOLINE]]: -// CHECK: br [[EPILOG_BB:bb[0-9]+]] -// -// CHECK: [[NODE_BB]]([[EARG:%.*]] : $<τ_0_0> { var τ_0_0 } ): -// CHECK: [[PROJ_BOX:%.*]] = project_box [[EARG]] -// CHECK: [[LOAD_BOX:%.*]] = load [take] [[PROJ_BOX]] : $*T -// CHECK: [[COPY_BOX:%.*]] = copy_value [[LOAD_BOX]] : $T -// CHECK: destroy_value [[EARG]] -// CHECK: br [[CONT_BB:bb[0-9]+]] -// -// CHECK: [[CONT_BB]]: -// CHECK: destroy_value [[COPY_BOX]] -// CHECK: br [[EPILOG_BB]] -// -// CHECK: [[EPILOG_BB]]: -// CHECK-NOT: destroy_value [[ARG]] -// CHECK: return %{{.*}} : $() -// CHECK-LABEL: } // end sil function '$s20opaque_values_silgen21s360________guardEnumyyAA08IndirectF0OyxGlF' -func s360________guardEnum(_ e: IndirectEnum) { - do { - guard case .Node(let x) = e else { return } - _ = x - } -} - -// Tests contextual init() of opaque value types -// --- -// CHECK-LABEL: sil hidden @$s20opaque_values_silgen21s370_____optToOptCastyxSgAClF : $@convention(thin) (@in_guaranteed Optional) -> @out Optional { -// CHECK: bb0([[ARG:%.*]] : $Optional): -// CHECK: [[COPY__ARG:%.*]] = copy_value [[ARG]] -// CHECK-NOT: destroy_value [[ARG]] -// CHECK: return [[COPY__ARG]] : $Optional -// CHECK-LABEL: } // end sil function '$s20opaque_values_silgen21s370_____optToOptCastyxSgAClF' -func s370_____optToOptCast(_ x : T!) -> T? { - return x -} - -// Tests casting optional opaques to optional opaques -// --- -// CHECK-LABEL: sil hidden @$s20opaque_values_silgen21s380___contextualInityySiSgF : $@convention(thin) (Optional) -> () { -// CHECK: bb0([[ARG:%.*]] : $Optional): -// CHECK: [[ALLOC_OF_BOX:%.*]] = alloc_box ${ var Optional }, var -// CHECK: [[PROJ_BOX:%.*]] = project_box [[ALLOC_OF_BOX]] -// CHECK: store [[ARG]] to [trivial] [[PROJ_BOX]] : $*Optional -// CHECK: destroy_value [[ALLOC_OF_BOX]] -// CHECK: return %{{.*}} : $() -// CHECK-LABEL: } // end sil function '$s20opaque_values_silgen21s380___contextualInityySiSgF' -func s380___contextualInit(_ a : Int?) { - var x: Int! 
= a - _ = x -} - -// Tests opaque call result types -// --- -// CHECK-LABEL: sil hidden @$s20opaque_values_silgen21s390___addrCallResultyyxycSglF : $@convention(thin) (@guaranteed Optional<@callee_guaranteed () -> @out T>) -> () { -// CHECK: bb0([[ARG:%.*]] : $Optional<@callee_guaranteed () -> @out T>): -// CHECK: [[ALLOC_OF_BOX:%.*]] = alloc_box $<τ_0_0> { var Optional<τ_0_0> } -// CHECK: [[PROJ_BOX:%.*]] = project_box [[ALLOC_OF_BOX]] -// CHECK: [[COPY__ARG:%.*]] = copy_value [[ARG]] -// CHECK: [[SENUM:%.*]] = select_enum [[COPY__ARG]] -// CHECK: cond_br [[SENUM]], bb3, bb1 -// CHECK: bb1: -// CHECK: br bb2 -// CHECK: bb2: -// CHECK: [[ONONE:%.*]] = enum $Optional, #Optional.none!enumelt -// CHECK: br bb4([[ONONE]] : $Optional) -// CHECK: bb4(%{{.*}} : $Optional): -// CHECK: return %{{.*}} : $() -// CHECK-LABEL: } // end sil function '$s20opaque_values_silgen21s390___addrCallResultyyxycSglF' -func s390___addrCallResult(_ f: (() -> T)?) { - var x = f?() - _ = x -} - -// Tests reabstraction / partial apply of protocols under opaque value mode -// --- -// CHECK-LABEL: sil hidden @$s20opaque_values_silgen21s400______maybeCloneP1cyAA8Clonable_p_tF : $@convention(thin) (@in_guaranteed Clonable) -> () { -// CHECK: bb0([[ARG:%.*]] : $Clonable): -// CHECK: [[OPEN_ARG:%.*]] = open_existential_value [[ARG]] : $Clonable -// CHECK: [[APPLY_OPAQUE:%.*]] = apply %{{.*}}<@opened({{.*}}) Clonable>([[OPEN_ARG]]) : $@convention(thin) <τ_0_0 where τ_0_0 : Clonable> (@in_guaranteed τ_0_0) -> @owned @callee_guaranteed () -> @out Optional<τ_0_0> -// CHECK: [[PAPPLY:%.*]] = partial_apply [callee_guaranteed] %{{.*}}<@opened({{.*}}) Clonable>([[APPLY_OPAQUE]]) : $@convention(thin) <τ_0_0 where τ_0_0 : Clonable> (@guaranteed @callee_guaranteed () -> @out Optional<τ_0_0>) -> @out Optional -// CHECK-NOT: destroy_value [[ARG]] -// CHECK: return %{{.*}} : $() -// CHECK-LABEL: } // end sil function '$s20opaque_values_silgen21s400______maybeCloneP1cyAA8Clonable_p_tF' -func s400______maybeCloneP(c: Clonable) { - let _: () -> Clonable? 
= c.maybeClone -} - -// Tests global opaque values / subscript rvalues -// --- -// CHECK-LABEL: sil hidden @$s20opaque_values_silgen21s410__globalRvalueGetyS2iF : $@convention(thin) (Int) -> Int { -// CHECK: bb0([[ARG:%.*]] : $Int): -// CHECK: [[GLOBAL_ADDR:%.*]] = global_addr @$s20opaque_values_silgen16subscriptableGetAA013SubscriptableE0_pvp : $*SubscriptableGet -// CHECK: [[READ:%.*]] = begin_access [read] [dynamic] [[GLOBAL_ADDR]] : $*SubscriptableGet -// CHECK: [[OPEN_ARG:%.*]] = open_existential_addr immutable_access [[READ]] : $*SubscriptableGet to $*@opened -// CHECK: [[GET_OPAQUE:%.*]] = load [copy] [[OPEN_ARG]] : $*@opened -// CHECK: [[RETVAL:%.*]] = apply %{{.*}}<@opened({{.*}}) SubscriptableGet>([[ARG]], [[GET_OPAQUE]]) : $@convention(witness_method: SubscriptableGet) <τ_0_0 where τ_0_0 : SubscriptableGet> (Int, @in_guaranteed τ_0_0) -> Int -// CHECK: destroy_value [[GET_OPAQUE]] -// CHECK: return [[RETVAL]] : $Int -// CHECK-LABEL: } // end sil function '$s20opaque_values_silgen21s410__globalRvalueGetyS2iF' -func s410__globalRvalueGet(_ i : Int) -> Int { - return subscriptableGet[i] -} - -// Tests global opaque values / subscript lvalues -// --- -// CHECK-LABEL: sil hidden @$s20opaque_values_silgen21s420__globalLvalueGetyS2iF : $@convention(thin) (Int) -> Int { -// CHECK: bb0([[ARG:%.*]] : $Int): -// CHECK: [[GLOBAL_ADDR:%.*]] = global_addr @$s20opaque_values_silgen19subscriptableGetSetAA013SubscriptableeF0_pvp : $*SubscriptableGetSet -// CHECK: [[READ:%.*]] = begin_access [read] [dynamic] [[GLOBAL_ADDR]] : $*SubscriptableGetSet -// CHECK: [[OPEN_ARG:%.*]] = open_existential_addr immutable_access [[READ]] : $*SubscriptableGetSet to $*@opened -// CHECK: [[GET_OPAQUE:%.*]] = load [copy] [[OPEN_ARG]] : $*@opened -// CHECK: [[RETVAL:%.*]] = apply %{{.*}}<@opened({{.*}}) SubscriptableGetSet>([[ARG]], [[GET_OPAQUE]]) : $@convention(witness_method: SubscriptableGetSet) <τ_0_0 where τ_0_0 : SubscriptableGetSet> (Int, @in_guaranteed τ_0_0) -> Int -// CHECK: destroy_value [[GET_OPAQUE]] -// CHECK: return [[RETVAL]] : $Int -// CHECK-LABEL: } // end sil function '$s20opaque_values_silgen21s420__globalLvalueGetyS2iF' -func s420__globalLvalueGet(_ i : Int) -> Int { - return subscriptableGetSet[i] -} - -// Tests tuple transformation -// --- -// CHECK-LABEL: sil hidden @$s20opaque_values_silgen21s430_callUnreachableF1tyx_tlF : $@convention(thin) (@in_guaranteed T) -> () { -// CHECK: bb0([[ARG:%.*]] : $T): -// CHECK: [[APPLY_T:%.*]] = apply %{{.*}}<((T) -> (), T)>() : $@convention(thin) <τ_0_0> () -> @out Optional<(Int, τ_0_0)> -// CHECK: switch_enum [[APPLY_T]] : $Optional<(Int, (@callee_guaranteed (@in_guaranteed T) -> @out (), T))>, case #Optional.some!enumelt: bb2, case #Optional.none!enumelt: bb1 -// CHECK: bb2([[ENUMARG:%.*]] : $(Int, (@callee_guaranteed (@in_guaranteed T) -> @out (), T))): -// CHECK: ([[TELEM0:%.*]], [[TELEM1:%.*]]) = destructure_tuple [[ENUMARG]] : $(Int, (@callee_guaranteed (@in_guaranteed T) -> @out (), T)) -// CHECK: ([[TELEM10:%.*]], [[TELEM11:%.*]]) = destructure_tuple [[TELEM1]] : $(@callee_guaranteed (@in_guaranteed T) -> @out (), T) -// CHECK: [[PAPPLY:%.*]] = partial_apply [callee_guaranteed] %{{.*}}([[TELEM10]]) : $@convention(thin) <τ_0_0> (@in_guaranteed τ_0_0, @guaranteed @callee_guaranteed (@in_guaranteed τ_0_0) -> @out ()) -> () -// CHECK: [[NEWT0:%.*]] = tuple ([[PAPPLY]] : $@callee_guaranteed (@in_guaranteed T) -> (), [[TELEM11]] : $T) -// CHECK: [[NEWT1:%.*]] = tuple ([[TELEM0]] : $Int, [[NEWT0]] : $(@callee_guaranteed (@in_guaranteed T) -> (), T)) -// 
CHECK: [[NEWENUM:%.*]] = enum $Optional<(Int, (@callee_guaranteed (@in_guaranteed T) -> (), T))>, #Optional.some!enumelt, [[NEWT1]] : $(Int, (@callee_guaranteed (@in_guaranteed T) -> (), T)) -// CHECK: br bb3([[NEWENUM]] : $Optional<(Int, (@callee_guaranteed (@in_guaranteed T) -> (), T))>) -// CHECK: bb3([[ENUMIN:%.*]] : $Optional<(Int, (@callee_guaranteed (@in_guaranteed T) -> (), T))>): -// CHECK: destroy_value [[ENUMIN]] -// CHECK: return %{{.*}} : $() -// CHECK-LABEL: } // end sil function '$s20opaque_values_silgen21s430_callUnreachableF1tyx_tlF' -func s430_callUnreachableF(t: T) { - let _: (Int, ((T) -> (), T))? = unreachableF() -} - -// Further testing for conditional checked cast under opaque value mode - make sure we don't create a buffer for results +// Tests LValue of error types / existential boxes // --- -// CHECK-LABEL: sil hidden @$s20opaque_values_silgen21s440__cleanupEmissionyyxlF : $@convention(thin) (@in_guaranteed T) -> () { -// CHECK: bb0([[ARG:%.*]] : $T): -// CHECK: [[COPY_ARG:%.*]] = copy_value [[ARG]] -// CHECK: checked_cast_value_br [[COPY_ARG]] : $T to $EmptyP, bb2, bb1 -// -// CHECK: bb2([[PTYPE:%.*]] : $EmptyP): -// CHECK: [[PSOME:%.*]] = enum $Optional, #Optional.some!enumelt, [[PTYPE]] : $EmptyP -// CHECK: br bb3([[PSOME]] : $Optional) -// -// CHECK: bb3([[ENUMRES:%.*]] : $Optional): -// CHECK: switch_enum [[ENUMRES]] : $Optional, case #Optional.some!enumelt: [[SOME_BB:bb[0-9]+]], case #Optional.none!enumelt: [[NONE_BB:bb[0-9]+]] -// -// CHECK: [[NONE_BB]]: -// CHECK: br [[NONE_TRAMPOLINE:bb[0-9]+]] -// -// CHECK: [[NONE_TRAMPOLINE]]: -// CHECK: br [[EPILOG_BB:bb[0-9]+]] -// -// CHECK: [[SOME_BB]]([[ENUMRES2:%.*]] : $EmptyP): -// CHECK: br [[CONT_BB:bb[0-9]+]] -// -// CHECK: [[CONT_BB]]: -// CHECK: destroy_value [[ENUMRES2]] -// CHECK: br [[EPILOG_BB]] -// -// CHECK: [[EPILOG_BB]]: -// CHECK-NOT: destroy_value [[ARG]] -// CHECK: return %{{.*}} : $() -// CHECK-LABEL: } // end sil function '$s20opaque_values_silgen21s440__cleanupEmissionyyxlF' -func s440__cleanupEmission(_ x: T) { - guard let x2 = x as? EmptyP else { return } - _ = x2 +// CHECK-LABEL: sil hidden [ossa] @$s20opaque_values_silgen12propOfLValueySSs5Error_pF : $@convention(thin) (@guaranteed Error) -> @owned String { +// HECK: bb0([[ARG:%.*]] : $Error): +// HECK: [[ALLOC_OF_BOX:%.*]] = alloc_box ${ var Error } +// HECK: [[PROJ_BOX:%.*]] = project_box [[ALLOC_OF_BOX]] +// HECK: [[COPY_ARG:%.*]] = copy_value [[ARG]] +// HECK: store [[COPY_ARG]] to [init] [[PROJ_BOX]] +// HECK: [[READ:%.*]] = begin_access [read] [unknown] [[PROJ_BOX]] : $*Error +// HECK: [[LOAD_BOX:%.*]] = load [copy] [[READ]] +// HECK: [[OPAQUE_ARG:%.*]] = open_existential_box [[LOAD_BOX]] : $Error to $*@opened({{.*}}) Error +// HECK: [[LOAD_OPAQUE:%.*]] = load [copy] [[OPAQUE_ARG]] +// HECK: [[ALLOC_OPEN:%.*]] = alloc_stack $@opened({{.*}}) Error +// HECK: store [[LOAD_OPAQUE]] to [init] [[ALLOC_OPEN]] +// HECK: [[RET_VAL:%.*]] = apply {{.*}}<@opened({{.*}}) Error>([[ALLOC_OPEN]]) +// HECK: return [[RET_VAL]] : $String +// CHECK-LABEL: } // end sil function '$s20opaque_values_silgen12propOfLValueySSs5Error_pF' +func propOfLValue(_ x: Error) -> String { + var x = x + genericInout(&x) + return x._domain } // Test SILGenBuilder.loadCopy(). 
// --- -// CHECK-LABEL: sil hidden @$s20opaque_values_silgen21s450__________lastValyxxd_tlF : $@convention(thin) (@guaranteed Array) -> @out T -// CHECK: [[LOAD:%.*]] = load [copy] %{{.*}} : $*T -// CHECK: return [[LOAD]] : $T -// CHECK-LABEL: } // end sil function '$s20opaque_values_silgen21s450__________lastValyxxd_tlF' -func s450__________lastVal(_ rest: T...) -> T { +// CHECK-LABEL: sil hidden [ossa] @$s20opaque_values_silgen7lastValyxxd_tlF : $@convention(thin) (@guaranteed Array) -> @out T { +// HECK: [[LOAD:%.*]] = load [copy] %{{.*}} : $*T +// HECK: return [[LOAD]] : $T +// CHECK-LABEL: } // end sil function '$s20opaque_values_silgen7lastValyxxd_tlF' +func lastVal(_ rest: T...) -> T { var minValue: T for value in rest { minValue = value @@ -919,269 +162,76 @@ func s450__________lastVal(_ rest: T...) -> T { // Test SILGenFunction::emitPointerToPointer. // --- -// CHECK-LABEL: sil hidden @$s20opaque_values_silgen21s460______________foo1pSRyxGSPyxG_tlF : $@convention(thin) (UnsafePointer) -> UnsafeBufferPointer { -// CHECK: [[F:%.*]] = function_ref @$ss017_convertPointerToB8Argumentyq_xs01_B0RzsABR_r0_lF : $@convention(thin) <τ_0_0, τ_0_1 where τ_0_0 : _Pointer, τ_0_1 : _Pointer> (@in_guaranteed τ_0_0) -> @out τ_0_1 -// CHECK: apply [[F]], UnsafePointer>(%0) : $@convention(thin) <τ_0_0, τ_0_1 where τ_0_0 : _Pointer, τ_0_1 : _Pointer> (@in_guaranteed τ_0_0) -> @out τ_0_1 -// CHECK-LABEL: } // end sil function '$s20opaque_values_silgen21s460______________foo1pSRyxGSPyxG_tlF' -func s460______________foo(p: UnsafePointer) -> UnsafeBufferPointer { +// CHECK-LABEL: sil hidden [ossa] @$s20opaque_values_silgen3foo1pSRyxGSPyxG_tlF : $@convention(thin) (UnsafePointer) -> UnsafeBufferPointer { +// HECK: [[F:%.*]] = function_ref @$sconvertPointerToB8Argumentyq_xB0RzsABR_r0_lF : $@convention(thin) <τ_0_0, τ_0_1 where τ_0_0 : _Pointer, τ_0_1 : _Pointer> (@in_guaranteed τ_0_0) -> @out τ_0_1 +// HECK: apply [[F]], UnsafePointer>(%0) : $@convention(thin) <τ_0_0, τ_0_1 where τ_0_0 : _Pointer, τ_0_1 : _Pointer> (@in_guaranteed τ_0_0) -> @out τ_0_1 +// CHECK-LABEL: } // end sil function '$s20opaque_values_silgen3foo1pSRyxGSPyxG_tlF' +func foo(p: UnsafePointer) -> UnsafeBufferPointer { return UnsafeBufferPointer(start: p, count: 1) } -// Test emitNativeToCBridgedNonoptionalValue. -// --- -// CHECK-objc-LABEL: sil hidden @$s20opaque_values_silgen21s470________nativeToC7fromAnyyXlyp_tF : $@convention(thin) (@in_guaranteed Any) -> @owned AnyObject { -// CHECK-objc: bb0(%0 : $Any): -// CHECK-objc: [[BORROW:%.*]] = begin_borrow %0 : $Any -// CHECK-objc: [[SRC:%.*]] = copy_value [[BORROW]] : $Any -// CHECK-objc: [[OPEN:%.*]] = open_existential_opaque [[SRC]] : $Any to $@opened -// CHECK-objc: [[COPY:%.*]] = copy_value [[OPEN]] : $@opened -// CHECK-objc: [[F:%.*]] = function_ref @$ss27_bridgeAnythingToObjectiveCyyXlxlF : $@convention(thin) <τ_0_0> (@in_guaranteed τ_0_0) -> @owned AnyObject -// CHECK-objc: [[RET:%.*]] = apply [[F]]<@opened("{{.*}}") Any>([[COPY]]) : $@convention(thin) <τ_0_0> (@in_guaranteed τ_0_0) -> @owned AnyObject -// CHECK-objc: destroy_value [[SRC]] : $Any -// CHECK-objc: destroy_value %0 : $Any -// CHECK-objc: return [[RET]] : $AnyObject -// CHECK-objc-LABEL: } // end sil function '$s20opaque_values_silgen21s470________nativeToC7fromAnyyXlyp_tF' -#if _runtime(_ObjC) -func s470________nativeToC(fromAny any: Any) -> AnyObject { - return any as AnyObject -} -#endif - -// Test emitOpenExistential. +// Test SILBuilder.createLoadBorrow. 
// --- -// CHECK-LABEL: sil hidden @$s20opaque_values_silgen21s480_________getError04someF0yps0F0_p_tF : $@convention(thin) (@guaranteed Error) -> @out Any { -// CHECK: bb0([[ARG:%.*]] : $Error): -// CHECK: [[VAL:%.*]] = open_existential_box_value [[ARG]] : $Error to $@opened("{{.*}}") Error -// CHECK: [[COPY:%.*]] = copy_value [[VAL]] : $@opened("{{.*}}") Error -// CHECK: [[ANY:%.*]] = init_existential_value [[COPY]] : $@opened("{{.*}}") Error, $@opened("{{.*}}") Error, $Any -// CHECK-NOT: destroy_value [[ARG]] : $Error -// CHECK: return [[ANY]] : $Any -// CHECK-LABEL: } // end sil function '$s20opaque_values_silgen21s480_________getError04someF0yps0F0_p_tF' -func s480_________getError(someError: Error) -> Any { - return someError +protocol FooP { + func foo() } -// Test SILBuilder.createLoadBorrow. -// --- -// CHECK-LABEL: sil private @$s20opaque_values_silgen21s490_______loadBorrowyyF3FooL_V3foo3pos7ElementQzSg5IndexQz_tF : $@convention(method) (@in_guaranteed Elements.Index, @inout Foo) -> @out Optional { -// CHECK: bb0([[ARG0:%.*]] : $Elements.Index, [[ARG1:%.*]] : $*Foo): -// CHECK: [[READ:%.*]] = begin_access [read] [unknown] [[ARG1]] : $*Foo -// CHECK: [[LOAD:%.*]] = load [copy] [[READ]] : $*Foo -// CHECK: end_access [[READ]] : $*Foo -// CHECK: [[BORROW_LOAD:%.*]] = begin_borrow [[LOAD]] -// CHECK: [[EXTRACT:%.*]] = struct_extract [[BORROW_LOAD]] : $Foo, #Foo._elements -// CHECK: [[COPYELT:%.*]] = copy_value [[EXTRACT]] : $Elements -// CHECK: [[COPYIDX:%.*]] = copy_value [[ARG0]] : $Elements.Index -// CHECK: [[WT:%.*]] = witness_method $Elements, #Collection.subscript!getter : (Self) -> (Self.Index) -> Self.Element : $@convention(witness_method: Collection) <τ_0_0 where τ_0_0 : Collection> (@in_guaranteed τ_0_0.Index, @in_guaranteed τ_0_0) -> @out τ_0_0.Element -// CHECK: [[RESULT:%.*]] = apply [[WT]]([[COPYIDX]], [[COPYELT]]) : $@convention(witness_method: Collection) <τ_0_0 where τ_0_0 : Collection> (@in_guaranteed τ_0_0.Index, @in_guaranteed τ_0_0) -> @out τ_0_0.Element -// CHECK: destroy_value [[COPYELT]] : $Elements -// CHECK: [[ENUM_RESULT:%.*]] = enum $Optional, #Optional.some!enumelt, [[RESULT]] : $Elements.Element -// CHECK: destroy_value [[LOAD]] +// CHECK-LABEL: sil private [ossa] @$s20opaque_values_silgen10loadBorrowyyF4FooPL_V3foo3pos7ElementQzSg5IndexQz_tF : $@convention(method) (@in_guaranteed Elements.Index, @inout FooP) -> @out Optional { +// CHECK: bb0([[ARG0:%.*]] : @guaranteed $Elements.Index, [[ARG1:%.*]] : $*FooP): +// HECK: [[READ:%.*]] = begin_access [read] [unknown] [[ARG1]] : $*FooP +// HECK: [[LOAD:%.*]] = load [copy] [[READ]] : $*FooP +// HECK: end_access [[READ]] : $*FooP +// HECK: [[BORROW_LOAD:%.*]] = begin_borrow [[LOAD]] +// HECK: [[EXTRACT:%.*]] = struct_extract [[BORROW_LOAD]] : $FooP, #FooP._elements +// HECK: [[COPYELT:%.*]] = copy_value [[EXTRACT]] : $Elements +// HECK: [[COPYIDX:%.*]] = copy_value [[ARG0]] : $Elements.Index +// HECK: [[WT:%.*]] = witness_method $Elements, #Collection.subscript!getter : (Self) -> (Self.Index) -> Self.Element : $@convention(witness_method: Collection) <τ_0_0 where τ_0_0 : Collection> (@in_guaranteed τ_0_0.Index, @in_guaranteed τ_0_0) -> @out τ_0_0.Element +// HECK: [[RESULT:%.*]] = apply [[WT]]([[COPYIDX]], [[COPYELT]]) : $@convention(witness_method: Collection) <τ_0_0 where τ_0_0 : Collection> (@in_guaranteed τ_0_0.Index, @in_guaranteed τ_0_0) -> @out τ_0_0.Element +// HECK: destroy_value [[COPYELT]] : $Elements +// HECK: [[ENUM_RESULT:%.*]] = enum $Optional, #Optional.some!enumelt, [[RESULT]] : 
$Elements.Element +// HECK: destroy_value [[LOAD]] // CHECK-NOT: destroy_value [[ARG0]] : $Elements.Index -// CHECK: return [[ENUM_RESULT]] : $Optional -// CHECK-LABEL: } // end sil function '$s20opaque_values_silgen21s490_______loadBorrowyyF3FooL_V3foo3pos7ElementQzSg5IndexQz_tF' - -func s490_______loadBorrow() { - struct Foo { +// HECK: return [[ENUM_RESULT]] : $Optional +// CHECK-LABEL: } // end sil function '$s20opaque_values_silgen10loadBorrowyyF4FooPL_V3foo3pos7ElementQzSg5IndexQz_tF' +func loadBorrow() { + struct FooP { internal let _elements: Elements public mutating func foo(pos: Elements.Index) -> Elements.Element? { return _elements[pos] } } - var foo = Foo(_elements: []) + var foo = FooP(_elements: []) _ = foo.foo(pos: 1) } -protocol ConvertibleToP { - func asP() -> P -} - -// Test visitBindOptionalExpr -// --- -// CHECK-LABEL: sil hidden @$s20opaque_values_silgen21s500_______getAnyHashyAA1P_pSgAA14ConvertibleToP_pSgF : $@convention(thin) (@in_guaranteed Optional) -> @out Optional
<P>
{ -// CHECK: bb0(%0 : $Optional): -// CHECK: [[COPY:%.*]] = copy_value [[ARG]] : $Optional -// CHECK: [[DATA:%.*]] = unchecked_enum_data [[COPY]] : $Optional, #Optional.some!enumelt -// CHECK: [[BORROW_DATA:%.*]] = begin_borrow [[DATA]] : $ConvertibleToP -// CHECK: [[VAL:%.*]] = open_existential_value [[BORROW_DATA]] : $ConvertibleToP to $@opened("{{.*}}") ConvertibleToP -// CHECK: [[WT:%.*]] = witness_method $@opened("{{.*}}") ConvertibleToP, #ConvertibleToP.asP : (Self) -> () -> P, [[VAL]] : $@opened("{{.*}}") ConvertibleToP : $@convention(witness_method: ConvertibleToP) <τ_0_0 where τ_0_0 : ConvertibleToP> (@in_guaranteed τ_0_0) -> @out P -// CHECK: [[AS_P:%.*]] = apply [[WT]]<@opened("{{.*}}") ConvertibleToP>([[VAL]]) : $@convention(witness_method: ConvertibleToP) <τ_0_0 where τ_0_0 : ConvertibleToP> (@in_guaranteed τ_0_0) -> @out P -// CHECK: [[ENUM:%.*]] = enum $Optional
<P>
, #Optional.some!enumelt, [[AS_P]] : $P -// CHECK: destroy_value [[DATA]] : $ConvertibleToP -// CHECK: br bb{{.*}}([[ENUM]] : $Optional
<P>
) -// CHECK: // end sil function '$s20opaque_values_silgen21s500_______getAnyHashyAA1P_pSgAA14ConvertibleToP_pSgF' -func s500_______getAnyHash(_ value: ConvertibleToP?) -> P? { - return value?.asP() -} - -public protocol FooP { - func foo() -> Self -} - -// Test emitting a protocol witness for a method (with @in_guaranteed self) on a dependent generic type. -// --- -// CHECK-LABEL: sil private [transparent] [thunk] @$s20opaque_values_silgen21s510_______OpaqueSelfVyxGAA4FooPA2aEP3fooxyFTW : $@convention(witness_method: FooP) <τ_0_0> (@in_guaranteed s510_______OpaqueSelf<τ_0_0>) -> @out s510_______OpaqueSelf<τ_0_0> { -// CHECK: bb0(%0 : $s510_______OpaqueSelf<τ_0_0>): -// CHECK: [[FN:%.*]] = function_ref @$s20opaque_values_silgen21s510_______OpaqueSelfV3fooACyxGyF : $@convention(method) <τ_0_0> (@in_guaranteed s510_______OpaqueSelf<τ_0_0>) -> @out s510_______OpaqueSelf<τ_0_0> -// CHECK: [[RESULT:%.*]] = apply [[FN]]<τ_0_0>(%0) : $@convention(method) <τ_0_0> (@in_guaranteed s510_______OpaqueSelf<τ_0_0>) -> @out s510_______OpaqueSelf<τ_0_0> -// CHECK: return [[RESULT]] : $s510_______OpaqueSelf<τ_0_0> -// CHECK-LABEL: } // end sil function '$s20opaque_values_silgen21s510_______OpaqueSelfVyxGAA4FooPA2aEP3fooxyFTW' -struct s510_______OpaqueSelf : FooP { - var x: Base - - func foo() -> s510_______OpaqueSelf { - return self - } -} - -// Tests conditional value casts and correspondingly generated reabstraction thunk, with types -// --- -// CHECK-LABEL: sil hidden @$s20opaque_values_silgen21s999_____condTFromAnyyyyp_xtlF : $@convention(thin) (@in_guaranteed Any, @in_guaranteed T) -> () { -// CHECK: bb0([[ARG0:%.*]] : $Any, [[ARG1:%.*]] : $T): -// CHECK: [[COPY__ARG:%.*]] = copy_value [[ARG]] -// CHECK: checked_cast_value_br [[COPY__ARG]] : $Any to $@callee_guaranteed (@in_guaranteed (Int, T)) -> @out (Int, T), bb2, bb1 -// CHECK: bb2([[THUNK_PARAM:%.*]] : $@callee_guaranteed (@in_guaranteed (Int, T)) -> @out (Int, T)): -// CHECK: [[THUNK_REF:%.*]] = function_ref @{{.*}} : $@convention(thin) <τ_0_0> (Int, @in_guaranteed τ_0_0, @guaranteed @callee_guaranteed (@in_guaranteed (Int, τ_0_0)) -> @out (Int, τ_0_0)) -> (Int, @out τ_0_0) -// CHECK: partial_apply [callee_guaranteed] [[THUNK_REF]]([[THUNK_PARAM]]) -// CHECK: bb6: -// CHECK: return %{{.*}} : $() -// CHECK-LABEL: } // end sil function '$s20opaque_values_silgen21s999_____condTFromAnyyyyp_xtlF' -func s999_____condTFromAny(_ x: Any, _ y: T) { - if let f = x as? (Int, T) -> (Int, T) { - _ = f(42, y) - } -} - -// Make sure that we insert a destroy of the box even though we used an Int type. 
-// CHECK-LABEL: sil @$s20opaque_values_silgen22s020_______assignToVaryyF : $@convention(thin) () -> () { -// CHECK: bb0: -// CHECK: [[Y_BOX:%.*]] = alloc_box ${ var Int }, var, name "y" -// CHECK: [[PROJECT_Y_BOX:%.*]] = project_box [[Y_BOX]] : ${ var Int }, 0 -// CHECK: [[X_BOX:%.*]] = alloc_box ${ var Any }, var, name "x" -// CHECK: [[PROJECT_X_BOX:%.*]] = project_box [[X_BOX]] : ${ var Any }, 0 -// CHECK: [[ACCESS_PROJECT_Y_BOX:%.*]] = begin_access [read] [unknown] [[PROJECT_Y_BOX]] : $*Int -// CHECK: [[Y:%.*]] = load [trivial] [[ACCESS_PROJECT_Y_BOX]] : $*Int -// CHECK: [[Y_ANY_FOR_X:%.*]] = init_existential_value [[Y]] : $Int, $Int, $Any -// CHECK: store [[Y_ANY_FOR_X]] to [init] [[PROJECT_X_BOX]] -// CHECK: [[ACCESS_PROJECT_Y_BOX:%.*]] = begin_access [read] [unknown] [[PROJECT_Y_BOX]] : $*Int -// CHECK: [[Y:%.*]] = load [trivial] [[ACCESS_PROJECT_Y_BOX]] : $*Int -// CHECK: [[Y_ANY_FOR_Z:%.*]] = init_existential_value [[Y]] : $Int, $Int, $Any -// CHECK: destroy_value [[Y_ANY_FOR_Z]] -// CEHCK: destroy_value [[X_BOX]] -// CHECK: destroy_value [[Y_BOX]] -// CHECK: } // end sil function '$s20opaque_values_silgen22s020_______assignToVaryyF' -public func s020_______assignToVar() { - var y: Int = 3 - var x: Any = y - let z: Any = y -} - -// s250_________testBoxT continued Test Implicit Value Construction under Opaque value mode -// --- -// CHECK-LABEL: sil hidden @$s20opaque_values_silgen3BoxV1tACyxGx_tcfC : $@convention(method) (@in T, @thin Box.Type) -> @out Box { -// CHECK: bb0([[ARG0:%.*]] : $T, [[ARG1:%.*]] : $@thin Box.Type): -// CHECK: [[RETVAL:%.*]] = struct $Box ([[ARG0]] : $T) -// CHECK: return [[RETVAL]] : $Box -// CHECK-LABEL: } // end sil function '$s20opaque_values_silgen3BoxV1tACyxGx_tcfC' - -// s270_convOptAnyStruct continued Test: reabstraction thunk helper -// --- -// CHECK-LABEL: sil shared [transparent] [serialized] [reabstraction_thunk] @$s20opaque_values_silgen9AnyStructVSgACIegnr_A2DIegnr_TR : $@convention(thin) (@in_guaranteed Optional, @guaranteed @callee_guaranteed (@in_guaranteed Optional) -> @out AnyStruct) -> @out Optional { -// CHECK: bb0([[ARG0:%.*]] : $Optional, [[ARG1:%.*]] : $@callee_guaranteed (@in_guaranteed Optional) -> @out AnyStruct): -// CHECK: [[APPLYARG:%.*]] = apply [[ARG1]]([[ARG0]]) : $@callee_guaranteed (@in_guaranteed Optional) -> @out AnyStruct -// CHECK: [[RETVAL:%.*]] = enum $Optional, #Optional.some!enumelt, [[APPLYARG]] : $AnyStruct -// CHECK: return [[RETVAL]] : $Optional -// CHECK-LABEL: } // end sil function '$s20opaque_values_silgen9AnyStructVSgACIegnr_A2DIegnr_TR' - -// s300__convETupleToAny continued Test: reabstraction of () to Any -// --- -// CHECK-LABEL: sil shared [transparent] [serialized] [reabstraction_thunk] @$sIeg_ypIegr_TR : $@convention(thin) (@guaranteed @callee_guaranteed () -> ()) -> @out Any { -// CHECK: bb0([[ARG:%.*]] : $@callee_guaranteed () -> ()): -// CHECK: [[ASTACK:%.*]] = alloc_stack $Any -// CHECK: [[IADDR:%.*]] = init_existential_addr [[ASTACK]] : $*Any, $() -// CHECK: [[APPLYARG:%.*]] = apply [[ARG]]() : $@callee_guaranteed () -> () -// CHECK: [[LOAD_EXIST:%.*]] = load [trivial] [[IADDR]] : $*() -// CHECK: [[RETVAL:%.*]] = init_existential_value [[LOAD_EXIST]] : $(), $(), $Any -// CHECK: return [[RETVAL]] : $Any -// CHECK-LABEL: } // end sil function '$sIeg_ypIegr_TR' - -// s310_convIntTupleAny continued Test: reabstraction of non-empty tuple to Any -// --- -// CHECK-LABEL: sil shared [transparent] [serialized] [reabstraction_thunk] @$sS2iIegdd_ypIegr_TR : $@convention(thin) (@guaranteed @callee_guaranteed 
() -> (Int, Int)) -> @out Any { -// CHECK: bb0([[ARG:%.*]] : $@callee_guaranteed () -> (Int, Int)): -// CHECK: [[ASTACK:%.*]] = alloc_stack $Any -// CHECK: [[IADDR:%.*]] = init_existential_addr [[ASTACK]] : $*Any, $(Int, Int) -// CHECK: [[TADDR0:%.*]] = tuple_element_addr [[IADDR]] : $*(Int, Int), 0 -// CHECK: [[TADDR1:%.*]] = tuple_element_addr [[IADDR]] : $*(Int, Int), 1 -// CHECK: [[APPLYARG:%.*]] = apply [[ARG]]() : $@callee_guaranteed () -> (Int, Int) -// CHECK: [[TEXTRACT0:%.*]] = tuple_extract [[APPLYARG]] : $(Int, Int), 0 -// CHECK: [[TEXTRACT1:%.*]] = tuple_extract [[APPLYARG]] : $(Int, Int), 1 -// CHECK: store [[TEXTRACT0]] to [trivial] [[TADDR0]] : $*Int -// CHECK: store [[TEXTRACT1]] to [trivial] [[TADDR1]] : $*Int -// CHECK: [[LOAD_EXIST:%.*]] = load [trivial] [[IADDR]] : $*(Int, Int) -// CHECK: [[RETVAL:%.*]] = init_existential_value [[LOAD_EXIST]] : $(Int, Int), $(Int, Int), $Any -// CHECK: dealloc_stack [[ASTACK]] : $*Any -// CHECK: return [[RETVAL]] : $Any -// CHECK-LABEL: } // end sil function '$sS2iIegdd_ypIegr_TR' - - -// CHECK-LABEL: sil shared [transparent] [serialized] [reabstraction_thunk] @{{.*}} : $@convention(thin) (Int, Int, Int, Int, Int, @guaranteed @callee_guaranteed (@in_guaranteed (Int, (Int, (Int, Int)), Int)) -> @out (Int, (Int, (Int, Int)), Int)) -> (Int, Int, Int, Int, Int) -// CHECK: bb0([[ARG0:%.*]] : $Int, [[ARG1:%.*]] : $Int, [[ARG2:%.*]] : $Int, [[ARG3:%.*]] : $Int, [[ARG4:%.*]] : $Int, [[ARG5:%.*]] : $@callee_guaranteed (@in_guaranteed (Int, (Int, (Int, Int)), Int)) -> @out (Int, (Int, (Int, Int)), Int)): -// CHECK: [[TUPLE_TO_APPLY0:%.*]] = tuple ([[ARG2]] : $Int, [[ARG3]] : $Int) -// CHECK: [[TUPLE_TO_APPLY1:%.*]] = tuple ([[ARG1]] : $Int, [[TUPLE_TO_APPLY0]] : $(Int, Int)) -// CHECK: [[TUPLE_TO_APPLY2:%.*]] = tuple ([[ARG0]] : $Int, [[TUPLE_TO_APPLY1]] : $(Int, (Int, Int)), [[ARG4]] : $Int) -// CHECK: [[TUPLE_APPLY:%.*]] = apply [[ARG5]]([[TUPLE_TO_APPLY2]]) : $@callee_guaranteed (@in_guaranteed (Int, (Int, (Int, Int)), Int)) -> @out (Int, (Int, (Int, Int)), Int) -// CHECK: [[RET_VAL0:%.*]] = tuple_extract [[TUPLE_APPLY]] : $(Int, (Int, (Int, Int)), Int), 0 -// CHECK: [[TUPLE_EXTRACT1:%.*]] = tuple_extract [[TUPLE_APPLY]] : $(Int, (Int, (Int, Int)), Int), 1 -// CHECK: [[RET_VAL1:%.*]] = tuple_extract [[TUPLE_EXTRACT1]] : $(Int, (Int, Int)), 0 -// CHECK: [[TUPLE_EXTRACT2:%.*]] = tuple_extract [[TUPLE_EXTRACT1]] : $(Int, (Int, Int)), 1 -// CHECK: [[RET_VAL2:%.*]] = tuple_extract [[TUPLE_EXTRACT2]] : $(Int, Int), 0 -// CHECK: [[RET_VAL3:%.*]] = tuple_extract [[TUPLE_EXTRACT2]] : $(Int, Int), 1 -// CHECK: [[RET_VAL4:%.*]] = tuple_extract [[TUPLE_APPLY]] : $(Int, (Int, (Int, Int)), Int), 2 -// CHECK: [[RET_VAL_TUPLE:%.*]] = tuple ([[RET_VAL0]] : $Int, [[RET_VAL1]] : $Int, [[RET_VAL2]] : $Int, [[RET_VAL3]] : $Int, [[RET_VAL4]] : $Int) -// CHECK: return [[RET_VAL_TUPLE]] : $(Int, Int, Int, Int, Int) -// CHECK-LABEL: } // end sil function '{{.*}}' - -// CHECK-LABEL: sil shared [transparent] [serialized] [reabstraction_thunk] @{{.*}} : $@convention(thin) (Int, @in_guaranteed T, @guaranteed @callee_guaranteed (@in_guaranteed (Int, T)) -> @out (Int, T)) -> (Int, @out T) { -// CHECK: bb0([[ARG0:%.*]] : $Int, [[ARG1:%.*]] : $T, [[ARG2:%.*]] : $@callee_guaranteed (@in_guaranteed (Int, T)) -> @out (Int, T)): -// CHECK: [[TUPLE_TO_APPLY:%.*]] = tuple ([[ARG0]] : $Int, [[ARG1]] : $T) -// CHECK: [[TUPLE_APPLY:%.*]] = apply [[ARG2]]([[TUPLE_TO_APPLY]]) : $@callee_guaranteed (@in_guaranteed (Int, T)) -> @out (Int, T) -// CHECK: [[TUPLE_BORROW:%.*]] = begin_borrow 
[[TUPLE_APPLY]] : $(Int, T) -// CHECK: [[RET_VAL0:%.*]] = tuple_extract [[TUPLE_BORROW]] : $(Int, T), 0 -// CHECK: [[TUPLE_EXTRACT:%.*]] = tuple_extract [[TUPLE_BORROW]] : $(Int, T), 1 -// CHECK: [[RET_VAL1:%.*]] = copy_value [[TUPLE_EXTRACT]] : $T -// CHECK: end_borrow [[TUPLE_BORROW]] : $(Int, T) -// CHECK: destroy_value [[TUPLE_APPLY]] : $(Int, T) -// CHECK: [[RET_VAL_TUPLE:%.*]] = tuple ([[RET_VAL0]] : $Int, [[RET_VAL1]] : $T) -// CHECK: return [[RET_VAL_TUPLE]] : $(Int, T) -// CHECK-LABEL: } // end sil function '{{.*}}' - // Tests LogicalPathComponent's writeback for opaque value types // --- -// CHECK-LABEL: sil @$sSD20opaque_values_silgenE22inoutAccessOfSubscript3keyyq__tF : $@convention(method) (@in_guaranteed Value, @inout Dictionary) -> () { -// CHECK: bb0([[ARG0:%.*]] : $Value, [[ARG1:%.*]] : $*Dictionary): -// CHECK: [[WRITE:%.*]] = begin_access [modify] [unknown] [[ARG1]] : $*Dictionary -// CHECK: [[OPTIONAL_ALLOC:%.*]] = alloc_stack $Optional -// CHECK: switch_enum_addr [[OPTIONAL_ALLOC]] : $*Optional, case #Optional.some!enumelt: bb2, case #Optional.none!enumelt: bb1 -// CHECK: bb2: -// CHECK: [[OPTIONAL_LOAD:%.*]] = load [take] [[OPTIONAL_ALLOC]] : $*Optional -// CHECK: apply {{.*}}([[OPTIONAL_LOAD]], {{.*}}, [[WRITE]]) : $@convention(method) <τ_0_0, τ_0_1 where τ_0_0 : Hashable> (@in Optional<τ_0_1>, @in τ_0_1, @inout Dictionary<τ_0_0, τ_0_1>) -> () -// CHECK: return %{{.*}} : $() -// CHECK-LABEL: } // end sil function '$sSD20opaque_values_silgenE22inoutAccessOfSubscript3keyyq__tF' +// Dictionary.subscript.getter +// CHECK-LABEL: sil [always_inline] [ossa] @$sSD20opaque_values_silgenEyq_Sgq_cig : $@convention(method) (@in_guaranteed Value, @guaranteed Dictionary) -> @out Optional { +// HECK: bb0([[ARG0:%.*]] : $Value, [[ARG1:%.*]] : $*Dictionary): +// HECK: [[WRITE:%.*]] = begin_access [modify] [unknown] [[ARG1]] : $*Dictionary +// HECK: [[OPTIONAL_ALLOC:%.*]] = alloc_stack $Optional +// HECK: switch_enum_addr [[OPTIONAL_ALLOC]] : $*Optional, case #Optional.some!enumelt: bb2, case #Optional.none!enumelt: bb1 +// HECK: bb2: +// HECK: [[OPTIONAL_LOAD:%.*]] = load [take] [[OPTIONAL_ALLOC]] : $*Optional +// HECK: apply {{.*}}([[OPTIONAL_LOAD]], {{.*}}, [[WRITE]]) : $@convention(method) <τ_0_0, τ_0_1 where τ_0_0 : Hashable> (@in Optional<τ_0_1>, @in τ_0_1, @inout Dictionary<τ_0_0, τ_0_1>) -> () +// HECK: return %{{.*}} : $() +// CHECK-LABEL: } // end sil function '$sSD20opaque_values_silgenEyq_Sgq_cig' // Tests materializeForSet's createSetterCallback for opaque values // --- -// CHECK-LABEL: sil shared [transparent] [serialized] @$sSD20opaque_values_silgenEyq_Sgq_cimytfU_ : $@convention(method) (Builtin.RawPointer, @inout Builtin.UnsafeValueBuffer, @inout Dictionary, @thick Dictionary.Type) -> () { -// CHECK: bb0([[ARG0:%.*]] : $Builtin.RawPointer, [[ARG1:%.*]] : $*Builtin.UnsafeValueBuffer, [[ARG2:%.*]] : $*Dictionary, [[ARG3:%.*]] : $@thick Dictionary.Type): -// CHECK: [[PROJ_VAL1:%.*]] = project_value_buffer $Value in [[ARG1]] : $*Builtin.UnsafeValueBuffer -// CHECK: [[LOAD_VAL1:%.*]] = load [take] [[PROJ_VAL1]] : $*Value -// CHECK: [[ADDR_VAL0:%.*]] = pointer_to_address [[ARG0]] : $Builtin.RawPointer to [strict] $*Optional -// CHECK: [[LOAD_VAL0:%.*]] = load [take] [[ADDR_VAL0]] : $*Optional -// CHECK: apply {{.*}}([[LOAD_VAL0]], [[LOAD_VAL1]], [[ARG2]]) : $@convention(method) <τ_0_0, τ_0_1 where τ_0_0 : Hashable> (@in Optional<τ_0_1>, @in τ_0_1, @inout Dictionary<τ_0_0, τ_0_1>) -> () -// CHECK: return %{{.*}} : $() -// CHECK-LABEL: } // end sil function 
'$sSD20opaque_values_silgenEyq_Sgq_cimytfU_' +// Dictionary.subscript.setter +// CHECK-LABEL: sil [ossa] @$sSD20opaque_values_silgenEyq_Sgq_cis : $@convention(method) (@in Optional, @in Value, @inout Dictionary) -> () { +// HECK: bb0([[ARG0:%.*]] : $Builtin.RawPointer, [[ARG1:%.*]] : $*Builtin.UnsafeValueBuffer, [[ARG2:%.*]] : $*Dictionary, [[ARG3:%.*]] : $@thick Dictionary.Type): +// HECK: [[PROJ_VAL1:%.*]] = project_value_buffer $Value in [[ARG1]] : $*Builtin.UnsafeValueBuffer +// HECK: [[LOAD_VAL1:%.*]] = load [take] [[PROJ_VAL1]] : $*Value +// HECK: [[ADDR_VAL0:%.*]] = pointer_to_address [[ARG0]] : $Builtin.RawPointer to [strict] $*Optional +// HECK: [[LOAD_VAL0:%.*]] = load [take] [[ADDR_VAL0]] : $*Optional +// HECK: apply {{.*}}([[LOAD_VAL0]], [[LOAD_VAL1]], [[ARG2]]) : $@convention(method) <τ_0_0, τ_0_1 where τ_0_0 : Hashable> (@in Optional<τ_0_1>, @in τ_0_1, @inout Dictionary<τ_0_0, τ_0_1>) -> () +// HECK: return %{{.*}} : $() +// CHECK-LABEL: } // end sil function '$sSD20opaque_values_silgenEyq_Sgq_cis' extension Dictionary { public subscript(key: Value) -> Value? { @inline(__always) @@ -1199,37 +249,105 @@ extension Dictionary { } } -// s400______maybeCloneP continued Test: reabstraction thunk +// Test ownership of multi-case Enum values in the context of to @in thunks. // --- -// CHECK-LABEL: sil shared [transparent] [serialized] [reabstraction_thunk] @$sxSgIegr_20opaque_values_silgen8Clonable_pSgIegr_AbCRzlTR : $@convention(thin) <τ_0_0 where τ_0_0 : Clonable> (@guaranteed @callee_guaranteed () -> @out Optional<τ_0_0>) -> @out Optional { -// CHECK: bb0([[ARG:%.*]] : $@callee_guaranteed () -> @out Optional<τ_0_0>): -// CHECK: [[APPLY_ARG:%.*]] = apply [[ARG]]() : $@callee_guaranteed () -> @out Optional<τ_0_0> -// CHECK: switch_enum [[APPLY_ARG]] : $Optional<τ_0_0>, case #Optional.some!enumelt: bb2, case #Optional.none!enumelt: bb1 -// CHECK: bb1: -// CHECK: [[ONONE:%.*]] = enum $Optional, #Optional.none!enumelt -// CHECK: br bb3([[ONONE]] : $Optional) -// CHECK: bb2([[ENUM_SOME:%.*]] : $τ_0_0): -// CHECK: [[INIT_OPAQUE:%.*]] = init_existential_value [[ENUM_SOME]] : $τ_0_0, $τ_0_0, $Clonable -// CHECK: [[OSOME:%.*]] = enum $Optional, #Optional.some!enumelt, [[INIT_OPAQUE]] : $Clonable -// CHECK: br bb3([[OSOME]] : $Optional) -// CHECK: bb3([[RETVAL:%.*]] : $Optional): -// CHECK: return [[RETVAL]] : $Optional -// CHECK-LABEL: } // end sil function '$sxSgIegr_20opaque_values_silgen8Clonable_pSgIegr_AbCRzlTR' +// protocol witness for static Swift.Equatable.== infix(A, A) -> Swift.Bool in conformance Swift.FloatingPointSign : Swift.Equatable +// CHECK-LABEL: sil shared [transparent] [serialized] [thunk] [ossa] @$s20opaque_values_silgen17FloatingPointSignOSQAASQ2eeoiySbx_xtFZTW : $@convention(witness_method: Equatable) (@in_guaranteed FloatingPointSign, @in_guaranteed FloatingPointSign, @thick FloatingPointSign.Type) -> Bool { +// HECK: bb0(%0 : $FloatingPointSign, %1 : $FloatingPointSign, %2 : $@thick FloatingPointSign.Type): +// HECK: %3 = metatype $@thin FloatingPointSign.Type // user: %5 +// HECK: %4 = function_ref @$ss17FloatingPointSignO21__derived_enum_equalsySbAB_ABtFZ : $@convention(method) (FloatingPointSign, FloatingPointSign, @thin FloatingPointSign.Type) -> Bool // user: %5 +// HECK: %5 = apply %4(%0, %1, %3) : $@convention(method) (FloatingPointSign, FloatingPointSign, @thin FloatingPointSign.Type) -> Bool // user: %6 +// HECK: return %5 : $Bool +// CHECK-LABEL: } // end sil function '$s20opaque_values_silgen17FloatingPointSignOSQAASQ2eeoiySbx_xtFZTW' +public enum 
FloatingPointSign { + /// The sign for a positive value. + case plus + + /// The sign for a negative value. + case minus +} + +#if os(macOS) +// Test open_existential_value used in a conversion context. +// +// TODO: Subsequent OSSA optimization should optimize away one of both of these copies. +// --- +// CHECK-OSX-LABEL: sil [ossa] @$s20opaque_values_silgen25unsafeDowncastToAnyObject04fromG0yXlyp_tF : $@convention(thin) (@in_guaranteed Any) -> @owned AnyObject { +// CHECK-OSX: bb0(%0 : @guaranteed $Any): +// CHECK-OSX: [[COPY:%.*]] = copy_value %0 : $Any +// CHECK-OSX: [[BORROW2:%.*]] = begin_borrow [[COPY]] : $Any +// CHECK-OSX: [[VAL:%.*]] = open_existential_value [[BORROW2]] : $Any to $@opened +// CHECK-OSX: [[COPY2:%.*]] = copy_value [[VAL]] : $@opened +// CHECK-OSX: end_borrow [[BORROW2]] : $Any +// CHECK-OSX: [[RESULT:%.*]] = apply %{{.*}}<@opened("{{.*}}") Any>([[COPY2]]) : $@convention(thin) <τ_0_0> (@in_guaranteed τ_0_0) -> @owned AnyObject +// CHECK-OSX: destroy_value [[COPY2]] : $@opened +// CHECK-OSX: destroy_value [[COPY]] : $Any +// CHECK-OSX-NOT: destroy_value %0 : $Any +// CHECK-OSX: return [[RESULT]] : $AnyObject +// CHECK-OSX-LABEL: } // end sil function '$s20opaque_values_silgen25unsafeDowncastToAnyObject04fromG0yXlyp_tF' +public func unsafeDowncastToAnyObject(fromAny any: Any) -> AnyObject { + return any as AnyObject +} +#endif + +#if os(macOS) +// Test open_existential_box_value in a conversion context. +// --- +// CHECK-OSX-LABEL: sil [ossa] @$s20opaque_values_silgen22testOpenExistentialBox1eys5Error_pSg_tF : $@convention(thin) (@guaranteed Optional) -> () { +// CHECK-OSX: [[BORROW:%.*]] = begin_borrow [lexical] %{{.*}} : $Error +// CHECK-OSX: [[VAL:%.*]] = open_existential_box_value [[BORROW]] : $Error to $@opened +// CHECK-OSX: [[COPY:%.*]] = copy_value [[VAL]] : $@opened +// CHECK-OSX: [[ANY:%.*]] = init_existential_value [[COPY]] : $@opened +// CHECK-OSX: end_borrow [[BORROW]] : $Error +// CHECK-OSX-LABEL: } // end sil function '$s20opaque_values_silgen22testOpenExistentialBox1eys5Error_pSg_tF' +public func testOpenExistentialBox(e: Error?) { + if let u = e { + let a: Any = u + _ = a + } +} +#endif -// s320__transImplodeAny continued Test: reabstraction thunk +// Test passing a +1 RValue to @in_guaranteed. 
// --- -// CHECK-LABEL: sil shared [transparent] [serialized] [reabstraction_thunk] @$sypIegn_S2iIegyy_TR : $@convention(thin) (Int, Int, @guaranteed @callee_guaranteed (@in_guaranteed Any) -> ()) -> () { -// CHECK: bb0([[ARG0:%.*]] : $Int, [[ARG1:%.*]] : $Int, [[ARG2:%.*]] : $@callee_guaranteed (@in_guaranteed Any) -> ()): -// CHECK: [[ASTACK:%.*]] = alloc_stack $Any -// CHECK: [[IADDR:%.*]] = init_existential_addr [[ASTACK]] : $*Any, $(Int, Int) -// CHECK: [[TADDR0:%.*]] = tuple_element_addr [[IADDR]] : $*(Int, Int), 0 -// CHECK: store [[ARG0]] to [trivial] [[TADDR0]] : $*Int -// CHECK: [[TADDR1:%.*]] = tuple_element_addr [[IADDR]] : $*(Int, Int), 1 -// CHECK: store [[ARG1]] to [trivial] [[TADDR1]] : $*Int -// CHECK: [[LOAD_EXIST:%.*]] = load [trivial] [[IADDR]] : $*(Int, Int) -// CHECK: [[INIT_OPAQUE:%.*]] = init_existential_value [[LOAD_EXIST]] : $(Int, Int), $(Int, Int), $Any -// CHECK: [[BORROWED_INIT_OPAQUE:%.*]] = begin_borrow [[INIT_OPAQUE]] -// CHECK: [[APPLYARG:%.*]] = apply [[ARG2]]([[BORROWED_INIT_OPAQUE]]) : $@callee_guaranteed (@in_guaranteed Any) -> () -// CHECK: dealloc_stack [[ASTACK]] : $*Any -// CHECK: return %{{.*}} : $() -// CHECK-LABEL: } // end sil function '$sypIegn_S2iIegyy_TR' +public protocol IP {} + +public protocol Seq { + associatedtype Iterator : IP + + func makeIterator() -> Iterator +} + +extension Seq where Self.Iterator == Self { + public func makeIterator() -> Self { + return self + } +} + +public struct EnumIter : IP, Seq { + internal var _base: Base + + public typealias Iterator = EnumIter +} + +// CHECK-LABEL: sil [ossa] @$s20opaque_values_silgen7EnumSeqV12makeIteratorAA0D4IterVy0G0QzGyF : $@convention(method) (@in_guaranteed EnumSeq) -> @out EnumIter { +// HECK: bb0(%0 : @guaranteed $EnumSeq): +// HECK: [[MT:%.*]] = metatype $@thin EnumIter.Type +// HECK: [[FIELD:%.*]] = struct_extract %0 : $EnumSeq, #EnumSeq._base +// HECK: [[COPY:%.*]] = copy_value [[FIELD]] : $Base +// HECK: [[WT:%.*]] = witness_method $Base, #Seq.makeIterator : (Self) -> () -> Self.Iterator : $@convention(witness_method: Seq) <τ_0_0 where τ_0_0 : Seq> (@in_guaranteed τ_0_0) -> @out τ_0_0.Iterator +// HECK: [[ITER:%.*]] = apply [[WT]]([[COPY]]) : $@convention(witness_method: Seq) <τ_0_0 where τ_0_0 : Seq> (@in_guaranteed τ_0_0) -> @out τ_0_0.Iterator +// HECK: destroy_value [[COPY]] : $Base +// HECK: [[FN:%.*]] = function_ref @$ss8EnumIterV5_baseAByxGx_tcfC : $@convention(method) <τ_0_0 where τ_0_0 : IP> (@in τ_0_0, @thin EnumIter<τ_0_0>.Type) -> @out EnumIter<τ_0_0> +// HECK: [[RET:%.*]] = apply [[FN]]([[ITER]], [[MT]]) : $@convention(method) <τ_0_0 where τ_0_0 : IP> (@in τ_0_0, @thin EnumIter<τ_0_0>.Type) -> @out EnumIter<τ_0_0> +// HECK: return [[RET]] : $EnumIter +// CHECK-LABEL: } // end sil function '$s20opaque_values_silgen7EnumSeqV12makeIteratorAA0D4IterVy0G0QzGyF' +public struct EnumSeq : Seq { + public typealias Iterator = EnumIter + + internal var _base: Base + + public func makeIterator() -> Iterator { + return EnumIter(_base: _base.makeIterator()) + } +} diff --git a/test/SILGen/opaque_values_silgen_lib.swift b/test/SILGen/opaque_values_silgen_lib.swift index f72300ceaa4c3..47e95d60a7ea7 100644 --- a/test/SILGen/opaque_values_silgen_lib.swift +++ b/test/SILGen/opaque_values_silgen_lib.swift @@ -1,72 +1,1080 @@ +// RUN: %target-swift-emit-silgen -enable-sil-opaque-values -Xllvm -sil-full-demangle -parse-stdlib -parse-as-library -module-name Swift %s | %FileCheck %s --check-prefix=CHECK -// RUN: %target-swift-emit-silgen -enable-sil-opaque-values -emit-sorted-sil 
-Xllvm -sil-full-demangle -parse-stdlib -parse-as-library -module-name Swift %s | %FileCheck %s +// Test SILGen -enable-sil-opaque-values -precedencegroup AssignmentPrecedence { assignment: true } +typealias AnyObject = Builtin.AnyObject -enum Optional { +public enum Optional { case none - case some(Wrapped) + case some(T) +} + +public protocol ExpressibleByNilLiteral { + init(nilLiteral: ()) +} + +extension Optional : ExpressibleByNilLiteral { + public init(nilLiteral: ()) { + self = .none + } +} + +func _diagnoseUnexpectedNilOptional(_filenameStart: Builtin.RawPointer, + _filenameLength: Builtin.Word, + _filenameIsASCII: Builtin.Int1, + _line: Builtin.Word, + _isImplicitUnwrap: Builtin.Int1) { + // This would usually contain an assert, but we don't need one since we are + // just emitting SILGen. } +precedencegroup AssignmentPrecedence { assignment: true } +precedencegroup CastingPrecedence {} +precedencegroup ComparisonPrecedence {} +precedencegroup TernaryPrecedence {} + +public protocol Error {} + +public protocol _ObjectiveCBridgeable {} + protocol EmptyP {} +struct AddressOnlyStruct : EmptyP {} + struct String { var ptr: Builtin.NativeObject } +public typealias _MaxBuiltinIntegerType = Builtin.IntLiteral + +public protocol _ExpressibleByBuiltinIntegerLiteral { + init(_builtinIntegerLiteral value: _MaxBuiltinIntegerType) +} + +public protocol ExpressibleByIntegerLiteral { + associatedtype IntegerLiteralType : _ExpressibleByBuiltinIntegerLiteral + + init(integerLiteral value: IntegerLiteralType) +} + +extension ExpressibleByIntegerLiteral + where Self : _ExpressibleByBuiltinIntegerLiteral { + @_transparent + public init(integerLiteral value: Self) { + self = value + } +} + +public protocol ExpressibleByFloatLiteral {} + +typealias Bool = Builtin.Int1 + +public struct Int64 : ExpressibleByIntegerLiteral, _ExpressibleByBuiltinIntegerLiteral { + public var _value: Builtin.Int64 + public init(_builtinIntegerLiteral x: _MaxBuiltinIntegerType) { + _value = Builtin.s_to_s_checked_trunc_IntLiteral_Int64(x).0 + } + public typealias IntegerLiteralType = Int64 + public init(integerLiteral value: Int64) { + self = value + } +} + +public protocol UnkeyedDecodingContainer { + var isAtEnd: Builtin.Int1 { get } +} + +public protocol Decoder { + func unkeyedContainer() throws -> UnkeyedDecodingContainer +} + +protocol FooP { + func foo() +} + +struct AnyStruct { + let a: Any +} + +protocol P { + var x : Builtin.Int64 { get } +} + +protocol P2 : P {} + +struct TrivialStruct { + var x: Builtin.Int64 +} + +extension TrivialStruct : P2 {} + +protocol Clonable { + func maybeClone() -> Self? +} + +func unreachableF() -> (Builtin.Int64, T)? { /* no body */ } + +protocol ConvertibleToP { + func asP() -> P +} + +indirect enum IndirectEnum { + case Nil + case Node(T) +} + +protocol SubscriptableGet { + subscript(a : Builtin.Int64) -> Builtin.Int64 { get } +} + +protocol SubscriptableGetSet { + subscript(a : Builtin.Int64) -> Builtin.Int64 { get set } +} + +var subscriptableGet : SubscriptableGet? +var subscriptableGetSet : SubscriptableGetSet? 
+ +func genericInout(_: inout T) {} + +// ============================================================================= +// Begin Test Cases +// ============================================================================= + +enum PAndSEnum { case A(EmptyP, String) } + // Tests Empty protocol + Builtin.NativeObject enum (including opaque tuples as a return value) // --- -// CHECK-LABEL: sil hidden [ossa] @$ss21s010______PAndS_casesyyF : $@convention(thin) () -> () { -// CHECK: bb0: -// CHECK: [[MTYPE:%.*]] = metatype $@thin PAndSEnum.Type -// CHECK: [[EAPPLY:%.*]] = apply {{.*}}([[MTYPE]]) : $@convention(thin) (@thin PAndSEnum.Type) -> @owned @callee_guaranteed (@in_guaranteed EmptyP, @guaranteed String) -> @out PAndSEnum -// CHECK: destroy_value [[EAPPLY]] -// CHECK: return %{{.*}} : $() -// CHECK-LABEL: } // end sil function '$ss21s010______PAndS_casesyyF' -func s010______PAndS_cases() { +// Swift.f010_PAndS_cases() -> () +// CHECK-LABEL: sil hidden [ossa] @$ss16f010_PAndS_casesyyF : $@convention(thin) () -> () { +// HECK: bb0: +// HECK: [[MTYPE:%.*]] = metatype $@thin PAndSEnum.Type +// HECK: [[EAPPLY:%.*]] = apply {{.*}}([[MTYPE]]) : $@convention(thin) (@thin PAndSEnum.Type) -> @owned @callee_guaranteed (@in_guaranteed EmptyP, @guaranteed String) -> @out PAndSEnum +// HECK: destroy_value [[EAPPLY]] +// HECK: return %{{.*}} : $() +// CHECK-LABEL: } // end sil function '$ss16f010_PAndS_casesyyF' +func f010_PAndS_cases() { _ = PAndSEnum.A } // Init of Empty protocol + Builtin.NativeObject enum (including opaque tuples as a return value) // --- -// CHECK-LABEL: sil private [ossa] @$ss21s010______PAndS_casesyyFs0B5SEnumOs6EmptyP_p_SStcACmcfu_ACsAD_p_SStcfu0_ : $@convention(thin) (@in_guaranteed EmptyP, @guaranteed String, @thin PAndSEnum.Type) -> @out PAndSEnum { -// CHECK: bb0([[ARG0:%.*]] : @guaranteed $EmptyP, [[ARG1:%.*]] : @guaranteed $String, [[ARG2:%.*]] : $@thin PAndSEnum.Type): -// CHECK: [[COPY0:%.*]] = copy_value [[ARG0]] -// CHECK: [[COPY1:%.*]] = copy_value [[ARG1]] -// CHECK: [[RTUPLE:%.*]] = tuple ([[COPY0]] : $EmptyP, [[COPY1]] : $String) -// CHECK: [[RETVAL:%.*]] = enum $PAndSEnum, #PAndSEnum.A!enumelt, [[RTUPLE]] : $(EmptyP, String) -// CHECK: return [[RETVAL]] : $PAndSEnum -// CHECK-LABEL: } // end sil function '$ss21s010______PAndS_casesyyFs0B5SEnumOs6EmptyP_p_SStcACmcfu_ACsAD_p_SStcfu0_' -enum PAndSEnum { case A(EmptyP, String) } - +// implicit closure #2 (Swift.EmptyP, Swift.String) -> Swift.PAndSEnum in implicit closure #1 (Swift.PAndSEnum.Type) -> (Swift.EmptyP, Swift.String) -> Swift.PAndSEnum in Swift.f010_PAndS_cases() -> () +// CHECK-LABEL: sil private [ossa] @$ss16f010_PAndS_casesyyFs0B5SEnumOs6EmptyP_p_SStcACmcfu_ACsAD_p_SStcfu0_ : $@convention(thin) (@in_guaranteed EmptyP, @guaranteed String, @thin PAndSEnum.Type) -> @out PAndSEnum { +// HECK: bb0([[ARG0:%.*]] : @guaranteed $EmptyP, [[ARG1:%.*]] : @guaranteed $String, [[ARG2:%.*]] : $@thin PAndSEnum.Type): +// HECK: [[COPY0:%.*]] = copy_value [[ARG0]] +// HECK: [[COPY1:%.*]] = copy_value [[ARG1]] +// HECK: [[RTUPLE:%.*]] = tuple ([[COPY0]] : $EmptyP, [[COPY1]] : $String) +// HECK: [[RETVAL:%.*]] = enum $PAndSEnum, #PAndSEnum.A!enumelt, [[RTUPLE]] : $(EmptyP, String) +// HECK: return [[RETVAL]] : $PAndSEnum +// CHECK-LABEL: } // end sil function '$ss16f010_PAndS_casesyyFs0B5SEnumOs6EmptyP_p_SStcACmcfu_ACsAD_p_SStcfu0_' // Test emitBuiltinReinterpretCast. 
// --- -// CHECK-LABEL: sil hidden [ossa] @$ss21s020__________bitCast_2toq_x_q_mtr0_lF : $@convention(thin) (@in_guaranteed T, @thick U.Type) -> @out U { -// CHECK: bb0([[ARG:%.*]] : @guaranteed $T, -// CHECK: [[COPY:%.*]] = copy_value [[ARG]] : $T -// CHECK: [[CAST:%.*]] = unchecked_bitwise_cast [[COPY]] : $T to $U -// CHECK: [[RET:%.*]] = copy_value [[CAST]] : $U -// CHECK: destroy_value [[COPY]] : $T -// CHECK: return [[RET]] : $U -// CHECK-LABEL: } // end sil function '$ss21s020__________bitCast_2toq_x_q_mtr0_lF' -func s020__________bitCast(_ x: T, to type: U.Type) -> U { +// CHECK-LABEL: sil hidden [ossa] @$ss12f020_bitCast_2toq_x_q_mtr0_lF : $@convention(thin) (@in_guaranteed T, @thick U.Type) -> @out U { +// HECK: bb0([[ARG:%.*]] : @guaranteed $T, +// HECK: [[COPY:%.*]] = copy_value [[ARG]] : $T +// HECK: [[CAST:%.*]] = unchecked_bitwise_cast [[COPY]] : $T to $U +// HECK: [[RET:%.*]] = copy_value [[CAST]] : $U +// HECK: destroy_value [[COPY]] : $T +// HECK: return [[RET]] : $U +// CHECK-LABEL: } // end sil function '$ss12f020_bitCast_2toq_x_q_mtr0_lF' +func f020_bitCast(_ x: T, to type: U.Type) -> U { return Builtin.reinterpretCast(x) } // Test emitBuiltinCastReference // --- -// CHECK-LABEL: sil hidden [ossa] @$ss21s030__________refCast_2toq_x_q_mtr0_lF : $@convention(thin) (@in_guaranteed T, @thick U.Type) -> @out U { +// CHECK-LABEL: sil hidden [ossa] @$ss12f021_refCast_2toq_x_q_mtr0_lF : $@convention(thin) (@in_guaranteed T, @thick U.Type) -> @out U { // CHECK: bb0([[ARG:%.*]] : @guaranteed $T, %1 : $@thick U.Type): -// CHECK: [[COPY:%.*]] = copy_value [[ARG]] : $T -// CHECK: [[SRC:%.*]] = alloc_stack $T -// CHECK: store [[COPY]] to [init] [[SRC]] : $*T -// CHECK: [[DEST:%.*]] = alloc_stack $U -// CHECK: unchecked_ref_cast_addr T in [[SRC]] : $*T to U in [[DEST]] : $*U -// CHECK: [[LOAD:%.*]] = load [take] [[DEST]] : $*U -// CHECK: dealloc_stack [[DEST]] : $*U -// CHECK: dealloc_stack [[SRC]] : $*T +// HECK: [[COPY:%.*]] = copy_value [[ARG]] : $T +// HECK: [[SRC:%.*]] = alloc_stack $T +// HECK: store [[COPY]] to [init] [[SRC]] : $*T +// HECK: [[DEST:%.*]] = alloc_stack $U +// HECK: unchecked_ref_cast_addr T in [[SRC]] : $*T to U in [[DEST]] : $*U +// HECK: [[LOAD:%.*]] = load [take] [[DEST]] : $*U +// HECK: dealloc_stack [[DEST]] : $*U +// HECK: dealloc_stack [[SRC]] : $*T // CHECK-NOT: destroy_value [[ARG]] : $T -// CHECK: return [[LOAD]] : $U -// CHECK-LABEL: } // end sil function '$ss21s030__________refCast_2toq_x_q_mtr0_lF' -func s030__________refCast(_ x: T, to: U.Type) -> U { +// HECK: return [[LOAD]] : $U +// CHECK-LABEL: } // end sil function '$ss12f021_refCast_2toq_x_q_mtr0_lF' +func f021_refCast(_ x: T, to: U.Type) -> U { return Builtin.castReference(x) } + +// Test unsafe_bitwise_cast nontrivial ownership. +// --- +// CHECK-LABEL: sil [ossa] @$ss18f022_unsafeBitCast_2toq_x_q_mtr0_lF : $@convention(thin) (@in_guaranteed T, @thick U.Type) -> @out U { +// HECK: bb0([[ARG0:%.*]] : @guaranteed $T, [[ARG1:%.*]] : $@thick U.Type): +// HECK: [[ARG_COPY:%.*]] = copy_value [[ARG0]] : $T +// HECK: [[RESULT:%.*]] = unchecked_bitwise_cast [[ARG_COPY]] : $T to $U +// HECK: [[RESULT_COPY:%.*]] = copy_value [[RESULT]] : $U +// HECK: destroy_value [[ARG_COPY]] : $T +// HECK: return [[RESULT_COPY]] : $U +// CHECK-LABEL: } // end sil function '$ss18f022_unsafeBitCast_2toq_x_q_mtr0_lF' +public func f022_unsafeBitCast(_ x: T, to type: U.Type) -> U { + return Builtin.reinterpretCast(x) +} + +// Test emitSemanticStore. 
+// --- +// CHECK-LABEL: sil hidden [ossa] @$ss16f030_assigninoutyyxz_xtlF : $@convention(thin) (@inout T, @in_guaranteed T) -> () { +// CHECK: bb0([[ARG0:%.*]] : $*T, [[ARG1:%.*]] : @guaranteed $T): +// HECK: [[CPY:%.*]] = copy_value [[ARG1]] : $T +// HECK: [[READ:%.*]] = begin_access [modify] [unknown] [[ARG0]] : $*T +// HECK: assign [[CPY]] to [[READ]] : $*T +// CHECK-NOT: destroy_value [[ARG1]] : $T +// HECK: return %{{.*}} : $() +// CHECK-LABEL: } // end sil function '$ss16f030_assigninoutyyxz_xtlF' +func f030_assigninout(_ a: inout T, _ b: T) { + a = b +} + +// Test that we no longer use copy_addr or tuple_element_addr when copy by value is possible +// --- +// CHECK-LABEL: sil hidden [ossa] @$ss19f040_tupleReturnIntyBi64_Bi64__xt_tlF : $@convention(thin) (Builtin.Int64, @in_guaranteed T) -> Builtin.Int64 { +// HECK: bb0([[ARG0:%.*]] : $Builtin.Int64, [[ARG1:%.*]] : $T): +// HECK: [[ARG1_COPY:%.*]] = copy_value [[ARG1]] +// HECK: [[TPL:%.*]] = tuple ([[ARG0]] : $Builtin.Int64, [[ARG1_COPY]] : $T) +// HECK: [[BORROWED_ARG1:%.*]] = begin_borrow [[TPL]] : $(Builtin.Int64, T) +// HECK: [[CPY:%.*]] = copy_value [[BORROWED_ARG1]] : $(Builtin.Int64, T) +// HECK: [[BORROWED_CPY:%.*]] = begin_borrow [[CPY]] +// HECK: [[INT:%.*]] = tuple_extract [[BORROWED_CPY]] : $(Builtin.Int64, T), 0 +// HECK: [[GEN:%.*]] = tuple_extract [[BORROWED_CPY]] : $(Builtin.Int64, T), 1 +// HECK: [[COPY_GEN:%.*]] = copy_value [[GEN]] +// HECK: destroy_value [[COPY_GEN]] +// HECK: end_borrow [[BORROWED_CPY]] +// HECK: destroy_value [[CPY]] +// HECK: end_borrow [[BORROWED_ARG1]] : $(Builtin.Int64, T) +// HECK: destroy_value [[TPL]] : $(Builtin.Int64, T) +// HECK: return [[INT]] +// CHECK-LABEL: } // end sil function '$ss19f040_tupleReturnIntyBi64_Bi64__xt_tlF' +func f040_tupleReturnInt(_ x: (Builtin.Int64, T)) -> Builtin.Int64 { + let y = x.0 + return y +} + +// Test returning an opaque tuple of tuples. +// --- +// CHECK-LABEL: sil hidden [noinline] [ossa] @$ss16f050_multiResultyx_x_xttxlF : $@convention(thin) (@in_guaranteed T) -> (@out T, @out T, @out T) { +// HECK: bb0(%0 : $T): +// HECK: %[[CP1:.*]] = copy_value %{{.*}} : $T +// HECK: %[[CP2:.*]] = copy_value %{{.*}} : $T +// HECK: %[[CP3:.*]] = copy_value %{{.*}} : $T +// CHECK-NOT: destroy_value %0 : $T +// HECK: %[[TPL:.*]] = tuple (%[[CP1]] : $T, %[[CP2]] : $T, %[[CP3]] : $T) +// HECK: return %[[TPL]] : $(T, T, T) +// CHECK-LABEL: } // end sil function '$ss16f050_multiResultyx_x_xttxlF' +@inline(never) +func f050_multiResult(_ t: T) -> (T, (T, T)) { + return (t, (t, t)) +} + +// Test returning an opaque tuple of tuples as a concrete tuple. 
+// --- +// CHECK-LABEL: sil [ossa] @$ss20f060_callMultiResult1iBi64__Bi64__Bi64_ttBi64__tF : $@convention(thin) (Builtin.Int64) -> (Builtin.Int64, Builtin.Int64, Builtin.Int64) { +// HECK: bb0(%0 : $Builtin.Int64): +// HECK: %[[FN:.*]] = function_ref @$s20opaque_values_silgen21f050_multiResultyx_x_xttxlF : $@convention(thin) <τ_0_0> (@in_guaranteed τ_0_0) -> (@out τ_0_0, @out τ_0_0, @out τ_0_0) +// HECK: %[[TPL:.*]] = apply %[[FN]](%0) : $@convention(thin) <τ_0_0> (@in_guaranteed τ_0_0) -> (@out τ_0_0, @out τ_0_0, @out τ_0_0) +// HECK: %[[I1:.*]] = tuple_extract %[[TPL]] : $(Builtin.Int64, Builtin.Int64, Builtin.Int64), 0 +// HECK: %[[I2:.*]] = tuple_extract %[[TPL]] : $(Builtin.Int64, Builtin.Int64, Builtin.Int64), 1 +// HECK: %[[I3:.*]] = tuple_extract %[[TPL]] : $(Builtin.Int64, Builtin.Int64, Builtin.Int64), 2 +// HECK: %[[R:.*]] = tuple (%[[I1]] : $Builtin.Int64, %[[I2]] : $Builtin.Int64, %[[I3]] : $Builtin.Int64) +// HECK: return %[[R]] : $(Builtin.Int64, Builtin.Int64, Builtin.Int64) +// CHECK-LABEL: } // end sil function '$ss20f060_callMultiResult1iBi64__Bi64__Bi64_ttBi64__tF' +public func f060_callMultiResult(i: Builtin.Int64) -> (Builtin.Int64, (Builtin.Int64, Builtin.Int64)) { + return f050_multiResult(i) +} + +// SILGen, prepareArchetypeCallee. Materialize a +// non-class-constrainted self from a class-constrained archetype. +// --- +// CHECK-LABEL: sil hidden [ossa] @$ss20f070_materializeSelf1tyx_tRlzCs4FooPRzlF : $@convention(thin) (@guaranteed T) -> () { +// CHECK: bb0([[ARG:%.*]] : @guaranteed $T): +// HECK: [[WITNESS_METHOD:%.*]] = witness_method $T, #FooP.foo : (Self) -> () -> () : $@convention(witness_method: FooP) <τ_0_0 where τ_0_0 : FooP> (@in_guaranteed τ_0_0) -> () +// HECK: apply [[WITNESS_METHOD]]([[ARG]]) : $@convention(witness_method: FooP) <τ_0_0 where τ_0_0 : FooP> (@in_guaranteed τ_0_0) -> () +// CHECK-NOT: destroy_value [[ARG]] : $T +// HECK: return %{{[0-9]+}} : $() +// CHECK-LABEL: } // end sil function '$ss20f070_materializeSelf1tyx_tRlzCs4FooPRzlF' +func f070_materializeSelf(t: T) where T: AnyObject { + t.foo() +} + +// Test open existential with opaque values +// --- +// CHECK-LABEL: sil hidden [ossa] @$ss8f080_bar1pBi64_s1P_p_tF : $@convention(thin) (@in_guaranteed P) -> Builtin.Int64 { +// CHECK: bb0([[ARG:%.*]] : @guaranteed $P): +// HECK: [[OPENED_ARG:%.*]] = open_existential_value [[ARG]] : $P to $@opened +// HECK: [[WITNESS_FUNC:%.*]] = witness_method $@opened +// HECK: [[RESULT:%.*]] = apply [[WITNESS_FUNC]]<{{.*}}>([[OPENED_ARG]]) : $@convention(witness_method: P) <τ_0_0 where τ_0_0 : P> (@in_guaranteed τ_0_0) -> Builtin.Int64 +// CHECK-NOT: destroy_value [[ARG]] : $P +// HECK: return [[RESULT]] : $Builtin.Int64 +// CHECK-LABEL: } // end sil function '$ss8f080_bar1pBi64_s1P_p_tF' +func f080_bar(p: P) -> Builtin.Int64 { + return p.x +} + +// Test OpaqueTypeLowering copyValue and destroyValue. +// --- +// CHECK-LABEL: sil hidden [ossa] @$ss11f090_calleryxxlF : $@convention(thin) (@in_guaranteed T) -> @out T { +// CHECK: bb0([[ARG:%.*]] : @guaranteed $T): +// CHECK-NOT: copy_value +// HECK: [[RESULT:%.*]] = apply {{%.*}}([[ARG]]) : $@convention(thin) <τ_0_0> (@in_guaranteed τ_0_0) -> @out τ_0_0 +// CHECK-NOT: destroy_value [[ARG]] : $T +// HECK: return %{{.*}} : $T +// CHECK-LABEL: } // end sil function '$ss11f090_calleryxxlF' +func f090_caller(_ t: T) -> T { + return f090_caller(t) +} + +// Test a simple opaque parameter and return value. 
+// --- +// CHECK-LABEL: sil hidden [ossa] @$ss13f100_identityyxxlF : $@convention(thin) (@in_guaranteed T) -> @out T { +// CHECK: bb0([[ARG:%.*]] : @guaranteed $T): +// HECK: [[COPY_ARG:%.*]] = copy_value [[ARG]] : $T +// CHECK-NOT: destroy_value [[ARG]] : $T +// HECK: return [[COPY_ARG]] : $T +// CHECK-LABEL: } // end sil function '$ss13f100_identityyxxlF' +func f100_identity(_ t: T) -> T { + return t +} + +// Test a guaranteed opaque parameter. +// --- +// CHECK-LABEL: sil private [transparent] [thunk] [ossa] @$ss19f110_GuaranteedSelfVs4FooPssACP3fooyyFTW : $@convention(witness_method: FooP) (@in_guaranteed f110_GuaranteedSelf) -> () { +// CHECK: bb0(%0 : $f110_GuaranteedSelf): +// HECK: %[[F:.*]] = function_ref @$s20opaque_values_silgen21f110_GuaranteedSelfV3fooyyF : $@convention(method) (f110_GuaranteedSelf) -> () +// HECK: apply %[[F]](%0) : $@convention(method) (f110_GuaranteedSelf) -> () +// HECK: return +// CHECK-LABEL: } // end sil function '$ss19f110_GuaranteedSelfVs4FooPssACP3fooyyFTW' +struct f110_GuaranteedSelf : FooP { + func foo() {} +} + +// Tests a corner case wherein we used to do a temporary and return a pointer to T instead of T +// --- +// CHECK-LABEL: sil hidden [ossa] @$ss16f120_returnValueyxxlF : $@convention(thin) (@in_guaranteed T) -> @out T { +// CHECK: bb0([[ARG:%.*]] : @guaranteed $T): +// HECK: [[COPY_ARG1:%.*]] = copy_value [[ARG]] : $T +// HECK: [[BORROWED_ARG2:%.*]] = begin_borrow [[COPY_ARG1]] +// HECK: [[COPY_ARG2:%.*]] = copy_value [[BORROWED_ARG2]] : $T +// HECK: end_borrow [[BORROWED_ARG2]] +// HECK: return [[COPY_ARG2]] : $T +// CHECK-LABEL: } // end sil function '$ss16f120_returnValueyxxlF' +func f120_returnValue(_ x: T) -> T { + let y = x + return y +} + +// Tests Optional initialization by value +// --- +// CHECK-LABEL: sil hidden [ossa] @$ss9f130_wrapyxSgxlF : $@convention(thin) (@in_guaranteed T) -> @out Optional { +// CHECK: bb0([[ARG:%.*]] : @guaranteed $T): +// HECK: [[COPY_ARG:%.*]] = copy_value [[ARG]] : $T +// HECK: [[OPTIONAL_ARG:%.*]] = enum $Optional, #Optional.some!enumelt, [[COPY_ARG]] : $T +// CHECK-NOT: destroy_value [[ARG]] : $T +// HECK: return [[OPTIONAL_ARG]] : $Optional +// CHECK-LABEL: } // end sil function '$ss9f130_wrapyxSgxlF' +func f130_wrap(_ x: T) -> T? 
{ + return x +} + +func f150_anyArg(_: Any) {} + +// Tests init of opaque existentials +// --- +// CHECK-LABEL: sil hidden [ossa] @$ss15f160_callAnyArgyyF : $@convention(thin) () -> () { +// CHECK: bb0: +// HECK: [[INT_TYPE:%.*]] = metatype $@thin Builtin.Int64.Type +// HECK: [[INT_LIT:%.*]] = integer_literal $Builtin.Builtin.Int64Literal, 42 +// HECK: [[INT_ARG:%.*]] = apply %{{.*}}([[INT_LIT]], [[INT_TYPE]]) : $@convention(method) (Builtin.Builtin.Int64Literal, @thin Builtin.Int64.Type) -> Builtin.Int64 +// HECK: [[INIT_OPAQUE:%.*]] = init_existential_value [[INT_ARG]] : $Builtin.Int64, $Builtin.Int64, $Any +// HECK: apply %{{.*}}([[INIT_OPAQUE]]) : $@convention(thin) (@in_guaranteed Any) -> () +// HECK: return %{{.*}} : $() +// CHECK-LABEL: } // end sil function '$ss15f160_callAnyArgyyF' +func f160_callAnyArg() { + f150_anyArg(Int64(42)) +} + +// Tests unconditional_checked_cast for opaque values +// --- +// CHECK-LABEL: sil hidden [ossa] @$ss18f170_force_convertxylF : $@convention(thin) () -> @out T { +// CHECK: bb0: +// HECK-NOT: alloc_stack +// HECK: [[INT_TYPE:%.*]] = metatype $@thin Builtin.Int64.Type +// HECK: [[INT_LIT:%.*]] = integer_literal $Builtin.Builtin.Int64Literal, 42 +// HECK: [[INT_ARG:%.*]] = apply %{{.*}}([[INT_LIT]], [[INT_TYPE]]) : $@convention(method) (Builtin.Builtin.Int64Literal, @thin Builtin.Int64.Type) -> Builtin.Int64 +// HECK: [[INT_CAST:%.*]] = unconditional_checked_cast_value [[INT_ARG]] : $Builtin.Int64 to $T +// HECK: [[CAST_BORROW:%.*]] = begin_borrow [[INT_CAST]] : $T +// HECK: [[RETURN_VAL:%.*]] = copy_value [[CAST_BORROW]] : $T +// HECK: end_borrow [[CAST_BORROW]] : $T +// HECK: destroy_value [[INT_CAST]] : $T +// HECK: return [[RETURN_VAL]] : $T +// CHECK-LABEL: } // end sil function '$ss18f170_force_convertxylF' +func f170_force_convert() -> T { + let x : T = Int64(42) as! T + return x +} + +// Tests supporting function for f190_return_foo_var - cast and return of protocol +// --- +// CHECK-LABEL: sil hidden [ossa] @$ss15f180_return_foos4FooP_pyF : $@convention(thin) () -> @out FooP { +// CHECK: bb0: +// HECK: [[INT_LIT:%.*]] = integer_literal $Builtin.Builtin.Int64Literal, 42 +// HECK: [[INT_ARG:%.*]] = apply %{{.*}}([[INT_LIT]], [[INT_TYPE]]) : $@convention(method) (Builtin.Builtin.Int64Literal, @thin Builtin.Int64.Type) -> Builtin.Int64 +// HECK: [[INT_CAST:%.*]] = unconditional_checked_cast_value [[INT_ARG]] : $Builtin.Int64 to $FooP +// HECK: return [[INT_CAST]] : $FooP +// CHECK-LABEL: } // end sil function '$ss15f180_return_foos4FooP_pyF' +func f180_return_foo() -> FooP { + return Int64(42) as! 
FooP +} +var foo_var : FooP = f180_return_foo() + +// Tests return of global variables by doing a load of copy +// --- +// CHECK-LABEL: sil hidden [ossa] @$ss19f190_return_foo_vars4FooP_pyF : $@convention(thin) () -> @out FooP { +// CHECK: bb0: +// HECK: [[GLOBAL:%.*]] = global_addr {{.*}} : $*FooP +// HECK: [[READ:%.*]] = begin_access [read] [dynamic] [[GLOBAL]] : $*FooP +// HECK: [[LOAD_GLOBAL:%.*]] = load [copy] [[READ]] : $*FooP +// HECK: return [[LOAD_GLOBAL]] : $FooP +// CHECK-LABEL: } // end sil function '$ss19f190_return_foo_vars4FooP_pyF' +func f190_return_foo_var() -> FooP { + return foo_var +} + +// Tests deinit of opaque existentials +// --- +// CHECK-LABEL: sil hidden [ossa] @$ss16f200_use_foo_varyyF : $@convention(thin) () -> () { +// CHECK: bb0: +// HECK: [[GLOBAL:%.*]] = global_addr {{.*}} : $*FooP +// HECK: [[READ:%.*]] = begin_access [read] [dynamic] [[GLOBAL]] : $*FooP +// HECK: [[LOAD_GLOBAL:%.*]] = load [copy] [[READ]] : $*FooP +// HECK: [[BORROW:%.*]] = begin_borrow [[LOAD_GLOBAL]] : $FooP +// HECK: [[OPEN_VAR:%.*]] = open_existential_value [[BORROW]] : $FooP +// HECK: [[WITNESS:%.*]] = witness_method $@opened +// HECK: apply [[WITNESS]] +// HECK: end_borrow [[BORROW]] +// HECK: destroy_value [[LOAD_GLOBAL]] +// HECK: return %{{.*}} : $() +// CHECK-LABEL: } // end sil function '$ss16f200_use_foo_varyyF' +func f200_use_foo_var() { + foo_var.foo() +} + + +// Tests composition erasure of opaque existentials + copy into of opaques +// --- +// CHECK-LABEL: sil hidden [ossa] @$ss16f210_compErasureys5Error_psAB_s4FooPpF : $@convention(thin) (@in_guaranteed Error & FooP) -> @owned Error { +// CHECK: bb0([[ARG:%.*]] : @guaranteed $Error & FooP): +// HECK: [[OPAQUE_ARG:%.*]] = open_existential_value [[ARG]] : $Error & FooP to $@opened({{.*}}) Error & FooP +// HECK: [[EXIST_BOX:%.*]] = alloc_existential_box $Error, $@opened({{.*}}) Error & FooP +// HECK: [[PROJ_BOX:%.*]] = project_existential_box $@opened({{.*}}) Error & FooP in [[EXIST_BOX]] +// HECK: [[COPY_OPAQUE:%.*]] = copy_value [[OPAQUE_ARG]] : $@opened({{.*}}) Error & FooP +// HECK: store [[COPY_OPAQUE]] to [init] [[PROJ_BOX]] : $*@opened({{.*}}) Error & FooP +// CHECK-NOT: destroy_value [[ARG]] : $Error & FooP +// HECK: return [[EXIST_BOX]] : $Error +// CHECK-LABEL: } // end sil function '$ss16f210_compErasureys5Error_psAB_s4FooPpF' +func f210_compErasure(_ x: FooP & Error) -> Error { + return x +} + +// Tests Implicit Value Construction under Opaque value mode +// --- + +// f250_testBoxT continued Test Implicit Value Construction under Opaque value mode +// --- +// CHECK-LABEL: sil hidden [ossa] @$ss3BoxV1tAByxGx_tcfC : $@convention(method) (@in T, @thin Box.Type) -> @out Box { +// CHECK: bb0([[ARG0:%.*]] : @owned $T, [[ARG1:%.*]] : $@thin Box.Type): +// HECK: [[RETVAL:%.*]] = struct $Box ([[ARG0]] : $T) +// HECK: return [[RETVAL]] : $Box +// CHECK-LABEL: } // end sil function '$ss3BoxV1tAByxGx_tcfC' +struct Box { + let t: T +} + +// CHECK-LABEL: sil hidden [ossa] @$ss13f250_testBoxTyyF : $@convention(thin) () -> () { +// CHECK: bb0: +// HECK: [[BOX_MTYPE:%.*]] = metatype $@thin Box.Type +// HECK: [[MTYPE:%.*]] = metatype $@thin Builtin.Int64.Type +// HECK: [[INTLIT:%.*]] = integer_literal $Builtin.Builtin.Int64Literal, 42 +// HECK: [[AINT:%.*]] = apply {{.*}}([[INTLIT]], [[MTYPE]]) : $@convention(method) (Builtin.Builtin.Int64Literal, @thin Builtin.Int64.Type) -> Builtin.Int64 +// HECK: apply {{.*}}([[AINT]], [[BOX_MTYPE]]) : $@convention(method) <τ_0_0> (@in τ_0_0, @thin Box<τ_0_0>.Type) -> @out Box<τ_0_0> +// HECK: 
return %{{.*}} : $() +// CHECK-LABEL: } // end sil function '$ss13f250_testBoxTyyF' +func f250_testBoxT() { + let _ = Box(t: Int64(42)) +} + +enum AddressOnlyEnum { + case nought + case mere(EmptyP) + case phantom(AddressOnlyStruct) +} + +// Tests Address only enums +// --- +// CHECK-LABEL: sil hidden [ossa] @$ss15f260_AOnly_enumyys17AddressOnlyStructVF : $@convention(thin) (AddressOnlyStruct) -> () { +// CHECK: bb0([[ARG:%.*]] : $AddressOnlyStruct): +// HECK: [[MTYPE1:%.*]] = metatype $@thin AddressOnlyEnum.Type +// HECK: [[APPLY1:%.*]] = apply {{.*}}([[MTYPE1]]) : $@convention(thin) (@thin AddressOnlyEnum.Type) -> @owned @callee_guaranteed (@in_guaranteed EmptyP) -> @out AddressOnlyEnum +// HECK: destroy_value [[APPLY1]] +// HECK: [[MTYPE2:%.*]] = metatype $@thin AddressOnlyEnum.Type +// HECK: [[ENUM1:%.*]] = enum $AddressOnlyEnum, #AddressOnlyEnum.nought!enumelt +// HECK: [[MTYPE3:%.*]] = metatype $@thin AddressOnlyEnum.Type +// HECK: [[INIT_OPAQUE:%.*]] = init_existential_value [[ARG]] : $AddressOnlyStruct, $AddressOnlyStruct, $EmptyP +// HECK: [[ENUM2:%.*]] = enum $AddressOnlyEnum, #AddressOnlyEnum.mere!enumelt, [[INIT_OPAQUE]] : $EmptyP +// HECK: destroy_value [[ENUM2]] +// HECK: [[MTYPE4:%.*]] = metatype $@thin AddressOnlyEnum.Type +// HECK: [[ENUM3:%.*]] = enum $AddressOnlyEnum, #AddressOnlyEnum.phantom!enumelt, [[ARG]] : $AddressOnlyStruct +// HECK: return %{{.*}} : $() +// CHECK-LABEL: } // end sil function '$ss15f260_AOnly_enumyys17AddressOnlyStructVF' +func f260_AOnly_enum(_ s: AddressOnlyStruct) { + _ = AddressOnlyEnum.mere + + _ = AddressOnlyEnum.nought + + _ = AddressOnlyEnum.mere(s) + + _ = AddressOnlyEnum.phantom(s) +} + +// Tests InjectOptional for opaque value types + conversion of opaque structs +// --- +// CHECK-LABEL: sil hidden [ossa] @$ss21f270_convOptAnyStructyys0dE0VACSgcF : $@convention(thin) (@guaranteed @callee_guaranteed (@in_guaranteed Optional) -> @out AnyStruct) -> () { +// HECK: bb0([[ARG:%.*]] : $@callee_guaranteed (@in_guaranteed Optional) -> @out AnyStruct): +// HECK: [[COPY_ARG:%.*]] = copy_value [[ARG]] +// HECK: [[PAPPLY:%.*]] = partial_apply [callee_guaranteed] %{{.*}}([[COPY_ARG]]) : $@convention(thin) (@in_guaranteed Optional, @guaranteed @callee_guaranteed (@in_guaranteed Optional) -> @out AnyStruct) -> @out Optional +// HECK: destroy_value [[PAPPLY]] : $@callee_guaranteed (@in_guaranteed Optional) -> @out Optional +// HECK-NOT: destroy_value [[ARG]] : $@callee_guaranteed (@in_guaranteed Optional) -> @out AnyStruct +// HECK: return %{{.*}} : $() +// CHECK-LABEL: } // end sil function '$ss21f270_convOptAnyStructyys0dE0VACSgcF' +func f270_convOptAnyStruct(_ a1: @escaping (AnyStruct?) -> AnyStruct) { + let _: (AnyStruct?) -> AnyStruct? 
= a1 +} + +// f270_convOptAnyStruct continued Test: reabstraction thunk helper +// --- +// CHECK-LABEL: sil shared [transparent] [serializable] [reabstraction_thunk] [ossa] @$ss9AnyStructVSgABIegnr_A2CIegnr_TR : $@convention(thin) (@in_guaranteed Optional, @guaranteed @callee_guaranteed (@in_guaranteed Optional) -> @out AnyStruct) -> @out Optional { +// CHECK: bb0([[ARG0:%.*]] : @guaranteed $Optional, [[ARG1:%.*]] : @guaranteed $@callee_guaranteed (@in_guaranteed Optional) -> @out AnyStruct): +// HECK: [[APPLYARG:%.*]] = apply [[ARG1]]([[ARG0]]) : $@callee_guaranteed (@in_guaranteed Optional) -> @out AnyStruct +// HECK: [[RETVAL:%.*]] = enum $Optional, #Optional.some!enumelt, [[APPLYARG]] : $AnyStruct +// HECK: return [[RETVAL]] : $Optional +// CHECK-LABEL: } // end sil function '$ss9AnyStructVSgABIegnr_A2CIegnr_TR' + +// Tests conversion between existential types +// --- +// CHECK-LABEL: sil hidden [ossa] @$ss21f280_convExistTrivialyys0D6StructVs1P_pcF : $@convention(thin) (@guaranteed @callee_guaranteed (@in_guaranteed P) -> TrivialStruct) -> () { +// CHECK: bb0([[ARG:%.*]] : @guaranteed $@callee_guaranteed (@in_guaranteed P) -> TrivialStruct): +// HECK: [[COPY_ARG:%.*]] = copy_value [[ARG]] +// HECK: [[PAPPLY:%.*]] = partial_apply [callee_guaranteed] %{{.*}}([[COPY_ARG]]) : $@convention(thin) (@in_guaranteed P2, @guaranteed @callee_guaranteed (@in_guaranteed P) -> TrivialStruct) -> @out P2 +// HECK: destroy_value [[PAPPLY]] : $@callee_guaranteed (@in_guaranteed P2) -> @out P2 +// CHECK-NOT: destroy_value [[ARG]] +// HECK: return %{{.*}} : $() +// CHECK-LABEL: } // end sil function '$ss21f280_convExistTrivialyys0D6StructVs1P_pcF' +func f280_convExistTrivial(_ s: @escaping (P) -> TrivialStruct) { + let _: (P2) -> P2 = s +} + +// part of f280_convExistTrivial: conversion between existential types - reabstraction thunk +// --- +// CHECK-LABEL: sil shared [transparent] [serializable] [reabstraction_thunk] [ossa] @$ss1P_ps13TrivialStructVIegnd_s2P2_psAD_pIegnr_TR : $@convention(thin) (@in_guaranteed P2, @guaranteed @callee_guaranteed (@in_guaranteed P) -> TrivialStruct) -> @out P2 { +// CHECK: bb0([[ARG0:%.*]] : @guaranteed $P2, [[ARG1:%.*]] : @guaranteed $@callee_guaranteed (@in_guaranteed P) -> TrivialStruct): +// HECK: [[OPENED_ARG:%.*]] = open_existential_value [[ARG]] : $P2 to $@opened({{.*}}) P2 +// HECK: [[COPIED_VAL:%.*]] = copy_value [[OPENED_ARG]] +// HECK: [[INIT_P:%.*]] = init_existential_value [[COPIED_VAL]] : $@opened({{.*}}) P2, $@opened({{.*}}) P2, $P +// HECK: [[BORROWED_INIT_P:%.*]] = begin_borrow [[INIT_P]] +// HECK: [[APPLY_P:%.*]] = apply [[ARG1]]([[BORROWED_INIT_P]]) : $@callee_guaranteed (@in_guaranteed P) -> TrivialStruct +// HECK: [[RETVAL:%.*]] = init_existential_value [[APPLY_P]] : $TrivialStruct, $TrivialStruct, $P2 +// HECK: end_borrow [[BORROWED_INIT_P]] +// CHECK-NOT: destroy_value [[ARG0]] +// HECK: return [[RETVAL]] : $P2 +// CHECK-LABEL: } // end sil function '$ss1P_ps13TrivialStructVIegnd_s2P2_psAD_pIegnr_TR' + +// Tests conversion between existential types - optionals case +// --- +// CHECK-LABEL: sil hidden [ossa] @$ss21f290_convOptExistTrivyys13TrivialStructVs1P_pSgcF : $@convention(thin) (@guaranteed @callee_guaranteed (@in_guaranteed Optional
<P>
) -> TrivialStruct) -> () { +// CHECK: bb0([[ARG:%.*]] : @guaranteed $@callee_guaranteed (@in_guaranteed Optional
<P>
) -> TrivialStruct): +// HECK: [[COPY_ARG:%.*]] = copy_value [[ARG]] +// HECK: [[PAPPLY:%.*]] = partial_apply [callee_guaranteed] %{{.*}}([[COPY_ARG]]) : $@convention(thin) (Optional, @guaranteed @callee_guaranteed (@in_guaranteed Optional
<P>
) -> TrivialStruct) -> @out P2 +// HECK: destroy_value [[PAPPLY]] : $@callee_guaranteed (Optional) -> @out P2 +// CHECK-NOT: destroy_value [[ARG]] +// HECK: return %{{.*}} : $() +// CHECK-LABEL: } // end sil function '$ss21f290_convOptExistTrivyys13TrivialStructVs1P_pSgcF' +func f290_convOptExistTriv(_ s: @escaping (P?) -> TrivialStruct) { + let _: (TrivialStruct?) -> P2 = s +} + +// part of f290_convOptExistTriv: conversion between existential types - reabstraction thunk - optionals case +// --- +// CHECK-LABEL: sil shared [transparent] [serializable] [reabstraction_thunk] [ossa] @$ss1P_pSgs13TrivialStructVIegnd_ADSgs2P2_pIegyr_TR : $@convention(thin) (Optional, @guaranteed @callee_guaranteed (@in_guaranteed Optional
<P>
) -> TrivialStruct) -> @out P2 { +// CHECK: bb0([[ARG0:%.*]] : $Optional, [[ARG1:%.*]] : @guaranteed $@callee_guaranteed (@in_guaranteed Optional
<P>
) -> TrivialStruct): +// HECK: switch_enum [[ARG0]] : $Optional, case #Optional.some!enumelt: bb2, case #Optional.none!enumelt: bb1 +// HECK: bb1: +// HECK: [[ONONE:%.*]] = enum $Optional
<P>
, #Optional.none!enumelt +// HECK: br bb3([[ONONE]] : $Optional
<P>
) +// HECK: bb2([[OSOME:%.*]] : $TrivialStruct): +// HECK: [[INIT_S:%.*]] = init_existential_value [[OSOME]] : $TrivialStruct, $TrivialStruct, $P +// HECK: [[ENUM_S:%.*]] = enum $Optional
<P>
, #Optional.some!enumelt, [[INIT_S]] : $P +// HECK: br bb3([[ENUM_S]] : $Optional
<P>
) +// HECK: bb3([[OPT_S:%.*]] : $Optional
<P>
): +// HECK: [[BORROWED_OPT_S:%.*]] = begin_borrow [[OPT_S]] +// HECK: [[APPLY_P:%.*]] = apply [[ARG1]]([[BORROWED_OPT_S]]) : $@callee_guaranteed (@in_guaranteed Optional
<P>
) -> TrivialStruct +// HECK: [[RETVAL:%.*]] = init_existential_value [[APPLY_P]] : $TrivialStruct, $TrivialStruct, $P2 +// HECK: return [[RETVAL]] : $P2 +// CHECK-LABEL: } // end sil function '$ss1P_pSgs13TrivialStructVIegnd_ADSgs2P2_pIegyr_TR' + +// Tests corner-case: reabstraction of an empty tuple to any +// --- +// CHECK-LABEL: sil hidden [ossa] @$ss20f300_convETupleToAnyyyyycF : $@convention(thin) (@guaranteed @callee_guaranteed () -> ()) -> () { +// CHECK: bb0([[ARG:%.*]] : @guaranteed $@callee_guaranteed () -> ()): +// HECK: [[COPY_ARG:%.*]] = copy_value [[ARG]] +// HECK: [[PAPPLY:%.*]] = partial_apply [callee_guaranteed] %{{.*}}([[COPY_ARG]]) : $@convention(thin) (@guaranteed @callee_guaranteed () -> ()) -> @out Any +// HECK: destroy_value [[PAPPLY]] : $@callee_guaranteed () -> @out Any +// CHECK-NOT: destroy_value [[ARG]] +// HECK: return %{{.*}} : $() +// CHECK-LABEL: } // end sil function '$ss20f300_convETupleToAnyyyyycF' +func f300_convETupleToAny(_ t: @escaping () -> ()) { + let _: () -> Any = t +} + +// f300_convETupleToAny continued Test: reabstraction of () to Any +// --- +// CHECK-LABEL: sil shared [transparent] [serializable] [reabstraction_thunk] [ossa] @$sIeg_ypIegr_TR : $@convention(thin) (@guaranteed @callee_guaranteed () -> ()) -> @out Any { +// CHECK: bb0([[ARG:%.*]] : @guaranteed $@callee_guaranteed () -> ()): +// HECK: [[ASTACK:%.*]] = alloc_stack $Any +// HECK: [[IADDR:%.*]] = init_existential_addr [[ASTACK]] : $*Any, $() +// HECK: [[APPLYARG:%.*]] = apply [[ARG]]() : $@callee_guaranteed () -> () +// HECK: [[LOAD_EXIST:%.*]] = load [trivial] [[IADDR]] : $*() +// HECK: [[RETVAL:%.*]] = init_existential_value [[LOAD_EXIST]] : $(), $(), $Any +// HECK: return [[RETVAL]] : $Any +// CHECK-LABEL: } // end sil function '$sIeg_ypIegr_TR' + +// Tests corner-case: reabstraction of a non-empty tuple to any +// --- +// CHECK-LABEL: sil hidden [ossa] @$ss21f310_convnIntTupleAnyyyBi64__Bi64_tycF : $@convention(thin) (@guaranteed @callee_guaranteed () -> (Builtin.Int64, Builtin.Int64)) -> () { +// CHECK: bb0([[ARG:%.*]] : @guaranteed $@callee_guaranteed () -> (Builtin.Int64, Builtin.Int64)): +// HECK: [[COPY_ARG:%.*]] = copy_value [[ARG]] +// HECK: [[PAPPLY:%.*]] = partial_apply [callee_guaranteed] %{{.*}}([[COPY_ARG]]) : $@convention(thin) (@guaranteed @callee_guaranteed () -> (Builtin.Int64, Builtin.Int64)) -> @out Any +// HECK: destroy_value [[PAPPLY]] : $@callee_guaranteed () -> @out Any +// CHECK-NOT: destroy_value [[ARG]] +// HECK: return %{{.*}} : $() +// CHECK-LABEL: } // end sil function '$ss21f310_convnIntTupleAnyyyBi64__Bi64_tycF' +func f310_convnIntTupleAny(_ t: @escaping () -> (Builtin.Int64, Builtin.Int64)) { + let _: () -> Any = t +} + +// f310_convIntTupleAny continued Test: reabstraction of non-empty tuple to Any +// --- +// CHECK-LABEL: sil shared [transparent] [serializable] [reabstraction_thunk] [ossa] @$sBi64_Bi64_Iegdd_ypIegr_TR : $@convention(thin) (@guaranteed @callee_guaranteed () -> (Builtin.Int64, Builtin.Int64)) -> @out Any { +// CHECK: bb0([[ARG:%.*]] : @guaranteed $@callee_guaranteed () -> (Builtin.Int64, Builtin.Int64)): +// HECK: [[ASTACK:%.*]] = alloc_stack $Any +// HECK: [[IADDR:%.*]] = init_existential_addr [[ASTACK]] : $*Any, $(Builtin.Int64, Builtin.Int64) +// HECK: [[TADDR0:%.*]] = tuple_element_addr [[IADDR]] : $*(Builtin.Int64, Builtin.Int64), 0 +// HECK: [[TADDR1:%.*]] = tuple_element_addr [[IADDR]] : $*(Builtin.Int64, Builtin.Int64), 1 +// HECK: [[APPLYARG:%.*]] = apply [[ARG]]() : $@callee_guaranteed () -> (Builtin.Int64, Builtin.Int64) 
+// HECK: [[TEXTRACT0:%.*]] = tuple_extract [[APPLYARG]] : $(Builtin.Int64, Builtin.Int64), 0 +// HECK: [[TEXTRACT1:%.*]] = tuple_extract [[APPLYARG]] : $(Builtin.Int64, Builtin.Int64), 1 +// HECK: store [[TEXTRACT0]] to [trivial] [[TADDR0]] : $*Builtin.Int64 +// HECK: store [[TEXTRACT1]] to [trivial] [[TADDR1]] : $*Builtin.Int64 +// HECK: [[LOAD_EXIST:%.*]] = load [trivial] [[IADDR]] : $*(Builtin.Int64, Builtin.Int64) +// HECK: [[RETVAL:%.*]] = init_existential_value [[LOAD_EXIST]] : $(Builtin.Int64, Builtin.Int64), $(Builtin.Int64, Builtin.Int64), $Any +// HECK: dealloc_stack [[ASTACK]] : $*Any +// HECK: return [[RETVAL]] : $Any +// CHECK-LABEL: } // end sil function '$sBi64_Bi64_Iegdd_ypIegr_TR' + +// Tests translating and imploding into Any under opaque value mode +// --- +// CHECK-LABEL: sil hidden [ossa] @$ss20f320_transImplodeAnyyyyypcF : $@convention(thin) (@guaranteed @callee_guaranteed (@in_guaranteed Any) -> ()) -> () { +// CHECK: bb0([[ARG:%.*]] : @guaranteed $@callee_guaranteed (@in_guaranteed Any) -> ()): +// HECK: [[COPY_ARG:%.*]] = copy_value [[ARG]] +// HECK: [[PAPPLY:%.*]] = partial_apply [callee_guaranteed] %{{.*}}([[COPY_ARG]]) : $@convention(thin) (Builtin.Int64, Builtin.Int64, @guaranteed @callee_guaranteed (@in_guaranteed Any) -> ()) -> () +// HECK: destroy_value [[PAPPLY]] : $@callee_guaranteed (Builtin.Int64, Builtin.Int64) -> () +// CHECK-NOT: destroy_value [[ARG]] +// HECK: return %{{.*}} : $() +// CHECK-LABEL: } // end sil function '$ss20f320_transImplodeAnyyyyypcF' +func f320_transImplodeAny(_ t: @escaping (Any) -> ()) { + let _: ((Builtin.Int64, Builtin.Int64)) -> () = t +} + +// f320_transImplodeAny continued Test: reabstraction thunk +// --- +// CHECK-LABEL: sil shared [transparent] [serializable] [reabstraction_thunk] [ossa] @$sypIegn_Bi64_Bi64_Iegyy_TR : $@convention(thin) (Builtin.Int64, Builtin.Int64, @guaranteed @callee_guaranteed (@in_guaranteed Any) -> ()) -> () { +// CHECK: bb0([[ARG0:%.*]] : $Builtin.Int64, [[ARG1:%.*]] : $Builtin.Int64, [[ARG2:%.*]] : @guaranteed $@callee_guaranteed (@in_guaranteed Any) -> ()): +// HECK: [[ASTACK:%.*]] = alloc_stack $Any +// HECK: [[IADDR:%.*]] = init_existential_addr [[ASTACK]] : $*Any, $(Builtin.Int64, Builtin.Int64) +// HECK: [[TADDR0:%.*]] = tuple_element_addr [[IADDR]] : $*(Builtin.Int64, Builtin.Int64), 0 +// HECK: store [[ARG0]] to [trivial] [[TADDR0]] : $*Builtin.Int64 +// HECK: [[TADDR1:%.*]] = tuple_element_addr [[IADDR]] : $*(Builtin.Int64, Builtin.Int64), 1 +// HECK: store [[ARG1]] to [trivial] [[TADDR1]] : $*Builtin.Int64 +// HECK: [[LOAD_EXIST:%.*]] = load [trivial] [[IADDR]] : $*(Builtin.Int64, Builtin.Int64) +// HECK: [[INIT_OPAQUE:%.*]] = init_existential_value [[LOAD_EXIST]] : $(Builtin.Int64, Builtin.Int64), $(Builtin.Int64, Builtin.Int64), $Any +// HECK: [[BORROWED_INIT_OPAQUE:%.*]] = begin_borrow [[INIT_OPAQUE]] +// HECK: [[APPLYARG:%.*]] = apply [[ARG2]]([[BORROWED_INIT_OPAQUE]]) : $@callee_guaranteed (@in_guaranteed Any) -> () +// HECK: dealloc_stack [[ASTACK]] : $*Any +// HECK: return %{{.*}} : $() +// CHECK-LABEL: } // end sil function '$sypIegn_Bi64_Bi64_Iegyy_TR' + +// Tests support for address only let closures under opaque value mode - they are not by-address anymore +// --- +// CHECK-LABEL: sil hidden [ossa] @$ss19f330_addrLetClosureyxxlF : $@convention(thin) (@in_guaranteed T) -> @out T { +// CHECK: bb0([[ARG:%.*]] : @guaranteed $T): +// HECK: [[COPY_ARG:%.*]] = copy_value [[ARG]] : $T +// HECK: return [[COPY_ARG]] : $T +// CHECK-LABEL: } // end sil function 
'$ss19f330_addrLetClosureyxxlF' +func f330_addrLetClosure(_ x:T) -> T { + return { { x }() }() +} + +// Tests support for capture of a mutable opaque value type +// --- +// CHECK-LABEL: sil hidden [ossa] @$ss15f340_captureBoxyyF : $@convention(thin) () -> () { +// CHECK: bb0: +// HECK: [[ALLOC_OF_BOX:%.*]] = alloc_box ${ var EmptyP }, var, name "mutableAddressOnly" +// HECK: [[PROJ_BOX:%.*]] = project_box [[ALLOC_OF_BOX]] +// HECK: [[APPLY_FOR_BOX:%.*]] = apply %{{.*}}(%{{.*}}) : $@convention(method) (@thin AddressOnlyStruct.Type) -> AddressOnlyStruct +// HECK: [[INIT_OPAQUE:%.*]] = init_existential_value [[APPLY_FOR_BOX]] : $AddressOnlyStruct, $AddressOnlyStruct, $EmptyP +// HECK: store [[INIT_OPAQUE]] to [init] [[PROJ_BOX]] : $*EmptyP +// HECK: [[BORROW_BOX:%.*]] = begin_borrow [[ALLOC_OF_BOX]] : ${ var EmptyP } +// HECK: mark_function_escape [[PROJ_BOX]] : $*EmptyP +// HECK: apply %{{.*}}([[BORROW_BOX]]) : $@convention(thin) (@guaranteed { var EmptyP }) -> () +// HECK: return %{{.*}} : $() +// CHECK-LABEL: } // end sil function '$ss15f340_captureBoxyyF' +func f340_captureBox() { + var mutableAddressOnly: EmptyP = AddressOnlyStruct() + + func captureEverything() { + genericInout(&mutableAddressOnly) + } + + captureEverything() +} + +// Tests support for guards and indirect enums for opaque values +// --- +// CHECK-LABEL: sil hidden [ossa] @$ss14f360_guardEnumyys08IndirectC0OyxGlF : $@convention(thin) (@guaranteed IndirectEnum) -> () { +// CHECK: bb0([[ARG:%.*]] : @guaranteed $IndirectEnum): +// HECK: [[COPY_ARG:%.*]] = copy_value [[ARG]] +// HECK: switch_enum [[COPY_ARG]] : $IndirectEnum, case #IndirectEnum.Node!enumelt: [[NODE_BB:bb[0-9]+]], case #IndirectEnum.Nil!enumelt: [[NIL_BB:bb[0-9]+]] +// +// HECK: [[NIL_BB]]: +// HECK: br [[NIL_TRAMPOLINE:bb[0-9]+]] +// +// HECK: [[NIL_TRAMPOLINE]]: +// HECK: br [[EPILOG_BB:bb[0-9]+]] +// +// HECK: [[NODE_BB]]([[EARG:%.*]] : $<τ_0_0> { var τ_0_0 } ): +// HECK: [[PROJ_BOX:%.*]] = project_box [[EARG]] +// HECK: [[LOAD_BOX:%.*]] = load [take] [[PROJ_BOX]] : $*T +// HECK: [[COPY_BOX:%.*]] = copy_value [[LOAD_BOX]] : $T +// HECK: destroy_value [[EARG]] +// HECK: br [[CONT_BB:bb[0-9]+]] +// +// HECK: [[CONT_BB]]: +// HECK: destroy_value [[COPY_BOX]] +// HECK: br [[EPILOG_BB]] +// +// HECK: [[EPILOG_BB]]: +// CHECK-NOT: destroy_value [[ARG]] +// HECK: return %{{.*}} : $() +// CHECK-LABEL: } // end sil function '$ss14f360_guardEnumyys08IndirectC0OyxGlF' +func f360_guardEnum(_ e: IndirectEnum) { + do { + guard case .Node(let x) = e else { return } + _ = x + } +} + +// Tests contextual init() of opaque value types +// --- +// CHECK-LABEL: sil hidden [ossa] @$ss17f370_optToOptCastyxSgABlF : $@convention(thin) (@in_guaranteed Optional) -> @out Optional { +// CHECK: bb0([[ARG:%.*]] : @guaranteed $Optional): +// HECK: [[COPY_ARG:%.*]] = copy_value [[ARG]] +// CHECK-NOT: destroy_value [[ARG]] +// HECK: return [[COPY_ARG]] : $Optional +// CHECK-LABEL: } // end sil function '$ss17f370_optToOptCastyxSgABlF' +func f370_optToOptCast(_ x : T!) -> T? 
{ + return x +} + +// Tests casting optional opaques to optional opaques +// --- +// CHECK-LABEL: sil hidden [ossa] @$ss19f380_contextualInityyBi64_SgF : $@convention(thin) (Optional) -> () { +// CHECK: bb0([[ARG:%.*]] : $Optional): +// HECK: [[ALLOC_OF_BOX:%.*]] = alloc_box ${ var Optional }, var +// HECK: [[PROJ_BOX:%.*]] = project_box [[ALLOC_OF_BOX]] +// HECK: store [[ARG]] to [trivial] [[PROJ_BOX]] : $*Optional +// HECK: destroy_value [[ALLOC_OF_BOX]] +// HECK: return %{{.*}} : $() +// CHECK-LABEL: } // end sil function '$ss19f380_contextualInityyBi64_SgF' +func f380_contextualInit(_ a : Builtin.Int64?) { + var x: Builtin.Int64? = a + genericInout(&x) + _ = x +} + +// Tests opaque call result types +// --- +// CHECK-LABEL: sil hidden [ossa] @$ss19f390_addrCallResultyyxycSglF : $@convention(thin) (@guaranteed Optional<@callee_guaranteed @substituted <τ_0_0> () -> @out τ_0_0 for >) -> () { +// CHECK: bb0([[ARG:%.*]] : @guaranteed $Optional<@callee_guaranteed @substituted <τ_0_0> () -> @out τ_0_0 for >): +// HECK: [[ALLOC_OF_BOX:%.*]] = alloc_box $<τ_0_0> { var Optional<τ_0_0> } +// HECK: [[PROJ_BOX:%.*]] = project_box [[ALLOC_OF_BOX]] +// HECK: [[COPY_ARG:%.*]] = copy_value [[ARG]] +// HECK: [[SENUM:%.*]] = select_enum [[COPY_ARG]] +// HECK: cond_br [[SENUM]], bb3, bb1 +// HECK: bb1: +// HECK: br bb2 +// HECK: bb2: +// HECK: [[ONONE:%.*]] = enum $Optional, #Optional.none!enumelt +// HECK: br bb4([[ONONE]] : $Optional) +// HECK: bb4(%{{.*}} : $Optional): +// HECK: return %{{.*}} : $() +// CHECK-LABEL: } // end sil function '$ss19f390_addrCallResultyyxycSglF' +func f390_addrCallResult(_ f: (() -> T)?) { + var x = f?() + genericInout(&x) + _ = x +} + +// Tests reabstraction / partial apply of protocols under opaque value mode +// --- +// CHECK-LABEL: sil hidden [ossa] @$ss16f400_maybeCloneP1cys8Clonable_p_tF : $@convention(thin) (@in_guaranteed Clonable) -> () { +// CHECK: bb0([[ARG:%.*]] : @guaranteed $Clonable): +// HECK: [[OPEN_ARG:%.*]] = open_existential_value [[ARG]] : $Clonable +// HECK: [[APPLY_OPAQUE:%.*]] = apply %{{.*}}<@opened({{.*}}) Clonable>([[OPEN_ARG]]) : $@convention(thin) <τ_0_0 where τ_0_0 : Clonable> (@in_guaranteed τ_0_0) -> @owned @callee_guaranteed () -> @out Optional<τ_0_0> +// HECK: [[PAPPLY:%.*]] = partial_apply [callee_guaranteed] %{{.*}}<@opened({{.*}}) Clonable>([[APPLY_OPAQUE]]) : $@convention(thin) <τ_0_0 where τ_0_0 : Clonable> (@guaranteed @callee_guaranteed () -> @out Optional<τ_0_0>) -> @out Optional +// CHECK-NOT: destroy_value [[ARG]] +// HECK: return %{{.*}} : $() +// CHECK-LABEL: } // end sil function '$ss16f400_maybeCloneP1cys8Clonable_p_tF' +func f400_maybeCloneP(c: Clonable) { + let _: () -> Clonable? 
= c.maybeClone +} + +// Tests global opaque values / subscript rvalues +// --- +// CHECK-LABEL: sil hidden [ossa] @$ss20f410_globalRvalueGetyBi64_Bi64_F : $@convention(thin) (Builtin.Int64) -> Builtin.Int64 { +// CHECK: bb0([[ARG:%.*]] : $Builtin.Int64): +// HECK: [[GLOBAL_ADDR:%.*]] = global_addr @$s20opaque_values_silgen16subscriptableGetAA013SubscriptableE0_pvp : $*SubscriptableGet +// HECK: [[READ:%.*]] = begin_access [read] [dynamic] [[GLOBAL_ADDR]] : $*SubscriptableGet +// HECK: [[OPEN_ARG:%.*]] = open_existential_addr immutable_access [[READ]] : $*SubscriptableGet to $*@opened +// HECK: [[GET_OPAQUE:%.*]] = load [copy] [[OPEN_ARG]] : $*@opened +// HECK: [[RETVAL:%.*]] = apply %{{.*}}<@opened({{.*}}) SubscriptableGet>([[ARG]], [[GET_OPAQUE]]) : $@convention(witness_method: SubscriptableGet) <τ_0_0 where τ_0_0 : SubscriptableGet> (Builtin.Int64, @in_guaranteed τ_0_0) -> Builtin.Int64 +// HECK: destroy_value [[GET_OPAQUE]] +// HECK: return [[RETVAL]] : $Builtin.Int64 +// CHECK-LABEL: } // end sil function '$ss20f410_globalRvalueGetyBi64_Bi64_F' +func f410_globalRvalueGet(_ i : Builtin.Int64) -> Builtin.Int64 { + return subscriptableGet![i] +} + +// Tests global opaque values / subscript lvalues +// --- +// CHECK-LABEL: sil hidden [ossa] @$ss20f420_globalLvalueGetyBi64_SgBi64_F : $@convention(thin) (Builtin.Int64) -> Optional { +// CHECK: bb0([[ARG:%.*]] : $Builtin.Int64): +// HECK: [[GLOBAL_ADDR:%.*]] = global_addr @$s20opaque_values_silgen19subscriptableGetSetAA013SubscriptableeF0_pvp : $*SubscriptableGetSet +// HECK: [[READ:%.*]] = begin_access [read] [dynamic] [[GLOBAL_ADDR]] : $*SubscriptableGetSet +// HECK: [[OPEN_ARG:%.*]] = open_existential_addr immutable_access [[READ]] : $*SubscriptableGetSet to $*@opened +// HECK: [[GET_OPAQUE:%.*]] = load [copy] [[OPEN_ARG]] : $*@opened +// HECK: [[RETVAL:%.*]] = apply %{{.*}}<@opened({{.*}}) SubscriptableGetSet>([[ARG]], [[GET_OPAQUE]]) : $@convention(witness_method: SubscriptableGetSet) <τ_0_0 where τ_0_0 : SubscriptableGetSet> (Builtin.Int64, @in_guaranteed τ_0_0) -> Builtin.Int64 +// HECK: destroy_value [[GET_OPAQUE]] +// HECK: return [[RETVAL]] : $Builtin.Int64 +// CHECK-LABEL: } // end sil function '$ss20f420_globalLvalueGetyBi64_SgBi64_F' +func f420_globalLvalueGet(_ i : Builtin.Int64) -> Builtin.Int64? 
{ + return subscriptableGetSet![i] +} + +// Tests tuple transformation +// --- +// CHECK-LABEL: sil hidden [ossa] @$ss21f430_callUnreachableF1tyx_tlF : $@convention(thin) (@in_guaranteed T) -> () { +// CHECK: bb0([[ARG:%.*]] : @guaranteed $T): +// HECK: [[APPLY_T:%.*]] = apply %{{.*}}<((T) -> (), T)>() : $@convention(thin) <τ_0_0> () -> @out Optional<(Builtin.Int64, τ_0_0)> +// HECK: switch_enum [[APPLY_T]] : $Optional<(Builtin.Int64, (@callee_guaranteed (@in_guaranteed T) -> @out (), T))>, case #Optional.some!enumelt: bb2, case #Optional.none!enumelt: bb1 +// HECK: bb2([[ENUMARG:%.*]] : $(Builtin.Int64, (@callee_guaranteed (@in_guaranteed T) -> @out (), T))): +// HECK: ([[TELEM0:%.*]], [[TELEM1:%.*]]) = destructure_tuple [[ENUMARG]] : $(Builtin.Int64, (@callee_guaranteed (@in_guaranteed T) -> @out (), T)) +// HECK: ([[TELEM10:%.*]], [[TELEM11:%.*]]) = destructure_tuple [[TELEM1]] : $(@callee_guaranteed (@in_guaranteed T) -> @out (), T) +// HECK: [[PAPPLY:%.*]] = partial_apply [callee_guaranteed] %{{.*}}([[TELEM10]]) : $@convention(thin) <τ_0_0> (@in_guaranteed τ_0_0, @guaranteed @callee_guaranteed (@in_guaranteed τ_0_0) -> @out ()) -> () +// HECK: [[NEWT0:%.*]] = tuple ([[PAPPLY]] : $@callee_guaranteed (@in_guaranteed T) -> (), [[TELEM11]] : $T) +// HECK: [[NEWT1:%.*]] = tuple ([[TELEM0]] : $Builtin.Int64, [[NEWT0]] : $(@callee_guaranteed (@in_guaranteed T) -> (), T)) +// HECK: [[NEWENUM:%.*]] = enum $Optional<(Builtin.Int64, (@callee_guaranteed (@in_guaranteed T) -> (), T))>, #Optional.some!enumelt, [[NEWT1]] : $(Builtin.Int64, (@callee_guaranteed (@in_guaranteed T) -> (), T)) +// HECK: br bb3([[NEWENUM]] : $Optional<(Builtin.Int64, (@callee_guaranteed (@in_guaranteed T) -> (), T))>) +// HECK: bb3([[ENUMIN:%.*]] : $Optional<(Builtin.Int64, (@callee_guaranteed (@in_guaranteed T) -> (), T))>): +// HECK: destroy_value [[ENUMIN]] +// HECK: return %{{.*}} : $() +// CHECK-LABEL: } // end sil function '$ss21f430_callUnreachableF1tyx_tlF' +func f430_callUnreachableF(t: T) { + let _: (Builtin.Int64, ((T) -> (), T))? = unreachableF() +} + + +// Further testing for conditional checked cast under opaque value mode - make sure we don't create a buffer for results +// --- +// CHECK-LABEL: sil hidden [ossa] @$ss20f440_cleanupEmissionyyxlF : $@convention(thin) (@in_guaranteed T) -> () { +// CHECK: bb0([[ARG:%.*]] : @guaranteed $T): +// HECK: [[COPY_ARG:%.*]] = copy_value [[ARG]] +// HECK: checked_cast_value_br [[COPY_ARG]] : $T to $EmptyP, bb2, bb1 +// +// HECK: bb2([[PTYPE:%.*]] : $EmptyP): +// HECK: [[PSOME:%.*]] = enum $Optional, #Optional.some!enumelt, [[PTYPE]] : $EmptyP +// HECK: br bb3([[PSOME]] : $Optional) +// +// HECK: bb3([[ENUMRES:%.*]] : $Optional): +// HECK: switch_enum [[ENUMRES]] : $Optional, case #Optional.some!enumelt: [[SOME_BB:bb[0-9]+]], case #Optional.none!enumelt: [[NONE_BB:bb[0-9]+]] +// +// HECK: [[NONE_BB]]: +// HECK: br [[NONE_TRAMPOLINE:bb[0-9]+]] +// +// HECK: [[NONE_TRAMPOLINE]]: +// HECK: br [[EPILOG_BB:bb[0-9]+]] +// +// HECK: [[SOME_BB]]([[ENUMRES2:%.*]] : $EmptyP): +// HECK: br [[CONT_BB:bb[0-9]+]] +// +// HECK: [[CONT_BB]]: +// HECK: destroy_value [[ENUMRES2]] +// HECK: br [[EPILOG_BB]] +// +// HECK: [[EPILOG_BB]]: +// CHECK-NOT: destroy_value [[ARG]] +// HECK: return %{{.*}} : $() +// CHECK-LABEL: } // end sil function '$ss20f440_cleanupEmissionyyxlF' +func f440_cleanupEmission(_ x: T) { + guard let x2 = x as? EmptyP else { return } + _ = x2 +} + + +// Test emitNativeToCBridgedNonoptionalValue. 
+// --- +// CHECK-objc-LABEL: sil hidden [ossa] @$ss14f470_nativeToC7fromAnyyXlyp_tF : $@convention(thin) (@in_guaranteed Any) -> @owned AnyObject { +// CHECK-objc: bb0(%0 : $Any): +// CHECK-objc: [[BORROW:%.*]] = begin_borrow %0 : $Any +// CHECK-objc: [[SRC:%.*]] = copy_value [[BORROW]] : $Any +// CHECK-objc: [[OPEN:%.*]] = open_existential_opaque [[SRC]] : $Any to $@opened +// CHECK-objc: [[COPY:%.*]] = copy_value [[OPEN]] : $@opened +// CHECK-objc: [[F:%.*]] = function_ref @$sf27_bridgeAnythingToObjectiveCyyXlxlF : $@convention(thin) <τ_0_0> (@in_guaranteed τ_0_0) -> @owned AnyObject +// CHECK-objc: [[RET:%.*]] = apply [[F]]<@opened("{{.*}}") Any>([[COPY]]) : $@convention(thin) <τ_0_0> (@in_guaranteed τ_0_0) -> @owned AnyObject +// CHECK-objc: destroy_value [[SRC]] : $Any +// CHECK-objc: destroy_value %0 : $Any +// CHECK-objc: return [[RET]] : $AnyObject +// CHECK-objc-LABEL: } // end sil function '$ss14f470_nativeToC7fromAnyyXlyp_tF' +#if _runtime(_ObjC) +func f470_nativeToC(fromAny any: Any) -> AnyObject { + return any as AnyObject +} +#endif + + +// Test emitOpenExistential. +// --- +// CHECK-LABEL: sil hidden [ossa] @$ss13f480_getError04someC0yps0C0_p_tF : $@convention(thin) (@guaranteed Error) -> @out Any { +// CHECK: bb0([[ARG:%.*]] : @guaranteed $Error): +// HECK: [[VAL:%.*]] = open_existential_box_value [[ARG]] : $Error to $@opened("{{.*}}") Error +// HECK: [[COPY:%.*]] = copy_value [[VAL]] : $@opened("{{.*}}") Error +// HECK: [[ANY:%.*]] = init_existential_value [[COPY]] : $@opened("{{.*}}") Error, $@opened("{{.*}}") Error, $Any +// CHECK-NOT: destroy_value [[ARG]] : $Error +// HECK: return [[ANY]] : $Any +// CHECK-LABEL: } // end sil function '$ss13f480_getError04someC0yps0C0_p_tF' +func f480_getError(someError: Error) -> Any { + return someError +} + +// Test visitBindOptionalExpr +// --- +// CHECK-LABEL: sil hidden [ossa] @$ss15f500_getAnyHashys1P_pSgs14ConvertibleToP_pSgF : $@convention(thin) (@in_guaranteed Optional) -> @out Optional
<P>
{ +// CHECK: bb0(%0 : @guaranteed $Optional): +// HECK: [[COPY:%.*]] = copy_value [[ARG]] : $Optional +// HECK: [[DATA:%.*]] = unchecked_enum_data [[COPY]] : $Optional, #Optional.some!enumelt +// HECK: [[BORROW_DATA:%.*]] = begin_borrow [[DATA]] : $ConvertibleToP +// HECK: [[VAL:%.*]] = open_existential_value [[BORROW_DATA]] : $ConvertibleToP to $@opened("{{.*}}") ConvertibleToP +// HECK: [[WT:%.*]] = witness_method $@opened("{{.*}}") ConvertibleToP, #ConvertibleToP.asP : (Self) -> () -> P, [[VAL]] : $@opened("{{.*}}") ConvertibleToP : $@convention(witness_method: ConvertibleToP) <τ_0_0 where τ_0_0 : ConvertibleToP> (@in_guaranteed τ_0_0) -> @out P +// HECK: [[AS_P:%.*]] = apply [[WT]]<@opened("{{.*}}") ConvertibleToP>([[VAL]]) : $@convention(witness_method: ConvertibleToP) <τ_0_0 where τ_0_0 : ConvertibleToP> (@in_guaranteed τ_0_0) -> @out P +// HECK: [[ENUM:%.*]] = enum $Optional
<P>
, #Optional.some!enumelt, [[AS_P]] : $P +// HECK: destroy_value [[DATA]] : $ConvertibleToP +// HECK: br bb{{.*}}([[ENUM]] : $Optional
<P>
) +// HECK: } // end sil function '$ss15f500_getAnyHashys1P_pSgs14ConvertibleToP_pSgF' +func f500_getAnyHash(_ value: ConvertibleToP?) -> P? { + return value?.asP() +} +public protocol FooPP { + func foo() -> Self +} + +// Test emitting a protocol witness for a method (with @in_guaranteed self) on a dependent generic type. +// --- +// CHECK-LABEL: sil private [transparent] [thunk] [ossa] @$ss15f510_OpaqueSelfVyxGs5FooPPssADP3fooxyFTW : $@convention(witness_method: FooPP) <τ_0_0> (@in_guaranteed f510_OpaqueSelf<τ_0_0>) -> @out f510_OpaqueSelf<τ_0_0> { +// CHECK: bb0(%0 : @guaranteed $f510_OpaqueSelf<τ_0_0>): +// HECK: [[FN:%.*]] = function_ref @$s20opaque_values_silgen21f510_OpaqueSelfV3fooACyxGyF : $@convention(method) <τ_0_0> (@in_guaranteed f510_OpaqueSelf<τ_0_0>) -> @out f510_OpaqueSelf<τ_0_0> +// HECK: [[RESULT:%.*]] = apply [[FN]]<τ_0_0>(%0) : $@convention(method) <τ_0_0> (@in_guaranteed f510_OpaqueSelf<τ_0_0>) -> @out f510_OpaqueSelf<τ_0_0> +// HECK: return [[RESULT]] : $f510_OpaqueSelf<τ_0_0> +// CHECK-LABEL: } // end sil function '$ss15f510_OpaqueSelfVyxGs5FooPPssADP3fooxyFTW' +struct f510_OpaqueSelf : FooPP { + var x: Base + + func foo() -> f510_OpaqueSelf { + return self + } +} + +// Tests conditional value casts and correspondingly generated reabstraction thunk, with types +// --- +// CHECK-LABEL: sil hidden [ossa] @$ss17f520_condTFromAnyyyyp_xtlF : $@convention(thin) (@in_guaranteed Any, @in_guaranteed T) -> () { +// CHECK: bb0([[ARG0:%.*]] : @guaranteed $Any, [[ARG1:%.*]] : @guaranteed $T): +// HECK: [[COPY_ARG:%.*]] = copy_value [[ARG]] +// HECK: checked_cast_value_br [[COPY_ARG]] : $Any to $@callee_guaranteed (@in_guaranteed (Builtin.Int64, T)) -> @out (Builtin.Int64, T), bb2, bb1 +// HECK: bb2([[THUNK_PARAM:%.*]] : $@callee_guaranteed (@in_guaranteed (Builtin.Int64, T)) -> @out (Builtin.Int64, T)): +// HECK: [[THUNK_REF:%.*]] = function_ref @{{.*}} : $@convention(thin) <τ_0_0> (Builtin.Int64, @in_guaranteed τ_0_0, @guaranteed @callee_guaranteed (@in_guaranteed (Builtin.Int64, τ_0_0)) -> @out (Builtin.Int64, τ_0_0)) -> (Builtin.Int64, @out τ_0_0) +// HECK: partial_apply [callee_guaranteed] [[THUNK_REF]]([[THUNK_PARAM]]) +// CHECK: bb6: +// HECK: return %{{.*}} : $() +// CHECK-LABEL: } // end sil function '$ss17f520_condTFromAnyyyyp_xtlF' +func f520_condTFromAny(_ x: Any, _ y: T) { + if let f = x as? (Int64, T) -> (Int64, T) { + _ = f(Int64(42), y) + } +} + +// Make sure that we insert a destroy of the box even though we used an Builtin.Int64 type. 
+// CHECK-LABEL: sil [ossa] @$ss16f530_assignToVaryyF : $@convention(thin) () -> () { +// CHECK: bb0: +// HECK: [[Y_BOX:%.*]] = alloc_box ${ var Builtin.Int64 }, var, name "y" +// HECK: [[PROJECT_Y_BOX:%.*]] = project_box [[Y_BOX]] : ${ var Builtin.Int64 }, 0 +// HECK: [[X_BOX:%.*]] = alloc_box ${ var Any }, var, name "x" +// HECK: [[PROJECT_X_BOX:%.*]] = project_box [[X_BOX]] : ${ var Any }, 0 +// HECK: [[ACCESS_PROJECT_Y_BOX:%.*]] = begin_access [read] [unknown] [[PROJECT_Y_BOX]] : $*Builtin.Int64 +// HECK: [[Y:%.*]] = load [trivial] [[ACCESS_PROJECT_Y_BOX]] : $*Builtin.Int64 +// HECK: [[Y_ANY_FOR_X:%.*]] = init_existential_value [[Y]] : $Builtin.Int64, $Builtin.Int64, $Any +// HECK: store [[Y_ANY_FOR_X]] to [init] [[PROJECT_X_BOX]] +// HECK: [[ACCESS_PROJECT_Y_BOX:%.*]] = begin_access [read] [unknown] [[PROJECT_Y_BOX]] : $*Builtin.Int64 +// HECK: [[Y:%.*]] = load [trivial] [[ACCESS_PROJECT_Y_BOX]] : $*Builtin.Int64 +// HECK: [[Y_ANY_FOR_Z:%.*]] = init_existential_value [[Y]] : $Builtin.Int64, $Builtin.Int64, $Any +// HECK: destroy_value [[Y_ANY_FOR_Z]] +// HECK: destroy_value [[X_BOX]] +// HECK: destroy_value [[Y_BOX]] +// HECK: } // end sil function '$ss16f530_assignToVaryyF' +public func f530_assignToVar() { + var y: Int64 = 3 + var x: Any = y + let z: Any = y + genericInout(&y) + genericInout(&x) + _ = z +} + +// Test open_existential_value ownership +// --- +// CHECK-LABEL: sil [ossa] @$ss16f540_takeDecoder4fromBi1_s0C0_p_tKF : $@convention(thin) (@in_guaranteed Decoder) -> (Builtin.Int1, @error Error) { +// CHECK: bb0(%0 : @guaranteed $Decoder): +// HECK: [[OPENED:%.*]] = open_existential_value %0 : $Decoder to $@opened("{{.*}}") Decoder +// HECK: [[WT:%.*]] = witness_method $@opened("{{.*}}") Decoder, #Decoder.unkeyedContainer : (Self) -> () throws -> UnkeyedDecodingContainer, %3 : $@opened("{{.*}}") Decoder : $@convention(witness_method: Decoder) <τ_0_0 where τ_0_0 : Decoder> (@in_guaranteed τ_0_0) -> (@out UnkeyedDecodingContainer, @error Error) +// HECK: try_apply [[WT]]<@opened("{{.*}}") Decoder>([[OPENED]]) : $@convention(witness_method: Decoder) <τ_0_0 where τ_0_0 : Decoder> (@in_guaranteed τ_0_0) -> (@out UnkeyedDecodingContainer, @error Error), normal bb2, error bb1 +// +// CHECK:bb{{.*}}([[RET1:%.*]] : @owned $UnkeyedDecodingContainer): +// HECK: [[BORROW2:%.*]] = begin_borrow [lexical] [[RET1]] : $UnkeyedDecodingContainer +// HECK: [[OPENED2:%.*]] = open_existential_value [[BORROW2]] : $UnkeyedDecodingContainer to $@opened("{{.*}}") UnkeyedDecodingContainer +// HECK: [[WT2:%.*]] = witness_method $@opened("{{.*}}") UnkeyedDecodingContainer, #UnkeyedDecodingContainer.isAtEnd!getter : (Self) -> () -> Builtin.Int1, [[OPENED2]] : $@opened("{{.*}}") UnkeyedDecodingContainer : $@convention(witness_method: UnkeyedDecodingContainer) <τ_0_0 where τ_0_0 : UnkeyedDecodingContainer> (@in_guaranteed τ_0_0) -> Builtin.Int1 +// HECK: [[RET2:%.*]] = apply [[WT2]]<@opened("{{.*}}") UnkeyedDecodingContainer>([[OPENED2]]) : $@convention(witness_method: UnkeyedDecodingContainer) <τ_0_0 where τ_0_0 : UnkeyedDecodingContainer> (@in_guaranteed τ_0_0) -> Builtin.Int1 +// HECK: end_borrow [[BORROW2]] : $UnkeyedDecodingContainer +// HECK: destroy_value [[RET1]] : $UnkeyedDecodingContainer +// CHECK-NOT: destroy_value %0 : $Decoder +// HECK: return [[RET2]] : $Builtin.Int1 +// CHECK-LABEL: } // end sil function '$ss16f540_takeDecoder4fromBi1_s0C0_p_tKF' +public func f540_takeDecoder(from decoder: Decoder) throws -> Builtin.Int1 { + let container = try decoder.unkeyedContainer() + return 
container.isAtEnd +} diff --git a/test/SILGen/opaque_values_silgen_todo.swift b/test/SILGen/opaque_values_silgen_todo.swift index 24d10eb4a1b73..42659a3881050 100644 --- a/test/SILGen/opaque_values_silgen_todo.swift +++ b/test/SILGen/opaque_values_silgen_todo.swift @@ -1,2 +1,102 @@ // RUN: %target-swift-emit-silgen -enable-sil-opaque-values -emit-sorted-sil -Xllvm -sil-full-demangle %s | %FileCheck %s // REQUIRES: EnableSILOpaqueValues + +public protocol C : AnyObject {} + +public protocol _ObjectiveCBridgeable {} + +// A lot of standard library support is necessary to support raw enums. +// -------------------------------------------------------------------- + +infix operator == : ComparisonPrecedence +infix operator ~= : ComparisonPrecedence + +public struct Bool { + var _value: Builtin.Int1 + + public init() { + let zero: Int64 = 0 + self._value = Builtin.trunc_Int64_Int1(zero._value) + } + + internal init(_ v: Builtin.Int1) { self._value = v } + + public init(_ value: Bool) { + self = value + } +} + +extension Bool { + public func _getBuiltinLogicValue() -> Builtin.Int1 { + return _value + } +} + +public protocol Equatable { + /// Returns a Boolean value indicating whether two values are equal. + /// + /// Equality is the inverse of inequality. For any values `a` and `b`, + /// `a == b` implies that `a != b` is `false`. + /// + /// - Parameters: + /// - lhs: A value to compare. + /// - rhs: Another value to compare. + static func == (lhs: Self, rhs: Self) -> Bool +} + +public func ~= (a: T, b: T) -> Bool { + return a == b +} + +public protocol RawRepresentable { + associatedtype RawValue + + init?(rawValue: RawValue) + + var rawValue: RawValue { get } +} + +public func == (lhs: T, rhs: T) -> Bool + where T.RawValue : Equatable { + return lhs.rawValue == rhs.rawValue +} + +public protocol ExpressibleByStringLiteral {} +public protocol ExpressibleByFloatLiteral {} +public protocol ExpressibleByUnicodeScalarLiteral {} +public protocol ExpressibleByExtendedGraphemeClusterLiteral {} + +public struct Int64 : ExpressibleByIntegerLiteral, _ExpressibleByBuiltinIntegerLiteral, Equatable { + public var _value: Builtin.Int64 + public init(_builtinIntegerLiteral x: _MaxBuiltinIntegerType) { + _value = Builtin.s_to_s_checked_trunc_IntLiteral_Int64(x).0 + } + public typealias IntegerLiteralType = Int64 + public init(integerLiteral value: Int64) { + self = value + } + public static func ==(_ lhs: Int64, rhs: Int64) -> Bool { + return Bool(Builtin.cmp_eq_Int64(lhs._value, rhs._value)) + } +} + +public struct Int : _ExpressibleByBuiltinIntegerLiteral, ExpressibleByIntegerLiteral, Equatable { + var _value: Builtin.Int64 + public init() { + self = 0 + } + public typealias IntegerLiteralType = Int + public init(_builtinIntegerLiteral x: _MaxBuiltinIntegerType) { + _value = Builtin.s_to_s_checked_trunc_IntLiteral_Int64(x).0 + } + + public init(integerLiteral value: Int) { + self = value + } + + public static func ==(_ lhs: Int, rhs: Int) -> Bool { + return Bool(Builtin.cmp_eq_Int64(lhs._value, rhs._value)) + } +} + +// ----------------------------------------------------------------------------- diff --git a/test/SILGen/opaque_values_silgen_vtable.swift b/test/SILGen/opaque_values_silgen_vtable.swift new file mode 100644 index 0000000000000..0c3f59a8da4ff --- /dev/null +++ b/test/SILGen/opaque_values_silgen_vtable.swift @@ -0,0 +1,53 @@ +// RUN: %target-swift-emit-silgen -enable-sil-opaque-values -emit-sorted-sil -Xllvm -sil-full-demangle %s | %FileCheck %s --check-prefix=CHECK 
--check-prefix=CHECK-%target-runtime + +class OpaqueClass { + typealias ObnoxiousTuple = (T, (T.Type, (T) -> T)) + + func inAndOut(x: T) -> T { return x } + func variantOptionalityTuples(x: ObnoxiousTuple) -> ObnoxiousTuple? { return x } +} + +class OpaqueTupleClass: OpaqueClass<(U, U)> { + override func inAndOut(x: (U, U)) -> (U, U) { return x } +} + +class StillOpaqueClass: OpaqueClass { + override func variantOptionalityTuples(x: ObnoxiousTuple?) -> ObnoxiousTuple { return x! } +} + +// Test vtables - OpaqueTupleClass +// --- +// CHECK-LABEL: sil private [thunk] [ossa] @$s27opaque_values_silgen_vtable16OpaqueTupleClassC8inAndOut1xx_xtx_xt_tFAA0eG0CAdExx_tFTV : $@convention(method) <τ_0_0> (@in_guaranteed (τ_0_0, τ_0_0), @guaranteed OpaqueTupleClass<τ_0_0>) -> @out (τ_0_0, τ_0_0) { +// HECK: bb0([[ARG0:%.*]] : $(U, U), [[ARG1:%.*]] : $OpaqueTupleClass): +// HECK: ([[TELEM0:%.*]], [[TELEM1:%.*]]) = destructure_tuple [[ARG0]] : $(U, U) +// HECK: [[APPLY:%.*]] = apply {{.*}}([[TELEM0]], [[TELEM1]], [[ARG1]]) : $@convention(method) <τ_0_0> (@in_guaranteed τ_0_0, @in_guaranteed τ_0_0, @guaranteed OpaqueTupleClass<τ_0_0>) -> (@out τ_0_0, @out τ_0_0) +// HECK: [[BORROWED_CALL:%.*]] = begin_borrow [[APPLY]] +// HECK: [[BORROWED_CALL_EXT0:%.*]] = tuple_extract [[BORROWED_CALL]] : $(U, U), 0 +// HECK: [[RETVAL0:%.*]] = copy_value [[BORROWED_CALL_EXT0]] : $U +// HECK: [[BORROWED_CALL_EXT1:%.*]] = tuple_extract [[BORROWED_CALL]] : $(U, U), 1 +// HECK: [[RETVAL1:%.*]] = copy_value [[BORROWED_CALL_EXT1]] : $U +// HECK: end_borrow [[BORROWED_CALL]] +// HECK: [[RETVAL:%.*]] = tuple ([[RETVAL0]] : $U, [[RETVAL1]] : $U) +// HECK: return [[RETVAL]] +// CHECK-LABEL: } // end sil function '$s27opaque_values_silgen_vtable16OpaqueTupleClassC8inAndOut1xx_xtx_xt_tFAA0eG0CAdExx_tFTV' + +// Test vtables - StillOpaqueClass +// --- +// CHECK-LABEL: sil private [thunk] [ossa] @$s27opaque_values_silgen_vtable16StillOpaqueClassC24variantOptionalityTuples1xx_xm_xxcttx_xm_xxcttSg_tFAA0fG0CAdeFx_xm_xxctt_tFTV : $@convention(method) <τ_0_0> (@in_guaranteed τ_0_0, @thick τ_0_0.Type, @guaranteed @callee_guaranteed @substituted <τ_0_0, τ_0_1> (@in_guaranteed τ_0_0) -> @out τ_0_1 for <τ_0_0, τ_0_0>, @guaranteed StillOpaqueClass<τ_0_0>) -> @out Optional<(τ_0_0, (@thick τ_0_0.Type, @callee_guaranteed @substituted <τ_0_0, τ_0_1> (@in_guaranteed τ_0_0) -> @out τ_0_1 for <τ_0_0, τ_0_0>))> { +// HECK: bb0([[ARG0:%.*]] : $T, [[ARG1:%.*]] : $@thick T.Type, [[ARG2:%.*]] : $@callee_guaranteed (@in_guaranteed T) -> @out T, [[ARG3:%.*]] : $StillOpaqueClass): +// HECK: [[TELEM0:%.*]] = tuple ([[ARG1]] : $@thick T.Type, [[ARG2]] : $@callee_guaranteed (@in_guaranteed T) -> @out T) +// HECK: [[TELEM1:%.*]] = tuple ([[ARG0]] : $T, [[TELEM0]] : $(@thick T.Type, @callee_guaranteed (@in_guaranteed T) -> @out T)) +// HECK: [[ENUMOPT0:%.*]] = enum $Optional<(T, (@thick T.Type, @callee_guaranteed (@in_guaranteed T) -> @out T))>, #Optional.some!enumelt, [[TELEM1]] : $(T, (@thick T.Type, @callee_guaranteed (@in_guaranteed T) -> @out T)) +// HECK: [[APPLY:%.*]] = apply {{.*}}([[ENUMOPT0]], [[ARG3]]) : $@convention(method) <τ_0_0> (@in_guaranteed Optional<(τ_0_0, (@thick τ_0_0.Type, @callee_guaranteed (@in_guaranteed τ_0_0) -> @out τ_0_0))>, @guaranteed StillOpaqueClass<τ_0_0>) -> (@out τ_0_0, @thick τ_0_0.Type, @owned @callee_guaranteed (@in_guaranteed τ_0_0) -> @out τ_0_0) +// HECK: [[BORROWED_T:%.*]] = begin_borrow [[APPLY]] +// HECK: [[BORROWED_T_EXT0:%.*]] = tuple_extract [[BORROWED_T]] : $(T, @thick T.Type, @callee_guaranteed (@in_guaranteed T) -> 
@out T), 0 +// HECK: [[RETVAL0:%.*]] = copy_value [[BORROWED_T_EXT0]] +// HECK: [[BORROWED_T_EXT1:%.*]] = tuple_extract [[BORROWED_T]] : $(T, @thick T.Type, @callee_guaranteed (@in_guaranteed T) -> @out T), 1 +// HECK: [[BORROWED_T_EXT2:%.*]] = tuple_extract [[BORROWED_T]] : $(T, @thick T.Type, @callee_guaranteed (@in_guaranteed T) -> @out T), 2 +// HECK: [[RETVAL1:%.*]] = copy_value [[BORROWED_T_EXT2]] +// HECK: end_borrow [[BORROWED_T]] +// HECK: [[RETTUPLE0:%.*]] = tuple ([[BORROWED_T_EXT1]] : $@thick T.Type, [[RETVAL1]] : $@callee_guaranteed (@in_guaranteed T) -> @out T) +// HECK: [[RETTUPLE1:%.*]] = tuple ([[RETVAL0]] : $T, [[RETTUPLE0]] : $(@thick T.Type, @callee_guaranteed (@in_guaranteed T) -> @out T)) +// HECK: [[RETVAL:%.*]] = enum $Optional<(T, (@thick T.Type, @callee_guaranteed (@in_guaranteed T) -> @out T))>, #Optional.some!enumelt, [[RETTUPLE1]] : $(T, (@thick T.Type, @callee_guaranteed (@in_guaranteed T) -> @out T)) +// HECK: return [[RETVAL]] +// CHECK-LABEL: // end sil function '$s27opaque_values_silgen_vtable16StillOpaqueClassC24variantOptionalityTuples1xx_xm_xxcttx_xm_xxcttSg_tFAA0fG0CAdeFx_xm_xxctt_tFTV' From d4bc86af86c484386b036a577c70a1244a5d8d14 Mon Sep 17 00:00:00 2001 From: Andrew Trick Date: Fri, 4 Mar 2022 23:08:51 -0800 Subject: [PATCH 20/29] [SIL-opaque] Various SILGen fixes --- lib/SILGen/SILGenApply.cpp | 2 +- lib/SILGen/SILGenDynamicCast.cpp | 4 ++-- lib/SILGen/SILGenExpr.cpp | 2 +- lib/SILGen/SILGenFunction.cpp | 23 ++++++++++++++--------- lib/SILGen/SILGenPoly.cpp | 9 ++++++++- lib/SILGen/SILGenProlog.cpp | 15 +++++++++++---- 6 files changed, 37 insertions(+), 18 deletions(-) diff --git a/lib/SILGen/SILGenApply.cpp b/lib/SILGen/SILGenApply.cpp index 88c602a75b754..9ab9cc59b366e 100644 --- a/lib/SILGen/SILGenApply.cpp +++ b/lib/SILGen/SILGenApply.cpp @@ -3964,7 +3964,7 @@ SILGenFunction::emitBeginApply(SILLocation loc, ManagedValue fn, yields.push_back(ManagedValue::forLValue(value)); } else if (info.isConsumed()) { yields.push_back(emitManagedRValueWithCleanup(value)); - } else if (info.isDirectGuaranteed()) { + } else if (info.isGuaranteed()) { yields.push_back(ManagedValue::forBorrowedRValue(value)); } else { yields.push_back(ManagedValue::forTrivialRValue(value)); diff --git a/lib/SILGen/SILGenDynamicCast.cpp b/lib/SILGen/SILGenDynamicCast.cpp index 13bf42c10ce9f..017ed7d724207 100644 --- a/lib/SILGen/SILGenDynamicCast.cpp +++ b/lib/SILGen/SILGenDynamicCast.cpp @@ -252,7 +252,7 @@ namespace { } ManagedValue result; - if (!origTargetTL.isAddressOnly()) { + if (!origTargetTL.isAddressOnly() || !SGF.useLoweredAddresses()) { result = SGF.emitLoad(Loc, buffer, origTargetTL, ctx, IsTake); } else { result = SGF.emitManagedBufferWithCleanup(buffer, origTargetTL); @@ -450,7 +450,7 @@ RValue Lowering::emitConditionalCheckedCast( SILValue resultObjectBuffer; Optional resultObjectTemp; SGFContext resultObjectCtx; - if ((resultTL.isAddressOnly()) + if ((resultTL.isAddressOnly() && SGF.useLoweredAddresses()) || (C.getEmitInto() && C.getEmitInto()->canPerformInPlaceInitialization())) { SILType resultTy = resultTL.getLoweredType(); diff --git a/lib/SILGen/SILGenExpr.cpp b/lib/SILGen/SILGenExpr.cpp index 13eda8f003612..b1e64d299681c 100644 --- a/lib/SILGen/SILGenExpr.cpp +++ b/lib/SILGen/SILGenExpr.cpp @@ -4726,7 +4726,7 @@ ManagedValue SILGenFunction::emitBindOptional(SILLocation loc, // If optValue was loadable, we emitted a switch_enum. In such a case, return // the argument from hasValueBB. 
- if (optValue.getType().isLoadable(F)) { + if (optValue.getType().isLoadable(F) || !silConv.useLoweredAddresses()) { return emitManagedRValueWithCleanup(hasValueBB->getArgument(0)); } diff --git a/lib/SILGen/SILGenFunction.cpp b/lib/SILGen/SILGenFunction.cpp index 27dc9410142a5..7b30081a9b4da 100644 --- a/lib/SILGen/SILGenFunction.cpp +++ b/lib/SILGen/SILGenFunction.cpp @@ -271,6 +271,7 @@ void SILGenFunction::emitCaptures(SILLocation loc, break; case CaptureKind::Immutable: case CaptureKind::StorageAddress: + // FIXME_addrlower: only call getAddressType for M.useLoweredAddresses() capturedArgs.push_back(emitUndef(getLoweredType(type).getAddressType())); break; case CaptureKind::Box: { @@ -290,11 +291,13 @@ void SILGenFunction::emitCaptures(SILLocation loc, // Get an address value for a SILValue if it is address only in an type // expansion context without opaque archetype substitution. auto getAddressValue = [&](SILValue entryValue) -> SILValue { - if (SGM.Types.getTypeLowering( - valueType, - TypeExpansionContext::noOpaqueTypeArchetypesSubstitution( - expansion.getResilienceExpansion())) - .isAddressOnly() + if (SGM.M.useLoweredAddresses() + && SGM.Types + .getTypeLowering( + valueType, + TypeExpansionContext::noOpaqueTypeArchetypesSubstitution( + expansion.getResilienceExpansion())) + .isAddressOnly() && !entryValue->getType().isAddress()) { auto addr = emitTemporaryAllocation(vd, entryValue->getType()); @@ -342,13 +345,15 @@ void SILGenFunction::emitCaptures(SILLocation loc, } case CaptureKind::Immutable: { if (canGuarantee) { - auto entryValue = getAddressValue(Entry.value); // No-escaping stored declarations are captured as the // address of the value. - assert(entryValue->getType().isAddress() && "no address for captured var!"); - capturedArgs.push_back(ManagedValue::forLValue(entryValue)); + auto entryValue = getAddressValue(Entry.value); + capturedArgs.push_back(ManagedValue::forBorrowedRValue(entryValue)); } - else { + else if (!silConv.useLoweredAddresses()) { + capturedArgs.push_back( + B.createCopyValue(loc, ManagedValue::forUnmanaged(Entry.value))); + } else { auto entryValue = getAddressValue(Entry.value); // We cannot pass a valid SILDebugVariable while creating the temp here // See rdar://60425582 diff --git a/lib/SILGen/SILGenPoly.cpp b/lib/SILGen/SILGenPoly.cpp index 220d6755dd9d1..167156fa081f6 100644 --- a/lib/SILGen/SILGenPoly.cpp +++ b/lib/SILGen/SILGenPoly.cpp @@ -1167,6 +1167,10 @@ namespace { outputOrigEltType, outputEltType, elt, loweredOutputEltTy); + // Aggregation of address-only values requires ownership. + if (loweredOutputTy.isAddressOnly(SGF.F)) { + elt = elt.ensurePlusOne(SGF, Loc); + } elements.push_back(elt); } @@ -1175,7 +1179,10 @@ namespace { forwarded.push_back(elt.forward(SGF)); auto tuple = SGF.B.createTuple(Loc, loweredOutputTy, forwarded); - return SGF.emitManagedRValueWithCleanup(tuple); + if (tuple->getOwnershipKind() == OwnershipKind::Owned) + return SGF.emitManagedRValueWithCleanup(tuple); + + return ManagedValue::forUnmanaged(tuple); } /// Handle a tuple that has been exploded in the input but wrapped in diff --git a/lib/SILGen/SILGenProlog.cpp b/lib/SILGen/SILGenProlog.cpp index 4168d7599fc3a..6c2de16c6ad77 100644 --- a/lib/SILGen/SILGenProlog.cpp +++ b/lib/SILGen/SILGenProlog.cpp @@ -456,11 +456,18 @@ static void emitCaptureArguments(SILGenFunction &SGF, case CaptureKind::StorageAddress: { // Non-escaping stored decls are captured as the address of the value. 
auto type = getVarTypeInCaptureContext(); - SILType ty = SGF.getLoweredType(type).getAddressType(); - SILValue addr = SGF.F.begin()->createFunctionArgument(ty, VD); - SGF.VarLocs[VD] = SILGenFunction::VarLoc::get(addr); + SILType ty = SGF.getLoweredType(type); + if (SGF.SGM.M.useLoweredAddresses()) { + ty = ty.getAddressType(); + } + SILValue arg = SGF.F.begin()->createFunctionArgument(ty, VD); + SGF.VarLocs[VD] = SILGenFunction::VarLoc::get(arg); SILDebugVariable DbgVar(VD->isLet(), ArgNo); - SGF.B.createDebugValueAddr(Loc, addr, DbgVar); + if (ty.isAddress()) { + SGF.B.createDebugValueAddr(Loc, arg, DbgVar); + } else { + SGF.B.createDebugValue(Loc, arg, DbgVar); + } break; } } From 23516ec4457232b5a425f9048486bca7b0fa5179 Mon Sep 17 00:00:00 2001 From: Andrew Trick Date: Sun, 6 Mar 2022 19:35:53 -0800 Subject: [PATCH 21/29] Add a test file for combined SILGen + AddressLowering --- test/SILOptimizer/opaque_values_Onone.swift | 10 ++++++++++ 1 file changed, 10 insertions(+) create mode 100644 test/SILOptimizer/opaque_values_Onone.swift diff --git a/test/SILOptimizer/opaque_values_Onone.swift b/test/SILOptimizer/opaque_values_Onone.swift new file mode 100644 index 0000000000000..648805e09a13a --- /dev/null +++ b/test/SILOptimizer/opaque_values_Onone.swift @@ -0,0 +1,10 @@ +// RUN: %target-swift-frontend -enable-sil-opaque-values -parse-as-library -emit-sil -Onone %s | %FileCheck %s + +// CHECK-LABEL: sil hidden @$s19opaque_values_Onone16generic_identity1txx_tlF : $@convention(thin) (@in_guaranteed T) -> @out T { +// CHECK: bb0(%0 : $*T, %1 : $*T): +// CHECK: debug_value %1 : $*T, let, name "t", argno 1 +// CHECK: copy_addr %1 to [initialization] %0 : $*T +// CHECK-LABEL: } // end sil function '$s19opaque_values_Onone16generic_identity1txx_tlF' +func generic_identity(t: T) -> T { + return t +} From 06948a858e69c18b5b1dfe8c056b406eb1457185 Mon Sep 17 00:00:00 2001 From: Andrew Trick Date: Tue, 15 Mar 2022 23:45:05 -0700 Subject: [PATCH 22/29] [SIL-opaque] use generated SILLocations Anywhere that code is not obviously inserted immediately adjacent to the origin instruction. --- .../Mandatory/AddressLowering.cpp | 21 +++++++++++-------- 1 file changed, 12 insertions(+), 9 deletions(-) diff --git a/lib/SILOptimizer/Mandatory/AddressLowering.cpp b/lib/SILOptimizer/Mandatory/AddressLowering.cpp index 1c72876d762d9..3421ceac87c3f 100644 --- a/lib/SILOptimizer/Mandatory/AddressLowering.cpp +++ b/lib/SILOptimizer/Mandatory/AddressLowering.cpp @@ -1269,6 +1269,9 @@ class AddressMaterialization { /// /// If the operand projects into its use, then the memory was already /// initialized when visiting the use. +/// +/// It's ok for the builder to reuse the user's SILLocation because +/// initializeComposingUse always inserts code immediately before the user. 
void AddressMaterialization::initializeComposingUse(Operand *operand) { SILValue def = operand->get(); if (def->getType().isAddressOnly(*pass.function)) { @@ -1405,7 +1408,7 @@ SILValue AddressMaterialization::materializeStructExtract( SILValue srcAddr = pass.getMaterializedAddress(structVal); auto *structType = structVal->getType().getStructOrBoundGenericStruct(); auto *varDecl = structType->getStoredProperties()[fieldIdx]; - return B.createStructElementAddr(extractInst->getLoc(), srcAddr, varDecl, + return B.createStructElementAddr(pass.genLoc(), srcAddr, varDecl, elementValue->getType().getAddressType()); } @@ -1413,7 +1416,7 @@ SILValue AddressMaterialization::materializeStructExtract( SILValue AddressMaterialization::materializeTupleExtract( SILInstruction *extractInst, SILValue elementValue, unsigned fieldIdx) { SILValue srcAddr = pass.getMaterializedAddress(extractInst->getOperand(0)); - return B.createTupleElementAddr(extractInst->getLoc(), srcAddr, fieldIdx, + return B.createTupleElementAddr(pass.genLoc(), srcAddr, fieldIdx, elementValue->getType().getAddressType()); } @@ -1431,7 +1434,7 @@ AddressMaterialization::materializeProjectionIntoUse(Operand *operand, case SILInstructionKind::EnumInst: { auto *enumInst = cast(user); SILValue enumAddr = materializeComposingUser(enumInst, intoPhiOperand); - return B.createInitEnumDataAddr(enumInst->getLoc(), enumAddr, + return B.createInitEnumDataAddr(pass.genLoc(), enumAddr, enumInst->getElement(), operand->get()->getType().getAddressType()); } @@ -1443,8 +1446,8 @@ AddressMaterialization::materializeProjectionIntoUse(Operand *operand, auto opaque = Lowering::AbstractionPattern::getOpaque(); auto &concreteTL = pass.function->getTypeLowering(opaque, canTy); return B.createInitExistentialAddr( - initExistentialValue->getLoc(), containerAddr, canTy, - concreteTL.getLoweredType(), initExistentialValue->getConformances()); + pass.genLoc(), containerAddr, canTy, + concreteTL.getLoweredType(), initExistentialValue->getConformances()); } case SILInstructionKind::StructInst: { auto *structInst = cast(user); @@ -1454,8 +1457,8 @@ AddressMaterialization::materializeProjectionIntoUse(Operand *operand, SILValue structAddr = materializeComposingUser(structInst, intoPhiOperand); return B.createStructElementAddr( - structInst->getLoc(), structAddr, *fieldIter, - operand->get()->getType().getAddressType()); + pass.genLoc(), structAddr, *fieldIter, + operand->get()->getType().getAddressType()); } case SILInstructionKind::TupleInst: { auto *tupleInst = cast(user); @@ -1467,7 +1470,7 @@ AddressMaterialization::materializeProjectionIntoUse(Operand *operand, return pass.function->getArguments()[resultIdx]; } SILValue tupleAddr = materializeComposingUser(tupleInst, intoPhiOperand); - return B.createTupleElementAddr(tupleInst->getLoc(), tupleAddr, + return B.createTupleElementAddr(pass.genLoc(), tupleAddr, operand->getOperandNumber(), operand->get()->getType().getAddressType()); } @@ -2739,7 +2742,7 @@ void UseRewriter::visitSwitchEnumInst(SwitchEnumInst * switchEnum) { auto *caseAddr = caseBuilder.createUncheckedTakeEnumDataAddr(loc, enumAddr, caseDecl); auto *caseLoad = caseBuilder.createTrivialLoadOr( - switchEnum->getLoc(), caseAddr, LoadOwnershipQualifier::Take); + loc, caseAddr, LoadOwnershipQualifier::Take); caseArg->replaceAllUsesWith(caseLoad); if (caseArg->getType().isAddressOnly(*pass.function)) { // Remap caseArg to the new dummy load which will be deleted during From 95add90e5e31ec28603f7207c71da71a17db8715 Mon Sep 17 00:00:00 2001 From: Andrew Trick 
Date: Fri, 18 Mar 2022 17:37:21 -0700 Subject: [PATCH 23/29] Remove an unused test file (opaque_value_silgen_todo). --- test/SILGen/opaque_values_silgen_todo.swift | 102 -------------------- 1 file changed, 102 deletions(-) delete mode 100644 test/SILGen/opaque_values_silgen_todo.swift diff --git a/test/SILGen/opaque_values_silgen_todo.swift b/test/SILGen/opaque_values_silgen_todo.swift deleted file mode 100644 index 42659a3881050..0000000000000 --- a/test/SILGen/opaque_values_silgen_todo.swift +++ /dev/null @@ -1,102 +0,0 @@ -// RUN: %target-swift-emit-silgen -enable-sil-opaque-values -emit-sorted-sil -Xllvm -sil-full-demangle %s | %FileCheck %s -// REQUIRES: EnableSILOpaqueValues - -public protocol C : AnyObject {} - -public protocol _ObjectiveCBridgeable {} - -// A lot of standard library support is necessary to support raw enums. -// -------------------------------------------------------------------- - -infix operator == : ComparisonPrecedence -infix operator ~= : ComparisonPrecedence - -public struct Bool { - var _value: Builtin.Int1 - - public init() { - let zero: Int64 = 0 - self._value = Builtin.trunc_Int64_Int1(zero._value) - } - - internal init(_ v: Builtin.Int1) { self._value = v } - - public init(_ value: Bool) { - self = value - } -} - -extension Bool { - public func _getBuiltinLogicValue() -> Builtin.Int1 { - return _value - } -} - -public protocol Equatable { - /// Returns a Boolean value indicating whether two values are equal. - /// - /// Equality is the inverse of inequality. For any values `a` and `b`, - /// `a == b` implies that `a != b` is `false`. - /// - /// - Parameters: - /// - lhs: A value to compare. - /// - rhs: Another value to compare. - static func == (lhs: Self, rhs: Self) -> Bool -} - -public func ~= (a: T, b: T) -> Bool { - return a == b -} - -public protocol RawRepresentable { - associatedtype RawValue - - init?(rawValue: RawValue) - - var rawValue: RawValue { get } -} - -public func == (lhs: T, rhs: T) -> Bool - where T.RawValue : Equatable { - return lhs.rawValue == rhs.rawValue -} - -public protocol ExpressibleByStringLiteral {} -public protocol ExpressibleByFloatLiteral {} -public protocol ExpressibleByUnicodeScalarLiteral {} -public protocol ExpressibleByExtendedGraphemeClusterLiteral {} - -public struct Int64 : ExpressibleByIntegerLiteral, _ExpressibleByBuiltinIntegerLiteral, Equatable { - public var _value: Builtin.Int64 - public init(_builtinIntegerLiteral x: _MaxBuiltinIntegerType) { - _value = Builtin.s_to_s_checked_trunc_IntLiteral_Int64(x).0 - } - public typealias IntegerLiteralType = Int64 - public init(integerLiteral value: Int64) { - self = value - } - public static func ==(_ lhs: Int64, rhs: Int64) -> Bool { - return Bool(Builtin.cmp_eq_Int64(lhs._value, rhs._value)) - } -} - -public struct Int : _ExpressibleByBuiltinIntegerLiteral, ExpressibleByIntegerLiteral, Equatable { - var _value: Builtin.Int64 - public init() { - self = 0 - } - public typealias IntegerLiteralType = Int - public init(_builtinIntegerLiteral x: _MaxBuiltinIntegerType) { - _value = Builtin.s_to_s_checked_trunc_IntLiteral_Int64(x).0 - } - - public init(integerLiteral value: Int) { - self = value - } - - public static func ==(_ lhs: Int, rhs: Int) -> Bool { - return Bool(Builtin.cmp_eq_Int64(lhs._value, rhs._value)) - } -} - -// ----------------------------------------------------------------------------- From 390ab4db789202dcc203bc0c389719c32d4370dd Mon Sep 17 00:00:00 2001 From: Andrew Trick Date: Fri, 18 Mar 2022 17:55:58 -0700 Subject: [PATCH 24/29] Update 
SILGen/opaque_values_silgen_lib.swift for [serialized] The function attribute's name changed on main. --- test/SILGen/opaque_values_silgen_lib.swift | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/test/SILGen/opaque_values_silgen_lib.swift b/test/SILGen/opaque_values_silgen_lib.swift index 47e95d60a7ea7..22e9aa97f6b94 100644 --- a/test/SILGen/opaque_values_silgen_lib.swift +++ b/test/SILGen/opaque_values_silgen_lib.swift @@ -552,7 +552,7 @@ func f270_convOptAnyStruct(_ a1: @escaping (AnyStruct?) -> AnyStruct) { // f270_convOptAnyStruct continued Test: reabstraction thunk helper // --- -// CHECK-LABEL: sil shared [transparent] [serializable] [reabstraction_thunk] [ossa] @$ss9AnyStructVSgABIegnr_A2CIegnr_TR : $@convention(thin) (@in_guaranteed Optional, @guaranteed @callee_guaranteed (@in_guaranteed Optional) -> @out AnyStruct) -> @out Optional { +// CHECK-LABEL: sil shared [transparent] [serialized] [reabstraction_thunk] [ossa] @$ss9AnyStructVSgABIegnr_A2CIegnr_TR : $@convention(thin) (@in_guaranteed Optional, @guaranteed @callee_guaranteed (@in_guaranteed Optional) -> @out AnyStruct) -> @out Optional { // CHECK: bb0([[ARG0:%.*]] : @guaranteed $Optional, [[ARG1:%.*]] : @guaranteed $@callee_guaranteed (@in_guaranteed Optional) -> @out AnyStruct): // HECK: [[APPLYARG:%.*]] = apply [[ARG1]]([[ARG0]]) : $@callee_guaranteed (@in_guaranteed Optional) -> @out AnyStruct // HECK: [[RETVAL:%.*]] = enum $Optional, #Optional.some!enumelt, [[APPLYARG]] : $AnyStruct @@ -575,7 +575,7 @@ func f280_convExistTrivial(_ s: @escaping (P) -> TrivialStruct) { // part of f280_convExistTrivial: conversion between existential types - reabstraction thunk // --- -// CHECK-LABEL: sil shared [transparent] [serializable] [reabstraction_thunk] [ossa] @$ss1P_ps13TrivialStructVIegnd_s2P2_psAD_pIegnr_TR : $@convention(thin) (@in_guaranteed P2, @guaranteed @callee_guaranteed (@in_guaranteed P) -> TrivialStruct) -> @out P2 { +// CHECK-LABEL: sil shared [transparent] [serialized] [reabstraction_thunk] [ossa] @$ss1P_ps13TrivialStructVIegnd_s2P2_psAD_pIegnr_TR : $@convention(thin) (@in_guaranteed P2, @guaranteed @callee_guaranteed (@in_guaranteed P) -> TrivialStruct) -> @out P2 { // CHECK: bb0([[ARG0:%.*]] : @guaranteed $P2, [[ARG1:%.*]] : @guaranteed $@callee_guaranteed (@in_guaranteed P) -> TrivialStruct): // HECK: [[OPENED_ARG:%.*]] = open_existential_value [[ARG]] : $P2 to $@opened({{.*}}) P2 // HECK: [[COPIED_VAL:%.*]] = copy_value [[OPENED_ARG]] @@ -604,7 +604,7 @@ func f290_convOptExistTriv(_ s: @escaping (P?) -> TrivialStruct) { // part of f290_convOptExistTriv: conversion between existential types - reabstraction thunk - optionals case // --- -// CHECK-LABEL: sil shared [transparent] [serializable] [reabstraction_thunk] [ossa] @$ss1P_pSgs13TrivialStructVIegnd_ADSgs2P2_pIegyr_TR : $@convention(thin) (Optional, @guaranteed @callee_guaranteed (@in_guaranteed Optional
<P>) -> TrivialStruct) -> @out P2 { +// CHECK-LABEL: sil shared [transparent] [serialized] [reabstraction_thunk] [ossa] @$ss1P_pSgs13TrivialStructVIegnd_ADSgs2P2_pIegyr_TR : $@convention(thin) (Optional<P>, @guaranteed @callee_guaranteed (@in_guaranteed Optional<P>) -> TrivialStruct) -> @out P2 { // CHECK: bb0([[ARG0:%.*]] : $Optional<P>, [[ARG1:%.*]] : @guaranteed $@callee_guaranteed (@in_guaranteed Optional<P>
) -> TrivialStruct): // HECK: switch_enum [[ARG0]] : $Optional, case #Optional.some!enumelt: bb2, case #Optional.none!enumelt: bb1 // HECK: bb1: @@ -637,7 +637,7 @@ func f300_convETupleToAny(_ t: @escaping () -> ()) { // f300_convETupleToAny continued Test: reabstraction of () to Any // --- -// CHECK-LABEL: sil shared [transparent] [serializable] [reabstraction_thunk] [ossa] @$sIeg_ypIegr_TR : $@convention(thin) (@guaranteed @callee_guaranteed () -> ()) -> @out Any { +// CHECK-LABEL: sil shared [transparent] [serialized] [reabstraction_thunk] [ossa] @$sIeg_ypIegr_TR : $@convention(thin) (@guaranteed @callee_guaranteed () -> ()) -> @out Any { // CHECK: bb0([[ARG:%.*]] : @guaranteed $@callee_guaranteed () -> ()): // HECK: [[ASTACK:%.*]] = alloc_stack $Any // HECK: [[IADDR:%.*]] = init_existential_addr [[ASTACK]] : $*Any, $() @@ -663,7 +663,7 @@ func f310_convnIntTupleAny(_ t: @escaping () -> (Builtin.Int64, Builtin.Int64)) // f310_convIntTupleAny continued Test: reabstraction of non-empty tuple to Any // --- -// CHECK-LABEL: sil shared [transparent] [serializable] [reabstraction_thunk] [ossa] @$sBi64_Bi64_Iegdd_ypIegr_TR : $@convention(thin) (@guaranteed @callee_guaranteed () -> (Builtin.Int64, Builtin.Int64)) -> @out Any { +// CHECK-LABEL: sil shared [transparent] [serialized] [reabstraction_thunk] [ossa] @$sBi64_Bi64_Iegdd_ypIegr_TR : $@convention(thin) (@guaranteed @callee_guaranteed () -> (Builtin.Int64, Builtin.Int64)) -> @out Any { // CHECK: bb0([[ARG:%.*]] : @guaranteed $@callee_guaranteed () -> (Builtin.Int64, Builtin.Int64)): // HECK: [[ASTACK:%.*]] = alloc_stack $Any // HECK: [[IADDR:%.*]] = init_existential_addr [[ASTACK]] : $*Any, $(Builtin.Int64, Builtin.Int64) @@ -696,7 +696,7 @@ func f320_transImplodeAny(_ t: @escaping (Any) -> ()) { // f320_transImplodeAny continued Test: reabstraction thunk // --- -// CHECK-LABEL: sil shared [transparent] [serializable] [reabstraction_thunk] [ossa] @$sypIegn_Bi64_Bi64_Iegyy_TR : $@convention(thin) (Builtin.Int64, Builtin.Int64, @guaranteed @callee_guaranteed (@in_guaranteed Any) -> ()) -> () { +// CHECK-LABEL: sil shared [transparent] [serialized] [reabstraction_thunk] [ossa] @$sypIegn_Bi64_Bi64_Iegyy_TR : $@convention(thin) (Builtin.Int64, Builtin.Int64, @guaranteed @callee_guaranteed (@in_guaranteed Any) -> ()) -> () { // CHECK: bb0([[ARG0:%.*]] : $Builtin.Int64, [[ARG1:%.*]] : $Builtin.Int64, [[ARG2:%.*]] : @guaranteed $@callee_guaranteed (@in_guaranteed Any) -> ()): // HECK: [[ASTACK:%.*]] = alloc_stack $Any // HECK: [[IADDR:%.*]] = init_existential_addr [[ASTACK]] : $*Any, $(Builtin.Int64, Builtin.Int64) From 434faeec937e86e793b4bef2356b79f2cfc6ec6b Mon Sep 17 00:00:00 2001 From: Andrew Trick Date: Sun, 20 Mar 2022 00:04:28 -0700 Subject: [PATCH 25/29] [SIL-opaque] Add @in_guaranteed function argument support. Temporarily map storage to a fake load_borrow. 
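As a rough illustration of the intermediate SIL this step produces (a sketch only; the function name f, the generic parameter T, and the value numbers are placeholders, not taken from this patch): a non-consumed opaque argument is temporarily rebound through a load_borrow of the rewritten address argument, with an end_borrow emitted before every function-exiting terminator. Both are placeholders that later use rewriting removes, which is what the CHECK-NOT lines in the new f046 test below verify.

  sil [ossa] @f : $@convention(thin) <T> (@in_guaranteed T) -> () {
  bb0(%0 : $*T):
    // Placeholder borrow standing in for the original opaque argument;
    // every former use of the opaque argument now uses %1 instead.
    %1 = load_borrow %0 : $*T
    // ... original body, rewritten against %1 ...
    end_borrow %1 : $T
    %2 = tuple ()
    return %2 : $()
  }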
--- .../Mandatory/AddressLowering.cpp | 39 ++++++++++++------- test/SILOptimizer/address_lowering.sil | 17 +++++++- 2 files changed, 42 insertions(+), 14 deletions(-) diff --git a/lib/SILOptimizer/Mandatory/AddressLowering.cpp b/lib/SILOptimizer/Mandatory/AddressLowering.cpp index 3421ceac87c3f..e01303b8c3b38 100644 --- a/lib/SILOptimizer/Mandatory/AddressLowering.cpp +++ b/lib/SILOptimizer/Mandatory/AddressLowering.cpp @@ -425,7 +425,12 @@ struct AddressLoweringState { AddressLoweringState(SILFunction *function, DominanceInfo *domInfo) : function(function), loweredFnConv(getLoweredFnConv(function)), - domInfo(domInfo) {} + domInfo(domInfo) { + for (auto &block : *function) { + if (block.getTerminator()->isFunctionExiting()) + exitingInsts.push_back(block.getTerminator()); + } + } SILModule *getModule() const { return &function->getModule(); } @@ -489,28 +494,39 @@ static void convertDirectToIndirectFunctionArgs(AddressLoweringState &pass) { if (param.isFormalIndirect() && !fnConv.isSILIndirect(param)) { SILArgument *arg = pass.function->getArgument(argIdx); SILType addrType = arg->getType().getAddressType(); - LoadInst *loadArg = argBuilder.createTrivialLoadOr( - SILValue(arg).getLoc(), SILUndef::get(addrType, *pass.function), - LoadOwnershipQualifier::Take); - - arg->replaceAllUsesWith(loadArg); + auto loc = SILValue(arg).getLoc(); + SILValue undefAddress = SILUndef::get(addrType, *pass.function); + SingleValueInstruction *load; + if (param.isConsumed()) { + load = argBuilder.createTrivialLoadOr(loc, undefAddress, + LoadOwnershipQualifier::Take); + } else { + load = cast( + argBuilder.emitLoadBorrowOperation(loc, undefAddress)); + for (SILInstruction *termInst : pass.exitingInsts) { + pass.getBuilder(termInst->getIterator()) + .createEndBorrow(pass.genLoc(), load); + } + } + arg->replaceAllUsesWith(load); assert(!pass.valueStorageMap.contains(arg)); arg = arg->getParent()->replaceFunctionArgument( arg->getIndex(), addrType, OwnershipKind::None, arg->getDecl()); - loadArg->setOperand(arg); + assert(isa(load) || isa(load)); + load->setOperand(0, arg); // Indirect calling convention may be used for loadable types. In that // case, generating the argument loads is sufficient. if (addrType.isAddressOnly(*pass.function)) { - pass.valueStorageMap.insertValue(loadArg, arg); + pass.valueStorageMap.insertValue(load, arg); } } ++argIdx; } - assert(argIdx - == fnConv.getSILArgIndexOfFirstParam() + fnConv.getNumSILArguments()); + assert(argIdx == + fnConv.getSILArgIndexOfFirstParam() + fnConv.getNumSILArguments()); } /// Before populating the ValueStorageMap, insert function arguments for any @@ -575,9 +591,6 @@ class OpaqueValueVisitor { /// to valueStorageMap in RPO. void OpaqueValueVisitor::mapValueStorage() { for (auto *block : postorderInfo.getReversePostOrder()) { - if (block->getTerminator()->isFunctionExiting()) - pass.exitingInsts.push_back(block->getTerminator()); - // Opaque function arguments have already been replaced. 
if (block != pass.function->getEntryBlock()) { for (auto *arg : block->getArguments()) { diff --git a/test/SILOptimizer/address_lowering.sil b/test/SILOptimizer/address_lowering.sil index 5ee49ed89b145..95d19ed09f74d 100644 --- a/test/SILOptimizer/address_lowering.sil +++ b/test/SILOptimizer/address_lowering.sil @@ -277,7 +277,7 @@ sil [ossa] @f044_indirectGuaranteed : $@convention(thin) (@in_guaranteed T) - // CHECK: apply %{{.*}}(%0) : $@convention(thin) <τ_0_0> (@in_guaranteed τ_0_0) -> () // CHECK: destroy_addr %0 : $*T // CHECK-LABEL: } // end sil function 'f045_indirectGuaranteedArg' -sil [ossa] @f045_indirectGuaranteedArg : $@convention(thin) (@in T) -> () { +sil [ossa] @f045_indirectGuaranteedCallArg : $@convention(thin) (@in T) -> () { bb0(%0 : @owned $T): %1 = function_ref @f044_indirectGuaranteed : $@convention(thin) <τ_0_0>(@in_guaranteed τ_0_0) -> () %2 = apply %1(%0) : $@convention(thin) <τ_0_0>(@in_guaranteed τ_0_0) -> () @@ -286,6 +286,21 @@ bb0(%0 : @owned $T): return %6 : $() } +// CHECK-LABEL: sil [ossa] @f046_indirectGuaranteedFunctionArg : $@convention(thin) (@in_guaranteed T) -> () { +// CHECK: bb0(%0 : $*T): +// CHECK-NOT: load +// CHECK: apply %{{.*}}(%0) : $@convention(thin) <τ_0_0> (@in_guaranteed τ_0_0) -> () +// CHECK-NOT: end_borrow +// CHECK-LABEL: } // end sil function 'f046_indirectGuaranteedFunctionArg' +sil [ossa] @f046_indirectGuaranteedFunctionArg : $@convention(thin) (@in_guaranteed T) -> () { +bb0(%0 : @guaranteed $T): + %1 = function_ref @f044_indirectGuaranteed : $@convention(thin) <τ_0_0>(@in_guaranteed τ_0_0) -> () + %2 = apply %1(%0) : $@convention(thin) <τ_0_0>(@in_guaranteed τ_0_0) -> () + destroy_value %0 : $T + %6 = tuple () + return %6 : $() +} + // CHECK-LABEL: sil [ossa] @f050_storeinout : $@convention(thin) (@inout T, @inout T, @in T) -> () { // CHECK: bb0(%0 : $*T, %1 : $*T, %2 : $*T): // CHECK: %[[PREV1:.*]] = alloc_stack $T From d6f0c73eeb84be7820b9c1e93a8029b60a1f84a3 Mon Sep 17 00:00:00 2001 From: Andrew Trick Date: Sun, 20 Mar 2022 00:05:49 -0700 Subject: [PATCH 26/29] [SIL-opaque] Add an assert for open_existential_value. Add comments. Add a basic dominance sanity check. --- .../Mandatory/AddressLowering.cpp | 24 +++++++++++++------ test/SILOptimizer/address_lowering.sil | 2 +- 2 files changed, 18 insertions(+), 8 deletions(-) diff --git a/lib/SILOptimizer/Mandatory/AddressLowering.cpp b/lib/SILOptimizer/Mandatory/AddressLowering.cpp index e01303b8c3b38..f1055e00d3765 100644 --- a/lib/SILOptimizer/Mandatory/AddressLowering.cpp +++ b/lib/SILOptimizer/Mandatory/AddressLowering.cpp @@ -792,9 +792,16 @@ static Operand *getProjectedDefOperand(SILValue value) { /// If \p value is a an existential or enum, then return the existential or enum /// operand. These operations are always rewritten by the UseRewriter and always -/// destructively reuse the same storage as their operand. Note that if the -/// operation's result is address-only, then the operand must be address-only -/// and therefore must mapped to ValueStorage. +/// reuse the same storage as their operand. Note that if the operation's result +/// is address-only, then the operand must be address-only and therefore must +/// mapped to ValueStorage. +/// +/// open_existential_value must reuse storage because the boxed value is shared +/// with other instances of the existential. An explicit copy is needed to +/// obtain an owned value. +/// +/// unchecked_enum_data and switch_enum must reuse storage because extracting +/// the payload destroys the enum value. 
static Operand *getReusedStorageOperand(SILValue value) { switch (value->getKind()) { default: @@ -1180,15 +1187,18 @@ createStackAllocation(SILValue value) { auto *openingInst = openingVal->getDefiningInstruction(); assert(openingVal && "all opened archetypes should be resolved"); - if (latestOpeningInst - && pass.domInfo->dominates(openingInst, latestOpeningInst)) { - return; + if (latestOpeningInst) { + if (pass.domInfo->dominates(openingInst, latestOpeningInst)) + return; + + assert(pass.domInfo->dominates(latestOpeningInst, openingInst) && + "opened archetypes must dominate their uses"); } latestOpeningInst = openingInst; } }); auto allocPt = latestOpeningInst ? std::next(latestOpeningInst->getIterator()) - : pass.function->begin()->begin(); + : pass.function->begin()->begin(); auto allocBuilder = pass.getBuilder(allocPt); AllocStackInst *alloc = allocBuilder.createAllocStack(pass.genLoc(), allocTy); diff --git a/test/SILOptimizer/address_lowering.sil b/test/SILOptimizer/address_lowering.sil index 95d19ed09f74d..d68d308454226 100644 --- a/test/SILOptimizer/address_lowering.sil +++ b/test/SILOptimizer/address_lowering.sil @@ -934,7 +934,7 @@ bb0(%0 : @owned $P): %9 = witness_method $@opened("EF755EF2-B636-11E7-B7B4-A45E60ECC541") P, #P.foo, %8 : $@opened("EF755EF2-B636-11E7-B7B4-A45E60ECC541") P : $@convention(witness_method: P) <τ_0_0 where τ_0_0 : P> (@in_guaranteed τ_0_0) -> () %cpy = copy_value %8 : $@opened("EF755EF2-B636-11E7-B7B4-A45E60ECC541") P end_borrow %b : $P - // This optional is an aggregate that contains an opened exsitential. May sure it's allocated after open_existential_addr. + // This optional is an aggregate that contains an opened existential. Make sure it is allocated after open_existential_addr. %opt = enum $Optional<@opened("EF755EF2-B636-11E7-B7B4-A45E60ECC541") P>, #Optional.some!enumelt, %cpy : $@opened("EF755EF2-B636-11E7-B7B4-A45E60ECC541") P %some = unchecked_enum_data %opt : $Optional<@opened("EF755EF2-B636-11E7-B7B4-A45E60ECC541") P>, #Optional.some!enumelt %11 = apply %9<@opened("EF755EF2-B636-11E7-B7B4-A45E60ECC541") P>(%some) : $@convention(witness_method: P) <τ_0_0 where τ_0_0 : P> (@in_guaranteed τ_0_0) -> () From 2d53e0369520386f7a6c3c4709f0a21a1488a26c Mon Sep 17 00:00:00 2001 From: Andrew Trick Date: Sun, 20 Mar 2022 00:07:39 -0700 Subject: [PATCH 27/29] [SIL-opaque] [NFC] clang-format AddressLowering.cpp --- .../Mandatory/AddressLowering.cpp | 102 +++++++++--------- 1 file changed, 49 insertions(+), 53 deletions(-) diff --git a/lib/SILOptimizer/Mandatory/AddressLowering.cpp b/lib/SILOptimizer/Mandatory/AddressLowering.cpp index f1055e00d3765..b922d5f99fc3a 100644 --- a/lib/SILOptimizer/Mandatory/AddressLowering.cpp +++ b/lib/SILOptimizer/Mandatory/AddressLowering.cpp @@ -1089,9 +1089,8 @@ bool OpaqueStorageAllocation::findProjectionIntoUseImpl( return false; } -bool OpaqueStorageAllocation:: -checkStorageDominates(AllocStackInst *allocInst, - ArrayRef incomingValues) { +bool OpaqueStorageAllocation::checkStorageDominates( + AllocStackInst *allocInst, ArrayRef incomingValues) { for (SILValue incomingValue : incomingValues) { if (auto *defInst = incomingValue->getDefiningInstruction()) { @@ -1102,8 +1101,8 @@ checkStorageDominates(AllocStackInst *allocInst, // Handle both phis and terminator results. auto *bbArg = cast(incomingValue); // The storage block must strictly dominate the phi. 
- if (!pass.domInfo->properlyDominates( - allocInst->getParent(), bbArg->getParent())) { + if (!pass.domInfo->properlyDominates(allocInst->getParent(), + bbArg->getParent())) { return false; } } @@ -1157,9 +1156,8 @@ void OpaqueStorageAllocation::removeAllocation(SILValue value) { // Any value that may be used by a return instruction must be deallocated // immediately before the return. This allows the return to be rewritten by // loading from storage. -AllocStackInst *OpaqueStorageAllocation:: -createStackAllocation(SILValue value) { - assert(value.getOwnershipKind() != OwnershipKind::Guaranteed && +AllocStackInst *OpaqueStorageAllocation::createStackAllocation(SILValue value) { + assert(value.getOwnershipKind() != OwnershipKind::Guaranteed && "creating storage for a guaranteed value implies a copy"); // Instructions that produce an opened type never reach here because they @@ -1354,7 +1352,7 @@ SILValue AddressMaterialization::recursivelyMaterializeStorage( SILValue useVal = useStorage.value; if (auto *defInst = useVal->getDefiningInstruction()) { Operand *useOper = - &defInst->getAllOperands()[storage.projectedOperandNum]; + &defInst->getAllOperands()[storage.projectedOperandNum]; return recordAddress( materializeProjectionIntoUse(useOper, intoPhiOperand)); } @@ -1368,8 +1366,8 @@ SILValue AddressMaterialization::recursivelyMaterializeStorage( pass.valueStorageMap.getProjectedStorage(storage).storage, /*intoPhiOperand*/ true)); } - assert(!storage.isProjection() - && "a composing user may not also be a def projection"); + assert(!storage.isProjection() && + "a composing user may not also be a def projection"); return storage.storageAddress; } @@ -1468,9 +1466,9 @@ AddressMaterialization::materializeProjectionIntoUse(Operand *operand, auto canTy = initExistentialValue->getFormalConcreteType(); auto opaque = Lowering::AbstractionPattern::getOpaque(); auto &concreteTL = pass.function->getTypeLowering(opaque, canTy); - return B.createInitExistentialAddr( - pass.genLoc(), containerAddr, canTy, - concreteTL.getLoweredType(), initExistentialValue->getConformances()); + return B.createInitExistentialAddr(pass.genLoc(), containerAddr, canTy, + concreteTL.getLoweredType(), + initExistentialValue->getConformances()); } case SILInstructionKind::StructInst: { auto *structInst = cast(user); @@ -1480,8 +1478,8 @@ AddressMaterialization::materializeProjectionIntoUse(Operand *operand, SILValue structAddr = materializeComposingUser(structInst, intoPhiOperand); return B.createStructElementAddr( - pass.genLoc(), structAddr, *fieldIter, - operand->get()->getType().getAddressType()); + pass.genLoc(), structAddr, *fieldIter, + operand->get()->getType().getAddressType()); } case SILInstructionKind::TupleInst: { auto *tupleInst = cast(user); @@ -1584,8 +1582,8 @@ void PhiRewriter::materializeOperand(PhiOperand phiOper) { auto &operStorage = pass.valueStorageMap.getStorage(phiOper.getOperand()->get()); if (operStorage.isPhiProjection()) { - if (operStorage.projectedStorageID - == pass.valueStorageMap.getOrdinal(phiOper.getValue())) { + if (operStorage.projectedStorageID == + pass.valueStorageMap.getOrdinal(phiOper.getValue())) { // This operand was coalesced with this particular phi. No move needed. 
return; } @@ -1646,8 +1644,8 @@ PhiRewriter::MovePosition PhiRewriter::findPhiMovePosition(PhiOperand phiOper) { if (!phiMove || !phiMoves.contains(phiMove)) break; - if (!foundEarliestInsertPoint - && getAccessBase(phiMove->getSrc()) == phiBaseAddress) { + if (!foundEarliestInsertPoint && + getAccessBase(phiMove->getSrc()) == phiBaseAddress) { // Anti-dependence from the phi move to the phi value. Do not move into // the phi storage before this point. foundEarliestInsertPoint = true; @@ -1698,8 +1696,8 @@ bool CallArgRewriter::rewriteArguments() { bool changed = false; auto origConv = apply.getSubstCalleeConv(); - assert(apply.getNumArguments() == origConv.getNumParameters() - && "results should not yet be rewritten"); + assert(apply.getNumArguments() == origConv.getNumParameters() && + "results should not yet be rewritten"); for (unsigned argIdx = apply.getCalleeArgIndexOfFirstAppliedArg(), endArgIdx = argIdx + apply.getNumArguments(); @@ -1968,8 +1966,8 @@ void ApplyRewriter::makeIndirectArgs(MutableArrayRef newCallArgs) { loweredCalleeConv.getSILArgIndexOfFirstIndirectResult(); auto visitCallResult = [&](SILValue result, SILResultInfo resultInfo) { - assert(!opaqueCalleeConv.isSILIndirect(resultInfo) - && "canonical call results are always direct"); + assert(!opaqueCalleeConv.isSILIndirect(resultInfo) && + "canonical call results are always direct"); if (loweredCalleeConv.isSILIndirect(resultInfo)) { SILValue indirectResultAddr = materializeIndirectResultAddress( @@ -2048,8 +2046,8 @@ void ApplyRewriter::rewriteApply(ArrayRef newCallArgs) { auto *oldCall = cast(apply.getInstruction()); auto *newCall = argBuilder.createApply( - callLoc, apply.getCallee(), apply.getSubstitutionMap(), newCallArgs, - oldCall->getApplyOptions(), oldCall->getSpecializationInfo()); + callLoc, apply.getCallee(), apply.getSubstitutionMap(), newCallArgs, + oldCall->getApplyOptions(), oldCall->getSpecializationInfo()); this->apply = FullApplySite(newCall); @@ -2134,9 +2132,8 @@ void ApplyRewriter::rewriteTryApply(ArrayRef newCallArgs) { auto replaceTermResult = [&](SILValue newResultVal) { SILType resultTy = loweredCalleeConv.getSILResultType(typeCtx); - auto ownership = resultTy.isTrivial(*pass.function) - ? OwnershipKind::None - : OwnershipKind::Owned; + auto ownership = resultTy.isTrivial(*pass.function) ? OwnershipKind::None + : OwnershipKind::Owned; resultArg->replaceAllUsesWith(newResultVal); assert(resultArg->getIndex() == 0); @@ -2209,8 +2206,8 @@ void ApplyRewriter::replaceDirectResults(DestructureTupleInst *oldDestructure) { unsigned newDirectResultIdx = 0; auto visitOldCallResult = [&](SILValue result, SILResultInfo resultInfo) { - assert(!opaqueCalleeConv.isSILIndirect(resultInfo) - && "canonical call results are always direct"); + assert(!opaqueCalleeConv.isSILIndirect(resultInfo) && + "canonical call results are always direct"); if (loweredCalleeConv.isSILIndirect(resultInfo)) { if (result->getType().isAddressOnly(*pass.function)) { @@ -2283,8 +2280,8 @@ void ReturnRewriter::rewriteReturn(ReturnInst *returnInst) { // Find the point before allocated storage has been deallocated. 
auto insertPt = SILBasicBlock::iterator(returnInst); - for (auto bbStart = returnInst->getParent()->begin(); - insertPt != bbStart; --insertPt) { + for (auto bbStart = returnInst->getParent()->begin(); insertPt != bbStart; + --insertPt) { if (!isa(*std::prev(insertPt))) break; } @@ -2308,23 +2305,22 @@ void ReturnRewriter::rewriteReturn(ReturnInst *returnInst) { pass.loweredFnConv.getSILArgIndexOfFirstIndirectResult(); // Initialize the indirect result arguments and populate newDirectResults. - for_each( - pass.function->getLoweredFunctionType()->getResults(), oldResults, - [&](SILResultInfo resultInfo, SILValue oldResult) { - // Assume that all original results are direct in SIL. - assert(!opaqueFnConv.isSILIndirect(resultInfo)); - if (!pass.loweredFnConv.isSILIndirect(resultInfo)) { - newDirectResults.push_back(oldResult); - return; - } - SILArgument *newResultArg = - pass.function->getArgument(newResultArgIdx); - rewriteElement(oldResult, newResultArg, returnBuilder); - ++newResultArgIdx; - }); - - assert(newDirectResults.size() - == pass.loweredFnConv.getNumDirectSILResults()); + for_each(pass.function->getLoweredFunctionType()->getResults(), oldResults, + [&](SILResultInfo resultInfo, SILValue oldResult) { + // Assume that all original results are direct in SIL. + assert(!opaqueFnConv.isSILIndirect(resultInfo)); + if (!pass.loweredFnConv.isSILIndirect(resultInfo)) { + newDirectResults.push_back(oldResult); + return; + } + SILArgument *newResultArg = + pass.function->getArgument(newResultArgIdx); + rewriteElement(oldResult, newResultArg, returnBuilder); + ++newResultArgIdx; + }); + + assert(newDirectResults.size() == + pass.loweredFnConv.getNumDirectSILResults()); assert(newResultArgIdx == pass.loweredFnConv.getSILArgIndexOfFirstParam()); // Generate a new return_inst for the new direct results. @@ -2335,9 +2331,9 @@ void ReturnRewriter::rewriteReturn(ReturnInst *returnInst) { } else if (newDirectResults.size() == 1) { newReturnVal = newDirectResults[0]; } else { - newReturnVal = returnBuilder.createTuple(pass.genLoc(), - pass.loweredFnConv.getSILResultType(typeCtx), - newDirectResults); + newReturnVal = returnBuilder.createTuple( + pass.genLoc(), pass.loweredFnConv.getSILResultType(typeCtx), + newDirectResults); } // Rewrite the returned value. SILValue origFullResult = returnInst->getOperand(); From 757a9d2ed3e8fbc780e2210a532be2647fe1b441 Mon Sep 17 00:00:00 2001 From: Andrew Trick Date: Sun, 20 Mar 2022 00:08:03 -0700 Subject: [PATCH 28/29] [SIL-opaque] Add address lowering test case. For borrowing a projection. 
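Condensed from the new f126 test that follows (all names below, SI, I, and Element, come from that test; nothing new is introduced): the input borrows an owned aggregate and extracts a field out of the borrow,

  %borrow = begin_borrow %si : $SI
  %element = struct_extract %borrow : $SI, #SI.element
  %copy = copy_value %element : $Element
  end_borrow %borrow : $SI

and the CHECK lines verify that address lowering rewrites this into a struct_element_addr on the tuple element's storage followed by a copy_addr [initialization] of the field into the @out Element result.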
--- test/SILOptimizer/address_lowering.sil | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/test/SILOptimizer/address_lowering.sil b/test/SILOptimizer/address_lowering.sil index d68d308454226..91e15e3635884 100644 --- a/test/SILOptimizer/address_lowering.sil +++ b/test/SILOptimizer/address_lowering.sil @@ -807,6 +807,28 @@ bb0(%0 : @owned $(AnyObject, T)): return %tuple : $(AnyObject, T) } +// CHECK-LABEL: sil [ossa] @f126_testDestructureAndBorrow : $@convention(method) (@in (SI, I)) -> (@out Element, @out I) { +// CHECK: bb0(%0 : $*Element, %1 : $*I, %2 : $*(SI, I)): +// CHECK: [[SI:%.*]] = tuple_element_addr %2 : $*(SI, I), 0 +// CHECK: [[I:%.*]] = tuple_element_addr %2 : $*(SI, I), 1 +// CHECK: [[LD:%.*]] = load [trivial] [[I]] : $*I +// CHECK: [[E:%.*]] = struct_element_addr [[SI]] : $*SI, #SI.element +// CHECK: copy_addr [[E]] to [initialization] %0 : $*Element +// CHECK: destroy_addr [[SI]] : $*SI +// CHECK: store [[LD]] to [trivial] %1 : $*I +// CHECK-LABEL: } // end sil function 'f126_testDestructureAndBorrow' +sil [ossa] @f126_testDestructureAndBorrow : $@convention(method) (@in (SI, I)) -> (@out Element, @out I) { +bb0(%0 : @owned $(SI, I)): + (%si, %i) = destructure_tuple %0 : $(SI, I) + %borrow = begin_borrow %si : $SI + %element = struct_extract %borrow : $SI, #SI.element + %copy = copy_value %element : $Element + end_borrow %borrow : $SI + destroy_value %si : $SI + %tuple = tuple(%copy : $Element, %i : $I) + return %tuple : $(Element, I) +} + // CHECK-LABEL: sil [ossa] @f130_testReleaseValue : $@convention(thin) (@in T) -> () { // CHECK: bb0(%0 : $*T): // CHECK: destroy_addr %0 : $*T From 3ddce99c1912ef07a4def1919f50bc70481769a4 Mon Sep 17 00:00:00 2001 From: Nate Chandler Date: Mon, 21 Mar 2022 16:01:46 -0700 Subject: [PATCH 29/29] [SIL-opaque] Don't override arg value category. Previously, when emitting block arguments, the value category of the SILType was overridden to be address for indirect arguments. With opaque types, that distinction is made later during AddressLowering. So only do that when opaque types are disabled. --- lib/SILGen/SILGenProlog.cpp | 5 +++-- test/SILGen/opaque_values_silgen.swift | 17 +++++++++++++++++ 2 files changed, 20 insertions(+), 2 deletions(-) diff --git a/lib/SILGen/SILGenProlog.cpp b/lib/SILGen/SILGenProlog.cpp index 6c2de16c6ad77..83eeb79cf0f42 100644 --- a/lib/SILGen/SILGenProlog.cpp +++ b/lib/SILGen/SILGenProlog.cpp @@ -78,8 +78,9 @@ class EmitBBArguments : public CanTypeVisitor : Seq { return EnumIter(_base: _base.makeIterator()) } } + +extension Collection { + func transformEachElement(_ cl: (Element) -> U) -> [U] { + return map(cl) + } +} + +extension Array where Element == Int { + // CHECK-LABEL: sil private [ossa] @$sSa20opaque_values_silgenSiRszlE20incrementEachElementSaySiGyFS2iXEfU_ : {{.*}} { + // CHECK: {{bb[0-9]+}}({{%[^,]+}} : $Int): + // CHECK-LABEL: } // end sil function '$sSa20opaque_values_silgenSiRszlE20incrementEachElementSaySiGyFS2iXEfU_' + func incrementEachElement() -> [Int] { + return transformEachElement { element in + return element + 1 + } + } +}
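The effect of this last value-category change can be pictured with a small before/after sketch (illustrative only; the generic parameter T and the @guaranteed convention below are placeholders chosen to match the earlier opaque-values tests, not lines from this patch). For a formally indirect parameter of an address-only type, the prolog previously forced the address category on the block argument:

  bb0(%0 : $*T):

With -enable-sil-opaque-values, the argument now keeps its object category and only receives an address later, in AddressLowering:

  bb0(%0 : @guaranteed $T):

The new incrementEachElement test above checks the loadable case of the same thing: the closure's block argument stays $Int rather than $*Int.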