diff --git a/SwiftCompilerSources/Sources/Optimizer/FunctionPasses/CMakeLists.txt b/SwiftCompilerSources/Sources/Optimizer/FunctionPasses/CMakeLists.txt index 317e208d7145b..0c38352a19759 100644 --- a/SwiftCompilerSources/Sources/Optimizer/FunctionPasses/CMakeLists.txt +++ b/SwiftCompilerSources/Sources/Optimizer/FunctionPasses/CMakeLists.txt @@ -12,6 +12,7 @@ swift_compiler_sources(Optimizer AsyncDemotion.swift BooleanLiteralFolding.swift CleanupDebugSteps.swift + ClosureSpecialization.swift ComputeEscapeEffects.swift ComputeSideEffects.swift DeadStoreElimination.swift diff --git a/SwiftCompilerSources/Sources/Optimizer/FunctionPasses/ClosureSpecialization.swift b/SwiftCompilerSources/Sources/Optimizer/FunctionPasses/ClosureSpecialization.swift new file mode 100644 index 0000000000000..370c03d9d0530 --- /dev/null +++ b/SwiftCompilerSources/Sources/Optimizer/FunctionPasses/ClosureSpecialization.swift @@ -0,0 +1,720 @@ +//===--- ClosureSpecialization.swift ---------------------------===// +// +// This source file is part of the Swift.org open source project +// +// Copyright (c) 2014 - 2023 Apple Inc. and the Swift project authors +// Licensed under Apache License v2.0 with Runtime Library Exception +// +// See https://swift.org/LICENSE.txt for license information +// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors +// +//===-----------------------------------------------------------------------===// + +/// This file contains the closure-specialization optimizations for general and differentiable Swift. + +/// General Closure Specialization +/// ------------------------------------ +/// TODO: Add description when the functionality is added. + +/// AutoDiff Closure Specialization +/// ------------------------------- +/// This optimization performs closure specialization tailored for the patterns seen in Swift Autodiff. In principle, +/// the optimization does the same thing as the existing closure specialization pass. 
However, it is tailored to the +/// patterns of Swift Autodiff. +/// +/// The compiler performs reverse-mode differentiation on functions marked with `@differentiable(reverse)`. In doing so, +/// it generates corresponding VJP and Pullback functions, which perform the forward and reverse pass respectively. You +/// can think of VJPs as functions that "differentiate" an original function and Pullbacks as the calculated +/// "derivative" of the original function. +/// +/// VJPs always return a tuple of 2 values -- the original result and the Pullback. Pullbacks are essentially a chain +/// of closures, where the closure-contexts are implicitly used as the so-called "tape" during the reverse +/// differentiation process. It is this chain of closures contained within the Pullbacks that this optimization aims +/// to optimize via closure specialization. +/// +/// The code patterns that this optimization targets, look similar to the one below: +/// ``` swift +/// +/// // Since `foo` is marked with the `differentiable(reverse)` attribute the compiler +/// // will generate corresponding VJP and Pullback functions in SIL. Let's assume that +/// // these functions are called `vjp_foo` and `pb_foo` respectively. +/// @differentiable(reverse) +/// func foo(_ x: Float) -> Float { +/// return sin(x) +/// } +/// +/// //============== Before closure specialization ==============// +/// // VJP of `foo`. Returns the original result and the Pullback of `foo`. +/// sil @vjp_foo: $(Float) -> (originalResult: Float, pullback: (Float) -> Float) { +/// bb0(%0: $Float): +/// // __Inlined__ `vjp_sin`: It is important for all intermediate VJPs to have +/// // been inlined in `vjp_foo`, otherwise `vjp_foo` will not be able to determine +/// // that `pb_foo` is closing over other closures and no specialization will happen. 
+/// \ +/// %originalResult = apply @sin(%0): $(Float) -> Float \__ Inlined `vjp_sin` +/// %partially_applied_pb_sin = partial_apply pb_sin(%0): $(Float) -> Float / +/// / +/// +/// %pb_foo = function_ref @pb_foo: $@convention(thin) (Float, (Float) -> Float) -> Float +/// %partially_applied_pb_foo = partial_apply %pb_foo(%partially_applied_pb_sin): $(Float, (Float) -> Float) -> Float +/// +/// return (%originalResult, %partially_applied_pb_foo) +/// } +/// +/// // Pullback of `foo`. +/// // +/// // It receives what are called as intermediate closures that represent +/// // the calculations that the Pullback needs to perform to calculate a function's +/// // derivative. +/// // +/// // The intermediate closures may themselves contain intermediate closures and +/// // that is why the Pullback for a function differentiated at the "top" level +/// // may end up being a "chain" of closures. +/// sil @pb_foo: $(Float, (Float) -> Float) -> Float { +/// bb0(%0: $Float, %pb_sin: $(Float) -> Float): +/// %derivative_of_sin = apply %pb_sin(%0): $(Float) -> Float +/// return %derivative_of_sin: Float +/// } +/// +/// //============== After closure specialization ==============// +/// sil @vjp_foo: $(Float) -> (originalResult: Float, pullback: (Float) -> Float) { +/// bb0(%0: $Float): +/// %originalResult = apply @sin(%0): $(Float) -> Float +/// +/// // Before the optimization, pullback of `foo` used to take a closure for computing +/// // pullback of `sin`. Now, the specialized pullback of `foo` takes the arguments that +/// // pullback of `sin` used to close over and pullback of `sin` is instead copied over +/// // inside pullback of `foo`. 
+/// %specialized_pb_foo = function_ref @specialized_pb_foo: $@convention(thin) (Float, Float) -> Float +/// %partially_applied_pb_foo = partial_apply %specialized_pb_foo(%0): $(Float, Float) -> Float +/// +/// return (%originalResult, %partially_applied_pb_foo) +/// } +/// +/// sil @specialized_pb_foo: $(Float, Float) -> Float { +/// bb0(%0: $Float, %1: $Float): +/// %2 = partial_apply @pb_sin(%1): $(Float) -> Float +/// %3 = apply %2(): $() -> Float +/// return %3: $Float +/// } +/// ``` + +import SIL + +private let verbose = false + +private func log(_ message: @autoclosure () -> String) { + if verbose { + print("### \(message())") + } +} + +// =========== Entry point =========== // +let generalClosureSpecialization = FunctionPass(name: "experimental-swift-based-closure-specialization") { + (function: Function, context: FunctionPassContext) in + // TODO: Implement general closure specialization optimization + print("NOT IMPLEMENTED") +} + +let autodiffClosureSpecialization = FunctionPass(name: "autodiff-closure-specialization") { + (function: Function, context: FunctionPassContext) in + // TODO: Pass is a WIP and current implementation is incomplete + if !function.isAutodiffVJP { + return + } + + print("Specializing closures in function: \(function.name)") + print("===============================================") + var callSites = gatherCallSites(in: function, context) + + callSites.forEach { callSite in + print("PartialApply call site: \(callSite.applySite)") + print("Passed in closures: ") + for index in callSite.closureArgDescriptors.indices { + var closureArgDescriptor = callSite.closureArgDescriptors[index] + print("\(index+1). 
\(closureArgDescriptor.closureInfo.closure)") + } + } + print("\n") +} + +// =========== Top-level functions ========== // + +private let specializationLevelLimit = 2 + +private func gatherCallSites(in caller: Function, _ context: FunctionPassContext) -> [CallSite] { + /// __Root__ closures created via `partial_apply` or `thin_to_thick_function` may be converted and reabstracted + /// before finally being used at an apply site. We do not want to handle these intermediate closures separately + /// as they are handled and cloned into the specialized function as part of the root closures. Therefore, we keep + /// track of these intermediate closures in a set. + /// + /// This set is populated via the `markConvertedAndReabstractedClosuresAsUsed` function which is called when we're + /// handling the different uses of our root closures. + /// + /// Below SIL example illustrates the above point. + /// ``` + /// // The below set of a "root" closure and its reabstractions/conversions + /// // will be handled as a unit and the entire set will be copied over + /// // in the specialized version of `takesClosure` if we determine that we + /// // can specialize `takesClosure` against its closure argument. 
+ /// __ + /// %someFunction = function_ref @someFunction: $@convention(thin) (Int, Int) -> Int \ + /// %rootClosure = partial_apply [callee_guaranteed] %someFunction (%someInt): $(Int, Int) -> Int \ + /// %thunk = function_ref @reabstractionThunk : $@convention(thin) (@callee_guaranteed (Int) -> Int) -> @out Int / + /// %reabstractedClosure = partial_apply [callee_guaranteed] %thunk(%rootClosure) : / + /// $@convention(thin) (@callee_guaranteed (Int) -> Int) -> @out Int __/ + /// + /// %takesClosure = function_ref @takesClosure : $@convention(thin) (@owned @callee_guaranteed (Int) -> @out Int) -> Int + /// %result = partial_apply %takesClosure(%reabstractedClosure) : $@convention(thin) (@owned @callee_guaranteed () -> @out Int) -> Int + /// ret %result + /// ``` + var convertedAndReabstractedClosures = InstructionSet(context) + + defer { + convertedAndReabstractedClosures.deinitialize() + } + + var callSiteMap = CallSiteMap() + + for inst in caller.instructions { + if !convertedAndReabstractedClosures.contains(inst), + let rootClosure = inst.asSupportedClosure + { + updateCallSites(for: rootClosure, in: &callSiteMap, + convertedAndReabstractedClosures: &convertedAndReabstractedClosures, context) + } + } + + return callSiteMap.callSites +} + +// ===================== Utility functions and extensions ===================== // + +private func updateCallSites(for rootClosure: SingleValueInstruction, in callSiteMap: inout CallSiteMap, + convertedAndReabstractedClosures: inout InstructionSet, _ context: FunctionPassContext) { + var rootClosurePossibleLifeRange = InstructionRange(begin: rootClosure, context) + defer { + rootClosurePossibleLifeRange.deinitialize() + } + + var rootClosureApplies = OperandWorklist(context) + defer { + rootClosureApplies.deinitialize() + } + + // A "root" closure undergoing conversions and/or reabstractions has additional restrictions placed upon it, in order + // for a call site to be specialized against it. 
We handle conversion/reabstraction uses before we handle apply uses + // to gather the parameters required to evaluate these restrictions or to skip call site uses of "unsupported" + // closures altogether. + // + // There are currently 2 restrictions that are evaluated prior to specializing a callsite against a converted and/or + // reabstracted closure - + // 1. A reabstracted root closure can only be specialized against, if the reabstracted closure is ultimately passed + // trivially (as a noescape+thick function) into the call site. + // + // 2. A root closure may be a partial_apply [stack], in which case we need to make sure that all mark_dependence + // bases for it will be available in the specialized callee in case the call site is specialized against this root + // closure. + + let (foundUnexpectedUse, haveUsedReabstraction) = + handleNonApplies(for: rootClosure, rootClosureApplies: &rootClosureApplies, + rootClosurePossibleLifeRange: &rootClosurePossibleLifeRange, context); + + + if foundUnexpectedUse { + return + } + + let intermediateClosureArgDescriptorData = + handleApplies(for: rootClosure, callSiteMap: &callSiteMap, rootClosureApplies: &rootClosureApplies, + rootClosurePossibleLifeRange: &rootClosurePossibleLifeRange, + convertedAndReabstractedClosures: &convertedAndReabstractedClosures, + haveUsedReabstraction: haveUsedReabstraction, context) + + finalizeCallSites(for: rootClosure, in: &callSiteMap, + rootClosurePossibleLifeRange: rootClosurePossibleLifeRange, + intermediateClosureArgDescriptorData: intermediateClosureArgDescriptorData, context) +} + +/// Handles all non-apply direct and transitive uses of `rootClosure`. +/// +/// Returns: +/// haveUsedReabstraction - whether the root closure is reabstracted via a thunk +/// foundUnexpectedUse - whether the root closure is directly or transitively used in an instruction that we don't know +/// how to handle. If true, then `rootClosure` should not be specialized against. 
+private func handleNonApplies(for rootClosure: SingleValueInstruction, + rootClosureApplies: inout OperandWorklist, + rootClosurePossibleLifeRange: inout InstructionRange, + _ context: FunctionPassContext) + -> (foundUnexpectedUse: Bool, haveUsedReabstraction: Bool) +{ + var foundUnexpectedUse = false + var haveUsedReabstraction = false + + /// The root closure or an intermediate closure created by reabstracting the root closure may be a `partial_apply + /// [stack]` and we need to make sure that all `mark_dependence` bases for this `onStack` closure will be available in + /// the specialized callee, in case the call site is specialized against this root closure. + /// + /// `possibleMarkDependenceBases` keeps track of all potential values that may be used as bases for creating + /// `mark_dependence`s for our `onStack` root/reabstracted closures. For root closures these values are non-trivial + /// closure captures (which are always available as function arguments in the specialized callee). For reabstracted + /// closures these values may be the root closure or its conversions (below is a short SIL example representing this + /// case). 
+ /// ``` + /// %someFunction = function_ref @someFunction : $@convention(thin) (Int) -> Int + /// %rootClosure = partial_apply [callee_guaranteed] %someFunction(%someInt) : $@convention(thin) (Int) -> Int + /// %noescapeRootClosure = convert_escape_to_noescape %rootClosure : $@callee_guaranteed () -> Int to $@noescape @callee_guaranteed () -> Int + /// %thunk = function_ref @reabstractionThunk : $@convention(thin) (@noescape @callee_guaranteed () -> Int) -> @out Int + /// %thunkedRootClosure = partial_apply [callee_guaranteed] [on_stack] %thunk(%noescapeRootClosure) : $@convention(thin) (@noescape @callee_guaranteed () -> Int) -> @out Int + /// %dependency = mark_dependence %thunkedRootClosure : $@noescape @callee_guaranteed () -> @out Int on %noescapeClosure : $@noescape @callee_guaranteed () -> Int + /// %takesClosure = function_ref @takesClosure : $@convention(thin) (@owned @noescape @callee_guaranteed () -> @out Int) + /// %ret = apply %takesClosure(%dependency) : $@convention(thin) (@owned @noescape @callee_guaranteed () -> @out Int) + /// ``` + /// + /// Any value outside of the aforementioned values is not going to be available in the specialized callee and a + /// `mark_dependence` of the root closure on such a value means that we cannot specialize the call site against it. + var possibleMarkDependenceBases = ValueSet(context) + defer { + possibleMarkDependenceBases.deinitialize() + } + + var rootClosureConversionsAndReabstractions = OperandWorklist(context) + rootClosureConversionsAndReabstractions.pushIfNotVisited(contentsOf: rootClosure.uses) + defer { + rootClosureConversionsAndReabstractions.deinitialize() + } + + if let pai = rootClosure as? 
PartialApplyInst { + for arg in pai.arguments { + possibleMarkDependenceBases.insert(arg) + } + } + + while let use = rootClosureConversionsAndReabstractions.pop() { + switch use.instruction { + case let cfi as ConvertFunctionInst: + rootClosureConversionsAndReabstractions.pushIfNotVisited(contentsOf: cfi.uses) + possibleMarkDependenceBases.insert(cfi) + rootClosurePossibleLifeRange.insert(use.instruction) + + case let cvt as ConvertEscapeToNoEscapeInst: + rootClosureConversionsAndReabstractions.pushIfNotVisited(contentsOf: cvt.uses) + possibleMarkDependenceBases.insert(cvt) + rootClosurePossibleLifeRange.insert(use.instruction) + + case let pai as PartialApplyInst: + if !pai.isPullbackInResultOfAutodiffVJP, + pai.isPartialApplyOfReabstractionThunk, + pai.isSupportedClosure, + pai.arguments[0].type.isNoEscapeFunction, + pai.arguments[0].type.isThickFunction + { + rootClosureConversionsAndReabstractions.pushIfNotVisited(contentsOf: pai.uses) + possibleMarkDependenceBases.insert(pai) + rootClosurePossibleLifeRange.insert(use.instruction) + haveUsedReabstraction = true + } else { + rootClosureApplies.pushIfNotVisited(use) + } + + case let mv as MoveValueInst: + rootClosureConversionsAndReabstractions.pushIfNotVisited(contentsOf: mv.uses) + possibleMarkDependenceBases.insert(mv) + rootClosurePossibleLifeRange.insert(use.instruction) + + // Uses of a copy of root-closure do not count as + // uses of the root-closure + case is CopyValueInst: + rootClosurePossibleLifeRange.insert(use.instruction) + + case is DestroyValueInst: + rootClosurePossibleLifeRange.insert(use.instruction) + + case let mdi as MarkDependenceInst: + if possibleMarkDependenceBases.contains(mdi.base), + mdi.value == use.value, + mdi.value.type.isNoEscapeFunction, + mdi.value.type.isThickFunction + { + rootClosureConversionsAndReabstractions.pushIfNotVisited(contentsOf: mdi.uses) + rootClosurePossibleLifeRange.insert(use.instruction) + } + + default: + foundUnexpectedUse = true + log("Found unexpected 
direct or transitive user of root closure: \(use.instruction)") + return (foundUnexpectedUse, haveUsedReabstraction) + } + } + + return (foundUnexpectedUse, haveUsedReabstraction) +} + +private typealias IntermediateClosureArgDescriptorDatum = (applySite: SingleValueInstruction, closureArgIndex: Int, paramInfo: ParameterInfo) + +private func handleApplies(for rootClosure: SingleValueInstruction, callSiteMap: inout CallSiteMap, + rootClosureApplies: inout OperandWorklist, + rootClosurePossibleLifeRange: inout InstructionRange, + convertedAndReabstractedClosures: inout InstructionSet, haveUsedReabstraction: Bool, + _ context: FunctionPassContext) -> [IntermediateClosureArgDescriptorDatum] +{ + var intermediateClosureArgDescriptorData: [IntermediateClosureArgDescriptorDatum] = [] + + while let use = rootClosureApplies.pop() { + rootClosurePossibleLifeRange.insert(use.instruction) + + // TODO [extend to general swift]: Handle full apply sites + guard let pai = use.instruction as? PartialApplyInst else { + continue + } + + // TODO: Handling generic closures may be possible but is not yet implemented + if pai.hasSubstitutions || !pai.calleeIsDynamicFunctionRef || !pai.isPullbackInResultOfAutodiffVJP { + continue + } + + guard let callee = pai.referencedFunction else { + continue + } + + if callee.isAvailableExternally { + continue + } + + // Don't specialize non-fragile (read as non-serialized) callees if the caller is fragile; the specialized callee + // will have shared linkage, and thus cannot be referenced from the fragile caller. + let caller = rootClosure.parentFunction + if caller.isSerialized && !callee.isSerialized { + continue + } + + // If the callee uses a dynamic Self, we cannot specialize it, since the resulting specialization might no longer + // have 'self' as the last parameter. + // + // TODO: We could fix this by inserting new arguments more carefully, or changing how we model dynamic Self + // altogether. 
+ if callee.mayBindDynamicSelf { + continue + } + + // Proceed if the closure is passed as an argument (and not called). If it is called we have nothing to do. + // + // `closureArgumentIndex` is the index of the closure in the callee's argument list. + guard let closureArgumentIndex = pai.calleeArgumentIndex(of: use) else { + continue + } + + // Ok, we know that we can perform the optimization but not whether or not the optimization is profitable. Check if + // the closure is actually called in the callee (or in a function called by the callee). + if !isClosureApplied(in: callee, closureArgIndex: closureArgumentIndex) { + continue + } + + // We currently only support copying intermediate reabstraction closures if the final closure is ultimately passed + // trivially. + let closureType = use.value.type + let isClosurePassedTrivially = closureType.isNoEscapeFunction && closureType.isThickFunction + + // Mark the converted/reabstracted closures as used. + if haveUsedReabstraction { + markConvertedAndReabstractedClosuresAsUsed(rootClosure: rootClosure, convertedAndReabstractedClosure: use.value, + convertedAndReabstractedClosures: &convertedAndReabstractedClosures) + + if !isClosurePassedTrivially { + continue + } + } + + let onlyHaveThinToThickClosure = rootClosure is ThinToThickFunctionInst && !haveUsedReabstraction + + guard let closureParamInfo = pai.operandConventions[parameter: use.index] else { + fatalError("While handling apply uses, parameter info not found for operand: \(use)!") + } + + if (closureParamInfo.convention.isGuaranteed || isClosurePassedTrivially) + && !onlyHaveThinToThickClosure + { + continue + } + + // Functions with a readnone, readonly or releasenone effect and a nontrivial context cannot be specialized. + // Inserting a release in such a function results in miscompilation after other optimizations. For now, the + // specialization is disabled. 
+ // + // TODO: A @noescape closure should never be converted to an @owned argument regardless of the function's effect + // attribute. + if !callee.effectAllowsSpecialization && !onlyHaveThinToThickClosure { + continue + } + + // Avoid an infinite specialization loop caused by repeated runs of ClosureSpecializer and CapturePropagation. + // CapturePropagation propagates constant function-literals. Such function specializations can then be optimized + // again by the ClosureSpecializer and so on. This happens if a closure argument is called _and_ referenced in + // another closure, which is passed to a recursive call. E.g. + // + // func foo(_ c: @escaping () -> ()) { + // c() foo({ c() }) + // } + // + // A limit of 2 is good enough and will not be exceed in "regular" optimization scenarios. + let closureCallee = rootClosure is PartialApplyInst + ? (rootClosure as! PartialApplyInst).referencedFunction! + : (rootClosure as! ThinToThickFunctionInst).referencedFunction! + + if closureCallee.specializationLevel > specializationLevelLimit { + continue + } + + if callSiteMap[pai] == nil { + callSiteMap.insert(key: pai, value: CallSite(applySite: pai)) + } + + intermediateClosureArgDescriptorData + .append((applySite: pai, closureArgIndex: closureArgumentIndex, paramInfo: closureParamInfo)) + } + + return intermediateClosureArgDescriptorData +} + +/// Finalizes the call sites for a given root closure by adding a corresponding `ClosureArgDescriptor` +/// to all call sites where the closure is ultimately passed as an argument. 
+private func finalizeCallSites(for rootClosure: SingleValueInstruction, in callSiteMap: inout CallSiteMap, + rootClosurePossibleLifeRange: InstructionRange, + intermediateClosureArgDescriptorData: [IntermediateClosureArgDescriptorDatum], + _ context: FunctionPassContext) +{ + let closureInfo = ClosureInfo(closure: rootClosure, lifetimeFrontier: Array(rootClosurePossibleLifeRange.ends)) + + for (applySite, closureArgumentIndex, parameterInfo) in intermediateClosureArgDescriptorData { + guard var callSite = callSiteMap[applySite] else { + fatalError("While finalizing call sites, call site descriptor not found for call site: \(applySite)!") + } + let closureArgDesc = ClosureArgDescriptor(closureInfo: closureInfo, closureArgumentIndex: closureArgumentIndex, + parameterInfo: parameterInfo) + callSite.appendClosureArgDescriptor(closureArgDesc) + callSiteMap.update(key: applySite, value: callSite) + } +} + +private func isClosureApplied(in callee: Function, closureArgIndex index: Int) -> Bool { + func inner(_ callee: Function, _ index: Int, _ handledFuncs: inout Set) -> Bool { + let closureArg = callee.argument(at: index) + + for use in closureArg.uses { + if let fai = use.instruction as? FullApplySite { + if fai.callee == closureArg { + return true + } + + if let faiCallee = fai.referencedFunction, + !faiCallee.blocks.isEmpty, + handledFuncs.insert(faiCallee).inserted, + handledFuncs.count <= recursionBudget + { + if inner(faiCallee, fai.calleeArgumentIndex(of: use)!, &handledFuncs) { + return true + } + } + } + } + + return false + } + + // Limit the number of recursive calls to not go into exponential behavior in corner cases. + let recursionBudget = 8 + var handledFuncs: Set = [] + return inner(callee, index, &handledFuncs) +} + +/// Marks any converted/reabstracted closures, corresponding to a given root closure as used. We do not want to +/// look at such closures separately as during function specialization they will be handled as part of the root closure. 
+private func markConvertedAndReabstractedClosuresAsUsed(rootClosure: Value, convertedAndReabstractedClosure: Value, + convertedAndReabstractedClosures: inout InstructionSet) +{ + if convertedAndReabstractedClosure != rootClosure { + switch convertedAndReabstractedClosure { + case let pai as PartialApplyInst: + convertedAndReabstractedClosures.insert(pai) + return + markConvertedAndReabstractedClosuresAsUsed(rootClosure: rootClosure, + convertedAndReabstractedClosure: pai.arguments[0], + convertedAndReabstractedClosures: &convertedAndReabstractedClosures) + case let cvt as ConvertFunctionInst: + convertedAndReabstractedClosures.insert(cvt) + return + markConvertedAndReabstractedClosuresAsUsed(rootClosure: rootClosure, + convertedAndReabstractedClosure: cvt.fromFunction, + convertedAndReabstractedClosures: &convertedAndReabstractedClosures) + case let cvt as ConvertEscapeToNoEscapeInst: + convertedAndReabstractedClosures.insert(cvt) + return + markConvertedAndReabstractedClosuresAsUsed(rootClosure: rootClosure, + convertedAndReabstractedClosure: cvt.fromFunction, + convertedAndReabstractedClosures: &convertedAndReabstractedClosures) + case let mdi as MarkDependenceInst: + convertedAndReabstractedClosures.insert(mdi) + return + markConvertedAndReabstractedClosuresAsUsed(rootClosure: rootClosure, convertedAndReabstractedClosure: mdi.value, + convertedAndReabstractedClosures: &convertedAndReabstractedClosures) + default: + fatalError("While marking converted/reabstracted closures as used, found unexpected instruction: \(convertedAndReabstractedClosure)") + } + } +} + +private extension PartialApplyInst { + /// True, if the closure obtained from this partial_apply is the + /// pullback returned from an autodiff VJP + var isPullbackInResultOfAutodiffVJP: Bool { + if self.parentFunction.isAutodiffVJP, + let use = self.uses.singleUse, + let tupleInst = use.instruction as? 
TupleInst, + let returnInst = self.parentFunction.returnInstruction, + tupleInst == returnInst.returnedValue + { + return true + } + + return false + } + + var hasOnlyInoutIndirectArguments: Bool { + self.argumentOperands + .filter { !$0.value.type.isObject } + .allSatisfy { self.convention(of: $0)!.isInout } + } +} + +private extension Instruction { + var asSupportedClosure: SingleValueInstruction? { + switch self { + case let tttf as ThinToThickFunctionInst where tttf.callee is FunctionRefInst: + return tttf + // TODO: figure out what to do with non-inout indirect arguments + // https://forums.swift.org/t/non-inout-indirect-types-not-supported-in-closure-specialization-optimization/70826 + case let pai as PartialApplyInst where pai.callee is FunctionRefInst && pai.hasOnlyInoutIndirectArguments: + return pai + default: + return nil + } + } + + var isSupportedClosure: Bool { + asSupportedClosure != nil + } +} + +private extension ApplySite { + var calleeIsDynamicFunctionRef: Bool { + return !(callee is DynamicFunctionRefInst || callee is PreviousDynamicFunctionRefInst) + } +} + +private extension Function { + var effectAllowsSpecialization: Bool { + switch self.effectAttribute { + case .readNone, .readOnly, .releaseNone: return false + default: return true + } + } +} + +// ===================== Utility Types ===================== // + +private struct OrderedDict { + private var valueIndexDict: [Key: Int] = [:] + private var entryList: [(Key, Value)] = [] + + public subscript(key: Key) -> Value? 
{ + if let index = valueIndexDict[key] { + return entryList[index].1 + } + return nil + } + + public mutating func insert(key: Key, value: Value) { + if valueIndexDict[key] == nil { + valueIndexDict[key] = entryList.count + entryList.append((key, value)) + } + } + + public mutating func update(key: Key, value: Value) { + if let index = valueIndexDict[key] { + entryList[index].1 = value + } + } + + public var keys: LazyMapSequence, Key> { + entryList.lazy.map { $0.0 } + } + + public var values: LazyMapSequence, Value> { + entryList.lazy.map { $0.1 } + } +} + +private typealias CallSiteMap = OrderedDict + +private extension CallSiteMap { + var callSites: [CallSite] { + Array(self.values) + } +} + + +/// Represents all the information required to represent a closure in isolation, i.e., outside of a callsite context +/// where the closure may be getting passed as an argument. +/// +/// Composed with other information inside a `ClosureArgDescriptor` to represent a closure as an argument at a callsite. +private struct ClosureInfo { + let closure: SingleValueInstruction + let lifetimeFrontier: [Instruction] + + init(closure: SingleValueInstruction, lifetimeFrontier: [Instruction]) { + self.closure = closure + self.lifetimeFrontier = lifetimeFrontier + } + +} + +/// Represents a closure as an argument at a callsite. +private struct ClosureArgDescriptor { + let closureInfo: ClosureInfo + /// The index of the closure in the callsite's argument list. + let closureArgumentIndex: Int + let parameterInfo: ParameterInfo +} + +/// Represents a callsite containing one or more closure arguments. 
+private struct CallSite { + let applySite: ApplySite + var closureArgDescriptors: [ClosureArgDescriptor] = [] + + public init(applySite: ApplySite) { + self.applySite = applySite + } + + public mutating func appendClosureArgDescriptor(_ descriptor: ClosureArgDescriptor) { + self.closureArgDescriptors.append(descriptor) + } +} + +// ===================== Unit tests ===================== // + +let gatherCallSitesTest = FunctionTest("closure_specialize_gather_call_sites") { function, arguments, context in + print("Specializing closures in function: \(function.name)") + print("===============================================") + var callSites = gatherCallSites(in: function, context) + + callSites.forEach { callSite in + print("PartialApply call site: \(callSite.applySite)") + print("Passed in closures: ") + for index in callSite.closureArgDescriptors.indices { + var closureArgDescriptor = callSite.closureArgDescriptors[index] + print("\(index+1). \(closureArgDescriptor.closureInfo.closure)") + } + } + print("\n") +} diff --git a/SwiftCompilerSources/Sources/Optimizer/PassManager/PassRegistration.swift b/SwiftCompilerSources/Sources/Optimizer/PassManager/PassRegistration.swift index 4acc6782bd28a..dbd483d136a1c 100644 --- a/SwiftCompilerSources/Sources/Optimizer/PassManager/PassRegistration.swift +++ b/SwiftCompilerSources/Sources/Optimizer/PassManager/PassRegistration.swift @@ -93,6 +93,9 @@ private func registerSwiftPasses() { registerPass(lifetimeDependenceDiagnosticsPass, { lifetimeDependenceDiagnosticsPass.run($0) }) registerPass(lifetimeDependenceInsertionPass, { lifetimeDependenceInsertionPass.run($0) }) registerPass(lifetimeDependenceScopeFixupPass, { lifetimeDependenceScopeFixupPass.run($0) }) + registerPass(generalClosureSpecialization, { generalClosureSpecialization.run($0) }) + registerPass(autodiffClosureSpecialization, { autodiffClosureSpecialization.run($0) }) + // Instruction passes registerForSILCombine(BeginCOWMutationInst.self, { 
run(BeginCOWMutationInst.self, $0) }) registerForSILCombine(GlobalValueInst.self, { run(GlobalValueInst.self, $0) }) diff --git a/SwiftCompilerSources/Sources/Optimizer/Utilities/OptUtils.swift b/SwiftCompilerSources/Sources/Optimizer/Utilities/OptUtils.swift index 9ee723c123fe3..72b1b9e6e54b2 100644 --- a/SwiftCompilerSources/Sources/Optimizer/Utilities/OptUtils.swift +++ b/SwiftCompilerSources/Sources/Optimizer/Utilities/OptUtils.swift @@ -398,6 +398,23 @@ extension LoadInst { } } +extension PartialApplyInst { + var isPartialApplyOfReabstractionThunk: Bool { + // A partial_apply of a reabstraction thunk either has a single capture + // (a function) or two captures (function and dynamic Self type). + if self.numArguments == 1 || self.numArguments == 2, + let fun = self.referencedFunction, + fun.isReabstractionThunk, + self.arguments[0].type.isFunction, + self.arguments[0].type.isReferenceCounted(in: self.parentFunction) || self.callee.type.isThickFunction + { + return true + } + + return false + } +} + extension FunctionPassContext { /// Returns true if any blocks were removed. func removeDeadBlocks(in function: Function) -> Bool { @@ -540,6 +557,10 @@ extension Function { } return nil } + + var mayBindDynamicSelf: Bool { + self.bridged.mayBindDynamicSelf() + } } extension FullApplySite { diff --git a/SwiftCompilerSources/Sources/Optimizer/Utilities/Test.swift b/SwiftCompilerSources/Sources/Optimizer/Utilities/Test.swift index 187a9093017e5..4debbd801ac3f 100644 --- a/SwiftCompilerSources/Sources/Optimizer/Utilities/Test.swift +++ b/SwiftCompilerSources/Sources/Optimizer/Utilities/Test.swift @@ -164,7 +164,8 @@ public func registerOptimizerTests() { lifetimeDependenceUseTest, linearLivenessTest, parseTestSpecificationTest, - variableIntroducerTest + variableIntroducerTest, + gatherCallSitesTest ) // Finally register the thunk they all call through. 
diff --git a/SwiftCompilerSources/Sources/SIL/ApplySite.swift b/SwiftCompilerSources/Sources/SIL/ApplySite.swift index f53c4a178784a..c32687f84f435 100644 --- a/SwiftCompilerSources/Sources/SIL/ApplySite.swift +++ b/SwiftCompilerSources/Sources/SIL/ApplySite.swift @@ -108,6 +108,10 @@ public protocol ApplySite : Instruction { extension ApplySite { public var callee: Value { operands[ApplyOperandConventions.calleeIndex].value } + public var hasSubstitutions: Bool { + return substitutionMap.hasAnySubstitutableParams + } + public var isAsync: Bool { return callee.type.isAsyncFunction } @@ -126,7 +130,15 @@ extension ApplySite { return false } - /// Returns the subset of operands that are argument operands. + public var isCalleeNoReturn: Bool { + bridged.ApplySite_isCalleeNoReturn() + } + + public var isCalleeTrapNoReturn: Bool { + referencedFunction?.isTrapNoReturn ?? false + } + + /// Returns the subset of operands which are argument operands. /// /// This does not include the callee function operand. public var argumentOperands: OperandArray { diff --git a/SwiftCompilerSources/Sources/SIL/BasicBlock.swift b/SwiftCompilerSources/Sources/SIL/BasicBlock.swift index 234b9a9d7eb53..c62a322812c25 100644 --- a/SwiftCompilerSources/Sources/SIL/BasicBlock.swift +++ b/SwiftCompilerSources/Sources/SIL/BasicBlock.swift @@ -57,6 +57,26 @@ final public class BasicBlock : CustomStringConvertible, HasShortDescription, Ha successors.count == 1 ? successors[0] : nil } + /// All function exiting blocks except for ones with an `unreachable` terminator, + /// not immediately preceded by an apply of a no-return function. + public var isReachableExitBlock: Bool { + switch terminator { + case let termInst where termInst.isFunctionExiting: + return true + case is UnreachableInst: + if let instBeforeUnreachable = terminator.previous, + let ai = instBeforeUnreachable as? 
ApplyInst, + ai.isCalleeNoReturn && !ai.isCalleeTrapNoReturn + { + return true + } + + return false + default: + return false + } + } + /// The index of the basic block in its function. /// This has O(n) complexity. Only use it for debugging public var index: Int { diff --git a/SwiftCompilerSources/Sources/SIL/Function.swift b/SwiftCompilerSources/Sources/SIL/Function.swift index 4eb55fcc62f67..127a924e5deeb 100644 --- a/SwiftCompilerSources/Sources/SIL/Function.swift +++ b/SwiftCompilerSources/Sources/SIL/Function.swift @@ -31,6 +31,12 @@ final public class Function : CustomStringConvertible, HasShortDescription, Hash hasher.combine(ObjectIdentifier(self)) } + public var isTrapNoReturn: Bool { bridged.isTrapNoReturn() } + + public var isAutodiffVJP: Bool { bridged.isAutodiffVJP() } + + public var specializationLevel: Int { bridged.specializationLevel() } + public var hasOwnership: Bool { bridged.hasOwnership() } public var hasLoweredAddresses: Bool { bridged.hasLoweredAddresses() } @@ -59,6 +65,10 @@ final public class Function : CustomStringConvertible, HasShortDescription, Hash entryBlock.arguments.lazy.map { $0 as! FunctionArgument } } + public func argument(at index: Int) -> FunctionArgument { + entryBlock.arguments[index] as! FunctionArgument + } + /// All instructions of all blocks. public var instructions: LazySequence>> { blocks.lazy.flatMap { $0.instructions } @@ -83,6 +93,8 @@ final public class Function : CustomStringConvertible, HasShortDescription, Hash public var isAsync: Bool { bridged.isAsync() } + public var isReabstractionThunk: Bool { bridged.isReabstractionThunk() } + /// True if this is a `[global_init]` function. 
/// /// Such a function is typically a global addressor which calls the global's diff --git a/SwiftCompilerSources/Sources/SIL/Instruction.swift b/SwiftCompilerSources/Sources/SIL/Instruction.swift index 5ecb2dbb0bb12..b0ab1eb8024d0 100644 --- a/SwiftCompilerSources/Sources/SIL/Instruction.swift +++ b/SwiftCompilerSources/Sources/SIL/Instruction.swift @@ -893,7 +893,16 @@ class ConvertFunctionInst : SingleValueInstruction, UnaryInstruction { } final public -class ThinToThickFunctionInst : SingleValueInstruction, UnaryInstruction {} +class ThinToThickFunctionInst : SingleValueInstruction, UnaryInstruction { + public var callee: Value { operand.value } + + public var referencedFunction: Function? { + if let fri = callee as? FunctionRefInst { + return fri.referencedFunction + } + return nil + } +} final public class ThickToObjCMetatypeInst : SingleValueInstruction {} final public class ObjCToThickMetatypeInst : SingleValueInstruction {} diff --git a/SwiftCompilerSources/Sources/SIL/SubstitutionMap.swift b/SwiftCompilerSources/Sources/SIL/SubstitutionMap.swift index 4d62f6c081b18..e2b600b529b95 100644 --- a/SwiftCompilerSources/Sources/SIL/SubstitutionMap.swift +++ b/SwiftCompilerSources/Sources/SIL/SubstitutionMap.swift @@ -25,6 +25,8 @@ public struct SubstitutionMap { public var isEmpty: Bool { bridged.isEmpty() } + public var hasAnySubstitutableParams: Bool { bridged.hasAnySubstitutableParams() } + public var replacementTypes: OptionalTypeArray { let types = BridgedTypeArray.fromReplacementTypes(bridged) return OptionalTypeArray(bridged: types) diff --git a/SwiftCompilerSources/Sources/SIL/Type.swift b/SwiftCompilerSources/Sources/SIL/Type.swift index a9cc836de73b9..1c75b8141d1de 100644 --- a/SwiftCompilerSources/Sources/SIL/Type.swift +++ b/SwiftCompilerSources/Sources/SIL/Type.swift @@ -58,6 +58,7 @@ public struct Type : CustomStringConvertible, NoReflectionChildren { public var isMetatype: Bool { bridged.isMetatype() } public var isNoEscapeFunction: Bool { 
bridged.isNoEscapeFunction() } public var containsNoEscapeFunction: Bool { bridged.containsNoEscapeFunction() } + public var isThickFunction: Bool { bridged.isThickFunction() } public var isAsyncFunction: Bool { bridged.isAsyncFunction() } public var canBeClass: BridgedType.TraitResult { bridged.canBeClass() } diff --git a/include/swift/AST/Builtins.def b/include/swift/AST/Builtins.def index f1b4b6b23e076..f0369e86c9635 100644 --- a/include/swift/AST/Builtins.def +++ b/include/swift/AST/Builtins.def @@ -175,6 +175,8 @@ BUILTIN_UNARY_OPERATION(FNeg, "fneg", "n", FloatOrVector) BUILTIN_UNARY_OPERATION(AssumeNonNegative, "assumeNonNegative", "n", Integer) // It only works on i1. BUILTIN_UNARY_OPERATION(AssumeTrue, "assume", "", Integer) +// Converts poison/undef to an indeterminate but valid value. +BUILTIN_UNARY_OPERATION(Freeze, "freeze", "n", IntegerOrVector) // Binary predicates have type (T,T) -> i1 or (T, T) -> Vector for scalars // and vectors, respectively. diff --git a/include/swift/AST/Decl.h b/include/swift/AST/Decl.h index 116bbb27c6f28..b23d0b1ccaff3 100644 --- a/include/swift/AST/Decl.h +++ b/include/swift/AST/Decl.h @@ -6010,10 +6010,6 @@ enum class PropertyWrapperSynthesizedPropertyKind { class VarDecl : public AbstractStorageDecl { friend class NamingPatternRequest; NamedPattern *NamingPattern = nullptr; - /// When the variable is declared in context of a for-in loop over the elements of - /// a parameter pack, this is the opened element environment of the pack expansion - /// to use as the variable's context generic environment. 
- GenericEnvironment *OpenedElementEnvironment = nullptr; public: enum class Introducer : uint8_t { @@ -6155,13 +6151,6 @@ class VarDecl : public AbstractStorageDecl { NamedPattern *getNamingPattern() const; void setNamingPattern(NamedPattern *Pat); - GenericEnvironment *getOpenedElementEnvironment() const { - return OpenedElementEnvironment; - } - void setOpenedElementEnvironment(GenericEnvironment *Env) { - OpenedElementEnvironment = Env; - } - /// If this is a VarDecl that does not belong to a CaseLabelItem's pattern, /// return this. Otherwise, this VarDecl must belong to a CaseStmt's /// CaseLabelItem. In that case, return the first case label item of the first diff --git a/include/swift/AST/DiagnosticsSema.def b/include/swift/AST/DiagnosticsSema.def index 902d9b617e2d5..299c426cf34c3 100644 --- a/include/swift/AST/DiagnosticsSema.def +++ b/include/swift/AST/DiagnosticsSema.def @@ -2044,6 +2044,8 @@ ERROR(expose_move_only_to_cxx,none, "noncopyable %kind0 can not yet be represented in C++", (ValueDecl *)) ERROR(expose_nested_type_to_cxx,none, "nested %kind0 can not yet be represented in C++", (ValueDecl *)) +ERROR(expose_macro_to_cxx,none, + "Swift macro can not yet be represented in C++", (ValueDecl *)) ERROR(unexposed_other_decl_in_cxx,none, "%kind0 is not yet exposed to C++", (ValueDecl *)) ERROR(unsupported_other_decl_in_cxx,none, diff --git a/include/swift/AST/PrintOptions.h b/include/swift/AST/PrintOptions.h index b0172d171261e..35bfee499e2b2 100644 --- a/include/swift/AST/PrintOptions.h +++ b/include/swift/AST/PrintOptions.h @@ -394,6 +394,9 @@ struct PrintOptions { /// Suppress printing of '~Proto' for suppressible, non-invertible protocols. bool SuppressConformanceSuppression = false; + /// Replace BitwiseCopyable with _BitwiseCopyable. + bool SuppressBitwiseCopyable = false; + /// List of attribute kinds that should not be printed. 
std::vector ExcludeAttrList = { DeclAttrKind::Transparent, DeclAttrKind::Effects, diff --git a/include/swift/AST/SILOptions.h b/include/swift/AST/SILOptions.h index 3fc8e500d9e5a..6d4e064324514 100644 --- a/include/swift/AST/SILOptions.h +++ b/include/swift/AST/SILOptions.h @@ -290,6 +290,10 @@ class SILOptions { /// Are we building in embedded Swift + -no-allocations? bool NoAllocations = false; + /// Should we use the experimental Swift based closure-specialization + /// optimization pass instead of the existing C++ one. + bool EnableExperimentalSwiftBasedClosureSpecialization = false; + /// The name of the file to which the backend should save optimization /// records. std::string OptRecordFile; diff --git a/include/swift/AST/SearchPathOptions.h b/include/swift/AST/SearchPathOptions.h index 166107440504a..eb5081033ea02 100644 --- a/include/swift/AST/SearchPathOptions.h +++ b/include/swift/AST/SearchPathOptions.h @@ -372,10 +372,10 @@ class SearchPathOptions { FrameworkSearchPaths.size() - 1); } - std::optional WinSDKRoot = std::nullopt; - std::optional WinSDKVersion = std::nullopt; - std::optional VCToolsRoot = std::nullopt; - std::optional VCToolsVersion = std::nullopt; + std::optional WinSDKRoot = std::nullopt; + std::optional WinSDKVersion = std::nullopt; + std::optional VCToolsRoot = std::nullopt; + std::optional VCToolsVersion = std::nullopt; public: StringRef getSDKPath() const { return SDKPath; } diff --git a/include/swift/AST/SimpleRequest.h b/include/swift/AST/SimpleRequest.h index d59c61618079e..6a25aaef7c451 100644 --- a/include/swift/AST/SimpleRequest.h +++ b/include/swift/AST/SimpleRequest.h @@ -100,6 +100,11 @@ template()>::type> SourceLoc maybeExtractNearestSourceLoc(const T& value) { + if constexpr (std::is_pointer_v) { + if (value == nullptr) { + return SourceLoc(); + } + } return extractNearestSourceLoc(value); } diff --git a/include/swift/AST/SwiftNameTranslation.h b/include/swift/AST/SwiftNameTranslation.h index 3793b769649e4..58667c5539dc9 
100644 --- a/include/swift/AST/SwiftNameTranslation.h +++ b/include/swift/AST/SwiftNameTranslation.h @@ -83,6 +83,7 @@ enum RepresentationError { UnrepresentableProtocol, UnrepresentableMoveOnly, UnrepresentableNested, + UnrepresentableMacro, }; /// Constructs a diagnostic that describes the given C++ representation error. diff --git a/include/swift/AST/Types.h b/include/swift/AST/Types.h index a2de36188520c..302a9ec1cb624 100644 --- a/include/swift/AST/Types.h +++ b/include/swift/AST/Types.h @@ -128,10 +128,9 @@ class RecursiveTypeProperties { /// This type expression contains a TypeVariableType. HasTypeVariable = 0x01, - /// This type expression contains a context-dependent archetype, either a - /// \c PrimaryArchetypeType, \c OpenedArchetypeType, - /// \c ElementArchetypeType, or \c PackArchetype. - HasArchetype = 0x02, + /// This type expression contains a PrimaryArchetypeType + /// or PackArchetypeType. + HasPrimaryArchetype = 0x02, /// This type expression contains a GenericTypeParamType. HasTypeParameter = 0x04, @@ -171,7 +170,7 @@ class RecursiveTypeProperties { /// This type contains a parameterized existential type \c any P. HasParameterizedExistential = 0x2000, - /// This type contains an ElementArchetype. + /// This type contains an ElementArchetypeType. HasElementArchetype = 0x4000, /// Whether the type is allocated in the constraint solver arena. This can @@ -205,9 +204,9 @@ class RecursiveTypeProperties { /// variable? bool hasTypeVariable() const { return Bits & HasTypeVariable; } - /// Does a type with these properties structurally contain a - /// context-dependent archetype (that is, a Primary- or OpenedArchetype)? - bool hasArchetype() const { return Bits & HasArchetype; } + /// Does a type with these properties structurally contain a primary + /// archetype? + bool hasPrimaryArchetype() const { return Bits & HasPrimaryArchetype; } /// Does a type with these properties structurally contain an /// archetype from an opaque type declaration? 
@@ -696,9 +695,21 @@ class alignas(1 << TypeAlignInBits) TypeBase return getRecursiveProperties().hasPlaceholder(); } - /// Determine whether the type involves a context-dependent archetype. + /// Determine whether the type involves a primary archetype. + bool hasPrimaryArchetype() const { + return getRecursiveProperties().hasPrimaryArchetype(); + } + + /// Whether the type contains a PackArchetypeType. + bool hasPackArchetype() const { + return getRecursiveProperties().hasPackArchetype(); + } + + /// Determine whether the type involves a primary, pack or local archetype. + /// + /// FIXME: Replace all remaining callers with a more precise check. bool hasArchetype() const { - return getRecursiveProperties().hasArchetype(); + return hasPrimaryArchetype() || hasLocalArchetype(); } /// Determine whether the type involves an opened existential archetype. @@ -727,11 +738,6 @@ class alignas(1 << TypeAlignInBits) TypeBase return getRecursiveProperties().hasPack(); } - /// Whether the type contains a PackArchetypeType. - bool hasPackArchetype() const { - return getRecursiveProperties().hasPackArchetype(); - } - /// Whether the type has any flavor of pack. bool hasAnyPack() const { return hasParameterPack() || hasPack() || hasPackArchetype(); diff --git a/include/swift/Basic/Features.def b/include/swift/Basic/Features.def index 79feed48fa1f9..d6e9c3945c19f 100644 --- a/include/swift/Basic/Features.def +++ b/include/swift/Basic/Features.def @@ -176,9 +176,9 @@ LANGUAGE_FEATURE(BuiltinCreateTask, 0, "Builtin.createTask and Builtin.createDis SUPPRESSIBLE_LANGUAGE_FEATURE(AssociatedTypeImplements, 0, "@_implements on associated types") LANGUAGE_FEATURE(BuiltinAddressOfRawLayout, 0, "Builtin.addressOfRawLayout") LANGUAGE_FEATURE(MoveOnlyPartialConsumption, 429, "Partial consumption of noncopyable values") -/// Enable bitwise-copyable feature. 
LANGUAGE_FEATURE(BitwiseCopyable, 426, "BitwiseCopyable protocol") SUPPRESSIBLE_LANGUAGE_FEATURE(ConformanceSuppression, 426, "Suppressible inferred conformances") +SUPPRESSIBLE_LANGUAGE_FEATURE(BitwiseCopyable2, 426, "BitwiseCopyable feature") SUPPRESSIBLE_LANGUAGE_FEATURE(NoncopyableGenerics, 427, "Noncopyable generics") // Swift 6 diff --git a/include/swift/Option/FrontendOptions.td b/include/swift/Option/FrontendOptions.td index d407ad29f4143..8384fb41cfef0 100644 --- a/include/swift/Option/FrontendOptions.td +++ b/include/swift/Option/FrontendOptions.td @@ -359,6 +359,10 @@ def enable_experimental_async_top_level : // HIDDEN FLAGS let Flags = [FrontendOption, NoDriverOption, HelpHidden] in { +def enable_experimental_swift_based_closure_specialization : + Flag<["-"], "experimental-swift-based-closure-specialization">, + HelpText<"Use the experimental Swift based closure-specialization optimization pass instead of the existing C++ one">; + def checked_async_objc_bridging : Joined<["-"], "checked-async-objc-bridging=">, HelpText<"Control whether checked continuations are used when bridging " "async calls from Swift to ObjC: 'on', 'off' ">; diff --git a/include/swift/RemoteInspection/BitMask.h b/include/swift/RemoteInspection/BitMask.h new file mode 100644 index 0000000000000..f64f2126996a1 --- /dev/null +++ b/include/swift/RemoteInspection/BitMask.h @@ -0,0 +1,339 @@ +//===--- Bitmask.h - Swift Bitmask type for Reflection ----*- C++ -*-===// +// +// This source file is part of the Swift.org open source project +// +// Copyright (c) 2014 - 2017 Apple Inc. 
and the Swift project authors +// Licensed under Apache License v2.0 with Runtime Library Exception +// +// See https://swift.org/LICENSE.txt for license information +// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors +// +//===----------------------------------------------------------------------===// +// +// Used by TypeLowering logic to compute masks for in-memory representations +// +//===----------------------------------------------------------------------===// + +#ifndef SWIFT_REFLECTION_BITMASK_H +#define SWIFT_REFLECTION_BITMASK_H + +#include "swift/Remote/MemoryReader.h" +#include + +namespace swift { +namespace reflection { + +// A variable-length bitmap used to track "spare bits" for general multi-payload +// enums. Note: These are not arbitrary-sized! They are always a multiple +// of 8 bits in size, and always aligned on an 8-bit boundary. +class BitMask { + static constexpr unsigned maxSize = 128 * 1024 * 1024; // 128MB + + unsigned size; // Size of mask _in bytes_ + uint8_t *mask; +public: + ~BitMask() { + free(mask); + } +private: + // Construct a bitmask of the appropriate number of bytes + // initialized to all bits set + BitMask(unsigned sizeInBytes = 0): size(sizeInBytes) { + assert(size < maxSize && "Trying to build a too-large bitmask"); + if (size > maxSize || size == 0) { + size = 0; + mask = nullptr; + return; + } + + mask = (uint8_t *)malloc(size); + + if (!mask) { + // Malloc might fail if size is large due to some bad data. Assert in + // asserts builds, and fail gracefully in non-asserts builds by + // constructing an empty BitMask. 
+ assert(false && "Failed to allocate BitMask"); + size = 0; + return; + } + + memset(mask, 0xff, size); + } + +public: + static BitMask zeroMask(unsigned sizeInBytes) { + auto mask = BitMask(sizeInBytes); + mask.makeZero(); + return mask; + } + + static BitMask oneMask(unsigned sizeInBytes) { + auto mask = BitMask(sizeInBytes); + return mask; + } + + BitMask(unsigned sizeInBytes, uint64_t sourceMask): size(sizeInBytes) { + mask = (uint8_t *)calloc(1, sizeInBytes); + memcpy(mask, &sourceMask, sizeInBytes); + } + + // Construct a bitmask of the appropriate number of bytes + // initialized with bits from the specified buffer + BitMask(unsigned sizeInBytes, const uint8_t *initialValue, + unsigned initialValueBytes, unsigned offset) + : size(sizeInBytes) { + // Gracefully fail by constructing an empty mask if we exceed the size + // limit. + if (size > maxSize) { + size = 0; + mask = nullptr; + return; + } + + // Bad data could cause the initial value location to be off the end of our + // size. If initialValueBytes + offset is beyond sizeInBytes (or overflows), + // assert in asserts builds, and fail gracefully in non-asserts builds by + // constructing an empty BitMask. + bool overflowed = false; + unsigned initialValueEnd = + llvm::SaturatingAdd(initialValueBytes, offset, &overflowed); + if (overflowed) { + assert(false && "initialValueBytes + offset overflowed"); + size = 0; + mask = nullptr; + return; + } + assert(initialValueEnd <= sizeInBytes); + if (initialValueEnd > size) { + assert(false && "initialValueBytes + offset is greater than size"); + size = 0; + mask = nullptr; + return; + } + + mask = (uint8_t *)calloc(1, size); + + if (!mask) { + // Malloc might fail if size is large due to some bad data. Assert in + // asserts builds, and fail gracefully in non-asserts builds by + // constructing an empty BitMask. 
+ assert(false && "Failed to allocate BitMask"); + size = 0; + return; + } + + memcpy(mask + offset, initialValue, initialValueBytes); + } + // Move constructor moves ownership and zeros the src + BitMask(BitMask&& src) noexcept: size(src.size), mask(std::move(src.mask)) { + src.size = 0; + src.mask = nullptr; + } + // Copy constructor makes a copy of the mask storage + BitMask(const BitMask& src) noexcept: size(src.size), mask(nullptr) { + mask = (uint8_t *)malloc(size); + memcpy(mask, src.mask, size); + } + + std::string str() const { + std::ostringstream buff; + buff << size << ":0x"; + for (unsigned i = 0; i < size; i++) { + buff << std::hex << ((mask[i] >> 4) & 0x0f) << (mask[i] & 0x0f); + } + return buff.str(); + } + + bool operator==(const BitMask& rhs) const { + // The two masks may be of different sizes. + // The common prefix must be identical. + size_t common = std::min(size, rhs.size); + if (memcmp(mask, rhs.mask, common) != 0) + return false; + // The remainder of the longer mask must be + // all zero bits. 
+ unsigned mustBeZeroSize = std::max(size, rhs.size) - common; + uint8_t *mustBeZero; + if (size < rhs.size) { + mustBeZero = rhs.mask + size; + } else if (size > rhs.size) { + mustBeZero = mask + rhs.size; + } + for (unsigned i = 0; i < mustBeZeroSize; ++i) { + if (mustBeZero[i] != 0) { + return false; + } + } + return true; + } + + bool operator!=(const BitMask& rhs) const { + return !(*this == rhs); + } + + bool isNonZero() const { return !isZero(); } + + bool isZero() const { + for (unsigned i = 0; i < size; ++i) { + if (mask[i] != 0) { + return false; + } + } + return true; + } + + void makeZero() { + memset(mask, 0, size * sizeof(mask[0])); + } + + void complement() { + for (unsigned i = 0; i < size; ++i) { + mask[i] = ~mask[i]; + } + } + + int countSetBits() const { + static const int counter[] = + {0, 1, 1, 2, 1, 2, 2, 3, 1, 2, 2, 3, 2, 3, 3, 4}; + int bits = 0; + for (unsigned i = 0; i < size; ++i) { + bits += counter[mask[i] >> 4] + counter[mask[i] & 15]; + } + return bits; + } + + int countZeroBits() const { + return (size * 8) - countSetBits(); + } + + // Treat the provided value as a mask, `and` it with + // the part of the mask at the provided byte offset. + // Bits outside the specified area are unchanged. + template + void andMask(IntegerType value, unsigned byteOffset) { + andMask((void *)&value, sizeof(value), byteOffset); + } + + // As above, but using the provided bitmask instead + // of an integer. + void andMask(BitMask mask, unsigned offset) { + andMask(mask.mask, mask.size, offset); + } + + // As above, but using the complement of the + // provided mask. + void andNotMask(BitMask mask, unsigned offset) { + if (offset < size) { + andNotMask(mask.mask, mask.size, offset); + } + } + + // Zero all bits except for the `n` most significant ones. 
+ void keepOnlyMostSignificantBits(unsigned n) { + if (size < 1) { + return; + } +#if defined(__BIG_ENDIAN__) + assert(false && "Big endian not supported for readMaskedInteger"); +#else + unsigned count = 0; + unsigned i = size; + while (i > 0) { + i -= 1; + if (count < n) { + for (int b = 128; b > 0; b >>= 1) { + if (count >= n) { + mask[i] &= ~b; + } else if ((mask[i] & b) != 0) { + ++count; + } + } + } else { + mask[i] = 0; + } + } +#endif + } + + void keepOnlyLeastSignificantBytes(unsigned n) { + if (size > n) { + size = n; + } + } + + unsigned numBits() const { + return size * 8; + } + + unsigned numSetBits() const { + unsigned count = 0; + for (unsigned i = 0; i < size; ++i) { + if (mask[i] != 0) { + for (unsigned b = 1; b < 256; b <<= 1) { + if ((mask[i] & b) != 0) { + ++count; + } + } + } + } + return count; + } + + // Read a mask-sized area from the target and collect + // the masked bits into a single integer. + template + bool readMaskedInteger(remote::MemoryReader &reader, + remote::RemoteAddress address, + IntegerType *dest) const { + auto data = reader.readBytes(address, size); + if (!data) { + return false; + } +#if defined(__BIG_ENDIAN__) + assert(false && "Big endian not supported for readMaskedInteger"); +#else + IntegerType result = 0; + IntegerType resultBit = 1; // Start from least-significant bit + auto bytes = static_cast(data.get()); + for (unsigned i = 0; i < size; ++i) { + for (unsigned b = 1; b < 256; b <<= 1) { + if ((mask[i] & b) != 0) { + if ((bytes[i] & b) != 0) { + result |= resultBit; + } + resultBit <<= 1; + } + } + } + *dest = result; + return true; +#endif + } + +private: + void andMask(void *maskData, unsigned len, unsigned offset) { + if (offset < size) { + unsigned common = std::min(len, size - offset); + uint8_t *maskBytes = (uint8_t *)maskData; + for (unsigned i = 0; i < common; ++i) { + mask[i + offset] &= maskBytes[i]; + } + } + } + + void andNotMask(void *maskData, unsigned len, unsigned offset) { + assert(offset < size); 
+ if (offset < size) { + unsigned common = std::min(len, size - offset); + uint8_t *maskBytes = (uint8_t *)maskData; + for (unsigned i = 0; i < common; ++i) { + mask[i + offset] &= ~maskBytes[i]; + } + } + } +}; + +} // namespace reflection +} // namespace swift + +#endif diff --git a/include/swift/RemoteInspection/TypeLowering.h b/include/swift/RemoteInspection/TypeLowering.h index ffe5b15bbdeea..0fdaa2e5dc6c1 100644 --- a/include/swift/RemoteInspection/TypeLowering.h +++ b/include/swift/RemoteInspection/TypeLowering.h @@ -23,6 +23,7 @@ #include "llvm/Support/Casting.h" #include "swift/Remote/MetadataReader.h" #include "swift/Remote/TypeInfoProvider.h" +#include "swift/RemoteInspection/BitMask.h" #include "swift/RemoteInspection/DescriptorFinder.h" #include @@ -34,6 +35,7 @@ using llvm::cast; using llvm::dyn_cast; using remote::RemoteRef; +class TypeConverter; class TypeRef; class TypeRefBuilder; class BuiltinTypeDescriptor; @@ -158,6 +160,11 @@ class TypeInfo { return false; } + // Calculate and return the spare bit mask for this type + virtual BitMask getSpareBits(TypeConverter &TC, bool &hasAddrOnly) const { + return BitMask::zeroMask(getSize()); + } + virtual ~TypeInfo() { } }; @@ -195,6 +202,8 @@ class BuiltinTypeInfo : public TypeInfo { remote::RemoteAddress address, int *extraInhabitantIndex) const override; + BitMask getSpareBits(TypeConverter &TC, bool &hasAddrOnly) const override; + static bool classof(const TypeInfo *TI) { return TI->getKind() == TypeInfoKind::Builtin; } @@ -222,6 +231,8 @@ class RecordTypeInfo : public TypeInfo { remote::RemoteAddress address, int *index) const override; + BitMask getSpareBits(TypeConverter &TC, bool &hasAddrOnly) const override; + static bool classof(const TypeInfo *TI) { return TI->getKind() == TypeInfoKind::Record; } @@ -330,6 +341,8 @@ class ReferenceTypeInfo : public TypeInfo { return reader.readHeapObjectExtraInhabitantIndex(address, extraInhabitantIndex); } + BitMask getSpareBits(TypeConverter &TC, bool 
&hasAddrOnly) const override; + static bool classof(const TypeInfo *TI) { return TI->getKind() == TypeInfoKind::Reference; } diff --git a/include/swift/Runtime/EnvironmentVariables.h b/include/swift/Runtime/EnvironmentVariables.h index 5f17f116828d1..48f7b2b37283e 100644 --- a/include/swift/Runtime/EnvironmentVariables.h +++ b/include/swift/Runtime/EnvironmentVariables.h @@ -53,6 +53,10 @@ SWIFT_RUNTIME_STDLIB_SPI bool concurrencyEnableJobDispatchIntegration(); // Concurrency library can call. SWIFT_RUNTIME_STDLIB_SPI bool concurrencyValidateUncheckedContinuations(); +// Wrapper around SWIFT_IS_CURRENT_EXECUTOR_LEGACY_MODE_OVERRIDE that the +// Concurrency library can call. +SWIFT_RUNTIME_STDLIB_SPI const char *concurrencyIsCurrentExecutorLegacyModeOverride(); + } // end namespace environment } // end namespace runtime } // end namespace swift diff --git a/include/swift/SIL/OSSALifetimeCompletion.h b/include/swift/SIL/OSSALifetimeCompletion.h index dfd33dbf2422a..2320c46b6b9e5 100644 --- a/include/swift/SIL/OSSALifetimeCompletion.h +++ b/include/swift/SIL/OSSALifetimeCompletion.h @@ -57,24 +57,24 @@ class OSSALifetimeCompletion { // Availability: "As late as possible." Consume the value in the last blocks // beyond the non-consuming uses in which the value has been // consumed on no incoming paths. + // AvailabilityWithLeaks: "As late as possible or later." Consume the value + // in the last blocks beyond the non-consuming uses in + // which the value has been consumed on no incoming + // paths, unless that block's terminator isn't an + // unreachable, in which case, don't consume it there. + // + // This boundary works around bugs where SILGen emits + // illegal OSSA lifetimes. 
struct Boundary { enum Value : uint8_t { Liveness, Availability, + AvailabilityWithLeaks, }; Value value; Boundary(Value value) : value(value){}; operator Value() const { return value; } - - static std::optional getForcingLiveness(bool force) { - if (!force) - return {}; - return {Liveness}; - } - - bool isLiveness() { return value == Liveness; } - bool isAvailable() { return !isLiveness(); } }; /// Insert a lifetime-ending instruction on every path to complete the OSSA @@ -95,9 +95,7 @@ class OSSALifetimeCompletion { /// lifetime. /// /// TODO: We also need to complete scoped addresses (e.g. store_borrow)! - LifetimeCompletion - completeOSSALifetime(SILValue value, - std::optional maybeBoundary = std::nullopt) { + LifetimeCompletion completeOSSALifetime(SILValue value, Boundary boundary) { if (value->getOwnershipKind() == OwnershipKind::None) return LifetimeCompletion::NoLifetime; @@ -112,16 +110,19 @@ class OSSALifetimeCompletion { if (!completedValues.insert(value)) return LifetimeCompletion::AlreadyComplete; - Boundary boundary = maybeBoundary.value_or( - value->isLexical() ? Boundary::Availability : Boundary::Liveness); - return analyzeAndUpdateLifetime(value, boundary) ? 
LifetimeCompletion::WasCompleted : LifetimeCompletion::AlreadyComplete; } + enum AllowLeaks_t : bool { + AllowLeaks = true, + DoNotAllowLeaks = false, + }; + static void visitUnreachableLifetimeEnds( - SILValue value, const SSAPrunedLiveness &liveness, + SILValue value, AllowLeaks_t allowLeaks, + const SSAPrunedLiveness &liveness, llvm::function_ref visit); protected: @@ -170,6 +171,22 @@ class UnreachableLifetimeCompletion { bool completeLifetimes(); }; +inline llvm::raw_ostream & +operator<<(llvm::raw_ostream &OS, OSSALifetimeCompletion::Boundary boundary) { + switch (boundary) { + case OSSALifetimeCompletion::Boundary::Liveness: + OS << "liveness"; + break; + case OSSALifetimeCompletion::Boundary::Availability: + OS << "availability"; + break; + case OSSALifetimeCompletion::Boundary::AvailabilityWithLeaks: + OS << "availability_with_leaks"; + break; + } + return OS; +} + } // namespace swift #endif diff --git a/include/swift/SIL/SILBridging.h b/include/swift/SIL/SILBridging.h index abeabe161563c..1b35125464a80 100644 --- a/include/swift/SIL/SILBridging.h +++ b/include/swift/SIL/SILBridging.h @@ -347,6 +347,7 @@ struct BridgedType { BRIDGED_INLINE bool isMetatype() const; BRIDGED_INLINE bool isNoEscapeFunction() const; BRIDGED_INLINE bool containsNoEscapeFunction() const; + BRIDGED_INLINE bool isThickFunction() const; BRIDGED_INLINE bool isAsyncFunction() const; BRIDGED_INLINE bool isEmpty(BridgedFunction f) const; BRIDGED_INLINE TraitResult canBeClass() const; @@ -373,7 +374,8 @@ struct BridgedType { BRIDGED_INLINE bool isEndCaseIterator(EnumElementIterator i) const; SWIFT_IMPORT_UNSAFE BRIDGED_INLINE BridgedType getEnumCasePayload(EnumElementIterator i, BridgedFunction f) const; BRIDGED_INLINE SwiftInt getNumTupleElements() const; - SWIFT_IMPORT_UNSAFE BRIDGED_INLINE BridgedType getTupleElementType(SwiftInt idx) const; + SWIFT_IMPORT_UNSAFE BRIDGED_INLINE BridgedType + getTupleElementType(SwiftInt idx) const; SWIFT_IMPORT_UNSAFE BRIDGED_INLINE BridgedType 
getFunctionTypeWithNoEscape(bool withNoEscape) const; }; @@ -547,6 +549,7 @@ struct BridgedFunction { BRIDGED_INLINE bool isAvailableExternally() const; BRIDGED_INLINE bool isTransparent() const; BRIDGED_INLINE bool isAsync() const; + BRIDGED_INLINE bool isReabstractionThunk() const; BRIDGED_INLINE bool isGlobalInitFunction() const; BRIDGED_INLINE bool isGlobalInitOnceFunction() const; BRIDGED_INLINE bool isDestructor() const; @@ -554,6 +557,7 @@ struct BridgedFunction { BRIDGED_INLINE bool hasSemanticsAttr(BridgedStringRef attrName) const; BRIDGED_INLINE bool hasUnsafeNonEscapableResult() const; BRIDGED_INLINE bool hasResultDependsOnSelf() const; + bool mayBindDynamicSelf() const; BRIDGED_INLINE EffectsKind getEffectAttribute() const; BRIDGED_INLINE PerformanceConstraints getPerformanceConstraints() const; BRIDGED_INLINE InlineStrategy getInlineStrategy() const; @@ -566,6 +570,9 @@ struct BridgedFunction { BRIDGED_INLINE void setIsPerformanceConstraint(bool isPerfConstraint) const; BRIDGED_INLINE bool isResilientNominalDecl(BridgedNominalTypeDecl decl) const; BRIDGED_INLINE BridgedType getLoweredType(BridgedASTType type) const; + bool isTrapNoReturn() const; + bool isAutodiffVJP() const; + SwiftInt specializationLevel() const; enum class ParseEffectsMode { argumentEffectsFromSource, @@ -658,6 +665,7 @@ struct BridgedSubstitutionMap { BRIDGED_INLINE BridgedSubstitutionMap(); BRIDGED_INLINE bool isEmpty() const; + BRIDGED_INLINE bool hasAnySubstitutableParams() const; }; struct BridgedTypeArray { @@ -917,6 +925,7 @@ struct BridgedInstruction { SWIFT_IMPORT_UNSAFE BRIDGED_INLINE BridgedSubstitutionMap ApplySite_getSubstitutionMap() const; SWIFT_IMPORT_UNSAFE BRIDGED_INLINE BridgedASTType ApplySite_getSubstitutedCalleeType() const; BRIDGED_INLINE SwiftInt ApplySite_getNumArguments() const; + BRIDGED_INLINE bool ApplySite_isCalleeNoReturn() const; BRIDGED_INLINE SwiftInt FullApplySite_numIndirectResultArguments() const; // 
=========================================================================// diff --git a/include/swift/SIL/SILBridgingImpl.h b/include/swift/SIL/SILBridgingImpl.h index 63a9564f9f6ea..3a44a0f7aed28 100644 --- a/include/swift/SIL/SILBridgingImpl.h +++ b/include/swift/SIL/SILBridgingImpl.h @@ -262,6 +262,10 @@ bool BridgedType::containsNoEscapeFunction() const { return unbridged().containsNoEscapeFunction(); } +bool BridgedType::isThickFunction() const { + return unbridged().isThickFunction(); +} + bool BridgedType::isAsyncFunction() const { return unbridged().isAsyncFunction(); } @@ -530,6 +534,10 @@ bool BridgedSubstitutionMap::isEmpty() const { return unbridged().empty(); } +bool BridgedSubstitutionMap::hasAnySubstitutableParams() const { + return unbridged().hasAnySubstitutableParams(); +} + //===----------------------------------------------------------------------===// // BridgedLocation //===----------------------------------------------------------------------===// @@ -635,6 +643,10 @@ bool BridgedFunction::isAsync() const { return getFunction()->isAsync(); } +bool BridgedFunction::isReabstractionThunk() const { + return getFunction()->isThunk() == swift::IsReabstractionThunk; +} + bool BridgedFunction::isGlobalInitFunction() const { return getFunction()->isGlobalInit(); } @@ -1283,6 +1295,10 @@ SwiftInt BridgedInstruction::ApplySite_getNumArguments() const { return swift::ApplySite(unbridged()).getNumArguments(); } +bool BridgedInstruction::ApplySite_isCalleeNoReturn() const { + return swift::ApplySite(unbridged()).isCalleeNoReturn(); +} + SwiftInt BridgedInstruction::FullApplySite_numIndirectResultArguments() const { auto fas = swift::FullApplySite(unbridged()); return fas.getNumIndirectSILResults(); diff --git a/include/swift/SIL/SILType.h b/include/swift/SIL/SILType.h index 9f4f8b6eabe30..65d09f3644f06 100644 --- a/include/swift/SIL/SILType.h +++ b/include/swift/SIL/SILType.h @@ -461,7 +461,7 @@ class SILType { } /// Returns true if the referenced type is 
expressed in terms of one - /// or more opened existential types. + /// or more opened existential archetypes. bool hasOpenedExistential() const { return getASTType()->hasOpenedExistential(); } @@ -470,6 +470,12 @@ class SILType { return getASTType()->canBeClass(); } + /// Returns true if the referenced type is expressed in terms of one + /// or more element archetypes. + bool hasElementArchetype() const { + return getASTType()->hasElementArchetype(); + } + /// Returns true if the referenced type is expressed in terms of one /// or more local archetypes. bool hasLocalArchetype() const { @@ -555,6 +561,13 @@ class SILType { // Handle whatever AST types are known to hold functions. Namely tuples. return ty->isNoEscape(); } + + bool isThickFunction() const { + if (auto *fTy = getASTType()->getAs()) { + return fTy->getRepresentation() == SILFunctionType::Representation::Thick; + } + return false; + } bool isAsyncFunction() const { if (auto *fTy = getASTType()->getAs()) { @@ -563,9 +576,14 @@ class SILType { return false; } - /// True if the type involves any archetypes. + /// True if the type involves any primary or local archetypes. bool hasArchetype() const { return getASTType()->hasArchetype(); } + /// True if the type involves any primary archetypes. + bool hasPrimaryArchetype() const { + return getASTType()->hasPrimaryArchetype(); + } + /// True if the type involves any opaque archetypes. bool hasOpaqueArchetype() const { return getASTType()->hasOpaqueArchetype(); diff --git a/include/swift/SILOptimizer/IPO/ClosureSpecializer.h b/include/swift/SILOptimizer/IPO/ClosureSpecializer.h new file mode 100644 index 0000000000000..30c831d10fa19 --- /dev/null +++ b/include/swift/SILOptimizer/IPO/ClosureSpecializer.h @@ -0,0 +1,34 @@ +//===-------------------------- ClosureSpecializer.h ------------------------------===// +// +// This source file is part of the Swift.org open source project +// +// Copyright (c) 2014 - 2019 Apple Inc. 
and the Swift project authors +// Licensed under Apache License v2.0 with Runtime Library Exception +// +// See https://swift.org/LICENSE.txt for license information +// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors +// +//===-----------------------------------------------------------------------------===// +#ifndef SWIFT_SILOPTIMIZER_CLOSURESPECIALIZER_H +#define SWIFT_SILOPTIMIZER_CLOSURESPECIALIZER_H + +#include "swift/SIL/SILFunction.h" + +namespace swift { + +/// If \p function is a function-signature specialization for a constant- +/// propagated function argument, returns 1. +/// If \p function is a specialization of such a specialization, returns 2. +/// And so on. +int getSpecializationLevel(SILFunction *f); + +enum class AutoDiffFunctionComponent : char { JVP = 'f', VJP = 'r' }; + +/// Returns true if the function is the JVP or the VJP corresponding to +/// a differentiable function. +bool isDifferentiableFuncComponent( + SILFunction *f, + AutoDiffFunctionComponent component = AutoDiffFunctionComponent::VJP); + +} // namespace swift +#endif \ No newline at end of file diff --git a/include/swift/SILOptimizer/OptimizerBridging.h b/include/swift/SILOptimizer/OptimizerBridging.h index 2d2b44276e7e4..0cbca79e57e63 100644 --- a/include/swift/SILOptimizer/OptimizerBridging.h +++ b/include/swift/SILOptimizer/OptimizerBridging.h @@ -239,6 +239,9 @@ struct BridgedPassContext { SWIFT_IMPORT_UNSAFE BridgedOwnedString mangleWithDeadArgs(const SwiftInt * _Nullable deadArgs, SwiftInt numDeadArgs, BridgedFunction function) const; + SWIFT_IMPORT_UNSAFE BridgedOwnedString mangleWithClosureArgs(BridgedValueArray closureArgs, + BridgedArrayRef closureArgIndices, + BridgedFunction applySiteCallee) const; SWIFT_IMPORT_UNSAFE BridgedGlobalVar createGlobalVariable(BridgedStringRef name, BridgedType type, bool isPrivate) const; diff --git a/include/swift/SILOptimizer/OptimizerBridgingImpl.h b/include/swift/SILOptimizer/OptimizerBridgingImpl.h index 
c79133a6c5447..1dbe3fe42ea02 100644 --- a/include/swift/SILOptimizer/OptimizerBridgingImpl.h +++ b/include/swift/SILOptimizer/OptimizerBridgingImpl.h @@ -19,11 +19,12 @@ #ifndef SWIFT_SILOPTIMIZER_OPTIMIZERBRIDGING_IMPL_H #define SWIFT_SILOPTIMIZER_OPTIMIZERBRIDGING_IMPL_H -#include "swift/SILOptimizer/OptimizerBridging.h" +#include "swift/Demangling/Demangle.h" #include "swift/SILOptimizer/Analysis/AliasAnalysis.h" #include "swift/SILOptimizer/Analysis/BasicCalleeAnalysis.h" #include "swift/SILOptimizer/Analysis/DeadEndBlocksAnalysis.h" #include "swift/SILOptimizer/Analysis/DominanceAnalysis.h" +#include "swift/SILOptimizer/OptimizerBridging.h" #include "swift/SILOptimizer/PassManager/PassManager.h" #include "swift/SILOptimizer/Utils/InstOptUtils.h" diff --git a/include/swift/SILOptimizer/PassManager/Passes.def b/include/swift/SILOptimizer/PassManager/Passes.def index 9216066dbcef9..3b066f21e9405 100644 --- a/include/swift/SILOptimizer/PassManager/Passes.def +++ b/include/swift/SILOptimizer/PassManager/Passes.def @@ -146,6 +146,13 @@ PASS(CapturePropagation, "capture-prop", "Captured Constant Propagation") PASS(ClosureSpecializer, "closure-specialize", "Closure Specialization on Constant Function Arguments") + +// NOTE - ExperimentalSwiftBasedClosureSpecialization and AutodiffClosureSpecialization are a WIP +SWIFT_FUNCTION_PASS(ExperimentalSwiftBasedClosureSpecialization, "experimental-swift-based-closure-specialization", + "General closure-specialization pass written in Swift") +SWIFT_FUNCTION_PASS(AutodiffClosureSpecialization, "autodiff-closure-specialization", + "Autodiff specific closure-specialization pass") + PASS(ClosureLifetimeFixup, "closure-lifetime-fixup", "Closure Lifetime Fixup") PASS(CodeSinking, "code-sinking", diff --git a/include/swift/SILOptimizer/Utils/CFGOptUtils.h b/include/swift/SILOptimizer/Utils/CFGOptUtils.h index 0296b86dece86..6022181e4955c 100644 --- a/include/swift/SILOptimizer/Utils/CFGOptUtils.h +++ 
b/include/swift/SILOptimizer/Utils/CFGOptUtils.h @@ -190,6 +190,8 @@ bool mergeBasicBlockWithSuccessor(SILBasicBlock *bb, DominanceInfo *domInfo, /// quadratic. bool mergeBasicBlocks(SILFunction *f); +bool isTrapNoReturnFunction(SILFunction *f); + /// Return true if we conservatively find all bb's that are non-failure exit /// basic blocks and place them in \p bbs. If we find something we don't /// understand, bail. diff --git a/lib/AST/ASTPrinter.cpp b/lib/AST/ASTPrinter.cpp index 5a59e0fd99eea..eeffb89be5874 100644 --- a/lib/AST/ASTPrinter.cpp +++ b/lib/AST/ASTPrinter.cpp @@ -3154,6 +3154,15 @@ suppressingFeatureConformanceSuppression(PrintOptions &options, options.ExcludeAttrList.resize(originalExcludeAttrCount); } +static void +suppressingFeatureBitwiseCopyable2(PrintOptions &options, + llvm::function_ref action) { + unsigned originalExcludeAttrCount = options.ExcludeAttrList.size(); + llvm::SaveAndRestore scope(options.SuppressBitwiseCopyable, true); + action(); + options.ExcludeAttrList.resize(originalExcludeAttrCount); +} + /// Suppress the printing of a particular feature. 
static void suppressingFeature(PrintOptions &options, Feature feature, llvm::function_ref action) { @@ -3436,6 +3445,14 @@ void PrintAST::visitOpaqueTypeDecl(OpaqueTypeDecl *decl) { } void PrintAST::visitTypeAliasDecl(TypeAliasDecl *decl) { + auto name = decl->getName(); + bool suppressingBitwiseCopyable = + Options.SuppressBitwiseCopyable && + decl->getModuleContext()->isStdlibModule() && + (decl->getNameStr() == "_BitwiseCopyable"); + if (suppressingBitwiseCopyable) { + name = decl->getASTContext().getIdentifier("BitwiseCopyable"); + } printDocumentationComment(decl); printAttributes(decl); printAccess(decl); @@ -3443,10 +3460,14 @@ void PrintAST::visitTypeAliasDecl(TypeAliasDecl *decl) { printContextIfNeeded(decl); recordDeclLoc(decl, [&]{ - Printer.printName(decl->getName(), getTypeMemberPrintNameContext(decl)); + Printer.printName(name, getTypeMemberPrintNameContext(decl)); }, [&]{ // Signature printGenericDeclGenericParams(decl); }); + if (suppressingBitwiseCopyable) { + Printer << " = Swift._BitwiseCopyable"; + return; + } bool ShouldPrint = true; Type Ty = decl->getUnderlyingType(); @@ -3627,6 +3648,12 @@ void PrintAST::printPrimaryAssociatedTypes(ProtocolDecl *decl) { } void PrintAST::visitProtocolDecl(ProtocolDecl *decl) { + auto name = decl->getName(); + if (Options.SuppressBitwiseCopyable && + decl->getModuleContext()->isStdlibModule() && + (decl->getNameStr() == "BitwiseCopyable")) { + name = decl->getASTContext().getIdentifier("_BitwiseCopyable"); + } printDocumentationComment(decl); printAttributes(decl); printAccess(decl); @@ -3640,7 +3667,7 @@ void PrintAST::visitProtocolDecl(ProtocolDecl *decl) { printContextIfNeeded(decl); recordDeclLoc(decl, [&]{ - Printer.printName(decl->getName()); + Printer.printName(name); }); if (Options.PrintPrimaryAssociatedTypes) { diff --git a/lib/AST/Decl.cpp b/lib/AST/Decl.cpp index 46263924ce01c..3a9d374a35bde 100644 --- a/lib/AST/Decl.cpp +++ b/lib/AST/Decl.cpp @@ -5423,8 +5423,8 @@ Type 
TypeAliasDecl::getUnderlyingType() const { void TypeAliasDecl::setUnderlyingType(Type underlying) { // lldb creates global typealiases containing archetypes // sometimes... - if (underlying->hasArchetype() && isGenericContext()) - underlying = underlying->mapTypeOutOfContext(); + assert(!underlying->hasArchetype() || !isGenericContext()); + getASTContext().evaluator.cacheOutput( StructuralTypeRequest{const_cast(this)}, std::move(underlying)); @@ -7260,12 +7260,6 @@ VarDecl::VarDecl(DeclKind kind, bool isStatic, VarDecl::Introducer introducer, } Type VarDecl::getTypeInContext() const { - // If the variable is declared in context of a for-in loop over the elements - // of a parameter pack, its interface type must be mapped into context using - // the opened element environment of the pack expansion. - if (auto *env = getOpenedElementEnvironment()) - return GenericEnvironment::mapTypeIntoContext(env, getInterfaceType()); - return getDeclContext()->mapTypeIntoContext(getInterfaceType()); } @@ -8144,7 +8138,7 @@ Type VarDecl::getPropertyWrapperInitValueInterfaceType() const { return Type(); Type valueInterfaceTy = initInfo.getWrappedValuePlaceholder()->getType(); - if (valueInterfaceTy->hasArchetype()) + if (valueInterfaceTy->hasPrimaryArchetype()) valueInterfaceTy = valueInterfaceTy->mapTypeOutOfContext(); return valueInterfaceTy; diff --git a/lib/AST/DeclContext.cpp b/lib/AST/DeclContext.cpp index 7fe964fabcdec..912bf50db0ad3 100644 --- a/lib/AST/DeclContext.cpp +++ b/lib/AST/DeclContext.cpp @@ -1418,6 +1418,8 @@ bool DeclContext::isAsyncContext() const { } SourceLoc swift::extractNearestSourceLoc(const DeclContext *dc) { + assert(dc && "Expected non-null DeclContext!"); + switch (dc->getContextKind()) { case DeclContextKind::Package: case DeclContextKind::Module: diff --git a/lib/AST/FeatureSet.cpp b/lib/AST/FeatureSet.cpp index 6675e6382673e..06f406b1f0092 100644 --- a/lib/AST/FeatureSet.cpp +++ b/lib/AST/FeatureSet.cpp @@ -508,7 +508,7 @@ 
UNINTERESTING_FEATURE(SuppressedAssociatedTypes) static bool disallowFeatureSuppression(StringRef featureName, Decl *decl); -static bool allBoundTypesAreCopyable(Type type, DeclContext *context) { +static bool allSubstTypesAreCopyable(Type type, DeclContext *context) { assert(type->getAnyNominal()); auto bgt = type->getAs(); if (!bgt) @@ -549,11 +549,14 @@ static bool usesFeatureNoncopyableGenerics(Decl *decl) { return false; // If we only _refer_ to a TypeDecl that uses NoncopyableGenerics, - // and a suppressed version of that decl is in the interface, then we're - // only referring to the un-suppressed version if any of the bound types - // are noncopyable. (rdar://127389991) + // and a suppressed version of that decl is in the interface, and + // if we only substitute Copyable types for the generic parameters, + // then we can say this decl is not "using" the feature such that + // a feature guard is required. In other words, this reference to the + // type will always be valid, regardless of whether the feature is + // enabled or not. 
(rdar://127389991) if (!disallowFeatureSuppression("NoncopyableGenerics", nominalDecl) - && allBoundTypesAreCopyable(type, context)) { + && allSubstTypesAreCopyable(type, context)) { return false; } @@ -724,6 +727,19 @@ static bool usesFeatureConformanceSuppression(Decl *decl) { return false; } +static bool usesFeatureBitwiseCopyable2(Decl *decl) { + if (!decl->getModuleContext()->isStdlibModule()) { + return false; + } + if (auto *proto = dyn_cast(decl)) { + return proto->getNameStr() == "BitwiseCopyable"; + } + if (auto *typealias = dyn_cast(decl)) { + return typealias->getNameStr() == "_BitwiseCopyable"; + } + return false; +} + static bool usesFeatureIsolatedAny(Decl *decl) { return usesTypeMatching(decl, [](Type type) { if (auto fnType = type->getAs()) { diff --git a/lib/AST/GenericEnvironment.cpp b/lib/AST/GenericEnvironment.cpp index b08608849380f..5981d4788b9a0 100644 --- a/lib/AST/GenericEnvironment.cpp +++ b/lib/AST/GenericEnvironment.cpp @@ -404,12 +404,11 @@ GenericEnvironment::maybeApplyOuterContextSubstitutions(Type type) const { Type GenericEnvironment::mapTypeIntoContext(GenericEnvironment *env, Type type) { - assert((!type->hasArchetype() || type->hasLocalArchetype()) && - "already have a contextual type"); - assert((env || !type->hasTypeParameter()) && - "no generic environment provided for type with type parameters"); + assert(!type->hasPrimaryArchetype() && "already have a contextual type"); if (!env) { + assert(!type->hasTypeParameter() && + "no generic environment provided for type with type parameters"); return type; } @@ -417,15 +416,12 @@ Type GenericEnvironment::mapTypeIntoContext(GenericEnvironment *env, } Type MapTypeOutOfContext::operator()(SubstitutableType *type) const { - auto archetype = cast(type); - if (isa(archetype->getRoot())) - return Type(); - - // Leave opened archetypes alone; they're handled contextually. 
- if (isa(archetype)) - return Type(type); + if (isa(type) || + isa(type)) { + return cast(type)->getInterfaceType(); + } - return archetype->getInterfaceType(); + return type; } Type TypeBase::mapTypeOutOfContext() { @@ -632,8 +628,7 @@ Type QueryInterfaceTypeSubstitutions::operator()(SubstitutableType *type) const{ Type GenericEnvironment::mapTypeIntoContext( Type type, LookupConformanceFn lookupConformance) const { - assert((!type->hasArchetype() || type->hasLocalArchetype()) && - "already have a contextual type"); + assert(!type->hasPrimaryArchetype() && "already have a contextual type"); Type result = type.subst(QueryInterfaceTypeSubstitutions(this), lookupConformance, @@ -668,7 +663,7 @@ GenericEnvironment::mapContextualPackTypeIntoElementContext(Type type) const { assert(getKind() == Kind::OpenedElement); assert(!type->hasTypeParameter() && "expected contextual type"); - if (!type->hasArchetype()) return type; + if (!type->hasPackArchetype()) return type; auto sig = getGenericSignature(); auto shapeClass = getOpenedElementShapeClass(); @@ -698,9 +693,9 @@ GenericEnvironment::mapContextualPackTypeIntoElementContext(CanType type) const Type GenericEnvironment::mapPackTypeIntoElementContext(Type type) const { assert(getKind() == Kind::OpenedElement); - assert(!type->hasArchetype()); + assert(!type->hasPackArchetype()); - if (!type->hasTypeParameter()) return type; + if (!type->hasParameterPack()) return type; // Get a contextual type in the original generic environment, not the // substituted one, which is what mapContextualPackTypeIntoElementContext() @@ -720,26 +715,35 @@ GenericEnvironment::mapElementTypeIntoPackContext(Type type) const { // generic environment. 
assert(type->hasElementArchetype()); - ElementArchetypeType *element = nullptr; - type.visit([&](Type type) { - auto archetype = type->getAs(); - if (!element && archetype) - element = archetype; - }); + GenericEnvironment *elementEnv = nullptr; - auto sig = getGenericSignature(); - auto *elementEnv = element->getGenericEnvironment(); - auto shapeClass = elementEnv->getOpenedElementShapeClass(); - QueryInterfaceTypeSubstitutions substitutions(this); + // Map element archetypes to interface types in the element generic + // environment's signature. + type = type.subst( + [&](SubstitutableType *type) -> Type { + auto *archetype = cast(type); - type = type->mapTypeOutOfContext(); + if (isa(archetype)) + return archetype; - auto interfaceType = element->getInterfaceType(); + if (isa(archetype)) { + assert(!elementEnv || + elementEnv == archetype->getGenericEnvironment()); + elementEnv = archetype->getGenericEnvironment(); + } - llvm::SmallDenseMap - packParamForElement; - auto elementDepth = interfaceType->getRootGenericParam()->getDepth(); + return archetype->getInterfaceType(); + }, + MakeAbstractConformanceForGenericType(), + SubstFlags::AllowLoweredTypes | + SubstFlags::PreservePackExpansionLevel); + + auto shapeClass = elementEnv->getOpenedElementShapeClass(); + + llvm::SmallVector members; + auto elementDepth = elementEnv->getGenericSignature()->getMaxDepth(); + auto sig = getGenericSignature(); for (auto *genericParam : sig.getGenericParams()) { if (!genericParam->isParameterPack()) continue; @@ -747,25 +751,22 @@ GenericEnvironment::mapElementTypeIntoPackContext(Type type) const { if (!sig->haveSameShape(genericParam, shapeClass)) continue; - GenericParamKey elementKey(/*isParameterPack*/false, - /*depth*/elementDepth, - /*index*/packParamForElement.size()); - packParamForElement[elementKey] = genericParam; + members.push_back(genericParam); } - // Map element archetypes to the pack archetypes by converting - // element types to interface types and adding the 
isParameterPack - // bit. Then, map type parameters to archetypes. + // Map element interface types to pack archetypes. + QueryInterfaceTypeSubstitutions mapIntoContext(this); return type.subst( [&](SubstitutableType *type) { auto *genericParam = type->getAs(); if (!genericParam) return Type(); - if (auto *packParam = packParamForElement[{genericParam}]) - return substitutions(packParam); - - return substitutions(genericParam); + if (genericParam->getDepth() == elementDepth) { + genericParam = members[genericParam->getIndex()]; + assert(genericParam->isParameterPack()); + } + return mapIntoContext(genericParam); }, LookUpConformanceInSignature(sig.getPointer()), SubstFlags::PreservePackExpansionLevel); diff --git a/lib/AST/PluginRegistry.cpp b/lib/AST/PluginRegistry.cpp index 3bd5d0e870291..12a82ff9f36d9 100644 --- a/lib/AST/PluginRegistry.cpp +++ b/lib/AST/PluginRegistry.cpp @@ -172,11 +172,11 @@ LoadedExecutablePlugin::PluginProcess::~PluginProcess() { #if defined(_WIN32) _close(input); _close(output); - CloseHandle(process.Process); #else close(input); close(output); #endif + llvm::sys::Wait(process, /*SecondsToWait=*/0); } LoadedExecutablePlugin::~LoadedExecutablePlugin() { diff --git a/lib/AST/SwiftNameTranslation.cpp b/lib/AST/SwiftNameTranslation.cpp index aac0204bb1cc6..d9e93c85394d4 100644 --- a/lib/AST/SwiftNameTranslation.cpp +++ b/lib/AST/SwiftNameTranslation.cpp @@ -215,6 +215,8 @@ swift::cxx_translation::getDeclRepresentation(const ValueDecl *VD) { return {Unsupported, UnrepresentableObjC}; if (getActorIsolation(const_cast(VD)).isActorIsolated()) return {Unsupported, UnrepresentableIsolatedInActor}; + if (isa(VD)) + return {Unsupported, UnrepresentableMacro}; GenericSignature genericSignature; // Don't expose @_alwaysEmitIntoClient decls as they require their // bodies to be emitted into client. 
@@ -382,5 +384,7 @@ swift::cxx_translation::diagnoseRepresenationError(RepresentationError error, return Diagnostic(diag::expose_move_only_to_cxx, vd); case UnrepresentableNested: return Diagnostic(diag::expose_nested_type_to_cxx, vd); + case UnrepresentableMacro: + return Diagnostic(diag::expose_macro_to_cxx, vd); } } diff --git a/lib/AST/Type.cpp b/lib/AST/Type.cpp index a0f89a263d01c..f16e9d2d929a0 100644 --- a/lib/AST/Type.cpp +++ b/lib/AST/Type.cpp @@ -3454,7 +3454,7 @@ PrimaryArchetypeType::PrimaryArchetypeType(const ASTContext &Ctx, ArrayRef ConformsTo, Type Superclass, LayoutConstraint Layout) : ArchetypeType(TypeKind::PrimaryArchetype, Ctx, - RecursiveTypeProperties::HasArchetype, + RecursiveTypeProperties::HasPrimaryArchetype, InterfaceType, ConformsTo, Superclass, Layout, GenericEnv) { assert(!InterfaceType->isParameterPack()); @@ -3518,8 +3518,7 @@ OpenedArchetypeType::OpenedArchetypeType( LayoutConstraint layout) : LocalArchetypeType(TypeKind::OpenedArchetype, interfaceType->getASTContext(), - RecursiveTypeProperties::HasArchetype - | RecursiveTypeProperties::HasOpenedExistential, + RecursiveTypeProperties::HasOpenedExistential, interfaceType, conformsTo, superclass, layout, environment) { @@ -3535,8 +3534,8 @@ PackArchetypeType::PackArchetypeType( ArrayRef ConformsTo, Type Superclass, LayoutConstraint Layout, PackShape Shape) : ArchetypeType(TypeKind::PackArchetype, Ctx, - RecursiveTypeProperties::HasArchetype | - RecursiveTypeProperties::HasPackArchetype, + RecursiveTypeProperties::HasPrimaryArchetype | + RecursiveTypeProperties::HasPackArchetype, InterfaceType, ConformsTo, Superclass, Layout, GenericEnv) { assert(InterfaceType->isParameterPack()); *getTrailingObjects() = Shape; @@ -3586,7 +3585,6 @@ ElementArchetypeType::ElementArchetypeType( ArrayRef ConformsTo, Type Superclass, LayoutConstraint Layout) : LocalArchetypeType(TypeKind::ElementArchetype, Ctx, - RecursiveTypeProperties::HasArchetype | RecursiveTypeProperties::HasElementArchetype, 
InterfaceType, ConformsTo, Superclass, Layout, GenericEnv) { diff --git a/lib/AST/TypeCheckRequests.cpp b/lib/AST/TypeCheckRequests.cpp index b073e5d72cd13..ff777f29072e8 100644 --- a/lib/AST/TypeCheckRequests.cpp +++ b/lib/AST/TypeCheckRequests.cpp @@ -1074,7 +1074,9 @@ void InterfaceTypeRequest::cacheResult(Type type) const { if (type) { assert(!type->hasTypeVariable() && "Type variable in interface type"); assert(!type->is() && "Interface type must be materializable"); - assert(!type->hasArchetype() && "Archetype in interface type"); + assert(!type->hasPrimaryArchetype() && "Archetype in interface type"); + assert(decl->getDeclContext()->isLocalContext() || !type->hasLocalArchetype() && + "Local archetype in interface type of non-local declaration"); } decl->TypeAndAccess.setPointer(type); } diff --git a/lib/Basic/Program.cpp b/lib/Basic/Program.cpp index b7d62d104c9ad..1d53cdef6b300 100644 --- a/lib/Basic/Program.cpp +++ b/lib/Basic/Program.cpp @@ -187,6 +187,7 @@ swift::ExecuteWithPipe(llvm::StringRef program, close(p2.write); llvm::sys::ProcessInfo proc; proc.Pid = pid; + proc.Process = pid; return ChildProcessInfo(proc, p1.write, p2.read); } @@ -277,6 +278,7 @@ swift::ExecuteWithPipe(llvm::StringRef program, output[PI_READ].release(); llvm::sys::ProcessInfo proc; + proc.Pid = pi.dwProcessId; proc.Process = pi.hProcess; return ChildProcessInfo(proc, ifd, ofd); } diff --git a/lib/ClangImporter/ClangIncludePaths.cpp b/lib/ClangImporter/ClangIncludePaths.cpp index 11d95131b701b..7228aaab670da 100644 --- a/lib/ClangImporter/ClangIncludePaths.cpp +++ b/lib/ClangImporter/ClangIncludePaths.cpp @@ -185,7 +185,7 @@ static bool shouldInjectLibcModulemap(const llvm::Triple &triple) { static SmallVector, 2> getLibcFileMapping(ASTContext &ctx, StringRef modulemapFileName, - std::optional maybeHeaderFileName, + std::optional> maybeHeaderFileNames, const llvm::IntrusiveRefCntPtr &vfs) { const llvm::Triple &triple = ctx.LangOpts.Target; if 
(!shouldInjectLibcModulemap(triple)) @@ -227,18 +227,20 @@ getLibcFileMapping(ASTContext &ctx, StringRef modulemapFileName, SmallVector, 2> vfsMappings{ {std::string(injectedModuleMapPath), std::string(actualModuleMapPath)}}; - if (maybeHeaderFileName) { - // TODO: remove the SwiftGlibc.h header and reference all Glibc headers - // directly from the modulemap. - Path actualHeaderPath = actualModuleMapPath; - llvm::sys::path::remove_filename(actualHeaderPath); - llvm::sys::path::append(actualHeaderPath, maybeHeaderFileName.value()); + if (maybeHeaderFileNames) { + for (const auto &filename : *maybeHeaderFileNames) { + // TODO: remove the SwiftGlibc.h header and reference all Glibc headers + // directly from the modulemap. + Path actualHeaderPath = actualModuleMapPath; + llvm::sys::path::remove_filename(actualHeaderPath); + llvm::sys::path::append(actualHeaderPath, filename); - Path injectedHeaderPath(libcDir); - llvm::sys::path::append(injectedHeaderPath, maybeHeaderFileName.value()); + Path injectedHeaderPath(libcDir); + llvm::sys::path::append(injectedHeaderPath, filename); - vfsMappings.push_back( - {std::string(injectedHeaderPath), std::string(actualHeaderPath)}); + vfsMappings.push_back( + {std::string(injectedHeaderPath), std::string(actualHeaderPath)}); + } } return vfsMappings; @@ -559,8 +561,13 @@ ClangInvocationFileMapping swift::getClangInvocationFileMapping( } else if (triple.isMusl()) { libcFileMapping = getLibcFileMapping(ctx, "musl.modulemap", StringRef("SwiftMusl.h"), vfs); + } else if (triple.isAndroid()) { + // Android uses the android-specific module map that overlays the NDK. 
+ StringRef headerFiles[] = {"SwiftAndroidNDK.h", "SwiftBionic.h"}; + libcFileMapping = + getLibcFileMapping(ctx, "android.modulemap", headerFiles, vfs); } else { - // Android/BSD/Linux Mappings + // BSD/Linux Mappings libcFileMapping = getLibcFileMapping(ctx, "glibc.modulemap", StringRef("SwiftGlibc.h"), vfs); diff --git a/lib/Frontend/CompilerInvocation.cpp b/lib/Frontend/CompilerInvocation.cpp index 5475ff7931a7c..4d9e24ee062d1 100644 --- a/lib/Frontend/CompilerInvocation.cpp +++ b/lib/Frontend/CompilerInvocation.cpp @@ -2610,6 +2610,9 @@ static bool ParseSILArgs(SILOptions &Opts, ArgList &Args, Opts.NoAllocations = Args.hasArg(OPT_no_allocations); + Opts.EnableExperimentalSwiftBasedClosureSpecialization = + Args.hasArg(OPT_enable_experimental_swift_based_closure_specialization); + return false; } diff --git a/lib/IRGen/GenBuiltin.cpp b/lib/IRGen/GenBuiltin.cpp index cac9788268130..c34954d3acbba 100644 --- a/lib/IRGen/GenBuiltin.cpp +++ b/lib/IRGen/GenBuiltin.cpp @@ -637,6 +637,9 @@ void irgen::emitBuiltinCall(IRGenFunction &IGF, const BuiltinInfo &Builtin, // Don't generate any code for the builtin. 
return out.add(v); } + if (Builtin.ID == BuiltinValueKind::Freeze) { + return out.add(IGF.Builder.CreateFreeze(args.claimNext())); + } if (Builtin.ID == BuiltinValueKind::AllocRaw) { auto size = args.claimNext(); diff --git a/lib/IRGen/LoadableByAddress.cpp b/lib/IRGen/LoadableByAddress.cpp index 267de70038c85..399606139481c 100644 --- a/lib/IRGen/LoadableByAddress.cpp +++ b/lib/IRGen/LoadableByAddress.cpp @@ -4079,8 +4079,8 @@ class RewriteUser : SILInstructionVisitor { SILBuilder caseBuilder = assignment.getBuilder(caseBB->begin()); auto *caseAddr = - caseBuilder.createUncheckedTakeEnumDataAddr(loc, opdAddr, caseDecl); - + caseBuilder.createUncheckedTakeEnumDataAddr(loc, opdAddr, caseDecl, + caseArg->getType().getAddressType()); if (assignment.isLargeLoadableType(caseArg->getType())) { assignment.mapValueToAddress(caseArg, caseAddr); assignment.markBlockArgumentForDeletion(caseBB); diff --git a/lib/SIL/IR/Bridging.cpp b/lib/SIL/IR/Bridging.cpp index 3c13c805085a5..dc310fce6e371 100644 --- a/lib/SIL/IR/Bridging.cpp +++ b/lib/SIL/IR/Bridging.cpp @@ -138,7 +138,7 @@ Type TypeConverter::getLoweredCBridgedType(AbstractionPattern pattern, if (nativeBoolTy && t->isEqual(nativeBoolTy)) { // If we have a Clang type that was imported as Bool, it had better be // one of a small set of types. 
- if (clangTy) { + if (clangTy && clangTy->isBuiltinType()) { auto builtinTy = clangTy->castAs(); if (builtinTy->getKind() == clang::BuiltinType::Bool) return t; diff --git a/lib/SIL/IR/OperandOwnership.cpp b/lib/SIL/IR/OperandOwnership.cpp index 9af138bc6195a..70baa169c7917 100644 --- a/lib/SIL/IR/OperandOwnership.cpp +++ b/lib/SIL/IR/OperandOwnership.cpp @@ -810,6 +810,7 @@ BUILTIN_OPERAND_OWNERSHIP(InstantaneousUse, GenericFRem) BUILTIN_OPERAND_OWNERSHIP(InstantaneousUse, FSub) BUILTIN_OPERAND_OWNERSHIP(InstantaneousUse, GenericFSub) BUILTIN_OPERAND_OWNERSHIP(InstantaneousUse, Fence) +BUILTIN_OPERAND_OWNERSHIP(TrivialUse, Freeze) BUILTIN_OPERAND_OWNERSHIP(InstantaneousUse, Ifdef) BUILTIN_OPERAND_OWNERSHIP(InstantaneousUse, GetObjCTypeEncoding) BUILTIN_OPERAND_OWNERSHIP(InstantaneousUse, ICMP_EQ) diff --git a/lib/SIL/IR/ValueOwnership.cpp b/lib/SIL/IR/ValueOwnership.cpp index c7ed8453476f0..24e21097abb4f 100644 --- a/lib/SIL/IR/ValueOwnership.cpp +++ b/lib/SIL/IR/ValueOwnership.cpp @@ -491,6 +491,7 @@ CONSTANT_OWNERSHIP_BUILTIN(None, FRem) CONSTANT_OWNERSHIP_BUILTIN(None, GenericFRem) CONSTANT_OWNERSHIP_BUILTIN(None, FSub) CONSTANT_OWNERSHIP_BUILTIN(None, GenericFSub) +CONSTANT_OWNERSHIP_BUILTIN(None, Freeze) CONSTANT_OWNERSHIP_BUILTIN(None, ICMP_EQ) CONSTANT_OWNERSHIP_BUILTIN(None, ICMP_NE) CONSTANT_OWNERSHIP_BUILTIN(None, ICMP_SGE) diff --git a/lib/SIL/Utils/FieldSensitivePrunedLiveness.cpp b/lib/SIL/Utils/FieldSensitivePrunedLiveness.cpp index f3b8cf88144c6..3a53c9e498067 100644 --- a/lib/SIL/Utils/FieldSensitivePrunedLiveness.cpp +++ b/lib/SIL/Utils/FieldSensitivePrunedLiveness.cpp @@ -1137,9 +1137,18 @@ void FieldSensitivePrunedLiveRange::computeBoundary( for (SILBasicBlock *succBB : block->getSuccessors()) { if (FieldSensitivePrunedLiveBlocks::isDead( getBlockLiveness(succBB, index))) { - PRUNED_LIVENESS_LOG(llvm::dbgs() << "Marking succBB as boundary edge: bb" - << succBB->getDebugID() << '\n'); - boundary.getBoundaryEdgeBits(succBB).set(index); + // If 
the basic block ends in unreachable, don't consider it a + // boundary. + // TODO: Should also do this if the block's successors all always + // end in unreachable too. + if (isa(succBB->getTerminator())) { + PRUNED_LIVENESS_LOG(llvm::dbgs() << "succBB ends in unreachable, skipping as boundary edge: bb" + << succBB->getDebugID() << '\n'); + } else { + PRUNED_LIVENESS_LOG(llvm::dbgs() << "Marking succBB as boundary edge: bb" + << succBB->getDebugID() << '\n'); + boundary.getBoundaryEdgeBits(succBB).set(index); + } } } asImpl().findBoundariesInBlock(block, index, /*isLiveOut*/ true, diff --git a/lib/SIL/Utils/OSSALifetimeCompletion.cpp b/lib/SIL/Utils/OSSALifetimeCompletion.cpp index 7775725f78d99..8e2a84e131cec 100644 --- a/lib/SIL/Utils/OSSALifetimeCompletion.cpp +++ b/lib/SIL/Utils/OSSALifetimeCompletion.cpp @@ -122,14 +122,21 @@ class VisitUnreachableLifetimeEnds { /// The value whose dead-end block lifetime ends are to be visited. SILValue value; + /// Whether to allow leaks. + /// + /// Here, that entails allowing walks to reach non-unreachable terminators and + /// not creating lifetime ends before them. + OSSALifetimeCompletion::AllowLeaks_t allowLeaks; + /// The non-lifetime-ending boundary of `value`. BasicBlockSet starts; /// The region between (inclusive) the `starts` and the unreachable blocks. BasicBlockSetVector region; public: - VisitUnreachableLifetimeEnds(SILValue value) - : value(value), starts(value->getFunction()), + VisitUnreachableLifetimeEnds(SILValue value, + OSSALifetimeCompletion::AllowLeaks_t allowLeaks) + : value(value), allowLeaks(allowLeaks), starts(value->getFunction()), region(value->getFunction()) {} /// Region discovery. @@ -232,9 +239,17 @@ void VisitUnreachableLifetimeEnds::computeRegion( // (3) Forward walk to find the region in which `value` might be available. 
while (auto *block = regionWorklist.pop()) { if (block->succ_empty()) { - // This assert will fail unless there is already a lifetime-ending - // instruction on each path to normal function exits. - assert(isa(block->getTerminator())); + // This is a function-exiting block. + // + // In valid-but-lifetime-incomplete OSSA there must be a lifetime-ending + // instruction on each path from the def that exits the function normally. + // Thus finding a value available at the end of such a block means that + // the block must _not_ exit the function normally; in other + // words its terminator must be an UnreachableInst. + // + // In invalid OSSA, indicated by the `allowLeaks` flag, no such guarantee + // exists. + assert(isa(block->getTerminator()) || allowLeaks); } for (auto *successor : block->getSuccessorBlocks()) { regionWorklist.pushIfNotVisited(successor); } @@ -291,16 +306,22 @@ void VisitUnreachableLifetimeEnds::visitAvailabilityBoundary( if (!available) { continue; } - auto hasUnreachableSuccessor = [&]() { + auto hasUnavailableSuccessor = [&]() { // Use a lambda to avoid checking if possible. return llvm::any_of(block->getSuccessorBlocks(), [&result](auto *block) { return result.getState(block) == State::Unavailable; }); }; - if (!block->succ_empty() && !hasUnreachableSuccessor()) { + if (!block->succ_empty() && !hasUnavailableSuccessor()) { + continue; + } + if (allowLeaks && block->succ_empty() && + !isa(block->getTerminator())) { + // Availability extends to the end of a function-exiting-normally block. + // If leaks are allowed, don't visit. 
continue; } - assert(hasUnreachableSuccessor() || + assert(hasUnavailableSuccessor() || isa(block->getTerminator())); visit(block->getTerminator()); } @@ -308,10 +329,10 @@ void VisitUnreachableLifetimeEnds::visitAvailabilityBoundary( } // end anonymous namespace void OSSALifetimeCompletion::visitUnreachableLifetimeEnds( - SILValue value, const SSAPrunedLiveness &liveness, + SILValue value, AllowLeaks_t allowLeaks, const SSAPrunedLiveness &liveness, llvm::function_ref visit) { - VisitUnreachableLifetimeEnds visitor(value); + VisitUnreachableLifetimeEnds visitor(value, allowLeaks); visitor.computeRegion(liveness); @@ -322,12 +343,12 @@ void OSSALifetimeCompletion::visitUnreachableLifetimeEnds( visitor.visitAvailabilityBoundary(result, visit); } -static bool -endLifetimeAtAvailabilityBoundary(SILValue value, - const SSAPrunedLiveness &liveness) { +static bool endLifetimeAtAvailabilityBoundary( + SILValue value, OSSALifetimeCompletion::AllowLeaks_t allowLeaks, + const SSAPrunedLiveness &liveness) { bool changed = false; OSSALifetimeCompletion::visitUnreachableLifetimeEnds( - value, liveness, [&](auto *unreachable) { + value, allowLeaks, liveness, [&](auto *unreachable) { SILBuilderWithScope builder(unreachable); endOSSALifetime(value, builder); changed = true; @@ -342,20 +363,25 @@ bool OSSALifetimeCompletion::analyzeAndUpdateLifetime(SILValue value, Boundary boundary) { // Called for inner borrows, inner adjacent reborrows, inner reborrows, and // scoped addresses. 
- auto handleInnerScope = [this](SILValue innerBorrowedValue) { - completeOSSALifetime(innerBorrowedValue); + auto handleInnerScope = [this, boundary](SILValue innerBorrowedValue) { + completeOSSALifetime(innerBorrowedValue, boundary); }; InteriorLiveness liveness(value); liveness.compute(domInfo, handleInnerScope); bool changed = false; switch (boundary) { - case Boundary::Availability: - changed |= endLifetimeAtAvailabilityBoundary(value, liveness.getLiveness()); - break; case Boundary::Liveness: changed |= endLifetimeAtLivenessBoundary(value, liveness.getLiveness()); break; + case Boundary::Availability: + changed |= endLifetimeAtAvailabilityBoundary(value, DoNotAllowLeaks, + liveness.getLiveness()); + break; + case Boundary::AvailabilityWithLeaks: + changed |= endLifetimeAtAvailabilityBoundary(value, AllowLeaks, + liveness.getLiveness()); + break; } // TODO: Rebuild outer adjacent phis on demand (SILGen does not currently // produce guaranteed phis). See FindEnclosingDefs & @@ -371,16 +397,19 @@ namespace swift::test { // Dumps: // - function static FunctionTest OSSALifetimeCompletionTest( - "ossa-lifetime-completion", + "ossa_lifetime_completion", [](auto &function, auto &arguments, auto &test) { SILValue value = arguments.takeValue(); - std::optional kind = std::nullopt; - if (arguments.hasUntaken()) { - kind = arguments.takeBool() - ? 
OSSALifetimeCompletion::Boundary::Liveness - : OSSALifetimeCompletion::Boundary::Availability; - } - llvm::outs() << "OSSA lifetime completion: " << value; + OSSALifetimeCompletion::Boundary kind = + llvm::StringSwitch( + arguments.takeString()) + .Case("liveness", OSSALifetimeCompletion::Boundary::Liveness) + .Case("availability", + OSSALifetimeCompletion::Boundary::Availability) + .Case("availability_with_leaks", + OSSALifetimeCompletion::Boundary::AvailabilityWithLeaks); + llvm::outs() << "OSSA lifetime completion on " << kind + << " boundary: " << value; OSSALifetimeCompletion completion(&function, /*domInfo*/ nullptr); completion.completeOSSALifetime(value, kind); function.print(llvm::outs()); @@ -462,8 +491,9 @@ bool UnreachableLifetimeCompletion::completeLifetimes() { bool changed = false; for (auto value : incompleteValues) { - if (completion.completeOSSALifetime(value) - == LifetimeCompletion::WasCompleted) { + if (completion.completeOSSALifetime( + value, OSSALifetimeCompletion::Boundary::Availability) == + LifetimeCompletion::WasCompleted) { changed = true; } } diff --git a/lib/SIL/Verifier/SILOwnershipVerifier.cpp b/lib/SIL/Verifier/SILOwnershipVerifier.cpp index 1bc54503a27ae..c6d660d5d0bc3 100644 --- a/lib/SIL/Verifier/SILOwnershipVerifier.cpp +++ b/lib/SIL/Verifier/SILOwnershipVerifier.cpp @@ -491,7 +491,7 @@ bool SILValueOwnershipChecker::checkFunctionArgWithoutLifetimeEndingUses( return true; return !errorBuilder.handleMalformedSIL([&] { - llvm::errs() << "Owned function parameter without life ending uses!\n" + llvm::errs() << "Owned function parameter without lifetime ending uses!\n" << "Value: " << *arg << '\n'; }); } @@ -510,7 +510,7 @@ bool SILValueOwnershipChecker::checkYieldWithoutLifetimeEndingUses( return true; } return !errorBuilder.handleMalformedSIL([&] { - llvm::errs() << "Owned yield without life ending uses!\n" + llvm::errs() << "Owned yield without lifetime ending uses!\n" << "Value: " << *yield << '\n'; }); case 
OwnershipKind::Guaranteed: @@ -607,7 +607,7 @@ bool SILValueOwnershipChecker::isGuaranteedFunctionArgWithLifetimeEndingUses( return true; return errorBuilder.handleMalformedSIL([&] { - llvm::errs() << "Guaranteed function parameter with life ending uses!\n" + llvm::errs() << "Guaranteed function parameter with lifetime ending uses!\n" << "Value: " << *arg; for (const auto *use : lifetimeEndingUsers) { llvm::errs() << "Lifetime Ending User: " << *use->getUser(); @@ -620,7 +620,7 @@ bool SILValueOwnershipChecker::isSubobjectProjectionWithLifetimeEndingUses( SILValue value, const llvm::SmallVectorImpl &lifetimeEndingUsers) const { return errorBuilder.handleMalformedSIL([&] { - llvm::errs() << "Subobject projection with life ending uses!\n" + llvm::errs() << "Subobject projection with lifetime ending uses!\n" << "Value: " << *value; for (const auto *use : lifetimeEndingUsers) { llvm::errs() << "Lifetime Ending User: " << *use->getUser(); diff --git a/lib/SILGen/SILGenApply.cpp b/lib/SILGen/SILGenApply.cpp index 17473d1c6f104..8278930296a94 100644 --- a/lib/SILGen/SILGenApply.cpp +++ b/lib/SILGen/SILGenApply.cpp @@ -3223,6 +3223,14 @@ static StorageRefResult findStorageReferenceExprForBorrow(Expr *e) { Expr *SILGenFunction::findStorageReferenceExprForMoveOnly(Expr *argExpr, StorageReferenceOperationKind kind) { + ForceValueExpr *forceUnwrap = nullptr; + // Check for a force unwrap. This might show up inside or outside of the + // load. + if (auto *fu = dyn_cast(argExpr)) { + forceUnwrap = fu; + argExpr = fu->getSubExpr(); + } + // If there's a load around the outer part of this arg expr, look past it. bool sawLoad = false; if (auto *li = dyn_cast(argExpr)) { @@ -3230,34 +3238,39 @@ Expr *SILGenFunction::findStorageReferenceExprForMoveOnly(Expr *argExpr, sawLoad = true; } + // Check again for a force unwrap before the load. 
+ if (auto *fu = dyn_cast(argExpr)) { + forceUnwrap = fu; + argExpr = fu->getSubExpr(); + } + // If we're consuming instead, then the load _must_ have been there. if (kind == StorageReferenceOperationKind::Consume && !sawLoad) return nullptr; - // If we did not see a load and our argExpr is a - // declref_expr, return nullptr. We have an object not something that will be - // in memory. This can happen with classes or with values captured by a - // closure. - // - // NOTE: If we see a member_ref_expr from a decl_ref_expr, we still process it - // since the declref_expr could be from a class. + // TODO: This section should be removed eventually. Decl refs should not be + // handled different from other storage. Removing it breaks some things + // currently. if (!sawLoad) { if (auto *declRef = dyn_cast(argExpr)) { assert(!declRef->getType()->is() && "Shouldn't ever have an lvalue type here!"); - - // Proceed if the storage references a global or static let. - // TODO: We should treat any storage reference as a borrow, it seems, but - // that currently disrupts what the move checker expects. It would also - // be valuable to borrow copyable global lets, but this is a targeted - // fix to allow noncopyable globals to work properly. - bool isGlobal = false; - if (auto vd = dyn_cast(declRef->getDecl())) { - isGlobal = vd->isGlobalStorage(); - } - - if (!isGlobal) { - return nullptr; + + // Proceed if the storage reference is a force unwrap. + if (!forceUnwrap) { + // Proceed if the storage references a global or static let. + // TODO: We should treat any storage reference as a borrow, it seems, but + // that currently disrupts what the move checker expects. It would also + // be valuable to borrow copyable global lets, but this is a targeted + // fix to allow noncopyable globals to work properly. 
+ bool isGlobal = false; + if (auto vd = dyn_cast(declRef->getDecl())) { + isGlobal = vd->isGlobalStorage(); + } + + if (!isGlobal) { + return nullptr; + } } } } @@ -3302,6 +3315,10 @@ Expr *SILGenFunction::findStorageReferenceExprForMoveOnly(Expr *argExpr, } if (!isMoveOnly) return nullptr; + + if (forceUnwrap) { + return forceUnwrap; + } return result.getTransitiveRoot(); } diff --git a/lib/SILGen/SILGenConvert.cpp b/lib/SILGen/SILGenConvert.cpp index f8d0190e6935a..73a2072372d32 100644 --- a/lib/SILGen/SILGenConvert.cpp +++ b/lib/SILGen/SILGenConvert.cpp @@ -199,9 +199,8 @@ SILGenFunction::emitPreconditionOptionalHasValue(SILLocation loc, auto someDecl = getASTContext().getOptionalSomeDecl(); auto noneDecl = getASTContext().getOptionalNoneDecl(); - // If we have an object, make sure the object is at +1. All switch_enum of - // objects is done at +1. bool isAddress = optional.getType().isAddress(); + bool isBorrow = !optional.isPlusOneOrTrivial(*this); SwitchEnumInst *switchEnum = nullptr; if (isAddress) { // We forward in the creation routine for @@ -209,6 +208,12 @@ SILGenFunction::emitPreconditionOptionalHasValue(SILLocation loc, B.createSwitchEnumAddr(loc, optional.getValue(), /*defaultDest*/ nullptr, {{someDecl, contBB}, {noneDecl, failBB}}); + } else if (isBorrow) { + hadCleanup = false; + hadLValue = false; + switchEnum = B.createSwitchEnum(loc, optional.getValue(), + /*defaultDest*/ nullptr, + {{someDecl, contBB}, {noneDecl, failBB}}); } else { optional = optional.ensurePlusOne(*this, loc); hadCleanup = true; diff --git a/lib/SILGen/SILGenLValue.cpp b/lib/SILGen/SILGenLValue.cpp index 20921e3291dbf..124733d1a6399 100644 --- a/lib/SILGen/SILGenLValue.cpp +++ b/lib/SILGen/SILGenLValue.cpp @@ -948,6 +948,9 @@ namespace { ManagedValue base) && override { // Assert that the optional value is present and return the projected out // payload. 
+ if (isConsumeAccess(getTypeData().getAccessKind())) { + base = base.ensurePlusOne(SGF, loc); + } return SGF.emitPreconditionOptionalHasValue(loc, base, isImplicitUnwrap); } @@ -4355,6 +4358,16 @@ getOptionalObjectTypeData(SILGenFunction &SGF, SGFAccessKind accessKind, LValue SILGenLValue::visitForceValueExpr(ForceValueExpr *e, SGFAccessKind accessKind, LValueOptions options) { + // Since Sema doesn't reason about borrows, a borrowed force expr + // might end up type checked with the load inside of the force. + auto subExpr = e->getSubExpr(); + if (auto load = dyn_cast(subExpr)) { + assert((isBorrowAccess(accessKind) || isConsumeAccess(accessKind)) + && "should only see a (force_value (load)) lvalue as part of a " + "borrow or consume"); + subExpr = load->getSubExpr(); + } + // Like BindOptional, this is a read even if we only write to the result. // (But it's unnecessary to use a force this way!) LValue lv = visitRec(e->getSubExpr(), diff --git a/lib/SILOptimizer/IPO/ClosureSpecializer.cpp b/lib/SILOptimizer/IPO/ClosureSpecializer.cpp index 4531b2475266d..7e20513c60346 100644 --- a/lib/SILOptimizer/IPO/ClosureSpecializer.cpp +++ b/lib/SILOptimizer/IPO/ClosureSpecializer.cpp @@ -56,7 +56,9 @@ //===----------------------------------------------------------------------===// #define DEBUG_TYPE "closure-specialization" +#include "swift/SILOptimizer/IPO/ClosureSpecializer.h" #include "swift/Basic/Range.h" +#include "swift/Demangling/Demangle.h" #include "swift/Demangling/Demangler.h" #include "swift/SIL/InstructionUtils.h" #include "swift/SIL/SILCloner.h" @@ -103,6 +105,101 @@ static bool isSupportedClosureKind(const SILInstruction *I) { return isa(I) || isa(I); } +static const int SpecializationLevelLimit = 2; + +static int getSpecializationLevelRecursive(StringRef funcName, + Demangler &parent) { + using namespace Demangle; + + Demangler demangler; + demangler.providePreallocatedMemory(parent); + + // Check for this kind of node tree: + // + // kind=Global + // 
kind=FunctionSignatureSpecialization + // kind=SpecializationPassID, index=1 + // kind=FunctionSignatureSpecializationParam + // kind=FunctionSignatureSpecializationParamKind, index=5 + // kind=FunctionSignatureSpecializationParamPayload, text="..." + // + Node *root = demangler.demangleSymbol(funcName); + if (!root) + return 0; + if (root->getKind() != Node::Kind::Global) + return 0; + Node *funcSpec = root->getFirstChild(); + if (!funcSpec || funcSpec->getNumChildren() < 2) + return 0; + if (funcSpec->getKind() != Node::Kind::FunctionSignatureSpecialization) + return 0; + + // Match any function specialization. We check for constant propagation at the + // parameter level. + Node *param = funcSpec->getChild(0); + if (param->getKind() != Node::Kind::SpecializationPassID) + return SpecializationLevelLimit + 1; // unrecognized format + + unsigned maxParamLevel = 0; + for (unsigned paramIdx = 1; paramIdx < funcSpec->getNumChildren(); + ++paramIdx) { + Node *param = funcSpec->getChild(paramIdx); + if (param->getKind() != Node::Kind::FunctionSignatureSpecializationParam) + return SpecializationLevelLimit + 1; // unrecognized format + + // A parameter is recursive if it has a kind with index and type payload + if (param->getNumChildren() < 2) + continue; + + Node *kindNd = param->getChild(0); + if (kindNd->getKind() != + Node::Kind::FunctionSignatureSpecializationParamKind) { + return SpecializationLevelLimit + 1; // unrecognized format + } + auto kind = FunctionSigSpecializationParamKind(kindNd->getIndex()); + if (kind != FunctionSigSpecializationParamKind::ConstantPropFunction) + continue; + Node *payload = param->getChild(1); + if (payload->getKind() != + Node::Kind::FunctionSignatureSpecializationParamPayload) { + return SpecializationLevelLimit + 1; // unrecognized format + } + // Check if the specialized function is a specialization itself. 
+ unsigned paramLevel = + 1 + getSpecializationLevelRecursive(payload->getText(), demangler); + if (paramLevel > maxParamLevel) + maxParamLevel = paramLevel; + } + return maxParamLevel; +} + +//===----------------------------------------------------------------------===// +// Publicly visible for bridging +//===----------------------------------------------------------------------===// + +int swift::getSpecializationLevel(SILFunction *f) { + Demangle::StackAllocatedDemangler<1024> demangler; + return getSpecializationLevelRecursive(f->getName(), demangler); +} + +bool swift::isDifferentiableFuncComponent( + SILFunction *f, AutoDiffFunctionComponent expectedComponent) { + Demangle::Context Ctx; + if (auto *root = Ctx.demangleSymbolAsNode(f->getName())) { + if (auto *node = + root->findByKind(Demangle::Node::Kind::AutoDiffFunctionKind, 3)) { + if (node->hasIndex()) { + auto component = (char)node->getIndex(); + if (component == (char)expectedComponent) { + return true; + } + } + } + } + + return false; +} + //===----------------------------------------------------------------------===// // Closure Spec Cloner Interface //===----------------------------------------------------------------------===// @@ -1084,82 +1181,6 @@ static bool canSpecializeFullApplySite(FullApplySiteKind kind) { llvm_unreachable("covered switch"); } -const int SpecializationLevelLimit = 2; - -static int getSpecializationLevelRecursive(StringRef funcName, Demangler &parent) { - using namespace Demangle; - - Demangler demangler; - demangler.providePreallocatedMemory(parent); - - // Check for this kind of node tree: - // - // kind=Global - // kind=FunctionSignatureSpecialization - // kind=SpecializationPassID, index=1 - // kind=FunctionSignatureSpecializationParam - // kind=FunctionSignatureSpecializationParamKind, index=5 - // kind=FunctionSignatureSpecializationParamPayload, text="..." 
- // - Node *root = demangler.demangleSymbol(funcName); - if (!root) - return 0; - if (root->getKind() != Node::Kind::Global) - return 0; - Node *funcSpec = root->getFirstChild(); - if (!funcSpec || funcSpec->getNumChildren() < 2) - return 0; - if (funcSpec->getKind() != Node::Kind::FunctionSignatureSpecialization) - return 0; - - // Match any function specialization. We check for constant propagation at the - // parameter level. - Node *param = funcSpec->getChild(0); - if (param->getKind() != Node::Kind::SpecializationPassID) - return SpecializationLevelLimit + 1; // unrecognized format - - unsigned maxParamLevel = 0; - for (unsigned paramIdx = 1; paramIdx < funcSpec->getNumChildren(); - ++paramIdx) { - Node *param = funcSpec->getChild(paramIdx); - if (param->getKind() != Node::Kind::FunctionSignatureSpecializationParam) - return SpecializationLevelLimit + 1; // unrecognized format - - // A parameter is recursive if it has a kind with index and type payload - if (param->getNumChildren() < 2) - continue; - - Node *kindNd = param->getChild(0); - if (kindNd->getKind() - != Node::Kind::FunctionSignatureSpecializationParamKind) { - return SpecializationLevelLimit + 1; // unrecognized format - } - auto kind = FunctionSigSpecializationParamKind(kindNd->getIndex()); - if (kind != FunctionSigSpecializationParamKind::ConstantPropFunction) - continue; - Node *payload = param->getChild(1); - if (payload->getKind() - != Node::Kind::FunctionSignatureSpecializationParamPayload) { - return SpecializationLevelLimit + 1; // unrecognized format - } - // Check if the specialized function is a specialization itself. - unsigned paramLevel = - 1 + getSpecializationLevelRecursive(payload->getText(), demangler); - if (paramLevel > maxParamLevel) - maxParamLevel = paramLevel; - } - return maxParamLevel; -} - -/// If \p function is a function-signature specialization for a constant- -/// propagated function argument, returns 1. 
-/// If \p function is a specialization of such a specialization, returns 2. -/// And so on. -static int getSpecializationLevel(SILFunction *f) { - Demangle::StackAllocatedDemangler<1024> demangler; - return getSpecializationLevelRecursive(f->getName(), demangler); -} - bool SILClosureSpecializerTransform::gatherCallSites( SILFunction *Caller, llvm::SmallVectorImpl> &ClosureCandidates, diff --git a/lib/SILOptimizer/Mandatory/MoveOnlyAddressCheckerUtils.cpp b/lib/SILOptimizer/Mandatory/MoveOnlyAddressCheckerUtils.cpp index 3bd80987ba82a..1add2e76302fd 100644 --- a/lib/SILOptimizer/Mandatory/MoveOnlyAddressCheckerUtils.cpp +++ b/lib/SILOptimizer/Mandatory/MoveOnlyAddressCheckerUtils.cpp @@ -1713,9 +1713,6 @@ struct CopiedLoadBorrowEliminationVisitor useWorklist.push_back(use); } - // If we have a switch_enum, we always need to convert it to a load - // [copy] since we need to destructure through it. - shouldConvertToLoadCopy |= isa(nextUse->getUser()); continue; } case OperandOwnership::Borrow: @@ -4017,7 +4014,8 @@ bool MoveOnlyAddressChecker::completeLifetimes() { [](auto *user) { return isa(user); })) { continue; } - if (completion.completeOSSALifetime(result) == + if (completion.completeOSSALifetime( + result, OSSALifetimeCompletion::Boundary::Availability) == LifetimeCompletion::WasCompleted) { changed = true; } @@ -4027,7 +4025,8 @@ bool MoveOnlyAddressChecker::completeLifetimes() { if (arg->isReborrow()) { continue; } - if (completion.completeOSSALifetime(arg) == + if (completion.completeOSSALifetime( + arg, OSSALifetimeCompletion::Boundary::Availability) == LifetimeCompletion::WasCompleted) { changed = true; } diff --git a/lib/SILOptimizer/Mandatory/MoveOnlyChecker.cpp b/lib/SILOptimizer/Mandatory/MoveOnlyChecker.cpp index 1043917102e03..48b8df00c46b1 100644 --- a/lib/SILOptimizer/Mandatory/MoveOnlyChecker.cpp +++ b/lib/SILOptimizer/Mandatory/MoveOnlyChecker.cpp @@ -163,7 +163,8 @@ void MoveOnlyChecker::completeObjectLifetimes( for (auto result : 
inst.getResults()) { if (!transitiveValues.isVisited(result)) continue; - if (completion.completeOSSALifetime(result) == + if (completion.completeOSSALifetime( + result, OSSALifetimeCompletion::Boundary::Availability) == LifetimeCompletion::WasCompleted) { madeChange = true; } @@ -173,7 +174,8 @@ void MoveOnlyChecker::completeObjectLifetimes( assert(!arg->isReborrow() && "reborrows not legal at this SIL stage"); if (!transitiveValues.isVisited(arg)) continue; - if (completion.completeOSSALifetime(arg) == + if (completion.completeOSSALifetime( + arg, OSSALifetimeCompletion::Boundary::Availability) == LifetimeCompletion::WasCompleted) { madeChange = true; } diff --git a/lib/SILOptimizer/Mandatory/PerformanceDiagnostics.cpp b/lib/SILOptimizer/Mandatory/PerformanceDiagnostics.cpp index c3b5eec5f1109..78f14a756ca97 100644 --- a/lib/SILOptimizer/Mandatory/PerformanceDiagnostics.cpp +++ b/lib/SILOptimizer/Mandatory/PerformanceDiagnostics.cpp @@ -458,7 +458,7 @@ static bool metatypeUsesAreNotRelevant(MetatypeInst *mt) { } static bool allowedMetadataUseInEmbeddedSwift(SILInstruction *inst) { - // Only diagnose metatype and value_metatype instructions, for now. + // Only diagnose metatype, value_metatype instructions, ... if ((isa(inst) || isa(inst))) { auto metaTy = cast(inst)->getType().castTo(); if (metaTy->getRepresentation() == MetatypeRepresentation::Thick) { @@ -468,6 +468,9 @@ static bool allowedMetadataUseInEmbeddedSwift(SILInstruction *inst) { // Class metadata are supported in embedded Swift return instTy->getClassOrBoundGenericClass() ? true : false; } + // ... and alloc_ref_dynamic, for now. 
+ } else if (isa(inst)) { + return false; } return true; diff --git a/lib/SILOptimizer/Mandatory/PredictableMemOpt.cpp b/lib/SILOptimizer/Mandatory/PredictableMemOpt.cpp index 40260c9b3e5f4..4434008e70721 100644 --- a/lib/SILOptimizer/Mandatory/PredictableMemOpt.cpp +++ b/lib/SILOptimizer/Mandatory/PredictableMemOpt.cpp @@ -2673,8 +2673,9 @@ bool AllocOptimize::tryToRemoveDeadAllocation() { // Lexical enums can have incomplete lifetimes in non payload paths that // don't end in unreachable. Force their lifetime to end immediately after // the last use instead. - auto boundary = OSSALifetimeCompletion::Boundary::getForcingLiveness( - v->getType().isOrHasEnum()); + auto boundary = v->getType().isOrHasEnum() + ? OSSALifetimeCompletion::Boundary::Liveness + : OSSALifetimeCompletion::Boundary::Availability; LLVM_DEBUG(llvm::dbgs() << "Completing lifetime of: "); LLVM_DEBUG(v->dump()); completion.completeOSSALifetime(v, boundary); diff --git a/lib/SILOptimizer/Mandatory/SILGenCleanup.cpp b/lib/SILOptimizer/Mandatory/SILGenCleanup.cpp index fa5b880f00211..caf325df576ea 100644 --- a/lib/SILOptimizer/Mandatory/SILGenCleanup.cpp +++ b/lib/SILOptimizer/Mandatory/SILGenCleanup.cpp @@ -117,7 +117,9 @@ bool SILGenCleanup::completeOSSALifetimes(SILFunction *function) { for (auto *block : postOrder->getPostOrder()) { for (SILInstruction &inst : reverse(*block)) { for (auto result : inst.getResults()) { - if (completion.completeOSSALifetime(result) == + if (completion.completeOSSALifetime( + result, + OSSALifetimeCompletion::Boundary::AvailabilityWithLeaks) == LifetimeCompletion::WasCompleted) { changed = true; } @@ -125,7 +127,8 @@ bool SILGenCleanup::completeOSSALifetimes(SILFunction *function) { } for (SILArgument *arg : block->getArguments()) { assert(!arg->isReborrow() && "reborrows not legal at this SIL stage"); - if (completion.completeOSSALifetime(arg) == + if (completion.completeOSSALifetime( + arg, OSSALifetimeCompletion::Boundary::AvailabilityWithLeaks) == 
LifetimeCompletion::WasCompleted) { changed = true; } diff --git a/lib/SILOptimizer/PassManager/PassManager.cpp b/lib/SILOptimizer/PassManager/PassManager.cpp index d2422e90a52e1..d9e86f8469d65 100644 --- a/lib/SILOptimizer/PassManager/PassManager.cpp +++ b/lib/SILOptimizer/PassManager/PassManager.cpp @@ -13,22 +13,27 @@ #define DEBUG_TYPE "sil-passmanager" #include "swift/SILOptimizer/PassManager/PassManager.h" +#include "../../IRGen/IRGenModule.h" #include "swift/AST/ASTMangler.h" #include "swift/AST/SILOptimizerRequests.h" #include "swift/Demangling/Demangle.h" -#include "../../IRGen/IRGenModule.h" +#include "swift/Demangling/Demangler.h" #include "swift/SIL/ApplySite.h" #include "swift/SIL/DynamicCasts.h" +#include "swift/SIL/SILBridging.h" #include "swift/SIL/SILCloner.h" #include "swift/SIL/SILFunction.h" #include "swift/SIL/SILModule.h" #include "swift/SILOptimizer/Analysis/BasicCalleeAnalysis.h" #include "swift/SILOptimizer/Analysis/FunctionOrder.h" +#include "swift/SILOptimizer/IPO/ClosureSpecializer.h" #include "swift/SILOptimizer/OptimizerBridging.h" #include "swift/SILOptimizer/PassManager/PrettyStackTrace.h" #include "swift/SILOptimizer/PassManager/Transforms.h" -#include "swift/SILOptimizer/Utils/Devirtualize.h" +#include "swift/SILOptimizer/Utils/CFGOptUtils.h" #include "swift/SILOptimizer/Utils/ConstantFolding.h" +#include "swift/SILOptimizer/Utils/Devirtualize.h" +#include "swift/SILOptimizer/Utils/InstOptUtils.h" #include "swift/SILOptimizer/Utils/OptimizerStatsUtils.h" #include "swift/SILOptimizer/Utils/SILInliner.h" #include "swift/SILOptimizer/Utils/SILOptFunctionBuilder.h" @@ -41,7 +46,6 @@ #include "llvm/Support/Debug.h" #include "llvm/Support/GraphWriter.h" #include "llvm/Support/ManagedStatic.h" - #include using namespace swift; @@ -1582,6 +1586,26 @@ void SwiftPassInvocation::endVerifyFunction() { SwiftPassInvocation::~SwiftPassInvocation() {} +//===----------------------------------------------------------------------===// +// SIL 
Bridging +//===----------------------------------------------------------------------===// +bool BridgedFunction::mayBindDynamicSelf() const { + return swift::mayBindDynamicSelf(getFunction()); +} + +bool BridgedFunction::isTrapNoReturn() const { + return swift::isTrapNoReturnFunction(getFunction()); +} + +bool BridgedFunction::isAutodiffVJP() const { + return swift::isDifferentiableFuncComponent( + getFunction(), swift::AutoDiffFunctionComponent::VJP); +} + +SwiftInt BridgedFunction::specializationLevel() const { + return swift::getSpecializationLevel(getFunction()); +} + //===----------------------------------------------------------------------===// // OptimizerBridging //===----------------------------------------------------------------------===// @@ -1771,6 +1795,40 @@ BridgedOwnedString BridgedPassContext::mangleWithDeadArgs(const SwiftInt * _Null return Mangler.mangle(); } +BridgedOwnedString BridgedPassContext::mangleWithClosureArgs( + BridgedValueArray bridgedClosureArgs, + BridgedArrayRef bridgedClosureArgIndices, + BridgedFunction applySiteCallee +) const { + auto pass = Demangle::SpecializationPass::ClosureSpecializer; + auto isSerialized = applySiteCallee.getFunction()->isSerialized(); + Mangle::FunctionSignatureSpecializationMangler mangler( + pass, isSerialized, applySiteCallee.getFunction()); + + llvm::SmallVector closureArgsStorage; + auto closureArgs = bridgedClosureArgs.getValues(closureArgsStorage); + auto closureArgIndices = bridgedClosureArgIndices.unbridged(); + + assert(closureArgs.size() == closureArgIndices.size() && + "Number of closures arguments and number of closure indices do not match!"); + + for (size_t i = 0; i < closureArgs.size(); i++) { + auto closureArg = closureArgs[i]; + auto closureArgIndex = closureArgIndices[i]; + + if (auto *PAI = dyn_cast(closureArg)) { + mangler.setArgumentClosureProp(closureArgIndex, + const_cast(PAI)); + } else { + auto *TTTFI = cast(closureArg); + mangler.setArgumentClosureProp(closureArgIndex, + 
const_cast(TTTFI)); + } + } + + return mangler.mangle(); +} + BridgedGlobalVar BridgedPassContext::createGlobalVariable(BridgedStringRef name, BridgedType type, bool isPrivate) const { return {SILGlobalVariable::create( *invocation->getPassManager()->getModule(), @@ -2011,4 +2069,4 @@ void SILPassManager::runSwiftModuleVerification() { for (SILFunction &f : *Mod) { runSwiftFunctionVerification(&f); } -} +} \ No newline at end of file diff --git a/lib/SILOptimizer/PassManager/PassPipeline.cpp b/lib/SILOptimizer/PassManager/PassPipeline.cpp index a8460b1f9fb14..5fe28b088d4de 100644 --- a/lib/SILOptimizer/PassManager/PassPipeline.cpp +++ b/lib/SILOptimizer/PassManager/PassPipeline.cpp @@ -768,7 +768,11 @@ static void addMidLevelFunctionPipeline(SILPassPipelinePlan &P) { P.addCapturePropagation(); // Specialize closure. - P.addClosureSpecializer(); + if (P.getOptions().EnableExperimentalSwiftBasedClosureSpecialization) { + P.addExperimentalSwiftBasedClosureSpecialization(); + } else { + P.addClosureSpecializer(); + } // Do the second stack promotion on low-level SIL. 
P.addStackPromotion(); diff --git a/lib/SILOptimizer/Utils/CFGOptUtils.cpp b/lib/SILOptimizer/Utils/CFGOptUtils.cpp index 4d7c7811b548b..5537f04638093 100644 --- a/lib/SILOptimizer/Utils/CFGOptUtils.cpp +++ b/lib/SILOptimizer/Utils/CFGOptUtils.cpp @@ -505,7 +505,7 @@ bool swift::splitAllCondBrCriticalEdgesWithNonTrivialArgs( return true; } -static bool isSafeNonExitTerminator(TermInst *ti) { +bool isSafeNonExitTerminator(TermInst *ti) { switch (ti->getTermKind()) { case TermKind::BranchInst: case TermKind::CondBranchInst: @@ -534,14 +534,13 @@ static bool isSafeNonExitTerminator(TermInst *ti) { llvm_unreachable("Unhandled TermKind in switch."); } -static bool isTrapNoReturnFunction(ApplyInst *ai) { +bool swift::isTrapNoReturnFunction(SILFunction *f) { const char *fatalName = MANGLE_AS_STRING( MANGLE_SYM(s18_fatalErrorMessageyys12StaticStringV_AcCSutF)); - auto *fn = ai->getReferencedFunctionOrNull(); // We use ends_with here since if we specialize fatal error we will always // prepend the specialization records to fatalName. - if (!fn || !fn->getName().ends_with(fatalName)) + if (!f || !f->getName().ends_with(fatalName)) return false; return true; @@ -576,7 +575,8 @@ bool swift::findAllNonFailureExitBBs( // non-failure exit bb. Add it to our list and continue. 
auto prevIter = std::prev(SILBasicBlock::iterator(ti)); if (auto *ai = dyn_cast(&*prevIter)) { - if (ai->isCalleeNoReturn() && !isTrapNoReturnFunction(ai)) { + if (ai->isCalleeNoReturn() && + !isTrapNoReturnFunction(ai->getReferencedFunctionOrNull())) { bbs.push_back(&bb); continue; } diff --git a/lib/SILOptimizer/Utils/CanonicalizeOSSALifetime.cpp b/lib/SILOptimizer/Utils/CanonicalizeOSSALifetime.cpp index 2a2795a7b0462..9434e808bddc0 100644 --- a/lib/SILOptimizer/Utils/CanonicalizeOSSALifetime.cpp +++ b/lib/SILOptimizer/Utils/CanonicalizeOSSALifetime.cpp @@ -274,7 +274,8 @@ void CanonicalizeOSSALifetime::extendLivenessToDeinitBarriers() { } OSSALifetimeCompletion::visitUnreachableLifetimeEnds( - getCurrentDef(), completeLiveness, [&](auto *unreachable) { + getCurrentDef(), OSSALifetimeCompletion::DoNotAllowLeaks, + completeLiveness, [&](auto *unreachable) { recordUnreachableLifetimeEnd(unreachable); unreachable->visitPriorInstructions([&](auto *inst) { liveness->extendToNonUse(inst); diff --git a/lib/Sema/CSApply.cpp b/lib/Sema/CSApply.cpp index 90bd3bf8f2778..57dd95f5ad75e 100644 --- a/lib/Sema/CSApply.cpp +++ b/lib/Sema/CSApply.cpp @@ -9370,35 +9370,6 @@ static std::optional applySolutionToForEachStmt( std::optional(SyntacticElementTarget)> rewriteTarget) { - // A special walker to record opened element environment for var decls in a - // for-each loop. 
- class Walker : public ASTWalker { - GenericEnvironment *Environment; - - public: - Walker(GenericEnvironment *Environment) { this->Environment = Environment; } - - PreWalkResult walkToStmtPre(Stmt *S) override { - if (isa(S)) { - return Action::SkipNode(S); - } - return Action::Continue(S); - } - - PreWalkAction walkToDeclPre(Decl *D) override { - if (auto *decl = dyn_cast(D)) { - decl->setOpenedElementEnvironment(Environment); - } - if (isa(D)) { - return Action::SkipNode(); - } - if (isa(D)) { - return Action::SkipNode(); - } - return Action::Continue(); - } - }; - auto &cs = solution.getConstraintSystem(); auto *sequenceExpr = stmt->getParsedSequence(); PackExpansionExpr *expansion = cast(sequenceExpr); @@ -9412,11 +9383,6 @@ static std::optional applySolutionToForEachStmt( // Simplify the pattern type of the pack expansion. info.patternType = solution.simplifyType(info.patternType); - // Record the opened element environment for the VarDecls inside the loop - Walker forEachWalker(expansion->getGenericEnvironment()); - stmt->getPattern()->walk(forEachWalker); - stmt->getBody()->walk(forEachWalker); - return info; } diff --git a/lib/Sema/TypeCheckConcurrency.cpp b/lib/Sema/TypeCheckConcurrency.cpp index 6ae5baa44dba0..32328bad0e170 100644 --- a/lib/Sema/TypeCheckConcurrency.cpp +++ b/lib/Sema/TypeCheckConcurrency.cpp @@ -4545,6 +4545,37 @@ getIsolationFromConformances(NominalTypeDecl *nominal) { return foundIsolation; } +/// Compute the isolation of a protocol +static std::optional +getIsolationFromInheritedProtocols(ProtocolDecl *protocol) { + std::optional foundIsolation; + for (auto inherited : protocol->getInheritedProtocols()) { + switch (auto protoIsolation = getActorIsolation(inherited)) { + case ActorIsolation::ActorInstance: + case ActorIsolation::Unspecified: + case ActorIsolation::Nonisolated: + case ActorIsolation::NonisolatedUnsafe: + break; + + case ActorIsolation::Erased: + llvm_unreachable("protocol cannot have erased isolation"); + + case 
ActorIsolation::GlobalActor: + if (!foundIsolation) { + foundIsolation = protoIsolation; + continue; + } + + if (*foundIsolation != protoIsolation) + return std::nullopt; + + break; + } + } + + return foundIsolation; +} + /// Compute the isolation of a nominal type from the property wrappers on /// any stored properties. static std::optional @@ -5236,6 +5267,17 @@ ActorIsolation ActorIsolationRequest::evaluate( if (auto inferred = inferredIsolation(*conformanceIsolation)) return inferred; + // For a protocol, inherit isolation from the directly-inherited + // protocols. + if (ctx.LangOpts.hasFeature(Feature::GlobalActorIsolatedTypesUsability)) { + if (auto proto = dyn_cast(nominal)) { + if (auto protoIsolation = getIsolationFromInheritedProtocols(proto)) { + if (auto inferred = inferredIsolation(*protoIsolation)) + return inferred; + } + } + } + // Before Swift 6: If the declaration is a nominal type and any property // wrappers on its stored properties require isolation, use that. if (auto wrapperIsolation = getIsolationFromWrappers(nominal)) { diff --git a/lib/Serialization/DeserializeSIL.cpp b/lib/Serialization/DeserializeSIL.cpp index 499c3b6558a5d..2298b9c1959f4 100644 --- a/lib/Serialization/DeserializeSIL.cpp +++ b/lib/Serialization/DeserializeSIL.cpp @@ -430,10 +430,11 @@ SILFunction *SILDeserializer::getFuncForReference(StringRef name, if (auto *decl = dyn_cast_or_null(dc->getAsDecl())) fnName = decl->getNameStr(); } - fn->getModule().getASTContext().Diags.diagnose( - fn->getLocation().getSourceLoc(), - diag::deserialize_function_type_mismatch, - fnName, fnType.getASTType(), type.getASTType()); + auto &diags = fn->getModule().getASTContext().Diags; + diags.diagnose(fn->getLocation().getSourceLoc(), + diag::deserialize_function_type_mismatch, + fnName, fnType.getASTType(), type.getASTType()); + diags.flushConsumers(); exit(1); } return fn; diff --git a/stdlib/cmake/modules/AddSwiftStdlib.cmake b/stdlib/cmake/modules/AddSwiftStdlib.cmake index 
8bef94ac1529a..61673dd54fde8 100644 --- a/stdlib/cmake/modules/AddSwiftStdlib.cmake +++ b/stdlib/cmake/modules/AddSwiftStdlib.cmake @@ -1752,6 +1752,9 @@ endfunction() # SWIFT_MODULE_DEPENDS_WASI # Swift modules this library depends on when built for WASI. # +# SWIFT_MODULE_DEPENDS_ANDROID +# Swift modules this library depends on when built for Android. +# # FRAMEWORK_DEPENDS # System frameworks this library depends on. # @@ -1875,6 +1878,7 @@ function(add_swift_target_library name) SWIFT_COMPILE_FLAGS_XROS SWIFT_COMPILE_FLAGS_LINUX SWIFT_MODULE_DEPENDS + SWIFT_MODULE_DEPENDS_ANDROID SWIFT_MODULE_DEPENDS_CYGWIN SWIFT_MODULE_DEPENDS_FREEBSD SWIFT_MODULE_DEPENDS_FREESTANDING @@ -2086,12 +2090,15 @@ function(add_swift_target_library name) elseif(sdk STREQUAL "OPENBSD") list(APPEND swiftlib_module_depends_flattened ${SWIFTLIB_SWIFT_MODULE_DEPENDS_OPENBSD}) - elseif(sdk STREQUAL "LINUX" OR sdk STREQUAL "ANDROID") + elseif(sdk STREQUAL "LINUX") list(APPEND swiftlib_module_depends_flattened ${SWIFTLIB_SWIFT_MODULE_DEPENDS_LINUX}) elseif(sdk STREQUAL "LINUX_STATIC") list(APPEND swiftlib_module_depends_flattened ${SWIFTLIB_SWIFT_MODULE_DEPENDS_LINUX_STATIC}) + elseif(sdk STREQUAL "ANDROID") + list(APPEND swiftlib_module_depends_flattened + ${SWIFTLIB_SWIFT_MODULE_DEPENDS_ANDROID}) elseif(sdk STREQUAL "CYGWIN") list(APPEND swiftlib_module_depends_flattened ${SWIFTLIB_SWIFT_MODULE_DEPENDS_CYGWIN}) @@ -2905,6 +2912,7 @@ function(add_swift_target_executable name) DEPENDS LINK_LIBRARIES SWIFT_MODULE_DEPENDS + SWIFT_MODULE_DEPENDS_ANDROID SWIFT_MODULE_DEPENDS_CYGWIN SWIFT_MODULE_DEPENDS_FREEBSD SWIFT_MODULE_DEPENDS_FREESTANDING @@ -3015,12 +3023,15 @@ function(add_swift_target_executable name) elseif(sdk STREQUAL "OPENBSD") list(APPEND swiftexe_module_depends_flattened ${SWIFTEXE_TARGET_SWIFT_MODULE_DEPENDS_OPENBSD}) - elseif(sdk STREQUAL "LINUX" OR sdk STREQUAL "ANDROID") + elseif(sdk STREQUAL "LINUX") list(APPEND swiftexe_module_depends_flattened 
${SWIFTEXE_TARGET_SWIFT_MODULE_DEPENDS_LINUX}) elseif(sdk STREQUAL "LINUX_STATIC") list(APPEND swiftexe_module_depends_flattened ${SWIFTEXE_TARGET_SWIFT_MODULE_DEPENDS_LINUX_STATIC}) + elseif(sdk STREQUAL "ANDROID") + list(APPEND swiftexe_module_depends_flattened + ${SWIFTEXE_TARGET_SWIFT_MODULE_DEPENDS_ANDROID}) elseif(sdk STREQUAL "CYGWIN") list(APPEND swiftexe_module_depends_flattened ${SWIFTEXE_TARGET_SWIFT_MODULE_DEPENDS_CYGWIN}) diff --git a/stdlib/private/RuntimeUnittest/CMakeLists.txt b/stdlib/private/RuntimeUnittest/CMakeLists.txt index e23d3a155c869..992f90ad33d2a 100644 --- a/stdlib/private/RuntimeUnittest/CMakeLists.txt +++ b/stdlib/private/RuntimeUnittest/CMakeLists.txt @@ -8,6 +8,7 @@ add_swift_target_library(swiftRuntimeUnittest ${SWIFT_STDLIB_LIBRARY_BUILD_TYPES ExclusivityTests.cpp SWIFT_MODULE_DEPENDS StdlibUnittest + SWIFT_MODULE_DEPENDS_ANDROID Android SWIFT_MODULE_DEPENDS_LINUX Glibc SWIFT_MODULE_DEPENDS_FREEBSD Glibc SWIFT_MODULE_DEPENDS_OPENBSD Glibc diff --git a/stdlib/private/StdlibCollectionUnittest/CMakeLists.txt b/stdlib/private/StdlibCollectionUnittest/CMakeLists.txt index 12fc845a887ef..e063340a4e638 100644 --- a/stdlib/private/StdlibCollectionUnittest/CMakeLists.txt +++ b/stdlib/private/StdlibCollectionUnittest/CMakeLists.txt @@ -19,6 +19,7 @@ add_swift_target_library(swiftStdlibCollectionUnittest ${SWIFT_STDLIB_LIBRARY_BU WriteBackMutableSlice.swift SWIFT_MODULE_DEPENDS StdlibUnittest + SWIFT_MODULE_DEPENDS_ANDROID Android SWIFT_MODULE_DEPENDS_LINUX Glibc SWIFT_MODULE_DEPENDS_FREEBSD Glibc SWIFT_MODULE_DEPENDS_OPENBSD Glibc diff --git a/stdlib/private/StdlibUnicodeUnittest/CMakeLists.txt b/stdlib/private/StdlibUnicodeUnittest/CMakeLists.txt index 6914812465a54..44920a8f62fa8 100644 --- a/stdlib/private/StdlibUnicodeUnittest/CMakeLists.txt +++ b/stdlib/private/StdlibUnicodeUnittest/CMakeLists.txt @@ -10,6 +10,7 @@ add_swift_target_library(swiftStdlibUnicodeUnittest ${SWIFT_STDLIB_LIBRARY_BUILD WordBreaking.swift SWIFT_MODULE_DEPENDS 
StdlibUnittest + SWIFT_MODULE_DEPENDS_ANDROID Android SWIFT_MODULE_DEPENDS_LINUX Glibc SWIFT_MODULE_DEPENDS_FREEBSD Glibc SWIFT_MODULE_DEPENDS_OPENBSD Glibc diff --git a/stdlib/private/StdlibUnittest/CMakeLists.txt b/stdlib/private/StdlibUnittest/CMakeLists.txt index 711ee520376c0..dfcbb067d8ff7 100644 --- a/stdlib/private/StdlibUnittest/CMakeLists.txt +++ b/stdlib/private/StdlibUnittest/CMakeLists.txt @@ -62,6 +62,7 @@ add_swift_target_library(swiftStdlibUnittest ${SWIFT_STDLIB_LIBRARY_BUILD_TYPES} SWIFT_MODULE_DEPENDS_XROS ${swift_stdlib_unittest_darwin_dependencies} SWIFT_MODULE_DEPENDS_MACCATALYST ${swift_stdlib_unittest_darwin_dependencies} SWIFT_MODULE_DEPENDS_FREESTANDING "${SWIFT_FREESTANDING_TEST_DEPENDENCIES}" + SWIFT_MODULE_DEPENDS_ANDROID Android SWIFT_MODULE_DEPENDS_LINUX Glibc SWIFT_MODULE_DEPENDS_FREEBSD Glibc SWIFT_MODULE_DEPENDS_OPENBSD Glibc diff --git a/stdlib/private/StdlibUnittest/RaceTest.swift b/stdlib/private/StdlibUnittest/RaceTest.swift index d3aab9cd252fd..0a94d4da05ffb 100644 --- a/stdlib/private/StdlibUnittest/RaceTest.swift +++ b/stdlib/private/StdlibUnittest/RaceTest.swift @@ -45,6 +45,8 @@ import Darwin import Glibc #elseif canImport(Musl) import Musl +#elseif canImport(Android) +import Android #elseif os(WASI) import WASILibc #elseif os(Windows) diff --git a/stdlib/private/StdlibUnittest/StdlibCoreExtras.swift b/stdlib/private/StdlibUnittest/StdlibCoreExtras.swift index 9b5ef8be2e81a..3afef278d4cd5 100644 --- a/stdlib/private/StdlibUnittest/StdlibCoreExtras.swift +++ b/stdlib/private/StdlibUnittest/StdlibCoreExtras.swift @@ -18,6 +18,8 @@ import Darwin import Glibc #elseif canImport(Musl) import Musl +#elseif canImport(Android) +import Android #elseif os(WASI) import WASILibc #elseif os(Windows) diff --git a/stdlib/private/StdlibUnittest/StdlibUnittest.swift b/stdlib/private/StdlibUnittest/StdlibUnittest.swift index 11e60e1dbad41..55171a7114414 100644 --- a/stdlib/private/StdlibUnittest/StdlibUnittest.swift +++ 
b/stdlib/private/StdlibUnittest/StdlibUnittest.swift @@ -24,6 +24,8 @@ import Darwin import Glibc #elseif canImport(Musl) import Musl +#elseif canImport(Android) +import Android #elseif os(WASI) import WASILibc #elseif os(Windows) diff --git a/stdlib/private/SwiftPrivate/CMakeLists.txt b/stdlib/private/SwiftPrivate/CMakeLists.txt index 480781284ebb7..1e0bbfea54e13 100644 --- a/stdlib/private/SwiftPrivate/CMakeLists.txt +++ b/stdlib/private/SwiftPrivate/CMakeLists.txt @@ -27,6 +27,7 @@ add_swift_target_library(swiftSwiftPrivate ${SWIFT_STDLIB_LIBRARY_BUILD_TYPES} I SWIFT_MODULE_DEPENDS_WATCHOS ${swift_swiftprivate_darwin_depencencies} SWIFT_MODULE_DEPENDS_MACCATALYST ${swift_swiftprivate_darwin_depencencies} SWIFT_MODULE_DEPENDS_FREESTANDING "${SWIFT_FREESTANDING_TEST_DEPENDENCIES}" + SWIFT_MODULE_DEPENDS_ANDROID Android SWIFT_MODULE_DEPENDS_LINUX Glibc SWIFT_MODULE_DEPENDS_LINUX_STATIC Musl SWIFT_MODULE_DEPENDS_FREEBSD Glibc diff --git a/stdlib/private/SwiftPrivate/IO.swift b/stdlib/private/SwiftPrivate/IO.swift index 9e0f3c5ddfe75..e8bf2c659fa0d 100644 --- a/stdlib/private/SwiftPrivate/IO.swift +++ b/stdlib/private/SwiftPrivate/IO.swift @@ -23,6 +23,8 @@ import Darwin import Glibc #elseif canImport(Musl) import Musl +#elseif canImport(Android) +import Android #elseif canImport(WASILibc) import WASILibc #endif diff --git a/stdlib/private/SwiftPrivateLibcExtras/CMakeLists.txt b/stdlib/private/SwiftPrivateLibcExtras/CMakeLists.txt index db317dcdf6882..927eae491caa5 100644 --- a/stdlib/private/SwiftPrivateLibcExtras/CMakeLists.txt +++ b/stdlib/private/SwiftPrivateLibcExtras/CMakeLists.txt @@ -38,6 +38,7 @@ add_swift_target_library(swiftSwiftPrivateLibcExtras ${SWIFT_STDLIB_LIBRARY_BUIL SWIFT_MODULE_DEPENDS_XROS ${swift_private_libc_extras_darwin_depencencies} SWIFT_MODULE_DEPENDS_MACCATALYST ${swift_private_libc_extras_darwin_depencencies} SWIFT_MODULE_DEPENDS_FREESTANDING "${SWIFT_FREESTANDING_TEST_DEPENDENCIES}" + SWIFT_MODULE_DEPENDS_ANDROID Android 
SWIFT_MODULE_DEPENDS_LINUX Glibc SWIFT_MODULE_DEPENDS_LINUX_STATIC Musl SWIFT_MODULE_DEPENDS_FREEBSD Glibc diff --git a/stdlib/private/SwiftPrivateLibcExtras/Subprocess.swift b/stdlib/private/SwiftPrivateLibcExtras/Subprocess.swift index 47e8972a276c2..ad15200ac7518 100644 --- a/stdlib/private/SwiftPrivateLibcExtras/Subprocess.swift +++ b/stdlib/private/SwiftPrivateLibcExtras/Subprocess.swift @@ -17,6 +17,8 @@ import Darwin import Glibc #elseif canImport(Musl) import Musl +#elseif canImport(Android) +import Android #elseif os(WASI) import WASILibc #elseif os(Windows) diff --git a/stdlib/private/SwiftPrivateLibcExtras/SwiftPrivateLibcExtras.swift b/stdlib/private/SwiftPrivateLibcExtras/SwiftPrivateLibcExtras.swift index 146cc404931e2..27ee869f70609 100644 --- a/stdlib/private/SwiftPrivateLibcExtras/SwiftPrivateLibcExtras.swift +++ b/stdlib/private/SwiftPrivateLibcExtras/SwiftPrivateLibcExtras.swift @@ -17,6 +17,8 @@ import Darwin import Glibc #elseif canImport(Musl) import Musl +#elseif canImport(Android) +import Android #elseif os(WASI) import WASILibc #elseif os(Windows) diff --git a/stdlib/private/SwiftPrivateThreadExtras/CMakeLists.txt b/stdlib/private/SwiftPrivateThreadExtras/CMakeLists.txt index d0a791ad8a47f..4002af837db10 100644 --- a/stdlib/private/SwiftPrivateThreadExtras/CMakeLists.txt +++ b/stdlib/private/SwiftPrivateThreadExtras/CMakeLists.txt @@ -19,6 +19,7 @@ add_swift_target_library(swiftSwiftPrivateThreadExtras ${SWIFT_STDLIB_LIBRARY_BU SWIFT_MODULE_DEPENDS_XROS ${swift_private_thread_extras_darwin_depencencies} SWIFT_MODULE_DEPENDS_MACCATALYST ${swift_private_thread_extras_darwin_depencencies} SWIFT_MODULE_DEPENDS_FREESTANDING "${SWIFT_FREESTANDING_TEST_DEPENDENCIES}" + SWIFT_MODULE_DEPENDS_ANDROID Android SWIFT_MODULE_DEPENDS_LINUX Glibc SWIFT_MODULE_DEPENDS_LINUX_STATIC Musl SWIFT_MODULE_DEPENDS_FREEBSD Glibc diff --git a/stdlib/private/SwiftPrivateThreadExtras/SwiftPrivateThreadExtras.swift 
b/stdlib/private/SwiftPrivateThreadExtras/SwiftPrivateThreadExtras.swift index 20d5cdf214087..4ddf17a803136 100644 --- a/stdlib/private/SwiftPrivateThreadExtras/SwiftPrivateThreadExtras.swift +++ b/stdlib/private/SwiftPrivateThreadExtras/SwiftPrivateThreadExtras.swift @@ -21,6 +21,8 @@ import Darwin import Glibc #elseif canImport(Musl) import Musl +#elseif canImport(Android) +import Android #elseif os(WASI) import WASILibc #elseif os(Windows) diff --git a/stdlib/private/SwiftPrivateThreadExtras/ThreadBarriers.swift b/stdlib/private/SwiftPrivateThreadExtras/ThreadBarriers.swift index 97a5439d9ed4d..e7c43bf02567f 100644 --- a/stdlib/private/SwiftPrivateThreadExtras/ThreadBarriers.swift +++ b/stdlib/private/SwiftPrivateThreadExtras/ThreadBarriers.swift @@ -16,6 +16,8 @@ import Darwin import Glibc #elseif canImport(Musl) import Musl +#elseif canImport(Android) +import Android #elseif os(WASI) import WASILibc #elseif os(Windows) diff --git a/stdlib/private/SwiftReflectionTest/CMakeLists.txt b/stdlib/private/SwiftReflectionTest/CMakeLists.txt index f28b657be7d30..8ad688393765e 100644 --- a/stdlib/private/SwiftReflectionTest/CMakeLists.txt +++ b/stdlib/private/SwiftReflectionTest/CMakeLists.txt @@ -14,6 +14,7 @@ if (SWIFT_INCLUDE_TESTS AND SWIFT_BUILD_DYNAMIC_STDLIB) SWIFT_MODULE_DEPENDS_TVOS ${swift_reflection_test_darwin_depencencies} SWIFT_MODULE_DEPENDS_WATCHOS ${swift_reflection_test_darwin_depencencies} SWIFT_MODULE_DEPENDS_XROS ${swift_reflection_test_darwin_depencencies} + SWIFT_MODULE_DEPENDS_ANDROID Android SWIFT_MODULE_DEPENDS_LINUX Glibc SWIFT_MODULE_DEPENDS_LINUX_STATIC Musl SWIFT_MODULE_DEPENDS_FREEBSD Glibc diff --git a/stdlib/private/SwiftReflectionTest/SwiftReflectionTest.swift b/stdlib/private/SwiftReflectionTest/SwiftReflectionTest.swift index 44175a187da1b..041206cd4244e 100644 --- a/stdlib/private/SwiftReflectionTest/SwiftReflectionTest.swift +++ b/stdlib/private/SwiftReflectionTest/SwiftReflectionTest.swift @@ -131,6 +131,8 @@ import SwiftShims 
import Glibc #elseif canImport(Musl) import Musl +#elseif canImport(Android) +import Android #endif let rtldDefault: UnsafeMutableRawPointer? = nil diff --git a/stdlib/public/Concurrency/Actor.cpp b/stdlib/public/Concurrency/Actor.cpp index a39b343f00000..abb8b948be036 100644 --- a/stdlib/public/Concurrency/Actor.cpp +++ b/stdlib/public/Concurrency/Actor.cpp @@ -30,6 +30,7 @@ #include "swift/Runtime/Bincompat.h" #include "swift/Runtime/Casting.h" #include "swift/Runtime/DispatchShims.h" +#include "swift/Runtime/EnvironmentVariables.h" #include "swift/Threading/Mutex.h" #include "swift/Threading/Once.h" #include "swift/Threading/Thread.h" @@ -351,15 +352,16 @@ static void checkIsCurrentExecutorMode(void *context) { // Potentially, override the platform detected mode, primarily used in tests. #if SWIFT_STDLIB_HAS_ENVIRON - const char *modeStr = getenv("SWIFT_IS_CURRENT_EXECUTOR_LEGACY_MODE_OVERRIDE"); - if (!modeStr) - return; - - if (strcmp(modeStr, "nocrash") == 0) { - useLegacyMode = Legacy_NoCheckIsolated_NonCrashing; - } else if (strcmp(modeStr, "crash") == 0) { - useLegacyMode = Default_UseCheckIsolated_AllowCrash; - } // else, just use the platform detected mode + if (const char *modeStr = + runtime::environment::concurrencyIsCurrentExecutorLegacyModeOverride()) { + if (modeStr) { + if (strcmp(modeStr, "nocrash") == 0) { + useLegacyMode = true; + } else if (strcmp(modeStr, "crash") == 0) { + useLegacyMode = false; + } // else, just use the platform detected mode + } + } #endif // SWIFT_STDLIB_HAS_ENVIRON isCurrentExecutorMode = useLegacyMode ? 
Legacy_NoCheckIsolated_NonCrashing : Default_UseCheckIsolated_AllowCrash; diff --git a/stdlib/public/Concurrency/CMakeLists.txt b/stdlib/public/Concurrency/CMakeLists.txt index d4831ef988aef..2582041ad8b6f 100644 --- a/stdlib/public/Concurrency/CMakeLists.txt +++ b/stdlib/public/Concurrency/CMakeLists.txt @@ -156,6 +156,7 @@ add_swift_target_library(swift_Concurrency ${SWIFT_STDLIB_LIBRARY_BUILD_TYPES} I ${SWIFT_RUNTIME_CONCURRENCY_C_SOURCES} ${SWIFT_RUNTIME_CONCURRENCY_SWIFT_SOURCES} + SWIFT_MODULE_DEPENDS_ANDROID Android SWIFT_MODULE_DEPENDS_LINUX Glibc SWIFT_MODULE_DEPENDS_FREEBSD Glibc SWIFT_MODULE_DEPENDS_OPENBSD Glibc diff --git a/stdlib/public/Differentiation/CMakeLists.txt b/stdlib/public/Differentiation/CMakeLists.txt index 64e0ec3ee15e8..16a296bbbe772 100644 --- a/stdlib/public/Differentiation/CMakeLists.txt +++ b/stdlib/public/Differentiation/CMakeLists.txt @@ -42,6 +42,7 @@ add_swift_target_library(swift_Differentiation ${SWIFT_STDLIB_LIBRARY_BUILD_TYPE SWIFT_MODULE_DEPENDS_TVOS ${swiftDifferentiationDarwinDependencies} SWIFT_MODULE_DEPENDS_WATCHOS ${swiftDifferentiationDarwinDependencies} SWIFT_MODULE_DEPENDS_XROS ${swiftDifferentiationDarwinDependencies} + SWIFT_MODULE_DEPENDS_ANDROID Android SWIFT_MODULE_DEPENDS_LINUX Glibc SWIFT_MODULE_DEPENDS_LINUX_STATIC Musl SWIFT_MODULE_DEPENDS_FREEBSD Glibc diff --git a/stdlib/public/Differentiation/TgmathDerivatives.swift.gyb b/stdlib/public/Differentiation/TgmathDerivatives.swift.gyb index a517dade14502..ce28c9c5d079f 100644 --- a/stdlib/public/Differentiation/TgmathDerivatives.swift.gyb +++ b/stdlib/public/Differentiation/TgmathDerivatives.swift.gyb @@ -18,12 +18,14 @@ import Swift import Darwin.C.tgmath #elseif canImport(Musl) import Musl -#elseif os(Linux) || os(FreeBSD) || os(OpenBSD) || os(PS4) || os(Android) || os(Cygwin) || os(Haiku) +#elseif os(Linux) || os(FreeBSD) || os(OpenBSD) || os(PS4) || os(Cygwin) || os(Haiku) import Glibc #elseif os(WASI) import WASILibc #elseif os(Windows) import CRT 
+#elseif os(Android) + import Android #else #error("Unsupported platform") #endif diff --git a/stdlib/public/Distributed/CMakeLists.txt b/stdlib/public/Distributed/CMakeLists.txt index 3a0dcb21d9832..5dbea7d12f822 100644 --- a/stdlib/public/Distributed/CMakeLists.txt +++ b/stdlib/public/Distributed/CMakeLists.txt @@ -34,6 +34,7 @@ add_swift_target_library(swiftDistributed ${SWIFT_STDLIB_LIBRARY_BUILD_TYPES} IS SWIFT_MODULE_DEPENDS_OSX ${swift_distributed_darwin_depencencies} SWIFT_MODULE_DEPENDS_TVOS ${swift_distributed_darwin_depencencies} SWIFT_MODULE_DEPENDS_WATCHOS ${swift_distributed_darwin_depencencies} + SWIFT_MODULE_DEPENDS_ANDROID Android SWIFT_MODULE_DEPENDS_LINUX Glibc SWIFT_MODULE_DEPENDS_FREEBSD Glibc SWIFT_MODULE_DEPENDS_OPENBSD Glibc diff --git a/stdlib/public/Distributed/LocalTestingDistributedActorSystem.swift b/stdlib/public/Distributed/LocalTestingDistributedActorSystem.swift index 47e31d9019800..33c208bff38ea 100644 --- a/stdlib/public/Distributed/LocalTestingDistributedActorSystem.swift +++ b/stdlib/public/Distributed/LocalTestingDistributedActorSystem.swift @@ -18,6 +18,8 @@ import Darwin import Glibc #elseif canImport(Musl) import Musl +#elseif canImport(Android) +import Android #elseif os(Windows) import WinSDK #endif diff --git a/stdlib/public/Platform/Android.swift b/stdlib/public/Platform/Android.swift new file mode 100644 index 0000000000000..32f71ebdd9b08 --- /dev/null +++ b/stdlib/public/Platform/Android.swift @@ -0,0 +1,88 @@ +//===----------------------------------------------------------------------===// +// +// This source file is part of the Swift.org open source project +// +// Copyright (c) 2014 - 2017 Apple Inc. 
and the Swift project authors +// Licensed under Apache License v2.0 with Runtime Library Exception +// +// See https://swift.org/LICENSE.txt for license information +// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors +// +//===----------------------------------------------------------------------===// + +@_exported import SwiftAndroid // Clang module + +@available(swift, deprecated: 3.0, message: "Please use 'Double.pi' or '.pi' to get the value of correct type and avoid casting.") +public let M_PI = Double.pi +@available(swift, deprecated: 3.0, message: "Please use 'Double.pi / 2' or '.pi / 2' to get the value of correct type and avoid casting.") +public let M_PI_2 = Double.pi / 2 +@available(swift, deprecated: 3.0, message: "Please use 'Double.pi / 4' or '.pi / 4' to get the value of correct type and avoid casting.") +public let M_PI_4 = Double.pi / 4 + +@available(swift, deprecated: 3.0, message: "Please use '2.squareRoot()'.") +public let M_SQRT2 = 2.squareRoot() + +@available(swift, deprecated: 3.0, message: "Please use '0.5.squareRoot()'.") +public let M_SQRT1_2 = 0.5.squareRoot() + +@available(swift, deprecated: 3.0, message: "Please use 'T.radix' to get the radix of a FloatingPoint type 'T'.") +public let FLT_RADIX = Double.radix + +// Where does the 1 come from? C counts the usually-implicit leading +// significand bit, but Swift does not. Neither is really right or wrong. +@available(swift, deprecated: 3.0, message: "Please use 'Float.significandBitCount + 1'.") +public let FLT_MANT_DIG = Float.significandBitCount + 1 + +// Where does the 1 come from? C models floating-point numbers as having a +// significand in [0.5, 1), but Swift (following IEEE 754) considers the +// significand to be in [1, 2). This rationale applies to FLT_MIN_EXP +// as well. 
+@available(swift, deprecated: 3.0, message: "Please use 'Float.greatestFiniteMagnitude.exponent + 1'.") +public let FLT_MAX_EXP = Float.greatestFiniteMagnitude.exponent + 1 + +@available(swift, deprecated: 3.0, message: "Please use 'Float.leastNormalMagnitude.exponent + 1'.") +public let FLT_MIN_EXP = Float.leastNormalMagnitude.exponent + 1 + +@available(swift, deprecated: 3.0, message: "Please use 'Float.greatestFiniteMagnitude' or '.greatestFiniteMagnitude'.") +public let FLT_MAX = Float.greatestFiniteMagnitude + +@available(swift, deprecated: 3.0, message: "Please use 'Float.ulpOfOne' or '.ulpOfOne'.") +public let FLT_EPSILON = Float.ulpOfOne + +@available(swift, deprecated: 3.0, message: "Please use 'Float.leastNormalMagnitude' or '.leastNormalMagnitude'.") +public let FLT_MIN = Float.leastNormalMagnitude + +@available(swift, deprecated: 3.0, message: "Please use 'Float.leastNonzeroMagnitude' or '.leastNonzeroMagnitude'.") +public let FLT_TRUE_MIN = Float.leastNonzeroMagnitude + + +// Where does the 1 come from? C counts the usually-implicit leading +// significand bit, but Swift does not. Neither is really right or wrong. +@available(swift, deprecated: 3.0, message: "Please use 'Double.significandBitCount + 1'.") +public let DBL_MANT_DIG = Double.significandBitCount + 1 + +// Where does the 1 come from? C models floating-point numbers as having a +// significand in [0.5, 1), but Swift (following IEEE 754) considers the +// significand to be in [1, 2). This rationale applies to DBL_MIN_EXP +// as well. 
+@available(swift, deprecated: 3.0, message: "Please use 'Double.greatestFiniteMagnitude.exponent + 1'.") +public let DBL_MAX_EXP = Double.greatestFiniteMagnitude.exponent + 1 + +@available(swift, deprecated: 3.0, message: "Please use 'Double.leastNormalMagnitude.exponent + 1'.") +public let DBL_MIN_EXP = Double.leastNormalMagnitude.exponent + 1 + +@available(swift, deprecated: 3.0, message: "Please use 'Double.greatestFiniteMagnitude' or '.greatestFiniteMagnitude'.") +public let DBL_MAX = Double.greatestFiniteMagnitude + +@available(swift, deprecated: 3.0, message: "Please use 'Double.ulpOfOne' or '.ulpOfOne'.") +public let DBL_EPSILON = Double.ulpOfOne + +@available(swift, deprecated: 3.0, message: "Please use 'Double.leastNormalMagnitude' or '.leastNormalMagnitude'.") +public let DBL_MIN = Double.leastNormalMagnitude + +@available(swift, deprecated: 3.0, message: "Please use 'Double.leastNonzeroMagnitude' or '.leastNonzeroMagnitude'.") +public let DBL_TRUE_MIN = Double.leastNonzeroMagnitude + +public let M_LN2 = SwiftAndroid.M_LN2 +public let M_LOG10E = SwiftAndroid.M_LOG10E +public let M_2_SQRTPI = SwiftAndroid.M_2_SQRTPI diff --git a/stdlib/public/Platform/CMakeLists.txt b/stdlib/public/Platform/CMakeLists.txt index 31419b092c057..23ddb0a20857c 100644 --- a/stdlib/public/Platform/CMakeLists.txt +++ b/stdlib/public/Platform/CMakeLists.txt @@ -127,9 +127,9 @@ if(SWIFT_SHOULD_BUILD_EMBEDDED_STDLIB) endforeach() endif() -set(swiftGlibc_target_sdks ANDROID CYGWIN FREEBSD OPENBSD LINUX HAIKU) +set(swiftGlibc_target_sdks CYGWIN FREEBSD OPENBSD LINUX HAIKU) if(SWIFT_FREESTANDING_FLAVOR STREQUAL "linux") - set(swiftGlibc_target_sdks ANDROID CYGWIN FREEBSD OPENBSD LINUX HAIKU FREESTANDING) + set(swiftGlibc_target_sdks CYGWIN FREEBSD OPENBSD LINUX HAIKU FREESTANDING) endif() add_swift_target_library(swiftGlibc ${SWIFT_STDLIB_LIBRARY_BUILD_TYPES} IS_SDK_OVERLAY ${swift_platform_sources} @@ -273,12 +273,28 @@ add_custom_target(musl_modulemap DEPENDS 
${musl_modulemap_target_list}) set_property(TARGET musl_modulemap PROPERTY FOLDER "Miscellaneous") add_dependencies(sdk-overlay musl_modulemap) +add_swift_target_library(swiftAndroid ${SWIFT_STDLIB_LIBRARY_BUILD_TYPES} IS_SDK_OVERLAY + Android.swift + ${swift_platform_sources} + POSIXError.swift + + GYB_SOURCES + ${swift_platform_gyb_sources} + + SWIFT_COMPILE_FLAGS + ${SWIFT_RUNTIME_SWIFT_COMPILE_FLAGS} + ${SWIFT_STANDARD_LIBRARY_SWIFT_FLAGS} + ${swift_platform_compile_flags} + LINK_FLAGS "${SWIFT_RUNTIME_SWIFT_LINK_FLAGS}" + TARGET_SDKS "ANDROID" + INSTALL_IN_COMPONENT sdk-overlay + DEPENDS android_modulemap) + set(glibc_modulemap_target_list) foreach(sdk ${SWIFT_SDKS}) if(NOT "${sdk}" STREQUAL "LINUX" AND NOT "${sdk}" STREQUAL "FREEBSD" AND NOT "${sdk}" STREQUAL "OPENBSD" AND - NOT "${sdk}" STREQUAL "ANDROID" AND NOT "${sdk}" STREQUAL "CYGWIN" AND NOT "${sdk}" STREQUAL "HAIKU") continue() @@ -376,6 +392,79 @@ add_custom_target(glibc_modulemap DEPENDS ${glibc_modulemap_target_list}) set_property(TARGET glibc_modulemap PROPERTY FOLDER "Miscellaneous") add_dependencies(sdk-overlay glibc_modulemap) +set(android_modulemap_target_list) +if("ANDROID" IN_LIST SWIFT_SDKS) + set(android_modulemap_source "android.modulemap") + set(android_ndk_header_source "SwiftAndroidNDK.h") + set(android_bionic_header_source "SwiftBionic.h") + + foreach(arch ${SWIFT_SDK_ANDROID_ARCHITECTURES}) + set(arch_subdir "${SWIFT_SDK_ANDROID_LIB_SUBDIR}/${arch}") + set(module_dir "${SWIFTLIB_DIR}/${arch_subdir}") + set(module_dir_static "${SWIFTSTATICLIB_DIR}/${arch_subdir}") + + add_custom_command_target( + copy_android_modulemap_resource + COMMAND + "${CMAKE_COMMAND}" "-E" "make_directory" ${module_dir} ${module_dir_static} + COMMAND + "${CMAKE_COMMAND}" "-E" "copy_if_different" + "${CMAKE_CURRENT_SOURCE_DIR}/${android_modulemap_source}" ${module_dir} + COMMAND + "${CMAKE_COMMAND}" "-E" "copy_if_different" + "${CMAKE_CURRENT_SOURCE_DIR}/${android_modulemap_source}" ${module_dir_static} + OUTPUT 
${module_dir}/${android_modulemap_source} ${module_dir_static}/${android_modulemap_source} + COMMENT "Copying Android modulemap to resource directories") + add_custom_command_target( + copy_android_ndk_header_resource + COMMAND + "${CMAKE_COMMAND}" "-E" "make_directory" ${module_dir} ${module_dir_static} + COMMAND + "${CMAKE_COMMAND}" "-E" "copy_if_different" + "${CMAKE_CURRENT_SOURCE_DIR}/${android_ndk_header_source}" ${module_dir} + COMMAND + "${CMAKE_COMMAND}" "-E" "copy_if_different" + "${CMAKE_CURRENT_SOURCE_DIR}/${android_ndk_header_source}" ${module_dir_static} + OUTPUT ${module_dir}/${android_ndk_header_source} ${module_dir_static}/${android_ndk_header_source} + COMMENT "Copying Android NDK header to resource directories") + add_custom_command_target( + copy_android_bionic_header_resource + COMMAND + "${CMAKE_COMMAND}" "-E" "make_directory" ${module_dir} ${module_dir_static} + COMMAND + "${CMAKE_COMMAND}" "-E" "copy_if_different" + "${CMAKE_CURRENT_SOURCE_DIR}/${android_bionic_header_source}" ${module_dir} + COMMAND + "${CMAKE_COMMAND}" "-E" "copy_if_different" + "${CMAKE_CURRENT_SOURCE_DIR}/${android_bionic_header_source}" ${module_dir_static} + OUTPUT ${module_dir}/${android_bionic_header_source} ${module_dir_static}/${android_bionic_header_source} + COMMENT "Copying Android Bionic header to resource directories") + + add_dependencies(sdk-overlay ${copy_android_modulemap_resource} + ${copy_android_ndk_header_resource} + ${copy_android_bionic_header_resource}) + list(APPEND android_modulemap_target_list ${copy_android_modulemap_resource} + ${copy_android_ndk_header_resource} + ${copy_android_bionic_header_resource}) + + swift_install_in_component(FILES "${android_modulemap_source}" + "${android_ndk_header_source}" + "${android_bionic_header_source}" + DESTINATION "lib/swift/${arch_subdir}" + COMPONENT sdk-overlay) + if(SWIFT_BUILD_STATIC_STDLIB) + swift_install_in_component(FILES "${android_modulemap_source}" + "${android_ndk_header_source}" + 
"${android_bionic_header_source}" + DESTINATION "lib/swift_static/${arch_subdir}" + COMPONENT sdk-overlay) + endif() + endforeach() +endif() +add_custom_target(android_modulemap DEPENDS ${android_modulemap_target_list}) +set_property(TARGET android_modulemap PROPERTY FOLDER "Miscellaneous") +add_dependencies(sdk-overlay android_modulemap) + set(wasilibc_modulemap_target_list) if("WASI" IN_LIST SWIFT_SDKS) set(wasilibc_modulemap_source "wasi-libc.modulemap") diff --git a/stdlib/public/Platform/Platform.swift b/stdlib/public/Platform/Platform.swift index 0c2d63a0cffdb..b6316445ee573 100644 --- a/stdlib/public/Platform/Platform.swift +++ b/stdlib/public/Platform/Platform.swift @@ -241,7 +241,7 @@ public var S_IFIFO: Int32 { return Int32(0x1000) } public var S_IREAD: Int32 { return Int32(0x0100) } public var S_IWRITE: Int32 { return Int32(0x0080) } public var S_IEXEC: Int32 { return Int32(0x0040) } -#else +#elseif !os(Android) public var S_IFMT: mode_t { return mode_t(0o170000) } public var S_IFIFO: mode_t { return mode_t(0o010000) } public var S_IFCHR: mode_t { return mode_t(0o020000) } diff --git a/stdlib/public/Platform/SwiftAndroidNDK.h b/stdlib/public/Platform/SwiftAndroidNDK.h new file mode 100644 index 0000000000000..4aae88ba41bb5 --- /dev/null +++ b/stdlib/public/Platform/SwiftAndroidNDK.h @@ -0,0 +1,150 @@ +//===--- SwiftAndroidNDK.h ------------------------------------------------===// +// +// This source file is part of the Swift.org open source project +// +// Copyright (c) 2014 - 2024 Apple Inc. 
and the Swift project authors +// Licensed under Apache License v2.0 with Runtime Library Exception +// +// See https://swift.org/LICENSE.txt for license information +// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors +// +//===----------------------------------------------------------------------===// + +#ifndef SWIFT_ANDROID_NDK_MODULE +#define SWIFT_ANDROID_NDK_MODULE + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +// C headers that are included with the compiler. +#include +#include +#include +#include +#include +#include + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include +#include +#include +#include +#include + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include +#include +#include +#include +#include +#include + +#include + +#endif // SWIFT_ANDROID_NDK_MODULE diff --git a/stdlib/public/Platform/SwiftBionic.h b/stdlib/public/Platform/SwiftBionic.h new file mode 100644 index 0000000000000..b3e173030f18b --- /dev/null +++ b/stdlib/public/Platform/SwiftBionic.h @@ -0,0 +1,38 @@ +//===--- 
SwiftBionic.h ----------------------------------------------------===// +// +// This source file is part of the Swift.org open source project +// +// Copyright (c) 2014 - 2024 Apple Inc. and the Swift project authors +// Licensed under Apache License v2.0 with Runtime Library Exception +// +// See https://swift.org/LICENSE.txt for license information +// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors +// +//===----------------------------------------------------------------------===// + +#ifndef SWIFT_BIONIC_MODULE +#define SWIFT_BIONIC_MODULE + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#endif // SWIFT_BIONIC_MODULE diff --git a/stdlib/public/Platform/android.modulemap b/stdlib/public/Platform/android.modulemap new file mode 100644 index 0000000000000..9a47b4cfca816 --- /dev/null +++ b/stdlib/public/Platform/android.modulemap @@ -0,0 +1,677 @@ +//===--- android.modulemap ------------------------------------------------===// +// +// This source file is part of the Swift.org open source project +// +// Copyright (c) 2014 - 2024 Apple Inc. and the Swift project authors +// Licensed under Apache License v2.0 with Runtime Library Exception +// +// See https://swift.org/LICENSE.txt for license information +// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors +// +//===----------------------------------------------------------------------===// +// The module map for the Android NDK. +// A portion of the modules in this file are prefixed +// with an underscore, to discourage users from importing them from +// Swift directly, as these modules might be overriden by the C++ stdlib. +// Instead, users should import 'Android' or another +// umbrella module that includes these headers. + +// The top level 'Android' module can be included from Swift. 
+module SwiftAndroid [system] { + header "SwiftAndroidNDK.h" + export * +} + +// The top level 'Bionic' module is the C standard library +// used in the Android NDK. +module Bionic [system] { + header "SwiftBionic.h" + export * +} + +// The individual modules for the Bionic C standard library used +// by the Android NDK. +module _assert [system] { + // 's use of NDEBUG requires textual inclusion. + textual header "assert.h" +} +module _complex [system] { + header "complex.h" + export * +} +module _ctype [system] { + header "ctype.h" + export * +} +module _errno [system] { + header "errno.h" + export * +} +module _fenv [system] { + header "fenv.h" + export * +} +// Use 'no_undeclared_includes' to avoid pulling in the libc++ headers. +// FIXME: this can be dropped once NDK's libc++ 'std' module is split +// into individual submodules. +module _inttypes [system] [no_undeclared_includes] { + header "inttypes.h" + export * + use _stdint + use _sys_cdefs +} +module _limits [system] { + header "limits.h" + export * + explicit module posix_limits { + header "bits/posix_limits.h" + export * + } +} +module _locale [system] { + header "locale.h" + export * +} +module _malloc [system] { + header "malloc.h" + export * +} +module _math [system] { + header "math.h" + export * + link "m" +} +module _setjmp [system] { + header "setjmp.h" + export * +} +module _signal [system] { + header "signal.h" + // The 'signal.h' header unfortunately has a circular include + // with 'sys/ucontext.h' and thus it must be part of this module. 
+ header "sys/ucontext.h" + export * + explicit module bits_signaltypes { + header "bits/signal_types.h" + export * + } +} +module _stdatomic [system] { + header "stdatomic.h" + export * +} +module _stdint [system] { + header "stdint.h" + export * +} +module _stdio [system] { + header "stdio.h" + export * + explicit module stdio_ext { + header "stdio_ext.h" + export * + } +} +module _stdlib [system] { + header "stdlib.h" + export * +} +module _string [system] { + header "string.h" + export * +} +module _threads [system] { + header "threads.h" + export * + explicit module threads_inlines { + header "bits/threads_inlines.h" + export * + } +} +module _time [system] { + header "time.h" + export * + explicit module sys_time { + header "sys/time.h" + export * + } + explicit module sys_times { + header "sys/times.h" + export * + } +} +module _uchar [system] { + header "uchar.h" + export * +} +module _wchar [system] { + header "wchar.h" + export * + explicit module mbstate_t { + header "bits/mbstate_t.h" + export * + } +} + +// POSIX and another android NDK headers. +module alloca [system] { + header "alloca.h" + export * +} + +module ar [system] { + header "ar.h" + export * +} + +module cpio [system] { + header "cpio.h" + export * +} + +module posix_filesystem [system] { + // This module groups all file, paths and filesystem + // operations into one module. 
+ explicit module dirent { + header "dirent.h" + export * + } + explicit module fcntl { + header "fcntl.h" + export * + } + explicit module fnmatch { + header "fnmatch.h" + export * + } + explicit module fts { + header "fts.h" + export * + } + explicit module ftw { + header "ftw.h" + export * + } + explicit module glob { + header "glob.h" + export * + } + explicit module mntent { + header "mntent.h" + export * + } + explicit module libgen { + header "libgen.h" + export * + } + explicit module nl_types { + header "nl_types.h" + export * + } + explicit module paths { + header "paths.h" + export * + } + explicit module poll { + header "poll.h" + export * + } + explicit module pwd { + header "pwd.h" + export * + } + explicit module utime { + header "utime.h" + export * + } + explicit module bits_ioctl { + header "bits/ioctl.h" + export * + } + explicit module sys_epoll { + header "sys/epoll.h" + export * + } + explicit module sys_eventfd { + header "sys/eventfd.h" + export * + } + explicit module sys_fcntl { + header "sys/fcntl.h" + export * + } + explicit module sys_file { + header "sys/file.h" + export * + } + explicit module sys_inotify { + header "sys/inotify.h" + export * + } + explicit module sys_ioctl { + header "sys/ioctl.h" + export * + } + explicit module sys_mount { + header "sys/mount.h" + export * + } + explicit module sys_sendfile { + header "sys/sendfile.h" + export * + } + explicit module sys_stat { + header "sys/stat.h" + export * + } + explicit module sys_statvfs { + header "sys/statvfs.h" + export * + } + explicit module sys_vfs { + header "sys/vfs.h" + export * + } + explicit module sys_uio { + header "sys/uio.h" + export * + } +} + +module dl [system] { + // This module groups all dl* based operations + // into one module. 
+ explicit module dlfcn { + header "dlfcn.h" + export * + } + explicit module link_ { + header "link.h" + export * + } +} + +module error [system] { + header "error.h" + export * + explicit module err { + header "err.h" + export * + } +} + +module execinfo [system] { + header "execinfo.h" + export * +} + +module features [system] { + header "features.h" + export * +} + +module getopt [system] { + header "getopt.h" + export * +} + +module grp [system] { + header "grp.h" + export * +} + +module iconv [system] { + header "iconv.h" + export * +} + +module inet [system] { + // This module groups headers related to inet + // and networking. + explicit module ifaddrs { + header "ifaddrs.h" + export * + } + explicit module netdb { + header "netdb.h" + export * + } + explicit module arpa_inet { + header "arpa/inet.h" + export * + } + explicit module net_if { + header "net/if.h" + export * + } + explicit module netinet_in { + header "netinet/in.h" + export * + } + explicit module netinet_in6 { + header "netinet/in6.h" + export * + } + explicit module netinet_tcp { + header "netinet/tcp.h" + export * + } + explicit module bits_ip_mreq_source { + header "bits/ip_mreq_source.h" + export * + } + explicit module bits_ip_msfilter { + header "bits/ip_msfilter.h" + export * + } + explicit module bits_in_addr { + header "bits/in_addr.h" + export * + } + explicit module linux_if { + header "linux/if.h" + export * + } + explicit module sys_socket { + header "sys/socket.h" + export * + } +} + +module jni [system] { + header "jni.h" + export * +} + +module langinfo [system] { + header "langinfo.h" + export * +} + +module pthread [system] { + header "pthread.h" + export * +} + +module pty [system] { + header "pty.h" + export * +} + +module regex [system] { + header "regex.h" + export * +} + +module resolv [system] { + header "resolv.h" + export * +} + +module sched [system] { + header "sched.h" + export * +} + +module search [system] { + header "search.h" + export * +} + +module semaphore 
[system] { + header "semaphore.h" + export * +} + +module spawn [system] { + header "spawn.h" + export * +} + +module strings [system] { + header "strings.h" + export * +} + +module sys [system] { + explicit module syscall { + header "syscall.h" + export * + } + explicit module sysexits { + header "sysexits.h" + export * + } + explicit module syslog { + header "syslog.h" + export * + } +} + +module tar [system] { + header "tar.h" + export * +} + +module termio [system] { + explicit module termio { + header "termio.h" + export * + } + explicit module termios { + header "termios.h" + export * + } +} + +module uconfig_local [system] { + header "uconfig_local.h" + export * +} + +module ucontext [system] { + header "ucontext.h" + export * +} + +module unistd [system] { + header "unistd.h" + export * + explicit module sys_unistd { + header "sys/unistd.h" + export * + } +} + +module utmp [system] { + explicit module utmp { + header "utmp.h" + export * + } + explicit module utmpx { + header "utmpx.h" + export * + } +} + +module wait [system] { + header "wait.h" + export * + explicit module sys_wait { + header "sys/wait.h" + export * + } +} + +module xlocale [system] { + header "xlocale.h" + export * +} + +// Additional modules in the 'android' subdirectory. +module android_defs [system] { + explicit module ndk_version { + header "android/ndk-version.h" + export * + } + explicit module versioning { + header "android/versioning.h" + export * + } +} + +module android_apis [system] { + explicit module asset_manager_jni { + header "android/asset_manager_jni.h" + export * + } + explicit module asset_manager { + header "android/asset_manager.h" + export * + } + explicit module log { + header "android/log.h" + export * + } + explicit module trace { + header "android/trace.h" + export * + } +} + +// Additional modules in the 'bits' subdirectory. +module _bits_sa_family_t [system] { + // Note: this module is not part of 'inet' + // to prevent a circular modular dependency. 
+ header "bits/sa_family_t.h" + export * +} +module _bits_stdatomic [system] { + // Note: this module is not part of 'stdatomic' + // as it depends on libc++ and forcing it to + // be in the same module breaks that modularization + // chain. + header "bits/stdatomic.h" + export * +} + +// Additional modules in the 'linux' subdirectory. +module _linux_time [system] { + // Note: this module is not part of '_time' + // to prevent a circular modular dependency + // between linux_time and sys modules. + header "linux/time.h" + header "linux/time_types.h" + export * + explicit module bits_timespec { + header "bits/timespec.h" + export * + } +} + +// Additional modules in the 'sys' subdirectory. +module _sys_cdefs [system] { + header "sys/cdefs.h" + // Circular included header, so combine them + // into the same module. + header "android/api-level.h" + export * +} +module _sys_core [system] { + explicit module endian { + header "sys/endian.h" + export * + } + explicit module errno { + header "sys/errno.h" + export * + } + explicit module ifunc { + header "sys/ifunc.h" + export * + } + explicit module ipc { + header "sys/ipc.h" + export * + } + explicit module mman { + header "sys/mman.h" + export * + } + explicit module mman_common { + header "asm-generic/mman-common.h" + export * + } + explicit module msg { + header "sys/msg.h" + export * + } + explicit module random { + header "sys/random.h" + export * + } + explicit module resource { + header "sys/resource.h" + export * + } + explicit module sem { + header "sys/sem.h" + export * + } + explicit module shm { + header "sys/shm.h" + export * + } + explicit module un { + header "sys/un.h" + export * + } + explicit module utsname { + header "sys/utsname.h" + export * + } +} +module _sys_select [system] { + // Note: this module is not part of + // 'sys_core' to prevent circular dependency error. + header "sys/select.h" + export * +} +// Use 'no_undeclared_includes' to avoid pulling in the libc++ module. 
+// This module depends on 'stdint.h', which is defined in libc++. +// We can't import libc++ as that would cause circular dependency +// between libc++ and this module. Using 'no_undeclared_includes' +// ensures that we include 'stdint.h' from usr/include instead of libc++. +module _sys_types [system] [no_undeclared_includes] { + header "sys/types.h" + // The 'sys/types.h' header has a circular include + // with 'bits/pthread_types.h' and thus it must be in the same module. + header "bits/pthread_types.h" + export * + use _stdint + use _sys_cdefs + use _Builtin_stddef +} +// Use 'no_undeclared_includes' to avoid pulling in the libc++ module. +// This module depends on 'stdint.h', which is defined in libc++. +// We can't import libc++ as that would cause circular dependency +// between libc++ and this module. Using 'no_undeclared_includes' +// ensures that we include 'stdint.h' from usr/include instead of libc++. +module _sys_user [system] [no_undeclared_includes] { + header "sys/user.h" + export * + use _stdint + use _sys_cdefs + use _Builtin_stddef +} + +// Module for zlib headers. 
+module zlib [system] { + header "zlib.h" + explicit module zconf { + header "zconf.h" + export * + } + export * + link "z" +} diff --git a/stdlib/public/RegexBuilder/CMakeLists.txt b/stdlib/public/RegexBuilder/CMakeLists.txt index 8db5dae116602..117cb57ee7f60 100644 --- a/stdlib/public/RegexBuilder/CMakeLists.txt +++ b/stdlib/public/RegexBuilder/CMakeLists.txt @@ -27,6 +27,7 @@ message(STATUS "Using Experimental String Processing library for RegexBuilder ($ add_swift_target_library(swiftRegexBuilder ${SWIFT_STDLIB_LIBRARY_BUILD_TYPES} IS_STDLIB "${REGEX_BUILDER_SOURCES}" + SWIFT_MODULE_DEPENDS_ANDROID Android SWIFT_MODULE_DEPENDS_LINUX Glibc SWIFT_MODULE_DEPENDS_FREEBSD Glibc SWIFT_MODULE_DEPENDS_OPENBSD Glibc diff --git a/stdlib/public/RemoteInspection/TypeLowering.cpp b/stdlib/public/RemoteInspection/TypeLowering.cpp index 2d1126d5ea9a0..d9f8fb1456b2a 100644 --- a/stdlib/public/RemoteInspection/TypeLowering.cpp +++ b/stdlib/public/RemoteInspection/TypeLowering.cpp @@ -23,6 +23,7 @@ #include "llvm/Support/MathExtras.h" #include "swift/ABI/Enum.h" #include "swift/ABI/MetadataValues.h" +#include "swift/RemoteInspection/BitMask.h" #include "swift/RemoteInspection/TypeLowering.h" #include "swift/RemoteInspection/TypeRef.h" #include "swift/RemoteInspection/TypeRefBuilder.h" @@ -229,6 +230,11 @@ void TypeInfo::dump(std::ostream &stream, unsigned Indent) const { stream << "\n"; } +BitMask ReferenceTypeInfo::getSpareBits(TypeConverter &TC, bool &hasAddrOnly) const { + auto mpePointerSpareBits = TC.getBuilder().getMultiPayloadEnumPointerMask(); + return BitMask(getSize(), mpePointerSpareBits); +} + BuiltinTypeInfo::BuiltinTypeInfo(TypeRefBuilder &builder, BuiltinTypeDescriptorBase &descriptor) : TypeInfo(TypeInfoKind::Builtin, descriptor.Size, @@ -237,6 +243,21 @@ BuiltinTypeInfo::BuiltinTypeInfo(TypeRefBuilder &builder, descriptor.IsBitwiseTakable), Name(descriptor.getMangledTypeName()) {} +// Builtin.Int is mangled as 'Bi' N '_' +// Returns 0 if this isn't an Int 
+static unsigned isIntType(std::string name) { + llvm::StringRef nameRef(name); + if (nameRef.starts_with("Bi") && nameRef.ends_with("_")) { + llvm::StringRef naturalRef = nameRef.drop_front(2).drop_back(); + uint8_t natural; + if (naturalRef.getAsInteger(10, natural)) { + return 0; + } + return natural; + } + return 0; +} + bool BuiltinTypeInfo::readExtraInhabitantIndex( remote::MemoryReader &reader, remote::RemoteAddress address, int *extraInhabitantIndex) const { @@ -244,47 +265,28 @@ bool BuiltinTypeInfo::readExtraInhabitantIndex( *extraInhabitantIndex = -1; return true; } - // If it has extra inhabitants, it could be an integer type with extra - // inhabitants (a bool) or a pointer. - // Check if it's an integer first. The mangling of an integer type is - // type ::= 'Bi' NATURAL '_' - llvm::StringRef nameRef(Name); - if (nameRef.starts_with("Bi") && nameRef.ends_with("_")) { - // Drop the front "Bi" and "_" end, check that what we're left with is a - // bool. - llvm::StringRef naturalRef = nameRef.drop_front(2).drop_back(); - uint8_t natural; - if (naturalRef.getAsInteger(10, natural)) - return false; - - assert(natural == 1 && - "Reading extra inhabitants of integer with more than 1 byte!"); - if (natural != 1) - return false; - - assert(getSize() == 1 && "Reading extra inhabitants of integer but size of " - "type info is different than 1!"); - if (getSize() != 1) - return false; - - assert(getNumExtraInhabitants() == 254 && - "Boolean type info should have 254 extra inhabitants!"); - if (getNumExtraInhabitants() != 254) - return false; + unsigned intSize = isIntType(Name); + if (intSize > 0 && intSize < 64 && getSize() <= 8 && intSize < getSize() * 8) { + uint64_t maxValidValue = (((uint64_t)1) << intSize) - 1; + uint64_t maxAvailableValue = (((uint64_t)1) << (getSize() * 8)) - 1; + uint64_t computedExtraInhabitants = maxAvailableValue - maxValidValue; + if (computedExtraInhabitants > ValueWitnessFlags::MaxNumExtraInhabitants) { + computedExtraInhabitants = 
ValueWitnessFlags::MaxNumExtraInhabitants; + } + assert(getNumExtraInhabitants() == computedExtraInhabitants && + "Unexpected number of extra inhabitants in an odd-sized integer"); - uint8_t rawValue; - if (!reader.readInteger(address, &rawValue)) + uint64_t rawValue; + if (!reader.readInteger(address, getSize(), &rawValue)) return false; - // The max valid value, for a bool valid values are 0 or 1, so this would - // be 1. - auto maxValidValue = 1; - // If the raw value falls outside the range of valid values, this is an - // extra inhabitant. - if (maxValidValue < rawValue) + // Example: maxValidValue is 1 for a 1-bit bool, so any larger value + // is an extra inhabitant. + if (maxValidValue < rawValue) { *extraInhabitantIndex = rawValue - maxValidValue - 1; - else + } else { *extraInhabitantIndex = -1; + } return true; } else if (Name == "yyXf") { // But there are two different conventions, one for function pointers: @@ -297,6 +299,26 @@ bool BuiltinTypeInfo::readExtraInhabitantIndex( } } +BitMask BuiltinTypeInfo::getSpareBits(TypeConverter &TC, bool &hasAddrOnly) const { + unsigned intSize = isIntType(Name); + if (intSize > 0) { + // Odd-sized integers export spare bits + // In particular: bool fields are Int1 and export 7 spare bits + auto mask = BitMask::oneMask(getSize()); + mask.keepOnlyMostSignificantBits(getSize() * 8 - intSize); + return mask; + } else if ( + Name == "yyXf" // 'yyXf' = @thin () -> Void function + ) { + // Builtin types that expose pointer spare bits + auto mpePointerSpareBits = TC.getBuilder().getMultiPayloadEnumPointerMask(); + return BitMask(getSize(), mpePointerSpareBits); + } else { + // Everything else + return BitMask::zeroMask(getSize()); + } +} + bool RecordTypeInfo::readExtraInhabitantIndex(remote::MemoryReader &reader, remote::RemoteAddress address, int *extraInhabitantIndex) const { @@ -369,6 +391,45 @@ bool RecordTypeInfo::readExtraInhabitantIndex(remote::MemoryReader &reader, return false; } +BitMask 
RecordTypeInfo::getSpareBits(TypeConverter &TC, bool &hasAddrOnly) const { + auto mask = BitMask::oneMask(getSize()); + switch (SubKind) { + case RecordKind::Invalid: + return mask; // FIXME: Should invalid have all spare bits? Or none? Does it matter? + case RecordKind::Tuple: + case RecordKind::Struct: + break; + case RecordKind::ThickFunction: + break; + case RecordKind::OpaqueExistential: { + // Existential storage isn't recorded as a field, + // so we handle it specially here... + int pointerSize = TC.targetPointerSize(); + BitMask submask = BitMask::zeroMask(pointerSize * 3); + mask.andMask(submask, 0); + hasAddrOnly = true; + break; + } + case RecordKind::ClassExistential: + break; + case RecordKind::ExistentialMetatype: + break; // Field 0 is metadata pointer, a Builtin of type 'yyXf' + case RecordKind::ErrorExistential: + break; + case RecordKind::ClassInstance: + break; + case RecordKind::ClosureContext: + break; + } + for (auto Field : Fields) { + if (Field.TR != 0) { + BitMask submask = Field.TI.getSpareBits(TC, hasAddrOnly); + mask.andMask(submask, Field.Offset); + } + } + return mask; +} + class UnsupportedEnumTypeInfo: public EnumTypeInfo { public: UnsupportedEnumTypeInfo(unsigned Size, unsigned Alignment, @@ -384,6 +445,10 @@ class UnsupportedEnumTypeInfo: public EnumTypeInfo { return false; } + BitMask getSpareBits(TypeConverter &TC, bool &hasAddrOnly) const override { + return BitMask::zeroMask(getSize()); + } + bool projectEnumValue(remote::MemoryReader &reader, remote::RemoteAddress address, int *CaseIndex) const override { @@ -410,6 +475,10 @@ class EmptyEnumTypeInfo: public EnumTypeInfo { return false; } + BitMask getSpareBits(TypeConverter &TC, bool &hasAddrOnly) const override { + return BitMask::zeroMask(getSize()); + } + bool projectEnumValue(remote::MemoryReader &reader, remote::RemoteAddress address, int *CaseIndex) const override { @@ -442,6 +511,10 @@ class TrivialEnumTypeInfo: public EnumTypeInfo { return true; } + BitMask 
getSpareBits(TypeConverter &TC, bool &hasAddrOnly) const override { + return BitMask::zeroMask(getSize()); + } + bool projectEnumValue(remote::MemoryReader &reader, remote::RemoteAddress address, int *CaseIndex) const override { @@ -497,6 +570,10 @@ class NoPayloadEnumTypeInfo: public EnumTypeInfo { return true; } + BitMask getSpareBits(TypeConverter &TC, bool &hasAddrOnly) const override { + return BitMask::zeroMask(getSize()); + } + bool projectEnumValue(remote::MemoryReader &reader, remote::RemoteAddress address, int *CaseIndex) const override { @@ -563,6 +640,10 @@ class SinglePayloadEnumTypeInfo: public EnumTypeInfo { } } + BitMask getSpareBits(TypeConverter &TC, bool &hasAddrOnly) const override { + return BitMask::zeroMask(getSize()); + } + // Think of a single-payload enum as being encoded in "pages". // The discriminator (tag) tells us which page we're on: // * Page 0 is the payload page which can either store @@ -718,6 +799,16 @@ class TaggedMultiPayloadEnumTypeInfo: public EnumTypeInfo { return true; } + BitMask getSpareBits(TypeConverter &TC, bool &hasAddrOnly) const override { + // Walk the child cases to set `hasAddrOnly` correctly. + for (auto Case : getCases()) { + if (Case.TR != 0) { + auto submask = Case.TI.getSpareBits(TC, hasAddrOnly); + } + } + return BitMask::zeroMask(getSize()); + } + bool projectEnumValue(remote::MemoryReader &reader, remote::RemoteAddress address, int *CaseIndex) const override { @@ -762,294 +853,6 @@ class TaggedMultiPayloadEnumTypeInfo: public EnumTypeInfo { } }; -// A variable-length bitmap used to track "spare bits" for general multi-payload -// enums. 
-class BitMask { - static constexpr unsigned maxSize = 128 * 1024 * 1024; // 128MB - - unsigned size; // Size of mask in bytes - uint8_t *mask; -public: - ~BitMask() { - free(mask); - } - // Construct a bitmask of the appropriate number of bytes - // initialized to all bits set - BitMask(unsigned sizeInBytes): size(sizeInBytes) { - // Gracefully fail by constructing an empty mask if we exceed the size - // limit. - if (size > maxSize) { - size = 0; - mask = nullptr; - return; - } - - mask = (uint8_t *)malloc(size); - - if (!mask) { - // Malloc might fail if size is large due to some bad data. Assert in - // asserts builds, and fail gracefully in non-asserts builds by - // constructing an empty BitMask. - assert(false && "Failed to allocate BitMask"); - size = 0; - return; - } - - memset(mask, 0xff, size); - } - // Construct a bitmask of the appropriate number of bytes - // initialized with bits from the specified buffer - BitMask(unsigned sizeInBytes, const uint8_t *initialValue, - unsigned initialValueBytes, unsigned offset) - : size(sizeInBytes) { - // Gracefully fail by constructing an empty mask if we exceed the size - // limit. - if (size > maxSize) { - size = 0; - mask = nullptr; - return; - } - - // Bad data could cause the initial value location to be off the end of our - // size. If initialValueBytes + offset is beyond sizeInBytes (or overflows), - // assert in asserts builds, and fail gracefully in non-asserts builds by - // constructing an empty BitMask. 
- bool overflowed = false; - unsigned initialValueEnd = - llvm::SaturatingAdd(initialValueBytes, offset, &overflowed); - if (overflowed) { - assert(false && "initialValueBytes + offset overflowed"); - size = 0; - mask = nullptr; - return; - } - assert(initialValueEnd <= sizeInBytes); - if (initialValueEnd > size) { - assert(false && "initialValueBytes + offset is greater than size"); - size = 0; - mask = nullptr; - return; - } - - mask = (uint8_t *)calloc(1, size); - - if (!mask) { - // Malloc might fail if size is large due to some bad data. Assert in - // asserts builds, and fail gracefully in non-asserts builds by - // constructing an empty BitMask. - assert(false && "Failed to allocate BitMask"); - size = 0; - return; - } - - memcpy(mask + offset, initialValue, initialValueBytes); - } - // Move constructor moves ownership and zeros the src - BitMask(BitMask&& src) noexcept: size(src.size), mask(std::move(src.mask)) { - src.size = 0; - src.mask = nullptr; - } - // Copy constructor makes a copy of the mask storage - BitMask(const BitMask& src) noexcept: size(src.size), mask(nullptr) { - mask = (uint8_t *)malloc(size); - memcpy(mask, src.mask, size); - } - - std::string str() const { - std::ostringstream buff; - buff << size << ":0x"; - for (unsigned i = 0; i < size; i++) { - buff << std::hex << ((mask[i] >> 4) & 0x0f) << (mask[i] & 0x0f); - } - return buff.str(); - } - - bool operator==(const BitMask& rhs) const { - // The two masks may be of different sizes. - // The common prefix must be identical. - size_t common = std::min(size, rhs.size); - if (memcmp(mask, rhs.mask, common) != 0) - return false; - // The remainder of the longer mask must be - // all zero bits. 
- unsigned mustBeZeroSize = std::max(size, rhs.size) - common; - uint8_t *mustBeZero; - if (size < rhs.size) { - mustBeZero = rhs.mask + size; - } else if (size > rhs.size) { - mustBeZero = mask + rhs.size; - } - for (unsigned i = 0; i < mustBeZeroSize; ++i) { - if (mustBeZero[i] != 0) { - return false; - } - } - return true; - } - - bool operator!=(const BitMask& rhs) const { - return !(*this == rhs); - } - - bool isNonZero() const { return !isZero(); } - - bool isZero() const { - for (unsigned i = 0; i < size; ++i) { - if (mask[i] != 0) { - return false; - } - } - return true; - } - - void makeZero() { - memset(mask, 0, size * sizeof(mask[0])); - } - - void complement() { - for (unsigned i = 0; i < size; ++i) { - mask[i] = ~mask[i]; - } - } - - int countSetBits() const { - static const int counter[] = - {0, 1, 1, 2, 1, 2, 2, 3, 1, 2, 2, 3, 2, 3, 3, 4}; - int bits = 0; - for (unsigned i = 0; i < size; ++i) { - bits += counter[mask[i] >> 4] + counter[mask[i] & 15]; - } - return bits; - } - - int countZeroBits() const { - static const int counter[] = - {4, 3, 3, 2, 3, 2, 2, 1, 3, 2, 2, 1, 2, 1, 1, 0}; - int bits = 0; - for (unsigned i = 0; i < size; ++i) { - bits += counter[mask[i] >> 4] + counter[mask[i] & 15]; - } - return bits; - } - - // Treat the provided value as a mask, `and` it with - // the part of the mask at the provided byte offset. - // Bits outside the specified area are unchanged. - template - void andMask(IntegerType value, unsigned byteOffset) { - andMask((void *)&value, sizeof(value), byteOffset); - } - - // As above, but using the provided bitmask instead - // of an integer. - void andMask(BitMask mask, unsigned offset) { - andMask(mask.mask, mask.size, offset); - } - - // As above, but using the complement of the - // provided mask. - void andNotMask(BitMask mask, unsigned offset) { - if (offset < size) { - andNotMask(mask.mask, mask.size, offset); - } - } - - // Zero all bits except for the `n` most significant ones. 
- // XXX TODO: Big-endian support? - void keepOnlyMostSignificantBits(unsigned n) { - unsigned count = 0; - if (size < 1) { - return; - } - unsigned i = size; - while (i > 0) { - i -= 1; - if (count < n) { - for (int b = 128; b > 0; b >>= 1) { - if (count >= n) { - mask[i] &= ~b; - } else if ((mask[i] & b) != 0) { - ++count; - } - } - } else { - mask[i] = 0; - } - } - } - - unsigned numBits() const { - return size * 8; - } - - unsigned numSetBits() const { - unsigned count = 0; - for (unsigned i = 0; i < size; ++i) { - if (mask[i] != 0) { - for (unsigned b = 1; b < 256; b <<= 1) { - if ((mask[i] & b) != 0) { - ++count; - } - } - } - } - return count; - } - - // Read a mask-sized area from the target and collect - // the masked bits into a single integer. - template - bool readMaskedInteger(remote::MemoryReader &reader, - remote::RemoteAddress address, - IntegerType *dest) const { - auto data = reader.readBytes(address, size); - if (!data) { - return false; - } -#if defined(__BIG_ENDIAN__) - assert(false && "Big endian not supported for readMaskedInteger"); -#else - IntegerType result = 0; - IntegerType resultBit = 1; // Start from least-significant bit - auto bytes = static_cast(data.get()); - for (unsigned i = 0; i < size; ++i) { - for (unsigned b = 1; b < 256; b <<= 1) { - if ((mask[i] & b) != 0) { - if ((bytes[i] & b) != 0) { - result |= resultBit; - } - resultBit <<= 1; - } - } - } - *dest = result; - return true; -#endif - } - -private: - void andMask(void *maskData, unsigned len, unsigned offset) { - if (offset < size) { - unsigned common = std::min(len, size - offset); - uint8_t *maskBytes = (uint8_t *)maskData; - for (unsigned i = 0; i < common; ++i) { - mask[i + offset] &= maskBytes[i]; - } - } - } - - void andNotMask(void *maskData, unsigned len, unsigned offset) { - assert(offset < size); - if (offset < size) { - unsigned common = std::min(len, size - offset); - uint8_t *maskBytes = (uint8_t *)maskData; - for (unsigned i = 0; i < common; ++i) { - mask[i 
+ offset] &= ~maskBytes[i]; - } - } - } -}; - // General multi-payload enum support for enums that do use spare // bits in the payload. class MultiPayloadEnumTypeInfo: public EnumTypeInfo { @@ -1102,8 +905,9 @@ class MultiPayloadEnumTypeInfo: public EnumTypeInfo { tag = payloadTagLow; tagBits = payloadTagLowBitCount; - // Read the other spare bits + // Read the other spare bits from the payload area auto otherSpareBitsMask = spareBitsMask; // copy + otherSpareBitsMask.keepOnlyLeastSignificantBytes(getPayloadSize()); otherSpareBitsMask.andNotMask(payloadTagLowBitsMask, 0); auto otherSpareBitsCount = otherSpareBitsMask.countSetBits(); if (otherSpareBitsCount > 0) { @@ -1154,6 +958,13 @@ class MultiPayloadEnumTypeInfo: public EnumTypeInfo { return true; } + BitMask getSpareBits(TypeConverter &TC, bool &hasAddrOnly) const override { + auto mask = spareBitsMask; + // Bits we've used for our tag can't be re-used by a containing enum... + mask.andNotMask(getMultiPayloadTagBitsMask(), 0); + return mask; + } + bool projectEnumValue(remote::MemoryReader &reader, remote::RemoteAddress address, int *CaseIndex) const override { @@ -1221,7 +1032,7 @@ class MultiPayloadEnumTypeInfo: public EnumTypeInfo { // The case value is stored in three pieces: // * A separate "discriminator" tag appended to the payload (if necessary) - // * A "payload tag" that uses (a subset of) the spare bits + // * A "payload tag" that uses (a subset of) the spare bits in the payload // * The remainder of the payload bits (for non-payload cases) // This computes the bits used for the payload tag. 
BitMask getMultiPayloadTagBitsMask() const { @@ -1235,68 +1046,12 @@ class MultiPayloadEnumTypeInfo: public EnumTypeInfo { payloadTagBits += 1; } BitMask payloadTagBitsMask = spareBitsMask; + payloadTagBitsMask.keepOnlyLeastSignificantBytes(getPayloadSize()); payloadTagBitsMask.keepOnlyMostSignificantBits(payloadTagBits); return payloadTagBitsMask; } }; -// Recursively populate the spare bit mask for this single type -static bool populateSpareBitsMask(const TypeInfo *TI, BitMask &mask, uint64_t mpePointerSpareBits); - -// Recursively populate the spare bit mask for this collection of -// record fields or enum cases. -static bool populateSpareBitsMask(const std::vector &Fields, BitMask &mask, uint64_t mpePointerSpareBits) { - for (auto Field : Fields) { - if (Field.TR != 0) { - BitMask submask(Field.TI.getSize()); - if (!populateSpareBitsMask(&Field.TI, submask, mpePointerSpareBits)) { - return false; - } - mask.andMask(submask, Field.Offset); - } - } - return true; -} - -// General recursive type walk to combine spare bit info from nested structures. -static bool populateSpareBitsMask(const TypeInfo *TI, BitMask &mask, uint64_t mpePointerSpareBits) { - switch (TI->getKind()) { - case TypeInfoKind::Reference: { - if (TI->getSize() == 8) { - mask.andMask(mpePointerSpareBits, 0); - } else /* TI->getSize() == 4 */ { - uint32_t pointerMask = (uint32_t)mpePointerSpareBits; - mask.andMask(pointerMask, 0); - } - break; - } - case TypeInfoKind::Enum: { - auto EnumTI = reinterpret_cast(TI); - // Remove bits used by the payloads - if (!populateSpareBitsMask(EnumTI->getCases(), mask, mpePointerSpareBits)) { - return false; - } - // TODO: Remove bits needed to discriminate payloads. - // Until then, return false for any type with an enum in it so we - // won't claim to support something we don't. 
- return false; - break; - } - case TypeInfoKind::Record: { - auto RecordTI = dyn_cast(TI); - if (!populateSpareBitsMask(RecordTI->getFields(), mask, mpePointerSpareBits)) { - return false; - } - break; - } - default: { - mask.makeZero(); - break; - } - } - return true; -} - /// Utility class for building values that contain witness tables. class ExistentialTypeInfoBuilder { TypeConverter &TC; @@ -2128,6 +1883,7 @@ class EnumTypeInfoBuilder { return nullptr; } + // Sort and classify the fields for (auto Case : Fields) { if (Case.TR == nullptr) { ++NonPayloadCases; @@ -2306,21 +2062,32 @@ class EnumTypeInfoBuilder { Stride = 1; auto PayloadSize = EnumTypeInfo::getPayloadSizeForCases(Cases); - // If there's a multi-payload enum descriptor, then we - // have spare bits information from the compiler. + // Compute the spare bit mask and determine if we have any address-only fields + auto localSpareBitMask = BitMask::oneMask(Size); + bool hasAddrOnly = false; + for (auto Case : Cases) { + if (Case.TR != 0) { + auto submask = Case.TI.getSpareBits(TC, hasAddrOnly); + localSpareBitMask.andMask(submask, 0); + } + } + + // See if we have MPE bit mask information from the compiler... + // TODO: drop this? // Uncomment the following line to dump the MPE section every time we come through here... //TC.getBuilder().dumpMultiPayloadEnumSection(std::cerr); // DEBUG helper auto MPEDescriptor = TC.getBuilder().getMultiPayloadEnumDescriptor(TR); if (MPEDescriptor && MPEDescriptor->usesPayloadSpareBits()) { + // We found compiler-provided spare bit data... 
auto PayloadSpareBitMaskByteCount = MPEDescriptor->getPayloadSpareBitMaskByteCount(); auto PayloadSpareBitMaskByteOffset = MPEDescriptor->getPayloadSpareBitMaskByteOffset(); auto SpareBitMask = MPEDescriptor->getPayloadSpareBits(); - BitMask spareBitsMask(PayloadSize, SpareBitMask, + BitMask compilerSpareBitMask(PayloadSize, SpareBitMask, PayloadSpareBitMaskByteCount, PayloadSpareBitMaskByteOffset); - if (spareBitsMask.isZero()) { + if (compilerSpareBitMask.isZero() || hasAddrOnly) { // If there are no spare bits, use the "simple" tag-only implementation. return TC.makeTypeInfo( Size, Alignment, Stride, NumExtraInhabitants, @@ -2328,58 +2095,29 @@ class EnumTypeInfoBuilder { } #if 0 // TODO: This should be !defined(NDEBUG) - // DEBUG verification that compiler mask and locally-computed - // mask are the same (whenever both are available). - BitMask locallyComputedSpareBitsMask(PayloadSize); - auto mpePointerSpareBits = TC.getBuilder().getMultiPayloadEnumPointerMask(); - auto locallyComputedSpareBitsMaskIsValid - = populateSpareBitsMask(Cases, locallyComputedSpareBitsMask, mpePointerSpareBits); - // If the local computation were always correct, we could: - // assert(locallyComputedSpareBitsMaskIsValid); - if (locallyComputedSpareBitsMaskIsValid) { - // Whenever the compiler and local computation both produce - // data, they should agree. - // TODO: Make this true, then change `#if 0` above - assert(locallyComputedSpareBitsMask == spareBitsMask); - } + // Verify that compiler provided and local spare bit info agree... + // TODO: If we could make this actually work, then we wouldn't need the + // bulky compiler-provided info, would we? 
+ assert(localSpareBitMask == compilerSpareBitMask); #endif // Use compiler-provided spare bit information return TC.makeTypeInfo( Size, Alignment, Stride, NumExtraInhabitants, - BitwiseTakable, Cases, spareBitsMask, + BitwiseTakable, Cases, compilerSpareBitMask, EffectivePayloadCases); } - // Either there was no compiler data or it didn't make sense - // (existed but claimed to have no mask). - // Try computing the mask ourselves: This is less robust, but necessary to - // support images from older compilers. - BitMask spareBitsMask(PayloadSize); - auto mpePointerSpareBits = TC.getBuilder().getMultiPayloadEnumPointerMask(); - auto validSpareBitsMask = populateSpareBitsMask(Cases, spareBitsMask, mpePointerSpareBits); - // For DEBUGGING, disable fallback to local computation to - // make missing compiler data more obvious: - // validSpareBitsMask = false; - if (!validSpareBitsMask) { - // If we couldn't correctly determine the spare bits mask, - // return a TI that will always fail when asked for XIs or value. - return TC.makeTypeInfo( - Size, Alignment, Stride, NumExtraInhabitants, - BitwiseTakable, EnumKind::MultiPayloadEnum, Cases); - } else if (spareBitsMask.isZero()) { + if (localSpareBitMask.isZero() || hasAddrOnly) { // Simple case that does not use spare bits - // This is correct as long as our local spare bits calculation - // above only returns an empty mask when the mask is really empty, return TC.makeTypeInfo( Size, Alignment, Stride, NumExtraInhabitants, BitwiseTakable, Cases, EffectivePayloadCases); } else { // General case can mix spare bits and extra discriminator - // It obviously relies on having an accurate spare bit mask. 
return TC.makeTypeInfo( Size, Alignment, Stride, NumExtraInhabitants, - BitwiseTakable, Cases, spareBitsMask, + BitwiseTakable, Cases, localSpareBitMask, EffectivePayloadCases); } } diff --git a/stdlib/public/StringProcessing/CMakeLists.txt b/stdlib/public/StringProcessing/CMakeLists.txt index 408e019e46644..c767dcff59f42 100644 --- a/stdlib/public/StringProcessing/CMakeLists.txt +++ b/stdlib/public/StringProcessing/CMakeLists.txt @@ -41,6 +41,7 @@ message(STATUS "Using Experimental String Processing library for _StringProcessi add_swift_target_library(swift_StringProcessing ${SWIFT_STDLIB_LIBRARY_BUILD_TYPES} IS_STDLIB "${STRING_PROCESSING_SOURCES}" + SWIFT_MODULE_DEPENDS_ANDROID Android SWIFT_MODULE_DEPENDS_LINUX Glibc SWIFT_MODULE_DEPENDS_FREEBSD Glibc SWIFT_MODULE_DEPENDS_OPENBSD Glibc diff --git a/stdlib/public/core/Misc.swift b/stdlib/public/core/Misc.swift index b766b27948301..6183f11e23054 100644 --- a/stdlib/public/core/Misc.swift +++ b/stdlib/public/core/Misc.swift @@ -174,11 +174,20 @@ func _rethrowsViaClosure(_ fn: () throws -> ()) rethrows { @_documentation(visibility: internal) @_marker public protocol Escapable {} +#if $BitwiseCopyable2 #if $NoncopyableGenerics && $NonescapableTypes @_marker public protocol BitwiseCopyable: ~Escapable { } #else @_marker public protocol BitwiseCopyable { } #endif -@available(*, unavailable) -@_marker public protocol _BitwiseCopyable {} +@available(*, deprecated, message: "Use BitwiseCopyable") +public typealias _BitwiseCopyable = BitwiseCopyable +#else +#if $NoncopyableGenerics && $NonescapableTypes +@_marker public protocol _BitwiseCopyable: ~Escapable { } +#else +@_marker public protocol _BitwiseCopyable { } +#endif +public typealias BitwiseCopyable = _BitwiseCopyable +#endif diff --git a/stdlib/public/runtime/EnvironmentVariables.cpp b/stdlib/public/runtime/EnvironmentVariables.cpp index 264e44a4db613..6c30977c7c3bf 100644 --- a/stdlib/public/runtime/EnvironmentVariables.cpp +++ 
b/stdlib/public/runtime/EnvironmentVariables.cpp @@ -272,3 +272,7 @@ SWIFT_RUNTIME_STDLIB_SPI bool concurrencyEnableJobDispatchIntegration() { SWIFT_RUNTIME_STDLIB_SPI bool concurrencyValidateUncheckedContinuations() { return runtime::environment::SWIFT_DEBUG_VALIDATE_UNCHECKED_CONTINUATIONS(); } + +SWIFT_RUNTIME_STDLIB_SPI const char *concurrencyIsCurrentExecutorLegacyModeOverride() { + return runtime::environment::SWIFT_IS_CURRENT_EXECUTOR_LEGACY_MODE_OVERRIDE(); +} diff --git a/stdlib/public/runtime/EnvironmentVariables.def b/stdlib/public/runtime/EnvironmentVariables.def index 188020ba5ae68..cb70e6702b2ce 100644 --- a/stdlib/public/runtime/EnvironmentVariables.def +++ b/stdlib/public/runtime/EnvironmentVariables.def @@ -112,4 +112,16 @@ VARIABLE(SWIFT_BACKTRACE, string, "", "crash catching and backtracing support in the runtime. " "See docs/Backtracing.rst in the Swift repository for details.") +VARIABLE(SWIFT_IS_CURRENT_EXECUTOR_LEGACY_MODE_OVERRIDE, string, "", + "Allows for suppressing 'is current executor' equality check crashes. " + "As since Swift 6.0 checking for current executor equality, may crash " + "and will never return 'false' because we are calling into library " + "implemented SerialExecutor.checkIsolation which should crash if the " + "isolation is not the expected one. Some old code may rely on the " + "non-crashing behavior. This flag enables temporarily restoring the " + "legacy 'nocrash' behavior until adopting code has been adjusted. " + "Legal values are: " + " 'nocrash' (Legacy behavior), " + " 'crash' (Swift 6.0+ behavior)") + #undef VARIABLE diff --git a/stdlib/public/runtime/Paths.cpp b/stdlib/public/runtime/Paths.cpp index 131a24ed40502..174140238ba5d 100644 --- a/stdlib/public/runtime/Paths.cpp +++ b/stdlib/public/runtime/Paths.cpp @@ -49,6 +49,11 @@ #include #endif +#ifdef __linux__ +// Needed for 'readlink'. 
+#include +#endif + #include #include #include diff --git a/stdlib/public/runtime/SwiftValue.mm b/stdlib/public/runtime/SwiftValue.mm index 52b5db6fb3120..9881c5ed57a78 100644 --- a/stdlib/public/runtime/SwiftValue.mm +++ b/stdlib/public/runtime/SwiftValue.mm @@ -430,10 +430,10 @@ - (BOOL)isEqual:(id)other { } } - if (runtime::bincompat::useLegacySwiftObjCHashing()) { - // Legacy behavior only proxies isEqual: for Hashable, not Equatable - return NO; - } +// if (runtime::bincompat::useLegacySwiftObjCHashing()) { +// // Legacy behavior only proxies isEqual: for Hashable, not Equatable +// return NO; +// } if (auto equatableConformance = selfHeader->getEquatableConformance()) { if (auto selfEquatableBaseType = selfHeader->getEquatableBaseType()) { @@ -464,10 +464,10 @@ - (NSUInteger)hash { selfHeader->type, hashableConformance); } - if (runtime::bincompat::useLegacySwiftObjCHashing()) { - // Legacy behavior doesn't honor Equatable conformance, only Hashable - return (NSUInteger)self; - } +// if (runtime::bincompat::useLegacySwiftObjCHashing()) { +// // Legacy behavior doesn't honor Equatable conformance, only Hashable +// return (NSUInteger)self; +// } // If Swift type is Equatable but not Hashable, // we have to return something here that is compatible diff --git a/test/AutoDiff/SILOptimizer/closure_specialization.sil b/test/AutoDiff/SILOptimizer/closure_specialization.sil new file mode 100644 index 0000000000000..7aff8ff02d2d6 --- /dev/null +++ b/test/AutoDiff/SILOptimizer/closure_specialization.sil @@ -0,0 +1,116 @@ +// RUN: %target-sil-opt -test-runner %s -o /dev/null 2>&1 | %FileCheck %s + +// REQUIRES: swift_in_compiler + +sil_stage canonical + +import Builtin +import Swift +import SwiftShims + +import _Differentiation + +// ===================== Gathering callsites and corresponding closures ===================== // + +////////////////////////////// +// Single closure call site // +////////////////////////////// +sil @$vjpMultiply : $@convention(thin) (Float, 
Float, Float) -> (Float, Float) + +sil private @$pullback_f : $@convention(thin) (Float, @owned @callee_guaranteed (Float) -> (Float, Float)) -> Float { +bb0(%0 : $Float, %1 : $@callee_guaranteed (Float) -> (Float, Float)): + %2 = apply %1(%0) : $@callee_guaranteed (Float) -> (Float, Float) // users: %5, %4 + strong_release %1 : $@callee_guaranteed (Float) -> (Float, Float) // id: %3 + %4 = tuple_extract %2 : $(Float, Float), 0 // user: %7 + %5 = tuple_extract %2 : $(Float, Float), 1 // user: %6 + %6 = struct_extract %5 : $Float, #Float._value // user: %8 + %7 = struct_extract %4 : $Float, #Float._value // user: %8 + %8 = builtin "fadd_FPIEEE32"(%6 : $Builtin.FPIEEE32, %7 : $Builtin.FPIEEE32) : $Builtin.FPIEEE32 // user: %9 + %9 = struct $Float (%8 : $Builtin.FPIEEE32) // users: %11, %10 + debug_value %9 : $Float, let, name "x", argno 1 // id: %10 + return %9 : $Float // id: %11 +} + +// reverse-mode derivative of f(_:) +sil hidden @$s4test1fyS2fFTJrSpSr : $@convention(thin) (Float) -> (Float, @owned @callee_guaranteed (Float) -> Float) { +bb0(%0 : $Float): + specify_test "closure_specialize_gather_call_sites" + // CHECK-LABEL: Specializing closures in function: $s4test1fyS2fFTJrSpSr + // CHECK: PartialApply call site: %8 = partial_apply [callee_guaranteed] %7(%6) : $@convention(thin) (Float, @owned @callee_guaranteed (Float) -> (Float, Float)) -> Float // user: %9 + // CHECK: Passed in closures: + // CHECK: 1. 
%6 = partial_apply [callee_guaranteed] %5(%0, %0) : $@convention(thin) (Float, Float, Float) -> (Float, Float) // user: %8 + + debug_value %0 : $Float, let, name "x", argno 1 // id: %1 + %2 = struct_extract %0 : $Float, #Float._value // users: %3, %3 + %3 = builtin "fmul_FPIEEE32"(%2 : $Builtin.FPIEEE32, %2 : $Builtin.FPIEEE32) : $Builtin.FPIEEE32 // user: %4 + %4 = struct $Float (%3 : $Builtin.FPIEEE32) // user: %9 + // function_ref closure #1 in static Float._vjpMultiply(lhs:rhs:) + %5 = function_ref @$vjpMultiply : $@convention(thin) (Float, Float, Float) -> (Float, Float) // user: %6 + %6 = partial_apply [callee_guaranteed] %5(%0, %0) : $@convention(thin) (Float, Float, Float) -> (Float, Float) // user: %8 + // function_ref pullback of f(_:) + %7 = function_ref @$pullback_f : $@convention(thin) (Float, @owned @callee_guaranteed (Float) -> (Float, Float)) -> Float // user: %8 + %8 = partial_apply [callee_guaranteed] %7(%6) : $@convention(thin) (Float, @owned @callee_guaranteed (Float) -> (Float, Float)) -> Float // user: %9 + %9 = tuple (%4 : $Float, %8 : $@callee_guaranteed (Float) -> Float) // user: %10 + return %9 : $(Float, @callee_guaranteed (Float) -> Float) // id: %10 +} + +/////////////////////////////// +// Multiple closure callsite // +/////////////////////////////// +sil @$_vjpSin : $@convention(thin) (Float, Float) -> Float // user: %6 +sil @$_vjpCos : $@convention(thin) (Float, Float) -> Float // user: %10 +sil @$_vjpMultiply : $@convention(thin) (Float, Float, Float) -> (Float, Float) + +// pullback of g(_:) +sil private @$pullback_g : $@convention(thin) (Float, @owned @callee_guaranteed (Float) -> Float, @owned @callee_guaranteed (Float) -> Float, @owned @callee_guaranteed (Float) -> (Float, Float)) -> Float { +bb0(%0 : $Float, %1 : $@callee_guaranteed (Float) -> Float, %2 : $@callee_guaranteed (Float) -> Float, %3 : $@callee_guaranteed (Float) -> (Float, Float)): + %4 = apply %3(%0) : $@callee_guaranteed (Float) -> (Float, Float) // users: %7, %6 
+ strong_release %3 : $@callee_guaranteed (Float) -> (Float, Float) // id: %5 + %6 = tuple_extract %4 : $(Float, Float), 0 // user: %10 + %7 = tuple_extract %4 : $(Float, Float), 1 // user: %8 + %8 = apply %2(%7) : $@callee_guaranteed (Float) -> Float // user: %12 + strong_release %2 : $@callee_guaranteed (Float) -> Float // id: %9 + %10 = apply %1(%6) : $@callee_guaranteed (Float) -> Float // user: %13 + strong_release %1 : $@callee_guaranteed (Float) -> Float // id: %11 + %12 = struct_extract %8 : $Float, #Float._value // user: %14 + %13 = struct_extract %10 : $Float, #Float._value // user: %14 + %14 = builtin "fadd_FPIEEE32"(%13 : $Builtin.FPIEEE32, %12 : $Builtin.FPIEEE32) : $Builtin.FPIEEE32 // user: %15 + %15 = struct $Float (%14 : $Builtin.FPIEEE32) // users: %17, %16 + debug_value %15 : $Float, let, name "x", argno 1 // id: %16 + return %15 : $Float // id: %17 +} + +// reverse-mode derivative of g(_:) +sil hidden @$s4test1gyS2fFTJrSpSr : $@convention(thin) (Float) -> (Float, @owned @callee_guaranteed (Float) -> Float) { +bb0(%0 : $Float): + specify_test "closure_specialize_gather_call_sites" + // CHECK-LABEL: Specializing closures in function: $s4test1gyS2fFTJrSpSr + // CHECK: PartialApply call site: %16 = partial_apply [callee_guaranteed] %15(%6, %10, %14) : $@convention(thin) (Float, @owned @callee_guaranteed (Float) -> Float, @owned @callee_guaranteed (Float) -> Float, @owned @callee_guaranteed (Float) -> (Float, Float)) -> Float // user: %17 + // CHECK: Passed in closures: + // CHECK: 1. %6 = partial_apply [callee_guaranteed] %5(%0) : $@convention(thin) (Float, Float) -> Float // user: %16 + // CHECK: 2. %10 = partial_apply [callee_guaranteed] %9(%0) : $@convention(thin) (Float, Float) -> Float // user: %16 + // CHECK: 3. 
%14 = partial_apply [callee_guaranteed] %13(%8, %4) : $@convention(thin) (Float, Float, Float) -> (Float, Float) // user: %16 + + debug_value %0 : $Float, let, name "x", argno 1 // id: %1 + %2 = struct_extract %0 : $Float, #Float._value // users: %7, %3 + %3 = builtin "int_sin_FPIEEE32"(%2 : $Builtin.FPIEEE32) : $Builtin.FPIEEE32 // users: %11, %4 + %4 = struct $Float (%3 : $Builtin.FPIEEE32) // user: %14 + // function_ref closure #1 in _vjpSin(_:) + %5 = function_ref @$_vjpSin : $@convention(thin) (Float, Float) -> Float // user: %6 + %6 = partial_apply [callee_guaranteed] %5(%0) : $@convention(thin) (Float, Float) -> Float // user: %16 + %7 = builtin "int_cos_FPIEEE32"(%2 : $Builtin.FPIEEE32) : $Builtin.FPIEEE32 // users: %11, %8 + %8 = struct $Float (%7 : $Builtin.FPIEEE32) // user: %14 + // function_ref closure #1 in _vjpCos(_:) + %9 = function_ref @$_vjpCos : $@convention(thin) (Float, Float) -> Float // user: %10 + %10 = partial_apply [callee_guaranteed] %9(%0) : $@convention(thin) (Float, Float) -> Float // user: %16 + %11 = builtin "fmul_FPIEEE32"(%3 : $Builtin.FPIEEE32, %7 : $Builtin.FPIEEE32) : $Builtin.FPIEEE32 // user: %12 + %12 = struct $Float (%11 : $Builtin.FPIEEE32) // user: %17 + // function_ref closure #1 in static Float._vjpMultiply(lhs:rhs:) + %13 = function_ref @$_vjpMultiply : $@convention(thin) (Float, Float, Float) -> (Float, Float) // user: %14 + %14 = partial_apply [callee_guaranteed] %13(%8, %4) : $@convention(thin) (Float, Float, Float) -> (Float, Float) // user: %16 + // function_ref pullback of g(_:) + %15 = function_ref @$pullback_g : $@convention(thin) (Float, @owned @callee_guaranteed (Float) -> Float, @owned @callee_guaranteed (Float) -> Float, @owned @callee_guaranteed (Float) -> (Float, Float)) -> Float // user: %16 + %16 = partial_apply [callee_guaranteed] %15(%6, %10, %14) : $@convention(thin) (Float, @owned @callee_guaranteed (Float) -> Float, @owned @callee_guaranteed (Float) -> Float, @owned @callee_guaranteed (Float) -> 
(Float, Float)) -> Float // user: %17 + %17 = tuple (%12 : $Float, %16 : $@callee_guaranteed (Float) -> Float) // user: %18 + return %17 : $(Float, @callee_guaranteed (Float) -> Float) // id: %18 +} diff --git a/test/AutoDiff/SILOptimizer/closure_specialization_xfail.sil b/test/AutoDiff/SILOptimizer/closure_specialization_xfail.sil new file mode 100644 index 0000000000000..8d5d10aafa24c --- /dev/null +++ b/test/AutoDiff/SILOptimizer/closure_specialization_xfail.sil @@ -0,0 +1,173 @@ +// RUN: %target-sil-opt -test-runner %s -o /dev/null 2>&1 | %FileCheck %s + +// REQUIRES: swift_in_compiler +// XFAIL: * + +sil_stage canonical + +import Builtin +import Swift +import SwiftShims + +import _Differentiation + +// ===================== Gathering callsites and corresponding closures ===================== // + +/////////////////////////////// +/// Parameter subset thunks /// +/////////////////////////////// +struct X : Differentiable { + @_hasStorage var a: Float { get set } + @_hasStorage var b: Double { get set } + struct TangentVector : AdditiveArithmetic, Differentiable { + @_hasStorage var a: Float { get set } + @_hasStorage var b: Double { get set } + static func + (lhs: X.TangentVector, rhs: X.TangentVector) -> X.TangentVector + static func - (lhs: X.TangentVector, rhs: X.TangentVector) -> X.TangentVector + @_implements(Equatable, ==(_:_:)) static func __derived_struct_equals(_ a: X.TangentVector, _ b: X.TangentVector) -> Bool + typealias TangentVector = X.TangentVector + init(a: Float, b: Double) + static var zero: X.TangentVector { get } + } + init(a: Float, b: Double) + mutating func move(by offset: X.TangentVector) +} + +sil [transparent] [thunk] @subset_parameter_thunk : $@convention(thin) (Float, @guaranteed @callee_guaranteed (Float, Double) -> X.TangentVector) -> X.TangentVector + +sil @pullback_f : $@convention(thin) (Float, Double) -> X.TangentVector + +sil @pullback_g : $@convention(thin) (Float, @owned @callee_guaranteed (Float) -> X.TangentVector) -> 
X.TangentVector + +sil hidden @$s5test21g1xSfAA1XV_tFTJrSpSr : $@convention(thin) (X) -> (Float, @owned @callee_guaranteed (Float) -> X.TangentVector) { +bb0(%0 : $X): + specify_test "closure_specialize_gather_call_sites" + // CHECK-LABEL: Specializing closures in function: $s5test21g1xSfAA1XV_tFTJrSpSr + // CHECK: PartialApply call site: %9 = partial_apply [callee_guaranteed] %8(%7) : $@convention(thin) (Float, @owned @callee_guaranteed (Float) -> X.TangentVector) -> X.TangentVector // user: %10 + // CHECK: Passed in closures: + // CHECK: 1. %7 = partial_apply [callee_guaranteed] %6(%5) : $@convention(thin) (Float, @guaranteed @callee_guaranteed (Float, Double) -> X.TangentVector) -> X.TangentVector // user: %9 + + %3 = struct_extract %0 : $X, #X.a // user: %10 + %4 = function_ref @pullback_f : $@convention(thin) (Float, Double) -> X.TangentVector // user: %5 + %5 = thin_to_thick_function %4 : $@convention(thin) (Float, Double) -> X.TangentVector to $@callee_guaranteed (Float, Double) -> X.TangentVector // user: %7 + %6 = function_ref @subset_parameter_thunk : $@convention(thin) (Float, @guaranteed @callee_guaranteed (Float, Double) -> X.TangentVector) -> X.TangentVector // user: %7 + %7 = partial_apply [callee_guaranteed] %6(%5) : $@convention(thin) (Float, @guaranteed @callee_guaranteed (Float, Double) -> X.TangentVector) -> X.TangentVector // user: %9 + %8 = function_ref @pullback_g : $@convention(thin) (Float, @owned @callee_guaranteed (Float) -> X.TangentVector) -> X.TangentVector // user: %9 + %9 = partial_apply [callee_guaranteed] %8(%7) : $@convention(thin) (Float, @owned @callee_guaranteed (Float) -> X.TangentVector) -> X.TangentVector // user: %10 + %10 = tuple (%3 : $Float, %9 : $@callee_guaranteed (Float) -> X.TangentVector) // user: %11 + return %10 : $(Float, @callee_guaranteed (Float) -> X.TangentVector) // id: %11 +} + +/////////////////////////////////////////////////////////////////////// +///////// Specialized generic closures - PartialApply 
Closure ///////// +/////////////////////////////////////////////////////////////////////// + +// closure #1 in static Float._vjpMultiply(lhs:rhs:) +sil @$sSf16_DifferentiationE12_vjpMultiply3lhs3rhsSf5value_Sf_SftSfc8pullbacktSf_SftFZSf_SftSfcfU_ : $@convention(thin) (Float, Float, Float) -> (Float, Float) + +// thunk for @escaping @callee_guaranteed (@unowned Float) -> (@unowned Float, @unowned Float) +sil [transparent] [reabstraction_thunk] @$sS3fIegydd_S3fIegnrr_TR : $@convention(thin) (@in_guaranteed Float, @guaranteed @callee_guaranteed (Float) -> (Float, Float)) -> (@out Float, @out Float) + +// function_ref specialized pullback of f(a:) +sil [transparent] [thunk] @pullback_f_specialized : $@convention(thin) (@in_guaranteed Float, @owned @callee_guaranteed @substituted <τ_0_0, τ_0_1, τ_0_2> (@in_guaranteed τ_0_0) -> (@out τ_0_1, @out τ_0_2) for <Float, Float, Float>) -> @out Float + +// thunk for @escaping @callee_guaranteed (@in_guaranteed Float) -> (@out Float) +sil [transparent] [reabstraction_thunk] @$sS2fIegnr_S2fIegyd_TR : $@convention(thin) (Float, @guaranteed @callee_guaranteed (@in_guaranteed Float) -> @out Float) -> Float + +sil private [signature_optimized_thunk] [always_inline] @pullback_h : $@convention(thin) (Float, @owned @callee_guaranteed (Float) -> Float) -> Float { +bb0(%0 : $Float, %1 : $@callee_guaranteed (Float) -> Float): + %2 = integer_literal $Builtin.Int64, 0 // user: %3 + %3 = builtin "sitofp_Int64_FPIEEE32"(%2 : $Builtin.Int64) : $Builtin.FPIEEE32 // users: %10, %5 + %4 = struct_extract %0 : $Float, #Float._value // user: %5 + %5 = builtin "fadd_FPIEEE32"(%3 : $Builtin.FPIEEE32, %4 : $Builtin.FPIEEE32) : $Builtin.FPIEEE32 // user: %6 + %6 = struct $Float (%5 : $Builtin.FPIEEE32) // user: %7 + %7 = apply %1(%6) : $@callee_guaranteed (Float) -> Float // user: %9 + strong_release %1 : $@callee_guaranteed (Float) -> Float // id: %8 + %9 = struct_extract %7 : $Float, #Float._value // user: %10 + %10 = builtin "fadd_FPIEEE32"(%3 : $Builtin.FPIEEE32, %9 : 
$Builtin.FPIEEE32) : $Builtin.FPIEEE32 // user: %11 + %11 = struct $Float (%10 : $Builtin.FPIEEE32) // users: %13, %12 + debug_value %11 : $Float, let, name "x", argno 1 // id: %12 + return %11 : $Float // id: %13 +} + +// reverse-mode derivative of h(x:) +sil hidden @$s5test21h1xS2f_tFTJrSpSr : $@convention(thin) (Float) -> (Float, @owned @callee_guaranteed (Float) -> Float) { +bb0(%0 : $Float): + specify_test "closure_specialize_gather_call_sites" + // CHECK-LABEL: Specializing closures in function: $s5test21h1xS2f_tFTJrSpSr + // CHECK: PartialApply call site: %14 = partial_apply [callee_guaranteed] %13(%11) : $@convention(thin) (Float, @owned @callee_guaranteed (Float) -> Float) -> Float // user: %15 + // CHECK: Passed in closures: + // CHECK: 1. %9 = partial_apply [callee_guaranteed] %8(%7) : $@convention(thin) (@in_guaranteed Float, @owned @callee_guaranteed @substituted <τ_0_0, τ_0_1, τ_0_2> (@in_guaranteed τ_0_0) -> (@out τ_0_1, @out τ_0_2) for <Float, Float, Float>) -> @out Float // user: %11 + + %1 = struct_extract %0 : $Float, #Float._value // users: %2, %2 + %2 = builtin "fmul_FPIEEE32"(%1 : $Builtin.FPIEEE32, %1 : $Builtin.FPIEEE32) : $Builtin.FPIEEE32 // user: %12 + + // function_ref closure #1 in static Float._vjpMultiply(lhs:rhs:) + %3 = function_ref @$sSf16_DifferentiationE12_vjpMultiply3lhs3rhsSf5value_Sf_SftSfc8pullbacktSf_SftFZSf_SftSfcfU_ : $@convention(thin) (Float, Float, Float) -> (Float, Float) // user: %4 + %4 = partial_apply [callee_guaranteed] %3(%0, %0) : $@convention(thin) (Float, Float, Float) -> (Float, Float) // user: %6 + + // function_ref thunk for @escaping @callee_guaranteed (@unowned Float) -> (@unowned Float, @unowned Float) + %5 = function_ref @$sS3fIegydd_S3fIegnrr_TR : $@convention(thin) (@in_guaranteed Float, @guaranteed @callee_guaranteed (Float) -> (Float, Float)) -> (@out Float, @out Float) // user: %6 + %6 = partial_apply [callee_guaranteed] %5(%4) : $@convention(thin) (@in_guaranteed Float, @guaranteed @callee_guaranteed (Float) -> (Float, 
Float)) -> (@out Float, @out Float) // user: %7 + %7 = convert_function %6 : $@callee_guaranteed (@in_guaranteed Float) -> (@out Float, @out Float) to $@callee_guaranteed @substituted <τ_0_0, τ_0_1, τ_0_2> (@in_guaranteed τ_0_0) -> (@out τ_0_1, @out τ_0_2) for <Float, Float, Float> // user: %9 + + // function_ref pullback_f_specialized + %8 = function_ref @pullback_f_specialized : $@convention(thin) (@in_guaranteed Float, @owned @callee_guaranteed @substituted <τ_0_0, τ_0_1, τ_0_2> (@in_guaranteed τ_0_0) -> (@out τ_0_1, @out τ_0_2) for <Float, Float, Float>) -> @out Float // user: %9 + %9 = partial_apply [callee_guaranteed] %8(%7) : $@convention(thin) (@in_guaranteed Float, @owned @callee_guaranteed @substituted <τ_0_0, τ_0_1, τ_0_2> (@in_guaranteed τ_0_0) -> (@out τ_0_1, @out τ_0_2) for <Float, Float, Float>) -> @out Float // user: %11 + + // function_ref thunk for @escaping @callee_guaranteed (@in_guaranteed Float) -> (@out Float) + %10 = function_ref @$sS2fIegnr_S2fIegyd_TR : $@convention(thin) (Float, @guaranteed @callee_guaranteed (@in_guaranteed Float) -> @out Float) -> Float // user: %11 + %11 = partial_apply [callee_guaranteed] %10(%9) : $@convention(thin) (Float, @guaranteed @callee_guaranteed (@in_guaranteed Float) -> @out Float) -> Float // user: %14 + %12 = struct $Float (%2 : $Builtin.FPIEEE32) // user: %15 + + // function_ref pullback_h + %13 = function_ref @pullback_h : $@convention(thin) (Float, @owned @callee_guaranteed (Float) -> Float) -> Float // user: %14 + %14 = partial_apply [callee_guaranteed] %13(%11) : $@convention(thin) (Float, @owned @callee_guaranteed (Float) -> Float) -> Float // user: %15 + %15 = tuple (%12 : $Float, %14 : $@callee_guaranteed (Float) -> Float) // user: %16 + return %15 : $(Float, @callee_guaranteed (Float) -> Float) // id: %16 +} + +////////////////////////////////////////////////////////////////////////////// +///////// Specialized generic closures - ThinToThickFunction closure ///////// +////////////////////////////////////////////////////////////////////////////// + +sil 
[transparent] [thunk] @pullback_y_specialized : $@convention(thin) (@in_guaranteed Float) -> @out Float + +sil [transparent] [reabstraction_thunk] @reabstraction_thunk : $@convention(thin) (Float, @guaranteed @callee_guaranteed (@in_guaranteed Float) -> @out Float) -> Float + +sil private [signature_optimized_thunk] [always_inline] @pullback_z : $@convention(thin) (Float, @owned @callee_guaranteed (Float) -> Float) -> Float { +bb0(%0 : $Float, %1 : $@callee_guaranteed (Float) -> Float): + %2 = integer_literal $Builtin.Int64, 0 // user: %3 + %3 = builtin "sitofp_Int64_FPIEEE32"(%2 : $Builtin.Int64) : $Builtin.FPIEEE32 // users: %10, %5 + %4 = struct_extract %0 : $Float, #Float._value // user: %5 + %5 = builtin "fadd_FPIEEE32"(%3 : $Builtin.FPIEEE32, %4 : $Builtin.FPIEEE32) : $Builtin.FPIEEE32 // user: %6 + %6 = struct $Float (%5 : $Builtin.FPIEEE32) // user: %7 + %7 = apply %1(%6) : $@callee_guaranteed (Float) -> Float // user: %9 + strong_release %1 : $@callee_guaranteed (Float) -> Float // id: %8 + %9 = struct_extract %7 : $Float, #Float._value // user: %10 + %10 = builtin "fadd_FPIEEE32"(%3 : $Builtin.FPIEEE32, %9 : $Builtin.FPIEEE32) : $Builtin.FPIEEE32 // user: %11 + %11 = struct $Float (%10 : $Builtin.FPIEEE32) // users: %13, %12 + debug_value %11 : $Float, let, name "x", argno 1 // id: %12 + return %11 : $Float // id: %13 +} + +sil hidden @$s5test21z1xS2f_tFTJrSpSr : $@convention(thin) (Float) -> (Float, @owned @callee_guaranteed (Float) -> Float) { +bb0(%0 : $Float): + specify_test "closure_specialize_gather_call_sites" + // CHECK-LABEL: Specializing closures in function: $s5test21z1xS2f_tFTJrSpSr + // CHECK: PartialApply call site: %6 = partial_apply [callee_guaranteed] %5(%4) : $@convention(thin) (Float, @owned @callee_guaranteed (Float) -> Float) -> Float // user: %7 + // CHECK: Passed in closures: + // CHECK: 1. 
%2 = thin_to_thick_function %1 : $@convention(thin) (@in_guaranteed Float) -> @out Float to $@callee_guaranteed (@in_guaranteed Float) -> @out Float // user: %4 + + // function_ref pullback_y_specialized + %1 = function_ref @pullback_y_specialized : $@convention(thin) (@in_guaranteed Float) -> @out Float // user: %2 + %2 = thin_to_thick_function %1 : $@convention(thin) (@in_guaranteed Float) -> @out Float to $@callee_guaranteed (@in_guaranteed Float) -> @out Float // user: %4 + // function_ref reabstraction_thunk + %3 = function_ref @reabstraction_thunk : $@convention(thin) (Float, @guaranteed @callee_guaranteed (@in_guaranteed Float) -> @out Float) -> Float // user: %4 + %4 = partial_apply [callee_guaranteed] %3(%2) : $@convention(thin) (Float, @guaranteed @callee_guaranteed (@in_guaranteed Float) -> @out Float) -> Float // user: %6 + // function_ref pullback_z + %5 = function_ref @pullback_z : $@convention(thin) (Float, @owned @callee_guaranteed (Float) -> Float) -> Float // user: %6 + %6 = partial_apply [callee_guaranteed] %5(%4) : $@convention(thin) (Float, @owned @callee_guaranteed (Float) -> Float) -> Float // user: %7 + %7 = tuple (%0 : $Float, %6 : $@callee_guaranteed (Float) -> Float) // user: %8 + return %7 : $(Float, @callee_guaranteed (Float) -> Float) // id: %8 +} diff --git a/test/AutoDiff/SILOptimizer/vjp_and_pullback_inlining.swift b/test/AutoDiff/SILOptimizer/vjp_and_pullback_inlining.swift index 43581c1cf6396..dc02e019fe6c3 100644 --- a/test/AutoDiff/SILOptimizer/vjp_and_pullback_inlining.swift +++ b/test/AutoDiff/SILOptimizer/vjp_and_pullback_inlining.swift @@ -9,6 +9,8 @@ import _Differentiation #if canImport(Glibc) import Glibc +#elseif canImport(Android) +import Android #else import Foundation #endif @@ -112,4 +114,4 @@ func caller_of_more_complex_pb_with_control_flow() -> Float { } // CHECK: decision {{{.*}}, b=70, {{.*}}} more_complex_pb_with_control_flowTJpSpSr -// CHECK-NEXT: "more_complex_pb_with_control_flowTJpSpSr" inlined into 
"caller_of_more_complex_pb_with_control_flow" \ No newline at end of file +// CHECK-NEXT: "more_complex_pb_with_control_flowTJpSpSr" inlined into "caller_of_more_complex_pb_with_control_flow" diff --git a/test/AutoDiff/compiler_crashers_fixed/issue-56600-symbol-in-ir-file-not-tbd-file.swift b/test/AutoDiff/compiler_crashers_fixed/issue-56600-symbol-in-ir-file-not-tbd-file.swift index fe65a165c4f53..7a5e395dd6551 100644 --- a/test/AutoDiff/compiler_crashers_fixed/issue-56600-symbol-in-ir-file-not-tbd-file.swift +++ b/test/AutoDiff/compiler_crashers_fixed/issue-56600-symbol-in-ir-file-not-tbd-file.swift @@ -12,6 +12,8 @@ import _Differentiation import Darwin #elseif canImport(Glibc) import Glibc +#elseif canImport(Android) + import Android #elseif os(Windows) import CRT #else diff --git a/test/AutoDiff/stdlib/tgmath_derivatives.swift.gyb b/test/AutoDiff/stdlib/tgmath_derivatives.swift.gyb index 39535f5f1fb55..a4a81a6a28ac8 100644 --- a/test/AutoDiff/stdlib/tgmath_derivatives.swift.gyb +++ b/test/AutoDiff/stdlib/tgmath_derivatives.swift.gyb @@ -5,6 +5,8 @@ import Darwin.C.tgmath #elseif canImport(Glibc) import Glibc +#elseif canImport(Android) + import Android #elseif os(Windows) import CRT #else diff --git a/test/AutoDiff/validation-test/custom_derivatives.swift b/test/AutoDiff/validation-test/custom_derivatives.swift index f28a233dd34b6..233c5819f40dd 100644 --- a/test/AutoDiff/validation-test/custom_derivatives.swift +++ b/test/AutoDiff/validation-test/custom_derivatives.swift @@ -6,6 +6,8 @@ import StdlibUnittest import Darwin.C #elseif canImport(Glibc) import Glibc +#elseif canImport(Android) + import Android #elseif os(Windows) import CRT #else diff --git a/test/AutoDiff/validation-test/separate_tangent_type.swift b/test/AutoDiff/validation-test/separate_tangent_type.swift index 3770e240777e6..87796d5da6b5a 100644 --- a/test/AutoDiff/validation-test/separate_tangent_type.swift +++ b/test/AutoDiff/validation-test/separate_tangent_type.swift @@ -6,6 +6,8 @@ import 
StdlibUnittest import Darwin.C #elseif canImport(Glibc) import Glibc +#elseif canImport(Android) + import Android #elseif os(Windows) import CRT #else diff --git a/test/ClangImporter/clang_builtins.swift b/test/ClangImporter/clang_builtins.swift index 4b73e96a45be1..24b83bcd6abd8 100644 --- a/test/ClangImporter/clang_builtins.swift +++ b/test/ClangImporter/clang_builtins.swift @@ -4,6 +4,8 @@ import Darwin #elseif canImport(Glibc) import Glibc +#elseif canImport(Android) + import Android #elseif os(WASI) import WASILibc #elseif os(Windows) diff --git a/test/Concurrency/Runtime/async.swift b/test/Concurrency/Runtime/async.swift index ad787c0d874ea..965819cbdf078 100644 --- a/test/Concurrency/Runtime/async.swift +++ b/test/Concurrency/Runtime/async.swift @@ -16,6 +16,8 @@ import StdlibUnittest import Darwin #elseif canImport(Glibc) import Glibc +#elseif canImport(Android) + import Android #endif var asyncTests = TestSuite("Async") diff --git a/test/Concurrency/Runtime/async_task_locals_copy_to_sync.swift b/test/Concurrency/Runtime/async_task_locals_copy_to_sync.swift index c0f7bda2805d7..bdb37734d35c0 100644 --- a/test/Concurrency/Runtime/async_task_locals_copy_to_sync.swift +++ b/test/Concurrency/Runtime/async_task_locals_copy_to_sync.swift @@ -18,6 +18,8 @@ import Dispatch import Darwin #elseif canImport(Glibc) import Glibc +#elseif canImport(Android) +import Android #endif enum TL { diff --git a/test/Concurrency/Runtime/async_task_withUnsafeCurrentTask.swift b/test/Concurrency/Runtime/async_task_withUnsafeCurrentTask.swift index 8ac4ad2f1579e..d22049945b751 100644 --- a/test/Concurrency/Runtime/async_task_withUnsafeCurrentTask.swift +++ b/test/Concurrency/Runtime/async_task_withUnsafeCurrentTask.swift @@ -10,6 +10,8 @@ import Glibc #elseif os(Windows) import MSVCRT +#elseif canImport(Android) +import Android #else import Darwin #endif @@ -36,4 +38,4 @@ func test_withUnsafeCurrentTask() async { static func main() async { await test_withUnsafeCurrentTask() } -} \ No 
newline at end of file +} diff --git a/test/Concurrency/Runtime/cancellation_handler.swift b/test/Concurrency/Runtime/cancellation_handler.swift index ba221c8592687..3cefaa5134469 100644 --- a/test/Concurrency/Runtime/cancellation_handler.swift +++ b/test/Concurrency/Runtime/cancellation_handler.swift @@ -12,6 +12,8 @@ import Darwin #elseif canImport(Glibc) import Glibc +#elseif canImport(Android) + import Android #elseif canImport(WASILibc) import WASILibc #elseif os(Windows) diff --git a/test/Concurrency/Runtime/data_race_detection_crash.swift b/test/Concurrency/Runtime/data_race_detection_crash.swift index 47238ede23127..2ab3ec805aa46 100644 --- a/test/Concurrency/Runtime/data_race_detection_crash.swift +++ b/test/Concurrency/Runtime/data_race_detection_crash.swift @@ -24,6 +24,8 @@ import Dispatch import Darwin #elseif canImport(Glibc) import Glibc +#elseif canImport(Android) +import Android #endif @MainActor func onMainActor() { diff --git a/test/Concurrency/Runtime/data_race_detection_legacy_warning.swift b/test/Concurrency/Runtime/data_race_detection_legacy_warning.swift index ec1bce9a98445..29bde7486e41e 100644 --- a/test/Concurrency/Runtime/data_race_detection_legacy_warning.swift +++ b/test/Concurrency/Runtime/data_race_detection_legacy_warning.swift @@ -25,6 +25,8 @@ import Dispatch import Darwin #elseif canImport(Glibc) import Glibc +#elseif canImport(Android) +import Android #endif @MainActor func onMainActor() { @@ -66,14 +68,14 @@ actor MyActor { struct Runner { static func main() async { print("Launching a main-actor task") - // CHECK: warning: data race detected: @MainActor function at main/data_race_detection_legacy_warning.swift:30 was not called on the main thread + // CHECK: warning: data race detected: @MainActor function at main/data_race_detection_legacy_warning.swift:32 was not called on the main thread launchFromMainThread() sleep(1) let actor = MyActor() let actorFn = await actor.getTaskOnMyActor() print("Launching an actor-instance 
task") - // CHECK: warning: data race detected: actor-isolated function at main/data_race_detection_legacy_warning.swift:59 was not called on the same actor + // CHECK: warning: data race detected: actor-isolated function at main/data_race_detection_legacy_warning.swift:61 was not called on the same actor launchTask(actorFn) sleep(1) diff --git a/test/Concurrency/Runtime/exclusivity.swift b/test/Concurrency/Runtime/exclusivity.swift index 7e07776900f35..6bd44273e6bb0 100644 --- a/test/Concurrency/Runtime/exclusivity.swift +++ b/test/Concurrency/Runtime/exclusivity.swift @@ -31,6 +31,8 @@ var global3: Int = 7 import Darwin #elseif canImport(Glibc) import Glibc +#elseif canImport(Android) +import Android #elseif canImport(CRT) import CRT #endif diff --git a/test/Concurrency/Runtime/exclusivity_custom_executors.swift b/test/Concurrency/Runtime/exclusivity_custom_executors.swift index 5e5d62fe495c1..f9f44d87aa6ec 100644 --- a/test/Concurrency/Runtime/exclusivity_custom_executors.swift +++ b/test/Concurrency/Runtime/exclusivity_custom_executors.swift @@ -26,6 +26,8 @@ import StdlibUnittest import Darwin #elseif canImport(Glibc) import Glibc +#elseif canImport(Android) +import Android #elseif canImport(CRT) import CRT #endif diff --git a/test/Concurrency/Runtime/executor_deinit3.swift b/test/Concurrency/Runtime/executor_deinit3.swift index 302ab4c67e91c..f6063c985157b 100644 --- a/test/Concurrency/Runtime/executor_deinit3.swift +++ b/test/Concurrency/Runtime/executor_deinit3.swift @@ -14,6 +14,8 @@ import Darwin #elseif canImport(Glibc) import Glibc +#elseif canImport(Android) + import Android #endif @available(SwiftStdlib 5.1, *) diff --git a/test/Concurrency/async_task_base_priority.swift b/test/Concurrency/async_task_base_priority.swift index 76febb58c5887..763ef3ad14a14 100644 --- a/test/Concurrency/async_task_base_priority.swift +++ b/test/Concurrency/async_task_base_priority.swift @@ -21,6 +21,8 @@ import Dispatch import Darwin #elseif canImport(Glibc) import Glibc 
+#elseif canImport(Android) +import Android #elseif os(WASI) import WASILibc #elseif os(Windows) diff --git a/test/Concurrency/global_actor_inference_swift6.swift b/test/Concurrency/global_actor_inference_swift6.swift index 893fceae8f246..1f933a7418a7c 100644 --- a/test/Concurrency/global_actor_inference_swift6.swift +++ b/test/Concurrency/global_actor_inference_swift6.swift @@ -153,3 +153,29 @@ class C { struct S: InferMainActor { @Wrapper var value: C // okay, 'S' is isolated to 'MainActor' } + +protocol InferMainActorInherited: InferMainActor { + func f() // expected-note{{mark the protocol requirement 'f()' 'async' to allow actor-isolated conformances}} + func g() +} + +@SomeGlobalActor +protocol InferSomeGlobalActor { } + +protocol InferenceConflict: InferMainActorInherited, InferSomeGlobalActor { } + +struct S2: InferMainActorInherited { + func f() { } // okay, 'f' is MainActor isolated, as is the requirement + @MainActor func g() { } // okay for the same reasons, but more explicitly +} + +@SomeGlobalActor +struct S3: InferenceConflict { + nonisolated func g() { } +} + +extension S3 { + func f() { } + // expected-error@-1{{global actor 'SomeGlobalActor'-isolated instance method 'f()' cannot be used to satisfy main actor-isolated protocol requirement}} + //expected-note@-2{{add 'nonisolated' to 'f()' to make this instance method not isolated to the actor}} +} diff --git a/test/IRGen/builtin_freeze.swift b/test/IRGen/builtin_freeze.swift new file mode 100644 index 0000000000000..8efc01616730e --- /dev/null +++ b/test/IRGen/builtin_freeze.swift @@ -0,0 +1,52 @@ +// RUN: %target-swift-frontend -O -module-name builtin_freeze -enable-experimental-feature BuiltinModule -primary-file %s -emit-ir -o - | %FileCheck %s --check-prefix=CHECK + +import Builtin + +func fptosi(_ x: Float) -> Int32 { + Int32(Builtin.fptosi_FPIEEE32_Int32(x._value)) + // CHECK: fptosi float %{{.+}} to i32 +} + +func fptosiWithFreeze(_ x: Float) -> Int32 { + 
Int32(Builtin.freeze_Int32(Builtin.fptosi_FPIEEE32_Int32(x._value))) + // CHECK: fptosi float %{{.+}} to i32 + // CHECK-NEXT: freeze i32 %{{.+}} +} + +func yuck() -> Int32 { + fptosi(0x1.0p32) + // CHECK: poison +} + +func yum() -> Int32 { + fptosiWithFreeze(0x1.0p32) + // CHECK-NOT: poison +} + +func fptosi(_ x: SIMD2<Float>) -> SIMD2<Int32> { + let maybePoison = Builtin.fptosi_Vec2xFPIEEE32_Vec2xInt32(x._storage._value) + var result = SIMD2<Int32>() + result._storage._value = maybePoison + return result + // CHECK: fptosi <2 x float> %{{.+}} to <2 x i32> +} + +func fptosiWithFreeze(_ x: SIMD2<Float>) -> SIMD2<Int32> { + let maybePoison = Builtin.fptosi_Vec2xFPIEEE32_Vec2xInt32(x._storage._value) + let frozen = Builtin.freeze_Vec2xInt32(maybePoison) + var result = SIMD2<Int32>() + result._storage._value = frozen + return result + // CHECK: fptosi <2 x float> %{{.+}} to <2 x i32> + // CHECK-NEXT: freeze <2 x i32> %{{.+}} +} + +func doubleYuck(_ x: SIMD2<Float>) -> SIMD2<Int32> { + fptosi(SIMD2<Float>(repeating: 0x1.0p32)) + // CHECK: poison +} + +func DoubleYum(_ x: SIMD2<Float>) -> SIMD2<Int32> { + fptosiWithFreeze(SIMD2<Float>(repeating: 0x1.0p32)) + // CHECK-NOT: poison +} diff --git a/test/IRGen/builtin_math.swift b/test/IRGen/builtin_math.swift index 3426c1ee085c4..956b2e2e0c5b2 100644 --- a/test/IRGen/builtin_math.swift +++ b/test/IRGen/builtin_math.swift @@ -6,6 +6,8 @@ import Glibc #elseif os(WASI) import WASILibc +#elseif canImport(Android) + import Android #elseif os(Windows) import CRT #else diff --git a/test/IRGen/loadable_by_address_issue73018.sil b/test/IRGen/loadable_by_address_issue73018.sil new file mode 100644 index 0000000000000..6f79536a64c9b --- /dev/null +++ b/test/IRGen/loadable_by_address_issue73018.sil @@ -0,0 +1,87 @@ +// RUN: %target-swift-frontend %s -Xllvm -sil-print-after=loadable-address -Xllvm -verify-continue-on-failure -c -o %t/t.o 2>&1 | %FileCheck %s + +// This used to trigger an assertion due to LoadableByAddress not doing proper mapping of +// switch_enum arguments during rewriting + +import Builtin +import 
Swift + +typealias X = Int +typealias LargeX = (() -> X, () -> X, () -> X, () -> X, () -> X, () -> X, () -> X, () -> X, () -> X) + +enum enum1 { +case bb0(LargeX) +} + +enum enum2 { +case bb0(LargeX) +} + +enum large_enum { +case bb1((enum1, X)) +case bb2((enum2, X)) +} + +sil @test1 : $@convention(thin) (@guaranteed large_enum) -> () { +// CHECK-LABEL: sil @test1 +bb0(%arg : $large_enum): + %loc = alloc_stack $(@callee_guaranteed () -> @owned X, @callee_guaranteed () -> @owned X, + @callee_guaranteed () -> @owned X, @callee_guaranteed () -> @owned X, + @callee_guaranteed () -> @owned X, @callee_guaranteed () -> @owned X, + @callee_guaranteed () -> @owned X, @callee_guaranteed () -> @owned X, + @callee_guaranteed () -> @owned X) + switch_enum %arg : $large_enum, case #large_enum.bb1!enumelt: bb1, case #large_enum.bb2!enumelt: bb2 + +bb1(%e1 : $(enum1, X)): + // CHECK: %[[ADDR1:.*]] = unchecked_take_enum_data_addr %{{.*}} : $*enum1, #enum1.bb0!enumelt + // CHECK: copy_addr [take] %[[ADDR1]] to [init] %1 : $*(@callee_guaranteed () -> @owned Int, @callee_guaranteed () -> @owned Int, @callee_guaranteed () -> @owned Int, @callee_guaranteed () -> @owned Int, @callee_guaranteed () -> @owned Int, @callee_guaranteed () -> @owned Int, @callee_guaranteed () -> @owned Int, @callee_guaranteed () -> @owned Int, @callee_guaranteed () -> @owned Int) + %e11 = tuple_extract %e1 : $(enum1, X), 0 + switch_enum %e11 : $enum1, case #enum1.bb0!enumelt: bb11 + +bb11(%p1 : $((@callee_guaranteed () -> @owned X, @callee_guaranteed () -> @owned X, + @callee_guaranteed () -> @owned X, @callee_guaranteed () -> @owned X, + @callee_guaranteed () -> @owned X, @callee_guaranteed () -> @owned X, + @callee_guaranteed () -> @owned X, @callee_guaranteed () -> @owned X, + @callee_guaranteed () -> @owned X))): + br bb3(%p1 : $((@callee_guaranteed () -> @owned X, @callee_guaranteed () -> @owned X, + @callee_guaranteed () -> @owned X, @callee_guaranteed () -> @owned X, + @callee_guaranteed () -> @owned X, 
@callee_guaranteed () -> @owned X, + @callee_guaranteed () -> @owned X, @callee_guaranteed () -> @owned X, + @callee_guaranteed () -> @owned X))) + +bb2(%e2 : $(enum2, X)): + // CHECK: %[[ADDR2:.*]] = unchecked_take_enum_data_addr %{{.*}} : $*enum2, #enum2.bb0!enumelt + // CHECK: copy_addr [take] %[[ADDR2]] to [init] %1 : $*(@callee_guaranteed () -> @owned Int, @callee_guaranteed () -> @owned Int, @callee_guaranteed () -> @owned Int, @callee_guaranteed () -> @owned Int, @callee_guaranteed () -> @owned Int, @callee_guaranteed () -> @owned Int, @callee_guaranteed () -> @owned Int, @callee_guaranteed () -> @owned Int, @callee_guaranteed () -> @owned Int) + %e22 = tuple_extract %e2 : $(enum2, X), 0 + switch_enum %e22 : $enum2, case #enum2.bb0!enumelt: bb22 + +bb22(%p2 : $((@callee_guaranteed () -> @owned X, @callee_guaranteed () -> @owned X, + @callee_guaranteed () -> @owned X, @callee_guaranteed () -> @owned X, + @callee_guaranteed () -> @owned X, @callee_guaranteed () -> @owned X, + @callee_guaranteed () -> @owned X, @callee_guaranteed () -> @owned X, + @callee_guaranteed () -> @owned X))): + br bb3(%p2 : $((@callee_guaranteed () -> @owned X, @callee_guaranteed () -> @owned X, + @callee_guaranteed () -> @owned X, @callee_guaranteed () -> @owned X, + @callee_guaranteed () -> @owned X, @callee_guaranteed () -> @owned X, + @callee_guaranteed () -> @owned X, @callee_guaranteed () -> @owned X, + @callee_guaranteed () -> @owned X))) + +bb3(%p3 : $((@callee_guaranteed () -> @owned X, @callee_guaranteed () -> @owned X, + @callee_guaranteed () -> @owned X, @callee_guaranteed () -> @owned X, + @callee_guaranteed () -> @owned X, @callee_guaranteed () -> @owned X, + @callee_guaranteed () -> @owned X, @callee_guaranteed () -> @owned X, + @callee_guaranteed () -> @owned X))): + store %p3 to %loc : $*(@callee_guaranteed () -> @owned X, @callee_guaranteed () -> @owned X, + @callee_guaranteed () -> @owned X, @callee_guaranteed () -> @owned X, + @callee_guaranteed () -> @owned X, 
@callee_guaranteed () -> @owned X, + @callee_guaranteed () -> @owned X, @callee_guaranteed () -> @owned X, + @callee_guaranteed () -> @owned X) + + dealloc_stack %loc : $*(@callee_guaranteed () -> @owned X, @callee_guaranteed () -> @owned X, + @callee_guaranteed () -> @owned X, @callee_guaranteed () -> @owned X, + @callee_guaranteed () -> @owned X, @callee_guaranteed () -> @owned X, + @callee_guaranteed () -> @owned X, @callee_guaranteed () -> @owned X, + @callee_guaranteed () -> @owned X) + %t = tuple () + return %t : $() +} diff --git a/test/IRGen/sanitize_coverage.swift b/test/IRGen/sanitize_coverage.swift index af28df7424155..e7090acaab692 100644 --- a/test/IRGen/sanitize_coverage.swift +++ b/test/IRGen/sanitize_coverage.swift @@ -15,6 +15,8 @@ import Darwin #elseif canImport(Glibc) import Glibc +#elseif canImport(Android) + import Android #elseif os(Windows) import CRT #else diff --git a/test/Interop/Cxx/foreign-reference/reference-counted-irgen.swift b/test/Interop/Cxx/foreign-reference/reference-counted-irgen.swift index 09f1e9e6e15c5..5cb8e3dfdae03 100644 --- a/test/Interop/Cxx/foreign-reference/reference-counted-irgen.swift +++ b/test/Interop/Cxx/foreign-reference/reference-counted-irgen.swift @@ -36,7 +36,7 @@ public func getNullable(wantNullptr: Bool) -> GlobalCountNullableInit? 
{ return result } -// CHECK: define {{.*}}swiftcc i64 @"$s4main11getNullable11wantNullptrSo011GlobalCountC4InitVSgSb_tF"(i1 %0) +// CHECK: define {{.*}}swiftcc i{{.*}} @"$s4main11getNullable11wantNullptrSo011GlobalCountC4InitVSgSb_tF"(i1 %0) // CHECK-NEXT: entry: // CHECK: %1 = call ptr @{{_ZN23GlobalCountNullableInit6createEb|"\?create\@GlobalCountNullableInit\@\@SAPEAU1\@_N\@Z"}} // CHECK-NEXT: %2 = ptrtoint ptr %1 to i64 diff --git a/test/Interop/Cxx/foreign-reference/reference-counted.swift b/test/Interop/Cxx/foreign-reference/reference-counted.swift index a09c10af5b752..0ebeb63e1093d 100644 --- a/test/Interop/Cxx/foreign-reference/reference-counted.swift +++ b/test/Interop/Cxx/foreign-reference/reference-counted.swift @@ -1,4 +1,5 @@ -// RUN: %target-run-simple-swift(-I %S/Inputs/ -Xfrontend -enable-experimental-cxx-interop -Xfrontend -validate-tbd-against-ir=none -Xfrontend -disable-llvm-verify -Xfrontend -disable-availability-checking) +// RUN: %target-run-simple-swift(-I %S/Inputs/ -Xfrontend -enable-experimental-cxx-interop -Xfrontend -validate-tbd-against-ir=none -Xfrontend -disable-llvm-verify -Xfrontend -disable-availability-checking -Onone -D NO_OPTIMIZATIONS) +// RUN: %target-run-simple-swift(-I %S/Inputs/ -Xfrontend -enable-experimental-cxx-interop -Xfrontend -validate-tbd-against-ir=none -Xfrontend -disable-llvm-verify -Xfrontend -disable-availability-checking -O) // // REQUIRES: executable_test // TODO: This should work without ObjC interop in the future rdar://97497120 @@ -15,13 +16,17 @@ public func blackHole(_ _: T) { } @inline(never) func localTest() { var x = NS.LocalCount.create() +#if NO_OPTIMIZATIONS expectEqual(x.value, 8) // This is 8 because of "var x" "x.value" * 2, two method calls on x, and "(x, x, x)". 
+#endif expectEqual(x.returns42(), 42) expectEqual(x.constMethod(), 42) let t = (x, x, x) +#if NO_OPTIMIZATIONS expectEqual(x.value, 5) +#endif } ReferenceCountedTestSuite.test("Local") { @@ -41,14 +46,18 @@ ReferenceCountedTestSuite.test("Global optional holding local ref count") { func globalTest1() { var x = GlobalCount.create() let t = (x, x, x) +#if NO_OPTIMIZATIONS expectEqual(globalCount, 4) +#endif blackHole(t) } @inline(never) func globalTest2() { var x = GlobalCount.create() +#if NO_OPTIMIZATIONS expectEqual(globalCount, 1) +#endif } ReferenceCountedTestSuite.test("Global") { diff --git a/test/Interop/Cxx/foreign-reference/witness-table.swift b/test/Interop/Cxx/foreign-reference/witness-table.swift index 8ef04f518c9b2..62073bd4de8d3 100644 --- a/test/Interop/Cxx/foreign-reference/witness-table.swift +++ b/test/Interop/Cxx/foreign-reference/witness-table.swift @@ -1,7 +1,6 @@ -// RUN: %target-run-simple-swift(-I %S/Inputs/ -Xfrontend -enable-experimental-cxx-interop -Xfrontend -validate-tbd-against-ir=none -Xfrontend -disable-llvm-verify -g) +// RUN: %target-run-simple-swift(-I %S/Inputs/ -Xfrontend -enable-experimental-cxx-interop -Xfrontend -validate-tbd-against-ir=none -Xfrontend -disable-llvm-verify -Xfrontend -disable-availability-checking -g) // // REQUIRES: executable_test -// REQUIRES: rdar95738946 // XFAIL: OS=windows-msvc import StdlibUnittest diff --git a/test/Interop/Cxx/libc/include-glibc.swift b/test/Interop/Cxx/libc/include-libc.swift similarity index 58% rename from test/Interop/Cxx/libc/include-glibc.swift rename to test/Interop/Cxx/libc/include-libc.swift index 2d1dcdc6ea227..807e5681cc558 100644 --- a/test/Interop/Cxx/libc/include-glibc.swift +++ b/test/Interop/Cxx/libc/include-libc.swift @@ -3,12 +3,18 @@ // REQUIRES: executable_test // REQUIRES: OS=linux-gnu || OS=linux-android +#if canImport(Glibc) import Glibc +#elseif canImport(Android) +import Android +#else +#error ("unsupported platform") +#endif import StdlibUnittest -var 
GlibcTests = TestSuite("GlibcTests") +var LibcTests = TestSuite("LibcTests") -GlibcTests.test("abs") { +LibcTests.test("abs") { expectEqual(42, abs(-42)) } diff --git a/test/Interop/Cxx/reference/Inputs/reference.h b/test/Interop/Cxx/reference/Inputs/reference.h index ffcce0a78ad89..f403e7685585f 100644 --- a/test/Interop/Cxx/reference/Inputs/reference.h +++ b/test/Interop/Cxx/reference/Inputs/reference.h @@ -35,6 +35,8 @@ const ClassTemplate &refToDependent() { return ClassTemplate(); } void dontImportAtomicRef(_Atomic(int)&) { } void takeConstRef(const int &); +inline bool takeConstRefBool(const bool &b) { return b; } +inline void takeRefBool(bool &b) { b = true; } template T &refToTemplate(T &t) { return t; } diff --git a/test/Interop/Cxx/reference/reference.swift b/test/Interop/Cxx/reference/reference.swift index d2c3bc36ccbd1..70ffe95a3191b 100644 --- a/test/Interop/Cxx/reference/reference.swift +++ b/test/Interop/Cxx/reference/reference.swift @@ -78,6 +78,17 @@ ReferenceTestSuite.test("pod-struct-const-lvalue-reference") { expectEqual(getStaticInt(), 78) } +ReferenceTestSuite.test("const reference to bool") { + expectTrue(takeConstRefBool(true)) + expectFalse(takeConstRefBool(false)) +} + +ReferenceTestSuite.test("reference to bool") { + var b = false + takeRefBool(&b) + expectTrue(b) +} + ReferenceTestSuite.test("reference to template") { var val: CInt = 53 let ref = refToTemplate(&val) diff --git a/test/Interop/Cxx/templates/Inputs/function-templates.h b/test/Interop/Cxx/templates/Inputs/function-templates.h index 82593dc4ed0fe..88f77d8d1ef82 100644 --- a/test/Interop/Cxx/templates/Inputs/function-templates.h +++ b/test/Interop/Cxx/templates/Inputs/function-templates.h @@ -72,9 +72,12 @@ template struct Dep { using TT = T; }; template void useDependentType(typename Dep::TT) {} template void lvalueReference(T &ref) { ref = 42; } +template void lvalueReferenceZero(T &ref) { ref = 0; } template void constLvalueReference(const T &) {} +template bool 
constLvalueReferenceToBool(const T &t) { return t; } + template void forwardingReference(T &&) {} template void PointerTemplateParameter(T*){} diff --git a/test/Interop/Cxx/templates/function-template-typechecker-errors.swift b/test/Interop/Cxx/templates/function-template-typechecker-errors.swift index 50f6835c85553..8ab44a4af8451 100644 --- a/test/Interop/Cxx/templates/function-template-typechecker-errors.swift +++ b/test/Interop/Cxx/templates/function-template-typechecker-errors.swift @@ -15,6 +15,11 @@ public func callIntegerTemplates() { hasDefaultedNonTypeTemplateParameter() } +// CHECK: error: unexpected error produced: cannot pass immutable value as inout argument: literals are not mutable +public func callLvalueRef() { + lvalueReference(true) +} + // Use protocol composition to create a type that we cannot (yet) turn into a clang::QualType. public protocol A { } public protocol B { } diff --git a/test/Interop/Cxx/templates/function-template.swift b/test/Interop/Cxx/templates/function-template.swift index 66056745f8d3c..d718040390e8d 100644 --- a/test/Interop/Cxx/templates/function-template.swift +++ b/test/Interop/Cxx/templates/function-template.swift @@ -28,6 +28,17 @@ FunctionTemplateTestSuite.test("lvalueReference where T == Int") { expectEqual(value, 42) } +FunctionTemplateTestSuite.test("lvalueReferenceZero where T == Bool") { + var value = true + lvalueReferenceZero(&value) + expectEqual(value, false) +} + +FunctionTemplateTestSuite.test("constLvalueReferenceToBool where T == Bool") { + expectTrue(constLvalueReferenceToBool(true)) + expectFalse(constLvalueReferenceToBool(false)) +} + // TODO: Generics, Any, and Protocols should be tested here but need to be // better supported in ClangTypeConverter first. 
diff --git a/test/Interop/SwiftToCxx/macros/macro-name-collision.swift b/test/Interop/SwiftToCxx/macros/macro-name-collision.swift new file mode 100644 index 0000000000000..07d2e01c3bf0b --- /dev/null +++ b/test/Interop/SwiftToCxx/macros/macro-name-collision.swift @@ -0,0 +1,16 @@ +// RUN: %empty-directory(%t) +// RUN: %target-swift-frontend %s -typecheck -module-name MacroNameCollision -clang-header-expose-decls=all-public -emit-clang-header-path %t/macros.h +// RUN: %FileCheck %s < %t/macros.h + +// RUN: %check-interop-cxx-header-in-clang(%t/macros.h) + +// CHECK-LABEL: namespace MacroNameCollision SWIFT_PRIVATE_ATTR SWIFT_SYMBOL_MODULE("MacroNameCollision") { + +@freestanding(expression) +public macro myLogMacro(error: String) = #externalMacro(module: "CompilerPlugin", type: "LogMacro") + +@freestanding(expression) +public macro myLogMacro(fault: String) = #externalMacro(module: "CompilerPlugin", type: "LogMacro") + +// CHECK: // Unavailable in C++: Swift macro 'myLogMacro(error:)' +// CHECK: // Unavailable in C++: Swift macro 'myLogMacro(fault:)' diff --git a/test/Interpreter/dynamicReplacement_property_observer.swift b/test/Interpreter/dynamicReplacement_property_observer.swift index 9c3313e36c144..bbf47ecf410fe 100644 --- a/test/Interpreter/dynamicReplacement_property_observer.swift +++ b/test/Interpreter/dynamicReplacement_property_observer.swift @@ -17,6 +17,8 @@ import Darwin #elseif canImport(Glibc) import Glibc +#elseif canImport(Android) + import Android #elseif os(Windows) import CRT import WinSDK diff --git a/test/Interpreter/dynamic_replacement.swift b/test/Interpreter/dynamic_replacement.swift index 80f572e634fce..426532a1951d5 100644 --- a/test/Interpreter/dynamic_replacement.swift +++ b/test/Interpreter/dynamic_replacement.swift @@ -109,6 +109,8 @@ import StdlibUnittest import Darwin #elseif canImport(Glibc) import Glibc +#elseif canImport(Android) + import Android #elseif os(Windows) import CRT import WinSDK diff --git 
a/test/Interpreter/dynamic_replacement_chaining.swift b/test/Interpreter/dynamic_replacement_chaining.swift index bd5579ed88528..940a361072bd2 100644 --- a/test/Interpreter/dynamic_replacement_chaining.swift +++ b/test/Interpreter/dynamic_replacement_chaining.swift @@ -30,6 +30,8 @@ import StdlibUnittest import Darwin #elseif canImport(Glibc) import Glibc +#elseif canImport(Android) + import Android #elseif os(Windows) import CRT import WinSDK diff --git a/test/Interpreter/dynamic_replacement_without_previous_calls.swift b/test/Interpreter/dynamic_replacement_without_previous_calls.swift index a9dc7254d836a..233f52a6aa572 100644 --- a/test/Interpreter/dynamic_replacement_without_previous_calls.swift +++ b/test/Interpreter/dynamic_replacement_without_previous_calls.swift @@ -16,6 +16,8 @@ import StdlibUnittest import Darwin #elseif canImport(Glibc) import Glibc +#elseif canImport(Android) + import Android #elseif os(Windows) import CRT import WinSDK diff --git a/test/ModuleInterface/bitwise_copyable.swift b/test/ModuleInterface/bitwise_copyable.swift index 7ec9619eb99e5..63050550e60f7 100644 --- a/test/ModuleInterface/bitwise_copyable.swift +++ b/test/ModuleInterface/bitwise_copyable.swift @@ -9,3 +9,9 @@ public struct S_Implicit_Noncopyable {} // CHECK-NOT: extension Test.S_Implicit_Noncopyable : Swift.BitwiseCopyable {} + +// CHECK: public protocol BitwiseCopyable { +// CHECK-NEXT: } +// CHECK-NEXT: public typealias _BitwiseCopyable = Test.BitwiseCopyable +public protocol BitwiseCopyable {} +public typealias _BitwiseCopyable = BitwiseCopyable diff --git a/test/ModuleInterface/bitwise_copyable_stdlib.swift b/test/ModuleInterface/bitwise_copyable_stdlib.swift new file mode 100644 index 0000000000000..d5f48cd05d6dc --- /dev/null +++ b/test/ModuleInterface/bitwise_copyable_stdlib.swift @@ -0,0 +1,21 @@ +// RUN: %empty-directory(%t) +// RUN: %target-swift-emit-module-interface(%t.swiftinterface) %s -parse-stdlib -module-name Swift +// RUN: %FileCheck %s < 
%t.swiftinterface +// RUN: %target-swift-typecheck-module-from-interface(%t.swiftinterface) -parse-stdlib -module-name Swift + +// CHECK: #if compiler(>=5.3) && $BitwiseCopyable2 +// CHECK-NEXT: public protocol BitwiseCopyable { +// CHECK-NEXT: } +// CHECK-NEXT: #else +// CHECK-NEXT: public protocol _BitwiseCopyable { +// CHECK-NEXT: } +// CHECK-NEXT: #endif + +// CHECK: #if compiler(>=5.3) && $BitwiseCopyable2 +// CHECK-NEXT: public typealias _BitwiseCopyable = Swift.BitwiseCopyable +// CHECK-NEXT: #else +// CHECK-NEXT: public typealias BitwiseCopyable = Swift._BitwiseCopyable +// CHECK-NEXT: #endif +public protocol BitwiseCopyable {} +public typealias _BitwiseCopyable = BitwiseCopyable + diff --git a/test/Prototypes/BigInt.swift b/test/Prototypes/BigInt.swift index 532f926bffb7b..ca7725cede255 100644 --- a/test/Prototypes/BigInt.swift +++ b/test/Prototypes/BigInt.swift @@ -22,6 +22,8 @@ import StdlibUnittest import Darwin #elseif canImport(Glibc) import Glibc +#elseif canImport(Android) + import Android #elseif os(Windows) import CRT #else diff --git a/test/Reflection/typeref_decoding.swift b/test/Reflection/typeref_decoding.swift index b1bc12da5ec82..3c5be05e744e3 100644 --- a/test/Reflection/typeref_decoding.swift +++ b/test/Reflection/typeref_decoding.swift @@ -8,6 +8,9 @@ // RUN: %empty-directory(%t) +// FIXME: rdar://127796117 +// UNSUPPORTED: OS=linux-gnu && CPU=aarch64 + // RUN: %target-build-swift -Xfrontend -enable-anonymous-context-mangled-names %S/Inputs/ConcreteTypes.swift %S/Inputs/GenericTypes.swift %S/Inputs/Protocols.swift %S/Inputs/Extensions.swift %S/Inputs/Closures.swift -parse-as-library -emit-module -emit-library -module-name TypesToReflect -o %t/%target-library-name(TypesToReflect) // RUN: %target-build-swift -Xfrontend -enable-anonymous-context-mangled-names %S/Inputs/ConcreteTypes.swift %S/Inputs/GenericTypes.swift %S/Inputs/Protocols.swift %S/Inputs/Extensions.swift %S/Inputs/Closures.swift %S/Inputs/main.swift -emit-module 
-emit-executable -module-name TypesToReflect -o %t/TypesToReflect diff --git a/test/SIL/OwnershipVerifier/begin_apply_use_after_end_apply.sil b/test/SIL/OwnershipVerifier/begin_apply_use_after_end_apply.sil index ec9ac989cfec0..84e0636dae573 100644 --- a/test/SIL/OwnershipVerifier/begin_apply_use_after_end_apply.sil +++ b/test/SIL/OwnershipVerifier/begin_apply_use_after_end_apply.sil @@ -21,7 +21,7 @@ sil @use_klass : $@convention(thin) (@guaranteed Klass) -> () // CHECK: Error#: 0. End Error in Function: 'guaranteed_coroutine_caller' // CHECK-LABEL: Error#: 1. Begin Error in Function: 'guaranteed_coroutine_caller' -// CHECK: Owned yield without life ending uses! +// CHECK: Owned yield without lifetime ending uses! // CHECK: Value: (**%7**, %8) = begin_apply %1() : $@yield_once @convention(thin) () -> @yields @owned Klass // user: %10 // CHECK: Error#: 1. End Error in Function: 'guaranteed_coroutine_caller' @@ -34,7 +34,7 @@ sil @use_klass : $@convention(thin) (@guaranteed Klass) -> () // CHECK: Error#: 2. End Error in Function: 'guaranteed_coroutine_caller' // CHECK-LABEL: Error#: 3. Begin Error in Function: 'guaranteed_coroutine_caller' -// CHECK: Owned yield without life ending uses! +// CHECK: Owned yield without lifetime ending uses! // CHECK: Value: (**%16**, %17) = begin_apply %1() : $@yield_once @convention(thin) () -> @yields @owned Klass // user: %18 // CHECK: Error#: 3. 
End Error in Function: 'guaranteed_coroutine_caller' diff --git a/test/SILGen/vtable_thunks.swift b/test/SILGen/vtable_thunks.swift index 4260505f84585..2909bf3e4f949 100644 --- a/test/SILGen/vtable_thunks.swift +++ b/test/SILGen/vtable_thunks.swift @@ -145,18 +145,16 @@ class H: G { // CHECK-LABEL: sil private [thunk] [ossa] @$s13vtable_thunks1DC3iuo{{[_0-9a-zA-Z]*}}FTV // CHECK: bb0([[X:%.*]] : @guaranteed $B, [[Y:%.*]] : @guaranteed $Optional, [[Z:%.*]] : @guaranteed $B, [[W:%.*]] : @guaranteed $D): // CHECK: [[WRAP_X:%.*]] = enum $Optional, #Optional.some!enumelt, [[X]] : $B -// CHECK: [[Y_COPY:%.*]] = copy_value [[Y]] -// CHECK: switch_enum [[Y_COPY]] : $Optional, case #Optional.some!enumelt: [[SOME_BB:bb[0-9]+]], case #Optional.none!enumelt: [[NONE_BB:bb[0-9]+]] +// CHECK: switch_enum [[Y]] : $Optional, case #Optional.some!enumelt: [[SOME_BB:bb[0-9]+]], case #Optional.none!enumelt: [[NONE_BB:bb[0-9]+]] // CHECK: [[NONE_BB]]: // CHECK: [[DIAGNOSE_UNREACHABLE_FUNC:%.*]] = function_ref @$ss30_diagnoseUnexpectedNilOptional{{.*}} // CHECK: apply [[DIAGNOSE_UNREACHABLE_FUNC]] // CHECK: unreachable -// CHECK: [[SOME_BB]]([[UNWRAP_Y:%.*]] : @owned $B): -// CHECK: [[BORROWED_UNWRAP_Y:%.*]] = begin_borrow [[UNWRAP_Y]] +// CHECK: [[SOME_BB]]([[UNWRAP_Y:%.*]] : @guaranteed $B): // CHECK: [[THUNK_FUNC:%.*]] = function_ref @$s13vtable_thunks1DC3iuo{{.*}} -// CHECK: [[RES:%.*]] = apply [[THUNK_FUNC]]([[WRAP_X]], [[BORROWED_UNWRAP_Y]], [[Z]], [[W]]) +// CHECK: [[RES:%.*]] = apply [[THUNK_FUNC]]([[WRAP_X]], [[UNWRAP_Y]], [[Z]], [[W]]) // CHECK: [[WRAP_RES:%.*]] = enum $Optional, {{.*}} [[RES]] // CHECK: return [[WRAP_RES]] diff --git a/test/SILOptimizer/c_string_optimization.swift b/test/SILOptimizer/c_string_optimization.swift index f859063c77d6d..2173bd0b4d122 100644 --- a/test/SILOptimizer/c_string_optimization.swift +++ b/test/SILOptimizer/c_string_optimization.swift @@ -10,6 +10,8 @@ import Darwin #elseif canImport(Glibc) import Glibc +#elseif canImport(Android) + import 
Android #elseif os(Windows) import CRT #else diff --git a/test/SILOptimizer/discard_checking.swift b/test/SILOptimizer/discard_checking.swift index f374bf91aca95..d96b01ea38da3 100644 --- a/test/SILOptimizer/discard_checking.swift +++ b/test/SILOptimizer/discard_checking.swift @@ -16,7 +16,7 @@ func globalThrowingFn() throws {} struct Basics: ~Copyable { consuming func test1(_ b: Bool) { guard b else { - fatalError("bah!") // expected-error {{must consume 'self' before exiting method that discards self}} + return // expected-error {{must consume 'self' before exiting method that discards self}} } discard self // expected-note {{discarded self here}} } @@ -24,7 +24,7 @@ struct Basics: ~Copyable { consuming func test1_fixed(_ b: Bool) { guard b else { _ = consume self - fatalError("bah!") + return } discard self } @@ -33,7 +33,7 @@ struct Basics: ~Copyable { repeat { switch c { case .red: - fatalError("bah!") + return case .blue: throw E.someError case .green: @@ -49,7 +49,7 @@ struct Basics: ~Copyable { switch c { case .red: discard self - fatalError("bah!") + return case .blue: discard self throw E.someError @@ -145,7 +145,7 @@ struct Basics: ~Copyable { if case .red = c { discard self // expected-note {{discarded self here}} } - fatalError("oh no") // expected-error {{must consume 'self' before exiting method that discards self}} + return // expected-error {{must consume 'self' before exiting method that discards self}} } consuming func test7_fixed(_ c: Color) throws { @@ -154,7 +154,7 @@ struct Basics: ~Copyable { return } _ = consume self - fatalError("oh no") + return } consuming func test8(_ c: Color) throws { @@ -162,9 +162,9 @@ struct Basics: ~Copyable { discard self // expected-note {{discarded self here}} } if case .blue = c { - fatalError("hi") // expected-error {{must consume 'self' before exiting method that discards self}} + return } - } + } // expected-error {{must consume 'self' before exiting method that discards self}} consuming func 
test8_stillMissingAConsume1(_ c: Color) throws { if case .red = c { @@ -173,7 +173,7 @@ struct Basics: ~Copyable { } if case .blue = c { _ = consume self - fatalError("hi") + return } } // expected-error {{must consume 'self' before exiting method that discards self}} @@ -183,7 +183,7 @@ struct Basics: ~Copyable { return } if case .blue = c { - fatalError("hi") // expected-error {{must consume 'self' before exiting method that discards self}} + return // expected-error {{must consume 'self' before exiting method that discards self}} } _ = consume self } @@ -195,7 +195,7 @@ struct Basics: ~Copyable { } if case .blue = c { _ = consume self - fatalError("hi") + return } _ = consume self } @@ -407,7 +407,7 @@ struct Basics: ~Copyable { case 2: return // expected-error {{must consume 'self' before exiting method that discards self}} case 3: - fatalError("no") // expected-error {{must consume 'self' before exiting method that discards self}} + return // expected-error {{must consume 'self' before exiting method that discards self}} case 4: globalConsumingFn(self) default: @@ -568,7 +568,7 @@ struct Money: ~Copyable { consuming func spend(_ charge: Int) throws -> Money { guard charge > 0 else { - fatalError("can't charge a negative amount!") // expected-error {{must consume 'self' before exiting method that discards self}} + return Money(balance: balance) // expected-error {{must consume 'self' before exiting method that discards self}} } if balance < charge { diff --git a/test/SILOptimizer/experimental-swift-based-closure-specialization/closure_spec_and_inline.swift b/test/SILOptimizer/experimental-swift-based-closure-specialization/closure_spec_and_inline.swift new file mode 100644 index 0000000000000..de0c3b87511f3 --- /dev/null +++ b/test/SILOptimizer/experimental-swift-based-closure-specialization/closure_spec_and_inline.swift @@ -0,0 +1,24 @@ +// RUN: %target-swift-frontend -parse-as-library -O -module-name=test -experimental-swift-based-closure-specialization %s 
-emit-sil | %FileCheck %s +// XFAIL: * + +func closure(_ a: Int, b: Int) -> Bool { + return a < b +} + +// Check that closure() is inlined into call_closure after call_closure is +// specialized for it. + +// CHECK-LABEL: sil shared [noinline] @$s4test12call_closureySbSi_SiSbSi_SitXEtF27$s4test7closure_1bSbSi_SitFTf1nnc_n +// CHECK-NOT: apply +// CHECK: builtin "cmp_slt_Int +// CHECK-NOT: apply +// CHECK: return +@inline(never) +func call_closure(_ a: Int, _ b: Int, _ f: (Int , Int) -> Bool) -> Bool { + return f(a, b) +} + +public func testit() -> Bool { + return call_closure(0, 1, closure) +} + diff --git a/test/SILOptimizer/experimental-swift-based-closure-specialization/closure_specialize.sil b/test/SILOptimizer/experimental-swift-based-closure-specialization/closure_specialize.sil new file mode 100644 index 0000000000000..116c527a6273a --- /dev/null +++ b/test/SILOptimizer/experimental-swift-based-closure-specialization/closure_specialize.sil @@ -0,0 +1,943 @@ +// RUN: %target-sil-opt -enable-sil-verify-all -experimental-swift-based-closure-specialization %s | %FileCheck %s +// XFAIL: * + +import Builtin +import Swift + +// CHECK-LABEL: sil shared [noinline] @$s7specgen12take_closureyyySi_SitcF023$s7specgen6calleryySiFyE8_SitcfU_SiTf1c_n : $@convention(thin) (Int) -> () { + +// CHECK: bb0(%0 : $Int) +// CHECK: function_ref @$s7specgen6calleryySiFySi_SitcfU_ +// CHECK: partial_apply + +// CHECK-LABEL: sil shared [noinline] @$s7specgen12take_closureyyySi_SitcF26$s7specgen6calleeyySi_SitFTf1c_n : $@convention(thin) () -> () { +// CHECK-NEXT: bb0: +// CHECK: [[FUN:%.*]] = function_ref @$s7specgen6calleeyySi_SitF : $@convention(thin) (Int, Int) -> () +// CHECK: thin_to_thick_function [[FUN]] : $@convention(thin) (Int, Int) -> () to $@callee_owned (Int, Int) -> () + +// CHECK-LABEL: sil [noinline] @$s7specgen12take_closureyyySi_SitcF : $@convention(thin) (@owned @callee_owned (Int, Int) -> ()) -> () { +sil [noinline] @$s7specgen12take_closureyyySi_SitcF : 
$@convention(thin) (@owned @callee_owned (Int, Int) -> ()) -> () { +bb0(%0 : $@callee_owned (Int, Int) -> ()): + %1 = alloc_stack $Int + %2 = load %1 : $*Int + %3 = apply %0(%2, %2) : $@callee_owned (Int, Int) -> () + dealloc_stack %1 : $*Int + %9999 = tuple() + return %9999 : $() +} + +// CHECK-LABEL: sil shared [noinline] @$s7specgen13take_closure2yyySi_SitcF023$s7specgen6calleryySiFyE8_SitcfU_SiTf1c_n : $@convention(thin) (Int) -> () { +// CHECK: bb0(%0 : $Int) +// CHECK: [[FUN:%.*]] = function_ref @$s7specgen6calleryySiFySi_SitcfU_ +// CHECK: partial_apply [[FUN]]( + +// CHECK-LABEL: sil shared [noinline] @$s7specgen13take_closure2yyySi_SitcF26$s7specgen6calleeyySi_SitFTf1c_n : $@convention(thin) () -> () { +// CHECK-NEXT: bb0: +// CHECK: [[FUN:%.*]] = function_ref @$s7specgen6calleeyySi_SitF : $@convention(thin) (Int, Int) -> () +// CHECK: thin_to_thick_function [[FUN]] : $@convention(thin) (Int, Int) -> () to $@callee_owned (Int, Int) -> () + +// CHECK-LABEL: sil [noinline] @$s7specgen13take_closure2yyySi_SitcF : $@convention(thin) (@owned @callee_owned (Int, Int) -> ()) -> () { +sil [noinline] @$s7specgen13take_closure2yyySi_SitcF : $@convention(thin) (@owned @callee_owned (Int, Int) -> ()) -> () { +bb0(%0 : $@callee_owned (Int, Int) -> ()): + %1 = alloc_stack $Int + %2 = load %1 : $*Int + %3 = apply %0(%2, %2) : $@callee_owned (Int, Int) -> () + dealloc_stack %1 : $*Int + %9999 = tuple() + return %9999 : $() +} + +// CHECK-LABEL: sil [noinline] @$s7specgen6calleeyySi_S2itF : $@convention(thin) (Int, Int, Int) -> () { +// specgen.callee (Swift.Int, Swift.Int, Swift.Int) -> () +sil [noinline] @$s7specgen6calleeyySi_S2itF : $@convention(thin) (Int, Int, Int) -> () { +bb0(%0 : $Int, %1 : $Int, %2 : $Int): + %6 = tuple () // user: %7 + return %6 : $() // id: %7 +} + +// CHECK-LABEL: sil @$s7specgen6calleryySiF : $@convention(thin) (Int) -> () { +// CHECK: [[ID1:%[0-9]+]] = function_ref 
@$s7specgen13take_closure2yyySi_SitcF023$s7specgen6calleryySiFyE8_SitcfU_SiTf1c_n : $@convention(thin) (Int) -> () +// CHECK: [[ID2:%[0-9]+]] = function_ref @$s7specgen12take_closureyyySi_SitcF023$s7specgen6calleryySiFyE8_SitcfU_SiTf1c_n : $@convention(thin) (Int) -> () +// CHECK: apply [[ID2]](%0) : $@convention(thin) (Int) -> () +// CHECK: apply [[ID1]](%0) : $@convention(thin) (Int) -> () +sil @$s7specgen6calleryySiF : $@convention(thin) (Int) -> () { +bb0(%0 : $Int): + // function_ref specgen.take_closure ((Swift.Int, Swift.Int) -> ()) -> () + %2 = function_ref @$s7specgen12take_closureyyySi_SitcF : $@convention(thin) (@owned @callee_owned (Int, Int) -> ()) -> () // user: %5 + // function_ref specgen.(caller (Swift.Int) -> ()).(closure #1) + %3 = function_ref @$s7specgen6calleryySiFySi_SitcfU_ : $@convention(thin) (Int, Int, Int) -> () // user: %4 + %4 = partial_apply %3(%0) : $@convention(thin) (Int, Int, Int) -> () // user: %5 + strong_retain %4 : $@callee_owned (Int, Int) -> () + %5 = apply %2(%4) : $@convention(thin) (@owned @callee_owned (Int, Int) -> ()) -> () + %6 = function_ref @$s7specgen13take_closure2yyySi_SitcF : $@convention(thin) (@owned @callee_owned (Int, Int) -> ()) -> () // user: %5 + strong_retain %4 : $@callee_owned (Int, Int) -> () + %7 = apply %6(%4) : $@convention(thin) (@owned @callee_owned (Int, Int) -> ()) -> () + strong_release %4 : $@callee_owned (Int, Int) -> () + %9999 = tuple() + return %9999 : $() +} + +// CHECK-LABEL: sil shared @$s7specgen6calleryySiFySi_SitcfU_ : $@convention(thin) (Int, Int, Int) -> () { +sil shared @$s7specgen6calleryySiFySi_SitcfU_ : $@convention(thin) (Int, Int, Int) -> () { +bb0(%0 : $Int, %1 : $Int, %2 : $Int): + %5 = alloc_box $<τ_0_0> { var τ_0_0 } , var, name "p" // users: %6, %10, %14 + %5a = project_box %5 : $<τ_0_0> { var τ_0_0 } , 0 + store %0 to %5a : $*Int // id: %6 + %7 = alloc_box $<τ_0_0> { var τ_0_0 } , var, name "q" // users: %8, %11, %13 + %7a = project_box %7 : $<τ_0_0> { var τ_0_0 } , 0 
+ store %1 to %7a : $*Int // id: %8 + // function_ref specgen.callee (Swift.Int, Swift.Int, Swift.Int) -> () + %9 = function_ref @$s7specgen6calleeyySi_S2itF : $@convention(thin) (Int, Int, Int) -> () // user: %12 + %10 = load %5a : $*Int // user: %12 + %11 = load %7a : $*Int // user: %12 + %12 = apply %9(%10, %11, %2) : $@convention(thin) (Int, Int, Int) -> () + strong_release %7 : $<τ_0_0> { var τ_0_0 } + strong_release %5 : $<τ_0_0> { var τ_0_0 } + %15 = tuple () // user: %16 + return %15 : $() // id: %16 +} + +////////////////////////////////// +// Thin To Thick Function Tests // +////////////////////////////////// + +// CHECK-LABEL: sil [noinline] @$s7specgen6calleeyySi_SitF : $@convention(thin) (Int, Int) -> () { +// specgen.callee (Swift.Int, Swift.Int) -> () +sil [noinline] @$s7specgen6calleeyySi_SitF : $@convention(thin) (Int, Int) -> () { +bb0(%0 : $Int, %1 : $Int): + %6 = tuple () // user: %7 + return %6 : $() // id: %7 +} + +// CHECK-LABEL: sil @$s7specgen11tttficalleryySiF : $@convention(thin) (Int) -> () { +// CHECK: [[ID1:%[0-9]+]] = function_ref @$s7specgen13take_closure2yyySi_SitcF26$s7specgen6calleeyySi_SitFTf1c_n : $@convention(thin) () -> () +// CHECK: [[ID2:%[0-9]+]] = function_ref @$s7specgen12take_closureyyySi_SitcF26$s7specgen6calleeyySi_SitFTf1c_n : $@convention(thin) () -> () +// CHECK: apply [[ID2]]() : $@convention(thin) () -> () +// CHECK: apply [[ID1]]() : $@convention(thin) () -> () +sil @$s7specgen11tttficalleryySiF : $@convention(thin) (Int) -> () { +bb0(%0 : $Int): + // function_ref specgen.take_closure ((Swift.Int, Swift.Int) -> ()) -> () + %2 = function_ref @$s7specgen12take_closureyyySi_SitcF : $@convention(thin) (@owned @callee_owned (Int, Int) -> ()) -> () // user: %5 + // function_ref specgen.(caller (Swift.Int) -> ()).(closure #1) + %3 = function_ref @$s7specgen6calleeyySi_SitF : $@convention(thin) (Int, Int) -> () // user: %4 + %4 = thin_to_thick_function %3 : $@convention(thin) (Int, Int) -> () to $@callee_owned (Int, Int) 
-> () // user: %5 + %5 = apply %2(%4) : $@convention(thin) (@owned @callee_owned (Int, Int) -> ()) -> () + %6 = function_ref @$s7specgen13take_closure2yyySi_SitcF : $@convention(thin) (@owned @callee_owned (Int, Int) -> ()) -> () + %7 = apply %6(%4) : $@convention(thin) (@owned @callee_owned (Int, Int) -> ()) -> () + %9999 = tuple () // user: %7 + return %9999 : $() // id: %7 +} + +// We don't handle closures that close over address types (*NOTE* this includes +// address and non-address only types) taken as @in or @in_guaranteed. + +// This is a temporary limitation. +// TODO: figure out what to do with non-inout indirect arguments +// https://forums.swift.org/t/non-inout-indirect-types-not-supported-in-closure-specialization-optimization/70826 +// CHECK-LABEL: sil @address_closure : $@convention(thin) (@in Int) -> () { +sil @address_closure : $@convention(thin) (@in Int) -> () { +bb0(%0 : $*Int): + %6 = tuple() + return %6 : $() +} + +// CHECK-LABEL: sil @address_closure_user : $@convention(thin) (@owned @callee_owned () -> ()) -> () { +sil @address_closure_user : $@convention(thin) (@owned @callee_owned () -> ()) -> () { +bb0(%0 : $@callee_owned () -> ()): + %1 = apply %0() : $@callee_owned () -> () + %9999 = tuple() + return %9999 : $() +} + +// CHECK-LABEL: sil @address_caller : $@convention(thin) (@in Int) -> () { +// CHECK-NOT: _TTSf1cl15address_closureSi__address_closure_user +sil @address_caller : $@convention(thin) (@in Int) -> () { +bb0(%0 : $*Int): + %1 = function_ref @address_closure : $@convention(thin) (@in Int) -> () + %2 = partial_apply %1(%0) : $@convention(thin) (@in Int) -> () + %3 = function_ref @address_closure_user : $@convention(thin) (@owned @callee_owned () -> ()) -> () + %4 = apply %3(%2) : $@convention(thin) (@owned @callee_owned () -> ()) -> () + %9999 = tuple() + return %9999 : $() +} + +class A {} + +sil hidden [noinline] @closure : $@convention(thin) (@owned A, @owned A) -> () { +bb0(%0 : $A, %1 : $A): + strong_release %1 : $A + 
strong_release %0 : $A + %4 = tuple () + return %4 : $() +} + +// CHECK-LABEL: sil shared {{.*}} @$s11use_closure{{.*}}Tf{{.*}} : $@convention(thin) (@owned A) -> () { +sil hidden [noinline] @use_closure : $@convention(thin) (@owned @callee_owned (@owned A) -> ()) -> () { +bb0(%0 : $@callee_owned (@owned A) -> ()): + %1 = alloc_ref $A + %2 = apply %0(%1) : $@callee_owned (@owned A) -> () + %3 = tuple () + return %3 : $() +} + +// CHECK-LABEL: sil shared {{.*}} @$s17use_closure_throw{{.*}}Tf{{.*}} : $@convention(thin) (@owned A) -> @error any Error { +sil hidden [noinline] @use_closure_throw : $@convention(thin) (@owned @callee_owned (@owned A) -> ()) -> @error any Error { +bb0(%0 : $@callee_owned (@owned A) -> ()): + %1 = alloc_ref $A + %2 = apply %0(%1) : $@callee_owned (@owned A) -> () + %3 = tuple () + return %3 : $() +} + +// CHECK-LABEL: sil {{.*}} @different_execution_counts +// CHECK: bb0([[ARG:%.*]] : $A +// CHECK: strong_retain [[ARG]] +// CHECK-NOT: partial_apply +// CHECK: [[SPECIALIZED_CLOSURE_USER:%.*]] = function_ref @$s11use_closure{{.*}}Tf +// CHECK: retain_value [[ARG]] +// CHECK-NOT: partial_apply +// CHECK: integer_literal $Builtin.Int64, 0 +// CHECK: br bb2 + +// CHECK: bb1: +// CHECK: strong_release [[ARG]] +// CHECK: release_value [[ARG]] +// CHECK: return + +// CHECK: bb2({{.*}}): +// Match the partial_apply consume of arg. 
+// CHECK: retain_value [[ARG]] +// CHECK: apply [[SPECIALIZED_CLOSURE_USER]]([[ARG]]) +// CHECK: cond_br {{.*}}, bb1, bb3 + +sil hidden [noinline] @different_execution_counts : $@convention(thin) (@guaranteed A) -> () { +bb0(%0 : $A): + strong_retain %0 : $A + %2 = function_ref @closure : $@convention(thin) (@owned A, @owned A) -> () + %3 = partial_apply %2(%0) : $@convention(thin) (@owned A, @owned A) -> () + %4 = integer_literal $Builtin.Int64, 0 + %5 = integer_literal $Builtin.Int64, 5 + %6 = integer_literal $Builtin.Int64, 1 + %7 = integer_literal $Builtin.Int1, 0 + %8 = function_ref @use_closure : $@convention(thin) (@owned @callee_owned (@owned A) -> ()) -> () + br bb2(%4 : $Builtin.Int64) + +bb1: + strong_release %3 : $@callee_owned (@owned A) -> () + %11 = tuple () + return %11 : $() + +bb2(%13 : $Builtin.Int64): + %14 = builtin "sadd_with_overflow_Int64"(%13 : $Builtin.Int64, %6 : $Builtin.Int64, %7 : $Builtin.Int1) : $(Builtin.Int64, Builtin.Int1) + %15 = tuple_extract %14 : $(Builtin.Int64, Builtin.Int1), 0 + strong_retain %3 : $@callee_owned (@owned A) -> () + %17 = apply %8(%3) : $@convention(thin) (@owned @callee_owned (@owned A) -> ()) -> () + %18 = builtin "cmp_eq_Int64"(%15 : $Builtin.Int64, %5 : $Builtin.Int64) : $Builtin.Int1 + cond_br %18, bb1, bb3 + +bb3: + br bb2(%15 : $Builtin.Int64) +} + +// CHECK-LABEL: sil @insert_release_in_liferange_exit_block +// CHECK: bb0(%0 : $A): +// CHECK: retain_value %0 +// CHECK: bb1: +// CHECK-NEXT: release_value %0 +// CHECK-NEXT: br bb3 +// CHECK: bb2: +// CHECK: retain_value %0 +// CHECK: apply %{{[0-9]+}}(%0) +// CHECK: release_value %0 +// CHECK: bb3: +// CHECK-NOT: %0 +// CHECK: return +sil @insert_release_in_liferange_exit_block : $@convention(thin) (@guaranteed A) -> () { +bb0(%0 : $A): + strong_retain %0 : $A + %2 = function_ref @closure : $@convention(thin) (@owned A, @owned A) -> () + %3 = partial_apply %2(%0) : $@convention(thin) (@owned A, @owned A) -> () + %8 = function_ref @use_closure : 
$@convention(thin) (@owned @callee_owned (@owned A) -> ()) -> () + %5 = partial_apply %8(%3) : $@convention(thin) (@owned @callee_owned (@owned A) -> ()) -> () + cond_br undef, bb1, bb2 + +bb1: + br bb3 + +bb2: + strong_retain %3 : $@callee_owned (@owned A) -> () + %17 = apply %8(%3) : $@convention(thin) (@owned @callee_owned (@owned A) -> ()) -> () + br bb3 + +bb3: + strong_release %5 : $@callee_owned () -> () + %11 = tuple () + return %11 : $() +} + +// CHECK-LABEL: sil @insert_release_at_critical_edge +// CHECK: bb0(%0 : $A): +// CHECK: retain_value %0 +// CHECK: bb1: +// CHECK-NEXT: release_value %0 +// CHECK-NEXT: br bb3 +// CHECK: bb2: +// CHECK: retain_value %0 +// CHECK: apply %{{[0-9]+}}(%0) +// CHECK: release_value %0 +// CHECK: bb3: +// CHECK-NOT: %0 +// CHECK: return +sil @insert_release_at_critical_edge : $@convention(thin) (@guaranteed A) -> () { +bb0(%0 : $A): + strong_retain %0 : $A + %2 = function_ref @closure : $@convention(thin) (@owned A, @owned A) -> () + %3 = partial_apply %2(%0) : $@convention(thin) (@owned A, @owned A) -> () + %8 = function_ref @use_closure : $@convention(thin) (@owned @callee_owned (@owned A) -> ()) -> () + %5 = partial_apply %8(%3) : $@convention(thin) (@owned @callee_owned (@owned A) -> ()) -> () + cond_br undef, bb1, bb2 + +bb1: + strong_retain %3 : $@callee_owned (@owned A) -> () + %17 = apply %8(%3) : $@convention(thin) (@owned @callee_owned (@owned A) -> ()) -> () + br bb2 + +bb2: + strong_release %5 : $@callee_owned () -> () + %11 = tuple () + return %11 : $() +} + +// CHECK-LABEL: sil @insert_release_at_critical_loop_exit_edge +// CHECK: bb0(%0 : $A): +// CHECK: retain_value %0 +// CHECK: bb1: +// CHECK-NEXT: br bb2 +// CHECK: bb2: +// CHECK: retain_value %0 +// CHECK: apply %{{[0-9]+}}(%0) +// CHECK-NOT: %0 +// CHECK: bb3: +// CHECK-NEXT: release_value %0 +// CHECK-NEXT: br bb5 +// CHECK: bb4: +// CHECK-NEXT: release_value %0 +// CHECK-NEXT: br bb5 +// CHECK: bb5: +// CHECK-NOT: %0 +// CHECK: return +sil 
@insert_release_at_critical_loop_exit_edge : $@convention(thin) (@guaranteed A) -> () { +bb0(%0 : $A): + strong_retain %0 : $A + %2 = function_ref @closure : $@convention(thin) (@owned A, @owned A) -> () + %3 = partial_apply %2(%0) : $@convention(thin) (@owned A, @owned A) -> () + %8 = function_ref @use_closure : $@convention(thin) (@owned @callee_owned (@owned A) -> ()) -> () + %5 = partial_apply %8(%3) : $@convention(thin) (@owned @callee_owned (@owned A) -> ()) -> () + cond_br undef, bb3, bb1 + +bb1: + br bb2 + +bb2: + strong_retain %3 : $@callee_owned (@owned A) -> () + %17 = apply %8(%3) : $@convention(thin) (@owned @callee_owned (@owned A) -> ()) -> () + cond_br undef, bb2, bb4 + +bb3: + br bb4 + +bb4: + strong_release %5 : $@callee_owned () -> () + %11 = tuple () + return %11 : $() +} + +// CHECK-LABEL: sil @insert_release_in_loop_exit_block +// CHECK: bb0(%0 : $A): +// CHECK: retain_value %0 +// CHECK: bb1: +// CHECK-NEXT: br bb2 +// CHECK: bb2: +// CHECK: retain_value %0 +// CHECK: apply %{{[0-9]+}}(%0) +// CHECK-NOT: %0 +// CHECK: bb3: +// CHECK-NEXT: release_value %0 +// CHECK-NOT: %0 +// CHECK: return +sil @insert_release_in_loop_exit_block : $@convention(thin) (@guaranteed A) -> () { +bb0(%0 : $A): + strong_retain %0 : $A + %2 = function_ref @closure : $@convention(thin) (@owned A, @owned A) -> () + %3 = partial_apply %2(%0) : $@convention(thin) (@owned A, @owned A) -> () + %8 = function_ref @use_closure : $@convention(thin) (@owned @callee_owned (@owned A) -> ()) -> () + %5 = partial_apply %8(%3) : $@convention(thin) (@owned @callee_owned (@owned A) -> ()) -> () + cond_br undef, bb3, bb1 + +bb1: + br bb2 + +bb2: + strong_retain %3 : $@callee_owned (@owned A) -> () + %17 = apply %8(%3) : $@convention(thin) (@owned @callee_owned (@owned A) -> ()) -> () + cond_br undef, bb2, bb3 + +bb3: + strong_release %5 : $@callee_owned () -> () + %11 = tuple () + return %11 : $() +} + +// CHECK-LABEL: sil @insert_release_after_try_apply +// CHECK: bb0(%0 : $A): +// 
CHECK: retain_value %0 +// CHECK: bb1: +// CHECK: retain_value %0 +// CHECK-NEXT: try_apply +// CHECK: bb2(%{{[0-9]+}} : $()): +// CHECK-NEXT: strong_release %0 +// CHECK-NEXT: release_value %0 +// CHECK-NEXT: br bb4 +// CHECK: bb3(%{{[0-9]+}} : $any Error): +// CHECK-NEXT: strong_release %0 +// CHECK-NEXT: release_value %0 +// CHECK-NEXT: br bb4 +// CHECK: bb4: +// CHECK-NOT: %0 +// CHECK: return +sil @insert_release_after_try_apply : $@convention(thin) (@guaranteed A) -> () { +bb0(%0 : $A): + %2 = function_ref @closure : $@convention(thin) (@owned A, @owned A) -> () + %3 = partial_apply %2(%0) : $@convention(thin) (@owned A, @owned A) -> () + %8 = function_ref @use_closure_throw : $@convention(thin) (@owned @callee_owned (@owned A) -> ()) -> @error Error + br bb1 + +bb1: + strong_retain %3 : $@callee_owned (@owned A) -> () + try_apply %8(%3) : $@convention(thin) (@owned @callee_owned (@owned A) -> ()) -> @error Error, normal bb2, error bb3 + +bb2(%n : $()): + br bb4 + +bb3(%e : $Error): + br bb4 + +bb4: + %11 = tuple () + return %11 : $() +} + + +// Ensure that we can specialize and properly mangle functions that take closures with <τ_0_0> { var τ_0_0 } . 
+ +// CHECK-LABEL: sil shared [noinline] @$s4main5inneryys5Int32Vz_yADctF25closure_with_box_argumentxz_Bi32__lXXTf1nc_n : $@convention(thin) (@inout Builtin.Int32, @owned <τ_0_0> { var τ_0_0 } ) -> () +// CHECK: bb0 +// CHECK: [[FN:%.*]] = function_ref @closure_with_box_argument +// CHECK: [[PARTIAL:%.*]] = partial_apply [[FN]](%1) +// CHECK: [[ARG:%.*]] = load %0 +// CHECK: apply [[PARTIAL]]([[ARG]]) + +// CHECK-LABEL: {{.*}} @$s4main5inneryys5Int32Vz_yADctF +sil hidden [noinline] @$s4main5inneryys5Int32Vz_yADctF : $@convention(thin) (@inout Builtin.Int32, @owned @callee_owned (Builtin.Int32) -> ()) -> () { +bb0(%0 : $*Builtin.Int32, %1 : $@callee_owned (Builtin.Int32) -> ()): + strong_retain %1 : $@callee_owned (Builtin.Int32) -> () + %5 = load %0 : $*Builtin.Int32 + %6 = apply %1(%5) : $@callee_owned (Builtin.Int32) -> () + %11 = tuple () + return %11 : $() +} + +// CHECK-LABEL: sil @pass_a_closure +sil @pass_a_closure: $@convention(thin) () -> Builtin.Int32 { +bb0: + %0 = alloc_box $<τ_0_0> { var τ_0_0 } , var, name "i" + %0a = project_box %0 : $<τ_0_0> { var τ_0_0 } , 0 + %1 = integer_literal $Builtin.Int32, 0 + store %1 to %0a : $*Builtin.Int32 + %4 = function_ref @closure_with_box_argument : $@convention(thin) (Builtin.Int32, @owned <τ_0_0> { var τ_0_0 } ) -> () + strong_retain %0 : $<τ_0_0> { var τ_0_0 } + %6 = partial_apply %4(%0) : $@convention(thin) (Builtin.Int32, @owned <τ_0_0> { var τ_0_0 } ) -> () + %7 = alloc_stack $Builtin.Int32 + %9 = integer_literal $Builtin.Int32, 1 + store %9 to %7 : $*Builtin.Int32 + %12 = function_ref @$s4main5inneryys5Int32Vz_yADctF: $@convention(thin) (@inout Builtin.Int32, @owned @callee_owned (Builtin.Int32) -> ()) -> () + strong_retain %6 : $@callee_owned (Builtin.Int32) -> () + %14 = apply %12(%7, %6) : $@convention(thin) (@inout Builtin.Int32, @owned @callee_owned (Builtin.Int32) -> ()) -> () + strong_release %6 : $@callee_owned (Builtin.Int32) -> () + %16 = tuple () + dealloc_stack %7 : $*Builtin.Int32 + %18 = load 
%0a : $*Builtin.Int32 + strong_release %0 : $<τ_0_0> { var τ_0_0 } + return %18 : $Builtin.Int32 +} + +// CHECK-LABEL: sil shared @closure_with_box_argument +sil shared @closure_with_box_argument : $@convention(thin) (Builtin.Int32, @owned <τ_0_0> { var τ_0_0 } ) -> () { +bb0(%0 : $Builtin.Int32, %1 : $<τ_0_0> { var τ_0_0 } ): + %3 = project_box %1 : $<τ_0_0> { var τ_0_0 } , 0 + store %0 to %3 : $*Builtin.Int32 + strong_release %1 : $<τ_0_0> { var τ_0_0 } + %7 = tuple () + return %7 : $() +} + +// Check that we don't crash with this: +// CHECK-LABEL: sil @test_box_with_named_elements_tuple +sil @test_box_with_named_elements_tuple: $@convention(thin) () -> Builtin.Int32 { +bb0: + %0 = alloc_box ${ let (first: Builtin.Int32, second: Builtin.Int32) } + %0p = project_box %0 : ${ let (first: Builtin.Int32, second: Builtin.Int32) }, 0 + %0a = tuple_element_addr %0p : $*(first: Builtin.Int32, second: Builtin.Int32), 0 + %0b = tuple_element_addr %0p : $*(first: Builtin.Int32, second: Builtin.Int32), 1 + %1 = integer_literal $Builtin.Int32, 0 + store %1 to %0a : $*Builtin.Int32 + store %1 to %0b : $*Builtin.Int32 + %4 = function_ref @closure_with_named_elements_tuple : $@convention(thin) (Builtin.Int32, @owned { let (first: Builtin.Int32, second: Builtin.Int32) }) -> () + strong_retain %0 : ${ let (first: Builtin.Int32, second: Builtin.Int32) } + %6 = partial_apply %4(%0) : $@convention(thin) (Builtin.Int32, @owned { let (first: Builtin.Int32, second: Builtin.Int32) }) -> () + %7 = alloc_stack $Builtin.Int32 + %9 = integer_literal $Builtin.Int32, 1 + store %9 to %7 : $*Builtin.Int32 + %12 = function_ref @$s4main5inneryys5Int32Vz_yADctF: $@convention(thin) (@inout Builtin.Int32, @owned @callee_owned (Builtin.Int32) -> ()) -> () + strong_retain %6 : $@callee_owned (Builtin.Int32) -> () + %14 = apply %12(%7, %6) : $@convention(thin) (@inout Builtin.Int32, @owned @callee_owned (Builtin.Int32) -> ()) -> () + strong_release %6 : $@callee_owned (Builtin.Int32) -> () + %16 = tuple 
() + dealloc_stack %7 : $*Builtin.Int32 + %18 = load %0a : $*Builtin.Int32 + strong_release %0 : ${ let (first: Builtin.Int32, second: Builtin.Int32) } + return %18 : $Builtin.Int32 +} + +// CHECK-LABEL: sil shared @closure_with_named_elements_tuple +sil shared @closure_with_named_elements_tuple : $@convention(thin) (Builtin.Int32, @owned { let (first: Builtin.Int32, second: Builtin.Int32) }) -> () { +bb0(%0 : $Builtin.Int32, %1 : ${ let (first: Builtin.Int32, second: Builtin.Int32) }): + %3 = project_box %1 : ${ let (first: Builtin.Int32, second: Builtin.Int32) }, 0 + %4 = tuple_element_addr %3 : $*(first: Builtin.Int32, second: Builtin.Int32), 0 + store %0 to %4 : $*Builtin.Int32 + strong_release %1 : ${ let (first: Builtin.Int32, second: Builtin.Int32) } + %7 = tuple () + return %7 : $() +} + + +// The specialized function should always be a thin function, regardless of the +// representation of the original function. + +public protocol P { + static func foo(cl: () -> Int) -> Int +} + +public struct S : P { + public static func foo(cl: () -> Int) -> Int + init() +} + +// CHECK-LABEL: sil shared @$s4test1SVAA1PA2aDP3fooyS2iycFZTW8closure2SiTf1cn_n : $@convention(thin) (@thick S.Type, Int) -> Int +sil @$s4test1SVAA1PA2aDP3fooyS2iycFZTW : $@convention(witness_method: P) (@owned @callee_owned () -> Int, @thick S.Type) -> Int { +bb0(%0 : $@callee_owned () -> Int, %1 : $@thick S.Type): + %3 = apply %0() : $@callee_owned () -> Int + return %3 : $Int +} + +sil shared @closure2 : $@convention(thin) (Int) -> Int { +bb0(%0 : $Int): + return %0 : $Int +} + +sil @call_witness_method : $@convention(thin) (Int, S) -> Int { +bb0(%0 : $Int, %1 : $S): + %3 = function_ref @closure2 : $@convention(thin) (Int) -> Int + %4 = partial_apply %3(%0) : $@convention(thin) (Int) -> Int + %5 = metatype $@thick S.Type + %6 = function_ref @$s4test1SVAA1PA2aDP3fooyS2iycFZTW : $@convention(witness_method: P) (@owned @callee_owned () -> Int, @thick S.Type) -> Int + %7 = apply %6(%4, %5) : 
$@convention(witness_method: P) (@owned @callee_owned () -> Int, @thick S.Type) -> Int + return %7 : $Int +} + +sil_witness_table S: P module test { + method #P.foo: @$s4test1SVAA1PA2aDP3fooyS2iycFZTW +} + +// Test partial_apply -> convert_function -> convert_function -> try_apply. +sil @testClosureConvertHelper : $(Int) -> () + +// specialized testClosureConvertThunk +// FIXME: Need to handle closures with multiple exceptional exits. +// CHECK-LABEL: sil shared @$s23testClosureConvertThunk0abC6HelperSiTf1nc_n : $@convention(thin) (Int) -> (@out (), @error any Error) { +// CHECK: bb0(%0 : $*(), %1 : $Int): +// CHECK: [[F:%.*]] = function_ref @testClosureConvertHelper : $@convention(thin) (Int) -> () +// CHECK: [[PA:%.*]] = partial_apply [[F]](%1) : $@convention(thin) (Int) -> () +// CHECK: [[CVT1:%.*]] = convert_escape_to_noescape [[PA]] : $@callee_owned () -> () to $@noescape @callee_owned () -> () +// CHECK: [[CVT2:%.*]] = convert_function [[CVT1]] : $@noescape @callee_owned () -> () to $@noescape @callee_owned () -> @error any Error +// CHECK: try_apply [[CVT2]]() : $@noescape @callee_owned () -> @error any Error, normal bb1, error bb2 +// CHECK: bb1 +// CHECK: release_value [[PA]] +// CHECK: return +// CHECK: bb2 +// CHECK: release_value [[PA]] +// CHECK: throw +// CHECK-LABEL: } // end sil function '$s23testClosureConvertThunk0abC6HelperSiTf1nc_n' +sil @testClosureConvertThunk : $@convention(thin) (@noescape @callee_owned () -> @error Error) -> (@out (), @error Error) { +bb0(%0 : $*(), %1 : $@noescape @callee_owned () -> @error Error): + try_apply %1() : $@noescape @callee_owned () -> @error Error, normal bb1, error bb2 + +bb1(%7 : $()): + %8 = tuple () + return %8 : $() + +bb2(%10 : $Error): + throw %10 : $Error +} + +// Test closure specialization when the closure type is converted before application. 
+sil @testClosureConvert : $(Int) -> () { +bb0(%0 : $Int): + %48 = alloc_stack $() + %49 = function_ref @testClosureConvertHelper : $@convention(thin) (Int) -> () + %50 = partial_apply %49(%0) : $@convention(thin) (Int) -> () + %51 = convert_escape_to_noescape %50 : $@callee_owned () -> () to $@noescape @callee_owned () -> () + %52 = convert_function %51 : $@noescape @callee_owned () -> () to $@noescape @callee_owned () -> @error Error + %53 = function_ref @testClosureConvertThunk : $@convention(thin) (@noescape @callee_owned () -> @error Error) -> (@out (), @error Error) + try_apply %53(%48, %52) : $@convention(thin) (@noescape @callee_owned () -> @error Error) -> (@out (), @error Error), normal bb7, error bb11 + +bb7(%callret : $()): + br bb99 + +bb11(%128 : $Error): + br bb99 + +bb99: + dealloc_stack %48 : $*() + %empty = tuple () + return %empty : $() +} + +sil @testClosureThunkNoEscape : $@convention(thin) (@noescape @callee_guaranteed () -> ()) -> () { +bb0(%0 : $@noescape @callee_guaranteed () -> ()): + apply %0() : $@noescape @callee_guaranteed () -> () + %8 = tuple () + return %8 : $() +} + +// CHECK-LABEL: sil shared @$s24testClosureThunkNoEscape0aB13ConvertHelperSiTf1c_n : $@convention(thin) (Int) -> () { +// CHECK: bb0([[ARG:%.*]] : $Int): +// CHECK: [[F:%.*]] = function_ref @testClosureConvertHelper : $@convention(thin) (Int) -> () +// CHECK: [[PA:%.*]] = partial_apply [callee_guaranteed] [[F]]([[ARG]]) : $@convention(thin) (Int) -> () +// CHECK: [[E:%.*]] = convert_escape_to_noescape [[PA]] : $@callee_guaranteed () -> () to $@noescape @callee_guaranteed () -> () +// CHECK: apply [[E]]() : $@noescape @callee_guaranteed () -> () +// CHECK: release_value [[PA]] +// CHECK: return +// CHECK: } +// CHECK-LABEL: sil @testClosureNoEscape : $@convention(thin) (Int) -> () { +// CHECK-NOT: partial_apply +// CHECK: [[FN:%.*]] = function_ref @$s24testClosureThunkNoEscape0aB13ConvertHelperSiTf1c_n : $@convention(thin) (Int) -> () +// CHECK-NOT: partial_apply +// 
CHECK: %5 = apply [[FN]](%0) : $@convention(thin) (Int) -> () +// CHECK-NOT: release +// CHECK: return +// CHECK: } + +sil @testClosureNoEscape : $(Int) -> () { +bb0(%0 : $Int): + %48 = alloc_stack $() + %49 = function_ref @testClosureConvertHelper : $@convention(thin) (Int) -> () + %50 = partial_apply [callee_guaranteed] %49(%0) : $@convention(thin) (Int) -> () + %51 = convert_escape_to_noescape %50 : $@callee_guaranteed () -> () to $@noescape @callee_guaranteed () -> () + %53 = function_ref @testClosureThunkNoEscape : $@convention(thin) (@noescape @callee_guaranteed () -> ()) -> () + apply %53(%51) : $@convention(thin) (@noescape @callee_guaranteed () -> ()) -> () + release_value %50: $@callee_guaranteed () ->() + dealloc_stack %48 : $*() + %empty = tuple () + return %empty : $() +} + + +sil @testClosureConvertHelper2 : $(Int) -> Int + +sil @testClosureThunkNoEscape2 : $@convention(thin) (@noescape @callee_guaranteed () -> @out Int) -> @out Int { +bb0(%0 : $*Int, %1 : $@noescape @callee_guaranteed () -> @out Int): + apply %1(%0) : $@noescape @callee_guaranteed () -> @out Int + %8 = tuple () + return %8 : $() +} + +sil [reabstraction_thunk] @reabstractionThunk : $@convention(thin) (@noescape @callee_guaranteed () -> Int) -> @out Int + +// CHECK-LABEL: sil shared @$s25testClosureThunkNoEscape20aB14ConvertHelper2SiTf1nc_n : $@convention(thin) (Int) -> @out Int +// CHECK: [[PA1:%.*]] = partial_apply +// CHECK: convert_escape_to_noescape +// CHECK: [[PA2:%.*]] = partial_apply +// CHECK: convert_escape_to_noescape +// CHECK: apply +// CHECK: release_value [[PA1]] +// CHECK: release_value [[PA2]] +// CHECK: return + +// CHECK-LABEL: sil shared @$s25testClosureThunkNoEscape219reabstractionThunk2SiIegd_Tf1nc_n : $@convention(thin) (@owned @callee_guaranteed () -> Int) -> @out Int { +// CHECK: bb0(%0 : $*Int, %1 : $@callee_guaranteed () -> Int): +// CHECK: [[F:%.*]] = function_ref @reabstractionThunk2 +// CHECK: [[PA:%.*]] = partial_apply [callee_guaranteed] [[F]](%1) +// 
CHECK: [[CVT:%.*]] = convert_escape_to_noescape [[PA]] +// CHECK: apply [[CVT]](%0) : $@noescape @callee_guaranteed () -> @out Int +// CHECK: release_value [[PA]] : $@callee_guaranteed () -> @out Int +// CHECK: return + +// CHECK-LABEL: sil @reabstractionTest : $@convention(thin) (Int) -> () +// CHECK: [[F:%.*]] = function_ref @$s25testClosureThunkNoEscape20aB14ConvertHelper2SiTf1nc_n +// CHECK: apply [[F]] +// CHECK: return +sil @reabstractionTest : $(Int) -> () { +bb0(%0 : $Int): + %48 = alloc_stack $Int + %49 = function_ref @testClosureConvertHelper2 : $@convention(thin) (Int) -> Int + %50 = partial_apply [callee_guaranteed] %49(%0) : $@convention(thin) (Int) -> Int + %51 = convert_escape_to_noescape %50 : $@callee_guaranteed () -> Int to $@noescape @callee_guaranteed () -> Int + %52 = function_ref @reabstractionThunk : $@convention(thin) (@noescape @callee_guaranteed () -> Int) -> @out Int + %53 = partial_apply [callee_guaranteed] %52(%51) : $@convention(thin) (@noescape @callee_guaranteed () -> Int) -> @out Int + %54 = convert_escape_to_noescape %53 : $@callee_guaranteed () -> @out Int to $@noescape @callee_guaranteed () -> @out Int + %55 = function_ref @testClosureThunkNoEscape2 : $@convention(thin) (@noescape @callee_guaranteed () -> @out Int) -> @out Int + apply %55(%48, %54) : $@convention(thin) (@noescape @callee_guaranteed () -> @out Int) -> @out Int + release_value %50: $@callee_guaranteed () -> Int + release_value %53: $@callee_guaranteed () -> @out Int + dealloc_stack %48 : $*Int + %empty = tuple () + return %empty : $() +} + +sil @testClosureConvertHelper3 : $@convention(thin) (Int) -> Int +sil [reabstraction_thunk] @reabstractionThunk3 : $@convention(thin) (@noescape @callee_guaranteed () -> Int) -> @out Int + +sil @testClosureThunkNoEscape3 : $@convention(thin) (@owned @noescape @callee_guaranteed @substituted <τ_0_0> () -> @out τ_0_0 for ) -> @out () { +entry(%empty : $*(), %closure : $@noescape @callee_guaranteed @substituted <τ_0_0> () -> @out 
τ_0_0 for ): + %out = alloc_stack $Int + %ret = apply %closure(%out) : $@noescape @callee_guaranteed @substituted <τ_0_0> () -> @out τ_0_0 for + dealloc_stack %out : $*Int + store %ret to %empty : $*() + %retval = tuple () + return %retval : $() +} + +// CHECK-LABEL: sil @reabstractionTest4 {{.*}} { +// CHECK: [[HELPER:%[^,]+]] = function_ref @testClosureConvertHelper3 +// CHECK: [[SPECIALIZATION:%[^,]+]] = function_ref @$s25testClosureThunkNoEscape30aB14ConvertHelper3SiTf1nc_n +// CHECK: [[CLOSURE:%[^,]+]] = partial_apply [callee_guaranteed] [[HELPER]] +// CHECK: [[NOESCAPE_CLOSURE:%[^,]+]] = convert_escape_to_noescape [[CLOSURE]] +// CHECK: apply [[SPECIALIZATION]]{{.*}} +// CHECK: release_value [[CLOSURE]] +// CHECK-NOT: release_value [[CLOSURE]] +// CHECK: strong_release [[NOESCAPE_CLOSURE]] +// CHECK-LABEL: } // end sil function 'reabstractionTest4' +sil @reabstractionTest4 : $(Int) -> () { +bb0(%value : $Int): + %testThrowingClosureConvertHelper = function_ref @testClosureConvertHelper3 : $@convention(thin) (Int) -> Int + %closure = partial_apply [callee_guaranteed] %testThrowingClosureConvertHelper(%value) : $@convention(thin) (Int) -> Int + %noescapeClosure = convert_escape_to_noescape %closure : $@callee_guaranteed () -> Int to $@noescape @callee_guaranteed () -> Int + %thunk = function_ref @reabstractionThunk3 : $@convention(thin) (@noescape @callee_guaranteed () -> Int) -> @out Int + %appliedThunk = partial_apply [callee_guaranteed] [on_stack] %thunk(%noescapeClosure) : $@convention(thin) (@noescape @callee_guaranteed () -> Int) -> @out Int + + %dependency = mark_dependence %appliedThunk : $@noescape @callee_guaranteed () -> @out Int on %noescapeClosure : $@noescape @callee_guaranteed () -> Int + %generified = convert_function %dependency : $@noescape @callee_guaranteed () -> @out Int to $@noescape @callee_guaranteed @substituted <τ_0_0> () -> @out τ_0_0 for + %test = function_ref @testClosureThunkNoEscape3 : $@convention(thin) (@owned @noescape 
@callee_guaranteed @substituted <τ_0_0> () -> @out τ_0_0 for <Int>) -> @out ()
+ strong_retain %generified : $@noescape @callee_guaranteed @substituted <τ_0_0> () -> @out τ_0_0 for <Int>
+ %out = alloc_stack $()
+ %ret = apply %test(%out, %generified) : $@convention(thin) (@owned @noescape @callee_guaranteed @substituted <τ_0_0> () -> @out τ_0_0 for <Int>) -> @out ()
+ dealloc_stack %out : $*()
+ release_value %closure : $@callee_guaranteed () -> Int
+ strong_release %noescapeClosure : $@noescape @callee_guaranteed () -> Int
+ dealloc_stack %appliedThunk : $@noescape @callee_guaranteed () -> @out Int
+ %empty = tuple ()
+ return %empty : $()
+}
+
+sil @testThrowingClosureConvertHelper : $@convention(thin) (Int) -> (Int, @error any Error)
+sil [reabstraction_thunk] @reabstractionThunkThrowing : $@convention(thin) (@noescape @callee_guaranteed () -> (Int, @error any Error)) -> (@out Int, @error any Error)
+
+sil @testClosureThunkNoEscapeThrowing : $@convention(thin) (@owned @noescape @callee_guaranteed @substituted <τ_0_0> () -> (@out τ_0_0, @error any Error) for <Int>) -> (@out (), @error any Error) {
+entry(%empty : $*(), %closure : $@noescape @callee_guaranteed @substituted <τ_0_0> () -> (@out τ_0_0, @error any Error) for <Int>):
+ %out = alloc_stack $Int
+ try_apply %closure(%out) : $@noescape @callee_guaranteed @substituted <τ_0_0> () -> (@out τ_0_0, @error any Error) for <Int>, normal bb1, error bb2
+
+bb1(%ret : $()):
+ dealloc_stack %out : $*Int
+ store %ret to %empty : $*()
+ %retval = tuple ()
+ return %retval : $()
+
+bb2(%error : $any Error):
+ dealloc_stack %out : $*Int
+ throw %error : $any Error
+}
+
+// CHECK-LABEL: sil @reabstractionThrowing : $@convention(thin) (Int) -> ((), @error any Error) {
+// CHECK: [[HELPER:%[^,]+]] = function_ref @testThrowingClosureConvertHelper
+// CHECK: [[SPECIALIZATION:%[^,]+]] = function_ref @$s32testClosureThunkNoEscapeThrowing0afB13ConvertHelperSiTf1nc_n
+// CHECK: [[CLOSURE:%[^,]+]] = partial_apply [callee_guaranteed] [[HELPER]]
+// CHECK: 
[[NOESCAPE_CLOSURE:%[^,]+]] = convert_escape_to_noescape [[CLOSURE]]
+// CHECK: try_apply [[SPECIALIZATION]]{{.*}}normal [[NORMAL_BLOCK:bb[0-9]+]], error [[ERROR_BLOCK:bb[0-9]+]]
+// CHECK: [[NORMAL_BLOCK]]
+// CHECK: release_value [[CLOSURE]]
+// CHECK-NOT: release_value [[CLOSURE]]
+// CHECK: strong_release [[NOESCAPE_CLOSURE]]
+// CHECK: [[ERROR_BLOCK]]
+// CHECK: release_value [[CLOSURE]]
+// CHECK-NOT: release_value [[CLOSURE]]
+// CHECK: strong_release [[NOESCAPE_CLOSURE]]
+// CHECK-LABEL: } // end sil function 'reabstractionThrowing'
+sil @reabstractionThrowing : $(Int) -> ((), @error any Error) {
+bb0(%value : $Int):
+ %testThrowingClosureConvertHelper = function_ref @testThrowingClosureConvertHelper : $@convention(thin) (Int) -> (Int, @error any Error)
+ %closure = partial_apply [callee_guaranteed] %testThrowingClosureConvertHelper(%value) : $@convention(thin) (Int) -> (Int, @error any Error)
+ %noescapeClosure = convert_escape_to_noescape %closure : $@callee_guaranteed () -> (Int, @error any Error) to $@noescape @callee_guaranteed () -> (Int, @error any Error)
+ %thunk = function_ref @reabstractionThunkThrowing : $@convention(thin) (@noescape @callee_guaranteed () -> (Int, @error any Error)) -> (@out Int, @error any Error)
+ %appliedThunk = partial_apply [callee_guaranteed] [on_stack] %thunk(%noescapeClosure) : $@convention(thin) (@noescape @callee_guaranteed () -> (Int, @error any Error)) -> (@out Int, @error any Error)
+
+ %dependency = mark_dependence %appliedThunk : $@noescape @callee_guaranteed () -> (@out Int, @error any Error) on %noescapeClosure : $@noescape @callee_guaranteed () -> (Int, @error any Error)
+ %generified = convert_function %dependency : $@noescape @callee_guaranteed () -> (@out Int, @error any Error) to $@noescape @callee_guaranteed @substituted <τ_0_0> () -> (@out τ_0_0, @error any Error) for <Int>
+ %test = function_ref @testClosureThunkNoEscapeThrowing : $@convention(thin) (@owned @noescape @callee_guaranteed @substituted <τ_0_0> () 
-> (@out τ_0_0, @error any Error) for <Int>) -> (@out (), @error any Error)
+ strong_retain %generified : $@noescape @callee_guaranteed @substituted <τ_0_0> () -> (@out τ_0_0, @error any Error) for <Int>
+ %out = alloc_stack $()
+ try_apply %test(%out, %generified) : $@convention(thin) (@owned @noescape @callee_guaranteed @substituted <τ_0_0> () -> (@out τ_0_0, @error any Error) for <Int>) -> (@out (), @error any Error), normal bb1, error bb2
+
+bb1(%ret : $()):
+ dealloc_stack %out : $*()
+ release_value %closure : $@callee_guaranteed () -> (Int, @error any Error)
+ strong_release %noescapeClosure : $@noescape @callee_guaranteed () -> (Int, @error any Error)
+ dealloc_stack %appliedThunk : $@noescape @callee_guaranteed () -> (@out Int, @error any Error)
+ %empty = tuple ()
+ return %empty : $()
+
+bb2(%error : $any Error):
+ dealloc_stack %out : $*()
+ release_value %closure : $@callee_guaranteed () -> (Int, @error any Error)
+ strong_release %noescapeClosure : $@noescape @callee_guaranteed () -> (Int, @error any Error)
+ dealloc_stack %appliedThunk : $@noescape @callee_guaranteed () -> (@out Int, @error any Error)
+ throw %error : $any Error
+}
+
+// Currently not supported cases.
+ +sil @testClosureThunk4 : $@convention(thin) (@owned @callee_guaranteed () -> @out Int) -> @out Int { +bb0(%0 : $*Int, %1 : $@callee_guaranteed () -> @out Int): + apply %1(%0) : $@callee_guaranteed () -> @out Int + release_value %1: $@callee_guaranteed () -> @out Int + %8 = tuple () + return %8 : $() +} +// CHECK-LABEL: sil @reabstractionTest2 +// CHECK: bb0(%0 : $Int): +// CHECK: [[STK:%.*]] = alloc_stack $Int +// CHECK: [[F:%.*]] = function_ref @testClosureConvertHelper2 +// CHECK: [[PA:%.*]] = partial_apply [callee_guaranteed] [[F]](%0) +// CHECK: [[CVT:%.*]] = convert_escape_to_noescape [[PA]] +// CHECK: [[F2:%.*]] = function_ref @reabstractionThunk +// CHECK: [[PA2:%.*]] = partial_apply [callee_guaranteed] [[F2]]([[CVT]]) +// CHECK: [[F3:%.*]] = function_ref @testClosureThunk4 +// CHECK: apply [[F3]]([[STK]], [[PA2]]) +// CHECK: release_value [[PA]] +// CHECK: dealloc_stack [[STK]] +// CHECK: return + +sil @reabstractionTest2 : $(Int) -> () { +bb0(%0 : $Int): + %48 = alloc_stack $Int + %49 = function_ref @testClosureConvertHelper2 : $@convention(thin) (Int) -> Int + %50 = partial_apply [callee_guaranteed] %49(%0) : $@convention(thin) (Int) -> Int + %51 = convert_escape_to_noescape %50 : $@callee_guaranteed () -> Int to $@noescape @callee_guaranteed () -> Int + %52 = function_ref @reabstractionThunk : $@convention(thin) (@noescape @callee_guaranteed () -> Int) -> @out Int + %53 = partial_apply [callee_guaranteed] %52(%51) : $@convention(thin) (@noescape @callee_guaranteed () -> Int) -> @out Int + %55 = function_ref @testClosureThunk4 : $@convention(thin) (@owned @callee_guaranteed () -> @out Int) -> @out Int + apply %55(%48, %53) : $@convention(thin) (@owned @callee_guaranteed () -> @out Int) -> @out Int + release_value %50: $@callee_guaranteed () -> Int + dealloc_stack %48 : $*Int + %empty = tuple () + return %empty : $() +} + +// Only support the ultimate partial_apply. 
+sil [reabstraction_thunk] @reabstractionThunk2 : $@convention(thin) (@guaranteed @callee_guaranteed () -> Int) -> @out Int + +// CHECK-LABEL: sil @reabstractionTest3 : $@convention(thin) (Int) -> () { +// CHECK: bb0(%0 : $Int): +// CHECK: [[STK:%.*]] = alloc_stack $Int +// CHECK: [[F:%.*]] = function_ref @testClosureConvertHelper2 +// CHECK: [[PA:%.*]] = partial_apply [callee_guaranteed] [[F]](%0) +// CHECK: [[F2:%.*]] = function_ref @reabstractionThunk2 +// CHECK: [[SPEC:%.*]] = function_ref @$s25testClosureThunkNoEscape219reabstractionThunk2SiIegd_Tf1nc_n : $@convention(thin) (@owned @callee_guaranteed () -> Int) -> @out Int +// CHECK: retain_value [[PA]] : $@callee_guaranteed () -> Int +// CHECK: %8 = apply [[SPEC]]([[STK]], [[PA]]) : $@convention(thin) (@owned @callee_guaranteed () -> Int) -> @out Int +// CHECK: strong_release [[PA]] : $@callee_guaranteed () -> Int +// CHECK: dealloc_stack [[STK]] : $*Int +// CHECK: return + +sil @reabstractionTest3 : $(Int) -> () { +bb0(%0 : $Int): + %48 = alloc_stack $Int + %49 = function_ref @testClosureConvertHelper2 : $@convention(thin) (Int) -> Int + %50 = partial_apply [callee_guaranteed] %49(%0) : $@convention(thin) (Int) -> Int + %52 = function_ref @reabstractionThunk2 : $@convention(thin) (@guaranteed @callee_guaranteed () -> Int) -> @out Int + %53 = partial_apply [callee_guaranteed] %52(%50) : $@convention(thin) (@guaranteed @callee_guaranteed () -> Int) -> @out Int + %54 = convert_escape_to_noescape %53 : $@callee_guaranteed () -> @out Int to $@noescape @callee_guaranteed () -> @out Int + %55 = function_ref @testClosureThunkNoEscape2 : $@convention(thin) (@noescape @callee_guaranteed () -> @out Int) -> @out Int + apply %55(%48, %54) : $@convention(thin) (@noescape @callee_guaranteed () -> @out Int) -> @out Int + release_value %53: $@callee_guaranteed () -> @out Int + dealloc_stack %48 : $*Int + %empty = tuple () + return %empty : $() +} + +////////////////////// +// Begin Apply Test // +////////////////////// + 
+sil @coroutine_user : $@yield_once @convention(thin) (@noescape @callee_guaranteed () -> Int) -> @yields Int { +bb0(%0 : $@noescape @callee_guaranteed () -> Int): + %1 = apply %0() : $@noescape @callee_guaranteed () -> Int + unreachable +} + +// CHECK-LABEL: sil @test_coroutine_user : $@convention(thin) (Int) -> Int { +// CHECK: [[COROUTINE_USER:%.*]] = function_ref @coroutine_user +// CHECK: begin_apply [[COROUTINE_USER]]( +// CHECK: } // end sil function 'test_coroutine_user' +sil @test_coroutine_user : $@convention(thin) (Int) -> Int { +bb0(%0 : $Int): + %1 = function_ref @testClosureConvertHelper2 : $@convention(thin) (Int) -> Int + %2 = partial_apply [callee_guaranteed] %1(%0) : $@convention(thin) (Int) -> Int + %3 = convert_escape_to_noescape %2 : $@callee_guaranteed () -> Int to $@noescape @callee_guaranteed () -> Int + %4 = function_ref @coroutine_user : $@yield_once @convention(thin) (@noescape @callee_guaranteed () -> Int) -> @yields Int + (%value, %token) = begin_apply %4(%3) : $@yield_once @convention(thin) (@noescape @callee_guaranteed () -> Int) -> @yields Int + cond_br undef, bb1, bb2 + +bb1: + end_apply %token as $() + br bb3 + +bb2: + abort_apply %token + br bb3 + +bb3: + release_value %2 : $@callee_guaranteed () -> Int + return %value : $Int +} +// CHECK-LABEL: sil @reabstractionTest_on_stack +// CHECK: bb0([[A:%.*]] : $Int): +// CHECK: [[R:%.*]] = alloc_stack $Int +// CHECK: [[F:%.*]] = function_ref @$s25testClosureThunkNoEscape20aB14ConvertHelper2SiTf1nc_n +// CHECK: apply [[F]]([[R]], [[A]]) +sil @reabstractionTest_on_stack : $(Int) -> () { +bb0(%0 : $Int): + %48 = alloc_stack $Int + %49 = function_ref @testClosureConvertHelper2 : $@convention(thin) (Int) -> Int + %50 = partial_apply [callee_guaranteed] [on_stack] %49(%0) : $@convention(thin) (Int) -> Int + %52 = function_ref @reabstractionThunk : $@convention(thin) (@noescape @callee_guaranteed () -> Int) -> @out Int + %53 = partial_apply [callee_guaranteed] [on_stack] %52(%50) : 
$@convention(thin) (@noescape @callee_guaranteed () -> Int) -> @out Int + %55 = function_ref @testClosureThunkNoEscape2 : $@convention(thin) (@noescape @callee_guaranteed () -> @out Int) -> @out Int + apply %55(%48, %53) : $@convention(thin) (@noescape @callee_guaranteed () -> @out Int) -> @out Int + dealloc_stack %53 : $@noescape @callee_guaranteed () -> @out Int + dealloc_stack %50 : $@noescape @callee_guaranteed () -> Int + dealloc_stack %48 : $*Int + %empty = tuple () + return %empty : $() +} diff --git a/test/SILOptimizer/experimental-swift-based-closure-specialization/closure_specialize_and_cfg.sil b/test/SILOptimizer/experimental-swift-based-closure-specialization/closure_specialize_and_cfg.sil new file mode 100644 index 0000000000000..b54d53662ced8 --- /dev/null +++ b/test/SILOptimizer/experimental-swift-based-closure-specialization/closure_specialize_and_cfg.sil @@ -0,0 +1,49 @@ +// RUN: %target-sil-opt -sil-verify-without-invalidation -enable-sil-verify-all -simplify-cfg -experimental-swift-based-closure-specialization %s + +// Test if the ClosureSpecializer correctly invalidates the dominator tree +// even if there are no functions specialized. +// The test just checks if the compiler does not crash. +// First running SimplifyCFG creates the dominator tree, which should then be +// invalidated by the ClosureSpecializer. +// If this is not done correctly the verification will complain that the +// dominator tree is not up to date. 
+ +import Builtin +import Swift + +sil @closure : $@convention(thin) () -> () + +sil @use_closure : $@convention(thin) (@owned @callee_owned () -> ()) -> () + +sil hidden [noinline] @use_closure2 : $@convention(thin) (@owned @callee_owned () -> (), @owned @callee_owned () -> ()) -> () { +bb0(%0 : $@callee_owned () -> (), %1 : $@callee_owned () -> ()): + %2 = apply %0() : $@callee_owned () -> () + %3 = apply %1() : $@callee_owned () -> () + %4 = tuple () + return %3 : $() +} + +sil @insert_release_in_liferange_exit_block : $@convention(thin) () -> () { +bb0: + %2 = function_ref @closure : $@convention(thin) () -> () + %3 = partial_apply %2() : $@convention(thin) () -> () + %8 = function_ref @use_closure : $@convention(thin) (@owned @callee_owned () -> ()) -> () + %5 = partial_apply %8(%3) : $@convention(thin) (@owned @callee_owned () -> ()) -> () + + // There is a critical edge from bb0 to bb2 which is broken by ValueLifetimeAnalysis. + cond_br undef, bb2, bb1 + +bb1: + strong_retain %3 : $@callee_owned () -> () + strong_retain %3 : $@callee_owned () -> () + %10 = function_ref @use_closure2 : $@convention(thin) (@owned @callee_owned () -> (), @owned @callee_owned () -> ()) -> () + + // Passing two closures actually prevents closure specialization. 
+ %17 = apply %10(%3, %3) : $@convention(thin) (@owned @callee_owned () -> (), @owned @callee_owned () -> ()) -> () + br bb2 + +bb2: + strong_release %5 : $@callee_owned () -> () + %11 = tuple () + return %11 : $() +} diff --git a/test/SILOptimizer/experimental-swift-based-closure-specialization/closure_specialize_attrs.sil b/test/SILOptimizer/experimental-swift-based-closure-specialization/closure_specialize_attrs.sil new file mode 100644 index 0000000000000..34222394140be --- /dev/null +++ b/test/SILOptimizer/experimental-swift-based-closure-specialization/closure_specialize_attrs.sil @@ -0,0 +1,87 @@ +// RUN: %target-sil-opt -enable-sil-verify-all -experimental-swift-based-closure-specialization %s | %FileCheck %s +// XFAIL: * + +import Builtin + +class C {} + +sil [ossa] @getC : $@convention(thin) () -> @owned C + +class Storage {} + +struct Val {} + +// Verify that the argument to the specialized take_closure is still @_eagerMove. + +// CHECK-LABEL: sil {{.*}}@$s12take_closure0B04main1CCTf1nc_n : {{.*}}{ +// CHECK: {{bb[0-9]+}}({{%[^,]+}} : @_eagerMove @owned $C, {{%[^,]+}} : +// CHECK-LABEL: } // end sil function '$s12take_closure0B04main1CCTf1nc_n' + +sil [ossa] [noinline] @take_closure : $@convention(thin) (@owned C, @guaranteed @noescape @callee_guaranteed (@guaranteed C, @guaranteed C) -> ()) -> () { +bb0(%c : @_eagerMove @owned $C, %0 : @guaranteed $@noescape @callee_guaranteed (@guaranteed C, @guaranteed C) -> ()): + %getC = function_ref @getC : $@convention(thin) () -> @owned C + %c1 = apply %getC() : $@convention(thin) () -> @owned C + %c2 = apply %getC() : $@convention(thin) () -> @owned C + %3 = apply %0(%c1, %c2) : $@noescape @callee_guaranteed (@guaranteed C, @guaranteed C) -> () + destroy_value %c2 : $C + destroy_value %c1 : $C + destroy_value %c : $C + %retval = tuple() + return %retval : $() +} + +sil shared [ossa] @closure : $@convention(thin) (@guaranteed C, @guaranteed C, @guaranteed C) -> () { +bb0(%0 : @guaranteed $C, %1 : @guaranteed $C, 
%2 : @guaranteed $C): + %15 = tuple () + return %15 : $() +} + +sil @caller : $@convention(thin) (@owned C) -> () { +bb0(%0 : $C): + %3 = function_ref @closure : $@convention(thin) (@guaranteed C, @guaranteed C, @guaranteed C) -> () + %4 = partial_apply [callee_guaranteed] [on_stack] %3(%0) : $@convention(thin) (@guaranteed C, @guaranteed C, @guaranteed C) -> () + %take_closure = function_ref @take_closure : $@convention(thin) (@owned C, @guaranteed @noescape @callee_guaranteed (@guaranteed C, @guaranteed C) -> ()) -> () + strong_retain %0 : $C + %5 = apply %take_closure(%0, %4) : $@convention(thin) (@owned C, @guaranteed @noescape @callee_guaranteed (@guaranteed C, @guaranteed C) -> ()) -> () + strong_release %0 : $C + dealloc_stack %4 : $@noescape @callee_guaranteed (@guaranteed C, @guaranteed C) -> () + %retval = tuple() + return %retval : $() +} + +// ============================================================================= +// rdar://105887096: do not insert a retain inside a read-only function. +// For now, the specialization is disabled. +// +// TODO: A @noescape closure should never be converted to an @owned argument +// regardless of the function attribute. + +// This should not be specialized until we support guaranteed arguments. 
+// CHECK-NOT: @$s20takesReadOnlyClosure +sil private [readonly] @takesReadOnlyClosure : $@convention(thin) (@noescape @callee_guaranteed (Val) -> Val) -> Val { +bb0(%2 : $@noescape @callee_guaranteed (Val) -> Val): + %46 = struct $Val () + %261 = apply %2(%46) : $@noescape @callee_guaranteed (Val) -> Val + return %261 : $Val +} + +sil private @readOnlyClosure : $@convention(thin) (Val, @guaranteed Storage) -> Val { +bb0(%0 : $Val, %1 : @closureCapture $Storage): + %46 = struct $Val () + return %46 : $Val +} + +// CHECK-LABEL: sil @testPassReadOnlyClosure : $@convention(method) (@guaranteed Storage) -> Val { +// CHECK-NOT: @owned Storage +// CHECK: apply %{{.*}} : $@convention(thin) (@noescape @callee_guaranteed (Val) -> Val) -> Val +// CHECK-LABEL: } // end sil function 'testPassReadOnlyClosure' +sil @testPassReadOnlyClosure : $@convention(method) (@guaranteed Storage) -> Val { +bb0(%0 : $Storage): + %176 = function_ref @readOnlyClosure : $@convention(thin) (Val, @guaranteed Storage) -> Val + %177 = partial_apply [callee_guaranteed] [on_stack] %176(%0) : $@convention(thin) (Val, @guaranteed Storage) -> Val + %178 = mark_dependence %177 : $@noescape @callee_guaranteed (Val) -> Val on %0 : $Storage + %188 = function_ref @takesReadOnlyClosure : $@convention(thin) (@noescape @callee_guaranteed (Val) -> Val) -> Val + %189 = apply %188(%178) : $@convention(thin) (@noescape @callee_guaranteed (Val) -> Val) -> Val + dealloc_stack %177 : $@noescape @callee_guaranteed (Val) -> Val + return %189 : $Val +} diff --git a/test/SILOptimizer/experimental-swift-based-closure-specialization/closure_specialize_consolidated.sil b/test/SILOptimizer/experimental-swift-based-closure-specialization/closure_specialize_consolidated.sil new file mode 100644 index 0000000000000..e90d6518408b9 --- /dev/null +++ b/test/SILOptimizer/experimental-swift-based-closure-specialization/closure_specialize_consolidated.sil @@ -0,0 +1,725 @@ +// RUN: %target-sil-opt -enable-sil-verify-all 
-experimental-swift-based-closure-specialization %s | %FileCheck %s -check-prefix=REMOVECLOSURES +// RUN: %target-sil-opt -enable-sil-verify-all -closure-specialize-eliminate-dead-closures=0 -experimental-swift-based-closure-specialization %s | %FileCheck %s +// XFAIL: * + +import Builtin +import Swift + +/////////////////// +// Utility Types // +/////////////////// + +protocol P { + func foo(f: (Int32)->Int32, _ j: Int32) -> Int32 +} + +protocol Q { +} + +public class C { + @_hasStorage var c: C? { get set } + init() +} + +public struct S: Q { + @_hasStorage var c: C? { get set } + init(c: C?) + init() +} + +// = Test Summary = +// We test the following things here: +// +// 1. Address Argument +// 2. ThinToThick, Partial Apply: +// a. with and without removal of closure. +// b. @owned and @guaranteed. +// 3. Bad NonFailureExitBB. +// 4. No Call in Apply Callee. +// 5. Non simple closure (i.e. non function_ref closure). +// 6. Handle interface return types correctly. + +//////////////////////////// +// Address Argument Tests // +//////////////////////////// +// +// Make sure that we can specialize even if we have address arguments. +// +// But we don't handle closures that close over address types passed as @in or +// @in_guaranteed. +// (*NOTE* this includes address and non-address only types). +// This is a temporary limitation. 
+// CHECK-LABEL: sil @address_closure : $@convention(thin) (@in Int32) -> () { +sil @address_closure : $@convention(thin) (@in Int32) -> () { +bb0(%0 : $*Int32): + %6 = tuple() + return %6 : $() +} + +sil @address_closure_struct_complex : $@convention(thin) (@in S) -> () { +bb0(%0 : $*S): + %6 = tuple() + return %6 : $() +} + +// CHECK-LABEL: sil @address_closure_user : $@convention(thin) (@owned @callee_owned () -> ()) -> () { +sil @address_closure_user : $@convention(thin) (@owned @callee_owned () -> ()) -> () { +bb0(%0 : $@callee_owned () -> ()): + %1 = apply %0() : $@callee_owned () -> () + %9999 = tuple() + return %9999 : $() +} + +// Check that a specialization of address_closure_noescape_user was generated which does not +// take a closure as a parameter anymore. +// CHECK-LABEL: sil shared @{{.*}}address_closure_noescape_user{{.*}} : $@convention(thin) (Int32, @inout_aliasable Int32) -> () +// CHECK: function_ref @address_closure_trivial : $@convention(thin) (Int32, @inout_aliasable Int32) -> () +// CHECK: partial_apply %{{.*}} : $@convention(thin) (Int32, @inout_aliasable Int32) -> () +// CHECK: apply +// CHECK: return + +// Check that a specialization of address_closure_noescape_user was generated which does not +// take a closure as a parameter anymore. +// CHECK-LABEL: sil shared @{{.*}}address_closure_noescape_user{{.*}} : $@convention(thin) (@inout_aliasable Int32) -> () +// CHECK: function_ref @address_closure_trivial_mutating : $@convention(thin) (@inout_aliasable Int32) -> () +// CHECK: partial_apply %{{.*}} : $@convention(thin) (@inout_aliasable Int32) -> () +// CHECK: apply +// CHECK: return + +// Check that a specialization of address_closure_noescape_user was generated which does not +// take a closure as a parameter anymore. 
+// CHECK-LABEL: sil shared @{{.*}}address_closure_noescape_user{{.*}} : $@convention(thin) (@inout_aliasable any P) -> () +// CHECK: function_ref @address_closure_existential : $@convention(thin) (@inout_aliasable any P) -> () +// CHECK: partial_apply %{{.*}} : $@convention(thin) (@inout_aliasable any P) -> () +// CHECK: apply +// CHECK: return + +// Check that a specialization of address_closure_noescape_user was generated which does not +// take a closure as a parameter anymore. +// CHECK-LABEL: sil shared @{{.*}}address_closure_noescape_user{{.*}} : $@convention(thin) (@inout_aliasable S, @owned S) -> () +// CHECK: function_ref @address_closure_struct1 : $@convention(thin) (@inout_aliasable S, @owned S) -> () +// CHECK: partial_apply %{{.*}} : $@convention(thin) (@inout_aliasable S, @owned S) -> () +// CHECK: apply +// CHECK: return + +// Check that a specialization of address_closure_user was generated which does not +// take a closure as a parameter anymore. +// CHECK-LABEL: sil shared @{{.*}}address_closure_noescape_user{{.*}} : $@convention(thin) (@inout_aliasable S, @owned S) -> () +// CHECK: function_ref @address_closure_struct2 : $@convention(thin) (@inout_aliasable S, @owned S) -> () +// CHECK: partial_apply %{{.*}} : $@convention(thin) (@inout_aliasable S, @owned S) -> () +// CHECK: apply +// CHECK: return + +// Check that a specialization of address_closure_user was generated which does not +// take a closure as a parameter anymore. 
+// CHECK-LABEL: sil shared @{{.*}}address_closure_noescape_user{{.*}} : $@convention(thin) (@inout_aliasable C, @owned C) -> () +// CHECK: function_ref @address_closure_class1 : $@convention(thin) (@inout_aliasable C, @owned C) -> () +// CHECK: partial_apply %{{.*}} : $@convention(thin) (@inout_aliasable C, @owned C) -> () +// CHECK: apply +// CHECK: return + +// CHECK-LABEL: sil @address_closure_noescape_user : $@convention(thin) (@noescape @callee_owned () -> ()) -> () { +sil @address_closure_noescape_user : $@convention(thin) (@noescape @callee_owned () -> ()) -> () { +bb0(%0 : $@noescape @callee_owned () -> ()): + %1 = apply %0() : $@noescape @callee_owned () -> () + %9999 = tuple() + return %9999 : $() +} + +// CHECK-LABEL: sil @address_caller : $@convention(thin) (@in Int32) -> () { +// CHECK-NOT: _TTSf1cl15address_closureSi__address_closure_user +sil @address_caller : $@convention(thin) (@in Int32) -> () { +bb0(%0 : $*Int32): + %1 = function_ref @address_closure : $@convention(thin) (@in Int32) -> () + %2 = partial_apply %1(%0) : $@convention(thin) (@in Int32) -> () + %3 = function_ref @address_closure_user : $@convention(thin) (@owned @callee_owned () -> ()) -> () + %4 = apply %3(%2) : $@convention(thin) (@owned @callee_owned () -> ()) -> () + %9999 = tuple() + return %9999 : $() +} + +// We don't handle closures that close over address types passed as @in or +// @in_guaranteed. +// (*NOTE* this includes address and non-address only types). +// This is a temporary limitation. 
+// +// CHECK-LABEL: sil @address_caller_complex : $@convention(thin) (@in Int32) -> () +// CHECK-NOT: function_ref @{{.*}}address_closure_user{{.*}} : $@convention(thin) (@in Int32) -> () +// CHECK: partial_apply +// CHECK-NOT: function_ref @{{.*}}address_closure_user{{.*}} : $@convention(thin) (@in Int32) -> () +// CHECK: return +sil @address_caller_complex : $@convention(thin) (@in Int32) -> () { +bb0(%0 : $*Int32): + %00 = alloc_stack $Int32 + %01 = load %0 : $*Int32 + store %01 to %00 : $*Int32 + %1 = function_ref @address_closure : $@convention(thin) (@in Int32) -> () + %2 = partial_apply %1(%00) : $@convention(thin) (@in Int32) -> () + %3 = function_ref @address_closure_user : $@convention(thin) (@owned @callee_owned () -> ()) -> () + %4 = apply %3(%2) : $@convention(thin) (@owned @callee_owned () -> ()) -> () + dealloc_stack %00 : $*Int32 + br bb1 + +bb1: + %6 = apply %3(%2) : $@convention(thin) (@owned @callee_owned () -> ()) -> () + %9999 = tuple() + return %9999 : $() +} + +// We don't handle closures that close over address types passed as @in or +// @in_guaranteed. +// (*NOTE* this includes address and non-address only types). +// This is a temporary limitation. 
+// +// CHECK-LABEL: sil @address_caller_struct_complex : $@convention(thin) (@in S) -> () +// CHECK-NOT: [[SPECIALIZED_FN1:%.*]] = function_ref @{{.*}}address_closure_user{{.*}} : $@convention(thin) (@in S) -> () +// CHECK: partial_apply +// CHECK-NOT: [[SPECIALIZED_FN1:%.*]] = function_ref @{{.*}}address_closure_user{{.*}} : $@convention(thin) (@in S) -> () +// CHECK: return +sil @address_caller_struct_complex : $@convention(thin) (@in S) -> () { +bb0(%0 : $*S): + %00 = alloc_stack $S + %01 = load %0 : $*S + retain_value %01 : $S + store %01 to %00 : $*S + %1 = function_ref @address_closure_struct_complex : $@convention(thin) (@in S) -> () + %2 = partial_apply %1(%00) : $@convention(thin) (@in S) -> () + %3 = function_ref @address_closure_user : $@convention(thin) (@owned @callee_owned () -> ()) -> () + %4 = apply %3(%2) : $@convention(thin) (@owned @callee_owned () -> ()) -> () + %5 = load %00 : $*S + release_value %5 : $S + dealloc_stack %00 : $*S + br bb1 + +bb1: + %6 = apply %3(%2) : $@convention(thin) (@owned @callee_owned () -> ()) -> () + %9999 = tuple() + return %9999 : $() +} + +// More complex tests involving address arguments. 
+ +sil @address_closure_trivial : $@convention(thin) (Int32, @inout_aliasable Int32) -> () { +bb0(%0 : $Int32, %1 : $*Int32): + %9 = integer_literal $Builtin.Int32, 42 + %10 = struct $Int32 (%9 : $Builtin.Int32) + store %10 to %1 : $*Int32 + %12 = tuple () + return %12 : $() +} + +// CHECK-LABEL: sil @address_caller_trivial +// CHECK-NOT: partial_apply +// CHECK: [[SPECIALIZED_FN1:%.*]] = function_ref @{{.*}}address_closure_noescape_user{{.*}} : $@convention(thin) (Int32, @inout_aliasable Int32) -> () +// CHECK: apply [[SPECIALIZED_FN1]]{{.*}} +// CHECK-NOT: partial_apply +// CHECK: return +sil @address_caller_trivial: $@convention(thin) (Int32) -> Int32 { +bb0(%0 : $Int32): + %2 = alloc_stack $Int32, var, name "xx" + store %0 to %2 : $*Int32 + // function_ref address_closure_noescape_user(f:) + %4 = function_ref @address_closure_noescape_user : $@convention(thin) (@noescape @callee_owned () -> ()) -> () + // function_ref address_closure_trivial(x:) + %5 = function_ref @address_closure_trivial : $@convention(thin) (Int32, @inout_aliasable Int32) -> () + %6 = partial_apply %5(%0, %2) : $@convention(thin) (Int32, @inout_aliasable Int32) -> () + %6b = convert_escape_to_noescape %6 : $@callee_owned () -> () to $@noescape @callee_owned () -> () + %7 = apply %4(%6b) : $@convention(thin) (@noescape @callee_owned () -> ()) -> () + %8 = load %2 : $*Int32 + dealloc_stack %2 : $*Int32 + return %8 : $Int32 +} + +sil @address_closure_trivial_mutating : $@convention(thin) (@inout_aliasable Int32) -> () { +bb0(%0 : $*Int32): + %2 = struct_element_addr %0 : $*Int32, #Int32._value + %3 = load %2 : $*Builtin.Int32 + %4 = integer_literal $Builtin.Int32, 1 + %5 = integer_literal $Builtin.Int1, -1 + %6 = builtin "sadd_with_overflow_Int32"(%3 : $Builtin.Int32, %4 : $Builtin.Int32, %5 : $Builtin.Int1) : $(Builtin.Int32, Builtin.Int1) + %7 = tuple_extract %6 : $(Builtin.Int32, Builtin.Int1), 0 + %8 = tuple_extract %6 : $(Builtin.Int32, Builtin.Int1), 1 + cond_fail %8 : $Builtin.Int1 + %10 
= struct $Int32 (%7 : $Builtin.Int32) + store %10 to %0 : $*Int32 + %12 = tuple () + return %12 : $() +} + +// CHECK-LABEL: sil @address_caller_trivial_mutating +// CHECK-NOT: partial_apply +// CHECK: [[SPECIALIZED_FN1:%.*]] = function_ref @{{.*}}address_closure_noescape_user{{.*}} : $@convention(thin) (@inout_aliasable Int32) -> () +// CHECK: apply [[SPECIALIZED_FN1]]{{.*}} +// CHECK-NOT: partial_apply +// CHECK: return +sil @address_caller_trivial_mutating: $@convention(thin) (Int32) -> Int32 { +bb0(%0 : $Int32): + %2 = alloc_stack $Int32, var, name "xx" + store %0 to %2 : $*Int32 + %4 = function_ref @address_closure_noescape_user : $@convention(thin) (@noescape @callee_owned () -> ()) -> () + %5 = function_ref @address_closure_trivial_mutating : $@convention(thin) (@inout_aliasable Int32) -> () + %6 = partial_apply %5(%2) : $@convention(thin) (@inout_aliasable Int32) -> () + %6b = convert_escape_to_noescape %6 : $@callee_owned () -> () to $@noescape @callee_owned () -> () + %7 = apply %4(%6b) : $@convention(thin) (@noescape @callee_owned () -> ()) -> () + %8 = load %2 : $*Int32 + dealloc_stack %2 : $*Int32 + return %8 : $Int32 +} + +sil @S_init : $@convention(method) (@thin S.Type) -> @owned S + +sil hidden @address_closure_body_out_result : $@convention(thin) (@in Q, @in Q) -> @out Q { +bb0(%0 : $*Q, %1 : $*Q, %2 : $*Q): + %5 = init_existential_addr %0 : $*Q, $S + // function_ref S.init() + %6 = function_ref @S_init : $@convention(method) (@thin S.Type) -> @owned S + %7 = metatype $@thin S.Type + %8 = apply %6(%7) : $@convention(method) (@thin S.Type) -> @owned S + store %8 to %5 : $*S + destroy_addr %2 : $*Q + destroy_addr %1 : $*Q + %12 = tuple () + return %12 : $() +} + +sil @address_closure_out_result : $@convention(thin) (@in Q, @inout_aliasable Q, @inout_aliasable Q) -> @out Q { +bb0(%0 : $*Q, %1 : $*Q, %2 : $*Q, %3 : $*Q): + %7 = function_ref @address_closure_body_out_result : $@convention(thin) (@in Q, @in Q) -> @out Q + %8 = alloc_stack $Q + copy_addr 
%2 to [init] %8 : $*Q + %10 = alloc_stack $Q + copy_addr %3 to [init] %10 : $*Q + %12 = apply %7(%0, %8, %10) : $@convention(thin) (@in Q, @in Q) -> @out Q + dealloc_stack %10 : $*Q + dealloc_stack %8 : $*Q + destroy_addr %1 : $*Q + %16 = tuple () + return %16 : $() +} + +// Check that a specialization of address_closure_user_out_result was generated which does not +// take a closure as a parameter anymore. +// CHECK-LABEL: sil shared @{{.*}}address_closure_user_out_result{{.*}} : $@convention(thin) (@inout_aliasable any Q, @inout_aliasable any Q) -> @out any Q +// CHECK: function_ref @address_closure_out_result : $@convention(thin) (@in any Q, @inout_aliasable any Q, @inout_aliasable any Q) -> @out any Q +// CHECK: [[PARTIAL_APPLY:%.*]] = partial_apply %{{.*}} : $@convention(thin) (@in any Q, @inout_aliasable any Q, @inout_aliasable any Q) -> @out any Q +// CHECK: apply [[PARTIAL_APPLY]] +// CHECK: return + +sil @address_closure_user_out_result : $@convention(thin) (@noescape @callee_owned (@in Q) -> @out Q) -> @out Q { +bb0(%0 : $*Q, %1 : $@noescape @callee_owned (@in Q) -> @out Q): + %4 = alloc_stack $Q + %5 = init_existential_addr %4 : $*Q, $S + %6 = function_ref @S_init : $@convention(method) (@thin S.Type) -> @owned S + %7 = metatype $@thin S.Type + %8 = apply %6(%7) : $@convention(method) (@thin S.Type) -> @owned S + store %8 to %5 : $*S + %10 = apply %1(%0, %4) : $@noescape @callee_owned (@in Q) -> @out Q + dealloc_stack %4 : $*Q + %13 = tuple () + return %13 : $() +} + +// Check that closure specialization can handle cases where the full closure type may have +// unsupported address type arguments (e.g. @in or @out), but the partial_apply has only +// supported address type arguments, i.e. @inout or @inout_aliasable. 
+// +// CHECK-LABEL: sil @address_caller_out_result : $@convention(thin) (@in any Q, @in any Q) -> @out any Q +// CHECK-NOT: partial_apply +// CHECK: [[SPECIALIZED_FN1:%.*]] = function_ref @{{.*}}address_closure_user_out_result{{.*}} : $@convention(thin) (@inout_aliasable any Q, @inout_aliasable any Q) -> @out any Q +// CHECK: apply [[SPECIALIZED_FN1]]{{.*}} +// CHECK-NOT: partial_apply +// CHECK: return +sil @address_caller_out_result: $@convention(thin) (@in Q, @in Q) -> @out Q { +bb0(%0 : $*Q, %1 : $*Q, %2 : $*Q): + %5 = function_ref @address_closure_user_out_result : $@convention(thin) (@noescape @callee_owned (@in Q) -> @out Q) -> @out Q + %6 = function_ref @address_closure_out_result : $@convention(thin) (@in Q, @inout_aliasable Q, @inout_aliasable Q) -> @out Q + %7 = partial_apply %6(%1, %2) : $@convention(thin) (@in Q, @inout_aliasable Q, @inout_aliasable Q) -> @out Q + %7b = convert_escape_to_noescape %7 : $@callee_owned (@in Q) -> @out Q to $@noescape @callee_owned (@in Q) -> @out Q + %8 = apply %5(%0, %7b) : $@convention(thin) (@noescape @callee_owned (@in Q) -> @out Q) -> @out Q + destroy_addr %2 : $*Q + destroy_addr %1 : $*Q + %11 = tuple () + return %11 : $() +} + +// CHECK-LABEL: sil @address_caller_existential +// CHECK-NOT: partial_apply +// CHECK: [[SPECIALIZED_FN1:%.*]] = function_ref @{{.*}}address_closure_noescape_user{{.*}} : $@convention(thin) (@inout_aliasable any P) -> () +// CHECK: [[SPECIALIZED_FN2:%.*]] = function_ref @{{.*}}address_closure_noescape_user{{.*}} : $@convention(thin) (@inout_aliasable any P) -> () +// CHECK: apply [[SPECIALIZED_FN2]]{{.*}} +// CHECK: apply [[SPECIALIZED_FN1]]{{.*}} +// CHECK-NOT: partial_apply +// CHECK: return +sil @address_caller_existential : $@convention(thin) (@in P, @in P, Int32) -> @out P { +bb0(%0 : $*P, %1 : $*P, %2 : $*P, %3 : $Int32): + %7 = alloc_stack $P + copy_addr %1 to [init] %7 : $*P + %9 = function_ref @address_closure_existential : $@convention(thin) (@inout_aliasable P) -> () + %10 = 
partial_apply %9(%7) : $@convention(thin) (@inout_aliasable P) -> () + %10b = convert_escape_to_noescape %10 : $@callee_owned () -> () to $@noescape @callee_owned () -> () + %12 = function_ref @address_closure_noescape_user : $@convention(thin) (@noescape @callee_owned () -> ()) -> () + strong_retain %10 : $@callee_owned () -> () + %14 = apply %12(%10b) : $@convention(thin) (@noescape @callee_owned () -> ()) -> () + strong_retain %10 : $@callee_owned () -> () + %16 = apply %12(%10b) : $@convention(thin) (@noescape @callee_owned () -> ()) -> () + %17 = integer_literal $Builtin.Int32, 10 + %18 = struct_extract %3 : $Int32, #Int32._value + %19 = builtin "cmp_slt_Int32"(%17 : $Builtin.Int32, %18 : $Builtin.Int32) : $Builtin.Int1 + cond_br %19, bb1, bb2 + +bb1: + destroy_addr %2 : $*P + copy_addr %1 to [init] %0 : $*P + destroy_addr %1 : $*P + strong_release %10 : $@callee_owned () -> () + br bb3 + +bb2: + destroy_addr %1 : $*P + copy_addr %2 to [init] %0 : $*P + destroy_addr %2 : $*P + strong_release %10 : $@callee_owned () -> () + br bb3 + +bb3: + destroy_addr %7 : $*P + dealloc_stack %7 : $*P + %33 = tuple () + return %33 : $() +} + +sil shared @address_closure_existential : $@convention(thin) (@inout_aliasable P) -> () { +bb0(%0 : $*P): + %7 = tuple () + return %7 : $() +} + +sil @address_closure_struct1 : $@convention(thin) (@inout_aliasable S, @owned S) -> () { +bb0(%0 : $*S, %1 : $S): + %4 = struct_element_addr %0 : $*S, #S.c + %5 = load %4 : $*Optional + store %1 to %0 : $*S + release_value %5 : $Optional + %8 = tuple () + return %8 : $() +} + +sil @address_closure_struct2 : $@convention(thin) (@inout_aliasable S, @owned S) -> () { +bb0(%0 : $*S, %1 : $S): + %4 = struct_element_addr %0 : $*S, #S.c + %5 = load %4 : $*Optional + store %1 to %0 : $*S + release_value %5 : $Optional + %8 = tuple () + return %8 : $() +} + +// CHECK-LABEL: sil @address_caller_struct +// CHECK-NOT: partial_apply +// CHECK: [[SPECIALIZED_FN1:%.*]] = function_ref 
@{{.*}}address_closure_noescape_user{{.*}} : $@convention(thin) (@inout_aliasable S, @owned S) -> () +// CHECK: apply [[SPECIALIZED_FN1]] +// CHECK: [[SPECIALIZED_FN2:%.*]] = function_ref @{{.*}}address_closure_noescape_user{{.*}} : $@convention(thin) (@inout_aliasable S, @owned S) -> () +// CHECK: apply [[SPECIALIZED_FN2]] +// CHECK-NOT: partial_apply +// CHECK: return +sil @address_caller_struct : $@convention(thin) (@guaranteed S, @guaranteed S) -> @owned S { +bb0(%0 : $S, %1 : $S): + %4 = alloc_stack $S, var, name "xx" + %5 = struct_extract %0 : $S, #S.c + store %0 to %4 : $*S + %7 = function_ref @address_closure_noescape_user : $@convention(thin) (@noescape @callee_owned () -> ()) -> () + %8 = function_ref @address_closure_struct1 : $@convention(thin) (@inout_aliasable S, @owned S) -> () + %9 = partial_apply %8(%4, %1) : $@convention(thin) (@inout_aliasable S, @owned S) -> () + %9b = convert_escape_to_noescape %9 : $@callee_owned () -> () to $@noescape @callee_owned () -> () + retain_value %0 : $S + retain_value %1 : $S + %12 = apply %7(%9b) : $@convention(thin) (@noescape @callee_owned () -> ()) -> () + %13 = function_ref @address_closure_struct2 : $@convention(thin) (@inout_aliasable S, @owned S) -> () + %14 = partial_apply %13(%4, %0) : $@convention(thin) (@inout_aliasable S, @owned S) -> () + %14b = convert_escape_to_noescape %14 : $@callee_owned () -> () to $@noescape @callee_owned () -> () + retain_value %5 : $Optional + %16 = apply %7(%14b) : $@convention(thin) (@noescape @callee_owned () -> ()) -> () + %17 = load %4 : $*S + dealloc_stack %4 : $*S + return %17 : $S +} + +sil shared @address_closure_class1 : $@convention(thin) (@inout_aliasable C, @owned C) -> () { +bb0(%0 : $*C, %1 : $C): + %4 = load %0 : $*C + store %1 to %0 : $*C + strong_release %4 : $C + %7 = tuple () + return %7 : $() +} + +// CHECK-LABEL: sil @address_caller_class1 +// CHECK-NOT: partial_apply +// CHECK: [[SPECIALIZED_FN1:%.*]] = function_ref 
@{{.*}}address_closure_noescape_user{{.*}} : $@convention(thin) (@inout_aliasable C, @owned C) -> () +// CHECK: [[SPECIALIZED_FN2:%.*]] = function_ref @{{.*}}address_closure_noescape_user{{.*}} : $@convention(thin) (@inout_aliasable C, @owned C) -> () +// CHECK: apply [[SPECIALIZED_FN2]]{{.*}} +// CHECK: apply [[SPECIALIZED_FN1]]{{.*}} +// CHECK-NOT: partial_apply +// CHECK: return +sil @address_caller_class1 : $@convention(thin) (@guaranteed C, @guaranteed C) -> @owned C { +bb0(%0 : $C, %1 : $C): + %4 = alloc_stack $C, var, name "xx" + store %0 to %4 : $*C + %7 = function_ref @address_closure_class1 : $@convention(thin) (@inout_aliasable C, @owned C) -> () + %8 = partial_apply %7(%4, %1) : $@convention(thin) (@inout_aliasable C, @owned C) -> () + %8b = convert_escape_to_noescape %8 : $@callee_owned () -> () to $@noescape @callee_owned () -> () + %10 = function_ref @address_closure_noescape_user : $@convention(thin) (@noescape @callee_owned () -> ()) -> () + strong_retain %0 : $C + strong_retain %1 : $C + strong_retain %8 : $@callee_owned () -> () + %14 = apply %10(%8b) : $@convention(thin) (@noescape @callee_owned () -> ()) -> () + strong_retain %8 : $@callee_owned () -> () + %16 = apply %10(%8b) : $@convention(thin) (@noescape @callee_owned () -> ()) -> () + %17 = load %4 : $*C + strong_retain %17 : $C + strong_release %8 : $@callee_owned () -> () + %20 = load %4 : $*C + strong_release %20 : $C + dealloc_stack %4 : $*C + return %17 : $C +} + +///////////////////////////////////// +// Thin To Thick and Partial Apply // +///////////////////////////////////// +// +// Make sure that we handle these correctly with and without removal of the +// closure and @owned and @guaranteed. 
+// + +// CHECK-LABEL: sil @large_closure_callee : $@convention(thin) (Builtin.NativeObject, Builtin.Int32, @owned Builtin.NativeObject, @guaranteed Builtin.NativeObject, Builtin.NativeObject, Builtin.Int32, @owned Builtin.NativeObject, @guaranteed Builtin.NativeObject) -> () { +sil @large_closure_callee : $@convention(thin) (Builtin.NativeObject, Builtin.Int32, @owned Builtin.NativeObject, @guaranteed Builtin.NativeObject, Builtin.NativeObject, Builtin.Int32, @owned Builtin.NativeObject, @guaranteed Builtin.NativeObject) -> () { +bb0(%0 : $Builtin.NativeObject, %1 : $Builtin.Int32, %2 : $Builtin.NativeObject, %3 : $Builtin.NativeObject, %4 : $Builtin.NativeObject, %5 : $Builtin.Int32, %6 : $Builtin.NativeObject, %7 : $Builtin.NativeObject): + %9999 = tuple () + + release_value %2 : $Builtin.NativeObject + release_value %6 : $Builtin.NativeObject + return %9999 : $() +} + +// CHECK-LABEL: sil @small_closure_callee : $@convention(thin) (Builtin.NativeObject, Builtin.Int32, @owned Builtin.NativeObject, @guaranteed Builtin.NativeObject) -> () { +sil @small_closure_callee : $@convention(thin) (Builtin.NativeObject, Builtin.Int32, @owned Builtin.NativeObject, @guaranteed Builtin.NativeObject) -> () { +bb0(%0 : $Builtin.NativeObject, %1 : $Builtin.Int32, %2 : $Builtin.NativeObject, %3 : $Builtin.NativeObject): + %9999 = tuple () + release_value %2 : $Builtin.NativeObject + return %9999 : $() +} + +// CHECK-LABEL: sil shared @$s18owned_apply_callee014large_closure_C0BoBi32_BoBoTf1cnnnn_n : $@convention(thin) (Builtin.NativeObject, Builtin.Int32, @owned Builtin.NativeObject, @guaranteed Builtin.NativeObject, @owned Builtin.NativeObject, Builtin.Int32, @owned Builtin.NativeObject, @owned Builtin.NativeObject) -> () { +// CHECK: bb0 +// CHECK: [[FUN:%.*]] = function_ref @large_closure_callee : $@convention(thin) (Builtin.NativeObject, Builtin.Int32, @owned Builtin.NativeObject, @guaranteed Builtin.NativeObject, Builtin.NativeObject, Builtin.Int32, @owned 
Builtin.NativeObject, @guaranteed Builtin.NativeObject) -> () +// CHECK: [[CLOSURE:%.*]] = partial_apply [[FUN]]( +// CHECK: apply [[CLOSURE]]( +// CHECK: release_value [[CLOSURE]] + +// CHECK-LABEL: sil shared @$s18owned_apply_callee014small_closure_C0Tf1cnnnn_n : $@convention(thin) (Builtin.NativeObject, Builtin.Int32, @owned Builtin.NativeObject, @guaranteed Builtin.NativeObject) -> () { +// CHECK: bb0 +// CHECK: [[FUN:%.*]] = function_ref @small_closure_callee +// CHECK: [[CLOSURE:%.*]] = thin_to_thick_function [[FUN]] : $@convention(thin) (Builtin.NativeObject, Builtin.Int32, @owned Builtin.NativeObject, @guaranteed Builtin.NativeObject) -> () to $@callee_owned (Builtin.NativeObject, Builtin.Int32, @owned Builtin.NativeObject, @guaranteed Builtin.NativeObject) -> () +// CHECK: apply [[CLOSURE]]( +// CHECK: release_value [[CLOSURE]] + +// CHECK-LABEL: sil @owned_apply_callee : $@convention(thin) (@owned @callee_owned (Builtin.NativeObject, Builtin.Int32, @owned Builtin.NativeObject, @guaranteed Builtin.NativeObject) -> (), Builtin.NativeObject, Builtin.Int32, @owned Builtin.NativeObject, @guaranteed Builtin.NativeObject) -> () { +sil @owned_apply_callee : $@convention(thin) (@owned @callee_owned (Builtin.NativeObject, Builtin.Int32, @owned Builtin.NativeObject, @guaranteed Builtin.NativeObject) -> (), Builtin.NativeObject, Builtin.Int32, @owned Builtin.NativeObject, @guaranteed Builtin.NativeObject) -> () { +bb0(%0 : $@callee_owned (Builtin.NativeObject, Builtin.Int32, @owned Builtin.NativeObject, @guaranteed Builtin.NativeObject) -> (), %1 : $Builtin.NativeObject, %2 : $Builtin.Int32, %3 : $Builtin.NativeObject, %4 : $Builtin.NativeObject): + retain_value %3 : $Builtin.NativeObject + apply %0(%1, %2, %3, %4) : $@callee_owned (Builtin.NativeObject, Builtin.Int32, @owned Builtin.NativeObject, @guaranteed Builtin.NativeObject) -> () + release_value %3 : $Builtin.NativeObject + release_value %0 : $@callee_owned (Builtin.NativeObject, Builtin.Int32, @owned 
Builtin.NativeObject, @guaranteed Builtin.NativeObject) -> () + %9999 = tuple () + return %9999 : $() +} + +// CHECK-LABEL: sil shared @$s23guaranteed_apply_callee014large_closure_C0BoBi32_BoBoTf1cnnnn_n : $@convention(thin) (Builtin.NativeObject, Builtin.Int32, @owned Builtin.NativeObject, @guaranteed Builtin.NativeObject, @owned Builtin.NativeObject, Builtin.Int32, @owned Builtin.NativeObject, @owned Builtin.NativeObject) -> () { +// CHECK: bb0(%0 : $Builtin.NativeObject, %1 : $Builtin.Int32, %2 : $Builtin.NativeObject, %3 : $Builtin.NativeObject, %4 : $Builtin.NativeObject, %5 : $Builtin.Int32, %6 : $Builtin.NativeObject, %7 : $Builtin.NativeObject): +// CHECK: [[FUN:%.*]] = function_ref @large_closure_callee : $@convention(thin) (Builtin.NativeObject, Builtin.Int32, @owned Builtin.NativeObject, @guaranteed Builtin.NativeObject, Builtin.NativeObject, Builtin.Int32, @owned Builtin.NativeObject, @guaranteed Builtin.NativeObject) -> () +// CHECK: [[CLOSURE:%.*]] = partial_apply [[FUN]]( +// CHECK: apply [[CLOSURE]]( +// CHECK: release_value [[CLOSURE]] + +// CHECK-LABEL: sil shared @$s23guaranteed_apply_callee014small_closure_C0Tf1cnnnn_n : $@convention(thin) (Builtin.NativeObject, Builtin.Int32, @owned Builtin.NativeObject, @guaranteed Builtin.NativeObject) -> () { +// CHECK: bb0(%0 : $Builtin.NativeObject, %1 : $Builtin.Int32, %2 : $Builtin.NativeObject, %3 : $Builtin.NativeObject): +// CHECK: [[FUN:%.*]] = function_ref @small_closure_callee +// CHECK: [[CLOSURE:%.*]] = thin_to_thick_function [[FUN]] : +// CHECK: apply [[CLOSURE]]( +// CHECK-NOT: release_value [[CLOSURE]] + +// CHECK-LABEL: sil @guaranteed_apply_callee : $@convention(thin) (@guaranteed @callee_owned (Builtin.NativeObject, Builtin.Int32, @owned Builtin.NativeObject, @guaranteed Builtin.NativeObject) -> (), Builtin.NativeObject, Builtin.Int32, @owned Builtin.NativeObject, @guaranteed Builtin.NativeObject) -> () { +sil @guaranteed_apply_callee : $@convention(thin) (@guaranteed @callee_owned 
(Builtin.NativeObject, Builtin.Int32, @owned Builtin.NativeObject, @guaranteed Builtin.NativeObject) -> (), Builtin.NativeObject, Builtin.Int32, @owned Builtin.NativeObject, @guaranteed Builtin.NativeObject) -> () { +bb0(%0 : $@callee_owned (Builtin.NativeObject, Builtin.Int32, @owned Builtin.NativeObject, @guaranteed Builtin.NativeObject) -> (), %1 : $Builtin.NativeObject, %2 : $Builtin.Int32, %3 : $Builtin.NativeObject, %4 : $Builtin.NativeObject): + retain_value %3 : $Builtin.NativeObject + apply %0(%1, %2, %3, %4) : $@callee_owned (Builtin.NativeObject, Builtin.Int32, @owned Builtin.NativeObject, @guaranteed Builtin.NativeObject) -> () + release_value %3 : $Builtin.NativeObject + %9999 = tuple () + return %9999 : $() +} +sil @guaranteed_apply_callee_throw : $@convention(thin) (@guaranteed @callee_owned (Builtin.NativeObject, Builtin.Int32, @owned Builtin.NativeObject, @guaranteed Builtin.NativeObject) -> (), Builtin.NativeObject, Builtin.Int32, @owned Builtin.NativeObject, @guaranteed Builtin.NativeObject, @owned Error) -> @error Error { +bb0(%0 : $@callee_owned (Builtin.NativeObject, Builtin.Int32, @owned Builtin.NativeObject, @guaranteed Builtin.NativeObject) -> (), %1 : $Builtin.NativeObject, %2 : $Builtin.Int32, %3 : $Builtin.NativeObject, %4 : $Builtin.NativeObject, %5: $Error): + retain_value %3 : $Builtin.NativeObject + apply %0(%1, %2, %3, %4) : $@callee_owned (Builtin.NativeObject, Builtin.Int32, @owned Builtin.NativeObject, @guaranteed Builtin.NativeObject) -> () + release_value %3 : $Builtin.NativeObject + throw %5 : $Error +} +// CHECK-LABEL: sil @thin_thick_and_partial_apply_test : $@convention(thin) (Builtin.NativeObject, Builtin.Int32, @owned Builtin.NativeObject, @guaranteed Builtin.NativeObject, @owned any Error) -> () { +// CHECK: bb0([[ARG0:%.*]] : $Builtin.NativeObject, [[ARG1:%.*]] : $Builtin.Int32, [[ARG2:%.*]] : $Builtin.NativeObject, [[ARG3:%.*]] : $Builtin.NativeObject, [[ARG4:%.*]] : $any Error): +// CHECK: [[OLD_CLOSURE_CALLEE1:%.*]] 
= function_ref @large_closure_callee +// CHECK: [[OLD_CLOSURE_CALLEE2:%.*]] = function_ref @small_closure_callee +// CHECK: retain_value [[ARG0]] : $Builtin.NativeObject +// CHECK-NEXT: retain_value [[ARG2]] : $Builtin.NativeObject +// CHECK-NEXT: retain_value [[ARG3]] : $Builtin.NativeObject +// CHECK: [[SPECFUN0:%.*]] = function_ref @$s23guaranteed_apply_callee014large_closure_C0BoBi32_BoBoTf1cnnnn_n +// CHECK: retain_value [[ARG0]] : $Builtin.NativeObject +// CHECK-NEXT: retain_value [[ARG2]] : $Builtin.NativeObject +// CHECK-NEXT: retain_value [[ARG3]] : $Builtin.NativeObject +// CHECK: [[SPECFUN1:%.*]] = function_ref @$s18owned_apply_callee014large_closure_C0BoBi32_BoBoTf1cnnnn_n +// CHECK: retain_value [[ARG0]] : $Builtin.NativeObject +// CHECK-NEXT: retain_value [[ARG2]] : $Builtin.NativeObject +// CHECK-NEXT: retain_value [[ARG3]] : $Builtin.NativeObject +// CHECK: [[DEAD_CLOSURE_1:%.*]] = partial_apply [[OLD_CLOSURE_CALLEE1]] +// CHECK: [[SPECFUN2:%.*]] = function_ref @$s23guaranteed_apply_callee014small_closure_C0Tf1cnnnn_n +// CHECK: [[SPECFUN3:%.*]] = function_ref @$s18owned_apply_callee014small_closure_C0Tf1cnnnn_n +// CHECK: [[DEAD_CLOSURE_2:%.*]] = thin_to_thick_function [[OLD_CLOSURE_CALLEE2]] +// CHECK: retain_value [[DEAD_CLOSURE_1]] +// CHECK-NOT: retain_value [[DEAD_CLOSURE_2]] +// CHECK-NOT: apply [[DEAD_CLOSURE_1]] +// CHECK-NOT: apply [[DEAD_CLOSURE_2]] +// CHECK: apply [[SPECFUN1]]( +// CHECK-NEXT: release_value [[DEAD_CLOSURE_1]] +// CHECK-NOT: release_value [[DEAD_CLOSURE_2]] +// CHECK: apply [[SPECFUN3]]( +// CHECK-NOT: release_value [[DEAD_CLOSURE_1]] +// CHECK-NOT: release_value [[DEAD_CLOSURE_2]] +// CHECK: apply [[SPECFUN0]]( +// CHECK-NOT: release_value [[DEAD_CLOSURE_1]] +// CHECK-NOT: release_value [[DEAD_CLOSURE_2]] +// CHECK: apply [[SPECFUN2]]( +// CHECK-NEXT: release_value [[DEAD_CLOSURE_1]] +// CHECK-NOT: release_value [[DEAD_CLOSURE_2]] + +// REMOVECLOSURES-LABEL: sil @thin_thick_and_partial_apply_test : $@convention(thin) 
(Builtin.NativeObject, Builtin.Int32, @owned Builtin.NativeObject, @guaranteed Builtin.NativeObject, @owned any Error) -> () { +// REMOVECLOSURES: bb0([[ARG0:%.*]] : $Builtin.NativeObject, [[ARG1:%.*]] : $Builtin.Int32, [[ARG2:%.*]] : $Builtin.NativeObject, [[ARG3:%.*]] : $Builtin.NativeObject, [[ARG4:%.*]] : $any Error): +// REMOVECLOSURES: [[OLD_CLOSURE_CALLEE1:%.*]] = function_ref @large_closure_callee +// REMOVECLOSURES: [[OLD_CLOSURE_CALLEE2:%.*]] = function_ref @small_closure_callee +// REMOVECLOSURES: retain_value [[ARG0]] : $Builtin.NativeObject +// REMOVECLOSURES-NEXT: retain_value [[ARG2]] : $Builtin.NativeObject +// REMOVECLOSURES-NEXT: retain_value [[ARG3]] : $Builtin.NativeObject +// REMOVECLOSURES: [[SPECFUN0:%.*]] = function_ref @$s23guaranteed_apply_callee014large_closure_C0BoBi32_BoBoTf1cnnnn_n +// REMOVECLOSURES: retain_value [[ARG0]] : $Builtin.NativeObject +// REMOVECLOSURES-NEXT: retain_value [[ARG2]] : $Builtin.NativeObject +// REMOVECLOSURES-NEXT: retain_value [[ARG3]] : $Builtin.NativeObject +// REMOVECLOSURES: [[SPECFUN1:%.*]] = function_ref @$s18owned_apply_callee014large_closure_C0BoBi32_BoBoTf1cnnnn_n +// REMOVECLOSURES: retain_value [[ARG0]] : $Builtin.NativeObject +// REMOVECLOSURES-NEXT: retain_value [[ARG2]] : $Builtin.NativeObject +// REMOVECLOSURES-NEXT: retain_value [[ARG3]] : $Builtin.NativeObject +// REMOVECLOSURES-NOT: partial_apply [[OLD_CLOSURE_CALLEE1]] +// REMOVECLOSURES: [[SPECFUN4:%.*]] = function_ref @$s29guaranteed_apply_callee_throw014small_closure_C0Tf1cnnnnn_n +// REMOVECLOSURES: [[SPECFUN2:%.*]] = function_ref @$s23guaranteed_apply_callee014small_closure_C0Tf1cnnnn_n +// REMOVECLOSURES: [[SPECFUN3:%.*]] = function_ref @$s18owned_apply_callee014small_closure_C0Tf1cnnnn_n +// REMOVECLOSURES-NOT: thin_to_thick_function [[OLD_CLOSURE_CALLEE2]] +// REMOVECLOSURES: apply [[SPECFUN1]]( +// REMOVECLOSURES-NEXT: apply [[SPECFUN3]]( +// REMOVECLOSURES-NEXT: apply [[SPECFUN0]]( +// REMOVECLOSURES-NEXT: apply [[SPECFUN2]]( +// 
REMOVECLOSURES-NEXT: strong_release [[ARG0]] : $Builtin.NativeObject +// REMOVECLOSURES-NEXT: strong_release [[ARG2]] : $Builtin.NativeObject +// REMOVECLOSURES-NEXT: strong_release [[ARG3]] : $Builtin.NativeObject +// REMOVECLOSURES-NEXT: try_apply [[SPECFUN4]]( +sil @thin_thick_and_partial_apply_test : $@convention(thin) (Builtin.NativeObject, Builtin.Int32, @owned Builtin.NativeObject, @guaranteed Builtin.NativeObject, @owned Error) -> () { +bb0(%0 : $Builtin.NativeObject, %1 : $Builtin.Int32, %2 : $Builtin.NativeObject, %3 : $Builtin.NativeObject, %11: $Error): + %4 = function_ref @owned_apply_callee : $@convention(thin) (@owned @callee_owned (Builtin.NativeObject, Builtin.Int32, @owned Builtin.NativeObject, @guaranteed Builtin.NativeObject) -> (), Builtin.NativeObject, Builtin.Int32, @owned Builtin.NativeObject, @guaranteed Builtin.NativeObject) -> () + %5 = function_ref @guaranteed_apply_callee : $@convention(thin) (@guaranteed @callee_owned (Builtin.NativeObject, Builtin.Int32, @owned Builtin.NativeObject, @guaranteed Builtin.NativeObject) -> (), Builtin.NativeObject, Builtin.Int32, @owned Builtin.NativeObject, @guaranteed Builtin.NativeObject) -> () + %6 = function_ref @large_closure_callee : $@convention(thin) (Builtin.NativeObject, Builtin.Int32, @owned Builtin.NativeObject, @guaranteed Builtin.NativeObject, Builtin.NativeObject, Builtin.Int32, @owned Builtin.NativeObject, @guaranteed Builtin.NativeObject) -> () + %7 = function_ref @small_closure_callee : $@convention(thin) (Builtin.NativeObject, Builtin.Int32, @owned Builtin.NativeObject, @guaranteed Builtin.NativeObject) -> () + %10 = function_ref @guaranteed_apply_callee_throw : $@convention(thin) (@guaranteed @callee_owned (Builtin.NativeObject, Builtin.Int32, @owned Builtin.NativeObject, @guaranteed Builtin.NativeObject) -> (), Builtin.NativeObject, Builtin.Int32, @owned Builtin.NativeObject, @guaranteed Builtin.NativeObject, @owned Error) -> @error Error + + retain_value %0 : $Builtin.NativeObject + 
retain_value %2 : $Builtin.NativeObject + retain_value %3 : $Builtin.NativeObject + %8 = partial_apply %6(%0, %1, %2, %3) : $@convention(thin) (Builtin.NativeObject, Builtin.Int32, @owned Builtin.NativeObject, @guaranteed Builtin.NativeObject, Builtin.NativeObject, Builtin.Int32, @owned Builtin.NativeObject, @guaranteed Builtin.NativeObject) -> () + %9 = thin_to_thick_function %7 : $@convention(thin) (Builtin.NativeObject, Builtin.Int32, @owned Builtin.NativeObject, @guaranteed Builtin.NativeObject) -> () to $@callee_owned (Builtin.NativeObject, Builtin.Int32, @owned Builtin.NativeObject, @guaranteed Builtin.NativeObject) -> () + + retain_value %8 : $@callee_owned (Builtin.NativeObject, Builtin.Int32, @owned Builtin.NativeObject, @guaranteed Builtin.NativeObject) -> () + apply %4(%8, %0, %1, %2, %3) : $@convention(thin) (@owned @callee_owned (Builtin.NativeObject, Builtin.Int32, @owned Builtin.NativeObject, @guaranteed Builtin.NativeObject) -> (), Builtin.NativeObject, Builtin.Int32, @owned Builtin.NativeObject, @guaranteed Builtin.NativeObject) -> () + apply %4(%9, %0, %1, %2, %3) : $@convention(thin) (@owned @callee_owned (Builtin.NativeObject, Builtin.Int32, @owned Builtin.NativeObject, @guaranteed Builtin.NativeObject) -> (), Builtin.NativeObject, Builtin.Int32, @owned Builtin.NativeObject, @guaranteed Builtin.NativeObject) -> () + apply %5(%8, %0, %1, %2, %3) : $@convention(thin) (@guaranteed @callee_owned (Builtin.NativeObject, Builtin.Int32, @owned Builtin.NativeObject, @guaranteed Builtin.NativeObject) -> (), Builtin.NativeObject, Builtin.Int32, @owned Builtin.NativeObject, @guaranteed Builtin.NativeObject) -> () + apply %5(%9, %0, %1, %2, %3) : $@convention(thin) (@guaranteed @callee_owned (Builtin.NativeObject, Builtin.Int32, @owned Builtin.NativeObject, @guaranteed Builtin.NativeObject) -> (), Builtin.NativeObject, Builtin.Int32, @owned Builtin.NativeObject, @guaranteed Builtin.NativeObject) -> () + + release_value %8 : $@callee_owned 
(Builtin.NativeObject, Builtin.Int32, @owned Builtin.NativeObject, @guaranteed Builtin.NativeObject) -> () + try_apply %10(%9, %0, %1, %2, %3, %11) : $@convention(thin) (@guaranteed @callee_owned (Builtin.NativeObject, Builtin.Int32, @owned Builtin.NativeObject, @guaranteed Builtin.NativeObject) -> (), Builtin.NativeObject, Builtin.Int32, @owned Builtin.NativeObject, @guaranteed Builtin.NativeObject, @owned Error) -> @error Error, normal bb2, error bb3 + +bb2(%n : $()): + br bb4 + +bb3(%e : $Error): + br bb4 + +bb4: + %9999 = tuple() + return %9999 : $() +} + +////////////////////////////// +// Non Function Ref Closure // +////////////////////////////// +// +// Make sure we do not try to specialize a closure if we are not closing over a +// direct function ref. + +// CHECK-LABEL: @$s4test3barSiAA1P_p_SitF : $@convention(thin) (@in any P, Int32) -> Int32 { +// CHECK: partial_apply +// CHECK: apply +sil [noinline] @$s4test3barSiAA1P_p_SitF : $@convention(thin) (@in P, Int32) -> Int32 { +bb0(%0 : $*P, %1 : $Int32): + %2 = open_existential_addr mutable_access %0 : $*P to $*@opened("01234567-89ab-cdef-0123-000000000000", P) Self + %3 = witness_method $@opened("01234567-89ab-cdef-0123-000000000000", P) Self, #P.foo, %2 : $*@opened("01234567-89ab-cdef-0123-000000000000", P) Self : $@convention(witness_method: P) @callee_owned (@callee_owned (Int32) -> Int32, Int32, @inout T) -> Int32 + %4 = integer_literal $Builtin.Int32, 2 + %5 = struct $Int32 (%4 : $Builtin.Int32) + // function_ref test.baz (Swift.Int32)(m : Swift.Int32) -> Swift.Int32 + %6 = function_ref @$s4test3bazSiSi1m_tcSiF : $@convention(thin) (Int32, Int32) -> Int32 + %7 = partial_apply %6(%5) : $@convention(thin) (Int32, Int32) -> Int32 + %8 = apply %3<@opened("01234567-89ab-cdef-0123-000000000000", P) Self>(%7, %1, %2) : $@convention(witness_method: P) @callee_owned (@callee_owned (Int32) -> Int32, Int32, @inout T) -> Int32 + destroy_addr %0 : $*P + return %8 : $Int32 +} + +sil @$s4test3bazSiSi1m_tcSiF : 
$@convention(thin) (Int32, Int32) -> Int32
+
+//////////////////////////////////////////////////////////////////////////////////
+// Make sure that we properly set a specialized closure's indirect return type. //
+//////////////////////////////////////////////////////////////////////////////////
+//
+// SIL verification should catch the incorrect type.
+// rdar://19321284
+
+// CHECK-LABEL: sil [serialized] @callee : $@convention(thin) (Builtin.Int32) -> () {
+sil [serialized] @callee : $@convention(thin) (Builtin.Int32) -> () {
+bb0(%0 : $Builtin.Int32):
+  unreachable
+}
+
+sil shared [serialized] @thunk : $@convention(thin) (@callee_owned () -> ()) -> @out () {
+bb0(%0 : $*(), %1 : $@callee_owned () -> ()):
+  apply %1() : $@callee_owned () -> ()
+  %9999 = tuple()
+  return %9999 : $()
+}
+
+// CHECK-LABEL: @test_closure_propagation : $@convention(thin) () -> () {
+// REMOVECLOSURES-LABEL: @test_closure_propagation : $@convention(thin) () -> () {
+// REMOVECLOSURES-NOT: partial_apply
+sil [serialized] @test_closure_propagation : $@convention(thin) () -> () {
+bb0:
+  %f1 = function_ref @callee : $@convention(thin) (Builtin.Int32) -> ()
+  %i1 = integer_literal $Builtin.Int32, 24
+  %p1 = partial_apply %f1(%i1) : $@convention(thin) (Builtin.Int32) -> ()
+  %f2 = function_ref @thunk : $@convention(thin) (@callee_owned () -> ()) -> @out ()
+  %s1 = alloc_stack $()
+  %a1 = apply %f2(%s1, %p1) : $@convention(thin) (@callee_owned () -> ()) -> @out ()
+  dealloc_stack %s1 : $*()
+  unreachable
+}
diff --git a/test/SILOptimizer/experimental-swift-based-closure-specialization/closure_specialize_dynamic_self.swift b/test/SILOptimizer/experimental-swift-based-closure-specialization/closure_specialize_dynamic_self.swift
new file mode 100644
index 0000000000000..4e304127aa1f6
--- /dev/null
+++ b/test/SILOptimizer/experimental-swift-based-closure-specialization/closure_specialize_dynamic_self.swift
@@ -0,0 +1,23 @@
+// RUN: %target-swift-frontend -emit-sil -O 
-experimental-swift-based-closure-specialization -primary-file %s + +// Just make sure we skip the optimization and not crash here. +// +// Eventually, we can make this work. +// +// + +class Foo { + required init() {} + + static func foo(_ f: () -> ()) -> Self { + f() + return self.init() + } +} + +class Bar: Foo {} + +func closures(_ x: String) { + print(Foo.foo { _ = x }) + print(Bar.foo { _ = x }) +} diff --git a/test/SILOptimizer/experimental-swift-based-closure-specialization/closure_specialize_fragile.sil b/test/SILOptimizer/experimental-swift-based-closure-specialization/closure_specialize_fragile.sil new file mode 100644 index 0000000000000..bf808aafadc10 --- /dev/null +++ b/test/SILOptimizer/experimental-swift-based-closure-specialization/closure_specialize_fragile.sil @@ -0,0 +1,60 @@ +// RUN: %target-sil-opt %s -verify -experimental-swift-based-closure-specialization -o - | %FileCheck %s + +// Make sure we do not specialize resilientCallee. + +sil_stage canonical + +import Builtin +import Swift +import SwiftShims + +@_optimize(none) public func action() + +@inline(__always) public func fragileCaller() + +public func resilientCallee(fn: () -> ()) + +// action() +sil [Onone] @$s26closure_specialize_fragile6actionyyF : $@convention(thin) () -> () { +bb0: + %0 = tuple () + return %0 : $() +} // end sil function '$s26closure_specialize_fragile6actionyyF' + +// CHECK-LABEL: sil [serialized] [always_inline] @$s26closure_specialize_fragile0C6CalleryyF : $@convention(thin) () -> () +// CHECK: function_ref @$s26closure_specialize_fragile15resilientCalleeyyyc2fn_tF : $@convention(thin) (@owned @callee_owned () -> ()) -> () +// CHECK: return +// fragileCaller() +sil [serialized] [always_inline] @$s26closure_specialize_fragile0C6CalleryyF : $@convention(thin) () -> () { +bb0: + // function_ref resilientCallee(fn:) + %0 = function_ref @$s26closure_specialize_fragile15resilientCalleeyyyc2fn_tF : $@convention(thin) (@owned @callee_owned () -> ()) -> () + // 
function_ref closure #1 in fragileCaller() + %1 = function_ref @$s26closure_specialize_fragile0C6CalleryyFyycfU_ : $@convention(thin) () -> () + %2 = thin_to_thick_function %1 : $@convention(thin) () -> () to $@callee_owned () -> () + %3 = apply %0(%2) : $@convention(thin) (@owned @callee_owned () -> ()) -> () + %4 = tuple () + return %4 : $() +} // end sil function '$s26closure_specialize_fragile0C6CalleryyF' + +// CHECK-LABEL: sil @$s26closure_specialize_fragile15resilientCalleeyyyc2fn_tF : $@convention(thin) (@owned @callee_owned () -> ()) -> () + +// resilientCallee(fn:) +sil @$s26closure_specialize_fragile15resilientCalleeyyyc2fn_tF : $@convention(thin) (@owned @callee_owned () -> ()) -> () { +bb0(%0 : $@callee_owned () -> ()): + strong_retain %0 : $@callee_owned () -> () + %3 = apply %0() : $@callee_owned () -> () + strong_release %0 : $@callee_owned () -> () + %5 = tuple () + return %5 : $() +} // end sil function '$s26closure_specialize_fragile15resilientCalleeyyyc2fn_tF' + +// closure #1 in fragileCaller() +sil shared [serialized] @$s26closure_specialize_fragile0C6CalleryyFyycfU_ : $@convention(thin) () -> () { +bb0: + // function_ref action() + %0 = function_ref @$s26closure_specialize_fragile6actionyyF : $@convention(thin) () -> () + %1 = apply %0() : $@convention(thin) () -> () + %2 = tuple () + return %2 : $() +} // end sil function '$s26closure_specialize_fragile0C6CalleryyFyycfU_' diff --git a/test/SILOptimizer/experimental-swift-based-closure-specialization/closure_specialize_loop.swift b/test/SILOptimizer/experimental-swift-based-closure-specialization/closure_specialize_loop.swift new file mode 100644 index 0000000000000..a5eb9a228e660 --- /dev/null +++ b/test/SILOptimizer/experimental-swift-based-closure-specialization/closure_specialize_loop.swift @@ -0,0 +1,81 @@ +// RUN: %{python} %S/../Inputs/timeout.py 10 %target-swift-frontend -O -parse-as-library -experimental-swift-based-closure-specialization %s -emit-sil | %FileCheck %s +// XFAIL: * 
+public func callit() { + testit { false } +} + +// Check if the compiler terminates and does not full into an infinite optimization +// loop between the ClosureSpecializer and CapturePropagation. + +// CHECK-LABEL: sil @$s23closure_specialize_loop6testit1cySbyc_tF +public func testit(c: @escaping () -> Bool) { + if c() { + testit { !c() } + } +} + +// PR: https://github.com/apple/swift/pull/61956 +// Optimizing Expression.contains(where:) should not timeout. +// +// Repeated capture propagation leads to: +// func contains$termPred@arg0$[termPred$falsePred@arg1]@arg1(expr) { +// closure = termPred$[termPred$falsePred@arg1]@arg1 +// falsePred(expr) +// contains$termPred@arg0$termPred$[termPred$falsePred@arg1]@arg1(expr) +// } +// +// func contains$termPred@arg0$termPred$[termPred$falsePred@arg1]@arg1(expr) { +// closure = [termPred(termPred$[termPred$falsePred@arg1]@arg1)] +// closure(expr) +// contains$termPred@arg0(expr, closure) +// } +// The Demangled type tree looks like: +// kind=FunctionSignatureSpecialization +// kind=SpecializationPassID, index=3 +// kind=FunctionSignatureSpecializationParam +// kind=FunctionSignatureSpecializationParam +// kind=FunctionSignatureSpecializationParamKind, index=0 +// kind=FunctionSignatureSpecializationParamPayload, text="$s4test10ExpressionO8contains5whereS3bXE_tFSbACXEfU_S2bXEfU_36$s4test12IndirectEnumVACycfcS2bXEfU_Tf3npf_n" +// +// CHECK-LABEL: $s23closure_specialize_loop10ExpressionO8contains5whereS3bXE_tFSbACXEfU_S2bXEfU_012$s23closure_b7_loop10d44O8contains5whereS3bXE_tFSbACXEfU_S2bXEfU_012g13_b7_loop10d44ijk2_tlm2U_no52U_012g30_B34_loop12IndirectEnumVACycfcnO10U_Tf3npf_nY2_nTf3npf_n +// ---> function signature specialization +// Swift.Bool +// in closure_specialize_loop.IndirectEnum.init() -> closure_specialize_loop.IndirectEnum]> +// of closure #1 (Swift.Bool) -> Swift.Bool +// in closure #1 (closure_specialize_loop.Expression) -> Swift.Bool +// in closure_specialize_loop.Expression.contains(where: (Swift.Bool) -> 
Swift.Bool) -> Swift.Bool]> +// of closure #1 (Swift.Bool) -> Swift.Bool +// in closure #1 (closure_specialize_loop.Expression) -> Swift.Bool +// in closure_specialize_loop.Expression.contains(where: (Swift.Bool) -> Swift.Bool) -> Swift.Bool]> +// of closure #1 (Swift.Bool) -> Swift.Bool +// in closure #1 (closure_specialize_loop.Expression) -> Swift.Bool +// in closure_specialize_loop.Expression.contains(where: (Swift.Bool) -> Swift.Bool) -> Swift.Bool +// +public indirect enum Expression { + case term(Bool) + case list(_ expressions: [Expression]) + + public func contains(where predicate: (Bool) -> Bool) -> Bool { + switch self { + case let .term(term): + return predicate(term) + case let .list(expressions): + return expressions.contains { expression in + expression.contains { term in + predicate(term) + } + } + } + } +} + +public struct IndirectEnum { + public init() { + let containsFalse = Expression.list([.list([.term(true), .term(false)]), .term(true)]).contains { term in + term == false + } + print(containsFalse) + } +} diff --git a/test/SILOptimizer/experimental-swift-based-closure-specialization/closure_specialize_opaque.sil b/test/SILOptimizer/experimental-swift-based-closure-specialization/closure_specialize_opaque.sil new file mode 100644 index 0000000000000..149e7c3044711 --- /dev/null +++ b/test/SILOptimizer/experimental-swift-based-closure-specialization/closure_specialize_opaque.sil @@ -0,0 +1,43 @@ +// RUN: %target-sil-opt -enable-sil-opaque-values -enable-sil-verify-all -experimental-swift-based-closure-specialization %s | %FileCheck %s +// XFAIL: * + +struct TestView {} +struct TestRange {} +struct TestSlice {} + +// helper +sil @closure : $@convention(thin) (@inout TestView, TestRange, @in TestSlice) -> () { +bb0(%0 : $*TestView, %1 : $TestRange, %2 : $TestSlice): + %1284 = tuple () + return %1284 : $() +} + +// helper +sil @thunk : $@convention(thin) (@inout TestView, @owned @callee_owned (@inout TestView) -> ()) -> @out () { +bb0(%0 : 
$*TestView, %1 : $@callee_owned (@inout TestView) ->()): + %call = apply %1(%0) : $@callee_owned (@inout TestView) -> () + %1284 = tuple () + return %1284 : $() +} + +// Test that ClosureSpecializer can handle captured @in args, in addition to direct args. +// +// CHECK-LABEL: sil @testSpecializeThunk : $@convention(thin) (@inout TestView, TestRange, @in TestSlice) -> () { +// CHECK: bb0(%0 : $*TestView, %1 : $TestRange, %2 : $TestSlice): +// CHECK: [[CLOSURE:%.*]] = function_ref @closure : $@convention(thin) (@inout TestView, TestRange, @in TestSlice) -> () +// CHECK: [[SPECIALIZED:%.*]] = function_ref @$s5thunk7closure4main9TestRangeVAC0D5SliceVTf1nc_n : $@convention(thin) (@inout TestView, TestRange, @in TestSlice) -> @out () // user: %6 +// CHECK: [[THUNK:%.*]] = function_ref @thunk : $@convention(thin) (@inout TestView, @owned @callee_owned (@inout TestView) -> ()) -> @out () +// CHECK: [[CALL:%.*]] = apply [[SPECIALIZED]](%0, %1, %2) : $@convention(thin) (@inout TestView, TestRange, @in TestSlice) -> @out () +// CHECK: %{{.*}} = tuple () +// CHECK: return %{{.*}} : $() +// CHECK-LABEL: } // end sil function 'testSpecializeThunk' +sil @testSpecializeThunk : $@convention(thin) (@inout TestView, TestRange, @in TestSlice) -> () { +bb0(%0 : $*TestView, %1 : $TestRange, %2 : $TestSlice): + %closurefn = function_ref @closure : $@convention(thin) (@inout TestView, TestRange, @in TestSlice) -> () + %pa = partial_apply %closurefn(%1, %2) : $@convention(thin) (@inout TestView, TestRange, @in TestSlice) -> () + %thunk = function_ref @thunk : $@convention(thin) (@inout TestView, @owned @callee_owned (@inout TestView) -> ()) -> @out () + strong_retain %pa : $@callee_owned (@inout TestView) -> () + %call = apply %thunk(%0, %pa) : $@convention(thin) (@inout TestView, @owned @callee_owned (@inout TestView) -> ()) -> @out () + %1284 = tuple () + return %1284 : $() +} diff --git a/test/SILOptimizer/experimental-swift-based-closure-specialization/closure_specialize_simple.sil 
b/test/SILOptimizer/experimental-swift-based-closure-specialization/closure_specialize_simple.sil new file mode 100644 index 0000000000000..42d91710ae3db --- /dev/null +++ b/test/SILOptimizer/experimental-swift-based-closure-specialization/closure_specialize_simple.sil @@ -0,0 +1,292 @@ +// RUN: %target-sil-opt -enable-sil-verify-all -experimental-swift-based-closure-specialization %s | %FileCheck %s +// XFAIL: * + +import Builtin +import Swift + +sil @simple_partial_apply_fun : $@convention(thin) (Builtin.Int1, Builtin.Int1) -> Builtin.Int1 + +// CHECK-LABEL: sil shared @$s27simple_partial_apply_caller0a1_b1_C4_funBi1_Tf1c_n : $@convention(thin) (Builtin.Int1) -> Builtin.Int1 { +// CHECK: bb0([[CAPTURED_ARG:%.*]] : $Builtin.Int1): +// CHECK: [[CLOSED_OVER_FUN:%.*]] = function_ref @simple_partial_apply_fun : +// CHECK: [[NEW_PAI:%.*]] = partial_apply [[CLOSED_OVER_FUN]] +// CHECK: strong_release [[NEW_PAI]] +sil @simple_partial_apply_caller : $@convention(thin) (@owned @callee_owned (Builtin.Int1) -> Builtin.Int1) -> Builtin.Int1 { +bb0(%0 : $@callee_owned (Builtin.Int1) -> Builtin.Int1): + br bb1 + +bb1: + %1 = integer_literal $Builtin.Int1, 0 + // We cannot do anything here for now but in the future I think we should try + // to handle this in closure specialization potentially. 
+ %2 = apply %0(%1) : $@callee_owned (Builtin.Int1) -> Builtin.Int1 + strong_release %0 : $@callee_owned (Builtin.Int1) -> Builtin.Int1 + cond_br undef, bb1, bb2 + +bb2: + return %2 : $Builtin.Int1 +} + +// CHECK-LABEL: sil shared @$s37simple_partial_apply_2nd_level_caller0a1_b1_C4_funBi1_Tf1c_n : $@convention(thin) (Builtin.Int1) -> Builtin.Int1 { +// CHECK: bb0([[CAPTURED_ARG:%.*]] : $Builtin.Int1): +// CHECK: [[SPECIALIZED_CALLEE:%.*]] = function_ref @$s27simple_partial_apply_caller0a1_b1_C4_funBi1_Tf1c_n : +// CHECK: [[RET:%.*]]= apply [[SPECIALIZED_CALLEE]]([[CAPTURED_ARG]]) +// CHECK: return [[RET]] +sil @simple_partial_apply_2nd_level_caller : $@convention(thin) (@owned @callee_owned (Builtin.Int1) -> Builtin.Int1) -> Builtin.Int1 { +bb0(%0 : $@callee_owned (Builtin.Int1) -> Builtin.Int1): + br bb1 + +bb1: + %1 = function_ref @simple_partial_apply_caller : $@convention(thin) (@owned @callee_owned (Builtin.Int1) -> Builtin.Int1) -> Builtin.Int1 + %2 = apply %1(%0) : $@convention(thin) (@owned @callee_owned (Builtin.Int1) -> Builtin.Int1) -> Builtin.Int1 + cond_br undef, bb1, bb2 + +bb2: + return %2 : $Builtin.Int1 +} +sil @simple_partial_apply_caller_decl : $@convention(thin) (@owned @callee_owned (Builtin.Int1) -> Builtin.Int1) -> Builtin.Int1 + +sil @simple_multiple_partial_apply_caller : $@convention(thin) (@owned @callee_owned (Builtin.Int1) -> Builtin.Int1, @owned @callee_owned (Builtin.Int1) -> Builtin.Int1) -> Builtin.Int1 { +bb0(%0 : $@callee_owned (Builtin.Int1) -> Builtin.Int1, %1 : $@callee_owned (Builtin.Int1) -> Builtin.Int1): + br bb1 + +bb1: + %2 = integer_literal $Builtin.Int1, 0 + // We cannot do anything here for now but in the future I think we should try + // to handle this in closure specialization potentially. 
+ apply %0(%2) : $@callee_owned (Builtin.Int1) -> Builtin.Int1 + strong_release %0 : $@callee_owned (Builtin.Int1) -> Builtin.Int1 + apply %1(%2) : $@callee_owned (Builtin.Int1) -> Builtin.Int1 + strong_release %1 : $@callee_owned (Builtin.Int1) -> Builtin.Int1 + cond_br undef, bb1, bb2 + +bb2: + return %2 : $Builtin.Int1 +} + +sil @simple_partial_apply_fun2 : $@convention(thin) (Builtin.Int1, Builtin.Int1) -> Builtin.Int1 +sil @simple_partial_apply_caller2 : $@convention(thin) (@owned @callee_owned (Builtin.Int1) -> Builtin.Int1) -> Builtin.Int1 { +bb0(%0 : $@callee_owned (Builtin.Int1) -> Builtin.Int1): + br bb1 + +bb1: + %1 = integer_literal $Builtin.Int1, 0 + // We cannot do anything here for now but in the future I think we should try + // to handle this in closure specialization potentially. + %2 = apply %0(%1) : $@callee_owned (Builtin.Int1) -> Builtin.Int1 + strong_release %0 : $@callee_owned (Builtin.Int1) -> Builtin.Int1 + cond_br undef, bb1, bb2 + +bb2: + return %2 : $Builtin.Int1 +} + + +sil @indirect_parameter_partial_apply_fun : $@convention(thin) (@in Builtin.Int1, Builtin.Int1, @in Builtin.Int1) -> @out Builtin.Int1 + +sil @indirect_parameter_partial_apply_caller1 : $@convention(thin) (@callee_owned (@in Builtin.Int1, Builtin.Int1, @in Builtin.Int1) -> @out Builtin.Int1) -> () { +bb0(%0 : $@callee_owned (@in Builtin.Int1, Builtin.Int1, @in Builtin.Int1) -> @out Builtin.Int1): + br bb1 + +bb1: + %1 = alloc_stack $Builtin.Int1 + %2 = integer_literal $Builtin.Int1, 0 + apply %0(%1, %1, %2, %1) : $@callee_owned (@in Builtin.Int1, Builtin.Int1, @in Builtin.Int1) -> @out Builtin.Int1 + dealloc_stack %1 : $*Builtin.Int1 + cond_br undef, bb1, bb2 + +bb2: + %9999 = tuple() + return %9999 : $() +} + +sil @indirect_parameter_partial_apply_caller2 : $@convention(thin) (@callee_owned (@in Builtin.Int1, Builtin.Int1) -> @out Builtin.Int1) -> () { +bb0(%0 : $@callee_owned (@in Builtin.Int1, Builtin.Int1) -> @out Builtin.Int1): + br bb1 + +bb1: + %1 = alloc_stack 
$Builtin.Int1 + %2 = integer_literal $Builtin.Int1, 0 + apply %0(%1, %1, %2) : $@callee_owned (@in Builtin.Int1, Builtin.Int1) -> @out Builtin.Int1 + dealloc_stack %1 : $*Builtin.Int1 + cond_br undef, bb1, bb2 + +bb2: + %9999 = tuple() + return %9999 : $() +} + +sil @indirect_parameter_partial_apply_caller3 : $@convention(thin) (@callee_owned (@in Builtin.Int1) -> @out Builtin.Int1) -> () { +bb0(%0 : $@callee_owned (@in Builtin.Int1) -> @out Builtin.Int1): + br bb1 + +bb1: + %1 = alloc_stack $Builtin.Int1 + apply %0(%1, %1) : $@callee_owned (@in Builtin.Int1) -> @out Builtin.Int1 + dealloc_stack %1 : $*Builtin.Int1 + cond_br undef, bb1, bb2 + +bb2: + %9999 = tuple() + return %9999 : $() +} + +sil @indirect_parameter_partial_apply_caller4 : $@convention(thin) (@callee_owned () -> @out Builtin.Int1) -> () { +bb0(%0 : $@callee_owned () -> @out Builtin.Int1): + br bb1 + +bb1: + %1 = alloc_stack $Builtin.Int1 + apply %0(%1) : $@callee_owned () -> @out Builtin.Int1 + dealloc_stack %1 : $*Builtin.Int1 + cond_br undef, bb1, bb2 + +bb2: + %9999 = tuple() + return %9999 : $() +} + +sil @indirect_parameter_partial_apply_caller5 : $@convention(thin) (@callee_owned () -> ()) -> () { +bb0(%0 : $@callee_owned () -> ()): + br bb1 + +bb1: + apply %0() : $@callee_owned () -> () + cond_br undef, bb1, bb2 + +bb2: + %9999 = tuple() + return %9999 : $() +} + +sil @indirect_parameter_partial_apply_caller6 : $@convention(thin) (@callee_owned () -> @out Builtin.Int1) -> @out Builtin.Int1 { +bb0(%1 : $*Builtin.Int1, %0 : $@callee_owned () -> @out Builtin.Int1): + br bb1 + +bb1: + apply %0(%1) : $@callee_owned () -> @out Builtin.Int1 + cond_br undef, bb1, bb2 + +bb2: + %9999 = tuple() + return %9999 : $() +} + +sil @indirect_parameter_partial_apply_caller7 : $@convention(thin) (@callee_owned () -> @out Builtin.Int1) -> @out (Builtin.Int1, Builtin.Int1) { +bb0(%1 : $*(Builtin.Int1, Builtin.Int1), %0 : $@callee_owned () -> @out Builtin.Int1): + br bb1 + +bb1: + %2 = alloc_stack $Builtin.Int1 + 
%3 = alloc_stack $Builtin.Int1 + apply %0(%2) : $@callee_owned () -> @out Builtin.Int1 + apply %0(%3) : $@callee_owned () -> @out Builtin.Int1 + %4 = load %2: $*Builtin.Int1 + %5 = load %3: $*Builtin.Int1 + %6 = tuple(%4 : $Builtin.Int1, %5: $Builtin.Int1) + store %6 to %1 : $*(Builtin.Int1, Builtin.Int1) + dealloc_stack %3: $*Builtin.Int1 + dealloc_stack %2: $*Builtin.Int1 + cond_br undef, bb1, bb2 + +bb2: + %9999 = tuple() + return %9999 : $() +} + +// CHECK-LABEL: sil @loop_driver : $@convention(thin) (Builtin.Int1, Builtin.Int1) -> () { +// CHECK-DAG: [[SPECIALIZED_FUN:%.*]] = function_ref @$s27simple_partial_apply_caller0a1_b1_C4_funBi1_Tf1c_n : $@convention(thin) (Builtin.Int1) -> Builtin.Int1 +// CHECK-DAG: [[SPECIALIZED_FUN2:%.*]] = function_ref @$s37simple_partial_apply_2nd_level_caller0a1_b1_C4_funBi1_Tf1c_n : $@convention(thin) (Builtin.Int1) -> Builtin.Int1 +// CHECK: apply [[SPECIALIZED_FUN]] +// CHECK: apply [[SPECIALIZED_FUN2]] + +// We can't call this one b/c it is just a declaration. +// CHECK: [[UNSPECIALIZED_FUN_DECL:%.*]] = function_ref @simple_partial_apply_caller_decl : $@convention(thin) (@owned @callee_owned (Builtin.Int1) -> Builtin.Int1) -> Builtin.Int1 +// CHECK: apply [[UNSPECIALIZED_FUN_DECL]] + +// We handle closures with indirect results. +// CHECK: [[CLOSUREFUN:%.*]] = function_ref @indirect_parameter_partial_apply_fun +// CHECK-NOT: partial_apply [[CLOSUREFUN]]() +// CHECK: [[INLINEDCLOSURE_CALLER1:%.*]] = function_ref @$s40indirect_parameter_partial_apply_caller10a1_b1_c1_D4_funTf1c_n +// CHECK-NOT: partial_apply [[CLOSUREFUN]]() + +// We don't handle captured indirect @in and @in_guaranteed parameters yet. 
+// CHECK: [[CLOSURE2:%.*]] = partial_apply [[CLOSUREFUN]](%{{.*}}) +// CHECK: [[CLOSURE3:%.*]] = partial_apply [[CLOSUREFUN]](%{{.*}}) +// CHECK: [[CLOSURE4:%.*]] = partial_apply [[CLOSUREFUN]](%{{.*}}) + +// CHECK: [[CALLER1:%.*]] = function_ref @indirect_parameter_partial_apply_caller1 +// CHECK: [[CALLER2:%.*]] = function_ref @indirect_parameter_partial_apply_caller2 +// CHECK: [[CALLER3:%.*]] = function_ref @indirect_parameter_partial_apply_caller3 +// CHECK: [[CALLER4:%.*]] = function_ref @indirect_parameter_partial_apply_caller4 + +// Closure with indirect result but no captured indirect parameter. +// CHECK-NOT: apply [[CALLER1]] +// apply [[INLINEDCLOSURE_CALLER1]]() +// CHECK-NOT: apply [[CALLER1]] + +// Closures with captured indirect parameters. +// apply [[CALLER2]]([[CLOSURE2]]) +// apply [[CALLER3]]([[CLOSURE3]]) +// apply [[CALLER4]]([[CLOSURE4]]) + +// CHECK: return +sil @loop_driver : $@convention(thin) (Builtin.Int1, Builtin.Int1) -> () { +bb0(%0 : $Builtin.Int1, %1 : $Builtin.Int1): + %2 = function_ref @simple_partial_apply_fun : $@convention(thin) (Builtin.Int1, Builtin.Int1) -> Builtin.Int1 + %3 = partial_apply %2(%0) : $@convention(thin) (Builtin.Int1, Builtin.Int1) -> Builtin.Int1 + %4 = function_ref @simple_partial_apply_caller : $@convention(thin) (@owned @callee_owned (Builtin.Int1) -> Builtin.Int1) -> Builtin.Int1 + %5 = apply %4(%3) : $@convention(thin) (@owned @callee_owned (Builtin.Int1) -> Builtin.Int1) -> Builtin.Int1 + + %51 = function_ref @simple_partial_apply_2nd_level_caller : $@convention(thin) (@owned @callee_owned (Builtin.Int1) -> Builtin.Int1) -> Builtin.Int1 + %52 = apply %51(%3) : $@convention(thin) (@owned @callee_owned (Builtin.Int1) -> Builtin.Int1) -> Builtin.Int1 + + %6 = partial_apply %2(%0) : $@convention(thin) (Builtin.Int1, Builtin.Int1) -> Builtin.Int1 + %7 = function_ref @simple_partial_apply_caller_decl : $@convention(thin) (@owned @callee_owned (Builtin.Int1) -> Builtin.Int1) -> Builtin.Int1 + %8 = apply 
%7(%6) : $@convention(thin) (@owned @callee_owned (Builtin.Int1) -> Builtin.Int1) -> Builtin.Int1 + + %9 = alloc_stack $Builtin.Int1 + + %10 = function_ref @indirect_parameter_partial_apply_fun : $@convention(thin) (@in Builtin.Int1, Builtin.Int1, @in Builtin.Int1) -> @out Builtin.Int1 + %11 = partial_apply %10() : $@convention(thin) (@in Builtin.Int1, Builtin.Int1, @in Builtin.Int1) -> @out Builtin.Int1 + %12 = partial_apply %10(%9) : $@convention(thin) (@in Builtin.Int1, Builtin.Int1, @in Builtin.Int1) -> @out Builtin.Int1 + %13 = partial_apply %10(%1, %9) : $@convention(thin) (@in Builtin.Int1, Builtin.Int1, @in Builtin.Int1) -> @out Builtin.Int1 + %14 = partial_apply %10(%9, %1, %9) : $@convention(thin) (@in Builtin.Int1, Builtin.Int1, @in Builtin.Int1) -> @out Builtin.Int1 + + %16 = function_ref @indirect_parameter_partial_apply_caller1 : $@convention(thin) (@callee_owned (@in Builtin.Int1, Builtin.Int1, @in Builtin.Int1) -> @out Builtin.Int1) -> () + %17 = function_ref @indirect_parameter_partial_apply_caller2 : $@convention(thin) (@callee_owned (@in Builtin.Int1, Builtin.Int1) -> @out Builtin.Int1) -> () + %18 = function_ref @indirect_parameter_partial_apply_caller3 : $@convention(thin) (@callee_owned (@in Builtin.Int1) -> @out Builtin.Int1) -> () + %19 = function_ref @indirect_parameter_partial_apply_caller4 : $@convention(thin) (@callee_owned () -> @out Builtin.Int1) -> () + %20 = function_ref @indirect_parameter_partial_apply_caller5 : $@convention(thin) (@callee_owned () -> ()) -> () + + apply %16(%11) : $@convention(thin) (@callee_owned (@in Builtin.Int1, Builtin.Int1, @in Builtin.Int1) -> @out Builtin.Int1) -> () + apply %17(%12) : $@convention(thin) (@callee_owned (@in Builtin.Int1, Builtin.Int1) -> @out Builtin.Int1) -> () + apply %18(%13) : $@convention(thin) (@callee_owned (@in Builtin.Int1) -> @out Builtin.Int1) -> () + apply %19(%14) : $@convention(thin) (@callee_owned () -> @out Builtin.Int1) -> () + + // Make sure we handle when we already have 
an out parameter correctly. + %21 = alloc_stack $(Builtin.Int1, Builtin.Int1) + %22 = function_ref @indirect_parameter_partial_apply_caller6 : $@convention(thin) (@callee_owned () -> @out Builtin.Int1) -> @out Builtin.Int1 + %23 = function_ref @indirect_parameter_partial_apply_caller7 : $@convention(thin) (@callee_owned () -> @out Builtin.Int1) -> @out (Builtin.Int1, Builtin.Int1) + %24 = partial_apply %10(%9, %1, %9) : $@convention(thin) (@in Builtin.Int1, Builtin.Int1, @in Builtin.Int1) -> @out Builtin.Int1 + %25 = partial_apply %10(%9, %1, %9) : $@convention(thin) (@in Builtin.Int1, Builtin.Int1, @in Builtin.Int1) -> @out Builtin.Int1 + apply %22(%9, %24) : $@convention(thin) (@callee_owned () -> @out Builtin.Int1) -> @out Builtin.Int1 + apply %23(%21, %25) : $@convention(thin) (@callee_owned () -> @out Builtin.Int1) -> @out (Builtin.Int1, Builtin.Int1) + + dealloc_stack %21 : $*(Builtin.Int1, Builtin.Int1) + dealloc_stack %9 : $*Builtin.Int1 + + %26 = partial_apply %2(%0) : $@convention(thin) (Builtin.Int1, Builtin.Int1) -> Builtin.Int1 + %27 = partial_apply %2(%0) : $@convention(thin) (Builtin.Int1, Builtin.Int1) -> Builtin.Int1 + %28 = function_ref @simple_multiple_partial_apply_caller : $@convention(thin) (@owned @callee_owned (Builtin.Int1) -> Builtin.Int1, @owned @callee_owned (Builtin.Int1) -> Builtin.Int1) -> Builtin.Int1 + %29 = apply %28(%26, %27) : $@convention(thin) (@owned @callee_owned (Builtin.Int1) -> Builtin.Int1, @owned @callee_owned (Builtin.Int1) -> Builtin.Int1) -> Builtin.Int1 + + %30 = function_ref @simple_partial_apply_fun2 : $@convention(thin) (Builtin.Int1, Builtin.Int1) -> Builtin.Int1 + %31 = partial_apply %30(%1) : $@convention(thin) (Builtin.Int1, Builtin.Int1) -> Builtin.Int1 + %32 = function_ref @simple_partial_apply_caller2 : $@convention(thin) (@owned @callee_owned (Builtin.Int1) -> Builtin.Int1) -> Builtin.Int1 + %33 = apply %32(%31) : $@convention(thin) (@owned @callee_owned (Builtin.Int1) -> Builtin.Int1) -> Builtin.Int1 + + 
+ %9999 = tuple() + return %9999 : $() +} \ No newline at end of file diff --git a/test/SILOptimizer/moveonly_optional_force_unwrap.swift b/test/SILOptimizer/moveonly_optional_force_unwrap.swift new file mode 100644 index 0000000000000..bb7f77bc5b5ec --- /dev/null +++ b/test/SILOptimizer/moveonly_optional_force_unwrap.swift @@ -0,0 +1,152 @@ +// RUN: %target-swift-frontend -enable-experimental-feature NoncopyableGenerics -emit-sil -verify %s + +struct NC: ~Copyable { + borrowing func borrow() {} + mutating func mutate() {} + consuming func consume() {} +} + +func borrow(_: borrowing NC) {} +func consume(_: consuming NC) {} +func mutate(_: inout NC) {} + +func unwrapBorrow_Borrow(x: borrowing NC?) { + x!.borrow() + borrow(x!) + + x!.borrow() + borrow(x!) +} + +func unwrapConsume_Borrow(x: borrowing NC?) { // expected-error{{cannot be consumed}} + x!.consume() // expected-note{{consumed here}} + consume(x!) // expected-note{{consumed here}} + + x!.consume() // expected-note{{consumed here}} + consume(x!) // expected-note{{consumed here}} +} + +func unwrapBorrowMutateConsume_Consume(x: consuming NC?) { + x!.borrow() + x!.mutate() + + borrow(x!) + mutate(&x!) + + consume(x!) +} + +func unwrapBorrowMutateConsume2_Consume(x: consuming NC?) { + x!.borrow() + x!.mutate() + + borrow(x!) + mutate(&x!) + + x!.consume() +} + +func unwrapBorrowMutateConsumeBorrow_Consume(x: consuming NC?) { // expected-error {{used after consume}} + x!.borrow() + x!.mutate() + + borrow(x!) + mutate(&x!) + + consume(x!) // expected-note{{consumed here}} + + x!.borrow() // expected-note{{used here}} + borrow(x!) +} + +func unwrapBorrowMutateConsumeMutate_Consume(x: consuming NC?) { // expected-error {{used after consume}} + x!.borrow() + x!.mutate() + + borrow(x!) + mutate(&x!) + + consume(x!) // expected-note{{consumed here}} + + x!.mutate() // expected-note{{used here}} + mutate(&x!) +} + +func unwrapBorrowMutateConsumeInitBorrow_Consume(x: consuming NC?, y: consuming NC?) 
{ + x!.borrow() + x!.mutate() + + borrow(x!) + mutate(&x!) + + consume(x!) + + x = y + + x!.borrow() + borrow(x!) +} + +func unwrapBorrowMutateConsumeInitMutate_Consume(x: consuming NC?, y: consuming NC?) { + x!.borrow() + x!.mutate() + + borrow(x!) + mutate(&x!) + + consume(x!) + + x = y + + x!.mutate() + mutate(&x!) +} + +func unwrapBorrowMutateConsumeInitBorrowMutateConsume_Consume(x: consuming NC?, y: consuming NC?) { + x!.borrow() + x!.mutate() + + borrow(x!) + mutate(&x!) + + consume(x!) + + x = y + + x!.mutate() + x!.borrow() + mutate(&x!) + borrow(x!) + + consume(x!) +} + +func unwrapBorrowMutate_Mutate(x: inout NC?) { + x!.borrow() + x!.mutate() + + borrow(x!) + mutate(&x!) +} + +func unwrapBorrowMutateConsume_Mutate(x: inout NC?) { // expected-error {{missing reinitialization}} + x!.borrow() + x!.mutate() + + borrow(x!) + mutate(&x!) + + x!.consume() // expected-note {{consumed here}} +} + +func unwrapBorrowMutateConsumeInit_Mutate(x: inout NC?, y: consuming NC) { + x!.borrow() + x!.mutate() + + borrow(x!) + mutate(&x!) + + x!.consume() // expected-note{{consumed here}} + + x! 
= y // expected-error{{cannot partially reinitialize}} +} diff --git a/test/SILOptimizer/ossa_lifetime_completion.sil b/test/SILOptimizer/ossa_lifetime_completion.sil index 869916bbb1f10..3a56916259c70 100644 --- a/test/SILOptimizer/ossa_lifetime_completion.sil +++ b/test/SILOptimizer/ossa_lifetime_completion.sil @@ -27,15 +27,15 @@ case none case some(Wrapped) } -// CHECK-LABEL: begin running test 1 of 1 on eagerConsumneOwnedArg: ossa-lifetime-completion with: @argument -// CHECK-LABEL: OSSA lifetime completion: %0 = argument of bb0 : $C +// CHECK-LABEL: begin running test 1 of 1 on eagerConsumneOwnedArg: ossa_lifetime_completion with: @argument +// CHECK-LABEL: OSSA lifetime completion on liveness boundary: %0 = argument of bb0 : $C // CHECK: sil [ossa] @eagerConsumneOwnedArg : $@convention(thin) (@owned C) -> () { // CHECK: bb0(%0 : @_eagerMove @owned $C): // CHECK-NEXT: destroy_value %0 : $C -// CHECK-LABEL: end running test 1 of 1 on eagerConsumneOwnedArg: ossa-lifetime-completion with: @argument +// CHECK-LABEL: end running test 1 of 1 on eagerConsumneOwnedArg: ossa_lifetime_completion with: @argument sil [ossa] @eagerConsumneOwnedArg : $@convention(thin) (@owned C) -> () { entry(%0 : @_eagerMove @owned $C): - specify_test "ossa-lifetime-completion @argument" + specify_test "ossa_lifetime_completion @argument liveness" br exit exit: @@ -43,8 +43,8 @@ exit: return %retval : $() } -// CHECK-LABEL: begin running test 1 of 1 on lexicalOwnedArg: ossa-lifetime-completion with: @argument -// CHECK: OSSA lifetime completion: %0 = argument of bb0 : $C // user: %4 +// CHECK-LABEL: begin running test 1 of 1 on lexicalOwnedArg: ossa_lifetime_completion with: @argument +// CHECK: OSSA lifetime completion on availability boundary: %0 = argument of bb0 : $C // user: %4 // CHECK: sil [ossa] @lexicalOwnedArg : $@convention(thin) (@owned C) -> () { // CHECK: bb0(%0 : @owned $C): // CHECK: cond_br undef, bb1, bb2 @@ -52,10 +52,10 @@ exit: // CHECK-NEXT: destroy_value %0 : $C // 
CHECK-NEXT: unreachable // CHECK: } // end sil function 'lexicalOwnedArg' -// CHECK-LABEL: end running test 1 of 1 on lexicalOwnedArg: ossa-lifetime-completion with: @argument +// CHECK-LABEL: end running test 1 of 1 on lexicalOwnedArg: ossa_lifetime_completion with: @argument sil [ossa] @lexicalOwnedArg : $@convention(thin) (@owned C) -> () { bb0(%0 : @owned $C): - specify_test "ossa-lifetime-completion @argument" + specify_test "ossa_lifetime_completion @argument availability" cond_br undef, bb1, bb2 bb1: br bb3 @@ -74,7 +74,7 @@ bb3: // CHECK-LABEL: } // end sil function 'borrowTest' sil [ossa] @borrowTest : $@convention(method) (@owned C) -> () { bb0(%0 : @owned $C): - specify_test "ossa-lifetime-completion @instruction[0]" + specify_test "ossa_lifetime_completion @instruction[0] availability" %borrow = begin_borrow %0 : $C cond_br undef, bb1, bb2 @@ -99,7 +99,7 @@ bb3: // CHECK-LABEL: } // end sil function 'enumTest' sil [ossa] @enumTest : $@convention(method) (@guaranteed FakeOptional) -> () { bb0(%0 : @guaranteed $FakeOptional): - specify_test "ossa-lifetime-completion @instruction[0]" + specify_test "ossa_lifetime_completion @instruction[0] liveness" %copy = copy_value %0 : $FakeOptional %borrow = begin_borrow %copy : $FakeOptional switch_enum %borrow : $FakeOptional, case #FakeOptional.some!enumelt: bb1, case #FakeOptional.none!enumelt: bb2 @@ -122,7 +122,7 @@ sil @use_guaranteed : $@convention(thin) (@guaranteed C) -> () sil [ossa] @argTest : $@convention(method) (@owned C) -> () { bb0(%0 : @owned $C): - specify_test "ossa-lifetime-completion @argument" + specify_test "ossa_lifetime_completion @argument availability" debug_value %0 : $C cond_br undef, bb1, bb2 @@ -146,7 +146,7 @@ bb4: // Ensure no assert fires while inserting dead end blocks to the worklist sil [ossa] @testLexicalLifetimeCompletion : $@convention(thin) (@owned C) -> () { bb0(%0 : @owned $C): - specify_test "ossa-lifetime-completion @argument" + specify_test "ossa_lifetime_completion 
@argument availability" debug_value %0 : $C, let, name "newElements", argno 1 cond_br undef, bb1, bb2 @@ -189,7 +189,7 @@ sil @foo : $@convention(thin) (@guaranteed C) -> () // Ensure no assert fires while handling lifetime end of partial_apply sil [ossa] @testPartialApplyStack1 : $@convention(thin) (@guaranteed C) -> () { bb0(%0 : @guaranteed $C): - specify_test "ossa-lifetime-completion @instruction[0]" + specify_test "ossa_lifetime_completion @instruction[0] availability" %8 = copy_value %0 : $C %9 = begin_borrow %8 : $C %80 = function_ref @foo : $@convention(thin) (@guaranteed C) -> () @@ -213,7 +213,7 @@ bb3: // Ensure no assert fires while handling lifetime end of partial_apply sil [ossa] @testPartialApplyStack2 : $@convention(thin) (@guaranteed C) -> () { bb0(%0 : @guaranteed $C): - specify_test "ossa-lifetime-completion @instruction[1]" + specify_test "ossa_lifetime_completion @instruction[1] availability" %2 = alloc_stack $C %3 = copy_value %0 : $C %4 = begin_borrow %3 : $C @@ -253,7 +253,7 @@ sil [ossa] @availability_boundary_1 : $@convention(thin) () -> () { entry: %value = apply undef() : $@convention(thin) () -> @owned C %lexical = move_value [lexical] %value : $C // required (for lexicality) - specify_test "ossa-lifetime-completion %lexical" + specify_test "ossa_lifetime_completion %lexical availability" br condition_1 condition_1: @@ -310,7 +310,7 @@ sil [ossa] @availability_boundary_2_after_loop : $@convention(thin) () -> () { entry: %value = apply undef() : $@convention(thin) () -> @owned C %lexical = move_value [lexical] %value : $C // required (for lexicality) - specify_test "ossa-lifetime-completion %lexical" + specify_test "ossa_lifetime_completion %lexical availability" br condition_1 condition_1: @@ -375,7 +375,7 @@ sil [ossa] @availability_boundary_3_after_loop : $@convention(thin) () -> () { entry: %value = apply undef() : $@convention(thin) () -> @owned C %lexical = move_value [lexical] %value : $C // required (for lexicality) - 
specify_test "ossa-lifetime-completion %lexical" + specify_test "ossa_lifetime_completion %lexical availability" br condition_1 condition_1: @@ -425,7 +425,7 @@ sil [ossa] @project_box_deadend : $@convention(thin) (@owned C) -> () { bb0(%0 : @owned $C): %2 = alloc_box ${ var C } %3 = begin_borrow %2 : ${ var C } - specify_test "ossa-lifetime-completion %3" + specify_test "ossa_lifetime_completion %3 availability" %4 = project_box %3 : ${ var C }, 0 store %0 to [init] %4 : $*C unreachable @@ -453,7 +453,7 @@ entry(%ie : @owned $IndirectEnumNontrivialPayload): switch_enum %ie : $IndirectEnumNontrivialPayload, case #IndirectEnumNontrivialPayload.c!enumelt: one_case one_case(%box : @owned ${ var C }): - specify_test "ossa-lifetime-completion %box" + specify_test "ossa_lifetime_completion %box availability" %c_addr = project_box %box : ${ var C }, 0 %c = load_borrow %c_addr : $*C cond_br undef, left, right @@ -483,7 +483,7 @@ bb0: %callee_pa = partial_apply [callee_guaranteed] undef() : $@convention(thin) @async @substituted <τ_0_0> () -> (@out τ_0_0, @error any Error) for <()> %callee_noescape = convert_escape_to_noescape [not_guaranteed] %callee_pa : $@async @callee_guaranteed @substituted <τ_0_0> () -> (@out τ_0_0, @error any Error) for <()> to $@noescape @async @callee_guaranteed @substituted <τ_0_0> () -> (@out τ_0_0, @error any Error) for <()> - specify_test "ossa-lifetime-completion %callee_noescape" + specify_test "ossa_lifetime_completion %callee_noescape availability" %async_let = builtin "startAsyncLetWithLocalBuffer"<()>( %task_options : $Optional, %callee_noescape : $@noescape @async @callee_guaranteed @substituted <τ_0_0> () -> (@out τ_0_0, @error any Error) for <()>, @@ -499,16 +499,16 @@ bb0: return %retval : $() } -// CHECK-LABEL: begin running test {{.*}} on alloc_box: ossa-lifetime-completion +// CHECK-LABEL: begin running test {{.*}} on alloc_box: ossa_lifetime_completion // CHECK-LABEL: sil [ossa] @alloc_box : {{.*}} { // CHECK: [[BOX:%[^,]+]] = 
alloc_box // CHECK: dealloc_box [[BOX]] // CHECK-LABEL: } // end sil function 'alloc_box' -// CHECK-LABEL: end running test {{.*}} on alloc_box: ossa-lifetime-completion +// CHECK-LABEL: end running test {{.*}} on alloc_box: ossa_lifetime_completion sil [ossa] @alloc_box : $@convention(thin) (@owned C) -> () { entry(%instance : @owned $C): %box = alloc_box ${ var C } - specify_test "ossa-lifetime-completion %box" + specify_test "ossa_lifetime_completion %box availability" %addr = project_box %box : ${ var C }, 0 store %instance to [init] %addr : $*C unreachable @@ -526,7 +526,7 @@ entry(%instance : @owned $C): sil [ossa] @begin_apply : $@convention(thin) () -> () { entry: (%_, %token) = begin_apply undef() : $@yield_once @convention(thin) () -> (@yields @in_guaranteed ()) - specify_test "ossa-lifetime-completion %token" + specify_test "ossa_lifetime_completion %token availability" cond_br undef, left, right left: diff --git a/test/Sema/bitwse_copyable_underscore.swift b/test/Sema/bitwse_copyable_underscore.swift new file mode 100644 index 0000000000000..11f7a1d873f16 --- /dev/null +++ b/test/Sema/bitwse_copyable_underscore.swift @@ -0,0 +1,7 @@ +// RUN: %target-typecheck-verify-swift \ +// RUN: -disable-availability-checking \ +// RUN: -debug-diagnostic-names + +struct S : _BitwiseCopyable {} // expected-warning {{'_BitwiseCopyable' is deprecated: Use BitwiseCopyable}} + +func f(_ t: T) {} // expected-warning {{'_BitwiseCopyable' is deprecated: Use BitwiseCopyable}} diff --git a/test/abi/macOS/arm64/stdlib.swift b/test/abi/macOS/arm64/stdlib.swift index 4b6a4e1c80f8f..9f7ff20010f23 100644 --- a/test/abi/macOS/arm64/stdlib.swift +++ b/test/abi/macOS/arm64/stdlib.swift @@ -567,3 +567,6 @@ Added: _swift_updatePureObjCClassMetadata // Runtime bincompat functions for Concurrency runtime to detect legacy mode Added: _swift_bincompat_useLegacyNonCrashingExecutorChecks + +// Add add SWIFT_IS_CURRENT_EXECUTOR_LEGACY_MODE_OVERRIDE +Added: 
_concurrencyIsCurrentExecutorLegacyModeOverride \ No newline at end of file diff --git a/test/abi/macOS/x86_64/stdlib.swift b/test/abi/macOS/x86_64/stdlib.swift index ba16cb21969a6..73c8ae0a19ccd 100644 --- a/test/abi/macOS/x86_64/stdlib.swift +++ b/test/abi/macOS/x86_64/stdlib.swift @@ -567,3 +567,6 @@ Added: _swift_updatePureObjCClassMetadata // Runtime bincompat functions for Concurrency runtime to detect legacy mode Added: _swift_bincompat_useLegacyNonCrashingExecutorChecks + +// Add SWIFT_IS_CURRENT_EXECUTOR_LEGACY_MODE_OVERRIDE +Added: _concurrencyIsCurrentExecutorLegacyModeOverride \ No newline at end of file diff --git a/test/embedded/lto-multiple-object-files.swift b/test/embedded/lto-multiple-object-files.swift index d1fba776f58eb..edfec93b1f876 100644 --- a/test/embedded/lto-multiple-object-files.swift +++ b/test/embedded/lto-multiple-object-files.swift @@ -14,6 +14,7 @@ // REQUIRES: swift_in_compiler // REQUIRES: executable_test +// REQUIRES: optimized_stdlib // REQUIRES: OS=macosx // For LTO, the linker dlopen()'s the libLTO library, which is a scenario that diff --git a/test/multifile/protocol-conformance-redundant.swift b/test/multifile/protocol-conformance-redundant.swift index f27a8a346b5d6..9a8814221e264 100644 --- a/test/multifile/protocol-conformance-redundant.swift +++ b/test/multifile/protocol-conformance-redundant.swift @@ -17,6 +17,8 @@ import StdlibUnittest import Darwin #elseif canImport(Glibc) import Glibc +#elseif canImport(Android) +import Android #else #error("Unsupported platform") #endif diff --git a/test/stdlib/BridgeEquatableToObjC.swift b/test/stdlib/BridgeEquatableToObjC.swift index 02b1f5088b4d0..c426a0d6a55fb 100644 --- a/test/stdlib/BridgeEquatableToObjC.swift +++ b/test/stdlib/BridgeEquatableToObjC.swift @@ -32,12 +32,7 @@ BridgeEquatableToObjC.test("Bridge equatable struct") { let objcResult = objcA.isEqual(objcB) expectEqual(swiftResult, true) -#if os(macOS) || os(iOS) || os(tvOS) || os(watchOS) || os(visionOS) - // 
Apple platforms use old semantics for now... - expectEqual(objcResult, false) -#else expectEqual(objcResult, true) -#endif } BridgeEquatableToObjC.test("Bridge non-equatable struct") { diff --git a/test/stdlib/Duration.swift b/test/stdlib/Duration.swift index b68344a692b88..1cd2d7ac0b380 100644 --- a/test/stdlib/Duration.swift +++ b/test/stdlib/Duration.swift @@ -1,10 +1,6 @@ // RUN: %target-run-simple-swift // REQUIRES: executable_test -// Int128 operations are not supported on 32bit platforms, 128-bit types are not -// provided by the 32-bit LLVM. See `dividingFullWidth` in IntegerTypes.swift.gyb -// UNSUPPORTED: PTRSIZE=32 - // These test a codepath that was fixed in the Swift 5.9 stdlib, so it will // fail if run against earlier standard library versions. // UNSUPPORTED: use_os_stdlib @@ -12,7 +8,7 @@ import StdlibUnittest -var suite = TestSuite("StringIndexTests") +var suite = TestSuite("DurationTests") defer { runAllTests() } if #available(SwiftStdlib 5.7, *) { @@ -54,4 +50,178 @@ if #available(SwiftStdlib 5.7, *) { expectEqual(attosec, Int64(integerValue) % 1_000_000 * 1_000_000_000_000) } } + + suite.test("seconds from Int64") { + let one = Duration.seconds(1 as Int64) + expectEqual(one._high, 0) + expectEqual(one._low, 1_000_000_000_000_000_000) + let mone = Duration.seconds(-1 as Int64) + expectEqual(mone._high, -1) + expectEqual(mone._low, .max - 999_999_999_999_999_999) + let max64 = Duration.seconds(Int64.max) + expectEqual(max64._high, 499_999_999_999_999_999) + expectEqual(max64._low, .max - 999_999_999_999_999_999) + let min64 = Duration.seconds(Int64.min) + expectEqual(min64._high,-500_000_000_000_000_000) + expectEqual(min64._low, 0) + } + + suite.test("seconds from UInt64") { + let one = Duration.seconds(1 as UInt64) + expectEqual(one._high, 0) + expectEqual(one._low, 1_000_000_000_000_000_000) + let max64 = Duration.seconds(UInt64.max) + expectEqual(max64._high, 999_999_999_999_999_999) + expectEqual(max64._low, .max - 999_999_999_999_999_999) + 
} + + suite.test("milliseconds from Int64") { + let one = Duration.milliseconds(1 as Int64) + expectEqual(one._high, 0) + expectEqual(one._low, 1_000_000_000_000_000) + let mone = Duration.milliseconds(-1 as Int64) + expectEqual(mone._high, -1) + expectEqual(mone._low, .max - 999_999_999_999_999) + let max64 = Duration.milliseconds(Int64.max) + expectEqual(max64._high, 499_999_999_999_999) + expectEqual(max64._low, .max - 999_999_999_999_999) + let min64 = Duration.milliseconds(Int64.min) + expectEqual(min64._high,-500_000_000_000_000) + expectEqual(min64._low, 0) + } + + suite.test("milliseconds from UInt64") { + let one = Duration.milliseconds(1 as UInt64) + expectEqual(one._high, 0) + expectEqual(one._low, 1_000_000_000_000_000) + let max64 = Duration.milliseconds(UInt64.max) + expectEqual(max64._high, 999_999_999_999_999) + expectEqual(max64._low, .max - 999_999_999_999_999) + } + + suite.test("microseconds from Int64") { + let one = Duration.microseconds(1 as Int64) + expectEqual(one._high, 0) + expectEqual(one._low, 1_000_000_000_000) + let mone = Duration.microseconds(-1 as Int64) + expectEqual(mone._high, -1) + expectEqual(mone._low, .max - 999_999_999_999) + let max64 = Duration.microseconds(Int64.max) + expectEqual(max64._high, 499_999_999_999) + expectEqual(max64._low, .max - 999_999_999_999) + let min64 = Duration.microseconds(Int64.min) + expectEqual(min64._high,-500_000_000_000) + expectEqual(min64._low, 0) + } + + suite.test("microseconds from UInt64") { + let one = Duration.microseconds(1 as UInt64) + expectEqual(one._high, 0) + expectEqual(one._low, 1_000_000_000_000) + let max64 = Duration.microseconds(UInt64.max) + expectEqual(max64._high, 999_999_999_999) + expectEqual(max64._low, .max - 999_999_999_999) + } + + suite.test("nanoseconds from Int64") { + let one = Duration.nanoseconds(1 as Int64) + expectEqual(one._high, 0) + expectEqual(one._low, 1_000_000_000) + let mone = Duration.nanoseconds(-1 as Int64) + expectEqual(mone._high, -1) + 
expectEqual(mone._low, .max - 999_999_999) + let max64 = Duration.nanoseconds(Int64.max) + expectEqual(max64._high, 499_999_999) + expectEqual(max64._low, .max - 999_999_999) + let min64 = Duration.nanoseconds(Int64.min) + expectEqual(min64._high,-500_000_000) + expectEqual(min64._low, 0) + } + + suite.test("nanoseconds from UInt64") { + let one = Duration.nanoseconds(1 as UInt64) + expectEqual(one._high, 0) + expectEqual(one._low, 1_000_000_000) + let max64 = Duration.nanoseconds(UInt64.max) + expectEqual(max64._high, 999_999_999) + expectEqual(max64._low, .max - 999_999_999) + } +} + +if #available(SwiftStdlib 6.0, *) { + suite.test("seconds from Int128") { + let one = Duration.seconds(1 as Int128) + expectEqual(one._high, 0) + expectEqual(one._low, 1_000_000_000_000_000_000) + let mone = Duration.seconds(-1 as Int128) + expectEqual(mone._high, -1) + expectEqual(mone._low, .max - 999_999_999_999_999_999) + let maxRep = Duration.seconds( 170141183460469231731 as Int128) + expectEqual(maxRep._high, 9_223_372_036_854_775_807) + expectEqual(maxRep._low, 17_759_440_357_825_445_888) + // negative overflow boundary is _smaller_ than positive for seconds; + // this could be avoided by reworking how highScaled is computed, but + // it's already so large (5 trillion years) that this probably isn't + // necessary. 
+ let minRep = Duration.seconds(-166020696663385964544 as Int128) + expectEqual(minRep._high,-9_000_000_000_000_000_000) + expectEqual(minRep._low, 0) + // Check just above the overflow boundary + expectCrashLater() + let _ = Duration.seconds( 170141183460469231732 as Int128) + } + + suite.test("milliseconds from Int128") { + let one = Duration.milliseconds(1 as Int128) + expectEqual(one._high, 0) + expectEqual(one._low, 1_000_000_000_000_000) + let mone = Duration.milliseconds(-1 as Int128) + expectEqual(mone._high, -1) + expectEqual(mone._low, .max - 999_999_999_999_999) + let maxRep = Duration.milliseconds( 170141183460469231731687 as Int128) + expectEqual(maxRep._high, 9_223_372_036_854_775_807) + expectEqual(maxRep._low, 18_446_440_357_825_445_888) + let minRep = Duration.milliseconds(-170134320591823194554368 as Int128) + expectEqual(minRep._high,-9_223_000_000_000_000_000) + expectEqual(minRep._low, 0) + // Check just above the overflow boundary + expectCrashLater() + let _ = Duration.milliseconds( 170141183460469231731689 as Int128) + } + + suite.test("microseconds from Int128") { + let one = Duration.microseconds(1 as Int128) + expectEqual(one._high, 0) + expectEqual(one._low, 1_000_000_000_000) + let mone = Duration.microseconds(-1 as Int128) + expectEqual(mone._high, -1) + expectEqual(mone._low, .max - 999_999_999_999) + let maxRep = Duration.microseconds( 170141183460469231731687303 as Int128) + expectEqual(maxRep._high, 9_223_372_036_854_775_807) + expectEqual(maxRep._low, 18_446_743_357_825_445_888) + let minRep = Duration.microseconds(-170141182780618614507569152 as Int128) + expectEqual(minRep._high,-9_223_372_000_000_000_000) + expectEqual(minRep._low, 0) + // Check just above the overflow boundary + expectCrashLater() + let _ = Duration.microseconds( 170141183460469231731687304 as Int128) + } + + suite.test("nanoseconds from Int128") { + let one = Duration.nanoseconds(1 as Int128) + expectEqual(one._high, 0) + expectEqual(one._low, 1_000_000_000) 
+ let mone = Duration.nanoseconds(-1 as Int128) + expectEqual(mone._high, -1) + expectEqual(mone._low, .max - 999_999_999) + let maxRep = Duration.nanoseconds( 170141183460469231731687303715 as Int128) + expectEqual(maxRep._high, 9_223_372_036_854_775_807) + expectEqual(maxRep._low, 18_446_744_072_825_445_888) + let minRep = Duration.nanoseconds(-170141183444701401161113010176 as Int128) + expectEqual(minRep._high,-9_223_372_036_000_000_000) + expectEqual(minRep._low, 0) + // Check just above the overflow boundary + expectCrashLater() + let _ = Duration.nanoseconds( 170141183460469231731687303716 as Int128) + } } diff --git a/test/stdlib/FloatConstants.swift b/test/stdlib/FloatConstants.swift index 3ed928a9d8848..0067e7df79cb0 100644 --- a/test/stdlib/FloatConstants.swift +++ b/test/stdlib/FloatConstants.swift @@ -6,6 +6,8 @@ import Glibc #elseif os(WASI) import WASILibc +#elseif canImport(Android) + import Android #elseif os(Windows) import CRT #else diff --git a/test/stdlib/MathConstants.swift b/test/stdlib/MathConstants.swift index 9cc0cd776a07f..1ff5c0d6a5b94 100644 --- a/test/stdlib/MathConstants.swift +++ b/test/stdlib/MathConstants.swift @@ -6,6 +6,8 @@ import Glibc #elseif os(WASI) import WASILibc +#elseif canImport(Android) + import Android #elseif os(Windows) import CRT #else diff --git a/test/stdlib/POSIX.swift b/test/stdlib/POSIX.swift index c60aa771c0000..42b9edb5f4123 100644 --- a/test/stdlib/POSIX.swift +++ b/test/stdlib/POSIX.swift @@ -9,6 +9,8 @@ import SwiftPrivateLibcExtras import Darwin #elseif canImport(Glibc) import Glibc +#elseif canImport(Android) + import Android #else #error("Unsupported platform") #endif diff --git a/test/stdlib/PrintFloat.swift.gyb b/test/stdlib/PrintFloat.swift.gyb index 78d51cd67abae..5377a6d2b2467 100644 --- a/test/stdlib/PrintFloat.swift.gyb +++ b/test/stdlib/PrintFloat.swift.gyb @@ -19,6 +19,8 @@ import SwiftPrivateLibcExtras import Glibc #elseif os(WASI) import WASILibc +#elseif canImport(Android) + import Android 
#elseif os(Windows) import CRT #else diff --git a/test/stdlib/Runtime.swift.gyb b/test/stdlib/Runtime.swift.gyb index c4f872f1961e7..6d5ef26e90559 100644 --- a/test/stdlib/Runtime.swift.gyb +++ b/test/stdlib/Runtime.swift.gyb @@ -17,6 +17,8 @@ import SwiftShims import Glibc #elseif os(WASI) import WASILibc +#elseif canImport(Android) + import Android #elseif os(Windows) import CRT import WinSDK diff --git a/test/stdlib/SwiftValueNSObject.swift b/test/stdlib/SwiftValueNSObject.swift index 8d04961756dca..11f51d746401a 100644 --- a/test/stdlib/SwiftValueNSObject.swift +++ b/test/stdlib/SwiftValueNSObject.swift @@ -90,12 +90,7 @@ func TestHashableEquals(_ e1: T, _ e2: T) { // This has not always been true for Equatable value types func TestEquatableEquals(_ e1: T, _ e2: T) { if e1 == e2 { -#if os(macOS) || os(iOS) || os(tvOS) || os(watchOS) || os(visionOS) - // Legacy: Swift Equatable is not used in ObjC - TestSwiftValueNSObjectNotEquals(e1 as AnyObject, e2 as AnyObject) -#else TestSwiftValueNSObjectEquals(e1 as AnyObject, e2 as AnyObject) -#endif } else { TestSwiftValueNSObjectNotEquals(e1 as AnyObject, e2 as AnyObject) } @@ -114,14 +109,8 @@ func TestHashable(_ h: T) // Test Obj-C hashValue for Swift types that are Equatable but not Hashable func TestEquatableHash(_ e: T) { -#if os(macOS) || os(iOS) || os(watchOS) || os(tvOS) || os(visionOS) - // Legacy behavior used the pointer value, which is - // incompatible with user-defined equality. 
- TestSwiftValueNSObjectDefaultHashValue(e as AnyObject) -#else // New behavior uses a constant hash value in this case TestSwiftValueNSObjectHashValue(e as AnyObject, 1) -#endif } func TestNonEquatableHash(_ e: T) diff --git a/test/stdlib/VarArgs.swift b/test/stdlib/VarArgs.swift index 139f66c8bfb90..b3f65d0bd6b03 100644 --- a/test/stdlib/VarArgs.swift +++ b/test/stdlib/VarArgs.swift @@ -27,6 +27,9 @@ runAllTests() #elseif os(WASI) import WASILibc typealias CGFloat = Double +#elseif canImport(Android) + import Android + typealias CGFloat = Double #elseif os(Windows) import CRT #if arch(x86_64) || arch(arm64) diff --git a/test/stdlib/mmap.swift b/test/stdlib/mmap.swift index cf335bf81810f..2695875543911 100644 --- a/test/stdlib/mmap.swift +++ b/test/stdlib/mmap.swift @@ -8,6 +8,10 @@ import StdlibUnittest import Darwin #elseif canImport(Glibc) import Glibc +#elseif canImport(Android) + import Android + // MAP_FAILED is not available on android. + let MAP_FAILED = UnsafeMutableRawPointer(bitPattern: -1) #else #error("Unsupported platform") #endif diff --git a/test/stdlib/tgmath_optimized.swift b/test/stdlib/tgmath_optimized.swift index 1d6de6ba3b7ee..369a7e8ab1266 100644 --- a/test/stdlib/tgmath_optimized.swift +++ b/test/stdlib/tgmath_optimized.swift @@ -10,6 +10,8 @@ import Glibc #elseif os(WASI) import WASILibc +#elseif canImport(Android) + import Android #elseif os(Windows) import CRT #else diff --git a/utils/build_swift/build_swift/driver_arguments.py b/utils/build_swift/build_swift/driver_arguments.py index 76750792a1b2e..034cba4e3830f 100644 --- a/utils/build_swift/build_swift/driver_arguments.py +++ b/utils/build_swift/build_swift/driver_arguments.py @@ -830,8 +830,10 @@ def create_argument_parser(): option('--build-ninja', toggle_true, help='build the Ninja tool') - option(['--build-lld'], toggle_true('build_lld'), + option(['--build-lld'], toggle_true('build_lld'), default=True, help='build lld as part of llvm') + option(['--skip-build-lld'], 
toggle_false('build_lld'), + help='skip building lld as part of llvm') option('--skip-build-clang-tools-extra', toggle_false('build_clang_tools_extra'), diff --git a/utils/build_swift/tests/expected_options.py b/utils/build_swift/tests/expected_options.py index a9b797b1c400b..77e435ebd52b1 100644 --- a/utils/build_swift/tests/expected_options.py +++ b/utils/build_swift/tests/expected_options.py @@ -74,7 +74,7 @@ 'build_libcxx': False, 'build_linux_static': False, 'build_ninja': False, - 'build_lld': False, + 'build_lld': True, 'build_osx': True, 'build_playgroundsupport': False, 'build_runtime_with_host_compiler': False, @@ -753,6 +753,7 @@ class BuildScriptImplOption(_BaseOption): DisableOption('--skip-build-zlib', dest='build_zlib'), DisableOption('--skip-build-curl', dest='build_curl'), DisableOption('--skip-build-compiler-rt', dest='build_compiler_rt'), + DisableOption('--skip-build-lld', dest='build_lld'), ChoicesOption('--compiler-vendor', choices=['none', 'apple']), diff --git a/utils/swift_build_support/swift_build_support/build_script_invocation.py b/utils/swift_build_support/swift_build_support/build_script_invocation.py index c8111bc6ad1ca..cb88b9b2e1529 100644 --- a/utils/swift_build_support/swift_build_support/build_script_invocation.py +++ b/utils/swift_build_support/swift_build_support/build_script_invocation.py @@ -463,16 +463,7 @@ def convert_to_impl_arguments(self): "--llvm-install-components=%s" % args.llvm_install_components ] - # On non-Darwin platforms, build lld so we can always have a - # linker that is compatible with the swift we are using to - # compile the stdlib. - # - # This makes it easier to build target stdlibs on systems that - # have old toolchains without more modern linker features. - # - # On Darwin, only build lld if explicitly requested using --build-lld. 
- should_build_lld = (platform.system() != 'Darwin' or args.build_lld) - if not should_build_lld: + if not args.build_lld: impl_args += [ "--skip-build-lld" ] diff --git a/utils/swift_build_support/swift_build_support/products/llvm.py b/utils/swift_build_support/swift_build_support/products/llvm.py index 96ff2e7fb5212..420f9ea48d946 100644 --- a/utils/swift_build_support/swift_build_support/products/llvm.py +++ b/utils/swift_build_support/swift_build_support/products/llvm.py @@ -324,13 +324,14 @@ def build(self, host_target): if self.args.build_clang_tools_extra: llvm_enable_projects.append('clang-tools-extra') - # Always build lld -- on non-Darwin so we can always have a + # Building lld is on by default -- on non-Darwin so we can always have a # linker that is compatible with the swift we are using to # compile the stdlib, but on Darwin too for Embedded Swift use cases. # # This makes it easier to build target stdlibs on systems that # have old toolchains without more modern linker features. 
- llvm_enable_projects.append('lld') + if self.args.build_lld: + llvm_enable_projects.append('lld') llvm_cmake_options.define('LLVM_ENABLE_PROJECTS', ';'.join(llvm_enable_projects)) diff --git a/utils/update_checkout/update-checkout-config.json b/utils/update_checkout/update-checkout-config.json index 61a9b6f02584a..f1c91b5707482 100644 --- a/utils/update_checkout/update-checkout-config.json +++ b/utils/update_checkout/update-checkout-config.json @@ -136,7 +136,7 @@ "swift-integration-tests": "main", "swift-xcode-playground-support": "main", "ninja": "v1.11.1", - "yams": "5.0.1", + "yams": "5.0.6", "cmake": "v3.24.2", "indexstore-db": "main", "sourcekit-lsp": "main", @@ -184,7 +184,7 @@ "swift-integration-tests": "release/6.0", "swift-xcode-playground-support": "release/6.0", "ninja": "v1.11.1", - "yams": "5.0.1", + "yams": "5.0.6", "cmake": "v3.24.2", "indexstore-db": "release/6.0", "sourcekit-lsp": "release/6.0", @@ -233,7 +233,7 @@ "swift-integration-tests": "release/5.10", "swift-xcode-playground-support": "release/5.10", "ninja": "v1.11.1", - "yams": "5.0.1", + "yams": "5.0.6", "cmake": "v3.24.2", "indexstore-db": "release/5.10", "sourcekit-lsp": "release/5.10", @@ -282,7 +282,7 @@ "swift-integration-tests": "main", "swift-xcode-playground-support": "main", "ninja": "release", - "yams": "5.0.1", + "yams": "5.0.6", "cmake": "v3.24.2", "indexstore-db": "main", "sourcekit-lsp": "main", @@ -664,7 +664,7 @@ "swift-integration-tests": "main", "swift-xcode-playground-support": "main", "ninja": "release", - "yams": "5.0.1", + "yams": "5.0.6", "cmake": "v3.24.2", "indexstore-db": "main", "sourcekit-lsp": "main", diff --git a/validation-test/Reflection/reflect_Enum_value.swift b/validation-test/Reflection/reflect_Enum_value.swift index bf5bcbe1e2c2f..d39dd100d8bb8 100644 --- a/validation-test/Reflection/reflect_Enum_value.swift +++ b/validation-test/Reflection/reflect_Enum_value.swift @@ -364,6 +364,13 @@ case leafE case leafF } +reflect(enumValue: 
OneIndirectPayload.child(.child(.leafF))) + +// CHECK: Reflecting an enum value. +// CHECK-NEXT: Type reference: +// CHECK-NEXT: (enum reflect_Enum_value.OneIndirectPayload) +// CHECK-NEXT: Value: .child(.child(.leafF)) + reflect(enumValue: OneIndirectPayload.child(.leafF)) // CHECK: Reflecting an enum value. @@ -378,6 +385,23 @@ reflect(enumValue: OneIndirectPayload.leafF) // CHECK-NEXT: (enum reflect_Enum_value.OneIndirectPayload) // CHECK-NEXT: Value: .leafF +enum ADT { + case A + case B(Int) +} + +enum GuineaPig { + case a + indirect case b(ADT) +} + +reflect(enumValue: GuineaPig.b(ADT.B(42))) + +// CHECK: Reflecting an enum value. +// CHECK-NEXT: Type reference: +// CHECK-NEXT: (enum reflect_Enum_value.GuineaPig) +// CHECK-NEXT: Value: .b(.B(_)) + class SimpleSwiftClass { let value = 7 } diff --git a/validation-test/Reflection/reflect_Enum_values5.swift b/validation-test/Reflection/reflect_Enum_values5.swift new file mode 100644 index 0000000000000..3e191958d17fd --- /dev/null +++ b/validation-test/Reflection/reflect_Enum_values5.swift @@ -0,0 +1,63 @@ +// RUN: %empty-directory(%t) +// RUN: %target-build-swift -lswiftSwiftReflectionTest %s -o %t/reflect_Enum_values5 +// RUN: %target-codesign %t/reflect_Enum_values5 + +// RUN: %target-run %target-swift-reflection-test %t/reflect_Enum_values5 | tee /dev/stderr | %FileCheck %s --dump-input=fail + +// REQUIRES: objc_interop +// REQUIRES: executable_test +// UNSUPPORTED: use_os_stdlib + +import SwiftReflectionTest +public enum E: Error { + public struct Context { + public let a: [any CodingKey] = [] + public let b: String = "abc" + public let c: Error? = nil + } + + case typeMismatch(Any.Type) + case valueNotFound(Any.Type, Context) + case keyNotFound(any CodingKey, Context) + case dataCorrupted(Context) +} + +public enum M { +case A(E) +case B(E, Int) +} + +reflect(enumValue: M.A(.typeMismatch(Int.self))) + +// CHECK: Reflecting an enum value. 
+// CHECK-NEXT: Type reference: +// CHECK-NEXT: (enum reflect_Enum_values5.M) +// CHECK-NEXT: Value: .A(.typeMismatch(_)) + +reflect(enumValue: M.A(.dataCorrupted(.init()))) + +// CHECK: Reflecting an enum value. +// CHECK-NEXT: Type reference: +// CHECK-NEXT: (enum reflect_Enum_values5.M) +// CHECK-NEXT: Value: .A(.dataCorrupted(_)) + +reflect(enumValue: M.B(.typeMismatch(Int.self), 74)) + +// CHECK: Reflecting an enum value. +// CHECK-NEXT: Type reference: +// CHECK-NEXT: (enum reflect_Enum_values5.M) +// Note: reflection tester only drills down into +// payloads that are a single enum. This payload is a tuple. +// CHECK-NEXT: Value: .B(_) + +reflect(enumValue: M.B(.dataCorrupted(.init()), 42)) + +// CHECK: Reflecting an enum value. +// CHECK-NEXT: Type reference: +// CHECK-NEXT: (enum reflect_Enum_values5.M) +// CHECK-NEXT: Value: .B(_) + +doneReflecting() + +// CHECK: Done. + diff --git a/validation-test/Reflection/reflect_Enum_values6.swift b/validation-test/Reflection/reflect_Enum_values6.swift new file mode 100644 index 0000000000000..13b34d36a09fd --- /dev/null +++ b/validation-test/Reflection/reflect_Enum_values6.swift @@ -0,0 +1,82 @@ +// RUN: %empty-directory(%t) +// RUN: %target-build-swift -lswiftSwiftReflectionTest %s -o %t/reflect_Enum_values6 +// RUN: %target-codesign %t/reflect_Enum_values6 + +// RUN: %target-run %target-swift-reflection-test %t/reflect_Enum_values6 | tee /dev/stderr | %FileCheck %s --dump-input=fail + +// REQUIRES: objc_interop +// REQUIRES: executable_test +// UNSUPPORTED: use_os_stdlib + +import SwiftReflectionTest + +public struct MyError : Error {} + +public enum E1 { +case A(Error) +case B(Error) +} + +// MemoryLayout.size == 8 ==> Error has spare bits + +reflect(enumValue: E1.A(MyError())) + +// CHECK: Reflecting an enum value. +// CHECK-NEXT: Type reference: +// CHECK-NEXT: (enum reflect_Enum_values6.E1) +// CHECK-NEXT: Value: .A(_) + +reflect(enumValue: E1.B(MyError())) + +// CHECK: Reflecting an enum value. 
+// CHECK-NEXT: Type reference: +// CHECK-NEXT: (enum reflect_Enum_values6.E1) +// CHECK-NEXT: Value: .B(_) + +public enum E2 { +case A(Error?) +case B(Error?) +} + +// MemoryLayout.size == 9 => Error? has no spare bits + +reflect(enumValue: E2.A(MyError())) + +// CHECK: Reflecting an enum value. +// CHECK-NEXT: Type reference: +// CHECK-NEXT: (enum reflect_Enum_values6.E2) +// CHECK-NEXT: Value: .A(.some(_)) + +reflect(enumValue: E2.B(MyError())) + +// CHECK: Reflecting an enum value. +// CHECK-NEXT: Type reference: +// CHECK-NEXT: (enum reflect_Enum_values6.E2) +// CHECK-NEXT: Value: .B(.some(_)) + + +public enum E3 { +case A(Any.Type) +case B(Any.Type) +} + +// MemoryLayout.size == 8 => Any.Type has spare bits + +reflect(enumValue: E3.A(Any.self)) + +// CHECK: Reflecting an enum value. +// CHECK-NEXT: Type reference: +// CHECK-NEXT: (enum reflect_Enum_values6.E3) +// CHECK-NEXT: Value: .A(_) + +reflect(enumValue: E3.B(Any.self)) + +// CHECK: Reflecting an enum value. +// CHECK-NEXT: Type reference: +// CHECK-NEXT: (enum reflect_Enum_values6.E3) +// CHECK-NEXT: Value: .B(_) + +doneReflecting() + +// CHECK: Done. 
+ diff --git a/validation-test/StdlibUnittest/ChildProcessShutdown/FailIfChildCrashesDuringShutdown.swift b/validation-test/StdlibUnittest/ChildProcessShutdown/FailIfChildCrashesDuringShutdown.swift index b596a2015b9b3..0408bc81bf5ee 100644 --- a/validation-test/StdlibUnittest/ChildProcessShutdown/FailIfChildCrashesDuringShutdown.swift +++ b/validation-test/StdlibUnittest/ChildProcessShutdown/FailIfChildCrashesDuringShutdown.swift @@ -10,6 +10,8 @@ import StdlibUnittest import Glibc #elseif os(Windows) import MSVCRT +#elseif canImport(Android) + import Android #else #error("Unsupported platform") #endif diff --git a/validation-test/StdlibUnittest/ChildProcessShutdown/FailIfChildExitsDuringShutdown.swift b/validation-test/StdlibUnittest/ChildProcessShutdown/FailIfChildExitsDuringShutdown.swift index 888274eb243d9..e024a0e070810 100644 --- a/validation-test/StdlibUnittest/ChildProcessShutdown/FailIfChildExitsDuringShutdown.swift +++ b/validation-test/StdlibUnittest/ChildProcessShutdown/FailIfChildExitsDuringShutdown.swift @@ -8,6 +8,8 @@ import StdlibUnittest import Glibc #elseif os(Windows) import MSVCRT +#elseif canImport(Android) + import Android #else #error("Unsupported platform") #endif diff --git a/validation-test/StdlibUnittest/ChildProcessShutdown/PassIfChildCrashedDuringTestExecution.swift b/validation-test/StdlibUnittest/ChildProcessShutdown/PassIfChildCrashedDuringTestExecution.swift index 77af5601fd0be..8687bde2c1843 100644 --- a/validation-test/StdlibUnittest/ChildProcessShutdown/PassIfChildCrashedDuringTestExecution.swift +++ b/validation-test/StdlibUnittest/ChildProcessShutdown/PassIfChildCrashedDuringTestExecution.swift @@ -8,6 +8,8 @@ import StdlibUnittest import Glibc #elseif os(Windows) import MSVCRT +#elseif canImport(Android) + import Android #else #error("Unsupported platform") #endif diff --git a/validation-test/StdlibUnittest/ChildProcessShutdown/RequireOwnProcess.swift 
b/validation-test/StdlibUnittest/ChildProcessShutdown/RequireOwnProcess.swift index 2bfa62c5252bd..a74e4a25165d8 100644 --- a/validation-test/StdlibUnittest/ChildProcessShutdown/RequireOwnProcess.swift +++ b/validation-test/StdlibUnittest/ChildProcessShutdown/RequireOwnProcess.swift @@ -8,6 +8,8 @@ import StdlibUnittest import Glibc #elseif os(Windows) import MSVCRT +#elseif canImport(Android) + import Android #else #error("Unsupported platform") #endif diff --git a/validation-test/StdlibUnittest/Stdin.swift b/validation-test/StdlibUnittest/Stdin.swift index 056a92c9ca04e..a6c144e56eabd 100644 --- a/validation-test/StdlibUnittest/Stdin.swift +++ b/validation-test/StdlibUnittest/Stdin.swift @@ -10,6 +10,8 @@ import StdlibUnittest import Glibc #elseif os(Windows) import MSVCRT +#elseif canImport(Android) + import Android #else #error("Unsupported platform") #endif diff --git a/validation-test/stdlib/Glibc.swift b/validation-test/stdlib/Glibc.swift index 70276d29e0b81..1b004367078a8 100644 --- a/validation-test/stdlib/Glibc.swift +++ b/validation-test/stdlib/Glibc.swift @@ -6,7 +6,7 @@ // UNSUPPORTED: OS=tvos // UNSUPPORTED: OS=watchos -// REQUIRES: OS=linux-gnu || OS=linux-androideabi || OS=linux-android +// REQUIRES: OS=linux-gnu import Swift import StdlibUnittest diff --git a/validation-test/stdlib/POSIXErrorCode.swift b/validation-test/stdlib/POSIXErrorCode.swift index 2d3a442cd125e..5f126a314ffcb 100644 --- a/validation-test/stdlib/POSIXErrorCode.swift +++ b/validation-test/stdlib/POSIXErrorCode.swift @@ -10,6 +10,8 @@ import StdlibUnittest import Darwin #elseif canImport(Glibc) import Glibc +#elseif canImport(Android) + import Android #else #error("Unsupported platform") #endif diff --git a/validation-test/stdlib/String.swift b/validation-test/stdlib/String.swift index 32de3fa68542c..cc2219af7e73f 100644 --- a/validation-test/stdlib/String.swift +++ b/validation-test/stdlib/String.swift @@ -1235,6 +1235,8 @@ StringTests.test("Conversions") { #if canImport(Glibc) 
import Glibc +#elseif canImport(Android) + import Android #endif StringTests.test("lowercased()") {