diff --git a/stdlib/public/core/Hasher.swift b/stdlib/public/core/Hasher.swift
index c1850959d2c30..a419d0153179a 100644
--- a/stdlib/public/core/Hasher.swift
+++ b/stdlib/public/core/Hasher.swift
@@ -16,33 +16,6 @@
 
 import SwiftShims
 
-// FIXME: Remove @usableFromInline once Hasher is resilient.
-// rdar://problem/38549901
-@usableFromInline
-internal protocol _HasherCore {
-  init(rawSeed: (UInt64, UInt64))
-  mutating func compress(_ value: UInt64)
-  mutating func finalize(tailAndByteCount: UInt64) -> UInt64
-}
-
-extension _HasherCore {
-  @inline(__always)
-  internal init() {
-    self.init(rawSeed: Hasher._executionSeed)
-  }
-
-  @inline(__always)
-  internal init(seed: Int) {
-    let executionSeed = Hasher._executionSeed
-    // Prevent sign-extending the supplied seed; this makes testing slightly
-    // easier.
-    let seed = UInt(bitPattern: seed)
-    self.init(rawSeed: (
-      executionSeed.0 ^ UInt64(truncatingIfNeeded: seed),
-      executionSeed.1))
-  }
-}
-
 @inline(__always)
 internal func _loadPartialUnalignedUInt64LE(
   _ p: UnsafeRawPointer,
@@ -78,197 +51,201 @@ internal func _loadPartialUnalignedUInt64LE(
   }
 }
 
-/// This is a buffer for segmenting arbitrary data into 8-byte chunks. Buffer
-/// storage is represented by a single 64-bit value in the format used by the
-/// finalization step of SipHash. (The least significant 56 bits hold the
-/// trailing bytes, while the most significant 8 bits hold the count of bytes
-/// appended so far, modulo 256. The count of bytes currently stored in the
-/// buffer is in the lower three bits of the byte count.)
-// FIXME: Remove @usableFromInline and @_fixed_layout once Hasher is resilient.
-// rdar://problem/38549901
-@usableFromInline @_fixed_layout
-internal struct _HasherTailBuffer {
-  // msb                                                             lsb
-  // +---------+-------+-------+-------+-------+-------+-------+-------+
-  // |byteCount|                 tail (<= 56 bits)                     |
-  // +---------+-------+-------+-------+-------+-------+-------+-------+
-  internal var value: UInt64
-
-  @inline(__always)
-  internal init() {
-    self.value = 0
-  }
-
-  @inline(__always)
-  internal init(tail: UInt64, byteCount: UInt64) {
-    // byteCount can be any value, but we only keep the lower 8 bits. (The
-    // lower three bits specify the count of bytes stored in this buffer.)
-    // FIXME: This should be a single expression, but it causes exponential
-    // behavior in the expression type checker .
-    let shiftedByteCount: UInt64 = ((byteCount & 7) << 3)
-    let mask: UInt64 = (1 << shiftedByteCount - 1)
-    _sanityCheck(tail & ~mask == 0)
-    self.value = (byteCount &<< 56 | tail)
-  }
-
-  @inline(__always)
-  internal init(tail: UInt64, byteCount: Int) {
-    self.init(tail: tail, byteCount: UInt64(truncatingIfNeeded: byteCount))
-  }
-
-  internal var tail: UInt64 {
-    @inline(__always)
-    get { return value & ~(0xFF &<< 56) }
-  }
-
-  internal var byteCount: UInt64 {
-    @inline(__always)
-    get { return value &>> 56 }
-  }
-
-  @inline(__always)
-  internal mutating func append(_ bytes: UInt64) -> UInt64 {
-    let c = byteCount & 7
-    if c == 0 {
-      value = value &+ (8 &<< 56)
-      return bytes
-    }
-    let shift = c &<< 3
-    let chunk = tail | (bytes &<< shift)
-    value = (((value &>> 56) &+ 8) &<< 56) | (bytes &>> (64 - shift))
-    return chunk
-  }
-
-  @inline(__always)
-  internal
-  mutating func append(_ bytes: UInt64, count: UInt64) -> UInt64? {
-    _sanityCheck(count >= 0 && count < 8)
-    _sanityCheck(bytes & ~((1 &<< (count &<< 3)) &- 1) == 0)
-    let c = byteCount & 7
-    let shift = c &<< 3
-    if c + count < 8 {
-      value = (value | (bytes &<< shift)) &+ (count &<< 56)
-      return nil
-    }
-    let chunk = tail | (bytes &<< shift)
-    value = ((value &>> 56) &+ count) &<< 56
-    if c + count > 8 {
-      value |= bytes &>> (64 - shift)
-    }
-    return chunk
-  }
-}
-
-// FIXME: Remove @usableFromInline and @_fixed_layout once Hasher is resilient.
-// rdar://problem/38549901
-@usableFromInline @_fixed_layout
-internal struct _BufferingHasher<Core: _HasherCore> {
-  private var _buffer: _HasherTailBuffer
-  private var _core: Core
-
-  @inline(__always)
-  internal init(core: Core) {
-    self._buffer = _HasherTailBuffer()
-    self._core = core
-  }
-
-  @inline(__always)
-  internal init() {
-    self.init(core: Core())
-  }
-
-  @inline(__always)
-  internal init(seed: Int) {
-    self.init(core: Core(seed: seed))
-  }
-
-  @inline(__always)
-  internal mutating func combine(_ value: UInt) {
-#if arch(i386) || arch(arm)
-    combine(UInt32(truncatingIfNeeded: value))
-#else
-    combine(UInt64(truncatingIfNeeded: value))
-#endif
-  }
-
-  @inline(__always)
-  internal mutating func combine(_ value: UInt64) {
-    _core.compress(_buffer.append(value))
-  }
-
-  @inline(__always)
-  internal mutating func combine(_ value: UInt32) {
-    let value = UInt64(truncatingIfNeeded: value)
-    if let chunk = _buffer.append(value, count: 4) {
-      _core.compress(chunk)
-    }
-  }
-
-  @inline(__always)
-  internal mutating func combine(_ value: UInt16) {
-    let value = UInt64(truncatingIfNeeded: value)
-    if let chunk = _buffer.append(value, count: 2) {
-      _core.compress(chunk)
-    }
-  }
-
-  @inline(__always)
-  internal mutating func combine(_ value: UInt8) {
-    let value = UInt64(truncatingIfNeeded: value)
-    if let chunk = _buffer.append(value, count: 1) {
-      _core.compress(chunk)
-    }
-  }
-
-  @inline(__always)
-  internal mutating func combine(bytes: UInt64, count: Int) {
-    _sanityCheck(count >= 0 && count < 8)
-    let count = UInt64(truncatingIfNeeded: count)
-    if let chunk = _buffer.append(bytes, count: count) {
-      _core.compress(chunk)
-    }
-  }
-
-  @inline(__always)
-  internal mutating func combine(bytes: UnsafeRawBufferPointer) {
-    var remaining = bytes.count
-    guard remaining > 0 else { return }
-    var data = bytes.baseAddress!
-
-    // Load first unaligned partial word of data
-    do {
-      let start = UInt(bitPattern: data)
-      let end = _roundUp(start, toAlignment: MemoryLayout<UInt64>.alignment)
-      let c = min(remaining, Int(end - start))
-      if c > 0 {
-        let chunk = _loadPartialUnalignedUInt64LE(data, byteCount: c)
-        combine(bytes: chunk, count: c)
-        data += c
-        remaining -= c
-      }
-    }
-    _sanityCheck(
-      remaining == 0 ||
-      Int(bitPattern: data) & (MemoryLayout<UInt64>.alignment - 1) == 0)
-
-    // Load as many aligned words as there are in the input buffer
-    while remaining >= MemoryLayout<UInt64>.size {
-      combine(UInt64(littleEndian: data.load(as: UInt64.self)))
-      data += MemoryLayout<UInt64>.size
-      remaining -= MemoryLayout<UInt64>.size
-    }
-
-    // Load last partial word of data
-    _sanityCheck(remaining >= 0 && remaining < 8)
-    if remaining > 0 {
-      let chunk = _loadPartialUnalignedUInt64LE(data, byteCount: remaining)
-      combine(bytes: chunk, count: remaining)
-    }
-  }
-
-  @inline(__always)
-  internal mutating func finalize() -> UInt64 {
-    return _core.finalize(tailAndByteCount: _buffer.value)
+extension Hasher {
+  /// This is a buffer for segmenting arbitrary data into 8-byte chunks. Buffer
+  /// storage is represented by a single 64-bit value in the format used by the
+  /// finalization step of SipHash. (The least significant 56 bits hold the
+  /// trailing bytes, while the most significant 8 bits hold the count of bytes
+  /// appended so far, modulo 256. The count of bytes currently stored in the
+  /// buffer is in the lower three bits of the byte count.)
+  // FIXME: Remove @usableFromInline and @_fixed_layout once Hasher is resilient.
+  // rdar://problem/38549901
+  @usableFromInline @_fixed_layout
+  internal struct _TailBuffer {
+    // msb                                                             lsb
+    // +---------+-------+-------+-------+-------+-------+-------+-------+
+    // |byteCount|                 tail (<= 56 bits)                     |
+    // +---------+-------+-------+-------+-------+-------+-------+-------+
+    internal var value: UInt64
+
+    @inline(__always)
+    internal init() {
+      self.value = 0
+    }
+
+    @inline(__always)
+    internal init(tail: UInt64, byteCount: UInt64) {
+      // byteCount can be any value, but we only keep the lower 8 bits. (The
+      // lower three bits specify the count of bytes stored in this buffer.)
+      // FIXME: This should be a single expression, but it causes exponential
+      // behavior in the expression type checker .
+      let shiftedByteCount: UInt64 = ((byteCount & 7) << 3)
+      let mask: UInt64 = (1 << shiftedByteCount - 1)
+      _sanityCheck(tail & ~mask == 0)
+      self.value = (byteCount &<< 56 | tail)
+    }
+
+    @inline(__always)
+    internal init(tail: UInt64, byteCount: Int) {
+      self.init(tail: tail, byteCount: UInt64(truncatingIfNeeded: byteCount))
+    }
+
+    internal var tail: UInt64 {
+      @inline(__always)
+      get { return value & ~(0xFF &<< 56) }
+    }
+
+    internal var byteCount: UInt64 {
+      @inline(__always)
+      get { return value &>> 56 }
+    }
+
+    @inline(__always)
+    internal mutating func append(_ bytes: UInt64) -> UInt64 {
+      let c = byteCount & 7
+      if c == 0 {
+        value = value &+ (8 &<< 56)
+        return bytes
+      }
+      let shift = c &<< 3
+      let chunk = tail | (bytes &<< shift)
+      value = (((value &>> 56) &+ 8) &<< 56) | (bytes &>> (64 - shift))
+      return chunk
+    }
+
+    @inline(__always)
+    internal
+    mutating func append(_ bytes: UInt64, count: UInt64) -> UInt64? {
+      _sanityCheck(count >= 0 && count < 8)
+      _sanityCheck(bytes & ~((1 &<< (count &<< 3)) &- 1) == 0)
+      let c = byteCount & 7
+      let shift = c &<< 3
+      if c + count < 8 {
+        value = (value | (bytes &<< shift)) &+ (count &<< 56)
+        return nil
+      }
+      let chunk = tail | (bytes &<< shift)
+      value = ((value &>> 56) &+ count) &<< 56
+      if c + count > 8 {
+        value |= bytes &>> (64 - shift)
+      }
+      return chunk
+    }
+  }
+}
+
+extension Hasher {
+  // FIXME: Remove @usableFromInline and @_fixed_layout once Hasher is resilient.
+  // rdar://problem/38549901
+  @usableFromInline @_fixed_layout
+  internal struct _Core {
+    private var _buffer: _TailBuffer
+    private var _state: Hasher._State
+
+    @inline(__always)
+    internal init(state: Hasher._State) {
+      self._buffer = _TailBuffer()
+      self._state = state
+    }
+
+    @inline(__always)
+    internal init() {
+      self.init(state: _State())
+    }
+
+    @inline(__always)
+    internal init(seed: Int) {
+      self.init(state: _State(seed: seed))
+    }
+
+    @inline(__always)
+    internal mutating func combine(_ value: UInt) {
+#if arch(i386) || arch(arm)
+      combine(UInt32(truncatingIfNeeded: value))
+#else
+      combine(UInt64(truncatingIfNeeded: value))
+#endif
+    }
+
+    @inline(__always)
+    internal mutating func combine(_ value: UInt64) {
+      _state.compress(_buffer.append(value))
+    }
+
+    @inline(__always)
+    internal mutating func combine(_ value: UInt32) {
+      let value = UInt64(truncatingIfNeeded: value)
+      if let chunk = _buffer.append(value, count: 4) {
+        _state.compress(chunk)
+      }
+    }
+
+    @inline(__always)
+    internal mutating func combine(_ value: UInt16) {
+      let value = UInt64(truncatingIfNeeded: value)
+      if let chunk = _buffer.append(value, count: 2) {
+        _state.compress(chunk)
+      }
+    }
+
+    @inline(__always)
+    internal mutating func combine(_ value: UInt8) {
+      let value = UInt64(truncatingIfNeeded: value)
+      if let chunk = _buffer.append(value, count: 1) {
+        _state.compress(chunk)
+      }
+    }
+
+    @inline(__always)
+    internal mutating func combine(bytes: UInt64, count: Int) {
+      _sanityCheck(count >= 0 && count < 8)
+      let count = UInt64(truncatingIfNeeded: count)
+      if let chunk = _buffer.append(bytes, count: count) {
+        _state.compress(chunk)
+      }
+    }
+
+    @inline(__always)
+    internal mutating func combine(bytes: UnsafeRawBufferPointer) {
+      var remaining = bytes.count
+      guard remaining > 0 else { return }
+      var data = bytes.baseAddress!
+
+      // Load first unaligned partial word of data
+      do {
+        let start = UInt(bitPattern: data)
+        let end = _roundUp(start, toAlignment: MemoryLayout<UInt64>.alignment)
+        let c = min(remaining, Int(end - start))
+        if c > 0 {
+          let chunk = _loadPartialUnalignedUInt64LE(data, byteCount: c)
+          combine(bytes: chunk, count: c)
+          data += c
+          remaining -= c
+        }
+      }
+      _sanityCheck(
+        remaining == 0 ||
+        Int(bitPattern: data) & (MemoryLayout<UInt64>.alignment - 1) == 0)
+
+      // Load as many aligned words as there are in the input buffer
+      while remaining >= MemoryLayout<UInt64>.size {
+        combine(UInt64(littleEndian: data.load(as: UInt64.self)))
+        data += MemoryLayout<UInt64>.size
+        remaining -= MemoryLayout<UInt64>.size
+      }
+
+      // Load last partial word of data
+      _sanityCheck(remaining >= 0 && remaining < 8)
+      if remaining > 0 {
+        let chunk = _loadPartialUnalignedUInt64LE(data, byteCount: remaining)
+        combine(bytes: chunk, count: remaining)
+      }
+    }
+
+    @inline(__always)
+    internal mutating func finalize() -> UInt64 {
+      return _state.finalize(tailAndByteCount: _buffer.value)
+    }
   }
 }
 
@@ -297,12 +274,7 @@ internal struct _BufferingHasher<Core: _HasherCore> {
 /// versions of the standard library.
 @_fixed_layout // FIXME: Should be resilient (rdar://problem/38549901)
 public struct Hasher {
-  // FIXME: Remove @usableFromInline once Hasher is resilient.
-  // rdar://problem/38549901
-  @usableFromInline
-  internal typealias _BufferingCore = _BufferingHasher<_Core>
-
-  internal var _core: _BufferingCore
+  internal var _core: _Core
 
   /// Creates a new hasher.
   ///
@@ -310,7 +282,7 @@ public struct Hasher {
   /// startup, usually from a high-quality random source.
   @_effects(releasenone)
   public init() {
-    self._core = _BufferingCore()
+    self._core = _Core()
   }
 
   /// Initialize a new hasher using the specified seed value.
@@ -318,14 +290,14 @@ public struct Hasher {
   @usableFromInline
   @_effects(releasenone)
   internal init(_seed: Int) {
-    self._core = _BufferingCore(seed: _seed)
+    self._core = _Core(seed: _seed)
   }
 
   /// Initialize a new hasher using the specified seed value.
   @usableFromInline // @testable
   @_effects(releasenone)
   internal init(_rawSeed: (UInt64, UInt64)) {
-    self._core = _BufferingCore(core: _Core(rawSeed: _rawSeed))
+    self._core = _Core(state: _State(rawSeed: _rawSeed))
   }
 
   /// Indicates whether we're running in an environment where hashing needs to
@@ -441,27 +413,27 @@ public struct Hasher {
   @_effects(readnone)
   @usableFromInline
   internal static func _hash(seed: Int, _ value: UInt64) -> Int {
-    var core = _Core(seed: seed)
-    core.compress(value)
-    let tbc = _HasherTailBuffer(tail: 0, byteCount: 8)
-    return Int(truncatingIfNeeded: core.finalize(tailAndByteCount: tbc.value))
+    var state = _State(seed: seed)
+    state.compress(value)
+    let tbc = _TailBuffer(tail: 0, byteCount: 8)
+    return Int(truncatingIfNeeded: state.finalize(tailAndByteCount: tbc.value))
   }
 
   @_effects(readnone)
   @usableFromInline
   internal static func _hash(seed: Int, _ value: UInt) -> Int {
-    var core = _Core(seed: seed)
+    var state = _State(seed: seed)
 #if arch(i386) || arch(arm)
     _sanityCheck(UInt.bitWidth < UInt64.bitWidth)
-    let tbc = _HasherTailBuffer(
+    let tbc = _TailBuffer(
       tail: UInt64(truncatingIfNeeded: value),
       byteCount: UInt.bitWidth &>> 3)
 #else
     _sanityCheck(UInt.bitWidth == UInt64.bitWidth)
-    core.compress(UInt64(truncatingIfNeeded: value))
-    let tbc = _HasherTailBuffer(tail: 0, byteCount: 8)
+    state.compress(UInt64(truncatingIfNeeded: value))
+    let tbc = _TailBuffer(tail: 0, byteCount: 8)
 #endif
-    return Int(truncatingIfNeeded: core.finalize(tailAndByteCount: tbc.value))
+    return Int(truncatingIfNeeded: state.finalize(tailAndByteCount: tbc.value))
   }
 
   @_effects(readnone)
@@ -471,9 +443,9 @@ public struct Hasher {
     bytes value: UInt64,
     count: Int) -> Int {
     _sanityCheck(count >= 0 && count < 8)
-    var core = _Core(seed: seed)
-    let tbc = _HasherTailBuffer(tail: value, byteCount: count)
-    return Int(truncatingIfNeeded: core.finalize(tailAndByteCount: tbc.value))
+    var state = _State(seed: seed)
+    let tbc = _TailBuffer(tail: value, byteCount: count)
+    return Int(truncatingIfNeeded: state.finalize(tailAndByteCount: tbc.value))
   }
 
   @_effects(readnone)
@@ -481,7 +453,7 @@ public struct Hasher {
   internal static func _hash(
     seed: Int,
     bytes: UnsafeRawBufferPointer) -> Int {
-    var core = _BufferingCore(seed: seed)
+    var core = _Core(seed: seed)
     core.combine(bytes: bytes)
     return Int(truncatingIfNeeded: core.finalize())
   }
diff --git a/stdlib/public/core/SipHash.swift b/stdlib/public/core/SipHash.swift
index e13be21b373a7..3aa81d4db5d0b 100644
--- a/stdlib/public/core/SipHash.swift
+++ b/stdlib/public/core/SipHash.swift
@@ -25,82 +25,89 @@ extension Hasher {
   @usableFromInline @_fixed_layout
   internal struct _State {
     // "somepseudorandomlygeneratedbytes"
-    fileprivate var v0: UInt64 = 0x736f6d6570736575
-    fileprivate var v1: UInt64 = 0x646f72616e646f6d
-    fileprivate var v2: UInt64 = 0x6c7967656e657261
-    fileprivate var v3: UInt64 = 0x7465646279746573
+    private var v0: UInt64 = 0x736f6d6570736575
+    private var v1: UInt64 = 0x646f72616e646f6d
+    private var v2: UInt64 = 0x6c7967656e657261
+    private var v3: UInt64 = 0x7465646279746573
     // The fields below are reserved for future use. They aren't currently used.
-    fileprivate var v4: UInt64 = 0
-    fileprivate var v5: UInt64 = 0
-    fileprivate var v6: UInt64 = 0
-    fileprivate var v7: UInt64 = 0
+    private var v4: UInt64 = 0
+    private var v5: UInt64 = 0
+    private var v6: UInt64 = 0
+    private var v7: UInt64 = 0
 
     @inline(__always)
-    fileprivate init(rawSeed: (UInt64, UInt64)) {
+    internal init(rawSeed: (UInt64, UInt64)) {
       v3 ^= rawSeed.1
       v2 ^= rawSeed.0
       v1 ^= rawSeed.1
      v0 ^= rawSeed.0
     }
+  }
+}
 
-    @inline(__always)
-    fileprivate
-    static func _rotateLeft(_ x: UInt64, by amount: UInt64) -> UInt64 {
-      return (x &<< amount) | (x &>> (64 - amount))
-    }
+extension Hasher._State {
+  @inline(__always)
+  private static func _rotateLeft(_ x: UInt64, by amount: UInt64) -> UInt64 {
+    return (x &<< amount) | (x &>> (64 - amount))
+  }
 
-    @inline(__always)
-    fileprivate mutating func _round() {
-      v0 = v0 &+ v1
-      v1 = Hasher._State._rotateLeft(v1, by: 13)
-      v1 ^= v0
-      v0 = Hasher._State._rotateLeft(v0, by: 32)
-      v2 = v2 &+ v3
-      v3 = Hasher._State._rotateLeft(v3, by: 16)
-      v3 ^= v2
-      v0 = v0 &+ v3
-      v3 = Hasher._State._rotateLeft(v3, by: 21)
-      v3 ^= v0
-      v2 = v2 &+ v1
-      v1 = Hasher._State._rotateLeft(v1, by: 17)
-      v1 ^= v2
-      v2 = Hasher._State._rotateLeft(v2, by: 32)
-    }
+  @inline(__always)
+  private mutating func _round() {
+    v0 = v0 &+ v1
+    v1 = Hasher._State._rotateLeft(v1, by: 13)
+    v1 ^= v0
+    v0 = Hasher._State._rotateLeft(v0, by: 32)
+    v2 = v2 &+ v3
+    v3 = Hasher._State._rotateLeft(v3, by: 16)
+    v3 ^= v2
+    v0 = v0 &+ v3
+    v3 = Hasher._State._rotateLeft(v3, by: 21)
+    v3 ^= v0
+    v2 = v2 &+ v1
+    v1 = Hasher._State._rotateLeft(v1, by: 17)
+    v1 ^= v2
+    v2 = Hasher._State._rotateLeft(v2, by: 32)
+  }
 
-    @inline(__always)
-    fileprivate func _extract() -> UInt64 {
-      return v0 ^ v1 ^ v2 ^ v3
-    }
+  @inline(__always)
+  private func _extract() -> UInt64 {
+    return v0 ^ v1 ^ v2 ^ v3
   }
 }
 
-extension Hasher {
-  // FIXME: Remove @usableFromInline and @_fixed_layout once Hasher is resilient.
-  // rdar://problem/38549901
-  @usableFromInline @_fixed_layout
-  internal struct _Core: _HasherCore {
-    private var _state: Hasher._State
+extension Hasher._State {
+  @inline(__always)
+  internal mutating func compress(_ m: UInt64) {
+    v3 ^= m
+    _round()
+    v0 ^= m
+  }
 
-    @inline(__always)
-    internal init(rawSeed: (UInt64, UInt64)) {
-      _state = Hasher._State(rawSeed: rawSeed)
+  @inline(__always)
+  internal mutating func finalize(tailAndByteCount: UInt64) -> UInt64 {
+    compress(tailAndByteCount)
+    v2 ^= 0xff
+    for _ in 0..<3 {
+      _round()
     }
+    return _extract()
+  }
+}
 
-    @inline(__always)
-    internal mutating func compress(_ m: UInt64) {
-      _state.v3 ^= m
-      _state._round()
-      _state.v0 ^= m
-    }
+extension Hasher._State {
+  @inline(__always)
+  internal init() {
+    self.init(rawSeed: Hasher._executionSeed)
+  }
 
-    @inline(__always)
-    internal mutating func finalize(tailAndByteCount: UInt64) -> UInt64 {
-      compress(tailAndByteCount)
-      _state.v2 ^= 0xff
-      for _ in 0..<3 {
-        _state._round()
-      }
-      return _state._extract()
-    }
+  @inline(__always)
+  internal init(seed: Int) {
+    let executionSeed = Hasher._executionSeed
+    // Prevent sign-extending the supplied seed; this makes testing slightly
+    // easier.
+    let seed = UInt(bitPattern: seed)
+    self.init(rawSeed: (
+      executionSeed.0 ^ UInt64(truncatingIfNeeded: seed),
+      executionSeed.1))
   }
 }
diff --git a/test/api-digester/Outputs/stability-stdlib-abi.swift.expected b/test/api-digester/Outputs/stability-stdlib-abi.swift.expected
index 5dfefb6e3f2ac..aba0c12dfe573 100644
--- a/test/api-digester/Outputs/stability-stdlib-abi.swift.expected
+++ b/test/api-digester/Outputs/stability-stdlib-abi.swift.expected
@@ -45,6 +45,9 @@ Func _swift_stdlib_atomicStoreInt32(object:desired:) has been removed
 Func _swift_stdlib_atomicStoreInt64(object:desired:) has been removed
 Func _swift_stdlib_atomicStoreUInt32(object:desired:) has been removed
 Func _swift_stdlib_atomicStoreUInt64(object:desired:) has been removed
+Protocol _HasherCore has been removed
+Struct _BufferingHasher has been removed
+Struct _HasherTailBuffer has been removed
 
 /* Moved Decls */
 
@@ -53,6 +56,10 @@ Func MutableCollection._partition(within:by:) has been renamed to Func MutableCo
 
 /* Type Changes */
 Func MutableCollection._partition(within:by:) has parameter 0 type change from Range<τ_0_0.Index> to (τ_0_0.Element) throws -> Bool
+Var Hasher._core has declared type change from _BufferingHasher to Hasher._Core
+Var Hasher._Core._buffer is added to a non-resilient type
+Var Hasher._Core._state in a non-resilient type changes position from 0 to 1
+Struct Hasher._Core has removed conformance to _HasherCore
 
 /* Decl Attribute changes */
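
The sketch below is not part of the patch; it is a standalone, compilable model of the layering this change settles on: a 64-bit tail buffer that segments input into 8-byte chunks and feeds them into a SipHash-1-3 state, mirroring how Hasher._Core composes _TailBuffer and _State. The type and member names are hypothetical, and the fixed rawSeed stands in for Hasher._executionSeed, which the real Hasher randomizes at process startup.

// Illustrative sketch only -- hypothetical names, fixed seed.
struct SipHashSketch {
  // "somepseudorandomlygeneratedbytes", as in Hasher._State.
  private var v0: UInt64 = 0x736f6d6570736575
  private var v1: UInt64 = 0x646f72616e646f6d
  private var v2: UInt64 = 0x6c7967656e657261
  private var v3: UInt64 = 0x7465646279746573
  // Tail buffer: low 56 bits hold pending bytes, high 8 bits hold the byte count.
  private var buffer: UInt64 = 0

  init(rawSeed: (UInt64, UInt64)) {
    v3 ^= rawSeed.1
    v2 ^= rawSeed.0
    v1 ^= rawSeed.1
    v0 ^= rawSeed.0
  }

  private static func rotateLeft(_ x: UInt64, by amount: UInt64) -> UInt64 {
    return (x &<< amount) | (x &>> (64 - amount))
  }

  // One SipHash round, matching Hasher._State._round() in the patch.
  private mutating func round() {
    v0 = v0 &+ v1; v1 = SipHashSketch.rotateLeft(v1, by: 13); v1 ^= v0
    v0 = SipHashSketch.rotateLeft(v0, by: 32)
    v2 = v2 &+ v3; v3 = SipHashSketch.rotateLeft(v3, by: 16); v3 ^= v2
    v0 = v0 &+ v3; v3 = SipHashSketch.rotateLeft(v3, by: 21); v3 ^= v0
    v2 = v2 &+ v1; v1 = SipHashSketch.rotateLeft(v1, by: 17); v1 ^= v2
    v2 = SipHashSketch.rotateLeft(v2, by: 32)
  }

  // Compression step, matching _State.compress(_:).
  private mutating func compress(_ m: UInt64) {
    v3 ^= m
    round()
    v0 ^= m
  }

  // Appends one full 64-bit word through the tail buffer, the way
  // _Core.combine(_: UInt64) routes words into _State.compress.
  mutating func combine(_ word: UInt64) {
    let c = (buffer &>> 56) & 7          // bytes currently buffered (0...7)
    if c == 0 {
      buffer = buffer &+ (8 &<< 56)      // bump the byte count by 8
      compress(word)
      return
    }
    let shift = c &<< 3
    let tail = buffer & ~(0xFF &<< 56)
    let chunk = tail | (word &<< shift)  // complete the pending 8-byte chunk
    buffer = (((buffer &>> 56) &+ 8) &<< 56) | (word &>> (64 - shift))
    compress(chunk)
  }

  // Finalization: compress the tail-and-count word, then run three rounds,
  // as _State.finalize(tailAndByteCount:) does.
  mutating func finalize() -> UInt64 {
    compress(buffer)
    v2 ^= 0xff
    for _ in 0..<3 { round() }
    return v0 ^ v1 ^ v2 ^ v3
  }
}

// Usage: hash two words with a fixed (non-random) seed.
var sketch = SipHashSketch(rawSeed: (0, 0))
sketch.combine(42)
sketch.combine(0xdead_beef)
print(String(sketch.finalize(), radix: 16))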