      Int32.self)
  let oncePtr: UnsafeRawPointer?
  if oncePtrOffset != 0 {
    let theOncePtr = _resolveRelativeAddress(oncePtrPtr, oncePtrOffset)
    oncePtr = theOncePtr

    // See whether we already instantiated this key path.
    // This is a non-atomic load because the instantiated pointer will be
    // written with a release barrier, and loads of the instantiated key path
    // ought to carry a dependency through this loaded pointer.
    let existingInstance = theOncePtr.load(as: UnsafeRawPointer?.self)

    if let existingInstance = existingInstance {
      // Return the instantiated object at +1.
      let object = Unmanaged<AnyKeyPath>.fromOpaque(existingInstance)
      // TODO: This retain will be unnecessary once we support global objects
      // with inert refcounting.
      _ = object.retain()
      return existingInstance
    }
  } else {
    oncePtr = nil
  }
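The "return at +1" comment refers to the ownership convention across this raw-pointer boundary: the caller receives the object with one extra retain that it must eventually balance. A minimal sketch of that convention, using a hypothetical Widget class rather than anything from the runtime:

final class Widget {}

// Producer: hand the object back as a raw pointer carrying a +1 reference count.
func makePlusOne() -> UnsafeRawPointer {
  let w = Widget()
  return UnsafeRawPointer(Unmanaged.passRetained(w).toOpaque())
}

// Consumer: takeRetainedValue() balances the producer's +1.
func consumePlusOne(_ raw: UnsafeRawPointer) -> Widget {
  Unmanaged<Widget>.fromOpaque(raw).takeRetainedValue()
}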
If no cached instance exists yet, the runtime first runs a sizing pass on the pattern.

  // Do a pass to determine the class of the key path we'll be instantiating
  // and how much space we'll need for it.
  let (keyPathClass, rootType, size, _)
    = _getKeyPathClassAndInstanceSizeFromPattern(patternPtr, arguments)

  // Allocate the instance.
  let instance = keyPathClass._create(capacityInBytes: size) { instanceData in
    // Instantiate the pattern into the instance.
    _instantiateKeyPathBuffer(patternPtr, instanceData, rootType, arguments)
  }
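The shape here, measuring first, allocating exactly once, then filling the object's inline storage inside the creation closure, is loosely the same one the public ManagedBuffer type exposes. The sketch below is only an illustration of that pattern under assumed names (KPHeader, InlineBuffer, make(bytes:)); it is not how the key path classes are actually declared.

struct KPHeader {
  var byteCount: Int
}

final class InlineBuffer: ManagedBuffer<KPHeader, UInt8> {
  // Allocate header plus tail storage in one step, then fill the tail bytes
  // while the instance is being set up.
  static func make(bytes: [UInt8]) -> InlineBuffer {
    let instance = create(minimumCapacity: bytes.count) { _ in
      KPHeader(byteCount: bytes.count)
    }
    instance.withUnsafeMutablePointerToElements { dst in
      bytes.withUnsafeBufferPointer { src in
        if let base = src.baseAddress {
          dst.initialize(from: base, count: src.count)
        }
      }
    }
    return unsafeDowncast(instance, to: InlineBuffer.self)
  }
}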
  // Try to replace a null pointer in the cache variable with the instance
  // pointer.
  let instancePtr = Unmanaged.passRetained(instance)

  while true {
    let (oldValue, won) = Builtin.cmpxchg_seqcst_seqcst_Word(
      oncePtr._rawValue,
      0._builtinWordValue,
      UInt(bitPattern: instancePtr.toOpaque())._builtinWordValue)

    // If the exchange succeeds, then the instance we formed is the canonical
    // one.
    if Bool(won) {
      break
    }

    // Otherwise, someone raced with us to instantiate the key path pattern
    // and won. Their instance should be just as good as ours, so we can take
    // that one and let ours get deallocated.
    if let existingInstance = UnsafeRawPointer(bitPattern: Int(oldValue)) {
      // Return the instantiated object at +1.
      let object = Unmanaged<AnyKeyPath>.fromOpaque(existingInstance)
      // TODO: This retain will be unnecessary once we support global objects
      // with inert refcounting.
      _ = object.retain()
      // Release the instance we created.
      instancePtr.release()
      return existingInstance
    } else {
      // Try the cmpxchg again if it spuriously failed.
      continue
    }
  }
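The same publish-or-adopt idea can be sketched with the public Atomic type from Swift 6's Synchronization module standing in for the Builtin compare-exchange. Everything below (CachedInstance, cacheSlot, getOrPublish) is a hypothetical illustration, not runtime code:

import Synchronization

final class CachedInstance {}

// 0 means "nothing published yet"; otherwise the bit pattern of a +1 reference.
let cacheSlot = Atomic<UInt>(0)

func getOrPublish(_ makeInstance: () -> CachedInstance) -> CachedInstance {
  // Fast path: a previous call already published an instance.
  let existing = cacheSlot.load(ordering: .acquiring)
  if existing != 0 {
    return Unmanaged<CachedInstance>
      .fromOpaque(UnsafeRawPointer(bitPattern: existing)!)
      .takeUnretainedValue()
  }

  // Slow path: build a candidate and race to publish its bit pattern.
  let candidate = Unmanaged.passRetained(makeInstance())
  let bits = UInt(bitPattern: candidate.toOpaque())
  let (won, observed) = cacheSlot.compareExchange(
    expected: 0, desired: bits, ordering: .sequentiallyConsistent)
  if won {
    return candidate.takeUnretainedValue()
  }
  // Someone else won the race; adopt their instance and drop ours.
  candidate.release()
  return Unmanaged<CachedInstance>
    .fromOpaque(UnsafeRawPointer(bitPattern: observed)!)
    .takeUnretainedValue()
}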
  func _projectReadOnly(from root: Root) -> Value {
    // TODO: For perf, we could use a local growable buffer instead of Any
    var curBase: Any = root
    return withBuffer {
      var buffer = $0
      if buffer.data.isEmpty {
        return unsafeBitCast(root, to: Value.self)
      }
      while true {
        let (rawComponent, optNextType) = buffer.next()
        let valueType = optNextType ?? Value.self
        let isLast = optNextType == nil

        func project<CurValue>(_ base: CurValue) -> Value? {
          func project2<NewValue>(_: NewValue.Type) -> Value? {
            switch rawComponent._projectReadOnly(base,
              to: NewValue.self, endingWith: Value.self) {
            case .continue(let newBase):
              if isLast {
                _internalInvariant(NewValue.self == Value.self,
                  "key path does not terminate in correct type")
                return unsafeBitCast(newBase, to: Value.self)
              } else {
                curBase = newBase
                return nil
              }
            case .break(let result):
              return result
            }
          }

          return _openExistential(valueType, do: project2)
        }

        if let result = _openExistential(curBase, do: project) {
          return result
        }
      }
    }
  }
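Tying this back to user-level code: an ordinary key path read like the small example below is what ultimately drives this loop, projecting one component at a time until the leaf value is produced. The types are invented for illustration.

struct Address { var city: String }
struct Person { var address: Address }

let cityPath = \Person.address.city   // WritableKeyPath<Person, String>
let person = Person(address: Address(city: "Lisbon"))

// Subscripting with the key path walks each stored-property component in
// the key path's buffer: Person -> Address -> String.
let city = person[keyPath: cityPath]  // "Lisbon"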
// key path getter for Bar.subscript<A>(_:_:) : <A>BarA
sil [thunk] @$s4main3BarVySix_SitcluipSHRzlACxTK :
  $@convention(thin) <T where T : Hashable> (
    @in_guaranteed Bar,
    UnsafeRawPointer
  ) -> @out Int
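A getter thunk of this shape comes from source along the lines of the sketch below: a struct with a Hashable-constrained generic subscript, referenced from a key path literal that captures its index arguments. The dictionary-backed storage is an assumption purely to make the sketch compile; only the subscript's signature matters here.

struct Bar {
  private var storage: [AnyHashable: Int] = [:]

  subscript<A: Hashable>(_ index: Int, _ key: A) -> Int {
    storage[AnyHashable(key), default: index]
  }
}

// Forming the key path captures (0, "answer") as the subscript's arguments;
// the compiler emits a thin getter thunk that takes the base plus a raw
// pointer to those captured arguments, as in the SIL above.
let kp: KeyPath<Bar, Int> = \Bar[0, "answer"]
let value = Bar()[keyPath: kp]   // 0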