
Build Your Own Camera on macOS - Core Media IO Extensions

satoshi0212
September 02, 2023


Transcript

  1. ContentView.swift

     import SwiftUI
     import SystemExtensions

     struct ContentView: View {
         let extensionID: String = "tokyo.shmdevelopment.MyCreativeCamera.Extension"

         var body: some View {
             HStack {
                 Button {
                     // Ask the system to activate (install) the camera extension.
                     let activationRequest = OSSystemExtensionRequest.activationRequest(
                         forExtensionWithIdentifier: extensionID, queue: .main)
                     OSSystemExtensionManager.shared.submitRequest(activationRequest)
                 } label: {
                     Text("Install")
                 }
                 Button {
                     // Ask the system to deactivate (uninstall) the extension.
                     let deactivationRequest = OSSystemExtensionRequest.deactivationRequest(
                         forExtensionWithIdentifier: extensionID, queue: .main)
                     OSSystemExtensionManager.shared.submitRequest(deactivationRequest)
                 } label: {
                     Text("Uninstall")
                 }
             }
             .padding()
         }
     }
  2. ExtensionProvider.swift

     import AVFoundation

     extension AVCaptureDevice.DiscoverySession {
         static func faceTimeDevice() -> AVCaptureDevice {
             let discoverySession = AVCaptureDevice.DiscoverySession(
                 deviceTypes: [.builtInWideAngleCamera],
                 mediaType: .video,
                 position: .unspecified
             )
             let devices = discoverySession.devices
             // Pick the built-in FaceTime camera; force-unwrapped for brevity,
             // so this crashes on a Mac without one.
             let device = devices.filter {
                 $0.manufacturer == "Apple Inc." && $0.modelID.hasPrefix("FaceTime ")
             }.first!
             return device
         }
     }
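     A minimal sketch, assuming the usual capture setup (not shown on the slide):
     whichever process opens the physical camera needs camera permission and an
     NSCameraUsageDescription entry in its Info.plist.

     AVCaptureDevice.requestAccess(for: .video) { granted in
         print(granted ? "camera access granted" : "camera access denied")
     }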
  3. ExtensionProvider.swift

     let input: AVCaptureDeviceInput = {
         let device = AVCaptureDevice.DiscoverySession.faceTimeDevice()
         return try! AVCaptureDeviceInput(device: device)
     }()

     let output: AVCaptureVideoDataOutput = {
         let output = AVCaptureVideoDataOutput()
         // Request BGRA frames so Core Image can work on them directly.
         output.videoSettings = [
             kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA
         ]
         return output
     }()

     lazy var session: AVCaptureSession = {
         var session = AVCaptureSession()
         session.addInput(input)
         output.setSampleBufferDelegate(self, queue: .main)
         session.addOutput(output)
         return session
     }()
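     A minimal sketch (names hypothetical, not from the deck): in this
     passthrough variant the capture session would be started once a client
     opens the virtual camera and stopped when the last client leaves.

     func startCaptureIfNeeded() {
         if !session.isRunning { session.startRunning() }
     }

     func stopCaptureIfNeeded() {
         if session.isRunning { session.stopRunning() }
     }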
  4. ExtensionProvider.swift (class ExtensionDeviceSource)

     func startStreaming() {
         guard let _ = _bufferPool else {
             return
         }
         _streamingCounter += 1
         // Generate frames on a strict timer at kFrameRate fps.
         _timer = DispatchSource.makeTimerSource(flags: .strict, queue: _timerQueue)
         _timer!.schedule(deadline: .now(), repeating: 1.0 / Double(kFrameRate), leeway: .seconds(0))
         _timer!.setEventHandler {
             var err: OSStatus = 0
             let now = CMClockGetTime(CMClockGetHostTimeClock())
             var pixelBuffer: CVPixelBuffer?
             err = CVPixelBufferPoolCreatePixelBufferWithAuxAttributes(
                 kCFAllocatorDefault, self._bufferPool, self._bufferAuxAttributes, &pixelBuffer)
             if err != 0 {
                 os_log(.error, "out of pixel buffers \(err)")
             }
             if let pixelBuffer = pixelBuffer {
                 // Clear the frame, then draw a white stripe that bounces up and down.
                 CVPixelBufferLockBaseAddress(pixelBuffer, [])
                 var bufferPtr = CVPixelBufferGetBaseAddress(pixelBuffer)!
                 let width = CVPixelBufferGetWidth(pixelBuffer)
                 let height = CVPixelBufferGetHeight(pixelBuffer)
                 let rowBytes = CVPixelBufferGetBytesPerRow(pixelBuffer)
                 memset(bufferPtr, 0, rowBytes * height)
                 let whiteStripeStartRow = self._whiteStripeStartRow
                 if self._whiteStripeIsAscending {
                     self._whiteStripeStartRow = whiteStripeStartRow - 1
                     self._whiteStripeIsAscending = self._whiteStripeStartRow > 0
                 } else {
                     self._whiteStripeStartRow = whiteStripeStartRow + 1
                     self._whiteStripeIsAscending = self._whiteStripeStartRow >= (height - kWhiteStripeHeight)
                 }
                 bufferPtr += rowBytes * Int(whiteStripeStartRow)
                 for _ in 0..<kWhiteStripeHeight {
                     for _ in 0..<width {
                         var white: UInt32 = 0xFFFFFFFF
                         memcpy(bufferPtr, &white, MemoryLayout.size(ofValue: white))
                         bufferPtr += MemoryLayout.size(ofValue: white)
                     }
                 }
                 CVPixelBufferUnlockBaseAddress(pixelBuffer, [])

                 // Wrap the pixel buffer in a CMSampleBuffer and send it to clients.
                 var sbuf: CMSampleBuffer!
                 var timingInfo = CMSampleTimingInfo()
                 timingInfo.presentationTimeStamp = CMClockGetTime(CMClockGetHostTimeClock())
                 err = CMSampleBufferCreateForImageBuffer(
                     allocator: kCFAllocatorDefault,
                     imageBuffer: pixelBuffer,
                     dataReady: true,
                     makeDataReadyCallback: nil,
                     refcon: nil,
                     formatDescription: self._videoDescription,
                     sampleTiming: &timingInfo,
                     sampleBufferOut: &sbuf)
                 if err == 0 {
                     self._streamSource.stream.send(
                         sbuf,
                         discontinuity: [],
                         hostTimeInNanoseconds: UInt64(timingInfo.presentationTimeStamp.seconds * Double(NSEC_PER_SEC)))
                 }
                 os_log(.info, "video time \(timingInfo.presentationTimeStamp.seconds) now \(now.seconds) err \(err)")
             }
         }
         _timer!.setCancelHandler {
         }
         _timer!.resume()
     }
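     A minimal sketch of the matching teardown, assuming the same structure as
     Apple's camera extension sample (the deck does not show it): the timer is
     cancelled only when the last streaming client goes away.

     func stopStreaming() {
         if _streamingCounter > 1 {
             _streamingCounter -= 1
         } else {
             _streamingCounter = 0
             if let timer = _timer {
                 timer.cancel()
                 _timer = nil
             }
         }
     }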
  5. ExtensionProvider.swift

     // Requires: import CoreImage.CIFilterBuiltins (for CIFilter.sepiaTone()).
     extension ExtensionDeviceSource: AVCaptureVideoDataOutputSampleBufferDelegate {
         func captureOutput(_ output: AVCaptureOutput,
                            didOutput sampleBuffer: CMSampleBuffer,
                            from connection: AVCaptureConnection) {
             // Render a sepia-toned version back into the same pixel buffer,
             // then forward the frame to the virtual camera's clients.
             let inputImage = CIImage(cvImageBuffer: sampleBuffer.imageBuffer!)
             let filter = CIFilter.sepiaTone()
             filter.inputImage = inputImage
             ciContext.render(filter.outputImage!, to: sampleBuffer.imageBuffer!)
             _streamSource.stream.send(sampleBuffer, discontinuity: .time, hostTimeInNanoseconds: 0)
         }

         func captureOutput(_ output: AVCaptureOutput,
                            didDrop sampleBuffer: CMSampleBuffer,
                            from connection: AVCaptureConnection) {
             _streamSource.stream.send(sampleBuffer, discontinuity: .sampleDropped, hostTimeInNanoseconds: 0)
         }
     }
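     A minimal sketch, assuming ciContext is a property of ExtensionDeviceSource
     (its definition is not on the slide): one Core Image context is created and
     reused, since building a context per frame would be expensive.

     let ciContext = CIContext(options: [.cacheIntermediates: false])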
  6. Utility.swift

     static func generate(text: String,
                          size: CGFloat = 100,
                          imageSize: NSSize = NSSize(width: 1920, height: 1080)) -> CIImage? {
         let font = NSFont(name: "HiraginoSans-W9", size: size) ?? NSFont.systemFont(ofSize: size)
         let image = NSImage(size: imageSize, flipped: false) { rect -> Bool in
             let paragraphStyle = NSMutableParagraphStyle()
             paragraphStyle.alignment = .center
             paragraphStyle.lineBreakMode = .byCharWrapping
             // Size the text box to the number of lines and pin it to the top edge.
             let numberOfLines = CGFloat(text.split(separator: "\n").count)
             let rectangle = NSRect(x: 0,
                                    y: imageSize.height - font.lineHeight() * numberOfLines,
                                    width: imageSize.width,
                                    height: font.lineHeight() * numberOfLines)
             // A negative strokeWidth both strokes and fills, giving outlined white text.
             let textAttributes = [
                 .strokeColor: NSColor.black,
                 .foregroundColor: NSColor.white,
                 .strokeWidth: -1,
                 .font: font,
                 .paragraphStyle: paragraphStyle
             ] as [NSAttributedString.Key: Any]
             (text as NSString).draw(in: rectangle, withAttributes: textAttributes)
             return true
         }
         return image.ciImage
     }
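     lineHeight() and ciImage are not standard AppKit API, so the project
     presumably defines small helpers; a sketch of plausible implementations:

     extension NSFont {
         // Approximate line height from the font's vertical metrics.
         func lineHeight() -> CGFloat {
             ascender - descender + leading
         }
     }

     extension NSImage {
         // Convert by round-tripping through a bitmap representation.
         var ciImage: CIImage? {
             guard let data = tiffRepresentation,
                   let bitmap = NSBitmapImageRep(data: data) else { return nil }
             return CIImage(bitmapImageRep: bitmap)
         }
     }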
  7. ExtensionProvider.swift

     private let filterComposite = CIFilter(name: "CISourceOverCompositing")

     private func compose(bgImage: CIImage, overlayImage: CIImage?) -> CIImage? {
         // With no overlay, return the camera frame unchanged.
         guard let filterComposite = filterComposite,
               let overlayImage = overlayImage else { return bgImage }
         filterComposite.setValue(overlayImage, forKey: kCIInputImageKey)
         filterComposite.setValue(bgImage, forKey: kCIInputBackgroundImageKey)
         return filterComposite.outputImage
     }
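     The same composite can be written with the type-safe CIFilterBuiltins API
     instead of string keys; a sketch (the function name is hypothetical):

     import CoreImage.CIFilterBuiltins

     func composeTyped(bgImage: CIImage, overlayImage: CIImage) -> CIImage? {
         let filter = CIFilter.sourceOverCompositing()
         filter.inputImage = overlayImage   // drawn on top
         filter.backgroundImage = bgImage   // camera frame underneath
         return filter.outputImage
     }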
  8. ExtensionProvider.swift

     func captureOutput(_ output: AVCaptureOutput,
                        didOutput sampleBuffer: CMSampleBuffer,
                        from connection: AVCaptureConnection) {
         let inputImage = CIImage(cvImageBuffer: sampleBuffer.imageBuffer!)
         let filter = CIFilter.sepiaTone()
         filter.inputImage = inputImage
         ciContext.render(filter.outputImage!, to: sampleBuffer.imageBuffer!)
         _streamSource.stream.send(sampleBuffer, discontinuity: .time, hostTimeInNanoseconds: 0)
     }
  9. ExtensionProvider.swift

     func captureOutput(_ output: AVCaptureOutput,
                        didOutput sampleBuffer: CMSampleBuffer,
                        from connection: AVCaptureConnection) {
         // Composite the generated text image over the camera frame in place.
         let inputImage = CIImage(cvImageBuffer: sampleBuffer.imageBuffer!)
         if let compositedImage = compose(bgImage: inputImage, overlayImage: textImage) {
             ciContext.render(compositedImage, to: sampleBuffer.imageBuffer!)
         }
         _streamSource.stream.send(sampleBuffer, discontinuity: .time, hostTimeInNanoseconds: 0)
     }
  10. Defaults+Keys.swift

      import Defaults

      extension Defaults.Keys {
          public static let isBypass = Key<Bool>("isBypass", default: false, suite: defaultsSuite)
          public static let message = Key<String>("message", default: "Hello World", suite: defaultsSuite)
      }
  11. ContentView.swift

      import SwiftUI
      import SystemExtensions
      import Defaults

      struct ContentView: View {
          let extensionID: String = "tokyo.shmdevelopment.MyCreativeCamera.Extension"
          // Bound to the shared "message" default, so edits reach the extension.
          @Default(.message) var message

          var body: some View {
              VStack {
                  HStack {
                      Button {
                          let activationRequest = OSSystemExtensionRequest.activationRequest(
                              forExtensionWithIdentifier: extensionID, queue: .main)
                          OSSystemExtensionManager.shared.submitRequest(activationRequest)
                      } label: {
                          Text("Install")
                      }
                      Button {
                          let deactivationRequest = OSSystemExtensionRequest.deactivationRequest(
                              forExtensionWithIdentifier: extensionID, queue: .main)
                          OSSystemExtensionManager.shared.submitRequest(deactivationRequest)
                      } label: {
                          Text("Uninstall")
                      }
                      Defaults.Toggle("Bypass", key: .isBypass)
                  }
                  .padding()
                  HStack {
                      TextEditor(text: $message)
                          .font(.system(size: 16))
                          .border(Color.gray, width: 1)
                  }
                  .padding()
              }
          }
      }
  12. ExtensionProvider.swift

      private func observeSettings() {
          // Watch the shared defaults the host app writes to.
          Task {
              for await isBypass in Defaults.updates(.isBypass) {
                  self.isBypass = isBypass
              }
          }
          Task {
              for await message in Defaults.updates(.message) {
                  // Re-render the overlay whenever the message text changes.
                  textImage = Utility.generate(text: message)
              }
          }
      }
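      Defaults.updates(_:) is an async sequence that also yields the current
      value when iteration starts, so the overlay is rendered once on launch;
      presumably observeSettings() is called from the device source's
      initializer, though the deck does not show that call site.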
  13. ExtensionProvider.swift

      func captureOutput(_ output: AVCaptureOutput,
                         didOutput sampleBuffer: CMSampleBuffer,
                         from connection: AVCaptureConnection) {
          // With bypass on, forward the camera frame untouched.
          if isBypass {
              _streamSource.stream.send(sampleBuffer, discontinuity: .time, hostTimeInNanoseconds: 0)
              return
          }
          let inputImage = CIImage(cvImageBuffer: sampleBuffer.imageBuffer!)
          if let compositedImage = compose(bgImage: inputImage, overlayImage: textImage) {
              ciContext.render(compositedImage, to: sampleBuffer.imageBuffer!)
          }
          _streamSource.stream.send(sampleBuffer, discontinuity: .time, hostTimeInNanoseconds: 0)
      }
  14. ContentView.swift

      class ViewModel: NSObject, ObservableObject, SpeechRecognizerDelegate {
          @Default(.message) var message: String
          let speechRecognizer = SpeechRecognizer()

          func setUpSpeechRecognizer() {
              speechRecognizer.setup(delegate: self)
          }

          func startSpeechRecognize() {
              guard !speechRecognizer.started else { return }
              speechRecognizer.start()
          }

          func stopSpeechRecognize() {
              speechRecognizer.stop()
          }

          // Recognized text goes straight into the shared "message" default,
          // which the extension observes and draws onto the frame.
          func speechTextUpdate(value: String) {
              message = value
          }
      }
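      A minimal sketch of the delegate protocol implied by the calls above (its
      definition is not on the slides):

      public protocol SpeechRecognizerDelegate: AnyObject {
          func speechTextUpdate(value: String)
      }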
  15. SpeechRecognizer.swift (excerpt)

      import AVFoundation
      import Speech

      public class SpeechRecognizer {
          (...)
          public func setup(delegate: SpeechRecognizerDelegate) {
              self.delegate = delegate
              (...)
              // Recognize Japanese speech.
              recognizer = SFSpeechRecognizer(locale: Locale(identifier: "ja-JP"))
          }
          (...)
          private func startRecognitionTask() {
              recognitionTask = recognizer.recognitionTask(with: recognitionRequest) { result, error in
                  var isFinal = false
                  if let foundResult = result {
                      self.previousResult = foundResult
                      isFinal = foundResult.isFinal
                      print(foundResult.bestTranscription.formattedString)
                      // Push each partial transcription to the delegate.
                      self.delegate?.speechTextUpdate(value: foundResult.bestTranscription.formattedString)
                  }
                  if error != nil {
                      print("\(error!.localizedDescription)")
                      self.stop()
                  }
                  if isFinal {
                      print("FINAL RESULT reached")
                      self.stop()
                  }
              }
          }
      }
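      A minimal sketch of the authorization step elided from the excerpt:
      speech recognition requires user consent and an
      NSSpeechRecognitionUsageDescription entry in Info.plist.

      SFSpeechRecognizer.requestAuthorization { status in
          print(status == .authorized ? "speech recognition authorized" : "not authorized: \(status)")
      }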