
A recommendation for using arbitrary frames with Twilio's CameraSource


potatotips 72


BOB

December 22, 2020

Transcript

  1. Tapple: Video Chat
     - Video chat feature released
     - It uses Twilio
     - Filter feature released
     - Built in response to user feedback such as "tidying up my room is a hassle" and "getting presentable takes time"
     - The filter feature also uses ARKit
     Matching app "Tapple" adds a new "filter feature" to "Video Chat", which lets users go on an online date right away: https://www.cyberagent.co.jp/news/detail/id=25083
     This talk introduces how to send arbitrary frames through Twilio.
  2. To send frames to a CameraSource, ◯◯ is the key

     // TVIVideoFrame
     - (nullable instancetype)initWithTimestamp:(CMTime)timestamp
                                         buffer:(nonnull CVImageBufferRef)imageBuffer
                                    orientation:(TVIVideoOrientation)orientation;

     - (nullable instancetype)initWithTimeInterval:(CFTimeInterval)timeInterval
                                            buffer:(nonnull CVImageBufferRef)imageBuffer
                                       orientation:(TVIVideoOrientation)orientation;

     TVIVideoFrame Class Reference: https://twilio.github.io/twilio-video-ios/docs/latest/Classes/TVIVideoFrame.html#//api/name/initWithTimeInterval:buffer:orientation:
  3. To send frames to a CameraSource, ◯◯ is the key

     (Same TVIVideoFrame initializers as slide 2, highlighting the CVImageBufferRef buffer parameter that both of them take.)

     TVIVideoFrame Class Reference: https://twilio.github.io/twilio-video-ios/docs/latest/Classes/TVIVideoFrame.html#//api/name/initWithTimeInterval:buffer:orientation:
  4. To send frames to a CameraSource, ◯◯ is the key

     (Same TVIVideoFrame initializers as slide 2, highlighting the CMTime and CFTimeInterval timestamp parameters.)

     The pipeline: (A) capture a frame, (B) process it, (C) create a VideoFrame, (D) feed it into the source.

     TVIVideoFrame Class Reference: https://twilio.github.io/twilio-video-ios/docs/latest/Classes/TVIVideoFrame.html#//api/name/initWithTimeInterval:buffer:orientation:
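The two initializers differ only in how the timestamp is expressed. If your capture pipeline hands you one representation and you want the other, Core Media can bridge them; a minimal sketch (the timescale and values are illustrative assumptions, not from the talk):

    import CoreMedia

    // CFTimeInterval (seconds as a Double) -> CMTime, using a microsecond timescale.
    let seconds: CFTimeInterval = 12.345
    let asCMTime = CMTime(seconds: seconds, preferredTimescale: 1_000_000)

    // CMTime -> CFTimeInterval.
    let backToSeconds: CFTimeInterval = CMTimeGetSeconds(asCMTime)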
  5. To send frames to a CameraSource, the Sink is the key

     import TwilioVideo

     final class LocalTrackSourceManager: NSObject {
         private(set) var camera: CameraSource!

         func onVideoFrame(image: CIImage, time: TimeInterval) {
             guard let pixelBuffer = image.convertCVPixelBuffer() else { return }   // (B) process the frame
             guard let videoFrame = VideoFrame(timeInterval: time,
                                               buffer: pixelBuffer,
                                               orientation: VideoOrientation.up) else { return }   // (C) create a VideoFrame
             camera.sink?.onVideoFrame(videoFrame)   // (D) feed it into the sink
         }

         func onVideoFrame(image: CIImage, time: CMTime) {
             guard let pixelBuffer = image.convertCVPixelBuffer() else { return }
             guard let videoFrame = VideoFrame(timestamp: time,
                                               buffer: pixelBuffer,
                                               orientation: VideoOrientation.up) else { return }
             camera.sink?.onVideoFrame(videoFrame)
         }
     }
  6. To send frames to a CameraSource, the Sink is the key

     (Same LocalTrackSourceManager code as slide 5, highlighting step (B) in both overloads:)

     guard let pixelBuffer = image.convertCVPixelBuffer() else { return }

     convertCVPixelBuffer() is an app-side helper on CIImage; a sketch of it follows below.
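A minimal sketch of what such a CIImage to CVPixelBuffer helper might look like. The 32BGRA pixel format, the attribute keys, and the per-call CIContext are assumptions for illustration, not details from the talk:

    import CoreImage
    import CoreVideo

    extension CIImage {
        // Renders the image into a newly allocated 32BGRA pixel buffer.
        func convertCVPixelBuffer() -> CVPixelBuffer? {
            let attributes: [String: Any] = [
                kCVPixelBufferCGImageCompatibilityKey as String: true,
                kCVPixelBufferCGBitmapContextCompatibilityKey as String: true
            ]
            var pixelBuffer: CVPixelBuffer?
            let status = CVPixelBufferCreate(kCFAllocatorDefault,
                                             Int(extent.width),
                                             Int(extent.height),
                                             kCVPixelFormatType_32BGRA,
                                             attributes as CFDictionary,
                                             &pixelBuffer)
            guard status == kCVReturnSuccess, let buffer = pixelBuffer else { return nil }
            // Creating a CIContext per frame is wasteful; a real implementation would reuse one.
            CIContext().render(self, to: buffer)
            return buffer
        }
    }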
  7. To send frames to a CameraSource, the Sink is the key

     (Same code as slide 5, highlighting step (C), the two VideoFrame initializers:)

     VideoFrame(timeInterval: time, buffer: pixelBuffer, orientation: VideoOrientation.up)
     VideoFrame(timestamp: time, buffer: pixelBuffer, orientation: VideoOrientation.up)
  8. To send frames to a CameraSource, the Sink is the key

     (Same code as slide 5, highlighting the hand-off to the sink in both overloads:)

     camera.sink?.onVideoFrame(videoFrame)
  9. To send frames to a CameraSource, the Sink is the key

     (Same code as slide 5, highlighting camera.sink?.onVideoFrame(videoFrame) and var camera: CameraSource!)

     Tips
     - You can also make your own class conform to VideoSource and hold a sink of its own.
     - However, you then have to keep sending frames to that Sink at all times (otherwise it crashes).
     - CameraSource's Sink, on the other hand, has no such requirement.

     extension VideoChatAVCaptureView: VideoSource {
         weak var sink: VideoSink?   // shown here for brevity; a stored property must live in the class itself, not in an extension

         var isScreencast: Bool {
             return false
         }

         func requestOutputFormat(_ outputFormat: VideoFormat) {
             guard let sink = sink else { return }
             sink.onVideoFormatRequest(outputFormat)
         }
     }
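The deck never shows how the camera property itself is created. A rough sketch of that wiring, assuming the Twilio Video Swift API (CameraSource(delegate:), LocalVideoTrack(source:enabled:name:)); exactly when the SDK populates camera.sink may depend on the SDK version, so treat this as an assumption to verify:

    import TwilioVideo

    extension LocalTrackSourceManager {
        // Hypothetical setup, not from the deck. Assumes this extension sits in the same
        // file as the class so it can assign the private(set) camera property.
        func makeLocalVideoTrack() -> LocalVideoTrack? {
            // Create the source without starting the device camera; the frames will come
            // from AVCapture / ARKit via onVideoFrame(image:time:).
            guard let source = CameraSource(delegate: nil) else { return nil }
            camera = source
            // Wrapping the source in a LocalVideoTrack is what lets a Room publish the
            // frames pushed into camera.sink.
            return LocalVideoTrack(source: source, enabled: true, name: "camera")
        }
    }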
  10. AVCapture: how to get frames

     import AVFoundation
     import UIKit

     final class VideoChatAVCaptureView: UIView {
         private var session: AVCaptureSession?

         private func prepare() {
             let output: AVCaptureVideoDataOutput = AVCaptureVideoDataOutput()
             output.setSampleBufferDelegate(self, queue: DispatchQueue(label: "videoChat"))
         }
     }

     extension VideoChatAVCaptureView: AVCaptureVideoDataOutputSampleBufferDelegate {
         func captureOutput(_ output: AVCaptureOutput,
                            didOutput sampleBuffer: CMSampleBuffer,
                            from connection: AVCaptureConnection) {
             guard let ciImage = sampleBuffer.convertCIImage() else { return }
             let timestamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
             localTrackSourceManager.onVideoFrame(image: ciImage, time: timestamp)   // localTrackSourceManager is held by the view (declaration omitted on the slide)
         }
     }
  11. AVCapture: how to get frames

     (Same code as slide 10, highlighting the delegate setup:)

     output.setSampleBufferDelegate(self, queue: DispatchQueue(label: "videoChat"))

     Set the sample buffer delegate to self.
  12. AVCapture: how to get frames

     (Same code as slide 10, highlighting the conversion step; a sketch of the helper follows below:)

     guard let ciImage = sampleBuffer.convertCIImage() else { return }

     Convert the CMSampleBuffer to a CIImage.
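convertCIImage() is another app-side helper that is not shown on the slides. A minimal sketch, assuming it simply wraps the sample buffer's image buffer in a CIImage:

    import AVFoundation
    import CoreImage

    extension CMSampleBuffer {
        // Wraps the sample buffer's pixel data in a CIImage, or returns nil if it has none.
        func convertCIImage() -> CIImage? {
            guard let imageBuffer = CMSampleBufferGetImageBuffer(self) else { return nil }
            return CIImage(cvImageBuffer: imageBuffer)
        }
    }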
  13. AVCapture: how to get frames

     (Same code as slide 10, highlighting the timestamp:)

     let timestamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)

     Create a CMTime (the buffer's presentation timestamp).
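prepare() on slide 10 only shows the video data output and its delegate; the session, input, and output still have to be wired together before frames arrive. A sketch of that omitted part, assuming a standard AVCaptureSession setup (device choice and error handling simplified, and assuming same-file access to the private session property):

    import AVFoundation

    extension VideoChatAVCaptureView {
        // Hypothetical completion of prepare(): configure the capture session around the
        // AVCaptureVideoDataOutput shown on the slide.
        func configureSession(with output: AVCaptureVideoDataOutput) {
            let session = AVCaptureSession()
            session.beginConfiguration()

            // Front camera as the input; any capture device would do here.
            if let camera = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .front),
               let input = try? AVCaptureDeviceInput(device: camera),
               session.canAddInput(input) {
                session.addInput(input)
            }
            if session.canAddOutput(output) {
                session.addOutput(output)
            }

            session.commitConfiguration()
            session.startRunning()
            self.session = session
        }
    }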
  14. ARKit: how to get frames

     import ARKit
     import SceneKit
     import UIKit

     final class VideoChatARKitView: UIView {
         private lazy var sceneView: ARSCNView = {...}()

         private func prepare() {
             sceneView.delegate = self
         }
     }

     extension VideoChatARKitView: ARSCNViewDelegate {
         func renderer(_ renderer: SCNSceneRenderer, updateAtTime time: TimeInterval) {
             guard let sceneView = renderer as? ARSCNView else { return }
             guard let ciImage = CIImage(image: sceneView.snapshot()) else { return }
             localTrackSourceManager.onVideoFrame(image: ciImage, time: time)
         }
     }
  15. ARKit: how to get frames

     (Same code as slide 14, highlighting the delegate setup:)

     sceneView.delegate = self

     Set the delegate to self.
  16. ARKit: how to get frames

     (Same code as slide 14, highlighting the capture step:)

     let ciImage = CIImage(image: sceneView.snapshot())

     Take a snapshot of the scene view.
  17. ARKit: how to get frames

     (Same code as slide 14, highlighting the timestamp parameter:)

     time: TimeInterval

     It can be passed along as-is, because LocalTrackSourceManager has an overload that takes a TimeInterval directly.
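The lazy sceneView on slide 14 is elided ({...}), and the AR session start is not shown either. A sketch of what that setup might look like; the view construction and the choice of configuration are assumptions (the talk's filter feature would more likely use face tracking), and the extension is assumed to live in the same file so it can reach the private sceneView:

    import ARKit
    import SceneKit

    extension VideoChatARKitView {
        // Hypothetical setup, not from the deck: build the ARSCNView and start an AR session
        // so that renderer(_:updateAtTime:) starts firing.
        func makeSceneView() -> ARSCNView {
            // Could back the lazy sceneView property; adding it to the view hierarchy
            // would then happen in prepare().
            let view = ARSCNView(frame: bounds)
            view.automaticallyUpdatesLighting = true
            return view
        }

        func startSession() {
            // ARWorldTrackingConfiguration is just an example; use ARFaceTrackingConfiguration
            // for face filters on supported devices.
            let configuration = ARWorldTrackingConfiguration()
            sceneView.session.run(configuration)
        }
    }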