
New Visual Expression by Linking NDI and ARKit

satoshi0212
September 18, 2021



Transcript

1. [NDIWrapper] NDIWrapper.h

    #import <Foundation/Foundation.h>
    #import <AVFoundation/AVFoundation.h>

    @interface NDIWrapper : NSObject

    + (void)initialize;
    - (void)start:(NSString *)name;
    - (void)stop;
    - (void)send:(CMSampleBufferRef)sampleBuffer metadata:(NSString *)metadata;

    @end
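Since this wrapper is consumed from Swift in the sender example later in the deck, it helps to see roughly how the Objective-C interface surfaces once NDIWrapper.h is exposed through the app's bridging header. The Swift signatures below are inferred from the header above, and the source name is illustrative:

    import CoreMedia

    // Inferred Swift view of the Objective-C API:
    //   func start(_ name: String)
    //   func stop()
    //   func send(_ sampleBuffer: CMSampleBuffer, metadata: String)
    let wrapper = NDIWrapper()
    wrapper.start("iPhone Camera")   // the source name other NDI tools will see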
2. [NDIWrapper] NDIWrapper.m (excerpt)

    - (void)send:(CMSampleBufferRef)sampleBuffer metadata:(NSString *)metadata {
        if (!my_ndi_send) {
            NSLog(@"ERROR: NDI instance is nil");
            return;
        }

        CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);

        NDIlib_video_frame_v2_t video_frame;
        video_frame.frame_rate_N = 30000;   // 30000 / 1000 = 30 fps
        video_frame.frame_rate_D = 1000;
        video_frame.xres = (int)CVPixelBufferGetWidth(imageBuffer);
        video_frame.yres = (int)CVPixelBufferGetHeight(imageBuffer);
        video_frame.FourCC = NDIlib_FourCC_type_BGRA;
        video_frame.frame_format_type = NDIlib_frame_format_type_progressive;
        // Width over height, as a float; integer division of the two
        // resolution fields would truncate to 0.
        video_frame.picture_aspect_ratio = (float)video_frame.xres / (float)video_frame.yres;
        video_frame.line_stride_in_bytes = video_frame.xres * 4;
        video_frame.p_metadata = metadata.UTF8String;

        CVPixelBufferLockBaseAddress(imageBuffer, kCVPixelBufferLock_ReadOnly);
        video_frame.p_data = CVPixelBufferGetBaseAddress(imageBuffer);
        // Caution: the async send keeps reading p_data until the next send
        // call, so the pixel buffer must remain valid at least that long.
        NDIlib_send_send_video_async_v2(my_ndi_send, &video_frame);
        CVPixelBufferUnlockBaseAddress(imageBuffer, kCVPixelBufferLock_ReadOnly);
    }
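The frame is tagged NDIlib_FourCC_type_BGRA and the stride is computed as xres * 4, so this only works if the capture side actually delivers 32-bit BGRA pixel buffers. A one-line sketch of that assumption on the Swift side (the variable name is illustrative):

    import AVFoundation

    let videoOutput = AVCaptureVideoDataOutput()
    // Ask AVFoundation for 32BGRA so the NDI frame description matches.
    videoOutput.videoSettings = [
        kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA
    ]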
3. [NDISenderExample] ViewController.swift

    class ViewController: UIViewController {

        private var ndiWrapper: NDIWrapper?
        ...

        override func viewDidLoad() {
            super.viewDidLoad()
            ndiWrapper = NDIWrapper()
            ...
        }
    }
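The deck doesn't show where the sender is started and stopped, but with the wrapper API above it would be calls like the following, for example from button handlers inside this ViewController (hypothetical wiring; isSending gates the capture callback below):

    // Hypothetical start/stop handlers, assumed rather than shown in the deck.
    @IBAction func startButtonTapped(_ sender: UIButton) {
        ndiWrapper?.start(UIDevice.current.name)   // source name on the network
        isSending = true
    }

    @IBAction func stopButtonTapped(_ sender: UIButton) {
        isSending = false
        ndiWrapper?.stop()
    }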
4. [NDISenderExample] ViewController.swift (AVCaptureVideoDataOutputSampleBufferDelegate)

    func captureOutput(_ output: AVCaptureOutput,
                       didOutput sampleBuffer: CMSampleBuffer,
                       from connection: AVCaptureConnection) {
        guard let ndiWrapper = self.ndiWrapper, isSending else { return }
        ndiWrapper.send(sampleBuffer, metadata: makeMetadata().serialize())
    }
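For context, a minimal AVCaptureSession setup that would feed this delegate could look like the following sketch (assumed, since the deck doesn't show it; error handling and threading details are elided):

    import AVFoundation

    func setUpCaptureSession() {
        let session = AVCaptureSession()
        session.sessionPreset = .hd1920x1080

        guard let camera = AVCaptureDevice.default(.builtInWideAngleCamera,
                                                   for: .video, position: .back),
              let input = try? AVCaptureDeviceInput(device: camera) else { return }
        session.addInput(input)

        let output = AVCaptureVideoDataOutput()
        // 32BGRA so the buffers match the NDI frame description (see the note above).
        output.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA]
        output.setSampleBufferDelegate(self, queue: DispatchQueue(label: "video.capture"))
        session.addOutput(output)

        session.startRunning()
    }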
5. [NDISenderExample] ViewController.swift

    public struct Metadata {
        public let cameraPosition: simd_float3
        public let cameraRotation: CMQuaternion
        public let projectionMatrix: matrix_float4x4
        public let depthRange: simd_float2
        public let inputState: InputState
    }

    public struct InputState {
        public let buttons: [UInt8]   // 2
        public let toggles: [UInt8]   // 2
        public let knobs: [UInt8]     // 32
    }
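The capture callback above calls makeMetadata().serialize(), but the deck never shows the wire format. NDI metadata travels as an XML string, so one plausible sketch writes each field out as an attribute; the element and attribute names here are assumptions, not the project's real format:

    import CoreMotion
    import simd

    // Hypothetical serializer, for illustration only.
    extension Metadata {
        func serialize() -> String {
            let pos = [cameraPosition.x, cameraPosition.y, cameraPosition.z]
                .map(String.init).joined(separator: ",")
            let rot = [cameraRotation.x, cameraRotation.y, cameraRotation.z, cameraRotation.w]
                .map(String.init).joined(separator: ",")
            let proj = (0..<4).flatMap { c in (0..<4).map { r in String(projectionMatrix[c][r]) } }
                .joined(separator: ",")
            let range = [depthRange.x, depthRange.y].map(String.init).joined(separator: ",")
            let knobs = inputState.knobs.map(String.init).joined(separator: ",")
            return "<metadata position=\"\(pos)\" rotation=\"\(rot)\" "
                 + "projection=\"\(proj)\" depthRange=\"\(range)\" knobs=\"\(knobs)\" />"
        }
    }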
6. [NDISenderExample] ViewController.swift

    // Debug
    func makeMetadata() -> Metadata {
        var knobs = Array<UInt8>(repeating: 0, count: 32)
        knobs[1] = UInt8(slider.value)
        let inputState = InputState(buttons: [0, 0], toggles: [0, 0], knobs: knobs)
        let metadata = Metadata(
            cameraPosition: simd_float3(0, 0, 0),
            cameraRotation: CMQuaternion(x: 0, y: 0, z: 0, w: 0),
            projectionMatrix: matrix_float4x4(SIMD4<Float>(0, 0, 0, 0),
                                              SIMD4<Float>(0, 0, 0, 0),
                                              SIMD4<Float>(0, 0, 0, 0),
                                              SIMD4<Float>(0, 0, 0, 0)),
            depthRange: simd_float2(0, 1),
            inputState: inputState)
        return metadata
    }
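This debug stub sends zeros for the camera pose. In the AR sender, the same fields would presumably be filled from the current ARFrame; a hedged sketch of that (the ARKit calls are real API, but the wiring, viewport size, and depth range are assumptions):

    import ARKit

    // Sketch only: fills Metadata from a live ARFrame instead of debug constants.
    func makeMetadata(from frame: ARFrame) -> Metadata {
        let t = frame.camera.transform
        let position = simd_float3(t.columns.3.x, t.columns.3.y, t.columns.3.z)
        let q = simd_quatf(t)   // rotation part of the camera transform
        let rotation = CMQuaternion(x: Double(q.imag.x), y: Double(q.imag.y),
                                    z: Double(q.imag.z), w: Double(q.real))
        let projection = frame.camera.projectionMatrix(for: .landscapeRight,
                                                       viewportSize: CGSize(width: 1920, height: 1080),
                                                       zNear: 0.1, zFar: 10.0)
        let inputState = InputState(buttons: [0, 0], toggles: [0, 0],
                                    knobs: Array(repeating: 0, count: 32))
        return Metadata(cameraPosition: position,
                        cameraRotation: rotation,
                        projectionMatrix: projection,
                        depthRange: simd_float2(0.1, 10.0),
                        inputState: inputState)
    }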
7. [NDISenderExample] ViewController.swift

    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)
        let configuration = ARWorldTrackingConfiguration()
        configuration.frameSemantics = .personSegmentationWithDepth
        session.run(configuration)
    }
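personSegmentationWithDepth requires an A12 chip or newer, so a capability check before running the session is a sensible addition (suggested here, not shown in the deck):

    import ARKit

    let configuration = ARWorldTrackingConfiguration()
    // Guard against older devices; setting unsupported frame semantics raises an error.
    if ARWorldTrackingConfiguration.supportsFrameSemantics(.personSegmentationWithDepth) {
        configuration.frameSemantics = .personSegmentationWithDepth
    }
    session.run(configuration)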
8. [NDISenderExample] Renderer.swift

    class Renderer {
        let session: ARSession
        let matteGenerator: ARMatteGenerator
        let device: MTLDevice
        let inFlightSemaphore = DispatchSemaphore(value: kMaxBuffersInFlight)
        var renderDestination: RenderDestinationProvider
        ...

        func updateMatteTextures(commandBuffer: MTLCommandBuffer) {
            guard let currentFrame = session.currentFrame else { return }
            alphaTexture = matteGenerator.generateMatte(from: currentFrame,
                                                        commandBuffer: commandBuffer)
            dilatedDepthTexture = matteGenerator.generateDilatedDepth(from: currentFrame,
                                                                      commandBuffer: commandBuffer)
        }
    }
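ARMatteGenerator needs a Metal device at construction time and then produces the matte and dilated-depth textures inside a command buffer each frame. The setup this excerpt implies might look like the following sketch (the resolution choice and command-queue wiring are assumptions):

    import ARKit
    import Metal

    let device = MTLCreateSystemDefaultDevice()!
    let matteGenerator = ARMatteGenerator(device: device, matteResolution: .full)
    let commandQueue = device.makeCommandQueue()!

    // Per frame: generate the mattes, then commit the command buffer.
    let commandBuffer = commandQueue.makeCommandBuffer()!
    // renderer.updateMatteTextures(commandBuffer: commandBuffer)
    commandBuffer.commit()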
9. [RcamVisualizer] RcamReceiver.cs

    void UpdateMetadata()
    {
        // Deserialization
        var xml = _ndiReceiver.metadata;
        if (xml == null || xml.Length == 0) return;
        _metadata = Metadata.Deserialize(xml);

        // Input state update with the metadata
        Singletons.InputHandle.InputState = _metadata.InputState;
    }