Upgrade to Pro — share decks privately, control downloads, hide ads and more …

ARKitを触りながら振り返る

hira
October 11, 2022

 ARKitを触りながら振り返る

iOSDC Japan 2022 After Talk LT1 スライド

hira

October 11, 2022
Tweet

More Decks by hira

Other Decks in Technology

Transcript

  1. 4Kに対応 • 4Kで撮影 • 2×2で平均化し縮小 • 約16ms毎に処理（≈60fps）

    • レンダリング Before（https://developer.apple.com/videos/play/wwdc2022/10126/）
  2. 4Kに対応 • 4Kで撮影 • 2×2で平均化し縮小 • 約16ms毎に処理（≈60fps）

    • レンダリング Before（https://developer.apple.com/videos/play/wwdc2022/10126/）
  3. 4Kに対応 • 4Kで撮影 • 2×2で平均化し縮小 • 約16ms毎に処理（≈60fps）

    • レンダリング Before（https://developer.apple.com/videos/play/wwdc2022/10126/）
  4. 4Kに対応 • 4Kで撮影 • 2×2で平均化し縮小 • 約16ms毎に処理（≈60fps）

    • レンダリング Before（https://developer.apple.com/videos/play/wwdc2022/10126/）
  5. 4Kに対応 • 4Kで撮影 • 2×2で平均化し縮小 • 約16ms毎に処理（≈60fps）

    • レンダリング After（4Kモード）（https://developer.apple.com/videos/play/wwdc2022/10126/）💪
  6. 4Kに対応 • 4Kで撮影 • 2×2で平均化し縮小 • 約16ms毎に処理（≈60fps）

    • レンダリング After（4Kモード）（https://developer.apple.com/videos/play/wwdc2022/10126/）💪
  7. 4Kに対応 • 4Kで撮影 • 2×2で平均化し縮小 • 約16ms毎に処理（≈60fps）

    • レンダリング After（4Kモード）（https://developer.apple.com/videos/play/wwdc2022/10126/）💪
  8. /// - Tag: WillBeginCapture func photoOutput(_ output: AVCapturePhotoOutput, willBeginCaptureFor resolvedSettings:

    AVCaptureResolvedPhotoSettings) { maxPhotoProcessingTime = resolvedSettings.photoProcessingTimeRange.start + resolvedSettings.photoProcessingTimeRange.duration } /// - Tag: WillCapturePhoto func photoOutput(_ output: AVCapturePhotoOutput, willCapturePhotoFor resolvedSettings: AVCaptureResolvedPhotoSettings) { willCapturePhotoAnimation() // Retrieve the gravity vector at capture time. if motionManager.isDeviceMotionActive { gravity = motionManager.deviceMotion?.gravity logger.log("Captured gravity vector: \(String(describing: self.gravity))") } guard let maxPhotoProcessingTime = maxPhotoProcessingTime else { return } // Show a spinner if processing time exceeds one second. let oneSecond = CMTime(seconds: 1, preferredTimescale: 1) if maxPhotoProcessingTime > oneSecond { photoProcessingHandler(true) } } 重力加速度を保存
  9. /// - Tag: DidFinishProcessingPhoto func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo:

    AVCapturePhoto, error: Error?) { photoProcessingHandler(false) if let error = error { print("Error capturing photo: \(error)") photoData = nil } else { // Cache the HEIF representation of the data. photoData = photo } // Cache the depth data, if it exists, as a disparity map. logger.log("DidFinishProcessingPhoto: photo=\(String(describing: photo))") if let depthData = photo.depthData?.converting(toDepthDataType: kCVPixelFormatType_DisparityFloat32), let colorSpace = CGColorSpace(name: CGColorSpace.linearGray) { let depthImage = CIImage( cvImageBuffer: depthData.depthDataMap, options: [ .auxiliaryDisparity: true ] ) depthMapData = context.tiffRepresentation(of: depthImage, format: .Lf, colorSpace: colorSpace, options: [.disparityImage: depthImage]) } else { logger.error("colorSpace .linearGray not available... can't save depth data!") depthMapData = nil } } 深度マップを保存
  10. if let photoOutputConnection = self.photoOutput.connection(with: .video) { photoOutputConnection.videoOrientation = videoPreviewLayerOrientation

    } var photoSettings = AVCapturePhotoSettings() // Request HEIF photos if supported and enable high-resolution photos. if self.photoOutput.availablePhotoCodecTypes.contains(.hevc) { photoSettings = AVCapturePhotoSettings( format: [AVVideoCodecKey: AVVideoCodecType.hevc]) } // Turn off the flash. The app relies on ambient lighting to avoid specular highlights. if self.videoDeviceInput!.device.isFlashAvailable { photoSettings.flashMode = .off } // Turn on high-resolution, depth data, and quality prioritzation mode. photoSettings.isHighResolutionPhotoEnabled = true photoSettings.isDepthDataDeliveryEnabled = self.photoOutput.isDepthDataDeliveryEnabled photoSettings.photoQualityPrioritization = self.photoQualityPrioritizationMode // Request that the camera embed a depth map into the HEIC output file. photoSettings.embedsDepthDataInPhoto = true // Specify a preview image. if !photoSettings.__availablePreviewPhotoPixelFormatTypes.isEmpty { photoSettings.previewPhotoFormat = [kCVPixelBufferPixelFormatTypeKey: photoSettings.__availablePreviewPhotoPixelFormatTypes.first!, kCVPixelBufferWidthKey: self.previewWidth, kCVPixelBufferHeightKey: self.previewHeight] as [String: Any] logger.log("Found available previewPhotoFormat: \(String(describing: photoSettings.previewPhotoFormat))") } else { logger.warning("Can't find preview photo formats! Not setting...") } // Tell the camera to embed a preview image in the output file. photoSettings.embeddedThumbnailPhotoFormat = [ AVVideoCodecKey: AVVideoCodecType.jpeg, AVVideoWidthKey: self.thumbnailWidth, AVVideoHeightKey: self.thumbnailHeight ] DispatchQueue.main.async { self.isHighQualityMode = photoSettings.isHighResolutionPhotoEnabled && photoSettings.photoQualityPrioritization == .quality } カメラと画像の設定
  11. let inputFolderUrl = URL(fileURLWithPath: inputFolder, isDirectory: true) let configuration =

    makeConfigurationFromArguments() logger.log("Using configuration: \(String(describing: configuration))") // Try to create the session, or else exit. var maybeSession: PhotogrammetrySession? = nil do { maybeSession = try PhotogrammetrySession(input: inputFolderUrl, configuration: configuration) logger.log("Successfully created session.") } catch { logger.error("Error creating session: \(String(describing: error))") Foundation.exit(1) } フォルダを渡すだけ
  12. 

  13. let entity = try! ModelEntity.load(named: "test.usdz") let anchorEntity = AnchorEntity(plane:

    .any) anchorEntity.addChild(entity) arView.scene.addAnchor(anchorEntity) Entityの追加
  14. let config = ARWorldTrackingConfiguration() // 端末が扱える最高画質でのフォーマットを返す if let hiResCaptureVideoFormat =

    ARWorldTrackingConfiguration.recommendedVideoFormatForHighResolutionFrameCapturing { config.videoFormat = hiResCaptureVideoFormat } // 4k画質が扱える場合のみフォーマットを返す if let res4kCaptureVideoFormat = ARWorldTrackingConfiguration.recommendedVideoFormatFor4KResolution { config.videoFormat = res4kCaptureVideoFormat } ARSessionでの映像フォーマット
  15. arView.session.captureHighResolutionFrame { arFrame, error in guard let imageBuffer = arFrame?.capturedImage

    else { return } let ciImage = CIImage(cvPixelBuffer: imageBuffer) let w = CGFloat(CVPixelBufferGetWidth(imageBuffer)) let h = CGFloat(CVPixelBufferGetHeight(imageBuffer)) let rect:CGRect = CGRect.init(x: 0, y: 0, width: w, height: h) let context = CIContext.init() guard let cgImage = context.createCGImage(ciImage, from: rect) else { return } let uiimage = UIImage(cgImage: cgImage).rotated(by: 90.0 * CGFloat.pi / 180) UIImageWriteToSavedPhotosAlbum(uiimage, self, nil, nil) } ARSession中のフレーム取得 ARSessionが設定をよしなにしてくれて ARKitのメソッドはわかりやすい
  16. let entity = try! ModelEntity.load(named: "test.usdz") let anchorEntity = AnchorEntity(plane:

    .any) anchorEntity.addChild(entity) arView.scene.addAnchor(anchorEntity) Entityの追加
  17. ARSCNView ARKit SceneKit ARSession ARFrame ARAnchor ARView RealityKit Scene AnchorEntity

    ModelEntity AnchorEntity Entity Entity Configuration ARFrame ARAnchor
  18. 

  19. 

  20. 

  21. let configuration = ARBodyTrackingConfiguration() arView.session.run(configuration) arView.scene.addAnchor(characterAnchor) if let entity =

    try? Entity.loadBodyTracked(named: "character/robot") { self.character = entity } モデルのロード SceneにモデルのAnchorを設置 モデルのBodyTrackedEntityの用意
  22. func session(_ session: ARSession, didUpdate anchors: [ARAnchor]) { for anchor

    in anchors { guard let bodyAnchor = anchor as? ARBodyAnchor else { continue } let bodyPosition = simd_make_float3(bodyAnchor.transform.columns.3) characterAnchor.position = bodyPosition + characterOffset characterAnchor.orientation = Transform(matrix: bodyAnchor.transform).rotation if let character = character, character.parent == nil { characterAnchor.addChild(character) } } } モデルとARBodyAnchorの接続 モデル側でjointNameも合わせているので、Anchorに重ねるだけ
  23. 

  24. 

  25. 

  26.