
Chroma keying a video with GPUImage on an SCNPlane in ARKit

I am trying to play a video with transparency in an ARSCNView. An SCNPlane is used as the projection surface for the video, and I am trying to chroma key this video with GPUImage.

I followed this example here. Unfortunately, I could not find a way to project the filtered video onto my videoSpriteKitNode: the filter renders into a GPUImageView, while the SKVideoNode expects an AVPlayer.

If anyone could share their insight, I would be very grateful.

import UIKit 
import ARKit 
import GPUImage 

class ARTransVC: UIViewController{ 

@IBOutlet weak var sceneView: ARSCNView! 
let configuration = ARWorldTrackingConfiguration() 
var movie: GPUImageMovie! 
var filter: GPUImageChromaKeyBlendFilter! 
var sourcePicture: GPUImagePicture! 
var player = AVPlayer() 
var gpuImageView: GPUImageView! 


override func viewDidLoad() { 
    super.viewDidLoad() 
    self.sceneView.debugOptions = [ARSCNDebugOptions.showWorldOrigin, ARSCNDebugOptions.showFeaturePoints] 
    self.sceneView.session.run(configuration) 

    self.gpuImageView = GPUImageView() 
    self.gpuImageView.translatesAutoresizingMaskIntoConstraints = false 

    //a delay for ARKit to capture the surroundings 
    DispatchQueue.main.asyncAfter(deadline: .now() + 3) { 

     // A SpriteKit scene to contain the SpriteKit video node 
     let spriteKitScene = SKScene(size: CGSize(width: self.sceneView.frame.width, height: self.sceneView.frame.height)) 
     spriteKitScene.scaleMode = .aspectFit 

     // Create a video player, which will be responsible for the playback of the video material 
     guard let url = Bundle.main.url(forResource: "FY3A4278", withExtension: "mp4") else { return } 
     let playerItem = AVPlayerItem(url: url) 
     self.player.replaceCurrentItem(with: playerItem) 

     // chroma key filter setup (keying out the blue background color)
     self.filter = GPUImageChromaKeyBlendFilter() 
     self.filter.thresholdSensitivity = 0.15 
     self.filter.smoothing = 0.3 
     self.filter.setColorToReplaceRed(0.322, green: 0.455, blue: 0.831) 

     self.movie = GPUImageMovie(playerItem: playerItem) 
     self.movie.playAtActualSpeed = true 
     self.movie.addTarget(self.filter) 
     self.movie.startProcessing() 

     let backgroundImage = UIImage(named: "transparent.png") 
     self.sourcePicture = GPUImagePicture(image: backgroundImage, smoothlyScaleOutput: true)! 
     self.sourcePicture.addTarget(self.filter) 
     self.sourcePicture.processImage() 

     // HERE is where I don't know how to continue: the filter renders into this GPUImageView, not onto the SKVideoNode
     self.filter.addTarget(self.gpuImageView) 


     // To make the video loop 
     self.player.actionAtItemEnd = .none 
     NotificationCenter.default.addObserver(
      self, 
      selector: #selector(ARTransVC.playerItemDidReachEnd), 
      name: NSNotification.Name.AVPlayerItemDidPlayToEndTime, 
      object: self.player.currentItem) 

     // Create the SpriteKit video node, containing the video player 
     let videoSpriteKitNode = SKVideoNode(avPlayer: self.player) 
     videoSpriteKitNode.position = CGPoint(x: spriteKitScene.size.width/2.0, y: spriteKitScene.size.height/2.0) 
     videoSpriteKitNode.size = spriteKitScene.size 
     videoSpriteKitNode.yScale = -1.0 
     videoSpriteKitNode.play() 
     spriteKitScene.addChild(videoSpriteKitNode) 

     // Create the SceneKit scene 
     let scene = SCNScene() 
     self.sceneView.scene = scene 
     self.sceneView.isPlaying = true 

     // Create a SceneKit plane and add the SpriteKit scene as its material 
     let background = SCNPlane(width: CGFloat(1), height: CGFloat(1)) 
     background.firstMaterial?.diffuse.contents = spriteKitScene 
     let backgroundNode = SCNNode(geometry: background) 
     backgroundNode.geometry?.firstMaterial?.isDoubleSided = true 

     backgroundNode.position = SCNVector3(0,0,-2.0) 
     scene.rootNode.addChildNode(backgroundNode) 
    } 
} 

@objc func playerItemDidReachEnd(notification: NSNotification) { 
    if let playerItem: AVPlayerItem = notification.object as? AVPlayerItem { 
     playerItem.seek(to: kCMTimeZero, completionHandler: nil) 
    } 
} 
} 

2 Answers

Since you are trying to get a transparent background, set the backgroundColor and scaleMode of your scene:

backgroundColor = .clear 
scaleMode = .aspectFit 
This put me on the right track! Thanks – Felix

I have now managed to chroma key and play a transparent video in the ARSCNView, but it is still a somewhat patchy solution.

I moved away from my previous approach and implemented a ChromaKeyMaterial from Lësha Turkowski!

Here it is, adjusted to the color I wanted to key out:

import SceneKit 

public class ChromaKeyMaterial: SCNMaterial { 

public var backgroundColor: UIColor { 
    didSet { didSetBackgroundColor() } 
} 

public var thresholdSensitivity: Float { 
    didSet { didSetThresholdSensitivity() } 
} 

public var smoothing: Float { 
    didSet { didSetSmoothing() } 
} 

public init(backgroundColor: UIColor = .green, thresholdSensitivity: Float = 0.15, smoothing: Float = 0.0) { 

    self.backgroundColor = backgroundColor 
    self.thresholdSensitivity = thresholdSensitivity 
    self.smoothing = smoothing 

    super.init() 

    didSetBackgroundColor() 
    didSetThresholdSensitivity() 
    didSetSmoothing() 

    // chroma key shader is based on GPUImage 
    // https://github.com/BradLarson/GPUImage/blob/master/framework/Source/GPUImageChromaKeyFilter.m 

    let surfaceShader = 
    """ 
    uniform vec3 c_colorToReplace; 
    uniform float c_thresholdSensitivity; 
    uniform float c_smoothing; 

    #pragma transparent 
    #pragma body 

    vec3 textureColor = _surface.diffuse.rgb; 

    float maskY = 0.2989 * c_colorToReplace.r + 0.5866 * c_colorToReplace.g + 0.1145 * c_colorToReplace.b; 
    float maskCr = 0.7132 * (c_colorToReplace.r - maskY); 
    float maskCb = 0.5647 * (c_colorToReplace.b - maskY); 

    float Y = 0.2989 * textureColor.r + 0.5866 * textureColor.g + 0.1145 * textureColor.b; 
    float Cr = 0.7132 * (textureColor.r - Y); 
    float Cb = 0.5647 * (textureColor.b - Y); 

    float blendValue = smoothstep(c_thresholdSensitivity, c_thresholdSensitivity + c_smoothing, distance(vec2(Cr, Cb), vec2(maskCr, maskCb))); 

    float a = blendValue; 
    _surface.transparent.a = a; 
    """ 

    shaderModifiers = [ 
     .surface: surfaceShader, 
    ] 
} 

required public init?(coder aDecoder: NSCoder) { 
    fatalError("init(coder:) has not been implemented") 
} 

//setting background color to be keyed out; the derivation from
//backgroundColor is left commented out in favor of a hard-coded vector
//tuned for this video's key color
private func didSetBackgroundColor() { 
    //let rgb = backgroundColor.cgColor.components!.map { Float($0) } 
    //let vector = SCNVector3(x: rgb[0], y: rgb[1], z: rgb[2]) 
    let vector = SCNVector3(x: 0.216, y: 0.357, z: 0.663) 
    setValue(vector, forKey: "c_colorToReplace") 
} 

private func didSetSmoothing() { 
    setValue(smoothing, forKey: "c_smoothing") 
} 

private func didSetThresholdSensitivity() { 
    setValue(thresholdSensitivity, forKey: "c_thresholdSensitivity") 
} 
} 

Here is the code that plays the keyed video on an SCNPlane in ARKit:

import UIKit 
import ARKit 

class ARTransVC: UIViewController{ 

@IBOutlet weak var arSceneView: ARSCNView! 
let configuration = ARWorldTrackingConfiguration() 

private var player: AVPlayer = { 
    guard let url = Bundle.main.url(forResource: "FY3A4278", withExtension: "mp4") else { fatalError() } 
    return AVPlayer(url: url) 
}() 

override func viewDidLoad() { 
    super.viewDidLoad() 
    self.arSceneView.debugOptions = [ARSCNDebugOptions.showWorldOrigin, ARSCNDebugOptions.showFeaturePoints] 
    self.arSceneView.session.run(configuration) 

    //a delay for ARKit to capture the surroundings 
    DispatchQueue.main.asyncAfter(deadline: .now() + 3) { 

     // A SpriteKit scene to contain the SpriteKit video node 
     let spriteKitScene = SKScene(size: CGSize(width: self.arSceneView.frame.width, height: self.arSceneView.frame.height)) 
     spriteKitScene.scaleMode = .aspectFit 
     spriteKitScene.backgroundColor = .clear 

     //Create the SpriteKit video node, containing the video player 
     let videoSpriteKitNode = SKVideoNode(avPlayer: self.player) 
     videoSpriteKitNode.position = CGPoint(x: spriteKitScene.size.width/2.0, y: spriteKitScene.size.height/2.0) 
     videoSpriteKitNode.size = spriteKitScene.size 
     videoSpriteKitNode.yScale = -1.0 
     videoSpriteKitNode.play() 
     spriteKitScene.addChild(videoSpriteKitNode) 

     // To make the video loop 
     self.player.actionAtItemEnd = .none 
     NotificationCenter.default.addObserver(
      self, 
      selector: #selector(ARTransVC.playerItemDidReachEnd), 
      name: NSNotification.Name.AVPlayerItemDidPlayToEndTime, 
      object: self.player.currentItem) 

     // Create the SceneKit scene 
     let scene = SCNScene() 
     self.arSceneView.scene = scene 

     //Create a SceneKit plane and add the SpriteKit scene as its material 
     let background = SCNPlane(width: CGFloat(1), height: CGFloat(1)) 
     background.firstMaterial?.diffuse.contents = spriteKitScene 
     let chromaKeyMaterial = ChromaKeyMaterial() 
     chromaKeyMaterial.diffuse.contents = self.player 

     let backgroundNode = SCNNode(geometry: background) 
     backgroundNode.geometry?.firstMaterial?.isDoubleSided = true 
     backgroundNode.geometry!.materials = [chromaKeyMaterial] 

     backgroundNode.position = SCNVector3(0,0,-2.0) 
     scene.rootNode.addChildNode(backgroundNode) 

     //video does not start without delaying the player 
     //playing the video before just results in [SceneKit] Error: Cannot get pixel buffer (CVPixelBufferRef) 
     DispatchQueue.main.asyncAfter(deadline: .now() + 1) { 
      self.player.seek(to: CMTimeMakeWithSeconds(1, 1000)) 
      self.player.play() 
     } 
    } 
} 

@objc func playerItemDidReachEnd(notification: NSNotification) { 
    if let playerItem: AVPlayerItem = notification.object as? AVPlayerItem { 
     playerItem.seek(to: kCMTimeZero, completionHandler: nil) 
    } 
}
}

I was getting [SceneKit] Error: Cannot get pixel buffer (CVPixelBufferRef), which was apparently fixed in iOS 11.2. For now, I ended up with a somewhat silly workaround: restarting the video after a one-second delay. A better approach to that would be highly appreciated.
