I set the rear camera to 120 fps. However, when I check the sample output in captureOutput() and print the time at which the function is called (see below), the interval between calls is about 33 ms (30 fps). No matter what fps I set with activeVideoMinFrameDuration and activeVideoMaxFrameDuration, the fps observed in captureOutput() is always 30. Why does the video output always run at 30 fps even though it is configured otherwise?

I tested this on an iPhone 6, which supports slow-motion video. I have read Apple's official documentation at https://developer.apple.com/library/ios/documentation/AudioVideo/Conceptual/AVFoundationPG/Articles/04_MediaCapture.html. Any clues?

import UIKit
import AVFoundation

class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate 
{ 
    var captureDevice: AVCaptureDevice? 
    let captureSession = AVCaptureSession() 
    let videoCaptureOutput = AVCaptureVideoDataOutput() 

    var startTime = NSDate.timeIntervalSinceReferenceDate() 

    // press button to start the video session 
    @IBAction func startPressed() { 
     if captureSession.inputs.count > 0 && captureSession.outputs.count > 0 { 
      startTime = NSDate.timeIntervalSinceReferenceDate() 
      captureSession.startRunning() 
     } 
    } 

    override func viewDidLoad() { 
     super.viewDidLoad() 

     // set capture session resolution 
     captureSession.sessionPreset = AVCaptureSessionPresetLow 

     let devices = AVCaptureDevice.devices() 
     var avFormat: AVCaptureDeviceFormat? = nil 
     for device in devices { 
      if (device.hasMediaType(AVMediaTypeVideo)) { 
       if (device.position == AVCaptureDevicePosition.Back) { 
        for vFormat in device.formats { 
         let ranges = vFormat.videoSupportedFrameRateRanges as! [AVFrameRateRange] 
         let filtered: Array<Double> = ranges.map({ $0.maxFrameRate }).filter({$0 >= 119.0}) 
         if !filtered.isEmpty { 
          // found a good device with good format! 
          captureDevice = device as? AVCaptureDevice 
          avFormat = vFormat as? AVCaptureDeviceFormat 
         } 
        } 
       } 
      } 
     } 

     // use the found capture device and format to set things up 
     if let dv = captureDevice { 
      // configure 
      do { 
       try dv.lockForConfiguration() 
      } catch _ { 
       print("failed locking device") 
      } 

      dv.activeFormat = avFormat 
      dv.activeVideoMinFrameDuration = CMTimeMake(1, 120) 
      dv.activeVideoMaxFrameDuration = CMTimeMake(1, 120) 
      dv.unlockForConfiguration() 

      // input -> session 
      do { 
       let input = try AVCaptureDeviceInput(device: dv) 
       if captureSession.canAddInput(input) { 
        captureSession.addInput(input) 
       } 
      } catch _ { 
       print("failed adding capture device as input to capture session") 
      } 
     } 

     // output -> session 
     let videoQueue = dispatch_queue_create("videoQueue", DISPATCH_QUEUE_SERIAL) 
     videoCaptureOutput.setSampleBufferDelegate(self, queue: videoQueue) 
     videoCaptureOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey: Int(kCVPixelFormatType_32BGRA)] 
     videoCaptureOutput.alwaysDiscardsLateVideoFrames = true 
     if captureSession.canAddOutput(videoCaptureOutput) { 
      captureSession.addOutput(videoCaptureOutput) 
     } 
    } 

    func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) 
    { 
     print("\(NSDate.timeIntervalSinceReferenceDate() - startTime)") 

     // More pixel/frame processing here 
    } 
} 

Answer


Found the answer: swap the order of the two blocks, "configure" and "input -> session", so that the device input is added to the capture session before the 120 fps format and frame durations are configured.
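For reference, a minimal sketch of the reordered blocks inside viewDidLoad(), assuming everything else in the question's code (the device discovery, dv, avFormat, and the output setup) stays as posted:

     // input -> session (attach the device to the session first)
     do { 
      let input = try AVCaptureDeviceInput(device: dv) 
      if captureSession.canAddInput(input) { 
       captureSession.addInput(input) 
      } 
     } catch _ { 
      print("failed adding capture device as input to capture session") 
     } 

     // configure the format and frame rate after the input is attached
     do { 
      try dv.lockForConfiguration() 
      dv.activeFormat = avFormat 
      dv.activeVideoMinFrameDuration = CMTimeMake(1, 120) 
      dv.activeVideoMaxFrameDuration = CMTimeMake(1, 120) 
      dv.unlockForConfiguration() 
     } catch _ { 
      print("failed locking device") 
     } 

With the input already attached, setting activeFormat moves the session to AVCaptureSessionPresetInputPriority, so the session preset no longer overrides the 120 fps format when startRunning() is called.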
