
I'm trying to build an app like SnapChat. From the same button, I want the user to be able to take a photo (tap) and record a video (long press). Is there a way to capture both photos and video from the same AVCaptureSession?

I'm using AVFoundation. The hard part is that I can't get this to work correctly with the same AVCaptureSession, i.e. with only one preview layer for both kinds of capture. Depending on how the user interacts with the record button, how can I start the appropriate capture? Has anyone done something similar already?
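For context, the interaction I'm after on the view-controller side would look roughly like this (just a sketch with illustrative names; recordButton, handleTap and handleLongPress are placeholders, and RecordCamera is the class shown below):

import UIKit

// Sketch of the intended interaction (illustrative names, not part of the original code):
// a tap on the button takes a photo, a long press starts/stops video recording.
class RecordViewController: UIViewController {

    @IBOutlet weak var recordButton: UIButton!
    var recordCamera: RecordCamera!

    override func viewDidLoad() {
        super.viewDidLoad()
        recordCamera = RecordCamera(view: view)

        // Same button, two gestures
        recordButton.addGestureRecognizer(UITapGestureRecognizer(target: self, action: #selector(handleTap)))
        recordButton.addGestureRecognizer(UILongPressGestureRecognizer(target: self, action: #selector(handleLongPress(_:))))
    }

    func handleTap() {
        recordCamera.takePicture()
    }

    func handleLongPress(_ recognizer: UILongPressGestureRecognizer) {
        switch recognizer.state {
        case .began:
            recordCamera.startRecording()
        case .ended, .cancelled:
            recordCamera.stopRecording()
        default:
            break
        }
    }
}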

Here is part of my code:

import UIKit 
import AVFoundation 

protocol RecordCameraDelegate { 
    func didSavedOutputFile(url: URL!, error: Error?) 
    func didSavedImage(image: UIImage?) 
} 

// MARK: - Camera 
class RecordCamera : NSObject { 

    var videoLayer : AVCaptureVideoPreviewLayer! 
    var delegate : RecordCameraDelegate! 
    var capturedPhoto : UIImage? 

    fileprivate var captureSession = AVCaptureSession() 
    fileprivate var photoSession = AVCaptureSession() 

    fileprivate var movieOutput = AVCaptureMovieFileOutput() 
    fileprivate var cameraDevice : AVCaptureDevicePosition! 
    fileprivate let stillImageOutput = AVCaptureStillImageOutput() 

    // Devices 
    fileprivate lazy var frontCameraDevice: AVCaptureDevice? = { 
     let devices = AVCaptureDevice.devices(withMediaType: AVMediaTypeVideo) as! [AVCaptureDevice] 
     return devices.filter { $0.position == .front }.first 
    }() 

    fileprivate lazy var backCameraDevice: AVCaptureDevice? = { 
     let devices = AVCaptureDevice.devices(withMediaType: AVMediaTypeVideo) as! [AVCaptureDevice] 
     return devices.filter { $0.position == .back }.first 
    }() 

    fileprivate lazy var micDevice: AVCaptureDevice? = { 
     return AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeAudio) 
    }() 

    fileprivate var tempFilePath: URL = { 
     let tempPath = URL(fileURLWithPath: NSTemporaryDirectory()).appendingPathComponent("bighug").appendingPathExtension("mp4").absoluteString 
     if FileManager.default.fileExists(atPath: tempPath) { 
      do { 
       try FileManager.default.removeItem(atPath: tempPath) 
      } catch let error { print("Can't create File URL: \(String(describing: error))") } 
     } 
     return URL(string: tempPath)! 
    }() 

    // MARK: - Initialization 
    init(view: UIView, cameraPosition: AVCaptureDevicePosition = .front) { 
     super.init() 

     cameraDevice = cameraPosition 

     // Video 
     self.configureToRecord(view: view) 
     // Photo 
     self.configureToCapturePhoto() 
    } 

    func configureToRecord(view: UIView? = nil) { 

     captureSession.beginConfiguration() 
     defer { 
      // commit & stop session 
      captureSession.commitConfiguration() 
      if !captureSession.isRunning { captureSession.startRunning() } 
     } 

     captureSession.sessionPreset = AVCaptureSessionPresetHigh 

     // Start configuration 
     if !captureSession.isRunning { 

      // layer 
      if let validView = view { 
       videoLayer = AVCaptureVideoPreviewLayer(session: captureSession) 
       videoLayer.videoGravity = AVLayerVideoGravityResizeAspectFill 
       videoLayer.frame = validView.bounds 
       validView.layer.addSublayer(videoLayer) 
      } 

      // add device inputs (front camera and mic) 
      if cameraDevice == .front { 
       captureSession.addInput(deviceInputFrom(device: frontCameraDevice)) 
      } else { 
       captureSession.addInput(deviceInputFrom(device: backCameraDevice)) 
      } 
     } 

     captureSession.addInput(deviceInputFrom(device: micDevice)) 

     // Output 
     movieOutput.movieFragmentInterval = kCMTimeInvalid 

     // Remove previous output 
     if let existingOutput = captureSession.outputs.first as? AVCaptureOutput { 
      captureSession.removeOutput(existingOutput) 
     } 
     // Add Movie Output 
     if captureSession.canAddOutput(movieOutput) { 
      captureSession.addOutput(movieOutput) 
     } 
    } 

    func configureToCapturePhoto() { 

     photoSession.beginConfiguration() 
     defer { photoSession.commitConfiguration() } 

     photoSession.sessionPreset = AVCaptureSessionPresetPhoto 
     stillImageOutput.outputSettings = [AVVideoCodecKey:AVVideoCodecJPEG] 

     if #available(iOS 10.0, *) { 
      let cameraOutput = AVCapturePhotoOutput() 
      // Add Photo Output 
      if photoSession.canAddOutput(cameraOutput) { 
       photoSession.addOutput(cameraOutput) 
      } 
     } 
     else { 
      // Add Photo Output 
      if photoSession.canAddOutput(stillImageOutput) { 
       photoSession.addOutput(stillImageOutput) 
      } 
     } 
    } 

    func takePicture() { 
     if #available(iOS 10.0, *) { 
      let cameraOutput = photoSession.outputs.first as! AVCapturePhotoOutput 
      // Capture Picture 
      let settings = AVCapturePhotoSettings() 
      let previewPixelType = settings.availablePreviewPhotoPixelFormatTypes.first! 
      let previewFormat = [ 
       kCVPixelBufferPixelFormatTypeKey as String: previewPixelType, 
       kCVPixelBufferWidthKey as String: 828, 
       kCVPixelBufferHeightKey as String: 828 
      ] 
      settings.previewPhotoFormat = previewFormat 
      cameraOutput.capturePhoto(with: settings, delegate: self) 
     } 
     else { 
      if let videoConnection = stillImageOutput.connection(withMediaType: AVMediaTypeVideo) { 
       stillImageOutput.captureStillImageAsynchronously(from: videoConnection) { (imageDataSampleBuffer, error) -> Void in 
        let imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(imageDataSampleBuffer) 
        //UIImageWriteToSavedPhotosAlbum(UIImage(data: imageData!)!, nil, nil, nil) 
        guard let validData = imageData else { self.delegate?.didSavedImage(image: nil); return } 
        self.capturedPhoto = UIImage(data: validData) 
       } 
      } 
     } 
    } 

    // MARK: - Record Methods 
    func startRecording() { 
     // Take picture 
     print("Camera started recording") 
     self.takePicture() 
     // Start recording 
     movieOutput.startRecording(
      toOutputFileURL: tempFilePath, 
      recordingDelegate: self 
     ) 
    } 

    func stopRecording() { 
     print("Camera stopped recording") 
     movieOutput.stopRecording() 
    } 

    // MARK: - Modes 
    func cameraMode() { 

     captureSession.beginConfiguration() 
     defer { captureSession.commitConfiguration() } 

     let inputs: [AVCaptureInput] = captureSession.inputs?.flatMap { $0 as? AVCaptureInput } ?? [] 

     // From 
     if cameraDevice == .front { 
      if let validFrontDevice = deviceInputFrom(device: frontCameraDevice) { 
       if !inputs.contains(validFrontDevice) { 
        captureSession.addInput(validFrontDevice) 
       } 
      } 
     } 
     // Back 
     if cameraDevice == .back { 
      if let validBackDevice = deviceInputFrom(device: backCameraDevice) { 
       if !inputs.contains(validBackDevice) { 
        captureSession.addInput(validBackDevice) 
       } 
      } 
     } 

     print("Record Camera --> Set VIDEO Mode") 
    } 

    func audioMode() { 

     captureSession.beginConfiguration() 
     defer { captureSession.commitConfiguration() } 

     let inputs: [AVCaptureInput] = captureSession.inputs?.flatMap { $0 as? AVCaptureInput } ?? [] 

     // Remove.. 
     for input in inputs { 
      if let deviceInput = input as? AVCaptureDeviceInput { 
       if deviceInput.device == backCameraDevice 
       || deviceInput.device == frontCameraDevice { 
        captureSession.removeInput(deviceInput) 
       } 
      } 
     } 

     print("Record Camera --> Set AUDIO Mode") 
    } 

    // MARK: - Util methods 
    fileprivate func deviceInputFrom(device: AVCaptureDevice?) -> AVCaptureDeviceInput? { 
     guard let validDevice = device else { return nil } 
     do { 
      return try AVCaptureDeviceInput(device: validDevice) 
     } catch let outError { 
      print("Device setup error occured: \(String(describing: outError))") 
      return nil 
     } 
    } 

    func swipeCamera() { 

     cameraDevice = cameraDevice == .front ? .back : .front 

     captureSession.beginConfiguration() 
     defer { captureSession.commitConfiguration() } 

     let inputs: [AVCaptureInput] = captureSession.inputs?.flatMap { $0 as? AVCaptureInput } ?? [] 

     // Remove... 
     for input in inputs { 
      if let deviceInput = input as? AVCaptureDeviceInput { 
       if deviceInput.device == backCameraDevice && cameraDevice == .front { 
        captureSession.removeInput(deviceInput) 
        photoSession.removeInput(deviceInput) 
        break; 
       } else if deviceInput.device == frontCameraDevice && cameraDevice == .back { 
        captureSession.removeInput(deviceInput) 
        photoSession.removeInput(deviceInput) 
        break; 
       } 
      } 
     } 

     // From 
     if cameraDevice == .front { 
      if let validFrontDevice = deviceInputFrom(device: frontCameraDevice) { 
       if !inputs.contains(validFrontDevice) { 
        captureSession.addInput(validFrontDevice) 
        photoSession.addInput(validFrontDevice) 
        print("Record Camera --> Swipe to Front Camera") 
       } 
      } 
     } 
     // Back 
     if cameraDevice == .back { 
      if let validBackDevice = deviceInputFrom(device: backCameraDevice) { 
       if !inputs.contains(validBackDevice) { 
        captureSession.addInput(validBackDevice) 
        photoSession.addInput(validBackDevice) 
        print("Record Camera --> Swipe to Back Camera") 
       } 
      } 
     } 
    } 
} 

// MARK: - Capture Output 
extension RecordCamera : AVCaptureFileOutputRecordingDelegate { 

    func capture(_ captureOutput: AVCaptureFileOutput!, didStartRecordingToOutputFileAt fileURL: URL!, fromConnections connections: [Any]!) { 
     // Not implemented 
    } 

    func capture(_ captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAt outputFileURL: URL!, fromConnections connections: [Any]!, error: Error!) { 
     guard error == nil else { 
      if let photo = capturedPhoto { 
       delegate?.didSavedImage(image: photo) 
      } 
      return 
     } 
     delegate?.didSavedOutputFile(url: outputFileURL, error: error) 
    } 
} 

@available(iOS 10.0, *) 
extension RecordCamera : AVCapturePhotoCaptureDelegate { 

    func capture(_ captureOutput: AVCapturePhotoOutput, didCapturePhotoForResolvedSettings resolvedSettings: AVCaptureResolvedPhotoSettings) { 
     print("Picture taken") 
    } 

    func capture(_ captureOutput: AVCapturePhotoOutput, didFinishProcessingPhotoSampleBuffer photoSampleBuffer: CMSampleBuffer?, previewPhotoSampleBuffer: CMSampleBuffer?, resolvedSettings: AVCaptureResolvedPhotoSettings, bracketSettings: AVCaptureBracketedStillImageSettings?, error: Error?) { 

     guard error == nil else { 
      print("Failed Capturing Picture: \(String(describing: error!.localizedDescription))") 
      capturedPhoto = nil 
      //self.delegate.didSavedImage(image: nil) 
      return 
     } 

     if let sampleBuffer = photoSampleBuffer, let previewBuffer = previewPhotoSampleBuffer, 
      let imageData = AVCapturePhotoOutput.jpegPhotoDataRepresentation(forJPEGSampleBuffer: sampleBuffer, previewPhotoSampleBuffer: previewBuffer) { 
      print("Photo Saved!") 
      capturedPhoto = UIImage(data: imageData) 
      //self.delegate.didSavedImage(image: image) 
     } 

    } 
} 

Answers


I implemented almost the same functionality you need. I created and configured a single capture session. For video output I used the AVCaptureVideoDataOutput class, for audio the AVCaptureAudioDataOutput class, and for photos AVCaptureStillImageOutput.
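For orientation, a single session wired up with those three outputs would look roughly like this (a minimal sketch using the same Swift 3 era API as the rest of this post; the class name, queue labels and device choice are illustrative, not from my project):

import AVFoundation

// Minimal sketch: one AVCaptureSession feeding raw video frames, raw audio buffers
// and still images at the same time (illustrative names, Swift 3 era API).
final class SingleSessionCamera: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate {

    let session = AVCaptureSession()
    let videoOutput = AVCaptureVideoDataOutput()
    let audioOutput = AVCaptureAudioDataOutput()
    let stillImageOutput = AVCaptureStillImageOutput()

    func configure() {
        session.beginConfiguration()
        session.sessionPreset = AVCaptureSessionPresetHigh

        // Inputs: default camera and microphone
        for mediaType in [AVMediaTypeVideo, AVMediaTypeAudio] {
            if let device = AVCaptureDevice.defaultDevice(withMediaType: mediaType),
               let input = try? AVCaptureDeviceInput(device: device),
               session.canAddInput(input) {
                session.addInput(input)
            }
        }

        // Outputs: video and audio buffers go to the sample buffer delegate,
        // stills go to AVCaptureStillImageOutput (pre-iOS 10 API, as above)
        videoOutput.setSampleBufferDelegate(self, queue: DispatchQueue(label: "camera.video"))
        audioOutput.setSampleBufferDelegate(self, queue: DispatchQueue(label: "camera.audio"))
        stillImageOutput.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]

        for output in [videoOutput, audioOutput, stillImageOutput] as [AVCaptureOutput] {
            if session.canAddOutput(output) { session.addOutput(output) }
        }

        session.commitConfiguration()
        session.startRunning()
    }

    // Both video and audio buffers arrive here; while recording they are appended
    // to an AVAssetWriter (see the delegate method below).
    func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) {
        // forward sampleBuffer to the asset writer while recording
    }
}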

Because I needed to perform custom video manipulation, I used AVAssetWriter for recording the video and audio. The recording happens in the AVCaptureVideoDataOutputSampleBufferDelegate method. That delegate method looks like this:

func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) { 
    if !isRecordingVideo { 
     return 
    } 

    if captureOutput == self.videoOutput { 
     assetVideoWriterQueue.async { 
      if self.shouldStartWritingSession { 
       self.assetWriter.startSession(atSourceTime: CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) 
       self.shouldStartWritingSession = false 
      } 

      if self.assetWriterInputCamera.isReadyForMoreMediaData { 
       self.assetWriterInputCamera.append(sampleBuffer) 
      } 
     } 
    } 

    if captureOutput == self.audioOutput { 
     assetAudioWriterQueue.async { 
      let shouldStartWritingSession = self.shouldStartWritingSession 
      if self.assetWriterInputMicrofone.isReadyForMoreMediaData && shouldStartWritingSession == false { 
       self.assetWriterInputMicrofone.append(sampleBuffer) 
      } 

      if shouldStartWritingSession { 
       print("In audioOutput and CANNOT Record") 
      } 
     } 
    } 
} 
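The delegate above relies on writer state that I haven't shown here. A rough sketch of what that setup can look like (assumed; the file name, output settings and queue labels are illustrative, and in a real app these would be properties of the capture class rather than top-level declarations):

import AVFoundation

// Assumed writer setup backing the delegate method above (illustrative settings).
let assetVideoWriterQueue = DispatchQueue(label: "asset.writer.video")
let assetAudioWriterQueue = DispatchQueue(label: "asset.writer.audio")
// Flags the delegate checks before appending buffers
var shouldStartWritingSession = true
var isRecordingVideo = false

let outputURL = URL(fileURLWithPath: NSTemporaryDirectory()).appendingPathComponent("recording.mp4")
let assetWriter = try! AVAssetWriter(outputURL: outputURL, fileType: AVFileTypeMPEG4)

// Compressed H.264 video track (dimensions are just an example)
let assetWriterInputCamera = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: [
    AVVideoCodecKey: AVVideoCodecH264,
    AVVideoWidthKey: 720,
    AVVideoHeightKey: 1280
])
assetWriterInputCamera.expectsMediaDataInRealTime = true

// AAC audio track
let assetWriterInputMicrofone = AVAssetWriterInput(mediaType: AVMediaTypeAudio, outputSettings: [
    AVFormatIDKey: kAudioFormatMPEG4AAC,
    AVNumberOfChannelsKey: 1,
    AVSampleRateKey: 44100
])
assetWriterInputMicrofone.expectsMediaDataInRealTime = true

assetWriter.add(assetWriterInputCamera)
assetWriter.add(assetWriterInputMicrofone)

// startWriting() is called before buffers arrive; the delegate then calls
// startSession(atSourceTime:) with the first video buffer's presentation timestamp.
assetWriter.startWriting()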

And my still image capture looks like this:

func captureStillImage(_ completion: @escaping ((Bool, UIImage?) -> Void)) { 
    guard self.state == .running else { 
     completion(false, nil) 
     return 
    } 

    backgroundQueue.async { 
     let connection = self.stillImageOutpup.connection(withMediaType: AVMediaTypeVideo) 

     self.stillImageOutpup.captureStillImageAsynchronously(from: connection, completionHandler: { (buffer, error) in 
      defer { 
       self.state = .running 
      } 

      guard let buffer = buffer, let imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(buffer) else { 
       DispatchQueue.main.async { 
        completion(false, nil) 
       } 

       return 
      } 

      let image = UIImage(data: imageData) 

      DispatchQueue.main.async { 
       completion(true, image) 
      } 
     }) 
    } 
} 
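Calling it is then as simple as the following (assumed usage; camera stands for whatever object owns the method above):

camera.captureStillImage { success, image in
    guard success, let image = image else { return }
    // hand the UIImage to the UI or save it to the photo library
}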

You can find more details on how to use the asset writer on StackOverflow. For example, you might find this useful.


Thanks @xaoc1024! I'll check it out! –
