2016-08-15 16 views
1

ビデオにウォーターマークを追加します。ウォーターマーク層の原点を (10, 10) に設定しましたが、エクスポート後はウォーターマークが左下に表示されます。y を大きくすると、ウォーターマークは上に移動します。まるで y 軸が下から上に向かって増加しているかのようです。AVMutableComposition の y 軸が反転しているように見えます。

誰かが何か提案できますか?

// Builds a composition from the bundled "video.mov", overlays a watermark
// image at (10, 10) from the top-left corner, and exports the result to a
// timestamped .mov file in the Documents directory.
NSString *path = [[NSBundle mainBundle] pathForResource:@"video" ofType:@"mov"];
NSURL *file = [NSURL fileURLWithPath:path];
AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:file options:nil];
self.asset = videoAsset;

AVMutableComposition *mixComposition = [AVMutableComposition composition];
AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVMutableCompositionTrack *compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
AVAssetTrack *clipVideoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] firstObject];
AVAssetTrack *clipAudioTrack = [[videoAsset tracksWithMediaType:AVMediaTypeAudio] firstObject];

// FIX: check insertTimeRange: return values instead of passing error:nil —
// a silent insertion failure would otherwise export an empty composition.
NSError *insertError = nil;
if (clipAudioTrack)
{
    if (![compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
                                        ofTrack:clipAudioTrack
                                         atTime:kCMTimeZero
                                          error:&insertError]) {
        NSLog(@"Audio track insertion failed: %@", insertError);
    }
}
if (![compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
                                    ofTrack:clipVideoTrack
                                     atTime:kCMTimeZero
                                      error:&insertError]) {
    NSLog(@"Video track insertion failed: %@", insertError);
}
[compositionVideoTrack setPreferredTransform:clipVideoTrack.preferredTransform];

CGSize videoSize = [clipVideoTrack naturalSize];
CALayer *parentLayer = [CALayer layer];
CALayer *videoLayer = [CALayer layer];
parentLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
videoLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
// FIX: Core Animation's default coordinate system has its origin at the
// BOTTOM-left, so a frame origin of (10, 10) lands the watermark in the
// bottom-left corner and increasing y moves it UP. Flipping the parent
// layer's geometry gives UIKit-style top-left coordinates, which is what
// the (10, 10) origin below expects.
parentLayer.geometryFlipped = YES;

UIImage *myImage = [UIImage imageNamed:@"watermark"];
CALayer *aLayer = [CALayer layer];
CGRect frame = CGRectMake(10, 10, CGImageGetWidth(myImage.CGImage), CGImageGetHeight(myImage.CGImage));
aLayer.frame = frame;
aLayer.contents = (id)myImage.CGImage;

// Order matters: the watermark layer must be on top of the video layer.
[parentLayer addSublayer:videoLayer];
[parentLayer addSublayer:aLayer];

AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
videoComposition.frameDuration = CMTimeMake(1, 30);
videoComposition.renderSize = videoSize;
videoComposition.animationTool = [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];

AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, [mixComposition duration]);
AVAssetTrack *videoTrack = [[mixComposition tracksWithMediaType:AVMediaTypeVideo] firstObject];
AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
instruction.layerInstructions = @[layerInstruction];
videoComposition.instructions = @[instruction];

// Build a unique, timestamped destination path inside Documents.
NSString *documentsDirectory = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) firstObject];
NSDateFormatter *dateFormatter = [[NSDateFormatter alloc] init];
[dateFormatter setDateFormat:@"yyyy-MM-dd_HH-mm-ss"];
// FIX: "utput" typo in the file name.
NSString *destinationPath = [documentsDirectory stringByAppendingFormat:@"/output_%@.mov", [dateFormatter stringFromDate:[NSDate date]]];

AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetMediumQuality];
exportSession.videoComposition = videoComposition;

exportSession.outputURL = [NSURL fileURLWithPath:destinationPath];
// FIX: the container type must match the ".mov" extension of the output
// path; the original used AVFileTypeMPEG4 with a .mov file name.
exportSession.outputFileType = AVFileTypeQuickTimeMovie;
[exportSession exportAsynchronouslyWithCompletionHandler:^{
    switch (exportSession.status)
    {
        case AVAssetExportSessionStatusCompleted:
            NSLog(@"Export OK");
            if (UIVideoAtPathIsCompatibleWithSavedPhotosAlbum(destinationPath)) {
                UISaveVideoAtPathToSavedPhotosAlbum(destinationPath, self, nil, nil);
            }
            break;
        case AVAssetExportSessionStatusFailed:
            NSLog(@"AVAssetExportSessionStatusFailed: %@", exportSession.error);
            break;
        case AVAssetExportSessionStatusCancelled:
            NSLog(@"Export Cancelled");
            break;
        default:
            break;
    }
}];
+0

`parentLayer.geometryFlipped = YES;` で問題が解決します。参考: http://stackoverflow.com/questions/6749216/how-to-properly-export-calayer-on-top-of-avmutablecomposition-with-avassetexport –

答えて

0

私は過去にウォーターマークを追加するための正しいアンカーポイントを見つけるために多くの問題がありました。コードを投稿します。右下隅にウォーターマークが付いたビデオを生成することに注意してください(5ピクセルのマージン)。

次の点に注意してください:

logoImageView.frame = CGRectMake(finalSize.width - ((logoImageView.frame.size.width/2) + 5), 
             finalSize.height - ((logoImageView.frame.size.height/2) + 5), 
             logoImageView.frame.size.width/2, 
             logoImageView.frame.size.height/2) 

最終的なコード:

final class QUWatermarkManager {

    /// Overlays `imageLayer` — plus a half-size, 50%-alpha "logo_nav2" image
    /// pinned to the bottom-right corner with a 5pt margin — on top of
    /// `videoAsset`, then exports the result as an MPEG-4 file into the
    /// Documents directory.
    ///
    /// - parameter videoAsset: the source video to watermark.
    /// - parameter imageLayer: the watermark layer; its width also drives the
    ///   render size. NOTE(review): the render size is square (width used for
    ///   both dimensions) — confirm a square output is intended.
    /// - parameter flag: when true, a Photos-compatible export is also written
    ///   to the Saved Photos album.
    /// - parameter completion: invoked on the main queue with the final export
    ///   status, the session and the output URL. May be nil.
    static func watermark(video videoAsset:AVAsset, imageLayer : CALayer, saveToLibrary flag : Bool, completion : ((status : AVAssetExportSessionStatus!, session: AVAssetExportSession!, outputURL : NSURL!) ->())?) {

        // H.264 encoders want dimensions divisible by 16: round the requested
        // width up to the next multiple of 16.
        var finalSize = CGSizeMake(imageLayer.frame.size.width, imageLayer.frame.size.width)
        var computedVideoSize = finalSize.width
        while (computedVideoSize % 16 > 0) { // find the right resolution that can be divided by 16
            computedVideoSize++;
        }
        finalSize = CGSizeMake(computedVideoSize, computedVideoSize)

        let clipVideoTrack = videoAsset.tracksWithMediaType(AVMediaTypeVideo).first! as AVAssetTrack
        let videoSize = clipVideoTrack.naturalSize
        let scale = finalSize.width / videoSize.width

        let composition = AVMutableComposition()
        composition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: CMPersistentTrackID())

        let videoComposition = AVMutableVideoComposition()
        videoComposition.renderSize = CGSizeMake(finalSize.width, finalSize.width)
        videoComposition.frameDuration = CMTimeMake(1, 30)

        let instruction = AVMutableVideoCompositionInstruction()
        // NOTE(review): hard 180s cap on the instruction time range — videos
        // longer than 3 minutes are truncated; use videoAsset.duration if
        // that is not intended.
        instruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(180, 30))

        // Scale the source track so it fills the computed render size.
        let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: clipVideoTrack)
        let scaledTransform = CGAffineTransformScale(clipVideoTrack.preferredTransform, scale, scale)
        layerInstruction.setTransform(scaledTransform, atTime: kCMTimeZero)
        instruction.layerInstructions = [layerInstruction]
        videoComposition.instructions = [instruction]

        // Core Animation's default origin is bottom-left; flipping the layer
        // geometry keeps UIKit-style top-left coordinates for the frames below.
        let parentLayer = CALayer()
        parentLayer.frame = CGRectMake(0, 0, finalSize.width, finalSize.height)
        parentLayer.geometryFlipped = true

        let videoLayer = CALayer()
        videoLayer.frame = CGRectMake(0, 0, finalSize.width, finalSize.height)
        videoLayer.geometryFlipped = true

        // Half-size, semi-transparent logo anchored bottom-right (5pt margin).
        let logoImageView = UIImageView(image: UIImage(named: "logo_nav2"))
        logoImageView.alpha = 0.5
        logoImageView.contentMode = .ScaleAspectFit
        logoImageView.frame = CGRectMake(finalSize.width - ((logoImageView.frame.size.width / 2) + 5),
                                         finalSize.height - ((logoImageView.frame.size.height / 2) + 5),
                                         logoImageView.frame.size.width / 2,
                                         logoImageView.frame.size.height / 2)

        parentLayer.addSublayer(videoLayer)
        parentLayer.addSublayer(imageLayer)
        parentLayer.addSublayer(logoImageView.layer)

        videoComposition.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videoLayer, inLayer: parentLayer)

        // 4 - Build a unique output path inside Documents.
        let documentDirectory = NSSearchPathForDirectoriesInDomains(.DocumentDirectory, .UserDomainMask, true)[0]
        let dateFormatter = NSDateFormatter()
        dateFormatter.dateStyle = .LongStyle
        dateFormatter.timeStyle = .FullStyle
        let date = dateFormatter.stringFromDate(NSDate())
        let savePath = documentDirectory.stringByAppendingPathComponent("watermarkVideo-\(date).mov")
        let url = NSURL(fileURLWithPath: savePath)

        let exporter = AVAssetExportSession(asset: videoAsset, presetName: AVAssetExportPresetHighestQuality)!
        exporter.videoComposition = videoComposition
        exporter.outputURL = url
        exporter.outputFileType = AVFileTypeMPEG4
        exporter.shouldOptimizeForNetworkUse = true
        exporter.canPerformMultiplePassesOverSourceMediaData = true

        exporter.exportAsynchronouslyWithCompletionHandler() {
            dispatch_async(dispatch_get_main_queue(), {() -> Void in
                if exporter.status == AVAssetExportSessionStatus.Completed {
                    let outputURL = exporter.outputURL
                    if flag {
                        // Save to the photo library, then report completion.
                        let library = ALAssetsLibrary()
                        if library.videoAtPathIsCompatibleWithSavedPhotosAlbum(outputURL) {
                            library.writeVideoAtPathToSavedPhotosAlbum(outputURL,
                                completionBlock: { (assetURL:NSURL!, error:NSError!) -> Void in
                                    // FIX: optional chaining — the original
                                    // force-unwrapped the nil-able completion.
                                    completion?(status: .Completed, session: exporter, outputURL: outputURL)
                            })
                        } else {
                            // FIX: the original never called back when the
                            // export was not album-compatible, leaving the
                            // caller waiting forever.
                            completion?(status: .Completed, session: exporter, outputURL: outputURL)
                        }
                    } else {
                        // Don't save to library; just report completion.
                        completion?(status: .Completed, session: exporter, outputURL: outputURL)
                    }

                } else {
                    // Export failed or was cancelled.
                    completion?(status: exporter.status, session: exporter, outputURL: exporter.outputURL)
                }
            })
        }

    }
}
+0

http://stackoverflow.com/questions/6749216/how-to-properly-export-calayer-on-top-of-avmutablecomposition-with-avassetexport を参照してください。parentLayer.geometryFlipped を YES に設定すると解決します。 –

関連する問題