2017-06-19

I am adding an image watermark to a video with the code below, but the frames of the resulting video come out rotated 180 degrees. I just want the output to be the same as the source video, with the watermark added. Please suggest a solution.

-(void)watermarkVideoAtURL:(NSURL *)url fb:(BOOL)fb withCompletionHandler:(void(^)(bool success, NSURL *assetsURL, NSError *error))completionHandler { 

    AVURLAsset* videoAsset = [[AVURLAsset alloc]initWithURL:url options:nil]; 
    AVMutableComposition *mixComposition = [AVMutableComposition composition]; 

    AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid]; 
    AVAssetTrack *clipVideoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] lastObject]; 
    AVMutableCompositionTrack *compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid]; 

    AVAssetTrack *clipAudioTrack = [[videoAsset tracksWithMediaType:AVMediaTypeAudio] lastObject]; 
    [compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration) ofTrack:clipVideoTrack atTime:kCMTimeZero error:nil]; 
    [compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration) ofTrack:clipAudioTrack atTime:kCMTimeZero error:nil]; 
    [compositionVideoTrack setPreferredTransform:[[[videoAsset tracksWithMediaType:AVMediaTypeVideo] lastObject] preferredTransform]]; 
    CGSize sizeOfVideo = [videoAsset naturalSize]; 

    CALayer *parentLayer = [CALayer layer]; 
    CALayer *videoLayer = [CALayer layer]; 

    parentLayer.frame = CGRectMake(0, 0, sizeOfVideo.width, sizeOfVideo.height); 
    videoLayer.frame = CGRectMake(0, 0, sizeOfVideo.width, sizeOfVideo.height); 

    // Image of watermark 
    UIImage *myImage = [self imageByApplyingAlpha:watermarkOpacityFactor toImage:[UIImage imageNamed:@"iconbig"]]; 
    CALayer *layerCa = [CALayer layer]; 
    layerCa.contents = (id)myImage.CGImage; 
    layerCa.frame = CGRectMake(10, sizeOfVideo.height - 50, 50, 50); 
    layerCa.opacity = 1.0; 

    CALayer *layerCa2 = [CALayer layer]; 
    layerCa2.contents = (id)myImage.CGImage; 
    layerCa2.frame = CGRectMake(sizeOfVideo.width - 60, 10, 50, 50); 
    layerCa2.opacity = 1.0; 

    [parentLayer addSublayer:videoLayer]; 
    [parentLayer addSublayer:layerCa]; 
    [parentLayer addSublayer:layerCa2]; 

    AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition]; 
    videoComposition.frameDuration = CMTimeMake(1, 30); 
    videoComposition.renderSize = sizeOfVideo; 
    videoComposition.animationTool = [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer]; 

    AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction]; 

    instruction.timeRange = CMTimeRangeMake(kCMTimeZero, [mixComposition duration]); 
    AVAssetTrack *videoTrack = [[mixComposition tracksWithMediaType:AVMediaTypeVideo] lastObject]; 


    AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack]; 

    instruction.layerInstructions = [NSArray arrayWithObject:layerInstruction]; 
    videoComposition.instructions = [NSArray arrayWithObject:instruction]; 


    NSString *documentsDirectory = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES)objectAtIndex:0]; 
    NSDateFormatter *dateFormatter = [[NSDateFormatter alloc] init]; 
    [dateFormatter setDateFormat:@"yyyy-MM-dd_HH-mm-ss"]; 
    NSString *destinationPath = [documentsDirectory stringByAppendingFormat:@"/output_%@.mov", [dateFormatter stringFromDate:[NSDate date]]]; 

    AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality]; 
    exportSession.videoComposition = videoComposition; 
    exportSession.outputURL = [NSURL fileURLWithPath:destinationPath]; 
    exportSession.outputFileType = AVFileTypeQuickTimeMovie; 

    [exportSession exportAsynchronouslyWithCompletionHandler:^{
        switch (exportSession.status) {
            case AVAssetExportSessionStatusCompleted: {
                ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
                [library writeVideoAtPathToSavedPhotosAlbum:exportSession.outputURL completionBlock:^(NSURL *assetURL, NSError *error) {
                    if (!error) {
                        completionHandler(YES, assetURL, nil);
                    } else {
                        completionHandler(NO, nil, error);
                    }
                }];
            }
                break;

            case AVAssetExportSessionStatusFailed: {
                completionHandler(NO, nil, exportSession.error);
            }
                break;

            case AVAssetExportSessionStatusCancelled: {
                completionHandler(NO, nil, exportSession.error);
            }
                break;

            default:
                break;
        }
    }];
} 

Answer


Try setting the AVAssetTrack's preferredTransform on the layer instruction with setTransform:atTime:. From the documentation:

Sets a fixed transform to apply from the specified time until the next time at which a transform is set [...]. Before the first specified time for which a transform is set, the affine transform is held constant at the value of CGAffineTransformIdentity; after the last time for which a transform is set, the affine transform is held constant at that last value.
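A minimal sketch of how that could look inside the question's method, reusing the clipVideoTrack, layerInstruction, and videoComposition variables from the posted code. This is untested, and the portrait check is an assumption for the common 90/270-degree rotation case:

    // Apply the source track's preferredTransform through the layer
    // instruction so the video composition renders the frames upright.
    CGAffineTransform transform = clipVideoTrack.preferredTransform;
    [layerInstruction setTransform:transform atTime:kCMTimeZero];

    // If the transform rotates the video by 90 or 270 degrees (portrait
    // footage), the render size should use the swapped track dimensions.
    CGSize trackSize = clipVideoTrack.naturalSize;
    BOOL isPortrait = (transform.b == 1 && transform.c == -1) ||
                      (transform.b == -1 && transform.c == 1);
    videoComposition.renderSize = isPortrait
        ? CGSizeMake(trackSize.height, trackSize.width)
        : trackSize;

Once an AVVideoComposition is supplied to the export session, the orientation of each frame is determined by the layer instructions rather than by the setPreferredTransform: call on the composition track, which is why the posted code comes out rotated despite that call.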
