2016-10-03 17 views
1

AVAssetとそのAVAssetTrackがあります。サイズは(たとえば width = 1920、height = 1080)です。必要なのは、このアセットを所定の画面サイズ(たとえば width = 320、height = 568)に合わせることです。アセットがランドスケープの場合は90度回転させ、正方形の場合は上下に黒い帯を追加します。しかし現在のコードでは、回転後のランドスケープのアセットが少しズームされてしまいます。つまり、CGAffineTransform を使って AVAsset をデバイスの画面に合わせる処理は、最初のステップまでしか正しく動作していません。

/// Fits the first video track of `asset` into `toSize` (e.g. the device screen),
/// rotating it 90 degrees and aspect-fill scaling/cropping it, then exports the
/// result as an MPEG-4 file and reports the output URL via `handler`.
///
/// @param asset            Source asset; must contain at least one video track.
/// @param toURL            Destination file URL. If nil, the export is written to a
///                         temp path and then moved to [GlobalConst fullMoviePath].
/// @param toSize           Target render size in pixels.
/// @param offsetRatioPoint Optional crop offset expressed as a ratio of the scaled
///                         video size; when NULL the crop is centered.
/// @param handler          Called with the exported file URL on success, or a nil
///                         URL and an NSError on failure.
- (void)changeAsset:(AVAsset*)asset savetoURL:(NSURL*)toURL withSize:(CGSize)toSize offsetRatioPoint:(CGPoint*)offsetRatioPoint completion:(void (^)(NSURL* in_url, NSError* error))handler
{
    AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
    if (!videoTrack)
    {
        if (handler)
            handler(nil, [NSError errorWithDomain:@"com.myapp.error" code:-1 userInfo:@{@"message" : @"there are no video tracks in asset"}]);
        return;
    }

    // NOTE(review): the 4/3 factor applied to naturalSize is unexplained here —
    // presumably it compensates for some upstream crop/aspect assumption; confirm.
    const CGFloat newHeight = [videoTrack naturalSize].height / 3 * 4;
    const CGFloat newWidth  = [videoTrack naturalSize].width  / 3 * 4;

    const CGFloat videoAspectRatio  = newWidth / newHeight;
    const CGFloat toSizeAspectRatio = toSize.width / toSize.height;

    // Aspect-fill: scale the constraining dimension to the target; the other
    // dimension overflows and is cropped by the render rect.
    const BOOL cropsHorizontally = (videoAspectRatio > toSizeAspectRatio);
    const CGFloat scale = cropsHorizontally ? toSize.height / newHeight
                                            : toSize.width  / newWidth;

    CGAffineTransform scaleTrans = CGAffineTransformMakeScale(scale, scale);

    // Shift the overflowing dimension so the requested (or centered) region is visible.
    CGAffineTransform translateTrans = CGAffineTransformIdentity;
    if (cropsHorizontally)
    {
        const CGFloat dx = offsetRatioPoint
            ? offsetRatioPoint->x * newWidth * scale
            : 0.5f * (newWidth * scale - toSize.width);
        translateTrans = CGAffineTransformMakeTranslation(-dx, 0.f);
    }
    else
    {
        const CGFloat dy = offsetRatioPoint
            ? offsetRatioPoint->y * newHeight * scale
            : 0.5f * (newHeight * scale - toSize.height);
        translateTrans = CGAffineTransformMakeTranslation(0.f, -dy);
    }

    // Rotate 90 degrees and reposition the rotated frame inside the render rect.
    // NOTE(review): the -scale*toSize.width term mixes scaled and unscaled
    // coordinate spaces and is the likely cause of the mis-framed ("zoomed")
    // output described in the question — verify against renderSize.
    CGAffineTransform t1 = CGAffineTransformTranslate(translateTrans, toSize.height, -scale * toSize.width);
    CGAffineTransform t2 = CGAffineTransformRotate(t1, M_PI_2);
    CGAffineTransform finalTrans = CGAffineTransformConcat(scaleTrans, t2);

    AVMutableVideoComposition *videoComposition = [[AVMutableVideoComposition alloc] init];
    videoComposition.renderSize = toSize;

    const int32_t frameRate = 30;
    videoComposition.frameDuration = CMTimeMake(1, frameRate);

    AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    instruction.timeRange = CMTimeRangeMake(kCMTimeZero, asset.duration);
    AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
    [layerInstruction setTransform:finalTrans atTime:kCMTimeZero];
    instruction.layerInstructions = @[layerInstruction];
    videoComposition.instructions = @[instruction];

    AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:asset presetName:AVAssetExportPreset640x480];
    exporter.videoComposition = videoComposition;
    exporter.shouldOptimizeForNetworkUse = YES;

    NSString *exportPath = [NSTemporaryDirectory() stringByAppendingPathComponent:kCroppedFileName];
    if (toURL)
        exportPath = toURL.path;

    if ([[NSFileManager defaultManager] fileExistsAtPath:exportPath])
        [[NSFileManager defaultManager] removeItemAtPath:exportPath error:nil];

    NSURL *outURL = [NSURL fileURLWithPath:exportPath];

    exporter.outputURL = outURL;
    exporter.outputFileType = AVFileTypeMPEG4;
    exporter.timeRange = instruction.timeRange;

    NSLog(@"%@", exportPath);

    [exporter exportAsynchronouslyWithCompletionHandler:^(void) {
        // Bug fix: the original ignored the export status and always reported
        // success; propagate failure/cancellation to the caller instead.
        if (exporter.status != AVAssetExportSessionStatusCompleted)
        {
            if (handler)
                handler(nil, exporter.error);
            return;
        }

        NSURL *resultURL = outURL;
        if (!toURL)
        {
            NSString *finalPath = [GlobalConst fullMoviePath];
            NSFileManager *fileManager = [NSFileManager defaultManager];
            if ([fileManager fileExistsAtPath:finalPath])
                [fileManager removeItemAtPath:finalPath error:nil];

            NSError *moveError = nil;
            if (![fileManager moveItemAtPath:exportPath toPath:finalPath error:&moveError])
            {
                // Bug fix: a failed move used to be logged and then reported as
                // success with a URL pointing at a non-existent file.
                NSLog(@"Error %@", moveError);
                if (handler)
                    handler(nil, moveError);
                return;
            }
            resultURL = [NSURL fileURLWithPath:finalPath];
        }

        NSLog(@"%@", resultURL);
        // Bug fix: handler was nil-checked at the top of the method but called
        // unconditionally here.
        if (handler)
            handler(resultURL, nil);
    }];
}

私は上記のコードを試してみました。アドバイスをいただければ幸いです。よろしくお願いします。

答えて

0

私はこのコードでこの問題を解決しました。このコードは、ランドスケープのビデオをポートレートに回転し、正方形に切り抜き、画像をウォーターマークとして追加します。

/// Infers the recorded interface orientation of the asset's first video track
/// by inspecting the translation components of its preferred transform.
/// Falls through to Portrait when no other pattern matches.
- (UIInterfaceOrientation)orientationForTrack:(AVAsset *)asset
{
    AVAssetTrack *track = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
    CGSize naturalSize = [track naturalSize];
    CGAffineTransform transform = [track preferredTransform];
    const CGFloat tx = transform.tx;
    const CGFloat ty = transform.ty;

    // Translation of (width, height) indicates a 180-degree landscape flip.
    if (tx == naturalSize.width && ty == naturalSize.height)
        return UIInterfaceOrientationLandscapeRight;

    // Identity translation: the track is stored in its natural landscape frame.
    if (tx == 0 && ty == 0)
        return UIInterfaceOrientationLandscapeLeft;

    // Translation of (0, width) indicates a -90-degree rotation.
    if (tx == 0 && ty == naturalSize.width)
        return UIInterfaceOrientationPortraitUpsideDown;

    return UIInterfaceOrientationPortrait;
}

// Exports the captured asset on a background queue: rotates portrait-recorded
// video upright, optionally crops to a square, stamps a watermark image via a
// Core Animation tool, and pushes the edit screen on the main queue afterwards.
// NOTE(review): the export result (status/error) is never inspected before
// navigating to VideoEditVC — confirm whether failures should be surfaced.
dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0), ^(){

      // input clip — assumes the asset has at least one video track (crashes otherwise).
      AVAssetTrack *clipVideoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
      // Output-shape tag handed to VideoEditVC: 0 = portrait, 1 = landscape, 2 = square.
      int videoDimention;
      // make it square
      UIInterfaceOrientation orientation = [self orientationForTrack:asset];
      BOOL isPortrait = (orientation == UIInterfaceOrientationPortrait || orientation == UIInterfaceOrientationPortraitUpsideDown) ? YES: NO;

      CGSize videoSize;
      NSUserDefaults *userDefault=[NSUserDefaults standardUserDefaults];
      if(isPortrait) {
       //videoSize = CGSizeMake(complimentSize*.7,clipVideoTrack.naturalSize.height);
       // Portrait: swap width/height (naturalSize is pre-rotation) and shrink the
       // short side by 0.7. NOTE(review): the 0.7 factor is unexplained — confirm.
       videoSize = CGSizeMake(clipVideoTrack.naturalSize.height,clipVideoTrack.naturalSize.width*.7);

       // Persist the chosen output dimensions for later pipeline stages.
       [userDefault setDouble:videoSize.width forKey:VIDEO_WIDTH_OUTPUT];
       [userDefault setDouble:videoSize.height forKey:VIDEO_HEIGHT_OUTPUT];


       videoDimention=0;// for Portrait
      } else {
       videoSize = CGSizeMake(clipVideoTrack.naturalSize.width, clipVideoTrack.naturalSize.height);
       videoDimention=1;// for Landscape
       [userDefault setDouble:videoSize.width forKey:VIDEO_WIDTH_OUTPUT];
       [userDefault setDouble:videoSize.height forKey:VIDEO_HEIGHT_OUTPUT];
      }
      AVMutableVideoComposition* videoComposition = [AVMutableVideoComposition videoComposition];
      // SIZE == 0 appears to mean "square output"; it overrides the sizes above.
      if([[NSUserDefaults standardUserDefaults] integerForKey:SIZE]==0){
       videoComposition.renderSize = CGSizeMake(clipVideoTrack.naturalSize.height , clipVideoTrack.naturalSize.height);
       videoDimention=2; // for squre
       double height=clipVideoTrack.naturalSize.height;
       [userDefault setDouble:height forKey:VIDEO_WIDTH_OUTPUT];
       [userDefault setDouble:height forKey:VIDEO_HEIGHT_OUTPUT];
      }
      else{
       videoComposition.renderSize =videoSize;

      }
      // videoComposition.renderScale=.5;
      // Frame rate chosen from a user preference: 0→15fps, 1→20, 2→25, else 30.
      if([[NSUserDefaults standardUserDefaults] integerForKey:FPS]==0){
       videoComposition.frameDuration = CMTimeMake(1, 15);
      }
      else if ([[NSUserDefaults standardUserDefaults] integerForKey:FPS]==1){
       videoComposition.frameDuration = CMTimeMake(1, 20);
      }
      else if ([[NSUserDefaults standardUserDefaults] integerForKey:FPS]==2){
       videoComposition.frameDuration = CMTimeMake(1, 25);
      }
      else{
       videoComposition.frameDuration = CMTimeMake(1, 30);
      }
      AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
      // NOTE(review): trailing ";;" is a harmless empty statement.
      instruction.timeRange = CMTimeRangeMake(kCMTimeZero, [asset duration]);;
      AVMutableVideoCompositionLayerInstruction* transformer = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:clipVideoTrack];
      // rotate to portrait: translate to keep the frame on-screen, then rotate
      // 90 degrees; the /2 term vertically centers the crop of the long side.
      if([self orientationForTrack:asset]==UIInterfaceOrientationPortrait){

       CGAffineTransform t1 = CGAffineTransformMakeTranslation(clipVideoTrack.naturalSize.height, -(clipVideoTrack.naturalSize.width - clipVideoTrack.naturalSize.height) /2);
       CGAffineTransform t2 = CGAffineTransformRotate(t1, M_PI_2);

       CGAffineTransform finalTransform = t2;
       [transformer setTransform:finalTransform atTime:kCMTimeZero];

      }
      //for water mark
      // NOTE(review): sizeOfVideo is computed but never used below.
      CGSize sizeOfVideo=[asset naturalSize];
      //Image of watermark
      UIImage *myImage=[UIImage imageNamed:@"watermark"];
      CALayer *layerCa = [CALayer layer];
      layerCa.contents = (id)myImage.CGImage;
      // Place the watermark in the top-right corner at 1/6 of the frame width,
      // with a 4:1 aspect ratio; square output keys off height instead of width.
      if([[NSUserDefaults standardUserDefaults] integerForKey:SIZE]==0){
       layerCa.frame = CGRectMake(videoSize.height-(videoSize.height/6), 0, videoSize.height/6, (videoSize.height/6)/4);
      }
      else{
       layerCa.frame = CGRectMake(videoSize.width-(videoSize.width/6), 0, videoSize.width/6, (videoSize.width/6)/4);
      }
      // layerCa.frame = CGRectMake(videoSize.width-200, 0, 200, 60);
      layerCa.opacity = 1.0;


      // Layer tree for the animation tool: video at the bottom, watermark on top.
      // NOTE(review): these frames use videoSize even when renderSize was set to
      // a square above — confirm the layers should not use renderSize instead.
      CALayer *parentLayer=[CALayer layer];
      CALayer *videoLayer=[CALayer layer];
      parentLayer.frame=CGRectMake(0, 0, videoSize.width, videoSize.height);
      videoLayer.frame=CGRectMake(0, 0, videoSize.width, videoSize.height);
      [parentLayer addSublayer:videoLayer];
      [parentLayer addSublayer:layerCa];
      instruction.layerInstructions = [NSArray arrayWithObject:transformer];
      videoComposition.instructions = [NSArray arrayWithObject: instruction];
      // UP_PID presumably gates the watermark (e.g. a paid/upgraded user) — confirm.
      if([[NSUserDefaults standardUserDefaults] boolForKey:UP_PID]==NO){
       videoComposition.animationTool=[AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];
      }
      AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:asset presetName:AVAssetExportPresetHighestQuality];


     // AVMutableComposition *composition = [AVMutableComposition composition];
     //  [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
      // e.g .mov type — fileURL is defined outside this snippet.
      exportSession.outputURL = fileURL;
      exportSession.videoComposition = videoComposition;
      // [exportSession addObserver: forKeyPath:@"progress" options:NSKeyValueObservingOptionNew context:NULL];
      exportSession.outputFileType = AVFileTypeQuickTimeMovie;
      // NOTE(review): status is read before the export starts and never used.
      AVAssetExportSessionStatus status = [exportSession status];
      [exportSession exportAsynchronouslyWithCompletionHandler:^{
       // UI navigation must happen on the main queue; runs regardless of outcome.
       dispatch_async(dispatch_get_main_queue(), ^{
        VideoEditVC *controller=[[VideoEditVC alloc] init];
        controller.isFirst=YES;
        controller.videoSize=videoDimention;
        [self.navigationController pushViewController:controller animated:YES];
        self.delegate=controller;
       });

      }];
     });

また、上記の orientationForTrack: メソッドも合わせて実装する必要があります。

関連する問題