
I used this tutorial to build a movie file that merges a single image (only one frame) with a few seconds of audio, i.e. merging audio and video that have different lengths.

On an iPhone device the video plays for the same duration as the audio, and I see the image for the whole video.

But when I share the movie to an Android device (via WhatsApp) and play it there, the playback duration is only the duration of the image (one frame). As a test, I built the movie file by repeating the single image 100 times (10 fps, so 10 seconds), and then the playback time on the Android device is 10 seconds.

I think the Android device only plays the shortest track of the video, but if I change the time range of the video track I add with addMutableTrackWithMediaType to the audio's duration, nothing happens.
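(Roughly the kind of change meant here, shown only as a sketch with the variable names from the code below, not the exact code that was tried:)

    // Sketch: insert the video track over a range stretched to the audio's duration
    CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero, audioAsset.duration);
    [a_compositionVideoTrack insertTimeRange:video_timeRange
                                     ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
                                      atTime:kCMTimeZero
                                       error:nil];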

Any advice?

I am putting all of my code here. Thanks for the help:

#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>
#import <CoreMedia/CoreMedia.h>
#import <CoreVideo/CoreVideo.h>

-(void) writeImagesToMovieAtPath:(NSString *)path withSize:(CGSize) size { 

    NSMutableArray *m_PictArray = [NSMutableArray arrayWithCapacity:1]; 
    [m_PictArray addObject:[UIImage imageNamed:@"prueba.jpg"]]; 

    NSString *documentsDirectoryPath = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0]; 
    NSArray *dirContents = [[NSFileManager defaultManager] contentsOfDirectoryAtPath:documentsDirectoryPath error:nil]; 
    for (NSString *tString in dirContents) { 
     if ([tString isEqualToString:@"essai.mp4"]) 
     { 
      [[NSFileManager defaultManager]removeItemAtPath:[NSString stringWithFormat:@"%@/%@",documentsDirectoryPath,tString] error:nil]; 

     } 
    } 

    NSLog(@"Write Started"); 

    NSError *error = nil; 

    AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL: 
            [NSURL fileURLWithPath:path] fileType:AVFileTypeMPEG4 
                   error:&error];  
    NSParameterAssert(videoWriter); 

    NSDictionary *codecSettings = [NSDictionary dictionaryWithObjectsAndKeys: 
            [NSNumber numberWithInt:128000], AVVideoAverageBitRateKey, 
            [NSNumber numberWithInt:15],AVVideoMaxKeyFrameIntervalKey, 
            AVVideoProfileLevelH264Main30, AVVideoProfileLevelKey, 
            nil];  

    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys: 
            AVVideoCodecH264, AVVideoCodecKey, 
            codecSettings,AVVideoCompressionPropertiesKey, 
            [NSNumber numberWithInt:size.width], AVVideoWidthKey, 
            [NSNumber numberWithInt:size.height], AVVideoHeightKey, 
            nil];  

    AVAssetWriterInput* videoWriterInput = [[AVAssetWriterInput 
              assetWriterInputWithMediaType:AVMediaTypeVideo 
              outputSettings:videoSettings] retain]; 

    AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor 
                assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput 
                sourcePixelBufferAttributes:nil]; 

    NSParameterAssert(videoWriterInput); 

    NSParameterAssert([videoWriter canAddInput:videoWriterInput]); 
    videoWriterInput.expectsMediaDataInRealTime = YES; 
    [videoWriter addInput:videoWriterInput]; 
    //Start a session: 
    [videoWriter startWriting]; 
    [videoWriter startSessionAtSourceTime:kCMTimeZero]; 


    //Video encoding 

    CVPixelBufferRef buffer = NULL; 

    //convert uiimage to CGImage. 

    int frameCount = 0; 

    for(int i = 0; i<[m_PictArray count]; i++) 
    { 
     buffer = [self newPixelBufferFromCGImage:[[m_PictArray objectAtIndex:i] CGImage] andSize:size]; 

     BOOL append_ok = NO; 
     int j = 0; 
     while (!append_ok && j < 30) 
     { 
      if (adaptor.assetWriterInput.readyForMoreMediaData) 
      { 
       printf("appending %d attemp %d\n", frameCount, j); 

       CMTime frameTime = CMTimeMake(frameCount,(int32_t) 10); 
       /* 
       Float64 seconds = 1; 
       int32_t preferredTimeScale = 10; 
       CMTime frameTime = CMTimeMakeWithSeconds(seconds, preferredTimeScale); 
       */ 
       append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime]; 
       CVPixelBufferPoolRef bufferPool = adaptor.pixelBufferPool; 
       NSParameterAssert(bufferPool != NULL); 

       [NSThread sleepForTimeInterval:0.05]; 
      } 
      else 
      { 
       printf("adaptor not ready %d, %d\n", frameCount, j); 
       [NSThread sleepForTimeInterval:0.1]; 
      } 
      j++; 
     } 
     if (!append_ok) { 
      printf("error appending image %d times %d\n", frameCount, j); 
     } 
     frameCount++; 
     CVBufferRelease(buffer); 
    } 

    [videoWriterInput markAsFinished]; 
    [videoWriter finishWriting]; 

    [videoWriterInput release]; 
    [videoWriter release]; 

    [m_PictArray removeAllObjects]; 

    NSLog(@"Write Ended"); 

    [self saveVideoToAlbum:path]; 
} 


-(void)CompileFilesToMakeMovie { 

    NSLog(@"CompileFilesToMakeMovie"); 

    AVMutableComposition* mixComposition = [AVMutableComposition composition]; 

    NSString *documentsDirectoryPath = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];  

    //Audio file in AAC 
    NSString* audio_inputFileName = @"zApY4o8QY.m4a"; 

    NSString* audio_inputFilePath = [NSString stringWithFormat:@"%@/%@",[[NSBundle mainBundle] resourcePath],audio_inputFileName]; 
    NSURL* audio_inputFileUrl = [NSURL fileURLWithPath:audio_inputFilePath]; 

    NSString* video_inputFileName = @"essai.mp4"; 
    NSString* video_inputFilePath = [NSString stringWithFormat:@"%@/%@",documentsDirectoryPath,video_inputFileName]; 
    NSURL* video_inputFileUrl = [NSURL fileURLWithPath:video_inputFilePath]; 

    NSString* outputFileName = @"outputFile.mov"; 
    NSString* outputFilePath = [NSString stringWithFormat:@"%@/%@",documentsDirectoryPath,outputFileName]; 

    NSURL* outputFileUrl = [NSURL fileURLWithPath:outputFilePath]; 

    if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath]) 
     [[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil]; 


    CMTime nextClipStartTime = kCMTimeZero; 

    AVURLAsset* videoAsset = [[AVURLAsset alloc]initWithURL:video_inputFileUrl options:nil]; 
    AVURLAsset* audioAsset = [[AVURLAsset alloc]initWithURL:audio_inputFileUrl options:nil]; 


    //CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero,videoAsset.duration); 
    CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero,videoAsset.duration); 
    AVMutableCompositionTrack *a_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid]; 
    [a_compositionVideoTrack insertTimeRange:video_timeRange ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:nextClipStartTime error:nil]; 

    CMTimeRange audio_timeRange = CMTimeRangeMake(kCMTimeZero, audioAsset.duration); 
    AVMutableCompositionTrack *b_compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid]; 
    [b_compositionAudioTrack insertTimeRange:audio_timeRange ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:nextClipStartTime error:nil]; 



    AVAssetExportSession* _assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetLowQuality]; 
    _assetExport.shouldOptimizeForNetworkUse = YES; 
    _assetExport.outputFileType = AVFileTypeQuickTimeMovie; // same UTI as @"com.apple.quicktime-movie" 
    _assetExport.outputURL = outputFileUrl; 
    _assetExport.timeRange = CMTimeRangeMake(kCMTimeZero, audioAsset.duration); 

    [_assetExport exportAsynchronouslyWithCompletionHandler: 
    ^(void) { 
     [self saveVideoToAlbum:outputFilePath]; 
    }  
    ]; 

    NSLog(@"CompileFilesToMakeMovie Finish"); 
} 

- (void) saveVideoToAlbum:(NSString*)path { 

    NSLog(@"saveVideoToAlbum"); 

    if(UIVideoAtPathIsCompatibleWithSavedPhotosAlbum(path)){ 
     UISaveVideoAtPathToSavedPhotosAlbum(path, self, @selector(video:didFinishSavingWithError:contextInfo:), nil); 
    } 
} 

-(void) video:(NSString *)videoPath didFinishSavingWithError:(NSError *)error contextInfo:(void *)contextInfo { 
    if(error) 
     NSLog(@"Exportado con error: %@", error); 
    else 
     NSLog(@"Exportado OK"); 
} 

- (CVPixelBufferRef) newPixelBufferFromCGImage: (CGImageRef)image andSize:(CGSize)frameSize { 

    CGAffineTransform frameTransform = CGAffineTransformMake(0, 0, 0, 0, 0, 0); 

    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys: 
          [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey, 
          [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey, 
          nil]; 
    CVPixelBufferRef pxbuffer = NULL; 

    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, frameSize.width, 
              frameSize.height, kCVPixelFormatType_32ARGB, (CFDictionaryRef) options, 
              &pxbuffer); 
    NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL); 

    CVPixelBufferLockBaseAddress(pxbuffer, 0); 
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer); 
    NSParameterAssert(pxdata != NULL); 

    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB(); 
    CGContextRef context = CGBitmapContextCreate(pxdata, frameSize.width, 
               frameSize.height, 8, 4*frameSize.width, rgbColorSpace, 
               kCGImageAlphaNoneSkipFirst); 
    NSParameterAssert(context); 
    //CGContextConcatCTM(context, frameTransform); 
    CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image), 
              CGImageGetHeight(image)), image); 
    CGColorSpaceRelease(rgbColorSpace); 
    CGContextRelease(context); 

    CVPixelBufferUnlockBaseAddress(pxbuffer, 0); 

    return (CVPixelBufferRef)pxbuffer; 
} 

Answers

Just fixed it!

I create the movie file repeating the image X times, and then in the composition step I scale the video track to audioAsset.duration:

CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero,videoAsset.duration); 
AVMutableCompositionTrack *a_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid]; 
[a_compositionVideoTrack insertTimeRange:video_timeRange ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:nil]; 
[a_compositionVideoTrack scaleTimeRange:video_timeRange toDuration:audioAsset.duration]; 

You have to repeat the image so that the composition step has a track it can scale; a video with only 2 frames plays for just 8 seconds on Android, so I repeated the image 10 times, which lets the WhatsApp video share run past 45 seconds.
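For reference, a minimal sketch of what the repeated-frame loop in writeImagesToMovieAtPath could look like (the repeat count of 10 and the variable names here are illustrative, not taken from the answer; the readiness polling and error handling from the original loop are omitted for brevity):

    // Hypothetical sketch: append the same image N times so the video track
    // is long enough to be scaled up to the audio duration afterwards.
    const int kFrameRepeatCount = 10;               // illustrative value
    UIImage *image = [m_PictArray objectAtIndex:0]; // the single source image
    for (int i = 0; i < kFrameRepeatCount; i++) {
        CVPixelBufferRef buffer = [self newPixelBufferFromCGImage:[image CGImage] andSize:size];
        CMTime frameTime = CMTimeMake(i, 10);       // 10 fps timescale, as in the question
        if (adaptor.assetWriterInput.readyForMoreMediaData) {
            [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];
        }
        CVBufferRelease(buffer);
    }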

A second answer:

Inside the CompileFilesToMakeMovie method, use video_timeRange instead of audio_timeRange.
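If I read that suggestion correctly, it presumably refers to the export step, something like the following (this is an interpretation, not code from the answer):

    // Hypothetical reading of the suggestion: clamp the export to the
    // video's duration instead of the audio's.
    _assetExport.timeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);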
