2011-10-05 49 views
5

iPhoneで2つの.cafファイルを結合する方法について答えを探しましたが、見つけられませんでした。多くの人が同じ質問をしていますが、回答を得た人はいないようです。AVAudioRecorderを使ってオーディオを録音するアプリがあり、2つ以上の録音を1つのファイルにマージして電子メールで送信できるようにしたいのです。どうすればこれを実現できますか?

この回答では Audio Queue Services(オーディオキューサービス)と呼ばれるものを使うことが提案されていますが、私はそれについて何も知りません。

+0

あるいは、配列に格納された複数のオーディオファイルを順番どおりに結合する方法や、2つの.wavファイルを結合する方法をご存じの方はいませんか? – Snowman

答えて

8

思っているほど簡単ではありません。私は iAmRingtones というアプリを作ったときに、AVFoundation を使ってまさに同じことを行いました。オーディオファイルから AVAsset を作成し、AVAssetExportSession を設定する必要がありました。最終結果は素晴らしいものでしたが、確かにそれなりの作業が必要でした。以下が、我々のアプリでエクスポート機能を作成した方法のおおよそのコードです:

// Adds the audio track found at |assetURL| to |composition| as a new track
// starting at time zero, and registers a volume-adjusted mix parameter for it
// in the |audioMixParams| ivar. Because every track starts at time zero, the
// recordings are mixed (played simultaneously) rather than appended.
- (void) setUpAndAddAudioAtPath:(NSURL*)assetURL toComposition:(AVMutableComposition *)composition { 

    AVURLAsset *songAsset = [AVURLAsset URLAssetWithURL:assetURL options:nil]; 

    AVMutableCompositionTrack *track = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid]; 

    // Guard against a file with no audio track: the original code indexed
    // objectAtIndex:0 unconditionally, which throws on an empty array.
    NSArray *audioTracks = [songAsset tracksWithMediaType:AVMediaTypeAudio]; 
    if ([audioTracks count] == 0) { 
        NSLog(@"setUpAndAddAudioAtPath: no audio track found in %@", assetURL); 
        return; 
    } 
    AVAssetTrack *sourceAudioTrack = [audioTracks objectAtIndex:0]; 

    CMTime startTime = CMTimeMakeWithSeconds(0, 1); 
    // Insert the entire source file (from zero for its full duration).
    CMTimeRange tRange = CMTimeRangeMake(startTime, songAsset.duration); 

    //Set Volume: 0.8 of full volume for this track, from the start.
    AVMutableAudioMixInputParameters *trackMix = [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:track]; 
    [trackMix setVolume:0.8f atTime:startTime]; 
    [audioMixParams addObject:trackMix]; 

    //Insert audio into track. The original discarded the BOOL result and the
    //NSError; failures are now logged instead of silently ignored.
    NSError *error = nil; 
    if (![track insertTimeRange:tRange ofTrack:sourceAudioTrack atTime:CMTimeMake(0, 44100) error:&error]) { 
        NSLog(@"setUpAndAddAudioAtPath: insertTimeRange failed: %@", error); 
    } 
} 

上記のメソッドは、以下のメソッドから2回(各オーディオトラックにつき1回)呼び出されます:

複数の録音をマージしてエクスポートする方法:
// Builds an AVMutableComposition from two source audio files, mixes them with
// the volume parameters collected in |audioMixParams|, and exports the result
// asynchronously as an .m4a file in the Documents directory. Progress is
// polled by a repeating timer driving updateExportProgress:.
- (void) exportAudio { 

    AVMutableComposition *composition = [AVMutableComposition composition]; 
    audioMixParams = [[NSMutableArray alloc] initWithObjects:nil]; 

    //Add Audio Tracks to Composition — one call per source file.
    NSString *URLPath1 = pathToYourAudioFile1; 
    NSURL *assetURL1 = [NSURL fileURLWithPath:URLPath1]; 
    [self setUpAndAddAudioAtPath:assetURL1 toComposition:composition]; 

    NSString *URLPath2 = pathToYourAudioFile2; 
    NSURL *assetURL2 = [NSURL fileURLWithPath:URLPath2]; 
    [self setUpAndAddAudioAtPath:assetURL2 toComposition:composition]; 

    AVMutableAudioMix *audioMix = [AVMutableAudioMix audioMix]; 
    audioMix.inputParameters = [NSArray arrayWithArray:audioMixParams]; 

    //If you need to query what formats you can export to, here's a way to find out 
    NSLog (@"compatible presets for composition: %@", 
      [AVAssetExportSession exportPresetsCompatibleWithAsset:composition]); 

    AVAssetExportSession *exporter = [[AVAssetExportSession alloc] 
            initWithAsset: composition 
            presetName: AVAssetExportPresetAppleM4A]; 
    exporter.audioMix = audioMix; 
    // AVFileTypeAppleM4A is the framework constant for "com.apple.m4a-audio".
    exporter.outputFileType = AVFileTypeAppleM4A; 
    NSString *fileName = @"someFilename"; 
    NSString *exportFile = [[util getDocumentsDirectory] stringByAppendingFormat: @"/%@.m4a", fileName];  

    // set up export: remove any stale output first — the exporter fails if
    // the destination file already exists.
    myDeleteFile(exportFile); 
    NSURL *exportURL = [NSURL fileURLWithPath:exportFile]; 
    exporter.outputURL = exportURL; 

    // do the export; the completion handler runs on a background queue.
    [exporter exportAsynchronouslyWithCompletionHandler:^{ 
      switch (exporter.status) { 
       case AVAssetExportSessionStatusFailed: { 
        // The braces are required: C does not allow a declaration directly
        // after a case label, so the original did not compile.
        NSError *exportError = exporter.error; 
        NSLog (@"AVAssetExportSessionStatusFailed: %@", exportError); 
        break; 
       } 
       case AVAssetExportSessionStatusCompleted: NSLog (@"AVAssetExportSessionStatusCompleted"); break; 
       case AVAssetExportSessionStatusUnknown: NSLog (@"AVAssetExportSessionStatusUnknown"); break; 
       case AVAssetExportSessionStatusExporting: NSLog (@"AVAssetExportSessionStatusExporting"); break; 
       case AVAssetExportSessionStatusCancelled: NSLog (@"AVAssetExportSessionStatusCancelled"); break; 
       case AVAssetExportSessionStatusWaiting: NSLog (@"AVAssetExportSessionStatusWaiting"); break; 
       default: NSLog (@"didn't get export status"); break; 
      } 
    }]; 

    // start up the export progress bar; the timer retains |exporter| via
    // userInfo for the lifetime of the poll loop.
    progressView.hidden = NO; 
    progressView.progress = 0.0; 
    [NSTimer scheduledTimerWithTimeInterval:0.1 
           target:self 
           selector:@selector (updateExportProgress:) 
           userInfo:exporter 
           repeats:YES]; 

} 
+0

どのようにすることができます.m4aファイルの代わりに.cafファイルとしてエクスポートしますか? – Newbie

+0

exporter.outputFileType = AVFileTypeCoreAudioFormat; //.caf – Underdog

1

録音ファイルのパス(NSURL)は recordingsArray という配列に格納されている前提です。

# pragma mark mergeRecording 

// Concatenates every recording listed in |recordingsArray| end-to-end into
// one composition (via buildSequenceComposition:) and exports the result
// asynchronously as an .m4a file in NSTemporaryDirectory(). Progress is
// tracked by updateProgress:, which re-schedules itself while exporting.
- (void) mergeRecording 
{ 
     AVMutableComposition *composition = [AVMutableComposition composition]; 
     [self buildSequenceComposition:composition]; //given Below 

     NSLog (@"compatible presets for composition: %@",[AVAssetExportSession exportPresetsCompatibleWithAsset:composition]); 

     AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset: composition presetName: AVAssetExportPresetAppleM4A]; 
     // AVFileTypeAppleM4A is the framework constant for "com.apple.m4a-audio".
     exporter.outputFileType = AVFileTypeAppleM4A; 

     //File Name: "<base>AND<date>AND<h>-<m>-<s>.m4a" in the temp directory.

     NSString *recordingFileName = [self setRecordingFileName]; 
     self.recordingTimeLbl.text = @"00:00:00"; 
     NSString *exportFile = [NSTemporaryDirectory() stringByAppendingFormat: @"/%@.m4a", recordingFileName]; 

     // set up export: remove any stale output first — the exporter fails if
     // the destination already exists. (The original then immediately read
     // the just-deleted file into an NSData and logged its length; that read
     // was always empty and the NSData was leaked, so it has been removed.)
     [[NSFileManager defaultManager] removeItemAtPath:exportFile error:NULL]; 
     NSURL *exportURL = [NSURL fileURLWithPath:exportFile]; 
     exporter.outputURL = exportURL; 

     [exporter exportAsynchronouslyWithCompletionHandler:^{ 
      switch (exporter.status) { 
       case AVAssetExportSessionStatusFailed: 
        NSLog (@"AVAssetExportSessionStatusFailed:"); 
        break; 

       case AVAssetExportSessionStatusCompleted: NSLog (@"AVAssetExportSessionStatusCompleted"); break; 
       case AVAssetExportSessionStatusUnknown: NSLog (@"AVAssetExportSessionStatusUnknown"); break; 
       case AVAssetExportSessionStatusExporting: NSLog (@"AVAssetExportSessionStatusExporting"); break; 
       case AVAssetExportSessionStatusCancelled: NSLog (@"AVAssetExportSessionStatusCancelled"); break; 
       case AVAssetExportSessionStatusWaiting: NSLog (@"AVAssetExportSessionStatusWaiting"); break; 
       default: NSLog (@"didn't get export status"); break; 
      } 
     }]; 

     // start up the export progress poll; repeats:NO is intentional —
     // updateProgress: re-schedules itself while the export is running.
     [NSTimer scheduledTimerWithTimeInterval:0.1 target:self selector:@selector (updateProgress:) userInfo:exporter repeats:NO]; 
} 


// Builds a recording file name of the form
// "<base>AND<dd-MM-yyyy>AND<hour>-<minute>-<second>" from the current date.
// Hour/minute/second are not zero-padded (matches the original output).
- (NSString *) setRecordingFileName 
{ 
    NSDate *todaysDate = [NSDate date]; 

    NSDateFormatter *dateFormat = [[NSDateFormatter alloc] init]; 
    [dateFormat setDateFormat:@"dd-MM-yyyy"]; 
    NSString *dateString11 = [dateFormat stringFromDate:todaysDate]; 
    [dateFormat release]; // was leaked in the original (file uses MRC) 

    NSCalendar *gregorian = [[NSCalendar alloc] initWithCalendarIdentifier:NSGregorianCalendar]; 
    NSDateComponents *dateComponents = [gregorian components:(NSHourCalendarUnit | NSMinuteCalendarUnit | NSSecondCalendarUnit) fromDate:todaysDate]; 
    // Cast NSInteger to long for the %ld format specifier below; the original
    // used %i, which is incorrect for NSInteger on 64-bit platforms.
    long hour = (long)[dateComponents hour]; 
    long minute = (long)[dateComponents minute]; 
    long second = (long)[dateComponents second]; 
    [gregorian release]; 

    NSLog(@"Date: %@ \n Time : %ld-%ld-%ld", dateString11, hour, minute, second); 


    // "Any Name" is a placeholder base name; if it were empty, the date-only
    // branch below would be used instead.
    NSString *recordingFileName = @"Any Name"; 
    if(recordingFileName.length > 0) 
    { 
      recordingFileName = [NSString stringWithFormat:@"%@AND%@AND%ld-%ld-%ld", recordingFileName, dateString11, hour, minute, second]; 
    } 
    else 
    { 
      recordingFileName = [NSString stringWithFormat:@"%@AND%ld-%ld-%ld", dateString11, hour, minute, second]; 
    } 
    return recordingFileName; 
} 


// Progress callback for the merge export. |timer| is either the NSTimer
// created in mergeRecording (whose userInfo carries the session) or the
// AVAssetExportSession itself when re-scheduled via performSelector:.
// On completion: deletes the source recordings and records the merged file
// in the database. On failure/cancel: stops the spinner and alerts the user.
- (void)updateProgress:(id)timer 
{ 
    // nil-initialized: the original left |session| uninitialized, which is
    // undefined behavior if an unexpected object type arrives.
    AVAssetExportSession *session = nil; 
    if([timer isKindOfClass:[NSTimer class]]) 
     session = (AVAssetExportSession *)[timer userInfo]; 
    else if([timer isKindOfClass:[AVAssetExportSession class]]) 
     session = timer; 

    if (session.status == AVAssetExportSessionStatusExporting) 
    { 
     // Still exporting: poll again in 0.5 s. UITrackingRunLoopMode keeps the
     // poll alive while the user is scrolling.
     NSArray *modes = [[[NSArray alloc] initWithObjects:NSDefaultRunLoopMode, UITrackingRunLoopMode, nil] autorelease]; 
     [self performSelector:@selector(updateProgress:) withObject:session afterDelay:0.5 inModes:modes]; 

    } 
    else if(session.status == AVAssetExportSessionStatusCompleted) 
    { 
     NSLog(@"Exporting Ended"); 
     NSURL *exportURL = session.outputURL; 
     NSData *sound1Data = [[NSData alloc] initWithContentsOfURL: exportURL]; 
     // %lu + cast: NSData.length is NSUInteger (the original used %i).
     NSLog(@"Length %lu \n Path %@", (unsigned long)sound1Data.length, exportURL); 
     [sound1Data release]; // was leaked in the original (file uses MRC) 

     [self.activityIndicator stopAnimating]; 
     self.activityIndicator.hidden = YES; 
     NSLog(@"Merging Complete"); 

     // The merge succeeded, so the individual source recordings can go.
     for(int x = 0 ; x < [recordingsArray count] ; x++) 
     { 
       NSURL *recordingPathUrl = [recordingsArray objectAtIndex:x]; 
       BOOL removed = [[NSFileManager defaultManager] removeItemAtPath:recordingPathUrl.relativePath error:NULL]; 
       if (removed) 
       { 
        NSLog(@"File Removed at Path %@",recordingPathUrl.relativePath); 
       } 
       else 
       { 
        NSLog(@"Problem During Removal of Recording At Path %@",recordingPathUrl.relativePath); 
       } 

     } 

     NSString *exportFile = [NSString stringWithFormat:@"%@",exportURL]; 
     NSString *recordingFileName = [self setRecordingFileName]; 
     BOOL isInserted = [[DbFile sharedDatabase] insertRecordingDataIntoTable:recordingFileName recordingPath:exportFile]; 

     if(isInserted) 
     { 
      NSLog(@"Recording Inserted In Database"); 
     } 
     else 
     { 
      // The original logged the success message in this branch too,
      // masking database-insert failures.
      NSLog(@"Recording Insert Into Database Failed"); 
     } 


     if([timer isKindOfClass:[NSTimer class]]) 
      [timer invalidate]; 

    } 
    else if(session.status == AVAssetExportSessionStatusFailed) 
    { 

      [self.activityIndicator stopAnimating]; 
      NSLog(@"Recording Export Failed"); 

      UIAlertView *alertView = [[UIAlertView alloc] initWithTitle:@"Error" message:@"Recording Export Failed" delegate:nil cancelButtonTitle:@"OK" otherButtonTitles: nil]; 
      [alertView show]; 
      [alertView release]; 

      if([timer isKindOfClass:[NSTimer class]]) 
       [timer invalidate]; 

    } 
    else if(session.status == AVAssetExportSessionStatusCancelled) 
    { 

      [self.activityIndicator stopAnimating]; 
      NSLog(@"Recording Export Cancelled"); 

      UIAlertView *alertView = [[UIAlertView alloc] initWithTitle:@"Error" message:@"Recording Export Cancelled" delegate:nil cancelButtonTitle:@"OK" otherButtonTitles: nil]; 
      [alertView show]; 
      [alertView release]; 
      if([timer isKindOfClass:[NSTimer class]]) 
       [timer invalidate]; 
    } 
} 


// Appends every recording in |recordingsArray| end-to-end onto a single
// audio track of |composition|, preserving the array's order. Each clip is
// inserted at the running end time of the previous clip.
- (void) buildSequenceComposition:(AVMutableComposition *)composition 
{ 
    AVMutableCompositionTrack *audioTrack1 = [composition addMutableTrackWithMediaType:AVMediaTypeAudio 
                     preferredTrackID:kCMPersistentTrackID_Invalid]; 
    CMTime nextClipStartTime = kCMTimeZero; 

    // Renamed loop variable: the original called the NSURL "view" and the
    // audio track "clipVideoTrack", both misleading.
    for(NSURL *recordingURL in recordingsArray) 
    { 
     AVURLAsset *audioAsset = [[AVURLAsset alloc] initWithURL:recordingURL options:nil]; 
     CMTimeRange timeRangeInAsset = CMTimeRangeMake(kCMTimeZero, [audioAsset duration]); 

     // Guard against a file with no audio track (objectAtIndex:0 would throw).
     NSArray *sourceTracks = [audioAsset tracksWithMediaType:AVMediaTypeAudio]; 
     if ([sourceTracks count] > 0) 
     { 
      AVAssetTrack *sourceAudioTrack = [sourceTracks objectAtIndex:0]; 
      // The original passed error:nil and ignored failures; only advance the
      // insertion point when the clip was actually inserted.
      NSError *error = nil; 
      if ([audioTrack1 insertTimeRange:timeRangeInAsset ofTrack:sourceAudioTrack atTime:nextClipStartTime error:&error]) 
       nextClipStartTime = CMTimeAdd(nextClipStartTime, timeRangeInAsset.duration); 
      else 
       NSLog(@"buildSequenceComposition: insert failed for %@: %@", recordingURL, error); 
     } 
     else 
     { 
      NSLog(@"buildSequenceComposition: no audio track in %@", recordingURL); 
     } 
     [audioAsset release]; // was leaked on every iteration (file uses MRC) 
    } 
}