2011-10-05 2 views
5

पर दो .caf फ़ाइलों का संयोजन मैंने देखा है और एक उत्तर की तलाश की है, लेकिन ऐसा लगता है कि कोई नहीं ढूंढ सकता। बहुत से पूछे गए हैं, लेकिन किसी को भी जवाब नहीं मिला है। मेरे पास एक ऐप है जो AVAudioRecorder का उपयोग कर ऑडियो रिकॉर्ड करता है। अब मैं सिर्फ दो या दो से अधिक रिकॉर्डिंग को एक फाइल में विलय करना चाहता हूं जिसे ईमेल के माध्यम से भेजा जा सकता है। क्या किसी के पास कोई सुराग है कि यह कैसे किया जा सकता है?आईफोन

(This answer में Audio Service Queues नामक किसी चीज़ का उपयोग करने का सुझाव दिया गया है, लेकिन मुझे इसके बारे में कुछ भी पता नहीं है)

+0

या यदि किसी को यह पता हो कि दो .wav फ़ाइलों को कैसे संयोजित किया जाए, तो वह भी उपयोगी होगा.. – Snowman

उत्तर

8

यह उतना आसान नहीं है जितना आपको लगता होगा। iAmRingtones बनाते समय, आप जो पूछ रहे हैं ठीक वही करने के लिए मैंने AVFoundation framework का उपयोग किया था। इसके लिए ऑडियो फ़ाइलों से AVAssets बनाने और एक AVExportSession सेट करने की आवश्यकता थी। अंतिम परिणाम बहुत अच्छा रहा, लेकिन इसमें निश्चित रूप से थोड़ा काम लगा। निम्न विधि को दो बार कॉल किया जाता है (प्रत्येक ऑडियो ट्रैक के लिए एक बार):

// Appends the first audio track of the asset at assetURL to the given
// composition as a new mutable track, and registers a 0.8 volume setting
// for that track in the audioMixParams array (an ivar used later to build
// the AVMutableAudioMix — see -exportAudio).
//
// assetURL    - file URL of the audio file to add. Assets without an audio
//               track are skipped with a log message.
// composition - the composition being assembled for export.
- (void) setUpAndAddAudioAtPath:(NSURL*)assetURL toComposition:(AVMutableComposition *)composition {

    AVURLAsset *songAsset = [AVURLAsset URLAssetWithURL:assetURL options:nil];

    // Guard against assets that contain no audio track; objectAtIndex:0
    // on an empty array would throw.
    NSArray *audioTracks = [songAsset tracksWithMediaType:AVMediaTypeAudio];
    if ([audioTracks count] == 0) {
        NSLog(@"No audio track found in asset at %@", assetURL);
        return;
    }

    AVMutableCompositionTrack *track = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    AVAssetTrack *sourceAudioTrack = [audioTracks objectAtIndex:0];

    NSError *error = nil;

    // Insert the whole source track, starting at time zero.
    CMTime startTime = CMTimeMakeWithSeconds(0, 1);
    CMTime trackDuration = songAsset.duration;
    CMTimeRange tRange = CMTimeRangeMake(startTime, trackDuration);

    // Set Volume for this track in the eventual audio mix.
    AVMutableAudioMixInputParameters *trackMix = [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:track];
    [trackMix setVolume:0.8f atTime:startTime];
    [audioMixParams addObject:trackMix];

    // Insert audio into track; previously the result and error were
    // silently discarded — now failures are at least logged.
    BOOL ok = [track insertTimeRange:tRange ofTrack:sourceAudioTrack atTime:CMTimeMake(0, 44100) error:&error];
    if (!ok) {
        NSLog(@"Failed to insert audio from %@: %@", assetURL, error);
    }
}

उपरोक्त विधि को इस प्रकार कॉल किया जाता है:

// Builds a composition from two audio files, mixes them, and exports the
// result asynchronously as an .m4a file into the Documents directory.
// Progress is reported via progressView, polled by a repeating NSTimer
// that invokes -updateExportProgress: with the exporter as userInfo.
- (void) exportAudio { 

    AVMutableComposition *composition = [AVMutableComposition composition]; 
    audioMixParams = [[NSMutableArray alloc] initWithObjects:nil]; 

    //Add Audio Tracks to Composition 
    NSString *URLPath1 = pathToYourAudioFile1; 
    NSURL *assetURL1 = [NSURL fileURLWithPath:URLPath1]; 
    [self setUpAndAddAudioAtPath:assetURL1 toComposition:composition]; 

    NSString *URLPath2 = pathToYourAudioFile2; 
    NSURL *assetURL2 = [NSURL fileURLWithPath:URLPath2]; 
    [self setUpAndAddAudioAtPath:assetURL2 toComposition:composition]; 

    AVMutableAudioMix *audioMix = [AVMutableAudioMix audioMix]; 
    audioMix.inputParameters = [NSArray arrayWithArray:audioMixParams]; 

    //If you need to query what formats you can export to, here's a way to find out 
    NSLog (@"compatible presets for songAsset: %@", 
      [AVAssetExportSession exportPresetsCompatibleWithAsset:composition]); 

    AVAssetExportSession *exporter = [[AVAssetExportSession alloc] 
            initWithAsset: composition 
            presetName: AVAssetExportPresetAppleM4A]; 
    exporter.audioMix = audioMix; 
    // AVFileTypeAppleM4A is the framework constant for "com.apple.m4a-audio".
    exporter.outputFileType = AVFileTypeAppleM4A; 
    NSString *fileName = @"someFilename"; 
    NSString *exportFile = [[util getDocumentsDirectory] stringByAppendingFormat: @"/%@.m4a", fileName];  

    // set up export: remove any stale file first — the exporter fails if
    // outputURL already exists.
    myDeleteFile(exportFile); 
    NSURL *exportURL = [NSURL fileURLWithPath:exportFile]; 
    exporter.outputURL = exportURL; 

    // do the export 
    [exporter exportAsynchronouslyWithCompletionHandler:^{ 
      AVAssetExportSessionStatus exportStatus = exporter.status; 
      switch (exportStatus) { 
       // Braces are required here: a declaration may not directly
       // follow a case label in C/Objective-C (this did not compile before).
       case AVAssetExportSessionStatusFailed: { 
        NSError *exportError = exporter.error; 
        NSLog (@"AVAssetExportSessionStatusFailed: %@", exportError); 
        break; 
       } 

       case AVAssetExportSessionStatusCompleted: NSLog (@"AVAssetExportSessionStatusCompleted"); break; 
       case AVAssetExportSessionStatusUnknown: NSLog (@"AVAssetExportSessionStatusUnknown"); break; 
       case AVAssetExportSessionStatusExporting: NSLog (@"AVAssetExportSessionStatusExporting"); break; 
       case AVAssetExportSessionStatusCancelled: NSLog (@"AVAssetExportSessionStatusCancelled"); break; 
       case AVAssetExportSessionStatusWaiting: NSLog (@"AVAssetExportSessionStatusWaiting"); break; 
       default: NSLog (@"didn't get export status"); break; 
      } 
    }]; 

    // start up the export progress bar 
    progressView.hidden = NO; 
    progressView.progress = 0.0; 
    [NSTimer scheduledTimerWithTimeInterval:0.1 
           target:self 
           selector:@selector (updateExportProgress:) 
           userInfo:exporter 
           repeats:YES]; 

} 
+0

मैं इसे .m4a फ़ाइल के बजाय .caf फ़ाइल के रूप में कैसे निर्यात कर सकता हूँ? – Newbie

+0

निर्यातक .outputFileType = AVFileTypeCoreAudioFormat; //.caf – Underdog

1

यहाँ बताया गया है कि कई ऑडियो फ़ाइलों को क्रमिक रूप से (एक के बाद एक) कैसे विलय किया जाए — कमोबेश इसी तरह हमने अपने ऐप्लिकेशन में निर्यात कार्यक्षमता बनाई थी। फ़ाइलों के पथ recordingsArray नामक एक सरणी में रखे जाते हैं।

# pragma mark mergeRecording 

// Concatenates all recordings listed in recordingsArray (see
// -buildSequenceComposition:) and asynchronously exports the result as an
// .m4a file in NSTemporaryDirectory(). Completion/cleanup is driven by
// -updateProgress:, scheduled at the bottom with the exporter as userInfo.
- (void) mergeRecording 
{ 
     AVMutableComposition *composition = [AVMutableComposition composition]; 
     [self buildSequenceComposition:composition]; //given Below 

     NSLog (@"compatible presets for songAsset: %@",[AVAssetExportSession exportPresetsCompatibleWithAsset:composition]); 

     AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset: composition presetName: AVAssetExportPresetAppleM4A]; 
     // AVFileTypeAppleM4A is the framework constant for "com.apple.m4a-audio".
     exporter.outputFileType = AVFileTypeAppleM4A; 

     //File Name 

     NSString *recordingFileName = [self setRecordingFileName]; 
     self.recordingTimeLbl.text = @"00:00:00"; 
     NSString *exportFile = [NSTemporaryDirectory() stringByAppendingFormat: @"/%@.m4a", recordingFileName]; 

     // set up export: remove any stale file first — the exporter fails if
     // outputURL already exists. (Failure to remove a file that isn't
     // there is expected, so the result is intentionally ignored.)
     [[NSFileManager defaultManager] removeItemAtPath:exportFile error:NULL]; 
     NSURL *exportURL = [NSURL fileURLWithPath:exportFile]; 
     exporter.outputURL = exportURL; 

     // Diagnostic only: the file was just deleted, so this logs 0 — it
     // confirms the destination is clean before export. %lu + cast because
     // NSUInteger is not an int on 64-bit. The data was previously leaked
     // under MRC; release it after logging.
     NSData *sound1Data = [[NSData alloc] initWithContentsOfURL: exportURL]; 
     NSLog(@"Length %lu",(unsigned long)sound1Data.length); 
     [sound1Data release]; 

     [exporter exportAsynchronouslyWithCompletionHandler:^{ 
      AVAssetExportSessionStatus exportStatus = exporter.status; 
      switch (exportStatus) { 
       case AVAssetExportSessionStatusFailed: 
        NSLog (@"AVAssetExportSessionStatusFailed:"); 
        break; 

       case AVAssetExportSessionStatusCompleted: NSLog (@"AVAssetExportSessionStatusCompleted"); break; 
       case AVAssetExportSessionStatusUnknown: NSLog (@"AVAssetExportSessionStatusUnknown"); break; 
       case AVAssetExportSessionStatusExporting: NSLog (@"AVAssetExportSessionStatusExporting"); break; 
       case AVAssetExportSessionStatusCancelled: NSLog (@"AVAssetExportSessionStatusCancelled"); break; 
       case AVAssetExportSessionStatusWaiting: NSLog (@"AVAssetExportSessionStatusWaiting"); break; 
       default: NSLog (@"didn't get export status"); break; 
      } 
     }]; 

     // start up the export progress bar; -updateProgress: reschedules
     // itself while the export is still running, hence repeats:NO here. 
     [NSTimer scheduledTimerWithTimeInterval:0.1 target:self selector:@selector (updateProgress:) userInfo:exporter repeats:NO]; 
} 


// Builds a file name for a new recording of the form
// "<name>AND<dd-MM-yyyy>AND<hour>-<minute>-<second>" from the current time.
// Returns the name without an extension (the caller appends ".m4a").
- (NSString *) setRecordingFileName 
{ 
    NSDate *todaysDate = [NSDate date]; 

    NSDateFormatter *dateFormat = [[NSDateFormatter alloc] init]; 
    [dateFormat setDateFormat:@"dd-MM-yyyy"]; 
    NSString *dateString11 = [dateFormat stringFromDate:todaysDate]; 
    [dateFormat release]; // was leaked under MRC 

    NSCalendar *gregorian = [[NSCalendar alloc] initWithCalendarIdentifier:NSGregorianCalendar]; 
    NSDateComponents *dateComponents = [gregorian components:(NSHourCalendarUnit | NSMinuteCalendarUnit | NSSecondCalendarUnit) fromDate:todaysDate]; 
    NSInteger hour = [dateComponents hour]; 
    NSInteger minute = [dateComponents minute]; 
    NSInteger second = [dateComponents second]; 
    [gregorian release]; 

    // %ld with an explicit (long) cast: %i with NSInteger is undefined
    // behavior on 64-bit. Build the time string once instead of three
    // separate stringWithFormat: calls per use.
    NSString *timeString = [NSString stringWithFormat:@"%ld-%ld-%ld", (long)hour, (long)minute, (long)second]; 
    NSLog(@"Date: %@ \n Time : %@", dateString11, timeString); 

    NSString *recordingFileName = @"Any Name"; 
    if(recordingFileName.length > 0) 
    { 
      recordingFileName = [NSString stringWithFormat:@"%@AND%@AND%@", recordingFileName, dateString11, timeString]; 
    } 
    else 
    { 
      // Unreachable while the base name above is a non-empty literal;
      // kept so a future empty name still yields a valid file name.
      recordingFileName = [NSString stringWithFormat:@"%@AND%@", dateString11, timeString]; 
    } 
    return recordingFileName; 
} 


// Polls the AVAssetExportSession started by -mergeRecording. Invoked first
// by an NSTimer (userInfo = the session) and thereafter reschedules itself
// directly with the session while the export is still running. On
// completion it deletes the source recordings, stores the merged file's
// path in the database, and stops the activity indicator; on failure or
// cancellation it alerts the user.
- (void)updateProgress:(id)timer 
{ 
    AVAssetExportSession *session = nil; // avoid reading an uninitialized pointer below 
    if([timer isKindOfClass:[NSTimer class]]) 
     session = (AVAssetExportSession *)[timer userInfo]; 
    else if([timer isKindOfClass:[AVAssetExportSession class]]) 
     session = timer; 

    if (session.status == AVAssetExportSessionStatusExporting) 
    { 
     // Re-poll in 0.5s; UITrackingRunLoopMode keeps updates flowing
     // while the user is scrolling.
     NSArray *modes = [[[NSArray alloc] initWithObjects:NSDefaultRunLoopMode, UITrackingRunLoopMode, nil] autorelease]; 
     [self performSelector:@selector(updateProgress:) withObject:session afterDelay:0.5 inModes:modes]; 

    } 
    else if(session.status == AVAssetExportSessionStatusCompleted) 
    { 
     NSLog(@"Exporting Ended"); 
     NSURL *exportURL = session.outputURL; 
     // Diagnostic read of the exported file; %lu + cast because
     // NSUInteger is not an int on 64-bit. Released below (was leaked). 
     NSData *sound1Data = [[NSData alloc] initWithContentsOfURL: exportURL]; 
     NSLog(@"Length %lu \n Path %@",(unsigned long)sound1Data.length,exportURL); 
     [sound1Data release]; 

     [self.activityIndicator stopAnimating]; 
     self.activityIndicator.hidden = YES; 
     NSLog(@"Merging Complete"); 

     // The merge succeeded, so the individual source recordings are
     // no longer needed — delete them.
     for(int x = 0 ; x < [recordingsArray count] ; x++) 
     { 
       NSURL *recordingPathUrl = [recordingsArray objectAtIndex:x]; 
       BOOL removed = [[NSFileManager defaultManager] removeItemAtPath:recordingPathUrl.relativePath error:NULL]; 
       if (removed) 
       { 
        NSLog(@"File Removed at Path %@",recordingPathUrl.relativePath); 
       } 
       else 
       { 
        NSLog(@"Problem During Removal of Recording At Path %@",recordingPathUrl.relativePath); 
       } 

     } 

     NSString *exportFile = [NSString stringWithFormat:@"%@",exportURL]; 
     NSString *recordingFileName = [self setRecordingFileName]; 
     BOOL isInserted = [[DbFile sharedDatabase] insertRecordingDataIntoTable:recordingFileName recordingPath:exportFile]; 

     if(isInserted) 
     { 
      NSLog(@"Recording Inserted In Database"); 
     } 
     else 
     { 
      // Fixed copy-paste bug: this branch previously logged the
      // success message even though the insert failed.
      NSLog(@"Recording Insertion Into Database Failed"); 
     } 


     if([timer isKindOfClass:[NSTimer class]]) 
      [timer invalidate]; 

    } 
    else if(session.status == AVAssetExportSessionStatusFailed) 
    { 

      [self.activityIndicator stopAnimating]; 
      NSLog(@"Recording Export Failed"); 

      UIAlertView *alertView = [[UIAlertView alloc] initWithTitle:@"Error" message:@"Recording Export Failed" delegate:nil cancelButtonTitle:@"OK" otherButtonTitles: nil]; 
      [alertView show]; 
      [alertView release]; 

      if([timer isKindOfClass:[NSTimer class]]) 
       [timer invalidate]; 

    } 
    else if(session.status == AVAssetExportSessionStatusCancelled) 
    { 

      [self.activityIndicator stopAnimating]; 
      NSLog(@"Recording Export Cancelled"); 

      UIAlertView *alertView = [[UIAlertView alloc] initWithTitle:@"Error" message:@"Recording Export Cancelled" delegate:nil cancelButtonTitle:@"OK" otherButtonTitles: nil]; 
      [alertView show]; 
      [alertView release]; 
      if([timer isKindOfClass:[NSTimer class]]) 
       [timer invalidate]; 
    } 
} 


// Appends every recording in recordingsArray (an array of file NSURLs)
// end-to-end onto a single audio track of the given composition, in array
// order. Each clip starts where the previous one ended.
- (void) buildSequenceComposition:(AVMutableComposition *)composition 
{ 
    AVMutableCompositionTrack *audioTrack1 = [composition addMutableTrackWithMediaType:AVMediaTypeAudio 
                     preferredTrackID:kCMPersistentTrackID_Invalid]; 
    CMTime nextClipStartTime = kCMTimeZero; 

    for(NSURL *recordingURL in recordingsArray) 
    { 
     AVURLAsset *audioAsset = [[AVURLAsset alloc] initWithURL:recordingURL options:nil]; 

     // Skip files that contain no audio track; objectAtIndex:0 on an
     // empty array would throw.
     NSArray *audioTracks = [audioAsset tracksWithMediaType:AVMediaTypeAudio]; 
     if ([audioTracks count] == 0) 
     { 
      NSLog(@"No audio track in recording at %@ — skipping", recordingURL); 
      [audioAsset release]; 
      continue; 
     } 

     CMTimeRange timeRangeInAsset = CMTimeRangeMake(kCMTimeZero, [audioAsset duration]); 
     AVAssetTrack *sourceAudioTrack = [audioTracks objectAtIndex:0]; 

     // Errors were previously discarded via error:nil — now logged, and
     // a failed clip does not advance the insertion point.
     NSError *error = nil; 
     if ([audioTrack1 insertTimeRange:timeRangeInAsset ofTrack:sourceAudioTrack atTime:nextClipStartTime error:&error]) 
     { 
      nextClipStartTime = CMTimeAdd(nextClipStartTime, timeRangeInAsset.duration); 
     } 
     else 
     { 
      NSLog(@"Failed to insert recording at %@: %@", recordingURL, error); 
     } 

     [audioAsset release]; // was leaked on every iteration under MRC 
    } 
}