2014-07-22 35 views
5

我有一個音頻文件,並希望更改其專輯封面圖稿。那麼,有可能嗎?而且,如何在iOS編程中將音頻文件的封面設置爲專輯封面?如何在音頻文件中添加藝術作品,以在專輯封面中顯示?

其實,我合併了兩個音頻文件,並且想要添加在 iTunes 中顯示的專輯封面插圖。

守則如下:

/// Merges the two source audio files (audioFileURL1 / audioFileURL2) into a
/// single M4A file under Library/Caches and exports it asynchronously.
/// Per-mix volume comes from [NSTSharedData instance]; the playbackDelay…
/// ivars are whole-second offsets (timescale 1) — TODO confirm units.
/// On completion the main thread receives performAction (success) or
/// hideSpinningWheel plus an alert (failure).
/// @return YES if the export was successfully started, NO on any setup failure.
- (BOOL) combineVoices1
{
    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSLibraryDirectory, NSUserDomainMask, YES);
    NSString *libraryCachesDirectory = [[paths firstObject] stringByAppendingPathComponent:@"Caches"];
    // File name comes straight from the UI; assumes textFieldMixFile.text is a
    // valid, non-empty file name — TODO confirm it is validated by the caller.
    NSString *outputFilePath = [libraryCachesDirectory stringByAppendingPathComponent:
                                [NSString stringWithFormat:@"%@.m4a", textFieldMixFile.text]];
    NSURL *audioFileOutput = [NSURL fileURLWithPath:outputFilePath];
    NSURL *audioFileInput1 = audioFileURL1; // <Path of original audio file>
    NSURL *audioFileInput2 = audioFileURL2; // <Path of original audio file>

    if (!audioFileInput1 || !audioFileInput2 || !audioFileOutput)
    {
        return NO;
    }

    // AVAssetExportSession refuses to overwrite; remove any stale output first.
    [[NSFileManager defaultManager] removeItemAtURL:audioFileOutput error:NULL];

    AVMutableComposition *composition = [[AVMutableComposition alloc] init];
    NSError *insertError = nil;

    // --- Mix 1 -------------------------------------------------------------
    AVMutableCompositionTrack *compositionAudioTrack =
        [composition addMutableTrackWithMediaType:AVMediaTypeAudio
                                 preferredTrackID:kCMPersistentTrackID_Invalid];
    CMTime nextClipStartTimeMix1 = (playbackDelayAfterTimeMix1 > 0)
        ? CMTimeMake(playbackDelayAfterTimeMix1, 1) : kCMTimeZero;
    CMTime startTimeMix1 = (playbackDelayMix1 > 0)
        ? CMTimeMake(playbackDelayMix1, 1) : kCMTimeZero;
    [compositionAudioTrack setPreferredVolume:[NSTSharedData instance].volumeOfMIX1];

    AVAsset *avAsset = [AVURLAsset URLAssetWithURL:audioFileURL1 options:nil];
    AVAssetTrack *clipAudioTrack = [[avAsset tracksWithMediaType:AVMediaTypeAudio] firstObject];
    if (!clipAudioTrack)
    {
        return NO;
    }
    // Clamp the duration so the range never runs past the end of the asset:
    // (startTimeMix1, full duration) would be out of bounds for any non-zero
    // start offset and make the insert fail.
    CMTimeRange rangeMix1 = CMTimeRangeMake(startTimeMix1,
                                            CMTimeSubtract(avAsset.duration, startTimeMix1));
    if (![compositionAudioTrack insertTimeRange:rangeMix1
                                        ofTrack:clipAudioTrack
                                         atTime:nextClipStartTimeMix1
                                          error:&insertError])
    {
        // Previously the error was passed as nil and failures went unnoticed.
        return NO;
    }

    // --- Mix 2 -------------------------------------------------------------
    AVMutableCompositionTrack *compositionAudioTrack1 =
        [composition addMutableTrackWithMediaType:AVMediaTypeAudio
                                 preferredTrackID:kCMPersistentTrackID_Invalid];
    CMTime nextClipStartTimeMix2 = (playbackDelayAfterTimeMix2 > 0)
        ? CMTimeMake(playbackDelayAfterTimeMix2, 1) : kCMTimeZero;
    CMTime startTimeMix2 = (playbackDelayMix2 > 0)
        ? CMTimeMake(playbackDelayMix2, 1) : kCMTimeZero;
    [compositionAudioTrack1 setPreferredVolume:[NSTSharedData instance].volumeOfMIX2];

    AVAsset *avAsset1 = [AVURLAsset URLAssetWithURL:audioFileURL2 options:nil];
    AVAssetTrack *clipAudioTrack1 = [[avAsset1 tracksWithMediaType:AVMediaTypeAudio] firstObject];
    if (!clipAudioTrack1)
    {
        return NO;
    }
    CMTimeRange rangeMix2 = CMTimeRangeMake(startTimeMix2,
                                            CMTimeSubtract(avAsset1.duration, startTimeMix2));
    if (![compositionAudioTrack1 insertTimeRange:rangeMix2
                                         ofTrack:clipAudioTrack1
                                          atTime:nextClipStartTimeMix2
                                           error:&insertError])
    {
        return NO;
    }

    // --- Export ------------------------------------------------------------
    AVAssetExportSession *exportSession =
        [AVAssetExportSession exportSessionWithAsset:composition
                                          presetName:AVAssetExportPresetAppleM4A];
    if (nil == exportSession)
    {
        return NO;
    }

    exportSession.outputURL = audioFileOutput;
    exportSession.outputFileType = AVFileTypeAppleM4A;

    [exportSession exportAsynchronouslyWithCompletionHandler:^
    {
        // Completion handler runs on an arbitrary queue; hop to main for UI.
        if (AVAssetExportSessionStatusCompleted == exportSession.status)
        {
            [self performSelectorOnMainThread:@selector(performAction) withObject:nil waitUntilDone:NO];
        }
        else if (AVAssetExportSessionStatusFailed == exportSession.status)
        {
            [self performSelectorOnMainThread:@selector(hideSpinningWheel) withObject:nil waitUntilDone:NO];
            [[NSTSharedData instance] showAlertForTitle:@"Error!" andMessage:[NSString stringWithFormat:@"%@",[[exportSession error] localizedDescription]]];
            //NSLog(@"Export failed: %@", [[exportSession error] localizedDescription]);
        }
    }];

    return YES;
}
+1

你嘗試過什麼?有任何代碼嗎? – ljacqu

+0

我想這是可能的,可能使用這個類'AVURLAsset','AVMetadataItem'。 –

+0

我合併了兩個音頻文件,並且想爲該合併文件添加封面圖稿,該圖稿將在 iTunes 中展示。 – Rathore

回答

3

我解決了我的問題,現在它工作正常:我在上面的代碼中「AVAssetExportSession」附近加入了元數據相關代碼。最終的方法如下:

/// Builds an AVMutableMetadataItem in the iTunes key space with the current
/// locale. Used to attach artist/album/title/cover-art metadata to the export.
static AVMutableMetadataItem *NSTMakeiTunesMetadataItem(NSString *key, id value)
{
    AVMutableMetadataItem *item = [[AVMutableMetadataItem alloc] init];
    item.key = key;
    item.keySpace = AVMetadataKeySpaceiTunes;
    item.locale = [NSLocale currentLocale];
    item.value = value;
    return item;
}

/// Merges the two source audio files (audioFileURL1 / audioFileURL2) into a
/// single M4A file under Library/Caches, attaches iTunes metadata (artist,
/// album, song name, cover art) and exports asynchronously.
/// Per-mix volume comes from [NSTSharedData instance]; the playbackDelay…
/// ivars are whole-second offsets (timescale 1) — TODO confirm units.
/// On completion the main thread receives performAction (success) or
/// hideSpinningWheel plus an alert (failure).
/// @return YES if the export was successfully started, NO on any setup failure.
- (BOOL) combineVoices1
{
    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSLibraryDirectory, NSUserDomainMask, YES);
    NSString *libraryCachesDirectory = [[paths firstObject] stringByAppendingPathComponent:@"Caches"];
    // File name comes straight from the UI; assumes textFieldMixFile.text is a
    // valid, non-empty file name — TODO confirm it is validated by the caller.
    NSString *outputFilePath = [libraryCachesDirectory stringByAppendingPathComponent:
                                [NSString stringWithFormat:@"%@.m4a", textFieldMixFile.text]];
    NSURL *audioFileOutput = [NSURL fileURLWithPath:outputFilePath];
    NSURL *audioFileInput1 = audioFileURL1; // <Path of original audio file>
    NSURL *audioFileInput2 = audioFileURL2; // <Path of original audio file>

    if (!audioFileInput1 || !audioFileInput2 || !audioFileOutput)
    {
        return NO;
    }

    // AVAssetExportSession refuses to overwrite; remove any stale output first.
    [[NSFileManager defaultManager] removeItemAtURL:audioFileOutput error:NULL];

    AVMutableComposition *composition = [[AVMutableComposition alloc] init];
    NSError *insertError = nil;

    // --- Mix 1 -------------------------------------------------------------
    AVMutableCompositionTrack *compositionAudioTrack =
        [composition addMutableTrackWithMediaType:AVMediaTypeAudio
                                 preferredTrackID:kCMPersistentTrackID_Invalid];
    CMTime nextClipStartTimeMix1 = (playbackDelayAfterTimeMix1 > 0)
        ? CMTimeMake(playbackDelayAfterTimeMix1, 1) : kCMTimeZero;
    CMTime startTimeMix1 = (playbackDelayMix1 > 0)
        ? CMTimeMake(playbackDelayMix1, 1) : kCMTimeZero;
    [compositionAudioTrack setPreferredVolume:[NSTSharedData instance].volumeOfMIX1];

    AVAsset *avAsset = [AVURLAsset URLAssetWithURL:audioFileURL1 options:nil];
    AVAssetTrack *clipAudioTrack = [[avAsset tracksWithMediaType:AVMediaTypeAudio] firstObject];
    if (!clipAudioTrack)
    {
        return NO;
    }
    // Clamp the duration so the range never runs past the end of the asset:
    // (startTimeMix1, full duration) would be out of bounds for any non-zero
    // start offset and make the insert fail.
    CMTimeRange rangeMix1 = CMTimeRangeMake(startTimeMix1,
                                            CMTimeSubtract(avAsset.duration, startTimeMix1));
    if (![compositionAudioTrack insertTimeRange:rangeMix1
                                        ofTrack:clipAudioTrack
                                         atTime:nextClipStartTimeMix1
                                          error:&insertError])
    {
        // Previously the error was passed as nil and failures went unnoticed.
        return NO;
    }

    // --- Mix 2 -------------------------------------------------------------
    AVMutableCompositionTrack *compositionAudioTrack1 =
        [composition addMutableTrackWithMediaType:AVMediaTypeAudio
                                 preferredTrackID:kCMPersistentTrackID_Invalid];
    CMTime nextClipStartTimeMix2 = (playbackDelayAfterTimeMix2 > 0)
        ? CMTimeMake(playbackDelayAfterTimeMix2, 1) : kCMTimeZero;
    CMTime startTimeMix2 = (playbackDelayMix2 > 0)
        ? CMTimeMake(playbackDelayMix2, 1) : kCMTimeZero;
    [compositionAudioTrack1 setPreferredVolume:[NSTSharedData instance].volumeOfMIX2];

    AVAsset *avAsset1 = [AVURLAsset URLAssetWithURL:audioFileURL2 options:nil];
    AVAssetTrack *clipAudioTrack1 = [[avAsset1 tracksWithMediaType:AVMediaTypeAudio] firstObject];
    if (!clipAudioTrack1)
    {
        return NO;
    }
    CMTimeRange rangeMix2 = CMTimeRangeMake(startTimeMix2,
                                            CMTimeSubtract(avAsset1.duration, startTimeMix2));
    if (![compositionAudioTrack1 insertTimeRange:rangeMix2
                                         ofTrack:clipAudioTrack1
                                          atTime:nextClipStartTimeMix2
                                           error:&insertError])
    {
        return NO;
    }

    /**
     iTunes metadata: these items make the exported M4A show artist, album,
     title, and cover art in iTunes.
     **/
    NSArray *metadata = @[
        NSTMakeiTunesMetadataItem(AVMetadataiTunesMetadataKeyArtist, uTakeTheMicArtist),
        NSTMakeiTunesMetadataItem(AVMetadataiTunesMetadataKeyAlbum, uTakeTheMicAlbum),
        NSTMakeiTunesMetadataItem(AVMetadataiTunesMetadataKeySongName, textFieldMixFile.text),
        // imageData is NSData of UIImage (JPEG/PNG bytes expected by iTunes).
        NSTMakeiTunesMetadataItem(AVMetadataiTunesMetadataKeyCoverArt, imageData),
    ];

    // --- Export ------------------------------------------------------------
    AVAssetExportSession *exportSession =
        [AVAssetExportSession exportSessionWithAsset:composition
                                          presetName:AVAssetExportPresetAppleM4A];
    if (nil == exportSession)
    {
        return NO;
    }

    exportSession.metadata = metadata;
    exportSession.outputURL = audioFileOutput;
    exportSession.outputFileType = AVFileTypeAppleM4A;

    [exportSession exportAsynchronouslyWithCompletionHandler:^
    {
        // Completion handler runs on an arbitrary queue; hop to main for UI.
        if (AVAssetExportSessionStatusCompleted == exportSession.status)
        {
            [self performSelectorOnMainThread:@selector(performAction) withObject:nil waitUntilDone:NO];
        }
        else if (AVAssetExportSessionStatusFailed == exportSession.status)
        {
            [self performSelectorOnMainThread:@selector(hideSpinningWheel) withObject:nil waitUntilDone:NO];
            [[NSTSharedData instance] showAlertForTitle:@"Error!" andMessage:[NSString stringWithFormat:@"%@.",[[exportSession error] localizedDescription]]];
            //NSLog(@"Export failed: %@", [[exportSession error] localizedDescription]);
        }
    }];

    return YES;
}
相關問題