Finally I solved this problem, and I am posting the code here. I used an Objective-C bridging header to make this possible.
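The post doesn't show the bridging header or the completion block typedefs the code relies on. Presumably they look roughly like this (a sketch: the header file name is an assumption, and the block signatures are inferred from how completion is invoked below):

// MyApp-Bridging-Header.h (file name is an assumption) exposes the helper class to Swift:
// #import "AVUtils.h"

// Block signatures inferred from the call sites below: the merge block receives
// the finished export session, the convert block an output URL plus a success flag.
typedef void (^VideoMergeCompletionBlock)(AVAssetExportSession *exportSession);
typedef void (^VideoConvertCompletionBlock)(NSURL *outputURL, BOOL success);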
//This method adds a watermark image to a video
+(void)createWatermarkForVideo:(NSURL*)videoURL watermark:(UIImage*)watermarkImage stickerContainerView:(UIView*)containerView completionAction:(VideoMergeCompletionBlock)completion{
    AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:videoURL options:nil];
    AVMutableComposition *mixComposition = [AVMutableComposition composition];
    AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    AVMutableCompositionTrack *compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];

    AVAssetTrack *aVideoAssetTrack = [videoAsset tracksWithMediaType:AVMediaTypeVideo][0];
    // The source may have no audio track (e.g. a muted recording), so look it up defensively.
    // Note: insertTimeRange:ofTrack:atTime:error: reports failures through its NSError**
    // parameter, not by throwing, so the @try/@catch(NSError *) blocks in the original
    // never caught anything and have been replaced with explicit error checks.
    AVAssetTrack *aAudioAssetTrack = nil;
    if ([[videoAsset tracksWithMediaType:AVMediaTypeAudio] count] > 0) {
        aAudioAssetTrack = [videoAsset tracksWithMediaType:AVMediaTypeAudio][0];
    }

    CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero, aVideoAssetTrack.timeRange.duration);
    NSError *insertError = nil;
    [compositionVideoTrack insertTimeRange:video_timeRange ofTrack:aVideoAssetTrack atTime:kCMTimeZero error:&insertError];
    if (insertError) {
        NSLog(@"Failed to insert video track: %@", insertError);
    }
    // Only insert audio when the source actually has an audio track.
    if (aAudioAssetTrack != nil) {
        insertError = nil;
        [compositionAudioTrack insertTimeRange:video_timeRange ofTrack:aAudioAssetTrack atTime:kCMTimeZero error:&insertError];
        if (insertError) {
            NSLog(@"Failed to insert audio track: %@", insertError);
        }
    }

    //Build the video composition according to the video orientation
    AVMutableVideoComposition *videoComp;
    if ([self isVideoPortait:aVideoAssetTrack]) {
        videoComp = [self addWatermarkForPortaitVideoAsset:videoAsset WatermakImage:watermarkImage Composition:mixComposition stickerContainerView:containerView];
    } else {
        videoComp = [self addWatermarkToLandscapeVideoForAssetTrack:aVideoAssetTrack WatermarkImage:watermarkImage ContainerView:containerView Composition:mixComposition];
    }

    //Exporting the watermarked file to the temporary directory
    NSString *fullMoviePath = [NSTemporaryDirectory() stringByAppendingPathComponent:[@"WaterMarkedMovie" stringByAppendingPathExtension:@"mp4"]];
    NSURL *finalVideoFileURL = [NSURL fileURLWithPath:fullMoviePath];
    AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
    [exportSession setOutputFileType:AVFileTypeMPEG4];
    [exportSession setOutputURL:finalVideoFileURL];
    [exportSession setVideoComposition:videoComp];

    // Remove any leftover file from a previous run; the export fails if the file already exists.
    if ([[NSFileManager defaultManager] fileExistsAtPath:fullMoviePath]) {
        NSError *removeError = nil;
        [[NSFileManager defaultManager] removeItemAtPath:fullMoviePath error:&removeError];
        if (removeError) {
            NSLog(@"%@", removeError);
        }
    }

    [exportSession exportAsynchronouslyWithCompletionHandler:^{
        dispatch_async(dispatch_get_main_queue(), ^{
            completion(exportSession);
        });
    }];
}
You will also need the following helper methods, which detect whether the video is portrait or landscape and build the watermark composition accordingly.
+(AVMutableVideoComposition*)addWatermarkForPortaitVideoAsset:(AVURLAsset*)videoAsset WatermakImage:(UIImage*)watermarkImage Composition:(AVMutableComposition*)mixComposition stickerContainerView:(UIView*)containerView{
    AVMutableVideoComposition *videoComp = [AVMutableVideoComposition videoComposition];
    AVAssetTrack *assetVideoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];

    //Watermark layer. For a portrait recording, naturalSize still reports the
    //landscape (sensor) dimensions, so width and height are swapped for the layer frames.
    CALayer *waterMarkLayer = [CALayer layer];
    CGSize videoSize = [assetVideoTrack naturalSize];
    UIImage *newwatermarkImage = [AVUtils scaleImage:watermarkImage toSize:CGRectMake(0, 0, videoSize.width, videoSize.height)];
    [waterMarkLayer setContents:(id)newwatermarkImage.CGImage];
    waterMarkLayer.frame = CGRectMake(0, 0, videoSize.height, videoSize.width);
    [waterMarkLayer setOpacity:1];

    CALayer *parentLayer = [CALayer layer];
    CALayer *videoLayer = [CALayer layer];
    [parentLayer setFrame:CGRectMake(0, 0, videoSize.height, videoSize.width)];
    [videoLayer setFrame:CGRectMake(0, 0, videoSize.height, videoSize.width)];
    [parentLayer addSublayer:videoLayer];
    [parentLayer addSublayer:waterMarkLayer];

    //Instruction: rotate the frames 90° and translate them back into the portrait render frame
    AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    [instruction setTimeRange:CMTimeRangeMake(kCMTimeZero, [mixComposition duration])];
    AVAssetTrack *videoTrack = [[mixComposition tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
    AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
    CGAffineTransform t1 = CGAffineTransformMakeTranslation(videoSize.height, 0);
    CGAffineTransform t2 = CGAffineTransformRotate(t1, degreesToRadians(90.0));
    [layerInstruction setTransform:t2 atTime:kCMTimeZero];
    [instruction setLayerInstructions:[NSArray arrayWithObject:layerInstruction]];

    [videoComp setRenderSize:CGSizeMake(videoSize.height, videoSize.width)];
    [videoComp setFrameDuration:CMTimeMake(1, 30)];
    [videoComp setAnimationTool:[AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer]];
    [videoComp setInstructions:[NSArray arrayWithObject:instruction]];
    return videoComp;
}
+(AVMutableVideoComposition*)addWatermarkToLandscapeVideoForAssetTrack:(AVAssetTrack*)aVideoAssetTrack WatermarkImage:(UIImage*)watermarkImage ContainerView:(UIView*)containerView Composition:(AVMutableComposition*)mixComposition{
    //Watermark layer: for landscape video the natural size can be used directly
    AVMutableVideoComposition *videoComp = [AVMutableVideoComposition videoComposition];
    CGSize videoSize = [aVideoAssetTrack naturalSize];
    CALayer *aLayer = [CALayer layer];
    UIImage *newWaterMarkImage = [AVUtils scaleImage:watermarkImage toSize:CGRectMake(0, 0, videoSize.width, videoSize.height)];
    aLayer.contents = (id)newWaterMarkImage.CGImage;
    aLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
    aLayer.opacity = 1; //Feel free to alter the alpha here

    CALayer *parentLayer = [CALayer layer];
    CALayer *videoLayer = [CALayer layer];
    parentLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
    videoLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
    [parentLayer addSublayer:videoLayer];
    [parentLayer addSublayer:aLayer];

    videoComp.renderSize = videoSize;
    videoComp.frameDuration = CMTimeMake(1, 30);
    videoComp.animationTool = [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];

    //Instruction: no transform is needed, so a pass-through layer instruction suffices
    AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    instruction.timeRange = CMTimeRangeMake(kCMTimeZero, [mixComposition duration]);
    AVAssetTrack *videoTrack = [[mixComposition tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
    AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
    instruction.layerInstructions = [NSArray arrayWithObject:layerInstruction];
    videoComp.instructions = [NSArray arrayWithObject:instruction];
    return videoComp;
}

CGFloat degreesToRadians(CGFloat deg) {
    return deg * (M_PI / 180.0f);
}
+(BOOL)isVideoPortait:(AVAssetTrack*)videoAssetTrack{
    // The track's preferredTransform encodes the capture orientation: when b and c
    // are ±1 the frames are stored rotated 90°, i.e. the video was shot in portrait.
    BOOL isVideoAssetPortrait_ = NO;
    CGAffineTransform videoTransform = videoAssetTrack.preferredTransform;
    if (videoTransform.a == 0 && videoTransform.b == 1.0 && videoTransform.c == -1.0 && videoTransform.d == 0) {
        // Corresponds to UIImageOrientationRight: portrait
        isVideoAssetPortrait_ = YES;
    }
    if (videoTransform.a == 0 && videoTransform.b == -1.0 && videoTransform.c == 1.0 && videoTransform.d == 0) {
        // Corresponds to UIImageOrientationLeft: portrait, upside down
        isVideoAssetPortrait_ = YES;
    }
    // The identity transform (UIImageOrientationUp) and the 180° rotation
    // (UIImageOrientationDown) are landscape, so the flag stays NO for them.
    return isVideoAssetPortrait_;
}
+(CGRect)getResizedVideoFrame:(CGRect)naturalSize andPlayerViewSize:(CGRect)playerSize{
    CGFloat resVi = naturalSize.size.width / naturalSize.size.height;
    CGFloat resPl = playerSize.size.width / playerSize.size.height;
    if (resPl > resVi) {
        return CGRectMake(0, 0, naturalSize.size.width * playerSize.size.height / naturalSize.size.height, playerSize.size.height);
    }
    return CGRectMake(0, 0, playerSize.size.width, naturalSize.size.height * playerSize.size.width / naturalSize.size.width);
}
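As a quick sanity check of the aspect-fit math in getResizedVideoFrame:, with illustrative numbers that are not from the original post:

// A 1920x1080 (16:9) video fitted into a 320x568 player: resVi ≈ 1.78,
// resPl ≈ 0.56, so the method keeps the player's width and scales the
// height proportionally: 1080 * 320 / 1920 = 180.
CGRect fitted = [AVUtils getResizedVideoFrame:CGRectMake(0, 0, 1920, 1080)
                            andPlayerViewSize:CGRectMake(0, 0, 320, 568)];
// fitted is {0, 0, 320, 180}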
+(UIImage *)scaleImage:(UIImage *)originalImage toSize:(CGRect)size
{
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(NULL, size.size.width, size.size.height, 8, 0, colorSpace, kCGImageAlphaPremultipliedLast);
    CGContextClearRect(context, CGRectMake(0, 0, size.size.width, size.size.height));
    if (originalImage.imageOrientation == UIImageOrientationRight) {
        // A right-oriented image stores its pixels rotated; rotate the context
        // back so the image is drawn upright.
        CGContextRotateCTM(context, -M_PI_2);
        CGContextTranslateCTM(context, -size.size.height, 0.0f);
        CGContextDrawImage(context, CGRectMake(0, 0, size.size.height, size.size.width), originalImage.CGImage);
    } else {
        CGContextDrawImage(context, CGRectMake(0, 0, size.size.width, size.size.height), originalImage.CGImage);
    }
    CGImageRef scaledImage = CGBitmapContextCreateImage(context);
    CGColorSpaceRelease(colorSpace);
    CGContextRelease(context);
    UIImage *image = [UIImage imageWithCGImage:scaledImage];
    CGImageRelease(scaledImage);
    return image;
}
+(void)convertVideoToMP4:(NSURL*)videoURL completionAction:(VideoConvertCompletionBlock)completion {
    AVURLAsset *avAsset = [AVURLAsset URLAssetWithURL:videoURL options:nil];
    AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:avAsset presetName:AVAssetExportPresetMediumQuality];
    NSString *fullMoviePath = [NSTemporaryDirectory() stringByAppendingPathComponent:[@"temp" stringByAppendingPathExtension:@"mp4"]];
    // Remove any leftover file from a previous run; the export fails if the file already exists.
    if ([[NSFileManager defaultManager] fileExistsAtPath:fullMoviePath]) {
        NSError *error = nil;
        [[NSFileManager defaultManager] removeItemAtPath:fullMoviePath error:&error];
        if (error) {
            NSLog(@"%@", error);
        }
    }
    exportSession.outputURL = [NSURL fileURLWithPath:fullMoviePath];
    //Set a different output file type if you want another container format (e.g. .3gp)
    exportSession.outputFileType = AVFileTypeMPEG4;
    exportSession.shouldOptimizeForNetworkUse = NO;
    [exportSession exportAsynchronouslyWithCompletionHandler:^{
        switch ([exportSession status]) {
            case AVAssetExportSessionStatusFailed:
                NSLog(@"Export session failed");
                dispatch_async(dispatch_get_main_queue(), ^{
                    completion(nil, false);
                });
                break;
            case AVAssetExportSessionStatusCancelled:
                NSLog(@"Export canceled");
                dispatch_async(dispatch_get_main_queue(), ^{
                    completion(nil, false);
                });
                break;
            case AVAssetExportSessionStatusCompleted:
                //Video conversion finished
                NSLog(@"Successful!");
                dispatch_async(dispatch_get_main_queue(), ^{
                    completion(exportSession.outputURL, true);
                });
                break;
            default:
                break;
        }
    }];
}
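For reference, a matching Objective-C call site for the conversion helper might look like this (a sketch; everything except convertVideoToMP4:completionAction: is assumed):

[AVUtils convertVideoToMP4:videoURL completionAction:^(NSURL *outputURL, BOOL success) {
    if (success) {
        // Hand the converted .mp4 on to createWatermarkForVideo:... or save it.
        NSLog(@"Converted file at %@", outputURL);
    } else {
        NSLog(@"Conversion failed");
    }
}];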
Usage: I call this method from my Swift code as shown below.
AVUtils.createWatermark(forVideo: videoURL, watermark: LIMITUtils.getScreenShot(containerView: self.stickerContainer), stickerContainerView: self.stickerContainer) { assetExport in
    guard let exportSession = assetExport else { return }
    switch exportSession.status {
    case .completed:
        DispatchQueue.main.async {
            self.saveVideo(url: exportSession.outputURL!)
        }
    case .failed:
        // An error occurred during the merge
        break
    case .exporting:
        break
    default:
        break
    }
    DispatchQueue.main.async {
        ProgressUtils.hideHUD()
    }
}
Did you find a solution? –
@Mc.Lover Yes, I got the solution –
Could you please share the code? –