
I am using the code below to add an image to a video with CALayer. The problem I am facing is that the watermark's size and position change when I pick videos of different resolutions. I am building a Snapchat-like app in which the user picks a sticker, adjusts its position and size, and adds it to the video. Can anyone tell me what I am doing wrong here, or what I should do to get this working correctly? Thanks in advance!

- (void) createWatermark:(UIImage*)image video:(NSURL*)videoURL 
{ 
    if (videoURL == nil) 
     return; 
    AppDelegate* appDelegate = (AppDelegate*)[[UIApplication sharedApplication] delegate];
    dispatch_async(dispatch_get_main_queue(), ^{ 

    [appDelegate showLoadingView: YES]; 
    }); 

    AVURLAsset* videoAsset = [[AVURLAsset alloc]initWithURL:videoURL options:nil]; 
    AVMutableComposition* mixComposition = [AVMutableComposition composition]; 

    AVMutableCompositionTrack* compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid]; 

    AVAssetTrack* clipVideoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]; 
    [compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration) 
            ofTrack:clipVideoTrack 
            atTime:kCMTimeZero error:nil]; 

    [compositionVideoTrack setPreferredTransform:clipVideoTrack.preferredTransform];

    // create the layer with the watermark image 
    CALayer* aLayer = [CALayer layer]; 
    aLayer.contents = (id)image.CGImage; 
    aLayer.frame = CGRectMake(0, 100, 500, 200); 
    aLayer.opacity = 0.9; 

    // stack the layers in the proper order

    AVAssetTrack* videoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]; 
    CGSize videoSize = [videoTrack naturalSize]; 
    CALayer *parentLayer = [CALayer layer]; 
    CALayer *videoLayer = [CALayer layer]; 
    parentLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height); 
    videoLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height); 
    [parentLayer addSublayer:videoLayer]; 
    [parentLayer addSublayer:aLayer]; 

    printf("Video Size %f %f",videoSize.width,videoSize.height); 
    // create text Layer 
    CATextLayer* titleLayer = [CATextLayer layer]; 
    titleLayer.backgroundColor = [UIColor clearColor].CGColor; 
    titleLayer.string = @"Dummy text"; 
    titleLayer.foregroundColor = [UIColor redColor].CGColor;
    titleLayer.font = (__bridge CFTypeRef)@"Helvetica";
    titleLayer.fontSize = 28; 
    titleLayer.shadowOpacity = 0.5; 
    titleLayer.alignmentMode = kCAAlignmentCenter; 
    titleLayer.frame = CGRectMake(videoSize.width/2.0, videoSize.height/2.0, videoSize.width,100); 
    [parentLayer addSublayer:titleLayer]; 

    //create the composition and add the instructions to insert the layer: 

    AVMutableVideoComposition* videoComp = [AVMutableVideoComposition videoComposition]; 
    videoComp.renderSize = videoSize; 
    videoComp.frameDuration = CMTimeMake(1, 30); 
    videoComp.animationTool = [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer]; 

    /// instruction 
    AVMutableVideoCompositionInstruction* instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction]; 

    instruction.timeRange = CMTimeRangeMake(kCMTimeZero, [mixComposition duration]); 
    AVAssetTrack* mixVideoTrack = [[mixComposition tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]; 
    AVMutableVideoCompositionLayerInstruction* layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:mixVideoTrack]; 
    instruction.layerInstructions = [NSArray arrayWithObject:layerInstruction]; 
    videoComp.instructions = [NSArray arrayWithObject: instruction]; 

    // export video 

    _assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetMediumQuality]; 
    _assetExport.videoComposition = videoComp; 

    NSLog (@"created exporter. supportedFileTypes: %@", _assetExport.supportedFileTypes); 

    NSString* videoName = @"NewWatermarkedVideo.mov"; 

    NSString* exportPath = [NSTemporaryDirectory() stringByAppendingPathComponent:videoName]; 
    NSURL* exportUrl = [NSURL fileURLWithPath:exportPath]; 

    if ([[NSFileManager defaultManager] fileExistsAtPath:exportPath]) 
     [[NSFileManager defaultManager] removeItemAtPath:exportPath error:nil]; 

    _assetExport.outputFileType = AVFileTypeQuickTimeMovie; 
    _assetExport.outputURL = exportUrl; 
    _assetExport.shouldOptimizeForNetworkUse = YES; 

    [_assetExport exportAsynchronouslyWithCompletionHandler:^(void) {

     // the completion handler runs on a background queue; hop to main for UI work
     dispatch_async(dispatch_get_main_queue(), ^{

      [appDelegate showLoadingView:NO];

      switch (_assetExport.status)
      {
       case AVAssetExportSessionStatusUnknown:
        NSLog(@"Unknown");
        break;
       case AVAssetExportSessionStatusWaiting:
        NSLog(@"Waiting");
        break;
       case AVAssetExportSessionStatusExporting:
        NSLog(@"Exporting");
        break;
       case AVAssetExportSessionStatusCompleted:
        NSLog(@"Created new watermarked video");
        _playButton.hidden = NO;
        break;
       case AVAssetExportSessionStatusFailed:
        NSLog(@"Failed - %@", _assetExport.error);
        break;
       case AVAssetExportSessionStatusCancelled:
        NSLog(@"Cancelled");
        break;
      }
     });
    }];
} 
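
For reference, the hard-coded aLayer.frame above is specified in fixed pixels, which is why the watermark's relative size and position shift between video resolutions. A minimal sketch of mapping a sticker's on-screen frame into video coordinates (my own illustration; editorView and stickerFrameInView are hypothetical names, and it assumes the editor view has the same aspect ratio as the video):

    // Hypothetical mapping from view points to video pixels
    CGFloat scaleX = videoSize.width/editorView.bounds.size.width;
    CGFloat scaleY = videoSize.height/editorView.bounds.size.height;
    // layers in an AVVideoComposition use a bottom-left origin, so flip the UIKit y coordinate
    CGFloat flippedY = videoSize.height - (stickerFrameInView.origin.y + stickerFrameInView.size.height)*scaleY;
    aLayer.frame = CGRectMake(stickerFrameInView.origin.x*scaleX,
           flippedY,
           stickerFrameInView.size.width*scaleX,
           stickerFrameInView.size.height*scaleY);

The accepted answer below takes a different route: it screenshots the entire sticker container view and stretches that single image across the full video frame, so no per-sticker coordinate math is needed.
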
Did you find a solution?

@Mc.Lover Yes, I got the solution.

Could you please share the code?

Answer

Finally I got the solution to this problem. I am posting the code here; I used an Objective-C bridging header to make this possible.
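
The code below references two completion-block types that are not shown in the post. A minimal sketch of the declarations they would need (hypothetical, inferred from how the blocks are invoked below):

    // Hypothetical typedefs — inferred from the call sites below
    typedef void (^VideoMergeCompletionBlock)(AVAssetExportSession *exportSession);
    typedef void (^VideoConvertCompletionBlock)(NSURL *outputURL, BOOL success);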

//This method adds watermark image to video 
+(void)createWatermarkForVideo:(NSURL*)videoURL watermark:(UIImage*)watermarkImage stickerContainerView:(UIView*)containerView completionAction:(VideoMergeCompletionBlock)completion{ 


    AVURLAsset *videoAsset = [[AVURLAsset alloc]initWithURL:videoURL options:nil]; 

    AVMutableComposition* mixComposition = [AVMutableComposition composition]; 

    AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid]; 
    AVMutableCompositionTrack *compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid]; 

    AVAssetTrack *aVideoAssetTrack = [videoAsset tracksWithMediaType:AVMediaTypeVideo][0]; 

    AVAssetTrack *aAudioAssetTrack = nil;
    if ([[videoAsset tracksWithMediaType:AVMediaTypeAudio] count] > 0) {
     aAudioAssetTrack = [videoAsset tracksWithMediaType:AVMediaTypeAudio][0];
    }

    CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero, aVideoAssetTrack.timeRange.duration);

    @try {
     [compositionVideoTrack insertTimeRange:video_timeRange ofTrack:aVideoAssetTrack atTime:kCMTimeZero error:nil];

     // only add an audio track when the source video actually has one
     if (aAudioAssetTrack != nil) {
      [compositionAudioTrack insertTimeRange:video_timeRange ofTrack:aAudioAssetTrack atTime:kCMTimeZero error:nil];
     }
    }
    @catch (NSException *exception) {
     // insertTimeRange raises NSException on bad input; NSError is not catchable here
     NSLog(@"Track insertion failed: %@", exception);
    }
    AVMutableVideoComposition *videoComp = [AVMutableVideoComposition videoComposition];
    // build the video composition according to the video orientation
    if([self isVideoPortait:aVideoAssetTrack]){ 
     videoComp = [self addWatermarkForPortaitVideoAsset:videoAsset WatermakImage:watermarkImage Composition:mixComposition stickerContainerView:containerView]; 
    } 
    else{ 
     videoComp = [self addWatermarkToLandscapeVideoForAssetTrack:aVideoAssetTrack WatermarkImage:watermarkImage ContainerView:containerView Composition:mixComposition]; 
    } 


    //Exporting File 
    NSString *fullMoviePath = [NSTemporaryDirectory() stringByAppendingPathComponent:[@"WaterMarkedMovie" stringByAppendingPathExtension:@"mp4"]]; 
    NSURL *finalVideoFileURL = [NSURL fileURLWithPath:fullMoviePath]; 


    AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality]; 
    [exportSession setOutputFileType:AVFileTypeMPEG4]; 
    [exportSession setOutputURL:finalVideoFileURL]; 
    [exportSession setVideoComposition:videoComp]; 

    if ([[NSFileManager defaultManager] fileExistsAtPath:fullMoviePath]) {
     NSError *error;
     [[NSFileManager defaultManager] removeItemAtPath:fullMoviePath error:&error];
     if (error) {
      NSLog(@"%@", error);
     }
    }
    [exportSession exportAsynchronouslyWithCompletionHandler: 
    ^(void) { 

     dispatch_async(dispatch_get_main_queue(), ^{ 
      completion(exportSession); 

     }); 
    } 
    ]; 

} 

You also need the following methods to detect whether the video is portrait or landscape and compose it accordingly.

+(AVMutableVideoComposition*)addWatermarkForPortaitVideoAsset:(AVURLAsset*)videoAsset WatermakImage:(UIImage*)watermarkImage Composition:(AVMutableComposition*)mixComposition stickerContainerView:(UIView*)containerView{ 
    //WaterMark 
    AVMutableVideoComposition *videoComp = [AVMutableVideoComposition videoComposition];
    AVAssetTrack *assetVideoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];

    CALayer *waterMarkLayer = [CALayer layer];
    CGSize videoSize = [assetVideoTrack naturalSize];

    UIImage *newwatermarkImage = [AVUtils scaleImage:watermarkImage toSize:CGRectMake(0, 0, videoSize.width, videoSize.height)];
    [waterMarkLayer setContents:(id)newwatermarkImage.CGImage];

    // naturalSize reports the encoded (landscape) dimensions, so width and
    // height are swapped to get the portrait render canvas
    waterMarkLayer.frame = CGRectMake(0, 0, videoSize.height, videoSize.width);
    [waterMarkLayer setOpacity:1];


    CALayer *parentLayer = [CALayer layer]; 
    CALayer *videoLayer = [CALayer layer]; 
    [parentLayer setFrame:CGRectMake(0, 0, videoSize.height, videoSize.width)]; 
    [videoLayer setFrame:CGRectMake(0, 0, videoSize.height, videoSize.width)]; 
    [parentLayer addSublayer:videoLayer]; 
    [parentLayer addSublayer:waterMarkLayer]; 

    //Instruction 
    AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction]; 
    [instruction setTimeRange:CMTimeRangeMake(kCMTimeZero, [mixComposition duration])]; 
    AVAssetTrack *videoTrack = [[mixComposition tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]; 

    AVMutableVideoCompositionLayerInstruction* layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack]; 

    // rotate the landscape-encoded buffer 90° into portrait:
    // translate right by the render width, then rotate
    CGAffineTransform t1 = CGAffineTransformMakeTranslation(videoSize.height, 0);
    CGAffineTransform t2 = CGAffineTransformRotate(t1, degreesToRadians(90.0));
    [layerInstruction setTransform:t2 atTime:kCMTimeZero];


    [instruction setLayerInstructions:[NSArray arrayWithObject:layerInstruction]]; 


    [videoComp setRenderSize:CGSizeMake(videoSize.height, videoSize.width)]; 
    [videoComp setFrameDuration:CMTimeMake(1, 30)]; 
    [videoComp setAnimationTool:[AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer]]; 
    [videoComp setInstructions:[NSArray arrayWithObject:instruction]]; 
    return videoComp; 
} 
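
To see why the translate-then-rotate pair fills the portrait canvas, trace a point through it (my own worked example, not from the original post). CGAffineTransformRotate applies the rotation first, so a source pixel (x, y) in the landscape-encoded buffer maps to (-y, x) and, after the translation by videoSize.height, lands at (videoSize.height - y, x). For a 1920×1080 natural size, the buffer corners (0,0), (1920,0), (0,1080) and (1920,1080) land at (1080,0), (1080,1920), (0,0) and (0,1920) — exactly covering the 1080×1920 portrait render size.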



+(AVMutableVideoComposition*)addWatermarkToLandscapeVideoForAssetTrack:(AVAssetTrack*)aVideoAssetTrack WatermarkImage:(UIImage*)watermarkImage ContainerView:(UIView*)containerView Composition:(AVMutableComposition*)mixComposition{ 
    //adding image layer 
    AVMutableVideoComposition *videoComp = [AVMutableVideoComposition videoComposition];
    CGSize videoSize = [aVideoAssetTrack naturalSize];
    CALayer *aLayer = [CALayer layer];
    UIImage *newWaterMarkImage = [AVUtils scaleImage:watermarkImage toSize:CGRectMake(0, 0, videoSize.width, videoSize.height)];
    aLayer.contents = (id)newWaterMarkImage.CGImage;
    // the watermark spans the full frame, so no per-resolution offsets are needed
    aLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
    aLayer.opacity = 1; // feel free to lower the alpha here



    CALayer *parentLayer = [CALayer layer]; 
    CALayer *videoLayer = [CALayer layer]; 
    parentLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height); 
    videoLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height); 
    [parentLayer addSublayer:videoLayer]; 
    [parentLayer addSublayer:aLayer]; 

    videoComp.renderSize = videoSize; 
    videoComp.frameDuration = CMTimeMake(1, 30); 
    videoComp.animationTool = [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer]; 


    /// instruction 
    AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction]; 
    instruction.timeRange = CMTimeRangeMake(kCMTimeZero, [mixComposition duration]); 
    AVAssetTrack *videoTrack = [[mixComposition tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]; 
    AVMutableVideoCompositionLayerInstruction* layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack]; 
    instruction.layerInstructions = [NSArray arrayWithObject:layerInstruction]; 
    videoComp.instructions = [NSArray arrayWithObject: instruction]; 

    return videoComp; 
} 




CGFloat degreesToRadians (CGFloat deg) { 
    return deg * (M_PI/180.0f); 
} 



+(BOOL)isVideoPortait:(AVAssetTrack*)videoAssetTrack{
    BOOL isVideoAssetPortrait_ = NO;
    CGAffineTransform videoTransform = videoAssetTrack.preferredTransform;
    // a=0, b=1, c=-1, d=0 is a 90° rotation: portrait, home button at the bottom
    if (videoTransform.a == 0 && videoTransform.b == 1.0 && videoTransform.c == -1.0 && videoTransform.d == 0) {
     isVideoAssetPortrait_ = YES;
    }
    // a=0, b=-1, c=1, d=0 is a -90° rotation: portrait, home button at the top
    if (videoTransform.a == 0 && videoTransform.b == -1.0 && videoTransform.c == 1.0 && videoTransform.d == 0) {
     isVideoAssetPortrait_ = YES;
    }
    // identity and 180° transforms are landscape
    return isVideoAssetPortrait_;
}


+(CGRect)getResizedVideoFrame:(CGRect)naturalSize andPlayerViewSize:(CGRect)playerSize{
    // aspect-fit: compare the video's aspect ratio with the player's
    CGFloat resVi = naturalSize.size.width/naturalSize.size.height;
    CGFloat resPl = playerSize.size.width/playerSize.size.height;

    if (resPl > resVi) {
     // player is wider than the video: match heights, scale the width
     return CGRectMake(0, 0, naturalSize.size.width*playerSize.size.height/naturalSize.size.height, playerSize.size.height);
    }
    // player is taller than the video: match widths, scale the height
    return CGRectMake(0, 0, playerSize.size.width, naturalSize.size.height*playerSize.size.width/naturalSize.size.width);
}

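As a quick sanity check of the aspect-fit math (my own numbers, not from the original post): for a 1920×1080 video shown in a 375×667 player view, resVi ≈ 1.78 and resPl ≈ 0.56, so the second branch runs and returns CGRectMake(0, 0, 375, 1080*375/1920) ≈ a 375×211 frame — the full player width at the video's aspect ratio.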




+(UIImage *)scaleImage:(UIImage *)originalImage toSize:(CGRect)size
{
    // redraw the image into a bitmap context of the target size,
    // honoring a right-rotated source orientation
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(NULL, size.size.width, size.size.height, 8, 0, colorSpace, kCGImageAlphaPremultipliedLast);
    CGContextClearRect(context, CGRectMake(0, 0, size.size.width, size.size.height));

    if (originalImage.imageOrientation == UIImageOrientationRight) { 
     CGContextRotateCTM(context, -M_PI_2); 
     CGContextTranslateCTM(context, -size.size.height, 0.0f); 
     CGContextDrawImage(context, CGRectMake(0, 0, size.size.height, size.size.width), originalImage.CGImage); 
    } else { 
     CGContextDrawImage(context, CGRectMake(0, 0, size.size.width, size.size.height), originalImage.CGImage); 
    } 

    CGImageRef scaledImage = CGBitmapContextCreateImage(context); 
    CGColorSpaceRelease(colorSpace); 
    CGContextRelease(context); 

    UIImage *image = [UIImage imageWithCGImage:scaledImage]; 
    CGImageRelease(scaledImage); 

    return image; 
} 


+(void)convertVideoToMP4:(NSURL*)videoURL completionAction:(VideoConvertCompletionBlock)completion { 

    AVURLAsset *avAsset = [AVURLAsset URLAssetWithURL:videoURL options:nil]; 

    AVAssetExportSession *exportSession = [[AVAssetExportSession alloc]initWithAsset:avAsset presetName:AVAssetExportPresetMediumQuality]; 


    NSString *fullMoviePath = [NSTemporaryDirectory() stringByAppendingPathComponent:[@"temp" stringByAppendingPathExtension:@"mp4"]]; 
    NSError *error; 
    [[NSFileManager defaultManager] removeItemAtPath:fullMoviePath error:&error]; 
    if (error){ 
     NSLog(@"%@", error); 
    } 
    exportSession.outputURL = [NSURL fileURLWithPath:fullMoviePath]; 
    // change the output file type here if you need a different container (e.g. .3gp)
    exportSession.outputFileType = AVFileTypeMPEG4;
    exportSession.shouldOptimizeForNetworkUse = NO; 


    [exportSession exportAsynchronouslyWithCompletionHandler:^{ 
     switch ([exportSession status]) 
     { 
      case AVAssetExportSessionStatusFailed:{ 
       NSLog(@"Export session failed"); 
       dispatch_async(dispatch_get_main_queue(), ^{ 
        completion(nil, false); 
       }); 
      } 
       break; 
      case AVAssetExportSessionStatusCancelled:{ 
       NSLog(@"Export canceled"); 
       dispatch_async(dispatch_get_main_queue(), ^{ 
        completion(nil, false); 
       }); 
      } 
       break; 
      case AVAssetExportSessionStatusCompleted: 
      { 
       //Video conversion finished 
       NSLog(@"Successful!"); 
       dispatch_async(dispatch_get_main_queue(), ^{ 
        completion(exportSession.outputURL, true); 
       }); 

      } 
       break; 
      default: 
       break; 
     } 
    }]; 
} 
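
For completeness, a hypothetical call site for the conversion helper (it assumes these class methods live on the same AVUtils class used in the watermark call below, and that pickedVideoURL is a URL you already have):

    [AVUtils convertVideoToMP4:pickedVideoURL completionAction:^(NSURL *outputURL, BOOL success) {
     if (success) {
      NSLog(@"Converted MP4 written to %@", outputURL);
     }
    }];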

Usage: I am calling this method from Swift code as below.

AVUtils.createWatermark(forVideo: videoURL, watermark: LIMITUtils.getScreenShot(containerView: self.stickerContainer), stickerContainerView: self.stickerContainer) { (assetExport) in
    let exportSession: AVAssetExportSession = assetExport! as AVAssetExportSession
    switch exportSession.status {
    case .completed:
     DispatchQueue.main.async {
      self.saveVideo(url: exportSession.outputURL!)
     }
    case .failed:
     // an error occurred while merging
     break
    case .exporting:
     break
    default:
     break
    }
    DispatchQueue.main.async {
     ProgressUtils.hideHUD()
    }
}