I have created a watermark that displays the current time, but it does not change every second. How can I make the text update once per second? I want to burn the time into videos, like security-camera footage. The code below works fine for adding static text on top of a video. How do I add a text watermark that changes every second to a video in Swift 3?
import AVFoundation
import UIKit

func waterMark() {
    let filePath: String = Bundle.main.path(forResource: "Zombie", ofType: "mp4")!
    let videoAsset = AVURLAsset(url: URL(fileURLWithPath: filePath), options: nil)

    // Build a composition containing the video track of the source asset.
    let mixComposition = AVMutableComposition()
    let compositionVideoTrack: AVMutableCompositionTrack? = mixComposition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid)
    let clipVideoTrack = videoAsset.tracks(withMediaType: AVMediaTypeVideo)[0]
    try? compositionVideoTrack?.insertTimeRange(CMTimeRangeMake(kCMTimeZero, videoAsset.duration), of: clipVideoTrack, at: kCMTimeZero)
    compositionVideoTrack?.preferredTransform = clipVideoTrack.preferredTransform

    let videoSize: CGSize = clipVideoTrack.naturalSize

    // Optional image overlay layer (the image name here is a placeholder).
    let aLayer = CALayer()
    aLayer.contents = UIImage(named: "watermark")?.cgImage
    aLayer.frame = CGRect(x: videoSize.width - 65, y: videoSize.height - 75, width: 57, height: 57)
    aLayer.opacity = 0.65

    // Parent layer holds the video layer plus all overlay layers.
    let parentLayer = CALayer()
    let videoLayer = CALayer()
    parentLayer.frame = CGRect(x: 0, y: 0, width: videoSize.width, height: videoSize.height)
    videoLayer.frame = CGRect(x: 0, y: 0, width: videoSize.width, height: videoSize.height)
    parentLayer.addSublayer(videoLayer)
    parentLayer.addSublayer(aLayer)

    // Text layer showing the time at which waterMark() was called.
    // The string is rendered once, so the exported video shows a single, static timestamp.
    let titleLayer = CATextLayer()
    let dateFormatter = DateFormatter()
    dateFormatter.timeStyle = .medium
    // titleLayer.backgroundColor = UIColor.black.cgColor
    titleLayer.string = dateFormatter.string(from: Date())
    titleLayer.font = UIFont.systemFont(ofSize: 100)
    titleLayer.fontSize = 100 // CATextLayer ignores the point size of an assigned UIFont
    titleLayer.shadowOpacity = 0.5
    titleLayer.frame = parentLayer.frame
    titleLayer.display()
    // You may need to adjust this for proper display
    parentLayer.addSublayer(titleLayer)

    // Video composition that composites the Core Animation layers over the video.
    let videoComp = AVMutableVideoComposition()
    videoComp.renderSize = videoSize
    videoComp.frameDuration = CMTimeMake(1, 30)
    videoComp.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videoLayer, in: parentLayer)

    let instruction = AVMutableVideoCompositionInstruction()
    instruction.timeRange = CMTimeRangeMake(kCMTimeZero, mixComposition.duration)
    let videoTrack = mixComposition.tracks(withMediaType: AVMediaTypeVideo)[0]
    let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack)
    instruction.layerInstructions = [layerInstruction]
    videoComp.instructions = [instruction]

    // Export the composition to the Documents directory.
    let assetExport = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality)
    // AVAssetExportPresetPassthrough
    assetExport?.videoComposition = videoComp
    let paths = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)
    let documentsDirectory: String = paths[0]
    let videoName: String = "\(documentsDirectory)/mynewwatermarkedvideo.mp4"
    let exportUrl = URL(fileURLWithPath: videoName)
    if FileManager.default.fileExists(atPath: videoName) {
        print(videoName)
        try? FileManager.default.removeItem(atPath: videoName)
        print("file found again")
    }
    assetExport?.outputFileType = AVFileTypeQuickTimeMovie
    assetExport?.outputURL = exportUrl
    assetExport?.shouldOptimizeForNetworkUse = true
    // [strRecordedFilename setString: exportPath];
    assetExport?.exportAsynchronously(completionHandler: {() -> Void in
        DispatchQueue.main.async(execute: {() -> Void in
            print("Completed") // runs only once the export has actually finished
        })
    })
}
// call
waterMark()
Hi MwcsMac, thanks for your answer. That approach would work if I were processing this in real time. Unfortunately, as far as I know, NSTimer does not fire during offline rendering. –
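Below is a minimal sketch (mine, not from the original post or the linked answer) of one way to get a per-second timestamp without timers: since AVAssetExportSession renders offline, every change has to be pre-baked as a Core Animation animation on the video's timeline. The helper name, parameter names, and the font/frame values are illustrative assumptions. It adds one CATextLayer per second of video and drives each layer's opacity with a discrete keyframe animation anchored at AVCoreAnimationBeginTimeAtZero, so each layer is visible only during its own second of the export.

import AVFoundation
import UIKit

// Hypothetical helper: one CATextLayer per second, each shown only during its own second.
func addPerSecondTimestamps(to parentLayer: CALayer,
                            videoSize: CGSize,
                            videoDuration: CMTime,
                            startDate: Date) {
    let formatter = DateFormatter()
    formatter.timeStyle = .medium

    let totalSeconds = max(1, Int(ceil(CMTimeGetSeconds(videoDuration))))
    for second in 0..<totalSeconds {
        let textLayer = CATextLayer()
        textLayer.string = formatter.string(from: startDate.addingTimeInterval(TimeInterval(second)))
        textLayer.fontSize = 100
        textLayer.foregroundColor = UIColor.white.cgColor
        textLayer.shadowOpacity = 0.5
        textLayer.frame = CGRect(x: 0, y: 0, width: videoSize.width, height: 120)
        textLayer.opacity = 0.0 // hidden except during its own one-second window

        // Discrete opacity keyframes spanning the whole video: jump to 1 at `second`,
        // back to 0 at `second + 1`. beginTime must be AVCoreAnimationBeginTimeAtZero,
        // because a beginTime of 0 would be interpreted as "now".
        let start = NSNumber(value: Double(second) / Double(totalSeconds))
        let end = NSNumber(value: Double(second + 1) / Double(totalSeconds))
        let visibility = CAKeyframeAnimation(keyPath: "opacity")
        visibility.beginTime = AVCoreAnimationBeginTimeAtZero
        visibility.duration = CMTimeGetSeconds(videoDuration)
        visibility.calculationMode = kCAAnimationDiscrete
        if second + 1 < totalSeconds {
            visibility.values = [0.0, 1.0, 0.0]
            visibility.keyTimes = [NSNumber(value: 0.0), start, end, NSNumber(value: 1.0)]
        } else {
            // Last second: stay visible until the end of the video.
            visibility.values = [0.0, 1.0]
            visibility.keyTimes = [NSNumber(value: 0.0), start, NSNumber(value: 1.0)]
        }
        visibility.isRemovedOnCompletion = false
        visibility.fillMode = kCAFillModeBoth
        textLayer.add(visibility, forKey: "visibility")

        parentLayer.addSublayer(textLayer)
    }
}

In waterMark() above, this sketch would replace the single static titleLayer and be called before the AVVideoCompositionCoreAnimationTool is created, roughly as addPerSecondTimestamps(to: parentLayer, videoSize: videoSize, videoDuration: videoAsset.duration, startDate: Date()).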