This code used to work on iOS 9 to add a watermark and text to a video, but since iOS 10 it no longer works. An iOS 10 bug has been filed, but there has been no answer from Apple. I have not been able to implement any workaround to add the watermark and text to the video. With this code the video is sometimes exported successfully, but most of the time it is not.
How should I use AVVideoCompositionCoreAnimationTool so that it works as it did on iOS 9?
let videoComposition: AVMutableVideoComposition = AVMutableVideoComposition()
videoComposition.frameDuration = CMTimeMake(1, 60)
videoComposition.renderSize = CGSize(width: clipVideoTrack.naturalSize.height, height: clipVideoTrack.naturalSize.height)
let instruction: AVMutableVideoCompositionInstruction = AVMutableVideoCompositionInstruction()
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(60, 30))
// The transform rotates the video into portrait; without it the exported video comes out rotated by 90 degrees
let transformer: AVMutableVideoCompositionLayerInstruction =
AVMutableVideoCompositionLayerInstruction(assetTrack: clipVideoTrack)
let t1: CGAffineTransform = CGAffineTransform(translationX: clipVideoTrack.naturalSize.height, y: -(clipVideoTrack.naturalSize.width - clipVideoTrack.naturalSize.height)/2)
let t2: CGAffineTransform = t1.rotated(by: CGFloat(M_PI_2))
var finalTransform: CGAffineTransform = t2
transformer.setTransform(finalTransform, at: kCMTimeZero)
instruction.layerInstructions = [transformer]
videoComposition.instructions = [instruction]
let mixComposition = AVMutableComposition()
let compositionVideoTrack = mixComposition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid)
do {
try compositionVideoTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, asset.duration), of: clipVideoTrack, at: kCMTimeZero)
} catch {
print(error)
}
//Add watermark
let myImage = UIImage(named: "logo")
let aLayer = CALayer()
aLayer.contents = myImage!.cgImage
// Scale the layer geometry from view points to video pixels
let scaleFactor = clipVideoTrack.naturalSize.height/self.view.bounds.width
aLayer.frame = CGRect(x: (self.view.bounds.width-45)*scaleFactor, y: (self.view.bounds.width-40)*scaleFactor, width: 40*scaleFactor, height: 40*scaleFactor)
let titleLayer = CATextLayer()
titleLayer.string = "text"
titleLayer.font = UIFont(name: "helvetica", size: 0)
titleLayer.fontSize = clipVideoTrack.naturalSize.height/16
titleLayer.shadowOpacity = 0.5
titleLayer.alignmentMode = kCAAlignmentCenter
titleLayer.frame = CGRect(x: 0, y: 0, width: clipVideoTrack.naturalSize.height, height: clipVideoTrack.naturalSize.height/6)
titleLayer.display()
let videoSize = asset.tracks(withMediaType: AVMediaTypeVideo)[0].naturalSize
let parentLayer = CALayer()
let videoLayer = CALayer()
parentLayer.frame = CGRect(x: 0, y: 0, width: videoSize.height, height: videoSize.height)
videoLayer.frame = CGRect(x: 0, y: 0, width: videoSize.height, height: videoSize.height)
parentLayer.addSublayer(videoLayer)
parentLayer.addSublayer(aLayer)
parentLayer.addSublayer(titleLayer)
videoComposition.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videoLayer, in: parentLayer)
do { try FileManager.default.removeItem(at: filePath) }
catch let error as NSError {
NSLog("\(error), \(error.localizedDescription)")
}
let exportUrl: URL = filePath
self.videoUrl = filePath as NSURL
let exporter = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetMediumQuality)
exporter!.videoComposition = videoComposition
exporter!.outputFileType = AVFileTypeQuickTimeMovie
exporter!.outputURL = URL(fileURLWithPath: exportUrl.path)
exporter!.exportAsynchronously(completionHandler: {
DispatchQueue.main.async {
self.view.layer.addSublayer(self.avPlayerLayer)
let item = AVPlayerItem(url: exportUrl)
self.player.replaceCurrentItem(with: item)
if (self.player.currentItem != nil) {
print("Starting playback!")
self.player.play()
}
}
})
PLEASE NOTE: If I remove AVVideoCompositionCoreAnimationTool then the video is always exported, but it has no watermark and no text. How can I make AVVideoCompositionCoreAnimationTool work so that it does not conflict with AVAssetExportSession?
Some have implemented a workaround with customVideoCompositorClass and the AVVideoCompositing protocol (sketched below), but that seems like a heavy workaround compared to how it used to work.
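For reference, a minimal pass-through sketch of that approach could look like the following. Only the AVVideoCompositing protocol requirements come from AVFoundation; the class name, the BGRA pixel format choice and the pass-through behavior are just an illustration, and the actual watermark/text drawing would still have to be written inside startRequest:

import AVFoundation
import CoreVideo

// Hypothetical minimal compositor: it just forwards each source frame unchanged.
final class WatermarkCompositor: NSObject, AVVideoCompositing {
    // Pixel formats the compositor accepts and must produce.
    let sourcePixelBufferAttributes: [String : Any]? =
        [kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA]
    let requiredPixelBufferAttributesForRenderContext: [String : Any] =
        [kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA]

    func renderContextChanged(_ newRenderContext: AVVideoCompositionRenderContext) {
        // Keep the context around if you need its render size or pixel buffer pool.
    }

    func startRequest(_ request: AVAsynchronousVideoCompositionRequest) {
        // Drawing the watermark and text into a new pixel buffer would happen here;
        // this sketch only passes the first source frame through.
        guard let trackID = request.sourceTrackIDs.first?.int32Value,
              let frame = request.sourceFrame(byTrackID: trackID) else {
            request.finish(with: NSError(domain: "WatermarkCompositor", code: -1, userInfo: nil))
            return
        }
        request.finish(withComposedVideoFrame: frame)
    }
}

// Used instead of the animation tool:
// videoComposition.customVideoCompositorClass = WatermarkCompositor.self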
This solution works for me. It's super easy and super fast:
func addWatermark(inputURL: URL, outputURL: URL, handler:@escaping (_ exportSession: AVAssetExportSession?)-> Void) {
let mixComposition = AVMutableComposition()
let asset = AVAsset(url: inputURL)
let videoTrack = asset.tracks(withMediaType: AVMediaType.video)[0]
let timerange = CMTimeRangeMake(kCMTimeZero, asset.duration)
let compositionVideoTrack:AVMutableCompositionTrack = mixComposition.addMutableTrack(withMediaType: AVMediaType.video, preferredTrackID: CMPersistentTrackID(kCMPersistentTrackID_Invalid))!
do {
try compositionVideoTrack.insertTimeRange(timerange, of: videoTrack, at: kCMTimeZero)
compositionVideoTrack.preferredTransform = videoTrack.preferredTransform
} catch {
print(error)
}
// Composite the watermark over every frame with Core Image
let watermarkFilter = CIFilter(name: "CISourceOverCompositing")!
let watermarkImage = CIImage(image: UIImage(named: "waterMark")!)!
let videoComposition = AVVideoComposition(asset: asset) { (filteringRequest) in
// The current video frame is the background of the composite
let source = filteringRequest.sourceImage.clampedToExtent()
watermarkFilter.setValue(source, forKey: "inputBackgroundImage")
// Position the watermark 2 pixels in from the right edge (Core Image's origin is the bottom-left corner)
let transform = CGAffineTransform(translationX: filteringRequest.sourceImage.extent.width - watermarkImage.extent.width - 2, y: 0)
watermarkFilter.setValue(watermarkImage.transformed(by: transform), forKey: "inputImage")
filteringRequest.finish(with: watermarkFilter.outputImage!, context: nil)
}
guard let exportSession = AVAssetExportSession(asset: asset, presetName: AVAssetExportPreset640x480) else {
handler(nil)
return
}
exportSession.outputURL = outputURL
exportSession.outputFileType = AVFileType.mp4
exportSession.shouldOptimizeForNetworkUse = true
exportSession.videoComposition = videoComposition
exportSession.exportAsynchronously { () -> Void in
handler(exportSession)
}
}
Sorry, I haven't posted how to call this function. It's been a while since I touched that code, so I hope this will be enough:
let outputURL = NSURL.fileURL(withPath: "TempPath")
let inputURL = NSURL.fileURL(withPath: "VideoWithWatermarkPath")
addWatermark(inputURL: inputURL, outputURL: outputURL, handler: { (exportSession) in
guard let session = exportSession else {
// Error
return
}
switch session.status {
case .completed:
guard NSData(contentsOf: outputURL) != nil else {
// Error
return
}
// Now you can find the video with the watermark in the location outputURL
default:
// Error
break
}
})
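The two paths above are just placeholders. For illustration, real file URLs could be built like this (the "movie.mov" resource and the temporary file name are only examples):

// Hypothetical URLs: read the source video from the app bundle, write the result to the temp directory
let inputURL = Bundle.main.url(forResource: "movie", withExtension: "mov")!
let outputURL = FileManager.default.temporaryDirectory.appendingPathComponent("watermarked.mp4")
// Remove any leftover file so AVAssetExportSession can write to outputURL
try? FileManager.default.removeItem(at: outputURL)
addWatermark(inputURL: inputURL, outputURL: outputURL) { exportSession in
// Handle exportSession?.status exactly as in the snippet above
}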