Related
Here is my code that adds image & text overlays to a local video. The problem is that it's extremely SLOW. Any ideas how to fix it?
Also, I would appreciate it if you could suggest third-party libraries that can do watermarking.
/// Composes `watermark` (image plus optional text) over the video at `videoURL`
/// and exports the result as a `.mov` file in the temporary directory.
///
/// - Parameters:
///   - videoURL: Local URL of the source video.
///   - watermark: Wrapper supplying the overlay image, optional text, and layout info.
///   - fileName: Base name (without extension) for the exported file.
///   - onSuccess: Called on the main queue with the exported file URL.
///   - onFailure: Called on the main queue with the underlying error, if any.
public func addWatermark(
    fromVideoAt videoURL: URL,
    watermark: Watermark,
    fileName: String,
    onSuccess: @escaping (URL) -> Void,
    onFailure: @escaping ((Error?) -> Void)
) {
    let asset = AVURLAsset(url: videoURL)
    let composition = AVMutableComposition()
    guard
        let compositionTrack = composition.addMutableTrack(
            withMediaType: .video,
            preferredTrackID: kCMPersistentTrackID_Invalid
        ),
        let assetTrack = asset.tracks(withMediaType: .video).first
    else {
        onFailure(nil)
        return
    }
    do {
        // Copy the source video (and audio, when present) into the composition.
        let timeRange = CMTimeRange(start: .zero, duration: assetTrack.timeRange.duration)
        try compositionTrack.insertTimeRange(timeRange, of: assetTrack, at: .zero)
        if let audioAssetTrack = asset.tracks(withMediaType: .audio).first,
           let compositionAudioTrack = composition.addMutableTrack(
               withMediaType: .audio,
               preferredTrackID: kCMPersistentTrackID_Invalid
           ) {
            try compositionAudioTrack.insertTimeRange(
                timeRange,
                of: audioAssetTrack,
                at: .zero
            )
        }
    } catch {
        onFailure(error)
        return
    }
    compositionTrack.preferredTransform = assetTrack.preferredTransform
    // Portrait assets carry a 90-degree preferredTransform, so width/height swap.
    let videoInfo = orientation(from: assetTrack.preferredTransform)
    let videoSize: CGSize
    if videoInfo.isPortrait {
        videoSize = CGSize(
            width: assetTrack.naturalSize.height,
            height: assetTrack.naturalSize.width
        )
    } else {
        videoSize = assetTrack.naturalSize
    }
    let videoLayer = CALayer()
    // Set the frame once; the original assigned the identical frame twice.
    videoLayer.frame = CGRect(origin: .zero, size: videoSize)
    let overlayLayer = CALayer()
    overlayLayer.frame = CGRect(origin: .zero, size: videoSize)
    let imageFrame = watermark.calculateImageFrame(parentSize: videoSize)
    addImage(watermark.image, to: overlayLayer, frame: imageFrame)
    let textOrigin = CGPoint(x: imageFrame.minX + 4, y: imageFrame.minY)
    if let text = watermark.text {
        addText(
            text,
            to: overlayLayer,
            origin: textOrigin,
            textAttributes: Watermark.textAttributes(type: watermark.type)
        )
    }
    let outputLayer = CALayer()
    outputLayer.frame = CGRect(origin: .zero, size: videoSize)
    outputLayer.addSublayer(videoLayer)
    outputLayer.addSublayer(overlayLayer)
    let videoComposition = AVMutableVideoComposition()
    videoComposition.renderSize = videoSize
    // NOTE(review): 60 fps doubles the number of composited frames if the source
    // is 30 fps — consider matching the source frame rate to cut export time.
    videoComposition.frameDuration = CMTime(value: 1, timescale: 60)
    videoComposition.animationTool = AVVideoCompositionCoreAnimationTool(
        postProcessingAsVideoLayer: videoLayer,
        in: outputLayer
    )
    // Removed the custom color properties: "sRGB" is not a documented
    // AVVideoTransferFunction value, and setting a partial color triple
    // (with colorYCbCrMatrix = nil) forces extra color conversion during
    // export — a likely cause of the slowdown. Leaving them unset lets
    // AVFoundation pass the source color through on the fast path.
    let instruction = AVMutableVideoCompositionInstruction()
    instruction.timeRange = CMTimeRange(start: .zero, duration: composition.duration)
    videoComposition.instructions = [instruction]
    let layerInstruction = compositionLayerInstruction(
        for: compositionTrack,
        assetTrack: assetTrack
    )
    instruction.layerInstructions = [layerInstruction]
    guard let export = AVAssetExportSession(
        asset: composition,
        presetName: AVAssetExportPresetHighestQuality
    )
    else {
        onFailure(nil)
        return
    }
    let exportURL = URL(fileURLWithPath: NSTemporaryDirectory())
        .appendingPathComponent(fileName)
        .appendingPathExtension("mov")
    // AVAssetExportSession refuses to overwrite an existing file; clear any
    // leftover output from a previous run so repeated exports don't fail.
    try? FileManager.default.removeItem(at: exportURL)
    export.videoComposition = videoComposition
    export.outputFileType = .mov
    export.outputURL = exportURL
    export.exportAsynchronously {
        DispatchQueue.main.async {
            switch export.status {
            case .completed:
                onSuccess(exportURL)
            default:
                onFailure(export.error)
            }
        }
    }
}
Watermark is the wrapper struct. It contains image/text, text attributes, size and other similar helpful information.
I've tried without any luck:
export.shouldOptimizeForNetworkUse = true. It did not work.
AVAssetExportPresetPassthrough instead of AVAssetExportPresetHighestQuality. It removed overlays.
I have the following code which is relatively fast. It watermarks an 8 second video in about 2.56 seconds. When I ran it under Metal System Trace Instrument it seemed to be balanced and using GPU-acceleration the whole time. You just call exportIt()
As a side matter, this code uses async await wrapping of AVKit functions and migrates off any deprecated interfaces as of iOS 16.
A tidied up and working sample app with resource files is https://github.com/faisalmemon/watermark
The core code is as follows:
//
// WatermarkHelper.swift
// watermark
//
// Created by Faisal Memon on 09/02/2023.
//
import Foundation
import AVKit
/// Stateless helper that burns a watermark image into a movie using an
/// AVMutableComposition plus a Core Animation overlay, wrapping the
/// callback-based AVFoundation APIs in async/await (iOS 16-era, non-deprecated).
struct WatermarkHelper {
/// Failure modes surfaced by the watermarking pipeline.
enum WatermarkError: Error {
case cannotLoadResources
case cannotAddTrack
case cannotLoadVideoTrack(Error?)
case cannotCopyOriginalAudioVideo(Error?)
case noVideoTrackPresent
case exportSessionCannotBeCreated
}
/// Adds a mutable track of `mediaType` to `composition`.
/// - Throws: `WatermarkError.cannotAddTrack` when AVFoundation returns nil.
func compositionAddMediaTrack(_ composition: AVMutableComposition, withMediaType mediaType: AVMediaType) throws -> AVMutableCompositionTrack {
guard let compositionTrack = composition.addMutableTrack(
withMediaType: mediaType,
preferredTrackID: kCMPersistentTrackID_Invalid) else {
throw WatermarkError.cannotAddTrack
}
return compositionTrack
}
/// Async wrapper for `AVAsset.loadTracks(withMediaType:)`.
/// - Returns: The first matching track, or nil when the asset has none of that type.
/// - Throws: `WatermarkError.cannotLoadVideoTrack` when loading fails outright.
func loadTrack(inputVideo: AVAsset, withMediaType mediaType: AVMediaType) async throws -> AVAssetTrack? {
return try await withCheckedThrowingContinuation({
(continuation: CheckedContinuation<AVAssetTrack?, Error>) in
inputVideo.loadTracks(withMediaType: mediaType) { tracks, error in
if let tracks = tracks {
continuation.resume(returning: tracks.first)
} else {
continuation.resume(throwing: WatermarkError.cannotLoadVideoTrack(error))
}
}
})
}
/// Copies the source video track into `compositionTrack`, and the source audio
/// track (when one exists) into a newly added audio track, over the full asset
/// duration. Errors are wrapped in `cannotCopyOriginalAudioVideo`.
func bringOverVideoAndAudio(inputVideo: AVAsset, assetTrack: AVAssetTrack, compositionTrack: AVMutableCompositionTrack, composition: AVMutableComposition) async throws {
do {
let timeRange = await CMTimeRange(start: .zero, duration: try inputVideo.load(.duration))
try compositionTrack.insertTimeRange(timeRange, of: assetTrack, at: .zero)
if let audioAssetTrack = try await loadTrack(inputVideo: inputVideo, withMediaType: .audio) {
let compositionAudioTrack = try compositionAddMediaTrack(composition, withMediaType: .audio)
try compositionAudioTrack.insertTimeRange(timeRange, of: audioAssetTrack, at: .zero)
}
} catch {
print(error)
throw WatermarkError.cannotCopyOriginalAudioVideo(error)
}
}
/// Decodes a track's preferredTransform into a UI orientation and a portrait flag.
/// Only the four exact 90-degree rotations are recognized; anything else is
/// treated as `.up` / landscape.
private func orientation(from transform: CGAffineTransform) -> (orientation: UIImage.Orientation, isPortrait: Bool) {
var assetOrientation = UIImage.Orientation.up
var isPortrait = false
if transform.a == 0 && transform.b == 1.0 && transform.c == -1.0 && transform.d == 0 {
assetOrientation = .right
isPortrait = true
} else if transform.a == 0 && transform.b == -1.0 && transform.c == 1.0 && transform.d == 0 {
assetOrientation = .left
isPortrait = true
} else if transform.a == 1.0 && transform.b == 0 && transform.c == 0 && transform.d == 1.0 {
assetOrientation = .up
} else if transform.a == -1.0 && transform.b == 0 && transform.c == 0 && transform.d == -1.0 {
assetOrientation = .down
}
return (assetOrientation, isPortrait)
}
/// Loads the track's preferredTransform and naturalSize, swapping width and
/// height when the transform indicates a portrait recording.
func preferredTransformAndSize(compositionTrack: AVMutableCompositionTrack, assetTrack: AVAssetTrack) async throws -> (preferredTransform: CGAffineTransform, videoSize: CGSize) {
let transform = try await assetTrack.load(.preferredTransform)
let videoInfo = orientation(from: transform)
let videoSize: CGSize
let naturalSize = try await assetTrack.load(.naturalSize)
if videoInfo.isPortrait {
videoSize = CGSize(
width: naturalSize.height,
height: naturalSize.width)
} else {
videoSize = naturalSize
}
return (transform, videoSize)
}
/// Builds a layer instruction that applies the source's preferredTransform
/// from time zero, so the composed video keeps its recorded orientation.
private func compositionLayerInstruction(for track: AVCompositionTrack, assetTrack: AVAssetTrack, preferredTransform: CGAffineTransform) -> AVMutableVideoCompositionLayerInstruction {
let instruction = AVMutableVideoCompositionLayerInstruction(assetTrack: track)
instruction.setTransform(preferredTransform, at: .zero)
return instruction
}
/// Adds the watermark image as a sublayer scaled to the full video width
/// (preserving aspect ratio) and nudged down by 15% of its own height.
private func addImage(to layer: CALayer, watermark: UIImage, videoSize: CGSize) {
let imageLayer = CALayer()
let aspect: CGFloat = watermark.size.width / watermark.size.height
let width = videoSize.width
let height = width / aspect
imageLayer.frame = CGRect(
x: 0,
y: -height * 0.15,
width: width,
height: height)
imageLayer.contents = watermark.cgImage
layer.addSublayer(imageLayer)
}
/// Wires a single instruction covering the composition's full duration, with
/// one layer instruction carrying the orientation transform.
func composeVideo(composition: AVMutableComposition, videoComposition: AVMutableVideoComposition, compositionTrack: AVMutableCompositionTrack, assetTrack: AVAssetTrack, preferredTransform: CGAffineTransform) {
let instruction = AVMutableVideoCompositionInstruction()
instruction.timeRange = CMTimeRange(
start: .zero,
duration: composition.duration)
videoComposition.instructions = [instruction]
let layerInstruction = compositionLayerInstruction(
for: compositionTrack,
assetTrack: assetTrack, preferredTransform: preferredTransform)
instruction.layerInstructions = [layerInstruction]
}
/// Creates an MP4 export session at the highest-quality preset.
/// - Throws: `WatermarkError.exportSessionCannotBeCreated` when AVFoundation
///   cannot build a session for this composition/preset pair.
func exportSession(composition: AVMutableComposition, videoComposition: AVMutableVideoComposition, outputURL: URL) throws -> AVAssetExportSession {
guard let export = AVAssetExportSession(
asset: composition,
presetName: AVAssetExportPresetHighestQuality)
else {
print("Cannot create export session.")
throw WatermarkError.exportSessionCannotBeCreated
}
export.videoComposition = videoComposition
export.outputFileType = .mp4
export.outputURL = outputURL
return export
}
/// Runs the export and suspends until it finishes, resuming with the final
/// status or throwing the session's error.
func executeSession(_ session: AVAssetExportSession) async throws -> AVAssetExportSession.Status {
return try await withCheckedThrowingContinuation({
(continuation: CheckedContinuation<AVAssetExportSession.Status, Error>) in
session.exportAsynchronously {
DispatchQueue.main.async {
if let error = session.error {
continuation.resume(throwing: error)
} else {
continuation.resume(returning: session.status)
}
}
}
})
}
/// End-to-end pipeline: copy audio/video into a composition, overlay the
/// watermark via AVVideoCompositionCoreAnimationTool, and export to `outputURL`.
/// - Returns: The export session's final status.
func addWatermarkTopDriver(inputVideo: AVAsset, outputURL: URL, watermark: UIImage) async throws -> AVAssetExportSession.Status {
let composition = AVMutableComposition()
let compositionTrack = try compositionAddMediaTrack(composition, withMediaType: .video)
guard let videoAssetTrack = try await loadTrack(inputVideo: inputVideo, withMediaType: .video) else {
throw WatermarkError.noVideoTrackPresent
}
try await bringOverVideoAndAudio(inputVideo: inputVideo, assetTrack: videoAssetTrack, compositionTrack: compositionTrack, composition: composition)
let transformAndSize = try await preferredTransformAndSize(compositionTrack: compositionTrack, assetTrack: videoAssetTrack)
compositionTrack.preferredTransform = transformAndSize.preferredTransform
// Three-layer sandwich: video at the bottom, overlay on top, both inside
// the output layer handed to the animation tool.
let videoLayer = CALayer()
videoLayer.frame = CGRect(origin: .zero, size: transformAndSize.videoSize)
let overlayLayer = CALayer()
overlayLayer.frame = CGRect(origin: .zero, size: transformAndSize.videoSize)
addImage(to: overlayLayer, watermark: watermark, videoSize: transformAndSize.videoSize)
let outputLayer = CALayer()
outputLayer.frame = CGRect(origin: .zero, size: transformAndSize.videoSize)
outputLayer.addSublayer(videoLayer)
outputLayer.addSublayer(overlayLayer)
let videoComposition = AVMutableVideoComposition()
videoComposition.renderSize = transformAndSize.videoSize
// 30 fps output keeps the composited frame count (and export time) down.
videoComposition.frameDuration = CMTime(value: 1, timescale: 30)
videoComposition.animationTool = AVVideoCompositionCoreAnimationTool(
postProcessingAsVideoLayer: videoLayer,
in: outputLayer)
composeVideo(composition: composition, videoComposition: videoComposition, compositionTrack: compositionTrack, assetTrack: videoAssetTrack, preferredTransform: transformAndSize.preferredTransform)
let session = try exportSession(composition: composition, videoComposition: videoComposition, outputURL: outputURL)
return try await executeSession(session)
}
/// Creates a watermarked movie and saves it to the documents directory.
///
/// For an 8 second video (251 frames), this code takes 2.56 seconds on iPhone 11 producing a high quality video at 30 FPS.
/// - Returns: Time interval taken for processing.
public func exportIt() async throws -> TimeInterval {
let timeStart = Date()
guard
let filePath = Bundle.main.path(forResource: "donut-spinning", ofType: "mp4"),
let docUrl = try? FileManager.default.url(for: .documentDirectory, in: .userDomainMask, appropriateFor: nil, create: true),
let watermarkImage = UIImage(systemName: "seal") else {
throw WatermarkError.cannotLoadResources
}
let videoAsset = AVAsset(url: URL(filePath: filePath))
let outputURL = docUrl.appending(component: "watermark-donut-spinning.mp4")
// Export sessions fail if the destination already exists; remove stale output.
try? FileManager.default.removeItem(at: outputURL)
print(outputURL)
let result = try await addWatermarkTopDriver(inputVideo: videoAsset, outputURL: outputURL, watermark: watermarkImage)
let timeEnd = Date()
let duration = timeEnd.timeIntervalSince(timeStart)
print(result)
return duration
}
}
Use the method below to add a watermark to a video quickly:
/// Burns the bundled "waterMark" image into the top-right corner of the video
/// at `inputURL` using a Core Image source-over filter, exporting to `outputURL`.
///
/// - Parameters:
///   - inputURL: Local URL of the source video.
///   - outputURL: Destination for the exported .mp4 file.
///   - handler: Called with the finished export session, or nil when setup fails.
func addWatermark(inputURL: URL, outputURL: URL, handler: @escaping (_ exportSession: AVAssetExportSession?) -> Void) {
    let asset = AVAsset(url: inputURL)
    // Guard instead of force-unwrapping: a missing filter or image asset
    // previously crashed here.
    guard
        let watermarkFilter = CIFilter(name: "CISourceOverCompositing"),
        let uiImage = UIImage(named: "waterMark"),
        let watermarkImage = CIImage(image: uiImage)
    else {
        handler(nil)
        return
    }
    // NOTE: the original also built an AVMutableComposition here, but the
    // export below operates on `asset` directly, so the composition was dead
    // code and has been removed.
    let videoComposition = AVVideoComposition(asset: asset) { filteringRequest in
        // Each frame becomes the background; the watermark is translated to
        // sit 2 points in from the frame's right edge.
        let source = filteringRequest.sourceImage.clampedToExtent()
        watermarkFilter.setValue(source, forKey: "inputBackgroundImage")
        let transform = CGAffineTransform(
            translationX: filteringRequest.sourceImage.extent.width - watermarkImage.extent.width - 2,
            y: 0
        )
        watermarkFilter.setValue(watermarkImage.transformed(by: transform), forKey: "inputImage")
        // Fall back to the unfiltered frame rather than crashing if the
        // filter produces no output.
        filteringRequest.finish(with: watermarkFilter.outputImage ?? source, context: nil)
    }
    guard let exportSession = AVAssetExportSession(asset: asset, presetName: AVAssetExportPreset640x480) else {
        handler(nil)
        return
    }
    exportSession.outputURL = outputURL
    exportSession.outputFileType = AVFileType.mp4
    exportSession.shouldOptimizeForNetworkUse = true
    exportSession.videoComposition = videoComposition
    exportSession.exportAsynchronously {
        handler(exportSession)
    }
}
Call this method when you want to add watermark easily
// Example call site: watermark the video at `inputURL` and write it to `outputURL`.
let outputURL = NSURL.fileURL(withPath: "TempPath")
let inputURL = NSURL.fileURL(withPath: "VideoWithWatermarkPath")
addWatermark(inputURL: inputURL, outputURL: outputURL, handler: { (exportSession) in
    guard let session = exportSession else {
        // Setup failed before an export session could be created.
        return
    }
    switch session.status {
    case .completed:
        // Confirm the exported file is actually readable before using it.
        guard NSData(contentsOf: outputURL) != nil else {
            // Export reported success but the file is missing or unreadable.
            return
        }
        // Now you can find the video with the watermark at `outputURL`.
    default:
        // Export failed or was cancelled; inspect `session.error`.
        // `break` is required — an empty `default:` case does not compile.
        break
    }
})
Have you checked out Apple's documentation? It adds a title layer (CALayer) on top of an existing AVMutableComposition or an AVAsset? Since it's a legacy doc from iOS 6, you'll need to refactor a bit, but it should be fast on today's tech.
I am trying to export video using the following code. It works fine for the first 3 attempts and then fails on subsequent ones. I am trying to add recorded voices over the video; I am pretty new to all these concepts, so any help will be appreciated.
/// Renders the video at `url` into a new movie: optionally transformed, masked
/// to a preview area, overlaid with an image and/or an animated layer, and
/// mixed with a replacement audio track and/or the clip's own audio.
///
/// Exactly one of `success`/`failure` is eventually invoked.
/// - Parameters:
///   - url: Source video URL.
///   - frame: Optional target frame for the video layer, in preview coordinates (scaled by the render ratio).
///   - transformation: Optional affine transform applied to the video layer.
///   - previewArea: Optional mask rectangle (preview coordinates) rounded by `previewCornerRadius`.
///   - overlayImage: Image drawn over — or, with `setOverlayAsBackground`, under — the video.
///   - gifLayer: Pre-built animated layer, centered over the render area.
///   - audioUrl: Optional extra audio asset mixed into the output.
///   - muteAudio: When true, the source clip's own audio is dropped.
open func generate(video url: URL, with frame: CGRect? = nil, byApplying transformation: CGAffineTransform? = nil, in previewArea: CGRect? = nil, previewCornerRadius: Float = 0, overlayImage: UIImage? = nil, setOverlayAsBackground: Bool = false, gifLayer: CALayer? = nil, audioUrl: URL? = nil, muteAudio: Bool = false, success: @escaping ((URL) -> Void), failure: @escaping ((Error) -> Void)) {
    let mixComposition = AVMutableComposition()
    var mutableCompositionVideoTrack: AVMutableCompositionTrack? = nil
    var mutableCompositionOriginalAudioTrack: AVMutableCompositionTrack? = nil
    var mutableCompositionAudioTrack: AVMutableCompositionTrack? = nil
    let totalVideoCompositionInstruction = AVMutableVideoCompositionInstruction()
    let aVideoAsset = AVAsset(url: url)
    var aAudioAsset: AVAsset? = nil
    if let url = audioUrl {
        aAudioAsset = AVAsset(url: url)
    }
    if let videoTrack = mixComposition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid) {
        mutableCompositionVideoTrack = videoTrack
        if aAudioAsset != nil, let audioTrack = mixComposition.addMutableTrack(withMediaType: .audio, preferredTrackID: kCMPersistentTrackID_Invalid) {
            mutableCompositionAudioTrack = audioTrack
        }
        if !muteAudio, aVideoAsset.hasAudio, let originalAudioTrack = mixComposition.addMutableTrack(withMediaType: .audio, preferredTrackID: kCMPersistentTrackID_Invalid) {
            /// Keeps the source clip's own audio alongside any added narration.
            mutableCompositionOriginalAudioTrack = originalAudioTrack
        }
    }
    do {
        var originalVideoSize: CGSize = self.prefferedVideoSize
        let ratio = self.prefferedVideoSize.width / Utility.get9By16ScreenSize().width
        if let aVideoAssetTrack: AVAssetTrack = aVideoAsset.tracks(withMediaType: .video).first {
            originalVideoSize = aVideoAssetTrack.naturalSize
            var transforms = aVideoAssetTrack.preferredTransform
            if aVideoAsset.videoOrientation().orientation == .landscapeLeft || aVideoAsset.videoOrientation().orientation == .landscapeRight {
                // Letterbox landscape content vertically inside the portrait render size.
                let ratio = self.prefferedVideoSize.width / originalVideoSize.width
                let centerY: CGFloat = (self.prefferedVideoSize.height - (originalVideoSize.height * ratio)) / 2
                transforms = transforms.concatenating(CGAffineTransform(translationX: 0, y: centerY).scaledBy(x: ratio, y: ratio))
            }
            try mutableCompositionVideoTrack?.insertTimeRange(CMTimeRangeMake(start: CMTime.zero, duration: aVideoAssetTrack.timeRange.duration), of: aVideoAssetTrack, at: CMTime.zero)
            if !muteAudio, aVideoAsset.hasAudio, let audioAssetTrack: AVAssetTrack = aVideoAsset.tracks(withMediaType: .audio).first {
                try mutableCompositionOriginalAudioTrack?.insertTimeRange(CMTimeRangeMake(start: CMTime.zero, duration: audioAssetTrack.timeRange.duration), of: audioAssetTrack, at: CMTime.zero)
            }
            if let audioAsset = aAudioAsset, let aAudioAssetTrack: AVAssetTrack = audioAsset.tracks(withMediaType: .audio).first {
                try mutableCompositionAudioTrack?.insertTimeRange(CMTimeRangeMake(start: CMTime.zero, duration: aAudioAssetTrack.timeRange.duration), of: aAudioAssetTrack, at: CMTime.zero)
            }
            // The instruction must span the whole composition. If an audio track is
            // longer than the video track, an instruction cut to the video duration
            // leaves uncovered time and the export fails with AVFoundationErrorDomain
            // -11841 ("The video could not be composed"). `mixComposition.duration`
            // is the longest track duration, so it always covers every track.
            totalVideoCompositionInstruction.timeRange = CMTimeRangeMake(start: CMTime.zero, duration: mixComposition.duration)
            let mixVideoTrack = mixComposition.tracks(withMediaType: AVMediaType.video)[0]
            let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: mixVideoTrack)
            layerInstruction.setTransform(transforms, at: CMTime.zero)
            totalVideoCompositionInstruction.layerInstructions = [layerInstruction]
        }
        let mutableVideoComposition = AVMutableVideoComposition()
        mutableVideoComposition.frameDuration = CMTimeMake(value: 1, timescale: 12)
        mutableVideoComposition.renderSize = self.prefferedVideoSize
        mutableVideoComposition.instructions = [totalVideoCompositionInstruction]
        // Layer tree: parent > (optional container > video), plus overlay/gif layers.
        let parentLayer = CALayer()
        parentLayer.frame = self.prefferedVideoRect
        parentLayer.isGeometryFlipped = true
        let videoLayer = CALayer()
        videoLayer.contentsGravity = .resizeAspect
        videoLayer.contentsScale = 1
        videoLayer.frame = self.prefferedVideoRect
        if let frame = frame {
            let scalledFrame = frame.scale(by: ratio)
            videoLayer.frame = scalledFrame
            let videoContainerLayer = CALayer()
            parentLayer.frame = self.prefferedVideoRect
            parentLayer.addSublayer(videoContainerLayer)
            videoContainerLayer.addSublayer(videoLayer)
            if let transformation = transformation {
                if let previewFrame = previewArea {
                    // Rounded-rect mask confines the video to the preview area.
                    let maskLayer = CALayer()
                    maskLayer.backgroundColor = UIColor.black.cgColor
                    let scalledMaskFrame = previewFrame.scale(by: ratio)
                    maskLayer.frame = scalledMaskFrame
                    maskLayer.cornerRadius = previewCornerRadius.cgFloat
                    maskLayer.masksToBounds = true
                    videoContainerLayer.mask = maskLayer
                }
                videoLayer.transform = CATransform3DMakeAffineTransform(transformation)
            }
        } else {
            parentLayer.addSublayer(videoLayer)
        }
        /// Add overlay if an overlay image is present.
        if let image = overlayImage {
            let imageLayer = CALayer()
            imageLayer.contents = image.cgImage
            imageLayer.frame = self.prefferedVideoRect
            imageLayer.masksToBounds = true
            if setOverlayAsBackground {
                parentLayer.insertSublayer(imageLayer, at: 0)
            } else {
                parentLayer.addSublayer(imageLayer)
            }
        }
        /// Add the animated layer, centered and scaled to the render ratio.
        if let overlay = gifLayer {
            overlay.frame = CGRect(origin: CGPoint(x: (self.prefferedVideoSize.width - overlay.frame.width) / 2, y: (self.prefferedVideoSize.height - overlay.frame.height) / 2), size: overlay.frame.size)
            overlay.transform = CATransform3DMakeAffineTransform(CGAffineTransform(scaleX: ratio, y: ratio))
            parentLayer.addSublayer(overlay)
        }
        mutableVideoComposition.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videoLayer, in: parentLayer)
        let outputURL = URL(fileURLWithPath: NSTemporaryDirectory()).appendingPathComponent("tmp-rendered-video-R6S9K2B4.m4v")
        self.exportVideo(from: mixComposition, toFile: outputURL, with: mutableVideoComposition, success: success, failure: failure)
    } catch {
        DCDebug.print(error)
        failure(error)
    }
}
/// Exports `composition` to `output` as MP4, optionally applying `videoComposition`.
/// Guarantees that exactly one of `success`/`failure` is invoked, on the main queue.
///
/// - Parameters:
///   - composition: The composition to export.
///   - output: Destination file URL; any existing file there is removed first.
///   - videoComposition: Optional video composition (overlays, transforms).
///   - success: Receives `output` when the export completes.
///   - failure: Receives the underlying error when the export fails or is cancelled.
func exportVideo(from composition: AVComposition, toFile output: URL, with videoComposition: AVVideoComposition? = nil, success: @escaping ((URL) -> Void), failure: @escaping ((Error) -> Void)) {
    do {
        // AVAssetExportSession refuses to overwrite; remove any stale file first.
        if FileManager.default.fileExists(atPath: output.path) {
            try FileManager.default.removeItem(at: output)
        }
        guard let exportSession = AVAssetExportSession(asset: composition, presetName: self.presetName ?? AVAssetExportPresetHighestQuality) else {
            failure(VideoMakerError(error: .kFailedToStartAssetExportSession))
            return
        }
        exportSession.outputURL = output
        exportSession.outputFileType = AVFileType.mp4
        exportSession.shouldOptimizeForNetworkUse = true
        if let videoComposition = videoComposition {
            exportSession.videoComposition = videoComposition
        }
        /// Try to export the file and handle the status cases.
        exportSession.exportAsynchronously(completionHandler: {
            DispatchQueue.main.async {
                switch exportSession.status {
                case .completed:
                    success(output)
                case .failed, .cancelled:
                    // The original only called `failure` when `error` was non-nil,
                    // so a nil-error failure invoked NO callback and the caller
                    // hung forever. Always report, with a fallback error.
                    failure(exportSession.error ?? VideoMakerError(error: .kFailedToStartAssetExportSession))
                default:
                    success(output)
                }
            }
        })
    } catch {
        DCDebug.print(error)
        failure(error)
    }
}
I am getting the following error:
Domain=AVFoundationErrorDomain Code=-11841 "Operation Stopped" UserInfo={NSLocalizedDescription=Operation Stopped, NSLocalizedFailureReason=The video could not be composed.}
following are time range values when export fails
(lldb) po aVideoAsset.tracks(withMediaType: .audio).first?.timeRange.duration
▿ Optional<CMTime>
▿ some : CMTime
- value : 1852
- timescale : 600
▿ flags : CMTimeFlags
- rawValue : 1
- epoch : 0
(lldb) po aVideoAsset.tracks(withMediaType: .video).first?.timeRange.duration
▿ Optional<CMTime>
▿ some : CMTime
- value : 1800
- timescale : 600
▿ flags : CMTimeFlags
- rawValue : 1
- epoch : 0
I solved this issue by replacing following line
totalVideoCompositionInstruction.timeRange = CMTimeRangeMake(start: CMTime.zero, duration: aVideoAssetTrack.timeRange.duration)
By
// Use the longer of the audio/video durations so the composition instruction
// spans every track; an instruction shorter than the longest track leaves
// uncovered time and the export fails with error -11841.
if let originalAudioTrack = mutableCompositionOriginalAudioTrack, originalAudioTrack.timeRange.duration > aVideoAssetTrack.timeRange.duration, !muteAudio, aVideoAsset.hasAudio {
totalVideoCompositionInstruction.timeRange = CMTimeRangeMake(start: CMTime.zero, duration: originalAudioTrack.timeRange.duration)
} else {
totalVideoCompositionInstruction.timeRange = CMTimeRangeMake(start: CMTime.zero, duration: aVideoAssetTrack.timeRange.duration)
}
This solved my issue, but I am not sure if this is correct solution to this problem or just a hack, so if anyone provide me proper explanation to this issue and a valid solution other than this, then bounty is yours.
I have been trying to add an image overlay on a video and save the video, with the overlay, to the device library.
// Streams a remote MP4 inside an embedded AVPlayerViewController sized to
// match `videoView`. (`player`, `avpController`, and `videoView` are
// presumably properties of the enclosing view controller — defined elsewhere.)
let myURL = "https://clips.vorwaerts-gmbh.de/big_buck_bunny.mp4"
let url = URL(string:myURL)
player = AVPlayer(url: url!)
avpController.player = player
avpController.player?.play()
avpController.showsPlaybackControls = false
// Match the embedded player's view to the host container before adding it.
avpController.view.frame.size.height = videoView.frame.size.height
avpController.view.frame.size.width = videoView.frame.size.width
self.videoView.addSubview(avpController.view)
How to add image overlay on video? any help much appreciated pls..
I have created this
import UIKit
import AVFoundation
import AVKit
/// Demo controller: downloads a sample movie, composites an image overlay on
/// it with AVVideoCompositionCoreAnimationTool, and plays the result.
class ViewController: UIViewController {

    override func viewDidLoad() {
        super.viewDidLoad()
        // Kick off the demo download-and-watermark flow immediately.
        shareClicked()
    }

    /// Downloads the sample video to the documents directory, merges the
    /// bundled "images.png" overlay onto it, then plays the merged movie.
    func shareClicked() {
        let url = URL(string: "https://clips.vorwaerts-gmbh.de/big_buck_bunny.mp4")!
        DispatchQueue.global(qos: .background).async {
            if let urlData = try? Data(contentsOf: url) {
                let documentsPath = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true).first!
                let fileUrl = URL(fileURLWithPath: documentsPath).appendingPathComponent("fffffff.mp4")
                // Remove any previous download so the write below succeeds.
                if FileManager.default.fileExists(atPath: fileUrl.path) {
                    try? FileManager.default.removeItem(at: fileUrl)
                    print("removed")
                }
                try? urlData.write(to: fileUrl)
                self.merge(video: fileUrl.path, withForegroundImage: UIImage(named: "images.png")!, completion: { mergedURL in
                    // Only play when the merge actually produced a file
                    // (the original force-unwrapped here and could crash).
                    if let mergedURL = mergedURL {
                        DispatchQueue.main.async {
                            self.play(mergedURL)
                        }
                    }
                })
            }
        }
    }

    /// Presents a modal AVPlayerViewController for the movie at `url`.
    func play(_ url: URL) {
        DispatchQueue.main.async {
            let vc = AVPlayerViewController()
            vc.player = AVPlayer(url: url)
            vc.player?.externalPlaybackVideoGravity = AVLayerVideoGravity.resizeAspect
            self.present(vc, animated: true, completion: nil)
        }
    }

    /// Copies every audio track from the asset at `videoUrl` into `composition`.
    private func addAudioTrack(composition: AVMutableComposition, videoUrl: URL) {
        let videoUrlAsset = AVURLAsset(url: videoUrl, options: nil)
        let audioTracks = videoUrlAsset.tracks(withMediaType: AVMediaType.audio)
        guard !audioTracks.isEmpty,
              let compositionAudioTrack = composition.addMutableTrack(withMediaType: AVMediaType.audio, preferredTrackID: CMPersistentTrackID()) else {
            // No audio in the source (or no track could be added): video-only output.
            return
        }
        for audioTrack in audioTracks {
            // Best-effort per track; the original `try!` crashed on any bad track.
            try? compositionAudioTrack.insertTimeRange(audioTrack.timeRange, of: audioTrack, at: CMTime.zero)
        }
    }

    /// Composites `foregroundImage` over the movie at `videoPath` and exports
    /// the result to the caches directory.
    /// - Parameters:
    ///   - videoPath: Filesystem path of the source movie.
    ///   - foregroundImage: Image drawn in the top layer (50x50 at the origin).
    ///   - completion: Receives the exported file URL, or nil on any failure.
    func merge(
        video videoPath: String,
        withForegroundImage foregroundImage: UIImage,
        completion: @escaping (URL?) -> Void) -> () {
        let videoUrl = URL(fileURLWithPath: videoPath)
        let videoUrlAsset = AVURLAsset(url: videoUrl, options: nil)
        // Build a composition holding the source's video (and audio) tracks.
        let mutableComposition = AVMutableComposition()
        guard
            let videoAssetTrack = videoUrlAsset.tracks(withMediaType: AVMediaType.video).first,
            let videoCompositionTrack = mutableComposition.addMutableTrack(withMediaType: AVMediaType.video, preferredTrackID: kCMPersistentTrackID_Invalid)
        else {
            completion(nil)
            return
        }
        videoCompositionTrack.preferredTransform = videoAssetTrack.preferredTransform
        do {
            try videoCompositionTrack.insertTimeRange(CMTimeRange(start: CMTime.zero, duration: videoAssetTrack.timeRange.duration), of: videoAssetTrack, at: CMTime.zero)
        } catch {
            print(error)
            completion(nil)
            return
        }
        addAudioTrack(composition: mutableComposition, videoUrl: videoUrl)
        // Layer sandwich: video at the bottom, image overlay on top.
        let videoSize: CGSize = videoCompositionTrack.naturalSize
        let frame = CGRect(x: 0.0, y: 0.0, width: videoSize.width, height: videoSize.height)
        let imageLayer = CALayer()
        imageLayer.contents = foregroundImage.cgImage
        imageLayer.frame = CGRect(x: 0.0, y: 0.0, width: 50, height: 50)
        let videoLayer = CALayer()
        videoLayer.frame = frame
        let animationLayer = CALayer()
        animationLayer.frame = frame
        animationLayer.addSublayer(videoLayer)
        animationLayer.addSublayer(imageLayer)
        // The composition track was added to `mutableComposition`, so using it
        // directly avoids the original's `(videoCompositionTrack?.asset!)!` chain.
        let videoComposition = AVMutableVideoComposition(propertiesOf: mutableComposition)
        videoComposition.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videoLayer, in: animationLayer)
        let documentDirectory = NSSearchPathForDirectoriesInDomains(FileManager.SearchPathDirectory.cachesDirectory, FileManager.SearchPathDomainMask.userDomainMask, true).first!
        let documentDirectoryUrl = URL(fileURLWithPath: documentDirectory)
        let destinationFilePath = documentDirectoryUrl.appendingPathComponent("result.mp4")
        do {
            // Export sessions cannot overwrite; clear a previous result first.
            if FileManager.default.fileExists(atPath: destinationFilePath.path) {
                try FileManager.default.removeItem(at: destinationFilePath)
                print("removed")
            }
        } catch {
            print(error)
        }
        guard let exportSession = AVAssetExportSession(asset: mutableComposition, presetName: AVAssetExportPresetHighestQuality) else {
            completion(nil)
            return
        }
        exportSession.videoComposition = videoComposition
        exportSession.outputURL = destinationFilePath
        exportSession.outputFileType = AVFileType.mp4
        exportSession.exportAsynchronously { [weak exportSession] in
            // The original called `completion(outputURL!)` unconditionally,
            // handing back a path even when the export had failed.
            guard let session = exportSession, session.status == .completed else {
                completion(nil)
                return
            }
            completion(session.outputURL)
        }
    }
}
And you will get this result
I am working on a task in which I have to trim the recorded video from particular start point to particular end point as entered or selected by user.
How am I supposed to do that? I used UIVideoEditorController before, but I don't want to use its default view; I want to trim the video directly.
// Fragment (Swift 2-era API): configures an already-created `exportSession`
// to trim the asset between `starttime` and `endTime`, then exports.
// `newURL`, `exportSession`, `starttime`, and `endTime` are defined elsewhere.
// NOTE(review): NSURL(string:) on a filesystem path yields a non-file URL,
// which is a likely cause of the NSURLErrorDomain -1100/-3000 failures quoted
// below — NSURL(fileURLWithPath:) is the file-URL initializer; verify.
let FinalUrlTosave = NSURL(string: "\(newURL)")
exportSession!.outputURL=FinalUrlTosave
exportSession!.shouldOptimizeForNetworkUse = true
// exportSession.outputFileType = AVFileTypeQuickTimeMovie
exportSession!.outputFileType = AVFileTypeQuickTimeMovie;
let start:CMTime
let duration:CMTime
var st = starttime.doubleValue
var ed = endTime.doubleValue
// Timescale 600 is a common multiple of standard video frame rates.
start = CMTimeMakeWithSeconds(st, 600)
duration = CMTimeMakeWithSeconds(ed, 600)
// let timeRangeForCurrentSlice = CMTimeRangeMake(start, duration)
// NOTE(review): CMTimeRangeMake takes (start, DURATION); `ed` here looks like
// an absolute end time, so the range may be longer than intended — confirm.
let range = CMTimeRangeMake(start, duration);
exportSession!.timeRange = range
exportSession!.exportAsynchronouslyWithCompletionHandler({
switch exportSession!.status{
case AVAssetExportSessionStatus.Failed:
print("failed \(exportSession!.error)")
case AVAssetExportSessionStatus.Cancelled:
print("cancelled \(exportSession!.error)")
default:
print("complete....complete")
// self.SaveVideoToPhotoLibrary(destinationURL1!)
}
})
I am trying to achieve my goal using this but not succeeding.
Error message:
failed Optional(Error Domain=NSURLErrorDomain Code=-1100 "The
requested URL was not found on this server."
UserInfo={NSErrorFailingURLStringKey=file:///var/mobile/Containers/Data/Application/E68D3BFD-6923-4EA6-9FB3-C020CE4AA9D4/Documents/moment/jGq_9AUFa47s2ZiiPP4x.mp4,
NSErrorFailingURLKey=file:///var/mobile/Containers/Data/Application/E68D3BFD-6923-4EA6-9FB3-C020CE4AA9D4/Documents/moment/jGq_9AUFa47s2ZiiPP4x.mp4,
NSLocalizedDescription=The requested URL was not found on this
server., NSUnderlyingError=0x1553c220 {Error Domain=N
Error occured second time:
failed Optional(Error Domain=NSURLErrorDomain Code=-3000 "Cannot
create file" UserInfo={NSUnderlyingError=0x14e00000 {Error
Domain=NSOSStatusErrorDomain Code=-12124 "(null)"},
NSLocalizedDescription=Cannot create file})
I found my solution using this method and it works like a charm....
/// Trims the movie at `sourceURL1` to the interval [`statTime`, `endTime`]
/// (seconds) and exports it as MP4 into Documents/output/.
/// (Swift 2-era APIs: NSFileManager, URLByAppendingPathComponent, etc. —
/// see the Swift 5 version below for the modern equivalent.)
func cropVideo(sourceURL1: NSURL, statTime:Float, endTime:Float)
{
let manager = NSFileManager.defaultManager()
guard let documentDirectory = try? manager.URLForDirectory(.DocumentDirectory, inDomain: .UserDomainMask, appropriateForURL: nil, create: true) else {return}
guard let mediaType = "mp4" as? String else {return}
guard let url = sourceURL1 as? NSURL else {return}
if mediaType == kUTTypeMovie as String || mediaType == "mp4" as String {
let asset = AVAsset(URL: url)
// Duration in seconds = value / timescale.
let length = Float(asset.duration.value) / Float(asset.duration.timescale)
print("video length: \(length) seconds")
let start = statTime
let end = endTime
var outputURL = documentDirectory.URLByAppendingPathComponent("output")
do {
try manager.createDirectoryAtURL(outputURL, withIntermediateDirectories: true, attributes: nil)
let name = Moment.newName()
outputURL = outputURL.URLByAppendingPathComponent("\(name).mp4")
}catch let error {
print(error)
}
//Remove existing file
_ = try? manager.removeItemAtURL(outputURL)
guard let exportSession = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetHighestQuality) else {return}
exportSession.outputURL = outputURL
exportSession.outputFileType = AVFileTypeMPEG4
// timeRange trims the export to the requested window.
let startTime = CMTime(seconds: Double(start ?? 0), preferredTimescale: 1000)
let endTime = CMTime(seconds: Double(end ?? length), preferredTimescale: 1000)
let timeRange = CMTimeRange(start: startTime, end: endTime)
exportSession.timeRange = timeRange
exportSession.exportAsynchronouslyWithCompletionHandler{
switch exportSession.status {
case .Completed:
print("exported at \(outputURL)")
self.saveVideoTimeline(outputURL)
case .Failed:
print("failed \(exportSession.error)")
case .Cancelled:
print("cancelled \(exportSession.error)")
default: break
}
}
}
}
Swift 5
/// Trims the video at `sourceURL1` to the [statTime, endTime] range and
/// exports it as an MP4 into Documents/output/ with a UUID file name.
///
/// - Parameters:
///   - sourceURL1: File URL of the source video.
///   - statTime: Start of the range to keep, in seconds.
///   - endTime: End of the range to keep, in seconds.
func cropVideo(sourceURL1: URL, statTime:Float, endTime:Float)
{
    let manager = FileManager.default
    guard let documentDirectory = try? manager.url(for: .documentDirectory, in: .userDomainMask, appropriateFor: nil, create: true) else {return}

    // The original guarded on `mediaType == kUTTypeMovie || mediaType == "mp4"`
    // with `mediaType` hard-coded to "mp4" — an always-true condition, removed.
    let asset = AVAsset(url: sourceURL1)
    let length = Float(asset.duration.value) / Float(asset.duration.timescale)
    print("video length: \(length) seconds")

    var outputURL = documentDirectory.appendingPathComponent("output")
    do {
        try manager.createDirectory(at: outputURL, withIntermediateDirectories: true, attributes: nil)
        outputURL = outputURL.appendingPathComponent("\(UUID().uuidString).mp4")
    } catch {
        print(error)
        // No valid destination — the export below could not succeed.
        return
    }
    // Remove any previous export at the same path.
    _ = try? manager.removeItem(at: outputURL)

    guard let exportSession = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetHighestQuality) else {return}
    exportSession.outputURL = outputURL
    exportSession.outputFileType = .mp4
    exportSession.timeRange = CMTimeRange(
        start: CMTime(seconds: Double(statTime), preferredTimescale: 1000),
        end: CMTime(seconds: Double(endTime), preferredTimescale: 1000))

    exportSession.exportAsynchronously{
        switch exportSession.status {
        case .completed:
            print("exported at \(outputURL)")
        case .failed:
            // String(describing:) avoids the implicit-Optional interpolation warning.
            print("failed \(String(describing: exportSession.error))")
        case .cancelled:
            print("cancelled \(String(describing: exportSession.error))")
        default: break
        }
    }
}
A swift4 version for this.
/// Trims [startTime, endTime] out of the video at `sourceURL` and exports it
/// as MP4 into Documents/output/, reporting the result URL via `completion`.
///
/// - Parameters:
///   - sourceURL: File URL of the source video.
///   - startTime: Start of the range to keep, in seconds.
///   - endTime: End of the range to keep, in seconds.
///   - completion: Invoked with the output URL on successful export only.
static func cropVideo(sourceURL: URL, startTime: Double, endTime: Double, completion: ((_ outputUrl: URL) -> Void)? = nil)
{
    let fileManager = FileManager.default
    let documentDirectory = fileManager.urls(for: .documentDirectory, in: .userDomainMask)[0]
    let asset = AVAsset(url: sourceURL)
    let length = Float(asset.duration.value) / Float(asset.duration.timescale)
    print("video length: \(length) seconds")

    var outputURL = documentDirectory.appendingPathComponent("output")
    do {
        try fileManager.createDirectory(at: outputURL, withIntermediateDirectories: true, attributes: nil)
        // Strip the source extension first; the original produced doubled
        // names like "movie.mp4.mp4".
        outputURL = outputURL.appendingPathComponent("\(sourceURL.deletingPathExtension().lastPathComponent).mp4")
    } catch {
        print(error)
        // Without a usable destination the export would only fail later.
        return
    }
    // Remove any previous export at the same path.
    try? fileManager.removeItem(at: outputURL)

    guard let exportSession = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetHighestQuality) else { return }
    exportSession.outputURL = outputURL
    exportSession.outputFileType = .mp4
    exportSession.timeRange = CMTimeRange(start: CMTime(seconds: startTime, preferredTimescale: 1000),
                                          end: CMTime(seconds: endTime, preferredTimescale: 1000))
    exportSession.exportAsynchronously {
        switch exportSession.status {
        case .completed:
            print("exported at \(outputURL)")
            completion?(outputURL)
        case .failed:
            print("failed \(exportSession.error.debugDescription)")
        case .cancelled:
            print("cancelled \(exportSession.error.debugDescription)")
        default: break
        }
    }
}
This one does the job and it fixes the rotation problem.
extension AVAsset {
/// Returns a new in-memory asset trimmed to [startTime, endTime].
///
/// Copies every track of the receiver into a fresh AVMutableComposition and
/// carries each track's `preferredTransform` across, so the result keeps the
/// original rotation. Nothing is written to disk here; hand the returned
/// composition to an AVAssetExportSession to produce a file.
///
/// - Parameters:
///   - startTime: Start of the range to keep.
///   - endTime: End of the range to keep; expected to be >= `startTime`.
/// - Throws: `TrimError` wrapping any error raised while inserting a track.
func assetByTrimming(startTime: CMTime, endTime: CMTime) throws -> AVAsset {
let duration = CMTimeSubtract(endTime, startTime)
let timeRange = CMTimeRange(start: startTime, duration: duration)
let composition = AVMutableComposition()
do {
// Copy every source track (video, audio, …) into the composition.
for track in tracks {
let compositionTrack = composition.addMutableTrack(withMediaType: track.mediaType, preferredTrackID: track.trackID)
// Preserve per-track orientation metadata (fixes the rotation problem).
compositionTrack?.preferredTransform = track.preferredTransform
try compositionTrack?.insertTimeRange(timeRange, of: track, at: CMTime.zero)
}
} catch let error {
throw TrimError("error during composition", underlyingError: error)
}
return composition
}
/// Error thrown by `assetByTrimming(startTime:endTime:)`.
struct TrimError: Error {
// Human-readable message, always prefixed with "TrimVideo: " by the init.
let description: String
// The underlying AVFoundation error, when one triggered this failure.
let underlyingError: Error?
init(_ description: String, underlyingError: Error? = nil) {
self.description = "TrimVideo: " + description
self.underlyingError = underlyingError
}
}
/// Trims [statTime, endTime] from the source video and exports it as an MP4
/// into the Documents directory, named with the current timestamp.
///
/// - Parameters:
///   - sourceURL1: File URL of the source video.
///   - statTime: Start of the range to keep, in seconds.
///   - endTime: End of the range to keep, in seconds.
func cropVideo1(_ sourceURL1: URL, statTime:Float, endTime:Float){
    let videoAsset: AVAsset = AVAsset(url: sourceURL1)
    // NOTE(review): the original built an AVMutableComposition and an
    // AVMutableVideoComposition here but never attached either to the export
    // session, so they had no effect and have been removed.
    let length = Float(videoAsset.duration.value)
    // Raw CMTime value (not seconds) — kept from the original for parity.
    print(length)

    let exportSession = AVAssetExportSession(asset: videoAsset, presetName: AVAssetExportPresetHighestQuality)!
    // The original assigned outputFileType twice (MPEG4, then QuickTime),
    // so a ".mp4" path ended up holding a QuickTime container — a common
    // cause of export "Operation Stopped" / cannot-create-file errors.
    exportSession.outputFileType = AVFileTypeMPEG4

    let start = CMTime(seconds: Double(statTime), preferredTimescale: 1000)
    let end = CMTime(seconds: Double(endTime), preferredTimescale: 1000)
    exportSession.timeRange = CMTimeRange(start: start, end: end)

    // Timestamped output name keeps successive exports from colliding.
    let formatter = DateFormatter()
    formatter.dateFormat = "yyyy'-'MM'-'dd'T'HH':'mm':'ss'Z'"
    let date = Date()
    let documentsPath = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0] as NSString
    let outputPath = "\(documentsPath)/\(formatter.string(from: date)).mp4"
    exportSession.outputURL = URL(fileURLWithPath: outputPath)

    exportSession.exportAsynchronously(completionHandler: { () -> Void in
        DispatchQueue.main.async(execute: {
            // exportDidFinish inspects the session status and saves the file.
            self.exportDidFinish(exportSession)
            print("success")
        })
    })
}
/// Saves a completed export to the Photos library (via the deprecated
/// ALAssetsLibrary API) and stops the activity spinner when done.
///
/// - Parameter session: The finished export session to inspect.
func exportDidFinish(_ session: AVAssetExportSession) {
    if session.status == AVAssetExportSessionStatus.completed {
        let outputURL = session.outputURL
        let library = ALAssetsLibrary()
        if library.videoAtPathIs(compatibleWithSavedPhotosAlbum: outputURL) {
            library.writeVideoAtPath(toSavedPhotosAlbum: outputURL) { alAssetURL, error in
                if error != nil {
                    DispatchQueue.main.async(execute: {
                        print("Failed to save video")
                    })
                } else {
                    DispatchQueue.main.async(execute: {
                        // The original called `Print(...)` (capital P), which
                        // does not compile; also fixed the "Sucessfully" typo.
                        print("Successfully saved video")
                    })
                }
                self.activityIndicator.stopAnimating()
            }
        }
    }
}
I wrote a program in Swift. I want to merge a video with an audio file, but I got this error:
"failed Error Domain=AVFoundationErrorDomain Code=-11838 "Operation Stopped" UserInfo=0x17da4230 {NSLocalizedDescription=Operation Stopped, NSLocalizedFailureReason=The operation is not supported for this media.}"
code
/// Merges the video track of `moviePathUrl` with the audio of `audioURL` and
/// exports the result as an MP4 to `savePathUrl`.
/// NOTE(review): legacy Swift 1.x syntax — this is the code the question's
/// -11838 "Operation Stopped" error comes from; the insert errors below are
/// passed `nil` and therefore silently ignored.
func mergeAudio(audioURL: NSURL, moviePathUrl: NSURL, savePathUrl: NSURL) {
var composition = AVMutableComposition()
// One empty composition track per media type; filled by the inserts below.
let trackVideo:AVMutableCompositionTrack = composition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: CMPersistentTrackID())
let trackAudio:AVMutableCompositionTrack = composition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: CMPersistentTrackID())
// Request precise duration/timing so the inserted ranges line up exactly.
let option = NSDictionary(object: true, forKey: "AVURLAssetPreferPreciseDurationAndTimingKey")
let sourceAsset = AVURLAsset(URL: moviePathUrl, options: option as [NSObject : AnyObject])
let audioAsset = AVURLAsset(URL: audioURL, options: option as [NSObject : AnyObject])
let tracks = sourceAsset.tracksWithMediaType(AVMediaTypeVideo)
let audios = audioAsset.tracksWithMediaType(AVMediaTypeAudio)
if tracks.count > 0 {
// Both inserts are cut to the audio's duration, so the merged movie ends
// when the audio does.
let assetTrack:AVAssetTrack = tracks[0] as! AVAssetTrack
let assetTrackAudio:AVAssetTrack = audios[0] as! AVAssetTrack
let audioDuration:CMTime = assetTrackAudio.timeRange.duration
let audioSeconds:Float64 = CMTimeGetSeconds(assetTrackAudio.timeRange.duration)
// NOTE(review): `error: nil` discards insert failures — any problem here
// only surfaces later as an opaque export error.
trackVideo.insertTimeRange(CMTimeRangeMake(kCMTimeZero,audioDuration), ofTrack: assetTrack, atTime: kCMTimeZero, error: nil)
trackAudio.insertTimeRange(CMTimeRangeMake(kCMTimeZero,audioDuration), ofTrack: assetTrackAudio, atTime: kCMTimeZero, error: nil)
}
// Passthrough preset: container rewrite only, no re-encode.
var assetExport: AVAssetExportSession = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetPassthrough)
assetExport.outputFileType = AVFileTypeMPEG4
// NOTE(review): savePathUrl must be a writable file URL ending in a file
// name — a wrong destination here is a known cause of the reported error.
assetExport.outputURL = savePathUrl
self.tmpMovieURL = savePathUrl
assetExport.shouldOptimizeForNetworkUse = true
assetExport.exportAsynchronouslyWithCompletionHandler { () -> Void in
switch assetExport.status {
case AVAssetExportSessionStatus.Completed:
let assetsLib = ALAssetsLibrary()
assetsLib.writeVideoAtPathToSavedPhotosAlbum(savePathUrl, completionBlock: nil)
println("success")
case AVAssetExportSessionStatus.Failed:
println("failed \(assetExport.error)")
case AVAssetExportSessionStatus.Cancelled:
println("cancelled \(assetExport.error)")
default:
println("complete")
}
}
}
I suspect the media type (mpeg4) is wrong.
Where is the problem? What am I missing?
Improved code (of Govind's answer) with some additional features:
Merge audio of the video + external audio (the initial answer was dropping the sound of the video)
Flip video horizontally if needed (I personally use it when user captures using frontal camera, btw instagram flips it too)
Apply preferredTransform correctly which solves the issue when video was saved rotated (video is external: captured by other device/generated by other app)
Removed some unused code with VideoComposition.
Added a completion handler to the method so that it can be called from a different class.
Update to Swift 4.
Step 1.
import UIKit
import AVFoundation
import AVKit
import AssetsLibrary
Step 2.
/// Merges video and sound while keeping sound of the video too
///
/// - Parameters:
/// - videoUrl: URL to video file
/// - audioUrl: URL to audio file
/// - shouldFlipHorizontally: pass True if video was recorded using frontal camera otherwise pass False
/// - completion: completion of saving: error or url with final video
/// Merges video and sound while keeping the sound of the video too.
///
/// - Parameters:
///   - videoUrl: URL to the video file.
///   - audioUrl: URL to the external audio file.
///   - shouldFlipHorizontally: pass true if the video was recorded using the
///     front camera, otherwise pass false.
///   - completion: called with either an error or the URL of the final video.
func mergeVideoAndAudio(videoUrl: URL,
                        audioUrl: URL,
                        shouldFlipHorizontally: Bool = false,
                        completion: @escaping (_ error: Error?, _ url: URL?) -> Void) {

    let mixComposition = AVMutableComposition()
    var mutableCompositionVideoTrack = [AVMutableCompositionTrack]()
    var mutableCompositionAudioTrack = [AVMutableCompositionTrack]()
    var mutableCompositionAudioOfVideoTrack = [AVMutableCompositionTrack]()

    //start merge
    let aVideoAsset = AVAsset(url: videoUrl)
    let aAudioAsset = AVAsset(url: audioUrl)

    let compositionAddVideo = mixComposition.addMutableTrack(withMediaType: AVMediaTypeVideo,
                                                             preferredTrackID: kCMPersistentTrackID_Invalid)
    let compositionAddAudio = mixComposition.addMutableTrack(withMediaType: AVMediaTypeAudio,
                                                             preferredTrackID: kCMPersistentTrackID_Invalid)
    let compositionAddAudioOfVideo = mixComposition.addMutableTrack(withMediaType: AVMediaTypeAudio,
                                                                    preferredTrackID: kCMPersistentTrackID_Invalid)

    let aVideoAssetTrack: AVAssetTrack = aVideoAsset.tracks(withMediaType: AVMediaTypeVideo)[0]
    let aAudioOfVideoAssetTrack: AVAssetTrack? = aVideoAsset.tracks(withMediaType: AVMediaTypeAudio).first
    let aAudioAssetTrack: AVAssetTrack = aAudioAsset.tracks(withMediaType: AVMediaTypeAudio)[0]

    // Carry the source orientation across so externally recorded video is not
    // exported rotated.
    compositionAddVideo.preferredTransform = aVideoAssetTrack.preferredTransform

    if shouldFlipHorizontally {
        // Mirror the video horizontally (front-camera capture).
        // NOTE(review): the second translation uses naturalSize.width on the
        // y axis — looks like it should be the height; confirm with a
        // portrait recording before relying on this path.
        var frontalTransform: CGAffineTransform = CGAffineTransform(scaleX: -1.0, y: 1.0)
        frontalTransform = frontalTransform.translatedBy(x: -aVideoAssetTrack.naturalSize.width, y: 0.0)
        frontalTransform = frontalTransform.translatedBy(x: 0.0, y: -aVideoAssetTrack.naturalSize.width)
        compositionAddVideo.preferredTransform = frontalTransform
    }

    mutableCompositionVideoTrack.append(compositionAddVideo)
    mutableCompositionAudioTrack.append(compositionAddAudio)
    mutableCompositionAudioOfVideoTrack.append(compositionAddAudioOfVideo)

    do {
        // Everything is cut to the video's duration (the audio file may be
        // longer than the video).
        try mutableCompositionVideoTrack[0].insertTimeRange(CMTimeRangeMake(kCMTimeZero,
                                                                            aVideoAssetTrack.timeRange.duration),
                                                            of: aVideoAssetTrack,
                                                            at: kCMTimeZero)
        try mutableCompositionAudioTrack[0].insertTimeRange(CMTimeRangeMake(kCMTimeZero,
                                                                            aVideoAssetTrack.timeRange.duration),
                                                            of: aAudioAssetTrack,
                                                            at: kCMTimeZero)
        // Also carry over the video's own audio track, if it has one.
        if let aAudioOfVideoAssetTrack = aAudioOfVideoAssetTrack {
            try mutableCompositionAudioOfVideoTrack[0].insertTimeRange(CMTimeRangeMake(kCMTimeZero,
                                                                                       aVideoAssetTrack.timeRange.duration),
                                                                       of: aAudioOfVideoAssetTrack,
                                                                       at: kCMTimeZero)
        }
    } catch {
        // The original only printed here and then exported an incomplete
        // composition; report the failure to the caller and stop instead.
        print(error.localizedDescription)
        completion(error, nil)
        return
    }

    // Exporting
    let savePathUrl: URL = URL(fileURLWithPath: NSHomeDirectory() + "/Documents/newVideo.mp4")
    do { // delete old video
        try FileManager.default.removeItem(at: savePathUrl)
    } catch { print(error.localizedDescription) }

    let assetExport: AVAssetExportSession = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality)!
    assetExport.outputFileType = AVFileTypeMPEG4
    assetExport.outputURL = savePathUrl
    assetExport.shouldOptimizeForNetworkUse = true

    assetExport.exportAsynchronously { () -> Void in
        switch assetExport.status {
        case AVAssetExportSessionStatus.completed:
            print("success")
            completion(nil, savePathUrl)
        case AVAssetExportSessionStatus.failed:
            print("failed \(assetExport.error?.localizedDescription ?? "error nil")")
            completion(assetExport.error, nil)
        case AVAssetExportSessionStatus.cancelled:
            print("cancelled \(assetExport.error?.localizedDescription ?? "error nil")")
            completion(assetExport.error, nil)
        default:
            print("complete")
            completion(assetExport.error, nil)
        }
    }
}
Again, thanks to @Govind's answer! It helped me a lot!
Hope this update helps someone too:)
In the question above, I ran into the same error because of a wrong savePathUrl: the destination URL should include the new video's file name, as in the code below.
I was looking for the code to Merge audio and video files into one video but couldn't find anywhere so after spending hours while reading apple docs I wrote this code.
NOTE : This is tested and 100% working code for me.
Step 1
Import this modules in your viewController.
import UIKit
import AVFoundation
import AVKit
import AssetsLibrary
step 2:
Add this function in your code
/// Merges the video at `videoUrl` with the audio at `audioUrl` and writes the
/// result to Documents/newVideo.mp4 (Swift 2 / legacy API names).
func mergeFilesWithUrl(videoUrl:NSURL, audioUrl:NSURL)
{
let mixComposition : AVMutableComposition = AVMutableComposition()
var mutableCompositionVideoTrack : [AVMutableCompositionTrack] = []
var mutableCompositionAudioTrack : [AVMutableCompositionTrack] = []
// NOTE(review): this instruction is configured below but never attached to a
// video composition that the export uses, so it has no effect.
let totalVideoCompositionInstruction : AVMutableVideoCompositionInstruction = AVMutableVideoCompositionInstruction()
//start merge
let aVideoAsset : AVAsset = AVAsset(URL: videoUrl)
let aAudioAsset : AVAsset = AVAsset(URL: audioUrl)
mutableCompositionVideoTrack.append(mixComposition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid))
mutableCompositionAudioTrack.append( mixComposition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid))
let aVideoAssetTrack : AVAssetTrack = aVideoAsset.tracksWithMediaType(AVMediaTypeVideo)[0]
let aAudioAssetTrack : AVAssetTrack = aAudioAsset.tracksWithMediaType(AVMediaTypeAudio)[0]
do{
// Both tracks are cut to the video's duration.
try mutableCompositionVideoTrack[0].insertTimeRange(CMTimeRangeMake(kCMTimeZero, aVideoAssetTrack.timeRange.duration), ofTrack: aVideoAssetTrack, atTime: kCMTimeZero)
//In my case my audio file is longer then video file so i took videoAsset duration
//instead of audioAsset duration
try mutableCompositionAudioTrack[0].insertTimeRange(CMTimeRangeMake(kCMTimeZero, aVideoAssetTrack.timeRange.duration), ofTrack: aAudioAssetTrack, atTime: kCMTimeZero)
//Use this instead above line if your audiofile and video file's playing durations are same
// try mutableCompositionAudioTrack[0].insertTimeRange(CMTimeRangeMake(kCMTimeZero, aVideoAssetTrack.timeRange.duration), ofTrack: aAudioAssetTrack, atTime: kCMTimeZero)
}catch{
// NOTE(review): insert failures are swallowed here; the export below will
// then fail with a much less helpful error.
}
totalVideoCompositionInstruction.timeRange = CMTimeRangeMake(kCMTimeZero,aVideoAssetTrack.timeRange.duration )
// NOTE(review): this video composition is also never passed to the export
// session — renderSize/frameDuration have no effect as written.
let mutableVideoComposition : AVMutableVideoComposition = AVMutableVideoComposition()
mutableVideoComposition.frameDuration = CMTimeMake(1, 30)
mutableVideoComposition.renderSize = CGSizeMake(1280,720)
// playerItem = AVPlayerItem(asset: mixComposition)
// player = AVPlayer(playerItem: playerItem!)
//
//
// AVPlayerVC.player = player
//find your video on this URl
let savePathUrl : NSURL = NSURL(fileURLWithPath: NSHomeDirectory() + "/Documents/newVideo.mp4")
let assetExport: AVAssetExportSession = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality)!
assetExport.outputFileType = AVFileTypeMPEG4
assetExport.outputURL = savePathUrl
assetExport.shouldOptimizeForNetworkUse = true
assetExport.exportAsynchronouslyWithCompletionHandler { () -> Void in
switch assetExport.status {
case AVAssetExportSessionStatus.Completed:
//Uncomment this if u want to store your video in asset
//let assetsLib = ALAssetsLibrary()
//assetsLib.writeVideoAtPathToSavedPhotosAlbum(savePathUrl, completionBlock: nil)
print("success")
case AVAssetExportSessionStatus.Failed:
print("failed \(assetExport.error)")
case AVAssetExportSessionStatus.Cancelled:
print("cancelled \(assetExport.error)")
default:
print("complete")
}
}
}
Step 3:
Call function where u want like this
let videoUrl : NSURL = NSURL(fileURLWithPath: NSBundle.mainBundle().pathForResource("SampleVideo", ofType: "mp4")!)
let audioUrl : NSURL = NSURL(fileURLWithPath: NSBundle.mainBundle().pathForResource("SampleAudio", ofType: "mp3")!)
mergeFilesWithUrl(videoUrl, audioUrl: audioUrl)
hope this will help you and will save your time.
Swift 4.2 / 5
/// Merges a video with an external audio track and exports the result as an
/// MP4 into the Documents directory.
///
/// - Parameters:
///   - videoUrl: URL of the video file.
///   - audioUrl: URL of the audio file.
///   - success: called with the URL of the exported file.
///   - failure: called with the error (possibly nil) when the merge fails.
func mergeVideoWithAudio(videoUrl: URL, audioUrl: URL, success: @escaping ((URL) -> Void), failure: @escaping ((Error?) -> Void)) {
    let mixComposition: AVMutableComposition = AVMutableComposition()
    var mutableCompositionVideoTrack: [AVMutableCompositionTrack] = []
    var mutableCompositionAudioTrack: [AVMutableCompositionTrack] = []

    // NOTE(review): the original also built an AVMutableVideoCompositionInstruction
    // and an AVMutableVideoComposition that were never attached to the export
    // session; that dead code has been removed.

    let aVideoAsset: AVAsset = AVAsset(url: videoUrl)
    let aAudioAsset: AVAsset = AVAsset(url: audioUrl)

    if let videoTrack = mixComposition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid),
       let audioTrack = mixComposition.addMutableTrack(withMediaType: .audio, preferredTrackID: kCMPersistentTrackID_Invalid) {
        mutableCompositionVideoTrack.append(videoTrack)
        mutableCompositionAudioTrack.append(audioTrack)

        if let aVideoAssetTrack: AVAssetTrack = aVideoAsset.tracks(withMediaType: .video).first,
           let aAudioAssetTrack: AVAssetTrack = aAudioAsset.tracks(withMediaType: .audio).first {
            do {
                // Both tracks are cut to the video's duration (the audio may
                // be longer than the video).
                try mutableCompositionVideoTrack.first?.insertTimeRange(CMTimeRangeMake(start: CMTime.zero, duration: aVideoAssetTrack.timeRange.duration), of: aVideoAssetTrack, at: CMTime.zero)
                try mutableCompositionAudioTrack.first?.insertTimeRange(CMTimeRangeMake(start: CMTime.zero, duration: aVideoAssetTrack.timeRange.duration), of: aAudioAssetTrack, at: CMTime.zero)
                // Preserve the source orientation.
                videoTrack.preferredTransform = aVideoAssetTrack.preferredTransform
            } catch {
                print(error)
            }
        }
    }

    if let documentsPath = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true).first {
        let outputURL = URL(fileURLWithPath: documentsPath).appendingPathComponent("\("fileName").m4v")
        // Overwrite any previous export at the same path.
        do {
            if FileManager.default.fileExists(atPath: outputURL.path) {
                try FileManager.default.removeItem(at: outputURL)
            }
        } catch { }

        if let exportSession = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality) {
            exportSession.outputURL = outputURL
            exportSession.outputFileType = AVFileType.mp4
            exportSession.shouldOptimizeForNetworkUse = true
            /// try to export the file and handle the status cases
            exportSession.exportAsynchronously(completionHandler: {
                switch exportSession.status {
                case .completed:
                    print("finished")
                    success(outputURL)
                case .failed, .cancelled:
                    // The original invoked failure only when error was non-nil,
                    // leaving the caller without any callback otherwise; it
                    // also routed success through `default:`, which could fire
                    // for non-completed statuses.
                    failure(exportSession.error)
                default:
                    break
                }
            })
        } else {
            failure(nil)
        }
    }
}
Version Swift3 with URL and new syntax.
/// Merges the video at `videoUrl` with the audio at `audioUrl` and writes the
/// result to Documents/newVideo.mp4 (Swift 3 syntax).
func mergeFilesWithUrl(videoUrl:URL, audioUrl:URL)
{
let mixComposition : AVMutableComposition = AVMutableComposition()
var mutableCompositionVideoTrack : [AVMutableCompositionTrack] = []
var mutableCompositionAudioTrack : [AVMutableCompositionTrack] = []
// NOTE(review): this instruction is configured below but never attached to a
// video composition that the export uses, so it has no effect.
let totalVideoCompositionInstruction : AVMutableVideoCompositionInstruction = AVMutableVideoCompositionInstruction()
//start merge
let aVideoAsset : AVAsset = AVAsset(url: videoUrl)
let aAudioAsset : AVAsset = AVAsset(url: audioUrl)
mutableCompositionVideoTrack.append(mixComposition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid))
mutableCompositionAudioTrack.append( mixComposition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid))
let aVideoAssetTrack : AVAssetTrack = aVideoAsset.tracks(withMediaType: AVMediaTypeVideo)[0]
let aAudioAssetTrack : AVAssetTrack = aAudioAsset.tracks(withMediaType: AVMediaTypeAudio)[0]
do{
// Both tracks are cut to the video's duration.
try mutableCompositionVideoTrack[0].insertTimeRange(CMTimeRangeMake(kCMTimeZero, aVideoAssetTrack.timeRange.duration), of: aVideoAssetTrack, at: kCMTimeZero)
//In my case my audio file is longer then video file so i took videoAsset duration
//instead of audioAsset duration
try mutableCompositionAudioTrack[0].insertTimeRange(CMTimeRangeMake(kCMTimeZero, aVideoAssetTrack.timeRange.duration), of: aAudioAssetTrack, at: kCMTimeZero)
//Use this instead above line if your audiofile and video file's playing durations are same
// try mutableCompositionAudioTrack[0].insertTimeRange(CMTimeRangeMake(kCMTimeZero, aVideoAssetTrack.timeRange.duration), ofTrack: aAudioAssetTrack, atTime: kCMTimeZero)
}catch{
// NOTE(review): insert failures are swallowed here; the export below will
// then fail with a much less helpful error.
}
totalVideoCompositionInstruction.timeRange = CMTimeRangeMake(kCMTimeZero,aVideoAssetTrack.timeRange.duration )
// NOTE(review): this video composition is also never passed to the export
// session — renderSize/frameDuration have no effect as written.
let mutableVideoComposition : AVMutableVideoComposition = AVMutableVideoComposition()
mutableVideoComposition.frameDuration = CMTimeMake(1, 30)
mutableVideoComposition.renderSize = CGSize(width: 1280, height: 720)
// playerItem = AVPlayerItem(asset: mixComposition)
// player = AVPlayer(playerItem: playerItem!)
//
//
// AVPlayerVC.player = player
//find your video on this URl
let savePathUrl : URL = URL(fileURLWithPath: NSHomeDirectory() + "/Documents/newVideo.mp4")
let assetExport: AVAssetExportSession = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality)!
assetExport.outputFileType = AVFileTypeMPEG4
assetExport.outputURL = savePathUrl
assetExport.shouldOptimizeForNetworkUse = true
assetExport.exportAsynchronously { () -> Void in
switch assetExport.status {
case AVAssetExportSessionStatus.completed:
//Uncomment this if u want to store your video in asset
//let assetsLib = ALAssetsLibrary()
//assetsLib.writeVideoAtPathToSavedPhotosAlbum(savePathUrl, completionBlock: nil)
print("success")
case AVAssetExportSessionStatus.failed:
print("failed \(assetExport.error)")
case AVAssetExportSessionStatus.cancelled:
print("cancelled \(assetExport.error)")
default:
print("complete")
}
}
}
Swift 5 version (Also repeats audio if video is larger than audio) : Just pass audio and video URLs. I have tried this with local video and remote audio url.
/// Merges a video with an external audio track (Swift 5). If the audio is
/// shorter than the video it is repeated (tiled) until the video ends.
///
/// - Parameters:
///   - videoUrl: URL of the video file (local).
///   - audioUrl: URL of the audio file (local or remote).
///   - success: called with the URL of the exported MP4.
///   - failure: called with the error (possibly nil) when the merge fails.
func mergeVideoWithAudio(videoUrl: URL,
                         audioUrl: URL,
                         success: @escaping ((URL) -> Void),
                         failure: @escaping ((Error?) -> Void)) {

    let mixComposition: AVMutableComposition = AVMutableComposition()
    var mutableCompositionVideoTrack: [AVMutableCompositionTrack] = []
    var mutableCompositionAudioTrack: [AVMutableCompositionTrack] = []

    // NOTE(review): the original also built an AVMutableVideoCompositionInstruction
    // and an AVMutableVideoComposition that were never attached to the export
    // session; that dead code has been removed.

    let aVideoAsset: AVAsset = AVAsset(url: videoUrl)
    let aAudioAsset: AVAsset = AVAsset(url: audioUrl)

    if let videoTrack = mixComposition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid),
       let audioTrack = mixComposition.addMutableTrack(withMediaType: .audio, preferredTrackID: kCMPersistentTrackID_Invalid) {
        mutableCompositionVideoTrack.append( videoTrack )
        mutableCompositionAudioTrack.append( audioTrack )

        if let aVideoAssetTrack: AVAssetTrack = aVideoAsset.tracks(withMediaType: .video).first,
           let aAudioAssetTrack: AVAssetTrack = aAudioAsset.tracks(withMediaType: .audio).first {
            do {
                try mutableCompositionVideoTrack.first?.insertTimeRange(CMTimeRangeMake(start: CMTime.zero, duration: aVideoAssetTrack.timeRange.duration), of: aVideoAssetTrack, at: CMTime.zero)

                let videoDuration = aVideoAsset.duration
                if CMTimeCompare(videoDuration, aAudioAsset.duration) != 1 {
                    // Audio is at least as long as the video: one insert, cut
                    // to the video's duration. (The original used `== -1` and
                    // inserted NO audio at all when the durations were equal.)
                    try mutableCompositionAudioTrack.first?.insertTimeRange(CMTimeRangeMake(start: CMTime.zero, duration: aVideoAssetTrack.timeRange.duration), of: aAudioAssetTrack, at: CMTime.zero)
                } else {
                    // Audio is shorter: tile it until the video is covered.
                    // The original inserted a video-length range of the audio
                    // on every pass and clamped with the overflow instead of
                    // the remainder, producing overlapping/failed inserts.
                    var currentTime = CMTime.zero
                    while CMTimeCompare(currentTime, videoDuration) == -1 {
                        var sliceDuration = aAudioAsset.duration
                        let remaining = CMTimeSubtract(videoDuration, currentTime)
                        if CMTimeCompare(sliceDuration, remaining) == 1 {
                            sliceDuration = remaining // final, partial repeat
                        }
                        try mutableCompositionAudioTrack.first?.insertTimeRange(CMTimeRangeMake(start: CMTime.zero, duration: sliceDuration), of: aAudioAssetTrack, at: currentTime)
                        currentTime = CMTimeAdd(currentTime, sliceDuration)
                    }
                }
                // Preserve the source orientation.
                videoTrack.preferredTransform = aVideoAssetTrack.preferredTransform
            } catch {
                print(error)
            }
        }
    }

    if let documentsPath = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true).first {
        let outputURL = URL(fileURLWithPath: documentsPath).appendingPathComponent("\("fileName").m4v")
        // Overwrite any previous export at the same path.
        do {
            if FileManager.default.fileExists(atPath: outputURL.path) {
                try FileManager.default.removeItem(at: outputURL)
            }
        } catch { }

        if let exportSession = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality) {
            exportSession.outputURL = outputURL
            exportSession.outputFileType = AVFileType.mp4
            exportSession.shouldOptimizeForNetworkUse = true
            // try to export the file and handle the status cases
            exportSession.exportAsynchronously(completionHandler: {
                switch exportSession.status {
                case .completed:
                    print("finished")
                    success(outputURL)
                case .failed, .cancelled:
                    // The original invoked failure only when error was non-nil,
                    // leaving the caller without any callback otherwise.
                    failure(exportSession.error)
                default:
                    break
                }
            })
        } else {
            failure(nil)
        }
    }
}
Updated for Swift Concurrency (Swift 5.7)
Some errors to throw:
/// Failure modes for merging a video file with an external audio track.
enum VideoAudioMergeError: Error {
    /// The video track could not be added to the composition.
    case compositionAddVideoFailed
    /// The external audio track could not be added to the composition.
    case compositionAddAudioFailed
    /// The video's own audio track could not be added to the composition.
    case compositionAddAudioOfVideoFailed
    /// The export finished unsuccessfully without providing an error object.
    case unknownError
}
And the method:
/// Merges video and sound while keeping sound of the video too
///
/// - Parameters:
/// - videoUrl: URL to video file
/// - audioUrl: URL to audio file
/// - shouldFlipHorizontally: pass True if video was recorded using frontal camera otherwise pass False
/// Merges video and sound while keeping the sound of the video too.
///
/// - Parameters:
///   - videoUrl: URL to video file
///   - audioUrl: URL to audio file
///   - shouldFlipHorizontally: pass True if video was recorded using frontal camera otherwise pass False
/// - Returns: URL of the merged MP4 in the Documents directory.
/// - Throws: `VideoAudioMergeError` when a composition track cannot be added
///   or the export ends without a usable result; rethrows any AVFoundation
///   loading/insertion error.
func mergeVideoAndAudio(videoUrl: URL,
                        audioUrl: URL,
                        shouldFlipHorizontally: Bool = false) async throws -> URL {

    let mixComposition = AVMutableComposition()
    var mutableCompositionVideoTrack = [AVMutableCompositionTrack]()
    var mutableCompositionAudioTrack = [AVMutableCompositionTrack]()
    var mutableCompositionAudioOfVideoTrack = [AVMutableCompositionTrack]()

    //start merge
    let aVideoAsset = AVAsset(url: videoUrl)
    let aAudioAsset = AVAsset(url: audioUrl)

    guard let compositionAddVideo = mixComposition.addMutableTrack(withMediaType: AVMediaType.video,
                                                                   preferredTrackID: kCMPersistentTrackID_Invalid) else {
        throw VideoAudioMergeError.compositionAddVideoFailed
    }
    guard let compositionAddAudio = mixComposition.addMutableTrack(withMediaType: AVMediaType.audio,
                                                                   preferredTrackID: kCMPersistentTrackID_Invalid) else {
        throw VideoAudioMergeError.compositionAddAudioFailed
    }
    guard let compositionAddAudioOfVideo = mixComposition.addMutableTrack(withMediaType: AVMediaType.audio,
                                                                          preferredTrackID: kCMPersistentTrackID_Invalid) else {
        throw VideoAudioMergeError.compositionAddAudioOfVideoFailed
    }

    // The original wrapped everything below in `do { ... } catch { throw error }`,
    // a no-op rethrow; errors now simply propagate to the caller.
    let aVideoAssetTrack: AVAssetTrack = try await aVideoAsset.loadTracks(withMediaType: AVMediaType.video)[0]
    let aAudioOfVideoAssetTrack: AVAssetTrack? = try await aVideoAsset.loadTracks(withMediaType: AVMediaType.audio).first
    let aAudioAssetTrack: AVAssetTrack = try await aAudioAsset.loadTracks(withMediaType: AVMediaType.audio)[0]

    // Keep the source orientation so externally recorded video is not rotated.
    compositionAddVideo.preferredTransform = try await aVideoAssetTrack.load(.preferredTransform)

    if shouldFlipHorizontally {
        // Flip video horizontally (front-camera capture).
        // NOTE(review): the second translation uses naturalSize.width on the
        // y axis — looks like it should be the height; confirm with a
        // portrait recording before relying on this path.
        var frontalTransform: CGAffineTransform = CGAffineTransform(scaleX: -1.0, y: 1.0)
        let naturalSize = try await aVideoAssetTrack.load(.naturalSize)
        frontalTransform = frontalTransform.translatedBy(x: -naturalSize.width, y: 0.0)
        frontalTransform = frontalTransform.translatedBy(x: 0.0, y: -naturalSize.width)
        compositionAddVideo.preferredTransform = frontalTransform
    }

    mutableCompositionVideoTrack.append(compositionAddVideo)
    mutableCompositionAudioTrack.append(compositionAddAudio)
    mutableCompositionAudioOfVideoTrack.append(compositionAddAudioOfVideo)

    // Everything is cut to the video's duration (the audio file may be longer).
    let videoTimeRange = try await aVideoAssetTrack.load(.timeRange)
    try mutableCompositionVideoTrack[0].insertTimeRange(CMTimeRangeMake(start: CMTime.zero,
                                                                        duration: videoTimeRange.duration),
                                                        of: aVideoAssetTrack,
                                                        at: CMTime.zero)
    try mutableCompositionAudioTrack[0].insertTimeRange(CMTimeRangeMake(start: CMTime.zero,
                                                                        duration: videoTimeRange.duration),
                                                        of: aAudioAssetTrack,
                                                        at: CMTime.zero)
    // Also carry over the video's own audio track, if it has one.
    if let aAudioOfVideoAssetTrack = aAudioOfVideoAssetTrack {
        try mutableCompositionAudioOfVideoTrack[0].insertTimeRange(CMTimeRangeMake(start: CMTime.zero,
                                                                                   duration: videoTimeRange.duration),
                                                                   of: aAudioOfVideoAssetTrack,
                                                                   at: CMTime.zero)
    }

    // Exporting
    let savePathUrl: URL = URL(fileURLWithPath: NSHomeDirectory() + "/Documents/newVideo.mp4")
    do { // delete old video
        try FileManager.default.removeItem(at: savePathUrl)
    } catch { print(error.localizedDescription) }

    let assetExport: AVAssetExportSession = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality)!
    assetExport.outputFileType = AVFileType.mp4
    assetExport.outputURL = savePathUrl
    assetExport.shouldOptimizeForNetworkUse = true

    await assetExport.export()

    if assetExport.status == .completed {
        return savePathUrl
    }
    if let error = assetExport.error {
        throw error
    } else {
        throw VideoAudioMergeError.unknownError
    }
}