I am using 3 videos in a composition, and I can't figure out why the 2nd one plays as black after merging them. When debugging, it has the correct time range and a non-nil value, so I know it is there. Here is my code:
import UIKit
import AVFoundation
import MobileCoreServices
import CoreMedia
import AssetsLibrary
import MediaPlayer
import Photos
class MergeViewController: UIViewController, UINavigationControllerDelegate, UIImagePickerControllerDelegate, MPMediaPickerControllerDelegate {
let clipPicker = UIImagePickerController()
let audioPicker = MPMediaPickerController()
var isSelectingAsset: Int!
// video clip 1
var firstAsset: AVAsset!
// video clip 2
var secondAsset: AVAsset!
// video clip 3
var thirdAsset: AVAsset!
// sound track
var audioAsset: AVAsset!
// activity view indicator
override func viewDidLoad() {
super.viewDidLoad()
clipPicker.delegate = self
clipPicker.sourceType = UIImagePickerControllerSourceType.PhotoLibrary
clipPicker.mediaTypes = [kUTTypeMovie]
audioPicker.delegate = self
audioPicker.prompt = "Select Audio"
}
@IBAction func loadAsset1(sender: AnyObject) {
if UIImagePickerController.isSourceTypeAvailable(UIImagePickerControllerSourceType.PhotoLibrary) {
isSelectingAsset = 1
self.presentViewController(clipPicker, animated: true, completion: nil)
}
}
@IBAction func loadAsset2(sender: AnyObject) {
if UIImagePickerController.isSourceTypeAvailable(UIImagePickerControllerSourceType.PhotoLibrary) {
isSelectingAsset = 2
self.presentViewController(clipPicker, animated: true, completion: nil)
}
}
@IBAction func loadAsset3(sender: AnyObject) {
if UIImagePickerController.isSourceTypeAvailable(UIImagePickerControllerSourceType.PhotoLibrary) {
isSelectingAsset = 3
self.presentViewController(clipPicker, animated: true, completion: nil)
}
}
@IBAction func loadAudio(sender: AnyObject) {
self.presentViewController(audioPicker, animated: true, completion: nil)
}
@IBAction func mergeMedia(sender: AnyObject) {
if firstAsset != nil && secondAsset != nil && thirdAsset != nil {
// set up container to hold media tracks.
var mixComposition = AVMutableComposition()
// track times
let track1to2Time = CMTimeAdd(firstAsset.duration, secondAsset.duration)
let totalTime = CMTimeAdd(track1to2Time, thirdAsset.duration)
// create separate video tracks for individual adjustments before merge
var firstTrack = mixComposition.addMutableTrackWithMediaType(AVMediaTypeVideo,
preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
firstTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, firstAsset.duration),
ofTrack: firstAsset.tracksWithMediaType(AVMediaTypeVideo)[0] as! AVAssetTrack,
atTime: kCMTimeZero,
error: nil)
var secondTrack = mixComposition.addMutableTrackWithMediaType(AVMediaTypeVideo,
preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
secondTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, secondAsset.duration),
ofTrack: secondAsset.tracksWithMediaType(AVMediaTypeVideo)[0] as! AVAssetTrack,
atTime: firstAsset.duration,
error: nil)
var thirdTrack = mixComposition.addMutableTrackWithMediaType(AVMediaTypeVideo,
preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
thirdTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, thirdAsset.duration),
ofTrack: thirdAsset.tracksWithMediaType(AVMediaTypeVideo)[0] as! AVAssetTrack,
atTime: track1to2Time,
error: nil)
// Set up an overall instructions array
var mainInstruction = AVMutableVideoCompositionInstruction()
mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, totalTime)
// Create seperate instructions for each track with helper method to correct orientation.
let firstInstruction = videoCompositionInstructionForTrack(firstTrack, asset: firstAsset)
// Make sure each track becomes transparent at end for the next one to play.
firstInstruction.setOpacity(0.0, atTime: firstAsset.duration)
let secondInstruction = videoCompositionInstructionForTrack(secondTrack, asset: secondAsset)
secondInstruction.setOpacity(0.0, atTime: secondAsset.duration)
let thirdInstruction = videoCompositionInstructionForTrack(thirdTrack, asset: thirdAsset)
// Add individual instructions to main for execution.
mainInstruction.layerInstructions = [firstInstruction, secondInstruction, thirdInstruction]
let mainComposition = AVMutableVideoComposition()
// Add instruction composition to main composition and set frame rate to 30 per second.
mainComposition.instructions = [mainInstruction]
mainComposition.frameDuration = CMTimeMake(1, 30)
mainComposition.renderSize = CGSize(
width: UIScreen.mainScreen().bounds.width,
height: UIScreen.mainScreen().bounds.height)
// get audio
if audioAsset != nil {
let audioTrack: AVMutableCompositionTrack = mixComposition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: 0)
audioTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, totalTime), ofTrack: audioAsset.tracksWithMediaType(AVMediaTypeAudio)[0] as! AVAssetTrack,
atTime: kCMTimeZero,
error: nil)
}
// get path
var paths: NSArray = NSSearchPathForDirectoriesInDomains(NSSearchPathDirectory.DocumentDirectory, NSSearchPathDomainMask.UserDomainMask, true)
let documentsDirectory: String = paths[0] as! String
let id = String(arc4random() % 1000)
let myPathDocs: String = documentsDirectory.stringByAppendingPathComponent("mergeVideo-\(id).mov")
let url = NSURL(fileURLWithPath: myPathDocs)
// make exporter
var exporter = AVAssetExportSession(
asset: mixComposition,
presetName: AVAssetExportPresetHighestQuality)
exporter.outputURL = url
exporter.outputFileType = AVFileTypeQuickTimeMovie
exporter.shouldOptimizeForNetworkUse = true
exporter.videoComposition = mainComposition
exporter.exportAsynchronouslyWithCompletionHandler() {
dispatch_async(dispatch_get_main_queue(), { () -> Void in
self.exportDidFinish(exporter)
})
}
}
}
// MARK: Image Picker Methods
func imagePickerController(picker: UIImagePickerController, didFinishPickingMediaWithInfo info: [NSObject : AnyObject]) {
// Get the media type
let imageType: String = info[UIImagePickerControllerMediaType] as! String
let url: NSURL = info[UIImagePickerControllerMediaURL] as! NSURL
println(imageType)
// Place movie in queue.
if isSelectingAsset == 1 {
println("Success loading 1")
firstAsset = AVAsset.assetWithURL(url) as! AVAsset
} else if isSelectingAsset == 2 {
println("Success loading 2")
secondAsset = AVAsset.assetWithURL(url) as! AVAsset
} else if isSelectingAsset == 3 {
println("Success loading 3")
thirdAsset = AVAsset.assetWithURL(url) as! AVAsset
}
// Dismiss movie selection.
self.dismissViewControllerAnimated(true, completion: nil)
}
func mediaPicker(mediaPicker: MPMediaPickerController!, didPickMediaItems mediaItemCollection: MPMediaItemCollection!) {
let song: NSArray = [mediaItemCollection.items]
if song.count > 0 {
var selectedSong: MPMediaItem! = song[0][0] as! MPMediaItem
let url: NSURL = selectedSong.valueForProperty(MPMediaItemPropertyAssetURL) as! NSURL
audioAsset = AVAsset.assetWithURL(url) as! AVAsset
println("Audio loaded")
}
self.dismissViewControllerAnimated(true, completion: nil)
}
func mediaPickerDidCancel(mediaPicker: MPMediaPickerController!) {
self.dismissViewControllerAnimated(true, completion: nil)
}
// MARK: Merge Helper Methods
func exportDidFinish(session:AVAssetExportSession) {
assert(session.status == AVAssetExportSessionStatus.Completed, "Session status not completed")
if session.status == AVAssetExportSessionStatus.Completed {
let outputURL: NSURL = session.outputURL
let library = ALAssetsLibrary()
if library.videoAtPathIsCompatibleWithSavedPhotosAlbum(outputURL) {
library.writeVideoAtPathToSavedPhotosAlbum(outputURL, completionBlock: { (assetURL: NSURL!, error: NSError!) -> Void in
dispatch_async(dispatch_get_main_queue(), { () -> Void in
if (error != nil) {
let alert = UIAlertView(title: "Error", message: "Failed to save video.", delegate: nil, cancelButtonTitle: "OK")
alert.show()
} else {
let alert = UIAlertView(title: "Success", message: "Video saved.", delegate: nil, cancelButtonTitle: "OK")
alert.show()
}
})
})
}
}
audioAsset = nil
firstAsset = nil
secondAsset = nil
thirdAsset = nil
}
// Identify the correct orientation for the output video based on the input.
func orientationFromTransform(transform: CGAffineTransform) -> (orientation: UIImageOrientation, isPortrait: Bool) {
var assetOrientation = UIImageOrientation.Up
var isPortrait = false
if transform.a == 0 && transform.b == 1.0 && transform.c == -1.0 && transform.d == 0 {
assetOrientation = .Right
isPortrait = true
} else if transform.a == 0 && transform.b == -1.0 && transform.c == 1.0 && transform.d == 0 {
assetOrientation = .Left
isPortrait = true
} else if transform.a == 1.0 && transform.b == 0 && transform.c == 0 && transform.d == 1.0 {
assetOrientation = .Up
} else if transform.a == -1.0 && transform.b == 0 && transform.c == 0 && transform.d == -1.0 {
assetOrientation = .Down
}
return (assetOrientation, isPortrait)
}
func videoCompositionInstructionForTrack(track: AVCompositionTrack, asset: AVAsset) -> AVMutableVideoCompositionLayerInstruction {
// get the asset tracks current orientation
let instruction = AVMutableVideoCompositionLayerInstruction(assetTrack: track)
let assetTrack = asset.tracksWithMediaType(AVMediaTypeVideo)[0] as! AVAssetTrack
var transform = assetTrack.preferredTransform
// identify the needed orientation
let assetInfo = orientationFromTransform(transform)
// find the size needed to fit the track in the screen for landscape
var scaleToFitRatio = UIScreen.mainScreen().bounds.width / assetTrack.naturalSize.width
// if it is portrait, get the size to fit the track in the screen and return instruction to scale.
if assetInfo.isPortrait {
scaleToFitRatio = UIScreen.mainScreen().bounds.width / assetTrack.naturalSize.height
let scaleFactor = CGAffineTransformMakeScale(scaleToFitRatio, scaleToFitRatio)
instruction.setTransform(CGAffineTransformConcat(assetTrack.preferredTransform, scaleFactor),
atTime: kCMTimeZero)
} else {
// If it is landscape then check for incorrect orientation and correct if needed, then return instructon to re-orient and scale.
let scaleFactor = CGAffineTransformMakeScale(scaleToFitRatio, scaleToFitRatio)
var concat = CGAffineTransformConcat(CGAffineTransformConcat(assetTrack.preferredTransform, scaleFactor), CGAffineTransformMakeTranslation(0, UIScreen.mainScreen().bounds.width / 2))
if assetInfo.orientation == .Down {
let fixUpsideDown = CGAffineTransformMakeRotation(CGFloat(M_PI))
let windowBounds = UIScreen.mainScreen().bounds
let yFix = assetTrack.naturalSize.height + windowBounds.height
let centerFix = CGAffineTransformMakeTranslation(assetTrack.naturalSize.width, yFix)
concat = CGAffineTransformConcat(CGAffineTransformConcat(fixUpsideDown, centerFix), scaleFactor)
}
instruction.setTransform(concat, atTime: kCMTimeZero)
}
return instruction
}
}
Shouldn't you set the opacity of the second track to 0.0 after the first+second track duration instead of what you have now:
secondInstruction.setOpacity(0.0, atTime: secondAsset.duration)
to:
secondInstruction.setOpacity(0.0, atTime: CMTimeAdd(firstAsset.duration, secondAsset.duration)) // (Untested, but the idea is there.)
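In terms of the question's own code, a minimal sketch of the corrected layer-instruction block (untested; it reuses the track1to2Time and totalTime values already computed in mergeMedia) would be:
let firstInstruction = videoCompositionInstructionForTrack(firstTrack, asset: firstAsset)
// Clip 1 ends at firstAsset.duration on the composition timeline.
firstInstruction.setOpacity(0.0, atTime: firstAsset.duration)
let secondInstruction = videoCompositionInstructionForTrack(secondTrack, asset: secondAsset)
// Clip 2 ends at the combined duration of clips 1 and 2, not at secondAsset.duration.
secondInstruction.setOpacity(0.0, atTime: track1to2Time)
let thirdInstruction = videoCompositionInstructionForTrack(thirdTrack, asset: thirdAsset)
// The last clip simply ends at totalTime, so it needs no opacity ramp.
mainInstruction.layerInstructions = [firstInstruction, secondInstruction, thirdInstruction]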
Related
I've been trying to let the user choose a video from the gallery and split it into chunks of 30 seconds.
When I select a video of 1 minute, it splits into two videos of 30 seconds each, and it works fine.
I tried another example with a 35-second video, and it splits into two videos of 30 and 4 seconds.
But when I select a video longer than 1 minute, it splits the video into chunks of random lengths such as 34 or 40 seconds, etc.
I don't want that!
I want to split videos into 30-second chunks.
This is my view controller code so far:
import UIKit
import AVKit
import MobileCoreServices
import Photos
class ViewController: UIViewController {
@IBOutlet weak var videoView: UIImageView!
@IBOutlet var imageView: UIImageView!
var player: AVPlayer!
var avpController = AVPlayerViewController()
var isVideoGettinGEdited = false
@IBAction func didTapButton() {
let picker = UIImagePickerController()
picker.delegate = self
picker.sourceType = .savedPhotosAlbum
picker.mediaTypes = ["public.movie"]
picker.allowsEditing = false
present(picker, animated: true, completion: nil)
}
}
extension ViewController: UIImagePickerControllerDelegate, UINavigationControllerDelegate {
func imagePickerController(_ picker: UIImagePickerController, didFinishPickingMediaWithInfo info: [UIImagePickerController.InfoKey : Any]) {
if let image = info[UIImagePickerController.InfoKey(rawValue: "UIImagePickerControllerEditedImage")] as? UIImage {
imageView.image = image
} else {
guard
let mediaType = info[UIImagePickerController.InfoKey.mediaType] as? String,
mediaType == (kUTTypeMovie as String),
let url = info[UIImagePickerController.InfoKey.mediaURL] as? URL
else { return }
let videoAsset = AVURLAsset(url: url)
let videoDuration = videoAsset.duration
let durationTime = ceil( CMTimeGetSeconds(videoDuration))
var startTime = 0.0
var endTime = durationTime
var numberOfBreaks = Int((Double(durationTime)/30.0))
let isReminderTime = Double(durationTime.truncatingRemainder(dividingBy: 30.0))
if isReminderTime > 0 {
numberOfBreaks = numberOfBreaks + 1
}
if Double(durationTime) <= 30 {
self.cropVideo(atURL: url, startTime: startTime, endTime: endTime, fileName: "Output.mp4")
} else {
endTime = 30
while numberOfBreaks != 0 {
if !isVideoGettinGEdited {
print("Start time = \(startTime) and Endtime = \(endTime)")
self.cropVideo(atURL: url, startTime: startTime, endTime: endTime, fileName: "Output-\(numberOfBreaks).mp4")
numberOfBreaks = numberOfBreaks - 1
startTime = endTime
let timeLeft = Double(durationTime) - startTime
if timeLeft >= 30.0 {
endTime = endTime + 30.0
} else {
endTime = timeLeft
}
}
}
}
}
picker.dismiss(animated: true, completion: nil)
}
func imagePickerControllerDidCancel(_ picker: UIImagePickerController) {
picker.dismiss(animated: true, completion: nil)
}
}
extension ViewController {
func cropVideo(atURL url:URL, startTime:Double, endTime:Double, fileName:String) {
let asset = AVURLAsset(url: url)
let exportSession = AVAssetExportSession.init(asset: asset, presetName: AVAssetExportPresetHighestQuality)!
var outputURL = URL(string:NSSearchPathForDirectoriesInDomains(FileManager.SearchPathDirectory.documentDirectory, FileManager.SearchPathDomainMask.userDomainMask, true).last!)
let fileManager = FileManager.default
do {
try fileManager.createDirectory(at: outputURL!, withIntermediateDirectories: true, attributes: nil)
} catch {
print(error)
}
outputURL?.appendPathComponent("\(fileName).mp4")
// Remove existing file
do {
try fileManager.removeItem(atPath: outputURL!.absoluteString)
} catch {
print(error)
}
exportSession.outputURL = URL(fileURLWithPath: outputURL!.absoluteString)
exportSession.shouldOptimizeForNetworkUse = true
exportSession.outputFileType = AVFileType.mp4
let start = CMTimeMakeWithSeconds(startTime, preferredTimescale: 600) // you will modify time range here
let duration = CMTimeMakeWithSeconds(endTime, preferredTimescale: 600)
let range = CMTimeRangeMake(start: start, duration: duration)
exportSession.timeRange = range
exportSession.exportAsynchronously {
self.isVideoGettinGEdited = false
switch(exportSession.status) {
case .completed:
PHPhotoLibrary.shared().performChanges({
PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: URL(fileURLWithPath: outputURL!.absoluteString))
}) { completed, error in
DispatchQueue.main.async {
self.view.isUserInteractionEnabled = true
if completed {
print("Video has been saved to your photos.")
} else {
if error != nil {
print("Failed to save video in photos \(error).")
}
}
}
}
break
case .failed:
print("failed with \(exportSession.error)")
break
case .cancelled: break
default:
print("default")
break
}
}
}
//MARK:- saveVideoFromURL
func saveVideoFromURL(_ videoURL:String) {
self.view.isUserInteractionEnabled = false
DispatchQueue.global(qos: .background).async {
if let url = URL(string: videoURL),
let urlData = NSData(contentsOf: url) {
let documentsPath = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0];
print("url path component = \(url.lastPathComponent)")
let filePath="\(documentsPath)/\(url.lastPathComponent)"
urlData.write(toFile: filePath, atomically: true)
PHPhotoLibrary.shared().performChanges({
PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: URL(fileURLWithPath: filePath))
}) { completed, error in
DispatchQueue.main.async {
self.view.isUserInteractionEnabled = true
if completed {
print("Video has been saved to your photos.")
} else {
if error != nil {
print("Failed to save video.")
}
}
}
}
}
}
}
}
Thanks!
Your main issue is in how you calculate the duration. By the way, I would change the preferred timescale to 1 as well:
change
let duration = CMTimeMakeWithSeconds(endTime, preferredTimescale: 1)
to
let duration = CMTimeMakeWithSeconds(endTime-startTime, preferredTimescale: 1)
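As a side note that is not part of the original answer: CMTimeRange is built from a start and a duration, not an end time, which is why the subtraction matters. A minimal sketch, with 30 s and 60 s as purely illustrative values:
import CoreMedia

// The chunk covering 30 s – 60 s of the source starts at 30 s and is 30 s long, not 60 s long.
let start = CMTimeMakeWithSeconds(30, preferredTimescale: 1)
let duration = CMTimeMakeWithSeconds(60 - 30, preferredTimescale: 1)
let range = CMTimeRangeMake(start: start, duration: duration)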
I have also made some other changes to your code, as follows:
extension ViewController: UIImagePickerControllerDelegate, UINavigationControllerDelegate {
func imagePickerController(_ picker: UIImagePickerController, didFinishPickingMediaWithInfo info: [UIImagePickerController.InfoKey : Any]) {
if let image = info[.editedImage] as? UIImage {
imageView.image = image
} else {
guard
let mediaType = info[.mediaType] as? String,
mediaType == "public.movie",
let url = info[.mediaURL] as? URL
else { return }
let videoAsset = AVURLAsset(url: url)
let videoDuration = videoAsset.duration
let durationTime = ceil(videoDuration.seconds)
print("durationTime:" , durationTime)
struct Duration {
let start: Double
let end: Double
}
let durations: [Duration]
if durationTime < 30 {
durations = [Duration(start: 0, end: durationTime)]
} else {
durations = (0...Int(durationTime)/30).compactMap {
if Double($0*30) == min(Double($0*30)+30, durationTime) {
return nil
}
return Duration(
start: Double($0*30),
end: min(Double($0*30)+30, durationTime)
)
}
}
for index in durations.indices {
let startTime = durations[index].start
let endTime = durations[index].end
print("Start time = \(startTime) and Endtime = \(endTime)")
saveVideo(
at: url,
startTime: startTime,
endTime: endTime,
fileName: "Output-\(index)"
)
}
}
picker.dismiss(animated: true, completion: nil)
}
func imagePickerControllerDidCancel(_ picker: UIImagePickerController) {
picker.dismiss(animated: true, completion: nil)
}
}
extension ViewController {
func saveVideo(
at url: URL,
startTime: Double,
endTime:Double,
fileName: String
) {
let asset = AVURLAsset(url: url)
let exportSession = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetHighestQuality)!
let outputURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first!
.appendingPathComponent(fileName)
.appendingPathExtension("mp4")
// Remove existing file
if FileManager.default.fileExists(atPath: outputURL.path) {
do {
try FileManager.default.removeItem(at: outputURL)
} catch {
print(error)
}
}
exportSession.outputURL = outputURL
exportSession.shouldOptimizeForNetworkUse = true
exportSession.outputFileType = .mp4
let start = CMTimeMakeWithSeconds(startTime, preferredTimescale: 1)
let duration = CMTimeMakeWithSeconds(endTime-startTime, preferredTimescale: 1)
let range = CMTimeRangeMake(start: start, duration: duration)
print("Will Render \(fileName) from \(start.seconds) to \(duration)")
exportSession.timeRange = range
exportSession.exportAsynchronously {
print("Did Render \(fileName) from \(start.seconds) to \(duration)")
self.isVideoGettinGEdited = false
switch exportSession.status {
case .completed:
self.checkDuration(for: fileName, at: outputURL)
PHPhotoLibrary.shared().performChanges({
PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: outputURL)
}) { completed, error in
if let error = error {
print("Failed to save video in photos", error)
return
}
DispatchQueue.main.async {
self.view.isUserInteractionEnabled = true
if completed {
print("Video has been saved to your photos.")
} else {
print("Video saving has NOT been completed")
}
}
}
break
case .failed:
print("failed with:", exportSession.error ?? "no error")
break
case .cancelled: break
default: break
}
}
}
}
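As a quick sanity check of the chunking math above (a standalone sketch; the 95-second duration is just an illustrative value, not from the answer):
import Foundation

struct Chunk { let start: Double; let end: Double }

let durationTime = 95.0
let chunks: [Chunk] = (0...Int(durationTime)/30).compactMap { index -> Chunk? in
    let start = Double(index * 30)
    let end = min(start + 30, durationTime)
    // A zero-length trailing chunk (e.g. for an exact 90-second clip) is dropped.
    return start == end ? nil : Chunk(start: start, end: end)
}
chunks.forEach { print("\($0.start) – \($0.end)") }  // prints 0.0–30.0, 30.0–60.0, 60.0–90.0, 90.0–95.0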
Sample Project
I'm working on a project that needs to record video segments and then merge these segments in one video. I'm using AVFoundation framework to do so.
The problem is that when I merge the segments, there's always a black frame or no sound between the chunks. Also, sometimes the sound is not synchronised after merging.
I've tried many options to solve this problem but didn't find a good solution.
I've tried many solutions from Stack Overflow but none of them worked.
I've also tried to use MKOVideoMerge but I still have the problem.
I've made a small view controller below that records from the camera and creates a new segment every 10 seconds. When the user taps "stop", all the segments are merged and saved to the camera roll:
If anyone has managed to merge two video segments without dropping frames or sound, help would be much appreciated :)
import UIKit
import AVFoundation
import Photos
class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate,AVCaptureAudioDataOutputSampleBufferDelegate {
private enum SessionSetupResult {
case success
case notAuthorized
case configurationFailed
}
@IBOutlet weak var btnStartStop: UIButton!
@IBOutlet weak var previewView: PreviewView!
private let session = AVCaptureSession()
private let sessionQueue = DispatchQueue(label: "session queue")
private let mergeQueue = DispatchQueue(label: "merge queue")
private var setupResult: SessionSetupResult = .success
private var videoDeviceInput: AVCaptureDeviceInput!
lazy private var movieBufferOutput = AVCaptureVideoDataOutput()
lazy private var audioBufferOutput = AVCaptureAudioDataOutput()
private var movieConnection: AVCaptureConnection!
private var audioConnection: AVCaptureConnection!
private var assetWriter: AVAssetWriter! = nil
private var assetWriterInput: AVAssetWriterInput! = nil
private var audioWriterInput: AVAssetWriterInput! = nil
private var chunkNumber = 0
private let chunkMaxDuration = 10.0
private var chunkStartTime: CMTime! = nil
private var chunkOutputURL: URL! = nil
private var stopRecording: Bool = false
@IBAction func startStop(_ sender: Any) {
stopRecording = true
btnStartStop.setTitle("recording", for: .normal)
}
override func viewDidLoad() {
super.viewDidLoad()
previewView.session = session
previewView.videoPreviewLayer.videoGravity = .resizeAspectFill
switch AVCaptureDevice.authorizationStatus(for: .video) {
case .authorized:
break
case .notDetermined:
sessionQueue.suspend()
AVCaptureDevice.requestAccess(for: .video, completionHandler: { granted in
if !granted {
self.setupResult = .notAuthorized
}
self.sessionQueue.resume()
})
default:
setupResult = .notAuthorized
}
sessionQueue.async {
self.configureSession()
}
}
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
self.cleanTempDirectory()
sessionQueue.async {
switch self.setupResult {
case .success:
break
case .notAuthorized:
DispatchQueue.main.async {
let changePrivacySetting = "Not authorized"
let message = NSLocalizedString(changePrivacySetting, comment: "Alert message when the user has denied access to the camera")
let alertController = UIAlertController(title: "Not authorized", message: message, preferredStyle: .alert)
alertController.addAction(UIAlertAction(title: NSLocalizedString("OK", comment: "Alert OK button"),
style: .cancel,
handler: nil))
alertController.addAction(UIAlertAction(title: NSLocalizedString("Settings", comment: "Alert button to open Settings"),
style: .`default`,
handler: { _ in
UIApplication.shared.open(URL(string: UIApplicationOpenSettingsURLString)!, options: [:], completionHandler: nil)
}))
self.present(alertController, animated: true, completion: nil)
}
case .configurationFailed:
DispatchQueue.main.async {
let alertMsg = "Error"
let message = NSLocalizedString("Error", comment: alertMsg)
let alertController = UIAlertController(title: "Error", message: message, preferredStyle: .alert)
alertController.addAction(UIAlertAction(title: NSLocalizedString("OK", comment: "Alert OK button"),
style: .cancel,
handler: nil))
self.present(alertController, animated: true, completion: nil)
}
}
}
}
private func configureSession() {
if setupResult != .success {
return
}
session.beginConfiguration()
session.sessionPreset = .high
do {
var defaultVideoDevice: AVCaptureDevice?
if let dualCameraDevice = AVCaptureDevice.default(.builtInDualCamera, for: .video, position: .back) {
defaultVideoDevice = dualCameraDevice
} else if let backCameraDevice = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back) {
defaultVideoDevice = backCameraDevice
} else if let frontCameraDevice = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .front) {
defaultVideoDevice = frontCameraDevice
}
let videoDeviceInput = try AVCaptureDeviceInput(device: defaultVideoDevice!)
if session.canAddInput(videoDeviceInput) {
session.addInput(videoDeviceInput)
self.videoDeviceInput = videoDeviceInput
DispatchQueue.main.async {
self.previewView.videoPreviewLayer.connection?.videoOrientation = .landscapeRight
}
} else {
setupResult = .configurationFailed
session.commitConfiguration()
return
}
} catch {
setupResult = .configurationFailed
session.commitConfiguration()
return
}
do {
let audioDevice = AVCaptureDevice.default(for: .audio)
let audioDeviceInput = try AVCaptureDeviceInput(device: audioDevice!)
if session.canAddInput(audioDeviceInput) {
session.addInput(audioDeviceInput)
}
} catch {
}
movieBufferOutput.videoSettings = [
String(kCVPixelBufferPixelFormatTypeKey) : Int(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
]
if self.session.canAddOutput(movieBufferOutput) {
self.session.addOutput(movieBufferOutput)
if let connection = self.movieBufferOutput.connection(with: .video) {
movieConnection = connection
connection.videoOrientation = .landscapeRight
if connection.isVideoStabilizationSupported {
connection.preferredVideoStabilizationMode = .auto
}
}
} else {
setupResult = .configurationFailed
session.commitConfiguration()
return
}
if self.session.canAddOutput(audioBufferOutput) {
self.session.addOutput(audioBufferOutput)
if let connection = self.audioBufferOutput.connection(with: .audio) {
audioConnection = connection
}
} else {
print("Could not add audio output to the session")
setupResult = .configurationFailed
session.commitConfiguration()
return
}
let queue: DispatchQueue = DispatchQueue(label: "MediaOutputQueue")
let audioQueue: DispatchQueue = DispatchQueue(label: "AudioOutputQueue")
self.movieBufferOutput.setSampleBufferDelegate(self, queue: queue)
self.audioBufferOutput.setSampleBufferDelegate(self, queue: audioQueue)
self.movieBufferOutput.alwaysDiscardsLateVideoFrames = true
session.commitConfiguration()
self.session.startRunning()
}
func createWriterInput(for presentationTimeStamp: CMTime) {
self.stopRecording = false
let fileManager = FileManager.default
let outputFileName = "chunk\(chunkNumber)"
let outputFilePath = (NSTemporaryDirectory() as NSString).appendingPathComponent((outputFileName as NSString).appendingPathExtension("mp4")!)
chunkOutputURL = URL(fileURLWithPath: outputFilePath)
try? fileManager.removeItem(at: chunkOutputURL)
assetWriter = try! AVAssetWriter(outputURL: chunkOutputURL, fileType: .mp4)
let outputSettings: [String: Any] = [AVVideoCodecKey:AVVideoCodecH264, AVVideoWidthKey: 1280, AVVideoHeightKey: 720]
assetWriterInput = AVAssetWriterInput(mediaType: .video, outputSettings: outputSettings)
assetWriterInput.expectsMediaDataInRealTime = true
assetWriter.add(assetWriterInput)
let audioSettings = [
AVFormatIDKey : kAudioFormatMPEG4AAC,
AVNumberOfChannelsKey : 1,
AVSampleRateKey : 44100.0,
AVEncoderBitRateKey: 192000
] as [String : Any]
audioWriterInput = AVAssetWriterInput(mediaType: .audio, outputSettings: audioSettings)
audioWriterInput.expectsMediaDataInRealTime = true;
assetWriter.add(audioWriterInput)
chunkNumber += 1
chunkStartTime = presentationTimeStamp
assetWriter.startWriting()
assetWriter.startSession(atSourceTime: chunkStartTime)
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
// Dispose of any resources that can be recreated.
}
override var shouldAutorotate: Bool {
return false
}
override var supportedInterfaceOrientations: UIInterfaceOrientationMask {
return .landscapeRight
}
override var preferredInterfaceOrientationForPresentation: UIInterfaceOrientation {
return .landscapeRight
}
func cleanTempDirectory() {
let tempVideosPath = NSTemporaryDirectory()
var isDirectory = ObjCBool(true)
if !FileManager.default.fileExists(atPath: tempVideosPath, isDirectory: &isDirectory) {
return
}
let tempVideosURL = URL(fileURLWithPath: tempVideosPath)
do {
let directoryContents = try FileManager.default.contentsOfDirectory(at: tempVideosURL, includingPropertiesForKeys: [.contentModificationDateKey], options: [.skipsHiddenFiles,.skipsSubdirectoryDescendants])
let mp4Files = directoryContents.filter{ $0.pathExtension == "mp4" }.map { url in
(url, (try? url.resourceValues(forKeys: [.creationDateKey]))?.creationDate ?? Date.distantPast)
}.sorted(by: { $0.1 < $1.1 })
for mp4 in mp4Files {
try? FileManager.default.removeItem(at: mp4.0)
}
} catch {
}
}
func getTempVideos() -> [URL] {
let tempVideosPath = NSTemporaryDirectory()
var isDirectory = ObjCBool(true)
if !FileManager.default.fileExists(atPath: tempVideosPath, isDirectory: &isDirectory) {
return []
}
var videosURL: [URL] = []
let videosUrl = URL(fileURLWithPath: tempVideosPath)
do {
let directoryContents = try FileManager.default.contentsOfDirectory(at: videosUrl, includingPropertiesForKeys: [.contentModificationDateKey], options: [.skipsHiddenFiles,.skipsSubdirectoryDescendants])
let mp4Files = directoryContents.filter{ $0.pathExtension == "mp4" }.map { url in
(url, (try? url.resourceValues(forKeys: [.creationDateKey]))?.creationDate ?? Date.distantPast)
}.sorted(by: { $0.1 > $1.1 })
var i = 0
for mp4 in mp4Files {
videosURL.append(mp4.0)
i = i + 1
if i > 5 {
break
}
}
} catch {
return []
}
return videosURL
}
func getRecordedVideoURL() -> URL {
var tempVideosPath = NSTemporaryDirectory()
tempVideosPath = (tempVideosPath as NSString).appendingPathComponent("videos")
var isDirectory = ObjCBool(true)
if !FileManager.default.fileExists(atPath: tempVideosPath, isDirectory: &isDirectory) {
do {
try FileManager.default.createDirectory(at: URL(fileURLWithPath: tempVideosPath), withIntermediateDirectories: true, attributes: nil)
} catch {
}
}
let outputFileName = "test-" + NSUUID().uuidString
let outputFileURL = URL(fileURLWithPath: tempVideosPath).appendingPathComponent(outputFileName).appendingPathExtension("mp4")
try? FileManager.default.removeItem(at: outputFileURL)
return outputFileURL
}
func mergeVideos(urls: [URL], excludedUrl: URL, completion: @escaping (_ exporter: AVAssetExportSession?) -> ()) -> Void {
let mainComposition = AVMutableComposition()
let compositionVideoTrack = mainComposition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid)
let soundtrackTrack = mainComposition.addMutableTrack(withMediaType: .audio, preferredTrackID: kCMPersistentTrackID_Invalid)
let assetOptions = [AVURLAssetPreferPreciseDurationAndTimingKey: true]
var insertTime = kCMTimeZero
var audioInsertTime = kCMTimeZero
var videos: [(asset: AVURLAsset, videoTrack: AVAssetTrack, videoDuration: CMTime)] = []
for url in urls {
if url.path != excludedUrl.path {
let videoAsset = AVURLAsset(url: url, options : assetOptions)
if videoAsset.tracks(withMediaType: .video).count > 0 && videoAsset.tracks(withMediaType: .audio).count > 0 {
let videoTrack = videoAsset.tracks(withMediaType: .video)[0]
let videoDuration = videoTrack.timeRange.duration
videos.append((asset: videoAsset, videoTrack: videoTrack, videoDuration: videoDuration))
} else {
break
}
}
}
var hasError: Bool = false
for video in videos.reversed() {
let audioTrack = video.asset.tracks(withMediaType: .audio)[0]
let audioDuration = audioTrack.timeRange.duration
do {
try compositionVideoTrack?.insertTimeRange(CMTimeRangeMake(kCMTimeZero, video.videoDuration), of: video.videoTrack, at: insertTime)
} catch let error {
hasError = true
print(error)
}
do {
try soundtrackTrack?.insertTimeRange(CMTimeRangeMake(kCMTimeZero, audioDuration), of: audioTrack, at: audioInsertTime)
} catch let error {
hasError = true
print(error)
}
insertTime = CMTimeAdd(insertTime, video.videoDuration)
audioInsertTime = CMTimeAdd(audioInsertTime, audioDuration)
}
if videos.count == 0 {
hasError = true
}
if !hasError {
let outputFileURL = getRecordedVideoURL()
let exporter = AVAssetExportSession(asset: mainComposition, presetName: AVAssetExportPresetHighestQuality)
exporter?.outputURL = outputFileURL
exporter?.outputFileType = AVFileType.mp4
exporter?.shouldOptimizeForNetworkUse = true
exporter?.exportAsynchronously {
DispatchQueue.main.async {
completion(exporter!)
}
}
} else {
completion(nil)
}
}
func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
if connection == self.audioConnection {
if let audioInput = self.audioWriterInput, audioInput.isReadyForMoreMediaData {
if !audioInput.append(sampleBuffer) {
print("Error writing audio buffer");
}
}
} else {
if let videoInput = self.assetWriterInput, videoInput.isReadyForMoreMediaData {
if !videoInput.append(sampleBuffer) {
print("Error writing video buffer");
}
}
}
if connection == movieConnection {
let presentationTimeStamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
if assetWriter == nil {
createWriterInput(for: presentationTimeStamp)
} else {
let currentChunkDuration = CMTimeGetSeconds(CMTimeSubtract(presentationTimeStamp, chunkStartTime))
if currentChunkDuration >= chunkMaxDuration || self.stopRecording == true {
let chunkAssetWriter = assetWriter!
let assetWriterInput = self.assetWriterInput
let audioWriterInput = self.audioWriterInput
let stopRecording = self.stopRecording
createWriterInput(for: presentationTimeStamp)
assetWriterInput?.markAsFinished()
audioWriterInput?.markAsFinished()
chunkAssetWriter.endSession(atSourceTime: presentationTimeStamp)
chunkAssetWriter.finishWriting {
DispatchQueue.main.async {
self.btnStartStop.setTitle("stop", for: .normal)
}
if stopRecording {
self.mergeQueue.async {
self.mergeVideos(urls: self.getTempVideos(), excludedUrl: self.chunkOutputURL!, completion: { exportSession in
if let exportSession = exportSession {
PHPhotoLibrary.shared().performChanges({
PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: exportSession.outputURL!)
}) { saved, error in
DispatchQueue.main.async {
if saved {
let alertController = UIAlertController(title: "Your video was successfully saved", message: nil, preferredStyle: .alert)
let defaultAction = UIAlertAction(title: "OK", style: .default, handler: nil)
alertController.addAction(defaultAction)
self.present(alertController, animated: true, completion: nil)
}
}
}
}
})
}
}
}
}
}
}
}
}
Thanks for your help :)
func trimVideo(sourceURL: URL, destinationURL: URL, trimPoints: TrimPoints, completion: @escaping () -> Void) {
guard sourceURL.isFileURL else { return }
guard destinationURL.isFileURL else { return }
let options = [
AVURLAssetPreferPreciseDurationAndTimingKey: true
]
let asset = AVURLAsset(url: sourceURL, options: options)
let preferredPreset = AVAssetExportPresetPassthrough
if verifyPresetForAsset(preset: preferredPreset, asset: asset) {
let composition = AVMutableComposition()
let videoCompTrack = composition.addMutableTrack(withMediaType: .video, preferredTrackID: CMPersistentTrackID())
let audioCompTrack = composition.addMutableTrack(withMediaType: .audio, preferredTrackID: CMPersistentTrackID())
guard let assetVideoTrack: AVAssetTrack = asset.tracks(withMediaType: .video).first else { return }
guard let assetAudioTrack: AVAssetTrack = asset.tracks(withMediaType: .audio).first else { return }
var accumulatedTime = kCMTimeZero
for (startTimeForCurrentSlice, endTimeForCurrentSlice) in trimPoints {
let durationOfCurrentSlice = CMTimeSubtract(endTimeForCurrentSlice, startTimeForCurrentSlice)
let timeRangeForCurrentSlice = CMTimeRangeMake(startTimeForCurrentSlice, durationOfCurrentSlice)
do {
try videoCompTrack!.insertTimeRange(timeRangeForCurrentSlice, of: assetVideoTrack, at: accumulatedTime)
try audioCompTrack!.insertTimeRange(timeRangeForCurrentSlice, of: assetAudioTrack, at: accumulatedTime)
accumulatedTime = CMTimeAdd(accumulatedTime, durationOfCurrentSlice)
}
catch let compError {
print("TrimVideo: error during composition: \(compError)")
}
}
guard let exportSession = AVAssetExportSession(asset: composition, presetName: preferredPreset) else { return }
exportSession.outputURL = destinationURL as URL
exportSession.outputFileType = AVFileType.m4v
exportSession.shouldOptimizeForNetworkUse = true
removeFileAtURLIfExists(url: destinationURL as URL)
exportSession.exportAsynchronously {
completion()
}
}
else {
print("TrimVideo - Could not find a suitable export preset for the input video")
}
}
@IBAction func nextButtonPressed(_ sender: Any) {
if MyVariables.isScreenshot == true {
//get image from current time
print("screenshot")
guard let currentTime = trimmerView.currentPlayerTime else {
return
}
self.thumbnailImage = imageFromVideo(url: footageURL!, time: currentTime )
self.screenshotOut = imageFromVideo(url: footageURL!, time: currentTime )
self.performSegue(withIdentifier: "CreatePost_Segue", sender: nil)
} else {
print("video")
let outputFileName = NSUUID().uuidString
let outputFilePath = (NSTemporaryDirectory() as NSString).appendingPathComponent((outputFileName as NSString).appendingPathExtension("mov")!)
self.videoURL = URL(fileURLWithPath: outputFilePath)
trimVideo(sourceURL: self.footageURL!, destinationURL: self.videoURL!, trimPoints: [(trimmerView.startTime!,trimmerView.endTime!)], completion: self.finishVideo)
}
}
func finishVideo() -> Void {
guard let VideoURL = self.videoURL else { return }
self.thumbnailImage = setThumbnailFrom(path: VideoURL)
print("got to segue") //This does print successfully so something is happening in the segue...
self.performSegue(withIdentifier: "CreatePost_Segue", sender: nil)
}
override func prepare(for segue: UIStoryboardSegue, sender: Any?){
if segue.identifier == "CreatePost_Segue" {
let controller = segue.destination as! CreatePostViewController
controller.thumbnailImage = self.thumbnailImage
controller.videoURL = self.videoURL
controller.screenshotOut = self.screenshotOut
}
}
So in nextButtonPressed you can see that if the media is a video (it is), I am using the trimVideo function with a custom completion handler, finishVideo, to create a thumbnail from the trimmed video as well as perform the segue itself.
Everything executes without error until the segue, so I believe I am perhaps sending the data wrong? Perhaps something to do with screenshotOut not being set if it's a video?
The full error is
*** Assertion failure in void _UIPerformResizeOfTextViewForTextContainer(NSLayoutManager *, UIView<NSTextContainerView> *, NSTextContainer *, NSUInteger)(), /BuildRoot/Library/Caches/com.apple.xbs/Sources/UIFoundation/UIFoundation-546.2/UIFoundation/TextSystem/NSLayoutManager_Private.m:1619
I fixed this with the advice @aBilal17 gave to run the segue on the main thread, like so:
func finishVideo() -> Void {
guard let VideoURL = self.videoURL else { return }
self.thumbnailImage = setThumbnailFrom(path: VideoURL)
print("got to segue")
DispatchQueue.main.async {
self.performSegue(withIdentifier: "CreatePost_Segue", sender: nil)
}
}
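The same idea could also be pushed into trimVideo itself; as a sketch (not part of the original answer), its export completion could bounce to the main queue before calling the caller's completion, so any UI work, including the segue, is already on the main thread:
exportSession.exportAsynchronously {
    // AVAssetExportSession invokes this handler on a background queue, so hop to
    // the main queue before running UI-facing completion work.
    DispatchQueue.main.async {
        completion()
    }
}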
func orientationFromTransform(transform: CGAffineTransform) -> (orientation: UIImageOrientation, isPortrait: Bool) {
print(transform)
var assetOrientation = UIImageOrientation.up
var isPortrait = false
if transform.a == 0 && transform.b == 1.0 && transform.c == -1.0 && transform.d == 0 {
assetOrientation = .right
isPortrait = true
}
else if transform.a == 0 && transform.b == -1.0 && transform.c == 1.0 && transform.d == 0 {
assetOrientation = .left
isPortrait = true
}
else if transform.a == 1.0 && transform.b == 0 && transform.c == 0 && transform.d == 1.0 {
assetOrientation = .up
}
else if transform.a == -1.0 && transform.b == 0 && transform.c == 0 && transform.d == -1.0 {
assetOrientation = .down
}
else if transform.a == 0 && transform.b == 1.0 && transform.c == 1.0 && transform.d == 0 {
isPortrait = true
}
return (assetOrientation, isPortrait)
}
func videoCompositionInstructionForTrack(track: AVCompositionTrack, asset: AVAsset) -> AVMutableVideoCompositionLayerInstruction {
let instruction = AVMutableVideoCompositionLayerInstruction(assetTrack: track)
let assetTrack = asset.tracks(withMediaType: AVMediaTypeVideo)[0]
let transform = assetTrack.preferredTransform
let assetInfo = orientationFromTransform(transform: transform)
var scaleToFitRatio = UIScreen.main.bounds.width / assetTrack.naturalSize.width
if assetInfo.isPortrait {
scaleToFitRatio = UIScreen.main.bounds.width / assetTrack.naturalSize.height
let scaleFactor = CGAffineTransform(scaleX: scaleToFitRatio, y: scaleToFitRatio)
instruction.setTransform(assetTrack.preferredTransform.concatenating(scaleFactor),
at: kCMTimeZero)
}
else {
let scaleFactor = CGAffineTransform(scaleX: scaleToFitRatio, y: scaleToFitRatio)
var concat = assetTrack.preferredTransform.concatenating(scaleFactor).concatenating(CGAffineTransform(translationX: 0, y: UIScreen.main.bounds.width / 2))
if assetInfo.orientation == .down {
let fixUpsideDown = CGAffineTransform(rotationAngle: CGFloat(M_PI))
let windowBounds = UIScreen.main.bounds
let yFix = assetTrack.naturalSize.height + windowBounds.height
let centerFix = CGAffineTransform(translationX: assetTrack.naturalSize.width, y: yFix)
concat = fixUpsideDown.concatenating(centerFix).concatenating(scaleFactor)
}
instruction.setTransform(concat, at: kCMTimeZero)
}
return instruction
}
func mergeVideos(sourceVideos: [URL], completion: @escaping (_ outputURL: URL, _ success: Bool) -> Void) {
let composition = AVMutableComposition()
let track = composition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID:Int32(kCMPersistentTrackID_Invalid))
var duration = kCMTimeZero
var instructions = [AVVideoCompositionLayerInstruction]()
for url in sourceVideos {
let videoAsset = AVURLAsset(url: url)
if url == sourceVideos.first {
do {
try track.insertTimeRange(CMTimeRangeMake(kCMTimeZero, videoAsset.duration), of: videoAsset.tracks(withMediaType: AVMediaTypeVideo)[0], at: kCMTimeZero)
}
catch {
completion(URL(string: "")!, false)
}
}
else {
do {
try track.insertTimeRange(CMTimeRangeMake(kCMTimeZero, videoAsset.duration), of: videoAsset.tracks(withMediaType: AVMediaTypeVideo)[0], at: composition.duration)
}
catch {
completion(URL(string: "")!, false)
}
}
duration = CMTimeAdd(duration, videoAsset.duration)
instructions.append(videoCompositionInstructionForTrack(track: track, asset: videoAsset))
}
let mainInstruction = AVMutableVideoCompositionInstruction()
mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, duration)
mainInstruction.layerInstructions = instructions
let mainComposition = AVMutableVideoComposition()
mainComposition.instructions = [mainInstruction]
mainComposition.frameDuration = CMTimeMake(1, 60)
mainComposition.renderSize = CGSize(width: UIScreen.main.bounds.width, height: UIScreen.main.bounds.height)
var paths = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)
let documentsDirectory = paths[0] as String
let videoPathToSave = documentsDirectory.stringByAppendingPathComponent(path: "mergeVideo-\(arc4random()%1000)-d.mov")
let videoURLToSave = NSURL(fileURLWithPath: videoPathToSave)
guard let exporter = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetHighestQuality) else {
completion(URL(string: "")!, false)
return
}
exporter.outputURL = videoURLToSave as URL
exporter.videoComposition = mainComposition
exporter.outputFileType = AVFileTypeQuickTimeMovie
exporter.shouldOptimizeForNetworkUse = true
exporter.exportAsynchronously {
DispatchQueue.main.async {
for url in sourceVideos {
do {
try FileManager.default.removeItem(at: url)
}
catch {}
}
completion(exporter.outputURL!, true)
}
}
}
This code works well for the back camera, but if I record with the front (face) camera, it shows the wrong scale. I adapted this code from the following link:
https://www.raywenderlich.com/94404/play-record-merge-videos-ios-swift
I really want to know how to fix the orientation for the front camera...
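One hedged observation, offered as a sketch rather than a confirmed fix: front-camera footage typically carries a mirrored preferredTransform, which none of the exact-match branches above map to an orientation. A mirrored affine transform has a negative determinant, so the mirrored case can at least be detected before the scale is applied:
import CoreGraphics

// Sketch only: a pure rotation has determinant +1, while a rotation combined with a
// mirror has determinant -1, so front-camera (mirrored) transforms can be singled out.
func isMirrored(_ transform: CGAffineTransform) -> Bool {
    return (transform.a * transform.d - transform.b * transform.c) < 0
}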
I have already asked this question on a co-worker's profile, but there is no solution yet. Please help me solve this problem; I have been stuck for more than 4 hours.
So far I did:
I choose a video from the gallery and display it on screen using MPMoviePlayerController. Here I am choosing 2 videos one by one. The first video I select displays fine, but when I move on to load the second video, the first video's screen goes black. I need the video image to stay visible in that player. How do I do that? Please help me out.
My code:
import UIKit
import AVFoundation
import MobileCoreServices
import AssetsLibrary
import MediaPlayer
import CoreMedia
class ViewController: UIViewController,UIGestureRecognizerDelegate {
var Asset1: AVAsset?
var Asset2: AVAsset?
var Asset3: AVAsset?
var audioAsset: AVAsset?
var loadingAssetOne = false
// set a duplicate image as the thumbnail image for audio
@IBOutlet weak var musicImg: UIImageView!
var videoPlayer = MPMoviePlayerController()
var mediaUI = UIImagePickerController()
var videoURL = NSURL()
override func viewDidLoad() {
super.viewDidLoad()
musicImg.hidden = true
let gestureRecognizer: UITapGestureRecognizer = UITapGestureRecognizer(target: self, action: "handleTapGesture")
gestureRecognizer.delegate = self;
videoPlayer.view.addGestureRecognizer(gestureRecognizer)
}
func handleTap(gestureRecognizer: UIGestureRecognizer) {
//location = gestureRecognizer .locationInView(videoPlayer.view)
print("tapped")
}
// MARK: - Gesture recognizer delegate
// this allows you to dispatch touches
func gestureRecognizer(gestureRecognizer: UIGestureRecognizer, shouldReceiveTouch touch: UITouch) -> Bool {
return true
}
func gestureRecognizer(_ gestureRecognizer: UIGestureRecognizer,
shouldRecognizeSimultaneouslyWithGestureRecognizer otherGestureRecognizer: UIGestureRecognizer) -> Bool {
return true
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
// Dispose of any resources that can be recreated.
}
func startMediaBrowserFromViewController(viewController: UIViewController!, usingDelegate delegate : protocol<UINavigationControllerDelegate, UIImagePickerControllerDelegate>!) -> Bool {
if UIImagePickerController.isSourceTypeAvailable(.SavedPhotosAlbum) == false {
return false
}
let mediaUI = UIImagePickerController()
mediaUI.sourceType = .SavedPhotosAlbum
mediaUI.mediaTypes = [kUTTypeMovie as String]
mediaUI.allowsEditing = true
mediaUI.delegate = delegate
presentViewController(mediaUI, animated: true, completion: nil)
return true
}
// after merge all video and audio. the final video will be saved in gallery and also will display like preview
func exportDidFinish(session: AVAssetExportSession) {
if session.status == AVAssetExportSessionStatus.Completed {
let outputURL = session.outputURL
let library = ALAssetsLibrary()
if library.videoAtPathIsCompatibleWithSavedPhotosAlbum(outputURL) {
library.writeVideoAtPathToSavedPhotosAlbum(outputURL,
completionBlock: { (assetURL:NSURL!, error:NSError!) -> Void in
if error != nil {
print("some files went wrong")
} else {
// get the output url to display the final video in screen
self.videoURL = outputURL!
self.mediaUI.dismissViewControllerAnimated(true, completion: nil)
self.videoPlayer = MPMoviePlayerController()
self.videoPlayer.contentURL = self.videoURL
self.videoPlayer.controlStyle = .Embedded
self.videoPlayer.scalingMode = .AspectFill
self.videoPlayer.shouldAutoplay = true
self.videoPlayer.backgroundView.backgroundColor = UIColor.clearColor()
self.videoPlayer.fullscreen = true
self.videoPlayer.view.frame = CGRectMake(38, 442, 220, 106)
self.view.addSubview(self.videoPlayer.view)
self.videoPlayer.play()
self.videoPlayer.prepareToPlay()
}
})
}
}
Asset1 = nil
Asset2 = nil
Asset3 = nil
audioAsset = nil
}
// click first video
@IBAction func FirstVideo(sender: AnyObject) {
loadingAssetOne = true
startMediaBrowserFromViewController(self, usingDelegate: self)
}
// click second video
@IBAction func SecondVideo(sender: AnyObject) {
loadingAssetOne = false
startMediaBrowserFromViewController(self, usingDelegate: self)
}
// click audio
@IBAction func Audio(sender: AnyObject) {
let mediaPickerController = MPMediaPickerController(mediaTypes: .Any)
mediaPickerController.delegate = self
mediaPickerController.prompt = "Select Audio"
presentViewController(mediaPickerController, animated: true, completion: nil)
}
@IBAction func playPreview(sender: AnyObject) {
startMediaBrowserFromViewController(self, usingDelegate: self)
}
// orientation for the video
func orientationFromTransform(transform: CGAffineTransform) -> (orientation: UIImageOrientation, isPortrait: Bool) {
var assetOrientation = UIImageOrientation.Up
var isPortrait = false
if transform.a == 0 && transform.b == 1.0 && transform.c == -1.0 && transform.d == 0 {
assetOrientation = .Right
isPortrait = true
} else if transform.a == 0 && transform.b == -1.0 && transform.c == 1.0 && transform.d == 0 {
assetOrientation = .Left
isPortrait = true
} else if transform.a == 1.0 && transform.b == 0 && transform.c == 0 && transform.d == 1.0 {
assetOrientation = .Up
} else if transform.a == -1.0 && transform.b == 0 && transform.c == 0 && transform.d == -1.0 {
assetOrientation = .Down
}
return (assetOrientation, isPortrait)
}
func videoCompositionInstructionForTrack(track: AVCompositionTrack, asset: AVAsset) -> AVMutableVideoCompositionLayerInstruction {
let instruction = AVMutableVideoCompositionLayerInstruction(assetTrack: track)
let assetTrack = asset.tracksWithMediaType(AVMediaTypeVideo)[0]
let transform = assetTrack.preferredTransform
let assetInfo = orientationFromTransform(transform)
var scaleToFitRatio = UIScreen.mainScreen().bounds.width / assetTrack.naturalSize.width
if assetInfo.isPortrait {
scaleToFitRatio = UIScreen.mainScreen().bounds.width / assetTrack.naturalSize.height
let scaleFactor = CGAffineTransformMakeScale(scaleToFitRatio, scaleToFitRatio)
instruction.setTransform(CGAffineTransformConcat(assetTrack.preferredTransform, scaleFactor),
atTime: kCMTimeZero)
} else {
let scaleFactor = CGAffineTransformMakeScale(scaleToFitRatio, scaleToFitRatio)
var concat = CGAffineTransformConcat(CGAffineTransformConcat(assetTrack.preferredTransform, scaleFactor), CGAffineTransformMakeTranslation(0, UIScreen.mainScreen().bounds.width / 2))
if assetInfo.orientation == .Down {
let fixUpsideDown = CGAffineTransformMakeRotation(CGFloat(M_PI))
let windowBounds = UIScreen.mainScreen().bounds
let yFix = assetTrack.naturalSize.height + windowBounds.height
let centerFix = CGAffineTransformMakeTranslation(assetTrack.naturalSize.width, yFix)
concat = CGAffineTransformConcat(CGAffineTransformConcat(fixUpsideDown, centerFix), scaleFactor)
}
instruction.setTransform(concat, atTime: kCMTimeZero)
}
return instruction
}
// merge all file
@IBAction func MergeAll(sender: AnyObject) {
if let firstAsset = Asset1, secondAsset = Asset2 {
let mixComposition = AVMutableComposition()
//load first video
let firstTrack = mixComposition.addMutableTrackWithMediaType(AVMediaTypeVideo,
preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
do {
try firstTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, firstAsset.duration),
ofTrack: firstAsset.tracksWithMediaType(AVMediaTypeVideo)[0] ,
atTime: kCMTimeZero)
} catch _ {
}
// load second video
let secondTrack = mixComposition.addMutableTrackWithMediaType(AVMediaTypeVideo,
preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
do {
try secondTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, secondAsset.duration),
ofTrack: secondAsset.tracksWithMediaType(AVMediaTypeVideo)[0] ,
atTime: firstAsset.duration)
} catch _ {
}
let mainInstruction = AVMutableVideoCompositionInstruction()
mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeAdd(firstAsset.duration, secondAsset.duration))
let firstInstruction = videoCompositionInstructionForTrack(firstTrack, asset: firstAsset)
firstInstruction.setOpacity(0.0, atTime: firstAsset.duration)
let secondInstruction = videoCompositionInstructionForTrack(secondTrack, asset: secondAsset)
mainInstruction.layerInstructions = [firstInstruction, secondInstruction]
let mainComposition = AVMutableVideoComposition()
mainComposition.instructions = [mainInstruction]
mainComposition.frameDuration = CMTimeMake(1, 30)
mainComposition.renderSize = CGSize(width: UIScreen.mainScreen().bounds.width, height: UIScreen.mainScreen().bounds.height)
//load audio
if let loadedAudioAsset = audioAsset {
let audioTrack = mixComposition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: 0)
do {
try audioTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, CMTimeAdd(firstAsset.duration, secondAsset.duration)),
ofTrack: loadedAudioAsset.tracksWithMediaType(AVMediaTypeAudio)[0] ,
atTime: kCMTimeZero)
} catch _ {
}
}
// save the final video to gallery
let documentDirectory = NSSearchPathForDirectoriesInDomains(.DocumentDirectory, .UserDomainMask, true)[0]
let dateFormatter = NSDateFormatter()
dateFormatter.dateStyle = .LongStyle
dateFormatter.timeStyle = .ShortStyle
let date = dateFormatter.stringFromDate(NSDate())
// let savePath = documentDirectory.URLByAppendingPathComponent("mergeVideo-\(date).mov")
let savePath = (documentDirectory as NSString).stringByAppendingPathComponent("final-\(date).mov")
let url = NSURL(fileURLWithPath: savePath)
let exporter = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality)
exporter!.outputURL = url
exporter!.outputFileType = AVFileTypeQuickTimeMovie
exporter!.shouldOptimizeForNetworkUse = true
exporter!.videoComposition = mainComposition
exporter!.exportAsynchronouslyWithCompletionHandler() {
dispatch_async(dispatch_get_main_queue(), { () -> Void in
self.exportDidFinish(exporter!)
})
}
}
}
}
extension ViewController: UIImagePickerControllerDelegate {
// display the first & second video after it picked from gallery
func imagePickerController(picker: UIImagePickerController, didFinishPickingMediaWithInfo info: [String : AnyObject]) {
let mediaType = info[UIImagePickerControllerMediaType] as! NSString
dismissViewControllerAnimated(true, completion: nil)
if mediaType == kUTTypeMovie {
let avAsset = AVAsset(URL: info[UIImagePickerControllerMediaURL] as! NSURL)
if loadingAssetOne {
if let vURL = info[UIImagePickerControllerMediaURL] as? NSURL {
self.videoURL = vURL
} else {
print("oops, no url")
}
mediaUI.dismissViewControllerAnimated(true, completion: nil)
self.videoPlayer = MPMoviePlayerController()
self.videoPlayer.contentURL = videoURL
self.videoPlayer.view.frame = CGRectMake(38, 57, 220, 106)
self.view.addSubview(self.videoPlayer.view)
self.videoPlayer.controlStyle = .Embedded
self.videoPlayer.scalingMode = .AspectFill
self.videoPlayer.shouldAutoplay = true
self.videoPlayer.prepareToPlay()
self.videoPlayer.play()
Asset1 = avAsset
} else {
if let vURL = info[UIImagePickerControllerMediaURL] as? NSURL {
self.videoURL = vURL
} else {
print("oops, no url")
}
mediaUI.dismissViewControllerAnimated(true, completion: nil)
self.videoPlayer = MPMoviePlayerController()
self.videoPlayer.contentURL = videoURL
self.videoPlayer.view.frame = CGRectMake(38, 206, 220, 106)
self.view.addSubview(self.videoPlayer.view)
self.videoPlayer.play()
self.videoPlayer.controlStyle = .Embedded
self.videoPlayer.scalingMode = .AspectFill
self.videoPlayer.shouldAutoplay = true
self.videoPlayer.prepareToPlay()
Asset2 = avAsset
}
}
}
}
extension ViewController: UINavigationControllerDelegate {
}
extension ViewController: MPMediaPickerControllerDelegate {
func mediaPicker(mediaPicker: MPMediaPickerController, didPickMediaItems mediaItemCollection: MPMediaItemCollection) {
let selectedSongs = mediaItemCollection.items
if selectedSongs.count > 0 {
let song = selectedSongs[0]
if let vURL = song.valueForProperty(MPMediaItemPropertyAssetURL) as? NSURL {
audioAsset = AVAsset(URL: vURL)
dismissViewControllerAnimated(true, completion: nil)
mediaUI.dismissViewControllerAnimated(true, completion: nil)
musicImg.hidden = false
let alert = UIAlertController(title: "yes", message: "Audio Loaded", preferredStyle: .Alert)
alert.addAction(UIAlertAction(title: "OK", style: .Cancel, handler:nil))
presentViewController(alert, animated: true, completion: nil)
} else {
dismissViewControllerAnimated(true, completion: nil)
let alert = UIAlertController(title: "No audio", message: "Audio Not Loaded", preferredStyle: .Alert)
alert.addAction(UIAlertAction(title: "OK", style: .Cancel, handler:nil))
presentViewController(alert, animated: true, completion: nil)
}
} else {
dismissViewControllerAnimated(true, completion: nil)
}
}
func mediaPickerDidCancel(mediaPicker: MPMediaPickerController) {
dismissViewControllerAnimated(true, completion: nil)
}
}
I need to show the image of the selected video on screen. Please help me resolve this. Thanks.
I am following the tutorial linked here.
If you want to play more than one video in the same view, you can use AVPlayer from AVFoundation. I have read more about this on Stack Overflow, in the Apple documentation, and on other forums. They all say it is not possible using MPMoviePlayerViewController, but it is possible using AVPlayer.
import UIKit
import AVFoundation
import MobileCoreServices
import AssetsLibrary
import MediaPlayer
import CoreMedia
class ViewController: UIViewController,UIGestureRecognizerDelegate {
var optionalInteger: Int?
var Asset1: AVAsset?
var Asset2: AVAsset?
var Asset3: AVAsset?
var audioAsset: AVAsset?
var loadingAssetOne = false
@IBOutlet weak var musicImg: UIImageView!
@IBOutlet var videoView: UIView!
@IBOutlet var videoViewTwo: UIView!
var player : AVPlayer? = nil
var playerLayer : AVPlayerLayer? = nil
var asset : AVAsset? = nil
var playerItem: AVPlayerItem? = nil
// These two are referenced below but were missing from the listing:
var videoURL = NSURL()
var mediaUI = UIImagePickerController()
override func viewDidLoad()
{
super.viewDidLoad()
musicImg.hidden = true
let gestureRecognizer: UITapGestureRecognizer = UITapGestureRecognizer(target: self, action: "handleTapGesture")
gestureRecognizer.delegate = self;
videoPlayer.view.addGestureRecognizer(gestureRecognizer)
imageViewVideoOne.hidden = true
imageViewVideoTwo.hidden = true
}
func startMediaBrowserFromViewController(viewController: UIViewController!, usingDelegate delegate : protocol<UINavigationControllerDelegate, UIImagePickerControllerDelegate>!) -> Bool
{
if UIImagePickerController.isSourceTypeAvailable(.SavedPhotosAlbum) == false {
return false
}
let mediaUI = UIImagePickerController()
mediaUI.sourceType = .SavedPhotosAlbum
mediaUI.mediaTypes = [kUTTypeMovie as String]
mediaUI.allowsEditing = true
mediaUI.delegate = delegate
presentViewController(mediaUI, animated: true, completion: nil)
return true
}
// click first video
@IBAction func FirstVideo(sender: AnyObject) {
loadingAssetOne = true
optionalInteger = 0
startMediaBrowserFromViewController(self, usingDelegate: self)
}
// click second video
@IBAction func SecondVideo(sender: AnyObject) {
loadingAssetOne = false
optionalInteger = 1
startMediaBrowserFromViewController(self, usingDelegate: self)
}
// click audio
@IBAction func Audio(sender: AnyObject) {
let mediaPickerController = MPMediaPickerController(mediaTypes: .Any)
mediaPickerController.delegate = self
mediaPickerController.prompt = "Select Audio"
presentViewController(mediaPickerController, animated: true, completion: nil)
}
@IBAction func playPreview(sender: AnyObject)
{
startMediaBrowserFromViewController(self, usingDelegate: self)
}
}
extension ViewController: UIImagePickerControllerDelegate
{
// display the first & second video after it picked from gallery
func imagePickerController(picker: UIImagePickerController, didFinishPickingMediaWithInfo info: [String : AnyObject])
{
let mediaType = info[UIImagePickerControllerMediaType] as! NSString
dismissViewControllerAnimated(true, completion: nil)
if mediaType == kUTTypeMovie
{
if loadingAssetOne
{
let avAsset = AVAsset(URL: info[UIImagePickerControllerMediaURL] as! NSURL)
print(avAsset)
if let vURL = info[UIImagePickerControllerMediaURL] as? NSURL {
self.videoURL = vURL
} else {
print("oops, no url")
}
mediaUI.dismissViewControllerAnimated(true, completion: nil)
let videoURLWithPath = info[UIImagePickerControllerMediaURL] as? NSURL
self.videoURL = videoURLWithPath!
asset = AVAsset(URL: videoURL) as AVAsset
playerItem = AVPlayerItem(asset: asset!)
player = AVPlayer (playerItem: self.playerItem!)
playerLayer = AVPlayerLayer(player: self.player)
videoView.frame = CGRectMake(38, 57, 220, 106)
playerLayer?.frame = videoView.frame
videoView.layer.addSublayer(self.playerLayer!)
player!.play()
}
else
{
let avAssetTwo = AVAsset(URL: info[UIImagePickerControllerMediaURL] as! NSURL)
print(avAssetTwo)
if let vURL = info[UIImagePickerControllerMediaURL] as? NSURL {
self.videoURL = vURL
} else {
print("oops, no url")
}
mediaUI.dismissViewControllerAnimated(true, completion: nil)
let videoURLWithPath = info[UIImagePickerControllerMediaURL] as? NSURL
self.videoURL = videoURLWithPath!
asset = AVAsset(URL: videoURL) as AVAsset
playerItem = AVPlayerItem(asset: asset!)
player = AVPlayer (playerItem: self.playerItem!)
playerLayer = AVPlayerLayer(player: self.player)
videoView.frame = CGRectMake(38, 206, 220, 106)
playerLayer?.frame = videoView.frame
videoView.layer.addSublayer(self.playerLayer!)
player!.play()
}
}
}
}
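Note that the code above reuses a single player, playerItem and playerLayer for both previews, so loading the second clip replaces the first. To keep both previews playing at once, a sketch in modern Swift (my own assumption, not from the linked sources) would hold one AVPlayer and one AVPlayerLayer per preview view:
import UIKit
import AVFoundation

// Sketch: keep one AVPlayer/AVPlayerLayer per preview so loading the second clip
// does not tear down the first. The two container views are assumed to be IBOutlets.
final class DualPreviewController: UIViewController {
    @IBOutlet var videoView: UIView!
    @IBOutlet var videoViewTwo: UIView!

    private var players: [AVPlayer] = []

    func showVideo(at url: URL, in container: UIView) {
        let player = AVPlayer(url: url)
        let layer = AVPlayerLayer(player: player)
        layer.frame = container.bounds
        layer.videoGravity = .resizeAspectFill
        container.layer.addSublayer(layer)
        players.append(player)   // retain the player so playback continues
        player.play()
    }
}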
Sources for your question
Multiple Video same screen
Multiple Video Playback on iOS
Apple MPMoviePlayerViewController Document
Playing Multiple Video Simultaneously
Apple AVPlayer Document
Playing Multiple Videos
Playback
Playing Video in iOS