AVFoundation - Videos merge but only the last video plays - iOS

I have an array of AVAsset objects (arrayVideos below). Whenever I record several videos of different durations, the code below merges them into one video of the combined duration, but it only plays the last video, repeated.
For example: video1 is 1 minute and shows a dog walking, video2 is 1 minute and shows a bird flying, video3 is 1 minute and shows a horse running. The merged video plays for 3 minutes, but it only shows the horse running, three times in a row.
Where am I going wrong?
var movieFileOutput = AVCaptureMovieFileOutput()
var arrayVideos = [AVAsset]()
var videoFileUrl: URL?

// button to record video
@objc func recordButtonTapped() {
    // Stop recording
    if movieFileOutput.isRecording {
        movieFileOutput.stopRecording()
        print("Stop Recording")
    } else {
        // Start recording
        movieFileOutput.connection(with: AVMediaType.video)?.videoOrientation = videoOrientation()
        movieFileOutput.maxRecordedDuration = maxRecordDuration()
        videoFileUrl = URL(fileURLWithPath: videoFileLocation())
        if let videoFileUrlFromCamera = videoFileUrl {
            movieFileOutput.startRecording(to: videoFileUrlFromCamera, recordingDelegate: self)
        }
    }
}

func videoFileLocation() -> String {
    return NSTemporaryDirectory().appending("videoFile.mov")
}

// button to save the merged video
@objc func saveButtonTapped() {
    mergeVids()
}

// function to merge and save videos
func mergeVids() {
    let mixComposition = AVMutableComposition()
    let compositionVideoTrack = mixComposition.addMutableTrack(withMediaType: .video,
                                                               preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
    compositionVideoTrack?.preferredTransform = CGAffineTransform(rotationAngle: .pi / 2)
    let soundtrackTrack = mixComposition.addMutableTrack(withMediaType: .audio,
                                                         preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
    var insertTime = CMTime.zero
    for videoAsset in arrayVideos {
        do {
            try compositionVideoTrack?.insertTimeRange(CMTimeRangeMake(start: CMTime.zero,
                                                                       duration: videoAsset.duration),
                                                       of: videoAsset.tracks(withMediaType: .video)[0],
                                                       at: insertTime)
            try soundtrackTrack?.insertTimeRange(CMTimeRangeMake(start: CMTime.zero,
                                                                 duration: videoAsset.duration),
                                                 of: videoAsset.tracks(withMediaType: .audio)[0],
                                                 at: insertTime)
            insertTime = CMTimeAdd(insertTime, videoAsset.duration)
        } catch let error as NSError {
            print("\(error.localizedDescription)")
        }
    }
    let outputFileURL = URL(fileURLWithPath: NSTemporaryDirectory() + "merge.mp4")
    let path = outputFileURL.path
    if FileManager.default.fileExists(atPath: path) {
        try! FileManager.default.removeItem(atPath: path)
    }
    let exporter = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality)
    exporter!.outputURL = outputFileURL
    exporter!.outputFileType = AVFileType.mp4
    exporter!.shouldOptimizeForNetworkUse = true
    exporter!.exportAsynchronously { [weak self] in
        let cameraVideoURL = exporter!.outputURL!
        PHPhotoLibrary.shared().performChanges({
            PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: cameraVideoURL)
        }) { (saved, error) in
            if error != nil { return }
            if !saved { return }
            // url is saved
            self?.videoFileUrl = nil
            self?.arrayVideos.removeAll()
        }
    }
}

// AVCaptureFileOutputRecordingDelegate callbacks
func fileOutput(_ output: AVCaptureFileOutput, didStartRecordingTo fileURL: URL, from connections: [AVCaptureConnection]) {
    print("+++++++++++++++Started")
    print("*****Started recording: \(fileURL)\n")
}

func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
    if error == nil {
        let asset = AVAsset(url: outputFileURL)
        arrayVideos.append(asset)
        print(arrayVideos.count)
    } else {
        print("Error recording movie: \(error!.localizedDescription)")
    }
    func cleanUp() {
        let path = outputFileURL.path
        if FileManager.default.fileExists(atPath: path) {
            do {
                try FileManager.default.removeItem(atPath: path)
            } catch {
                print("Could not remove file at url: \(outputFileURL)")
            }
        }
    }
}

func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
    print("++++++Frame Drop: \(connection.description)")
}

Thanks to @alxlives for testing out the merge function and pointing out that, since it was fine on his machine, the problem must have been somewhere else.
The problem was here:
func videoFileLocation() -> String {
    return NSTemporaryDirectory().appending("videoFile.mov")
}
In recordButtonTapped, the code above kept reusing the same "videoFile.mov" path, so each new recording overwrote the previous one and every AVAsset in arrayVideos ended up pointing at the same file (the last recording):
videoFileUrl = URL(fileURLWithPath: videoFileLocation()) // <<< it gets called here every time a new video runs
if let videoFileUrlFromCamera = videoFileUrl {
    movieFileOutput.startRecording(to: videoFileUrlFromCamera, recordingDelegate: self)
}
To fix it, I needed to make each file name unique:
func videoFileLocation() -> String {
    let uuid = UUID().uuidString
    return NSTemporaryDirectory().appending("videoFile_\(uuid).mov")
}
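With unique names, the temporary directory now accumulates one .mov per recording. A minimal cleanup sketch (my addition, assuming the clips are no longer needed once the merged video has been saved to the photo library):

// Remove the recorded clips after a successful merge. Assumes the assets in
// arrayVideos were created with AVAsset(url:), which yields AVURLAssets
// pointing into NSTemporaryDirectory().
func removeTemporaryClips() {
    for case let urlAsset as AVURLAsset in arrayVideos {
        try? FileManager.default.removeItem(at: urlAsset.url)
    }
    arrayVideos.removeAll()
}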

Related

AVAssetExportSession succeeds in converting mp4 to m4a on the iPhone simulator but fails on an iPhone device

I'm trying to convert an mp4 video file to m4a audio format with AVAssetExportSession in my iOS app.
This is the conversion code:
let outputUrl = URL(fileURLWithPath: NSTemporaryDirectory() + "out.m4a")
if FileManager.default.fileExists(atPath: outputUrl.path) {
    try? FileManager.default.removeItem(atPath: outputUrl.path)
}
let asset = AVURLAsset(url: inputUrl)
// tried the `AVAssetExportPresetAppleM4A` preset name but the same result
let exportSession = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetPassthrough)!
exportSession.outputFileType = AVFileType.m4a
exportSession.outputURL = outputUrl
await exportSession.export()
switch exportSession.status {
case .completed:
    return outputUrl
default:
    // This becomes `4` which is `.failed`
    print("Status: \(exportSession.status)")
    throw exportSession.error!
}
Currently, it seems to work on iPhone simulators (confirmed on iOS 16.1/15.5), but it doesn't on my real iPhone 7 (iOS 15.7.1). It doesn't seem to work on my colleague's iOS 16.1 real device either, so it shouldn't be a matter of the iOS version.
The mp4 file is located in the iOS Files app and the inputUrl in the above code becomes something like this (I get this URL via UIDocumentPickerViewController):
file:///private/var/mobile/Library/Mobile%20Documents/com~apple~CloudDocs/Downloads/%E3%81%8A%E3%81%97%E3%82%83%E3%81%B8%E3%82%99%E3%82%8A%E3%81%B2%E3%82%8D%E3%82%86%E3%81%8D.mp4
and the error is:
Error Domain=AVFoundationErrorDomain Code=-11800 "The operation could not be completed" UserInfo={NSUnderlyingError=0x2808f30c0 {Error Domain=NSOSStatusErrorDomain Code=-16979 "(null)"}, NSLocalizedFailureReason=An unknown error occurred (-16979), NSLocalizedRecoverySuggestion=XXXXDEFAULTVALUEXXXX, NSURL=file:///private/var/mobile/Library/Mobile%20Documents/com~apple~CloudDocs/Downloads/%E3%81%8A%E3%81%97%E3%82%83%E3%81%B8%E3%82%99%E3%82%8A%E3%81%B2%E3%82%8D%E3%82%86%E3%81%8D.mp4, NSLocalizedDescription=The operation could not be completed}
It seems to be resolved by calling startAccessingSecurityScopedResource() on the inputUrl before exporting:
inputUrl.startAccessingSecurityScopedResource()
Not sure exactly why, but it's probably because the inputUrl returned by the document picker is security-scoped: it points outside the app sandbox, under the file:///private namespace.
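For reference, the start call is normally balanced with stopAccessingSecurityScopedResource() once the file is no longer needed. A minimal sketch of the balanced pattern around the export code above:

let accessGranted = inputUrl.startAccessingSecurityScopedResource()
defer {
    // Only balance the call if access was actually granted; the method
    // returns false for URLs that are not security-scoped.
    if accessGranted {
        inputUrl.stopAccessingSecurityScopedResource()
    }
}
// ... run the export here ...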
Use this function to extract audio from a video; it exports the audio track from the video URL to a new path:
func extractAudioFromVideo(videoUrl: URL) {
    let mixComposition = AVMutableComposition()
    var mutableCompositionAudioVideoTrack: [AVMutableCompositionTrack] = []
    let videoAsset = AVAsset(url: videoUrl)
    if let audioVideoTrack = mixComposition.addMutableTrack(withMediaType: .audio, preferredTrackID: kCMPersistentTrackID_Invalid) {
        mutableCompositionAudioVideoTrack.append(audioVideoTrack)
        if let audioVideoAssetTrack = videoAsset.tracks(withMediaType: .audio).first {
            do {
                try mutableCompositionAudioVideoTrack.first?.insertTimeRange(CMTimeRangeMake(start: CMTime.zero, duration: videoAsset.duration), of: audioVideoAssetTrack, at: CMTime.zero)
            } catch {
                print(error)
            }
        }
    }
    if let documentsPath = NSSearchPathForDirectoriesInDomains(.cachesDirectory, .userDomainMask, true).first {
        // Note: appending just ".m4a" (as the original snippet did) creates a
        // hidden file; the file name used here is arbitrary.
        let outputURL = URL(fileURLWithPath: documentsPath).appendingPathComponent("extractedAudio.m4a")
        do {
            if FileManager.default.fileExists(atPath: outputURL.path) {
                try FileManager.default.removeItem(at: outputURL)
            }
        } catch { }
        if let exportSession = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetAppleM4A) {
            exportSession.outputURL = outputURL
            exportSession.outputFileType = AVFileType.m4a
            exportSession.shouldOptimizeForNetworkUse = true
            exportSession.exportAsynchronously(completionHandler: {
                switch exportSession.status {
                case .completed:
                    DispatchQueue.main.async {
                        print("audio url: \(outputURL)")
                        // -------- play output audio URL in player ------
                    }
                case .failed:
                    if let _error = exportSession.error {
                        print(_error.localizedDescription)
                    }
                case .cancelled:
                    if let _error = exportSession.error {
                        print(_error.localizedDescription)
                    }
                default:
                    break
                }
            })
        }
    }
}
}
Playing the AVMutableComposition directly:
You can play the AVMutableComposition directly, without exporting the audio track.
The benefit of playing the AVMutableComposition is that the audio starts playing in the player instantly.
var avplayer = AVPlayer()
var playerController: AVPlayerViewController?

@IBAction func btnAudioPlay(sender: UIButton) {
    self.playAudioCompositionFromVideo(fromVideoURL: URL(string: "")!) { composition in
        let playerItem = AVPlayerItem(asset: composition)
        self.playerController = AVPlayerViewController()
        self.avplayer = AVPlayer(playerItem: playerItem)
        self.playerController?.player = self.avplayer
        self.playerController?.player?.play()
    } failure: { error in
        print(error as Any)
    }
}

func playAudioCompositionFromVideo(fromVideoURL url: URL, success: @escaping ((AVMutableComposition) -> Void), failure: @escaping ((String?) -> Void)) {
    let asset = AVPlayerItem(url: url).asset
    let mixComposition = AVMutableComposition()
    let timeRange = CMTimeRangeMake(start: CMTime.zero, duration: asset.duration)
    // ------------ Get Audio Tracks From Asset ---------
    let audioTracks = asset.tracks(withMediaType: AVMediaType.audio)
    if audioTracks.count > 0 {
        // ---- Use audio if video contains the audio track ---
        let compositionAudioTrack = mixComposition.addMutableTrack(withMediaType: AVMediaType.audio, preferredTrackID: kCMPersistentTrackID_Invalid)
        // -------- Get First Audio track --------
        guard let audioTrack = audioTracks.first else { return }
        do {
            try compositionAudioTrack?.insertTimeRange(timeRange, of: audioTrack, at: CMTime.zero)
            compositionAudioTrack?.preferredTransform = audioTrack.preferredTransform
            success(mixComposition)
        } catch {
            failure("audio track insert failed!")
        }
    } else {
        failure("audio track is not available!")
    }
}

How do you create a new AVAsset video that consists of only frames from given `CMTimeRange`s of another video?

Apple's sample code Identifying Trajectories in Video contains the following delegate callback:
func cameraViewController(_ controller: CameraViewController, didReceiveBuffer buffer: CMSampleBuffer, orientation: CGImagePropertyOrientation) {
    let visionHandler = VNImageRequestHandler(cmSampleBuffer: buffer, orientation: orientation, options: [:])
    if gameManager.stateMachine.currentState is GameManager.TrackThrowsState {
        DispatchQueue.main.async {
            // Get the frame of rendered view
            let normalizedFrame = CGRect(x: 0, y: 0, width: 1, height: 1)
            self.jointSegmentView.frame = controller.viewRectForVisionRect(normalizedFrame)
            self.trajectoryView.frame = controller.viewRectForVisionRect(normalizedFrame)
        }
        // Perform the trajectory request in a separate dispatch queue.
        trajectoryQueue.async {
            do {
                try visionHandler.perform([self.detectTrajectoryRequest])
                if let results = self.detectTrajectoryRequest.results {
                    DispatchQueue.main.async {
                        self.processTrajectoryObservations(controller, results)
                    }
                }
            } catch {
                AppError.display(error, inViewController: self)
            }
        }
    }
}
However, instead of drawing UI whenever detectTrajectoryRequest.results exist (https://developer.apple.com/documentation/vision/vndetecttrajectoriesrequest/3675672-results), I'm interested in using the CMTimeRange provided by each result to construct a new video. In effect, this would filter down the original video to only frames with trajectories.
What would be a good approach to transferring only frames with trajectories from an AVAssetReader to an AVAssetWriter?
By the time you identify a trajectory in captured video frames, or in frames decoded from a file, you may no longer have the initial frames in memory. The easiest way to create a file containing only the trajectories is therefore to keep the original file on hand, then insert its trajectory snippets into an AVComposition, which you export using AVAssetExportSession.
This sample captures frames from the camera and encodes them to a file while analysing them for trajectories; after 20 seconds it closes the file and creates a new file containing only the trajectory snippets.
If you're interested in detecting trajectories in a pre-existing file, it's not too hard to rewire this code; see the sketch after the sample below.
import UIKit
import AVFoundation
import Vision

class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate {
    let session = AVCaptureSession()
    var assetWriter: AVAssetWriter!
    var assetWriterInput: AVAssetWriterInput!
    var assetWriterStartTime: CMTime = .zero
    var assetWriterStarted = false
    var referenceFileURL: URL!
    var timeRangesOfInterest: [Double: CMTimeRange] = [:]

    func startWritingFile(outputURL: URL, initialSampleBuffer: CMSampleBuffer) {
        try? FileManager.default.removeItem(at: outputURL)
        assetWriter = try! AVAssetWriter(outputURL: outputURL, fileType: .mov)
        let dimensions = initialSampleBuffer.formatDescription!.dimensions
        assetWriterInput = AVAssetWriterInput(mediaType: .video, outputSettings: [AVVideoCodecKey: AVVideoCodecType.h264, AVVideoWidthKey: dimensions.width, AVVideoHeightKey: dimensions.height])
        assetWriterInput.expectsMediaDataInRealTime = true // appending live capture buffers
        assetWriter.add(assetWriterInput)
        assetWriter.startWriting()
        self.assetWriterStartTime = CMSampleBufferGetPresentationTimeStamp(initialSampleBuffer)
        assetWriter.startSession(atSourceTime: self.assetWriterStartTime)
    }

    func stopWritingFile(completion: @escaping (() -> Void)) {
        let assetWriterToFinish = self.assetWriter!
        self.assetWriterInput = nil
        self.assetWriter = nil
        assetWriterToFinish.finishWriting {
            print("finished writing: \(assetWriterToFinish.status.rawValue)")
            completion()
        }
    }

    func exportVideoTimeRanges(inputFileURL: URL, outputFileURL: URL, timeRanges: [CMTimeRange]) {
        let inputAsset = AVURLAsset(url: inputFileURL)
        let inputVideoTrack = inputAsset.tracks(withMediaType: .video).first!
        let composition = AVMutableComposition()
        let compositionTrack = composition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid)!
        var insertionPoint: CMTime = .zero
        for timeRange in timeRanges {
            try! compositionTrack.insertTimeRange(timeRange, of: inputVideoTrack, at: insertionPoint)
            insertionPoint = insertionPoint + timeRange.duration
        }
        let exportSession = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetHighestQuality)!
        try? FileManager.default.removeItem(at: outputFileURL)
        exportSession.outputURL = outputFileURL
        exportSession.outputFileType = .mov
        exportSession.exportAsynchronously {
            print("export finished: \(exportSession.status.rawValue) - \(String(describing: exportSession.error))")
        }
    }

    override func viewDidLoad() {
        super.viewDidLoad()
        let inputDevice = AVCaptureDevice.default(for: .video)!
        let input = try! AVCaptureDeviceInput(device: inputDevice)
        let output = AVCaptureVideoDataOutput()
        output.setSampleBufferDelegate(self, queue: DispatchQueue.main)
        session.addInput(input)
        session.addOutput(output)
        session.startRunning()
        DispatchQueue.main.asyncAfter(deadline: .now() + 20) {
            self.stopWritingFile {
                print("finished writing")
                let trajectoriesFileURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0].appendingPathComponent("trajectories.mov")
                // Sort by start time: dictionary iteration order is undefined.
                let sortedRanges = self.timeRangesOfInterest.sorted { $0.key < $1.key }.map { $0.value }
                self.exportVideoTimeRanges(inputFileURL: self.referenceFileURL, outputFileURL: trajectoriesFileURL, timeRanges: sortedRanges)
            }
        }
    }

    // Lazily create a single instance of VNDetectTrajectoriesRequest.
    private lazy var request: VNDetectTrajectoriesRequest = {
        return VNDetectTrajectoriesRequest(frameAnalysisSpacing: .zero,
                                           trajectoryLength: 10,
                                           completionHandler: completionHandler)
    }()

    // AVCaptureVideoDataOutputSampleBufferDelegate callback.
    func captureOutput(_ output: AVCaptureOutput,
                       didOutput sampleBuffer: CMSampleBuffer,
                       from connection: AVCaptureConnection) {
        if !assetWriterStarted {
            self.referenceFileURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0].appendingPathComponent("reference.mov")
            startWritingFile(outputURL: self.referenceFileURL, initialSampleBuffer: sampleBuffer)
            assetWriterStarted = true
        }
        if assetWriterInput != nil && assetWriterInput.isReadyForMoreMediaData {
            assetWriterInput.append(sampleBuffer)
        }
        do {
            let requestHandler = VNImageRequestHandler(cmSampleBuffer: sampleBuffer)
            try requestHandler.perform([request])
        } catch {
            // Handle the error.
        }
    }

    func completionHandler(request: VNRequest, error: Error?) {
        guard let request = request as? VNDetectTrajectoriesRequest else { return }
        if let results = request.results, results.count > 0 {
            NSLog("\(results)")
            for result in results {
                var fileRelativeTimeRange = result.timeRange
                fileRelativeTimeRange.start = fileRelativeTimeRange.start - self.assetWriterStartTime
                self.timeRangesOfInterest[fileRelativeTimeRange.start.seconds] = fileRelativeTimeRange
            }
        }
    }
}
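For the pre-existing-file case mentioned above, a minimal sketch of the rewiring (my assumptions, not part of the sample: request and timeRangesOfInterest are the properties defined above, and the file is short enough to scan synchronously) that decodes frames with AVAssetReader and feeds them to the same Vision request:

func analyzeExistingFile(url: URL) {
    let asset = AVURLAsset(url: url)
    guard let videoTrack = asset.tracks(withMediaType: .video).first,
          let reader = try? AVAssetReader(asset: asset) else { return }
    // Decode to a pixel format Vision can consume.
    let output = AVAssetReaderTrackOutput(track: videoTrack,
                                          outputSettings: [kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_420YpCbCr8BiPlanarFullRange])
    reader.add(output)
    reader.startReading()
    while let sampleBuffer = output.copyNextSampleBuffer() {
        // Timestamps are already file-relative here, so the assetWriterStartTime
        // offset used in the live-capture path is not needed.
        let handler = VNImageRequestHandler(cmSampleBuffer: sampleBuffer)
        try? handler.perform([request])
    }
    // timeRangesOfInterest now holds the snippets; export with exportVideoTimeRanges as above.
}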

Swift video trimming extension error - AVFoundationErrorDomain Code=-11822

I have this code (it's a React Native module):
import Foundation
import AVFoundation
import CoreMedia

extension FileManager {
    func removeFileIfNecessary(at url: URL) throws {
        guard fileExists(atPath: url.path) else {
            return
        }
        do {
            try removeItem(at: url)
        } catch let error {
            throw TrimError("Couldn't remove existing destination file: \(error)")
        }
    }
}

struct TrimError: Error {
    let description: String
    let underlyingError: Error?

    init(_ description: String, underlyingError: Error? = nil) {
        self.description = "TrimVideo: " + description
        self.underlyingError = underlyingError
    }
}

extension AVMutableComposition {
    convenience init(asset: AVAsset) {
        self.init()
        for track in asset.tracks {
            addMutableTrack(withMediaType: track.mediaType, preferredTrackID: track.trackID)
        }
    }

    func trim(timeOffStart: Double) {
        let duration = CMTime(seconds: timeOffStart, preferredTimescale: 1)
        let timeRange = CMTimeRange(start: kCMTimeZero, duration: duration)
        for track in tracks {
            track.removeTimeRange(timeRange)
        }
        removeTimeRange(timeRange)
    }
}

extension AVAsset {
    func assetByTrimming(trimStartTime: CMTime,
                         assetDuration: CMTime) throws -> AVAsset {
        let timeRange = CMTimeRange(start: trimStartTime, end: assetDuration)
        let composition = AVMutableComposition()
        do {
            for track in tracks {
                let compositionTrack = composition.addMutableTrack(withMediaType: track.mediaType, preferredTrackID: track.trackID)
                compositionTrack?.preferredTransform = track.preferredTransform
                try compositionTrack?.insertTimeRange(timeRange, of: track, at: kCMTimeZero)
            }
        } catch let error {
            throw TrimError("error during composition", underlyingError: error)
        }
        return composition
    }

    func export(to destination: URL) throws {
        guard let exportSession = AVAssetExportSession(asset: self, presetName: AVAssetExportPresetPassthrough) else {
            throw TrimError("Could not create an export session")
        }
        exportSession.outputURL = destination
        exportSession.outputFileType = AVFileType.mov
        exportSession.shouldOptimizeForNetworkUse = true
        let group = DispatchGroup()
        group.enter()
        // try FileManager.default.removeFileIfNecessary(at: destination)
        exportSession.exportAsynchronously {
            group.leave()
        }
        group.wait()
        if let error = exportSession.error {
            throw TrimError("error during export", underlyingError: error)
        }
    }
}

@objc(VideoTrimmer)
class VideoTrimmer: NSObject {
    func time(_ operation: () throws -> ()) rethrows {
        let start = Date()
        try operation()
        let end = Date().timeIntervalSince(start)
        print(end)
    }

    @objc func trimVideo(_ sourceURL: NSURL,
                         destinationURL: NSURL,
                         duration: Int,
                         preClipDuration: Int,
                         resolver resolve: @escaping RCTPromiseResolveBlock,
                         rejecter reject: RCTPromiseRejectBlock) -> Void {
        do {
            try time {
                let asset = AVURLAsset(url: sourceURL as URL)
                let assetDuration: CMTime = asset.duration
                let convertedDuration: CMTime = CMTime(seconds: Double(duration), preferredTimescale: 1)
                let preClipConvertedDuration: CMTime = CMTime(seconds: Double(preClipDuration), preferredTimescale: 1)
                var trimStartTime: CMTime
                trimStartTime = CMTimeSubtract(assetDuration, convertedDuration)
                let comparison: Int32 = CMTimeCompare(trimStartTime, preClipConvertedDuration)
                if comparison == 1 {
                    trimStartTime = CMTimeSubtract(trimStartTime, preClipConvertedDuration)
                } else {
                    trimStartTime = kCMTimeZero
                }
                let trimmedAsset = try asset.assetByTrimming(trimStartTime: trimStartTime, assetDuration: assetDuration)
                try trimmedAsset.export(to: destinationURL as URL)
                resolve("Video trimmed successfully")
            }
        } catch let error {
            reject("💩 \(error)", nil, error)
        }
    }
}
which fails with the following error:
💩 TrimError(description: "TrimVideo: error during export", underlyingError: Optional(Error Domain=AVFoundationErrorDomain Code=-11822 "Cannot Open" UserInfo={NSLocalizedFailureReason=This media format is not supported., NSLocalizedDescription=Cannot Open, NSUnderlyingError=0x1c4250b00 {Error Domain=NSOSStatusErrorDomain Code=-16976 "(null)"}}))
I'm passing the following path:
assets-library://asset/asset.mov?id=E7A2D28D-0FBA-4B40-80CE-B05013D74F28&ext=mov
I also tried without the query string (assets-library://asset/asset.mov), but with no success.
The file exists, because I have it in the camera roll.
Unfortunately I don't know Swift well enough to investigate.
Any ideas how to make it read the source without complaining about the format?
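One hedged idea (an assumption on my part, not a tested fix): passthrough export is often reported to fail with -11822 when an assets-library:// URL is handed straight to AVURLAsset. A minimal sketch of resolving the camera-roll item through the Photos framework instead, assuming you can obtain the item's localIdentifier from your picker:

import Photos

func fetchVideoAsset(localIdentifier: String, completion: @escaping (AVAsset?) -> Void) {
    // Resolve the photo-library item rather than using the assets-library:// URL directly.
    let fetchResult = PHAsset.fetchAssets(withLocalIdentifiers: [localIdentifier], options: nil)
    guard let phAsset = fetchResult.firstObject else {
        completion(nil)
        return
    }
    let options = PHVideoRequestOptions()
    options.version = .original
    PHImageManager.default().requestAVAsset(forVideo: phAsset, options: options) { avAsset, _, _ in
        completion(avAsset) // pass this to assetByTrimming/export instead of AVURLAsset(url:)
    }
}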

Custom camera, video is not playing with audio in Swift

I am new to Swift and also to Stack Overflow. Thanks in advance for your attention.
Basically, I am trying to build a custom camera that records video with audio, so that the video plays back with sound. For the last few days I have been trying to build this custom camera. I followed a tutorial, but something is still missing: as far as I can tell, my custom camera only records video and no audio, and I don't understand why. I searched but couldn't find an appropriate answer for this.
Here is what I did:
import UIKit
import AVFoundation
import SVProgressHUD
import MediaPlayer
import MobileCoreServices
import AVKit

var videoUrl = [AnyObject]()

class TestViewController: UIViewController {

    @IBOutlet var viewVidioPlayer: UIView!
    @IBOutlet weak var myView: UIView!

    var session: AVCaptureSession?
    var userreponsevideoData = NSData()
    var userreponsethumbimageData = NSData()

    override func viewDidLoad() {
        super.viewDidLoad()
    }

    override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)
    }

    // here i create session
    func createSession() {
        var input: AVCaptureDeviceInput?
        let movieFileOutput = AVCaptureMovieFileOutput()
        var prevLayer: AVCaptureVideoPreviewLayer?
        prevLayer?.frame.size = myView.frame.size
        session = AVCaptureSession()
        let error: NSError? = nil
        do {
            input = try AVCaptureDeviceInput(device: self.cameraWithPosition(position: .front)!)
        } catch {
            return
        }
        if error == nil {
            session?.addInput(input)
        } else {
            print("camera input error: \(String(describing: error))")
        }
        prevLayer = AVCaptureVideoPreviewLayer(session: session)
        prevLayer?.frame.size = myView.frame.size
        prevLayer?.videoGravity = AVLayerVideoGravityResizeAspectFill
        prevLayer?.connection.videoOrientation = .portrait
        myView.layer.addSublayer(prevLayer!)
        let documentsURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
        let filemainurl = NSURL(string: ("\(documentsURL.appendingPathComponent("temp"))" + ".mp4"))
        let maxDuration: CMTime = CMTimeMake(600, 10)
        movieFileOutput.maxRecordedDuration = maxDuration
        movieFileOutput.minFreeDiskSpaceLimit = 1024 * 1024
        if self.session!.canAddOutput(movieFileOutput) {
            self.session!.addOutput(movieFileOutput)
        }
        session?.startRunning()
        movieFileOutput.startRecording(toOutputFileURL: filemainurl! as URL, recordingDelegate: self)
    }

    func cameraWithPosition(position: AVCaptureDevicePosition) -> AVCaptureDevice? {
        let devices = AVCaptureDevice.devices(withMediaType: AVMediaTypeVideo)
        for device in devices! {
            if (device as AnyObject).position == position {
                return device as? AVCaptureDevice
            }
        }
        return nil
    }

    @IBAction func pressbackbutton(sender: AnyObject) {
        session?.stopRunning()
    }

    @IBAction func Record(_ sender: Any) {
        createSession()
    }

    @IBAction func play(_ sender: Any) {
        self.videoPlay()
    }

    func videoPlay() {
        let documentsUrl = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first!
        do {
            // Get the directory contents urls (including subfolders urls)
            let directoryContents = try FileManager.default.contentsOfDirectory(at: documentsUrl, includingPropertiesForKeys: nil, options: [])
            print(directoryContents)
            // if you want to filter the directory contents you can do like this:
            videoUrl = directoryContents.filter { $0.pathExtension == "mp4" } as [AnyObject]
            print("mp4 urls:", videoUrl)
            let playerController = AVPlayerViewController()
            playerController.delegate = self as? AVPlayerViewControllerDelegate
            let movieURL = videoUrl[0]
            print(movieURL)
            let player = AVPlayer(url: movieURL as! URL)
            playerController.player = player
            self.addChildViewController(playerController)
            self.view.addSubview(playerController.view)
            playerController.view.frame = self.view.frame
            player.play()
            player.volume = 1.0
            player.rate = 1.0
        } catch let error as NSError {
            print(error.localizedDescription)
        }
    }
}

extension TestViewController: AVCaptureFileOutputRecordingDelegate {

    @available(iOS 4.0, *)
    private func captureOutput(captureOutput: AVCaptureFileOutput!, didStartRecordingToOutputFileAtURL fileURL: URL!, fromConnections connections: [AnyObject]!) {
        print(fileURL)
    }

    func capture(_ captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAt outputFileURL: URL!, fromConnections connections: [Any]!, error: Error!) {
        let filemainurl = outputFileURL
        do {
            let asset = AVURLAsset(url: filemainurl! as URL, options: nil)
            print(asset)
            let imgGenerator = AVAssetImageGenerator(asset: asset)
            imgGenerator.appliesPreferredTrackTransform = true
            let cgImage = try imgGenerator.copyCGImage(at: CMTimeMake(0, 1), actualTime: nil)
            let uiImage = UIImage(cgImage: cgImage)
            userreponsethumbimageData = try NSData(contentsOf: filemainurl! as URL)
            print(userreponsethumbimageData.length)
            print(uiImage)
            // imageData = UIImageJPEGRepresentation(uiImage, 0.1)
        } catch let error as NSError {
            print(error)
            return
        }
        SVProgressHUD.show(with: SVProgressHUDMaskType.clear)
        let VideoFilePath = NSURL(fileURLWithPath: NSTemporaryDirectory()).appendingPathComponent("mergeVideo\(arc4random() % 1000)d")!.appendingPathExtension("mp4").absoluteString
        if FileManager.default.fileExists(atPath: VideoFilePath) {
            do {
                try FileManager.default.removeItem(atPath: VideoFilePath)
            } catch { }
        }
        let tempfilemainurl = NSURL(string: VideoFilePath)!
        let sourceAsset = AVURLAsset(url: filemainurl! as URL, options: nil)
        let assetExport: AVAssetExportSession = AVAssetExportSession(asset: sourceAsset, presetName: AVAssetExportPresetMediumQuality)!
        assetExport.outputFileType = AVFileTypeQuickTimeMovie
        assetExport.outputURL = tempfilemainurl as URL
        assetExport.exportAsynchronously { () -> Void in
            switch assetExport.status {
            case AVAssetExportSessionStatus.completed:
                DispatchQueue.main.async(execute: {
                    do {
                        SVProgressHUD.dismiss()
                        self.userreponsevideoData = try NSData(contentsOf: tempfilemainurl as URL, options: NSData.ReadingOptions())
                        print("MB - \(self.userreponsevideoData.length) byte")
                    } catch {
                        SVProgressHUD.dismiss()
                        print(error)
                    }
                })
            case AVAssetExportSessionStatus.failed:
                print("failed \(assetExport.error)")
            case AVAssetExportSessionStatus.cancelled:
                print("cancelled \(assetExport.error)")
            default:
                print("complete")
                SVProgressHUD.dismiss()
            }
        }
    }
}
That's all I have done, and I don't understand what is missing from this code: why is audio not being recorded, or played back, with the video?
Use this CocoaPod for your project. It makes your job quite easy.
It has full instructions on what to do and also contains a demo project to verify it works as you intended.
SwiftyCam
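As a side note (my own observation, not something the answer above states): createSession() in the question only ever adds a camera input, so the movie file output has no audio track to record. A minimal sketch of also attaching the microphone before session?.startRunning(), in the same Swift 3-era style as the question:

// Add a microphone input so the movie file output also records audio.
if let audioDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeAudio),
    let audioInput = try? AVCaptureDeviceInput(device: audioDevice),
    session?.canAddInput(audioInput) == true {
    session?.addInput(audioInput)
}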

Append or concatenate audio files in Swift

Hi, I want to append voice files.
I'm recording voice with AVAudioRecorder, but to play the recording I need to call stop(), and after playing it I want to continue recording, like the native iOS Voice Memos app.
Should I use AVMutableCompositionTrack, and how do I do that in Swift? Thanks!
If you are looking to simply pause your recording and continue it later, you can use AVAudioRecorder's pause() function rather than stop(), and it will continue the recording when you call record() again.
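For example, a minimal sketch (assuming recorder is an already configured AVAudioRecorder):

recorder.record() // start recording
recorder.pause()  // pause; the file stays open
recorder.record() // resume appending to the same file
recorder.stop()   // finalize the file when completely done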
However, if you are looking to actually concatenate audio files, you can do it like this:
func concatenateFiles(audioFiles: [NSURL], completion: (concatenatedFile: NSURL?) -> ()) {
    guard audioFiles.count > 0 else {
        completion(concatenatedFile: nil)
        return
    }
    if audioFiles.count == 1 {
        completion(concatenatedFile: audioFiles.first)
        return
    }

    // Concatenate audio files into one file
    var nextClipStartTime = kCMTimeZero
    let composition = AVMutableComposition()
    let track = composition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)

    // Add each track
    for recording in audioFiles {
        let asset = AVURLAsset(URL: NSURL(fileURLWithPath: recording.path!), options: nil)
        if let assetTrack = asset.tracksWithMediaType(AVMediaTypeAudio).first {
            let timeRange = CMTimeRange(start: kCMTimeZero, duration: asset.duration)
            do {
                try track.insertTimeRange(timeRange, ofTrack: assetTrack, atTime: nextClipStartTime)
                nextClipStartTime = CMTimeAdd(nextClipStartTime, timeRange.duration)
            } catch {
                print("Error concatenating file - \(error)")
                completion(concatenatedFile: nil)
                return
            }
        }
    }

    // Export the new file
    if let exportSession = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetPassthrough) {
        let paths = NSSearchPathForDirectoriesInDomains(.DocumentDirectory, .UserDomainMask, true)
        let documents = NSURL(string: paths.first!)
        if let fileURL = documents?.URLByAppendingPathComponent("file_name.caf") {
            // Remove existing file
            do {
                try NSFileManager.defaultManager().removeItemAtPath(fileURL.path!)
                print("Removed \(fileURL)")
            } catch {
                print("Could not remove file - \(error)")
            }

            // Configure export session output
            exportSession.outputURL = NSURL.fileURLWithPath(fileURL.path!)
            exportSession.outputFileType = AVFileTypeCoreAudioFormat

            // Perform the export
            exportSession.exportAsynchronouslyWithCompletionHandler { () -> Void in
                if exportSession.status == .Completed {
                    print("Export complete")
                    dispatch_async(dispatch_get_main_queue(), {
                        completion(concatenatedFile: fileURL)
                    })
                    return
                } else if exportSession.status == .Failed {
                    print("Export failed - \(exportSession.error)")
                }
                completion(concatenatedFile: nil)
                return
            }
        }
    }
}
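Usage might look like this (a sketch in the same Swift 2-era style as the answer; the recording URLs are assumed to already exist on disk):

let recordings = [NSURL(fileURLWithPath: "/path/to/one.caf"), NSURL(fileURLWithPath: "/path/to/two.caf")]
concatenateFiles(recordings) { concatenatedFile in
    guard let fileURL = concatenatedFile else { return }
    print("Merged recording at \(fileURL)")
}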
