iOS - Cast video to Chromecast using PhotoKit

I want to cast a device-local video to Chromecast using the PhotoKit framework, but only the loading screen is displayed on the Chromecast and no video plays. If I replace avUrlAsset.url.absoluteString with an HTTP URL of a video, then it plays successfully.
Code:
let options = PHVideoRequestOptions()
options.isNetworkAccessAllowed = true
options.deliveryMode = .automatic

// Create the metadata
let metadata = GCKMediaMetadata(metadataType: .movie)
metadata.setString("Title", forKey: kGCKMetadataKeyTitle)
metadata.setString("Subtitle", forKey: kGCKMetadataKeySubtitle)

PHImageManager.default().requestAVAsset(forVideo: asset, options: options, resultHandler: { (avAsset, audioMix, info) in
    if let avUrlAsset = avAsset as? AVURLAsset {
        // Create the media information
        let mediaInfo = GCKMediaInformation(contentID: avUrlAsset.url.absoluteString,
                                            streamType: .buffered,
                                            contentType: "video/quicktime",
                                            metadata: metadata,
                                            streamDuration: 0,
                                            customData: nil)
        self._remotMediaClient?.loadMedia(mediaInfo, autoplay: true)
    }
})
Please suggest how I can play a local video on Chromecast. I have also tried copying the video to the Documents directory and passing the copied file's URL to the Chromecast, but that does not work either.

I solved this using a local HTTP server. The Chromecast cannot read a file:// URL from the phone's sandbox, so the video has to be served over HTTP from a location the Chromecast can reach.
HttpServerManager.swift
import UIKit

class HttpServerManager: NSObject {

    static let shared = HttpServerManager()

    private var httpServer: HTTPServer!

    override init() {
        super.init()
        // Create the server (HTTPServer here comes from an embedded
        // HTTP server library such as CocoaHTTPServer).
        httpServer = HTTPServer()
        // Tell the server to broadcast its presence via Bonjour.
        // This allows browsers such as Safari to automatically discover our service.
        httpServer.setType("_http._tcp.")
        // Normally there's no need to run the server on any specific port;
        // technologies like Bonjour let clients discover the port at runtime.
        // However, for easy testing you may want to force a certain port so
        // you can just hit the refresh button:
        // httpServer.setPort(12345)
        let documentsDirectory = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true).first
        httpServer.setDocumentRoot(documentsDirectory)
    }

    func startServer() {
        // Start the server (and check for problems)
        do {
            try httpServer?.start()
            DDLogWrapper.logInfo("Started HTTP Server on port \(httpServer?.listeningPort())")
        } catch {
            DDLogWrapper.logError("Error starting HTTP Server: \(error)")
        }
    }

    func stopServer() {
        httpServer.stop()
    }

    func getListeningPort() -> UInt16 {
        return httpServer.listeningPort()
    }

    func setDocumentRoot(path string: String) {
        httpServer.setDocumentRoot(string)
    }
}
Start the server in AppDelegate.swift:
class AppDelegate: UIResponder, UIApplicationDelegate, GCKLoggerDelegate {

    var window: UIWindow?
    var httpServer: HTTPServer?

    func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [UIApplicationLaunchOptionsKey: Any]?) -> Bool {
        // Override point for customization after application launch.
        GCKLogger.sharedInstance().delegate = self
        // Configure our logging framework.
        // To keep things simple and fast, we're just going to log to the Xcode console.
        LoggerFactory.initLogging()
        // Start the local HTTP server
        HttpServerManager.shared.startServer()
        return true
    }
}
Play the local video on the Chromecast using the method below:
func playToRemotePlayer(with asset: PHAsset, forViewController viewController: UIViewController) {

    // If the video is paused, resume it.
    if _remotMediaClient?.mediaStatus?.playerState == .paused {
        _remotMediaClient?.play()
        return
    }

    // Keep track of the most recent asset played on the Chromecast.
    if recentPlayedAsset == nil {
        recentPlayedAsset = asset
    } else {
        if recentPlayedAsset == asset {
            self._remotMediaClient?.loadMedia(self.recentMediaInfo!, autoplay: true)
            return
        } else {
            recentPlayedAsset = asset
        }
    }

    let options = PHVideoRequestOptions()
    options.isNetworkAccessAllowed = true
    options.deliveryMode = .highQualityFormat
    options.version = .original

    // Create the metadata
    let metadata = GCKMediaMetadata(metadataType: .movie)
    metadata.setString("your video title", forKey: kGCKMetadataKeyTitle)
    metadata.setString("your video subtitle", forKey: kGCKMetadataKeySubtitle)

    PHImageManager.default().requestAVAsset(forVideo: asset, options: options, resultHandler: { (avAsset, audioMix, info) in
        if (avAsset as? AVURLAsset) != nil {
            let startDate = NSDate()

            // Create the export session
            let exportSession = AVAssetExportSession(asset: avAsset!, presetName: AVAssetExportPresetHighestQuality)
            let filePathURL = documentDirectoryUrl.appendingPathComponent("rendered_video.mp4")
            let filePath = NSURL(string: (filePathURL?.absoluteString)!)
            CommanUtilites.deleteFile(filePath: filePath!)
            exportSession!.outputURL = filePath as URL?
            exportSession!.outputFileType = AVFileTypeMPEG4
            exportSession!.shouldOptimizeForNetworkUse = true
            let start = CMTimeMakeWithSeconds(0.0, 0)
            let range = CMTimeRangeMake(start, (avAsset?.duration)!)
            exportSession?.timeRange = range

            print("Exporting Media...")
            DispatchQueue.main.async {
                self.progressHUD = MBProgressHUD.showAdded(to: viewController.view, animated: true)
                self.progressHUD?.mode = MBProgressHUDMode.indeterminate
                self.progressHUD?.label.text = "Exporting video please wait..."
            }

            exportSession!.exportAsynchronously(completionHandler: { () -> Void in
                DispatchQueue.main.async {
                    self.progressHUD?.hide(animated: true)
                }
                switch exportSession!.status {
                case .failed:
                    print("Error : " + (exportSession?.error?.localizedDescription)!)
                case .cancelled:
                    print("Export canceled")
                case .completed:
                    // Video conversion finished
                    let endDate = NSDate()
                    let time = endDate.timeIntervalSince(startDate as Date)
                    print(time)
                    print("Export Successful!")
                    print(exportSession?.outputURL?.path ?? "")

                    let port = String(HttpServerManager.shared.getListeningPort())
                    // NOTE: the Chromecast must be able to reach this server over
                    // the network; if 127.0.0.1 does not work for you, try the
                    // device's Wi-Fi IP address here instead.
                    let videoHttpUrl = "http://127.0.0.1:" + port + "/rendered_video.mp4"

                    // Create the media information
                    self.recentMediaInfo = GCKMediaInformation(contentID: videoHttpUrl,
                                                               streamType: .buffered,
                                                               contentType: "video/mp4",
                                                               metadata: nil,
                                                               streamDuration: (avAsset?.duration.seconds)!,
                                                               customData: nil)
                    self._remotMediaClient?.loadMedia(self.recentMediaInfo!, autoplay: true)
                default:
                    break
                }
            })
        }
    })
}
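For reference, _remotMediaClient is never defined in the snippets above. Assuming the standard Google Cast SDK setup, it would typically be resolved from the current cast session, along these lines (a sketch, not from the original post):

import GoogleCast

// Sketch: resolve the remote media client from the active cast session.
// Returns nil when no cast session is connected.
var _remotMediaClient: GCKRemoteMediaClient? {
    return GCKCastContext.sharedInstance().sessionManager.currentCastSession?.remoteMediaClient
}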

Related

AVFoundation over Cellular Data

When I use the code below and pull my https video link from Firebase over Wi-Fi, everything is smooth; the video immediately plays with zero issues. When I use the same code over cellular, everything moves extremely slowly: the video pauses and takes forever to load.
If it plays from a local file, whether I'm on cellular or Wi-Fi shouldn't matter. What is the issue here?
DataModel:
class Video {
    var httpsStr: String?
    var videoURL: URL?

    convenience init(dict: [String: Any]) {
        self.init()
        if let httpsStr = dict["httpsStr"] as? String {
            self.httpsStr = httpsStr
            let url = URL(string: httpsStr)!
            let assetKeys = ["playable", "duration"]
            let asset = AVURLAsset(url: url)
            asset.loadValuesAsynchronously(forKeys: assetKeys, completionHandler: {
                DispatchQueue.main.async {
                    self.videoURL = asset.url
                    // save videoURL to FileManager to play video from disk
                }
            })
        }
    }
}
Firebase Pull:
ref.observeSingleEvent(of: .value) { (snapshot) in
    guard let dict = snapshot.value as? [String: Any] else { return }
    let video = Video(dict: dict)
    self.video = video
    DispatchQueue.main.asyncAfter(deadline: .now() + 2, execute: {
        self.playVideo()
    })
}
Play Video:
func playVideo() {
    // init AVPlayer ...
    guard let videoURL = self.video.videoURL else { return }
    let lastPathComponent = videoURL.lastPathComponent
    let file = FileManager...appendingPathComponent(lastPathComponent)
    if FileManager.default.fileExists(atPath: file.path) {
        let asset = AVAsset(url: file)
        play(asset)
    } else {
        let asset = AVAsset(url: videoURL)
        play(asset)
    }
}

func play(_ asset: AVAsset) {
    self.playerItem = AVPlayerItem(asset: asset)
    self.player?.automaticallyWaitsToMinimizeStalling = false // I also set this to true
    self.playerItem?.preferredForwardBufferDuration = TimeInterval(1)
    self.player?.replaceCurrentItem(with: playerItem!)
    // play video
}
I followed this answer and now everything seems to work smoothly while on Cellular Data. I needed to include the tracks property in the assetKeys.
You create an asset from a URL using AVURLAsset. Creating the asset,
however, does not necessarily mean that it’s ready for use. To be
used, an asset must have loaded its tracks.
class Video {
    var httpsStr: String?
    var videoURL: URL?

    convenience init(dict: [String: Any]) {
        self.init()
        if let httpsStr = dict["httpsStr"] as? String {
            self.httpsStr = httpsStr
            let url = URL(string: httpsStr)!
            let assetKeys = ["playable", "duration", "tracks"] // <----- "tracks" added here
            let asset = AVURLAsset(url: url)
            asset.loadValuesAsynchronously(forKeys: assetKeys, completionHandler: {
                var error: NSError? = nil
                let status = asset.statusOfValue(forKey: "tracks", error: &error)
                switch status {
                case .loaded:
                    // Successfully loaded, continue processing
                    DispatchQueue.main.async {
                        self.videoURL = asset.url
                        // save videoURL to FileManager to play video from disk
                    }
                case .failed:
                    // Examine the NSError pointer to determine the failure
                    print("Error", error?.localizedDescription as Any)
                default:
                    // Handle all other cases
                    print("default")
                }
            })
        }
    }
}
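For reference, on iOS 15 and later the same track-loading guarantee can be expressed with the async load API. A minimal sketch (an addition, not part of the original answer):

import AVFoundation

// Sketch: load the "tracks" and "duration" properties before use,
// which is what adding "tracks" to assetKeys achieves above.
func prepareAsset(from url: URL) async throws -> AVURLAsset {
    let asset = AVURLAsset(url: url)
    _ = try await asset.load(.tracks, .duration)
    return asset
}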

iOS App is crashing when selecting multiple videos from photo library - memory warning

My app crashes with a memory warning when I select multiple videos (1+ minute each) from the photo library. I have tried compressing the videos, but the issue remains even though the compression code works properly. I want my app to select 5 videos at once and send them in chat. WhatsApp allows the user to select 30 videos and send them in chat, but my app crashes from memory pressure after only 3 videos.
I am using the "AssetsPickerViewController" library for multiple picture/video selection.
func assetsPicker(controller: AssetsPickerViewController, selected assets: [PHAsset]) {
    self.dismiss(animated: true, completion: nil)
    var isImages = false
    var mediaData: [Data] = []
    let imageManager = PHCachingImageManager.default()

    DispatchQueue.main.async {
        self.appDelegate.helper.showHUD(withMessage: "Preparing media", withObject: self)
    }

    autoreleasepool {
        for selectedAsset in assets {
            if selectedAsset.mediaType == .image {
                isImages = true
                let option = PHImageRequestOptions()
                option.isSynchronous = true
                option.isNetworkAccessAllowed = true
                imageManager.requestImageData(for: selectedAsset, options: option) { (assetData, assetDataUTI, assetOrientation, assetInfo) in
                    if let data = assetData {
                        if let image = UIImage(data: data)?.upOrientation(), let finalData = image.jpegData(compressionQuality: 0.5) {
                            if let newData = ImageHelper.removeExifData(data: finalData as NSData) {
                                mediaData.append(newData as Data)
                                self.checkFileStatus(mediaCount: mediaData.count, assetsCount: assets.count, data: mediaData, isImages: isImages)
                            } else {
                                mediaData.append(finalData)
                                self.checkFileStatus(mediaCount: mediaData.count, assetsCount: assets.count, data: mediaData, isImages: isImages)
                            }
                        } else {
                            mediaData.append(data)
                            self.checkFileStatus(mediaCount: mediaData.count, assetsCount: assets.count, data: mediaData, isImages: isImages)
                        }
                    }
                }
            } else if selectedAsset.mediaType == .video {
                let options: PHVideoRequestOptions = PHVideoRequestOptions()
                options.isNetworkAccessAllowed = true
                options.deliveryMode = .fastFormat
                self.convertVideo(phAsset: selectedAsset) { (data) in
                    if let finalData = data {
                        mediaData.append(finalData)
                        self.checkFileStatus(mediaCount: mediaData.count, assetsCount: assets.count, data: mediaData, isImages: isImages)
                    } else {
                    }
                }
            }
        }
    }
}
func compressVideo(inputURL: URL, outputURL: URL, handler: @escaping (_ exportSession: AVAssetExportSession?) -> Void) {
    let urlAsset = AVURLAsset(url: inputURL, options: nil)
    guard let exportSession = AVAssetExportSession(asset: urlAsset, presetName: AVAssetExportPresetMediumQuality) else {
        handler(nil)
        return
    }
    exportSession.outputURL = outputURL
    exportSession.outputFileType = AVFileType.mp4
    exportSession.shouldOptimizeForNetworkUse = true
    exportSession.exportAsynchronously { () -> Void in
        handler(exportSession)
    }
}
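The convertVideo(phAsset:) helper called in the picker delegate is not shown in the question. A plausible sketch (the name and flow are assumptions) that requests the underlying AVURLAsset and pipes it through the compressVideo function above; note that passing the output file URL around instead of Data keeps the compressed bytes on disk, which is what avoids the memory warnings described:

func convertVideo(phAsset: PHAsset, completion: @escaping (Data?) -> Void) {
    let options = PHVideoRequestOptions()
    options.isNetworkAccessAllowed = true
    options.deliveryMode = .fastFormat
    PHImageManager.default().requestAVAsset(forVideo: phAsset, options: options) { avAsset, _, _ in
        guard let urlAsset = avAsset as? AVURLAsset else {
            completion(nil)
            return
        }
        let outputURL = URL(fileURLWithPath: NSTemporaryDirectory())
            .appendingPathComponent(UUID().uuidString)
            .appendingPathExtension("mp4")
        self.compressVideo(inputURL: urlAsset.url, outputURL: outputURL) { exportSession in
            guard exportSession?.status == .completed else {
                completion(nil)
                return
            }
            // Loading every compressed file into a Data array is what exhausts
            // memory; uploading from outputURL on disk is the safer pattern.
            completion(try? Data(contentsOf: outputURL))
        }
    }
}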

iOS download and play mp3 file

I am looking for a way to download an mp3 file and play it simultaneously.
I can download it, save it to local storage, and play it afterwards.
But how can I start downloading and playing at the same time, then save the file to local storage once the download completes? Which tools should I use for this?
Currently I use TCBlobDownload to download the file, then save it and play it with AVAudioPlayer.
You can use AVAssetResourceLoader to play an audio file as soon as there is enough data, while continuing to download.
Configure the delegate of the resource loader:
var playerAsset: AVAsset!
if fileURL.pathExtension.count == 0 {
    var components = URLComponents(url: fileURL, resolvingAgainstBaseURL: false)!
    components.scheme = "fake" // make a custom URL scheme
    components.path += ".mp3"
    playerAsset = AVURLAsset(url: components.url!)
    (playerAsset as! AVURLAsset).resourceLoader.setDelegate(self, queue: DispatchQueue.global())
} else {
    playerAsset = AVAsset(url: fileURL)
}
let playerItem = AVPlayerItem(asset: playerAsset)
Then read the audio data and respond to the resource loader:
// MARK: - AVAssetResourceLoaderDelegate methods

func resourceLoader(_ resourceLoader: AVAssetResourceLoader, shouldWaitForLoadingOfRequestedResource loadingRequest: AVAssetResourceLoadingRequest) -> Bool {
    if let url = loadingRequest.request.url {
        var components = URLComponents(url: url, resolvingAgainstBaseURL: false)!
        components.scheme = NSURLFileScheme // replace with the real URL scheme
        components.path = String(components.path.dropLast(4))
        if let attributes = try? FileManager.default.attributesOfItem(atPath: components.url!.path),
            let fileSize = attributes[FileAttributeKey.size] as? Int64 {
            loadingRequest.contentInformationRequest?.isByteRangeAccessSupported = true
            loadingRequest.contentInformationRequest?.contentType = "audio/mpeg3"
            loadingRequest.contentInformationRequest?.contentLength = fileSize
            let requestedOffset = loadingRequest.dataRequest!.requestedOffset
            let requestedLength = loadingRequest.dataRequest!.requestedLength
            if let handle = try? FileHandle(forReadingFrom: components.url!) {
                handle.seek(toFileOffset: UInt64(requestedOffset))
                let data = handle.readData(ofLength: requestedLength)
                loadingRequest.dataRequest?.respond(with: data)
                loadingRequest.finishLoading()
                return true
            } else {
                return false
            }
        } else {
            return false
        }
    } else {
        return false
    }
}
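Once the player item exists, playback is the usual AVPlayer flow; a minimal usage sketch:

let player = AVPlayer(playerItem: playerItem)
player.play()
// The resource loader delegate above is asked for byte ranges as the
// player needs them, so playback can begin before the download finishes.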
And if you want to do this with Objective-C, refer to this answer.

iOS sending audio file to web server

I am currently building an iPhone app with Swift and I want to send audio files from my app to my web server. I am currently using MPMediaPickerController, which allows me to select an audio file within my app, but once I select the file all I get is a URL like:
ipod-library://item/item.mp3?id=12341234
and I am not able to send the file to my web server. I need to send the audio file to my web server as NSData. Can anyone shed some light on:
1) what I may be doing wrong, or
2) another way to send the audio files?
import AssetsLibrary
import AVFoundation
import MediaPlayer

var soundFileURL: URL!
var audio_data: Data? = nil
func mediaPicker(_ mediaPicker: MPMediaPickerController, didPickMediaItems mediaItemCollection: MPMediaItemCollection) {
    let item = mediaItemCollection.items[0] as? MPMediaItem ?? MPMediaItem()
    let url: URL? = item.value(forProperty: MPMediaItemPropertyAssetURL) as? URL
    exportiTunesSong(assetURL: url!) { (response) in
        print(response ?? "response")
    }
    let songTitle: String = item.value(forProperty: MPMediaItemPropertyTitle) as! String
    lbl_for_file_name.text = songTitle
    self.dismiss(animated: true, completion: nil)
}

func mediapicker() {
    let mediaPicker = MPMediaPickerController(mediaTypes: .music)
    mediaPicker.delegate = self
    present(mediaPicker, animated: true, completion: {})
}

func mediaPickerDidCancel(_ mediaPicker: MPMediaPickerController) {
    print("User selected Cancel")
    self.dismiss(animated: true, completion: nil)
    mediaPicker.dismiss(animated: true) { _ in }
}

func exportiTunesSong(assetURL: URL, completionHandler: @escaping (_ fileURL: URL?) -> ()) {
    let songAsset = AVURLAsset(url: assetURL, options: nil)
    let exporter = AVAssetExportSession(asset: songAsset, presetName: AVAssetExportPresetAppleM4A)
    exporter?.outputFileType = "com.apple.m4a-audio"
    exporter?.metadata = songAsset.commonMetadata

    let filename = AVMetadataItem.metadataItems(from: songAsset.commonMetadata, withKey: AVMetadataCommonKeyTitle, keySpace: AVMetadataKeySpaceCommon)
    var songName = "Unknown"
    if filename.count > 0 {
        songName = ((filename[0] as AVMetadataItem).value?.copy(with: nil) as? String)!
    }

    // Export the media item to the temp directory
    exportURL = URL(fileURLWithPath: NSTemporaryDirectory())
        .appendingPathComponent(songName)
        .appendingPathExtension("m4a")
    exporter?.outputURL = exportURL

    // NOTE: the export is asynchronous; the file does not exist until the
    // session reports .completed, so read the data in the completion block.
    exporter?.exportAsynchronously {
        if exporter?.status == .completed {
            do {
                self.audio_data = try Data(contentsOf: exportURL!)
                print("here audio data is \(self.audio_data!)")
                completionHandler(exportURL)
            } catch {
                print(error)
                completionHandler(nil)
            }
        } else {
            completionHandler(nil)
        }
    }
}
P.S. Use audio_data to send/upload to the server side, e.g. with Alamofire.
I think this answer will help you:
Sending audio from a Swift App to PHP Server, and somewhere the audio is lost
You should pay particular attention to this section:
let boundary = "--------14737809831466499882746641449----"
let beginningBoundary = "--\(boundary)"
let endingBoundary = "--\(boundary)--"
let contentType = "multipart/form-data;boundary=\(boundary)"
These headers are just as important for audio file uploads.
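For reference, here is a minimal sketch of assembling a multipart body around such a boundary with URLSession (the endpoint, field name, file name, and audioData variable are placeholders, not from the original answer):

var request = URLRequest(url: URL(string: "https://example.com/upload")!)
request.httpMethod = "POST"
request.setValue(contentType, forHTTPHeaderField: "Content-Type")

var body = Data()
body.append("\(beginningBoundary)\r\n".data(using: .utf8)!)
body.append("Content-Disposition: form-data; name=\"file\"; filename=\"song.m4a\"\r\n".data(using: .utf8)!)
body.append("Content-Type: audio/m4a\r\n\r\n".data(using: .utf8)!)
body.append(audioData) // the Data read from the exported file
body.append("\r\n\(endingBoundary)\r\n".data(using: .utf8)!)

URLSession.shared.uploadTask(with: request, from: body) { _, response, error in
    // handle the server response here
}.resume()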

iOS video compression in Swift (iOS 8): corrupt video file

I am trying to compress video taken with the user's camera from UIImagePickerController (not an existing video, but one taken on the fly) to upload to my server, in a small amount of time, so a smaller size is ideal instead of the 30-45 MB that newer-quality cameras produce.
Here is the code to do the compression in Swift for iOS 8, and it compresses wonderfully: I go from 35 MB down to 2.1 MB easily.
func convertVideo(inputUrl: NSURL, outputURL: NSURL)
{
    // setup video writer
    var videoAsset = AVURLAsset(URL: inputUrl, options: nil) as AVAsset
    var videoTrack = videoAsset.tracksWithMediaType(AVMediaTypeVideo)[0] as AVAssetTrack
    var videoSize = videoTrack.naturalSize
    var videoWriterCompressionSettings = Dictionary(dictionaryLiteral: (AVVideoAverageBitRateKey, NSNumber(integer: 960000)))
    var videoWriterSettings = Dictionary(dictionaryLiteral: (AVVideoCodecKey, AVVideoCodecH264),
                                         (AVVideoCompressionPropertiesKey, videoWriterCompressionSettings),
                                         (AVVideoWidthKey, videoSize.width),
                                         (AVVideoHeightKey, videoSize.height))
    var videoWriterInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: videoWriterSettings)
    videoWriterInput.expectsMediaDataInRealTime = true
    videoWriterInput.transform = videoTrack.preferredTransform
    var videoWriter = AVAssetWriter(URL: outputURL, fileType: AVFileTypeQuickTimeMovie, error: nil)
    videoWriter.addInput(videoWriterInput)
    var videoReaderSettings: [String: AnyObject] = [kCVPixelBufferPixelFormatTypeKey: kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange]
    var videoReaderOutput = AVAssetReaderTrackOutput(track: videoTrack, outputSettings: videoReaderSettings)
    var videoReader = AVAssetReader(asset: videoAsset, error: nil)
    videoReader.addOutput(videoReaderOutput)

    // setup audio writer
    var audioWriterInput = AVAssetWriterInput(mediaType: AVMediaTypeAudio, outputSettings: nil)
    audioWriterInput.expectsMediaDataInRealTime = false
    videoWriter.addInput(audioWriterInput)

    // setup audio reader
    var audioTrack = videoAsset.tracksWithMediaType(AVMediaTypeAudio)[0] as AVAssetTrack
    var audioReaderOutput = AVAssetReaderTrackOutput(track: audioTrack, outputSettings: nil) as AVAssetReaderOutput
    var audioReader = AVAssetReader(asset: videoAsset, error: nil)
    audioReader.addOutput(audioReaderOutput)
    videoWriter.startWriting()

    // start writing from the video reader
    videoReader.startReading()
    videoWriter.startSessionAtSourceTime(kCMTimeZero)
    var queue = dispatch_queue_create("processingQueue", nil)
    videoWriterInput.requestMediaDataWhenReadyOnQueue(queue, usingBlock: { () -> Void in
        println("Export starting")
        while videoWriterInput.readyForMoreMediaData
        {
            var sampleBuffer: CMSampleBufferRef!
            sampleBuffer = videoReaderOutput.copyNextSampleBuffer()
            if (videoReader.status == AVAssetReaderStatus.Reading && sampleBuffer != nil)
            {
                videoWriterInput.appendSampleBuffer(sampleBuffer)
            }
            else
            {
                videoWriterInput.markAsFinished()
                if videoReader.status == AVAssetReaderStatus.Completed
                {
                    if audioReader.status == AVAssetReaderStatus.Reading || audioReader.status == AVAssetReaderStatus.Completed
                    {
                    }
                    else
                    {
                        audioReader.startReading()
                        videoWriter.startSessionAtSourceTime(kCMTimeZero)
                        var queue2 = dispatch_queue_create("processingQueue2", nil)
                        audioWriterInput.requestMediaDataWhenReadyOnQueue(queue2, usingBlock: { () -> Void in
                            while audioWriterInput.readyForMoreMediaData
                            {
                                var sampleBuffer: CMSampleBufferRef!
                                sampleBuffer = audioReaderOutput.copyNextSampleBuffer()
                                println(sampleBuffer == nil)
                                if (audioReader.status == AVAssetReaderStatus.Reading && sampleBuffer != nil)
                                {
                                    audioWriterInput.appendSampleBuffer(sampleBuffer)
                                }
                                else
                                {
                                    audioWriterInput.markAsFinished()
                                    if (audioReader.status == AVAssetReaderStatus.Completed)
                                    {
                                        videoWriter.finishWritingWithCompletionHandler({ () -> Void in
                                            println("Finished writing video asset.")
                                            self.videoUrl = outputURL
                                            var data = NSData(contentsOfURL: outputURL)!
                                            println("Byte Size After Compression: \(data.length / 1048576) mb")
                                            println(videoAsset.playable)
                                            //Networking().uploadVideo(data, fileName: "Test2")
                                            self.dismissViewControllerAnimated(true, completion: nil)
                                        })
                                        break
                                    }
                                }
                            }
                        })
                        break
                    }
                }
            }
        } // first while
    }) // first block
}
Here is the code for my UIImagePickerController that calls the compress method:
func imagePickerController(picker: UIImagePickerController, didFinishPickingMediaWithInfo info: [NSObject: AnyObject])
{
    // Extract the media type from the selection
    let type = info[UIImagePickerControllerMediaType] as String
    if (type == kUTTypeMovie)
    {
        self.videoUrl = info[UIImagePickerControllerMediaURL] as? NSURL
        var uploadUrl = NSURL.fileURLWithPath(NSTemporaryDirectory().stringByAppendingPathComponent("captured").stringByAppendingString(".mov"))
        var data = NSData(contentsOfURL: self.videoUrl!)!
        println("Size Before Compression: \(data.length / 1048576) mb")
        self.convertVideo(self.videoUrl!, outputURL: uploadUrl!)
        // Get the video from the info and set it appropriately.
        /*self.dismissViewControllerAnimated(true, completion: { () -> Void in
            //self.next.enabled = true
        })*/
    }
}
As I mentioned above, this works as far as file-size reduction goes, but when I get the file back (it is still of type .mov) QuickTime cannot play it. QuickTime does try to convert it initially but fails halfway through (1-2 seconds after opening the file). I've even tested the video file in AVPlayerViewController, but it doesn't give any info about the movie: it's just a play button without any loading and without any length, just "--" where the time usually is in the player. I.e., a corrupt file that won't play.
I'm sure it has something to do with the settings for writing the asset out, whether it is the video writing or the audio writing I'm not sure at all. It could even be the reading of the asset that is causing it to be corrupt. I've tried changing the variables around and setting different keys for reading and writing, but I haven't found the right combination, and it sucks that I can compress but get a corrupt file out of it. I'm not sure at all and any help would be appreciated. Pleeeeeeeeease.
This answer has been completely rewritten and annotated to support Swift 4.0. Keep in mind that changing the AVFileType and presetName values allows you to tweak the final output in terms of size and quality.
import AVFoundation

extension ViewController: AVCaptureFileOutputRecordingDelegate {
    // Delegate function has been updated
    func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
        // This code just exists for getting the before size. You can remove it from production code
        do {
            let data = try Data(contentsOf: outputFileURL)
            print("File size before compression: \(Double(data.count / 1048576)) mb")
        } catch {
            print("Error: \(error)")
        }

        // This line creates a generic filename based on UUID, but you may want to use your own
        // The extension must match the AVFileType chosen below
        let path = NSTemporaryDirectory() + UUID().uuidString + ".mov"
        let outputURL = URL(fileURLWithPath: path)
        // The asset to compress is the file that was just recorded
        let urlAsset = AVURLAsset(url: outputFileURL)
        // You can change the presetName value to obtain different results
        if let exportSession = AVAssetExportSession(asset: urlAsset,
                                                    presetName: AVAssetExportPresetMediumQuality) {
            exportSession.outputURL = outputURL
            // Changing the AVFileType enum gives you different options with
            // varying size and quality. Just ensure that the file extension
            // aligns with your choice
            exportSession.outputFileType = AVFileType.mov
            exportSession.exportAsynchronously {
                switch exportSession.status {
                case .unknown: break
                case .waiting: break
                case .exporting: break
                case .completed:
                    // This code only exists to provide the file size after compression. Remove it from production code
                    do {
                        let data = try Data(contentsOf: outputURL)
                        print("File size after compression: \(Double(data.count / 1048576)) mb")
                    } catch {
                        print("Error: \(error)")
                    }
                case .failed: break
                case .cancelled: break
                }
            }
        }
    }
}
Below is the original answer as written for Swift 3.0:
extension ViewController: AVCaptureFileOutputRecordingDelegate {
    func capture(_ captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAt outputFileURL: URL!, fromConnections connections: [Any]!, error: Error!) {
        guard let data = NSData(contentsOf: outputFileURL as URL) else {
            return
        }
        print("File size before compression: \(Double(data.length / 1048576)) mb")
        let compressedURL = NSURL.fileURL(withPath: NSTemporaryDirectory() + NSUUID().uuidString + ".m4v")
        compressVideo(inputURL: outputFileURL as URL, outputURL: compressedURL) { (exportSession) in
            guard let session = exportSession else {
                return
            }
            switch session.status {
            case .unknown:
                break
            case .waiting:
                break
            case .exporting:
                break
            case .completed:
                guard let compressedData = NSData(contentsOf: compressedURL) else {
                    return
                }
                print("File size after compression: \(Double(compressedData.length / 1048576)) mb")
            case .failed:
                break
            case .cancelled:
                break
            }
        }
    }

    func compressVideo(inputURL: URL, outputURL: URL, handler: @escaping (_ exportSession: AVAssetExportSession?) -> Void) {
        let urlAsset = AVURLAsset(url: inputURL, options: nil)
        guard let exportSession = AVAssetExportSession(asset: urlAsset, presetName: AVAssetExportPresetMediumQuality) else {
            handler(nil)
            return
        }
        exportSession.outputURL = outputURL
        exportSession.outputFileType = AVFileTypeQuickTimeMovie
        exportSession.shouldOptimizeForNetworkUse = true
        exportSession.exportAsynchronously { () -> Void in
            handler(exportSession)
        }
    }
}
Figured it out!
OK, so there were two problems. The first was with the videoWriter.finishWritingWithCompletionHandler function call: when this completion block gets executed it does NOT mean that the video writer has finished writing to the output URL. So I had to check that the status was completed before I uploaded the actual video file. It's kind of a hack, but this is what I did:
videoWriter.finishWritingWithCompletionHandler({ () -> Void in
    while true
    {
        if videoWriter.status == .Completed
        {
            var data = NSData(contentsOfURL: outputURL)!
            println("Finished: Byte Size After Compression: \(data.length / 1048576) mb")
            Networking().uploadVideo(data, fileName: "Video")
            self.dismissViewControllerAnimated(true, completion: nil)
            break
        }
    }
})
The second problem I was having was a Failed status, and that was because I kept writing to the same temp directory, as shown in the code for the UIImagePickerController didFinishPickingMediaWithInfo method in my question. So I just used the current date as a directory name so it would be unique:
var uploadUrl = NSURL.fileURLWithPath(NSTemporaryDirectory().stringByAppendingPathComponent("\(NSDate())").stringByAppendingString(".mov"))
[EDIT]: BETTER SOLUTION
OK, so after a lot of experimenting and months later, I've found a damn good and much simpler solution for getting a video down from 45 MB to 1.42 MB with pretty good quality.
Below is the function to call instead of the original convertVideo function. Note that I had to write my own completion handler parameter, which is called after the asynchronous export has finished; I just called it handler.
func compressVideo(inputURL: NSURL, outputURL: NSURL, handler: (session: AVAssetExportSession) -> Void)
{
    var urlAsset = AVURLAsset(URL: inputURL, options: nil)
    var exportSession = AVAssetExportSession(asset: urlAsset, presetName: AVAssetExportPresetMediumQuality)
    exportSession.outputURL = outputURL
    exportSession.outputFileType = AVFileTypeQuickTimeMovie
    exportSession.shouldOptimizeForNetworkUse = true
    exportSession.exportAsynchronouslyWithCompletionHandler { () -> Void in
        handler(session: exportSession)
    }
}
And here is the code in the imagePickerController didFinishPickingMediaWithInfo function:
self.compressVideo(inputURL!, outputURL: uploadUrl!, handler: { (handler) -> Void in
    if handler.status == AVAssetExportSessionStatus.Completed
    {
        var data = NSData(contentsOfURL: uploadUrl!)
        println("File size after compression: \(Double(data!.length / 1048576)) mb")
        self.picker.dismissViewControllerAnimated(true, completion: nil)
    }
    else if handler.status == AVAssetExportSessionStatus.Failed
    {
        let alert = UIAlertView(title: "Uh oh", message: "There was a problem compressing the video, maybe you can try again later. Error: \(handler.error.localizedDescription)", delegate: nil, cancelButtonTitle: "Okay")
        alert.show()
    }
})
Your conversion method is asynchronous, yet doesn't have a completion block. So how can your code know when the file is ready? Maybe you're using the file before it has been completely written.
The conversion itself also looks strange: audio and video are usually written in parallel, not in series.
Your miraculous compression ratio might indicate that you've written out fewer frames than you actually think.
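To make the reviewer's "parallel, not in series" point concrete, here is a condensed sketch of the usual pattern (assuming the readers, writer, and inputs are already configured and both readers have had startReading() called, as in the question's setup):

let group = DispatchGroup()

group.enter()
videoWriterInput.requestMediaDataWhenReady(on: DispatchQueue(label: "videoQueue")) {
    while videoWriterInput.isReadyForMoreMediaData {
        if let buffer = videoReaderOutput.copyNextSampleBuffer() {
            videoWriterInput.append(buffer)
        } else {
            videoWriterInput.markAsFinished()
            group.leave()
            break
        }
    }
}

group.enter()
audioWriterInput.requestMediaDataWhenReady(on: DispatchQueue(label: "audioQueue")) {
    while audioWriterInput.isReadyForMoreMediaData {
        if let buffer = audioReaderOutput.copyNextSampleBuffer() {
            audioWriterInput.append(buffer)
        } else {
            audioWriterInput.markAsFinished()
            group.leave()
            break
        }
    }
}

// Finish the file only after both tracks have been fully drained.
group.notify(queue: .main) {
    videoWriter.finishWriting {
        print("Finished writing video asset.")
    }
}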
