I am currently building an iPhone app with Swift and I want to send audio files from my app to my web server. I am currently using MPMediaPickerController, which allows me to select an audio file within my app, but once I select the file, all it gives me is a URL like:
ipod-library://item/item.mp3?id=12341234
and I am not able to send the file to my web server. I need to send the audio file to my web server in NSData format. Can anyone shed some light on:
1) What I may be doing wrong or,
2) another way to send the audio files?
// MARK: - Frameworks and shared state for picking/uploading audio
// Fix vs. the original paste: stray trailing commas and a dangling "**"
// made these declarations invalid Swift.
import AssetsLibrary
import AVFoundation
import MediaPlayer

/// Destination of the exported audio file on disk (set during export).
var soundFileURL: URL!
/// Raw bytes of the exported audio, ready to upload (e.g. via Alamofire).
var audio_data: Data? = nil
/// MPMediaPickerController delegate callback: the user picked one or more items.
/// Exports the first picked item to a local file ("ipod-library://..." URLs
/// cannot be read or uploaded directly) and shows its title in the label.
///
/// Fixes vs. the original: no force unwraps — an empty selection, a missing
/// asset URL (DRM / not downloaded), or a missing title no longer crashes.
func mediaPicker(_ mediaPicker: MPMediaPickerController, didPickMediaItems mediaItemCollection: MPMediaItemCollection)
{
    // items is already [MPMediaItem]; guard against an empty selection.
    guard let item = mediaItemCollection.items.first else {
        self.dismiss(animated: true, completion: nil)
        return
    }
    // MPMediaItemPropertyAssetURL is nil for DRM-protected or cloud-only items.
    guard let url = item.value(forProperty: MPMediaItemPropertyAssetURL) as? URL else {
        print("Selected item has no readable asset URL (DRM or not downloaded)")
        self.dismiss(animated: true, completion: nil)
        return
    }
    exportiTunesSong(assetURL: url)
    {
        (response) in
        print(response ?? "responce")
    }
    // Title may be absent; fall back instead of force-casting.
    let songTitle = item.value(forProperty: MPMediaItemPropertyTitle) as? String ?? "Unknown"
    lbl_for_file_name.text = songTitle
    self.dismiss(animated: true, completion: nil)
}
/// Builds a music-library picker, makes this controller its delegate,
/// and presents it modally.
func mediapicker()
{
    let picker = MPMediaPickerController(mediaTypes: .music)
    picker.delegate = self
    present(picker, animated: true, completion: {})
}
/// Delegate callback for the picker's Cancel button.
/// Fix vs. the original: it dismissed twice (`self.dismiss` followed by
/// `mediaPicker.dismiss`), which can pop an unrelated presented controller;
/// dismissing the picker once is sufficient.
func mediaPickerDidCancel(_ mediaPicker: MPMediaPickerController)
{
    print("User selected Cancel tell me what to do")
    mediaPicker.dismiss(animated: true, completion: nil)
}
/// Exports an iPod-library item to a playable .m4a file in the temp directory
/// and loads its bytes into `audio_data` for upload.
///
/// Fixes vs. the original, which could never work:
/// - `#escaping` → `@escaping` (did not compile)
/// - the export was configured but `exportAsynchronously` was never called
/// - the output file was read *before* any export could have produced it
/// - the completion handler was declared but never invoked
/// - force unwraps replaced with guards
///
/// - Parameters:
///   - assetURL: the "ipod-library://..." URL from MPMediaItemPropertyAssetURL.
///   - completionHandler: called when the export finishes, with the exported
///     file URL on success or nil on failure. Escaping: export is asynchronous.
func exportiTunesSong(assetURL: URL, completionHandler: @escaping (_ fileURL: URL?) -> ()) {
    let songAsset = AVURLAsset(url: assetURL, options: nil)
    guard let exporter = AVAssetExportSession(asset: songAsset, presetName: AVAssetExportPresetAppleM4A) else {
        completionHandler(nil)
        return
    }
    exporter.outputFileType = "com.apple.m4a-audio"
    exporter.metadata = songAsset.commonMetadata
    // Derive a file name from the title metadata when available.
    let titleItems = AVMetadataItem.metadataItems(from: songAsset.commonMetadata, withKey: AVMetadataCommonKeyTitle, keySpace: AVMetadataKeySpaceCommon)
    var songName = "Unknown"
    if let title = titleItems.first?.value?.copy(with: nil) as? String {
        songName = title
    }
    // Export mediaItem to temp directory.
    let destinationURL = URL(fileURLWithPath: NSTemporaryDirectory())
        .appendingPathComponent(songName)
        .appendingPathExtension("m4a")
    exportURL = destinationURL // keep the outer property in sync, as before
    // A leftover file from a previous run makes the export fail.
    try? FileManager.default.removeItem(at: destinationURL)
    exporter.outputURL = destinationURL
    exporter.exportAsynchronously { [weak self] in
        guard exporter.status == .completed else {
            print(exporter.error ?? "export failed")
            completionHandler(nil)
            return
        }
        do {
            self?.audio_data = try Data(contentsOf: destinationURL)
            completionHandler(destinationURL)
        } catch {
            print(error)
            completionHandler(nil)
        }
    }
}
P.S. Use audio_data to send or upload the file to the server side using Alamofire.
I think this answer will help you:
Sending audio from a Swift App to PHP Server, and somewhere the audio is lost
Especially should pay attention to this section:
// Multipart/form-data building blocks. The boundary is an arbitrary token
// that must not occur in the payload; each part is introduced by
// "--boundary" and the whole body is terminated by "--boundary--".
let boundary = "--------14737809831466499882746641449----"
let beginningBoundary = "--\(boundary)"
let endingBoundary = "--\(boundary)--"
let contentType = "multipart/form-data;boundary=\(boundary)"
So for audio file uploads that's also important.
Related
When I use this code below and I pull my https video link from Firebase over Wifi everything is smooth, the video immediately plays with zero issues. When I use this same code over Cellular everything moves extremely slow, like the video pauses and takes forever to load.
If it plays from a file, whether I'm on Cellular or Wifi shouldn't matter. What is the issue here?
DataModel:
/// Model object wrapping a remote video URL pulled from Firebase.
/// Fixes vs. the original: the `convenience init(dict:` declaration was
/// missing its closing parenthesis (did not compile), and the force-unwrapped
/// `URL(string:)!` is now a guard.
class Video {
    var httpsStr: String?
    var videoURL: URL?

    /// Builds a Video from a snapshot dictionary and starts loading the
    /// asset's keys asynchronously. `videoURL` is only set once loading
    /// completes, so it may still be nil right after init returns.
    convenience init(dict: [String: Any]) {
        self.init()
        if let httpsStr = dict["httpsStr"] as? String {
            self.httpsStr = httpsStr
            guard let url = URL(string: httpsStr) else { return }
            let assetKeys = ["playable", "duration"]
            let asset = AVURLAsset(url: url)
            asset.loadValuesAsynchronously(forKeys: assetKeys, completionHandler: {
                DispatchQueue.main.async {
                    self.videoURL = asset.url
                    // save videoURL to FileManager to play video from disk
                }
            })
        }
    }
}
Firebase Pull:
// Pull the video dictionary once from Firebase and start playback.
// Fix vs. the original: the closing ")" of the asyncAfter call was missing.
ref.observeSingleEvent(of: .value) { (snapshot) in
    guard let dict = snapshot.value as? [String: Any] else { return }
    let video = Video(dict: dict)
    self.video = video
    // NOTE(review): the fixed 2-second delay is a race — the asset may not
    // have finished loading; prefer a completion callback from Video.
    DispatchQueue.main.asyncAfter(deadline: .now() + 2, execute: {
        self.playVideo()
    })
}
Play Video:
/// Plays the video from a previously saved local file when one exists,
/// otherwise streams it from the remote URL.
func playVideo() {
// init AVPlayer ...
guard let videoURL = self.video.videoURL else { return }
let lastPathComponent = videoURL.lastPathComponent
// NOTE(review): "FileManager..." below is placeholder pseudo-code from the
// post — it stands for building a documents-directory URL for the file.
let file = FileManager...appendingPathComponent(lastPathComponent)
if FileManager.default.fileExists(atPath: file.path) {
// Cached copy exists — play from disk.
let asset = AVAsset(url: file)
play(asset)
} else {
// No local copy — stream the remote asset.
let asset = AVAsset(url: videoURL)
play(asset)
}
}
/// Wraps the asset in a player item and hands it to the player.
func play(_ asset: AVAsset) {
self.playerItem = AVPlayerItem(asset: asset)
// Don't let AVPlayer delay playback to build a larger buffer first.
self.player?.automaticallyWaitsToMinimizeStalling = false // I also set this to true
// Request only ~1s of forward buffer so playback starts quickly.
self.playerItem?.preferredForwardBufferDuration = TimeInterval(1)
self.player?.replaceCurrentItem(with: playerItem!)
// play video
}
I followed this answer and now everything seems to work smoothly while on Cellular Data. I needed to include the tracks property in the assetKeys.
You create an asset from a URL using AVURLAsset. Creating the asset,
however, does not necessarily mean that it’s ready for use. To be
used, an asset must have loaded its tracks.
/// Model object wrapping a remote video URL, with "tracks" included in the
/// asynchronously loaded keys (an asset is only usable once its tracks are
/// loaded).
/// Fixes vs. the original: the `convenience init(dict:` declaration was
/// missing its closing parenthesis (did not compile), and the force-unwrapped
/// `URL(string:)!` is now a guard.
class Video {
    var httpsStr: String?
    var videoURL: URL?

    convenience init(dict: [String: Any]) {
        self.init()
        if let httpsStr = dict["httpsStr"] as? String {
            self.httpsStr = httpsStr
            guard let url = URL(string: httpsStr) else { return }
            let assetKeys = ["playable", "duration", "tracks"] // <----- "tracks" added here
            let asset = AVURLAsset(url: url)
            asset.loadValuesAsynchronously(forKeys: assetKeys, completionHandler: {
                var error: NSError? = nil
                let status = asset.statusOfValue(forKey: "tracks", error: &error)
                switch status {
                case .loaded:
                    // Successfully loaded, continue processing
                    DispatchQueue.main.async {
                        self.videoURL = asset.url
                        // save videoURL to FileManager to play video from disk
                    }
                case .failed:
                    // Examine NSError pointer to determine failure
                    print("Error", error?.localizedDescription as Any)
                default:
                    // Handle all other cases
                    print("default")
                }
            })
        }
    }
}
I am trying to pick a video from the camera roll and then uploading that video to firebase storage. So far I am able to pick a video but it is not uploading to firebase, how can I upload it to firebase storage?
/// Uploads the file at `url` to Firebase Storage as "video.mov".
/// NOTE(review): this is the asker's non-working code — `putFile(from:)`
/// fails for asset-library reference URLs handed out by the image picker,
/// and the returned upload task and its errors are ignored, so the failure
/// is invisible. The accepted fix (later in this post) reads the bytes into
/// Data and calls putData instead.
func uploadVideoToDB(url: URL){
let storageReference = Storage.storage().reference().child("video.mov")
storageReference.putFile(from: url)
}
/// Presents the photo library filtered down to movie files so the user can
/// pick a video. (`section` is unused here, kept for the caller's signature.)
func fetchVideos(section: Int){
    imagePickerController.delegate = self
    imagePickerController.sourceType = .photoLibrary
    imagePickerController.mediaTypes = ["public.movie"]
    present(imagePickerController, animated: true, completion: nil)
}
/// Picker delegate: hands the chosen video's URL to the upload service.
/// Fixes vs. the original: no force unwrap (a nil URL no longer crashes),
/// and `.mediaURL` (a readable file URL) is used instead of the deprecated
/// "UIImagePickerControllerReferenceURL" — reference URLs
/// ("assets-library://...") cannot be read by putFile, which is why the
/// upload never happened.
func imagePickerController(_ picker: UIImagePickerController, didFinishPickingMediaWithInfo info: [UIImagePickerController.InfoKey : Any]){
    if let url = info[.mediaURL] as? URL {
        DataService.instance.uploadVideoToDB(url: url)
    } else {
        print("No usable video URL in picker info")
    }
    picker.dismiss(animated: true, completion: nil)
}
Call this function to upload the video to firebase storage
/// Converts the picked video to MP4, then uploads it to Firebase Storage
/// under "Videos/<timestamp>.mp4".
///
/// Fixes vs. the original:
/// - `#escaping` → `@escaping` (the original did not compile)
/// - a failed conversion / unreadable file now reports through `failure`
///   instead of force-unwrap crashes
/// - uses `Data` directly instead of bridging through `NSData`
///
/// - Parameters:
///   - url: local URL of the source video.
///   - success: called with the download URL string on success.
///   - failure: called with the underlying error on failure.
///
/// NOTE(review): `dispatchgroup.wait()` blocks the calling thread until the
/// conversion finishes — do not call this on the main thread.
/// NOTE(review): `metadata?.downloadURL()` was removed in newer Firebase
/// SDKs; newer code should call `storageRef.downloadURL(completion:)`.
func uploadTOFireBaseVideo(url: URL,
                           success : @escaping (String) -> Void,
                           failure : @escaping (Error) -> Void) {
    let name = "\(Int(Date().timeIntervalSince1970)).mp4"
    let path = NSTemporaryDirectory() + name
    let dispatchgroup = DispatchGroup()
    dispatchgroup.enter()
    let documentsURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
    let outputurl = documentsURL.appendingPathComponent(name)
    var ur = outputurl
    self.convertVideo(toMPEG4FormatForVideo: url as URL, outputURL: outputurl) { (session) in
        // Fall back to the requested output URL if the session has none.
        ur = session.outputURL ?? outputurl
        dispatchgroup.leave()
    }
    dispatchgroup.wait()
    guard let data = try? Data(contentsOf: ur) else {
        failure(NSError(domain: "uploadTOFireBaseVideo", code: -1,
                        userInfo: [NSLocalizedDescriptionKey: "Could not read converted video"]))
        return
    }
    do {
        // Keep a temp-directory copy, as the original did.
        try data.write(to: URL(fileURLWithPath: path), options: .atomic)
    } catch {
        print(error)
    }
    let storageRef = Storage.storage().reference().child("Videos").child(name)
    storageRef.putData(data, metadata: nil, completion: { (metadata, error) in
        if let error = error {
            failure(error)
        } else if let urlString = metadata?.downloadURL()?.absoluteString {
            success(urlString)
        } else {
            failure(NSError(domain: "uploadTOFireBaseVideo", code: -2,
                            userInfo: [NSLocalizedDescriptionKey: "Upload succeeded but no download URL"]))
        }
    })
}
Following function converts the video to mp4 format so that it can be viewed on any device either it be iOS or android
/// Re-encodes the video at `inputURL` as MP4 at `outputURL` and invokes
/// `handler` with the finished (or failed) export session.
///
/// Fixes vs. the original:
/// - `#escaping` → `@escaping` (the original did not compile)
/// - `try!` removeItem crashed whenever no output file existed yet;
///   `try?` makes the cleanup best-effort
/// - the export-session force-unwrap is now a guard
///
/// NOTE(review): if the session cannot be created, `handler` is never
/// called — callers that wait on a DispatchGroup should account for that.
func convertVideo(toMPEG4FormatForVideo inputURL: URL, outputURL: URL, handler: @escaping (AVAssetExportSession) -> Void) {
    // Remove any leftover file; AVAssetExportSession refuses to overwrite.
    try? FileManager.default.removeItem(at: outputURL)
    let asset = AVURLAsset(url: inputURL, options: nil)
    guard let exportSession = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetHighestQuality) else {
        print("Could not create export session for \(inputURL)")
        return
    }
    exportSession.outputURL = outputURL
    exportSession.outputFileType = .mp4
    exportSession.exportAsynchronously(completionHandler: {
        handler(exportSession)
    })
}
This is what did it for me:
I needed to convert the URL into Data and then use putData instead of putFile.
/// Reads the video file at `url` into memory and uploads the bytes to
/// Firebase Storage as "videos/<uuid>.mp4".
func uploadVideoToDB(url: URL){
    let uniqueName = UUID().uuidString
    let videoRef = Storage.storage()
        .reference()
        .child("videos")
        .child("\(uniqueName).mp4")
    do {
        let bytes = try Data(contentsOf: url)
        videoRef.putData(bytes)
    } catch {
        print(error)
    }
}
I am trying to use the AWS S3 bucket to store user photos from when they have taken them from their phones. I right now have my code set up to the point where the user is able to take a photo of something and have that show up on the UIImageView.
The issue I am encountering is that I have no clue how to store it on the S3 bucket, I have code right now that is able to store a specified photo the bucket, but not really code that is able to store a photo that is taken from the camera.
Take Photo code
/// Launches the camera so the user can take a photo. Does nothing when no
/// camera source is available (e.g. on the simulator).
@IBAction func takePhoto(_ sender: Any) {
    guard UIImagePickerController.isSourceTypeAvailable(UIImagePickerController.SourceType.camera) else { return }
    let picker = UIImagePickerController()
    picker.sourceType = UIImagePickerController.SourceType.camera
    picker.delegate = self
    picker.allowsEditing = false
    self.present(picker, animated: true, completion: nil)
}
/// Camera delegate: shows the captured photo in the image view.
/// Fix vs. the original: `print(takenPhoto.image = pickedImage)` printed the
/// Void result of an assignment and assigned the image a second time.
func imagePickerController(_ picker: UIImagePickerController, didFinishPickingMediaWithInfo info: [UIImagePickerController.InfoKey : Any]) {
    if let pickedImage = info[UIImagePickerController.InfoKey.originalImage] as? UIImage {
        takenPhoto.contentMode = .scaleToFill
        takenPhoto.image = pickedImage
        print(pickedImage)
    }
    picker.dismiss(animated: true, completion: nil)
}
AWS S3 Bucket Code
/// Uploads the bundled sample image "eartj".
/// NOTE(review): the type is passed with a leading dot (".jpeg");
/// `Bundle.path(forResource:ofType:)` expects the extension without the
/// dot — verify the resource actually resolves.
@IBAction func uploadFile(_ sender: Any) {
uploadFile(with: "eartj", type: ".jpeg")
}
/// Uploads a bundled image resource to the S3 bucket.
///
/// Fixes vs. the original:
/// - the S3 key was "\(resource),\(type)" — the stray comma produced object
///   names like "eartj,.jpeg"; the pieces are now joined directly
/// - the bundle-path force unwrap is now a guard, so a missing resource no
///   longer crashes
///
/// - Parameters:
///   - resource: bundle resource name, e.g. "eartj".
///   - type: file extension as passed by the caller, e.g. ".jpeg".
func uploadFile(with resource: String, type: String){
    let key = "\(resource)\(type)"
    guard let imagePath = Bundle.main.path(forResource: resource, ofType: type) else {
        print("Resource \(resource)\(type) not found in bundle")
        return
    }
    let imageUrl = URL(fileURLWithPath: imagePath)
    let request = AWSS3TransferManagerUploadRequest()!
    request.bucket = "wuuurktest"
    request.key = key
    request.body = imageUrl
    request.acl = .publicReadWrite
    let transferManager = AWSS3TransferManager.default()
    transferManager.upload(request).continueWith(executor: AWSExecutor.mainThread()) { (task) -> Any? in
        if let error = task.error {
            print(error)
        }
        if task.result != nil {
            print("Uploaded File")
        }
        return nil
    }
}
Link to the guide I am using to create the file upload
https://www.youtube.com/watch?v=UMgApUhg7ic
Most of the answers are outdated and too complicated. I was struggling with the same problem and finally found a solution.
This works best for me and works on Swift 5.
First of all, let's update the function to upload images to AWS.
/// Uploads the file at `url` to the S3 bucket, keyed by the file's name,
/// and stores the resulting public URL in `contentUrl`.
///
/// Fixes vs. the original: the key is derived with `url.lastPathComponent`
/// instead of manually splitting the path on "/", and the optional `key`
/// force-unwraps (which also made the log print "Optional(...)") are gone.
func uploadToS3(url: URL) {
    let key = url.lastPathComponent // e.g. "my-image.jpg"
    let request = AWSS3TransferManagerUploadRequest()!
    request.bucket = bucketName
    request.key = key
    request.body = url
    request.acl = .publicReadWrite
    let transferManager = AWSS3TransferManager.default()
    transferManager.upload(request).continueWith(executor: AWSExecutor.mainThread()) { (task) -> Any? in
        if let error = task.error {
            print(error)
        }
        if task.result != nil {
            print("Uploaded \(key)")
            // Public URL of the uploaded object: <s3Url>/<bucket>/<key>.
            let contentUrl = self.s3Url.appendingPathComponent(bucketName).appendingPathComponent(key)
            self.contentUrl = contentUrl
        }
        return nil
    }
}
In this block of code:
/// Picker delegate: shows the chosen image, writes a JPEG copy into the temp
/// directory, and uploads that staged file to S3 (the AWS transfer API
/// uploads from a file URL, not from an in-memory UIImage).
/// Fix vs. the original: `print(takenPhoto.image = pickedImage)` printed the
/// Void result of an assignment and assigned the image a second time.
func imagePickerController(_ picker: UIImagePickerController, didFinishPickingMediaWithInfo info: [UIImagePickerController.InfoKey : Any]) {
    if let pickedImage = info[UIImagePickerController.InfoKey.originalImage] as? UIImage {
        takenPhoto.contentMode = .scaleToFill
        takenPhoto.image = pickedImage
        // Fixed name "my-image.jpg"; add a counter (e.g. "my-image-\(counter)")
        // if distinct uploads must not overwrite each other.
        let url = URL(fileURLWithPath: NSTemporaryDirectory(), isDirectory: true)
            .appendingPathComponent("my-image", isDirectory: false)
            .appendingPathExtension("jpg")
        // Write to disk, then hand the staged file to the AWS upload helper.
        if let data = pickedImage.jpegData(compressionQuality: 0.8) {
            do {
                try data.write(to: url)
                uploadToS3(url: url)
            } catch {
                print("Handle the error, i.e. disk can be full")
            }
        }
    }
    picker.dismiss(animated: true, completion: nil)
}
With this implementation, the image will be uploaded immediately to the server once you select the image from the library.
First thing you need to do is to store the picked image in your app's document directory as a temporary file. As soon as your image is picked, save it to the document directory using the below function.
/// Saves `file` into Documents/<folderName>/ under a timestamped name and
/// returns the resulting file URL, or nil if the write fails.
///
/// NOTE(review): the generated filename is "<timestamp>_<fileExtension>" —
/// the extension follows an underscore rather than a dot, so the file has no
/// real path extension; confirm downstream consumers (e.g. S3 content-type
/// detection) do not rely on one.
func saveFileToDocumentDirectory(file: Data, fileExtension: String, folderName: String) -> URL? {
// Second-resolution timestamp keeps saved names unique per save.
let formatter = DateFormatter()
formatter.dateFormat = "yyyyMMdd_HHmmss"
let stringOfDateTimeStamp = formatter.string(from: Date())
print("Date time stamp String: \(stringOfDateTimeStamp)")
let directoryPath = (NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0] as NSString).appendingPathComponent("\(folderName)/")
// Create the destination folder on first use.
if !FileManager.default.fileExists(atPath: directoryPath) {
do {
try FileManager.default.createDirectory(at: NSURL.fileURL(withPath: directoryPath), withIntermediateDirectories: true, attributes: nil)
} catch {
print(error)
}
}
let filename = "/\(stringOfDateTimeStamp)_\(fileExtension)"
let customPath = "\(folderName)\(filename)"
// NOTE(review): directoryPath already ends in "/" and filename starts with
// "/", producing a double slash in the path; harmless but untidy.
let filepath = directoryPath+filename
print("FilePATH: \(filepath)")
let url = NSURL.fileURL(withPath: filepath)
do {
// .atomic: write to a temp file first, then rename into place.
try file.write(to: url, options: .atomic)
print("CustomPAth:\(customPath)")
print(String.init("\(directoryPath)\(filename)"))
return url
} catch {
print(error)
print("file cant not be save at path \(filepath), with error : \(error)");
return nil
}
}
This will return a URL and you can then use the below function to upload that file to your S3 bucket.
/// Uploads the file at `url` to the S3 bucket under a random
/// "IMG_<uuid>.<fileExtension>" key and logs the resulting public URL.
///
/// SECURITY(review): the AWS access/secret keys are hard-coded in the app —
/// anyone who extracts the binary can read them. Use Cognito identity pools
/// or another temporary-credential mechanism instead of embedding keys.
func uploadToS3(url: URL, contentType: String, fileExtension: String){
SwiftLoader.show(title: "Uploading File", animated: true)
let accessKey = "YOUR_ACCESS_KEY"
let secretKey = "YOUR_SECRET_KEY"
let credentialsProvider = AWSStaticCredentialsProvider(accessKey: accessKey, secretKey: secretKey)
let configuration = AWSServiceConfiguration(region: .USWest2, credentialsProvider: credentialsProvider)
AWSServiceManager.default().defaultServiceConfiguration = configuration
// Random remote name avoids key collisions between uploads.
let remoteName = "IMG_\(UUID().uuidString)"+".\(fileExtension)"
let S3BucketName = "YOUR_BUCKET_NAME"
let uploadRequest = AWSS3TransferManagerUploadRequest()!
uploadRequest.body = url
uploadRequest.key = remoteName
uploadRequest.bucket = S3BucketName
uploadRequest.contentType = contentType
uploadRequest.acl = .publicRead
let transferManager = AWSS3TransferManager.default()
transferManager.upload(uploadRequest).continueWith(block: { (task: AWSTask) -> Any? in
if let error = task.error {
print("Upload failed with error: (\(error.localizedDescription))")
// UI updates (loader) must happen on the main thread.
DispatchQueue.main.async {
print("An error occurred while Uploading your file, try again.")
SwiftLoader.hide()
}
}
if task.result != nil {
// Public URL = service endpoint + bucket + key.
let url = AWSS3.default().configuration.endpoint.url
let publicURL = url?.appendingPathComponent(uploadRequest.bucket!).appendingPathComponent(uploadRequest.key!)
print("Uploaded to:\(String(describing: publicURL))")
}
return nil
})
// NOTE(review): SwiftLoader.hide() is only called on failure — the loader
// stays visible after a successful upload.
}
Don't forget to delete your temporary file once your upload is successful.
Here is an example using TransferUtility:-
import AWSCognitoIdentityProvider
import AWSS3
// Reports upload progress as a fraction in [0, 1].
typealias progressBlock = (_ progress: Double) -> Void
// Delivers either a response value (e.g. the public URL string) or an error.
typealias completionBlock = (_ response: Any?, _ error: Error?) -> Void
//using Utility upload expression
/// Uploads a local image file to S3 via AWSS3TransferUtility, reporting
/// progress and completion on the main queue.
///
/// Fix vs. the original: `key` was force-unwrapped in three places; a nil
/// key now fails through the completion block instead of crashing.
///
/// - Parameters:
///   - image: local file URL of the PNG to upload.
///   - key: destination object key, e.g. "private/{user_identity_id}/my-picture.png".
///   - progress: optional fraction-completed callback (delivered on main).
///   - completion: optional (publicURLString?, error?) callback (delivered on main).
func uploadImage(with image: URL, key: String?, progress: progressBlock?, completion: completionBlock?) {
    guard let key = key else {
        completion?(nil, NSError(domain: "uploadImage", code: -1,
                                 userInfo: [NSLocalizedDescriptionKey: "Missing S3 object key"]))
        return
    }
    let expression = AWSS3TransferUtilityUploadExpression()
    expression.progressBlock = { (task: AWSS3TransferUtilityTask, awsProgress: Progress) -> Void in
        //print(awsProgress.fractionCompleted)
        guard let uploadProgress = progress else { return }
        DispatchQueue.main.async {
            uploadProgress(awsProgress.fractionCompleted)
        }
    }
    // The ACL is set both as a request header and a request parameter,
    // mirroring the original.
    expression.setValue("public-read-write", forRequestHeader: "x-amz-acl")
    expression.setValue("public-read-write", forRequestParameter: "x-amz-acl")
    // Completion block
    var completionHandler: AWSS3TransferUtilityUploadCompletionHandlerBlock?
    completionHandler = { (task, error) -> Void in
        DispatchQueue.main.async(execute: {
            if error == nil {
                // Reconstruct the public URL from endpoint + bucket + key.
                let url = AWSS3.default().configuration.endpoint.url
                let publicURL = url?.appendingPathComponent(AWS.bucketName).appendingPathComponent(key)
                print("Uploaded to:\(String(describing: publicURL))")
                if let completionBlock = completion {
                    completionBlock(publicURL?.absoluteString, nil)
                }
            } else {
                if let completionBlock = completion {
                    completionBlock(nil, error)
                }
            }
        })
    }
    // Start uploading using AWSS3TransferUtility
    let awsTransferUtility = AWSS3TransferUtility.default()
    awsTransferUtility.uploadFile(
        image as URL,
        bucket: AWS.bucketName, //Make sure you write the correct bucket name here
        key: key,
        contentType: "image/png",
        expression: expression,
        completionHandler: completionHandler).continueWith(block: { (task) -> Any? in
            if let error = task.error {
                print("error is: \(error.localizedDescription)")
            }
            if let _ = task.result {
                // your uploadTask
                print("Starting upload...")
            }
            return nil
        })
}
Two parameters I am passing:-
image: URL and key: String?
Here is how I get image and image name (key):-
//setting temp name for upload // I am using a random string here
let imageName = "\(CommonMethod.randomString(length: 6))" + ".png"
//settings temp location for image
let tempDirectoryUrl = URL(fileURLWithPath: NSTemporaryDirectory()).appendingPathComponent(imageName)
// NOTE(review): `save(at:)` is a custom UIImage extension, not UIKit API —
// presumably it writes the image to `tempDirectoryUrl` and returns the URL
// (nil on failure); confirm against its definition. `image!` will crash if
// the image is nil.
guard let localUrlPath = image!.save(at: tempDirectoryUrl) else { return }
//URL
print(localUrlPath)
Happy coding!
I am developing a music app in Swift 3, where I need to select an audio song from the device library and upload it to Amazon S3. Here is my code:
func mediaPicker(_ mediaPicker: MPMediaPickerController, didPickMediaItems mediaItemCollection: MPMediaItemCollection)
{
self.dismiss(animated: true, completion: nil)
let item: MPMediaItem? = (mediaItemCollection.items[0] as? MPMediaItem)
let url: URL? = item?.value(forProperty: MPMediaItemPropertyAssetURL) as! URL?
let albumTitle = item?.value( forProperty: MPMediaItemPropertyTitle ) as! String
let albumMediaType = item?.value( forProperty: MPMediaItemPropertyMediaType) as! Int
}
As an output, I am getting Asset Url : ipod-library://item/item.mp3?id=8739819345820495362 And Media Type is coming in integer 1/2/3/4....
Now, my problem is that I need to know the file type of the selected song, because when I hard-code '.mp3' onto the file and upload it to Amazon S3, some of the audio files play and the rest do not!
Can you please help me out to resolve this issue? Thank you.
Edit: Here is my code to convert AssetUrl to path :
/// Exports an iPod-library asset to a uniquely named .m4a file in the temp
/// directory and reports the result through the completion handler.
///
/// Fix vs. the original: `#escaping` → `@escaping` (the original did not
/// compile; the handler escapes because the export is asynchronous).
///
/// - Parameters:
///   - assetURL: the "ipod-library://..." URL of the media item.
///   - completionHandler: (fileURL, nil) on success, (nil, error) on failure.
func exportMedia(_ assetURL: URL, completionHandler: @escaping (_ fileURL: URL?, _ error: Error?) -> ()) {
    let asset = AVURLAsset(url: assetURL)
    guard let exporter = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetAppleM4A) else {
        completionHandler(nil, ExportError.unableToCreateExporter)
        return
    }
    // UUID keeps concurrent exports from clobbering each other.
    let fileURL = URL(fileURLWithPath: NSTemporaryDirectory())
        .appendingPathComponent(NSUUID().uuidString)
        .appendingPathExtension("m4a")
    exporter.outputURL = fileURL
    exporter.outputFileType = "com.apple.m4a-audio"
    exporter.exportAsynchronously {
        if exporter.status == .completed {
            completionHandler(fileURL, nil)
        } else {
            completionHandler(nil, exporter.error)
        }
    }
}
I want to cast a device-local video to Chromecast using the PhotoKit framework, but only the loading screen is displayed on the Chromecast and no video is played. If I replace avUrlAsset.url.absoluteString with http_url_of_video, then it plays the video successfully.
Code
// Request the AVAsset for a Photos-library video; network access allows an
// iCloud download if the original is not on-device.
let options = PHVideoRequestOptions()
options.isNetworkAccessAllowed = true
options.deliveryMode = .automatic
// create a meta data
let metadata = GCKMediaMetadata(metadataType: .movie)
metadata.setString("Title", forKey: kGCKMetadataKeyTitle)
metadata.setString("Subtitle", forKey: kGCKMetadataKeySubtitle)
PHImageManager.default().requestAVAsset(forVideo: asset, options: options, resultHandler: { (avAsset, audioMix, info) in
if let avUrlAsset = avAsset as? AVURLAsset {
// create media information
// NOTE(review): avUrlAsset.url is a local (file/iPod-library) URL that the
// Chromecast receiver cannot fetch — which matches the symptom of only the
// loading screen appearing. The receiver needs an HTTP(S) URL it can reach.
let mediaInfo = GCKMediaInformation(contentID: avUrlAsset.url.absoluteString,
streamType: .buffered,
contentType: "video/quicktime",
metadata: metadata,
streamDuration: 0,
customData: nil)
self._remotMediaClient?.loadMedia(mediaInfo, autoplay: true)
}
})
Please suggest how I can play a local video on Chromecast. I also tried copying the video to the documents directory and passing the URL of the copied video to Chromecast, but that did not work.
I solved this using a local HTTP server.
HttpServerManager.swift
import UIKit
/// Singleton wrapper around an embedded HTTPServer (CocoaHTTPServer-style
/// API) that serves the app's Documents directory over HTTP so a Chromecast
/// on the same network can fetch locally exported videos.
///
/// Fixes vs. the original: the misspelled internal parameter name
/// ("stirng") is corrected, and the optional-chained `httpServer?` calls are
/// made consistent with the rest of the class (the server is created in
/// `init`, so it is never nil afterwards) — this also stops the port log
/// from printing "Optional(...)".
class HttpServerManager: NSObject {

    static let shared = HttpServerManager()

    private var httpServer: HTTPServer!

    override init() {
        super.init()
        // Create server using our custom MyHTTPServer class
        httpServer = HTTPServer()
        // Tell the server to broadcast its presence via Bonjour.
        // This allows browsers such as Safari to automatically discover our service.
        httpServer.setType("_http._tcp.")
        // Normally there's no need to run our server on any specific port.
        // Technologies like Bonjour allow clients to dynamically discover the server's port at runtime.
        // However, for easy testing you may want force a certain port so you can just hit the refresh button.
        // [httpServer setPort:12345];
        let documentsDirectory = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true).first
        httpServer.setDocumentRoot(documentsDirectory)
    }

    /// Start the server (and log any problems).
    func startServer() {
        do {
            try httpServer.start()
            DDLogWrapper.logInfo("Started HTTP Server on port \(httpServer.listeningPort())")
        } catch {
            DDLogWrapper.logError("Error starting HTTP Server: \(error) ")
        }
    }

    /// Stop serving files.
    func stopServer() {
        httpServer.stop()
    }

    /// Port the server is bound to — needed to build local playback URLs.
    func getListeningPort() -> UInt16 {
        return httpServer.listeningPort()
    }

    /// Points the server at a different directory to serve.
    func setDocumentRoot(path string: String) {
        httpServer.setDocumentRoot(string)
    }
}
start server in AppDelege.swift
/// App delegate: wires up Google Cast logging and starts the local HTTP
/// server that serves exported videos to the Chromecast.
class AppDelegate: UIResponder, UIApplicationDelegate, GCKLoggerDelegate {
var window: UIWindow?
// NOTE(review): never assigned in this code; the running server lives in
// HttpServerManager.shared — this property looks unused.
var httpServer:HTTPServer?
func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [UIApplicationLaunchOptionsKey: Any]?) -> Bool {
// Override point for customization after application launch.
GCKLogger.sharedInstance().delegate = self
// Configure our logging framework.
// To keep things simple and fast, we're just going to log to the Xcode console.
LoggerFactory.initLogging()
// start local http server
HttpServerManager.shared.startServer()
return true
}
}
Play a local video on Chromecast using the method below:
/// Exports a Photos-library video to Documents/rendered_video.mp4 and casts
/// it to the Chromecast via the local HTTP server. Resumes playback when the
/// player is merely paused, and reuses the previous export when the same
/// asset is cast again.
func playToRemotePlayer(with asset:PHAsset, forViewController viewController:UIViewController) {
// if video is paused than resume it
if _remotMediaClient?.mediaStatus?.playerState == .paused {
_remotMediaClient?.play()
return
}
// lets keep track of recent assets that is played on Crome cast
if recentPlayedAsset == nil {
recentPlayedAsset = asset
}else{
if recentPlayedAsset == asset {
// Same asset as last time — reuse the already-exported media info.
self._remotMediaClient?.loadMedia(self.recentMediaInfo!, autoplay: true)
return
}else{
recentPlayedAsset = asset
}
}
// Request the original-quality AVAsset; allow an iCloud download if needed.
let options = PHVideoRequestOptions()
options.isNetworkAccessAllowed = true
options.deliveryMode = .highQualityFormat
options.version = .original
// create a meta data
let metadata = GCKMediaMetadata(metadataType: .movie)
metadata.setString("you video title", forKey: kGCKMetadataKeyTitle)
metadata.setString("you video subtitle", forKey: kGCKMetadataKeySubtitle)
PHImageManager.default().requestAVAsset(forVideo: asset, options: options, resultHandler: { (avAsset, audioMix, info) in
if (avAsset as? AVURLAsset) != nil {
let startDate = NSDate()
//Create Export session
// NOTE(review): the force-unwraps below (exportSession!, filePath!,
// (avAsset?.duration)!) crash if the export session cannot be created
// or the documents URL is nil.
let exportSession = AVAssetExportSession(asset: avAsset!, presetName: AVAssetExportPresetHighestQuality)
let filePathURL = documentDirectoryUrl.appendingPathComponent("rendered_video.mp4")
let filePath = NSURL(string: (filePathURL?.absoluteString)!)
// Remove any previous export; the session refuses to overwrite.
CommanUtilites.deleteFile(filePath: filePath!)
exportSession!.outputURL = filePath as URL?
exportSession!.outputFileType = AVFileTypeMPEG4
exportSession!.shouldOptimizeForNetworkUse = true
// NOTE(review): CMTimeMakeWithSeconds(0.0, 0) passes a timescale of 0,
// which yields an invalid CMTime — confirm a nonzero timescale
// (e.g. 600) was intended here.
let start = CMTimeMakeWithSeconds(0.0, 0)
let range = CMTimeRangeMake(start, (avAsset?.duration)!)
exportSession?.timeRange = range
print("Exporting Media...")
// Progress HUD is UI work — main thread only.
DispatchQueue.main.async {
self.progressHUD = MBProgressHUD.showAdded(to: viewController.view, animated: true)
self.progressHUD?.mode = MBProgressHUDMode.indeterminate
self.progressHUD?.label.text = "Exporting video please wait..."
}
exportSession!.exportAsynchronously(completionHandler: {() -> Void in
DispatchQueue.main.async {
self.progressHUD?.hide(animated: true)
}
switch exportSession!.status {
case .failed:
print("Error : " + (exportSession?.error?.localizedDescription)!)
case .cancelled:
print("Export canceled")
case .completed:
//Video conversion finished
let endDate = NSDate()
let time = endDate.timeIntervalSince(startDate as Date)
print(time)
print("Export Successful!")
print(exportSession?.outputURL?.path ?? "")
// Build the URL served by HttpServerManager for the Chromecast to fetch.
// NOTE(review): verify the receiver can reach "127.0.0.1" — loopback
// normally refers to the receiver itself; the phone's LAN IP may be
// what is actually required here.
let port = String(HttpServerManager.shared.getListeningPort())
let videoHttpUrl = "http://127.0.0.1:" + port + "/rendered_video.mp4"
// create media information
self.recentMediaInfo = GCKMediaInformation(contentID: videoHttpUrl,
streamType: .buffered,
contentType: "video/mp4",
metadata: nil,
streamDuration: (avAsset?.duration.seconds)!,
customData: nil)
self._remotMediaClient?.loadMedia(self.recentMediaInfo!, autoplay: true)
default:
break
}
})
}
})
}