iOS download and play mp3 file - ios

I am looking for a way to download and play an mp3 file simultaneously.
I can already download the file, save it to local storage, and play it afterwards.
But how can I start downloading and playing it at the same time, and then save it to local storage once the download completes? Which tools should I use for this?
Currently I use TCBlobDownload to download the file, save it, and then play it with AVAudioPlayer.

You can use AVAssetResourceLoader to play an audio file as soon as enough data is available while the download continues.
First, configure the delegate of the resource loader:
var playerAsset: AVAsset!
if fileURL.pathExtension.isEmpty {
    var components = URLComponents(url: fileURL, resolvingAgainstBaseURL: false)!
    components.scheme = "fake" // make custom URL scheme
    components.path += ".mp3"
    playerAsset = AVURLAsset(url: components.url!)
    (playerAsset as! AVURLAsset).resourceLoader.setDelegate(self, queue: DispatchQueue.global())
} else {
    playerAsset = AVAsset(url: fileURL)
}
let playerItem = AVPlayerItem(asset: playerAsset)
Then read the audio data and respond to the resource loader:
// MARK: - AVAssetResourceLoaderDelegate methods
func resourceLoader(_ resourceLoader: AVAssetResourceLoader, shouldWaitForLoadingOfRequestedResource loadingRequest: AVAssetResourceLoadingRequest) -> Bool {
    if let url = loadingRequest.request.url {
        var components = URLComponents(url: url, resolvingAgainstBaseURL: false)!
        components.scheme = NSURLFileScheme // replace with the real URL scheme
        components.path = String(components.path.dropLast(4))
        if let attributes = try? FileManager.default.attributesOfItem(atPath: components.url!.path),
           let fileSize = attributes[FileAttributeKey.size] as? Int64 {
            loadingRequest.contentInformationRequest?.isByteRangeAccessSupported = true
            loadingRequest.contentInformationRequest?.contentType = "audio/mpeg3"
            loadingRequest.contentInformationRequest?.contentLength = fileSize
            let requestedOffset = loadingRequest.dataRequest!.requestedOffset
            let requestedLength = loadingRequest.dataRequest!.requestedLength
            if let handle = try? FileHandle(forReadingFrom: components.url!) {
                handle.seek(toFileOffset: UInt64(requestedOffset))
                let data = handle.readData(ofLength: requestedLength)
                loadingRequest.dataRequest?.respond(with: data)
                loadingRequest.finishLoading()
                return true
            } else {
                return false
            }
        } else {
            return false
        }
    } else {
        return false
    }
}
And if you want to do this in Objective-C, refer to this.
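A minimal usage sketch for wiring this up (assuming playerItem comes from the first snippet and self is the resource loader delegate):
// Keep a strong reference to the player (e.g. in a property); otherwise it is
// deallocated and playback stops immediately.
let player = AVPlayer(playerItem: playerItem)
player.play()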

Related

AVFoundation over Cellular Data

When I use the code below and pull my https video link from Firebase over Wi-Fi, everything is smooth and the video plays immediately with zero issues. When I use the same code over cellular, everything moves extremely slowly: the video pauses and takes forever to load.
If it plays from a file, whether I'm on cellular or Wi-Fi shouldn't matter. What is the issue here?
DataModel:
class Video {
    var httpsStr: String?
    var videoURL: URL?

    convenience init(dict: [String: Any]) {
        self.init()
        if let httpsStr = dict["httpsStr"] as? String {
            self.httpsStr = httpsStr
            let url = URL(string: httpsStr)!
            let assetKeys = ["playable", "duration"]
            let asset = AVURLAsset(url: url)
            asset.loadValuesAsynchronously(forKeys: assetKeys, completionHandler: {
                DispatchQueue.main.async {
                    self.videoURL = asset.url
                    // save videoURL to FileManager to play video from disk
                }
            })
        }
    }
}
Firebase Pull:
ref.observeSingleEvent(of: .value) { (snapshot) in
    guard let dict = snapshot.value as? [String: Any] else { return }
    let video = Video(dict: dict)
    self.video = video
    DispatchQueue.main.asyncAfter(deadline: .now() + 2, execute: {
        self.playVideo()
    })
}
Play Video:
func playVideo() {
    // init AVPlayer ...
    guard let videoURL = self.video.videoURL else { return }
    let lastPathComponent = videoURL.lastPathComponent
    let file = FileManager...appendingPathComponent(lastPathComponent)
    if FileManager.default.fileExists(atPath: file.path) {
        let asset = AVAsset(url: file)
        play(asset)
    } else {
        let asset = AVAsset(url: videoURL)
        play(asset)
    }
}

func play(_ asset: AVAsset) {
    self.playerItem = AVPlayerItem(asset: asset)
    self.player?.automaticallyWaitsToMinimizeStalling = false // I also set this to true
    self.playerItem?.preferredForwardBufferDuration = TimeInterval(1)
    self.player?.replaceCurrentItem(with: playerItem!)
    // play video
}
I followed this answer and now everything seems to work smoothly on cellular data. I needed to include the "tracks" property in the assetKeys.
"You create an asset from a URL using AVURLAsset. Creating the asset, however, does not necessarily mean that it's ready for use. To be used, an asset must have loaded its tracks."
class Video {
    var httpsStr: String?
    var videoURL: URL?

    convenience init(dict: [String: Any]) {
        self.init()
        if let httpsStr = dict["httpsStr"] as? String {
            self.httpsStr = httpsStr
            let url = URL(string: httpsStr)!
            let assetKeys = ["playable", "duration", "tracks"] // <----- "tracks" added here
            let asset = AVURLAsset(url: url)
            asset.loadValuesAsynchronously(forKeys: assetKeys, completionHandler: {
                var error: NSError? = nil
                let status = asset.statusOfValue(forKey: "tracks", error: &error)
                switch status {
                case .loaded:
                    // Successfully loaded, continue processing
                    DispatchQueue.main.async {
                        self.videoURL = asset.url
                        // save videoURL to FileManager to play video from disk
                    }
                case .failed:
                    // Examine the NSError pointer to determine the failure
                    print("Error", error?.localizedDescription as Any)
                default:
                    // Handle all other cases
                    print("default")
                }
            })
        }
    }
}
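As a side note, on iOS 15 and later the same property loading can also be written with async/await; a minimal sketch (the url here is the same httpsStr-based URL as above, and an async context is assumed):
// Sketch only (assumes iOS 15+ and an async calling context).
let asset = AVURLAsset(url: url)
do {
    // load(_:_:) replaces loadValuesAsynchronously/statusOfValue on newer SDKs.
    let (tracks, duration) = try await asset.load(.tracks, .duration)
    print("Loaded \(tracks.count) tracks, duration: \(duration.seconds)s")
} catch {
    print("Failed to load asset properties:", error)
}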

Get naturalSize and a list of resolutions for an m3u8 video

I am playing an m3u8 video.
I tried using let videoAssetSource = AVAsset(url: videoURL), but videoAssetSource.tracks(withMediaType: .video).count always returns 0.
When I use an mp4 link it works.
How can I get the list of quality levels an m3u8 link supports and change the quality while the video is playing?
You will have to create your own respective models for the resolutions (a minimal sketch of those models follows the functions below), but then code like this should work.
/// Downloads the stream file and converts it to the raw playlist.
/// - Parameter completion: In the successful case returns the `RawPlaylist`, which contains the URL the request was performed with
///   and the string representation of the downloaded file as the `content: String` parameter.
func getPlaylist(from url: URL, completion: @escaping (Result<RawPlaylist, Error>) -> Void) {
    task = URLSession.shared.dataTask(with: url) { data, response, error in
        if let error = error {
            completion(.failure(error))
        } else if let data = data, let string = String(data: data, encoding: .utf8) {
            completion(.success(RawPlaylist(url: url, content: string)))
        } else {
            completion(.failure(PlayerException.MEDIA_ERR_DECODE)) // Probably an MP4 file.
        }
    }
    task?.resume()
}

/// Iterates over the provided playlist content and fetches all the stream info data under the `#EXT-X-STREAM-INF` key.
/// - Parameter playlist: Playlist object obtained from the stream URL.
/// - Returns: All available stream resolutions for the respective bandwidth.
func getStreamResolutions(from playlist: RawPlaylist) -> [StreamResolution] {
    var resolutions = [StreamResolution]()
    playlist.content.enumerateLines { line, shouldStop in
        let infoLine = line.replacingOccurrences(of: "#EXT-X-STREAM-INF", with: "")
        let infoItems = infoLine.components(separatedBy: ",")
        let bandwidthItem = infoItems.first(where: { $0.contains(":BANDWIDTH") })
        let resolutionItem = infoItems.first(where: { $0.contains("RESOLUTION") })
        if let bandwidth = bandwidthItem?.components(separatedBy: "=").last,
           let numericBandwidth = Double(bandwidth),
           let resolution = resolutionItem?.components(separatedBy: "=").last?.components(separatedBy: "x"),
           let stringWidth = resolution.first,
           let stringHeight = resolution.last,
           let width = Double(stringWidth),
           let height = Double(stringHeight) {
            resolutions.append(StreamResolution(maxBandwidth: numericBandwidth,
                                                averageBandwidth: numericBandwidth,
                                                resolution: CGSize(width: width, height: height)))
        }
    }
    return resolutions
}
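The RawPlaylist, StreamResolution, PlayerException, and task names used above are the answer's own helpers; a minimal sketch of what they might look like (these are assumptions, not the answer's actual definitions):
import Foundation
import CoreGraphics

// Hypothetical minimal models matching how the functions above use them.
struct RawPlaylist {
    let url: URL
    let content: String
}

struct StreamResolution {
    let maxBandwidth: Double
    let averageBandwidth: Double
    let resolution: CGSize
}

enum PlayerException: Error {
    case MEDIA_ERR_DECODE
}

// The functions above also keep a reference to the running request, e.g.:
// var task: URLSessionDataTask?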
You need to add an observer for the tracks property of the player item:
// Define this variable globally
var observers: [NSKeyValueObservation]? = [NSKeyValueObservation]()

// Find tracks
let videoAssetSource = AVAsset(url: videoURL)
let playerItem = AVPlayerItem(asset: videoAssetSource)
let tracksObserver = playerItem.observe(\.tracks, options: [.old, .new]) { (item, change) in
    for track in item.tracks {
        if let assetTrack = track.assetTrack {
            if assetTrack.mediaType == .video {
                // we found a video track
            }
        }
    }
}
// Keep the observer reference
observers?.append(tracksObserver)
I'm using the Swift 4 block-based key-value observer, but you can use observeValue(forKeyPath:…) if you want.
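Note that the tracks of an AVPlayerItem generally only get populated once the item is being prepared for playback, so the item from the snippet above needs to be attached to a player; a minimal usage sketch:
// Usage sketch (assumption: playerItem and observers come from the snippet above).
let player = AVPlayer(playerItem: playerItem)
player.play() // the tracks observer fires as the item loads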

Load local encrypted video file Asset into AVPlayer

I want to read the data from an encrypted "video" file, decrypt it, and play it with AVPlayer.
My app can download a video; the video comes encrypted from my API. All I have to do is decrypt the file with a cipher and then it becomes a valid mp4 file.
These steps are covered. I went through the process, wrote the decrypted data to a file, played it on my machine, and passed its file URL to AVPlayer, which also played it.
However, I do not want to have to save the decrypted file. I did a bit of research and came across AVAssetResourceLoaderDelegate. I tried to implement it, but without success. The examples I saw were hitting an online URL, so I'm not entirely sure whether it's possible to do this locally as I tried.
Can anyone help me?
guard var components = URLComponents(url: video.url, resolvingAgainstBaseURL: false) else { return }
components.scheme = "encryptedVideo"
guard let url = components.url else { return }

let asset = AVURLAsset(url: url)
asset.resourceLoader.setDelegate(self, queue: DispatchQueue.global(qos: .background))

let playerItem = AVPlayerItem(asset: asset)
let player = AVPlayer(playerItem: playerItem)
player.actionAtItemEnd = .none

self.avPlayerViewController?.player = player
self.avPlayerViewController?.player?.play()
func resourceLoader(_ resourceLoader: AVAssetResourceLoader, shouldWaitForLoadingOfRequestedResource loadingRequest: AVAssetResourceLoadingRequest) -> Bool {
    if let dataRequest = loadingRequest.dataRequest,
       let url = loadingRequest.request.url,
       let contentRequest = loadingRequest.contentInformationRequest {
        guard var components = URLComponents(url: url, resolvingAgainstBaseURL: false) else { return false }
        components.scheme = "file"
        guard let localUrl = components.url else { return false }

        let storageProvider = StorageVideo()
        let dataMaybe = storageProvider.videoData(url: localUrl)

        guard let encryptedData = dataMaybe,
              let decryptedData = try? RNCryptor.decrypt(data: encryptedData,
                                                         withPassword: "omitted") else {
            return false
        }

        contentRequest.contentType = AVFileType.mp4.rawValue
        contentRequest.contentLength = Int64(decryptedData.count)
        // contentRequest.isByteRangeAccessSupported = true

        dataRequest.respond(with: decryptedData)
        loadingRequest.finishLoading()

        // Did this to save the file to see if it was ok. I could play the file in my machine
        // If I pass this file url to a asset (above step) it load as well
        // if let documentDirectory = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first {
        //     let fileDecryptedURL = documentDirectory.appendingPathComponent("test").appendingPathExtension("mp4")
        //     try? decryptedData.write(to: fileDecryptedURL)
        // }
    }
    return true
}
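One thing worth checking in a setup like this is whether the delegate honors the requested byte range instead of always responding with the entire decrypted buffer. Below is a hedged sketch of that idea, reusing the question's StorageVideo and RNCryptor helpers and assuming the whole file is decrypted in memory; it is not a confirmed fix.
// Sketch only: serve the byte range AVPlayer actually asked for
// (assumes the same StorageVideo/RNCryptor helpers as in the question).
func resourceLoader(_ resourceLoader: AVAssetResourceLoader,
                    shouldWaitForLoadingOfRequestedResource loadingRequest: AVAssetResourceLoadingRequest) -> Bool {
    guard let dataRequest = loadingRequest.dataRequest,
          let url = loadingRequest.request.url,
          var components = URLComponents(url: url, resolvingAgainstBaseURL: false) else { return false }
    components.scheme = "file"

    guard let localUrl = components.url,
          let encryptedData = StorageVideo().videoData(url: localUrl),
          let decryptedData = try? RNCryptor.decrypt(data: encryptedData, withPassword: "omitted") else {
        return false
    }

    if let contentRequest = loadingRequest.contentInformationRequest {
        contentRequest.contentType = AVFileType.mp4.rawValue
        contentRequest.contentLength = Int64(decryptedData.count)
        contentRequest.isByteRangeAccessSupported = true
    }

    // Respond only with the requested slice instead of the whole buffer.
    let offset = Int(dataRequest.requestedOffset)
    let length = min(dataRequest.requestedLength, decryptedData.count - offset)
    if offset < decryptedData.count, length > 0 {
        dataRequest.respond(with: decryptedData.subdata(in: offset..<(offset + length)))
    }
    loadingRequest.finishLoading()
    return true
}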

Swift won't play audio from link - iOS

I am using Subsonic, so mp3 files are served to me via a web service.
When I test with files that have a .mp3 extension, this code works. When I use it with the link below, it does not.
var player: AVPlayer!

override func viewDidLoad() {
    super.viewDidLoad()

    let url = URL(string: "http://192.168.1.74:4040/rest/download?u=admin&p=admin&v=1.12.0&c=myapp&id=114&format=mp3")!
    let playerItem = CachingPlayerItem(url: url)
    playerItem.delegate = self
    player = AVPlayer(playerItem: playerItem)
    player.automaticallyWaitsToMinimizeStalling = false
    player.play()
}
Browsing to the link in 'url' gives me the file I'd expect. I have also successfully downloaded the file, saved it as an MP3, and played it from my documents container, but this is not how I want the application to work.
TL;DR: how can I get my application to play audio from a REST API without a file extension?
You can use AVAssetResourceLoader to play audio without a file extension.
Here is an example.
First, configure the delegate of the resource loader:
var playerAsset: AVAsset!
if fileURL.pathExtension.isEmpty {
    var components = URLComponents(url: fileURL, resolvingAgainstBaseURL: false)!
    components.scheme = "fake" // make custom URL scheme
    components.path += ".mp3"
    playerAsset = AVURLAsset(url: components.url!)
    (playerAsset as! AVURLAsset).resourceLoader.setDelegate(self, queue: DispatchQueue.global())
} else {
    playerAsset = AVAsset(url: fileURL)
}
let playerItem = AVPlayerItem(asset: playerAsset)
Then read the audio data and respond to the resource loader:
// MARK: - AVAssetResourceLoaderDelegate methods
func resourceLoader(_ resourceLoader: AVAssetResourceLoader, shouldWaitForLoadingOfRequestedResource loadingRequest: AVAssetResourceLoadingRequest) -> Bool {
    if let url = loadingRequest.request.url {
        var components = URLComponents(url: url, resolvingAgainstBaseURL: false)!
        components.scheme = NSURLFileScheme // replace with the real URL scheme
        components.path = String(components.path.dropLast(4))
        if let attributes = try? FileManager.default.attributesOfItem(atPath: components.url!.path),
           let fileSize = attributes[FileAttributeKey.size] as? Int64 {
            loadingRequest.contentInformationRequest?.isByteRangeAccessSupported = true
            loadingRequest.contentInformationRequest?.contentType = "audio/mpeg3"
            loadingRequest.contentInformationRequest?.contentLength = fileSize
            let requestedOffset = loadingRequest.dataRequest!.requestedOffset
            let requestedLength = loadingRequest.dataRequest!.requestedLength
            if let handle = try? FileHandle(forReadingFrom: components.url!) {
                handle.seek(toFileOffset: UInt64(requestedOffset))
                let data = handle.readData(ofLength: requestedLength)
                loadingRequest.dataRequest?.respond(with: data)
                loadingRequest.finishLoading()
                return true
            } else {
                return false
            }
        } else {
            return false
        }
    } else {
        return false
    }
}
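If the MIME-style contentType string above ever gives trouble, note that contentType on AVAssetResourceLoadingContentInformationRequest is documented as a UTI rather than a MIME type; a hedged variant:
// Variant (assumption, not part of the original answer): use a UTI for the content type.
loadingRequest.contentInformationRequest?.contentType = AVFileType.mp3.rawValue // "public.mp3"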

Video Streaming Fails when using AVAssetResourceLoader

We are using video streaming in our Swift iOS application and it works very well. The problem is that we would like to use AVAssetResourceLoader so we can make the requests to the streaming server with our own URLSession rather than whatever AVPlayer uses (I have been completely unable to find out which session AVPlayer uses or how to influence it).
The exact behavior is that shouldWaitForLoadingOfRequestedResource is called once for two bytes of the .m3u8 file, then it is called again asking for the whole file, and then (based on the start time) the correct .ts file is requested. After we fetch the .ts file the video player simply doesn't do anything further.
This happens whether we use the sample streaming video "https://tungsten.aaplimg.com/VOD/bipbop_adv_example_v2/master.m3u8" or our own server. The sequence is identical if we don't use AVAssetResourceLoader (we can tell from our server) right up until the .ts file is requested. At that point, when we don't use the custom loader, the AVPlayer brings up the video and keeps requesting .ts files. If we comment out all other interactions with the AVPlayer, including setting the initial time, the behavior is identical, so I am only going to include the code from viewDidLoad and shouldWaitForLoadingOfRequestedResource.
Again, if we simply remove the "xyzzy" prefix so that AVAssetResourceLoader isn't used, everything works. Also, I guess importantly, if we target a video file that is not a streaming file, everything works either way.
One more thing: our transformation of the MIME type for the .ts file produces some kind of weird dynamic UTI, but this doesn't seem to have anything to do with the problem, because even if we hardcode the UTI the same thing happens.
override func viewDidLoad() {
    super.viewDidLoad()

    avPlayer = AVPlayer()
    avPlayerLayer = AVPlayerLayer(player: avPlayer)
    videoView.layer.insertSublayer(avPlayerLayer, at: 0)
    videoView.backgroundColor = UIColor.black

    url = URL(string: "xyzzy" + currentPatient.videoURL())!
    let asset = AVURLAsset(url: url)
    asset.resourceLoader.setDelegate(self, queue: DispatchQueue.main)
    let item = AVPlayerItem(asset: asset)
    let avPlayerItem = item
    avPlayer.replaceCurrentItem(with: avPlayerItem)

    videoScrollView.delegate = self
    videoScrollView.minimumZoomScale = 1.0
    videoScrollView.maximumZoomScale = 6.0
}

func resourceLoader(_ resourceLoader: AVAssetResourceLoader, shouldWaitForLoadingOfRequestedResource loadingRequest: AVAssetResourceLoadingRequest) -> Bool {
    let urlString = loadingRequest.request.url?.absoluteString
    let urlComponents = urlString?.components(separatedBy: "xyzzy")
    let url = URL(string: urlComponents![1])
    let request = loadingRequest.dataRequest!
    let infoRequest = loadingRequest.contentInformationRequest

    let task = globalSession.dataTask(with: url!) { (data, response, error) in
        self.avPlayerThread.async {
            if error == nil && data != nil {
                let uti = UTTypeCreatePreferredIdentifierForTag(kUTTagClassMIMEType, response?.mimeType as! CFString, nil)
                if let infoRequest = infoRequest {
                    infoRequest.contentType = uti?.takeRetainedValue() as? String
                    if request.requestsAllDataToEndOfResource == false {
                        infoRequest.contentLength = Int64(request.requestedLength)
                    } else {
                        infoRequest.contentLength = Int64((data?.count)!)
                    }
                    infoRequest.isByteRangeAccessSupported = true
                }
                if infoRequest == nil || request.requestsAllDataToEndOfResource == true {
                    loadingRequest.dataRequest?.respond(with: data!)
                }
                loadingRequest.finishLoading()
            } else {
                print("error \(error)")
                loadingRequest.finishLoading(with: error)
            }
        }
    }
    task.resume()
    return true
}
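For what it's worth, custom-scheme AVAssetResourceLoader delegates and HLS (.m3u8/.ts) are a known rough combination. One workaround sometimes used is to answer each loading request with a redirect back to the real https URL so AVPlayer fetches the playlist and segments itself. The sketch below reuses the question's "xyzzy" prefix; it is only an illustration of the redirect mechanism, not a confirmed fix for this question, and it hands the data transfer back to AVPlayer's own networking rather than a custom URLSession.
// Sketch: redirect the loading request to the real URL instead of serving data ourselves.
func resourceLoader(_ resourceLoader: AVAssetResourceLoader,
                    shouldWaitForLoadingOfRequestedResource loadingRequest: AVAssetResourceLoadingRequest) -> Bool {
    guard let fakeURL = loadingRequest.request.url,
          let realURL = URL(string: fakeURL.absoluteString.replacingOccurrences(of: "xyzzy", with: "")) else {
        return false
    }
    loadingRequest.redirect = URLRequest(url: realURL)
    loadingRequest.response = HTTPURLResponse(url: realURL, statusCode: 302, httpVersion: nil, headerFields: nil)
    loadingRequest.finishLoading()
    return true
}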
