Load local encrypted video file Asset into AVPlayer - iOS

I want to read the data from an encrypted "video" file, decrypt it, and play it with AVPlayer.
My app can download a video; the video comes encrypted from my API. All I have to do is decrypt the file with a cipher, and then it becomes a valid mp4 file.
These steps are covered. I went through the process, wrote the decrypted data to a file, played it on my machine, and also passed its file URL to AVPlayer, which played it as well.
However, I do not want to have to save the decrypted file. I did a bit of research and came up with AVAssetResourceLoaderDelegate. I tried to implement it, but without success. The examples I saw were all hitting an online URL, so I'm not entirely sure whether it's possible to do this locally the way I tried.
Can anyone help me?
guard var components = URLComponents(url: video.url, resolvingAgainstBaseURL: false) else { return }
components.scheme = "encryptedVideo" // custom scheme so AVFoundation consults the resource loader delegate
guard let url = components.url else { return }

let asset = AVURLAsset(url: url)
asset.resourceLoader.setDelegate(self, queue: DispatchQueue.global(qos: .background))

let playerItem = AVPlayerItem(asset: asset)
let player = AVPlayer(playerItem: playerItem)
player.actionAtItemEnd = .none

self.avPlayerViewController?.player = player
self.avPlayerViewController?.player?.play()
func resourceLoader(_ resourceLoader: AVAssetResourceLoader, shouldWaitForLoadingOfRequestedResource loadingRequest: AVAssetResourceLoadingRequest) -> Bool {
    if let dataRequest = loadingRequest.dataRequest,
        let url = loadingRequest.request.url,
        let contentRequest = loadingRequest.contentInformationRequest {
        guard var components = URLComponents(url: url, resolvingAgainstBaseURL: false) else { return false }
        components.scheme = "file"
        guard let localUrl = components.url else { return false }

        let storageProvider = StorageVideo()
        let dataMaybe = storageProvider.videoData(url: localUrl)
        guard let encryptedData = dataMaybe,
            let decryptedData = try? RNCryptor.decrypt(data: encryptedData,
                                                       withPassword: "omitted") else {
            return false
        }

        contentRequest.contentType = AVFileType.mp4.rawValue
        contentRequest.contentLength = Int64(decryptedData.count)
        // contentRequest.isByteRangeAccessSupported = true

        dataRequest.respond(with: decryptedData)
        loadingRequest.finishLoading()

        // Did this to save the file and check that it was OK. I could play the file on my machine.
        // If I pass this file URL to an asset (step above) it loads as well.
        // if let documentDirectory = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first {
        //     let fileDecryptedURL = documentDirectory.appendingPathComponent("test").appendingPathExtension("mp4")
        //     try? decryptedData.write(to: fileDecryptedURL)
        // }
    }
    return true
}
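Note that AVFoundation typically drives a custom-scheme asset with several byte-range requests rather than a single request for the whole file, so a delegate that always responds with the full buffer and ignores requestedOffset may stall. Below is a minimal sketch of honoring the requested range, assuming the decrypted Data is already in memory; decryptData(for:) is a hypothetical helper standing in for the StorageVideo + RNCryptor code above, not a confirmed fix.

func resourceLoader(_ resourceLoader: AVAssetResourceLoader,
                    shouldWaitForLoadingOfRequestedResource loadingRequest: AVAssetResourceLoadingRequest) -> Bool {
    guard let url = loadingRequest.request.url,
          let decryptedData = decryptData(for: url) else { return false } // hypothetical helper

    if let contentRequest = loadingRequest.contentInformationRequest {
        contentRequest.contentType = AVFileType.mp4.rawValue
        contentRequest.contentLength = Int64(decryptedData.count)
        contentRequest.isByteRangeAccessSupported = true
    }

    if let dataRequest = loadingRequest.dataRequest {
        let offset = Int(dataRequest.requestedOffset)
        if offset < decryptedData.count {
            // Serve only the slice the player asked for, not the whole buffer.
            let length = dataRequest.requestsAllDataToEndOfResource
                ? decryptedData.count - offset
                : min(dataRequest.requestedLength, decryptedData.count - offset)
            dataRequest.respond(with: decryptedData.subdata(in: offset ..< offset + length))
        }
    }

    loadingRequest.finishLoading()
    return true
}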

Related

AVFoundation over Cellular Data

When I use the code below and pull my https video link from Firebase over Wi-Fi, everything is smooth: the video immediately plays with zero issues. When I use the same code over cellular, everything moves extremely slowly; the video pauses and takes forever to load.
If it plays from a file, whether I'm on cellular or Wi-Fi shouldn't matter. What is the issue here?
DataModel:
class Video {
    var httpsStr: String?
    var videoURL: URL?

    convenience init(dict: [String: Any]) {
        self.init()
        if let httpsStr = dict["httpsStr"] as? String {
            self.httpsStr = httpsStr
            let url = URL(string: httpsStr)!
            let assetKeys = ["playable", "duration"]
            let asset = AVURLAsset(url: url)
            asset.loadValuesAsynchronously(forKeys: assetKeys, completionHandler: {
                DispatchQueue.main.async {
                    self.videoURL = asset.url
                    // save videoURL to FileManager to play video from disk
                }
            })
        }
    }
}
Firebase Pull:
ref.observeSingleEvent(of: .value) { (snapshot) in
    guard let dict = snapshot.value as? [String: Any] else { return }
    let video = Video(dict: dict)
    self.video = video
    DispatchQueue.main.asyncAfter(deadline: .now() + 2, execute: {
        self.playVideo()
    })
}
Play Video:
func playVideo() {
    // init AVPlayer ...
    guard let videoURL = self.video.videoURL else { return }
    let lastPathComponent = videoURL.lastPathComponent
    let file = FileManager...appendingPathComponent(lastPathComponent)
    if FileManager.default.fileExists(atPath: file.path) {
        let asset = AVAsset(url: file)
        play(asset)
    } else {
        let asset = AVAsset(url: videoURL)
        play(asset)
    }
}

func play(_ asset: AVAsset) {
    self.playerItem = AVPlayerItem(asset: asset)
    self.player?.automaticallyWaitsToMinimizeStalling = false // I also set this to true
    self.playerItem?.preferredForwardBufferDuration = TimeInterval(1)
    self.player?.replaceCurrentItem(with: playerItem!)
    // play video
}
I followed this answer and now everything seems to work smoothly on cellular data. I needed to include the tracks property in the assetKeys:
"You create an asset from a URL using AVURLAsset. Creating the asset, however, does not necessarily mean that it's ready for use. To be used, an asset must have loaded its tracks."
class Video {
    var httpsStr: String?
    var videoURL: URL?

    convenience init(dict: [String: Any]) {
        self.init()
        if let httpsStr = dict["httpsStr"] as? String {
            self.httpsStr = httpsStr
            let url = URL(string: httpsStr)!
            let assetKeys = ["playable", "duration", "tracks"] // <----- "tracks" added here
            let asset = AVURLAsset(url: url)
            asset.loadValuesAsynchronously(forKeys: assetKeys, completionHandler: {
                var error: NSError? = nil
                let status = asset.statusOfValue(forKey: "tracks", error: &error)
                switch status {
                case .loaded:
                    // Successfully loaded, continue processing
                    DispatchQueue.main.async {
                        self.videoURL = asset.url
                        // save videoURL to FileManager to play video from disk
                    }
                case .failed:
                    // Examine NSError pointer to determine failure
                    print("Error", error?.localizedDescription as Any)
                default:
                    // Handle all other cases
                    print("default")
                }
            })
        }
    }
}
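As an aside, on newer SDKs (iOS 15 and later) the same key loading can be written with the async load(_:) API instead of loadValuesAsynchronously/statusOfValue. A minimal sketch, assuming the same url as in the initializer above:

let asset = AVURLAsset(url: url)
Task {
    do {
        // Suspends until both properties are loaded, throwing if loading fails.
        let (tracks, duration) = try await asset.load(.tracks, .duration)
        print("Loaded \(tracks.count) tracks, duration: \(duration.seconds)s")
    } catch {
        print("Failed to load asset properties:", error)
    }
}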

How do I asynchronously download and cache videos for use in my app?

I know that SDWebImage loads the image on a background thread so you're not blocking the UI/main thread while the download is going on. Furthermore, it will also disk-cache all the images you've downloaded and will NEVER re-download an image from the same URL.
So I'm wondering whether there is something similar, or the same thing, for videos?
Something to note: I add videos as a sublayer.
let videoURL = URL(string: postArray[indexPath.item].media[0].videoURLString!) // need to do error handling here
print(videoURL as Any, "<-- video url in display")
let player = AVPlayer(url: videoURL!)
let playerLayer = AVPlayerLayer(player: player)
playerLayer.frame = CGRect(x: -8, y: 0, width: 138, height: 217) // cell.frame
cell.imageOrVideoView.layer.addSublayer(playerLayer)
// Other code and play()
This was recommended in the past, but it seems like it does something different, or at the very least has too much extra functionality I don't need.
Update:
What I am testing:
DispatchQueue.global(qos: .default).async(execute: {
    var downloadedData: Data? = nil
    if let url = URL(string: videoURL) {
        do {
            downloadedData = try Data(contentsOf: url)
        } catch {
            print(error, "downloaded Data failed")
        }
    }
    if downloadedData != nil {
        // STORE IN FILESYSTEM
        let cachesDirectory = NSSearchPathForDirectoriesInDomains(.cachesDirectory, .userDomainMask, true)[0]
        let file = URL(fileURLWithPath: cachesDirectory).appendingPathComponent(videoURL).absoluteString
        do {
            try downloadedData?.write(to: URL(string: file)!)
        } catch {
            print(error, "error downloading data and writing it")
        }
        // STORE IN MEMORY
        if let downloadedData = downloadedData {
            memoryCache?.setObject(downloadedData as AnyObject, forKey: videoURL as AnyObject)
        }
    }
    // NOW YOU CAN CREATE AN AVASSET OR UIIMAGE FROM THE FILE OR DATA
})
However, I do not understand whether I should do something right after the last line, or after the }), or whether I need to add a UI update there.
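For what it's worth, a common pattern is to hop back onto the main queue right after the background write finishes and only then touch the UI or create the player. A minimal sketch, reusing the file name from the snippet above:

// Inside the background block, after the data has been written to `file`:
DispatchQueue.main.async {
    // Back on the main thread: safe to update the UI or hand the local copy to AVPlayer.
    if let localURL = URL(string: file) {
        let player = AVPlayer(url: localURL)
        // attach `player` to an AVPlayerLayer / AVPlayerViewController here
    }
}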
So I was able to solve the problem with the following:
Swift 4:
import Foundation

public enum Result<T> {
    case success(T)
    case failure(NSError)
}

class CacheManager {

    static let shared = CacheManager()

    private let fileManager = FileManager.default

    private lazy var mainDirectoryUrl: URL = {
        let documentsUrl = self.fileManager.urls(for: .cachesDirectory, in: .userDomainMask).first!
        return documentsUrl
    }()

    func getFileWith(stringUrl: String, completionHandler: @escaping (Result<URL>) -> Void) {
        let file = directoryFor(stringUrl: stringUrl)

        // return file path if already exists in cache directory
        guard !fileManager.fileExists(atPath: file.path) else {
            completionHandler(Result.success(file))
            return
        }

        DispatchQueue.global().async {
            if let videoData = NSData(contentsOf: URL(string: stringUrl)!) {
                videoData.write(to: file, atomically: true)
                DispatchQueue.main.async {
                    completionHandler(Result.success(file))
                }
            } else {
                DispatchQueue.main.async {
                    let error = NSError(domain: "SomeErrorDomain", code: -2001 /* some error code */, userInfo: ["description": "Can't download video"])
                    completionHandler(Result.failure(error))
                }
            }
        }
    }

    private func directoryFor(stringUrl: String) -> URL {
        let fileURL = URL(string: stringUrl)!.lastPathComponent
        let file = self.mainDirectoryUrl.appendingPathComponent(fileURL)
        return file
    }
}
Usage:
CacheManager.shared.getFileWith(stringUrl: videoURL) { result in
    switch result {
    case .success(let url):
        // do some magic with the path to the saved video
        break
    case .failure(let error):
        // handle error
        print(error, "failure in the Cache of video")
        break
    }
}
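To connect this back to the sublayer setup from the question, the success case can hand the cached file URL straight to an AVPlayerLayer. A rough sketch, assuming cell and its imageOrVideoView exist as in the question code and videoURL is the same string passed above:

CacheManager.shared.getFileWith(stringUrl: videoURL) { result in
    guard case .success(let localURL) = result else { return }
    // Local file URL: plays from disk, no re-download on later visits.
    let player = AVPlayer(url: localURL)
    let playerLayer = AVPlayerLayer(player: player)
    playerLayer.frame = cell.imageOrVideoView.bounds
    cell.imageOrVideoView.layer.addSublayer(playerLayer)
    player.play()
}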

Swift won't play audio from link - iOS

I am using Subsonic, so mp3 files are served to me via a web service.
When I test using files that have a .mp3 extension, this code works. When I use it with the link I have below, it does not.
var player: AVPlayer!

override func viewDidLoad() {
    super.viewDidLoad()

    let url = URL(string: "http://192.168.1.74:4040/rest/download?u=admin&p=admin&v=1.12.0&c=myapp&id=114&format=mp3")!
    let playerItem = CachingPlayerItem(url: url)
    playerItem.delegate = self

    player = AVPlayer(playerItem: playerItem)
    player.automaticallyWaitsToMinimizeStalling = false
    player.play()
}
Browsing the link in 'url' provides me with the file I'd expect. I have also successfully downloaded the file, saved it as an mp3, and played it from my documents container, but this is not how I want the application to work.
TL;DR: how can I get my application to play audio from a REST API without an extension?
You can use AVAssetResourceLoader to play audio without an extension.
Here is an example.
First, configure the delegate of the resource loader:
var playerAsset: AVAsset!
if fileURL.pathExtension.count == 0 {
    var components = URLComponents(url: fileURL, resolvingAgainstBaseURL: false)!
    components.scheme = "fake" // make custom URL scheme
    components.path += ".mp3"
    playerAsset = AVURLAsset(url: components.url!)
    (playerAsset as! AVURLAsset).resourceLoader.setDelegate(self, queue: DispatchQueue.global())
} else {
    playerAsset = AVAsset(url: fileURL)
}
let playerItem = AVPlayerItem(asset: playerAsset)
Then, read the audio data and respond to the resource loader:
// MARK: - AVAssetResourceLoaderDelegate methods

func resourceLoader(_ resourceLoader: AVAssetResourceLoader, shouldWaitForLoadingOfRequestedResource loadingRequest: AVAssetResourceLoadingRequest) -> Bool {
    if let url = loadingRequest.request.url {
        var components = URLComponents(url: url, resolvingAgainstBaseURL: false)!
        components.scheme = NSURLFileScheme // replace with the real URL scheme
        components.path = String(components.path.dropLast(4))

        if let attributes = try? FileManager.default.attributesOfItem(atPath: components.url!.path),
            let fileSize = attributes[FileAttributeKey.size] as? Int64 {
            loadingRequest.contentInformationRequest?.isByteRangeAccessSupported = true
            loadingRequest.contentInformationRequest?.contentType = "audio/mpeg3"
            loadingRequest.contentInformationRequest?.contentLength = fileSize

            let requestedOffset = loadingRequest.dataRequest!.requestedOffset
            let requestedLength = loadingRequest.dataRequest!.requestedLength

            if let handle = try? FileHandle(forReadingFrom: components.url!) {
                handle.seek(toFileOffset: UInt64(requestedOffset))
                let data = handle.readData(ofLength: requestedLength)
                loadingRequest.dataRequest?.respond(with: data)
                loadingRequest.finishLoading()
                return true
            } else {
                return false
            }
        } else {
            return false
        }
    } else {
        return false
    }
}
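One caveat: contentType on the content information request expects a UTI rather than a raw MIME string, so "audio/mpeg3" may or may not be honored on every OS version. If playback is picky about it, one option is to derive the UTI from the MIME type, the same trick the streaming question further down uses. A small sketch:

import MobileCoreServices

// Derive the preferred UTI for a given MIME type.
func preferredUTI(forMIMEType mimeType: String) -> String? {
    guard let uti = UTTypeCreatePreferredIdentifierForTag(kUTTagClassMIMEType,
                                                          mimeType as CFString,
                                                          nil)?.takeRetainedValue() else {
        return nil
    }
    return uti as String
}

// Inside the delegate method above, instead of the hard-coded string:
// loadingRequest.contentInformationRequest?.contentType = preferredUTI(forMIMEType: "audio/mpeg")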

iOS download and play mp3 file

I am looking for a way to download and play an mp3 file simultaneously.
I can download it, save it to local storage, and play it afterwards.
But how can I start downloading and playing it at the same time, and then save it to local storage once it has completely downloaded? Which tools should I use for this?
Currently I use TCBlobDownload to download, then save the file and play it with AVAudioPlayer.
You can use AVAssetResourceLoader to play an audio file as soon as there is enough data, while continuing to download.
Configure the delegate of the resource loader:
var playerAsset: AVAsset!
if fileURL.pathExtension.count == 0 {
    var components = URLComponents(url: fileURL, resolvingAgainstBaseURL: false)!
    components.scheme = "fake" // make custom URL scheme
    components.path += ".mp3"
    playerAsset = AVURLAsset(url: components.url!)
    (playerAsset as! AVURLAsset).resourceLoader.setDelegate(self, queue: DispatchQueue.global())
} else {
    playerAsset = AVAsset(url: fileURL)
}
let playerItem = AVPlayerItem(asset: playerAsset)
Then, read the audio data and respond to the resource loader:
// MARK: - AVAssetResourceLoaderDelegate methods

func resourceLoader(_ resourceLoader: AVAssetResourceLoader, shouldWaitForLoadingOfRequestedResource loadingRequest: AVAssetResourceLoadingRequest) -> Bool {
    if let url = loadingRequest.request.url {
        var components = URLComponents(url: url, resolvingAgainstBaseURL: false)!
        components.scheme = NSURLFileScheme // replace with the real URL scheme
        components.path = String(components.path.dropLast(4))

        if let attributes = try? FileManager.default.attributesOfItem(atPath: components.url!.path),
            let fileSize = attributes[FileAttributeKey.size] as? Int64 {
            loadingRequest.contentInformationRequest?.isByteRangeAccessSupported = true
            loadingRequest.contentInformationRequest?.contentType = "audio/mpeg3"
            loadingRequest.contentInformationRequest?.contentLength = fileSize

            let requestedOffset = loadingRequest.dataRequest!.requestedOffset
            let requestedLength = loadingRequest.dataRequest!.requestedLength

            if let handle = try? FileHandle(forReadingFrom: components.url!) {
                handle.seek(toFileOffset: UInt64(requestedOffset))
                let data = handle.readData(ofLength: requestedLength)
                loadingRequest.dataRequest?.respond(with: data)
                loadingRequest.finishLoading()
                return true
            } else {
                return false
            }
        } else {
            return false
        }
    } else {
        return false
    }
}
And if you want to do this with Objective-C, refer to this.
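If byte-for-byte reuse between what the player consumes and the saved copy isn't a hard requirement, a simpler alternative (not what the answer above does) is to let AVPlayer stream the remote mp3 while an ordinary URLSession download task saves it in parallel. A minimal sketch, where remoteURL and the destination file name are assumptions:

import AVFoundation
import Foundation

let remoteURL = URL(string: "https://example.com/track.mp3")! // assumed URL

// 1. Start playback immediately; AVPlayer streams progressively on its own.
let player = AVPlayer(url: remoteURL)
player.play()

// 2. Download the same file in parallel and keep a local copy for next time.
let task = URLSession.shared.downloadTask(with: remoteURL) { tempURL, response, error in
    guard let tempURL = tempURL, error == nil else { return }
    let caches = FileManager.default.urls(for: .cachesDirectory, in: .userDomainMask)[0]
    let destination = caches.appendingPathComponent(remoteURL.lastPathComponent)
    try? FileManager.default.removeItem(at: destination) // overwrite any stale copy
    try? FileManager.default.moveItem(at: tempURL, to: destination)
}
task.resume()

The trade-off is that the audio is fetched twice (once by the player, once by the download task), which is exactly the duplication the resource-loader approach avoids.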

Video Streaming Fails when using AVAssetResourceLoader

We are using video streaming in our Swift iOS application and it works very well. The problem is that we would like to use AVAssetResourceLoader so we can make the requests to the streaming server using our own URLSession rather than whatever AVPlayer uses (I have been completely unable to find out what session AVPlayer uses or how to influence which session it uses).
The exact behavior is that shouldWaitForLoadingOfRequestedResource is called once for two bytes of the .m3u8 file, then it is called again asking for the whole file, and then (based on the start time) the correct .ts file is requested. After we fetch the .ts file, the video player simply doesn't do anything further.
This happens whether we use the sample streaming video file "https://tungsten.aaplimg.com/VOD/bipbop_adv_example_v2/master.m3u8" or our own server. The sequence is identical if we don't use AVAssetResourceLoader (we can tell from our server) right up until the .ts file is requested. At that point, when we don't use the custom loader, the AVPlayer brings up the video and keeps requesting .ts files. If we comment out all other interactions with the AVPlayer, including setting the initial time, the behavior is identical, so I am only going to include the code from viewDidLoad and shouldWaitForLoadingOfRequestedResource.
Again, if we simply remove the "xyzzy" prefix so that AVAssetResourceLoader isn't used, everything works. Also, I guess importantly, if we target a video file that is not a streaming file, everything works either way.
One more thing: our transformation of the MIME type for the .ts file produced some kind of weird dynamic UTI, but this doesn't seem to have anything to do with the problem, because even if we hardcode the UTI the same thing happens.
override func viewDidLoad() {
    super.viewDidLoad()

    avPlayer = AVPlayer()
    avPlayerLayer = AVPlayerLayer(player: avPlayer)
    videoView.layer.insertSublayer(avPlayerLayer, at: 0)
    videoView.backgroundColor = UIColor.black

    url = URL(string: "xyzzy" + currentPatient.videoURL())!
    let asset = AVURLAsset(url: url)
    asset.resourceLoader.setDelegate(self, queue: DispatchQueue.main)
    let item = AVPlayerItem(asset: asset)
    let avPlayerItem = item
    avPlayer.replaceCurrentItem(with: avPlayerItem)

    videoScrollView.delegate = self
    videoScrollView.minimumZoomScale = 1.0
    videoScrollView.maximumZoomScale = 6.0
}
func resourceLoader(_ resourceLoader: AVAssetResourceLoader, shouldWaitForLoadingOfRequestedResource loadingRequest: AVAssetResourceLoadingRequest) -> Bool {
    let urlString = loadingRequest.request.url?.absoluteString
    let urlComponents = urlString?.components(separatedBy: "xyzzy")
    let url = URL(string: urlComponents![1])
    let request = loadingRequest.dataRequest!
    let infoRequest = loadingRequest.contentInformationRequest

    let task = globalSession.dataTask(with: url!) { (data, response, error) in
        self.avPlayerThread.async {
            if error == nil && data != nil {
                let uti = UTTypeCreatePreferredIdentifierForTag(kUTTagClassMIMEType, response?.mimeType as! CFString, nil)
                if let infoRequest = infoRequest {
                    infoRequest.contentType = uti?.takeRetainedValue() as? String
                    if request.requestsAllDataToEndOfResource == false {
                        infoRequest.contentLength = Int64(request.requestedLength)
                    } else {
                        infoRequest.contentLength = Int64((data?.count)!)
                    }
                    infoRequest.isByteRangeAccessSupported = true
                }
                if infoRequest == nil || request.requestsAllDataToEndOfResource == true {
                    loadingRequest.dataRequest?.respond(with: data!)
                }
                loadingRequest.finishLoading()
            } else {
                print("error \(error)")
                loadingRequest.finishLoading(with: error)
            }
        }
    }
    task.resume()
    return true
}
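Not offered as a confirmed fix for the HLS stall, just a general resource-loader observation: the completion handler above always fetches the entire resource and reports the full requested length, regardless of the byte range the loader actually asked for. A hedged sketch of forwarding that range to the server instead, using the same url, globalSession, and loadingRequest names as above:

// Sketch: forward the loader's byte range to the server instead of always fetching the whole file.
var urlRequest = URLRequest(url: url!)
if let dataRequest = loadingRequest.dataRequest, !dataRequest.requestsAllDataToEndOfResource {
    let from = dataRequest.requestedOffset
    let to = from + Int64(dataRequest.requestedLength) - 1
    urlRequest.setValue("bytes=\(from)-\(to)", forHTTPHeaderField: "Range")
}
let task = globalSession.dataTask(with: urlRequest) { data, response, error in
    // respond(with:) / finishLoading() as in the method above
}
task.resume()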

Resources