How to add/use GCKMediaQueue in Swift? - ios

So I have managed to play a video on Chromecast, but only one at a time. I've been trying to figure out how to programmatically add to the queue; the idea is to keep playing videos all day. In the code below, playthisvideo() randomly returns a string that contains an http://.....mp4 URL. I've looked at Google's documentation, but it's either too vague or I just don't understand it, and I can't seem to find any examples that would lead the way for me to follow.
func castthevideo() {
    let metadata = GCKMediaMetadata()
    metadata.setString("Los Simpsons", forKey: kGCKMetadataKeyTitle)
    metadata.setString("Barista: ¿Cómo tomas tu café? " +
                       " Yo: Muy, muy en serio.",
                       forKey: kGCKMetadataKeySubtitle)
    metadata.addImage(GCKImage(url: URL(string: "https://m.media-amazon.com/images/M/MV5BYjFkMTlkYWUtZWFhNy00M2FmLThiOTYtYTRiYjVlZWYxNmJkXkEyXkFqcGdeQXVyNTAyODkwOQ##._V1_.jpg")!,
                               width: 480,
                               height: 360))

    let PTV = playthisvideo()
    print("****** ", PTV)
    guard let mediaURL = URL(string: PTV) else {
        print("****** invalid mediaURL")
        return
    }

    let mediaInfoBuilder = GCKMediaInformationBuilder(contentURL: mediaURL)
    mediaInfoBuilder.streamType = GCKMediaStreamType.none
    mediaInfoBuilder.contentType = "video/mp4"
    mediaInfoBuilder.metadata = metadata
    let mediaInformation = mediaInfoBuilder.build()

    if let request = sessionManager.currentSession?.remoteMediaClient?.loadMedia(mediaInformation) {
        request.delegate = self
    }
    GCKCastContext.sharedInstance().presentDefaultExpandedMediaControls()
}
func castanthor(byAppending appending: Bool) {
    let PTV = playthisvideo()
    guard let mediaURL = URL(string: PTV) else {
        print("invalid mediaURL")
        return
    }
    guard let remoteMediaClient = GCKCastContext.sharedInstance().sessionManager.currentCastSession?.remoteMediaClient else { return }

    // Build the media information for the next video.
    let mediaInfoBuilder = GCKMediaInformationBuilder(contentURL: mediaURL)
    mediaInfoBuilder.contentType = "video/mp4"

    let builder = GCKMediaQueueItemBuilder()
    builder.mediaInformation = mediaInfoBuilder.build()
    builder.autoplay = true
    builder.preloadTime = 3
    let item = builder.build() // build() is a method; call it rather than storing a function reference

    if remoteMediaClient.mediaStatus != nil, appending {
        // A queue already exists: append after the last item.
        let request = remoteMediaClient.queueInsert(item, beforeItemWithID: kGCKMediaQueueInvalidItemID)
        request.delegate = self
    } else {
        // No queue yet: load a new one.
        let options = GCKMediaQueueLoadOptions()
        options.repeatMode = remoteMediaClient.mediaStatus?.queueRepeatMode ?? .off
        let request = remoteMediaClient.queueLoad([item], with: options)
        request.delegate = self
    }
}

var mediaItems = [GCKMediaQueueItem]()
let urls: [URL] = [] // array of audio and video URLs
for index in 0..<urls.count {
    let mediaInfoBuilder = GCKMediaInformationBuilder(contentURL: urls[index])
    mediaInfoBuilder.streamType = GCKMediaStreamType.none
    mediaInfoBuilder.contentType = "video/mp4"
    mediaInfoBuilder.metadata = metadata // metadata built as in castthevideo() above

    let builder = GCKMediaQueueItemBuilder()
    builder.mediaInformation = mediaInfoBuilder.build()
    builder.autoplay = true
    builder.preloadTime = 3
    mediaItems.append(builder.build()) // build() must be called here as well
}

if let remoteMediaClient = GCKCastContext.sharedInstance().sessionManager.currentCastSession?.remoteMediaClient {
    let loadOptions = GCKMediaQueueLoadOptions()
    loadOptions.repeatMode = .all
    loadOptions.startPosition = 0
    remoteMediaClient.queueLoad(mediaItems, with: loadOptions)
}
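
To keep videos playing all day, one approach (a sketch, untested; the listener API is from the Google Cast iOS SDK v4, CastPlayerController is a hypothetical owner type, and castanthor(byAppending:) is the function above) is to watch the queue and top it up whenever it runs low:

import GoogleCast

// Conform whatever object owns the cast session to GCKRemoteMediaClientListener.
extension CastPlayerController: GCKRemoteMediaClientListener {
    // Called whenever the receiver's queue changes.
    func remoteMediaClientDidUpdateQueue(_ client: GCKRemoteMediaClient) {
        // When fewer than two items remain, append another random video.
        if client.mediaQueue.itemCount < 2 {
            castanthor(byAppending: true)
        }
    }
}

// Register once, e.g. right after the session starts:
// sessionManager.currentCastSession?.remoteMediaClient?.add(self)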

Related

compressing Video Error: Terminated due to memory issue

I want to first trim a video chosen from the photo library and then compress the video file to a custom size and bitrate. I'm using PryntTrimmerView to trim the video, and then compress the trimmed result.
I successfully export the trimmed asset and then get the compressed file. When I choose a short video from the gallery there is no problem, but when I choose a large video, after compressing I get this error in the console:
Message from debugger: Terminated due to memory issue
Here is my code for trimming and compressing the video file.
func prepareAssetComposition() throws {
    topActivity.isHidden = false
    topActivity.startAnimating()
    confirmButton.isUserInteractionEnabled = false

    // Get the asset and its video track
    guard let asset = trimmerView.asset, let videoTrack = asset.tracks(withMediaType: AVMediaTypeVideo).first else {
        return
    }
    let assetComposition = AVMutableComposition()
    let start = trimmerView.startTime?.seconds
    let end = trimmerView.endTime?.seconds
    let startTime = CMTime(seconds: Double(start ?? 0), preferredTimescale: 1000)
    let endTime = CMTime(seconds: Double(end ?? 0), preferredTimescale: 1000)
    let trackTimeRange = CMTimeRange(start: startTime, end: endTime)

    let videoCompositionTrack = assetComposition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid)
    try videoCompositionTrack.insertTimeRange(trackTimeRange, of: videoTrack, at: kCMTimeZero)

    if let audioTrack = asset.tracks(withMediaType: AVMediaTypeAudio).first {
        let audioCompositionTrack = assetComposition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)
        try audioCompositionTrack.insertTimeRange(trackTimeRange, of: audioTrack, at: kCMTimeZero)
    }

    // Derive the recording orientation from the track's preferred transform
    let size = videoTrack.naturalSize
    let txf = videoTrack.preferredTransform
    var recordType = ""
    if size.width == txf.tx && size.height == txf.ty {
        recordType = "UIInterfaceOrientationLandscapeRight"
    } else if txf.tx == 0 && txf.ty == 0 {
        recordType = "UIInterfaceOrientationLandscapeLeft"
    } else if txf.tx == 0 && txf.ty == size.width {
        recordType = "UIInterfaceOrientationPortraitUpsideDown"
    } else {
        recordType = "UIInterfaceOrientationPortrait"
    }

    if recordType == "UIInterfaceOrientationPortrait" {
        let t1 = CGAffineTransform(translationX: videoTrack.naturalSize.height, y: -(videoTrack.naturalSize.width - videoTrack.naturalSize.height) / 2)
        videoCompositionTrack.preferredTransform = t1.rotated(by: CGFloat(Double.pi / 2))
    } else if recordType == "UIInterfaceOrientationLandscapeRight" {
        let t1 = CGAffineTransform(translationX: videoTrack.naturalSize.height, y: -(videoTrack.naturalSize.width - videoTrack.naturalSize.height) / 2)
        videoCompositionTrack.preferredTransform = t1.rotated(by: -CGFloat(Double.pi))
    } else if recordType == "UIInterfaceOrientationPortraitUpsideDown" {
        let t1 = CGAffineTransform(translationX: videoTrack.naturalSize.height, y: -(videoTrack.naturalSize.width - videoTrack.naturalSize.height) / 2)
        videoCompositionTrack.preferredTransform = t1.rotated(by: -CGFloat(Double.pi / 2))
    }

    // Start exporting the video
    var name = ""
    var url: URL!
    if self.state == .Left {
        url = URL(fileURLWithPath: "\(NSTemporaryDirectory())TrimmedMovie1.mp4")
        name = "TrimmedMovie1.mp4"
    } else if state == .Right {
        url = URL(fileURLWithPath: "\(NSTemporaryDirectory())TrimmedMovie3.mp4")
        name = "TrimmedMovie3.mp4"
    } else if state == .Center {
        url = URL(fileURLWithPath: "\(NSTemporaryDirectory())TrimmedMovie2.mp4")
        name = "TrimmedMovie2.mp4"
    }
    try? FileManager.default.removeItem(at: url)

    let exportSession = AVAssetExportSession(asset: assetComposition, presetName: AVAssetExportPresetHighestQuality)
    exportSession?.outputFileType = AVFileTypeQuickTimeMovie // both branches of the original phone/pad check set the same type
    exportSession?.shouldOptimizeForNetworkUse = true
    exportSession?.outputURL = url
    exportSession?.exportAsynchronously(completionHandler: {
        DispatchQueue.main.async {
            if let url = exportSession?.outputURL, exportSession?.status == .completed {
                let asset = AVAsset(url: url)
                print(asset.duration)
                var thump: UIImage?
                var vData: Data?
                if let img = asset.videoThumbnail {
                    thump = img
                    if recordType == "UIInterfaceOrientationPortrait" {
                        if thump != nil {
                            let img = UIImage(cgImage: thump!.cgImage!, scale: CGFloat(1.0), orientation: .right)
                            thump = img
                            thump = thump?.fixedOrientation()
                        }
                    } else if recordType == "UIInterfaceOrientationLandscapeRight" {
                        if thump != nil {
                            let img = UIImage(cgImage: thump!.cgImage!, scale: CGFloat(1.0), orientation: .down)
                            thump = img
                            thump = thump?.fixedOrientation()
                        }
                    } else if recordType == "UIInterfaceOrientationPortraitUpsideDown" {
                        if thump != nil {
                            let img = UIImage(cgImage: thump!.cgImage!, scale: CGFloat(1.0), orientation: .left)
                            thump = img
                            thump = thump?.fixedOrientation()
                        }
                    }
                }
                if let videoData = NSData(contentsOf: url) {
                    vData = videoData as Data
                }
                if let delegate = self.delegate {
                    self.playbackTimeCheckerTimer?.invalidate()
                    self.playButton.setImage(#imageLiteral(resourceName: "play"), for: .normal)
                    self.playbackTimeCheckerTimer = nil
                    let size = CGSize(width: 1280, height: 720)
                    let directoryURL: URL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
                    let folderPath: URL = directoryURL.appendingPathComponent(name, isDirectory: true)
                    do {
                        try vData?.write(to: folderPath, options: [])
                    } catch {
                        print(error.localizedDescription)
                    }
                    self.compress(fileName: name, videoPath: folderPath.path, exportVideoPath: folderPath.path, renderSize: size, completion: { res in
                        if res {
                            OperationQueue.main.addOperation {
                                self.topActivity.isHidden = true
                                self.topActivity.stopAnimating()
                                self.confirmButton.isUserInteractionEnabled = true
                                delegate.setVideoFromPath(path: folderPath.path, thump: thump, videoData: vData)
                                self.dismiss(animated: true, completion: nil)
                                return
                            }
                        } else {
                            print("can not compress")
                        }
                    })
                }
            } else {
                self.topActivity.isHidden = true
                self.topActivity.stopAnimating()
                self.confirmButton.isUserInteractionEnabled = true
                let error = exportSession?.error
                print("error exporting video \(String(describing: error))")
            }
        }
    })
}
private func existsFileAtUrl(url: String, name: String) -> Bool {
    let path = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0] as String
    let url = URL(fileURLWithPath: path)
    let filePath = url.appendingPathComponent(name).path
    return FileManager.default.fileExists(atPath: filePath)
}
//MARK: Compress
func compress(fileName: String, videoPath: String, exportVideoPath: String, renderSize: CGSize, completion: @escaping (Bool) -> ()) {
    let videoUrl = URL(fileURLWithPath: videoPath)
    if !existsFileAtUrl(url: videoUrl.absoluteString, name: fileName) {
        completion(false)
        return
    }
    let videoAssetUrl = AVURLAsset(url: videoUrl)
    let videoTrackArray = videoAssetUrl.tracks(withMediaType: AVMediaTypeVideo)
    if videoTrackArray.count < 1 {
        completion(false)
        return
    }
    let videoAssetTrack = videoTrackArray[0]
    let audioTrackArray = videoAssetUrl.tracks(withMediaType: AVMediaTypeAudio)
    if audioTrackArray.count < 1 {
        completion(false)
        return
    }
    let audioAssetTrack = audioTrackArray[0]

    let outputUrl = URL(fileURLWithPath: exportVideoPath)
    var videoWriter = try? AVAssetWriter(url: outputUrl, fileType: AVFileTypeQuickTimeMovie)
    videoWriter?.shouldOptimizeForNetworkUse = true
    let vSetting = videoSettings(size: renderSize)
    let videoWriterInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: vSetting)
    videoWriterInput.expectsMediaDataInRealTime = false
    videoWriterInput.transform = videoAssetTrack.preferredTransform
    videoWriter?.add(videoWriterInput)

    // Output readers
    let videoReaderSettings: [String: Int] = [kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange)]
    let videoReaderOutput = AVAssetReaderTrackOutput(track: videoAssetTrack, outputSettings: videoReaderSettings)
    let videoReader = try! AVAssetReader(asset: videoAssetUrl)
    videoReader.add(videoReaderOutput)

    videoWriter?.startWriting()
    videoReader.startReading()
    videoWriter?.startSession(atSourceTime: kCMTimeZero)

    let processingVideoQueue = DispatchQueue(label: "processingVideoCompressionQueue")
    videoWriterInput.requestMediaDataWhenReady(on: processingVideoQueue, using: {
        while videoWriterInput.isReadyForMoreMediaData {
            let sampleVideoBuffer = videoReaderOutput.copyNextSampleBuffer()
            if videoReader.status == .reading && sampleVideoBuffer != nil {
                videoWriterInput.append(sampleVideoBuffer!)
            } else {
                videoWriterInput.markAsFinished()
                if videoReader.status == .completed {
                    videoWriter?.finishWriting(completionHandler: {
                        videoWriter = nil
                        completion(true)
                    })
                }
            }
        }
    })
}
//MARK: Setting
func videoSettings(size: CGSize) -> [String: AnyObject] {
    var compressionSettings = [String: AnyObject]()
    compressionSettings[AVVideoAverageBitRateKey] = 5 as AnyObject
    var settings = [String: AnyObject]()
    settings[AVVideoCompressionPropertiesKey] = compressionSettings as AnyObject
    settings[AVVideoCodecKey] = AVVideoCodecH264 as AnyObject?
    settings[AVVideoHeightKey] = size.height as AnyObject?
    settings[AVVideoWidthKey] = size.width as AnyObject?
    return settings
}
I found the issue: the problem is the while statement. When I dismiss the view controller, the closure keeps getting called and I get this error. Now I stop the while loop with a break when the view controller is dismissed, and everything works fine.
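A minimal sketch of that fix, assuming a flag (here called isCancelled, my name, set when the view controller is dismissed) is visible to the closure:

var isCancelled = false // set to true when the view controller is dismissed

videoWriterInput.requestMediaDataWhenReady(on: processingVideoQueue, using: {
    while videoWriterInput.isReadyForMoreMediaData {
        if isCancelled {
            // Stop pulling samples once the screen goes away.
            videoReader.cancelReading()
            videoWriterInput.markAsFinished()
            break
        }
        let sampleVideoBuffer = videoReaderOutput.copyNextSampleBuffer()
        if videoReader.status == .reading && sampleVideoBuffer != nil {
            videoWriterInput.append(sampleVideoBuffer!)
        } else {
            videoWriterInput.markAsFinished()
            if videoReader.status == .completed {
                videoWriter?.finishWriting(completionHandler: {
                    videoWriter = nil
                    completion(true)
                })
            }
            break // without this, the loop keeps spinning after the reader is done
        }
    }
})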

How to add Brightness to more than 4 videos using GPUImage in iOS Swift?

Below is my code to apply brightness to multiple videos. It works fine for 3 videos, but with more than 4 videos GPUImage crashes the application.
//arrVideoDetail -> contains the video data
//isPortrait -> the video orientation
func addBrightNessToVideo(arrVideoDetail: [SelectedAssestData]?, isPortrait: Bool, completion: ((_ updatedVideos: [SelectedAssestData]) -> Void)?) {
    SVProgressHUD.show()
    let imageDataGroup: DispatchGroup? = DispatchGroup()
    var updatedVideoDetail = [SelectedAssestData]()
    var arrForRemoveVideosPath = [String]()

    for videoDict in (arrVideoDetail)! {
        let videoDetail = videoDict
        let videoUrl = URL(fileURLWithPath: (videoDetail.DocumentLocalAssetsPath?.path)!)
        let brightNessValue = videoDetail.lightingPercent ?? 0.0
        if brightNessValue == 0 {
            updatedVideoDetail.append(videoDetail)
        } else {
            arrForRemoveVideosPath.append(videoUrl.path)
            imageDataGroup?.enter()

            let movie = GPUImageMovie(url: videoUrl)
            movie?.runBenchmark = true
            movie?.playAtActualSpeed = true

            let brightnessFilter = GPUImageBrightnessFilter()
            // Need to check this value with different videos
            brightnessFilter.brightness = brightNessValue // applying the brightness value
            movie?.addTarget(brightnessFilter)

            let anAsset = AVAsset(url: videoUrl)
            let tracks = anAsset.tracks(withMediaType: AVMediaTypeVideo)
            if tracks.count > 0 {
                let videoAssetTrack = anAsset.tracks(withMediaType: AVMediaTypeVideo)[0]
                var naturalSize = CGSize()
                naturalSize = videoAssetTrack.naturalSize // fetching the natural size of the video
                var videoWidth: CGFloat!
                var videoHeight: CGFloat!
                if isPortrait {
                    videoWidth = 1080
                    videoHeight = 1920
                } else {
                    videoWidth = 1920
                    videoHeight = 1080
                }

                // New path where the movie is created after the filter is applied
                let pathToMovie = NSTemporaryDirectory().appending("\(String(NSDate().timeIntervalSince1970)).mov")
                print(pathToMovie)
                let filemgr = FileManager.default
                do {
                    if filemgr.fileExists(atPath: pathToMovie) {
                        try filemgr.removeItem(atPath: pathToMovie)
                    } else {
                        print("\(pathToMovie) not found on applyEffect()")
                    }
                } catch _ {
                    print("FAIL REMOVE \(pathToMovie) on applyEffect()")
                }

                videoDetail.DocumentLocalAssetsPath = URL(fileURLWithPath: pathToMovie)
                unlink(pathToMovie)
                //videoDetail["mediaUrl"] = pathToMovie as AnyObject
                updatedVideoDetail.append(videoDetail)

                let movieWriter = GPUImageMovieWriter(movieURL: URL(fileURLWithPath: pathToMovie), size: CGSize(width: videoWidth, height: videoHeight))
                let input = brightnessFilter as GPUImageOutput
                input.addTarget(movieWriter)
                movieWriter?.shouldPassthroughAudio = true
                let orientation = orientationForAsset(anAsset)
                let gpuOrientation = imageRotationMode(forUIInterfaceOrientation: orientation)
                movieWriter?.setInputRotation(gpuOrientation!, at: 0)
                movieWriter?.enableSynchronizationCallbacks()

                // Add the audio encoding target if audio is available
                if anAsset.tracks(withMediaType: AVMediaTypeAudio).count > 0 {
                    movie?.audioEncodingTarget = movieWriter
                } else {
                    movie?.audioEncodingTarget = nil
                }

                print(movieWriter?.assetWriter.status.rawValue)
                if movieWriter?.assetWriter.status != AVAssetWriterStatus.writing {
                    movieWriter?.startRecording()
                    movie?.startProcessing()
                }
                movieWriter?.completionBlock = {
                    print("complete video editing")
                    DispatchQueue.main.async {
                        input.removeTarget(movieWriter)
                        movieWriter?.finishRecording()
                        imageDataGroup?.leave()
                    }
                }
            } else {
                imageDataGroup?.leave()
            }
        }
    }
}
Getting the crash below:
*** Terminating app due to uncaught exception 'NSInternalInconsistencyException', reason: '*** -[AVAssetWriter startWriting] Cannot call method when status is 3'
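Status 3 is AVAssetWriterStatus.failed, so startRecording()/startProcessing() is being reached after the writer has already failed, which would fit resource exhaustion from running many GPUImage pipelines at once. One mitigation to try (a sketch, not a confirmed fix): serialize the work so only one movie/writer pair is alive at a time. processOneVideo(_:completion:) is a hypothetical wrapper around the per-video body of the loop above, and the other names come from the question:

let workQueue = DispatchQueue(label: "brightness.serial.queue")
let semaphore = DispatchSemaphore(value: 0)

workQueue.async {
    for videoDetail in arrVideoDetail ?? [] {
        // Hypothetical wrapper: builds the GPUImageMovie -> filter -> writer
        // chain for one video and calls its completion from completionBlock.
        self.processOneVideo(videoDetail) {
            semaphore.signal()
        }
        semaphore.wait() // do not start the next video until this one finishes
    }
    DispatchQueue.main.async {
        SVProgressHUD.dismiss()
        completion?(updatedVideoDetail)
    }
}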

Modifying metadata from existing PHAsset seems not working

In my app I want to make it possible for the user to set a star rating from 0 to 5 for any image he has in his photo library. My research shows that there are a couple of ways to get this done:
Save the exif metadata using the new PHPhotoLibrary
Swift: Custom camera save modified metadata with image
Writing a Photo with Metadata using Photokit
Most of these answers create a new photo. My snippet now looks like this:
let options = PHContentEditingInputRequestOptions()
options.isNetworkAccessAllowed = true
self.requestContentEditingInput(with: options, completionHandler: { (contentEditingInput, _) -> Void in
    if contentEditingInput != nil {
        if let url = contentEditingInput!.fullSizeImageURL {
            if let nsurl = url as? NSURL {
                if let imageSource = CGImageSourceCreateWithURL(nsurl, nil) {
                    var imageProperties = CGImageSourceCopyPropertiesAtIndex(imageSource, 0, nil) as Dictionary?
                    if imageProperties != nil {
                        imageProperties![kCGImagePropertyIPTCStarRating] = rating as AnyObject
                        let imageData = NSMutableData(contentsOf: url)
                        let image = UIImage(contentsOfFile: url.path)
                        let destination = CGImageDestinationCreateWithData(imageData!, CGImageSourceGetType(imageSource)!, 1, nil)
                        CGImageDestinationAddImage(destination!, image!.cgImage!, imageProperties! as CFDictionary)
                        var contentEditingOutput: PHContentEditingOutput? = nil
                        if CGImageDestinationFinalize(destination!) {
                            let archievedData = NSKeyedArchiver.archivedData(withRootObject: rating)
                            let identifier = "com.example.starrating"
                            let adjustmentData = PHAdjustmentData(formatIdentifier: identifier, formatVersion: "1.0", data: archievedData)
                            contentEditingOutput = PHContentEditingOutput(contentEditingInput: contentEditingInput!)
                            contentEditingOutput!.adjustmentData = adjustmentData
                            if imageData!.write(to: contentEditingOutput!.renderedContentURL, atomically: true) {
                                PHPhotoLibrary.shared().performChanges({
                                    let request = PHAssetChangeRequest(for: self)
                                    request.contentEditingOutput = contentEditingOutput
                                }, completionHandler: { success, error in
                                    if success && error == nil {
                                        completion(true)
                                    } else {
                                        completion(false)
                                    }
                                })
                            }
                        } else {
                            completion(false)
                        }
                    }
                }
            }
        }
    }
})
Now when I want to read the metadata from the PHAsset I request the ContentEditingInput again and do the following:
if let url = contentEditingInput!.fullSizeImageURL {
    if let nsurl = url as? NSURL {
        if let imageSource = CGImageSourceCreateWithURL(nsurl, nil) {
            if let imageProperties = CGImageSourceCopyPropertiesAtIndex(imageSource, 0, nil) as Dictionary? {
                if let starRating = imageProperties[kCGImagePropertyIPTCStarRating] as? Int {
                    rating = starRating
                }
            }
        }
    }
}
But I never get my rating, because the value of imageProperties[kCGImagePropertyIPTCStarRating] is nil.
I also tried the examples from the answers I posted above, but I always get the same result.
I hope somebody knows what I can do to change the metadata.
Also, how can I change the metadata of a PHAsset with the media type .video? I tried to achieve that through the AVAssetWriter and AVAssetExportSession objects, but in both cases it does not work. Here is what I tried for videos:
var exportSession = AVAssetExportSession(asset: asset!, presetName: AVAssetExportPresetPassthrough)
exportSession!.outputURL = outputURL
exportSession!.outputFileType = AVFileTypeQuickTimeMovie
exportSession!.timeRange = CMTimeRange(start: start, duration: duration)

var modifiedMetadata = asset!.metadata
let metadataItem = AVMutableMetadataItem()
metadataItem.keySpace = AVMetadataKeySpaceQuickTimeMetadata
metadataItem.key = AVMetadataQuickTimeMetadataKeyRatingUser as NSCopying & NSObjectProtocol
metadataItem.value = rating as NSCopying & NSObjectProtocol
modifiedMetadata.append(metadataItem)

exportSession!.metadata = modifiedMetadata
exportSession!.exportAsynchronously(completionHandler: {
    let status = exportSession?.status
    let success = status == AVAssetExportSessionStatus.completed
    if success {
        do {
            let sourceURL = urlAsset.url
            let manager = FileManager.default
            _ = try manager.removeItem(at: sourceURL)
            _ = try manager.moveItem(at: outputURL, to: sourceURL)
        } catch {
            LogError("\(error)")
            completion(false)
        }
    } else {
        LogError("\(exportSession!.error!)")
        completion(false)
    }
})
Sorry, this isn't a full answer, but it covers one part of your question. I noticed you are placing the star rating in the wrong place: you need to place it in an IPTC dictionary. Also, the properties data is stored as strings. Given that you have the imageProperties, you can add the star rating and read it back with the following two functions:
func setIPTCStarRating(imageProperties: NSMutableDictionary, rating: Int) {
    if let iptc = imageProperties[kCGImagePropertyIPTCDictionary] as? NSMutableDictionary {
        iptc[kCGImagePropertyIPTCStarRating] = String(rating)
    } else {
        let iptc = NSMutableDictionary()
        iptc[kCGImagePropertyIPTCStarRating] = String(rating)
        imageProperties[kCGImagePropertyIPTCDictionary] = iptc
    }
}

func getIPTCStarRating(imageProperties: NSMutableDictionary) -> Int? {
    if let iptc = imageProperties[kCGImagePropertyIPTCDictionary] as? NSDictionary {
        if let starRating = iptc[kCGImagePropertyIPTCStarRating] as? String {
            return Int(starRating)
        }
    }
    return nil
}
As the imageProperties you get from the image are not mutable, you need to create a mutable copy of them before you can call the functions above. When you create the image to save, use the mutable properties in your call to CGImageDestinationAddImage().
if let mutableProperties = imageProperties.mutableCopy() as? NSMutableDictionary {
    setIPTCStarRating(imageProperties: mutableProperties, rating: rating)
}
One other point: you are creating an unnecessary UIImage. If you use CGImageDestinationAddImageFromSource() instead of CGImageDestinationAddImage(), you can reuse the imageSource you created earlier instead of loading the image data into a UIImage.
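Putting those pieces together, a short sketch (reusing imageSource, imageData, and rating from the question's snippet):

if let imageProperties = CGImageSourceCopyPropertiesAtIndex(imageSource, 0, nil) as NSDictionary?,
   let mutableProperties = imageProperties.mutableCopy() as? NSMutableDictionary,
   let destination = CGImageDestinationCreateWithData(imageData!, CGImageSourceGetType(imageSource)!, 1, nil) {
    setIPTCStarRating(imageProperties: mutableProperties, rating: rating)
    // Copy the image straight from the source, attaching the updated
    // properties; no intermediate UIImage is created.
    CGImageDestinationAddImageFromSource(destination, imageSource, 0, mutableProperties as CFDictionary)
    if CGImageDestinationFinalize(destination) {
        // imageData now holds the re-encoded image with the IPTC star rating set.
    }
}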

How to speed up performance of a loop in Swift?

I'm just wondering if there is any way to boost the speed of my loop, or any best-practice suggestions, because I feel it looks bad.
Here is the code:
for (index, _) in filteredArray.enumerate() {
    if index == 0 || index % 4 == 0 {
        let mediaItem = Item()
        mediaItem.id = filteredArray[index + 3]
        let photoURL = NSURL(string: filteredArray[index + 1])
        guard let url = photoURL else { return }
        let data = NSData(contentsOfURL: url)
        let finishImage = UIImage(data: data!)
        mediaItem.Photo = finishImage
        mediaItem.orderCount = filteredArray[index + 2]
        mediaItem.UUId = filteredArray[index]
        self.dataSourceItems.insert(mediaItem)
    }
}
Try to use dispatch_apply. Something like this:
let iterationsCount = filteredArray.count / 4
let queue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0)
dispatch_apply(iterationsCount, queue) { i in
    let index = i * 4
    let mediaItem = Item()
    mediaItem.id = filteredArray[index + 3]
    let photoURL = NSURL(string: filteredArray[index + 1])
    guard let url = photoURL else { return }
    let data = NSData(contentsOfURL: url)
    let finishImage = UIImage(data: data!)
    mediaItem.Photo = finishImage
    mediaItem.orderCount = filteredArray[index + 2]
    mediaItem.UUId = filteredArray[index]
    self.dataSourceItems.insert(mediaItem)
}
Notice that, depending on your situation, you may need to (1) capture self inside the closure if you are accessing properties, and (2) add some locking if you write to shared memory, since dataSourceItems is now mutated from multiple threads at once.
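That said, the dominant cost in this loop is the synchronous NSData(contentsOfURL:) download per item, which blocks whichever thread runs it. A sketch of the same work in later Swift using URLSession, so the downloads overlap without blocking (Item, filteredArray, and dataSourceItems come from the question; the lock is my addition):

let group = DispatchGroup()
let lock = NSLock() // dataSourceItems is mutated from several completion handlers

for index in stride(from: 0, to: filteredArray.count, by: 4) {
    guard index + 3 < filteredArray.count,
          let url = URL(string: filteredArray[index + 1]) else { continue }
    group.enter()
    URLSession.shared.dataTask(with: url) { data, _, _ in
        defer { group.leave() }
        guard let data = data, let image = UIImage(data: data) else { return }
        let mediaItem = Item()
        mediaItem.id = filteredArray[index + 3]
        mediaItem.Photo = image
        mediaItem.orderCount = filteredArray[index + 2]
        mediaItem.UUId = filteredArray[index]
        lock.lock()
        self.dataSourceItems.insert(mediaItem)
        lock.unlock()
    }.resume()
}

group.notify(queue: .main) {
    // All downloads finished; refresh the UI here.
}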

Get image name from URL in Swift

I have this URL: https://storage.googleapis.com/user_avatars/63/img_-qLgH80SBqNhMRYbDQeccg.jpg
I need only the image name, qLgH80SBqNhMRYbDQeccg, from this link, to use in a UIImage.
You can use NSURL to safely isolate the filename, then take a substring to get the part you want.
let s = "https://storage.googleapis.com/user_avatars/63/img_-qLgH80SBqNhMRYbDQeccg.jpg"
Swift 2
if let url = NSURL(string: s),
   withoutExt = url.URLByDeletingPathExtension,
   name = withoutExt.lastPathComponent {
    let result = name.substringFromIndex(name.startIndex.advancedBy(5))
    print(result)
}
Swift 3
if let url = URL(string: s) {
    let withoutExt = url.deletingPathExtension() // non-throwing in Swift 3, no try? needed
    let name = withoutExt.lastPathComponent
    let result = name.substring(from: name.index(name.startIndex, offsetBy: 5))
    print(result)
}
Swift 4
if let url = URL(string: s) {
    let withoutExt = url.deletingPathExtension()
    let name = withoutExt.lastPathComponent
    let result = name.substring(from: name.index(name.startIndex, offsetBy: 5))
    print(result)
}
Prints:
qLgH80SBqNhMRYbDQeccg
What about something that uses NSURLComponents to break up the URL:
func parseURLForFileName(url: String) -> String? {
    let components = NSURLComponents(string: url)
    if let path: NSString = components?.path {
        let filename = path.lastPathComponent
        if let range = filename.rangeOfString("_-") {
            return filename.substringFromIndex(range.endIndex)
        }
    }
    return nil
}
You would then call it like this:
let name = parseURLForFileName("https://storage.googleapis.com/user_avatars/63/img_-qLgH80SBqNhMRYbDQeccg.jpg")
print(name)
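For completeness, on current Swift versions the same extraction is shorter (assuming the fixed "img_-" prefix, as both answers above do):

let s = "https://storage.googleapis.com/user_avatars/63/img_-qLgH80SBqNhMRYbDQeccg.jpg"
if let url = URL(string: s) {
    let name = url.deletingPathExtension().lastPathComponent // "img_-qLgH80SBqNhMRYbDQeccg"
    let result = String(name.dropFirst(5))                   // drop the "img_-" prefix
    print(result) // qLgH80SBqNhMRYbDQeccg
}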
