How can we reuse a function across different view controllers? - ios

@objc func contextDidSave(_ notification: Notification) {
    DispatchQueue.main.async { [weak self] in
        guard let me = self else { return }
        let fileCount = CoreDataService.instance.getUploadedFilesCount(jobId: nil)
        if me.totalFiles == 0 || (fileCount > me.totalFiles) {
            me.totalFiles = fileCount
        }
        // The progress bar expects a value in the range 0.0 (min) to 1.0 (max),
        // so convert everything to Float to keep the fractional part.
        let progress = (Float(me.totalFiles) - Float(fileCount)) / Float(me.totalFiles)
        me.progressBarView.setProgress(progress, animated: true)
        if fileCount == 0 {
            me.progressView.isHidden = true
        } else {
            me.uploadFiles.text = "Uploading: ".localized() + fileCount.description + " Files left".localized()
            me.progressView.isHidden = false
        }
    }
}
Here, progressView is a UIView and uploadFiles is a UILabel. I need to reuse this function in two different view controllers.
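One common way to share this (a sketch, not the only option): pull the handler body into a class-constrained protocol with a default implementation and conform both view controllers to it. The protocol and method names below are made up for illustration; the properties mirror the snippet above.

import UIKit

// Hypothetical protocol; the property names mirror the snippet above.
protocol UploadProgressReporting: AnyObject {
    var totalFiles: Int { get set }
    var progressBarView: UIProgressView! { get }
    var progressView: UIView! { get }
    var uploadFiles: UILabel! { get }
}

extension UploadProgressReporting {
    // Shared body of contextDidSave(_:). Each conforming view controller
    // calls this from its own @objc notification selector, because
    // #selector needs an @objc method on the class itself.
    func updateUploadProgress() {
        DispatchQueue.main.async { [weak self] in
            guard let self = self else { return }
            let fileCount = CoreDataService.instance.getUploadedFilesCount(jobId: nil)
            if self.totalFiles == 0 || fileCount > self.totalFiles {
                self.totalFiles = fileCount
            }
            let progress = (Float(self.totalFiles) - Float(fileCount)) / Float(self.totalFiles)
            self.progressBarView.setProgress(progress, animated: true)
            if fileCount == 0 {
                self.progressView.isHidden = true
            } else {
                self.uploadFiles.text = "Uploading: ".localized() + fileCount.description + " Files left".localized()
                self.progressView.isHidden = false
            }
        }
    }
}

Each view controller then keeps only a thin @objc func contextDidSave(_:) that forwards to updateUploadProgress().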

Related

Screen freezing for large task execution

I am trying to use a completion block, but the screen still freezes. What is wrong here, and how can I improve it?
func generateRandom(text: String, frame: CGRect,
                    stackFont: [StackTextFont], completion: @escaping (StackableTextStyle) -> Void) {
    var style = generateStyle(text: text, frame: frame, stackFont: stackFont)
    var maxCounter = 0
    while style == nil || !isValidStyleForFrameSize(style!.0, frame: frame, stackFonts: style!.1.fonts) {
        style = generateStyle(text: text, frame: frame, stackFont: stackFont)
        maxCounter += 1
        if maxCounter > loopBreakerAt {
            break
        }
        print(maxCounter)
    }
    completion(style ?? (text, StackTextGroup(fonts: stackFont)))
}
Note: assume the loop might execute more than 100,000 times before it finds a valid style.
Calling the function:
var i = 10
var counter = 1
let group = DispatchGroup()
while i > 0 {
    group.enter()
    QuoteNeuralParser.shared.generateRandom(text: text, frame: frame, stackFont: stackFont) { (style) in
        /// Avoid duplicate styles
        if !self.data.contains(where: { $0.1.1.fonts == style.1.fonts }) {
            if let img = self.getIMStackLayer(frame: frame, style: style).toUIImage() {
                self.data.append((img, style))
                i = i - 1
            }
        }
        /// Loop breaker to cap infinite attempts
        counter += 1
        if counter > 30 { i = -1 }
        group.leave()
    }
}
group.notify(queue: .main) { [weak self] in
    guard let self = self else {
        return
    }
}
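A completion handler by itself does not move work off the calling thread: generateRandom runs its entire while loop synchronously, so calling it from the main thread blocks the UI for up to loopBreakerAt iterations. A minimal sketch of the usual fix, assuming generateStyle and isValidStyleForFrameSize are safe to call off the main thread:

func generateRandom(text: String, frame: CGRect,
                    stackFont: [StackTextFont], completion: @escaping (StackableTextStyle) -> Void) {
    // Hop off the caller's thread before entering the busy loop.
    DispatchQueue.global(qos: .userInitiated).async {
        var style = self.generateStyle(text: text, frame: frame, stackFont: stackFont)
        var attempts = 0
        while style == nil || !self.isValidStyleForFrameSize(style!.0, frame: frame, stackFonts: style!.1.fonts) {
            style = self.generateStyle(text: text, frame: frame, stackFont: stackFont)
            attempts += 1
            if attempts > self.loopBreakerAt { break }
        }
        // Deliver the result on the main queue so the completion block
        // can safely touch UI state (and self.data at the call site).
        DispatchQueue.main.async {
            completion(style ?? (text, StackTextGroup(fonts: stackFont)))
        }
    }
}

Note that the call site must change too: once generateRandom is asynchronous, the outer while i > 0 loop would spin forever enqueueing work because i is only decremented later. Request a fixed batch of attempts up front, or chain the next request from inside the completion.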

Why does UILabel update kill animation?

I have a super simple UIView animation where the origin y value fails to animate to 88 on the first try if I change the text set in the UILabels.
The animation runs fine on the second attempt, so it feels like an initialization problem. Calling layoutSubviews and updateConstraints does not help. Thanks for any tips on this.
func previewDisplay(notifView: UIView, hdrView: UIView) {
    populateText()
    self.notifView?.frame.origin.y = 0
    self.notifView?.frame.size.height = 33
    self.notifView?.layoutSubviews()
    self.notifView?.updateConstraints()
    self.notifView = notifView
    self.closeBtn.isHidden = true
    self.notifBodyLabel.isHidden = true
    self.closeBtn.alpha = 0
    self.notifBodyLabel.alpha = 0
    UIView.animate(withDuration: 1.0, animations: {
        self.notifView?.frame.origin.y = 88
    }, completion: nil)
}

func populateText() {
    if let info = notification?.userInfo as? Dictionary<String, String> {
        // Check if value present before using it
        if let t = info["title"] {
            self.notifTitleMessageLabel.text = t
        } else {
            self.notifTitleMessageLabel.text = ""
        }
        if let b = info["body"] {
            self.notifBodyLabel.text = b
        } else {
            self.notifBodyLabel.text = ""
        }
    }
}
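One likely cause, judging from the snippet alone: self.notifView is assigned only after all the frame mutations, so on the first call every self.notifView?... line is a no-op on nil and the animation has nothing to move; by the second call the property is set and everything works. Setting label text can also trigger a layout pass that overwrites manual frames. A sketch of the reordering, assuming the rest of the class as shown:

func previewDisplay(notifView: UIView, hdrView: UIView) {
    // Assign the view first; otherwise the lines below run against nil
    // on the first call and the animation silently does nothing.
    self.notifView = notifView
    populateText()
    self.notifView?.frame.origin.y = 0
    self.notifView?.frame.size.height = 33
    // Flush any pending layout triggered by the text change so the
    // animation starts from the frame we just set.
    self.notifView?.layoutIfNeeded()
    self.closeBtn.isHidden = true
    self.notifBodyLabel.isHidden = true
    self.closeBtn.alpha = 0
    self.notifBodyLabel.alpha = 0
    UIView.animate(withDuration: 1.0) {
        self.notifView?.frame.origin.y = 88
    }
}

If the view is positioned by Auto Layout constraints rather than frames, animate a constraint constant and call layoutIfNeeded() inside the animation block instead of setting frames directly.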

Swift 4 - How to update progressView in loop

I have created this progressView:
progress = UIProgressView(progressViewStyle: .default)
progress.center = view.center
progress.setProgress(0.5, animated: true)
view.addSubview(progress)
In my viewDidLoad method I call getLandGradingImages, loop over its results, and call getLandGradingImage for each one. What would be the best way to update the progressView I created during this entire process?
getLandGradingImages(jobNo: jobNo) { result in
    // Define axis variables
    var x = 25
    var y = 80
    // Define image counter variables
    var counterImages = 0
    var actualImageCounter = 0
    // For each lot image
    for item in result {
        self.getLandGradingImage(image: item["imageBytes"] as! String) { data in
            // Create image from data
            let image = UIImage(data: data)
            // Add image to an image view
            let imageView = UIImageView(image: image!)
            // Set the image view frame
            imageView.frame = CGRect(x: x, y: y, width: 100, height: 100)
            // Define a UITapGestureRecognizer
            let tapGestureRecognizer = UITapGestureRecognizer(target: self, action: #selector(self.imageTapped(tapGestureRecognizer:)))
            // Enable user interaction
            imageView.isUserInteractionEnabled = true
            // Assign the tap gesture to the image view
            imageView.addGestureRecognizer(tapGestureRecognizer)
            // Add the image view to the view
            self.view.addSubview(imageView)
            // Advance the x axis
            x = x + 115
            // Increase the image counters
            counterImages = counterImages + 1
            actualImageCounter = actualImageCounter + 1
            // If the image counter reaches 3, start a new row.
            if counterImages == 3 {
                // Advance the y axis
                y = y + 115
                // Reset the x axis
                x = 25
                // Reset the image counter
                counterImages = 0
            }
            if actualImageCounter == result.count {
                //self.stopIndicator()
            }
        }
    }
}
Here are those two methods I am calling in this process:
func getLandGradingImages(jobNo: String, completionHandler: @escaping (_ result: Array<Dictionary<String, Any>>) -> Void) {
    // Define an array for the returned data
    var returnedResults = Array<Dictionary<String, Any>>()
    // Call the API
    WebService().GetLandGradingImages(jobNo: jobNo) { (result: Array<Dictionary<String, Any>>) in
        DispatchQueue.main.async {
            // Return our results
            returnedResults = result
            completionHandler(returnedResults)
        }
    }
}

func getLandGradingImage(image: String, completionHandler: @escaping (_ result: Data) -> Void) {
    // Define a container for the returned data
    var returnedResults = Data()
    // Call the API
    WebService().GetLandGradingImage(image: image) { (result: Data) in
        DispatchQueue.main.async {
            // Return our results
            returnedResults = result
            completionHandler(returnedResults)
        }
    }
}
For these calls, I am using Alamofire:
func GetLandGradingImage(image: String, completion: @escaping (_ result: Data) -> Void) {
    let imagePath: String = image
    let url = URL(string: webserviceImages + imagePath)!
    Alamofire.request(url).authenticate(user: self.appDelegate.username!, password: self.appDelegate.password!).responseData { response in
        let noData = Data()
        if response.error == nil {
            if let data = response.data {
                completion(data)
            }
        } else {
            completion(noData)
        }
    }
}
I have tried this:
var counter: Int = 0 {
    didSet {
        let fractionalProgress = Float(counter) / 100.0
        let animated = counter != 0
        progress.setProgress(fractionalProgress, animated: animated)
    }
}
and inside the for loop:
DispatchQueue.global(qos: .background).async {
    sleep(1)
    DispatchQueue.main.async {
        self.counter = self.counter + 1
    }
}
Still nothing; the progress view appears but never updates.
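Since the getLandGradingImage wrapper already delivers its completion on the main queue, the simplest fix (a sketch, reusing the names above) is to tie the progress fraction to the number of completed downloads rather than to an arbitrary counter / 100:

getLandGradingImages(jobNo: jobNo) { result in
    let total = result.count
    var completed = 0
    self.progress.setProgress(0, animated: false)
    for item in result {
        self.getLandGradingImage(image: item["imageBytes"] as! String) { data in
            // ... build and place the image view as before ...
            completed += 1
            // Already on the main queue, so it is safe to touch the UI here.
            self.progress.setProgress(Float(completed) / Float(total), animated: true)
            if completed == total {
                // All images loaded; stop the indicator, etc.
            }
        }
    }
}

The sleep(1) version appears to do nothing because every background block sleeps concurrently and fires at roughly the same moment, and counter / 100.0 bears no relation to how many images have actually finished.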

Swift AVFoundation Reading and Analyzing a file in real time

I am having trouble reading a file from disk using AVFoundation and performing rendering and analysis in real time.
I have a pipeline of code that I know does its analysis in real time: it works at full speed against a live camera session. However, that is not the case when I read the file as shown below. Can anyone tell me where I might be going wrong?
protocol VideoStreamTestBenchDelegate {
    func frameBuffer(buffer: CMSampleBuffer)
}

class VideoStreamTestBench {
    let asset: AVAsset
    let assetReader: AVAssetReader
    let playAtActualSpeed: Bool
    let loop: Bool
    var videoEncodingIsFinished = false
    var previousFrameTime = kCMTimeZero
    var previousActualFrameTime = CFAbsoluteTimeGetCurrent()
    var numberOfFramesCaptured = 0
    var totalFrameTimeDuringCapture: Double = 0.0
    var delegate: VideoStreamTestBenchDelegate?

    public convenience init(url: URL, playAtActualSpeed: Bool = false, loop: Bool = false) throws {
        let inputOptions = [AVURLAssetPreferPreciseDurationAndTimingKey: NSNumber(value: true)]
        let inputAsset = AVURLAsset(url: url, options: inputOptions)
        try self.init(asset: inputAsset, playAtActualSpeed: playAtActualSpeed, loop: loop)
    }

    public init(asset: AVAsset, playAtActualSpeed: Bool = false, loop: Bool = false) throws {
        self.asset = asset
        self.playAtActualSpeed = playAtActualSpeed
        self.loop = loop
        assetReader = try AVAssetReader(asset: self.asset)
        let outputSettings: [String: AnyObject] = [(kCVPixelBufferPixelFormatTypeKey as String): NSNumber(value: Int32(kCVPixelFormatType_32BGRA))]
        let readerVideoTrackOutput = AVAssetReaderTrackOutput(track: self.asset.tracks(withMediaType: AVMediaTypeVideo)[0], outputSettings: outputSettings)
        readerVideoTrackOutput.alwaysCopiesSampleData = false
        assetReader.add(readerVideoTrackOutput)
        // TODO: Audio here
    }

    public func start() {
        asset.loadValuesAsynchronously(forKeys: ["tracks"], completionHandler: {
            DispatchQueue.global(priority: DispatchQueue.GlobalQueuePriority.default).async(execute: {
                guard self.asset.statusOfValue(forKey: "tracks", error: nil) == .loaded else { return }
                guard self.assetReader.startReading() else {
                    print("Couldn't start reading")
                    return
                }
                var readerVideoTrackOutput: AVAssetReaderOutput? = nil
                for output in self.assetReader.outputs {
                    if output.mediaType == AVMediaTypeVideo {
                        readerVideoTrackOutput = output
                    }
                }
                while self.assetReader.status == .reading {
                    self.readNextVideoFrame(from: readerVideoTrackOutput!)
                }
                if self.assetReader.status == .completed {
                    self.assetReader.cancelReading()
                    if self.loop {
                        // TODO: Restart movie processing
                    } else {
                        self.endProcessing()
                    }
                }
            })
        })
    }

    public func cancel() {
        assetReader.cancelReading()
        self.endProcessing()
    }

    func endProcessing() {
    }

    func readNextVideoFrame(from videoTrackOutput: AVAssetReaderOutput) {
        if assetReader.status == .reading && !videoEncodingIsFinished {
            if let sampleBuffer = videoTrackOutput.copyNextSampleBuffer() {
                if playAtActualSpeed {
                    // Do this outside of the video processing queue to not slow that down while waiting
                    let currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer)
                    let differenceFromLastFrame = CMTimeSubtract(currentSampleTime, previousFrameTime)
                    let currentActualTime = CFAbsoluteTimeGetCurrent()
                    let frameTimeDifference = CMTimeGetSeconds(differenceFromLastFrame)
                    let actualTimeDifference = currentActualTime - previousActualFrameTime
                    if frameTimeDifference > actualTimeDifference {
                        usleep(UInt32(round(1000000.0 * (frameTimeDifference - actualTimeDifference))))
                    }
                    previousFrameTime = currentSampleTime
                    previousActualFrameTime = CFAbsoluteTimeGetCurrent()
                }
                DispatchQueue.global().sync {
                    self.delegate?.frameBuffer(buffer: sampleBuffer)
                    CMSampleBufferInvalidate(sampleBuffer)
                }
            } else {
                if !loop {
                    videoEncodingIsFinished = true
                    if videoEncodingIsFinished {
                        self.endProcessing()
                    }
                }
            }
        }
    }
}
// This is the delegate
public func bufferReader(_ reader: BufferReader!, didGetNextVideoSample bufferRef: CMSampleBuffer!) {
    // let posePoints: [Any] = self.visageBackend.posePoints(with: bufferRef)
    // var regions: [Any]? = nil
    //
    // if posePoints.count > 0 {
    //     regions = (self.luaBackend?.regions(forPosePoints: posePoints))!
    // }
    //
    // // extract
    // if regions != nil {
    //     let rois: [Any] = (self.luaBackend?.extractedRegionInfos(for: bufferRef, andRegions: regions))!
    //     print(rois)
    // }
    //
    // self.dLibRenderEngine.render(with: bufferRef, andPoints: posePoints, andRegions: regions)
    self.backgroundRenderQueue.async { [weak self] in
        if self?.previewLayer?.isReadyForMoreMediaData == true {
            self?.previewLayer?.enqueue(bufferRef!)
        }
    }
}
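One thing that stands out, assuming the bufferReader callback above is where the test-bench frames end up: readNextVideoFrame invalidates each sample buffer immediately after the synchronous delegate call, while the delegate enqueues the buffer asynchronously on backgroundRenderQueue, so by the time enqueue(_:) runs the buffer may already be invalid. A sketch of one way to close that race is to enqueue before returning control to the reader:

public func bufferReader(_ reader: BufferReader!, didGetNextVideoSample bufferRef: CMSampleBuffer!) {
    guard let buffer = bufferRef else { return }
    // Enqueue synchronously so the caller cannot invalidate the buffer
    // before the display layer has consumed it.
    backgroundRenderQueue.sync { [weak self] in
        if self?.previewLayer?.isReadyForMoreMediaData == true {
            self?.previewLayer?.enqueue(buffer)
        }
    }
}

Alternatively, drop the CMSampleBufferInvalidate call and let the buffer's normal lifetime management apply. Separately, note that with playAtActualSpeed left at false, the while loop pushes frames through as fast as the decoder allows, so downstream analysis will not see camera-like real-time pacing.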

How to parse string to NSTimeInterval

How can I parse a string value like 12:02:21.3213 into an NSTimeInterval? NSDateComponentsFormatter, available since iOS 8, supports only formatting, not parsing.
Here is how you can do it in Swift. It works for values like:
2:12:12
02:01:23.123213
Swift 5 (by @Youstanzr):
extension String {
    func convertToTimeInterval() -> TimeInterval {
        guard self != "" else {
            return 0
        }
        var interval: Double = 0
        let parts = self.components(separatedBy: ":")
        for (index, part) in parts.reversed().enumerated() {
            // Each step right-to-left is worth another power of 60:
            // seconds, then minutes, then hours.
            interval += (Double(part) ?? 0) * pow(Double(60), Double(index))
        }
        return interval
    }
}
Swift 3 (by @Torre Lasley):
func parseDuration(_ timeString: String) -> TimeInterval {
    guard !timeString.isEmpty else {
        return 0
    }
    var interval: Double = 0
    let parts = timeString.components(separatedBy: ":")
    for (index, part) in parts.reversed().enumerated() {
        interval += (Double(part) ?? 0) * pow(Double(60), Double(index))
    }
    return interval
}
Swift 2:
func parseDuration(timeString: String) -> NSTimeInterval {
    guard !timeString.isEmpty else {
        return 0
    }
    var interval: Double = 0
    let parts = timeString.componentsSeparatedByString(":")
    for (index, part) in parts.reverse().enumerate() {
        interval += (Double(part) ?? 0) * pow(Double(60), Double(index))
    }
    return interval
}
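For example, the Swift 5 extension applied to the value from the question:

let interval = "12:02:21.3213".convertToTimeInterval()
// 12 * 3600 + 2 * 60 + 21.3213 = 43341.3213 seconds
print(interval) // 43341.3213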
