NSOperationQueue finishes all tasks swift 3 - ios

I am trying to detect when an NSOperationQueue finishes all of its tasks in Swift 3. I created the demo code below and it works as I expect.
func downloadTopStroiesDetails() {
    let operationQueue: OperationQueue = OperationQueue()
    let operation1 = BlockOperation() {
        print("BlockOperation1")
        for id in 0...5 {
            operationQueue.addOperation(downloadArticle(index: id))
        }
        let operation2 = BlockOperation() {
            print("BlockOperation2")
        }
        operationQueue.addOperation(operation2)
    }
    operationQueue.addOperation(operation1)
}
func downloadArticle(index: Int) -> Operation {
    let operation: Operation = BlockOperation { () -> Void in
        print(index)
    }
    return operation
}
downloadTopStroiesDetails() // start calling
Output :
BlockOperation1
0
1
2
3
4
5
BlockOperation2
But when I call a web API with Alamofire in the downloadArticle method, the output is different.
func downloadArticle(index: Int) -> Operation {
    let operation = BlockOperation(block: {
        RequestManager.networkManager.fetchFromNetworkwithID(articleid: index) { (response: Any, sucess: Bool) in
            if sucess {
                print(index)
                //let art = article.init(json:(response as? json)!)!
                //self.saveDataIntoCoreData(data: art)
                //self.all_TopArticle.append(art)
            }
        }
    })
    return operation
}
Now output :
BlockOperation1
BlockOperation2
0
1
2
3
4
5
What am I doing wrong here?

Your downloadArticle method creates a block operation that completes immediately, because the block itself just kicks off an asynchronous request and returns.
You need to prevent the block from reaching its end until the async fetch completes. Using a semaphore is one solution.
func downloadArticle(index: Int) -> Operation {
    let operation = BlockOperation(block: {
        let semaphore = DispatchSemaphore(value: 0)
        RequestManager.networkManager.fetchFromNetworkwithID(articleid: index) { (response: Any, sucess: Bool) in
            if sucess {
                print(index)
                //let art = article.init(json:(response as? json)!)!
                //self.saveDataIntoCoreData(data: art)
                //self.all_TopArticle.append(art)
            }
            semaphore.signal()
        }
        semaphore.wait()
    })
    return operation
}
The semaphore ensures the operation doesn't actually finish until the network fetch has also completed.
You might also want to make your operation queue serial instead of concurrent, so that only one operation runs at a time. If that is what you want, set the operation queue's maxConcurrentOperationCount to 1.
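For illustration, a minimal sketch of the serial-queue variant, assuming the semaphore-based downloadArticle(index:) above and equal priorities for all operations:

import Foundation

let operationQueue = OperationQueue()
operationQueue.maxConcurrentOperationCount = 1   // serial: one operation at a time

for id in 0...5 {
    operationQueue.addOperation(downloadArticle(index: id))
}

// On a serial queue with equal priorities, this runs only after the
// operations added before it have finished.
operationQueue.addOperation {
    print("All articles downloaded")
}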

Related

How to schedule a NSOperation to the head of queue and let all other operations waiting for it? [closed]

I have an NSOperationQueue that is concurrent. For a specific NSOperation, if it fails, I want to immediately retry it at the highest priority and suspend all other operations until it succeeds.
I can think of scheduling an operation with higher priority, but how can I make all the other operations wait for this one efficiently? Changing the dependencies of all remaining operations seems too time-consuming.
There are a few approaches:
One simple approach, which cuts the Gordian knot, is to make the task that may require multiple attempts not finish until the retries are done (i.e., incorporate the retry logic within the operation itself). Then schedule the first task with a barrier and schedule the subsequent tasks after it; that way none of the subsequent tasks can run until the first one finishes, including all of its retries.
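For illustration, a minimal sketch of that first approach, using OperationQueue's addBarrierBlock (available on iOS 13 / macOS 10.15 and later) and a hypothetical synchronous performCriticalWork() helper that reports success:

import Foundation

let queue = OperationQueue()
queue.maxConcurrentOperationCount = 4

// Barrier: operations added after this block cannot start until it finishes,
// and the block itself retries until it succeeds (or gives up).
queue.addBarrierBlock {
    var attempts = 0
    var succeeded = false
    repeat {
        succeeded = performCriticalWork()   // hypothetical synchronous helper
        attempts += 1
    } while !succeeded && attempts < 5
}

for i in 0 ..< 20 {
    queue.addOperation {
        print("subsequent task \(i)")       // only starts after the barrier block finishes
    }
}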
Alternatively, if you want to make the retry tasks separate operations, but do not want to use dependencies, you could add the subsequent tasks to a separate, suspended, queue:
let taskQueue = OperationQueue()
taskQueue.maxConcurrentOperationCount = 4
taskQueue.isSuspended = true

for i in 0 ..< 20 {
    taskQueue.addOperation {
        ...
    }
}
Then, add the task that may require retries to another queue (i.e., obviously, one that is not suspended):
func attempt(_ count: Int = 0) {
    retryQueue.addOperation {
        ...
        if isSuccessful {
            taskQueue.isSuspended = false
        } else {
            attempt(count + 1)
        }
        ...
    }
}
When you do this, the first operation un-suspends the task queue once the necessary criteria have been met.
For the sake of completeness, the other alternative is to subclass Operation and make the isReady logic not only return its super implementation, but also observe some property. E.g.
class WaitingOperation: Operation {
    @objc dynamic var canStart = false
    var object: NSObject
    var observer: NSKeyValueObservation?
    let taskId: Int

    override var isReady: Bool { super.isReady && canStart }

    init<T>(object: T, canStartTasksKeyPath keyPath: KeyPath<T, Bool>, taskId: Int) where T: NSObject {
        self.object = object
        self.taskId = taskId
        super.init()

        observer = object.observe(keyPath, options: [.initial, .new]) { [weak self] _, changes in
            if let newValue = changes.newValue {
                self?.canStart = newValue
            }
        }
    }

    override class func keyPathsForValuesAffectingValue(forKey key: String) -> Set<String> {
        var set = super.keyPathsForValuesAffectingValue(forKey: key)
        if key == #keyPath(isReady) {
            set.insert(#keyPath(canStart))
        }
        return set
    }

    override func main() {
        ...
    }
}
and then
@objc dynamic var canStartTasks = false

func begin() {
    let queue = OperationQueue()
    queue.maxConcurrentOperationCount = 4

    for i in 0 ..< 20 {
        queue.addOperation(WaitingOperation(object: self, canStartTasksKeyPath: \.canStartTasks, taskId: i))
    }

    let start = CACurrentMediaTime()
    attempt()

    func attempt(_ count: Int = 0) {
        queue.addOperation { [self] in
            ...
            if notSuccessful {
                attempt(count + 1)
            } else {
                canStartTasks = true
            }
            ...
        }
    }
}

Wait for DispatchQueue [duplicate]

How can I make my code wait until a task dispatched to a DispatchQueue finishes? Does it need a completion handler or something?
func myFunction() {
    var a: Int?

    DispatchQueue.main.async {
        var b: Int = 3
        a = b
    }

    // wait until the task finishes, then print

    print(a) // this will print nil, of course, because the print
             // executes before the code above
}
I'm using Xcode 8.2 and writing in Swift 3.
If you need to hide the asynchronous nature of myFunction from the caller, use a DispatchGroup. Otherwise, use a completion block. Samples of both are below.
DispatchGroup Sample
You can either get notified when the group's enter() and leave() calls are balanced:
func myFunction() {
    var a = 0
    let group = DispatchGroup()
    group.enter()

    DispatchQueue.main.async {
        a = 1
        group.leave()
    }

    // does not wait. But the code in notify() is executed
    // after enter() and leave() calls are balanced
    group.notify(queue: .main) {
        print(a)
    }
}
or you can wait:
func myFunction() {
    var a = 0
    let group = DispatchGroup()
    group.enter()

    // avoid deadlocks by not using .main queue here
    DispatchQueue.global(qos: .default).async {
        a = 1
        group.leave()
    }

    // wait ...
    group.wait()

    print(a) // you could also `return a` here
}
Note: group.wait() blocks the current queue (probably the main queue in your case), so you have to dispatch the work asynchronously on another queue (as in the sample code above) to avoid a deadlock.
Completion Block Sample
func myFunction(completion: @escaping (Int) -> ()) {
    var a = 0

    DispatchQueue.main.async {
        let b: Int = 1
        a = b
        completion(a) // call completion after you have the result
    }
}
// on caller side:
myFunction { result in
    print("result: \(result)")
}
In Swift 3, there is no need for a completion handler when a DispatchQueue finishes a task.
Furthermore, you can achieve your goal in several different ways.
One way is this:
var a: Int?

let queue = DispatchQueue(label: "com.app.queue")
queue.sync {
    for i in 0..<10 {
        print("Ⓜ️", i)
        a = i
    }
}

print("After Queue \(a)")
It will wait until the loop finishes, but in this case your main thread will be blocked.
You can also do the same thing like this:
let myGroup = DispatchGroup()

myGroup.enter()
// Do your task
myGroup.leave() // When your task completes

myGroup.notify(queue: DispatchQueue.main) {
    // do your remaining work
}
One last thing: if you want to use a completion handler when your task completes with a DispatchQueue, you can use DispatchWorkItem.
Here is an example of how to use DispatchWorkItem:
let workItem = DispatchWorkItem {
    // Do something
}

let queue = DispatchQueue.global()
queue.async {
    workItem.perform()
}

workItem.notify(queue: DispatchQueue.main) {
    // Here you can notify your main thread
}
Swift 5 version of the solution
func myCriticalFunction() {
    var value1: String?
    var value2: String?

    let group = DispatchGroup()

    group.enter()
    // async operation 1
    DispatchQueue.global(qos: .default).async {
        // Network calls or some other async task
        value1 = // out of async task
        group.leave()
    }

    group.enter()
    // async operation 2
    DispatchQueue.global(qos: .default).async {
        // Network calls or some other async task
        value2 = // out of async task
        group.leave()
    }

    group.wait()

    print("Value1 \(value1) , Value2 \(value2)")
}
Use dispatch group
let dispatchGroup = DispatchGroup()

dispatchGroup.enter()
FirstOperation(completion: { _ in
    dispatchGroup.leave()
})

dispatchGroup.enter()
SecondOperation(completion: { _ in
    dispatchGroup.leave()
})

dispatchGroup.wait() // Waits here on this thread until the two operations finish executing.
In Swift 5.5+ you can take advantage of Swift Concurrency, which allows you to return a value from a closure dispatched to the main thread:
func myFunction() async {
    var a: Int?
    a = await MainActor.run {
        let b = 3
        return b
    }
    print(a)
}

Task {
    await myFunction()
}
Swift 4
You can use an async function with a completion handler for these situations. When you use DispatchGroup(), a deadlock may sometimes occur.
var a: Int?

@objc func myFunction(completion: @escaping (Bool) -> ()) {
    DispatchQueue.main.async {
        let b: Int = 3
        a = b
        completion(true)
    }
}

override func viewDidLoad() {
    super.viewDidLoad()

    myFunction { (status) in
        if status {
            print(self.a!)
        }
    }
}
Somehow the DispatchGroup enter() and leave() calls above didn't work in my case.
Using sleep(5) in a while loop on the background thread worked for me, though. Leaving it here in case it helps someone else; it didn't interfere with my other threads.
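A rough sketch of that polling approach, with a stand-in startAsyncWork(completion:) helper invented for illustration; note that the finished flag is read and written without synchronization here, which is part of why the DispatchGroup patterns above are usually preferable:

import Foundation

// Stand-in for the real asynchronous work.
func startAsyncWork(completion: @escaping () -> Void) {
    DispatchQueue.global().asyncAfter(deadline: .now() + 12) { completion() }
}

var finished = false

DispatchQueue.global(qos: .background).async {
    startAsyncWork { finished = true }
    while !finished {
        sleep(5)   // poll on the background thread until the work reports completion
    }
    DispatchQueue.main.async {
        // continue on the main thread once the work is done
    }
}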

Async tasks execution with dependency

Situation:
I have two tasks, say T1 & T2, running in async background mode. T2 depends on T1, and I have a successBlock which executes after both tasks T1 & T2 complete.
Edit:
To better understand the tasks, you can assume T1 and T2 are API calls that always execute in async mode. I need some output data from T1 to hit the T2 API. After both tasks complete, I need to update the UI.
To accomplish this, I put my first async work in T1 and the second in T2, made T2 depend on T1, and made successBlock depend on both tasks.
Code Work
My Tasks
class TaskManager {
    static let shared = TaskManager()

    func task1Call(complete: @escaping () -> ()) {
        DispatchQueue.global(qos: .background).async {
            for i in 0...10 {
                print("~~> Task 1 Executing ..", i)
                sleep(1)
            }
            complete()
        }
    }

    func task2Call(complete: @escaping () -> ()) {
        DispatchQueue.global(qos: .background).async {
            for i in 0...10 {
                print("==> Task 2 Executing ..", i)
                sleep(1)
            }
            complete()
        }
    }
}
Execute Tasks
class Execution {

    // Managing tasks with OperationQueue
    func executeTaskWithOperation() {
        let t1 = BlockOperation {
            TaskManager.shared.task1Call {
                print("Task 1 Completed")
            }
        }

        let t2 = BlockOperation {
            TaskManager.shared.task2Call {
                print("Task 2 Completed")
            }
        }

        let successBlock = BlockOperation {
            print("Tasks Completed")
        }

        let oper = OperationQueue()
        t2.addDependency(t1)
        successBlock.addDependency(t2)
        successBlock.addDependency(t1)
        oper.addOperations([t1, t2, successBlock], waitUntilFinished: true)
    }
}
let e = Execution()
e.executeTaskWithOperation()
Issue:
Both tasks execute in parallel, and successBlock executes before task 1 and task 2 complete.
Console Output:
==> Task 2 Executing .. 0
Tasks Completed
~~> Task 1 Executing .. 0
~~> Task 1 Executing .. 1
==> Task 2 Executing .. 1
==> Task 2 Executing .. 2
~~> Task 1 Executing .. 2
==> Task 2 Executing .. 3
~~> Task 1 Executing .. 3
==> Task 2 Executing .. 4
~~> Task 1 Executing .. 4
==> Task 2 Executing .. 5
~~> Task 1 Executing .. 5
==> Task 2 Executing .. 6
~~> Task 1 Executing .. 6
==> Task 2 Executing .. 7
~~> Task 1 Executing .. 7
==> Task 2 Executing .. 8
~~> Task 1 Executing .. 8
==> Task 2 Executing .. 9
~~> Task 1 Executing .. 9
~~> Task 1 Executing .. 10
==> Task 2 Executing .. 10
Task 1 Completed
Task 2 Completed
I am unable to figure out what I am doing wrong; the same code works fine when I use sync mode instead of async.
Your t1 and t2 are block operations that merely dispatch work to a background queue (work that does some printing and then finishes, but that doesn't matter). Once they have dispatched that work, they are considered complete. successBlock depends only on those two dispatches having happened, so it runs right away. You want the work in the BlockOperation itself:
class Execution {

    // Managing tasks with OperationQueue
    func executeTaskWithOperation() {
        let t1 = BlockOperation {
            for i in 0...10 {
                print("~~> Task 1 Executing ..", i)
                sleep(1)
            }
            print("Task 1 completed")
        }

        let t2 = BlockOperation {
            for i in 0...10 {
                print("==> Task 2 Executing ..", i)
                sleep(1)
            }
            print("Task 2 Completed")
        }

        let successBlock = BlockOperation {
            print("Tasks Completed")
        }

        let oper = OperationQueue()
        t2.addDependency(t1) // Remove this to see concurrent execution of t1 and t2
        successBlock.addDependency(t2)
        successBlock.addDependency(t1)
        oper.addOperations([t1, t2, successBlock], waitUntilFinished: true)
    }
}

let e = Execution()
e.executeTaskWithOperation()
Edit: For execution on a background thread, override Operation.
class AsyncOp: Operation {
    let task: String
    var running = false
    var done = false

    init(_ task: String) {
        self.task = task
    }

    override var isAsynchronous: Bool { true }

    override var isExecuting: Bool {
        get { running }
        set {
            willChangeValue(forKey: "isExecuting")
            running = newValue
            didChangeValue(forKey: "isExecuting")
        }
    }

    override var isFinished: Bool {
        get { done }
        set {
            willChangeValue(forKey: "isFinished")
            done = newValue
            didChangeValue(forKey: "isFinished")
        }
    }

    override func main() {
        DispatchQueue.global(qos: .background).async {
            self.isExecuting = true
            for i in 0...10 {
                print("\(self.task) Executing ..", i)
                sleep(1)
            }
            print("Done")
            self.isExecuting = false
            self.isFinished = true
        }
    }

    override func start() {
        print("\(task) starting")
        main()
    }
}
class Execution {

    // Managing tasks with OperationQueue
    func executeTaskWithOperation() {
        let t1 = AsyncOp("task1")
        let t2 = AsyncOp("task2")

        let successBlock = BlockOperation {
            print("Tasks Completed")
        }

        let oper = OperationQueue()
        t2.addDependency(t1)
        successBlock.addDependency(t2)
        successBlock.addDependency(t1)
        oper.addOperations([t1, t2, successBlock], waitUntilFinished: true)
    }
}

let e = Execution()
e.executeTaskWithOperation()
After Joshua's comment, I was able to put together the answer.
I changed the execution from OperationQueue to DispatchGroup and DispatchSemaphore.
DispatchGroup: makes sure both tasks are done and then calls the notify block.
DispatchSemaphore: holds the async resource with the wait command until we send the signal command, i.e. we tell the semaphore to wait until task1 has completed.
Sample code for the tasks:
class Execution {

    // Managing tasks with DispatchGroup and DispatchSemaphore
    func executeTaskWithGroup() {
        let groups = DispatchGroup()
        let semaphore = DispatchSemaphore(value: 0)

        groups.enter()
        TaskManager.shared.task1Call {
            groups.leave()
            semaphore.signal() // task1 finished, let task2 start
        }

        semaphore.wait() // hold here until task1 signals completion

        groups.enter()
        TaskManager.shared.task2Call {
            groups.leave()
        }

        groups.notify(queue: DispatchQueue.global(qos: .background)) {
            print("Tasks Completed")
        }
    }
}
To execute it, all we need to do is:
let e = Execution()
e.executeTaskWithGroup()
But the above code executes on the main thread and blocks the UI. To prevent this, call it on a background queue, like below.
let queue = DispatchQueue.init(label: "MyQueue", qos: .background, attributes: .concurrent, autoreleaseFrequency: .workItem, target: nil)
queue.async {
    let e = Execution()
    e.executeTaskWithGroup()
}
Now everything works as I needed.
AddOn
In case your requirement is to call multiple APIs along with the above scenario, add your tasks to the queue asynchronously.
let queue = DispatchQueue.init(label: "MyQueue", qos: .background, attributes: .concurrent, autoreleaseFrequency: .workItem, target: nil)
queue.async {
    let e1 = Execution()
    e1.executeTaskWithGroup()
}
queue.async {
    let e2 = Execution()
    e2.executeTaskWithGroup()
}
Now both e1 and e2 execute in parallel without blocking the main thread.
References:
Using Dispatch Group & Semaphore to Group iOS Async Tasks
A Quick Look at Semaphores in Swift
Dexecutor to the rescue here.
Disclaimer: I am the owner of Dexecutor.
Dexecutor can easily be used for a workflow-like use case.
Here is a sample application.

Swift 3 DispatchGroup single success DispatchWorkItem

Here is the deal: I'm attempting to walk a tree, but I want to do so concurrently. So each time I visit a node I need to concurrently walk all of its child nodes, and so on.
But I do not want to wait for the whole DispatchGroup to finish before getting a result, since that is like the worst-case scenario in Big O terms.
Instead, I want to cancel all the other DispatchWorkItems and leave the group for them in the one that succeeds. I tried to do so by counting the tasks that ended. Obviously I'm doing something wrong or misunderstanding how to use this.
The code below was written just as an example to test the idea.
Consider that the real-world situation is that inside the DispatchWorkItem you can recursively call another handle function on the current node of the tree.
func handle(completion: @escaping (Int) -> Void) {
    var result: Int = 0
    var count = 7

    let group = DispatchGroup()
    let queue = DispatchQueue(label: "q", attributes: .concurrent)
    var items = [DispatchWorkItem]()

    let item1 = DispatchWorkItem(flags: .inheritQoS) {
        for _ in 0...1000 { continue }
        count -= 1
        group.leave()
        print("left 1")
    }

    let item2 = DispatchWorkItem(flags: .inheritQoS) {
        for _ in 0...2000 { continue }
        count -= 1
        group.leave()
        print("left 2")
    }

    let item3 = DispatchWorkItem(flags: .inheritQoS) {
        for _ in 0...6000 { continue }
        count -= 1
        group.leave()
        print("left 3")
    }

    let item4 = DispatchWorkItem(flags: .inheritQoS) {
        for _ in 0...3000 { continue }
        result = 42
        items.forEach { $0.cancel() }
        for _ in 0..<count {
            group.leave()
        }
        print("ok; left 4")
    }

    let item5 = DispatchWorkItem(flags: .inheritQoS) {
        for _ in 0...50000 { continue }
        count -= 1
        group.leave()
        print("left 5")
    }

    let item6 = DispatchWorkItem(flags: .inheritQoS) {
        for _ in 0...6000 { continue }
        count -= 1
        group.leave()
        print("left 6")
    }

    let item7 = DispatchWorkItem(flags: .inheritQoS) {
        for _ in 0...8000 { continue }
        count -= 1
        group.leave()
        print("left 7")
    }

    items.append(item1)
    items.append(item2)
    items.append(item3)
    items.append(item4)
    items.append(item5)
    items.append(item6)
    items.append(item7)

    for item in items {
        group.enter()
        queue.async(execute: item)
    }

    group.notify(queue: queue) {
        return
    }
}
func test() {
    handle { result in
        print(result)
    }
}
You can't read from and write to your count variable from multiple threads at once. You need to put a mutex lock around the count. You have an unstable situation trying to access and/or change count from multiple threads. Also, you should design this so that it does not need counting at all.
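For illustration, a minimal sketch of guarding the counter with a lock, using a hypothetical decrementCount() helper that each work item would call instead of touching count directly:

import Foundation

let countLock = NSLock()
var count = 7

// All reads and writes of `count` go through the lock.
func decrementCount() {
    countLock.lock()
    count -= 1
    countLock.unlock()
}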
A couple of thoughts:
If you want to cancel a time consuming task, you need to periodically check isCancelled. See https://stackoverflow.com/a/38372384/1271826.
If you are going to update count or items from multiple threads, you have to synchronize that interaction (e.g. with a lock or dedicated serial queue). Int and Array are not thread-safe, so you'll have to manage that yourself.
Keeping track of count, using DispatchGroup, and keeping track of your own collection of DispatchWorkItem references is going to take a little work. Operation queues get you out of all of that. That having been said, if maximum efficiency is required, then perhaps you want to stay within dispatch queues, but it's just a lot more work. A sketch combining the first two points follows below.
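Here is a minimal sketch of those two points together (names are assumed and the actual tree walk is omitted): each work item periodically checks its own isCancelled flag so it can bail out promptly, and the shared state is only touched on a dedicated serial queue. The first item to reach the synchronized block wins, cancels the rest, and calls the completion exactly once.

import Foundation

func handle(completion: @escaping (Int) -> Void) {
    let syncQueue = DispatchQueue(label: "sync")       // serializes access to shared state
    let queue = DispatchQueue(label: "q", attributes: .concurrent)
    var items = [DispatchWorkItem]()
    var finished = false

    for id in 1...7 {
        var item: DispatchWorkItem!
        item = DispatchWorkItem(flags: .inheritQoS) {
            for _ in 0...(id * 10_000) {
                if item.isCancelled { return }          // bail out promptly once cancelled
            }
            // The first item to get here wins; it cancels all the others.
            var isWinner = false
            syncQueue.sync {
                if !finished {
                    finished = true
                    items.forEach { $0.cancel() }
                    isWinner = true
                }
            }
            if isWinner { completion(42) }
        }
        items.append(item)
    }

    // Dispatch only after every item has been created and recorded.
    items.forEach { queue.async(execute: $0) }
}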

Multiple requests with DispatchQueue.main.async not executing properly in swift 3

I have JSON from which I get a list of boards, accessible as self.jsonGame.boards. Now I have to fetch all these boards and display their content.
But the boards are not consistently fetched; they only show up sometimes.
func fetchBoard() {
    let repo = GameRepository()
    let prefs = UserDefaults.standard

    if self.jsonGame.boards.count > 0 {
        self.sortedBoardArr.reserveCapacity(self.BoardArr.count)

        for board in self.jsonGame.boards {
            DispatchQueue.main.async {
                repo.GetBoardInfo(gameID: self.jsonGame.Id, boardID: board, completion: { (response, errorCode) -> Void in
                    if errorCode == ErrorCode.NoError {
                        DispatchQueue.main.sync {
                            self.BoardArr.append(response)
                            self.sortArr()
                            self.collectionView.reloadData()
                        }
                    }
                })
            }
        }
    }
}
func sortArr() {
    if self.jsonGame.boards.count == self.BoardArr.count {
        for board in self.jsonGame.boards {
            for boardarr in self.BoardArr {
                if boardarr.id == board {
                    self.sortedBoardArr.append(boardarr)
                }
            }
        }
    }
}
Can anyone help me figure out how to make sure the boards are fetched consistently?
I'm a noob at async handling. Sorry for the trouble.
I had a similar issue, where I populated an array with elements coming from different asynchronous network requests; when the requests ran concurrently, the final contents of my array depended on the order in which the concurrent tasks completed.
I managed to solve my problem using a serial queue and Dispatch Groups.
This is how I would change your code:
func fetchBoard() {
    let repo = GameRepository()
    let prefs = UserDefaults.standard

    if self.jsonGame.boards.count > 0 {
        self.sortedBoardArr.reserveCapacity(self.BoardArr.count)

        let serialQueue = DispatchQueue(label: "serialQueue")
        let group = DispatchGroup()

        for board in self.jsonGame.boards {
            group.enter()
            serialQueue.async {
                repo.GetBoardInfo(gameID: self.jsonGame.Id, boardID: board, completion: { (response, errorCode) -> Void in
                    if errorCode == ErrorCode.NoError {
                        self.BoardArr.append(response)
                    }
                    group.leave()
                })
                DispatchQueue.main.async {
                    group.wait()
                    self.sortArr()
                    self.collectionView.reloadData()
                }
            }
        }
    }
}
These two answers to similar questions here on Stack Overflow were quite helpful when I had my issue: Blocking async requests and Concurrent and serial queues.
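For comparison, here is a sketch of the same idea that collects results with the group and updates the UI once in a notify block, rather than calling group.wait() on the main queue. It assumes the same GameRepository API and properties as the question:

func fetchBoard() {
    let repo = GameRepository()

    guard self.jsonGame.boards.count > 0 else { return }

    let group = DispatchGroup()

    for board in self.jsonGame.boards {
        group.enter()
        repo.GetBoardInfo(gameID: self.jsonGame.Id, boardID: board, completion: { (response, errorCode) -> Void in
            if errorCode == ErrorCode.NoError {
                DispatchQueue.main.async {        // serialize appends on the main queue
                    self.BoardArr.append(response)
                }
            }
            group.leave()
        })
    }

    // Runs once, on the main queue, after every GetBoardInfo call has finished.
    group.notify(queue: .main) {
        self.sortArr()
        self.collectionView.reloadData()
    }
}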

Resources