I'm trying to use the receivedMIDINoteOn function to flash a UILabel when the sequencer is playing a note. I have tried using the AKMIDIListener protocol with no success. Also I have made a sub class of AKMIDISampler and send midi to it from the sequencer. It plays the midi but the receivedMIDINoteOn is not called.
This is what I have in the init() of the conductor:
init() {
// Route all samplers into one mixer with AudioKit's `>>>` connection operator.
[ahSampler, beeSampler, gooSampler,flasher] >>> samplerMixer
AudioKit.output = samplerMixer
AudioKit.start()
// NOTE(review): `midi` is a local constant, so the AKMIDI instance (and the
// listener registration below) can be deallocated as soon as init() returns —
// a likely reason receivedMIDINoteOn never fires. The accepted advice below
// (L40) is to make it a stored property instead.
let midi = AKMIDI()
midi.createVirtualPorts()
midi.openInput("Session 1")
midi.addListener(self)
}
The conductor follows AKMIDIListener protocol
this is the function: it is never called
// AKMIDIListener callback — expected to run for each incoming note-on,
// but (per the question) it is never invoked.
func receivedMIDINoteOn(noteNumber: MIDINoteNumber, velocity: MIDIVelocity, channel: MIDIChannel)
{
print("got it")
}
And this is the sub class of AKMIDISampler, it gets midi and plays the sine synth, but the receivedMIDINoteOn is never called.
// AKMIDISampler subclass: it plays the incoming MIDI, yet the override below
// is never called.
// NOTE(review): per the remark at the end of this answer, flasher.enableMIDI()
// may have to be called (optionally with names) before this override fires —
// TODO confirm against the AudioKit documentation.
class Flasher: AKMIDISampler
{
override func receivedMIDINoteOn(noteNumber: MIDINoteNumber, velocity: MIDIVelocity, channel: MIDIChannel)
{
print("Flasher got it!")
}
}
edit: I should have been using the AKCallbackInstrument class instead, and overriding its start() function.
Ben,
Without seeing your entire project, I would guess that if your project is able to receive and trigger MIDI notes, then it's an issue with only sending its output to the UILabel. I recommend using NotificationCenter to inform the ViewController when a MIDI event has been received in the Conductor class. Be sure to add the DispatchQueue.main.async code, or else the text won't update as expected. This was noted in the AudioKit Google Group here.
Example:
// Post the MIDI info from the main queue so UIKit observers can safely
// update the UILabel as soon as the notification arrives.
DispatchQueue.main.async(execute: {
nc.post(name: NSNotification.Name(rawValue: "outputMessage"),
object: nil,
userInfo: [
"message": self.outputMIDIMessage,
"midiSignalReceived": self.midiSignalReceived,
"midiTypeReceived": self.midiTypeReceived
])
})
I would also recommend the following:
Move let midi = AKMIDI() into an instance variable outside of the init() at the top of your Conductor class, rather than inside of it. It looks like you're attempting to create it after AudioKit.start().
I posted a sample project that demonstrates how you can change a UILabel's color whenever a MIDI note number has been received via AudioKit:
https://github.com/markjeschke/AKMidiReceiver
Conductor class:
import AudioKit
/// The kinds of MIDI events this demo distinguishes.
/// The raw values are the human-readable labels shown in the UI.
enum MidiEventType: String {
    case noteNumber = "Note Number"
    case continuousControl = "Continuous Control"
    case programChange = "Program Change"
}
/// Singleton that owns the AudioKit signal chain and MIDI input.
/// Incoming MIDI events trigger the sampler and are re-broadcast to the UI
/// through NotificationCenter (see captureMIDIText()).
class Conductor: AKMIDIListener {
// Globally accessible
static let sharedInstance = Conductor()
// Set the instance variables outside of the init()
let midi = AKMIDI()
var demoSampler = SamplerAudioFileLoader()
var samplerMixer = AKMixer()
// Human-readable description of the last MIDI event, shown in the UI label.
var outputMIDIMessage = ""
// True while a note/controller is "on"; drives the UI background flash.
var midiSignalReceived = false
var midiTypeReceived: MidiEventType = .noteNumber
init() {
// Session settings
AKSettings.bufferLength = .medium
AKSettings.defaultToSpeaker = true
// Allow audio to play while the iOS device is muted.
AKSettings.playbackWhileMuted = true
do {
try AKSettings.setSession(category: .playAndRecord, with: [.defaultToSpeaker, .allowBluetooth, .mixWithOthers])
} catch {
AKLog("Could not set session category.")
}
// File path options are:
// "TX Brass"
// "TX LoTine81z"
// "TX Metalimba"
// "TX Pluck Bass"
demoSampler.loadEXS24Sample(filePath: "TX Brass")
// If you wish to load a wav file, comment the `loadEXS24` method and uncomment this one:
// demoSampler.loadWavSample(filePath: "Kick") // Load Kick wav file
[demoSampler] >>> samplerMixer
AudioKit.output = samplerMixer
AudioKit.start()
// MIDI Configure
// MIDI is configured after AudioKit.start(), and `midi` is a stored
// property, so the listener registration stays alive — the key difference
// from the questioner's code above.
midi.createVirtualInputPort(98909, name: "AKMidiReceiver")
midi.createVirtualOutputPort(97789, name: "AKMidiReceiver")
midi.openInput()
midi.openOutput()
midi.addListener(self)
}
// Capture the MIDI Text within a DispatchQueue, so that it's on the main thread.
// Otherwise, it won't display.
func captureMIDIText() {
let nc = NotificationCenter.default
DispatchQueue.main.async(execute: {
nc.post(name: NSNotification.Name(rawValue: "outputMessage"),
object: nil,
userInfo: [
"message": self.outputMIDIMessage,
"midiSignalReceived": self.midiSignalReceived,
"midiTypeReceived": self.midiTypeReceived
])
})
}
// MARK: MIDI received
// Note On Number + Velocity + MIDI Channel
func receivedMIDINoteOn(noteNumber: MIDINoteNumber, velocity: MIDIVelocity, channel: MIDIChannel) {
midiTypeReceived = .noteNumber
outputMIDIMessage = "\(midiTypeReceived.rawValue)\nChannel: \(channel+1) noteOn: \(noteNumber) velocity: \(velocity)"
print(outputMIDIMessage)
midiSignalReceived = true
captureMIDIText()
playNote(note: noteNumber, velocity: velocity, channel: channel)
}
// Note Off Number + Velocity + MIDI Channel
func receivedMIDINoteOff(noteNumber: MIDINoteNumber, velocity: MIDIVelocity, channel: MIDIChannel) {
midiTypeReceived = .noteNumber
outputMIDIMessage = "\(midiTypeReceived.rawValue)\nChannel: \(channel+1) noteOff: \(noteNumber) velocity: \(velocity)"
print(outputMIDIMessage)
midiSignalReceived = false
captureMIDIText()
stopNote(note: noteNumber, channel: channel)
}
// Controller Number + Value + MIDI Channel
func receivedMIDIController(_ controller: MIDIByte, value: MIDIByte, channel: MIDIChannel) {
// If the controller value reaches 127 or above, then trigger the `demoSampler` note.
// If the controller value is less, then stop the note.
// This creates an on/off type of "momentary" MIDI messaging.
if value >= 127 {
playNote(note: 30 + controller, velocity: 80, channel: channel)
} else {
stopNote(note: 30 + controller, channel: channel)
}
midiTypeReceived = .continuousControl
outputMIDIMessage = "\(midiTypeReceived.rawValue)\nChannel: \(channel+1) controller: \(controller) value: \(value)"
midiSignalReceived = true
captureMIDIText()
}
// Program Change Number + MIDI Channel
func receivedMIDIProgramChange(_ program: MIDIByte, channel: MIDIChannel) {
// Trigger the `demoSampler` note and release it after half a second (0.5), since program changes don't have a note off release.
triggerSamplerNote(program, channel: channel)
midiTypeReceived = .programChange
outputMIDIMessage = "\(midiTypeReceived.rawValue)\nChannel: \(channel+1) programChange: \(program)"
midiSignalReceived = true
captureMIDIText()
}
// Re-open every available input whenever the MIDI setup changes
// (e.g. a device is plugged in or a virtual port appears).
func receivedMIDISetupChange() {
print("midi setup change")
print("midi.inputNames: \(midi.inputNames)")
let listInputNames = midi.inputNames
for inputNames in listInputNames {
print("inputNames: \(inputNames)")
midi.openInput(inputNames)
}
}
// Forward a note-on to the sampler.
func playNote(note: MIDINoteNumber, velocity: MIDIVelocity, channel: MIDIChannel) {
demoSampler.play(noteNumber: note, velocity: velocity, channel: channel)
}
// Forward a note-off to the sampler.
func stopNote(note: MIDINoteNumber, channel: MIDIChannel) {
demoSampler.stop(noteNumber: note, channel: channel)
}
// Play a note for a program change and auto-release it after 0.5 s.
func triggerSamplerNote(_ program: MIDIByte, channel: MIDIChannel) {
playNote(note: 60 + program, velocity: 80, channel: channel)
let releaseNoteDelay = DispatchTime.now() + 0.5 // Change 0.5 to desired number of seconds
DispatchQueue.main.asyncAfter(deadline: releaseNoteDelay) {
self.stopNote(note: 60 + program, channel: channel)
self.midiSignalReceived = false
}
}
}
ViewController with the UILabel:
import UIKit
import AudioKit
/// Displays incoming MIDI activity: listens for the Conductor's
/// "outputMessage" notifications, updates the label text, and flashes the
/// background while a MIDI signal is active.
class ViewController: UIViewController {
    // FIX: attribute markers are spelled with `@`, not `#` — `#IBOutlet`
    // and `#objc` below were garbled and do not compile.
    @IBOutlet weak var outputTextLabel: UILabel!
    var conductor = Conductor.sharedInstance
    var midiSignalReceived = false
    var midiTypeReceived: MidiEventType = .noteNumber

    override func viewDidLoad() {
        super.viewDidLoad()
        // Subscribe to the MIDI updates posted by the Conductor.
        let nc = NotificationCenter.default
        nc.addObserver(forName:NSNotification.Name(rawValue: "outputMessage"), object:nil, queue:nil, using:catchNotification)
    }

    override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)
        flashBackgroundColor()
        midiSignalReceived = false
        self.outputTextLabel.text = "Listening for MIDI events..."
    }

    /// Unpacks the notification's userInfo and pushes it into the UI.
    @objc func catchNotification(notification:Notification) -> Void {
        guard
            let userInfo = notification.userInfo,
            let message = userInfo["message"] as? String,
            let midiSignalReceived = userInfo["midiSignalReceived"] as? Bool,
            let midiTypeReceived = userInfo["midiTypeReceived"] as? MidiEventType else {
                print("No userInfo found in notification")
                return
        }
        // Hop to the main thread before touching UIKit.
        DispatchQueue.main.async(execute: {
            self.outputTextLabel.text = message
            self.midiSignalReceived = midiSignalReceived
            self.midiTypeReceived = midiTypeReceived
            self.flashBackgroundColor()
        })
    }

    /// Highlights the label/background while MIDI is active.
    @objc func flashBackgroundColor() {
        if midiSignalReceived {
            self.outputTextLabel.backgroundColor = UIColor.green
            self.view.backgroundColor = UIColor.lightGray
            // Controller/program-change events have no matching "off" event,
            // so auto-dismiss the flash after half a second.
            if midiTypeReceived != .noteNumber {
                self.perform(#selector(dismissFlashBackgroundColor), with: nil, afterDelay: 0.5)
            }
        } else {
            dismissFlashBackgroundColor()
        }
    }

    /// Animates the colors back to their idle state.
    @objc func dismissFlashBackgroundColor() {
        UIView.animate(withDuration: 0.5) {
            self.outputTextLabel.backgroundColor = UIColor.clear
            self.view.backgroundColor = UIColor.white
            self.midiSignalReceived = false
            self.conductor.midiSignalReceived = false
        }
    }

    deinit {
        NotificationCenter.default.removeObserver(self,
                                                  name: NSNotification.Name(rawValue: "outputMessage"),
                                                  object: nil)
    }
}
SamplerAudioFileLoader.swift:
import AudioKit
/// Thin AKMIDISampler wrapper that loads bundled sample files and logs a
/// message instead of throwing when the file cannot be found.
class SamplerAudioFileLoader: AKMIDISampler {

    /// Loads a wav file from the bundle's Sounds directory.
    internal func loadWavSample(filePath: String) {
        if (try? self.loadWav("Sounds/\(filePath)")) == nil {
            print("Could not locate the Wav file.")
        }
    }

    /// Loads an EXS24 instrument from the bundle's Sampler Instruments directory.
    internal func loadEXS24Sample(filePath: String) {
        if (try? self.loadEXS24("Sounds/Sampler Instruments/\(filePath)")) == nil {
            print("Could not locate the EXS24 file.")
        }
    }
}
I hope this helps. Please let me know if you have any questions about this.
Take care,
Mark
P.S. If you clone this AKMidiReceiver example, open the Workspace, and no scheme appears in the Xcode project, please follow these steps that were found here:
Click on No Scheme
Click on Manage Scheme
Click on Autocreate Schemes Now
Depending on how you initialize flasher, you may have to run flasher.enableMIDI() optionally with names.
Related
I've been trying to add Shazam matching to my app using the new ShazamKit. I've used Apple's sample code found here and adapted it slightly.
import ShazamKit
import AVFAudio
import Combine
// FIX: attribute markers use `@`, not `#` — `#available` and `#Published`
// below were garbled and do not compile.
@available(iOS 15.0, *)
/// Streams microphone audio into a ShazamKit session and publishes the
/// match result for SwiftUI observers.
class ShazamMatcher: NSObject, ObservableObject, SHSessionDelegate {

    // MARK: - Properties

    /// The most recent match, set from the SHSessionDelegate callbacks.
    @Published var result: SHMatch?
    @Published var isRecording = false

    /// Guards the one-time audio-session/tap configuration.
    private var isInitialSetupDone = false
    private var session: SHSession?
    private let audioEngine = AVAudioEngine()

    // MARK: - Actions

    /// Starts a new matching session, requesting record permission if needed.
    func match() throws {
        result = nil
        session = SHSession()
        session?.delegate = self
        try doInitialSetupIfNeeded()
        AVAudioSession.sharedInstance().requestRecordPermission { [weak self] success in
            guard success, let self = self else {
                return
            }
            try? self.audioEngine.start()
            self.isRecording = true
        }
    }

    func stopMatching() {
        audioEngine.stop()
        isRecording = false
    }

    // MARK: - Setup

    /// Installs the input tap and configures the audio session exactly once.
    private func doInitialSetupIfNeeded() throws {
        guard !isInitialSetupDone else {
            return
        }
        let audioFormat = AVAudioFormat(
            standardFormatWithSampleRate: audioEngine.inputNode.outputFormat(forBus: 0).sampleRate,
            channels: 1
        )
        // FIX: capture self weakly and read `session` through it. The original
        // `[weak session]` froze the session value at install time; since this
        // tap is only installed once, every later match() call would stream
        // buffers to the first (stale) session instead of the current one.
        audioEngine.inputNode.installTap(onBus: 0, bufferSize: 2048, format: audioFormat) { [weak self] buffer, audioTime in
            self?.session?.matchStreamingBuffer(buffer, at: audioTime)
        }
        try AVAudioSession.sharedInstance().setCategory(.record)
        isInitialSetupDone = true
    }

    // MARK: - SHSessionDelegate

    func session(_ session: SHSession, didFind match: SHMatch) {
        // Handle match here
    }

    func session(_ session: SHSession, didNotFindMatchFor signature: SHSignature, error: Error?) {
        // Handle error here
    }
}
However, when calling match(), the delegate eventually reports an error The operation couldn’t be completed. (com.apple.ShazamKit error 202.)
I've added a new key using my bundle identifier for the ShazamKit services and downloaded the .p8 file. Do I need this file and if so, how?
Has anybody been able to resolve this error?
I've found a solution. First, apparently the inter-app audio entitlement has to be enabled.
Second, it seems like you need a SHSignatureGenerator as well (I thought it would be enough to call matchStreamingBuffer).
Here's code that works:
https://github.com/heysaik/ShazamKit-Demo/blob/main/Shazam/ViewController.swift
The environment for this is iOS 13.6 and Swift 5. I have a very simple app that successfully plays an MP3 file in the foreground or background. I added MPRemoteCommandCenter play and pause command handlers to it. I play the sound file in the foreground and then pause it.
When I tap the play button from the lock screen, my code calls audioPlayer.play(), which returns true. I hear the sound start playing again, but the currentTime of the player does not advance. After that, the play and pause buttons on the lock screen do nothing. When I foreground the app again, the play button plays from where it was before I went to the lock screen.
Here is my AudioPlayer class:
import AVFoundation
import MediaPlayer
/// Plays a bundled MP3 and publishes Now Playing info so the lock screen
/// can display and control playback via MPRemoteCommandCenter.
class AudioPlayer: RemoteAudioCommandDelegate {
    var audioPlayer = AVAudioPlayer()
    let remoteCommandHandler = RemoteAudioCommandHandler()
    var timer:Timer!

    /// Loads `title`.mp3 from the bundle, activates the audio session,
    /// registers the remote commands, and starts a 1 s timer that refreshes
    /// the Now Playing info.
    func play(title: String) {
        let path = Bundle.main.path(forResource: title, ofType: "mp3")!
        let url = URL(fileURLWithPath: path)
        do {
            try AVAudioSession.sharedInstance().setCategory(AVAudioSession.Category.playback)
            try AVAudioSession.sharedInstance().setActive(true)
            audioPlayer = try AVAudioPlayer(contentsOf: url)
            remoteCommandHandler.delegate = self
            remoteCommandHandler.enableDisableRemoteCommands(true)
            // FIX: invalidate any previous timer first, so repeated
            // play(title:) calls don't accumulate live timers (each one
            // retains self as its target).
            timer?.invalidate()
            timer = Timer.scheduledTimer(timeInterval: 1.0, target: self, selector: #selector(updateNowPlayingInfo), userInfo: nil, repeats: true)
        } catch let error as NSError {
            print("error = \(error)")
        }
    }

    /// Resumes playback; logs the Bool that AVAudioPlayer.play() returns.
    func play() {
        print ("audioPlayer.play() returned \(audioPlayer.play())")
    }

    func pause() {
        audioPlayer.pause()
    }

    func stop() {
        audioPlayer.stop()
    }

    func currentTime() -> TimeInterval {
        return audioPlayer.currentTime
    }

    func setCurrentTime(_ time:TimeInterval) {
        audioPlayer.currentTime = time
    }

    // FIX: `#objc` → `@objc` — required for the Timer selector above.
    // NOTE(review): the target/selector Timer API requires an NSObject-derived
    // target; confirm this class inherits NSObject in the full project.
    @objc func updateNowPlayingInfo() {
        // Hard-code the nowPlayingInfo since this is a simple test app
        var nowPlayingDict =
            [MPMediaItemPropertyTitle: "Tin Man",
             MPMediaItemPropertyAlbumTitle: "The Complete Greatest Hits",
             MPMediaItemPropertyAlbumTrackNumber: NSNumber(value: UInt(10) as UInt),
             MPMediaItemPropertyArtist: "America",
             MPMediaItemPropertyPlaybackDuration: 208,
             MPNowPlayingInfoPropertyPlaybackRate: NSNumber(value: 1.0 as Float)] as [String : Any]
        nowPlayingDict[MPNowPlayingInfoPropertyElapsedPlaybackTime] = NSNumber(value: audioPlayer.currentTime as Double)
        MPNowPlayingInfoCenter.default().nowPlayingInfo = nowPlayingDict
    }
}
Here is my RemoteCommandHandler class:
import Foundation
import MediaPlayer
/// Callbacks invoked by RemoteAudioCommandHandler when the user taps
/// play/pause on the lock screen or in Control Center.
/// The AnyObject constraint (modern spelling of the deprecated `class`
/// constraint) lets the handler hold its delegate weakly.
protocol RemoteAudioCommandDelegate: AnyObject {
    func play()
    func pause()
}
/// Registers and unregisters lock-screen play/pause handlers with
/// MPRemoteCommandCenter and forwards the commands to a weak delegate.
class RemoteAudioCommandHandler: NSObject {
    weak var delegate: RemoteAudioCommandDelegate?
    var remoteCommandCenter = MPRemoteCommandCenter.shared()
    // Opaque tokens returned by addTarget, kept so the handlers can be removed.
    var playTarget: Any? = nil
    var pauseTarget: Any? = nil

    /// Enables or disables both commands, adding/removing handlers to match.
    func enableDisableRemoteCommands(_ enabled: Bool) {
        print("Called with enabled = \(enabled)")
        remoteCommandCenter.playCommand.isEnabled = enabled
        remoteCommandCenter.pauseCommand.isEnabled = enabled
        if enabled {
            addRemoteCommandHandlers()
        } else {
            removeRemoteCommandHandlers()
        }
    }

    /// Installs the play/pause targets, skipping any that already exist.
    fileprivate func addRemoteCommandHandlers() {
        print( "Entered")
        if playTarget == nil {
            print( "Adding playTarget")
            playTarget = remoteCommandCenter.playCommand.addTarget { _ in
                print("addRemoteCommandHandlers calling delegate play")
                self.delegate?.play()
                return .success
            }
        }
        if pauseTarget == nil {
            print( "Adding pauseTarget")
            pauseTarget = remoteCommandCenter.pauseCommand.addTarget { _ in
                print("addRemoteCommandHandlers calling delegate pause")
                self.delegate?.pause()
                return .success
            }
        }
    }

    /// Removes whichever targets are currently installed.
    fileprivate func removeRemoteCommandHandlers() {
        print( "Entered")
        if let target = playTarget {
            print( "Removing playTarget")
            remoteCommandCenter.playCommand.removeTarget(target)
            playTarget = nil
        }
        if let target = pauseTarget {
            print( "Removing pauseTarget")
            remoteCommandCenter.pauseCommand.removeTarget(target)
            pauseTarget = nil
        }
    }
}
I will gladly supply further required info, because I'm baffled at why this relatively straightforward (in my mind) code doesn't work.
Assistance is much appreciated!
After some more debugging, I found that the AVAudioPlayer started to play the sound from the lock screen, but stopped again shortly after.
I mitigated the problem by adding a Timer. The timer checks if the last command by the user was play, but the sound is not playing. I also change the status when the user selects pause or the song stops playing at its natural end.
I am still at a loss for an actual fix for this problem.
I've been pouring over stack overflow for ages trying to find a way out of this error:
unexpected non void return value in void function
that I am getting with returning a Bool within my function.
i just can't seem to dig my way out of this one. I'm sure its something to do with async but I'm not very familiar with these types of functions.
/// The questioner's attempt to synchronously report reachability.
/// Flawed by design: the whenReachable/whenUnreachable closures run
/// asynchronously, so `connected` is still false when this function
/// returns — exactly the problem described in the surrounding discussion.
class CheckReachability {
class func setupReachability (hostName:String?, useClosures: Bool) -> Bool{
var reachability : Reachability!
var connected = false
let reachability2 = hostName == nil ? Reachability() : Reachability(hostname: hostName!)
reachability = reachability2
// NOTE(review): force-try will crash if the notifier cannot start.
try! reachability?.startNotifier()
if useClosures {
reachability2?.whenReachable = { reachability in
DispatchQueue.main.async {
connected = true
print("Reachable....")
}
}
reachability2?.whenUnreachable = { reachability in
DispatchQueue.main.async {
connected = false
print("Not Connected....")
}
}
} else {
NotificationCenter.default.addObserver(self, selector: Selector(("reachabilityChanged:")), name: ReachabilityChangedNotification, object: reachability2)
}
// Returns before either closure has had a chance to run.
return connected
}
}
Calling this from viewDidLoad on another view controller doesn't allow enough time to get a true result.
let connected = CheckReachability.setupReachability(hostName: nil, useClosures: true)
if connected {
Your question is confusing because the code you posted does not have the error you describe. However, you're trying to create a function that returns a result from an async function. That is not how async works.
Async functions start to do a task in the background, where that task won't be finished before it's time to return.
You need to adjust your thinking. Instead of trying to return a result from your function, you need to write your function to take a completion handler. You then call the completion handler once the long-running task has finished (which is after your function has returned.)
#bubuxu provided you with code showing how to modify your function as I described.
If you want to write a checking class to listen to the reachability, define it as a singleton and pass the completeBlock to it like this:
/// Singleton reachability checker. Instead of returning a Bool (which cannot
/// work — the reachability callbacks are asynchronous), it reports changes
/// through the optional completeBlock on the main queue.
class CheckReachability {
    static let shared = CheckReachability()
    // Held strongly so the notifier keeps running for the singleton's lifetime.
    var reachability: Reachability?

    func setupReachability(hostName:String?, completeBlock: ((Bool) -> Void)? = nil) {
        reachability = hostName == nil ? Reachability() : Reachability(hostname: hostName!)
        try? reachability?.startNotifier()
        if let block = completeBlock {
            // Deliver results on the main queue: true when reachable, false when not.
            reachability?.whenReachable = { reachability in
                DispatchQueue.main.async {
                    print("Reachable....")
                    block(true)
                }
            }
            reachability?.whenUnreachable = { reachability in
                DispatchQueue.main.async {
                    print("Not Connected....")
                    block(false)
                }
            }
        } else {
            // If we don't use block, there is no point to observe it.
            NotificationCenter.default.addObserver(self, selector: #selector(reachabilityChanged(_:)), name: .ReachabilityChangedNotification, object: nil)
        }
    }

    deinit {
        NotificationCenter.default.removeObserver(self)
    }

    // FIX: `#objc` → `@objc` — required for the selector-based observer above.
    @objc func reachabilityChanged(_ notification: Notification) {
        // ?? what should we do here?
    }
}
I'm trying to create a typewriter animation effect with a UILabel, but can't find any answers. Is the UILabel the correct object to use? I want the text to print to the screen an array of strings like, "Logging in... Opening Folder... Rebooting system.." etc. I should mention that I'm new to coding and I've tried searching the documentation and API reference but no luck. I'm currently learning Swift, if that's worth mentioning.
Based on this Answer:
Letter by letter animation for UILabel?
I've updated it to Swift 4 and solved the CPU animation problem with DispatchWorkItem in order to create a queue.
Swift 4
extension UILabel {
    /// Types `typedText` into the label one character at a time.
    /// - Parameters:
    ///   - typedText: the full text to type out.
    ///   - characterDelay: per-character delay in hundredths of a second
    ///     (the value is divided by 100), so the default 5.0 means 0.05 s.
    func setTextWithTypeAnimation(typedText: String, characterDelay: TimeInterval = 5.0) {
        text = ""
        var writingTask: DispatchWorkItem?
        writingTask = DispatchWorkItem { [weak weakSelf = self] in
            for character in typedText {
                DispatchQueue.main.async {
                    // FIX: optional chaining instead of force-unwrapping `text`,
                    // which would crash if the label's text were set to nil
                    // while the animation is running.
                    weakSelf?.text?.append(character)
                }
                // Blocks only the private "typespeed" queue, never the main thread.
                Thread.sleep(forTimeInterval: characterDelay/100)
            }
        }
        if let task = writingTask {
            let queue = DispatchQueue(label: "typespeed", qos: DispatchQoS.userInteractive)
            queue.asyncAfter(deadline: .now() + 0.05, execute: task)
        }
    }
}
Usage
label.setTextWithTypeAnimation(typedText: text, characterDelay: 10) //less delay is faster
Swift 5
/// Animates `text` into the label one character at a time (typewriter effect).
/// `characterDelay` is divided by 100, so the default 5.0 yields 0.05 s per character.
func setTyping(text: String, characterDelay: TimeInterval = 5.0) {
    self.text = ""
    let typingWork = DispatchWorkItem { [weak self] in
        for character in text {
            DispatchQueue.main.async {
                self?.text?.append(character)
            }
            // Sleeps only the private typing queue, not the main thread.
            Thread.sleep(forTimeInterval: characterDelay / 100)
        }
    }
    let typingQueue = DispatchQueue(label: "typespeed", qos: .userInteractive)
    typingQueue.asyncAfter(deadline: .now() + 0.05, execute: typingWork)
}
Usage
label.setTyping(text: "Your text")
update: Xcode 7.0 GM • Swift 2.0
import UIKit
/// Swift 2 / Xcode 7 typewriter demo: an NSTimer types one character of
/// `myText` into the text field at a randomized interval.
class ViewController: UIViewController {
    // FIX: `#IBOutlet` → `@IBOutlet` (attributes are spelled with `@`).
    @IBOutlet weak var myTypeWriter: UITextField!
    let myText = Array("Hello World !!!".characters)
    var myCounter = 0
    var timer:NSTimer?

    /// Kicks off typing with an initial 0.5 s tick.
    func fireTimer(){
        timer = NSTimer.scheduledTimerWithTimeInterval(0.5, target: self, selector: "typeLetter", userInfo: nil, repeats: true)
    }

    /// Appends the next character, then reschedules itself at a random
    /// 0.05–0.45 s one-shot interval to mimic human typing.
    func typeLetter(){
        if myCounter < myText.count {
            myTypeWriter.text = myTypeWriter.text! + String(myText[myCounter])
            let randomInterval = Double((arc4random_uniform(8)+1))/20
            timer?.invalidate()
            timer = NSTimer.scheduledTimerWithTimeInterval(randomInterval, target: self, selector: "typeLetter", userInfo: nil, repeats: false)
        } else {
            timer?.invalidate()
        }
        myCounter++
    }

    override func viewDidLoad() {
        super.viewDidLoad()
        fireTimer()
        // Do any additional setup after loading the view, typically from a nib.
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }
}
I have written a subclass of UILabel called CLTypingLabel, available on GitHub. This should do what you want.
After installing CocoaPods, add the following line to your Podfile to use it:
pod 'CLTypingLabel'
Sample Code
Change the class of a label from UILabel to CLTypingLabel;
@IBOutlet weak var myTypeWriterLabel: CLTypingLabel!
At runtime, set text of the label will trigger animation automatically:
myTypeWriterLabel.text = "This is a demo of typing label animation..."
You can customize time interval between each character:
myTypeWriterLabel.charInterval = 0.08 //optional, default is 0.1
You can pause the typing animation at any time:
myTypeWriterLabel.pauseTyping() //this will pause the typing animation
myTypeWriterLabel.continueTyping() //this will continue paused typing animation
Also there is a sample project that comes with cocoapods
my version of the typewriter effect animation using a timer:
// Typewriter effect driven by a repeating Timer: each 0.1 s tick moves the
// first character of `text` onto the label, invalidating the timer once the
// source string is exhausted.
var text = "text"
_ = Timer.scheduledTimer(
withTimeInterval: 0.1,
repeats: true
) { [weak self] timer in
// NOTE(review): removeFirst() traps on an empty string — guard against an
// initially-empty `text` before scheduling if that can occur.
let char = text.removeFirst()
self?.yourLabel.text?.append(char.description)
if text.isEmpty {
timer.invalidate()
}
}
Background:
I'm using GCDAsyncSocket on a project successfully with Swift 1.2 (via a bridging header).
The challenge right now is that it needs some sort of queue because the system it's connecting to can only process and return one command at a time.
So if it calls methods back to back, for example:
getSystemInfo()
getSystemStatus()
Only getSystemInfo() is returned via the delegate callback because the system was busy processing it, however, the getSystemStatus() was sent asynchronously successfully but not processed by the controller. I'd like it to be able to make the calls back to back and have them queue and processed once the controller is done processing and returning back the previous response -- basically making the process synchronous.
Question:
As you can see below in the example code under, didConnectToHost delegate callback, when it connects to the controller, it calls getSystemInfo() then getSystemStatus() back to back, it should call getSystemStatus() after it gets the results from the system info.
I have been looking at NSCondition, NSOperation, even GCD, but I'm not sure what the most elegant way to approach this is. I don't want to put yet another queue processor in the mix since there already is a queue setup for the GCDAsyncSocket. What is the best, most elegant way to handle this?
Pseudo Class Code:
/// Minimal GCDAsyncSocket wrapper (Swift 1.2 era) that connects to the
/// controller and fires two requests back to back — illustrating the
/// "only one outstanding command at a time" problem described above.
public class SendNet: NSObject, GCDAsyncSocketDelegate {
    var socket:GCDAsyncSocket! = nil

    /// Creates (or resets) the socket and starts an async connect.
    func setupConnection(){
        var error : NSError?
        if (socket == nil) {
            socket = GCDAsyncSocket(delegate: self, delegateQueue: dispatch_get_main_queue())
        } else {
            socket.disconnect()
        }
        if (!socket.connectToHost(host, onPort: port, withTimeout: 5.0, error: &error)){
            println("Error: \(error)")
        } else {
            println("Connecting...")
        }
    }

    /// Delegate callback: fires both requests back to back on connect —
    /// the second is lost because the controller is still busy with the first.
    public func socket(socket : GCDAsyncSocket, didConnectToHost host:String, port p:UInt16) {
        println("Connected to \(host) on port \(p).")
        self.socket = socket
        getSystemInfo()
        getSystemStatus()
    }

    /// Writes the raw bytes and queues a read for the response.
    func send(msgBytes: [UInt8]) {
        // FIX: NSData's `length:` parameter is the byte COUNT; the original
        // passed the array itself (`length: msgBytes`), which is wrong.
        var msgData = NSData(bytes: msgBytes, length: msgBytes.count)
        socket.writeData(msgData, withTimeout: -1.0, tag: 0)
        socket.readDataWithTimeout(-1.0, tag: 0)
    }

    func getSystemInfo() {
        var sendBytes:[UInt8] = [0x0, 0x1, 0x2, 0x3]
        send(sendBytes)
    }

    func getSystemStatus() {
        var sendBytes:[UInt8] = [0x4, 0x5, 0x6, 0x7]
        send(sendBytes)
    }

    /// Delegate callback: extracts the message type byte and logs it.
    public func socket(socket : GCDAsyncSocket!, didReadData data:NSData!, withTag tag:Int){
        var msgData = NSMutableData()
        msgData.setData(data)
        var msgType:UInt16 = 0
        // Reads 1 byte at offset 2 into the low byte of msgType.
        msgData.getBytes(&msgType, range: NSRange(location: 2,length: 1))
        println(msgType)
    }
}
Any suggestions would be great -- thanks!
So I decided to use NSOperation for this.
Created a class file called SyncRequest.swift with the following code:
import Foundation
/// NSOperation that sends one request over a GCDAsyncSocket and stays in the
/// "executing" state until a "DidReadData" notification signals the response.
/// Run on a queue with maxConcurrentOperationCount = 1, this serializes
/// request/response pairs to the controller.
class SyncRequest : NSOperation {
var socket:GCDAsyncSocket! = nil
var msgData:NSData! = nil
override var concurrent: Bool {
return false
}
override var asynchronous: Bool {
return false
}
// Backing storage for the manually-managed `executing` state.
private var _executing: Bool = false
override var executing: Bool {
get {
return _executing
}
set {
if (_executing != newValue) {
// KVO notifications on "isExecuting" are required for NSOperationQueue
// to track an operation whose state is managed by hand.
self.willChangeValueForKey("isExecuting")
_executing = newValue
self.didChangeValueForKey("isExecuting")
}
}
}
private var _finished: Bool = false;
override var finished: Bool {
get {
return _finished
}
set {
if (_finished != newValue) {
// Same KVO dance for "isFinished" — this is what lets the queue move on.
self.willChangeValueForKey("isFinished")
_finished = newValue
self.didChangeValueForKey("isFinished")
}
}
}
/// Complete the operation
func completeOperation() {
executing = false
finished = true
}
override func start() {
if (cancelled) {
finished = true
return
}
executing = true
main()
}
override func main() -> (){
println("starting...")
// Subscribe BEFORE sending so the response cannot be missed.
NSNotificationCenter.defaultCenter().addObserver(self, selector: "didReadData:", name: "DidReadData", object: nil)
sendData()
}
// Writes the request and queues a read for the reply.
func sendData() {
socket.writeData(msgData, withTimeout: -1.0, tag: 0)
println("Sending: \(msgData)")
socket.readDataWithTimeout(-1.0, tag: 0)
}
// Posted by the socket's didReadData delegate (see below); ends the operation.
func didReadData(notif: NSNotification) {
println("Data Received!")
NSNotificationCenter.defaultCenter().removeObserver(self, name: "DidReadData", object: nil)
completeOperation()
}
}
Then, when I need to send something out to the controller I do the following:
// sync the request to the controller
// A serial queue (maxConcurrentOperationCount = 1) guarantees each SyncRequest
// finishes — i.e. its response has arrived — before the next one starts.
// NOTE(review): creating a fresh NSOperationQueue for every request would NOT
// serialize across requests; presumably `queue` is shared/stored in the real
// project — confirm.
let queue = NSOperationQueue() // sync request queue
let requestOperation = SyncRequest()
requestOperation.socket = socket // pass the socket to send through
requestOperation.msgData = msgData // pass the msgData to send
queue.maxConcurrentOperationCount = 1
queue.addOperation(requestOperation)
Don't forget to send the NSNotification when data comes in from where you are handling the GCDAsyncSocket's "didReadData" delegate.
public func socket(socket : GCDAsyncSocket!, didReadData data:NSData!, withTag tag:Int){
...
// Wake the in-flight SyncRequest (see its didReadData(_:) above) so it can
// mark itself finished and let the serial queue start the next request.
NSNotificationCenter.defaultCenter().postNotificationName("DidReadData", object: data)
...
}