Accessing Camera in Swift 2 iOS - ios

I am trying to access camera and take a picture in my iPad app, below is the code which works for swift 1.2 but is failing with 3 types of errors(highlighted below in BLOCK comments) in Swift 2. I have spent time looking for swift 2 way of writing it but haven't gotten anywhere. Any suggestion is highly appreciated.
import UIKit
import AVFoundation
// Asker's Swift 1.2 camera code as compiled under Swift 2; the ALL-CAPS
// comments mark the three compiler errors being asked about.
class PictureController: UIViewController {
let captureSession = AVCaptureSession()
var previewLayer : AVCaptureVideoPreviewLayer?
// If we find a device we'll store it here for later use
var captureDevice : AVCaptureDevice?
override func viewDidLoad() {
super.viewDidLoad()
// Do any additional setup after loading the view, typically from a nib.
captureSession.sessionPreset = AVCaptureSessionPresetHigh
let devices = AVCaptureDevice.devices()
// Loop through all the capture devices on this phone
for device in devices {
// Make sure this particular device supports video
if (device.hasMediaType(AVMediaTypeVideo)) {
// Finally check the position and confirm we've got the back camera
if(device.position == AVCaptureDevicePosition.Back) {
captureDevice = device as? AVCaptureDevice
if captureDevice != nil {
print("Capture device found")
beginSession()
}
}
}
}
}
// Error 1: in Swift 2, lockForConfiguration() no longer takes an NSError
// pointer; it returns Void and throws, so it cannot be used as an `if`
// condition — it has to be called with `try` inside do/catch.
func focusTo(value : Float) {
if let device = captureDevice {
if(device.lockForConfiguration(nil)) { // CALL CAN THROW BUT IS NOT MARKED WITH 'TRY' AND THE ERROR IS NOT HANDLED
// nil IS NOT COMPATIBLE WITH EXPECTED ARGUMENT TYPE '()'
device.setFocusModeLockedWithLensPosition(value, completionHandler: { (time) -> Void in
//
})
device.unlockForConfiguration()
}
}
}
let screenWidth = UIScreen.mainScreen().bounds.size.width
// Error 2: Swift 2 changed the UIResponder touch-handler signatures to
// (touches: Set<UITouch>, withEvent event: UIEvent?), so these NSSet-based
// declarations no longer override anything in the superclass.
override func touchesBegan(touches: NSSet, withEvent event: UIEvent) { //METHOD DOES NOT OVERRIDE ANY METHOD FROM SUPERCLASS
let anyTouch = touches.anyObject() as! UITouch
let touchPercent = anyTouch.locationInView(self.view).x / screenWidth
focusTo(Float(touchPercent))
}
override func touchesMoved(touches: NSSet, withEvent event: UIEvent) { //METHOD DOES NOT OVERRIDE ANY METHOD FROM SUPERCLASS
let anyTouch = touches.anyObject() as! UITouch
let touchPercent = anyTouch.locationInView(self.view).x / screenWidth
focusTo(Float(touchPercent))
}
func configureDevice() {
if let device = captureDevice {
device.lockForConfiguration(nil) // CALL CAN THROW BUT IS NOT MARKED WITH 'TRY' AND THE ERROR IS NOT HANDLED
// nil IS NOT COMPATIBLE WITH EXPECTED ARGUMENT TYPE '()'
device.focusMode = .Locked
device.unlockForConfiguration()
}
}
// Error 3: in Swift 2 AVCaptureDeviceInput's NSError-pointer initializer
// became a throwing init(device:) — it must be wrapped in do/try/catch.
func beginSession() {
configureDevice()
var err : NSError? = nil
captureSession.addInput(AVCaptureDeviceInput(device: captureDevice, error: &err))
// Cannot invoke initializer for type 'AVCaptureDeviceInput' with an argument list of type '(device: AVCaptureDevice?, error: inout NSError?)'
if err != nil {
print("error: \(err?.localizedDescription)")
}
previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
self.view.layer.addSublayer(previewLayer)
previewLayer?.frame = self.view.layer.frame
captureSession.startRunning()
}
}
error 1: Call can throw, but it is not marked with 'try' and the error is not handled
Type '()' does not conform to protocol 'BooleanType'
error 2: Method does not override any method from its superclass
for touchesBegan and touchesMoved
error 3: Cannot invoke initializer for type 'AVCaptureDeviceInput' with an argument list of type '(device: AVCaptureDevice?, error: inout NSError?)'
UPDATE:
ALL OTHER ISSUES ARE FIXED (please ref comments if you are looking for solutions), EXCEPT FOR THE BELOW ONE.
// The asker's remaining unresolved snippet: lockForConfiguration() in Swift 2
// returns Void and throws, so it can neither be used as an `if` condition
// ("Type '()' does not conform to protocol 'BooleanType'") nor called without
// `try` ("Call can throw..."). It needs `try device.lockForConfiguration()`
// inside a do/catch block.
func focusTo(value : Float) {
if let device = captureDevice {
if(device.lockForConfiguration()) { // THIS LINE IS THROWING BELOW MENTIONED ERROR
device.setFocusModeLockedWithLensPosition(value, completionHandler: { (time) -> Void in
})
device.unlockForConfiguration()
}
}
}
ERROR:
Type '()' does not conform to protocol 'BooleanType'
Call can throw, but it is not marked with 'try' and the error is not handled

do {
try device.lockForConfiguration()
device.setFocusModeLockedWithLensPosition(value, completionHandler: { (time) -> Void in
//
})
device.unlockForConfiguration()
} catch let error as NSError {
print(error.code)
}
override func touchesBegan(touches: Set<UITouch>, withEvent event: UIEvent?) {
}

I am posting a complete solution here for Swift2, which incorporates solutions from gkhanacer https://stackoverflow.com/a/36719940/2144994 and all the code examples in this question. This is a complete ViewController
import UIKit
import AVFoundation
/// Complete Swift 2 camera view controller: shows a live preview from the back
/// camera and sets the focus point of interest wherever the user taps.
class FirstViewController: UIViewController {

    let captureSession = AVCaptureSession()
    var previewLayer: AVCaptureVideoPreviewLayer?
    // If we find a device we'll store it here for later use
    var captureDevice: AVCaptureDevice?
    // Screen width in points (kept for interface compatibility; unused below).
    let screenWidth = UIScreen.mainScreen().bounds.size.width

    override func viewDidLoad() {
        super.viewDidLoad()
        // Do any additional setup after loading the view, typically from a nib.
        captureSession.sessionPreset = AVCaptureSessionPresetHigh
        let devices = AVCaptureDevice.devices()
        // Loop through all the capture devices on this phone
        for device in devices {
            // Make sure this particular device supports video
            if (device.hasMediaType(AVMediaTypeVideo)) {
                // Finally check the position and confirm we've got the back camera
                if (device.position == AVCaptureDevicePosition.Back) {
                    captureDevice = device as? AVCaptureDevice
                    if captureDevice != nil {
                        print("Capture device found")
                        beginSession()
                    }
                }
            }
        }
    }

    /// Locks the device configuration and switches the camera to auto-focus.
    func configureDevice() {
        if let device = captureDevice {
            do {
                try device.lockForConfiguration()
                device.focusMode = .AutoFocus
                device.unlockForConfiguration()
            } catch let error as NSError {
                print(error.code)
            }
        }
    }

    /// Maps a tap to the camera's focus point of interest.
    override func touchesBegan(touches: Set<UITouch>, withEvent event: UIEvent?) {
        let frameSize: CGSize = view.frame.size
        if let touchPoint = touches.first {
            let location: CGPoint = touchPoint.locationInView(self.view)
            // focusPointOfInterest uses a (0,0)-(1,1) landscape coordinate
            // space, so for a portrait UI the point's x comes from the touch's
            // y and its y from the mirrored touch x.
            // Bug fix: the original derived BOTH coordinates from location.x
            // (`y = 1.0 - (location.x / frameSize.width)` with
            // `x = location.x / frameSize.width`), so the vertical tap
            // position was ignored.
            let x = location.y / frameSize.height
            let y = 1.0 - (location.x / frameSize.width)
            let focusPoint = CGPoint(x: x, y: y)
            print("POINT : X: \(x), Y: \(y)")
            let captureDevice = (AVCaptureDevice.devicesWithMediaType(AVMediaTypeVideo) as! [AVCaptureDevice]).filter{$0.position == .Back}.first
            if let device = captureDevice {
                do {
                    try device.lockForConfiguration()
                    let support: Bool = device.focusPointOfInterestSupported
                    if support {
                        print("focusPointOfInterestSupported: \(support)")
                        device.focusPointOfInterest = focusPoint
                        // device.focusMode = .ContinuousAutoFocus
                        device.focusMode = .AutoFocus
                        // device.focusMode = .Locked
                        device.unlockForConfiguration()
                        print("Focus point was set successfully")
                    } else {
                        print("focusPointOfInterestSupported is not supported: \(support)")
                    }
                } catch {
                    // just ignore
                    print("Focus point error")
                }
            }
        }
    }

    override func touchesEnded(touches: Set<UITouch>, withEvent event: UIEvent?) {
        if let touch = touches.first {
            print("\(touch)")
        }
        super.touchesEnded(touches, withEvent: event)
    }

    override func touchesMoved(touches: Set<UITouch>, withEvent event: UIEvent?) {
        if let touch = touches.first {
            print("\(touch)")
        }
        super.touchesMoved(touches, withEvent: event)
    }

    /// Attaches the camera input, installs the preview layer and starts the session.
    func beginSession() {
        configureDevice()
        do {
            // AVCaptureDeviceInput(device:) throws in Swift 2; handle the error
            // instead of crashing with try! when the camera is unavailable
            // (e.g. in the simulator or with denied permissions).
            try captureSession.addInput(AVCaptureDeviceInput(device: captureDevice))
        } catch let error as NSError {
            print("error: \(error.localizedDescription)")
            return
        }
        previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        self.view.layer.addSublayer(previewLayer!)
        previewLayer?.frame = self.view.layer.frame
        captureSession.startRunning()
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }
}

Related

playback recorded content in AVCapture

I am trying to play back the recorded session in full view after it is recorded.
Kind of like "snapchat".
I can record and play the video back in a UIView, but it is shown with "Play", "Done" and "Stop" buttons. I don't want that. I want it to look like Snapchat.
This is my code, which I found here, but I modified it a tiny little bit. :)
import UIKit
import AVFoundation
import AssetsLibrary
import Photos
import MediaPlayer
import AVKit
// Records video from the front camera while the user holds a touch, then plays
// the captured clip back in the `playback` view via MPMoviePlayerController.
// NOTE(review): the `#IBOutlet`/`#IBAction` markers below look like formatting
// damage from the paste — Swift spells these attributes `@IBOutlet`/`@IBAction`.
class Camera: UIViewController, AVCaptureFileOutputRecordingDelegate {
#IBOutlet var cameraView: UIView!
var previewLayer : AVCaptureVideoPreviewLayer?
var captureDevice:AVCaptureDevice!
// Camera-selection flag; only printed in startSession(), never branched on here.
var CamChoser = false
var moviePlayer: MPMoviePlayerController?
#IBOutlet weak var playback: UIView!
#IBOutlet weak var exitCameraModeButton: UIButton!
#IBAction func exitCameraModeButton(sender: AnyObject) {
self.dismissViewControllerAnimated(true, completion: nil)
}
var captureSession = AVCaptureSession()
// Lazily resolves the front-facing camera from all video-capable devices.
lazy var cameraDevice: AVCaptureDevice? = {
let devices = AVCaptureDevice.devicesWithMediaType(AVMediaTypeVideo) as! [AVCaptureDevice]
return devices.filter{$0.position == .Front}.first
}()
// Lazily resolves the default microphone.
lazy var micDevice: AVCaptureDevice? = {
return AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeAudio)
}()
var movieOutput = AVCaptureMovieFileOutput()
// Recording destination: <tmp>/tempMovie.mp4, deleting any stale file first.
private var tempFilePath: NSURL = {
let tempPath = NSURL(fileURLWithPath: NSTemporaryDirectory()).URLByAppendingPathComponent("tempMovie").URLByAppendingPathExtension("mp4").absoluteString
if NSFileManager.defaultManager().fileExistsAtPath(tempPath) {
do {
try NSFileManager.defaultManager().removeItemAtPath(tempPath)
} catch { }
}
return NSURL(string: tempPath)!
}()
private var library = ALAssetsLibrary()
//private var library = PHPhotoLibrary()
#IBOutlet weak var switchCameraButton: UIButton!
#IBAction func switchCameraButton(sender: AnyObject) {
//startSession()
}
override func viewDidLoad() {
super.viewDidLoad()
//start session configuration
captureSession.beginConfiguration()
captureSession.sessionPreset = AVCaptureSessionPresetHigh
// NOTE(review): `devices` is never used below.
let devices = AVCaptureDevice.devices()
startSession()
}
// Wires inputs/outputs into the session, attaches the preview layer to
// cameraView and starts the session running.
func startSession() {
// add device inputs (front camera and mic)
print(CamChoser)
captureSession.addInput(deviceInputFromDevice(cameraDevice))
captureSession.addInput(deviceInputFromDevice(micDevice))
// add output movieFileOutput
movieOutput.movieFragmentInterval = kCMTimeInvalid
captureSession.addOutput(movieOutput)
// start session
captureSession.commitConfiguration()
previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
self.cameraView.layer.addSublayer(previewLayer!)
self.cameraView.bringSubviewToFront(self.exitCameraModeButton)
self.cameraView.bringSubviewToFront(self.switchCameraButton)
previewLayer?.frame = self.cameraView.layer.frame
captureSession.startRunning()
}
// Touch down: begin recording to the temp file.
override func touchesBegan(touches: Set<UITouch>, withEvent event: UIEvent?) {
print("touch")
// start capture
movieOutput.startRecordingToOutputFileURL(tempFilePath, recordingDelegate: self)
}
// Touch up: stop recording and immediately play the clip back.
override func touchesEnded(touches: Set<UITouch>, withEvent event: UIEvent?) {
print("release")
//stop capture
movieOutput.stopRecording()
// NOTE(review): stopRecording() completes asynchronously — the file may not
// be finalized until didFinishRecordingToOutputFileAtURL fires, so reading
// outputFileURL and playing it here can race with the recorder. Verify.
let videoUrl = movieOutput.outputFileURL
moviePlayer = MPMoviePlayerController(contentURL: videoUrl)
moviePlayer!.movieSourceType = MPMovieSourceType.Unknown
moviePlayer!.view.frame = playback.bounds
moviePlayer!.scalingMode = MPMovieScalingMode.AspectFill
// This is why the Play/Done/Stop chrome appears (the asker's complaint):
// .Embedded shows the playback controls. Per the MPMovieControlStyle docs,
// use MPMovieControlStyle.None for chrome-free, Snapchat-style playback.
// Also note MPMoviePlayerController is deprecated as of iOS 9.
moviePlayer!.controlStyle = MPMovieControlStyle.Embedded
moviePlayer!.shouldAutoplay = true
playback.addSubview((moviePlayer?.view)!)
//moviePlayer!.prepareToPlay()
moviePlayer?.setFullscreen(true, animated: true)
moviePlayer!.play()
cameraView.bringSubviewToFront(playback)
}
// Wraps the throwing AVCaptureDeviceInput initializer; returns nil (and logs)
// when the device is missing or cannot be opened.
private func deviceInputFromDevice(device: AVCaptureDevice?) -> AVCaptureDeviceInput? {
guard let validDevice = device else { return nil }
do {
return try AVCaptureDeviceInput(device: validDevice)
} catch let outError {
print("Device setup error occured \(outError)")
return nil
}
}
// AVCaptureFileOutputRecordingDelegate: recording actually began.
func captureOutput(captureOutput: AVCaptureFileOutput!, didStartRecordingToOutputFileAtURL fileURL: NSURL!, fromConnections connections: [AnyObject]!) {
}
// AVCaptureFileOutputRecordingDelegate: recording finished (or failed); on
// success the clip is copied into the saved-photos album.
func captureOutput(captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAtURL outputFileURL: NSURL!, fromConnections connections: [AnyObject]!, error: NSError!) {
if (error != nil) {
print("Unable to save video to the iPhone \(error.localizedDescription)")
} else {
// save video to photo album
library.writeVideoAtPathToSavedPhotosAlbum(outputFileURL, completionBlock: { (assetURL: NSURL?, error: NSError?) -> Void in
if (error != nil) {
print("Unable to save video to the iPhone \(error!.localizedDescription)")
}
})
}
}
}
Just so you know, MPMoviePlayerController has been deprecated for iOS 9. The issue is your control style is set to embedded which by default, displays the control buttons. Use MPMovieControleStyle.None to remove the controls.
See MPMovieControlStyle documentation for more details.

Stopping the background music when starting a game

I have background music which starts when the app is launched in GameViewController.swift using the following code:
class GameViewController: UIViewController {
// VARIABLES
var backgroundMusicPlayer : AVAudioPlayer!
// AUDIO PLAYER
// AUDIO PLAYER
/// Loads `filename` from the main bundle and starts it looping forever as
/// background music. Logs and returns (instead of crashing) if the resource is
/// missing or the player cannot be created.
func playBackgroundMusic(filename: String) {
    // Bug fix: the original force-unwrapped the bundle URL (`url!`), which
    // crashes when the file is missing; guard instead.
    guard let url = NSBundle.mainBundle().URLForResource(filename, withExtension: nil) else {
        print("Could not find audio file: \(filename)")
        return
    }
    do {
        backgroundMusicPlayer = try AVAudioPlayer(contentsOfURL: url)
    } catch let error as NSError {
        // Same outcome as the original's error-shuttling, without the extra
        // mutable NSError? local and the nil sentinel round-trip.
        backgroundMusicPlayer = nil
        print("Could not create audio player: \(error)")
        return
    }
    backgroundMusicPlayer.numberOfLoops = -1 // -1 = repeat indefinitely
    backgroundMusicPlayer.prepareToPlay()
    backgroundMusicPlayer.play()
}
// Stops the looping background track.
// NOTE(review): backgroundMusicPlayer is implicitly unwrapped — calling this
// before playBackgroundMusic(_:) has run will crash.
func stopBackgroundMusic() {
backgroundMusicPlayer.stop()
}
override func viewDidLoad() {
super.viewDidLoad()
playBackgroundMusic("MainTheme.mp3")
<< Various irrelevant code >>
}
Because this is run in the viewController, it persists through changing scenes on the menu (i.e. opening the "shop" scene) and creates a seamless track. When I click the "Play" button on the menu scene I want the music to then stop, and transition to the game. I have the stopBackgroundMusic() method in the GameViewController but I don't know how to call it on on the menu scene. IN THE MENU SCENE I tried this:
// TOUCH
// NOTE(review): this is the erroneous call the question asks about —
// stopBackgroundMusic() is an INSTANCE method, but it is invoked on the
// GameViewController TYPE, so the compiler complains about a missing first
// parameter (the implicit instance). Either call it on the view-controller
// instance or declare the player and both methods at the class/static level.
override func touchesBegan(touches: Set<UITouch>, withEvent event: UIEvent?) {
let touch = touches.first as UITouch?
let touchLocation = touch!.locationInNode(self)
let touchedNode = self.nodeAtPoint(touchLocation)
if touchedNode.name == "startGame" {
GameViewController.stopBackgroundMusic()
let transitionType = SKTransition.fadeWithDuration(2)
let viewSize = self.view?.bounds.size
let scene = GameScene(size: viewSize!)
self.view?.presentScene(scene, transition: transitionType)
}
}
But I get an error saying I'm missing parameter #1 in call for stopBackgroundMusic() which shouldn't require any parameters. Am I calling this method wrong? Thanks!
You are referring to your class by using GameViewController but your function is at the object instance level.
If you declare the variable and function at the class level, your code in the touchesBegan function should work fine.
static var backgroundMusicPlayer : AVAudioPlayer!
class func playBackgroundMusic(filename: String) ...
class func stopBackgroundMusic()
override func viewDidLoad() {
super.viewDidLoad()
GameViewController.playBackgroundMusic("MainTheme.mp3")
<< Various irrelevant code >>
}

iOS Swift 2 Record Video AVCaptureSession

I created an AVCaptureSession and attached to it the front facing camera
do {
try captureSession.addInput(AVCaptureDeviceInput(device: captureDevice))
}catch{print("err")}
Now I want to start and stop recording on touche events. How do I do this?
override func touchesBegan(touches: Set<UITouch>, withEvent event: UIEvent?) {
print("touch")
//Start Recording
}
override func touchesEnded(touches: Set<UITouch>, withEvent event: UIEvent?) {
print("release");
//End Recording and Save
}
You didn't mention if you're using AVCaptureMovieFileOutput or AVCaptureVideoDataOutput as an output for your session. The former is well suited for recording a video quickly and without further coding; the latter is used for more advanced recording, by getting chunks of CMSampleBuffer during the recording session.
For the scope of this answer I'll go with AVCaptureMovieFileOutput, here is some minimalist starting code:
import UIKit
import AVFoundation
import AssetsLibrary
// Minimalist front-camera recorder (answer code): records while a touch is
// held, then saves the clip to the saved-photos album from the delegate
// callback when recording finishes.
class ViewController: UIViewController, AVCaptureFileOutputRecordingDelegate {
var captureSession = AVCaptureSession()
// Lazily resolves the front-facing camera from all video-capable devices.
lazy var frontCameraDevice: AVCaptureDevice? = {
let devices = AVCaptureDevice.devicesWithMediaType(AVMediaTypeVideo) as! [AVCaptureDevice]
return devices.filter{$0.position == .Front}.first
}()
// Lazily resolves the default microphone.
lazy var micDevice: AVCaptureDevice? = {
return AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeAudio)
}()
var movieOutput = AVCaptureMovieFileOutput()
// Recording destination: <tmp>/tempMovie.mp4, deleting any stale file first.
private var tempFilePath: NSURL = {
let tempPath = NSURL(fileURLWithPath: NSTemporaryDirectory()).URLByAppendingPathComponent("tempMovie").URLByAppendingPathExtension("mp4").absoluteString
if NSFileManager.defaultManager().fileExistsAtPath(tempPath) {
do {
try NSFileManager.defaultManager().removeItemAtPath(tempPath)
} catch { }
}
return NSURL(string: tempPath)!
}()
// NOTE(review): ALAssetsLibrary is deprecated in iOS 9 in favor of Photos.
private var library = ALAssetsLibrary()
override func viewDidLoad() {
super.viewDidLoad()
//start session configuration
captureSession.beginConfiguration()
captureSession.sessionPreset = AVCaptureSessionPresetHigh
// add device inputs (front camera and mic)
captureSession.addInput(deviceInputFromDevice(frontCameraDevice))
captureSession.addInput(deviceInputFromDevice(micDevice))
// add output movieFileOutput
movieOutput.movieFragmentInterval = kCMTimeInvalid
captureSession.addOutput(movieOutput)
// start session
captureSession.commitConfiguration()
captureSession.startRunning()
}
// Touch down: start recording; the delegate callbacks below report progress.
override func touchesBegan(touches: Set<UITouch>, withEvent event: UIEvent?) {
print("touch")
// start capture
movieOutput.startRecordingToOutputFileURL(tempFilePath, recordingDelegate: self)
}
// Touch up: stop recording (completion arrives asynchronously on the delegate).
override func touchesEnded(touches: Set<UITouch>, withEvent event: UIEvent?) {
print("release")
//stop capture
movieOutput.stopRecording()
}
// Wraps the throwing AVCaptureDeviceInput initializer; returns nil (and logs)
// when the device is missing or cannot be opened.
private func deviceInputFromDevice(device: AVCaptureDevice?) -> AVCaptureDeviceInput? {
guard let validDevice = device else { return nil }
do {
return try AVCaptureDeviceInput(device: validDevice)
} catch let outError {
print("Device setup error occured \(outError)")
return nil
}
}
// AVCaptureFileOutputRecordingDelegate: recording actually began.
func captureOutput(captureOutput: AVCaptureFileOutput!, didStartRecordingToOutputFileAtURL fileURL: NSURL!, fromConnections connections: [AnyObject]!) {
}
// AVCaptureFileOutputRecordingDelegate: recording finished (or failed); on
// success the clip is copied into the saved-photos album.
func captureOutput(captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAtURL outputFileURL: NSURL!, fromConnections connections: [AnyObject]!, error: NSError!) {
if (error != nil)
{
print("Unable to save video to the iPhone \(error.localizedDescription)")
}
else
{
// save video to photo album
library.writeVideoAtPathToSavedPhotosAlbum(outputFileURL, completionBlock: { (assetURL: NSURL?, error: NSError?) -> Void in
if (error != nil) {
print("Unable to save video to the iPhone \(error!.localizedDescription)")
}
})
}
}
}
For more informations on Camera Capture refer to WWDC 2014 - Session 508

Why can't I get the devices to connect together through Game Center Multiplayer?

I'm making a game that uses the Game Center multiplayer, but I'm stuck right now trying to figure out how I would make the node move on my device when I move it on my simulator, and the other way around. I got Game Center to authenticate, find a match, look for a player, and find a player, but when I get to my gameplay there's no multiplayer activity between the two devices. Can someone help me with this? Sorry for all the code. Thanks!
// Resolves GKPlayer objects for every participant of the current match and
// caches them (keyed by playerID) in playersDict. Swift 1.2-era code: println,
// implicitly unwrapped closure parameters.
func lookupPlayers() {
println("Looking up \(match.playerIDs.count) players...")
GKPlayer.loadPlayersForIdentifiers(match?.playerIDs) { (players, error) -> Void in
if error != nil {
println("Error retrieving player info: \(error.localizedDescription)")
self.matchStarted = false
self._delegate?.matchEnded()
}
else {
self.playersDict = NSMutableDictionary(capacity: players.count)
for player in players {
println("Found player: \(player.alias)")
self.playersDict?.setObject(player, forKey: player.playerID)
}
}
// NOTE(review): these two lines run on BOTH branches, so even after a lookup
// error (matchStarted = false, matchEnded()) the match is flagged started
// again — confirm this is intended.
self.playersDict?.setObject(GKLocalPlayer.localPlayer(), forKey: GKLocalPlayer.localPlayer().playerID)
self.matchStarted = true
}
}
// GKMatchDelegate: the match failed outright.
func match(match: GKMatch!, didFailWithError error: NSError!) {
println("error!")
}
// GKMatchDelegate: asked whether a disconnected player should be re-invited.
func match(match: GKMatch!, shouldReinviteDisconnectedPlayer player: GKPlayer!) -> Bool {
return true
}
// GKMatchDelegate (string-playerID variant): a player's connection state
// changed; re-run the player lookup to refresh playersDict.
func match(match: GKMatch!, player playerID: String!, didChangeState state: GKPlayerConnectionState) {
println("what!")
self.lookupPlayers()
}
// GKMatchDelegate (GKPlayer variant of the connection-state callback).
func match(match: GKMatch!, player: GKPlayer!, didChangeConnectionState state: GKPlayerConnectionState) {
println("connects or disconnects")
}
// GKMatchmakerViewControllerDelegate: a peer-to-peer match was found — switch
// to the gameplay scene, dismiss the matchmaker UI, and adopt the match.
// NOTE(review): the GamePlay scene is presented BEFORE the matchmaker is
// dismissed and before match.delegate is assigned; if the scene starts sending
// data immediately this ordering may race with match setup — verify.
func matchmakerViewController(viewController: GKMatchmakerViewController!, didFindMatch match: GKMatch!) {
println("match found")
var goToMatch = GamePlay(size: self.size)
var transitionToMatch = SKTransition.fadeWithDuration(1.0)
goToMatch.scaleMode = SKSceneScaleMode.AspectFill
self.scene!.view?.presentScene(goToMatch, transition: transitionToMatch)
presentingViewController = viewController
self.presentingViewController.dismissViewControllerAnimated(true, completion: nil)
self.match = match
self.match.delegate = self
self.lookupPlayers()
}
// GKMatchmakerViewControllerDelegate: hosted-player variants (unused here).
func matchmakerViewController(viewController: GKMatchmakerViewController!, didReceiveAcceptFromHostedPlayer playerID: String!) {
}
func matchmakerViewController(viewController: GKMatchmakerViewController!, didFindPlayers playerIDs: [AnyObject]!) {
}
// Matchmaking failed: dismiss the matchmaker UI and log the error.
func matchmakerViewController(viewController: GKMatchmakerViewController!, didFailWithError error: NSError!) {
presentingViewController = viewController
self.presentingViewController.dismissViewControllerAnimated(true, completion: nil);
println("Error finding match: \(error.localizedDescription)");
}
func matchmakerViewController(viewController: GKMatchmakerViewController!, didFindHostedPlayers players: [AnyObject]!) {
}
// User cancelled matchmaking: dismiss the UI and fall back to the main menu.
func matchmakerViewControllerWasCancelled(viewController: GKMatchmakerViewController!) {
println("go back to main menu")
presentingViewController = viewController
self.presentingViewController.dismissViewControllerAnimated(true, completion: nil)
}
/// Presents Game Center's matchmaker UI configured for a head-to-head
/// (exactly two player) real-time match.
func findMatchPlease() {
    match = nil // drop any previous match before searching again
    let request = GKMatchRequest()
    request.minPlayers = 2
    request.maxPlayers = 2
    let matchmaker = GKMatchmakerViewController(matchRequest: request)
    matchmaker.matchmakerDelegate = self
    // Present from the window's root view controller, since this runs in a scene.
    let rootViewController = self.scene?.view?.window?.rootViewController
    rootViewController?.presentViewController(matchmaker, animated: true, completion: nil)
}
// Menu-scene tap handler: any touch landing on the "find" node starts
// Game Center matchmaking.
override func touchesBegan(touches: Set<NSObject>, withEvent event: UIEvent) {
    for touch in (touches as! Set<UITouch>) {
        // Bug fix: the original shadowed the loop variable with
        // `touches.first as! UITouch`, so every iteration examined the first
        // touch instead of the one being iterated. Use the loop's touch.
        let location = touch.locationInNode(self)
        let node = self.nodeAtPoint(location)
        if node.name == "find" {
            findMatchPlease()
        }
    }
}
You might want to check out the Game Center Programming Guide from Apple, especially the Real-Time Matches chapter. Basically, once your match is started you can begin exchanging data between players. If you want to move some player object (node) around so the movement is seen by other players, you might want to implement code similar to the following once a player moves its object/node:
/// Broadcasts this player's position (a raw CGPoint) to every peer in the
/// match, shown in both the pre-2.0 NSError style and the Swift 2 throwing style.
func sendPosition() {
    var msg = CGPointMake(0, 0) // set the position to send here
    let packet = NSData(bytes: &msg, length: sizeof(CGPoint))
    // Swift pre-2.0, no error handling
    var error: NSError?
    // Bug fix: the original omitted the comma before the `error:` argument
    // label ("withDataMode: .Unreliable error: &error"), which does not compile.
    if !match.sendDataToAllPlayers(packet, withDataMode: .Unreliable, error: &error) {
        // some error happened
    }
    // Swift 2.0:
    do {
        try match.sendDataToAllPlayers(packet, withDataMode: .Unreliable)
    }
    catch {
        // handle errors
    }
}
It will send CGPoint struct data to all players connected to the match, so on the other end you might want to implement GKMatchDelegate's method - match:didReceiveData:fromRemotePlayer:. It could look something like this:
/// GKMatchDelegate: raw bytes arrived from a peer — unpack them back into the
/// CGPoint that sendPosition() transmitted.
func match(_ match: GKMatch!, didReceiveData data: NSData!, fromRemotePlayer player: GKPlayer!) {
    // Bug fix: `data.bytes as? CGPoint` does not compile — `bytes` is an
    // UnsafePointer<Void>, not an object that can be conditionally cast.
    // Copy the raw bytes into a CGPoint value instead, guarding the length
    // so a short/foreign packet cannot corrupt memory.
    if data.length == sizeof(CGPoint) {
        var msg = CGPointZero
        data.getBytes(&msg, length: sizeof(CGPoint))
        // new position received, assign it to player it was received from
    }
}
You can send most of the data this way and receive and convert it back on the other end. But beware that Swift types can take up more bytes to send over network, I would suggest to create C structs for networking data to send or if you want to keep your code as Swift as possible try tuples:
var myStruct = exampleStruct (
SomeInt: 1,
OtherInt: 2,
SomeCString: (0x48, 0x45, 0x4C, 0x4C, 0x4F) // HELLO
)
var data = NSData(
bytes: &myStruct,
length: sizeof(exampleStruct)
)

Gamescene does not have a member named "waitEvent" error

I am currently learning Apple's new software programming language (Swift) and I have encountered this error:
Gamescene does not have a member named "waitEvent"
This is becoming quite frustrating, if anyone knows why this might be please respond.
This is in Swift and Sprite Kit!
Here is the code:
override func touchesBegan(touches: NSSet, withEvent event: UIEvent) {
    /* Called when a touch begins */
    for touch: AnyObject in touches {
        let location = touch.locationInNode(self)
        if (destroyerNode? != nil){
            // Pin the destroyer under the touch and freeze it for 2 seconds
            // before waitEvent() re-enables its physics.
            destroyerNode!.position = location
            destroyerNode!.physicsBody?.velocity = CGVectorMake(0, 0)
            destroyerNode!.physicsBody?.dynamic = true
            let waitAction = SKAction.waitForDuration(2)
            destroyerNode!.runAction(waitAction, completion: {
                self.waitEvent()
            })
        }
        else {
            println("that destroyer was really not there")
        }
    }
}
// Bug fix: the closing brace above was missing in the original, so waitEvent()
// and update() were nested INSIDE touchesBegan and never became members of the
// scene class — the exact cause of the compiler error
// "Gamescene does not have a member named 'waitEvent'".

func waitEvent(){
    //make the destroyer a dynamic object
    destroyerNode!.makeBodyDynamic()
    println("The wait is up")
}

// NOTE(review): now that this is a class-level method, it will likely need the
// `override` keyword if the scene subclasses SKScene — confirm in the project.
func update(currentTime: CFTimeInterval) {
    /* Called before each frame is rendered */
}

Resources