How to use the accelerometer in Swift 3 - iOS

I need help using the accelerometer with Swift 3.
This is my code:
var motion = CMMotionManager()
@IBOutlet weak var statusAccel: UILabel!

override func viewDidAppear(_ animated: Bool) {
    motion.startAccelerometerUpdates(to: OperationQueue.current!) { (data, error) in
        if let trueData = data {
            self.view.reloadInputViews()
            self.statusAccel.text = "\(trueData)"
        }
    }
}
It works, but it just shows me X, Y, and Z, and I only want to use Z.
Example: if Z == 2, do something.

You can access the acceleration on the Z-axis by calling CMAccelerometerData.acceleration.z. If you are unsure about how to access a certain property of a class, always check the documentation, either in Xcode directly or on Apple's documentation website; you can save a lot of time with this approach.
motion.startAccelerometerUpdates(to: OperationQueue.current!, withHandler: { data, error in
    guard error == nil else { return }
    guard let accelerometerData = data else { return }

    if accelerometerData.acceleration.z == 2.0 {
        // do something
    }
})

The data object that gets returned by startAccelerometerUpdates(...) is of type CMAccelerometerData which has a CMAcceleration property. From this you can get the z component.
var motion = CMMotionManager()
@IBOutlet weak var statusAccel: UILabel!

override func viewDidAppear(_ animated: Bool) {
    motion.startAccelerometerUpdates(to: OperationQueue.current!) { (data, error) in
        if let trueData = data {
            self.view.reloadInputViews()
            self.statusAccel.text = "\(trueData)"
            if trueData.acceleration.z == 2 {
                // do things...
            }
        }
    }
}
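One caveat worth noting (not from the original answers): acceleration values are continuous doubles, so comparing them to an exact value with == will almost never fire. A threshold check is usually what you want. Here is a minimal sketch, with the 2.0 threshold and the update interval chosen only for illustration:

import CoreMotion

// Minimal sketch: trigger once the z-axis acceleration reaches a threshold.
let motion = CMMotionManager()
motion.accelerometerUpdateInterval = 0.1
motion.startAccelerometerUpdates(to: OperationQueue.main) { data, error in
    guard error == nil, let z = data?.acceleration.z else { return }
    if z >= 2.0 {
        // do something
    }
}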

Related

ARKit: Tracking VisionCoreML detected object

I'm new to iOS and I am currently refactoring code I got from a tutorial on VisionCoreML and ARKit that adds a node to the detected object.
Currently, if I move the object, the node does not move to follow it. I can see from Apple's sample code for Recognizing Objects in Live Capture that they use layers and reposition them each time Vision detects the object at a new position, which is what I was hoping to replicate with an ARObject.
Is there a way I can achieve this with ARKit?
Any help around this would be greatly appreciated.
Thanks.
EDIT: Working code with solution
@IBOutlet var sceneView: ARSCNView!
private var viewportSize: CGSize!
private var previousAnchor: ARAnchor?
private var trackingNode: SCNNode!
lazy var objectDetectionRequest: VNCoreMLRequest = {
do {
let model = try VNCoreMLModel(for: yolov5s(configuration: MLModelConfiguration()).model)
let request = VNCoreMLRequest(model: model) { [weak self] request, error in
self?.processDetections(for: request, error: error)
}
return request
} catch {
fatalError("Failed to load Vision ML model.")
}
}()
func renderer(_ renderer: SCNSceneRenderer, willRenderScene scene: SCNScene, atTime time: TimeInterval) {
guard let capturedImage = sceneView.session.currentFrame?.capturedImage
else { return }
let imageRequestHandler = VNImageRequestHandler(cvPixelBuffer: capturedImage, orientation: .leftMirrored, options: [:])
do {
try imageRequestHandler.perform([objectDetectionRequest])
} catch {
print("Failed to perform image request.")
}
}
func processDetections(for request: VNRequest, error: Error?) {
guard error == nil else {
print("Object detection error: \(error!.localizedDescription)")
return
}
guard let results = request.results else { return }
for observation in results where observation is VNRecognizedObjectObservation {
let objectObservation = observation as! VNRecognizedObjectObservation
let topLabelObservation = objectObservation.labels.first
print(topLabelObservation!.identifier + " " + "\(Int(topLabelObservation!.confidence * 100))%")
guard recognisedObject(topLabelObservation!.identifier) && topLabelObservation!.confidence > 0.9
else { continue }
let rect = VNImageRectForNormalizedRect(
objectObservation.boundingBox,
Int(self.sceneView.bounds.width),
Int(self.sceneView.bounds.height))
let midPoint = CGPoint(x: rect.midX, y: rect.midY)
let raycastQuery = self.sceneView.raycastQuery(from: midPoint,
allowing: .estimatedPlane,
alignment: .any)
let raycastArray = self.sceneView.session.raycast(raycastQuery!)
guard let raycastResult = raycastArray.first else { return }
let position = SCNVector3(raycastResult.worldTransform.columns.3.x,
raycastResult.worldTransform.columns.3.y,
raycastResult.worldTransform.columns.3.z)
if let _ = trackingNode {
trackingNode!.worldPosition = position
} else {
trackingNode = createNode()
trackingNode!.worldPosition = position
self.sceneView.scene.rootNode.addChildNode(trackingNode!)
}
}
}
private func recognisedObject(_ identifier: String) -> Bool {
return identifier == "remote" || identifier == "mouse"
}
private func createNode() -> SCNNode {
let sphereNode = SCNNode(geometry: SCNSphere(radius: 0.01))
sphereNode.geometry?.firstMaterial?.diffuse.contents = UIColor.purple
return sphereNode
}
private func loadSession() {
let configuration = ARWorldTrackingConfiguration()
configuration.planeDetection = []
sceneView.session.run(configuration)
}
override func viewDidLoad() {
super.viewDidLoad()
sceneView.delegate = self
viewportSize = sceneView.frame.size
}
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
loadSession()
}
override func viewWillDisappear(_ animated: Bool) {
super.viewWillDisappear(animated)
sceneView.session.pause()
}
To be honest, the technologies you're using here cannot do that out of the box. YOLO (and any other object detection model you might swap in for it) has no built-in concept of tracking the same object across a video. It looks for objects in a 2D bitmap and returns 2D bounding boxes for them. As either the camera or the object moves and you pass in the next capturedImage buffer, it will give you a new bounding box in the correct position, but it has no way of knowing whether it's the same instance of the object detected in a previous frame.
To make this work, you'll need to do some post processing of those Vision results to determine whether or not it's the same object, and if so, manually move the anchor/mesh to match the new position. If you're confident there should only be one object in view at any given time, then it's pretty straightforward. If there will be multiple objects, you're venturing into complex (but still achievable) territory.
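For the single-object case, one possible heuristic (a sketch, not something from the original answer) is to compare each new bounding box against the previous one and treat sufficient overlap as "same object", then reuse and reposition the existing node:

import CoreGraphics

// Sketch of a "same object?" check between frames using intersection over union
// of the normalized bounding boxes Vision returns. The 0.3 threshold is arbitrary
// and would need tuning for your objects and frame rate.
func intersectionOverUnion(_ a: CGRect, _ b: CGRect) -> CGFloat {
    let intersection = a.intersection(b)
    guard !intersection.isNull else { return 0 }
    let intersectionArea = intersection.width * intersection.height
    let unionArea = a.width * a.height + b.width * b.height - intersectionArea
    return unionArea > 0 ? intersectionArea / unionArea : 0
}

var previousBox: CGRect?

func isSameObject(_ newBox: CGRect, threshold: CGFloat = 0.3) -> Bool {
    defer { previousBox = newBox }
    guard let oldBox = previousBox else { return false }
    return intersectionOverUnion(oldBox, newBox) > threshold
}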
You could try to incorporate Vision Tracking, which might work though would depend on the nature and behavior of the tracked object.
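If you experiment with Vision Tracking, the rough shape looks something like the sketch below (illustrative only; the seed detection is assumed to come from one of the VNRecognizedObjectObservation results above):

import Vision
import CoreVideo

// Sketch: seed Vision's object tracker from an initial detection, then update it
// on subsequent frames with a VNSequenceRequestHandler.
let sequenceHandler = VNSequenceRequestHandler()
var trackingRequest: VNTrackObjectRequest?

func startTracking(_ detection: VNDetectedObjectObservation) {
    trackingRequest = VNTrackObjectRequest(detectedObjectObservation: detection)
}

func updateTracking(on pixelBuffer: CVPixelBuffer) {
    guard let request = trackingRequest else { return }
    try? sequenceHandler.perform([request], on: pixelBuffer)
    guard let observation = request.results?.first as? VNDetectedObjectObservation else { return }
    // Feed the tracked box back in for the next frame, and use its midpoint
    // for the existing raycast/node-positioning logic.
    request.inputObservation = observation
}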
Also, sceneView.hitTest() is deprecated. You should probably port that over to use ARSession.raycast().

How can I combine all 3 gyroscope axes to produce 1 int? Xcode Swift Core Motion

I've followed a few things, including Apple's documentation on Core Motion. This is probably very simple to do; I just can't put my finger on it. Basically, I'm trying to get all of the X, Y, and Z data, combine it, and make it constantly add to the "scoreValue" text: a running counter of the total degrees ever rotated.
import UIKit
import CoreMotion
class ViewController: UIViewController {
@IBOutlet var scoreValue: UITextView!
@IBOutlet weak var Label: UILabel!
var motion = CMMotionManager()
override func viewDidLoad() {
super.viewDidLoad()
myGyroscope()
view.backgroundColor = .systemBlue
}
func myGyroscope() {
motion.gyroUpdateInterval = 0.1
motion.startGyroUpdates(to: OperationQueue.current!) { [self]
(data, error) in
print(data as Any)
if let trueData = data {
self.view.reloadInputViews()
let x = trueData.rotationRate.x
let y = trueData.rotationRate.y
let z = trueData.rotationRate.z
self.UNDEFINED.text = "\(Double(x).rounded(toPlaces :0))"
self.UNDEFINED.text = "\(Double(y).rounded(toPlaces :0))"
self.UNDEFINED.text = "\(Double(z).rounded(toPlaces :0))"
}
}
return
}
}
extension Double {
/// Rounds the double to decimal places value
func rounded(toPlaces places:Int) -> Double {
let divisor = pow(10.0, Double(places))
return (self * divisor).rounded() / divisor
}
}
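One way to approach the running total, as a minimal sketch (summing the absolute values of all three axes is an assumption about what "combine" should mean): multiply the rotation rate, which is in radians per second, by the update interval, convert to degrees, and accumulate.

import CoreMotion

let motion = CMMotionManager()
var totalDegrees = 0.0

func startCountingRotation() {
    motion.gyroUpdateInterval = 0.1
    motion.startGyroUpdates(to: OperationQueue.main) { data, error in
        guard let rate = data?.rotationRate else { return }
        // rate * interval ≈ radians turned during this update tick.
        let radians = (abs(rate.x) + abs(rate.y) + abs(rate.z)) * motion.gyroUpdateInterval
        totalDegrees += radians * 180.0 / .pi
        // e.g. scoreValue.text = "\(Int(totalDegrees))"
    }
}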

How do I use Mapbox's new MGLOfflinePackDelegate correctly?

I'm creating an app which needs an offline map. I'm testing with Mapbox, which supports offline maps as of today (yay!). The code I have now seems to work for downloading the map, but the delegate that reports progress never triggers, and I don't have a clue why.
I have this class for my mapView:
import UIKit
import Mapbox
class MapController: UIViewController, MGLMapViewDelegate, UIPopoverPresentationControllerDelegate {
@IBOutlet var mapView: MGLMapView!
override func viewDidLoad() {
super.viewDidLoad()
downloadIfNeeded()
mapView.maximumZoomLevel = 18
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
}
func downloadIfNeeded() {
MGLOfflineStorage.sharedOfflineStorage().getPacksWithCompletionHandler { (packs, error) in guard error == nil else {
return
}
for pack in packs {
let userInfo = NSKeyedUnarchiver.unarchiveObjectWithData(pack.context) as! [String: String]
if userInfo["name"] == "London" {
// already downloaded
return
}
}
// define the download region
let sw = CLLocationCoordinate2DMake(51.212120, 4.415906)
let ne = CLLocationCoordinate2DMake(51.223781, 4.442401)
let bounds = MGLCoordinateBounds(sw: sw, ne: ne)
let region = MGLTilePyramidOfflineRegion(styleURL: MGLStyle.streetsStyleURL(), bounds: bounds, fromZoomLevel: 10, toZoomLevel: 12)
let userInfo = ["name": "London"]
let context = NSKeyedArchiver.archivedDataWithRootObject(userInfo)
MGLOfflineStorage.sharedOfflineStorage().addPackForRegion(region, withContext: context) { (pack, error) in
guard error == nil else {
return
}
// create popup window with delegate
let storyboard : UIStoryboard = UIStoryboard(name: "Main", bundle: nil)
let downloadProgress: MapDownloadController = storyboard.instantiateViewControllerWithIdentifier("MapDownloadController") as! MapDownloadController
downloadProgress.modalPresentationStyle = .Popover
downloadProgress.preferredContentSize = CGSizeMake(300, 150)
let popoverMapDownloadController = downloadProgress.popoverPresentationController
popoverMapDownloadController?.permittedArrowDirections = .Any
popoverMapDownloadController?.delegate = self
popoverMapDownloadController?.sourceView = self.mapView
popoverMapDownloadController?.sourceRect = CGRect(x: self.mapView.frame.midX, y: self.mapView.frame.midY, width: 1, height: 1)
self.presentViewController(downloadProgress, animated: true, completion: nil)
// set popup as delegate <----
pack!.delegate = downloadProgress
// start downloading
pack!.resume()
}
}
}
}
And MapDownloadController is a view that is displayed as a popup (see code above) and conforms to MGLOfflinePackDelegate:
import UIKit
import Mapbox
class MapDownloadController: UIViewController, MGLOfflinePackDelegate {
@IBOutlet var progress: UIProgressView!
@IBOutlet var progressText: UILabel!
override func viewDidLoad() {
super.viewDidLoad()
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
}
func offlinePack(pack: MGLOfflinePack, progressDidChange progress: MGLOfflinePackProgress) {
// this function is never called, but why? <----
let completed = progress.countOfResourcesCompleted
let expected = progress.countOfResourcesExpected
let bytes = progress.countOfBytesCompleted
let MB = bytes / 1024
let str: String = "\(completed)/\(expected) voltooid (\(MB)MB)"
progressText.text = str
self.progress.setProgress(Float(completed) / Float(expected), animated: true)
}
func offlinePack(pack: MGLOfflinePack, didReceiveError error: NSError) {
// neither is this one... <----
let userInfo = NSKeyedUnarchiver.unarchiveObjectWithData(pack.context) as! [String: String]
let strError = error.localizedFailureReason
}
func offlinePack(pack: MGLOfflinePack, didReceiveMaximumAllowedMapboxTiles maximumCount: UInt64) {
// .. or this one <----
let userInfo = NSKeyedUnarchiver.unarchiveObjectWithData(pack.context) as! [String: String]
}
}
This is all pretty much taken from the documentation, so why are the delegate's functions (func offlinePack) never called? I tested with breakpoints, so I am sure they are not. Still, the popup is shown and the region gets downloaded. (Checked by observing network traffic and with other code that lists the offline packs.)
Here’s an extremely simple implementation of Minh’s answer, using the current v3.2.0b1 example code. Expect this answer to become outdated quickly, as we’re still working on the v3.2.0 release.
import UIKit
import Mapbox
class ViewController: UIViewController, UIPopoverPresentationControllerDelegate, MGLOfflinePackDelegate {
@IBOutlet var mapView: MGLMapView!
// Array of offline packs for the delegate work around (and your UI, potentially)
var offlinePacks = [MGLOfflinePack]()
override func viewDidLoad() {
super.viewDidLoad()
mapView.maximumZoomLevel = 2
downloadOffline()
}
func downloadOffline() {
// Create a region that includes the current viewport and any tiles needed to view it when zoomed further in.
let region = MGLTilePyramidOfflineRegion(styleURL: mapView.styleURL, bounds: mapView.visibleCoordinateBounds, fromZoomLevel: mapView.zoomLevel, toZoomLevel: mapView.maximumZoomLevel)
// Store some data for identification purposes alongside the downloaded resources.
let userInfo = ["name": "My Offline Pack"]
let context = NSKeyedArchiver.archivedDataWithRootObject(userInfo)
// Create and register an offline pack with the shared offline storage object.
MGLOfflineStorage.sharedOfflineStorage().addPackForRegion(region, withContext: context) { (pack, error) in
guard error == nil else {
print("The pack couldn’t be created for some reason.")
return
}
// Set the pack’s delegate (assuming self conforms to the MGLOfflinePackDelegate protocol).
pack!.delegate = self
// Start downloading.
pack!.resume()
// Retain reference to pack to work around it being lost and not sending delegate messages
self.offlinePacks.append(pack!)
}
}
func offlinePack(pack: MGLOfflinePack, progressDidChange progress: MGLOfflinePackProgress) {
let userInfo = NSKeyedUnarchiver.unarchiveObjectWithData(pack.context) as! [String: String]
let completed = progress.countOfResourcesCompleted
let expected = progress.countOfResourcesExpected
print("Offline pack “\(userInfo["name"])” has downloaded \(completed) of \(expected) resources.")
}
func offlinePack(pack: MGLOfflinePack, didReceiveError error: NSError) {
let userInfo = NSKeyedUnarchiver.unarchiveObjectWithData(pack.context) as! [String: String]
print("Offline pack “\(userInfo["name"])” received error: \(error.localizedFailureReason)")
}
func offlinePack(pack: MGLOfflinePack, didReceiveMaximumAllowedMapboxTiles maximumCount: UInt64) {
let userInfo = NSKeyedUnarchiver.unarchiveObjectWithData(pack.context) as! [String: String]
print("Offline pack “\(userInfo["name"])” reached limit of \(maximumCount) tiles.")
}
}
(Cross-posted from this GitHub issue.)
This is a bug in the SDK. The workaround is for the completion handler to assign the passed-in MGLOfflinePack object to an ivar or other strong reference in the surrounding MapDownloadController class (example).
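A minimal sketch of that workaround using the names from the question's code (same era of the SDK): MapDownloadController keeps a strong reference to the pack in addition to being its delegate, and MapController assigns it when the pack is created.

import UIKit
import Mapbox

class MapDownloadController: UIViewController, MGLOfflinePackDelegate {
    @IBOutlet var progress: UIProgressView!
    @IBOutlet var progressText: UILabel!

    // Strong reference added as the workaround, so the pack isn't released
    // before it can deliver progress messages.
    var pack: MGLOfflinePack?

    func offlinePack(pack: MGLOfflinePack, progressDidChange progress: MGLOfflinePackProgress) {
        let completed = progress.countOfResourcesCompleted
        let expected = progress.countOfResourcesExpected
        self.progress.setProgress(Float(completed) / Float(expected), animated: true)
    }

    func offlinePack(pack: MGLOfflinePack, didReceiveError error: NSError) { /* handle error */ }

    func offlinePack(pack: MGLOfflinePack, didReceiveMaximumAllowedMapboxTiles maximumCount: UInt64) { /* handle tile limit */ }
}

// In MapController.downloadIfNeeded(), inside the addPackForRegion handler:
//   downloadProgress.pack = pack      // keep the pack alive
//   pack!.delegate = downloadProgress
//   pack!.resume()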

UIDynamicAnimator - Move view based on Device Motion Roll

I am trying to read my device's motion, specifically the roll of the device when it is in landscape mode, and translate the returned angle into a position of a UIView (basically making an on-screen "level" to show the user that the phone is at an ideal angle).
This code gives the desired roll result, but for some reason it is not updating the levelIndicator view as expected. I am not getting any errors, so I must be using UIPushBehavior incorrectly, but I am unclear on what I need to fix. I am not sure about setting the new Y position of the indicator on each motion update.
import UIKit
import AVFoundation
import CoreMotion
import GLKit
class CameraView: BaseViewController {
var animator : UIDynamicAnimator? = nil;
var currentRoll : Float = 0.0;
let manager = CMMotionManager()
let motionQueue = NSOperationQueue()
var countingDown:Bool = false;
@IBOutlet weak var levelIndicator: UIView!
@IBOutlet weak var level: UIView!
override func viewDidLoad() {
super.viewDidLoad()
self.animator = UIDynamicAnimator(referenceView: self.level)
let continuousPush: UIPushBehavior = UIPushBehavior(items: [levelIndicator], mode: UIPushBehaviorMode.Continuous)
self.animator?.addBehavior(continuousPush)
}
override func viewDidAppear(animated: Bool) {
super.viewDidAppear(true)
self.startReadingMotion()
}
func startReadingMotion() {
if manager.deviceMotionAvailable {
manager.deviceMotionUpdateInterval = 0.1
manager.startDeviceMotionUpdatesToQueue(motionQueue, withHandler: checkStability)
}
}
func checkStability(motion: CMDeviceMotion!, error: NSError!) {
var orientation = UIDevice.currentDevice().orientation
if (error != nil) {
NSLog("\(error)")
}
var quat = motion.attitude.quaternion
//We Probably only need to check the Angle of the Roll (Phone Angle in Landscape mode)
var roll = GLKMathRadiansToDegrees(Float(atan2(2*(quat.y*quat.w - quat.x*quat.z), 1 - 2*quat.y*quat.y - 2*quat.z*quat.z))) ;
//Other Angles are available for more refinement to stabilty
//var pitch = GLKMathRadiansToDegrees(Float(atan2(2*(quat.x*quat.w + quat.y*quat.z), 1 - 2*quat.x*quat.x - 2*quat.z*quat.z)));
//var yaw = GLKMathRadiansToDegrees(Float(asin(2*quat.x*quat.y + 2*quat.w*quat.z)));
if(orientation == .LandscapeLeft) {
roll *= -1
}
if(roll > 100) {
roll = 100
} else if(roll < 0) {
roll = 0
}
self.currentRoll = roll
var pos = self.level.frame.height*CGFloat(roll/100)
var rect = self.levelIndicator.frame
rect.origin.y = pos
self.levelIndicator.frame = rect
if(roll > 85 && roll < 87) {
if(!countingDown) {
//This is the ideal roll position of the phone
self.levelIndicator.backgroundColor = UIColor.redColor()
}
} else {
countingDown = false;
self.levelIndicator.backgroundColor = UIColor.blackColor()
}
}
func stopReading() {
manager.stopDeviceMotionUpdates();
}
}
For anyone interested, I ended up not using UIDynamicAnimator for this, but found a much simpler solution: converting the returned radians of the attitude to degrees and using that to check the roll of the device. I also added a dispatch to the main queue to update the UI of the on-screen level.
func checkStability(motion: CMDeviceMotion!, error: NSError!) {
var orientation = UIDevice.currentDevice().orientation
if (error != nil) {
NSLog("\(error)")
}
var roll:Float = 0.0
if let attitude = motion.attitude {
roll = GLKMathRadiansToDegrees(Float(attitude.roll))
}
dispatch_async(dispatch_get_main_queue()) {
var pos = self.level.frame.height*CGFloat(roll/100)
var rect = self.levelIndicator.frame
rect.origin.y = self.level.frame.height-pos
self.levelIndicator.frame = rect
if(roll > 85 && roll < 90) {
//This is where I want the Roll to be
} else {
//The Roll is not correct yet
}
}
}

IBOutlet nil after refreshing data

I'm using Xcode 6.3.1 and I'm facing a weird problem that I can't figure out.
I have a View Controller on my Storyboard that is opened with a modal segue. When the ViewController is opened, it loads some data from my backend (Parse): it looks first in the cache and shows cached data (if it exists) while the updated data from the server is retrieved in the background. The process is as follows:
Get cached data
If cached data exists then update interface
Request server data (In background)
When data arrives update interface
Everything works fine until step 4. When I try to refresh my interface, suddenly half of my @IBOutlets are nil and of course the app crashes.
What am I missing?
Here's the code:
override func viewDidLoad() {
super.viewDidLoad()
// Do any additional setup after loading the view.
//eventId is set when the ViewController is instantiated
if eventId != nil {
loadEvent(eventId)
}
}
func loadEvent(id: String) {
var query = Event.query()
query?.cachePolicy = Util.getCachePolicy() //First look in cache, the request network data
query?.getObjectInBackgroundWithId(id, block: { (event: PFObject?, error: NSError?) -> Void in
if error == nil {
var updatedEvent = event as! Event
self.event = updatedEvent
self.updateLayout()
//When self.updateLayout() is called with cached data
//all my IBOutlets are fine but when it's called the second time,
//with data from server half of the IBOutlets are nil
}
})
}
func updateLayout() {
if event != nil {
eventTitle.text = event.name
var paletteColor : UIColor!
var location = event.location
var locationName = location["name"] as! String
eventLocation.text = NSString(format: NSLocalizedString("event_subtitle", comment: ""), event.formattedTimes(), locationName) as String
eventDescription.text = event.abstract
if event.paletteColor != 0 {
paletteColor = Util.colorFromInt(event.paletteColor)
eventHeader.backgroundColor = paletteColor
speakersBlockTitle.textColor = paletteColor
mapButton.tintColor = paletteColor
}
if event.hasPhoto() {
self.eventPhoto.file = event.eventPhoto
self.eventPhoto.loadInBackground({ (image:UIImage?, error:NSError?) -> Void in
UIView.animateWithDuration(0.5, animations: { () -> Void in
self.eventPhoto.alpha = 1.0
})
})
} else {
self.eventPhoto.removeFromSuperview()
if paletteColor == nil {
paletteColor = eventHeader.backgroundColor
}
actionBar.backgroundColor = paletteColor
}
if event.speaker.isDataAvailable() {
var speaker = event.speaker
speakerName.text = speaker["name"] as? String
speakerInfo.text = speaker["speakerInfo"] as? String
speaker["speakerPhoto"]?.getDataInBackgroundWithBlock({ (imageData:NSData?, error:NSError?) -> Void in
if error == nil {
self.speakerPhoto.image = UIImage(data:imageData!)
self.speakerPhoto.layer.cornerRadius = self.speakerPhoto.frame.size.width/2
self.speakerPhoto.clipsToBounds = true
}
})
} else {
speakerBlock.removeFromSuperview()
}
UIView.animateWithDuration(0.5, animations: { () -> Void in
self.eventHeader.alpha = 1.0
self.eventDescription.alpha = 1.0
self.speakerBlock.alpha = 1.0
self.mapButton.alpha = 1.0
})
}
}
These are all the nil IBOutlets:
Since the code in my comment above doesn't display correctly, here's that comment again:
Something like this:
@IBOutlet weak var myOutlet: UIButton! {
didSet {
if myOutlet == nil {
// put a breakpoint on the next line
println("myOutlet set to nil")
}
}
}
