How to use SCNView as a subview of ARView?

I'm using the ARKit-CoreLocation library to present POIs in the AR world, on iOS 14. The trouble is that I can't use ARCL's SceneLocationView directly, because it is an SCNView: when I add it as a subview, it creates a new ARSession and its contents overlap my ARView, leaving my ARView in the background.
Code:
extension RealityKitViewController {
typealias Context = UIViewControllerRepresentableContext<RealityKitViewControllerRepresentable>
}
class RealityKitViewController: UIViewController {
let sceneLocationView = SceneLocationView()
let arView = ARView(frame: .zero)
let context : Context
let pins: [Pin]
var currentLocation : CLLocation? {
return sceneLocationView.sceneLocationManager.currentLocation
}
init (_ context : Context, pins: [Pin]) {
self.context = context
self.pins = pins
super.init(nibName: nil, bundle: nil)
}
required init?(coder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
func makeArView()
{
// Start AR session
let session = arView.session
let config = ARWorldTrackingConfiguration()
config.planeDetection = [.horizontal, .vertical]
session.run(config)
// Add coaching overlay
let coachingOverlay = ARCoachingOverlayView()
coachingOverlay.session = session
coachingOverlay.goal = .horizontalPlane
coachingOverlay.delegate = context.coordinator
arView.addSubview(coachingOverlay)
arView.debugOptions = [.showFeaturePoints, .showAnchorOrigins, .showAnchorGeometry]
// Handle ARSession events via delegate
context.coordinator.view = arView
session.delegate = context.coordinator
}
override func viewDidLoad() {
super.viewDidLoad()
// probably problem here
sceneLocationView.frame = view.bounds
arView.frame = sceneLocationView.bounds
sceneLocationView.addSubview(arView)
view.addSubview(sceneLocationView)
addPins()
}
override func viewDidLayoutSubviews() {
super.viewDidLayoutSubviews()
sceneLocationView.frame = view.bounds
}
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
DispatchQueue.main.asyncAfter(deadline: .now() + 0.2) {
self.sceneLocationView.run()
}
}
override func viewWillDisappear(_ animated: Bool) {
super.viewWillDisappear(animated)
sceneLocationView.pause()
}
func addPins() {
guard let currentLocation = currentLocation, currentLocation.horizontalAccuracy < 16 else {
return DispatchQueue.main.asyncAfter(deadline: .now() + 0.5) { [weak self] in
self?.addPins()
}
}
self.pins.forEach { pin in
guard pin.isLocation else { return }
guard let location = pin.location else { return assertionFailure() }
guard let image = UIImage(named: pin.image) else { return assertionFailure() }
let node = LocationAnnotationNode(location : location, image: image)
node.scaleRelativeToDistance = true
sceneLocationView.addLocationNodeWithConfirmedLocation(locationNode: node)
}
}
}
// If you want to test it, try placing these pins at a location whose coordinates you can easily get from Google Earth.
struct RealityKitViewControllerRepresentable : UIViewControllerRepresentable {
let pins = [Pin(image: "test", location: CLLocation(coordinate: CLLocationCoordinate2D(latitude: 0.03275742958, longitude: 0.32827424), altitude: 772.1489524841309), isLocation: true)]
@Binding var arActivate : Bool
func makeUIViewController(context: Context) -> RealityKitViewController {
let viewController = RealityKitViewController(context, pins: pins)
return viewController
}
func updateUIViewController(_ uiViewController: RealityKitViewController, context: Context) {
}
func makeCoordinator() -> Coordinator {
Coordinator()
}
}
class Coordinator: NSObject, ARSessionDelegate {
weak var view: ARView?
}
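A minimal layering sketch (not ARCL-specific, view names assumed): give the SCNView a transparent background and add it on top of the ARView, so the RealityKit camera feed stays visible underneath. Note that SceneLocationView still runs its own ARSession, so the two sessions may still contend for the camera; this only addresses the visual layering.
import UIKit
import SceneKit
import RealityKit
final class LayeredARViewController: UIViewController {
    let arView = ARView(frame: .zero)
    let scnView = SCNView(frame: .zero)
    override func viewDidLoad() {
        super.viewDidLoad()
        // RealityKit view at the bottom of the hierarchy
        arView.frame = view.bounds
        view.addSubview(arView)
        // Transparent SceneKit view layered on top
        scnView.frame = view.bounds
        scnView.backgroundColor = .clear   // lets the ARView show through
        scnView.scene = SCNScene()
        scnView.isUserInteractionEnabled = false
        view.addSubview(scnView)
    }
    override func viewDidLayoutSubviews() {
        super.viewDidLayoutSubviews()
        arView.frame = view.bounds
        scnView.frame = view.bounds
    }
}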

Record and play Video based on TensorFlow example Swift

UPDATE: I realised I had forgotten to ask for recording permission. That has now been fixed. However, when I press the Record button I get the error "Cannot create file". So when I start the recording, something is fishy with the path, maybe?
I am working on an app where I want to have my own neural network with the functionality to start recording a video. Thereafter I want to play the video and use information from the neural network.
I have a working version on Android; now I am trying to make something similar for iPhone. As a start, I have used an ImageClassifierExample from TensorFlowLite. The first task is to add a button Record which starts recording a video and then a button Play which plays the video.
I have implemented the two features, but when I try to play the video, it just keeps loading. Either the recording is not working, or the video player is not working (or both). I have checked that the paths are the same.
I am not so familiar with iOS development so some help would be nice.
This is the base I am starting from.
Here is my slightly adapted ViewController:
// Copyright 2019 The TensorFlow Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
import AVFoundation
import AVKit
import UIKit
class ViewController: UIViewController {
// MARK: Storyboards Connections
@IBOutlet weak var previewView: PreviewView!
@IBOutlet weak var cameraUnavailableLabel: UILabel!
@IBOutlet weak var resumeButton: UIButton!
@IBOutlet weak var bottomSheetView: CurvedView!
@IBOutlet weak var bottomSheetViewBottomSpace: NSLayoutConstraint!
@IBOutlet weak var bottomSheetStateImageView: UIImageView!
// MARK: Constants
private let animationDuration = 0.5
private let collapseTransitionThreshold: CGFloat = -40.0
private let expandThransitionThreshold: CGFloat = 40.0
private let delayBetweenInferencesMs: Double = 1000
// MARK: Instance Variables
// Holds the results at any time
private var result: Result?
private var initialBottomSpace: CGFloat = 0.0
private var previousInferenceTimeMs: TimeInterval = Date.distantPast.timeIntervalSince1970 * 1000
// MARK: Controllers that manage functionality
// Handles all the camera related functionality
private lazy var cameraCapture = CameraFeedManager(previewView: previewView)
private var isRecording = false // <<<----- Mine
private let captureSession: AVCaptureSession = AVCaptureSession()
// Handles all data preprocessing and makes calls to run inference through the `Interpreter`.
private var modelDataHandler: ModelDataHandler? =
ModelDataHandler(modelFileInfo: MobileNet.modelInfo, labelsFileInfo: MobileNet.labelsInfo)
@IBAction func startRecording(_ sender: Any) { // <<<----- Mine
print("Recording pressed")
if (!isRecording) {
cameraCapture.startRecording()
} else {
cameraCapture.stopRecording()
}
isRecording = !isRecording
}
// Handles the presenting of results on the screen
private var inferenceViewController: InferenceViewController?
// MARK: View Handling Methods
override func viewDidLoad() {
super.viewDidLoad()
guard modelDataHandler != nil else {
fatalError("Model set up failed")
}
#if targetEnvironment(simulator)
previewView.shouldUseClipboardImage = true
NotificationCenter.default.addObserver(self,
selector: #selector(classifyPasteboardImage),
name: UIApplication.didBecomeActiveNotification,
object: nil)
#endif
cameraCapture.delegate = self
addPanGesture()
}
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
changeBottomViewState()
#if !targetEnvironment(simulator)
cameraCapture.checkCameraConfigurationAndStartSession()
#endif
}
#if !targetEnvironment(simulator)
override func viewWillDisappear(_ animated: Bool) {
super.viewWillDisappear(animated)
cameraCapture.stopSession()
}
#endif
override var preferredStatusBarStyle: UIStatusBarStyle {
return .lightContent
}
func presentUnableToResumeSessionAlert() {
let alert = UIAlertController(
title: "Unable to Resume Session",
message: "There was an error while attempting to resume session.",
preferredStyle: .alert
)
alert.addAction(UIAlertAction(title: "OK", style: .default, handler: nil))
self.present(alert, animated: true)
}
// MARK: Storyboard Segue Handlers
override func prepare(for segue: UIStoryboardSegue, sender: Any?) {
super.prepare(for: segue, sender: sender)
if segue.identifier == "EMBED" {
guard let tempModelDataHandler = modelDataHandler else {
return
}
inferenceViewController = segue.destination as? InferenceViewController
inferenceViewController?.wantedInputHeight = tempModelDataHandler.inputHeight
inferenceViewController?.wantedInputWidth = tempModelDataHandler.inputWidth
inferenceViewController?.maxResults = tempModelDataHandler.resultCount
inferenceViewController?.threadCountLimit = tempModelDataHandler.threadCountLimit
inferenceViewController?.delegate = self
}
}
@objc func classifyPasteboardImage() {
guard let image = UIPasteboard.general.images?.first else {
return
}
guard let buffer = CVImageBuffer.buffer(from: image) else {
return
}
previewView.image = image
DispatchQueue.global().async {
self.didOutput(pixelBuffer: buffer)
}
}
deinit {
NotificationCenter.default.removeObserver(self)
}
}
// MARK: InferenceViewControllerDelegate Methods
extension ViewController: InferenceViewControllerDelegate {
func didChangeThreadCount(to count: Int) {
if modelDataHandler?.threadCount == count { return }
modelDataHandler = ModelDataHandler(
modelFileInfo: MobileNet.modelInfo,
labelsFileInfo: MobileNet.labelsInfo,
threadCount: count
)
}
}
// MARK: CameraFeedManagerDelegate Methods
extension ViewController: CameraFeedManagerDelegate {
func didOutput(pixelBuffer: CVPixelBuffer) {
let currentTimeMs = Date().timeIntervalSince1970 * 1000
guard (currentTimeMs - previousInferenceTimeMs) >= delayBetweenInferencesMs else { return }
previousInferenceTimeMs = currentTimeMs
// Pass the pixel buffer to TensorFlow Lite to perform inference.
result = modelDataHandler?.runModel(onFrame: pixelBuffer)
// Display results by handing off to the InferenceViewController.
DispatchQueue.main.async {
let resolution = CGSize(width: CVPixelBufferGetWidth(pixelBuffer), height: CVPixelBufferGetHeight(pixelBuffer))
self.inferenceViewController?.inferenceResult = self.result
self.inferenceViewController?.resolution = resolution
self.inferenceViewController?.tableView.reloadData()
}
}
// MARK: Session Handling Alerts
func sessionWasInterrupted(canResumeManually resumeManually: Bool) {
// Updates the UI when the session is interrupted.
if resumeManually {
self.resumeButton.isHidden = false
} else {
self.cameraUnavailableLabel.isHidden = false
}
}
func sessionInterruptionEnded() {
// Updates UI once session interruption has ended.
if !self.cameraUnavailableLabel.isHidden {
self.cameraUnavailableLabel.isHidden = true
}
if !self.resumeButton.isHidden {
self.resumeButton.isHidden = true
}
}
func sessionRunTimeErrorOccured() {
// Handles session run time error by updating the UI and providing a button if session can be manually resumed.
self.resumeButton.isHidden = false
previewView.shouldUseClipboardImage = true
}
func presentCameraPermissionsDeniedAlert() {
let alertController = UIAlertController(title: "Camera Permissions Denied", message: "Camera permissions have been denied for this app. You can change this by going to Settings", preferredStyle: .alert)
let cancelAction = UIAlertAction(title: "Cancel", style: .cancel, handler: nil)
let settingsAction = UIAlertAction(title: "Settings", style: .default) { (action) in
UIApplication.shared.open(URL(string: UIApplication.openSettingsURLString)!, options: [:], completionHandler: nil)
}
alertController.addAction(cancelAction)
alertController.addAction(settingsAction)
present(alertController, animated: true, completion: nil)
previewView.shouldUseClipboardImage = true
}
func presentVideoConfigurationErrorAlert() {
let alert = UIAlertController(title: "Camera Configuration Failed", message: "There was an error while configuring camera.", preferredStyle: .alert)
alert.addAction(UIAlertAction(title: "OK", style: .default, handler: nil))
self.present(alert, animated: true)
previewView.shouldUseClipboardImage = true
}
}
// MARK: Bottom Sheet Interaction Methods
extension ViewController {
// MARK: Bottom Sheet Interaction Methods
/**
This method adds a pan gesture to make the bottom sheet interactive.
*/
private func addPanGesture() {
let panGesture = UIPanGestureRecognizer(target: self, action: #selector(ViewController.didPan(panGesture:)))
bottomSheetView.addGestureRecognizer(panGesture)
}
/** Change whether bottom sheet should be in expanded or collapsed state.
*/
private func changeBottomViewState() {
guard let inferenceVC = inferenceViewController else {
return
}
if bottomSheetViewBottomSpace.constant == inferenceVC.collapsedHeight - bottomSheetView.bounds.size.height {
bottomSheetViewBottomSpace.constant = 0.0
}
else {
bottomSheetViewBottomSpace.constant = inferenceVC.collapsedHeight - bottomSheetView.bounds.size.height
}
setImageBasedOnBottomViewState()
}
/**
Set image of the bottom sheet icon based on whether it is expanded or collapsed
*/
private func setImageBasedOnBottomViewState() {
if bottomSheetViewBottomSpace.constant == 0.0 {
bottomSheetStateImageView.image = UIImage(named: "down_icon")
}
else {
bottomSheetStateImageView.image = UIImage(named: "up_icon")
}
}
/**
This method responds to the user panning on the bottom sheet.
*/
@objc func didPan(panGesture: UIPanGestureRecognizer) {
// Opens or closes the bottom sheet based on the user's interaction with the bottom sheet.
let translation = panGesture.translation(in: view)
switch panGesture.state {
case .began:
initialBottomSpace = bottomSheetViewBottomSpace.constant
translateBottomSheet(withVerticalTranslation: translation.y)
case .changed:
translateBottomSheet(withVerticalTranslation: translation.y)
case .cancelled:
setBottomSheetLayout(withBottomSpace: initialBottomSpace)
case .ended:
translateBottomSheetAtEndOfPan(withVerticalTranslation: translation.y)
setImageBasedOnBottomViewState()
initialBottomSpace = 0.0
default:
break
}
}
/**
This method sets bottom sheet translation while pan gesture state is continuously changing.
*/
private func translateBottomSheet(withVerticalTranslation verticalTranslation: CGFloat) {
let bottomSpace = initialBottomSpace - verticalTranslation
guard bottomSpace <= 0.0 && bottomSpace >= inferenceViewController!.collapsedHeight - bottomSheetView.bounds.size.height else {
return
}
setBottomSheetLayout(withBottomSpace: bottomSpace)
}
/**
This method changes bottom sheet state to either fully expanded or closed at the end of pan.
*/
private func translateBottomSheetAtEndOfPan(withVerticalTranslation verticalTranslation: CGFloat) {
// Changes bottom sheet state to either fully open or closed at the end of pan.
let bottomSpace = bottomSpaceAtEndOfPan(withVerticalTranslation: verticalTranslation)
setBottomSheetLayout(withBottomSpace: bottomSpace)
}
/**
Return the final state of the bottom sheet view (whether fully collapsed or expanded) that is to be retained.
*/
private func bottomSpaceAtEndOfPan(withVerticalTranslation verticalTranslation: CGFloat) -> CGFloat {
// Calculates whether to fully expand or collapse bottom sheet when pan gesture ends.
var bottomSpace = initialBottomSpace - verticalTranslation
var height: CGFloat = 0.0
if initialBottomSpace == 0.0 {
height = bottomSheetView.bounds.size.height
}
else {
height = inferenceViewController!.collapsedHeight
}
let currentHeight = bottomSheetView.bounds.size.height + bottomSpace
if currentHeight - height <= collapseTransitionThreshold {
bottomSpace = inferenceViewController!.collapsedHeight - bottomSheetView.bounds.size.height
}
else if currentHeight - height >= expandThransitionThreshold {
bottomSpace = 0.0
}
else {
bottomSpace = initialBottomSpace
}
return bottomSpace
}
/**
This method layouts the change of the bottom space of bottom sheet with respect to the view managed by this controller.
*/
func setBottomSheetLayout(withBottomSpace bottomSpace: CGFloat) {
view.setNeedsLayout()
bottomSheetViewBottomSpace.constant = bottomSpace
view.setNeedsLayout()
}
}
CameraFeedManager:
// Copyright 2019 The TensorFlow Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
import UIKit
import AVFoundation
// MARK: CameraFeedManagerDelegate Declaration
protocol CameraFeedManagerDelegate: AnyObject {
/**
This method delivers the pixel buffer of the current frame seen by the device's camera.
*/
func didOutput(pixelBuffer: CVPixelBuffer)
/**
This method indicates that the camera permissions have been denied.
*/
func presentCameraPermissionsDeniedAlert()
/**
This method indicates that there was an error in the video configuration.
*/
func presentVideoConfigurationErrorAlert()
/**
This method indicates that a session runtime error occurred.
*/
func sessionRunTimeErrorOccured()
/**
This method indicates that the session was interrupted.
*/
func sessionWasInterrupted(canResumeManually resumeManually: Bool)
/**
This method indicates that the session interruption has ended.
*/
func sessionInterruptionEnded()
}
/**
This enum holds the state of the camera initialization.
*/
enum CameraConfiguration {
case success
case failed
case permissionDenied
}
/**
This class manages all camera related functionality
*/
class CameraFeedManager: NSObject, AVCaptureFileOutputRecordingDelegate {
func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) { // << --- Mine
print("Video recorded to: " + outputFileURL.absoluteString)
}
// MARK: Camera Related Instance Variables
private let session: AVCaptureSession = AVCaptureSession()
private let previewView: PreviewView
private let sessionQueue = DispatchQueue(label: "sessionQueue")
private var cameraConfiguration: CameraConfiguration = .failed
private lazy var videoDataOutput = AVCaptureVideoDataOutput()
private var movieDataOutput = AVCaptureMovieFileOutput() // << --- Mine
private var isSessionRunning = false
// MARK: CameraFeedManagerDelegate
weak var delegate: CameraFeedManagerDelegate?
// MARK: Initializer
init(previewView: PreviewView) {
self.previewView = previewView
super.init()
// Initializes the session
session.sessionPreset = .high
self.previewView.session = session
self.previewView.previewLayer.connection?.videoOrientation = .portrait
self.previewView.previewLayer.videoGravity = .resizeAspectFill
self.attemptToConfigureSession()
}
// MARK: Session Start and End methods
/**
This method starts an AVCaptureSession based on whether the camera configuration was successful.
*/
func checkCameraConfigurationAndStartSession() {
sessionQueue.async {
switch self.cameraConfiguration {
case .success:
self.addObservers()
self.startSession()
case .failed:
DispatchQueue.main.async {
self.delegate?.presentVideoConfigurationErrorAlert()
}
case .permissionDenied:
DispatchQueue.main.async {
self.delegate?.presentCameraPermissionsDeniedAlert()
}
}
}
}
/**
This method stops a running AVCaptureSession.
*/
func stopSession() {
self.removeObservers()
sessionQueue.async {
if self.session.isRunning {
self.session.stopRunning()
self.isSessionRunning = self.session.isRunning
}
}
}
/**
This method resumes an interrupted AVCaptureSession.
*/
func resumeInterruptedSession(withCompletion completion: @escaping (Bool) -> ()) {
sessionQueue.async {
self.startSession()
DispatchQueue.main.async {
completion(self.isSessionRunning)
}
}
}
/**
This method starts the AVCaptureSession
**/
private func startSession() {
self.session.startRunning()
self.isSessionRunning = self.session.isRunning
}
// MARK: Session Configuration Methods.
/**
This method requests for camera permissions and handles the configuration of the session and stores the result of configuration.
*/
private func attemptToConfigureSession() {
switch AVCaptureDevice.authorizationStatus(for: .video) {
case .authorized:
self.cameraConfiguration = .success
case .notDetermined:
self.sessionQueue.suspend()
self.requestCameraAccess(completion: { (granted) in
self.sessionQueue.resume()
})
case .denied:
self.cameraConfiguration = .permissionDenied
default:
break
}
self.sessionQueue.async {
self.configureSession()
}
}
/**
This method requests for camera permissions.
*/
private func requestCameraAccess(completion: @escaping (Bool) -> ()) {
AVCaptureDevice.requestAccess(for: .video) { (granted) in
if !granted {
self.cameraConfiguration = .permissionDenied
}
else {
self.cameraConfiguration = .success
}
completion(granted)
}
}
/**
This method handles all the steps to configure an AVCaptureSession.
*/
private func configureSession() {
guard cameraConfiguration == .success else {
return
}
session.beginConfiguration()
// Tries to add an AVCaptureDeviceInput.
guard addVideoDeviceInput() == true else {
self.session.commitConfiguration()
self.cameraConfiguration = .failed
return
}
// Tries to add an AVCaptureVideoDataOutput.
guard addVideoDataOutput() else {
self.session.commitConfiguration()
self.cameraConfiguration = .failed
return
}
session.commitConfiguration()
self.cameraConfiguration = .success
}
func startRecording() { // << --- Mine
self.session.addOutput(movieDataOutput)
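// Note: .desktopDirectory is not part of an iOS app's sandbox, so this
// URL is not writable — hence the "Cannot create file" error. The fix
// at the end of this post builds the path in the Documents directory instead.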
guard let homeDirectory = FileManager.default.urls(for: .desktopDirectory, in: .userDomainMask).first else { return }
let url = URL(fileURLWithPath: homeDirectory.absoluteString + "/mymovie.mov")
movieDataOutput.startRecording(to: url , recordingDelegate: self)
}
func stopRecording() { // <<< -- Mine
self.movieDataOutput.stopRecording()
self.session.removeOutput(movieDataOutput)
}
/**
This method tries to add an AVCaptureDeviceInput to the current AVCaptureSession.
*/
private func addVideoDeviceInput() -> Bool {
/**Tries to get the default back camera.
*/
guard let camera = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back) else {
return false
}
do {
let videoDeviceInput = try AVCaptureDeviceInput(device: camera)
if session.canAddInput(videoDeviceInput) {
session.addInput(videoDeviceInput)
return true
}
else {
return false
}
}
catch {
fatalError("Cannot create video device input")
}
}
/**
This method tries to add an AVCaptureVideoDataOutput to the current AVCaptureSession.
*/
private func addVideoDataOutput() -> Bool {
let sampleBufferQueue = DispatchQueue(label: "sampleBufferQueue")
videoDataOutput.setSampleBufferDelegate(self, queue: sampleBufferQueue)
videoDataOutput.alwaysDiscardsLateVideoFrames = true
videoDataOutput.videoSettings = [ String(kCVPixelBufferPixelFormatTypeKey) : kCMPixelFormat_32BGRA]
if session.canAddOutput(videoDataOutput) {
session.addOutput(videoDataOutput)
videoDataOutput.connection(with: .video)?.videoOrientation = .portrait
return true
}
return false
}
// MARK: Notification Observer Handling
private func addObservers() {
NotificationCenter.default.addObserver(self, selector: #selector(CameraFeedManager.sessionRuntimeErrorOccured(notification:)), name: NSNotification.Name.AVCaptureSessionRuntimeError, object: session)
NotificationCenter.default.addObserver(self, selector: #selector(CameraFeedManager.sessionWasInterrupted(notification:)), name: NSNotification.Name.AVCaptureSessionWasInterrupted, object: session)
NotificationCenter.default.addObserver(self, selector: #selector(CameraFeedManager.sessionInterruptionEnded), name: NSNotification.Name.AVCaptureSessionInterruptionEnded, object: session)
}
private func removeObservers() {
NotificationCenter.default.removeObserver(self, name: NSNotification.Name.AVCaptureSessionRuntimeError, object: session)
NotificationCenter.default.removeObserver(self, name: NSNotification.Name.AVCaptureSessionWasInterrupted, object: session)
NotificationCenter.default.removeObserver(self, name: NSNotification.Name.AVCaptureSessionInterruptionEnded, object: session)
}
// MARK: Notification Observers
@objc func sessionWasInterrupted(notification: Notification) {
if let userInfoValue = notification.userInfo?[AVCaptureSessionInterruptionReasonKey] as AnyObject?,
let reasonIntegerValue = userInfoValue.integerValue,
let reason = AVCaptureSession.InterruptionReason(rawValue: reasonIntegerValue) {
print("Capture session was interrupted with reason \(reason)")
var canResumeManually = false
if reason == .videoDeviceInUseByAnotherClient {
canResumeManually = true
} else if reason == .videoDeviceNotAvailableWithMultipleForegroundApps {
canResumeManually = false
}
self.delegate?.sessionWasInterrupted(canResumeManually: canResumeManually)
}
}
@objc func sessionInterruptionEnded(notification: Notification) {
self.delegate?.sessionInterruptionEnded()
}
@objc func sessionRuntimeErrorOccured(notification: Notification) {
guard let error = notification.userInfo?[AVCaptureSessionErrorKey] as? AVError else {
return
}
print("Capture session runtime error: \(error)")
if error.code == .mediaServicesWereReset {
sessionQueue.async {
if self.isSessionRunning {
self.startSession()
} else {
DispatchQueue.main.async {
self.delegate?.sessionRunTimeErrorOccured()
}
}
}
} else {
self.delegate?.sessionRunTimeErrorOccured()
}
}
}
/**
AVCaptureVideoDataOutputSampleBufferDelegate
*/
extension CameraFeedManager: AVCaptureVideoDataOutputSampleBufferDelegate {
/** This method delivers the CVPixelBuffer of the frame currently seen by the camera.
*/
func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
// Converts the CMSampleBuffer to a CVPixelBuffer.
let pixelBuffer: CVPixelBuffer? = CMSampleBufferGetImageBuffer(sampleBuffer)
guard let imagePixelBuffer = pixelBuffer else {
return
}
// Delegates the pixel buffer to the ViewController.
delegate?.didOutput(pixelBuffer: imagePixelBuffer)
}
}
PlayerController:
import Foundation
import UIKit
import AVFoundation
import AVKit
class PlayerController : UIViewController {
override func viewDidLoad() {
super.viewDidLoad()
}
override func viewDidAppear(_ animated: Bool) {
super.viewDidAppear(animated)
guard let homeDirectory = FileManager.default.urls(for: .desktopDirectory, in: .userDomainMask).first else { return }
let url = URL(fileURLWithPath: homeDirectory.absoluteString + "/mymovie.mov")
print(url.absoluteString)
let player = AVPlayer(url: url) // video path coming from above function
let playerViewController = AVPlayerViewController()
playerViewController.player = player
self.present(playerViewController, animated: true) {
playerViewController.player!.play()
}
}
}
The solution was to create the path using:
private func documentDirectory() -> String {
let documentDirectory = NSSearchPathForDirectoriesInDomains(.documentDirectory,
.userDomainMask,
true)
return documentDirectory[0]
}
private func append(toPath path: String,
withPathComponent pathComponent: String) -> String? {
if var pathURL = URL(string: path) {
pathURL.appendPathComponent(pathComponent)
return pathURL.absoluteString
}
return nil
}
and
guard let path = append(toPath: documentDirectory(), withPathComponent: "movie_test.mov") else {return}
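Putting it together, a sketch of how startRecording could use these helpers (same session and movieDataOutput as above; the file name movie_test.mov is taken from the snippet):
func startRecording() {
    // Build a writable path inside the app's Documents directory.
    guard let path = append(toPath: documentDirectory(),
                            withPathComponent: "movie_test.mov") else { return }
    if session.canAddOutput(movieDataOutput) {
        session.addOutput(movieDataOutput)
    }
    movieDataOutput.startRecording(to: URL(fileURLWithPath: path), recordingDelegate: self)
}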

Scanning barcodes one at a time in SwiftUI

I've used this guide to create my scanner:
I first run QRCodeScan() from content view with an overlay:
import SwiftUI
struct ContentView: View {
var body: some View {
ZStack {
QRCodeScan()
ScannerOverlayView()
}
}
}
QRCodeScan:
import SwiftUI
struct QRCodeScan: UIViewControllerRepresentable {
func makeCoordinator() -> Coordinator {
Coordinator(self)
}
func makeUIViewController(context: Context) -> ScannerViewController {
let vc = ScannerViewController()
vc.delegate = context.coordinator
return vc
}
func updateUIViewController(_ vc: ScannerViewController, context: Context) {
}
class Coordinator: NSObject, QRCodeScannerDelegate {
@State var barcode = ""
func codeDidFind(_ code: String) {
barcode = code
FoundItemSheet(barcode: self.$barcode) //This creates a sheet at the bottom of the screen, saying if a product was found with the barcode
}
var parent: QRCodeScan
init(_ parent: QRCodeScan) {
self.parent = parent
}
}
}
ScannerViewController:
protocol QRCodeScannerDelegate {
func codeDidFind(_ code: String)
}
import AVFoundation
import UIKit
class ScannerViewController: UIViewController, AVCaptureMetadataOutputObjectsDelegate {
var captureSession: AVCaptureSession!
var previewLayer: AVCaptureVideoPreviewLayer!
var delegate: QRCodeScannerDelegate?
override func viewDidLoad() {
super.viewDidLoad()
view.backgroundColor = UIColor.black
captureSession = AVCaptureSession()
guard let videoCaptureDevice = AVCaptureDevice.default(for: .video) else { return }
let videoInput: AVCaptureDeviceInput
do {
videoInput = try AVCaptureDeviceInput(device: videoCaptureDevice)
} catch {
return
}
if (captureSession.canAddInput(videoInput)) {
captureSession.addInput(videoInput)
} else {
failed()
return
}
let metadataOutput = AVCaptureMetadataOutput()
if (captureSession.canAddOutput(metadataOutput)) {
captureSession.addOutput(metadataOutput)
metadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
metadataOutput.metadataObjectTypes = [.ean13, .ean8, .qr]
} else {
failed()
return
}
previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
previewLayer.frame = view.layer.bounds
previewLayer.videoGravity = .resizeAspectFill
view.layer.addSublayer(previewLayer)
captureSession.startRunning()
}
func failed() {
let ac = UIAlertController(title: "Scanning not supported", message: "Your device does not support scanning a code from an item. Please use a device with a camera.", preferredStyle: .alert)
ac.addAction(UIAlertAction(title: "OK", style: .default))
present(ac, animated: true)
captureSession = nil
}
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
if (captureSession?.isRunning == false) {
captureSession.startRunning()
}
}
override func viewWillDisappear(_ animated: Bool) {
super.viewWillDisappear(animated)
if (captureSession?.isRunning == true) {
captureSession.stopRunning()
}
}
func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
captureSession.stopRunning()
if let metadataObject = metadataObjects.first {
guard let readableObject = metadataObject as? AVMetadataMachineReadableCodeObject else { return }
guard let stringValue = readableObject.stringValue else { return }
AudioServicesPlaySystemSound(SystemSoundID(kSystemSoundID_Vibrate))
found(code: stringValue)
}
dismiss(animated: true)
}
func found(code: String) {
self.delegate?.codeDidFind(code)
}
override var prefersStatusBarHidden: Bool {
return true
}
override var supportedInterfaceOrientations: UIInterfaceOrientationMask {
return .portrait
}
}
It scans a barcode successfully, but that picture remains on the screen. I want a barcode to be scanned, the code passed to FoundItemSheet(), and then the scanner to open again.
I've tried running QRCodeScan() inside of codeDidFind() and calling the ScannerViewController again.
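One way to get that flow (a sketch against the ScannerViewController above): instead of dismissing the controller after a scan, hand the code to the delegate and restart the session after a short delay, so the camera keeps scanning one code at a time.
func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
    captureSession.stopRunning()
    if let readableObject = metadataObjects.first as? AVMetadataMachineReadableCodeObject,
       let stringValue = readableObject.stringValue {
        AudioServicesPlaySystemSound(SystemSoundID(kSystemSoundID_Vibrate))
        found(code: stringValue)
    }
    // Restart instead of dismissing, so the next barcode can be scanned.
    DispatchQueue.main.asyncAfter(deadline: .now() + 1.0) { [weak self] in
        self?.captureSession.startRunning()
    }
}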

BarCodeScanning Not Working Swift iOS 11

Before you jump in and tell me it's a duplicate or send me links, check out the code.
The camera shows up but the delegate for output is not called.
import AVFoundation
import UIKit
protocol ScannerViewControllerOutputDelegate : class {
func scannerOutput(scannedString: String?)
}
class ScannerViewController: UIViewController {
public weak var delegate: ScannerViewControllerOutputDelegate?
@IBOutlet private weak var cameraView: UIView!
@IBOutlet private weak var lblDataType: UILabel!
@IBOutlet private weak var lblDataInfo: UILabel!
private let captureSession: AVCaptureSession = AVCaptureSession()
private var previewLayer: AVCaptureVideoPreviewLayer!
private var presenter: ScannerViewPresenter?
deinit {
captureSession.stopRunning()
presenter = nil
delegate = nil
}
@IBAction func didTapCancelButton(_ sender: UIBarButtonItem) {
print("ScannerViewController -> USER CANCELED: will hide scanner view controller")
captureSession.stopRunning()
dismiss(animated: true) {
print("ScannerViewController -> USER CANCELED: did hide scanner view controller")
self.delegate?.scannerOutput(scannedString: nil)
}
}
override func viewDidLoad() {
super.viewDidLoad()
presenter = ScannerViewPresenter(withController: self)
self.modalPresentationStyle = .formSheet
view.backgroundColor = .eify_darkGray
guard let videoInput = initializeDeviceCamera() else {
presenter?.deviceNotSupportedAlert()
return
}
guard captureSession.add(captureInput: videoInput) else {
presenter?.deviceNotSupportedAlert()
return
}
guard captureSession.addMetadataOutput(withDelegate: self, andQueue: .global()) else {
presenter?.deviceNotSupportedAlert()
return
}
setupScanPreview()
captureSession.startRunning()
}
fileprivate func initializeDeviceCamera() -> AVCaptureDeviceInput? {
guard let videoCaptureDevice = AVCaptureDevice.default(for: AVMediaType.video) else {
presenter?.deviceNotSupportedAlert()
return nil
}
do {
let videoInput: AVCaptureDeviceInput
videoInput = try AVCaptureDeviceInput(device: videoCaptureDevice)
return videoInput
} catch let outError as NSError {
presenter?.deviceNotSupportedAlert()
print("ScannerViewController -> Device Not Supported: \(outError.description)")
return nil
}
}
fileprivate func setupScanPreview() {
previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
previewLayer.frame = cameraView.layer.bounds
previewLayer.videoGravity = AVLayerVideoGravity.resizeAspectFill
view.layer.addSublayer(previewLayer)
}
fileprivate func stopSession() {
if (captureSession.isRunning == true) {
captureSession.stopRunning()
}
}
fileprivate func startSession() {
if (captureSession.isRunning == false) {
captureSession.startRunning()
}
}
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
startSession()
}
override func viewWillDisappear(_ animated: Bool) {
super.viewWillDisappear(animated)
stopSession()
}
fileprivate func found(code: String?) {
delegate?.scannerOutput(scannedString: code)
}
override var prefersStatusBarHidden: Bool {
return true
}
override var supportedInterfaceOrientations: UIInterfaceOrientationMask {
return .all
}
}
extension ScannerViewController : AVCaptureMetadataOutputObjectsDelegate {
func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
captureSession.stopRunning()
if let metadataObject = metadataObjects.first {
let readableObject = metadataObject as! AVMetadataMachineReadableCodeObject
AudioServicesPlaySystemSound(SystemSoundID(kSystemSoundID_Vibrate))
found(code: readableObject.stringValue)
}
dismiss(animated: true)
}
}
extension AVCaptureSession {
func add(captureInput videoInput: AVCaptureDeviceInput) -> Bool {
if self.canAddInput(videoInput) {
self.addInput(videoInput)
return true
}
return false
}
func addMetadataOutput(withDelegate delegate: AVCaptureMetadataOutputObjectsDelegate, andQueue queue: DispatchQueue) -> Bool {
let metadataOutput = AVCaptureMetadataOutput()
if self.canAddOutput(metadataOutput) {
metadataOutput.setMetadataObjectsDelegate(delegate, queue: queue)
if metadataOutput.canAddObject(ofType: .qr) {
metadataOutput.metadataObjectTypes = [.qr]
}
self.addOutput(metadataOutput)
return true
}
return false
}
}
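One thing worth checking with this setup: AVCaptureMetadataOutput only populates availableMetadataObjectTypes after the output has been added to the session, so configuring the types before addOutput can leave the output with nothing to report. A sketch of the helper with the ordering swapped (passing .main as the delegate queue also keeps the UI work in the callback on the main thread):
func addMetadataOutput(withDelegate delegate: AVCaptureMetadataOutputObjectsDelegate, andQueue queue: DispatchQueue) -> Bool {
    let metadataOutput = AVCaptureMetadataOutput()
    guard canAddOutput(metadataOutput) else { return false }
    // Add the output first; only then are the available types known.
    addOutput(metadataOutput)
    metadataOutput.setMetadataObjectsDelegate(delegate, queue: queue)
    if metadataOutput.availableMetadataObjectTypes.contains(.qr) {
        metadataOutput.metadataObjectTypes = [.qr]
    }
    return true
}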

How to take a picture using the proximity sensor?

I am having trouble getting the device to take an image using the rear camera when the proximity sensor is triggered. I don't want the camera preview to show up; I just want the device to take the photo and present it in the imageView. I have the proximity sensor working, and I am using imagePicker.takePicture() to take the image when the proximity sensor fires, but that doesn't seem to work. What method/function can I use to take the picture programmatically, without user input?
This is my code so far:
class ViewController: UIViewController, UINavigationControllerDelegate, UIImagePickerControllerDelegate {
@IBOutlet var imageView: UIImageView!
var imagePicker: UIImagePickerController!
//*The function in question*
func proximityChanged(notification: NSNotification) {
let device = notification.object as? UIDevice
if device?.proximityState == true {
print("\(device) detected!")
}
}
}
If you have trouble capturing photos with UIImagePickerController, I suggest using AVFoundation.
Below is a working example. Photo capture is triggered by the proximity sensor.
You can add a preview if you need it.
import UIKit
import AVFoundation
final class CaptureViewController: UIViewController {
@IBOutlet weak var imageView: UIImageView!
private static let captureSessionPreset = AVCaptureSessionPresetPhoto
private var captureSession: AVCaptureSession!
private var photoOutput: AVCaptureStillImageOutput!
private var initialized = false
override func viewDidLoad() {
super.viewDidLoad()
initialized = setupCaptureSession()
}
override func viewWillAppear(animated: Bool) {
super.viewWillAppear(animated)
if initialized {
captureSession.startRunning()
UIDevice.currentDevice().proximityMonitoringEnabled = true
NSNotificationCenter.defaultCenter().addObserver(self, selector: #selector(proximityStateDidChange), name: UIDeviceProximityStateDidChangeNotification, object: nil)
}
}
override func viewDidDisappear(animated: Bool) {
super.viewDidDisappear(animated)
if initialized {
NSNotificationCenter.defaultCenter().removeObserver(self, name: UIDeviceProximityStateDidChangeNotification, object: nil)
UIDevice.currentDevice().proximityMonitoringEnabled = false
captureSession.stopRunning()
}
}
dynamic func proximityStateDidChange(notification: NSNotification) {
if UIDevice.currentDevice().proximityState {
captureImage()
}
}
// MARK: - Capture Image
private func captureImage() {
if let c = findConnection() {
photoOutput.captureStillImageAsynchronouslyFromConnection(c) { sampleBuffer, error in
if let jpeg = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(sampleBuffer),
let image = UIImage(data: jpeg)
{
dispatch_async(dispatch_get_main_queue()) { [weak self] in
self?.imageView.image = image
}
}
}
}
}
private func findConnection() -> AVCaptureConnection? {
for c in photoOutput.connections {
let c = c as? AVCaptureConnection
for p in c?.inputPorts ?? [] {
if p.mediaType == AVMediaTypeVideo {
return c
}
}
}
return nil
}
// MARK: - Setup Capture Session
private func setupCaptureSession() -> Bool {
captureSession = AVCaptureSession()
if captureSession.canSetSessionPreset(CaptureViewController.captureSessionPreset) {
captureSession.sessionPreset = CaptureViewController.captureSessionPreset
if setupCaptureSessionInput() && setupCaptureSessionOutput() {
return true
}
}
return false
}
private func setupCaptureSessionInput() -> Bool {
if let captureDevice = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo),
let captureDeviceInput = try? AVCaptureDeviceInput.init(device: captureDevice)
{
if captureSession.canAddInput(captureDeviceInput) {
captureSession.addInput(captureDeviceInput)
return true
}
}
return false
}
private func setupCaptureSessionOutput() -> Bool {
photoOutput = AVCaptureStillImageOutput()
photoOutput.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]
if captureSession.canAddOutput(photoOutput) {
captureSession.addOutput(photoOutput)
return true
}
return false
}
}
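The answer above uses AVCaptureStillImageOutput, which was deprecated in iOS 10. A sketch of the same capture step with its replacement, AVCapturePhotoOutput (the onImage callback is an assumed name; the output still has to be added to a running AVCaptureSession as in the answer):
import AVFoundation
import UIKit
final class PhotoCaptureHelper: NSObject, AVCapturePhotoCaptureDelegate {
    let photoOutput = AVCapturePhotoOutput()
    var onImage: ((UIImage) -> Void)?   // assumed callback, wire up as needed

    func capture() {
        photoOutput.capturePhoto(with: AVCapturePhotoSettings(), delegate: self)
    }

    // Called once the photo data is ready.
    func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
        guard error == nil,
              let data = photo.fileDataRepresentation(),
              let image = UIImage(data: data) else { return }
        DispatchQueue.main.async { self.onImage?(image) }
    }
}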

Tutorial for Google autocomplete places api for swift

I would like to have an autocomplete textfield that autocompletes locations for me like the one for android:
https://developers.google.com/places/training/autocomplete-android
Does anyone know where I can find a tutorial for this or an example?
Thanks!
Steps:
Add the Alamofire CocoaPod to your Swift project.
Find your Google Places API key on the Google APIs Console.
Add the following code.
ViewController.swift
import UIKit
class ViewController: UIViewController {
override func viewDidAppear(animated: Bool) {
super.viewDidAppear(animated)
let gpaViewController = GooglePlacesAutocomplete(
apiKey: "YOUR GOOGLE PLACE API KEY",
placeType: .Address
)
gpaViewController.placeDelegate = self
presentViewController(gpaViewController, animated: true, completion: nil)
}
}
extension ViewController: GooglePlacesAutocompleteDelegate {
func placeSelected(place: Place) {
println(place.description)
}
func placeViewClosed() {
dismissViewControllerAnimated(true, completion: nil)
}
}
GooglePlacesAutocomplete.swift
import UIKit
import Alamofire
enum PlaceType: Printable {
case All
case Geocode
case Address
case Establishment
case Regions
case Cities
var description : String {
switch self {
case .All: return ""
case .Geocode: return "geocode"
case .Address: return "address"
case .Establishment: return "establishment"
case .Regions: return "regions"
case .Cities: return "cities"
}
}
}
struct Place {
let id: String
let description: String
}
protocol GooglePlacesAutocompleteDelegate {
func placeSelected(place: Place)
func placeViewClosed()
}
// MARK: - GooglePlacesAutocomplete
class GooglePlacesAutocomplete: UINavigationController {
var gpaViewController: GooglePlacesAutocompleteContainer?
var placeDelegate: GooglePlacesAutocompleteDelegate? {
get { return gpaViewController?.delegate }
set { gpaViewController?.delegate = newValue }
}
convenience init(apiKey: String, placeType: PlaceType = .All) {
let gpaViewController = GooglePlacesAutocompleteContainer(
apiKey: apiKey,
placeType: placeType
)
self.init(rootViewController: gpaViewController)
self.gpaViewController = gpaViewController
let closeButton = UIBarButtonItem(barButtonSystemItem: UIBarButtonSystemItem.Stop, target: self, action: "close")
gpaViewController.navigationItem.leftBarButtonItem = closeButton
gpaViewController.navigationItem.title = "Enter Address"
}
func close() {
placeDelegate?.placeViewClosed()
}
}
// MARK: - GooglePlaceSearchDisplayController
class GooglePlaceSearchDisplayController: UISearchDisplayController {
override func setActive(visible: Bool, animated: Bool) {
if active == visible { return }
searchContentsController.navigationController?.navigationBarHidden = true
super.setActive(visible, animated: animated)
searchContentsController.navigationController?.navigationBarHidden = false
if visible {
searchBar.becomeFirstResponder()
} else {
searchBar.resignFirstResponder()
}
}
}
// MARK: - GooglePlacesAutocompleteContainer
class GooglePlacesAutocompleteContainer: UIViewController {
var delegate: GooglePlacesAutocompleteDelegate?
var apiKey: String?
var places = [Place]()
var placeType: PlaceType = .All
convenience init(apiKey: String, placeType: PlaceType = .All) {
self.init(nibName: "GooglePlacesAutocomplete", bundle: nil)
self.apiKey = apiKey
self.placeType = placeType
}
override func viewDidLoad() {
super.viewDidLoad()
let tv: UITableView? = searchDisplayController?.searchResultsTableView
tv?.registerClass(UITableViewCell.self, forCellReuseIdentifier: "Cell")
}
}
// MARK: - GooglePlacesAutocompleteContainer (UITableViewDataSource / UITableViewDelegate)
extension GooglePlacesAutocompleteContainer: UITableViewDataSource, UITableViewDelegate {
func tableView(tableView: UITableView, numberOfRowsInSection section: Int) -> Int {
return places.count
}
func tableView(tableView: UITableView, cellForRowAtIndexPath indexPath: NSIndexPath) -> UITableViewCell {
let cell = self.searchDisplayController?.searchResultsTableView?.dequeueReusableCellWithIdentifier("Cell", forIndexPath: indexPath) as UITableViewCell
// Get the corresponding candy from our candies array
let place = self.places[indexPath.row]
// Configure the cell
cell.textLabel.text = place.description
cell.accessoryType = UITableViewCellAccessoryType.DisclosureIndicator
return cell
}
func tableView(tableView: UITableView, didSelectRowAtIndexPath indexPath: NSIndexPath) {
delegate?.placeSelected(self.places[indexPath.row])
}
}
// MARK: - GooglePlacesAutocompleteContainer (UISearchDisplayDelegate)
extension GooglePlacesAutocompleteContainer: UISearchDisplayDelegate {
func searchDisplayController(controller: UISearchDisplayController, shouldReloadTableForSearchString searchString: String!) -> Bool {
getPlaces(searchString)
return false
}
private func getPlaces(searchString: String) {
Alamofire.request(.GET,
"https://maps.googleapis.com/maps/api/place/autocomplete/json",
parameters: [
"input": searchString,
"type": "(\(placeType.description))",
"key": apiKey ?? ""
]).responseJSON { request, response, json, error in
if let response = json as? NSDictionary {
if let predictions = response["predictions"] as? Array<AnyObject> {
self.places = predictions.map { (prediction: AnyObject) -> Place in
return Place(
id: prediction["id"] as String,
description: prediction["description"] as String
)
}
}
}
self.searchDisplayController?.searchResultsTableView?.reloadData()
}
}
}
GooglePlacesAutocomplete.xib
Hope this will help others.
Here's the full updated code for the Google Places Autocomplete API.
Xcode 10.0 & Swift 4.2
Follow this link to get a Google API key.
After getting the API key, install the CocoaPods:
source 'https://github.com/CocoaPods/Specs.git'
target 'YOUR_APPLICATION_TARGET_NAME_HERE' do
pod 'GooglePlaces'
pod 'GooglePlacePicker'
pod 'GoogleMaps'
end
Appdelegate File:
import UIKit
import GooglePlaces
let GOOGLE_API_KEY = "AIzaSyCuZkL7bh_hIDggnJob-b0cDueWlvRgpck"
@UIApplicationMain
class AppDelegate: UIResponder, UIApplicationDelegate {
var window: UIWindow?
func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [UIApplication.LaunchOptionsKey: Any]?) -> Bool {
// Override point for customization after application launch.
GMSPlacesClient.provideAPIKey(GOOGLE_API_KEY)
return true
}
func applicationWillResignActive(_ application: UIApplication) {
// Sent when the application is about to move from active to inactive state. This can occur for certain types of temporary interruptions (such as an incoming phone call or SMS message) or when the user quits the application and it begins the transition to the background state.
// Use this method to pause ongoing tasks, disable timers, and invalidate graphics rendering callbacks. Games should use this method to pause the game.
}
func applicationDidEnterBackground(_ application: UIApplication) {
// Use this method to release shared resources, save user data, invalidate timers, and store enough application state information to restore your application to its current state in case it is terminated later.
// If your application supports background execution, this method is called instead of applicationWillTerminate: when the user quits.
}
func applicationWillEnterForeground(_ application: UIApplication) {
// Called as part of the transition from the background to the active state; here you can undo many of the changes made on entering the background.
}
func applicationDidBecomeActive(_ application: UIApplication) {
// Restart any tasks that were paused (or not yet started) while the application was inactive. If the application was previously in the background, optionally refresh the user interface.
}
func applicationWillTerminate(_ application: UIApplication) {
// Called when the application is about to terminate. Save data if appropriate. See also applicationDidEnterBackground:.
}
}
ViewController File:
import UIKit
import GooglePlaces
class ViewController: UIViewController, CLLocationManagerDelegate {
var placesClient: GMSPlacesClient!
// Add a pair of UILabels in Interface Builder, and connect the outlets to these variables.
@IBOutlet var nameLabel: UILabel!
@IBOutlet var addressLabel: UILabel!
let locationManager = CLLocationManager()
var resultsViewController: GMSAutocompleteResultsViewController?
var searchController: UISearchController?
var resultView: UITextView?
override func viewDidLoad() {
super.viewDidLoad()
// Do any additional setup after loading the view, typically from a nib.
locationManager.delegate = self
if CLLocationManager.authorizationStatus() == .notDetermined
{
locationManager.requestAlwaysAuthorization()
}
placesClient = GMSPlacesClient.shared()
// self.addToNavbar()
// self.addToSubview()
self.addToPopover()
}
func addToNavbar(){
resultsViewController = GMSAutocompleteResultsViewController()
resultsViewController?.delegate = self
searchController = UISearchController(searchResultsController: resultsViewController)
searchController?.searchResultsUpdater = resultsViewController
// Put the search bar in the navigation bar.
searchController?.searchBar.sizeToFit()
navigationItem.titleView = searchController?.searchBar
// When UISearchController presents the results view, present it in
// this view controller, not one further up the chain.
definesPresentationContext = true
// Prevent the navigation bar from being hidden when searching.
searchController?.hidesNavigationBarDuringPresentation = false
}
func addToSubview(){
resultsViewController = GMSAutocompleteResultsViewController()
resultsViewController?.delegate = self
searchController = UISearchController(searchResultsController: resultsViewController)
searchController?.searchResultsUpdater = resultsViewController
let subView = UIView(frame: CGRect(x: 0, y: 65.0, width: 350.0, height: 45.0))
subView.addSubview((searchController?.searchBar)!)
view.addSubview(subView)
searchController?.searchBar.sizeToFit()
searchController?.hidesNavigationBarDuringPresentation = false
// When UISearchController presents the results view, present it in
// this view controller, not one further up the chain.
definesPresentationContext = true
}
func addToPopover(){
resultsViewController = GMSAutocompleteResultsViewController()
resultsViewController?.delegate = self
searchController = UISearchController(searchResultsController: resultsViewController)
searchController?.searchResultsUpdater = resultsViewController
// Add the search bar to the right of the nav bar,
// use a popover to display the results.
// Set an explicit size as we don't want to use the entire nav bar.
searchController?.searchBar.frame = (CGRect(x: 0, y: 0, width: 250.0, height: 44.0))
navigationItem.rightBarButtonItem = UIBarButtonItem(customView: (searchController?.searchBar)!)
// When UISearchController presents the results view, present it in
// this view controller, not one further up the chain.
definesPresentationContext = true
// Keep the navigation bar visible.
searchController?.hidesNavigationBarDuringPresentation = false
searchController?.modalPresentationStyle = .popover
}
func locationManager(_ manager: CLLocationManager, didChangeAuthorization status: CLAuthorizationStatus)
{
print(status)
}
// Add a UIButton in Interface Builder, and connect the action to this function.
@IBAction func getCurrentPlace(_ sender: UIButton) {
placesClient.currentPlace(callback: { (placeLikelihoodList, error) -> Void in
if let error = error {
print("Pick Place error: \(error.localizedDescription)")
return
}
self.nameLabel.text = "No current place"
self.addressLabel.text = ""
if let placeLikelihoodList = placeLikelihoodList {
print("placeLikelihoodList -- \(placeLikelihoodList)")
let place = placeLikelihoodList.likelihoods.first?.place
if let place = place {
self.nameLabel.text = place.name
self.addressLabel.text = place.formattedAddress?.components(separatedBy: ", ")
.joined(separator: "\n")
print(place.name)
print(place.coordinate)
print(place.placeID)
print(place.phoneNumber)
print(place.formattedAddress ?? "")
}
}
})
}
}
//MARK: Extentions
// Handle the user's selection.
extension ViewController: GMSAutocompleteResultsViewControllerDelegate {
func resultsController(_ resultsController: GMSAutocompleteResultsViewController,
didAutocompleteWith place: GMSPlace) {
searchController?.isActive = false
// Do something with the selected place.
print("Place name: \(place.name)")
print("Place address: \(String(describing: place.formattedAddress))")
print("Place attributions: \(place.attributions)")
}
func resultsController(_ resultsController: GMSAutocompleteResultsViewController,
didFailAutocompleteWithError error: Error){
// TODO: handle the error.
print("Error: ", error.localizedDescription)
}
// Turn the network activity indicator on and off again.
func didRequestAutocompletePredictions(_ viewController: GMSAutocompleteViewController) {
UIApplication.shared.isNetworkActivityIndicatorVisible = true
}
func didUpdateAutocompletePredictions(_ viewController: GMSAutocompleteViewController) {
UIApplication.shared.isNetworkActivityIndicatorVisible = false
}
}
Lightweight solution!
Instead of using the Google framework or a third-party library for simple requests, I created a small library that can make a number of Google API requests: Autocomplete, reverse geocoding, place information, and a Path API for getting the path between two locations.
To use the library, all you have to do is:
Step 1: Import GoogleApiHelper into your project.
Step 2: Initialise GoogleApiHelper:
GoogleApi.shared.initialiseWithKey("API_KEY")
Step 3: Call the methods:
var input = GInput()
input.keyword = "San francisco"
GoogleApi.shared.callApi(input: input) { (response) in
if let results = response.data as? [GApiResponse.Autocomplete], response.isValidFor(.autocomplete) {
//Enjoy the Autocomplete Api
} else { print(response.error ?? "ERROR") }
}
You can find the library here
You can also fetch the Google Places autocomplete results with a plain URL request and show them in a table view.
plist configuration
<key>NSAppTransportSecurity</key>
<dict>
<key>NSAllowsArbitraryLoads</key>
<true/>
</dict>
Code
import UIKit
import Alamofire
class GooglePlacesViewController: UIViewController, UISearchBarDelegate, UITableViewDataSource, UITableViewDelegate {
@IBOutlet weak var srchLocation: UISearchBar!
@IBOutlet weak var tblLoction: UITableView!
var arrPlaces = NSMutableArray(capacity: 100)
let operationQueue = OperationQueue()
let currentLat = 51.5033640
let currentLong = -0.1276250
var LocationDataDelegate : LocationData! = nil
var tblLocation : UITableView!
var lblNodata = UILabel()
override func viewDidLoad()
{
super.viewDidLoad()
lblNodata.frame = CGRect(x: 0, y: 80, width:
self.view.frame.size.width, height: self.view.frame.size.height-60)
lblNodata.text = "Please enter text to get your location"
self.view.addSubview(lblNodata)
srchLocation.placeholder = "Ente your location details"
lblNodata.textAlignment = .center
srchLocation.delegate = self
}
func searchBar(_ searchBar: UISearchBar, textDidChange searchText: String) {
self.beginSearching(searchText: searchText)
}
func beginSearching(searchText:String) {
if searchText.characters.count == 0 {
self.arrPlaces.removeAllObjects()
tblLoction.isHidden = true
lblNodata.isHidden = false
return
}
operationQueue.addOperation { () -> Void in
self.forwardGeoCoding(searchText: searchText)
}
}
//MARK: - Search place from Google -
func forwardGeoCoding(searchText:String) {
googlePlacesResult(input: searchText) { (result) -> Void in
let searchResult:NSDictionary = ["keyword":searchText,"results":result]
if result.count > 0
{
let features = searchResult.value(forKey: "results") as! NSArray
self.arrPlaces = NSMutableArray(capacity: 100)
print(features.count)
for jk in 0...features.count-1
{
let dict = features.object(at: jk) as! NSDictionary
self.arrPlaces.add(dict)
}
DispatchQueue.main.async(execute: {
if self.arrPlaces.count != 0
{
self.tblLoction.isHidden = false
self.lblNodata.isHidden = true
self.tblLoction.reloadData()
}
else
{
self.tblLoction.isHidden = true
self.lblNodata.isHidden = false
self.tblLoction.reloadData()
}
});
}
}
}
//MARK: - Google place API request -
func googlePlacesResult(input: String, completion: @escaping (_ result: NSArray) -> Void) {
let searchWordProtection = input.replacingOccurrences(of: " ", with: "")
if searchWordProtection.characters.count != 0 {
let urlString = NSString(format: "https://maps.googleapis.com/maps/api/place/autocomplete/json?input=%@&types=establishment|geocode&location=%@,%@&radius=500&language=en&key=YOUR_KEY", input, "\(currentLat)", "\(currentLong)")
print(urlString)
let url = NSURL(string: urlString.addingPercentEscapes(using: String.Encoding.utf8.rawValue)!)
print(url!)
let defaultConfigObject = URLSessionConfiguration.default
let delegateFreeSession = URLSession(configuration: defaultConfigObject, delegate: nil, delegateQueue: OperationQueue.main)
let request = NSURLRequest(url: url! as URL)
let task = delegateFreeSession.dataTask(with: request as URLRequest, completionHandler:
{
(data, response, error) -> Void in
if let data = data
{
do {
let jSONresult = try JSONSerialization.jsonObject(with: data, options: JSONSerialization.ReadingOptions.allowFragments) as! [String:AnyObject]
let results:NSArray = jSONresult["predictions"] as! NSArray
let status = jSONresult["status"] as! String
if status == "NOT_FOUND" || status == "REQUEST_DENIED"
{
let userInfo:NSDictionary = ["error": jSONresult["status"]!]
let newError = NSError(domain: "API Error", code: 666, userInfo: userInfo as [NSObject : AnyObject])
let arr:NSArray = [newError]
completion(arr)
return
}
else
{
completion(results)
}
}
catch
{
print("json error: \(error)")
}
}
else if let error = error
{
print(error)
}
})
task.resume()
}
}
func numberOfSections(in tableView: UITableView) -> Int {
return 1
}
func tableView(_ tableView: UITableView, numberOfRowsInSection section: Int) -> Int {
return arrPlaces.count
}
func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell
{
let tblCell = tableView.dequeueReusableCell(withIdentifier: "locationCell")
let dict = arrPlaces.object(at: indexPath.row) as! NSDictionary
tblCell?.textLabel?.text = dict.value(forKey: "description") as? String
tblCell?.textLabel?.numberOfLines = 0
tblCell?.textLabel?.sizeToFit()
return tblCell!
}
func tableView(_ tableView: UITableView, didSelectRowAt indexPath: IndexPath)
{
if LocationDataDelegate != nil
{
let dict = arrPlaces.object(at: indexPath.row) as! NSDictionary
print(dict.value(forKey: "terms") as! NSArray)
let ArrSelected = dict.value(forKey: "terms") as! NSArray
LocationDataDelegate.didSelectLocationData(LocationData: ArrSelected)
}
self.dismiss(animated: true, completion: nil)
}
}
Alternatively, using the GooglePlaces SDK's GMSAutocompleteViewController:
class AddNewAddressVC: UIViewController, UITextFieldDelegate {
func autocompleteClicked() {
let autocompleteController = GMSAutocompleteViewController()
autocompleteController.delegate = self
// Specify the place data types to return.
let fields: GMSPlaceField = GMSPlaceField(rawValue: UInt(GMSPlaceField.name.rawValue) |
UInt(GMSPlaceField.placeID.rawValue))!
autocompleteController.placeFields = fields
// Specify a filter.
let filter = GMSAutocompleteFilter()
filter.type = .address
autocompleteController.autocompleteFilter = filter
// Display the autocomplete view controller.
present(autocompleteController, animated: true, completion: nil)
}
@IBAction func action_selectGooglePlaces(_ sender: UIButton) {
autocompleteClicked()
}
}
extension AddNewAddressVC: GMSAutocompleteViewControllerDelegate {
// Handle the user's selection.
func viewController(_ viewController: GMSAutocompleteViewController, didAutocompleteWith place: GMSPlace) {
print("Place name: \(place.name)")
print("Place ID: \(place.placeID)")
print("Place attributions: \(place.attributions)")
dismiss(animated: true, completion: nil)
}
func viewController(_ viewController: GMSAutocompleteViewController, didFailAutocompleteWithError error: Error) {
// TODO: handle the error.
print("Error: ", error.localizedDescription)
}
// User canceled the operation.
func wasCancelled(_ viewController: GMSAutocompleteViewController) {
dismiss(animated: true, completion: nil)
}
// Turn the network activity indicator on and off again.
func didRequestAutocompletePredictions(_ viewController: GMSAutocompleteViewController) {
UIApplication.shared.isNetworkActivityIndicatorVisible = true
}
func didUpdateAutocompletePredictions(_ viewController: GMSAutocompleteViewController) {
UIApplication.shared.isNetworkActivityIndicatorVisible = false
}
}
