Drawing on previewLayer: AVCaptureVideoPreviewLayer - iOS

I have a small app, SimpleCamera, that shows a live video preview with a button on the screen to take a photo. The photo is then displayed and you can save it or discard it. It all works, and I have used this code to draw a grey border around the screen preview. That too works fine. But is that all I can draw on the preview screen? I can't work out how to add the next bit of code, shown below this first code block.
// Provide a camera preview
cameraPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
view.layer.addSublayer(cameraPreviewLayer!)
cameraPreviewLayer?.videoGravity = AVLayerVideoGravity.resizeAspectFill
cameraPreviewLayer?.frame = view.layer.frame
//Add preview layer for drawing
let previewLayer: AVCaptureVideoPreviewLayer = AVCaptureVideoPreviewLayer(session: self.captureSession)
previewLayer.frame = self.view.layer.frame
previewLayer.videoGravity = AVLayerVideoGravity.resizeAspectFill
self.view.layer.addSublayer(previewLayer)
//Add Rectangle
let cgRect = CGRect(x: 0, y: 0, width: self.view.bounds.width, height: self.view.bounds.height)
let myView = UIImageView()
myView.frame = cgRect
myView.backgroundColor = UIColor.clear
myView.isOpaque = false
myView.layer.cornerRadius = 10
myView.layer.borderColor = UIColor.lightGray.cgColor
myView.layer.borderWidth = 3
myView.layer.masksToBounds = true
previewLayer.addSublayer(myView.layer)
// Bring the camera button to front
view.bringSubview(toFront: cameraButton)
captureSession.startRunning()
No matter where I put this code, it simply doesn't show up.
//Add circles
let midX = screenWidth / 2
let midY = screenHeight / 2
let w = screenWidth
var circlePath = UIBezierPath(arcCenter: CGPoint(x: midX,y: midY), radius: CGFloat(w * 0.010), startAngle: CGFloat(0), endAngle:CGFloat(Double.pi * 2), clockwise: true)
let circleRads = [ 0.07, 0.13, 0.17, 0.22, 0.29, 0.36, 0.40, 0.48, 0.60, 0.75 ]
for pct in circleRads {
let rad = w * CGFloat(pct)
circlePath = UIBezierPath(arcCenter: CGPoint(x: midX, y: midY), radius: CGFloat(rad), startAngle: CGFloat(0), endAngle:CGFloat(Double.pi * 2), clockwise: true)
circlePath.lineWidth = 2.5
circlePath.stroke()
}
// draw text time stamp on image
let now = Date()
let formatter = DateFormatter()
formatter.timeZone = TimeZone.current
formatter.dateFormat = "yyyy-MM-dd HH:mm"
let dateString = formatter.string(from: now)
let paragraphStyle = NSMutableParagraphStyle()
paragraphStyle.alignment = .center
let attrs = [NSAttributedStringKey.font: UIFont(name: "HelveticaNeue-Thin", size: 26)!, NSAttributedStringKey.paragraphStyle: paragraphStyle]
let string = dateString
string.draw(with: CGRect(x: 12, y: 38, width: 448, height: 448), options: .usesLineFragmentOrigin, attributes: attrs, context: nil)

Partly answered. I can draw a border around the entire screen. This is the SimpleCamera app from the AppCoda Swift 4 Intermediate iOS 11 book. This is the code for the CameraController.swift file; the border-drawing section is lines 176 to 192 when opened in Xcode.
But I still can't figure out how to make the commented-out section draw a set of circles, put a date stamp on the image, and save it.
//
// CameraController.swift
// Camera
//
// Created by Simon Ng on 16/10/2016.
// Copyright © 2016 AppCoda. All rights reserved.
//
import UIKit
import AVFoundation
import Foundation
class CameraController: UIViewController {
@IBOutlet var cameraButton:UIButton!
//===================================
@IBOutlet weak var navigationBar: UINavigationBar!
@IBOutlet weak var imgOverlay: UIImageView!
@IBOutlet weak var btnCapture: UIButton!
@IBOutlet weak var btnInfo: UIButton!
@IBOutlet weak var btnSocial: UIButton!
@IBOutlet weak var shapeLayer: UIView!
@IBOutlet weak var btnRed: UIButton!
@IBOutlet weak var btnGreen: UIButton!
@IBOutlet weak var btnBlue: UIButton!
@IBOutlet weak var btnYellow: UIButton!
@IBOutlet weak var btnWhite: UIButton!
//===================================
var backFacingCamera: AVCaptureDevice?
var frontFacingCamera: AVCaptureDevice?
var currentDevice: AVCaptureDevice!
var stillImageOutput: AVCapturePhotoOutput!
var stillImage: UIImage?
var cameraPreviewLayer: AVCaptureVideoPreviewLayer?
let captureSession = AVCaptureSession()
var toggleCameraGestureRecognizer = UISwipeGestureRecognizer()
var zoomInGestureRecognizer = UISwipeGestureRecognizer()
var zoomOutGestureRecognizer = UISwipeGestureRecognizer()
//===============================
//let stillImageOutput = AVCaptureStillImageOutput()
var previewLayer : AVCaptureVideoPreviewLayer?
let screenWidth = UIScreen.main.bounds.size.width
let screenHeight = UIScreen.main.bounds.size.height
var aspectRatio: CGFloat = 1.0
var viewFinderHeight: CGFloat = 0.0
var viewFinderWidth: CGFloat = 0.0
var viewFinderMarginLeft: CGFloat = 0.0
var viewFinderMarginTop: CGFloat = 0.0
var lineColor : UIColor?
var color: Int = 0
//==============================
override func viewDidLoad() {
super.viewDidLoad()
configure()
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
// Dispose of any resources that can be recreated.
}
// MARK: - Action methods
@IBAction func capture(sender: UIButton) {
// Set photo settings
let photoSettings = AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.jpeg])
photoSettings.isAutoStillImageStabilizationEnabled = true
photoSettings.isHighResolutionPhotoEnabled = true
photoSettings.flashMode = .off
stillImageOutput.isHighResolutionCaptureEnabled = true
stillImageOutput.capturePhoto(with: photoSettings, delegate: self)
}
// MARK: - Segues
@IBAction func unwindToCameraView(segue: UIStoryboardSegue) {
}
override func prepare(for segue: UIStoryboardSegue, sender: Any?) {
// Get the new view controller using segue.destinationViewController.
// Pass the selected object to the new view controller.
if segue.identifier == "showPhoto" {
let photoViewController = segue.destination as! PhotoViewController
photoViewController.image = stillImage
}
}
// MARK: - Helper methods
private func configure() {
// Preset the session for taking photo in full resolution
captureSession.sessionPreset = AVCaptureSession.Preset.photo
// Get the front and back-facing camera for taking photos
let deviceDiscoverySession = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInWideAngleCamera], mediaType: AVMediaType.video, position: .unspecified)
for device in deviceDiscoverySession.devices {
if device.position == .back {
backFacingCamera = device
} else if device.position == .front {
frontFacingCamera = device
}
}
currentDevice = backFacingCamera
guard let captureDeviceInput = try? AVCaptureDeviceInput(device: currentDevice) else {
return
}
// Configure the session with the output for capturing still images
stillImageOutput = AVCapturePhotoOutput()
// Configure the session with the input and the output devices
captureSession.addInput(captureDeviceInput)
captureSession.addOutput(stillImageOutput)
// Provide a camera preview
cameraPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
view.layer.addSublayer(cameraPreviewLayer!)
cameraPreviewLayer?.videoGravity = AVLayerVideoGravity.resizeAspectFill
cameraPreviewLayer?.frame = view.layer.frame
//////////////
//Add circles
// red circles - radius in %
/*
let midX = screenWidth / 2
let midY = screenHeight / 2
let w = screenWidth
//let h = screenHeight
var circlePath = UIBezierPath(arcCenter: CGPoint(x: midX,y: midY), radius: CGFloat(w * 0.010), startAngle: CGFloat(0), endAngle:CGFloat(Double.pi * 2), clockwise: true)
let circleRads = [ 0.07, 0.13, 0.17, 0.22, 0.29, 0.36, 0.40, 0.48, 0.60, 0.75 ]
for pct in circleRads {
let rad = w * CGFloat(pct)
circlePath = UIBezierPath(arcCenter: CGPoint(x: midX, y: midY), radius: CGFloat(rad), startAngle: CGFloat(0), endAngle:CGFloat(Double.pi * 2), clockwise: true)
circlePath.lineWidth = 2.5
circlePath.stroke()
}
// draw text time stamp on image
let now = Date()
let formatter = DateFormatter()
formatter.timeZone = TimeZone.current
formatter.dateFormat = "yyyy-MM-dd HH:mm"
let dateString = formatter.string(from: now)
// print(dateString)
let paragraphStyle = NSMutableParagraphStyle()
paragraphStyle.alignment = .center
let attrs = [NSAttributedStringKey.font: UIFont(name: "HelveticaNeue-Thin", size: 26)!, NSAttributedStringKey.paragraphStyle: paragraphStyle]
let string = dateString
string.draw(with: CGRect(x: 22, y: 18, width: 448, height: 448), options: .usesLineFragmentOrigin, attributes: attrs, context: nil)
print("Did the date")
*/
//Add Rectangular border
let previewLayer: AVCaptureVideoPreviewLayer = AVCaptureVideoPreviewLayer(session: self.captureSession)
previewLayer.frame = self.view.layer.frame
previewLayer.videoGravity = AVLayerVideoGravity.resizeAspectFill
self.view.layer.addSublayer(previewLayer)
let cgRect = CGRect(x: 0, y: 0, width: self.view.bounds.width, height: self.view.bounds.height)
let myView = UIImageView()
myView.frame = cgRect
myView.backgroundColor = UIColor.clear
myView.isOpaque = false
myView.layer.cornerRadius = 10
myView.layer.borderColor = UIColor.lightGray.cgColor
myView.layer.borderWidth = 3
myView.layer.masksToBounds = true
previewLayer.addSublayer(myView.layer)
///////////////
// Bring the camera button to front
view.bringSubview(toFront: cameraButton)
captureSession.startRunning()
print("so far 2")
// Toggle Camera recognizer
toggleCameraGestureRecognizer.direction = .up
toggleCameraGestureRecognizer.addTarget(self, action: #selector(toggleCamera))
view.addGestureRecognizer(toggleCameraGestureRecognizer)
// Zoom In recognizer
zoomInGestureRecognizer.direction = .right
zoomInGestureRecognizer.addTarget(self, action: #selector(zoomIn))
view.addGestureRecognizer(zoomInGestureRecognizer)
// Zoom Out recognizer
zoomOutGestureRecognizer.direction = .left
zoomOutGestureRecognizer.addTarget(self, action: #selector(zoomOut))
view.addGestureRecognizer(zoomOutGestureRecognizer)
}
@objc func toggleCamera() {
captureSession.beginConfiguration()
// Change the device based on the current camera
guard let newDevice = (currentDevice?.position == AVCaptureDevice.Position.back) ? frontFacingCamera : backFacingCamera else {
return
}
// Remove all inputs from the session
for input in captureSession.inputs {
captureSession.removeInput(input as! AVCaptureDeviceInput)
}
// Change to the new input
let cameraInput:AVCaptureDeviceInput
do {
cameraInput = try AVCaptureDeviceInput(device: newDevice)
} catch {
print(error)
return
}
if captureSession.canAddInput(cameraInput) {
captureSession.addInput(cameraInput)
}
currentDevice = newDevice
captureSession.commitConfiguration()
}
@objc func zoomIn() {
if let zoomFactor = currentDevice?.videoZoomFactor {
if zoomFactor < 5.0 {
let newZoomFactor = min(zoomFactor + 1.0, 5.0)
do {
try currentDevice?.lockForConfiguration()
currentDevice?.ramp(toVideoZoomFactor: newZoomFactor, withRate: 1.0)
currentDevice?.unlockForConfiguration()
} catch {
print(error)
}
}
}
}
@objc func zoomOut() {
if let zoomFactor = currentDevice?.videoZoomFactor {
if zoomFactor > 1.0 {
let newZoomFactor = max(zoomFactor - 1.0, 1.0)
do {
try currentDevice?.lockForConfiguration()
currentDevice?.ramp(toVideoZoomFactor: newZoomFactor, withRate: 1.0)
currentDevice?.unlockForConfiguration()
} catch {
print(error)
}
}
}
}
}
extension CameraController: AVCapturePhotoCaptureDelegate {
func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
guard error == nil else {
return
}
// Get the image from the photo buffer
guard let imageData = photo.fileDataRepresentation() else {
return
}
stillImage = UIImage(data: imageData)
performSegue(withIdentifier: "showPhoto", sender: self)
}
}

You need a CAShapeLayer to add the bezier path to.
let circleLayer = CAShapeLayer()
circleLayer.path = circlePath.cgPath
self.view.layer.addSublayer(circleLayer)
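Building on that, here is a fuller sketch for the commented-out section in the question, assuming it runs at the end of configure() once the preview layer is in place. Each circle becomes its own CAShapeLayer, and the date stamp becomes a CATextLayer, so nothing needs a current graphics context (which is why string.draw(...) drew nothing there; it only works inside a method like draw(_:)). Note this only draws on screen; to get the circles and stamp into the saved photo they have to be composited onto the captured image, as in the "Combining Images" thread below.
// Circles as shape layers; screenWidth/screenHeight are the existing properties.
let midX = screenWidth / 2
let midY = screenHeight / 2
let w = screenWidth
let circleRads: [CGFloat] = [0.07, 0.13, 0.17, 0.22, 0.29, 0.36, 0.40, 0.48, 0.60, 0.75]
for pct in circleRads {
    let circlePath = UIBezierPath(arcCenter: CGPoint(x: midX, y: midY),
                                  radius: w * pct,
                                  startAngle: 0,
                                  endAngle: CGFloat(Double.pi * 2),
                                  clockwise: true)
    let circleLayer = CAShapeLayer()
    circleLayer.path = circlePath.cgPath
    circleLayer.strokeColor = UIColor.red.cgColor
    circleLayer.fillColor = UIColor.clear.cgColor // stroke only, no fill
    circleLayer.lineWidth = 2.5
    view.layer.addSublayer(circleLayer)
}
// Date stamp as a text layer (Swift 4 era constants, matching the question).
let formatter = DateFormatter()
formatter.dateFormat = "yyyy-MM-dd HH:mm"
let textLayer = CATextLayer()
textLayer.string = formatter.string(from: Date())
textLayer.font = UIFont(name: "HelveticaNeue-Thin", size: 26)
textLayer.fontSize = 26
textLayer.foregroundColor = UIColor.white.cgColor
textLayer.alignmentMode = kCAAlignmentCenter
textLayer.contentsScale = UIScreen.main.scale // keeps the text crisp on Retina
textLayer.frame = CGRect(x: 0, y: 38, width: screenWidth, height: 32)
view.layer.addSublayer(textLayer)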

Related

How to reload data for a line chart in Swift?

I am using the Charts library for rendering a line chart, but I am unable to reload the data that I fetch from the API. There is a method notifyDataSetChanged(), but it is not working. I am using the chart within a tableView cell. If anybody has some idea, please help me out.
import UIKit
import Charts
class ChartViewCell: UITableViewCell, ChartViewDelegate {
@IBOutlet weak var lineChartViewContainer: UIStackView?
var yVlaues = [ChartDataEntry]()
lazy var lineChartView: LineChartView = {
let chartView = LineChartView()
return chartView
}()
override func awakeFromNib() {
super.awakeFromNib()
setLineChart()
loadData()
}
override func setSelected(_ selected: Bool, animated: Bool) {
super.setSelected(selected, animated: animated)
}
func setLineChart() {
lineChartView.delegate = self
lineChartView.noDataText = "No Data Found"
lineChartView.rightAxis.enabled = false
lineChartView.xAxis.drawGridLinesEnabled = false
lineChartView.leftAxis.drawGridLinesEnabled = false
lineChartView.legend.enabled = false
//lineChartView.leftAxis.labelFont = .boldSystemFont(ofSize: 14)
lineChartView.leftAxis.labelCount = 6
lineChartView.leftAxis.labelTextColor = .black
lineChartView.xAxis.labelPosition = .bottom
let screenSize = UIScreen.main.bounds
let screenWidth = screenSize.width
lineChartView.frame = CGRect(x: 10, y: 70, width: screenWidth - 20, height: 200)
setData()
self.addSubview(lineChartView)
}
func chartValueSelected(_ chartView: ChartViewBase, entry: ChartDataEntry, highlight: Highlight) {
print("Abhay x \(entry.x) y \(entry.y)")
}
func setData() {
let set1 = LineChartDataSet(entries: yVlaues)
set1.lineWidth = 1
set1.colors = [UIColor.colorFromHex(hexString: "#80005661")]
set1.drawVerticalHighlightIndicatorEnabled = true
set1.drawHorizontalHighlightIndicatorEnabled = false
set1.highlightColor = UIColor.colorFromHex(hexString: "#80005661")
set1.highlightLineWidth = 1.0
set1.drawValuesEnabled = false
set1.circleHoleColor = .white
set1.circleColors = [UIColor.colorFromHex(hexString: "#80005661")]
set1.circleRadius = 5
let data = LineChartData(dataSet: set1)
lineChartView.data = data
let startColor = UIColor.colorFromHex(hexString: "#80005661").cgColor
let endColor = UIColor.white.cgColor
let gradientColors = [startColor, endColor] as CFArray // Colors of the gradient
let colorLocations:[CGFloat] = [1.0, 0.0] // Positioning of the gradient
let gradient = CGGradient.init(colorsSpace: CGColorSpaceCreateDeviceRGB(), colors: gradientColors, locations: colorLocations) // Gradient Object
if let gradient = gradient {
set1.fill = Fill.fillWithLinearGradient(gradient, angle: 90.0) // Set the Gradient
}
set1.drawFilledEnabled = true // Draw the Gradient
}
func loadData() {
var params = [String: Any]()
guard let userId = Reusable.getUserInfo()?.id else { return }
params = ["user_id": userId]
WebServiceHelper.postWebServiceCall(Constants.baseURL + "performedWorkoutsGraph", params: params, isShowLoader: false, success: { (responceObj) in
let statusMsg = StatusBool(json: responceObj)
if statusMsg.status {
self.yVlaues.removeAll()
let responseJsonArr = responceObj["data"].arrayValue
for item in responseJsonArr {
let graphData = GraphDataModel(json: item)
let chartDataEntry = ChartDataEntry(x: Double(graphData.interval), y: Double(graphData.workouts))
self.yVlaues.append(chartDataEntry)
}
self.lineChartView.data?.notifyDataChanged()
self.lineChartView.notifyDataSetChanged()
} else {
CommonUtils.showToastMessage(message: statusMsg.message)
}
}
, failure: { (failure) in
print(failure)
})
}
}
I just found the solution, though it's not really a reloading trick: I call setData() inside my loadData() and it works.
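That fix, as a sketch using the same names as above: rebuild the data set from the fresh entries before notifying the chart.
// Inside the success closure of loadData(), after yVlaues has been refilled:
self.setData() // rebuilds the LineChartDataSet from yVlaues and reassigns lineChartView.data
self.lineChartView.notifyDataSetChanged()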

Drop annotation on virtual map (Swift 3)

Please help me out.
This is my first time working on a project with iBeacon involved, which creates a virtual map of the inside of a store.
I know how to drop pins with MapKit, but how can I do it if I only have CGPoints on a view?
I managed to drop a UIImage (with a pin image) on a view, but when I rotate or pinch, it doesn't stay at the coordinates where I dropped it.
Here's the code:
import UIKit
class MapViewController: UIViewController{
private var scaleView: CGFloat = 1
private var rotateView: CGFloat = 0
private var anchorPoint = CGPoint(x: 0.5, y: 0.5)
private let gestureRecognizerDelegate = GestureRecognizerDelegate()
@IBOutlet weak var mapView: MapView!
@IBOutlet var pinchGestureRecognizer: UIPinchGestureRecognizer!
@IBOutlet var panGestureRecognizer: UIPanGestureRecognizer!
@IBOutlet var rotateGestureRecognizer: UIRotationGestureRecognizer!
@IBOutlet weak var pin: UIImageView!
override func viewDidAppear(_ animated: Bool) {
if !cartIsEmpty {
cartBtn.setImage(UIImage(named: "haveitem"), for: .normal)
} else {
cartBtn.setImage(UIImage(named: "2772"), for: .normal)
}
cartBtn.addTarget(self, action: #selector(openCart), for: .touchUpInside)
let item1 = UIBarButtonItem(customView: cartBtn)
self.navigationItem.rightBarButtonItem = item1
}
override func viewDidDisappear(_ animated: Bool) {
cartBtn.removeTarget(self, action: #selector(openCart), for: .touchUpInside)
}
override func viewDidLoad() {
super.viewDidLoad()
ApplicationManager.sharedInstance.onApplicationStart()
NotificationCenter.default.addObserver(self, selector: #selector(self.onOrientationChanged), name: NSNotification.Name.UIDeviceOrientationDidChange, object: nil)
self.panGestureRecognizer.delegate = gestureRecognizerDelegate
self.pinchGestureRecognizer.delegate = gestureRecognizerDelegate
self.rotateGestureRecognizer.delegate = gestureRecognizerDelegate
ApplicationManager.sharedInstance.gotFloorData = drawFloor
ApplicationManager.sharedInstance.currentUserPoint = drawCurrentUserPoint
}
func drawFloor (floor: Floor) {
mapView.setFloor(currentFloor: floor)
mapView.setNeedsDisplay()
}
func drawCurrentUserPoint(currentUserPoint: CurrentUserLocation, beaconRangingData: [BeaconRangingPoint]) {
mapView.setUserPoint(currentUserPoint: currentUserPoint, beaconRangingData: beaconRangingData)
mapView.setNeedsDisplay()
}
@IBAction func handlePan(recognizer:UIPanGestureRecognizer) {
let translation = recognizer.translation(in: mapView)
if recognizer.view != nil {
let offsetX = translation.x * CGFloat(cosf(Float(rotateView))) - translation.y * CGFloat(sinf(Float(rotateView)))
let offsetY = translation.x * CGFloat(sinf(Float(rotateView))) + translation.y * CGFloat(cosf(Float(rotateView)))
mapView.center = CGPoint(x:mapView.center.x + offsetX * scaleView,
y:mapView.center.y + offsetY * scaleView)
}
recognizer.setTranslation(CGPoint(x: 0, y: 0), in: mapView)
}
@IBAction func handlePinch(recognizer : UIPinchGestureRecognizer) {
if recognizer.view != nil {
setAnchor(point: recognizer.location(in: mapView))
mapView.transform = mapView.transform.scaledBy(x: recognizer.scale, y: recognizer.scale)
scaleView = recognizer.scale * scaleView
recognizer.scale = 1
}
}
@IBAction func handleRotate(recognizer : UIRotationGestureRecognizer) {
if recognizer.view != nil {
setAnchor(point: recognizer.location(in: mapView))
mapView.transform = mapView.transform.rotated(by: recognizer.rotation)
rotateView = rotateView + recognizer.rotation
recognizer.rotation = 0
}
}
private func setAnchor(point : CGPoint) {
let anchor = CGPoint(x: point.x / mapView.bounds.width, y: point.y / mapView.bounds.height)
mapView.layer.anchorPoint = CGPoint(x: anchor.x, y: anchor.y)
let translationX = (mapView.bounds.width * (anchor.x - anchorPoint.x)) * scaleView
let translationY = (mapView.bounds.height * (anchor.y - anchorPoint.y)) * scaleView
let offsetX = translationX * CGFloat(cosf(Float(rotateView))) - translationY * CGFloat(sinf(Float(rotateView)))
let offsetY = translationX * CGFloat(sinf(Float(rotateView))) + translationY * CGFloat(cosf(Float(rotateView)))
mapView.layer.position = CGPoint(x: mapView.layer.position.x + offsetX,
y: mapView.layer.position.y + offsetY)
anchorPoint = anchor
}
private func showAlert(title: String, message: String?, style: UIAlertControllerStyle = .alert) {
let alertController = UIAlertController(title: title, message: message, preferredStyle: style)
let tryAgainAction = UIAlertAction(title: "Try again", style: .default) {
alertAction in
ApplicationManager.sharedInstance.onApplicationStart()
}
let cancelAction = UIAlertAction(title: "Cancel", style: .default, handler: nil)
alertController.addAction(tryAgainAction)
alertController.addAction(cancelAction)
present(alertController, animated: true, completion: nil)
}
func onOrientationChanged() {
self.mapView.setNeedsDisplay()
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
// Dispose of any resources that can be recreated.
}
}
Also, the code of MapView:
import UIKit
class MapView: UIView {
private var floor = Floor(walls: [], doors: [], beacons: [])
private let wallColor = UIColor.black
private let doorColor = UIColor.red
private let triangleColor = UIColor.red
private let perpendicularColor = UIColor.darkGray
private let doorLength = 3.0
private let beaconColor = UIColor.green
private let beaconNoActiveColor = UIColor(red: 20/255.0, green: 154.0/255.0, blue: 53.0/255.0, alpha: 1.0)
private let beaconFrameColor = UIColor.brown
private let lineWidthBeaconFrame:CGFloat = 0.25
private let beaconRadius: CGFloat = 5.0
private let userColor = UIColor.blue
private let userFrameColor = UIColor.brown
private let lineWidthUserFrame:CGFloat = 0.25
private let userRadius: CGFloat = 5.0
private let distanceBeaconColor = UIColor.clear
private let distanceBeaconFrameColor = UIColor.green
private let lineWidthOfDistanceFrame: CGFloat = 0.5
private let userRawRadius: CGFloat = 3.0
private let userRawColor = UIColor.darkGray
private let userRawFrameColor = UIColor.lightGray
private let lineWidthOfuserRawFrame: CGFloat = 0.8
private var beaconText: NSString = ""
private let textColor: UIColor = UIColor.red
private let textFont: UIFont = UIFont(name: "Helvetica Neue", size: 5)!
private var currentUserLocation = CurrentUserLocation()
private var beaconRangingData: [BeaconRangingPoint] = []
func setFloor (currentFloor: Floor) {
floor = currentFloor
}
func setUserPoint(currentUserPoint: CurrentUserLocation, beaconRangingData: [BeaconRangingPoint]) {
self.currentUserLocation = currentUserPoint
self.beaconRangingData = beaconRangingData
}
func dropPin(location: CGPoint) {
}
override func draw(_ rect: CGRect) {
let frameToDraw = CoordinatesConverter(boundsWidth: bounds.width, boundsHeight: bounds.height, paddingX: 5, paddingY: 5)
let mapWithScaleCoordinaates = frameToDraw.getSuitableCoordinates(floor: floor, currentUserLocation: currentUserLocation, beaconRangingData: beaconRangingData)
let lines = mapWithScaleCoordinaates.lines
let circles = mapWithScaleCoordinaates.circles
drawLines(lines: lines)
drawCircles(circles: circles)
}
private func drawLines(lines :[Line]) {
let wallPath = UIBezierPath()
let doorPath = UIBezierPath()
let trianglePath = UIBezierPath()
let perpendicularPath = UIBezierPath()
for line in lines {
if line.type == .wall {
wallPath.move(to: CGPoint(x: line.x1, y: line.y1))
wallPath.addLine(to: CGPoint(x: line.x2, y: line.y2))
wallColor.setStroke()
wallPath.stroke()
}
if line.type == .door {
doorPath.move(to: CGPoint(x: line.x1, y: line.y1))
doorPath.addLine(to: CGPoint(x: line.x2, y: line.y2))
doorPath.lineWidth = CGFloat(doorLength)
doorColor.setStroke()
doorPath.stroke()
}
if line.type == .triangle {
trianglePath.move(to: CGPoint(x: line.x1, y: line.y1))
trianglePath.addLine(to: CGPoint(x: line.x2, y: line.y2))
triangleColor.setStroke()
trianglePath.stroke()
}
if line.type == .perpendicular {
perpendicularPath.move(to: CGPoint(x: line.x1, y: line.y1))
perpendicularPath.addLine(to: CGPoint(x: line.x2, y: line.y2))
perpendicularColor.setStroke()
perpendicularPath.stroke()
}
}
}
private func drawCircles(circles: [Circle]) {
let layerViews = layer.sublayers
if layerViews != nil {
for view in layerViews! {
if type(of: view) === CAShapeLayer.self {
view.removeFromSuperlayer()
}
}
}
for circle in circles {
if type(of: circle) === BeaconCircle.self {
if (circle as! BeaconCircle).correctedDistance == 0 {
infoToDrawCircle(circle: circle, radius: beaconRadius, color: beaconNoActiveColor.cgColor, frameColor: beaconFrameColor.cgColor, frameWidth: lineWidthBeaconFrame)
drawBeaconText(circle: circle as! BeaconCircle)
} else {
infoToDrawCircle(circle: circle, radius: beaconRadius, color: beaconColor.cgColor, frameColor: beaconFrameColor.cgColor, frameWidth: lineWidthBeaconFrame)
drawBeaconText(circle: circle as! BeaconCircle)
}
if type(of: circle) === BeaconCircle.self && (circle as! BeaconCircle).correctedDistance != 0 {
infoToDrawCircle(circle: circle, radius: CGFloat((circle as! BeaconCircle).correctedDistance), color: distanceBeaconColor.cgColor, frameColor: distanceBeaconFrameColor.cgColor, frameWidth: lineWidthOfDistanceFrame)
// infoToDrawCircle(circle: circle, radius: CGFloat((circle as! BeaconCircle).notCorrectedDistance), color: UIColor.gray.cgColor, frameColor: distanceBeaconFrameColor.cgColor, frameWidth: lineWidthOfDistanceFrame)
}
} else if type(of: circle) === UserCircle.self {
infoToDrawCircle(circle: circle, radius: userRadius, color: userColor.cgColor, frameColor: userFrameColor.cgColor, frameWidth: lineWidthUserFrame)
} else if type (of: circle) === UserRawCircle.self {
infoToDrawCircle(circle: circle, radius: userRawRadius, color: userRawColor.cgColor, frameColor: userRawFrameColor.cgColor, frameWidth: lineWidthOfuserRawFrame)
} else {
Logger.logMessage(message: "incorrect circle type", level: .error)
break
}
}
}
private func drawBeaconText (circle: BeaconCircle) {
let attributes: NSDictionary = [
NSForegroundColorAttributeName: textColor,
NSFontAttributeName: textFont
]
let minor = circle.minor
let correctDistance = Int(round(100 * circle.correctDistanceForText) / 100)
let notCorrectDistance = Int(round(100 * circle.notCorrectDistanceForText) / 100)
beaconText = "m: \(minor), cd: \(correctDistance), nd: \(notCorrectDistance)" as NSString
if circle.x > Int((bounds.width - 50)) {
beaconText.draw(at: CGPoint(x: circle.x - 70, y: circle.y + 5), withAttributes: attributes as? [String : Any])
} else if circle.x < 20 {
beaconText.draw(at: CGPoint(x: circle.x + 5, y: circle.y + 5), withAttributes: attributes as? [String : Any])
} else {
beaconText.draw(at: CGPoint(x: circle.x + 6, y: circle.y + 5), withAttributes: attributes as? [String : Any])
}
}
private func infoToDrawCircle (circle: Circle, radius: CGFloat, color: CGColor, frameColor: CGColor, frameWidth: CGFloat) {
let center = CGPoint(x: circle.x, y: circle.y)
let path = UIBezierPath(arcCenter: center, radius: radius, startAngle: 0, endAngle: 360, clockwise: true)
let shapeLayer = CAShapeLayer()
shapeLayer.path = path.cgPath
shapeLayer.fillColor = color
shapeLayer.lineWidth = frameWidth
shapeLayer.strokeColor = frameColor
layer.addSublayer(shapeLayer)
}
}
I suggest you conform your MapViewController to MKMapViewDelegate. That way you can add annotations (animated, too) to the map, and you won't have to worry about them anymore, since it's all handled by the map delegate.
You can use:
self.map.showAnnotations(pins, animated: true)
where pins is an array of MKAnnotation. Here is the link to the documentation for the method.
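A minimal sketch of building that pins array, assuming an MKMapView outlet named map; the coordinate and title here are made up for illustration:
import MapKit

let pin = MKPointAnnotation()
pin.coordinate = CLLocationCoordinate2D(latitude: 40.7128, longitude: -74.0060) // hypothetical location
pin.title = "Store entrance" // hypothetical title
self.map.showAnnotations([pin], animated: true)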
Cheers

Combining Images in CameraView with Overlay. (Swift 3)?

I just about have this solved, thanks to some brilliant help getting me on the right track.
This is the code I have now.
Basically, I can now make an image out of the drawn overlay and an image of the camera preview, but I can't yet combine them. There seems to be very little useful code that I can find that does this simply.
So the important parts are the extension block right at the top, and the additions to func saveToCamera() near the bottom of the code.
In short, I now have the two images I need, I think. The snapshot of myImage appears on a white background, so I'm not sure whether that's natural or not. That's how it appears in the Simulator, so it may just be natural.
Image 1. A screen capture.
Image 2. The saved image of myImage, as per the explanation.
import UIKit
import AVFoundation
import Foundation
// extension must be outside class
extension UIImage {
convenience init(view: UIView) {
UIGraphicsBeginImageContext(view.frame.size)
view.layer.render(in: UIGraphicsGetCurrentContext()!)
let image = UIGraphicsGetImageFromCurrentImageContext()
UIGraphicsEndImageContext()
self.init(cgImage: (image?.cgImage)!)
}
}
class ViewController: UIViewController {
@IBOutlet weak var navigationBar: UINavigationBar!
@IBOutlet weak var imgOverlay: UIImageView!
@IBOutlet weak var btnCapture: UIButton!
@IBOutlet weak var shapeLayer: UIView!
let captureSession = AVCaptureSession()
let stillImageOutput = AVCaptureStillImageOutput()
var previewLayer : AVCaptureVideoPreviewLayer?
//var shapeLayer : CALayer?
// If we find a device we'll store it here for later use
var captureDevice : AVCaptureDevice?
override func viewDidLoad() {
super.viewDidLoad()
// Do any additional setup after loading the view, typically from a nib.
//=======================
let midX = self.view.bounds.midX
let midY = self.view.bounds.midY
for index in 1...10 {
let circlePath = UIBezierPath(arcCenter: CGPoint(x: midX,y: midY), radius: CGFloat((index * 10)), startAngle: CGFloat(0), endAngle:CGFloat(M_PI * 2), clockwise: true)
let shapeLayerPath = CAShapeLayer()
shapeLayerPath.path = circlePath.cgPath
//change the fill color
shapeLayerPath.fillColor = UIColor.clear.cgColor
//you can change the stroke color
shapeLayerPath.strokeColor = UIColor.blue.cgColor
//you can change the line width
shapeLayerPath.lineWidth = 0.5
// add the blue-circle layer to the shapeLayer ImageView
shapeLayer.layer.addSublayer(shapeLayerPath)
}
print("Shape layer drawn")
//=====================
captureSession.sessionPreset = AVCaptureSessionPresetHigh
if let devices = AVCaptureDevice.devices() as? [AVCaptureDevice] {
// Loop through all the capture devices on this phone
for device in devices {
// Make sure this particular device supports video
if (device.hasMediaType(AVMediaTypeVideo)) {
// Finally check the position and confirm we've got the back camera
if(device.position == AVCaptureDevicePosition.back) {
captureDevice = device
if captureDevice != nil {
print("Capture device found")
beginSession()
}
}
}
}
}
}
@IBAction func actionCameraCapture(_ sender: AnyObject) {
print("Camera button pressed")
saveToCamera()
}
func beginSession() {
do {
try captureSession.addInput(AVCaptureDeviceInput(device: captureDevice))
stillImageOutput.outputSettings = [AVVideoCodecKey:AVVideoCodecJPEG]
if captureSession.canAddOutput(stillImageOutput) {
captureSession.addOutput(stillImageOutput)
}
}
catch {
print("error: \(error.localizedDescription)")
}
guard let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession) else {
print("no preview layer")
return
}
// this is what displays the camera view. But - it's on TOP of the drawn view, and under the overview. ??
self.view.layer.addSublayer(previewLayer)
previewLayer.frame = self.view.layer.frame
captureSession.startRunning()
print("Capture session running")
self.view.addSubview(navigationBar)
//self.view.addSubview(imgOverlay)
self.view.addSubview(btnCapture)
// shapeLayer ImageView is already a subview created in IB
// but this will bring it to the front
self.view.addSubview(shapeLayer)
}
func saveToCamera() {
if let videoConnection = stillImageOutput.connection(withMediaType: AVMediaTypeVideo) {
stillImageOutput.captureStillImageAsynchronously(from: videoConnection, completionHandler: { (CMSampleBuffer, Error) in
if let imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(CMSampleBuffer) {
if let cameraImage = UIImage(data: imageData) {
// cameraImage is the camera preview image.
// I need to combine/merge it with the myImage that is actually the blue circles.
// This converts the UIView of the bllue circles to an image. Uses 'extension' at top of code.
let myImage = UIImage(view: self.shapeLayer)
print("converting myImage to an image")
UIImageWriteToSavedPhotosAlbum(cameraImage, nil, nil, nil)
}
}
})
}
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
// Dispose of any resources that can be recreated.
}
}
Give this a try... instead of combining your overlay view, it draws the circles and combines the output:
import UIKit
import AVFoundation
import Foundation
class CameraWithTargetViewController: UIViewController {
@IBOutlet weak var navigationBar: UINavigationBar!
@IBOutlet weak var imgOverlay: UIImageView!
@IBOutlet weak var btnCapture: UIButton!
@IBOutlet weak var shapeLayer: UIView!
let captureSession = AVCaptureSession()
let stillImageOutput = AVCaptureStillImageOutput()
var previewLayer : AVCaptureVideoPreviewLayer?
//var shapeLayer : CALayer?
// If we find a device we'll store it here for later use
var captureDevice : AVCaptureDevice?
override func viewDidLoad() {
super.viewDidLoad()
// Do any additional setup after loading the view, typically from a nib.
//=======================
captureSession.sessionPreset = AVCaptureSessionPresetHigh
if let devices = AVCaptureDevice.devices() as? [AVCaptureDevice] {
// Loop through all the capture devices on this phone
for device in devices {
// Make sure this particular device supports video
if (device.hasMediaType(AVMediaTypeVideo)) {
// Finally check the position and confirm we've got the back camera
if(device.position == AVCaptureDevicePosition.back) {
captureDevice = device
if captureDevice != nil {
print("Capture device found")
beginSession()
}
}
}
}
}
}
@IBAction func actionCameraCapture(_ sender: AnyObject) {
print("Camera button pressed")
saveToCamera()
}
func beginSession() {
do {
try captureSession.addInput(AVCaptureDeviceInput(device: captureDevice))
stillImageOutput.outputSettings = [AVVideoCodecKey:AVVideoCodecJPEG]
if captureSession.canAddOutput(stillImageOutput) {
captureSession.addOutput(stillImageOutput)
}
}
catch {
print("error: \(error.localizedDescription)")
}
guard let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession) else {
print("no preview layer")
return
}
// this is what displays the camera view. But - it's on TOP of the drawn view, and under the overview. ??
self.view.layer.addSublayer(previewLayer)
previewLayer.frame = self.view.layer.frame
imgOverlay.frame = self.view.frame
imgOverlay.image = self.drawCirclesOnImage(fromImage: nil, targetSize: imgOverlay.bounds.size)
self.view.bringSubview(toFront: navigationBar)
self.view.bringSubview(toFront: imgOverlay)
self.view.bringSubview(toFront: btnCapture)
// don't use shapeLayer anymore...
// self.view.bringSubview(toFront: shapeLayer)
captureSession.startRunning()
print("Capture session running")
}
func getImageWithColor(color: UIColor, size: CGSize) -> UIImage {
let rect = CGRect(origin: CGPoint(x: 0, y: 0), size: CGSize(width: size.width, height: size.height))
UIGraphicsBeginImageContextWithOptions(size, false, 0)
color.setFill()
UIRectFill(rect)
let image: UIImage = UIGraphicsGetImageFromCurrentImageContext()!
UIGraphicsEndImageContext()
return image
}
func drawCirclesOnImage(fromImage: UIImage? = nil, targetSize: CGSize? = CGSize.zero) -> UIImage? {
if fromImage == nil && targetSize == CGSize.zero {
return nil
}
var tmpimg: UIImage?
if targetSize == CGSize.zero {
tmpimg = fromImage
} else {
tmpimg = getImageWithColor(color: UIColor.clear, size: targetSize!)
}
guard let img = tmpimg else {
return nil
}
let imageSize = img.size
let scale: CGFloat = 0
UIGraphicsBeginImageContextWithOptions(imageSize, false, scale)
img.draw(at: CGPoint.zero)
let w = imageSize.width
let midX = imageSize.width / 2
let midY = imageSize.height / 2
// red circles - radius in %
let circleRads = [ 0.07, 0.13, 0.17, 0.22, 0.29, 0.36, 0.40, 0.48, 0.60, 0.75 ]
// center "dot" - radius is 1.5%
var circlePath = UIBezierPath(arcCenter: CGPoint(x: midX,y: midY), radius: CGFloat(w * 0.015), startAngle: CGFloat(0), endAngle:CGFloat(M_PI * 2), clockwise: true)
UIColor.red.setFill()
circlePath.stroke()
circlePath.fill()
// blue circle is between first and second red circles
circlePath = UIBezierPath(arcCenter: CGPoint(x: midX,y: midY), radius: w * CGFloat((circleRads[0] + circleRads[1]) / 2.0), startAngle: CGFloat(0), endAngle:CGFloat(M_PI * 2), clockwise: true)
UIColor.blue.setStroke()
circlePath.lineWidth = 2.5
circlePath.stroke()
UIColor.red.setStroke()
for pct in circleRads {
let rad = w * CGFloat(pct)
circlePath = UIBezierPath(arcCenter: CGPoint(x: midX, y: midY), radius: CGFloat(rad), startAngle: CGFloat(0), endAngle:CGFloat(M_PI * 2), clockwise: true)
circlePath.lineWidth = 2.5
circlePath.stroke()
}
let newImage = UIGraphicsGetImageFromCurrentImageContext()
UIGraphicsEndImageContext()
return newImage
}
func saveToCamera() {
if let videoConnection = stillImageOutput.connection(withMediaType: AVMediaTypeVideo) {
stillImageOutput.captureStillImageAsynchronously(from: videoConnection, completionHandler: { (CMSampleBuffer, Error) in
if let imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(CMSampleBuffer) {
if let cameraImage = UIImage(data: imageData) {
if let nImage = self.drawCirclesOnImage(fromImage: cameraImage, targetSize: CGSize.zero) {
UIImageWriteToSavedPhotosAlbum(nImage, nil, nil, nil)
}
}
}
})
}
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
// Dispose of any resources that can be recreated.
}
}
OK, solved it, pretty much. The important code is here. The resulting image is slightly skewed, but I'll work away and fix that, unless someone can see a good fix for it.
func saveToCamera() {
if let videoConnection = stillImageOutput.connection(withMediaType: AVMediaTypeVideo) {
stillImageOutput.captureStillImageAsynchronously(from: videoConnection, completionHandler: { (CMSampleBuffer, Error) in
if let imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(CMSampleBuffer) {
if let cameraImage = UIImage(data: imageData) {
// cameraImage is the camera preview image.
// I need to combine/merge it with the myImage that is actually the blue circles.
// This converts the UIView of the bllue circles to an image. Uses 'extension' at top of code.
let myImage = UIImage(view: self.shapeLayer)
print("converting myImage to an image")
let newImage = self.composite(image:cameraImage, overlay:(myImage), scaleOverlay:true)
UIImageWriteToSavedPhotosAlbum(newImage!, nil, nil, nil)
}
}
})
}
}
func composite(image:UIImage, overlay:(UIImage), scaleOverlay: Bool = false)->UIImage?{
UIGraphicsBeginImageContext(image.size)
var rect = CGRect(x: 0, y: 0, width: image.size.width, height: image.size.height)
image.draw(in: rect)
if scaleOverlay == false {
rect = CGRect(x: 0, y: 0, width: overlay.size.width, height: overlay.size.height)
}
overlay.draw(in: rect)
return UIGraphicsGetImageFromCurrentImageContext()
}
The resulting saved image.
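One possible cause of the skew, offered as an assumption rather than a diagnosis: the overlay has the screen's aspect ratio while the captured photo is 4:3, so stretching the overlay into the full image rect distorts it. A sketch of an aspect-preserving rect to draw the overlay into:
// Sketch: scale the overlay so it fills the captured image while keeping its
// aspect ratio, instead of stretching it to the full image rect.
func aspectFillRect(for overlaySize: CGSize, in targetSize: CGSize) -> CGRect {
    let scale = max(targetSize.width / overlaySize.width,
                    targetSize.height / overlaySize.height)
    let scaledSize = CGSize(width: overlaySize.width * scale,
                            height: overlaySize.height * scale)
    let origin = CGPoint(x: (targetSize.width - scaledSize.width) / 2,
                         y: (targetSize.height - scaledSize.height) / 2)
    return CGRect(origin: origin, size: scaledSize)
}
In composite(image:overlay:scaleOverlay:), the overlay would then be drawn with overlay.draw(in: aspectFillRect(for: overlay.size, in: image.size)).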

Dismissed UIViewController didReceiveMemoryWarning - swift

I have a custom UIImagePickerController that, even after being dismissed and the variable set to nil, is still receiving memory warnings, and it is causing my app to crash.
Here is my UIImagePickerController
import UIKit
import MobileCoreServices
class Picker: UIImagePickerController, UIImagePickerControllerDelegate, UINavigationControllerDelegate {
var myView = PickerView(frame: UIScreen.mainScreen().bounds)
var takenImages = [UIImage]()
var takenImagesThumbs = [UIImage]()
override init() {
super.init()
myView.picker = self
self.delegate = self
self.sourceType = UIImagePickerControllerSourceType.Camera
self.mediaTypes = [kUTTypeImage]
self.showsCameraControls = false
self.cameraOverlayView = myView
self.cameraCaptureMode = UIImagePickerControllerCameraCaptureMode.Photo
self.setFullscreen()
myView.viewing()
}
override init(nibName nibNameOrNil: String?, bundle nibBundleOrNil: NSBundle?) {
super.init(nibName: nibNameOrNil, bundle: nibBundleOrNil)
}
required init(coder aDecoder: NSCoder) {
super.init(coder: aDecoder)
}
func setFullscreen(){
let screenSize = UIScreen.mainScreen().bounds.size
let scale = screenSize.height / screenSize.width*3/4
let translate = CGAffineTransformMakeTranslation(0,(screenSize.height - screenSize.width*4/3)*0.5);
let fullScreen = CGAffineTransformMakeScale(scale, scale);
self.cameraViewTransform = CGAffineTransformConcat(fullScreen, translate)
}
var endFunc:((images:[UIImage], thumbs:[UIImage]) -> Void)!
var ViewController:UIViewController!
func show(vc:UIViewController, complete: ((images:[UIImage], thumbs:[UIImage]) -> Void)) {
vc.presentViewController(self, animated: true, completion: nil)
endFunc = complete
ViewController = vc
}
var closeQueue = dispatch_queue_create("areyouareyou", nil)
func close() {
let priority = DISPATCH_QUEUE_PRIORITY_DEFAULT
dispatch_async(dispatch_get_global_queue(priority, 0)) {
dispatch_async(self.closeQueue) {
self.endFunc(images: self.takenImages, thumbs: self.takenImagesThumbs)
self.takenImagesThumbs = [UIImage]()
self.takenImages = [UIImage]()
self.myView.update(self.takenImages.count)
}
}
self.removeFromParentViewController()
self.dismissViewControllerAnimated(true, completion: nil)
}
var resizequeue = dispatch_queue_create("hangingthree", nil)
func imagePickerController(picker: UIImagePickerController, didFinishPickingMediaWithInfo info: [NSObject : AnyObject]) {
myView.takeButton.enabled = true
let nfo = info as NSDictionary
var image:UIImage = nfo.valueForKey(UIImagePickerControllerOriginalImage) as UIImage
takenImages.append(image)
self.myView.update(self.takenImages.count)
let priority = DISPATCH_QUEUE_PRIORITY_DEFAULT
dispatch_async(dispatch_get_global_queue(priority, 0)) {
dispatch_async(self.resizequeue) {
var theimg = Common.resizeImage(image, scaledToSize: CGSizeMake(UIScreen.mainScreen().bounds.width / 3, UIScreen.mainScreen().bounds.width / 3))
self.takenImagesThumbs.append(theimg)
}
}
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
println(self.takenImages.count)
}
}
And here is my overlay view:
import UIKit
class PickerView: UIView {
var picker:Picker!
var takeButton = UIButton()
var takeButtonPath = UIBezierPath()
var takeButtonLayer = CAShapeLayer()
var takeButtonIconLayer = CAShapeLayer()
var closeButton = UIButton()
var closeButtonBgPath = UIBezierPath()
var closeButtonBgLayer = CAShapeLayer()
var bottomBar:UIVisualEffectView!
var imageCount = UILabel()
override init() {
super.init()
}
override init(frame: CGRect) {
super.init(frame: frame)
}
required init(coder aDecoder: NSCoder) {
super.init(coder: aDecoder)
}
func viewing() {
var colors = Colors()
self.backgroundColor = UIColor.clearColor()
imageCount.frame = CGRectMake(62, 0, 30, 31)
imageCount.text = "0"
imageCount.textAlignment = NSTextAlignment.Center
imageCount.font = UIFont(name: "Arial-BoldMT", size: 20)
}
override func drawRect(rect: CGRect) {
let context = UIGraphicsGetCurrentContext()
var colors = Colors()
CGContextSetLineWidth(context, 2)
CGContextSetStrokeColorWithColor(context, colors.pickerStrokeColor.CGColor)
CGContextSetFillColorWithColor(context, colors.pickerFillColor.CGColor)
var point = CGPointMake((self.bounds.width / 2), self.bounds.height - 30)
var start = CGFloat(Common.radians(0))
var end = CGFloat(Common.radians(360))
var moon = UIBezierPath()
moon.addArcWithCenter(point, radius: 45, startAngle: end, endAngle: start, clockwise: true)
moon.stroke()
moon.fill()
var left = UIBezierPath()
left.moveToPoint(CGPointMake(0, point.y))
left.addLineToPoint(CGPointMake(point.x - 45, point.y))
left.stroke()
var right = UIBezierPath()
right.moveToPoint(CGPointMake(self.bounds.width, point.y))
right.addLineToPoint(CGPointMake(point.x + 45, point.y))
right.stroke()
btnCloseing()
btnTakeing()
bottomBaring()
//add
self.addSubview(closeButton)
self.addSubview(takeButton)
}
func bottomBaring() {
var point = CGPointMake((self.bounds.width / 2), self.bounds.height - 30)
var frame = CGRectMake(0, point.y, self.bounds.width, self.bounds.height - point.y)
//blur
var blurEffect = UIBlurEffect(style: UIBlurEffectStyle.Dark)
bottomBar = UIVisualEffectView(effect: blurEffect)
bottomBar.frame = frame
//blur add
self.addSubview(bottomBar)
//vib
var vibEffect = UIVibrancyEffect(forBlurEffect: blurEffect)
var vibView = UIVisualEffectView(effect: vibEffect)
vibView.frame = CGRectMake(0, 0, frame.width, frame.height)
//add
vibView.contentView.addSubview(imageCount)
bottomBar.contentView.addSubview(vibView)
}
func btnCloseing() {
var start = CGFloat(Common.radians(0))
var end = CGFloat(Common.radians(360))
var colors = Colors()
closeButtonBgPath.addArcWithCenter(CGPointMake(0, 0), radius: 20, startAngle: start, endAngle: end, clockwise: true)
closeButtonBgLayer.bounds = CGRectMake(-40, -40, 40, 40)
closeButtonBgLayer.path = closeButtonBgPath.CGPath
closeButtonBgLayer.fillColor = colors.pickerCloseBtnFill.CGColor
closeButtonBgLayer.strokeColor = colors.pickerStrokeColor.CGColor
closeButton.frame = CGRectMake(self.bounds.width - 50, 10, 40, 40)
closeButton.layer.addSublayer(closeButtonBgLayer)
closeButton.setTitle("X", forState: UIControlState.Normal)
closeButton.setTitleColor(colors.pickerStrokeColor, forState: UIControlState.Normal)
closeButton.titleLabel?.font = UIFont(name: "Arial-BoldMT", size: 25)
closeButton.addTarget(self, action: "closePhoto", forControlEvents: UIControlEvents.TouchDown)
}
func btnTakeing() {
var start = CGFloat(Common.radians(0))
var end = CGFloat(Common.radians(360))
var point = CGPointMake((self.bounds.width / 2), self.bounds.height - 30)
takeButtonPath.addArcWithCenter(CGPointMake(0, 0), radius: 40, startAngle: start, endAngle: end, clockwise: true)
takeButtonLayer.bounds = CGRectMake(-80, -90, 80, 80)
takeButtonLayer.path = takeButtonPath.CGPath
takeButtonLayer.fillColor = Colors().pickerTakeBtnFill.CGColor
takeButtonLayer.strokeColor = UIColor.clearColor().CGColor
takeButtonIconLayer.contents = UIImage(named: "CameraIcon")?.CGImage
takeButtonIconLayer.frame = CGRectMake(26, 30, 30, 30)
takeButton.frame = CGRectMake(point.x - 40, point.y - 50, 80, 100)
takeButton.layer.addSublayer(takeButtonLayer)
takeButton.layer.addSublayer(takeButtonIconLayer)
takeButton.addTarget(self, action: "takePhoto", forControlEvents: UIControlEvents.TouchDown)
takeButton.addTarget(self, action: "takePhotoEnd", forControlEvents: UIControlEvents.TouchUpInside)
takeButton.addTarget(self, action: "takePhotoEnd", forControlEvents: UIControlEvents.TouchUpOutside)
}
func takePhoto() {
self.takeButton.enabled = false
takeButtonLayer.fillColor = Colors().pickerTakeBtnFillClick.CGColor
picker.takePicture()
}
func takePhotoEnd() {
takeButtonLayer.fillColor = Colors().pickerTakeBtnFill.CGColor
}
func closePhoto() {
picker.close()
}
func update(count: Int) {
imageCount.text = String(count)
}
}
Does anyone know the problem or how to fix it?
Your problem is that you are subclassing UIImagePickerController, which is prohibited. Read the Apple documentation:
IMPORTANT
The UIImagePickerController class supports portrait mode only. This class is intended to be used as-is and does not support subclassing. The view hierarchy for this class is private and must not be modified, with one exception. You can assign a custom view to the cameraOverlayView property and use that view to present additional information or manage the interactions between the camera interface and your code.
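A sketch of the supported approach, written in current Swift for brevity: keep a plain UIImagePickerController and attach the question's PickerView through cameraOverlayView instead of subclassing. CameraPresenter and presentCamera(from:) are hypothetical names, and the caller must keep a strong reference to the presenter.
import UIKit

// Composition instead of subclassing: the picker stays a plain
// UIImagePickerController; only cameraOverlayView is customized.
final class CameraPresenter: NSObject, UIImagePickerControllerDelegate, UINavigationControllerDelegate {
    let picker = UIImagePickerController()

    func presentCamera(from viewController: UIViewController) {
        guard UIImagePickerController.isSourceTypeAvailable(.camera) else { return }
        picker.delegate = self
        picker.sourceType = .camera
        picker.showsCameraControls = false
        picker.cameraOverlayView = PickerView(frame: UIScreen.main.bounds) // the overlay view from the question
        viewController.present(picker, animated: true)
    }
}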

How to Add Live Camera Preview to UIView

I've run into a problem I'm trying to solve within a UIView boundary: is there any way to add a camera preview to a UIView, and to add other content (buttons, labels, etc.) on top of that UIView? I tried using the AVFoundation framework, but there is not enough documentation for Swift.
UPDATED TO SWIFT 5
You can try something like this:
import UIKit
import AVFoundation
class ViewController: UIViewController{
var previewView : UIView!
var boxView:UIView!
let myButton: UIButton = UIButton()
//Camera Capture requiered properties
var videoDataOutput: AVCaptureVideoDataOutput!
var videoDataOutputQueue: DispatchQueue!
var previewLayer:AVCaptureVideoPreviewLayer!
var captureDevice : AVCaptureDevice!
let session = AVCaptureSession()
override func viewDidLoad() {
super.viewDidLoad()
previewView = UIView(frame: CGRect(x: 0,
y: 0,
width: UIScreen.main.bounds.size.width,
height: UIScreen.main.bounds.size.height))
previewView.contentMode = UIView.ContentMode.scaleAspectFit
view.addSubview(previewView)
//Add a view on top of the cameras' view
boxView = UIView(frame: self.view.frame)
myButton.frame = CGRect(x: 0, y: 0, width: 200, height: 40)
myButton.backgroundColor = UIColor.red
myButton.layer.masksToBounds = true
myButton.setTitle("press me", for: .normal)
myButton.setTitleColor(UIColor.white, for: .normal)
myButton.layer.cornerRadius = 20.0
myButton.layer.position = CGPoint(x: self.view.frame.width/2, y:200)
myButton.addTarget(self, action: #selector(self.onClickMyButton(sender:)), for: .touchUpInside)
view.addSubview(boxView)
view.addSubview(myButton)
self.setupAVCapture()
}
override var shouldAutorotate: Bool {
if (UIDevice.current.orientation == UIDeviceOrientation.landscapeLeft ||
UIDevice.current.orientation == UIDeviceOrientation.landscapeRight ||
UIDevice.current.orientation == UIDeviceOrientation.unknown) {
return false
}
else {
return true
}
}
@objc func onClickMyButton(sender: UIButton){
print("button pressed")
}
}
// AVCaptureVideoDataOutputSampleBufferDelegate protocol and related methods
extension ViewController: AVCaptureVideoDataOutputSampleBufferDelegate{
func setupAVCapture(){
session.sessionPreset = AVCaptureSession.Preset.vga640x480
guard let device = AVCaptureDevice
.default(AVCaptureDevice.DeviceType.builtInWideAngleCamera,
for: .video,
position: AVCaptureDevice.Position.back) else {
return
}
captureDevice = device
beginSession()
}
func beginSession(){
var deviceInput: AVCaptureDeviceInput!
do {
deviceInput = try AVCaptureDeviceInput(device: captureDevice)
guard deviceInput != nil else {
print("error: cant get deviceInput")
return
}
if self.session.canAddInput(deviceInput){
self.session.addInput(deviceInput)
}
videoDataOutput = AVCaptureVideoDataOutput()
videoDataOutput.alwaysDiscardsLateVideoFrames=true
videoDataOutputQueue = DispatchQueue(label: "VideoDataOutputQueue")
videoDataOutput.setSampleBufferDelegate(self, queue:self.videoDataOutputQueue)
if session.canAddOutput(self.videoDataOutput){
session.addOutput(self.videoDataOutput)
}
videoDataOutput.connection(with: .video)?.isEnabled = true
previewLayer = AVCaptureVideoPreviewLayer(session: self.session)
previewLayer.videoGravity = AVLayerVideoGravity.resizeAspect
let rootLayer :CALayer = self.previewView.layer
rootLayer.masksToBounds=true
previewLayer.frame = rootLayer.bounds
rootLayer.addSublayer(self.previewLayer)
session.startRunning()
} catch let error as NSError {
deviceInput = nil
print("error: \(error.localizedDescription)")
}
}
func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
// do stuff here
}
// clean up AVCapture
func stopCamera(){
session.stopRunning()
}
}
Here I use a UIView called previewView to start the camera, and then I add a new UIView called boxView which sits above previewView. I add a UIButton to boxView.
IMPORTANT
Remember that in iOS 10 and later you need to first ask the user for permission in order to have access to the camera. You do this by adding a usage key (NSCameraUsageDescription) to your app's Info.plist together with a purpose string, because if you fail to declare the usage, your app will crash when it first makes the access.
Here's a screenshot to show the Camera access request
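Alongside that Info.plist entry, a small runtime sketch that asks for camera permission before starting the session; AVCaptureDevice.requestAccess is the standard API for this:
import AVFoundation

AVCaptureDevice.requestAccess(for: .video) { granted in
    DispatchQueue.main.async {
        if granted {
            // safe to start the capture session here, e.g. call setupAVCapture()
        } else {
            print("Camera access denied")
        }
    }
}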
Swift 4
Condensed version of mauricioconde's solution
You can use this as a drop in component:
//
// CameraView.swift
import Foundation
import AVFoundation
import UIKit
final class CameraView: UIView {
private lazy var videoDataOutput: AVCaptureVideoDataOutput = {
let v = AVCaptureVideoDataOutput()
v.alwaysDiscardsLateVideoFrames = true
v.setSampleBufferDelegate(self, queue: videoDataOutputQueue)
v.connection(with: .video)?.isEnabled = true
return v
}()
private let videoDataOutputQueue: DispatchQueue = DispatchQueue(label: "JKVideoDataOutputQueue")
private lazy var previewLayer: AVCaptureVideoPreviewLayer = {
let l = AVCaptureVideoPreviewLayer(session: session)
l.videoGravity = .resizeAspect
return l
}()
private let captureDevice: AVCaptureDevice? = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back)
private lazy var session: AVCaptureSession = {
let s = AVCaptureSession()
s.sessionPreset = .vga640x480
return s
}()
override init(frame: CGRect) {
super.init(frame: frame)
commonInit()
}
required init?(coder aDecoder: NSCoder) {
super.init(coder: aDecoder)
commonInit()
}
private func commonInit() {
contentMode = .scaleAspectFit
beginSession()
}
private func beginSession() {
do {
guard let captureDevice = captureDevice else {
fatalError("Camera doesn't work on the simulator! You have to test this on an actual device!")
}
let deviceInput = try AVCaptureDeviceInput(device: captureDevice)
if session.canAddInput(deviceInput) {
session.addInput(deviceInput)
}
if session.canAddOutput(videoDataOutput) {
session.addOutput(videoDataOutput)
}
layer.masksToBounds = true
layer.addSublayer(previewLayer)
previewLayer.frame = bounds
session.startRunning()
} catch let error {
debugPrint("\(self.self): \(#function) line: \(#line). \(error.localizedDescription)")
}
}
override func layoutSubviews() {
super.layoutSubviews()
previewLayer.frame = bounds
}
}
extension CameraView: AVCaptureVideoDataOutputSampleBufferDelegate {}
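For instance, a minimal sketch of dropping it into a view controller (MyViewController is a hypothetical host):
import UIKit

class MyViewController: UIViewController {
    override func viewDidLoad() {
        super.viewDidLoad()
        let cameraView = CameraView(frame: view.bounds)
        cameraView.autoresizingMask = [.flexibleWidth, .flexibleHeight] // track the controller's view size
        view.addSubview(cameraView)
    }
}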
iOS 13/14 and Swift 5.3:
private var imageVC: UIImagePickerController?
and then call showCameraVC() when you want to show the camera view
func showCameraVC() {
self.imageVC = UIImagePickerController()
if UIImagePickerController.isCameraDeviceAvailable(.front) {
self.imageVC?.sourceType = .camera
self.imageVC?.cameraDevice = .front
self.imageVC?.showsCameraControls = false
let screenSize = UIScreen.main.bounds.size
let cameraAspectRatio = CGFloat(4.0 / 3.0)
let cameraImageHeight = screenSize.width * cameraAspectRatio
let scale = screenSize.height / cameraImageHeight
self.imageVC?.cameraViewTransform = CGAffineTransform(translationX: 0, y: (screenSize.height - cameraImageHeight)/2)
self.imageVC?.cameraViewTransform = self.imageVC!.cameraViewTransform.scaledBy(x: scale, y: scale)
self.imageVC?.view.frame = CGRect(x: 0, y: 0, width: screenSize.width, height: screenSize.height)
self.view.addSubview(self.imageVC!.view)
self.view.sendSubviewToBack(self.imageVC!.view)
}
}
The camera view will also be fullscreen (the other answers leave a letterboxed view).
Swift 3:
@IBOutlet weak var cameraContainerView:UIView!
var imagePickers:UIImagePickerController?
In viewDidLoad:
override func viewDidLoad() {
super.viewDidLoad()
addImagePickerToContainerView()
}
Add Camera Preview to the container view:
func addImagePickerToContainerView(){
imagePickers = UIImagePickerController()
if UIImagePickerController.isCameraDeviceAvailable( UIImagePickerControllerCameraDevice.front) {
imagePickers?.delegate = self
imagePickers?.sourceType = UIImagePickerControllerSourceType.camera
//add as a childviewcontroller
addChildViewController(imagePickers!)
// Add the child's View as a subview
self.cameraContainerView.addSubview((imagePickers?.view)!)
imagePickers?.view.frame = cameraContainerView.bounds
imagePickers?.allowsEditing = false
imagePickers?.showsCameraControls = false
imagePickers?.view.autoresizingMask = [.flexibleWidth, .flexibleHeight]
}
}
On custom button action:
@IBAction func cameraButtonPressed(_ sender: Any) {
if UIImagePickerController.isSourceTypeAvailable(.camera){
imagePickers?.takePicture()
} else{
//Camera not available.
}
}
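One detail worth adding, since showsCameraControls is off and takePicture() is used: the photo itself arrives through the picker delegate, so the view controller would also conform to UIImagePickerControllerDelegate and UINavigationControllerDelegate and implement something like the Swift 3 era method below.
func imagePickerController(_ picker: UIImagePickerController, didFinishPickingMediaWithInfo info: [String : Any]) {
    if let image = info[UIImagePickerControllerOriginalImage] as? UIImage {
        // use the captured image here
    }
}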
Swift 5
Easy way
import UIKit
import AVFoundation
class ViewController: UIViewController, UINavigationControllerDelegate,UIImagePickerControllerDelegate{
//Camera Capture requiered properties
var imagePickers:UIImagePickerController?
@IBOutlet weak var customCameraView: UIView!
override func viewDidLoad() {
addCameraInView()
super.viewDidLoad()
}
func addCameraInView(){
imagePickers = UIImagePickerController()
if UIImagePickerController.isCameraDeviceAvailable( UIImagePickerController.CameraDevice.rear) {
imagePickers?.delegate = self
imagePickers?.sourceType = UIImagePickerController.SourceType.camera
//add as a childviewcontroller
addChild(imagePickers!)
// Add the child's View as a subview
self.customCameraView.addSubview((imagePickers?.view)!)
imagePickers?.view.frame = customCameraView.bounds
imagePickers?.allowsEditing = false
imagePickers?.showsCameraControls = false
imagePickers?.view.autoresizingMask = [.flexibleWidth, .flexibleHeight]
}
}
@IBAction func cameraButtonPressed(_ sender: Any) {
if UIImagePickerController.isSourceTypeAvailable(.camera){
imagePickers?.takePicture()
} else{
//Camera not available.
}
}
}
