Key-value observing exposureDuration - iOS

I'm building a camera app, and I'm trying to display the current exposure duration to the user. Since this value changes continuously until it is set manually, I need to use KVO to stream the values to the UI. I've successfully done this with the ISO, and I can observe changes to exposureDuration, but I cannot coerce the new value to a CMTime (which is exposureDuration's type). Below is the code I'm using to try to accomplish this:
override init() {
    super.init()
    captureDevice = self.selectCamera()
    captureDevice?.addObserver(self, forKeyPath: "ISO", options: .New, context: &isoContext)
    captureDevice?.addObserver(self, forKeyPath: "exposureDuration", options: .New, context: &shutterContext)
}

deinit {
    captureDevice?.removeObserver(self, forKeyPath: "ISO")
    captureDevice?.removeObserver(self, forKeyPath: "exposureDuration")
}

override func observeValueForKeyPath(keyPath: String?, ofObject object: AnyObject?, change: [String : AnyObject]?, context: UnsafeMutablePointer<Void>) {
    let newValue = change?[NSKeyValueChangeNewKey]
    if context == &isoContext {
        store.iso.value = newValue as! Float
    } else if context == &shutterContext {
        // The app crashes at this line.
        // Thread 1: EXC_BREAKPOINT (code=1, subcode=0x100091670)
        // newValue is "AnyObject" in the debug area
        store.shutterSpeed.value = newValue as! CMTime
    }
}
Am I doing something wrong, or is this a legitimate bug that I need to file with Apple?

exposureDuration's new value is not a CMTime but an NSValue; KVO delivers struct values like CMTime boxed in NSValue. Here is the fixed code (Swift 3):
store.shutterSpeed.value = (newValue as! NSValue).timeValue
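For reference, a minimal Swift 3 version of the whole observer method with that unwrap (a sketch assuming the same isoContext/shutterContext pointers and store from the question):
override func observeValue(forKeyPath keyPath: String?, of object: Any?, change: [NSKeyValueChangeKey : Any]?, context: UnsafeMutableRawPointer?) {
    let newValue = change?[.newKey]
    if context == &isoContext {
        // ISO arrives as an NSNumber and bridges directly to Float.
        store.iso.value = newValue as! Float
    } else if context == &shutterContext {
        // CMTime is a struct, so KVO boxes it in an NSValue; unwrap with .timeValue.
        store.shutterSpeed.value = (newValue as! NSValue).timeValue
    } else {
        super.observeValue(forKeyPath: keyPath, of: object, change: change, context: context)
    }
}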

Related

How to get WhiteBalance (Kelvin) from captureOutput

Apple provides several ways to read and change the white-balance temperature (Kelvin) of an AVCaptureDevice:
https://developer.apple.com/documentation/avfoundation/avcapturedevice/white_balance
Example:
guard let videoDevice = AVCaptureDevice
    .default(.builtInWideAngleCamera, for: .video, position: .back) else {
        return
}
guard
    let videoDeviceInput = try? AVCaptureDeviceInput(device: videoDevice),
    captureSession.canAddInput(videoDeviceInput) else {
        print("There seems to be a problem with the camera on your device.")
        return
}
captureSession.addInput(videoDeviceInput)

let kelvin = videoDevice.temperatureAndTintValues(for: videoDevice.deviceWhiteBalanceGains)
print("Kelvin temp \(kelvin.temperature)")
print("Kelvin tint \(kelvin.tint)")

let captureOutput = AVCaptureVideoDataOutput()
captureOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_32BGRA)]
captureOutput.setSampleBufferDelegate(self, queue: DispatchQueue.global(qos: .default))
captureSession.addOutput(captureOutput)
This always prints the same values:
Kelvin temp 3900.0889
Kelvin tint 4.966322
How can I get the White Balance (Kelvin value) through the live camera feed?
It gives you a single value because videoDevice.temperatureAndTintValues(for: videoDevice.deviceWhiteBalanceGains) is called only once. To get a value that updates with the camera feed, you have two options:
1. Key-value observing, which will notify you when the white balance changes.
2. Calling videoDevice.temperatureAndTintValues(for: videoDevice.deviceWhiteBalanceGains) for each frame.
I would suggest the second; key-value observing is somewhat annoying. In that case, I assume you have already implemented func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) of AVCaptureVideoDataOutputSampleBufferDelegate. That method is called every time a frame is delivered, so you can update your temperature and tint values for each frame of the live camera feed, as in the sketch below.
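A minimal sketch of that per-frame approach (assuming videoDevice is kept as a property on the delegate):
func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
    // Convert the current gains to temperature/tint on every delivered frame.
    let kelvin = videoDevice.temperatureAndTintValues(for: videoDevice.deviceWhiteBalanceGains)
    print("Kelvin temp \(kelvin.temperature), tint \(kelvin.tint)")
}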
For the key-value observing, you first set up the observer (e.g. in viewDidAppear), for example:
func addObserver() {
    self.addObserver(self, forKeyPath: "videoDevice.deviceWhiteBalanceGains", options: .new, context: &DeviceWhiteBalanceGainsContext)
}
Keep a reference to the videoDevice, declaring it this way:
@objc dynamic var videoDevice : AVCaptureDevice!
The @objc and dynamic keywords are required for key-value observing.
Now you can implement this function, which will be called every time the observed value changes:
override func observeValue(forKeyPath keyPath: String?, of object: Any?, change: [NSKeyValueChangeKey : Any]?, context: UnsafeMutableRawPointer?) {
    guard let context = context else {
        super.observeValue(forKeyPath: keyPath, of: object, change: change, context: nil)
        return
    }
    if context == &DeviceWhiteBalanceGainsContext {
        // do your work on WB here
    }
}
Finally, you can define the context this way (I have it outside my ViewController):
private var DeviceWhiteBalanceGainsContext = 0
I have implemented both methods in my apps and they both work well.
WARNING: sometimes the WB values will be outside the allowed range (especially at startup) and the API raises an exception. Make sure to handle this; otherwise the app will crash.
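One way to guard against that, sketched here with a hypothetical normalizedGains helper: clamp each gain component into the valid range before converting, since components below 1.0 or above maxWhiteBalanceGain make the conversion APIs raise an exception.
func normalizedGains(for device: AVCaptureDevice) -> AVCaptureDevice.WhiteBalanceGains {
    // Each component must stay within 1.0...maxWhiteBalanceGain,
    // or temperatureAndTintValues(for:) raises an Objective-C exception.
    var gains = device.deviceWhiteBalanceGains
    let maxGain = device.maxWhiteBalanceGain
    gains.redGain = min(max(gains.redGain, 1.0), maxGain)
    gains.greenGain = min(max(gains.greenGain, 1.0), maxGain)
    gains.blueGain = min(max(gains.blueGain, 1.0), maxGain)
    return gains
}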

Is it possible to use KVO to track changes to the `MKMapView.camera.heading` property?

I am trying to track changes to the map orientation using the camera heading property.
// Register for notifications of changes to camera
if let camera = self.mapView?.camera {
    self.camera = camera
    camera.addObserver(self, forKeyPath: "heading", options: .new, context: &MapViewController.myContext)
}
...
override func observeValue(forKeyPath keyPath: String?, of object: Any?, change: [NSKeyValueChangeKey : Any]?, context: UnsafeMutableRawPointer?) {
    if context == &MapViewController.myContext {
        if keyPath == "heading" {
            if let heading = change?[NSKeyValueChangeKey.newKey] as? CLLocationDirection {
                self.heading = heading
            }
        }
    } else {
        super.observeValue(forKeyPath: keyPath, of: object, change: change, context: context)
    }
}
The camera property is declared copy, so you are not observing the live camera instance, just one that was copied when you called the getter. You need to observe the key path "camera.heading" or "camera" on the map view itself and hope that a new camera object is set internally when the heading changes.
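A sketch of registering on the map view instead (reusing myContext from above; whether a heading change actually triggers a KVO notification depends on MKMapView's internals, so verify in testing):
// Observe the map view itself rather than the copied camera object.
mapView?.addObserver(self, forKeyPath: "camera.heading", options: .new, context: &MapViewController.myContext)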

How to get notified when Flash is ON or OFF in Auto flash mode due to low light

I am using UIImagePickerController to present the camera and initially set the flash mode to Auto:
videoCapturer.sourceType = UIImagePickerControllerSourceType.Camera
videoCapturer.mediaTypes = [kUTTypeMovie as String]
videoCapturer.cameraFlashMode = UIImagePickerControllerCameraFlashMode.Auto
self.presentViewController(videoCapturer, animated: true, completion: nil)
I want to get notified when the flash turns on or off according to the lighting.
Just use KVO.
let capture = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo)
capture.addObserver(self, forKeyPath: "torchActive", options: NSKeyValueObservingOptions.New.union(.Initial), context: nil)
And implement this method:
public override func observeValueForKeyPath(keyPath: String?, ofObject object: AnyObject?, change: [String : AnyObject]?, context: UnsafeMutablePointer<Void>) {
    if keyPath == "torchActive" {
        // do something when torchActive changed
    } else {
        super.observeValueForKeyPath(keyPath, ofObject: object, change: change, context: context)
    }
}
Here is Apple's description of torchActive:
@property torchActive
@abstract
    Indicates whether the receiver's torch is currently active.
@discussion
    The value of this property is a BOOL indicating whether the receiver's torch is
    currently active. If the current torchMode is AVCaptureTorchModeAuto and isTorchActive
    is YES, the torch will illuminate once a recording starts (see AVCaptureOutput.h
    -startRecordingToOutputFileURL:recordingDelegate:). This property is key-value observable.
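On newer Swift versions the same observation can be written with block-based KVO, which avoids the string key path. A sketch (the returned observation must be retained, e.g. in a property):
var torchObservation: NSKeyValueObservation?

func startObservingTorch() {
    guard let device = AVCaptureDevice.default(for: .video) else { return }
    // Fires once initially, then again whenever the torch switches on or off.
    torchObservation = device.observe(\.isTorchActive, options: [.initial, .new]) { _, change in
        print("Torch active: \(change.newValue ?? false)")
    }
}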

How to do Key-Value Observing for NSProgress in Swift

I'm currently using the ownCloud iOS SDK to upload a file to my private cloud, and I'm trying to bring the key-value observing mechanism from this example to Swift.
The library forces me to pass a pointer to an NSProgress object into the upload method. I would then like to observe it via KVO and update my UI accordingly.
class OwnCloudManager : NSObject {
    //Key-value observation requires the dynamic keyword.
    //NSProgress must also extend NSObject.
    dynamic var progress: NSProgress = NSProgress()
    let apiURL : NSString = "https://cloud.example.com/remote.php/webdav/"
    let ocCommunication = OCCommunication()

    override init() {
        //configure ocCommunication here, set user name, password
        //and adjust ocCommunication.securityPolicy
        //...
    }

    deinit {
        //remove observer when object is destroyed
        progress.removeObserver(self, forKeyPath: "fractionCompleted")
    }

    func uploadFile(folderURL : NSURL, filename: String) -> NSURLSessionUploadTask? {
        let fileURL = apiURL.stringByAppendingPathComponent(filename)
        let progressPointer = AutoreleasingUnsafeMutablePointer<NSProgress?>.init(&progress)
        let uploadTask = ocCommunication.uploadFileSession(folderURL.path, toDestiny: fileURL, onCommunication: ocCommunication,
            withProgress: progressPointer,
            successRequest: ({ (response, redirectedServer) in
                //...
            }),
            failureRequest: ({ (response, redirectedServer, error) in
                //request failed during execution
            }),
            failureBeforeRequest: ({ error in
                //failure before the request is executed
            })
        )
        progress.addObserver(self, forKeyPath: "fractionCompleted", options: .New, context: nil)
        uploadTask.resume()
        return uploadTask
    }

    override func observeValueForKeyPath(keyPath: String?, ofObject object: AnyObject?, change: [String : AnyObject]?, context: UnsafeMutablePointer<Void>) {
        if let observedProgress = object as? NSProgress where keyPath == "fractionCompleted" {
            //call my delegate here that updates the UI
        }
        else {
            super.observeValueForKeyPath(keyPath, ofObject: object, change: change, context: context)
        }
    }
}
I've set a breakpoint in the observation method but unfortunately it is never called. Can someone tell me how to fix this?
See this code for observing the progress:
override func observeValueForKeyPath(keyPath: String?, ofObject object: AnyObject?, change: [String : AnyObject]?, context: UnsafeMutablePointer<Void>) {
    if keyPath == "fractionCompleted" {
        let progress : NSProgress = object as! NSProgress
        print(progress)
    }
}
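As an aside, on Swift 4 and later the same observation is less error-prone with block-based KVO, which removes the need for manual removeObserver calls. A sketch (the observation must be kept alive, e.g. as a property):
var progressObservation: NSKeyValueObservation?

func observeProgress(_ progress: Progress) {
    // The closure runs every time fractionCompleted changes.
    progressObservation = progress.observe(\.fractionCompleted, options: [.new]) { progress, _ in
        DispatchQueue.main.async {
            // Update the UI here, e.g. a progress bar.
            print("Upload progress: \(progress.fractionCompleted)")
        }
    }
}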

iOS Native Camera AVFoundation brightness exposure difference

AVFoundation brightness/exposure is different from the native camera's. Is it possible to replicate the native camera settings with the AVFoundation framework?
Auto-exposure in Swift 4.2:
// MARK: Exposure Methods
@objc
func tapToExpose(recognizer: UIGestureRecognizer) {
    if activeInput.device.isExposurePointOfInterestSupported {
        // The tap location comes from the gesture recognizer in the preview's coordinates.
        let point = recognizer.location(in: camPreview)
        // Convert it to the coordinate space of the capture device.
        let pointOfInterest = previewLayer.captureDevicePointConverted(fromLayerPoint: point)
        showMarkerAtPoint(point: point, marker: exposureMarker)
        exposeAtPoint(pointOfInterest)
    }
}

func exposeAtPoint(_ point: CGPoint) {
    let device = activeInput.device
    if device.isExposurePointOfInterestSupported, device.isExposureModeSupported(.continuousAutoExposure) {
        do {
            try device.lockForConfiguration()
            device.exposurePointOfInterest = point
            device.exposureMode = .continuousAutoExposure
            if device.isExposureModeSupported(.locked) {
                // Register for KVO so observeValue(forKeyPath:) fires when the device
                // finishes adjusting. kExposure is the observed key-path constant
                // (presumably "adjustingExposure"), defined elsewhere.
                device.addObserver(self, forKeyPath: kExposure, options: .new, context: &adjustingExposureContext)
            }
            device.unlockForConfiguration()
        }
        catch {
            print("Error exposing on POI: \(String(describing: error.localizedDescription))")
        }
    }
}
// MARK: KVO
override func observeValue(forKeyPath keyPath: String?, of object: Any?, change: [NSKeyValueChangeKey : Any]?, context: UnsafeMutableRawPointer?) {
// First , check to make sure that the context matches the adjusting exposure context,
// Otherwise, pass the observation on to super.
if context == &adjustingExposureContext {
let device = object as! AVCaptureDevice
// then determine if the camera has stopped adjusting exposure , and if the locked mode is supported.
if !device.isAdjustingExposure , device.isExposureModeSupported(.locked){
// remove self from the observer to stop subsequent notification,
device.removeObserver(self, forKeyPath: kExposure, context: &adjustingExposureContext)
DispatchQueue.main.async {
do{
try device.lockForConfiguration()
device.exposureMode = .locked
device.unlockForConfiguration()
}
catch{
print("Error exposing on POI: \(String(describing: error.localizedDescription))")
}
}// DispatchQueue.main.async
}// if !device.isAdjustingExposure , device.isExposureModeSupported(.locked)
}// if context == &adjustingExposureContext {
else{
super.observeValue(forKeyPath: keyPath, of: object, change: change, context: context)
}
}
If you're looking for auto-exposure, set a key-value observer for "adjustingExposure" and set the exposure mode to AVCaptureExposureModeContinuousAutoExposure. If you're trying to implement manual exposure, have a look at this WWDC session on camera controls.
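A minimal sketch of that setup, assuming device is a configured AVCaptureDevice and using block-based KVO for the adjusting-exposure observation:
do {
    try device.lockForConfiguration()
    if device.isExposureModeSupported(.continuousAutoExposure) {
        device.exposureMode = .continuousAutoExposure
    }
    device.unlockForConfiguration()
} catch {
    print("Could not lock device for configuration: \(error)")
}

// Keep the returned observation alive for as long as you need updates.
let exposureObservation = device.observe(\.isAdjustingExposure, options: [.new]) { _, change in
    print("Adjusting exposure: \(change.newValue ?? false)")
}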
