I have the following code to determine ProRes and HDR support on iOS devices:
extension AVCaptureDevice.Format {
    var supports10bitHDR: Bool {
        let mediaType = CMFormatDescriptionGetMediaType(formatDescription)
        let mediaSubtype = CMFormatDescriptionGetMediaSubType(formatDescription)
        return mediaType == kCMMediaType_Video && mediaSubtype == kCVPixelFormatType_420YpCbCr10BiPlanarVideoRange
    }

    var supportsProRes422: Bool {
        let mediaType = CMFormatDescriptionGetMediaType(formatDescription)
        let mediaSubtype = CMFormatDescriptionGetMediaSubType(formatDescription)
        return mediaType == kCMMediaType_Video && mediaSubtype == kCVPixelFormatType_422YpCbCr10BiPlanarVideoRange
    }
}
However, on an iPad Pro with the M2 chip, supportsProRes422 returns false for every format in device.formats (for the default wide-angle camera). Is this an AVFoundation bug or intentional? How do I enable ProRes recording on an M2 iPad Pro?
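As a separate cross-check (a sketch, not necessarily the definitive way to detect ProRes capture support), the movie file output can be asked which codecs it will actually write once it is attached to a configured session. The session parameter below is assumed to already contain the camera input, and .proRes422 requires iOS 15 or later:

import AVFoundation

// Sketch: report whether the movie file output offers ProRes 422 for the
// current session configuration. availableVideoCodecTypes is only populated
// once the output is attached to a session that has a video connection.
@available(iOS 15.0, *)
func proResIsAvailable(in session: AVCaptureSession) -> Bool {
    let movieOutput = AVCaptureMovieFileOutput()
    guard session.canAddOutput(movieOutput) else { return false }
    session.addOutput(movieOutput)
    return movieOutput.availableVideoCodecTypes.contains(.proRes422)
}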
Related
I'm building a voice call app for Android using WebView. Everything works fine, except that on Android 12 I cannot use the earpiece (or, on some phones, even wired headphones) for audio output; the audio always plays through the loudspeaker. Some users also report that they can hear the internal ring (which is a MediaPlayer object) in the earpiece, but once the call is answered it switches to the loudspeaker. The worst part is that I'm not able to reproduce this issue, as everything works perfectly on all of my test phones (Android 12).
My code:
private fun setAudioOutDevice(target: AudioDeviceInfo?) {
    //need some delay for bluetooth sco
    var audioChangeDelay: Long = 15
    if (System.currentTimeMillis() - lastAudioChangeTime <= 300) audioChangeDelay = 300
    Handler(Looper.getMainLooper()).postDelayed({
        var targetDevice = target
        //no device selected. scan all output devices and select one automatically
        if (targetDevice == null) {
            var wiredHeadsetDevice: AudioDeviceInfo? =
                getAudioDevice(AudioDeviceInfo.TYPE_WIRED_HEADSET)
            if (wiredHeadsetDevice == null) wiredHeadsetDevice =
                getAudioDevice(AudioDeviceInfo.TYPE_WIRED_HEADPHONES)
            val bluetoothDevice: AudioDeviceInfo? =
                getAudioDevice(AudioDeviceInfo.TYPE_BLUETOOTH_SCO)
            val speakerDevice: AudioDeviceInfo? =
                getAudioDevice(AudioDeviceInfo.TYPE_BUILTIN_SPEAKER)
            val earpieceDevice: AudioDeviceInfo? =
                getAudioDevice(AudioDeviceInfo.TYPE_BUILTIN_EARPIECE)
            //update global variables
            isBluetoothAvailable = bluetoothDevice != null
            //disable BT if wired headset connected
            if (wiredHeadsetDevice != null) isBluetoothAvailable = false
            //choose an output device
            targetDevice =
                if (callType == videoCall && bluetoothDevice == null && wiredHeadsetDevice == null) speakerDevice
                else if (callType == videoCall && wiredHeadsetDevice == null && !isBluetoothAudioEnabled) speakerDevice
                else wiredHeadsetDevice
                    ?: if (isBluetoothAvailable && isBluetoothAudioEnabled && bluetoothDevice != null) bluetoothDevice
                    else earpieceDevice ?: speakerDevice
            //no earpiece
            if (earpieceDevice == null) {
                setBigStatus("Unable to access Earpiece", 0)
            }
        }
        //set output device
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.S) {
            var success = false
            try {
                // am.clearCommunicationDevice()
                if (targetDevice != null) success = am.setCommunicationDevice(targetDevice)
            } catch (e: Exception) {
                debugMsg(e.message.toString())
            }
            if (!success) setBigStatus(getString(R.string.unable_to_set_audio), 0)
        }
        //older devices, android 11 or lower
        else {
            am.mode = AudioManager.MODE_IN_COMMUNICATION
            if (targetDevice?.type == AudioDeviceInfo.TYPE_BUILTIN_SPEAKER) {
                changeSco(false)
                am.isSpeakerphoneOn = true
            } else {
                if (targetDevice?.type == AudioDeviceInfo.TYPE_BLUETOOTH_SCO) changeSco(true)
                else changeSco(false)
                am.isSpeakerphoneOn = false
            }
        }
    }, audioChangeDelay)
}

private fun changeSco(enable: Boolean) {
    if (enable) {
        am.startBluetoothSco()
        am.isBluetoothScoOn = true
    } else {
        am.stopBluetoothSco()
        am.isBluetoothScoOn = false
    }
}

private fun getAudioDevice(type: Int): AudioDeviceInfo? {
    val audioDevices = am.getDevices(AudioManager.GET_DEVICES_OUTPUTS)
    for (deviceInfo in audioDevices) {
        if (type == deviceInfo.type) return deviceInfo
    }
    return null
}
The above code works fine on Android 11 and lower, and even on some Android 12 phones. How can I make this work on all devices?
There is an issue tracker here, but it seems that nobody is working on it.
UPDATE: Today I tried it on a Pixel 3a and it gives mixed behavior. Sometimes it plays on the earpiece or wired headset, but for most calls it plays on the loudspeaker only. Even if I don't set any mode, it automatically switches to MODE_IN_COMMUNICATION right after the stream starts. I think Android is trying to figure out the proper mode by itself, and that is interfering with my app's logic.
I am trying to make an app with audio and video calling using WebRTC.
Remote video and audio are working properly in my app, but my local stream is not appearing on the remote client's side.
Here is what I have written to add a video track:
let videoSource = self.rtcPeerFactory.videoSource()
let videoCapturer = RTCCameraVideoCapturer(delegate: videoSource)

guard let frontCamera = (RTCCameraVideoCapturer.captureDevices().first { $0.position == .front }),
      // choose highest res
      let format = (RTCCameraVideoCapturer.supportedFormats(for: frontCamera).sorted { (f1, f2) -> Bool in
          let width1 = CMVideoFormatDescriptionGetDimensions(f1.formatDescription).width
          let width2 = CMVideoFormatDescriptionGetDimensions(f2.formatDescription).width
          return width1 < width2
      }).last,
      // choose highest fps
      let fps = (format.videoSupportedFrameRateRanges.sorted { return $0.maxFrameRate < $1.maxFrameRate }.last) else {
    print(.error, "Error in createLocalVideoTrack")
    return nil
}

videoCapturer.startCapture(with: frontCamera,
                           format: format,
                           fps: Int(fps.maxFrameRate))

self.callManagerDelegate?.didAddLocalVideoTrack(videoTrack: videoCapturer)

let videoTrack = self.rtcPeerFactory.videoTrack(with: videoSource, trackId: K.CONSTANT.VIDEO_TRACK_ID)
And this is to add the audio track:
let constraints: RTCMediaConstraints = RTCMediaConstraints.init(mandatoryConstraints: [:], optionalConstraints: nil)
let audioSource: RTCAudioSource = self.rtcPeerFactory.audioSource(with: constraints)
let audioTrack: RTCAudioTrack = self.rtcPeerFactory.audioTrack(with: audioSource, trackId: K.CONSTANT.AUDIO_TRACK_ID)
My full WebRTC log is attached here.
Some of the logs I am getting (I think something is wrong here):
(thread.cc:303): Waiting for the thread to join, but blocking calls have been disallowed
(basic_port_allocator.cc:1035): Port[31aba00:0:1:0:relay:Net[ipsec4:2405:204:8888:x:x:x:x:x/64:VPN/Unknown:id=2]]: Port encountered error while gathering candidates.
...
(basic_port_allocator.cc:1017): Port[38d7400:audio:1:0:local:Net[en0:192.168.1.x/24:Wifi:id=1]]: Port completed gathering candidates.
(basic_port_allocator.cc:1035): Port[3902c00:video:1:0:relay:Net[ipsec5:2405:204:8888:x:x:x:x:x/64:VPN/Unknown:id=3]]: Port encountered error while gathering candidates.
Finally, I found the solution: it was due to the TCP protocol in the TURN server.
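For anyone who hits the same thing, this is roughly what the corrected ICE server setup looks like; the host names, ports, username and credential below are placeholders rather than my real values. Offering the TURN server over both UDP and TCP lets clients on restrictive networks (or VPNs, as in the log above) fall back to TCP:

// Sketch of an RTCConfiguration with both UDP and TCP TURN URLs
// (placeholder server and credentials); pass this configuration to
// rtcPeerFactory.peerConnection(with:constraints:delegate:).
let config = RTCConfiguration()
config.iceServers = [
    RTCIceServer(urlStrings: ["stun:stun.example.com:3478"]),
    RTCIceServer(urlStrings: ["turn:turn.example.com:3478?transport=udp",
                              "turn:turn.example.com:443?transport=tcp"],
                 username: "user",
                 credential: "secret")
]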
The following builds, runs, and prints the non-error console message at the end when passed two valid MIDIEndpointRefs, but MIDI events are not passed through from source to destination as expected. Is something missing?
func createThru2(source: MIDIEndpointRef?, dest: MIDIEndpointRef?) {
    var connectionRef = MIDIThruConnectionRef()
    var params = MIDIThruConnectionParams()
    MIDIThruConnectionParamsInitialize(&params)

    if let s = source {
        let thruEnd = MIDIThruConnectionEndpoint(endpointRef: s, uniqueID: MIDIUniqueID(1))
        params.sources.0 = thruEnd
        params.numSources = 1
        print("thru source is \(s)")
    }
    if let d = dest {
        let thruEnd = MIDIThruConnectionEndpoint(endpointRef: d, uniqueID: MIDIUniqueID(2))
        params.destinations.0 = thruEnd
        params.numDestinations = 1
        print("thru dest is \(d)")
    }

    var localParams = params
    let nsdata = withUnsafePointer(to: &params) { p in
        NSData(bytes: p, length: MIDIThruConnectionParamsSize(&localParams))
    }

    let status = MIDIThruConnectionCreate(nil, nsdata, &connectionRef)
    if status == noErr {
        print("created thru")
    } else {
        print("error creating thru \(status)")
    }
}
Your code works fine in Swift 5 on macOS 10.13.6. A thru connection is established and MIDI events are passed from source to destination. So the problem does not seem to be in the function you posted, but in the endpoints you provided or in using Swift 4.2.
I used the following code to call your function:
let source: MIDIEndpointRef = MIDIGetSource(5)
let dest: MIDIEndpointRef = MIDIGetDestination(9)
createThru2(source: source, dest: dest)
5 is a MIDI keyboard and 9 is a MIDI port on my audio interface.
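In case it helps with verifying the endpoints, here is a small helper of my own (not part of the code above) that prints every source and destination with its index and display name, so you can confirm that the indices you pass refer to real devices:

import CoreMIDI

// Sketch: list CoreMIDI sources and destinations with their indices.
func dumpEndpoints() {
    func displayName(of endpoint: MIDIEndpointRef) -> String {
        var name: Unmanaged<CFString>?
        guard MIDIObjectGetStringProperty(endpoint, kMIDIPropertyDisplayName, &name) == noErr,
              let cfName = name?.takeRetainedValue() else { return "?" }
        return cfName as String
    }
    for i in 0..<MIDIGetNumberOfSources() {
        print("source \(i): \(displayName(of: MIDIGetSource(i)))")
    }
    for i in 0..<MIDIGetNumberOfDestinations() {
        print("destination \(i): \(displayName(of: MIDIGetDestination(i)))")
    }
}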
Hmmm. I just tested this same code with Swift 5 on macOS 12.4, and it doesn't seem to work for me. MIDIThruConnectionCreate returns noErr, but no MIDI packets seem to flow.
I'm using the MIDIKeys virtual source, and MIDI Monitor virtual destination.
I'll try it with some hardware and see what happens.
How can I check whether “Allow Full Access” is enabled in iOS 11?
I have tried multiple methods which do not seem to be working in iOS 10 or iOS 11.
Here is one that I tried:
func hasFullAccess() -> Bool {
    var hasFullAccess = false
    if #available(iOSApplicationExtension 10.0, *) {
        let pasty = UIPasteboard.general
        if pasty.hasURLs || pasty.hasColors || pasty.hasStrings || pasty.hasImages {
            hasFullAccess = true
        } else {
            pasty.string = "TEST"
            if pasty.hasStrings {
                hasFullAccess = true
                pasty.string = ""
            }
        }
    } else {
        // Fallback on earlier versions
        var clippy: UIPasteboard?
        clippy = UIPasteboard.general
        if clippy != nil {
            hasFullAccess = true
        }
    }
    return hasFullAccess
}
It returns true every time, and I am running this on a device, not on the simulator.
First of all, it looks like you're checking the iOS version incorrectly. It usually looks like if #available(iOS 11.0, *), where the version you pass is the minimum OS that supports the API you're about to use. Your declaration doesn't mention iOS 11 at all; it only distinguishes iOS 10 and later from everything older.
Second, you're using ||, the OR operator, so if any one of those checks is true the whole expression returns true. You need &&, the AND operator, if you want to require that all of them match.
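To illustrate the availability point (just the check itself, not a full-access test), the pattern I mean looks like this:

// The version passed to #available is the minimum OS whose API the
// branch relies on, so code written against iOS 11 APIs is gated like this.
if #available(iOSApplicationExtension 11.0, *) {
    // iOS 11 and later
} else {
    // Fallback for iOS 10 and earlier
}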
Whatever I try, I cannot get the battery level back from the iOS SocketScan API. I am using version 10.3.36; here is my code so far:
func onDeviceArrival(result: SKTRESULT, device deviceInfo: DeviceInfo!) {
    print("onDeviceArrival:\(deviceInfo.getName())")
    scanApiHelper.postGetBattery(deviceInfo, target: self, response: #selector(onGetBatteryInfo))
}

func onGetBatteryInfo(scanObj: ISktScanObject) {
    let result: SKTRESULT = scanObj.Msg().Result()
    print("GetBatteryInfo status:\(result)")
    if result == ESKT_NOERROR {
        let batterylevel = scanObj.Property().getUlong()
        print("Battery is:\(batterylevel)")
    } else {
        print("Error GetBatteryInfo status:\(result)")
    }
}
However, the values I get back are:
GetBatteryInfo status:0
Battery is:1677741312
If my code is correct, how do I turn the battery result into something meaningful, like a percentage? And if I'm way off, how do I get back information like the battery level, firmware version, etc.?
Thanks
David
EDIT: SKTBATTERY_GETCURLEVEL isn't supported in Swift. However, the docs explain that the battery level response includes the min, current and max levels encoded in the first, second and third bytes, respectively.
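As a quick sanity check of that layout against the raw value above (my own arithmetic, not from the docs): 1677741312 is 0x64004D00, and its second byte is 0x4D, i.e. 77, which is a plausible charge percentage.

let raw: UInt32 = 1_677_741_312   // the value printed in the question, 0x64004D00
let current = (raw >> 8) & 0xFF   // second byte = current level
print(current)                    // 77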
The following is equivalent to using SKTBATTERY_GETCURLEVEL
Swift
func onGetBatteryInfo(scanObj: ISktScanObject) {
    let result: SKTRESULT = scanObj.Msg().Result()
    if SKTSUCCESS(result) {
        let batteryInfo = scanObj.Property().getUlong()
        // min, current and max are packed into the first, second and third bytes
        let batteryMin = batteryInfo & 0xff
        let batteryCurrent = (batteryInfo >> 8) & 0xff
        let batteryMax = (batteryInfo >> 16) & 0xff
        let batteryPercentage = (batteryCurrent - batteryMin) * 100 / (batteryMax - batteryMin)
        print("Battery is:\(batteryPercentage)")
        self.setBatteryLevel = batteryPercentage
        self.tableView.reloadData()
    } else {
        print("Error GetBatteryInfo status:\(result)")
    }
}
Objective-C
-(void) onGetBatteryInfo:(ISktScanObject*)scanObj {
    SKTRESULT result = [[scanObj Msg] Result];
    if (SKTSUCCESS(result)) {
        long batteryLevel = SKTBATTERY_GETCURLEVEL([[scanObj Property] getUlong]);
        NSLog(@"BatteryInfo %ld", batteryLevel);
        [self setBatteryLevel:batteryLevel];
        [self.tableView reloadData];
    } else {
        NSLog(@"Error GetBatteryInfo status: %ld", result);
    }
}
Here's the code I use. There's a variable defined in the AppDelegate for batteryPercentage, and that is read whenever the value is needed. The value is updated every 120 seconds by a timer; this way actions can occur as the level drops, etc.
func onBatteryLevel(scanObj: ISktScanObject) {
    let result: SKTRESULT = scanObj.Msg().Result()
    if SKTRESULT(result) > -1 {
        let property: ISktScanProperty = scanObj.Property()
        var batteryLevel = property.getUlong()
        #if arch(x86_64) || arch(arm64)
        batteryLevel = (batteryLevel << 48) >> 56
        #else
        batteryLevel = (batteryLevel << (48 - 32)) >> (56 - 32)
        #endif
        batteryPercentage = Int(batteryLevel)
    } else {
        debug("data error \(result)")
    }
}
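The timer half is not shown above; it is roughly the following, where requestBatteryLevel() is a placeholder for however you issue the battery request to ScanApiHelper (for example the postGetBattery call from the question):

// Sketch: poll the battery level every 120 seconds. requestBatteryLevel()
// is a placeholder for the actual ScanApiHelper request; batteryTimer is a
// stored property so the timer can be invalidated later.
batteryTimer = Timer.scheduledTimer(withTimeInterval: 120, repeats: true) { [weak self] _ in
    self?.requestBatteryLevel()
}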
For Swift 4 I just came across this problem and came up with the following solution.
var lastDeviceConnected: CaptureHelperDevice? {
    didSet {
        guard let lastDevice = self.lastDeviceConnected else { return }
        lastDevice.getBatteryLevelWithCompletionHandler { result, batteryLevel in
            guard result == SKTResult.E_NOERROR, let batteryLevel = batteryLevel else { return }
            let minimum = SKTHelper.getMinimumLevel(fromBatteryLevel: Int(batteryLevel))
            let maximum = SKTHelper.getMaximumLevel(fromBatteryLevel: Int(batteryLevel))
            let current = SKTHelper.getCurrentLevel(fromBatteryLevel: Int(batteryLevel))
            print("minimum: \(minimum)")
            print("maximum: \(maximum)")
            print("current: \(current)")
            // current is our battery level in %
            // minimum and maximum could, for example, be used
            // for a slider or something that visually presents
            // the battery status
        }
    }
}
In my example I'm not handling the case where there is no device or where the battery status couldn't be retrieved as expected; I simply guard and return. In your app you might want to handle those cases.
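For completeness, handling the failure path could look something like this (a sketch; the print is a stand-in for whatever error feedback your app uses):

// Sketch: same completion handler, but surfacing the failure instead of
// silently returning.
lastDevice.getBatteryLevelWithCompletionHandler { result, batteryLevel in
    guard result == SKTResult.E_NOERROR, let batteryLevel = batteryLevel else {
        print("Could not read battery level, result: \(result)")
        return
    }
    let current = SKTHelper.getCurrentLevel(fromBatteryLevel: Int(batteryLevel))
    print("Battery at \(current)%")
}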