I am working on a video/audio call app where I need to provide four options for the audio output:
speaker, built-in mic, any BLE device that supports audio, and no audio output.
Below are the functions I have used:
static func setBuiltInMic() {
    // Look through the available inputs and prefer the built-in microphone.
    guard let inputs = audioSession.availableInputs else { return }
    for input in inputs where input.portType == .builtInMic {
        do {
            try audioSession.setPreferredInput(input)
        } catch {
            print("Setting built-in mic port: \(error.localizedDescription)")
        }
    }
}
static func setAndCheckBLEAudioPort() -> Bool {
    // Look through the available inputs for a Bluetooth HFP device.
    guard let inputs = audioSession.availableInputs else { return false }
    for input in inputs where input.portType == .bluetoothHFP {
        do {
            try audioSession.setPreferredInput(input)
            return true
        } catch {
            print("Setting BLE port: \(error.localizedDescription)")
            return false
        }
    }
    return false
}
static func setupAudioSession(isSpeakerEnabled: Bool) {
    do {
        try audioSession.setCategory(.playAndRecord)
        try audioSession.setMode(.voiceChat)
        try audioSession.overrideOutputAudioPort(isSpeakerEnabled ? .speaker : .none)
        try audioSession.setActive(true, options: [])
    } catch let error as NSError {
        print("Fail: \(error.localizedDescription)")
    }
}
But this doesn't work: audio keeps coming from a different source (such as the speaker) even when I try to mute it using setupAudioSession.
Does anyone have an idea or a reference for me to look into?
The code is working fine; it was an issue with a third-party library we used for audio and video calls prior to Twilio.
Related
I am trying to build an audio app for Apple Watch, but whenever I lower my wrist, the audio stops playing.
I have turned background mode on as well.
Can anyone please help me with this? I am stuck at this part.
Here is the code I have used for playing audio:
func play(url: URL) {
    do {
        if #available(watchOSApplicationExtension 4.0, *) {
            // Keep the app frontmost a little longer after wrist-down.
            WKExtension.shared().isFrontmostTimeoutExtended = true
        }
        player = try AVAudioPlayer(contentsOf: url)
        player?.prepareToPlay()
        player?.delegate = self
        player?.play()
        print("Playing audio, current time: \(String(describing: player?.currentTime))")
    } catch {
        player = nil
        print("AVAudioPlayer init failed: \(error.localizedDescription)")
    }
}
Make sure you are playing from audio Data, not an audio URL, and that you have added policy: .longFormAudio to your category setup. According to Apple's documentation, these two settings have to be set for audio to play in background mode.
// Set up the session.
let session = AVAudioSession.sharedInstance()
do {
    try session.setCategory(
        .playback,
        mode: .default,
        policy: .longFormAudio
    )
} catch let error {
    fatalError("*** Unable to set up the audio session: \(error.localizedDescription) ***")
}

// Set up the player.
let player: AVAudioPlayer
do {
    player = try AVAudioPlayer(data: audioData)
} catch let error {
    print("*** Unable to set up the audio player: \(error.localizedDescription) ***")
    // Handle the error here.
    return
}

// Activate the session and request the route.
session.activate(options: []) { (success, error) in
    guard error == nil else {
        print("*** An error occurred: \(error!.localizedDescription) ***")
        // Handle the error here.
        return
    }
    // Play the audio file.
    player.play()
}
I have tested this code and it works only over Bluetooth headphones in the watch app, not through the watch's speaker.
Simply turning on background mode is not enough. You also need to activate the AVAudioSession.
It's all well documented by Apple here: Playing Background Audio.
Configure and Activate the Audio Session
Before you can play audio, you need to set up and activate the audio session.
try session.setCategory(AVAudioSession.Category.playback,
                        mode: .default,
                        policy: .longForm,
                        options: [])
Next, activate the session by calling the activate(options:completionHandler:) method.
session.activate(options: []) { (success, error) in
    // Check for an error and play audio.
}
Ref: https://developer.apple.com/documentation/watchkit/playing_background_audio
Example:
var player: AVAudioPlayer?
let session: AVAudioSession = .sharedInstance()

func prepareSession() {
    do {
        try session.setCategory(AVAudioSession.Category.playback,
                                mode: .default,
                                policy: .longForm,
                                options: [])
    } catch {
        print(error)
    }
}

func play(url: URL) {
    do {
        player = try AVAudioPlayer(contentsOf: url)
    } catch {
        print(error)
        return
    }
    session.activate(options: []) { (success, error) in
        guard error == nil else {
            print(error!)
            return
        }
        // Play the audio file.
        self.player?.play()
    }
}
Simple Test:
prepareSession()
if let url = Bundle.main.url(forResource: "test", withExtension: "mp3") {
    play(url: url)
} else {
    print("test.mp3 not found in project: put any mp3 file in and name it so")
}
I'm implementing video chat using WebRTC. I want to use the main loudspeaker when the other participant joins the session. I wrote the following code, but I'm getting a low voice volume (the voice comes from the ear speaker).
func audioSetting() {
    RTCAudioSession.sharedInstance().isAudioEnabled = true
    let session = AVAudioSession.sharedInstance()
    do {
        try session.setCategory(AVAudioSession.Category.playAndRecord, mode: .videoChat, options: [])
        if self.speakerOn {
            try session.overrideOutputAudioPort(.none)
        } else {
            try session.overrideOutputAudioPort(.speaker)
        }
        try session.setActive(true)
        self.speakerOn = !self.speakerOn
    } catch let error {
        print("Couldn't set audio to speaker: \(error)")
    }
}
I am working on WebRTC with Socket.IO.
func setSpeakerStates(enabled: Bool) {
    let session = AVAudioSession.sharedInstance()
    try? session.setCategory(AVAudioSession.Category.playAndRecord)
    try? session.setMode(AVAudioSession.Mode.voiceChat)
    if enabled {
        try? session.overrideOutputAudioPort(AVAudioSession.PortOverride.speaker)
    } else {
        try? session.overrideOutputAudioPort(AVAudioSession.PortOverride.none)
    }
    try? session.setActive(true)
}
Please try calling this method at the end of viewDidLoad, after adding the audio and video streams.
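A minimal sketch of one way to wire this up (the stream-setup step is a placeholder for your own WebRTC code):
override func viewDidLoad() {
    super.viewDidLoad()
    // ... add your audio and video streams here (placeholder) ...
    setSpeakerStates(enabled: true) // route audio to the loudspeaker
}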
I'm trying to enable video calling in my Swift app using Linphone.
I was able to enable audio calling, but I can't make it work with video. The app always crashes if I enable this line:
linphone_call_params_enable_video(linCallParams, 1)
I only want to receive video and audio here.
@objc func startVideoCall() {
    linphone_core_enable_video_display(theLinphone.lc, 1)
    linphone_core_enable_video_capture(theLinphone.lc, 1)
    let linCallParams = linphone_core_create_call_params(theLinphone.lc, nil)
    linphone_call_params_enable_video(linCallParams, 1)
    linphone_call_params_set_video_direction(linCallParams, LinphoneMediaDirectionSendRecv)
    linphone_call_params_set_audio_direction(linCallParams, LinphoneMediaDirectionSendRecv)
    let call = linphone_core_invite_with_params(theLinphone.lc, calleeAccount, linCallParams)
    linphone_core_set_native_video_window_id(theLinphone.lc, &videoStreamView)
    linphone_core_set_native_preview_window_id(theLinphone.lc, &videoStreamPreview)
    do {
        try audioSession.setActive(true)
    } catch {
        print("Audio error: \(error.localizedDescription)")
    }
    linphone_call_params_unref(linCallParams)
}
This code combo fixed my issue:
// Bridge a Swift object to the UnsafeRawPointer the Linphone C API expects.
private func bridge<T: AnyObject>(obj: T) -> UnsafeRawPointer {
    let pointer = Unmanaged.passUnretained(obj).toOpaque()
    return UnsafeRawPointer(pointer)
}

let viewPointer = UnsafeMutableRawPointer(mutating: bridge(obj: view))
linphone_core_set_native_video_window_id(theLinphone.lc, viewPointer)

let previewPointer = UnsafeMutableRawPointer(mutating: bridge(obj: previewStream))
linphone_core_set_native_preview_window_id(theLinphone.lc, previewPointer)
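Note that passUnretained does not retain the views, so keep strong references to them for the lifetime of the call, for example as properties (the names here are illustrative):
import UIKit

final class CallViewController: UIViewController {
    // Strong references keep the bridged pointers valid during the call.
    let videoStreamView = UIView()     // remote video render target
    let videoStreamPreview = UIView()  // local camera preview
}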
I found this sample code online: https://developer.apple.com/library/content/samplecode/AVCam/Introduction/Intro.html
I am trying to change the input microphone from the default microphone to the bottom microphone on an iPhone. Does anyone have any experience doing this in Swift? The only examples I've found were in Obj-C and caused errors when I implemented them. I'm using Apple's AVCam sample app for reference; the audio part is included below.
// Add audio input.
do {
    let audioDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeAudio)
    let audioDeviceInput = try AVCaptureDeviceInput(device: audioDevice)
    if session.canAddInput(audioDeviceInput) {
        session.addInput(audioDeviceInput)
    } else {
        print("Could not add audio device input to the session")
    }
} catch {
    print("Could not create audio device input: \(error)")
}
You should try setting the category of the session using:
try? session.setCategory(AVAudioSession.Category.playAndRecord, options: .defaultToSpeaker)
This should make it use the bottom microphone by default.
If you only need audio, you should use AVAudioSession: https://developer.apple.com/reference/avfoundation/avaudiosession
Untested sample code you could play around with:
import AVFoundation

private var session: AVAudioSession!
private var input: AVAudioSessionPortDescription!

// ...

session = AVAudioSession.sharedInstance()
do {
    try session.setCategory(.record)
    // Fetch the built-in mic.
    if let availableInputs = session.availableInputs {
        for inputSource in availableInputs {
            if inputSource.portType == .builtInMic {
                input = inputSource
                break
            }
        }
        // Set the preferred data source by location (the mic at the bottom).
        if let dataSources = input.dataSources {
            for dataSource in dataSources {
                if dataSource.location == .lower {
                    try input.setPreferredDataSource(dataSource)
                    break
                }
            }
        }
        try session.setPreferredInput(input)
    }
    // ...
} catch {
    // ...
}
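For the preferred input and data source to take effect when recording starts, you would typically also activate the session afterwards; a minimal sketch:
do {
    try session.setActive(true)
} catch {
    print("Could not activate audio session: \(error)")
}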
I built a camera app with auto capture. I want to keep the flash on as long as the camera is on. I used the following code:
cameraDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo)
if cameraDevice.hasTorch {
    do {
        try cameraDevice.lockForConfiguration()
        if cameraDevice.isTorchActive {
            cameraDevice.torchMode = AVCaptureTorchMode.on
        } else {
            // Sets the torch intensity to 80%.
            try cameraDevice.setTorchModeOnWithLevel(0.8)
        }
        cameraDevice.unlockForConfiguration()
    } catch {
        print(error)
    }
}
But when I run the app, the flash fires only once and then goes off. How can I solve this problem?
Call this method inside your camera-activation/open function, or whenever the device camera becomes active:
func flashActive() {
    if let currentDevice = AVCaptureDevice.default(for: AVMediaType.video), currentDevice.hasTorch {
        do {
            try currentDevice.lockForConfiguration()
            let torchOn = !currentDevice.isTorchActive
            try currentDevice.setTorchModeOn(level: 1.0) // or whatever level you want
            currentDevice.torchMode = torchOn ? .on : .off
            currentDevice.unlockForConfiguration()
        } catch {
            print(error)
        }
    }
}
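For example, you might call it once the capture session is running (a sketch; captureSession is assumed to be your configured AVCaptureSession):
override func viewDidAppear(_ animated: Bool) {
    super.viewDidAppear(animated)
    captureSession.startRunning() // assumption: your configured capture session
    flashActive()                 // turn the torch on while the camera is active
}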