WebRTC iOS: hearing myself on my own audio track (echo) - ios

I've configured WebRTC in a native iOS app, but I'm facing a problem:
When I'm talking, I hear myself. I tried turning off every RTCEAGLVideoView and I still heard myself. Only when I comment out the following piece of code do I stop hearing myself:
let audioTrack = pcFactory?.audioTrack(withTrackId: "localAudioTrack")
localStream?.addAudioTrack(audioTrack!)
Also, I've tried audioTrack?.isEnabled = false, and it works, but then the other side of the conversation can't hear me either.
So, is there any way to mute the audio track locally but leave it unmuted for the remote RTCMediaStream? Thanks. My PeerConnection setup is listed below:
init(peerId: String, sender: RtcDataSender, isPublisher: Bool, isVideo: Bool = true, isAudio: Bool = true) {
    super.init()
    self.audio = isAudio
    self.video = isVideo
    self.rtcDataSender = sender
    self.peerId = peerId
    let decoderFactory = ARDVideoDecoderFactory.init()
    let encoderFactory = ARDVideoEncoderFactory.init()
    rtcDictionaryFactory = RtcDictionaryFactory()
    // pcFactory = RTCPeerConnectionFactory.init(encoderFactory: encoderFactory, decoderFactory: decoderFactory)
    pcFactory = RTCPeerConnectionFactory.init()
    let config = RTCConfiguration.init()
    config.iceServers = servers
    let constraints: [String: String] = ["OfferToReceiveAudio": "\(isAudio)", "OfferToReceiveVideo": "\(isVideo)"]
    let constraintsConnection: [String: String] = ["kRTCMediaConstraintsMinWidth": "640",
                                                   "kRTCMediaConstraintsMinHeight": "480",
                                                   "kRTCMediaConstraintsMinFrameRate": "15",
                                                   "kRTCMediaConstraintsMaxWidth": "1280",
                                                   "kRTCMediaConstraintsMaxHeight": "720",
                                                   "kRTCMediaConstraintsMaxFrameRate": "30"]
    let mediaConnectionConstraints = RTCMediaConstraints.init(mandatoryConstraints: constraintsConnection, optionalConstraints: nil)
    rtcMediaConstaints = RTCMediaConstraints.init(mandatoryConstraints: constraints, optionalConstraints: nil)
    peerConnection = pcFactory?.peerConnection(with: config, constraints: rtcMediaConstaints!, delegate: self)
    if isPublisher {
        localStream = pcFactory?.mediaStream(withStreamId: "localStream")
        if isVideo {
            let track = self.createLocalVideoTrack()
            localStream?.addVideoTrack(track!)
        }
        if isAudio {
            let audioTrack = pcFactory?.audioTrack(withTrackId: "localAudioTrack")
            localStream?.addAudioTrack(audioTrack!)
            audioTrack?.isEnabled = false
            // audioTrack?.source.volume = 0
        }
        rtcDataSender?.localStreamAdded(peerId: peerId, stream: localStream!, isVideo: isVideo)
        peerConnection?.add(localStream!)
        if isVideo {
            didCreateCapturer(capturer: capturer!)
        }
    }
}
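One hedged line of attack, assuming the echo is the server or remote peer looping your own audio back in a remote stream (WebRTC does not normally play a local track out of the device's own speaker): disable only the received copy of your track and leave the sent one enabled. The function name and the ownTrackId matching below are illustrative assumptions, not APIs from the question:

// Illustrative sketch: mute only the *received* copy of your own audio
// (e.g. echoed back by an SFU), while the locally captured track you
// send stays enabled, so the remote side still hears you.
func muteLocalPlayback(of remoteStream: RTCMediaStream, ownTrackId: String) {
    for track in remoteStream.audioTracks where track.trackId == ownTrackId {
        track.isEnabled = false // stops local playout of this track only
    }
}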

Related

GoogleWebRTC is crashing with signaling_thread 0x0x15381f380 (29): EXC_BAD_ACCESS (code=1, address=0x10)

Below is my configuration code.
var peerConnection: RTCPeerConnection! = nil

func initializeSocket() {
    let constraints = RTCMediaConstraints(mandatoryConstraints: nil,
                                          optionalConstraints: ["DtlsSrtpKeyAgreement": kRTCMediaConstraintsValueTrue])
    let config = RTCConfiguration()
    let url1 = "stun:stun.l.google.com:19302"
    let url2 = "stun:stun1.l.google.com:19302?transport=udp"
    var icsServers: [RTCIceServer] = []
    icsServers.append(RTCIceServer(urlStrings: [url1, url2], username: "", credential: ""))
    icsServers.append(RTCIceServer(urlStrings: WRManager.shared.turnServerUrl, username: WRManager.shared.turnUserName, credential: WRManager.shared.turnPassword))
    config.tcpCandidatePolicy = RTCTcpCandidatePolicy.disabled
    config.bundlePolicy = RTCBundlePolicy.maxBundle
    config.rtcpMuxPolicy = RTCRtcpMuxPolicy.require
    config.iceTransportPolicy = .all
    config.iceServers = icsServers
    config.sdpSemantics = .unifiedPlan
    config.continualGatheringPolicy = .gatherContinually
    config.keyType = .ECDSA
    self.peerConnection = self.factory.peerConnection(with: config, constraints: constraints, delegate: nil)
    self.peerConnection.setConfiguration(config) // <<<<<<< ----- Crashing here
    self.peerConnection.delegate = self
}
I get the crash as soon as I apply that configuration to the peer connection.
Using this pod
pod 'GoogleWebRTC'
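One thing worth testing before digging deeper (a sketch under the assumption that the rest of the setup is unchanged): peerConnection(with:constraints:delegate:) already applies config, so the later setConfiguration(_:) call is redundant, and passing the delegate at creation time lets you drop the crashing line entirely. It is also worth verifying that the long-unmaintained GoogleWebRTC pod actually supports every option set here.

// Sketch: apply the configuration once, at creation time, and pass the
// delegate up front instead of assigning it after the fact.
self.peerConnection = self.factory.peerConnection(with: config,
                                                  constraints: constraints,
                                                  delegate: self)
// No setConfiguration(config) call needed: the config is already in effect.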

No Voice Commands in Mapbox Turn By Turn Directions View

So I've made a map on Mapbox using two different styles. Everything is working perfectly except at the point where there is supposed to be a voice command announcing the next turn, etc.
I've tried using MapboxVoiceController, and at this point I have no clue what else to try.
var voiceController: RouteVoiceController!

func calculateRoute(from originCoor: CLLocationCoordinate2D,
                    to destinationCoor: CLLocationCoordinate2D,
                    completion: @escaping (Route?, Error?) -> Void) {
    // Coordinate accuracy is the maximum distance away from the waypoint that the route may still be considered viable, measured in meters. A negative value means the route may pass an indefinite number of meters from the waypoint and still be considered viable.
    let origin = Waypoint(coordinate: originCoor, coordinateAccuracy: -1, name: "Start")
    let destination = Waypoint(coordinate: destinationCoor, coordinateAccuracy: -1, name: "Finish")
    // Specify that the route is intended for automobiles avoiding traffic
    let options = RouteOptions(waypoints: [origin, destination], profileIdentifier: .automobileAvoidingTraffic)
    options.routeShapeResolution = .full
    options.includesSteps = true
    options.includesVisualInstructions = true
    options.includesSpokenInstructions = true
    options.includesExitRoundaboutManeuver = true
    _ = Directions.shared.calculate(options) { [unowned self] (waypoints, routes, error) in
        guard let route = routes?.first else { return }
        let defaults = UserDefaults.standard
        let mapPref = defaults.object(forKey: "mapPref") as! String
        let navigationService = MapboxNavigationService(route: route)
        self.voiceController = RouteVoiceController(navigationService: navigationService)
        // Load the map based on the user's preference
        if mapPref == "0" {
            let newDayStyle = NavigationOptions(styles: [mapDayStyle()], navigationService: navigationService, voiceController: self.voiceController)
            let viewController = NavigationViewController(for: route, options: newDayStyle)
            self.present(viewController, animated: true, completion: nil)
        } else {
            let newNightStyle = NavigationOptions(styles: [mapNightStyle()], navigationService: navigationService, voiceController: self.voiceController)
            let viewController = NavigationViewController(for: route, options: newNightStyle)
            self.present(viewController, animated: true, completion: nil)
        }
    }
}
And here are my two map style classes:
// Day map mode
class mapDayStyle: DayStyle {
    required init() {
        super.init()
        mapStyleURL = URL(string: "mapbox://styles/tristanmellett/cjxfrpz7x04881cmj8er9avsf")!
        styleType = .day
    }

    override func apply() {
        super.apply()
        BottomBannerView.appearance().backgroundColor = .white
    }
}

// Night map mode
class mapNightStyle: NightStyle {
    required init() {
        super.init()
        mapStyleURL = URL(string: "mapbox://styles/tristanmellett/cjxtzfdrb8ttb1ckc9dpgf18a")!
        styleType = .night
    }

    override func apply() {
        super.apply()
        BottomBannerView.appearance().backgroundColor = .darkGray
        TopBannerView.appearance().backgroundColor = .darkGray
    }
}
I would like the voice commands to start working.
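One hedged thing to try (a sketch, not a confirmed fix; it assumes the SDK version here exposes the same navigationService initializer that RouteVoiceController takes above): the SDK's default voice controller is MapboxVoiceController, which fetches recorded speech from the Mapbox Speech API and falls back to on-device synthesis, whereas the bare RouteVoiceController relies on AVSpeechSynthesizer alone.

// Hedged sketch: swap in MapboxVoiceController, built on the same
// navigation service that is passed to NavigationOptions.
self.voiceController = MapboxVoiceController(navigationService: navigationService)

It is also worth ruling out the hardware silent switch, which can mute synthesized speech entirely.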

Allowing background audio with Swift not working

I want to allow background audio while the app is not in the foreground. I currently have this code, which should allow that:
do {
    try AKSettings.setSession(category: .playback, with: .mixWithOthers)
} catch {
    print("error")
}
AKSettings.playbackWhileMuted = true
I also have the 'Audio, AirPlay and Picture in Picture' background mode enabled under Capabilities. However, when I press the home button on my device, the audio doesn't keep playing. What am I doing wrong? I am using AudioKit to produce the sounds, if that matters.
I am using a singleton to house all of the AudioKit components which I named AudioPlayer.swift. Here is what I have in my AudioPlayer.swift singleton file:
class AudioPlayer: NSObject {
    var currentFrequency = String()
    var soundIsPlaying = false
    var leftOscillator = AKOscillator()
    var rightOscillator = AKOscillator()
    var rain = try! AKAudioFile()
    var rainPlayer: AKAudioPlayer!
    var envelope = AKAmplitudeEnvelope()

    override init() {
        super.init()
        do {
            try AKSettings.setSession(category: .playback, with: .mixWithOthers)
        } catch {
            print("error")
        }
        AKSettings.playbackWhileMuted = true
        AudioKit.output = envelope
        AudioKit.start()
    }

    func setupFrequency(left: AKOscillator, right: AKOscillator, frequency: String) {
        currentFrequency = frequency
        leftOscillator = left
        rightOscillator = right
        let leftPanner = AKPanner(leftOscillator)
        leftPanner.pan = -1
        let rightPanner = AKPanner(rightOscillator)
        rightPanner.pan = 1
        // Set up rain and rainPlayer
        do {
            rain = try AKAudioFile(readFileName: "rain.wav")
            rainPlayer = try AKAudioPlayer(file: rain, looping: true, deferBuffering: false, completionHandler: nil)
        } catch { print(error) }
        let mixer = AKMixer(leftPanner, rightPanner, rainPlayer)
        // Put the mixer in the sound envelope
        envelope = AKAmplitudeEnvelope(mixer)
        envelope.attackDuration = 2.0
        envelope.decayDuration = 0
        envelope.sustainLevel = 1
        envelope.releaseDuration = 0.2
        // Start the AudioKit chain
        AudioKit.output = envelope
        AudioKit.start()
        leftOscillator.start()
        rightOscillator.start()
        rainPlayer.start()
        envelope.start()
        soundIsPlaying = true
    }
}
And here is an example of one of my sound effect view controllers, which references the AudioPlayer singleton to send it a certain frequency (I have about a dozen of these view controllers, each with its own frequency settings):
class CalmView: UIViewController {
    let leftOscillator = AKOscillator()
    let rightOscillator = AKOscillator()

    override func viewDidLoad() {
        super.viewDidLoad()
        leftOscillator.amplitude = 0.3
        leftOscillator.frequency = 220
        rightOscillator.amplitude = 0.3
        rightOscillator.frequency = 230
    }

    @IBAction func playSound(_ sender: Any) {
        if shared.soundIsPlaying == false {
            AudioKit.stop()
            shared.setupFrequency(left: leftOscillator, right: rightOscillator, frequency: "Calm")
        } else if shared.soundIsPlaying == true && shared.currentFrequency != "Calm" {
            AudioKit.stop()
            shared.leftOscillator.stop()
            shared.rightOscillator.stop()
            shared.rainPlayer.stop()
            shared.envelope.stop()
            shared.setupFrequency(left: leftOscillator, right: rightOscillator, frequency: "Calm")
        } else {
            shared.soundIsPlaying = false
            shared.envelope.stop()
        }
    }
}
I instantiated the AudioPlayer singleton in my ViewController.swift file.
It depends on when you do your configuration relative to when AudioKit is started. If you're using AudioKit, you should use its AKSettings to manage your session category: not only the playback category, but also the mixWithOthers option. By default, setSession does this:
/// Set the audio session type
@objc open static func setSession(category: SessionCategory,
                                  with options: AVAudioSessionCategoryOptions = [.mixWithOthers]) throws {
So you'd do something like this in your ViewController:
do {
    if #available(iOS 10.0, *) {
        try AKSettings.setSession(category: .playAndRecord, with: [.defaultToSpeaker, .allowBluetooth, .allowBluetoothA2DP])
    } else {
        // Fallback on earlier versions
    }
} catch {
    print("Errored setting category.")
}
So I think it's a matter of getting that ordering straight. It might also help to have inter-app audio set up. If you still have trouble and can provide more information, I can help more, but this is as good an answer as I can muster based on the info you've given so far.
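For comparison, here is a minimal sketch in plain AVFoundation (Swift 4-era API, to match the question's code; the function name is illustrative) of the two runtime requirements behind the capability checkbox: a playback-category session that is actually activated, plus the audio entry the checkbox adds to UIBackgroundModes.

import AVFoundation

// Minimal sketch: the capability checkbox only adds "audio" to
// UIBackgroundModes in Info.plist; at runtime the session must still be
// set to the playback category *and* activated for audio to survive
// the home button.
func activateBackgroundPlayback() {
    let session = AVAudioSession.sharedInstance()
    do {
        try session.setCategory(AVAudioSessionCategoryPlayback, with: [.mixWithOthers])
        try session.setActive(true)
    } catch {
        print("Audio session error: \(error)")
    }
}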

How to disable audio in a WebRTC mobile app (iOS) without changing the framework

I am working with WebRTC on mobile (iOS). I can't disable audio in WebRTC (iOS); I have found no flag for it. By changing the framework/library it can be done easily, but my goal is to disable audio without modifying the framework/library. Can anyone help me?
Update your question with a code snippet showing how you are creating the media stream or the tracks (audio/video).
Generally, with the default native WebRTC framework:
RTCMediaStream *localStream = [_factory mediaStreamWithStreamId:kARDMediaStreamId];
if (audioRequired) {
    RTCAudioTrack *aTrack = [_lmStream createLocalAudioTrack];
    [localStream addAudioTrack:aTrack];
}
RTCVideoTrack *vTrack = [_lmStream createLocalVideoTrack];
[localStream addVideoTrack:vTrack];
[_peerConnection addStream:localStream];
If you want to mute the audio during the call, use the function below.
- (void)enableAudio:(NSString *)id isAudioEnabled:(BOOL)isAudioEnabled {
    NSLog(@"Audio enabled for %@: %d, streams count: %lu", id, isAudioEnabled, (unsigned long)_peerConnection.localStreams.count);
    if (_peerConnection.localStreams.count > 0) {
        RTCMediaStream *lStream = _peerConnection.localStreams[0];
        if (lStream.audioTracks.count > 0) { // Usually there is only one track; if you have more, traverse them all.
            // isAudioEnabled == 1 -> unmute
            // isAudioEnabled == 0 -> mute
            [lStream.audioTracks[0] setIsEnabled:isAudioEnabled];
        }
    }
}
In my case I didn't use streams; I added the audio track directly to the peer connection.
private func createMediaSenders() {
    let streamId = "stream"
    // Audio
    let audioTrack = self.createAudioTrack()
    self.pc.add(audioTrack, streamIds: [streamId])
    // Video
    /* let videoTrack = self.createVideoTrack()
    self.localVideoTrack = videoTrack
    self.peerConnection.add(videoTrack, streamIds: [streamId])
    self.remoteVideoTrack = self.peerConnection.transceivers.first { $0.mediaType == .video }?.receiver.track as? RTCVideoTrack
    // Data
    if let dataChannel = createDataChannel() {
        dataChannel.delegate = self
        self.localDataChannel = dataChannel
    } */
}

private func createAudioTrack() -> RTCAudioTrack {
    let audioConstrains = RTCMediaConstraints(mandatoryConstraints: nil, optionalConstraints: nil)
    let audioSource = sessionFactory.audioSource(with: audioConstrains)
    let audioTrack = sessionFactory.audioTrack(with: audioSource, trackId: "audio0")
    return audioTrack
}
To mute and unmute the microphone, I use this function:
public func muteMicrophone(_ mute: Bool) {
    for sender in pc.senders {
        if sender.track?.kind == "audio" {
            // A muted track is a disabled track, so isEnabled is the inverse of `mute`.
            sender.track?.isEnabled = !mute
        }
    }
}
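Usage is then straightforward (webRTCClient is a hypothetical name for whatever object owns pc):

webRTCClient.muteMicrophone(true)  // mute: disables every outgoing audio sender track
webRTCClient.muteMicrophone(false) // unmute: re-enables them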

App always goes back to the main view after entering the background

The app has 2 view controllers under a navigation controller: UINavigationController --(root view)--> UIViewController --(present modally)--> UIViewController2.
After entering UIViewController2, if I press home to send the app to the background and then reactivate it, the screen goes back to UIViewController. This happens on a real device, but not on the simulator.
I suspected a lack of memory, but there are few objects in UIViewController2. Is there any other reason the app would do this?
Update:
I found the problem when the iPhone was not connected to Xcode!
I have just tested the app while connected to Xcode: when I press the home button to enter the background, the app crashes! That means when I "reactivate" the app, it actually restarts, and it does go back to UIViewController.
I got errors in the following code:
let localfilePath = NSBundle.mainBundle().URLForResource("home", withExtension: "html")
var webViewRequest: NSURLRequest!

override func viewDidLoad() {
    super.viewDidLoad()
    webViewRequest = NSURLRequest(URL: localfilePath!)
    initSubviews()
}

func initSubviews() {
    let fm: CGRect = UIScreen.mainScreen().bounds
    self.mainScrollView = UIScrollView(frame: CGRectMake(0, 0, fm.size.width, fm.size.height))
    self.mainScrollView!.contentSize = CGSizeMake(self.mainScrollView!.frame.size.width * CGFloat(numPages), self.mainScrollView!.frame.size.height)
    self.mainScrollView!.backgroundColor = UIColor.whiteColor()
    self.mainScrollView!.pagingEnabled = true
    self.mainScrollView!.bounces = false
    self.mainScrollView!.showsHorizontalScrollIndicator = false
    self.mainScrollView!.scrollRectToVisible(CGRectMake(mainScrollView!.frame.size.width, 0, mainScrollView!.frame.size.width, mainScrollView!.frame.size.height), animated: false)
    self.mainScrollView!.delegate = self
    self.view.addSubview(mainScrollView!)
    prevWebView = runFunc.setWebView(0)
    currWebView = runFunc.setWebView(runFunc.screenWidth)
    nextWebView = runFunc.setWebView(runFunc.screenWidth * 2)
    currWebView.delegate = self
    prevWebView.delegate = self
    nextWebView.delegate = self
    prevWebView.tag = 0
    currWebView.tag = 1
    nextWebView.tag = 2
    prevWebView.backgroundColor = UIColor.whiteColor()
    currWebView.backgroundColor = UIColor.whiteColor()
    nextWebView.backgroundColor = UIColor.whiteColor()
    mainScrollView.addSubview(prevWebView)
    mainScrollView.addSubview(currWebView)
    mainScrollView.addSubview(nextWebView)
    currWebView.scrollView.delegate = self
    prevWebView.scrollView.delegate = self
    nextWebView.scrollView.delegate = self
    prevWebView.scrollView.decelerationRate = UIScrollViewDecelerationRateNormal
    currWebView.scrollView.decelerationRate = UIScrollViewDecelerationRateNormal
    nextWebView.scrollView.decelerationRate = UIScrollViewDecelerationRateNormal
    currWebView.loadRequest(webViewRequest)
    NSTimer.scheduledTimerWithTimeInterval(1.0, target: self, selector: "loadPrevNext", userInfo: nil, repeats: false)
}

func loadPrevNext() {
    let prev = getPrev(currVolume, theChapter: currChapter)
    prevVolume = prev["volume"]!
    prevChapter = prev["chapter"]!
    prevChapterArr = runFunc.getLection(prev["volume"]!, theChapter: prev["chapter"]!)
    let next = getNext(currVolume, theChapter: currChapter)
    nextVolume = next["volume"]!
    nextChapter = next["chapter"]!
    nextChapterArr = runFunc.getLection(next["volume"]!, theChapter: next["chapter"]!)
    prevWebView.loadRequest(webViewRequest)
    nextWebView.loadRequest(webViewRequest)
}
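Given the update, the real question is why the app crashes on backgrounding. A first diagnostic step (a sketch in the question's Swift 2 style; the class and method names are illustrative) is to run with an exception breakpoint set and log lifecycle notifications, so the console shows how far the app gets:

import UIKit

// Diagnostic sketch: observe backgrounding so the console shows whether
// the crash happens before or after the app reaches the background.
class LifecycleLogger: NSObject {
    override init() {
        super.init()
        NSNotificationCenter.defaultCenter().addObserver(self,
            selector: "appDidEnterBackground",
            name: UIApplicationDidEnterBackgroundNotification,
            object: nil)
    }

    func appDidEnterBackground() {
        NSLog("didEnterBackground reached; no crash before this point")
    }
}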
