WebRTC - how to switch between getUserMedia and getDisplayMedia tracks inside RTCPeerConnection

I'm trying to develop an app where users can video call each other and share their screens using WebRTC. I have succeeded in building either a video-call app or a screen-sharing app on its own, and now I'm trying to switch between getUserMedia and getDisplayMedia on a button click during a call, inside the same RTCPeerConnection, but it doesn't work.
This is how I thought it could work:
function onLogin(success) {
    var configuration = {
        iceServers: [{ urls: "stun:stun.l.google.com:19302" }]
    };
    myConnection = new RTCPeerConnection(configuration);
    myConnection.onicecandidate = function (event) {
        console.log("onicecandidate");
        if (event.candidate) send({ type: "candidate", candidate: event.candidate });
    };
    myConnection.ontrack = function (e) {
        remoteVideo.srcObject = e.streams[0];
    };
    myConnection.ondatachannel = openDataChannel;
    openDataChannel();
    startAVStream();
    //startSStream()
}
function startAVStream(enable) {
    // Remove any screen-share tracks first
    // (note: RTCPeerConnection.removeTrack actually expects an RTCRtpSender,
    // not a track, so this call throws and is swallowed by the catch)
    if (sStream) sStream.getTracks().forEach(function (track) {
        try { myConnection.removeTrack(track, sStream); } catch (e) {}
    });
    navigator.mediaDevices.getUserMedia({ video: true, audio: true }).then(s => {
        if (!avStream) {
            avStream = s;
            avStream.getTracks().forEach(function (track) {
                myConnection.addTrack(track, avStream);
            });
        }
    }, function (error) { console.log(error); });
}
function startSStream(enable) {
    if (avStream) avStream.getTracks().forEach(function (track) {
        try { myConnection.removeTrack(track, avStream); } catch (e) {}
    });
    navigator.mediaDevices.getDisplayMedia({ video: true }).then(s => {
        if (!sStream) {
            sStream = s;
            sStream.getTracks().forEach(function (track) {
                myConnection.addTrack(track, sStream);
            });
        }
    }, function (error) { console.log(error); });
}
Can anyone tell me how to switch between tracks inside the same RTCPeerConnection, or should I create two separate RTCPeerConnections: one for video/audio streaming and another for screen sharing?
Any help appreciated! Thanks!

You could use RTCRtpSender.replaceTrack to swap in the screen-capture track. This doesn't require renegotiation, and therefore has very low latency.
const newstream = await navigator.mediaDevices.getDisplayMedia({ video: true });
const newtrack = newstream.getTracks()[0];
if (newtrack.kind !== 'video')
    throw new Error('Eek!?');
// Replace the outgoing camera track on every video sender
for (const s of pc.getSenders()) {
    if (s.track && s.track.kind === 'video')
        await s.replaceTrack(newtrack);
}
The test for s.track not being null deals with the case where you previously called replaceTrack(..., null).
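Switching back to the camera is the same call in reverse. A minimal sketch, assuming pc is the RTCPeerConnection and camStream is the original getUserMedia stream (both names are illustrative):
// Hypothetical helper: put the camera track back on every video sender.
async function backToCamera(pc, camStream) {
    const camTrack = camStream.getVideoTracks()[0];
    for (const sender of pc.getSenders()) {
        if (sender.track && sender.track.kind === 'video')
            await sender.replaceTrack(camTrack);
    }
}

// Browsers fire 'ended' on the screen track when the user clicks the
// native "Stop sharing" button, so it is worth wiring that up too:
newtrack.addEventListener('ended', () => backToCamera(pc, camStream));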

shareScreen = () => {
    const success = (stream) => {
        window.localStream = stream;
        // this.localVideoref.current.srcObject = stream
        // localStream.replaceStream(stream);
        this.setState({
            localStream: stream
        });
        Object.values(this.state.peerConnections).forEach(pc => {
            pc.getSenders().forEach(async s => {
                console.log("s.track ", s.track);
                if (s.track && s.track.kind === 'video') {
                    stream.getTracks().forEach(track => {
                        // pc.addTrack(track, this.state.localStream)
                        s.replaceTrack(track);
                    });
                }
            });
        });
    };
    const failure = (e) => {
        console.log('getDisplayMedia Error: ', e);
    };
    navigator.mediaDevices.getDisplayMedia({ video: { cursor: 'always' } }).then(success).catch(failure);
};

Related

How to pass geolocation permission to react-native-webview?

When using the geolocation API in a react-native-webview, I am asked twice whether the app is allowed to use my current location. How is it possible to forward the already granted (or denied) permission to the WebView?
I am currently using react-native 0.68 and react-native-webview 11.22.
[Screenshots in the original question show the two prompts: first the app's native permission dialog, then the WebView's own prompt.]
I should only be asked once for permission to use my current geolocation.
In case somebody faces the same problem: I solved this issue with the following workaround. I injected a custom script into the WebView that overrides the geolocation API there. The script does all the communication with the app; the app returns the geolocation, so the WebView never needs to ask for permission.
Custom Script
export const getGeoLocationJS = () => {
    const getCurrentPosition = `
        navigator.geolocation.getCurrentPosition = (success, error, options) => {
            window.ReactNativeWebView.postMessage(JSON.stringify({ event: 'getCurrentPosition', options: options }));
            window.addEventListener('message', (e) => {
                let eventData = {};
                try {
                    eventData = JSON.parse(e.data);
                } catch (e) {}
                if (eventData.event === 'currentPosition') {
                    success(eventData.data);
                } else if (eventData.event === 'currentPositionError') {
                    error(eventData.data);
                }
            });
        };
        true;
    `;
    const watchPosition = `
        navigator.geolocation.watchPosition = (success, error, options) => {
            window.ReactNativeWebView.postMessage(JSON.stringify({ event: 'watchPosition', options: options }));
            window.addEventListener('message', (e) => {
                let eventData = {};
                try {
                    eventData = JSON.parse(e.data);
                } catch (e) {}
                if (eventData.event === 'watchPosition') {
                    success(eventData.data);
                } else if (eventData.event === 'watchPositionError') {
                    error(eventData.data);
                }
            });
        };
        true;
    `;
    const clearWatch = `
        navigator.geolocation.clearWatch = (watchID) => {
            window.ReactNativeWebView.postMessage(JSON.stringify({ event: 'clearWatch', watchID: watchID }));
        };
        true;
    `;
    return `
        (function() {
            ${getCurrentPosition}
            ${watchPosition}
            ${clearWatch}
        })();
    `;
};
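One caveat with the script above: every getCurrentPosition call adds another 'message' listener that is never removed, so listeners accumulate over time. A minimal sketch of a self-removing variant for the one-shot case (same message protocol as above; purely illustrative):
navigator.geolocation.getCurrentPosition = (success, error, options) => {
    window.ReactNativeWebView.postMessage(JSON.stringify({ event: 'getCurrentPosition', options: options }));
    const handler = (e) => {
        let eventData = {};
        try { eventData = JSON.parse(e.data); } catch (err) {}
        if (eventData.event === 'currentPosition') {
            window.removeEventListener('message', handler); // clean up once answered
            success(eventData.data);
        } else if (eventData.event === 'currentPositionError') {
            window.removeEventListener('message', handler);
            error(eventData.data);
        }
    };
    window.addEventListener('message', handler);
};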
Webview
import Geolocation from '@react-native-community/geolocation';

let webview = null;

<WebView
    geolocationEnabled={ true }
    injectedJavaScript={ getGeoLocationJS() }
    javaScriptEnabled={ true }
    onMessage={ event => {
        let data = {};
        try {
            data = JSON.parse(event.nativeEvent.data);
        } catch (e) {
            console.log(e);
        }
        if (data?.event && data.event === 'getCurrentPosition') {
            Geolocation.getCurrentPosition((position) => {
                webview.postMessage(JSON.stringify({ event: 'currentPosition', data: position }));
            }, (error) => {
                webview.postMessage(JSON.stringify({ event: 'currentPositionError', data: error }));
            }, data.options);
        } else if (data?.event && data.event === 'watchPosition') {
            Geolocation.watchPosition((position) => {
                webview.postMessage(JSON.stringify({ event: 'watchPosition', data: position }));
            }, (error) => {
                webview.postMessage(JSON.stringify({ event: 'watchPositionError', data: error }));
            }, data.options);
        } else if (data?.event && data.event === 'clearWatch') {
            Geolocation.clearWatch(data.watchID);
        }
    }}
    ref={ ref => {
        webview = ref;
        if (onRef) {
            onRef(webview);
        }
    }}
    source={ url }
    startInLoadingState={ true }
/>

'Location enable permission' alert disappears after a few seconds (3 to 4 seconds) in react-native

I am using the 'react-native-geolocation-service' library to prompt the user to enable location for the app if location is disabled. When location is disabled, the permission alert works fine on Android, but on iOS it appears for only a few seconds (2 or 3) and then closes by itself. Below is a sample of the method.
static hasLocationPermissionIOS = async () => {
    const status = await Geolocation.requestAuthorization('always');
    if (status === 'granted') {
        return 'GRANTED';
    }
    if (status === 'denied') {
        return 'DENIED';
    }
    if (status === 'disabled') {
        return 'DISABLED';
    }
};

static hasLocationPermission = async () => {
    if (Platform.OS === 'ios') {
        Geolocation.requestAuthorization('whenInUse');
        const hasPermission = await this.hasLocationPermissionIOS();
        return hasPermission;
    }
    if (Platform.OS === 'android') {
        const hasPermission = await this.hasLocationPermissionAndroid();
        return hasPermission;
    }
    return false;
};
static getLocation = async () => {
    const hasLocationPermission = await this.hasLocationPermission();
    if (!hasLocationPermission) {
        return;
    }
    return new Promise((resolve) => {
        Geolocation.getCurrentPosition((position) => {
            resolve(position);
        }, (error) => {
            resolve(error);
        }, {
            accuracy: {
                android: 'high',
                ios: 'best',
            },
            enableHighAccuracy: true,
            timeout: 15000,
            maximumAge: 10000,
            distanceFilter: 0,
            forceRequestLocation: true,
            showLocationDialog: true,
        });
    });
};
I referred to the issue linked below but was not able to find a solution:
https://github.com/douglasjunior/react-native-get-location/issues/18
Thanks in advance!
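One thing that stands out in the snippet: on iOS, requestAuthorization is invoked twice (a fire-and-forget 'whenInUse' call in hasLocationPermission, then an awaited 'always' call inside hasLocationPermissionIOS). Two overlapping authorization requests are a plausible cause of the alert dismissing itself. A minimal sketch that asks only once and awaits the answer (assuming the same react-native-geolocation-service API; purely illustrative):
static hasLocationPermission = async () => {
    if (Platform.OS === 'ios') {
        // One awaited request; requestAuthorization resolves to
        // 'granted' | 'denied' | 'disabled' | 'restricted'
        const status = await Geolocation.requestAuthorization('whenInUse');
        return status === 'granted';
    }
    if (Platform.OS === 'android') {
        return this.hasLocationPermissionAndroid();
    }
    return false;
};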

react-native-webrtc Mic not closing after video call on iOS

Our iOS app has audio/video calling implemented using the following technologies:
"react-native": "0.63.4"
"react": "16.13.1"
"react-native-webrtc": "^1.87.3"
"react-native-incall-manager": "^3.3.0"
iOS version 14.4.1
Our calling module works as follows:
First, request and initiate an audio call
Then, request and initiate a video call
On the code side, things work like this:
We call the getStream() function, which gets the user media for the audio call, i.e. audio only
Then we call the startStream() function, which connects the peer connection
On requesting video, we call the getVideoStream() method to get audio and video streams
We call startStream() again to start the peer connection with video
The scenario is as follows:
We start off by connecting an audio call. On success, the audio call is connected and works as expected
We request video and connect the video call; everything works as expected and I receive video on both ends
When I disconnect the call and stop the tracks using this.state.localStream.getTracks(), the mic does not close. The orange mic indicator stays visible on iOS.
Important Notes:
Disconnecting from the audio call closes the mic just fine
Even if we get the video stream during the audio call and disconnect without connecting video, it still works fine and closes both tracks
It's only when I connect video that the issue arises
Calling InCallManager.stop() closes the mic, but the mic then does not open on the second call, and the orange mic indicator on iOS is not shown.
Get User Media Audio Call
getStream() {
    InCallManager.setSpeakerphoneOn(false);
    InCallManager.setMicrophoneMute(false);
    mediaDevices.enumerateDevices().then((sourceInfos) => {
        // Find the front camera (videoSourceId is computed but unused
        // in this audio-only request)
        let videoSourceId;
        for (let i = 0; i < sourceInfos.length; i++) {
            const sourceInfo = sourceInfos[i];
            if (
                sourceInfo.kind === 'videoinput' &&
                sourceInfo.facing === (true ? 'front' : 'back')
            ) {
                videoSourceId = sourceInfo.deviceId;
            }
        }
        mediaDevices
            .getUserMedia({
                audio: true,
            })
            .then((stream) => {
                this.setState({
                    localStream: stream,
                });
            })
            .catch((error) => {
                // Log error
                console.log('stream get error', error);
            });
    });
}
Get User Media for Video Call
getVideoStream() {
    this.state.peerConn.removeStream(this.state.localStream);
    InCallManager.setSpeakerphoneOn(false);
    InCallManager.setMicrophoneMute(false);
    mediaDevices.enumerateDevices().then((sourceInfos) => {
        let videoSourceId;
        for (let i = 0; i < sourceInfos.length; i++) {
            const sourceInfo = sourceInfos[i];
            if (
                sourceInfo.kind === 'videoinput' &&
                sourceInfo.facing === (true ? 'front' : 'back')
            ) {
                videoSourceId = sourceInfo.deviceId;
            }
        }
        mediaDevices
            .getUserMedia({
                audio: true,
                mirror: true,
                video: {
                    mandatory: {
                        minWidth: 500,
                        minHeight: 300,
                        minFrameRate: 30,
                    },
                    facingMode: true ? 'user' : 'environment',
                    optional: videoSourceId ? [{ sourceId: videoSourceId }] : [],
                },
            })
            .then((stream) => {
                this.setState(
                    {
                        localStream: stream,
                    },
                    () => {
                        this.startStream();
                    },
                );
            })
            .catch((error) => {
                // Log error
                console.log('stream get error', error);
            });
    });
}
Start Stream Function
startStream() {
    console.log('start Stream');
    this.newPeerConnection();
    // NOTE: a fixed 3 s delay before createOffer is fragile; the
    // 'negotiationneeded' event is the usual trigger for this
    setTimeout(() => {
        this.state.peerConn
            .createOffer()
            .then((sessionDescription) =>
                this.setLocalAndSendMessage(sessionDescription),
            )
            .catch((error) => this.defaultErrorCallback(error));
    }, 3000);
}
newPeerConnection()
newPeerConnection() {
    var peerConn = new RTCPeerConnection({
        iceServers: turnServer,
    });
    peerConn.onicecandidate = (evt) => {
        console.log(`OnIceCan`);
        if (evt.candidate) {
            this.state.connection.invoke(
                'addIceCandidate',
                parseInt(this.state.ticket.pkTicketId),
                JSON.stringify({
                    type: 'candidate',
                    sdpMLineIndex: evt.candidate.sdpMLineIndex,
                    sdpMid: evt.candidate.sdpMid,
                    candidate: evt.candidate.candidate,
                }),
            );
        }
    };
    peerConn.addStream(this.state.localStream);
    peerConn.addEventListener(
        'addstream',
        (stream) => {
            InCallManager.setForceSpeakerphoneOn(false);
            this.setState({
                isSpeakerEnabled: false,
            });
            this.setState({
                remoteStream: stream,
                showAudioCallTimer: true,
            });
        },
        false,
    );
    this.setState({
        peerConn,
    });
}
Close Tracks
if (this.state.localStream) {
    const tracks = this.state.localStream.getTracks();
    tracks.map((track, index) => {
        track.stop();
    });
}
if (this.state.peerConn) {
    this.state.peerConn.removeStream(this.state.localStream);
    this.state.peerConn.close();
    if (!this.state.callRatingSubmitted && this.state.remoteStream) {
        this._handleCallFeedbackModal(true);
    }
}
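One plausible culprit: getVideoStream() replaces this.state.localStream with a brand-new audio+video stream but never stops the tracks of the original audio-only stream, so its live audio track keeps the microphone capture session open even after the teardown above stops the current stream's tracks. A minimal sketch of releasing the old stream before swapping it out (names follow the question's code; illustrative, not tested):
getVideoStream() {
    const oldStream = this.state.localStream;
    if (oldStream) {
        this.state.peerConn.removeStream(oldStream);
        // Stop every old track so iOS releases the microphone
        oldStream.getTracks().forEach((track) => track.stop());
    }
    // ...then continue with mediaDevices.getUserMedia({ audio: true, video: ... })
}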

Video as a background image not working in Gatsby PWA on iOS

I created a opt-in app for potential interims for our company, i worked with Gatsby and for now am quite satisfied with the result. I made it an Progressive Web App as that is fairly easy with the gatsby plugin.
The PWA works great on Android and shows the background video as expected, but on iOS the video doesn't show.
I updated all the packages and dependencies to the last versions but that doesn't change a thing. I tried googling the issue but got a lot of search results off people trying to let a PWA play video in the background when the app is closed (not my case).
{
    resolve: `gatsby-plugin-manifest`,
    options: {
        name: `Afstuderen bij Arcady`,
        short_name: `Afstuderen`,
        start_url: `/`,
        background_color: `#FFF`,
        theme_color: `#00a667`,
        display: `standalone`,
        icon: `src/images/bear_green.png`,
    },
},
'gatsby-plugin-offline',
And this is the content of the service worker:
importScripts("workbox-v3.6.3/workbox-sw.js");
workbox.setConfig({modulePathPrefix: "workbox-v3.6.3"});
workbox.core.setCacheNameDetails({prefix: "gatsby-plugin-offline"});
workbox.skipWaiting();
workbox.clientsClaim();
/**
 * The workboxSW.precacheAndRoute() method efficiently caches and responds to
 * requests for URLs in the manifest.
 */
self.__precacheManifest = [
    {
        "url": "webpack-runtime-aec2408fe3a97f1352af.js"
    },
    {
        "url": "app-5b624d17337895ddf874.js"
    },
    {
        "url": "component---node-modules-gatsby-plugin-offline-app-shell-js-b97c345e19bb442c644f.js"
    },
    {
        "url": "offline-plugin-app-shell-fallback/index.html",
        "revision": "ac0d57f6ce61fac4bfa64e7e08d076c2"
    },
    {
        "url": "0-d2c3040ae352cda7b69f.js"
    },
    {
        "url": "component---src-pages-404-js-cf647f7c3110eab2f912.js"
    },
    {
        "url": "static/d/285/path---404-html-516-62a-0SUcWyAf8ecbYDsMhQkEfPzV8.json"
    },
    {
        "url": "static/d/604/path---offline-plugin-app-shell-fallback-a-30-c5a-BawJvyh36KKFwbrWPg4a4aYuc8.json"
    },
    {
        "url": "manifest.webmanifest",
        "revision": "5a580d53785b72eace989a49ea1e24f7"
    }
].concat(self.__precacheManifest || []);
workbox.precaching.suppressWarnings();
workbox.precaching.precacheAndRoute(self.__precacheManifest, {});
workbox.routing.registerRoute(/(\.js$|\.css$|static\/)/, workbox.strategies.cacheFirst(), 'GET');
workbox.routing.registerRoute(/^https?:.*\.(png|jpg|jpeg|webp|svg|gif|tiff|js|woff|woff2|json|css)$/, workbox.strategies.staleWhileRevalidate(), 'GET');
workbox.routing.registerRoute(/^https?:\/\/fonts\.googleapis\.com\/css/, workbox.strategies.staleWhileRevalidate(), 'GET');
/* global importScripts, workbox, idbKeyval */
importScripts(`idb-keyval-iife.min.js`)
const WHITELIST_KEY = `custom-navigation-whitelist`
const navigationRoute = new workbox.routing.NavigationRoute(({ event }) => {
    const { pathname } = new URL(event.request.url)
    return idbKeyval.get(WHITELIST_KEY).then((customWhitelist = []) => {
        // Respond with the offline shell if we match the custom whitelist
        if (customWhitelist.includes(pathname)) {
            const offlineShell = `/offline-plugin-app-shell-fallback/index.html`
            const cacheName = workbox.core.cacheNames.precache
            return caches.match(offlineShell, { cacheName }).then(cachedResponse => {
                if (cachedResponse) return cachedResponse
                console.error(
                    `The offline shell (${offlineShell}) was not found ` +
                    `while attempting to serve a response for ${pathname}`
                )
                return fetch(offlineShell).then(response => {
                    if (response.ok) {
                        return caches.open(cacheName).then(cache =>
                            // Clone is needed because put() consumes the response body.
                            cache.put(offlineShell, response.clone()).then(() => response)
                        )
                    } else {
                        return fetch(event.request)
                    }
                })
            })
        }
        return fetch(event.request)
    })
})
workbox.routing.registerRoute(navigationRoute)
let updatingWhitelist = null
function rawWhitelistPathnames(pathnames) {
    if (updatingWhitelist !== null) {
        // Prevent the whitelist from being updated twice at the same time
        return updatingWhitelist.then(() => rawWhitelistPathnames(pathnames))
    }
    updatingWhitelist = idbKeyval
        .get(WHITELIST_KEY)
        .then((customWhitelist = []) => {
            pathnames.forEach(pathname => {
                if (!customWhitelist.includes(pathname)) customWhitelist.push(pathname)
            })
            return idbKeyval.set(WHITELIST_KEY, customWhitelist)
        })
        .then(() => {
            updatingWhitelist = null
        })
    return updatingWhitelist
}

function rawResetWhitelist() {
    if (updatingWhitelist !== null) {
        return updatingWhitelist.then(() => rawResetWhitelist())
    }
    updatingWhitelist = idbKeyval.set(WHITELIST_KEY, []).then(() => {
        updatingWhitelist = null
    })
    return updatingWhitelist
}
const messageApi = {
    whitelistPathnames(event) {
        let { pathnames } = event.data
        pathnames = pathnames.map(({ pathname, includesPrefix }) => {
            if (!includesPrefix) {
                return `${pathname}`
            } else {
                return pathname
            }
        })
        event.waitUntil(rawWhitelistPathnames(pathnames))
    },
    resetWhitelist(event) {
        event.waitUntil(rawResetWhitelist())
    },
}

self.addEventListener(`message`, event => {
    const { gatsbyApi } = event.data
    if (gatsbyApi) messageApi[gatsbyApi](event)
})
I expect the iOS PWA (Safari) to show the video as it does on Android, but instead it shows a grey screen.
I hope someone can help me out or point me in the right direction.
How big is your video?
Last time I checked, iOS had a 50 MB limit on a PWA's cache, so if your video is bigger than 50 MB, that may be why it works only on Android (which has no such restriction).
I found this blog post that helped me fix this problem
To add Range request handling to gatsby-plugin-offline, I added a script called range-request-handler.js with the following:
// range-request-handler.js
// Define workbox globally
importScripts('https://storage.googleapis.com/workbox-cdn/releases/5.0.0/workbox-sw.js');
// Bring in workbox libs
const { registerRoute } = require('workbox-routing');
const { CacheFirst } = require('workbox-strategies');
const { RangeRequestsPlugin } = require('workbox-range-requests'); // The fix
// Add Range Request support to fetching videos from cache
registerRoute(
    /(\.webm$|\.mp4$)/,
    new CacheFirst({
        plugins: [
            new RangeRequestsPlugin(),
        ],
    })
);
Then in my gatsby-config.js I configured the plugin to append the above script:
// gatsby-config.js
module.exports = {
    // ...
    plugins: [
        // ...plugins
        {
            resolve: 'gatsby-plugin-offline',
            options: {
                appendScript: require.resolve('./range-request-handler.js'),
            },
        },
        // ...plugins
    ],
    // ...
};
Videos now work in the Safari browser for me. If there is a better way to implement this, I am all ears; for now it works as intended.

How to call downloadItem.pause() and downloadItem.resume() from outside of session.on('will-download')?

I am trying to implement pause and resume functionality with Electron's download APIs. How can I call item.pause() outside of session.on('will-download'), given that item only exists inside the 'will-download' handler? Or is there another way?
var url, path, filename;

ipcMain.on('item:file', function(e, file) {
    if (file.msg === 'd') {
        console.log(file);
        url = file.url;
        path = pathLib.join(__dirname, file.path);
        filename = file.filename;
        contents.downloadURL(url);
    }
    if (file.msg === 'p') {
        item.pause(); // `item` is not in scope here: this is the problem
    }
    if (file.msg === 'r') {
        item.resume();
    }
    contents.session.on('will-download', (event, item, contents) => {
        // Set the save path, so Electron does not prompt with a save dialog.
        path = `${path}${filename}`;
        item.setSavePath(path);
        item.on('updated', (event, state) => {
            if (state === 'interrupted') {
                console.log('Download is interrupted but can be resumed');
            } else if (state === 'progressing') {
                if (item.isPaused()) {
                    console.log('Download is paused');
                } else {
                    console.log(`Received bytes: ${item.getReceivedBytes()} out of ${item.getTotalBytes()}`);
                    var percentage = (item.getReceivedBytes() / item.getTotalBytes()) * 100;
                    var progress = { 'rBytes': item.getReceivedBytes(), 'tBytes': item.getTotalBytes(), 'p': percentage };
                    contents.send('item:progress', progress);
                }
            }
        });
        item.once('done', (event, state) => {
            if (state === 'completed') {
                console.log('Download completed successfully');
            } else {
                console.log(`Download failed: ${state}`);
            }
        });
    });
});
https://github.com/sindresorhus/electron-dl/blob/master/index.js
Following the approach in the link above worked for me!
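What electron-dl does, in essence, is capture the DownloadItem in a variable that outlives the 'will-download' handler, so pause/resume can be driven from the IPC handler. A minimal sketch of that idea (variable names are illustrative, not electron-dl's actual code):
const { ipcMain } = require('electron');

let activeItem = null; // holds the current DownloadItem between IPC calls

contents.session.on('will-download', (event, item) => {
    activeItem = item; // keep a reference for later pause/resume
    item.once('done', () => { activeItem = null; });
});

ipcMain.on('item:file', (e, file) => {
    if (file.msg === 'p' && activeItem) activeItem.pause();
    if (file.msg === 'r' && activeItem && activeItem.canResume()) activeItem.resume();
});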
