Muting local tracks on Twilio Video?

Following guides, I'm attempting to disable and enable localAudioTracks.
Within the roomJoined function, I have these two functions:
document.getElementById("audio-toggle-off").onclick = function() {
console.log("muting this users audio");
room.localParticipant.audioTracks.forEach(function(trackId, track) {
track.disable();
});
};
document.getElementById("audio-toggle-on").onclick = function() {
console.log("enabling this users audio");
room.localParticipant.audioTracks.forEach(function(trackId, track) {
track.enable();
});
};
But when I click either of these, I get the following error:
VideoChat.js:241 Uncaught TypeError: track.enable is not a function
at VideoChat.js:241
at Map.forEach (<anonymous>)
at HTMLButtonElement.document.getElementById.onclick (VideoChat.js:240)
Any ideas? For reference, I'm including the full roomJoined function below:
function roomJoined(room) {
  window.room = activeRoom = room;
  log("Joined as '" + identity + "'");
  if (hide) {
    return;
  }
  // document.getElementById("button-join").style.display = "none";
  // document.getElementById("button-leave").style.display = "inline";

  // Attach LocalParticipant's Tracks, if not already attached.
  var previewContainer = document.getElementById("local-media");
  if (previewContainer && !previewContainer.querySelector("video")) {
    attachParticipantTracks(room.localParticipant, previewContainer);
  }

  // Attach the Tracks of the Room's Participants.
  room.participants.forEach(function(participant) {
    log("Already in Room: '" + participant.identity + "'");
    var previewContainer = document.getElementById("remote-media");
    attachParticipantTracks(participant, previewContainer);
  });

  // When a Participant joins the Room, log the event.
  room.on("participantConnected", function(participant) {
    log("Joining: '" + participant.identity + "'");
  });

  // When a Participant adds a Track, attach it to the DOM.
  room.on("trackAdded", function(track, participant) {
    log(participant.identity + " added track: " + track.kind);
    var previewContainer = document.getElementById("remote-media");
    attachTracks([track], previewContainer);
  });

  // When a Participant removes a Track, detach it from the DOM.
  room.on("trackRemoved", function(track, participant) {
    log(participant.identity + " removed track: " + track.kind);
    detachTracks([track]);
  });

  // When a Participant leaves the Room, detach its Tracks.
  room.on("participantDisconnected", function(participant) {
    log("Participant '" + participant.identity + "' left the room");
    detachParticipantTracks(participant);
  });

  // Once the LocalParticipant leaves the room, detach the Tracks
  // of all Participants, including that of the LocalParticipant.
  room.on("disconnected", function() {
    log("Left");
    if (previewTracks) {
      previewTracks.forEach(function(track) {
        track.stop();
      });
    }
    detachParticipantTracks(room.localParticipant);
    room.participants.forEach(detachParticipantTracks);
    activeRoom = null;
    document.getElementById("button-join").style.display = "inline";
    document.getElementById("button-leave").style.display = "none";
  });

  document.getElementById("audio-toggle-off").onclick = function() {
    console.log("muting this users audio");
    room.localParticipant.audioTracks.forEach(function(trackId, track) {
      track.disable();
    });
  };
  document.getElementById("audio-toggle-on").onclick = function() {
    console.log("enabling this users audio");
    room.localParticipant.audioTracks.forEach(function(trackId, track) {
      track.enable();
    });
  };
}

So it looks like this was able to work. The callback passed to Map#forEach receives (value, key, map), so the first argument is the track itself, not its ID:
document.getElementById("audio-toggle-off").onclick = function() {
room.localParticipant.audioTracks.forEach(function(
audioTrack,
key,
map
) {
console.log("muting this users audio");
audioTrack.disable();
});
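For completeness, here is a sketch of both handlers with the corrected callback signature, reusing the element IDs and room variable from the question. Note that on twilio-video 2.x, localParticipant.audioTracks holds LocalAudioTrackPublication objects, so there you would call publication.track.disable()/enable() instead:
document.getElementById("audio-toggle-off").onclick = function() {
  room.localParticipant.audioTracks.forEach(function(audioTrack) {
    // In 1.x the Map value is the LocalAudioTrack itself.
    audioTrack.disable();
  });
};
document.getElementById("audio-toggle-on").onclick = function() {
  room.localParticipant.audioTracks.forEach(function(audioTrack) {
    audioTrack.enable();
  });
};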

Related

iOS error: Media stream has no audio tracks createMediaStreamSource

I tried to mix the remote audio tracks into one track and replace my local stream, but I got the following error:
InvalidStateError: Media stream has no audio tracks createMediaStreamSource
Note: this happens on iOS, and I am using SIP.js, Angular, Ionic, and iosrtc.
async mixNWayCall(nawayCall: NWayCall) {
  var receivedTracks = [];
  nawayCall.lines.forEach((line: Line) => {
    if (line !== null && line !== undefined) {
      const sessionDescriptionHandler = line.sipSession.sessionDescriptionHandler;
      if (!(sessionDescriptionHandler instanceof Web.SessionDescriptionHandler)) {
        throw new Error("Session's session description handler not instance of SessionDescriptionHandler.");
      }
      const peerConnection = sessionDescriptionHandler.peerConnection;
      if (!peerConnection) {
        throw new Error("Peer connection closed.");
      }
      peerConnection.getReceivers().forEach((receiver) => {
        if (receiver.track) {
          receivedTracks.push(receiver.track);
        }
      });
    }
  });

  let context = new AudioContext();
  let allReceivedMediaStreams = new MediaStream();

  nawayCall.lines.forEach((line: Line) => {
    if (line !== null && line !== undefined) {
      let mixedOutput = context.createMediaStreamDestination();
      const sessionDescriptionHandler = line.sipSession.sessionDescriptionHandler;
      if (!(sessionDescriptionHandler instanceof Web.SessionDescriptionHandler)) {
        throw new Error("Session's session description handler not instance of SessionDescriptionHandler.");
      }
      const senderPeerConnection = sessionDescriptionHandler.peerConnection;
      if (!senderPeerConnection) { throw new Error("Peer connection closed."); }
      senderPeerConnection.getReceivers().forEach((receiver) => {
        receivedTracks.forEach((track) => {
          allReceivedMediaStreams.addTrack(receiver.track);
          console.log(receiver.track.id, ' receiver.track.id');
          console.log(track.id, ' track.id');
          if (receiver.track.id !== track.id) {
            var sourceStream = context.createMediaStreamSource(new MediaStream([track]));
            sourceStream.connect(mixedOutput);
          }
        });
      });
      senderPeerConnection.getSenders().forEach((sender) => {
        nawayCall.mergeTracks.push(sender.track);
        let senderSourceStream = context.createMediaStreamSource(new MediaStream([sender.track]));
        senderSourceStream.connect(mixedOutput);
        sender.replaceTrack(mixedOutput.stream.getTracks()[0]);
      });
      senderPeerConnection.getSenders()[0].replaceTrack(mixedOutput.stream.getTracks()[0]);
    }
  });

  nawayCall.lines.forEach(async (line: Line) => {
    if (line.held) await this.lineService.onResume(line.id, true);
  });
  nawayCall.held = false;
  if (nawayCall.media.mute)
    await this.lineService.onNWayCallUnmute(nawayCall.id);
}
With the code above I get the error that the stream has no audio tracks. I expected to mix the audio tracks into one track and then merge the call.
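For context, createMediaStreamSource throws this InvalidStateError whenever the MediaStream handed to it contains no audio tracks, which with the code above can happen, for example, if a receiver's track is a video track rather than an audio track. A minimal sketch of guarding the mixing step, reusing context, mixedOutput and senderPeerConnection from the code above, might look like:
// Only build a source node for receivers that actually carry a live audio track;
// otherwise createMediaStreamSource throws "Media stream has no audio tracks".
senderPeerConnection.getReceivers().forEach(function (receiver) {
  var track = receiver.track;
  if (!track || track.kind !== "audio" || track.readyState === "ended") {
    return;
  }
  var stream = new MediaStream([track]);
  if (stream.getAudioTracks().length > 0) {
    context.createMediaStreamSource(stream).connect(mixedOutput);
  }
});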

Twilio iOS Video Call issue

I have created a video call using Twilio JS 2.x and am using it in both my Android and iOS mobile apps. On Android it works perfectly.
But on iOS there is an issue: when users reach the video call page the preview shows, but as soon as the user clicks the Join Room button their video stops and only a black screen is shown. Audio alone works fine in the iOS app.
We are actually wrapping a web view into a mobile app using React Native, but the Twilio logic is written in JavaScript. Can anyone help with this?
I am using the following logic:
$.ajax({
  url: callInitiate + "?userSTAuthToken=" + token,
  type: "POST",
  data: {
    callType: "video",
    subType: "wb-video",
  },
  error: function () {},
  success: function (data) {
    identity = "user_" + userId;
    roomName = meetingId;
    videoToken = data.token;
    console.log("Joining room '" + roomName + "'...");
    if (previewTracks) {
      connectOptions.tracks = previewTracks;
    }
    // Join the Room with the token from the server and the LocalParticipant's Tracks.
    console.log("Test token : " + data.token);
    Twilio.Video.connect(videoToken, connectOptions).then(
      roomJoined,
      function (error) {
        console.log("Unable to connect to Room: " + error.name);
        TV.Util.DialogIntimation(
          "Notification",
          "Unable to connect to Room " + error.name
        );
        logTwilioerror(10);
        $("#maskDiv").hide();
      }
    );
  },
});
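connectOptions is referenced above but never shown; in twilio-video 2.x it is usually something along these lines (values are illustrative, and it is assumed to be defined elsewhere in the question's code):
// Illustrative only: typical 2.x connect options. The `tracks` property is
// overwritten with previewTracks above when a preview already exists.
var connectOptions = {
  name: roomName,          // the Room to join (meetingId above)
  audio: true,             // acquire a microphone track if none is supplied
  video: { width: 640 }    // acquire a camera track with these constraints
};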
function roomJoined(room) {
  window.room = activeRoom = room;
  console.log("Joined as '" + identity + "'");
  addIcons();
  logSessionDetails(videoToken, room.sid, room.localParticipant.sid, room.name);

  // Attach LocalParticipant's Tracks, if not already attached.
  var previewContainer = document.getElementById("local-media");
  if (!previewContainer.querySelector("video")) {
    attachTracks(getTracks(room.localParticipant), previewContainer);
  }

  // Attach the Tracks of the Room's Participants.
  var remoteMediaContainer = document.getElementById("remote-media");
  room.participants.forEach(function (participant) {
    console.log("Already in Room: '" + participant.identity + "'");
    participantConnected(participant, remoteMediaContainer);
  });

  // When a Participant joins the Room, log the event.
  room.on("participantConnected", function (participant) {
    console.log("Joining: '" + participant.identity + "'");
    participantConnected(participant, remoteMediaContainer);
  });

  // When a Participant leaves the Room, detach its Tracks.
  room.on("participantDisconnected", function (participant) {
    console.log(
      "RemoteParticipant '" + participant.identity + "' left the room"
    );
    detachParticipantTracks(participant);
    removeName(participant);
  });

  // Once the LocalParticipant leaves the room, detach the Tracks of all Participants, including that of the LocalParticipant.
  room.on("disconnected", function () {
    console.log("Left---disconnected");
    logVideoActivity(
      "10841",
      "Tutor side disconnected the video Successfully."
    );
    if (previewTracks) {
      previewTracks.forEach(function (track) {
        track.stop();
      });
      previewTracks = null;
    }
    detachParticipantTracks(room.localParticipant);
    room.participants.forEach(detachParticipantTracks);
    room.participants.forEach(removeName);
    activeRoom = null;
  });
}
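The participantConnected and getTracks helpers are not shown in the question. For readers following along, a plausible sketch of them against the twilio-video 2.x publication/subscription API (assuming attachTracks and detachTracks behave as in the first question) is:
// Collect the tracks that are already available on a participant. In 2.x the
// Maps hold TrackPublications, and publication.track is null until subscribed.
function getTracks(participant) {
  return Array.from(participant.tracks.values())
    .filter(function (publication) { return publication.track; })
    .map(function (publication) { return publication.track; });
}

// Attach a remote participant's current tracks and any tracks subscribed later.
function participantConnected(participant, container) {
  participant.tracks.forEach(function (publication) {
    if (publication.isSubscribed) {
      attachTracks([publication.track], container);
    }
  });
  participant.on("trackSubscribed", function (track) {
    attachTracks([track], container);
  });
  participant.on("trackUnsubscribed", function (track) {
    detachTracks([track]);
  });
}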

Audio file does not persist in Cordova with LocalFileSystem.PERSISTENT

I have been trying to store an audio file in persistent storage for two days without success.
So far I am able to create an audio file which records audio from the microphone (the app has the permission) using the code attached below.
The audio file is generated and stored successfully, and I can play it.
But the real problem is that when I close the app, come back, and try to play the file, it shows an error:
"{"message": "Cannot use audio file from resource '/myrecording.wav'",
"code":1}"
The file is not persistent across app sessions even though I used LocalFileSystem.PERSISTENT.
I am not sure whether the problem is with my Media/audio code or my file storage code.
Please find the code attached below.
The function below records audio from the microphone:
function _recordAudio() {
  var deferred = $q.defer();
  var src = "myrecording.wav";
  alert("SRC:" + src);
  window.requestFileSystem(LocalFileSystem.PERSISTENT, 0, function (fileSystem) {
    fileSystem.root.getFile(src, {
      create: true,
      exclusive: false
    }, function (fileEntry) {
      alert("File " + src + " created at " + fileEntry.fullPath);
      var mediaRec = new Media(fileEntry.fullPath,
        function () {
          alert("Success");
        }, function (error) {
          alert("error:" + JSON.stringify(error));
        });
      // Record audio
      mediaRec.startRecord();
      // Stop recording after 5 seconds
      var recTime = 0;
      var recInterval = setInterval(function () {
        recTime = recTime + 1;
        if (recTime >= 5) {
          clearInterval(recInterval);
          mediaRec.stopRecord();
          deferred.resolve(fileEntry.fullPath);
        }
      }, 1000);
    }, function (error) {
      alert("getFile error:" + JSON.stringify(error));
      deferred.reject();
    }); // of getFile
  }, function (error) {
    alert("requestFileSystem error:" + JSON.stringify(error));
    deferred.reject();
  }); // of requestFileSystem
  return deferred.promise;
}
The function below plays the audio:
function _play2() {
  var src = "myrecording.wav";
  window.requestFileSystem(LocalFileSystem.PERSISTENT, 0, function (fileSystem) {
    fileSystem.root.getFile(src, null, function (fileEntry) {
      alert("File " + src + " created at " + fileEntry.fullPath);
      var mediaRec = new Media(fileEntry.fullPath,
        function () {
          alert("Success play2");
        }, function (error) {
          // Getting error after closing and reopening the app
          // Error message = {"message": "Cannot use audio file from resource '/myrecording.wav'","code":1}
          alert("error play2:" + JSON.stringify(error));
        });
      mediaRec.play();
    });
  });
}
I solved this problem by passing the cdvfile: path to the Media plugin in the playAudio function and by copying the file from temporary storage to persistent storage.
I had to use the file's localURL.
This part solved my problem:
fileEntry.file(function (file) {
  _playNow(file.localURL);
});
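The copy from temporary to persistent storage mentioned above is not shown in the snippets below; a sketch of that step with the cordova-plugin-file copyTo API (paths and file name are illustrative, and cordova.file.tempDirectory is iOS-specific) could look like:
// Copy the freshly recorded file from temporary to persistent (data) storage.
window.resolveLocalFileSystemURL(cordova.file.tempDirectory + "myrecording.wav",
  function (sourceEntry) {
    window.resolveLocalFileSystemURL(cordova.file.dataDirectory, function (dirEntry) {
      sourceEntry.copyTo(dirEntry, "myrecording.wav", function (newEntry) {
        console.log("Copied to " + newEntry.nativeURL);
      }, function (error) {
        console.log("copyTo error: " + JSON.stringify(error));
      });
    });
  }, function (error) {
    console.log("resolveLocalFileSystemURL error: " + JSON.stringify(error));
  });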
For the full functions, refer to the code snippets below:
recordAudio: function (projectNo, ItemNo) {
  try {
    var deferred = $q.defer();
    var recordingTime = 0;
    _audioLoader = $("#audioLoader");
    _audioLoader.show();
    UtilityService.showPopup('audio');
    _isRecording = true;
    _recordFileName = "Audio_" + projectNo + "_" + ItemNo + ".wav";
    _mediaRecord = new Media(_recordFileName);
    // Record audio
    _mediaRecord.startRecord();
    var recordingInterval = setInterval(function () {
      recordingTime = recordingTime + 1;
      $('#audioPosition').text(_secondsToHms(recordingTime));
      if (!_isRecording) {
        clearInterval(recordingInterval);
        _mediaRecord.stopRecord();
        _mediaRecord.release();
        deferred.resolve();
      }
    }, 1000);
    // document.getElementById('audioPosition').innerHTML = '0 sec';
    $('#audioPosition').text('0 sec');
    return deferred.promise;
  }
  catch (ex) {
    alert('WMMCPA|recordAudio:- ' + ex.message);
  }
},
Get the file path from persistent storage and send it to the play method:
// To play recorded audio for a specific project item
playAudio: function (projectNo, ItemNo) {
  try {
    _recordFileName = "Audio_" + projectNo + "_" + ItemNo + ".wav";
    var newFileUri = cordova.file.dataDirectory + _recordFileName;
    window.resolveLocalFileSystemURL(newFileUri, function (fileEntry) {
      fileEntry.file(function (file) {
        _playNow(file.localURL);
      }, function (error) {
        alert("WMMCPA|playAudio.file:-" + JSON.stringify(error));
      });
    }, function (error) {
      alert("WMMCPA|playAudio.resolveLocalFileSystemURL:-" + JSON.stringify(error));
    });
  }
  catch (ex) {
    alert("WMMCPA|playAudio:-" + ex.message);
  }
}
function _playNow(src) {
  try {
    var mediaTimer = null;
    _audioLoader = $("#audioLoader");
    _audioLoader.show();
    UtilityService.showPopup('audio');
    // Create Media object from src
    _mediaRecord = new Media(src);
    // Play audio
    _mediaRecord.play();
  } catch (ex) {
    alert('WMMCPA|_playNow:- ' + ex.message);
  }
}

Send a recorded file via Filetransfer with Cordova/Phonegap

I am trying to send a voice recording that I recorded via the Media plugin.
When I try to send the file I get this FileError.NOT_FOUND_ERR error:
Error opening file /myRecording100.wav: Error Domain=NSCocoaErrorDomain Code=260 "The operation couldn’t be completed. (Cocoa error 260.)" UserInfo=0xa358640 {NSFilePath=/myRecording100.wav, NSUnderlyingError=0xa34fb30 "The operation couldn’t be completed. No such file or directory"}
2014-08-06 17:02:26.919 Bring Me[40961:c07] FileTransferError {
code = 1;
source = "/myRecording100.wav";
target = "http://XXXX.xom";
}
However, I can play the voice recording after recording it.
Why would I be able to play the file (showing that the file was recorded and saved correctly) but FileTransfer be unable to send it?
Here is my code (for iOS):
var my_recorder = null;
var mediaFileFullName = null; // iOS
var mediaRecFile = "myRecording100.wav";
var checkFileOnly = false;

/******
 Called when recording starts
******/
function startRecording() {
  checkFileOnly = false;
  window.requestFileSystem(LocalFileSystem.PERSISTENT, 0, onSuccessFileSystem, function() {
    console.log("***test: failed in creating media file in requestFileSystem");
  });
}

function onSuccessFileSystem(fileSystem) {
  if (checkFileOnly === true) {
    // Get File and send
    fileSystem.root.getFile(mediaRecFile, { create: false, exclusive: false }, onOK_GetFile, onFail_GetFile);
  }
  else {
    // Create File
    fileSystem.root.getFile(mediaRecFile, { create: true, exclusive: false }, onOK_SaveFile, onFail_GetFile);
  }
}

/* Save the file */
function onOK_SaveFile(fileEntry) {
  mediaFileFullName = fileEntry.fullPath;
  my_recorder = new Media(mediaFileFullName,
    function() { document.location = "address_form.html"; }, // Redirect the user to another page
    function(err) { console.log("playAudio():callback Record Error: " + err); }
  );
  my_recorder.startRecord();
}

/* Get the file and send it */
function onOK_GetFile(fileEntry) {
  mediaFileFullName = fileEntry.fullPath;
  /*
  // Reading the recorded file WORKS!
  my_player = new Media(mediaFileFullName, onMediaCallSuccess, onMediaCallError);
  my_player.play();
  */
  var options = new FileUploadOptions();
  options.fileKey = "want";
  options.fileName = "file.wav";
  options.mimeType = "audio/wav";
  options.chunkedMode = false;
  options.params = parameters;
  var ft = new FileTransfer();
  ft.upload(mediaFileFullName, "https://SERVER_ADDRESS", win, fail, options);
}

/******
 Called when recording stops
******/
function stopRecording() {
  if (my_recorder) {
    my_recorder.stopRecord();
  }
}
Since v1.0 of the File plugin, to upload a file from the filesystem via the file-transfer plugin, you need to use the .toURL() method to reference it. From the plugin documentation:
If you are upgrading to a new (1.0.0 or newer) version of File, and you have previously been using entry.fullPath as arguments to download() or upload(), then you will need to change your code to use filesystem URLs instead. FileEntry.toURL() and DirectoryEntry.toURL() return a filesystem URL of the form cdvfile://localhost/persistent/path/to/file, which can be used in place of the absolute file path.
So the correct code is:
/* Get the file and send it */
function onOK_GetFile(fileEntry) {
  var options = new FileUploadOptions();
  options.fileKey = "want";
  options.fileName = "file.wav";
  options.mimeType = "audio/wav";
  options.chunkedMode = false;
  options.params = parameters;
  var ft = new FileTransfer();
  ft.upload(fileEntry.toURL(), "https://SERVER_ADDRESS", win, fail, options);
}
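The win, fail and parameters references in both snippets are not defined in the question; simple placeholders (illustrative only) would be:
var parameters = { description: "voice recording" }; // extra form fields for the upload

function win(result) {
  console.log("Upload finished: " + result.bytesSent + " bytes sent, HTTP " + result.responseCode);
}

function fail(error) {
  console.log("Upload failed: code " + error.code + ", source " + error.source);
}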
I got the exact same issue on iOS, and FileUploadOptions didn't work for me.
In case someone is struggling as well, the solution for me was to switch to LocalFileSystem.TEMPORARY.
Here is a snippet which shows a full example (not tested on Android):
var accessType = LocalFileSystem.TEMPORARY; // It was LocalFileSystem.PERSISTENT;

/** Utility function to return a fileEntry together with the metadata. */
var getFile = function(name, create, successCallback, failCallback) {
  WL.Logger.debug("Request for file " + name + " received, create is " + create + ".");
  var onSuccessFileSystem = function(fileSystem) {
    fileSystem.root.getFile(name, { create: create, exclusive: false },
      function(fileEntry) {
        WL.Logger.debug("Success, file entry for " + name + " is " + JSON.stringify(fileEntry));
        fileEntry.getMetadata(function(metadata) {
          WL.Logger.debug("File entry " + name + " metadata is: " + JSON.stringify(metadata));
          successCallback(fileEntry, metadata);
        }, function(err) {
          WL.Logger.debug("Fail to retrieve metadata, error: " + JSON.stringify(err));
          if (failCallback) failCallback(err);
        });
      },
      function(err) {
        WL.Logger.error("Failed to retrieve the media file " + name + ".");
        if (failCallback) failCallback(err);
      });
  };
  window.requestFileSystem = window.requestFileSystem || window.webkitRequestFileSystem;
  window.requestFileSystem(accessType, 0, onSuccessFileSystem, function(err) {
    WL.Logger.error("Failed to access file system.");
    if (failCallback) failCallback(err);
  });
};
var Recorder = declare([ ], {
  mediaSrc : null,
  mediaObj : null,

  constructor : function(data, domNode) {
    this.mediaSrc = "new_recording.wav";
  },

  startRecord : function() {
    var self = this;
    var startRecording = function(source) {
      var onMediaCallSuccess = function() { WL.Logger.debug("Media object success."); };
      var onMediaCallError = function(err) { WL.Logger.error("Error on the media object: " + JSON.stringify(err)); };
      self.mediaObj = new Media(source, onMediaCallSuccess, onMediaCallError);
      self.mediaObj.startRecord();
    };
    // On iOS, first I need to create the file and then I can record.
    if (deviceCheck.phone.ios) {
      WL.Logger.debug("iOS detected, making sure the file exists.");
      getFile(this.mediaSrc, true, function(fileEntry) { startRecording(fileEntry.fullPath); });
    } else {
      if (!deviceCheck.phone.android)
        WL.Logger.warn("Don't know the device, trying to record ...");
      else
        WL.Logger.debug("Android detected.");
      startRecording(this.mediaSrc);
    }
  },

  stopRecord : function() {
    this.mediaObj.stopRecord();
    this.mediaObj.release();
  },

  play : function() {
    var p,
        playSuccess = function() { WL.Logger.debug("Play success."); p.release(); },
        playFail = function() { WL.Logger.debug("Play fail."); };
    p = new Media(this.mediaSrc, playSuccess, playFail);
    p.play();
  },

  getData : function(successCallback, failCallback) {
    var fileName = (deviceCheck.phone.android ? "/sdcard/" : "") + this.mediaSrc;
    WL.Logger.debug("Asking for the file entry ... ");
    getFile(this.mediaSrc, false,
      function(fileEntry, metadata) {
        WL.Logger.debug("Success: I found a file entry: " + fileEntry.nativeURL + ", size is " + metadata.size);
        fileEntry.file(function(file) {
          WL.Logger.debug("Success: file retrieved!");
          var reader = new FileReader();
          reader.onloadend = function(evt) {
            WL.Logger.debug("Sending content and event data to success callback.");
            successCallback(this.result, metadata, evt);
          };
          reader.readAsDataURL(file);
        }, function(err) {
          WL.Logger.error("Error: Impossible to retrieve the file");
          failCallback(err);
        });
      }, function(err) {
        WL.Logger.error("Fail: no file entry found: " + JSON.stringify(err));
        failCallback(err);
      });
  }
});
There is a bit of Worklight (debug output) and dojo (declare), but this code could be useful as reference.
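A brief usage sketch of the Recorder above (the timing and the follow-up upload are assumptions; getData hands back the recording as a base64 data URL):
var rec = new Recorder();
rec.startRecord();

// Stop after five seconds, then read the recording and hand it off.
setTimeout(function () {
  rec.stopRecord();
  rec.getData(function (dataUrl, metadata) {
    WL.Logger.debug("Recorded " + metadata.size + " bytes; payload ready to send.");
    // dataUrl could now be POSTed to a server, or the file uploaded with
    // FileTransfer using fileEntry.toURL() as in the first answer.
  }, function (err) {
    WL.Logger.error("Could not read recording: " + JSON.stringify(err));
  });
}, 5000);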

Titanium: In-app purchase for iPhone (Ti.Storekit)

I'm using the Ti.Storekit module and I have a problem: I can't get it to work. This is my code:
Storekit.requestProducts(['com.rotary.imrotarian.upgradeyears1', 'com.rotary.imrotarian.upgradeyears2'], function (evt) {
  if (!evt.success) {
    alert('ERROR: We failed to talk to Apple!');
  }
  else if (evt.invalid) {
    alert('ERROR: We requested an invalid product!');
    alert(evt);
  }
  else {
    success(evt.products[0]);
    success(evt.products[1]);
    var dialog = Titanium.UI.createOptionDialog({
      title: APP.L('inapp_purchase_store_name'),
      options: [product1.title + ' - ' + product1.formattedPrice, product2.title + ' - ' + product2.formattedPrice, APP.L('Cancel')],
      cancel: 3
    });
    dialog.show();
    dialog.addEventListener('click', function(e) {
      if (e.index == 0) {
        purchaseProduct(product1);
      }
      if (e.index == 1) {
        purchaseProduct(product2);
      }
    });
  }
});
And when I use it, it's not an Apple error; it's the evt result that's wrong. If I print evt, the alert looks like this:
{
    invalid = {
        "com.rotary.imrotarian.upgradeyears1",
        "com.rotary.imrotarian.upgradeyears2"
    };
    products = {
    };
    source = "[object TiStorekitProductRequest]";
    success = 1;
    type = callback;
}
The purchaseProduct function is this:
function purchaseProduct(product) {
  Ti.API.info('----- CALL APPLE PURCHASE ' + JSON.stringify(product.identifier));
  Storekit.purchase(product, 1, function (evt) {
    Ti.API.info('----- Apple answer purchase: ' + JSON.stringify(evt));
    switch (evt.state) {
      case Storekit.FAILED:
        alert(APP.L('inapp_purchase_store_error_buy_failed'));
        break;
      case Storekit.PURCHASED:
      case Storekit.RESTORED:
        // var receipt = Ti.Utils.base64encode(evt.receipt).text;
        // Ti.App.Properties.setString('recibo', receipt);
        alert(APP.L('inapp_purchase_store_recipt_verified'));
        markProductAsPurchased(product.identifier);
        break;
    }
  });
}
I don't know what I'm doing wrong, but it seems like it doesn't detect these products.
I hope someone can help me.
Thanks.
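For reference, since the dump above shows both identifiers under invalid and an empty products list, a more defensive version of the request callback (a sketch only, with productIds standing for the identifier array from the question; it does not by itself explain why Apple reports the products as invalid) would bail out before indexing evt.products:
Storekit.requestProducts(productIds, function (evt) {
  if (!evt.success) {
    alert('ERROR: We failed to talk to Apple!');
    return;
  }
  if (evt.invalid && evt.invalid.length > 0) {
    // Identifiers Apple does not recognise end up here (typically a wrong ID,
    // a product not yet configured/approved, or missing store agreements).
    Ti.API.warn('Invalid product identifiers: ' + JSON.stringify(evt.invalid));
  }
  if (!evt.products || evt.products.length < 2) {
    alert('Products are not available for purchase yet.');
    return;
  }
  // Safe to build the option dialog from evt.products here.
});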
