add live captions

ian ramzy 2020-03-28 22:47:24 -04:00
parent cb99d5197e
commit 4c4ed45113
4 changed files with 151 additions and 75 deletions

View File

@@ -79,12 +79,12 @@
         </button>
         <div class="HoverState">End Call</div>
     </div>
-    <!--    <div class="buttonContainer">-->
-    <!--        <button class="hoverButton" onclick="{speechWrapper()}">-->
-    <!--            <i class="fas fa-closed-captioning fa-xs"></i>-->
-    <!--        </button>-->
-    <!--        <div class="HoverState">Start Live Captioning</div>-->
-    <!--    </div>-->
+    <div class="buttonContainer">
+        <button class="hoverButton" onclick="{requestToggleCaptions()}">
+            <i class="fas fa-closed-captioning fa-xs"></i>
+        </button>
+        <div class="HoverState" id="caption-text">Start Live Caption</div>
+    </div>
 </div>

View File

@@ -114,19 +114,25 @@ video {
 }
 
 #remote-video-text {
-    padding: 0;
+    /*padding: 0;*/
     margin: 0;
+    width: 60vw;
     position: absolute;
-    top: calc(40%);
-    left: 50%;
-    -ms-transform: translate(-50%, -50%);
-    transform: translate(-50%, -50%);
+    top: calc(80%);
+    left: 20vw;
+    /*-ms-transform: translate(-50%, -90%);*/
+    /*transform: translate(-50%, -90%);*/
     z-index: 1;
     color: white;
     font-family: "Heebo", sans-serif;
-    white-space: nowrap;
+    font-size: 40px;
+    /*white-space: nowrap;*/
     font-weight: bold;
+    text-align: left;
+    background: rgba(0, 0, 0, 0.20);
+    border-radius: 10px;
+    padding: 10px;
 }
 
 #remote-video {

View File

@@ -4,14 +4,7 @@ if (window.location.pathname === "/room") {
     url = window.location.href;
     const roomHash = url.substring(url.lastIndexOf('/') + 1).toLowerCase();
-    document.title = 'Neon Chat - ' + url.substring(url.lastIndexOf('/') + 1);
-    // audio: {
-    //     echoCancellation: {exact: hasEchoCancellation}
-    // },
     function getBrowserName() {
@@ -57,6 +50,7 @@ var VideoChat = {
     socket: io(),
     remoteVideo: document.getElementById('remote-video'),
     localVideo: document.getElementById('local-video'),
+    recognition: undefined,
 
     // Call to getUserMedia (provided by adapter.js for cross browser compatibility)
     // asking for access to both the video and audio streams. If the request is
@@ -153,10 +147,32 @@ var VideoChat = {
             // over the socket connection.
             VideoChat.socket.on('candidate', VideoChat.onCandidate);
             VideoChat.socket.on('answer', VideoChat.onAnswer);
+            VideoChat.socket.on('requestToggleCaptions', () => toggleSendCaptions());
+            VideoChat.socket.on('recieveCaptions', (captions) => VideoChat.recieveCaptions(captions));
             callback();
         };
     },
+    recieveCaptions: function (captions) {
+        // reset button to start captions
+        $('#remote-video-text').text("").fadeIn();
+        if (!receivingCaptions) {
+            $('#remote-video-text').text("").fadeOut();
+        }
+        if (captions === "notusingchrome") {
+            alert("Other caller must be using chrome for this feature to work");
+            receivingCaptions = false;
+            $('#remote-video-text').text("").fadeOut();
+            $('#caption-text').text("Start Live Caption");
+            return
+        }
+        if (captions.length > 100) {
+            $('#remote-video-text').text(captions.substr(captions.length - 199));
+        } else {
+            $('#remote-video-text').text(captions);
+        }
+    },
     // When the peerConnection generates an ice candidate, send it over the socket to the peer.
     onIceCandidate: function (event) {
         logIt("onIceCandidate");
@@ -263,7 +279,7 @@ var VideoChat = {
         Snackbar.close();
         VideoChat.remoteVideo.style.background = 'none';
         VideoChat.connected = true;
-        $('#remote-video-text').text("");
+        $('#remote-video-text').fadeOut();
         $('#local-video-text').fadeOut();
 
         var timesRun = 0;
@@ -448,60 +464,100 @@ function switchStreamHelper(stream) {
 $("#moveable").draggable({containment: 'window'});
 
-// function speechWrapper() {
-//     try {
-//         var SpeechRecognition = window.SpeechRecognition || window.webkitSpeechRecognition;
-//         var recognition = new SpeechRecognition(VideoChat.remoteVideo.srcObject.getAudioTracks()[0]);
-//         // var recognition = new SpeechRecognition();
-//     } catch (e) {
-//         console.error(e);
-//         alert("error importing speech library")
-//     }
-//
-//     // If false, the recording will stop after a few seconds of silence.
-//     // When true, the silence period is longer (about 15 seconds),
-//     // allowing us to keep recording even when the user pauses.
-//     recognition.continuous = true;
-//     recognition.interimResults = true;
-//     // recognition.maxAlternatives = 3;
-//
-//     var finalTranscript;
-//     recognition.onresult = (event) => {
-//         let interimTranscript = '';
-//         for (let i = event.resultIndex, len = event.results.length; i < len; i++) {
-//             let transcript = event.results[i][0].transcript;
-//             if (event.results[i].isFinal) {
-//                 finalTranscript += transcript;
-//             } else {
-//                 interimTranscript += transcript;
-//                 $('#remote-video-text').text(interimTranscript);
-//                 console.log(interimTranscript);
-//             }
-//         }
-//     };
-//
-//     recognition.onstart = function () {
-//         console.log("recording on");
-//     };
-//
-//     recognition.onspeechend = function () {
-//         console.log("on speech end");
-//     };
-//
-//     recognition.onerror = function (event) {
-//         if (event.error === 'no-speech') {
-//             console.log("no speech detected");
-//         }
-//     };
-//
-//     recognition.onend = function () {
-//         console.log("on end");
-//     };
-//
-//     // recognition.stop();
-//     recognition.start();
-// }
+var sendingCaptions = false;
+var receivingCaptions = false;
+
+function requestToggleCaptions() {
+    if (!VideoChat.connected) {
+        alert("You must be connected to a peer to use Live Caption")
+        return
+    }
+    if (receivingCaptions) {
+        $('#remote-video-text').text("").fadeOut();
+        $('#caption-text').text("Start Live Caption");
+        receivingCaptions = false;
+    } else {
+        alert("This is an experimental feature. Live transcription requires the other user to have chrome.");
+        $('#caption-text').text("End Live Caption");
+        receivingCaptions = true;
+    }
+    VideoChat.socket.emit('requestToggleCaptions', roomHash);
+}
+
+function toggleSendCaptions() {
+    if (sendingCaptions) {
+        sendingCaptions = false;
+        VideoChat.recognition.stop();
+    } else {
+        startSpeech();
+        sendingCaptions = true;
+    }
+}
+
+function startSpeech() {
+    try {
+        var SpeechRecognition = window.SpeechRecognition || window.webkitSpeechRecognition;
+        var recognition = new SpeechRecognition();
+        VideoChat.recognition = recognition;
+    } catch (e) {
+        sendingCaptions = false;
+        logIt(e);
+        logIt("error importing speech library");
+        VideoChat.socket.emit('sendCaptions', "notusingchrome", roomHash);
+        return
+    }
+    // If false, the recording will stop after a few seconds of silence.
+    // When true, the silence period is longer (about 15 seconds),
+    // allowing us to keep recording even when the user pauses.
+    recognition.continuous = true;
+    recognition.interimResults = true;
+    // recognition.maxAlternatives = 3;
+    var finalTranscript;
+    recognition.onresult = (event) => {
+        let interimTranscript = '';
+        for (let i = event.resultIndex, len = event.results.length; i < len; i++) {
+            let transcript = event.results[i][0].transcript;
+            if (event.results[i].isFinal) {
+                finalTranscript += transcript;
+            } else {
+                interimTranscript += transcript;
+                VideoChat.socket.emit('sendCaptions', interimTranscript, roomHash);
+                // console.log(interimTranscript);
+            }
+        }
+    };
+    recognition.onstart = function () {
+        console.log("recording on");
+    };
+    recognition.onspeechend = function () {
+        console.log("on speech end");
+    };
+    recognition.onerror = function (event) {
+        if (event.error === 'no-speech') {
+            console.log("no speech detected");
+        }
+    };
+    recognition.onend = function () {
+        console.log("on end");
+        console.log(sendingCaptions);
+        if (sendingCaptions) {
+            startSpeech()
+        } else {
+            VideoChat.recognition.stop();
+        }
+    };
+    recognition.start();
+}
 
 // auto get media
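
For context, startSpeech above follows the usual Web Speech API pattern: a continuous recognizer with interim results whose onend handler restarts recognition, since Chrome ends a session after a short silence. A stripped-down sketch of that loop without the socket plumbing (the keepRunning flag stands in for sendingCaptions and is illustrative, not part of the commit):

// Minimal sketch of the recognition loop used above (Chrome-only SpeechRecognition).
var SpeechRecognition = window.SpeechRecognition || window.webkitSpeechRecognition;
var recognition = new SpeechRecognition();
var keepRunning = true;            // illustrative stand-in for sendingCaptions

recognition.continuous = true;     // keep listening through pauses in speech
recognition.interimResults = true; // fire onresult with partial transcripts

recognition.onresult = function (event) {
    var interim = '';
    for (var i = event.resultIndex; i < event.results.length; i++) {
        if (!event.results[i].isFinal) {
            interim += event.results[i][0].transcript;
        }
    }
    // In the commit, this is where the interim transcript is emitted to the
    // peer via VideoChat.socket.emit('sendCaptions', interimTranscript, roomHash).
    console.log(interim);
};

// Chrome still ends recognition periodically; restarting in onend keeps it alive.
recognition.onend = function () {
    if (keepRunning) {
        recognition.start();
    }
};

recognition.start();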

View File

@@ -85,6 +85,20 @@ io.on('connection', function (socket) {
         logIt('Received answer. Broadcasting...', room);
         socket.broadcast.to(room).emit('answer', answer);
     });
+
+    // Relay captions to the rest of the room
+    socket.on('sendCaptions', function (captions, room) {
+        logIt(captions, room);
+        socket.broadcast.to(room).emit('recieveCaptions', captions);
+    });
+
+    // Relay caption toggle requests to the rest of the room
+    socket.on('requestToggleCaptions', function (room) {
+        logIt("requesting captions", room);
+        socket.broadcast.to(room).emit('requestToggleCaptions');
+    });
 });
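
Taken together, the caption round trip is: the viewer clicks the new captions button, the client emits requestToggleCaptions, the server broadcasts it to the other peer, that peer starts (or stops) SpeechRecognition and streams interim transcripts back with sendCaptions, and the server rebroadcasts them as recieveCaptions for display over the remote video. The server's only job is relaying within the room; a minimal sketch of that broadcast pattern in isolation (the port and the join/relay event names are illustrative, not part of the commit):

// Illustrative sketch, not part of the commit: socket.broadcast.to(room)
// emits to every other socket in the room, excluding the sender.
const io = require('socket.io')(3000);

io.on('connection', function (socket) {
    socket.on('join', function (room) {
        socket.join(room);
    });
    socket.on('relay', function (payload, room) {
        // Forward the payload to everyone else in the room.
        socket.broadcast.to(room).emit('relay', payload);
    });
});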