mirror of https://github.com/ianramzy/decentralized-video-chat.git (synced 2024-11-23 10:39:20 +08:00)
update readme + license, attempt voice recognition
This commit is contained in:
parent 93c38f29a9
commit cb99d5197e
LICENSE (2 lines changed)
@@ -1,6 +1,6 @@
 MIT License
 
-Copyright (c) 2019 Ian Ramzy
+Copyright (c) 2020 Ian Ramzy
 
 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal
README.md (14 lines changed)
@@ -12,9 +12,15 @@ technology.
 
 ![screenshot](public/images/githubpreview.png "Video Calling")
 
+## Features
+* Screen sharing
+* Auto-scaling video quality
+* No download required, entirely browser based
+* Direct peer to peer connection ensures lowest latency
+* Single use disposable chat rooms
+
 ## Quick start
-* You will need to have Node.js installed, this project has been tested with Node version 10.X and 12.X but
-probably works with more versions
+* You will need to have Node.js installed, this project has been tested with Node version 10.X and 12.X
 * Clone this repo
 ```
 git clone https://github.com/ianramzy/decentralized-video-chat
@@ -28,8 +34,8 @@ npm install
 ```
 npm start
 ```
-* Open two browsers on your laptop and point them `localhost:3000`.
-* If you want to use a client on another computer, make sure you publish your server on an HTTPS connection.
+* Open `localhost:3000` in Chrome or Firefox.
+* If you want to use a client on another computer/network, make sure you publish your server on an HTTPS connection.
 You can use a service like [ngrok](https://ngrok.com/) for that.
 
 ## Quick Deployment
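The HTTPS requirement in the bullets above comes from browsers restricting `getUserMedia` to secure contexts; plain-HTTP `localhost` is exempt, which is why the two-browser local test works without a certificate. A minimal illustrative check, not code from this commit (the function name is mine):

```
// Hedged sketch: confirm the page can actually request camera/microphone access.
async function checkMediaAccess() {
  if (!window.isSecureContext) {
    // Served over plain HTTP from another machine: the call below would be blocked,
    // hence the README's advice to publish over HTTPS (e.g. via ngrok).
    console.warn("Not a secure context; camera and microphone access will fail.");
    return null;
  }
  try {
    return await navigator.mediaDevices.getUserMedia({ video: true, audio: true });
  } catch (err) {
    console.error("getUserMedia failed:", err);
    return null;
  }
}
```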
@@ -79,6 +79,12 @@
 </button>
 <div class="HoverState">End Call</div>
 </div>
+<!-- <div class="buttonContainer">-->
+<!--   <button class="hoverButton" onclick="{speechWrapper()}">-->
+<!--     <i class="fas fa-closed-captioning fa-xs"></i>-->
+<!--   </button>-->
+<!--   <div class="HoverState">Start Live Captioning</div>-->
+<!-- </div>-->
 </div>
 
 
@@ -6,6 +6,14 @@ url = window.location.href;
 const roomHash = url.substring(url.lastIndexOf('/') + 1).toLowerCase();
 
+
+// audio: {
+//   echoCancellation: {exact: hasEchoCancellation}
+// },
+
+
+
+
 
 function getBrowserName() {
  var name = "Unknown";
  if (window.navigator.userAgent.indexOf("MSIE") !== -1) {
@@ -30,7 +38,6 @@ var isWebRTCSupported =
   navigator.msGetUserMedia ||
   window.RTCPeerConnection;
-
 // Handle IE
 if (!isWebRTCSupported || browserName === "Safari" || browserName === "MSIE") {
   alert("Your browser doesn't support Neon Chat. Please use Chrome or Firefox.");
   window.location.href = "/";
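The support gate in this hunk keys off the legacy prefixed `getUserMedia` globals. A hedged sketch of the same check written against the unprefixed `navigator.mediaDevices` API (illustrative only, not part of this commit; the variable name is mine):

```
// Illustrative alternative: modern feature detection for the same capability gate.
var isWebRTCSupportedModern =
  !!(navigator.mediaDevices && navigator.mediaDevices.getUserMedia) &&
  typeof window.RTCPeerConnection === "function";

if (!isWebRTCSupportedModern) {
  alert("Your browser doesn't support Neon Chat. Please use Chrome or Firefox.");
  window.location.href = "/";
}
```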
@@ -427,7 +434,7 @@ function swap() {
 function switchStreamHelper(stream) {
   let videoTrack = stream.getVideoTracks()[0];
   if (VideoChat.connected) {
-    var sender = VideoChat.peerConnection.getSenders().find(function (s) {
+    var sender = VideoChat.peerConnection.getSenders().find(function (s) {
       return s.track.kind === videoTrack.kind;
     });
     sender.replaceTrack(videoTrack);
@@ -440,5 +447,67 @@ function switchStreamHelper(stream) {
 
 $("#moveable").draggable({containment: 'window'});
 
+
+
+// function speechWrapper() {
+//   try {
+//     var SpeechRecognition = window.SpeechRecognition || window.webkitSpeechRecognition;
+//     var recognition = new SpeechRecognition(VideoChat.remoteVideo.srcObject.getAudioTracks()[0]);
+//     // var recognition = new SpeechRecognition();
+//   } catch (e) {
+//     console.error(e);
+//     alert("error importing speech library")
+//   }
+//
+//   // If false, the recording will stop after a few seconds of silence.
+//   // When true, the silence period is longer (about 15 seconds),
+//   // allowing us to keep recording even when the user pauses.
+//   recognition.continuous = true;
+//   recognition.interimResults = true;
+//   // recognition.maxAlternatives = 3;
+//
+//   var finalTranscript;
+//   recognition.onresult = (event) => {
+//     let interimTranscript = '';
+//     for (let i = event.resultIndex, len = event.results.length; i < len; i++) {
+//       let transcript = event.results[i][0].transcript;
+//       if (event.results[i].isFinal) {
+//         finalTranscript += transcript;
+//       } else {
+//         interimTranscript += transcript;
+//         $('#remote-video-text').text(interimTranscript);
+//         console.log(interimTranscript);
+//       }
+//     }
+//   };
+//
+//   recognition.onstart = function () {
+//     console.log("recording on");
+//   };
+//
+//   recognition.onspeechend = function () {
+//     console.log("on speech end");
+//   };
+//
+//   recognition.onerror = function (event) {
+//     if (event.error === 'no-speech') {
+//       console.log("no speech detected");
+//     }
+//   };
+//
+//   recognition.onend = function () {
+//     console.log("on end");
+//   };
+//
+//   // recognition.stop();
+//   recognition.start();
+// }
+
+
 // auto get media
 VideoChat.requestMediaStream();
+
+
+
+
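The commented-out `speechWrapper` above hands the remote audio track to the `SpeechRecognition` constructor, but the Web Speech API constructor takes no arguments and only transcribes the local microphone, which is presumably why the captioning attempt stayed disabled. A hedged sketch of local-microphone captioning under that constraint; the element id mirrors the commented code, the function name is illustrative, and `finalTranscript` is initialized so `+=` never concatenates onto `undefined`:

```
// Hedged sketch: caption the local microphone with the Web Speech API.
function startLocalCaptions() {
  var SpeechRecognition = window.SpeechRecognition || window.webkitSpeechRecognition;
  if (!SpeechRecognition) {
    alert("Speech recognition is not supported in this browser.");
    return;
  }
  var recognition = new SpeechRecognition(); // no arguments: it always listens to the local mic
  recognition.continuous = true;     // keep listening across pauses in speech
  recognition.interimResults = true; // surface partial transcripts as they arrive

  var finalTranscript = "";
  recognition.onresult = function (event) {
    var interimTranscript = "";
    for (var i = event.resultIndex; i < event.results.length; i++) {
      var transcript = event.results[i][0].transcript;
      if (event.results[i].isFinal) {
        finalTranscript += transcript;
      } else {
        interimTranscript += transcript;
      }
    }
    $('#remote-video-text').text(finalTranscript + interimTranscript);
  };
  recognition.onerror = function (event) {
    console.error("speech recognition error:", event.error);
  };
  recognition.start();
}
```

Captioning the remote peer would instead mean running recognition on each side's own microphone and shipping the transcript across the existing signaling channel (or a data channel), since the recognizer cannot be pointed at an incoming stream.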