import { useContext, useState, useEffect, useRef } from 'react';
import { setUserApp } from '../../action/action';
import { connect } from 'react-redux';
import { SocketContext } from './SocketContext';
import { Col, Row } from 'antd';
import Player from '../tchat/elements/player/Player'
import Banner from '../tchat/elements/banner/Banner'
import Center from '../tchat/elements/center/Center'
import Middle from '../tchat/elements/middle/Middle'
import sendRequest from '../socket/requestModule'
import audioVideoTools from '../../audioTools/audioTools'
import { loader } from '../../mediasoup/mediasoup'
import JspanelCam from "../tchat/elements/jsPanel/JsPanel"
import { listAllEventListeners } from '../utils/ListAllEventListeners'
import { join_room } from './chatUtils/utils';
import useRefs from './ref/useRefs'
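// Main chat interface: wires the Socket.IO signaling channel to mediasoup
// (send/receive transports, producers and consumers) and renders the JsPanel
// webcam windows plus the chat layout (Middle, Center).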
const InterfaceChat = ({ user, ...props }) => {
//audio & video settings
let device = useRef(null)
let audioContext = null;
let audioFftArray = null;
let id = null;
let audioAnalysers = {};
let audioConsumers = useRef({})
let audioConstraints = null;
let videoConstraints = null;
let producerTransport = useRef({})
let consumerTransport = useRef({})
let videoTrack = useRef(null)
let videoProducer = useRef(null)
let socket = useContext(SocketContext);
let videoEnabled = useRef(null)
let videoConsumers = useRef({})
const username = useRef(null)
let audioEnabled = useRef(false)
let audioTrack = useRef(null)
let audioProducer = useRef(null)
const [renderJspanelCam, setRenderJspanelCam] = useState([]);
let micro = useRef(false)
let webcam = useRef(false)
const localwebcamRef = useRef();
const localMicroRef = useRef();
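// Toggle the local webcam: start/stop the local video pipeline and reflect
// the state in the webcam icon color.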
const toggleWebcam = async () => {
if (!webcam.current) {
await enableLocalVideo();
localwebcamRef.current.style.color = "green";
webcam.current = true
} else {
disableLocalVideo();
localwebcamRef.current.style.color = "black";
webcam.current = false
}
};
const toggleAudio = async () => {
if (!micro.current) {
localMicroRef.current.style.color = "green";
micro.current = true
await enableLocalAudio();
} else {
stopAudioProducer();
localMicroRef.current.style.color = "black";
micro.current = false
}
};
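// Startup sequence: initialize the audio tools, load the mediasoup device with
// the router RTP capabilities, create the send/receive transports (once), wire
// their listeners, then consume the video producers already present in the room.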
const starting = async () => {
let isConnected = true
audioVideoTools.initializeAudioTools(audioContext, audioFftArray, id, audioAnalysers);
audioVideoTools.initializeDeviceOptions(audioConstraints, videoConstraints);
const routerRtpCapabilities = await sendRequest(socket, 'getRouterRtpCapabilities', {});
device.current = await loader(routerRtpCapabilities)
if (!producerTransport.current.on) {
let params = await sendRequest(socket, 'createProducerTransport', {});
producerTransport.current = await device.current.createSendTransport(params);
console.log('producerTransport ready')
}
if (!consumerTransport.current.on) {
let params = await sendRequest(socket, 'createConsumerTransport', {});
consumerTransport.current = await device.current.createRecvTransport(params);
console.log('consumerTransport ready')
}
//init with mediasoup listener
setupTransportListeners();
let currentInfo;
try {
currentInfo = await sendRequest(socket, 'getCurrentRemoteInfo', { localId: socket.id });
// TODO: send the array of users to the user list
currentInfo.remoteUsers.forEach(user => {
// addUser(user.id, user);
});
// automatically open the cams when entering the room
currentInfo.remoteVideoIds.forEach(rId => {
// const addConsumer = async (transport, remoteSocketId, prdId, trackKind, username)
console.log(rId)
//addConsumer(consumerTransport.current, props.consumeIdWebcam[0], null, 'video', name);
addConsumer(consumerTransport, rId, null, 'video', rId);
});
currentInfo.remoteAudioIds.forEach(rId => {
// addConsumer(consumerTransport, rId, null, 'audio');
});
} catch (error) {
console.error(`[connect] [getCurrentRemoteInfo] error message: ${error}`);
}
isConnected = true
// updateUI();
console.log('Socket IO connection established.');
//await enableLocalAudio()
}
useEffect(() => {
props?.dispatch(setUserApp({ addConsumer: addConsumer, addConsumerPrivate: addConsumerPrivate }))
if (socket) {
const init = async () => {
socket.emit('hello', 'hello world')
socket.on('connect', (e) => join_room({ ...e, ...props, socket, user }));
socket.on('removeJspanel', removeJSpanel);
// socket.on('disconnect' , reset )
await starting()
}
init().then(() => {
})
return () => {
socket.off('connect');
socket.off('removeJspanel', removeJSpanel);
}
}
}, [socket]);
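// Attach mediasoup transport listeners: forward 'connect' and 'produce' events
// to the server over the signaling socket, and close a transport when its
// connection state switches to 'failed'.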
const setupTransportListeners = () => {
try {
if (producerTransport.current) {
producerTransport.current.on('connect', async ({ dtlsParameters }, callback, errback) => {
sendRequest(socket, 'connectProducerTransport', { dtlsParameters: dtlsParameters })
.then(callback)
.catch(errback);
});
producerTransport.current.on('produce', async ({ kind, rtpParameters }, callback, errback) => {
try {
const { id } = await sendRequest(socket, 'produce', {
transportId: producerTransport.current.id,
kind,
rtpParameters
});
callback({ id });
} catch (err) {
errback(err);
}
});
producerTransport.current.on('connectionstatechange', (state) => {
switch (state) {
case 'connecting':
console.log('producerTransport connecting...');
break;
case 'connected':
console.log('producerTransport connected.');
break;
case 'failed':
console.log('producerTransport connection failed.');
producerTransport.current.close();
break;
default:
break;
}
});
} else {
console.error('[setupTransportListeners] producerTransport has not been initialized (the app will probably crash)');
}
if (consumerTransport.current) {
consumerTransport.current.on('connect', async ({ dtlsParameters }, callback, errback) => {
sendRequest(socket, 'connectConsumerTransport', { dtlsParameters: dtlsParameters })
.then(callback)
.catch(errback);
});
consumerTransport.current.on('connectionstatechange', (state) => {
switch (state) {
case 'connecting':
console.log('consumerTransport connecting...');
break;
case 'connected':
console.log('consumerTransport connected.');
break;
case 'failed':
console.log('consumerTransport connection failed.');
consumerTransport.current.close();
break;
default:
break;
}
});
} else {
console.error('[setupTransportListeners] consumerTransport has not been initialized (the app will probably crash)');
}
}
catch (err) {
console.log(err)
}
}
function addUser(id, user) {
if (!user.isGhost) {
//let userFeedCheck = $(`.user-feed[data-id="${id}"]`);
}
}
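// Capture the local camera, render a JsPanel for the local preview, then start
// the video producer so the track is sent to the server.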
const enableLocalVideo = async () => {
if (!videoEnabled.current) {
//props.localWebcamUserRef.current.style.backgroundColor = "green";
await enableLocalVideoTrack()
setRenderJspanelCam(component => ([...component, <JspanelCam toggleWebcam={toggleWebcam} toggleAudio={toggleAudio} key={'local'} removeJspanel={disableLocalVideoTrack} webcam={webcam} micro={micro} cam={{ id: 'local', track: videoTrack.current, username: username.current }} />]))
await startVideoProducer();
videoEnabled.current = true;
}
}
async function disableLocalVideo() {
if (videoEnabled.current) {
await stopVideoProducer();
disableLocalVideoTrack();
// isScreensharing = false;
videoEnabled.current = false;
}
}
async function stopVideoProducer() {
if (videoProducer.current) {
videoProducer.current.close();
videoProducer.current = null;
await sendRequest(socket, 'stopVideoProducer', {});
}
}
function disableLocalVideoTrack() {
if (videoTrack.current) {
videoTrack.current.stop();
videoTrack.current = null;
const jsPanel = document.getElementById("jsPanel_local")
if (jsPanel) {
// jsPanel.close()
removeJSpanel("local", "video")
}
} else {
console.warn('[disableLocalVideoTrack] videoTrack does not exist (should not cause any problem)');
}
}
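// Request the camera via getUserMedia and keep only the first video track
// (any extra tracks are stopped immediately).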
const enableLocalVideoTrack = async () => {
if (!videoTrack.current) {
let stream = null;
let localVideo = document.getElementById('local_video')
try {
stream = await navigator.mediaDevices.getUserMedia({ audio: false, video: true });
if (stream) {
let tracks = stream.getVideoTracks();
if (tracks && tracks.length) {
videoTrack.current = tracks[0];
for (let i = 1; i < tracks.length; i++) {
tracks[i].stop();
tracks[i] = null;
}
tracks = null;
stream = null;
}
}
} catch (error) {
videoTrack.current = null;
console.error(`[enableLocalVideoTrack] the camera could not be enabled. Error message: ${error}`);
}
}
}
const startVideoProducer = async (value) => {
if (!videoProducer.current && videoTrack.current) {
try {
videoProducer.current = await producerTransport.current.produce(
{
track: videoTrack.current,
encodings: [
{ maxBitrate: 100000 }
],
oneToOne: value
}
);
console.log('Started videoProducer');
} catch (error) {
console.error(`[startVideoProducer] the camera video could not be sent to the server. Error message: ${error}`);
}
}
}
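// Capture the microphone and start the audio producer.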
async function enableLocalAudio() {
if (!audioEnabled.current) {
// props.localMicroUserRef.current.style.backgroundColor = "green";
await enableLocalAudioTrack();
await startAudioProducer();
audioEnabled.current = true;
}
}
const enableLocalAudioTrack = async () => {
if (!audioTrack.current) {
let stream = null;
try {
stream = await navigator.mediaDevices.getUserMedia({ audio: true, video: false });
if (stream) {
let tracks = stream.getAudioTracks();
if (tracks && tracks.length) {
audioTrack.current = tracks[0];
for (let i = 1; i < tracks.length; i++) {
tracks[i].stop();
tracks[i] = null;
}
tracks = null;
stream = null;
}
}
} catch (error) {
audioTrack.current = null;
console.error(`[enableLocalAudioTrack] the microphone could not be enabled. Error message: ${error}`);
}
}
}
async function startAudioProducer() {
if (!audioProducer.current && audioTrack.current) {
try {
audioProducer.current = await producerTransport.current.produce(
{
track: audioTrack.current,
encodings: [
{ maxBitrate: 7500 }
]
}
);
console.log('Started audioProducer');
} catch (error) {
console.error(`[startAudioProducer] the microphone audio could not be sent to the server. Error message: ${error}`);
}
}
}
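// Close the audio producer, release the local microphone track and notify the server.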
async function stopAudioProducer() {
if (audioProducer.current) {
// props.localMicroUserRef.current.style.backgroundColor = "transparent";
audioProducer.current.close()
audioProducer.current = null;
if (audioTrack.current) {
audioTrack.current.stop();
}
audioTrack.current = null
audioEnabled.current = false;
await sendRequest(socket, 'stopAudioProducer', {});
}
}
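// Consume a remote producer: ask the server for consumer parameters
// ('consumeAdd'), create the consumer on the receive transport, resume it on
// the server side for video, then either attach the track to an existing
// element or render a new JsPanel for it.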
const addConsumer = async (transport, remoteSocketId, prdId, trackKind, username, socketContext) => {
if (!socket) {
socket = socketContext;
}
const { rtpCapabilities } = device.current;
const data = await sendRequest(socket, 'consumeAdd', {
rtpCapabilities: rtpCapabilities,
remoteId: remoteSocketId,
kind: trackKind
})
.catch(error => {
console.error(`[addConsumer] consumeAdd returned an error: ${error}`);
});
let {
producerId,
id,
kind,
rtpParameters,
isTeacher
} = data;
if (prdId && (prdId !== producerId)) {
console.warn('[addConsumer] producerId does not match, possible server synchronization issue (problematic, should not happen)');
}
let codecOptions = {};
const consumer = await consumerTransport.current.consume({
id,
producerId,
kind,
rtpParameters,
codecOptions
});
if (kind === 'video') {
props?.dispatch(setUserApp({ userConsumer: { id: remoteSocketId, consumer: consumer } }));
videoConsumers.current[remoteSocketId] = consumer;
} else if (kind === 'audio') {
audioConsumers.current[remoteSocketId] = consumer;
} else {
console.error(`[addConsumer] unexpected kind '${kind}' (critical error, unpredictable result)`);
}
consumer.remoteId = remoteSocketId;
consumer.on("transportclose", () => {
alert("transportclose");
});
consumer.on("producerclosed", () => {
alert("producerclose");
});
consumer.on("close", () => {
alert("producerclose");
});
consumer.on('trackended', () => {
alert('trackended');
//removeConsumer(consumer.remoteId, kind);
});
if (kind === 'video') {
sendRequest(socket, 'resumeAdd', { remoteId: remoteSocketId, kind: kind })
.then(() => {
// console.log('resumeAdd OK');
})
.catch(err => {
console.error(`[addConsumer] error in the resumeAdd action (the user will no longer receive video): ${err}`);
});
}
let elementjs = document.getElementById(`${remoteSocketId}`)
if (elementjs) {
// Get the existing video element's stream
const videoElement = document.getElementById(`${remoteSocketId}`);
const videoStream = videoElement.captureStream();
// Add the newly consumed track to the existing stream
videoStream.addTrack(consumer.track);
// Update the video source with the combined stream
videoElement.srcObject = videoStream;
videoElement.muted = false
videoElement.play();
/*
audioTools.initializeAudioTools()
audioTools.createAudioAnalyser(remoteSocketId, videoElement.srcObject)
setInterval(() => audioTools.watchAudioTracks(), 100);
*/
}
// Add a single webcam per instance; having one and only one instance avoids re-rendering the webcam
if (kind === 'video' && !elementjs) {
setRenderJspanelCam(component => ([...component, <JspanelCam key={remoteSocketId} removeJSpanel={removeJSpanel} cam={{ id: remoteSocketId, track: consumer.track, username: username }} />]))
/*
let webcam = props.forwardedRef['IconeWebcamRef'][remoteSocketId]
if (webcam) {
webcam.style.display = "initial"
}
*/
}
if (kind === 'audio' && !elementjs) {
setRenderJspanelCam(component => ([...component, <JspanelCam key={remoteSocketId} removeJSpanel={removeJSpanel} cam={{ id: remoteSocketId, track: consumer.track, username: username }} />]))
// let audio = props.forwardedRef['IconeMicroRef'][remoteSocketId]
// if (audio) {
//     audio.style.display = "initial"
// }
}
}
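// Same flow as addConsumer, but the consumer is handed to the user object
// (user.updateConsumer) instead of being rendered in a JsPanel.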
const addConsumerPrivate = async (transport, remoteSocketId, prdId, trackKind, username, socketContext) => {
if (!socket) {
socket = socketContext;
}
const { rtpCapabilities } = device.current;
const data = await sendRequest(socket, 'consumeAdd', {
rtpCapabilities: rtpCapabilities,
remoteId: remoteSocketId,
kind: trackKind
})
.catch(error => {
console.error(`[addConsumerPrivate] consumeAdd returned an error: ${error}`);
});
let {
producerId,
id,
kind,
rtpParameters,
isTeacher
} = data;
if (prdId && (prdId !== producerId)) {
console.warn('[addConsumerPrivate] producerId does not match, possible server synchronization issue (problematic, should not happen)');
}
let codecOptions = {};
const consumer = await consumerTransport.current.consume({
id,
producerId,
kind,
rtpParameters,
codecOptions
});
user?.updateConsumer({ id: id, consumer: consumer })
// the user's update function receives the consumer: updateConsumer({ id, consumer })
if (kind === 'video') {
videoConsumers.current[remoteSocketId] = consumer;
} else if (kind === 'audio') {
audioConsumers.current[remoteSocketId] = consumer;
} else {
console.error(`[addConsumerPrivate] unexpected kind '${kind}' (critical error, unpredictable result)`);
}
consumer.remoteId = remoteSocketId;
consumer.on("transportclose", () => {
alert("transportclose");
});
consumer.on("producerclosed", () => {
alert("producerclose");
});
consumer.on("close", () => {
alert("producerclose");
});
consumer.on('trackended', () => {
alert('trackended');
//removeConsumer(consumer.remoteId, kind);
});
if (kind === 'video') {
sendRequest(socket, 'resumeAdd', { remoteId: remoteSocketId, kind: kind })
.then(() => {
// console.log('resumeAdd OK');
})
.catch(err => {
console.error(`[addConsumerPrivate] error in the resumeAdd action (the user will no longer receive video): ${err}`);
});
}
}
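// Close the consumer matching the given id/kind, tell the server to stop
// consuming, close the JsPanel element and drop it from the rendered list.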
const removeJSpanel = (id, kind) => {
console.log(id, kind)
if (id === 'local') {
// stopVideoProducer();
}
/*
if (kind === 'video') {
videoEnabled.current = null
}
*/
if (kind === 'video' && id !== 'local') {
videoConsumers.current[id]?.close()
socket.emit("stopConsume", id, socket.id, kind)
}
if (kind === 'audio') {
audioEnabled.current = null
audioConsumers.current[id]?.close()
socket.emit("stopConsume", id, socket.id, kind)
}
let element = document.getElementById(`jsPanel_${id}`)
if (element) {
element.close()
setRenderJspanelCam((prevComponent) => {
return prevComponent.filter((item) => {
return item.key !== id;
});
});
console.log(listAllEventListeners())
}
}
return (
<>
{renderJspanelCam}
{ /* mediasoupTrack ? <TestTrack track={mediasoupTrack} /> : '' */}
<Row style={{ maxHeight: '25vh' }} >
<Middle toggleWebcam={toggleWebcam} toggleAudio={toggleAudio} localMicroRef={localMicroRef} localwebcamRef={localwebcamRef} />
</Row>
<Row style={{ height:'94vh'}} >
<Col style={{ textAlign: 'center', backgroundColor: 'white', color: 'black' }} span={24} >
<Center ref={useRefs} startVideoProducer={startVideoProducer} />
</Col>
</Row>
</>
);
};
const mapStateToProps = ({ user }) => ({ user });
export default connect(mapStateToProps)(InterfaceChat);