服务端使用 mediasoup-demo 的 app 服务端代码搭建;
客户端使用如下代码:
<template>
<header>
<div>
<h1>Mediasoup WebRTC Client</h1>
<video ref='remoteVideo' autoplay></video>
<audio ref='remoteAudio' autoplay></audio> <!-- remote audio element -->
<video ref='localVideo' autoplay muted></video>
</div>
</header>
</template>
<script setup>
import { ref, onMounted, onBeforeUnmount } from 'vue';
import ProToo from 'protoo-client'; // 使用 ES6 导入
import * as mediasoupClient from 'mediasoup-client';
// Template refs for the local preview and the remote media elements.
const localVideo = ref(null);
const remoteVideo = ref(null);
const remoteAudio = ref(null); // ref to the remote <audio> element
let device;          // mediasoup-client Device, loaded in onMounted
let producers = [];  // local mediasoup producers (audio + video)
let consumers = [];  // remote mediasoup consumers created via 'newConsumer'
let _protoo;         // protoo signaling peer (WebSocket)
let _sendTransport;  // WebRTC transport used for producing
let _recvTransport;  // WebRTC transport used for consuming
const serverUrl = 'wss://192.168.0.104:4443/?roomId=lsk01&peerId=123456&consumerReplicas=1'; // Mediasoup server URL
// On mount: open the protoo signaling connection, load the mediasoup Device,
// create send/receive WebRTC transports, join the room, and start producing
// the local audio/video tracks. Incoming 'newConsumer' requests attach remote
// tracks to the <video>/<audio> elements.
onMounted(async () => {
  try {
    const protooTransport = new ProToo.WebSocketTransport(serverUrl);
    _protoo = new ProToo.Peer(protooTransport);

    _protoo.on('open', async () => {
      console.log('ProToo connection established');

      // Load the Device with the router's RTP capabilities.
      device = new mediasoupClient.Device();
      const routerRtpCapabilities = await _protoo.request('getRouterRtpCapabilities');
      await device.load({ routerRtpCapabilities });
      console.log('getRtpCapabilities', routerRtpCapabilities);

      // Capture local media and show the preview.
      const stream = await navigator.mediaDevices.getUserMedia({ audio: true, video: true });
      localVideo.value.srcObject = stream;

      // --- Send transport (producing only) ---
      const transportInfo = await _protoo.request('createWebRtcTransport', {
        forceTcp: false,
        producing: true,
        consuming: false,
        sctpCapabilities: device.sctpCapabilities,
      });
      console.log('transportInfo', transportInfo);
      {
        // FIX: forward the server-provided sctpParameters instead of an
        // empty object, so SCTP/data-channel negotiation is not broken.
        const { id, iceParameters, iceCandidates, dtlsParameters, sctpParameters } = transportInfo;
        _sendTransport = device.createSendTransport({
          id,
          iceParameters,
          iceCandidates,
          dtlsParameters: { ...dtlsParameters, role: 'auto' },
          sctpParameters,
          iceServers: [],
          proprietaryConstraints: {},
          additionalSettings: { encodedInsertableStreams: false },
        });
      }
      console.log('_sendTransport', _sendTransport);

      // --- Receive transport (consuming only) ---
      const recvTransportInfo = await _protoo.request('createWebRtcTransport', {
        forceTcp: false,
        producing: false,
        consuming: true,
        sctpCapabilities: device.sctpCapabilities,
      });
      {
        const { id, iceParameters, iceCandidates, dtlsParameters, sctpParameters } = recvTransportInfo;
        _recvTransport = device.createRecvTransport({
          id,
          iceParameters,
          iceCandidates,
          dtlsParameters: { ...dtlsParameters, role: 'auto' },
          sctpParameters,
          iceServers: [],
          additionalSettings: { encodedInsertableStreams: false },
        });
      }
      // FIX: was logging _sendTransport under the label '_recTransport'.
      console.log('_recvTransport', _recvTransport);

      // mediasoup fires 'connect' once per transport with only
      // { dtlsParameters }; answer the server, then ack via callback().
      // FIX: errors must be reported through errback, not callback(error),
      // and the event does not carry iceParameters.
      _recvTransport.on('connect', async ({ dtlsParameters }, callback, errback) => {
        try {
          await _protoo.request('connectWebRtcTransport', {
            transportId: _recvTransport.id,
            dtlsParameters,
          });
          callback();
        } catch (error) {
          console.error('Error connecting recv transport:', error);
          errback(error);
        }
      });

      _sendTransport.on('connect', async ({ dtlsParameters }, callback, errback) => {
        try {
          await _protoo.request('connectWebRtcTransport', {
            transportId: _sendTransport.id,
            dtlsParameters,
          });
          callback();
        } catch (error) {
          console.error('Error connecting send transport:', error);
          errback(error);
        }
      });

      // Relay local 'produce' events to the server and hand the
      // server-assigned producer id back to mediasoup-client.
      _sendTransport.on('produce', async (parameters, callback, errback) => {
        try {
          const { id } = await _protoo.request('produce', {
            transportId: _sendTransport.id,
            ...parameters,
          });
          callback({ id });
        } catch (error) {
          console.error('Error producing:', error);
          errback(error);
        }
      });

      // Join the room.
      // NOTE(review): mediasoup-demo expects `device` to be a plain
      // browser-info object ({ flag, name, version }), not the
      // mediasoup-client Device instance — confirm against the server.
      const { peers } = await _protoo.request('join', {
        displayName: 'haha',
        device,
        rtpCapabilities: device.rtpCapabilities,
        sctpCapabilities: device.sctpCapabilities,
      });
      console.log('peers', peers);

      // Produce the local audio track with Opus options.
      const audioTrack = stream.getAudioTracks()[0];
      const audioProducer = await _sendTransport.produce({
        track: audioTrack,
        codecOptions: {
          opusStereo: true,
          opusDtx: true,
          opusFec: true,
          opusNack: true,
        },
      });
      producers.push(audioProducer);
      console.log('Audio Producer created:', audioProducer);

      // Produce the local video track.
      const videoTrack = stream.getVideoTracks()[0];
      const videoProducer = await _sendTransport.produce({ track: videoTrack });
      producers.push(videoProducer);
      console.log('Video Producer created:', videoProducer);
    });

    // Server-initiated protoo requests (e.g. 'newConsumer').
    _protoo.on('request', async (request, accept, reject) => {
      console.log('method and data', request.method, request.data);
      switch (request.method) {
        case 'newConsumer': {
          try {
            const { producerId, id, kind, rtpParameters } = request.data;
            const consumer = await _recvTransport.consume({
              id,
              producerId,
              kind,
              rtpParameters,
            });
            console.log('consumer', consumer);
            consumers.push(consumer);

            if (consumer.kind === 'video') {
              // Lazily create a MediaStream to host remote video tracks.
              if (!remoteVideo.value.srcObject) {
                remoteVideo.value.srcObject = new MediaStream();
              }
              remoteVideo.value.srcObject.addTrack(consumer.track);
              // play() returns a promise; autoplay policies may reject it.
              remoteVideo.value.play().catch((err) => console.error('remote video play failed:', err));
            }
            if (consumer.kind === 'audio') {
              if (!remoteAudio.value.srcObject) {
                remoteAudio.value.srcObject = new MediaStream();
              }
              remoteAudio.value.srcObject.addTrack(consumer.track);
            }

            // FIX: the server awaits this ack before resuming the consumer;
            // without accept() the request times out and media stays paused.
            accept();
          } catch (error) {
            console.error('newConsumer failed:', error);
            reject(500, String(error));
          }
          break;
        }
        default: {
          // Protoo requires every request to be answered; reject unknown
          // methods instead of letting the server-side request time out.
          reject(404, `unknown request method "${request.method}"`);
          break;
        }
      }
    });
  } catch (error) {
    console.error('Error during setup:', error);
  }
});
// Teardown: release all WebRTC resources before the component unmounts.
// FIX: the original closed only the producers and the signaling peer,
// leaking consumers and both transports (closing a transport also closes
// its producers/consumers, but explicit closes keep intent clear).
onBeforeUnmount(() => {
  producers.forEach((producer) => producer.close());
  consumers.forEach((consumer) => consumer.close());
  if (_sendTransport) {
    _sendTransport.close();
  }
  if (_recvTransport) {
    _recvTransport.close();
  }
  if (_protoo) {
    _protoo.close();
  }
});
</script>
<style scoped>
video {
width: 100%;
height: auto; /* or set a fixed height */
border: 1px solid black; /* border added to aid debugging */
display: block; /* ensure the element renders as block-level */
}
</style>