Upgrading a WebRTC audio call to a video call
Sometimes, in the middle of an audio call, we want to switch to a video call. Hanging up and dialing again as a video call is clumsy,
so many apps offer a way to upgrade an audio call to a video call, and likewise to downgrade a video call back to audio.
This article demonstrates simulating an audio call locally and then upgrading it to a video call.
Setup
The page is simple: two video elements plus a few buttons.
<video id="localVideo" playsinline autoplay muted></video>
<video id="remoteVideo" playsinline autoplay></video>
<div>
<button id="startBtn">開始</button>
<button id="callBtn">Call</button>
<button id="upgradeBtn">升級為影片通話</button>
<button id="hangupBtn">掛斷</button>
</div>
We use a local copy of the adapter shim:
<script src="../../src/js/adapter-2021.js"></script>
JS
First, grab the page elements:
const startBtn = document.getElementById('startBtn');
const callBtn = document.getElementById('callBtn');
const upgradeToVideoBtn = document.getElementById('upgradeBtn');
const hangupBtn = document.getElementById('hangupBtn');
const localVideo = document.getElementById('localVideo'); // local preview
const remoteVideo = document.getElementById('remoteVideo'); // receiving side

let localStream; // local media stream: audio at first, video track added on upgrade
let pc1; // caller peer connection
let pc2; // simulated remote peer connection
Listeners
Set up a few listeners:
localVideo.addEventListener('loadedmetadata', function () {
  console.log(`localVideo size: ${this.videoWidth}px x ${this.videoHeight}px`);
});

remoteVideo.addEventListener('loadedmetadata', function () {
  console.log(`remoteVideo size: ${this.videoWidth}px x ${this.videoHeight}px`);
});

let startTime;
remoteVideo.onresize = () => {
  console.log(`remoteVideo onresize: ${remoteVideo.videoWidth}x${remoteVideo.videoHeight}`);
  if (startTime) {
    const elapsedTime = window.performance.now() - startTime;
    console.log(`Time to set up connection: ${elapsedTime.toFixed(3)}ms`);
    startTime = null;
  }
};

startBtn.onclick = start;
callBtn.onclick = call;
upgradeToVideoBtn.onclick = upgrade;
hangupBtn.onclick = hangup;
Log the state changes:
function onCreateSessionDescriptionError(error) {
  console.log(`rustfisher.com: failed to create session description: ${error.toString()}`);
}

function onIceStateChange(pc, event) {
  if (pc) {
    console.log(`rustfisher.com: ${getName(pc)} ICE state: ${pc.iceConnectionState}`);
    console.log('rustfisher.com: ICE state change event: ', event);
  }
}

function onAddIceCandidateSuccess(pc) {
  console.log(`rustfisher.com: ${getName(pc)} addIceCandidate success`);
}

function onAddIceCandidateError(pc, error) {
  console.log(`rustfisher.com: ${getName(pc)} failed to add ICE candidate: ${error.toString()}`);
}

function onSetLocalSuccess(pc) {
  console.log(`rustfisher.com: ${getName(pc)} setLocalDescription complete`);
}

function onSetSessionDescriptionError(error) {
  console.log(`rustfisher.com: failed to set session description: ${error.toString()}`);
}

function onSetRemoteSuccess(pc) {
  console.log(`rustfisher.com: ${getName(pc)} setRemoteDescription complete`);
}

// Helpers
function getName(pc) {
  return (pc === pc1) ? 'pc1' : 'pc2';
}

function getOtherPc(pc) {
  return (pc === pc1) ? pc2 : pc1;
}
Start
Grab the local audio stream and hand it to localVideo:
function gotStream(stream) {
  console.log('Received local stream');
  localVideo.srcObject = stream;
  localStream = stream;
  callBtn.disabled = false;
}

function start() {
  console.log('Requesting local stream (audio only)');
  startBtn.disabled = true;
  navigator.mediaDevices
    .getUserMedia({ audio: true, video: false })
    .then(gotStream)
    .catch(e => alert(`getUserMedia() error: ${e.name}`));
}
Call
Start the audio call:
const offerOptions = { offerToReceiveAudio: 1 }; // assumed offer options for the audio-only call

function call() {
  callBtn.disabled = true;
  upgradeToVideoBtn.disabled = false;
  hangupBtn.disabled = false;
  console.log('Starting call...');
  startTime = window.performance.now();
  const audioTracks = localStream.getAudioTracks();
  if (audioTracks.length > 0) {
    console.log(`Using audio device: ${audioTracks[0].label}`);
  }
  const servers = null; // local test only, no STUN/TURN servers
  pc1 = new RTCPeerConnection(servers);
  console.log('Created local peer connection pc1');
  pc1.onicecandidate = e => onIceCandidate(pc1, e);
  pc2 = new RTCPeerConnection(servers);
  console.log('rustfisher.com: created simulated remote peer connection pc2');
  pc2.onicecandidate = e => onIceCandidate(pc2, e);
  pc1.oniceconnectionstatechange = e => onIceStateChange(pc1, e);
  pc2.oniceconnectionstatechange = e => onIceStateChange(pc2, e);
  pc2.ontrack = gotRemoteStream;
  localStream.getTracks().forEach(track => pc1.addTrack(track, localStream));
  console.log('rustfisher.com: added local stream to pc1');
  console.log('rustfisher.com: pc1 creating offer');
  pc1.createOffer(offerOptions).then(onCreateOfferSuccess, onCreateSessionDescriptionError);
}
function gotRemoteStream(e) {
  console.log('Received remote stream', e.track, e.streams[0]);
  remoteVideo.srcObject = null;
  remoteVideo.srcObject = e.streams[0];
}

function onIceCandidate(pc, event) {
  getOtherPc(pc)
    .addIceCandidate(event.candidate)
    .then(() => onAddIceCandidateSuccess(pc), err => onAddIceCandidateError(pc, err));
  console.log(`${getName(pc)} ICE candidate:\n${event.candidate ? event.candidate.candidate : '(null)'}`);
}

function onCreateOfferSuccess(desc) {
  console.log(`Offer from pc1\n${desc.sdp}`);
  console.log('pc1 setLocalDescription start');
  pc1.setLocalDescription(desc).then(() => onSetLocalSuccess(pc1), onSetSessionDescriptionError);
  console.log('pc2 setRemoteDescription start');
  pc2.setRemoteDescription(desc).then(() => onSetRemoteSuccess(pc2), onSetSessionDescriptionError);
  console.log('pc2 createAnswer start');
  pc2.createAnswer().then(onCreateAnswerSuccess, onCreateSessionDescriptionError);
}

function onCreateAnswerSuccess(desc) {
  console.log(`rustfisher.com: answer from pc2: ${desc.sdp}`);
  console.log('pc2 setLocalDescription start');
  pc2.setLocalDescription(desc).then(() => onSetLocalSuccess(pc2), onSetSessionDescriptionError);
  console.log('pc1 setRemoteDescription start');
  pc1.setRemoteDescription(desc).then(() => onSetRemoteSuccess(pc1), onSetSessionDescriptionError);
}
To summarize the call flow:

- Create the RTCPeerConnection objects
- Set onicecandidate to forward ICE candidates
- Set oniceconnectionstatechange to watch ICE connection state changes
- On the receiving side (pc2), listen for ontrack
- On the sending side, pc1 adds the current stream's tracks with addTrack
- pc1 creates the offer with createOffer
- Once pc1's offer is in place, the receiving side pc2 answers with createAnswer (a compact version of this exchange is sketched right after this list)
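For reference, the same offer/answer exchange can also be written with async/await. This is only a sketch under the assumptions of this demo (pc1, pc2, localStream and offerOptions already set up as above); the hypothetical negotiate() helper omits the logging shown earlier.

// Compact async/await version of the offer/answer exchange above (sketch only).
// Assumes pc1, pc2 and offerOptions exist as in the code above.
async function negotiate() {
  const offer = await pc1.createOffer(offerOptions);
  await pc1.setLocalDescription(offer);   // pc1 applies its own offer
  await pc2.setRemoteDescription(offer);  // hand the offer to the simulated remote peer
  const answer = await pc2.createAnswer();
  await pc2.setLocalDescription(answer);  // pc2 applies its answer
  await pc1.setRemoteDescription(answer); // complete the exchange on pc1
}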
Upgrading to a video call
The upgrade() function handles the upgrade:
function upgrade() {
  upgradeToVideoBtn.disabled = true;
  navigator.mediaDevices
    .getUserMedia({ video: true })
    .then(stream => {
      console.log('rustfisher.com: got the video stream');
      const videoTracks = stream.getVideoTracks();
      if (videoTracks.length > 0) {
        console.log(`video device: ${videoTracks[0].label}`);
      }
      localStream.addTrack(videoTracks[0]);
      localVideo.srcObject = null; // reset so the preview picks up the new video track
      localVideo.srcObject = localStream;
      pc1.addTrack(videoTracks[0], localStream);
      return pc1.createOffer();
    })
    .then(offer => pc1.setLocalDescription(offer))
    .then(() => pc2.setRemoteDescription(pc1.localDescription))
    .then(() => pc2.createAnswer())
    .then(answer => pc2.setLocalDescription(answer))
    .then(() => pc1.setRemoteDescription(pc2.localDescription));
}
The caller requests a video stream with getUserMedia, adds the new video track to localStream, and also adds it to the connection with pc1.addTrack.
It then creates a fresh offer with createOffer, and the rest is the same offer/answer exchange: pc2 answers and the session is renegotiated.
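The opposite direction, dropping back to an audio-only call as mentioned at the start, is not part of this demo. A minimal sketch under the same assumptions would remove the video again and renegotiate; the downgrade() name below is hypothetical.

// Hypothetical downgrade(): drop the video track and renegotiate (sketch only).
async function downgrade() {
  // remove the video sender(s) from pc1
  pc1.getSenders()
    .filter(sender => sender.track && sender.track.kind === 'video')
    .forEach(sender => pc1.removeTrack(sender));
  // stop and remove the local video track(s)
  localStream.getVideoTracks().forEach(track => {
    track.stop();
    localStream.removeTrack(track);
  });
  localVideo.srcObject = null; // refresh the local preview
  localVideo.srcObject = localStream;
  // renegotiate with the same offer/answer flow as above
  const offer = await pc1.createOffer();
  await pc1.setLocalDescription(offer);
  await pc2.setRemoteDescription(offer);
  const answer = await pc2.createAnswer();
  await pc2.setLocalDescription(answer);
  await pc1.setRemoteDescription(answer);
}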
Hang up
A simple hang-up function:
function hangup() {
  console.log('rustfisher.com: hang up');
  pc1.close();
  pc2.close();
  pc1 = null;
  pc2 = null;
  const videoTracks = localStream.getVideoTracks();
  videoTracks.forEach(videoTrack => {
    videoTrack.stop();
    localStream.removeTrack(videoTrack);
  });
  localVideo.srcObject = null; // reset the preview so it drops the removed video track
  localVideo.srcObject = localStream;
  hangupBtn.disabled = true;
  callBtn.disabled = false;
}
The main work is closing both peer connections and stopping the caller's video tracks; the audio track is kept alive so a new call can be started without asking for the microphone again.
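If you also want to release the microphone on hang-up, you could stop every remaining track. Below is a sketch with a hypothetical releaseLocalMedia() helper; the user would then have to press Start again before the next call.

// Hypothetical helper: fully release the capture devices after hangup (sketch only).
function releaseLocalMedia() {
  if (localStream) {
    localStream.getTracks().forEach(track => track.stop()); // stops audio and any video
    localStream = null;
  }
  localVideo.srcObject = null;
  callBtn.disabled = true;
  startBtn.disabled = false; // Start must be pressed again before the next call
}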
Code flow overview
Mapping the user's actions (buttons) to the main code:

- Start (startBtn) → start() → getUserMedia({ audio: true })
- Call (callBtn) → call() → create pc1/pc2, addTrack, createOffer / createAnswer
- Upgrade to video (upgradeBtn) → upgrade() → getUserMedia({ video: true }), addTrack, renegotiate
- Hang up (hangupBtn) → hangup() → close pc1/pc2, stop the video tracks
Demo
For a live preview, see the demo page "WebRTC audio call upgraded to a video call".