
Implementing Real-Time Intercom with AudioContext and WebSocket

This article implements a simple real-time intercom feature: voice captured on one computer is transmitted to another computer and played back in real time.

Socket Forwarding

A WebSocket server can relay the audio stream as-is; no extra processing is needed on the server side.

var WebSocketServer = require('ws').Server
var WebSocket = require('ws')

// Listen on port 1041 (the clients below connect to this port)
const wss = new WebSocketServer({ port: 1041 });
wss.on('connection', function (ws) {
    console.log('client connected');
    ws.on('message', (data, isBinary) => {
        // On receiving a message, forward it to every other connected client
        wss.clients.forEach(function each(client) {
            if (client !== ws && client.readyState === WebSocket.OPEN) {
                client.send(data, { binary: isBinary });
            }
        });
    });
});
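Before wiring up audio, the relay can be sanity-checked with a small Node test client. The sketch below is not part of the original setup; it assumes the server above is running locally on port 1041, opens two connections, and expects a message sent by one to arrive at the other.

// test-client.js: a quick check that the relay forwards messages
var WebSocket = require('ws')

const sender = new WebSocket('ws://127.0.0.1:1041')
const receiver = new WebSocket('ws://127.0.0.1:1041')

receiver.on('message', (data) => {
    console.log('relayed:', data.toString()) // expect "ping"
    process.exit(0)
})
sender.on('open', () => {
    // Give the receiver a moment to connect before sending
    setTimeout(() => sender.send('ping'), 200)
})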

Capturing Sound from the Microphone and Transmitting It

<!DOCTYPE html>
<html lang="en">

<head>
    <meta charset="UTF-8">
    <meta http-equiv="X-UA-Compatible" content="IE=edge">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <title>Document</title>
</head>

<body>
    <button id="start">start</button>
    <button id="stop">startstop</button>
    <script>
        // Connect to the WebSocket relay
        const ws = new WebSocket('ws://192.168.220.223:1041')
        ws.onopen = () => {
            console.log('socket connected')
        }
        ws.onerror = (e) => {
            console.log('error', e);
        }
        ws.onclose = () => {
            console.log('socket closed')
        }

        document.getElementById('start').onclick = function () {
            // Holds a reference to the current MediaStream, so the
            // microphone can be closed later to stop transmitting
            let mediaStack
            var audioCtx = new AudioContext();
            // Create a ScriptProcessorNode to receive the microphone audio
            // (deprecated in favor of AudioWorklet, but still widely supported)
            var scriptNode = audioCtx.createScriptProcessor(4096, 1, 1);

            navigator.mediaDevices.getUserMedia({ audio: true, video: false })
                .then(function (stream) {
                    mediaStack = stream
                    var source = audioCtx.createMediaStreamSource(stream)

                    // The processor must be wired into the graph for
                    // onaudioprocess to fire
                    source.connect(scriptNode);
                    scriptNode.connect(audioCtx.destination);
                })
                .catch(function (err) {
                    /* handle the error */
                    console.log('err', err)
                });
            // Called whenever the microphone produces audio. In practice,
            // as long as the microphone stays open this event keeps firing,
            // even when nobody is speaking.
            scriptNode.onaudioprocess = function (audioProcessingEvent) {
                var inputBuffer = audioProcessingEvent.inputBuffer;
                // Only one channel was created, so read channel 0
                var inputData = inputBuffer.getChannelData(0);
                console.log(inputData);
                // Send the data over the socket; what actually goes out
                // is the raw Float32Array sample data
                ws.send(inputData)
            }

            // Close the microphone and stop sending audio
            document.getElementById('stop').onclick = function () {
                mediaStack.getTracks()[0].stop()
                scriptNode.disconnect()
            };
        }


    </script>
</body>

</html>
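Sending the raw Float32Array is simple, but every sample costs 4 bytes. A common optimization is to quantize to 16-bit PCM before sending, halving the bandwidth. The sketch below is an assumption rather than part of the original code, and both pages would need the change together; the helper names floatTo16BitPCM and int16ToFloat are made up for illustration.

// Sender side: clamp each Float32 sample to [-1, 1] and quantize it
// to a signed 16-bit integer
function floatTo16BitPCM(float32) {
    const int16 = new Int16Array(float32.length);
    for (let i = 0; i < float32.length; i++) {
        const s = Math.max(-1, Math.min(1, float32[i]));
        int16[i] = s < 0 ? s * 0x8000 : s * 0x7FFF;
    }
    return int16;
}
// In onaudioprocess: ws.send(floatTo16BitPCM(inputData))

// Receiver side: expand the received bytes back to Float32 before
// filling the AudioBuffer
function int16ToFloat(arrayBuffer) {
    const int16 = new Int16Array(arrayBuffer);
    const float32 = new Float32Array(int16.length);
    for (let i = 0; i < int16.length; i++) {
        float32[i] = int16[i] / (int16[i] < 0 ? 0x8000 : 0x7FFF);
    }
    return float32;
}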

Receiving the Audio Stream over the Socket and Playing It

<!DOCTYPE html>
<html lang="en">

<head>
    <meta charset="UTF-8">
    <meta http-equiv="X-UA-Compatible" content="IE=edge">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <title>Document</title>
</head>

<body>
    <button onclick="play()">play</button>
    <script>
        function play() {
            const audioCtx = new AudioContext();
            // Connect to the relay
            const ws = new WebSocket('ws://127.0.0.1:1041')
            ws.onopen = () => {
                console.log('socket opened')
            }
            // Receive binary messages as ArrayBuffer
            ws.binaryType = 'arraybuffer'
            ws.onmessage = ({ data }) => {
                // Reinterpret the received bytes as the same Float32Array
                // the sender transmitted
                const buffer = new Float32Array(data)
                // Create an empty AudioBuffer. The length 4096 matches the
                // sender's buffer size; 48000 is the sample rate and must
                // match the sender's AudioContext sample rate
                const myArrayBuffer = audioCtx.createBuffer(1, 4096, 48000);
                // Again only one channel was created, so take channel 0
                const nowBuffering = myArrayBuffer.getChannelData(0);
                // Copy the received samples into the AudioBuffer
                for (let i = 0; i < 4096; i++) {
                    nowBuffering[i] = buffer[i];
                }
                // Play the chunk with an AudioBufferSourceNode
                const source = audioCtx.createBufferSource();
                source.buffer = myArrayBuffer
                source.connect(audioCtx.destination);
                source.start();
            }
            ws.onerror = (e) => {
                console.log('error', e);
            }
            ws.onclose = () => {
                console.log('socket closed');
            }
        }
    </script>
</body>

</html>
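One caveat with the receiver above: calling source.start() the instant each chunk arrives lets network jitter produce audible gaps or overlaps between chunks. A minimal sketch of a common fix, assuming it replaces the direct source.start() call in onmessage, keeps a running play cursor and schedules each chunk to begin exactly when the previous one ends:

// Cursor for when the next chunk should begin; assumed to live
// outside the onmessage handler so it persists across chunks
let playTime = 0;

function scheduleChunk(audioCtx, audioBuffer) {
    const source = audioCtx.createBufferSource();
    source.buffer = audioBuffer;
    source.connect(audioCtx.destination);
    // Never schedule in the past; otherwise queue this chunk
    // right after the previous one
    playTime = Math.max(playTime, audioCtx.currentTime);
    source.start(playTime);
    playTime += audioBuffer.duration;
}

With this, chunks that arrive late still start immediately, while chunks that arrive early are queued back-to-back instead of overlapping.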