This article looks at how to deal with Chrome refusing to play WebAudio getUserMedia received over WebRTC / Peer.js, and should be a useful reference for anyone hitting the same problem.

Problem description

I want to make a simple audio-only stream over WebRTC, using Peer.js. I'm running the simple PeerServer locally.

The following works perfectly fine in Firefox 30, but I can't get it to work in Chrome 35. I would expect there to be something wrong with the PeerJS setup, but Chrome -> Firefox works perfectly fine, while Chrome -> Chrome seems to send the stream, but won't play it over the speakers.

Setting up getUserMedia

Note: uncommenting the commented-out lines below lets me hear the loopback in both Chrome and Firefox.

navigator.getUserMedia = (navigator.getUserMedia ||
                          navigator.webkitGetUserMedia ||
                          navigator.mozGetUserMedia ||
                          navigator.msGetUserMedia);
window.AudioContext = window.AudioContext || window.webkitAudioContext;

if (navigator.getUserMedia) {
  navigator.getUserMedia({video: false, audio: true}, getMediaSuccess, getMediaError);
} else {
  alert('getUserMedia not supported.');
}

var localMediaStream;
//var audioContext = new AudioContext();

function getMediaSuccess(mediaStream) {
  //var microphone = audioContext.createMediaStreamSource(mediaStream);
  //microphone.connect(audioContext.destination);
  localMediaStream = mediaStream;
}

function getMediaError(err) {
  alert('getUserMedia error. See console.');
  console.error(err);
}
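The snippet above uses the vendor-prefixed, callback-style getUserMedia that Chrome 35 and Firefox 30 exposed. In current browsers the same capture step is normally written against the promise-based navigator.mediaDevices.getUserMedia. The following is a minimal sketch of that modern equivalent; it is not part of the original question.

// Modern, promise-based capture (sketch only; not from the original question).
var localMediaStream;

navigator.mediaDevices.getUserMedia({ video: false, audio: true })
  .then(function (mediaStream) {
    localMediaStream = mediaStream;
    // Uncommenting the next two lines reproduces the local loopback test
    // mentioned in the note above:
    // var audioContext = new (window.AudioContext || window.webkitAudioContext)();
    // audioContext.createMediaStreamSource(mediaStream).connect(audioContext.destination);
  })
  .catch(function (err) {
    console.error('getUserMedia error:', err);
  });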
Making the connection

var peer = new Peer({host: '192.168.1.129', port: 9000});

peer.on('open', function(id) {
  console.log('My ID:', id);
});

peer.on('call', function(call) {
  console.log('answering call with', localMediaStream);
  call.answer(localMediaStream); // THIS WORKS IN CHROME, localMediaStream exists

  call.on('stream', function(stream) {
    console.log('streamRecieved', stream); // THIS WORKS IN CHROME, the stream has come through

    var audioContext = new AudioContext();
    var audioStream = audioContext.createMediaStreamSource(stream);
    audioStream.connect(audioContext.destination); // I HEAR AUDIO IN FIREFOX, BUT NOT CHROME
  });

  call.on('error', function(err) {
    console.log(err); // LOGS NO ERRORS
  });
});

function connect(id) {
  var voiceStream = peer.call(id, localMediaStream);
}

Solution

In Chrome this is currently a known bug: remote audio streams gathered from a peer connection are not accessible through the Web Audio API.

Latest comment on the bug: "We are working really hard towards the feature. The reason why this takes a long time is that we need to move the APM to Chrome first, implement a render mixer to get the unmixed data from WebRTC, and then we can hook up the remote audio stream to WebAudio."

It was recently patched in Firefox; as I remember, this was an issue there as well in the past.
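The answer above only explains why routing the remote stream through the Web Audio API stays silent in Chrome; it does not give a workaround. A workaround commonly reported while this bug was open is to skip the AudioContext on the receiving side and attach the remote stream directly to an audio element, which Chrome will play. The sketch below shows that idea inside the existing 'stream' handler; it is an illustration of that commonly reported workaround, not code from the original answer.

call.on('stream', function (stream) {
  // Workaround sketch: attach the remote MediaStream directly to an <audio>
  // element instead of routing it through an AudioContext.
  var audio = new Audio();
  if ('srcObject' in audio) {
    audio.srcObject = stream;                 // current browsers
  } else {
    audio.src = URL.createObjectURL(stream);  // Chrome/Firefox of that era
  }
  audio.autoplay = true;
  audio.play();
});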
That concludes this article on Chrome not playing WebAudio getUserMedia over WebRTC / Peer.js. We hope the recommended answer is helpful, and thank you for your support!