I am trying to add an AnalyserNode to a Web Audio example I made and visualize the output sound, but I don't know how to do it. I think I am not passing the correct source to the analyser(?)
The full code is here: https://jsfiddle.net/kepin95043/1ub0sjo3/
<script>
  var fs = 2000;
  var gain = 0.2;

  class Sound {
    constructor(context) {
      this.context = context;
    }
    init() {
      this.oscillator = this.context.createOscillator();
      this.oscillator.frequency.value = fs;
      this.gainNode = this.context.createGain();
      this.oscillator.connect(this.gainNode);
      this.gainNode.connect(this.context.destination);
    }
    play(value) {
      this.init();
      this.gainNode.gain.setValueAtTime(gain, this.context.currentTime);
      this.oscillator.start();
    }
    stop() {
      this.gainNode.gain.exponentialRampToValueAtTime(0.001, this.context.currentTime + 1);
      this.oscillator.stop(this.context.currentTime + 1);
    }
  }

  var context = new AudioContext();
  var sound = new Sound(context);
  sound.init();

  var wave = 'sine';
  var state = 'paused';

  var waveSelectors = document.querySelectorAll('.waveform');
  var playBtn = document.querySelector('#play');
  var container = document.querySelector('.container');

  waveSelectors.forEach(function(button) {
    button.addEventListener('click', function() {
      cleanClass('active');
      wave = button.dataset.wave;
      sound.oscillator.type = wave;
      button.classList.add('active');
    })
  })

  playBtn.addEventListener('click', function() {
    context.resume().then(() => {
      console.log('Playback resumed successfully');
    });
    if (playBtn.text == 'Play') {
      sound.play();
      sound.oscillator.type = wave;
      playBtn.text = 'Pause';
    } else {
      sound.stop();
      playBtn.text = 'Play';
    }
  })

  function cleanClass(rclass) {
    waveSelectors.forEach(function(button) {
      button.classList.remove(rclass);
    })
  }

  function changeFs(val) {
    fs = val;
    var output = document.getElementById("fsValue");
    output.innerHTML = val;
    sound.stop();
    sound.play();
    console.log(val);
  };

  function changeGain(val) {
    gain = val;
    var output = document.getElementById("gainValue");
    output.innerHTML = val;
    sound.stop();
    sound.play();
    console.log(val);
  };

  var masterGain;
  masterGain = context.createGain();
  masterGain.connect(context.destination);

  // analyser
  var analyser = context.createAnalyser();
  masterGain.connect(analyser);

  var waveform = new Float32Array(analyser.frequencyBinCount);
  analyser.getFloatTimeDomainData(waveform);

  (function updateWaveform() {
    requestAnimationFrame(updateWaveform);
    analyser.getFloatTimeDomainData(waveform);
  }());

  var spectrum = new Uint8Array(analyser.frequencyBinCount);

  (function updateSpectrum() {
    requestAnimationFrame(updateSpectrum);
    analyser.getByteFrequencyData(spectrum);
  }());

  // oscilloscope
  var scopeCanvas = document.getElementById('canvas');
  scopeCanvas.width = waveform.length;
  //scopeCanvas.height = 200;
  scopeCanvas.height = scopeCanvas.width * 0.33;
  var scopeContext = scopeCanvas.getContext('2d');

  (function drawOscilloscope() {
    requestAnimationFrame(drawOscilloscope);
    scopeContext.clearRect(0, 0, scopeCanvas.width, scopeCanvas.height);
    scopeContext.strokeStyle = "white"; // Green path
    scopeContext.beginPath();
    for (var i = 0; i < waveform.length; i++) {
      var x = i;
      var y = (0.5 + waveform[i] / 2) * scopeCanvas.height;
      if (i === 0) {
        scopeContext.moveTo(x, y);
      } else {
        scopeContext.lineTo(x, y);
      }
    }
    scopeContext.stroke();
  }());
</script>
Can anyone help me figure out what I am doing wrong?
Thanks in advance!
PS: Open it with Firefox. For me it does not work in Chromium-based browsers.
Here is a working example: https://codepen.io/dennisgaebel/pen/YEwLaL
Best answer
You create a Sound object and an AnalyserNode that is connected to masterGain, but I don't see anywhere that the Sound is connected to masterGain. Without that connection, your analyser node only ever receives silence.
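For example, here is a minimal sketch of the missing routing (node names follow the question's code; the standalone oscillator and gainNode stand in for what the Sound class builds in init(), and it assumes masterGain and the analyser already exist when the oscillator is wired up):

// Minimal sketch: route the signal through masterGain so the analyser
// (tapped off masterGain) actually receives audio instead of silence.
var context = new AudioContext();

var masterGain = context.createGain();
masterGain.connect(context.destination);   // masterGain -> speakers

var analyser = context.createAnalyser();
masterGain.connect(analyser);              // masterGain -> analyser (tap)

var oscillator = context.createOscillator();
oscillator.frequency.value = 2000;

var gainNode = context.createGain();
oscillator.connect(gainNode);
gainNode.connect(masterGain);              // was: gainNode.connect(context.destination)

oscillator.start();

// The oscilloscope loop from the question now gets real data:
var waveform = new Float32Array(analyser.frequencyBinCount);
(function update() {
  requestAnimationFrame(update);
  analyser.getFloatTimeDomainData(waveform);
}());

Applied to the question's code, that means creating masterGain and the analyser before sound.init() runs, and changing this.gainNode.connect(this.context.destination) to this.gainNode.connect(masterGain).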
Regarding web - Web Audio oscillator AnalyserNode, a similar question can be found on Stack Overflow: https://stackoverflow.com/questions/60956934/