- Joined
- Jun 17, 2015
- Messages
- 97
- Reaction score
- 83
- First Language
- English
- Primarily Uses
I'm unsure if this necessarily counts as a Javascript Learning question (as to me it feels much more advanced) but here it goes. I'm trying to take advantage of the OfflineAudioContext API to render audio information out really fast. This is to allow me to process beats per minute in a song and use beat detection.
I've already successfully used the analyser node to observe real-time information of the WebAudio._sourceNode.
I was then able to extract the frequency and use it to project a spectrum.
The following allowed me to get results like this when playing the bgm
My problem is that I don't really know where to connect the offline context to (or if I require a specific way to set it up). It appears most of the reading from audio files is already set up in MV, so I was hoping there was an easy way to link to what the already existing audio context is using, or for me to directly read off the bgm.buffer.
I've been following the code + article provided in this link to try and figure this out. But being unable to get the offlineContext working I've been stuck as to what to do next. Maybe one of the more advanced coders out there could help point me in the right direction.
https://jmperezperez.com/beats-audio-api/
Thanks for your time, I'll update this if I get any further with this.
I've already successfully used the analyser node to observe real-time information of the WebAudio._sourceNode.
/**
 * Builds the per-buffer audio graph nodes: buffer source, gain, panner,
 * and an AnalyserNode (the analyser is the addition over stock MV, used
 * for real-time frequency inspection of the playing buffer).
 */
WebAudio.prototype._createNodes = function() {
    var ctx = WebAudio._context;

    var source = ctx.createBufferSource();
    source.buffer = this._buffer;
    source.loopStart = this._loopStart;
    source.loopEnd = this._loopStart + this._loopLength;
    source.playbackRate.value = this._pitch;
    this._sourceNode = source;

    var gain = ctx.createGain();
    gain.gain.value = this._volume;
    this._gainNode = gain;

    var panner = ctx.createPanner();
    panner.panningModel = 'equalpower';
    this._pannerNode = panner;

    this._analyserNode = ctx.createAnalyser();
    this._updatePanner();
};
/**
 * Wires the audio graph: source -> gain -> panner -> analyser -> master gain.
 * The analyser sits last before the master, so getByteFrequencyData reflects
 * the fully processed (volume/pan) signal of this buffer.
 * Fixes: removed the unused local `context` and the commented-out
 * frequencyData allocation (dead code).
 */
WebAudio.prototype._connectNodes = function() {
    this._sourceNode.connect(this._gainNode);
    this._gainNode.connect(this._pannerNode);
    this._pannerNode.connect(this._analyserNode);
    this._analyserNode.connect(WebAudio._masterGainNode);
};
var context = WebAudio._context;
this._sourceNode = context.createBufferSource();
this._sourceNode.buffer = this._buffer;
this._sourceNode.loopStart = this._loopStart;
this._sourceNode.loopEnd = this._loopStart + this._loopLength;
this._sourceNode.playbackRate.value = this._pitch;
this._gainNode = context.createGain();
this._gainNode.gain.value = this._volume;
this._pannerNode = context.createPanner();
this._pannerNode.panningModel = 'equalpower';
this._analyserNode = context.createAnalyser();
this._updatePanner();
};
/**
 * Wires the audio graph: source -> gain -> panner -> analyser -> master gain.
 * The analyser sits last before the master, so getByteFrequencyData reflects
 * the fully processed (volume/pan) signal of this buffer.
 * Fixes: removed the unused local `context` and the commented-out
 * frequencyData allocation (dead code).
 */
WebAudio.prototype._connectNodes = function() {
    this._sourceNode.connect(this._gainNode);
    this._gainNode.connect(this._pannerNode);
    this._pannerNode.connect(this._analyserNode);
    this._analyserNode.connect(WebAudio._masterGainNode);
};
I was then able to extract the frequency and use it to project a spectrum.
Dairnon.DPS.SM_update = Spriteset_Map.prototype.update;
/**
 * Frame update hook: after the stock map update, draws a live frequency
 * spectrum of the current BGM using the analyser node added in WebAudio.
 * Fix: the bar height used `-frequencyData / 3`, which divides the whole
 * Uint8Array by 3 and yields NaN for every bar — it must index element i.
 */
Spriteset_Map.prototype.update = function() {
    Dairnon.DPS.SM_update.call(this);
    var bgm = AudioManager._bgmBuffer;
    if (bgm && bgm._analyserNode) {
        var analyser = bgm._analyserNode;
        analyser.fftSize = 512;
        var frequencyData = new Uint8Array(analyser.frequencyBinCount);
        analyser.getByteFrequencyData(frequencyData);
        var barColor = 0xff0000;
        this.drawTest.clear();
        this.drawTest.beginFill(barColor, 1);
        for (var i = 0; i < frequencyData.length; i++) {
            // Negative height draws the bar upward from y = 200.
            this.drawTest.drawRect(200 + 3 * i, 200, 1, -frequencyData[i] / 3);
        }
        this.drawTest.endFill();
    }
};
/**
 * Frame update hook: after the stock map update, draws a live frequency
 * spectrum of the current BGM using the analyser node added in WebAudio.
 * Fix: the bar height used `-frequencyData / 3`, which divides the whole
 * Uint8Array by 3 and yields NaN for every bar — it must index element i.
 */
Spriteset_Map.prototype.update = function() {
    Dairnon.DPS.SM_update.call(this);
    var bgm = AudioManager._bgmBuffer;
    if (bgm && bgm._analyserNode) {
        var analyser = bgm._analyserNode;
        analyser.fftSize = 512;
        var frequencyData = new Uint8Array(analyser.frequencyBinCount);
        analyser.getByteFrequencyData(frequencyData);
        var barColor = 0xff0000;
        this.drawTest.clear();
        this.drawTest.beginFill(barColor, 1);
        for (var i = 0; i < frequencyData.length; i++) {
            // Negative height draws the bar upward from y = 200.
            this.drawTest.drawRect(200 + 3 * i, 200, 1, -frequencyData[i] / 3);
        }
        this.drawTest.endFill();
    }
};
The following allowed me to get results like this when playing the bgm
My problem is that I don't really know where to connect the offline context to (or if I require a specific way to set it up). It appears most of the reading from audio files is already set up in MV, so I was hoping there was an easy way to link to what the already existing audio context is using, or for me to directly read off the bgm.buffer.
I've been following the code + article provided in this link to try and figure this out. But being unable to get the offlineContext working I've been stuck as to what to do next. Maybe one of the more advanced coders out there could help point me in the right direction.
https://jmperezperez.com/beats-audio-api/
Thanks for your time, I'll update this if I get any further with this.
Last edited by a moderator:

