topical media & game development
mobile-query-three-plugins-iimminigame-examples-vendor-webaudio.js / js
Tutorials:
http://www.html5rocks.com/en/tutorials/webaudio/games/
http://www.html5rocks.com/en/tutorials/webaudio/positional_audio/ <- +1 as it is three.js
http://www.html5rocks.com/en/tutorials/webaudio/intro/
Spec:
dvcs.w3.org/hg/audio/raw-file/tip/webaudio/specification.html
Chromium Demo:
http://chromium.googlecode.com/svn/trunk/samples/audio/index.html <- running page
http://code.google.com/p/chromium/source/browse/trunk/samples/audio/ <- source
Notes on removing tQuery dependency
* some stuff depends on tQuery
* find which one
* tQuery.Webaudio got a world link for the listener
* do a plugin with followListener(world), unfollowListener(world)
* namespace becomes WebAudio.* instead of tQuery.WebAudio.*
////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////
// WebAudio //
////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////
Main class to handle webkit audio
TODO make the clip detector from http://www.html5rocks.com/en/tutorials/webaudio/games/
@class Handle webkit audio API
parameter: none — the webkit audio context is created internally (the former tQuery.World parameter is gone)
// Main class wrapping the (webkit-prefixed) Web Audio API. Builds the master
// output chain every sound is routed through: sound -> gain -> compressor -> speakers.
WebAudio = function(){
  // the prefixed API must exist - fail loudly otherwise
  console.assert(WebAudio.isAvailable === true, 'webkitAudioContext isnt available on your browser');

  // the audio context all nodes are created from
  var ctx = this._ctx = new webkitAudioContext();

  // master volume, then a compressor to tame the mix, then the destination
  // TODO later code the clipping detection from http://www.html5rocks.com/en/tutorials/webaudio/games/
  this._gainNode = ctx.createGainNode();
  this._compressor = ctx.createDynamicsCompressor();
  this._gainNode.connect(this._compressor);
  this._compressor.connect(ctx.destination);
};
vendor.js way to make plugins ala jQuery
@namespace
// vendor.js way to make plugins ala jQuery: expose the prototype under the
// short `.fn` alias (same convention as jQuery.fn) so plugins can extend it.
WebAudio.fn = WebAudio.prototype;
destructor
// destructor - currently a no-op; the AudioContext and the master node
// chain are simply left for the garbage collector.
WebAudio.prototype.destroy = function(){
};
returns: {Boolean} true if it is available or not
// {Boolean} true when the prefixed Web Audio API is present in this browser
WebAudio.isAvailable = Boolean(window.webkitAudioContext);
////////////////////////////////////////////////////////////////////////////
// //
////////////////////////////////////////////////////////////////////////////
get the audio context
returns: {AudioContext} the audio context
// Accessor for the underlying audio context.
// returns: {AudioContext} the audio context
WebAudio.prototype.context = function(){
  return this._ctx;
};
Create a sound
returns: {WebAudio.Sound} the sound just created
// Factory for sounds bound to this WebAudio instance.
// returns: {WebAudio.Sound} the sound just created
WebAudio.prototype.createSound = function(){
  return new WebAudio.Sound(this);
};
return the entry node in the master node chains
// returns: {AudioNode} the node sounds must connect to (the master gain node)
WebAudio.prototype._entryNode = function(){
  return this._gainNode;
};
getter/setter on the volume
// Getter/setter for the master volume.
// parameter: {Number} [value] new volume; omit to read the current one
// returns: the current volume when used as a getter, `this` otherwise
WebAudio.prototype.volume = function(value){
  if( value === undefined ){
    return this._gainNode.gain.value;
  }
  this._gainNode.gain.value = value;
  return this;
};
Constructor
@class builder to generate nodes chains. Used in WebAudio.Sound
parameter: {webkitAudioContext} audioContext the audio context
// Builder that assembles chains of audio nodes. Used in WebAudio.Sound.
// parameter: {webkitAudioContext} audioContext the context nodes are created from
WebAudio.NodeChainBuilder = function(audioContext){
  console.assert( audioContext instanceof webkitAudioContext );
  this._context = audioContext;
  // first/last node of the chain, null while the chain is still empty
  this._firstNode = null;
  this._lastNode = null;
  // created nodes indexed by kind (bufferSource, gainNode, analyser, panner)
  this._nodes = {};
};
destructor
// destructor - nothing to release explicitly for now
WebAudio.NodeChainBuilder.prototype.destroy = function(){
};
getter for the nodes
// returns: {Object} the created nodes, keyed by kind
WebAudio.NodeChainBuilder.prototype.nodes = function(){
  return this._nodes;
};
returns: the first node of the chain
// returns: the first node of the chain (null while the chain is empty)
WebAudio.NodeChainBuilder.prototype.first = function(){
  return this._firstNode;
};
returns: the last node of the chain
// returns: the last node of the chain (null while the chain is empty)
WebAudio.NodeChainBuilder.prototype.last = function(){
  return this._lastNode;
};
// Append `node` to the chain: wire it after the current last node and copy
// `properties` onto it.
// returns: this, for chained API
WebAudio.NodeChainBuilder.prototype._addNode = function(node, properties){
  // remember the node placed right after a bufferSource - it is the
  // reconnection target used by .cloneBufferSource()
  var last = this._lastNode;
  if( last && ('playbackRate' in last) ){
    this._bufferSourceDst = node;
  }
  // hook the new node at the end of the chain
  if( last !== null ){
    last.connect(node);
  }
  if( this._firstNode === null ){
    this._firstNode = node;
  }
  this._lastNode = node;
  // copy the caller-supplied properties onto the freshly added node
  for( var key in properties ){
    node[key] = properties[key];
  }
  return this;
};
Clone the bufferSource. Used just before playing a sound
returns: {AudioBufferSourceNode} the clone AudioBufferSourceNode
// Clone the bufferSource node. Used just before playing a sound, because an
// AudioBufferSourceNode can only be started once.
// returns: {AudioBufferSourceNode} the clone, already connected into the chain
WebAudio.NodeChainBuilder.prototype.cloneBufferSource = function(){
  console.assert(this._nodes.bufferSource, "no buffersource presents. Add one.");
  var orig = this._nodes.bufferSource;
  var clone = this._context.createBufferSource();
  clone.buffer = orig.buffer;
  // playbackRate is an AudioParam: copy its .value - assigning the param
  // object itself (as the previous code did) does not transfer the rate
  clone.playbackRate.value = orig.playbackRate.value;
  clone.loop = orig.loop;
  clone.connect(this._bufferSourceDst);
  return clone;
};
add a bufferSource
parameter: {Object} [properties] properties to set in the created node
// Append an AudioBufferSourceNode to the chain.
// parameter: {Object} [properties] properties copied onto the created node
WebAudio.NodeChainBuilder.prototype.bufferSource = function(properties){
  var node = this._nodes.bufferSource = this._context.createBufferSource();
  return this._addNode(node, properties);
};
add a panner
parameter: {Object} [properties] properties to set in the created node
// Append a PannerNode to the chain.
// parameter: {Object} [properties] properties copied onto the created node
WebAudio.NodeChainBuilder.prototype.panner = function(properties){
  var node = this._nodes.panner = this._context.createPanner();
  return this._addNode(node, properties);
};
add a analyser
parameter: {Object} [properties] properties to set in the created node
// Append an AnalyserNode to the chain.
// parameter: {Object} [properties] properties copied onto the created node
WebAudio.NodeChainBuilder.prototype.analyser = function(properties){
  var node = this._nodes.analyser = this._context.createAnalyser();
  return this._addNode(node, properties);
};
add a gainNode
parameter: {Object} [properties] properties to set in the created node
// Append a GainNode to the chain.
// parameter: {Object} [properties] properties copied onto the created node
WebAudio.NodeChainBuilder.prototype.gainNode = function(properties){
  var node = this._nodes.gainNode = this._context.createGainNode();
  return this._addNode(node, properties);
};
sound instance
@class Handle one sound for WebAudio
parameter: {WebAudio} webaudio the WebAudio instance this sound belongs to
parameter: {WebAudio.NodeChainBuilder} [nodeChain] the nodeChain to use
// One playable sound, routed through webaudio's master chain.
// parameter: {WebAudio} webaudio the WebAudio instance this sound belongs to
// parameter: {WebAudio.NodeChainBuilder} [nodeChain] chain to use; a default
//   bufferSource -> gain -> analyser -> panner chain is built when omitted
WebAudio.Sound = function(webaudio, nodeChain){
  this._webaudio = webaudio;
  this._context = this._webaudio.context();
  console.assert( this._webaudio instanceof WebAudio );
  // build the default chain when the caller did not supply one
  if( nodeChain === undefined ){
    nodeChain = new WebAudio.NodeChainBuilder(this._context)
      .bufferSource().gainNode().analyser().panner();
  }
  console.assert( nodeChain instanceof WebAudio.NodeChainBuilder );
  this._chain = nodeChain;
  // plug this sound's chain into the master chain of webaudio
  this._chain.last().connect( this._webaudio._entryNode() );
  // aliases on the individual nodes of the chain
  var nodes = this._chain.nodes();
  this._source = nodes.bufferSource;
  this._gainNode = nodes.gainNode;
  this._analyser = nodes.analyser;
  this._panner = nodes.panner;
  // every one of those nodes is currently required
  console.assert(this._source , "no bufferSource: not yet supported")
  console.assert(this._gainNode , "no gainNode: not yet supported")
  console.assert(this._analyser , "no analyser: not yet supported")
  console.assert(this._panner , "no panner: not yet supported")
};
destructor
// Destructor: detach this sound from the master chain and drop its nodes.
WebAudio.Sound.prototype.destroy = function(){
  // disconnect from this._webaudio's entry node
  this._chain.last().disconnect();
  // release the node chain itself
  this._chain.destroy();
  this._chain = null;
};
vendor.js way to make plugins ala jQuery
@namespace
// vendor.js way to make plugins ala jQuery: `.fn` aliases the prototype so
// plugins can add methods to WebAudio.Sound.
WebAudio.Sound.fn = WebAudio.Sound.prototype;
////////////////////////////////////////////////////////////////////////////
// //
////////////////////////////////////////////////////////////////////////////
getter of the chain nodes
// returns: {Object} the chain's nodes, keyed by kind
WebAudio.Sound.prototype.nodes = function(){
  return this._chain.nodes();
};
returns: {Boolean} true if the sound is playable, false otherwise
// returns: {Boolean} true when a buffer is loaded and the sound can play
WebAudio.Sound.prototype.isPlayable = function(){
  return Boolean(this._source.buffer);
};
play the sound
parameter: {Number} [time] time when to play the sound
// Start playing the sound.
// parameter: {Number} [time=0] context time at which to start
// returns: {Object} a handle exposing .node and .stop(time)
WebAudio.Sound.prototype.play = function(time){
  if( time === undefined ) time = 0;
  // bufferSource nodes are one-shot, so each playback uses a fresh clone
  var node = this._chain.cloneBufferSource();
  node.noteOn(time);
  // small handle letting the caller stop this particular playback
  var source = {
    node : node,
    stop : function(stopTime){
      if( stopTime === undefined ) stopTime = 0;
      this.node.noteOff(stopTime);
      return source; // for chained API
    }
  };
  return source;
};
getter/setter on the volume
parameter: {Number} [value] the value to set, if not provided, get current value
// Getter/setter for this sound's volume.
// parameter: {Number} [value] new volume; omit to read the current one
WebAudio.Sound.prototype.volume = function(value){
  if( value === undefined ){
    return this._gainNode.gain.value;
  }
  this._gainNode.gain.value = value;
  return this; // for chained API
};
getter/setter on the loop
parameter: {Number} [value] the value to set, if not provided, get current value
// Getter/setter for the loop flag of the source.
// parameter: {Boolean} [value] new flag; omit to read the current one
WebAudio.Sound.prototype.loop = function(value){
  if( value === undefined ){
    return this._source.loop;
  }
  this._source.loop = value;
  return this; // for chained API
};
getter/setter on the source buffer
parameter: {Number} [value] the value to set, if not provided, get current value
// Getter/setter for the source buffer.
// parameter: {AudioBuffer} [value] new buffer; omit to read the current one
WebAudio.Sound.prototype.buffer = function(value){
  if( value === undefined ){
    return this._source.buffer;
  }
  this._source.buffer = value;
  return this; // for chained API
};
Set parameter for the pannerCone
parameter: {Number} innerAngle the inner cone hangle in radian
parameter: {Number} outerAngle the outer cone hangle in radian
parameter: {Number} outerGain the gain to apply when in the outerCone
// Configure the panner cone in one call.
// parameter: {Number} innerAngle the inner cone angle in radians
// parameter: {Number} outerAngle the outer cone angle in radians
// parameter: {Number} outerGain the gain applied outside the outer cone
WebAudio.Sound.prototype.pannerCone = function(innerAngle, outerAngle, outerGain){
  // the underlying panner works in degrees; this wrapper takes radians
  this._panner.coneInnerAngle = innerAngle * 180 / Math.PI;
  this._panner.coneOuterAngle = outerAngle * 180 / Math.PI;
  this._panner.coneOuterGain = outerGain;
  return this; // for chained API
};
getter/setter on the pannerConeInnerAngle
parameter: {Number} value the angle in radian
// Getter/setter for the panner's inner cone angle, expressed in radians.
// parameter: {Number} [value] the angle in radians; omit to read it
WebAudio.Sound.prototype.pannerConeInnerAngle = function(value){
  if( value === undefined ){
    // stored in degrees by the API, exposed in radians here
    return this._panner.coneInnerAngle / 180 * Math.PI;
  }
  this._panner.coneInnerAngle = value * 180 / Math.PI;
  return this; // for chained API
};
getter/setter on the pannerConeOuterAngle
parameter: {Number} value the angle in radian
// Getter/setter for the panner's outer cone angle, expressed in radians.
// parameter: {Number} [value] the angle in radians; omit to read it
WebAudio.Sound.prototype.pannerConeOuterAngle = function(value){
  if( value === undefined ){
    // stored in degrees by the API, exposed in radians here
    return this._panner.coneOuterAngle / 180 * Math.PI;
  }
  this._panner.coneOuterAngle = value * 180 / Math.PI;
  return this; // for chained API
};
getter/setter on the pannerConeOuterGain
parameter: {Number} value the value
// Getter/setter for the gain applied outside the panner's outer cone.
// parameter: {Number} [value] the gain; omit to read the current one
WebAudio.Sound.prototype.pannerConeOuterGain = function(value){
  if( value === undefined ){
    return this._panner.coneOuterGain;
  }
  this._panner.coneOuterGain = value;
  return this; // for chained API
};
compute the amplitude of the sound (not sure at all it is the proper term)
parameter: {Number} width the number of frequencyBin to take into account
returns: {Number} return the amplitude of the sound
// Estimate the current amplitude of the sound from the analyser's lowest
// frequency bins (a rough loudness measure).
// parameter: {Number} [width=2] number of frequency bins to average over
// returns: {Number} amplitude normalized to [0, 1]
WebAudio.Sound.prototype.amplitude = function(width){
  // handle parameter
  width = width !== undefined ? width : 2;
  var analyser = this._analyser;
  var freqByte = new Uint8Array(analyser.frequencyBinCount);
  // snapshot the current frequency-domain data (bytes in [0, 255])
  analyser.getByteFrequencyData(freqByte);
  // sum the first `width` bins
  var sum = 0;
  for(var i = 0; i < width; i++){
    sum += freqByte[i];
  }
  // normalize by the maximum possible sum: each bin peaks at 255, so the
  // divisor is width*255 (the previous `width*256-1` was mis-parenthesized)
  return sum / (width * 255);
};
Generate a sinusoid buffer.
FIXME should likely be in a plugin
// Fill this sound's buffer with a looping sine tone.
// FIXME should likely be in a plugin
// parameter: {Number} [hertz=200] tone frequency parameter
// parameter: {Number} [seconds=1] buffer duration
// returns: {WebAudio.Sound} this, for chained API
WebAudio.Sound.prototype.tone = function(hertz, seconds){
  // handle parameters
  hertz = hertz !== undefined ? hertz : 200;
  seconds = seconds !== undefined ? seconds : 1;
  // generation settings
  var nChannels = 1;
  var sampleRate = 44100;
  var amplitude = 2;
  // create the buffer from this sound's own context - the previous code
  // referenced an undefined global `webaudio`, throwing a ReferenceError
  var buffer = this._context.createBuffer(nChannels, seconds*sampleRate, sampleRate);
  var fArray = buffer.getChannelData(0);
  // fill the buffer with a sinusoid
  // NOTE(review): `hertz * time * Math.PI` produces a tone at hertz/2 (a true
  // `hertz` tone needs 2*Math.PI), and amplitude 2 exceeds the [-1,1] sample
  // range - both kept as-is to preserve the original audible behavior
  for(var i = 0; i < fArray.length; i++){
    var time = i / buffer.sampleRate;
    var angle = hertz * time * Math.PI;
    fArray[i] = Math.sin(angle)*amplitude;
  }
  // install the buffer and loop it
  this.buffer(buffer).loop(true);
  return this; // for chained API
};
Put this function in .Sound with getByteFrequencyData as a private callback
// Build an nBar-bucket histogram of the analyser's byte frequency data.
// parameter: {Number} nBar number of histogram bars to produce
// returns: {Array} average byte value (0..255) per bar
WebAudio.Sound.prototype.makeHistogram = function(nBar){
  var analyser = this._analyser;
  // allocate the private buffer once - avoids reallocating it every frame
  //this._privHisto = this._privHisto || new Float32Array(analyser.frequencyBinCount);
  this._privHisto = this._privHisto || new Uint8Array(analyser.frequencyBinCount);
  var freqData = this._privHisto;
  // snapshot the current frequency-domain data
  //analyser.getFloatFrequencyData(freqData)
  analyser.getByteFrequencyData(freqData);
  //analyser.getByteTimeDomainData(freqData)
  // downsample srcArr into dstLength buckets by averaging
  // (this should be in imageprocessing.js almost)
  var makeHisto = function(srcArr, dstLength){
    var barW = Math.floor(srcArr.length / dstLength);
    // guard against dstLength > srcArr.length, which would make barW 0
    if( barW < 1 ) barW = 1;
    // only iterate over complete bars: the previous loop ran while
    // x < srcArr.length, so a trailing partial bar averaged in `undefined`
    // samples and produced a NaN entry
    var nBars = Math.floor(srcArr.length / barW);
    var arr = [];
    for(var x = 0, arrIdx = 0; arrIdx < nBars; arrIdx++){
      var sum = 0;
      for(var i = 0; i < barW; i++, x++){
        sum += srcArr[x];
      }
      arr[arrIdx] = sum / barW;
    }
    return arr;
  };
  // build and return the histogram
  return makeHisto(freqData, nBar);
};
////////////////////////////////////////////////////////////////////////////
// //
////////////////////////////////////////////////////////////////////////////
Load a sound
parameter: {String} url the url of the sound to load
parameter: {Function} callback function to notify once the url is loaded (optional)
// Load a sound into this sound's buffer.
// parameter: {String} url the url of the sound to load
// parameter: {Function} [callback] notified with this sound once loaded
// returns: {WebAudio.Sound} this, for chained API
WebAudio.Sound.prototype.load = function(url, callback){
  var self = this;
  this._loadAndDecodeSound(url, function(buffer){
    // install the decoded buffer and notify the caller
    self._source.buffer = buffer;
    if( callback ) callback(self);
  }, function(){
    console.warn("unable to load sound "+url);
  });
  return this; // for chained API
};
Load and decode a sound
parameter: {String} url the url where to get the sound
parameter: {Function} onLoad the function called when the sound is loaded and decoded (optional)
parameter: {Function} onError the function called when an error occured (optional)
// Fetch `url` as an ArrayBuffer and decode it into an AudioBuffer.
// parameter: {String} url the url where to get the sound
// parameter: {Function} [onLoad] called with the decoded AudioBuffer
// parameter: {Function} [onError] called when loading or decoding fails
WebAudio.Sound.prototype._loadAndDecodeSound = function(url, onLoad, onError){
  var context = this._context;
  var request = new XMLHttpRequest();
  request.open('GET', url, true);
  request.responseType = 'arraybuffer';
  // decode asynchronously once the bytes have arrived
  request.onload = function(){
    context.decodeAudioData(request.response, function(buffer){
      onLoad && onLoad(buffer);
    }, function(){
      onError && onError();
    });
  };
  // previously network failures were silently dropped: `onload` never fires
  // on a connection error, so surface those through onError as well
  request.onerror = function(){
    onError && onError();
  };
  // actually start the request
  request.send();
};
gowiththeflow.js - a javascript flow control micro library
github.com/jeromeetienne/gowiththeflow.js
// gowiththeflow.js - a javascript flow-control micro library, see
// github.com/jeromeetienne/gowiththeflow.js
// Usage: WebAudio.Flow().seq(fn).par(fn).par(fn).seq(fn) - each callback
// receives (next, err, result) and must call next(err, result) when done.
WebAudio.Flow = function(){
// `stack` holds groups of callbacks: each group runs in parallel, groups run
// one after another. setTimeout(0) defers the first step so the caller can
// finish queueing callbacks before execution starts.
var self, stack = [], timerId = setTimeout(function(){ timerId = null; self._next(); }, 0);
return self = {
// cancel the deferred kick-off if the flow has not started yet
destroy : function(){ timerId && clearTimeout(timerId); },
// queue `callback` into the current parallel group (a new group is opened
// when isSeq is set or no group is open yet)
par : function(callback, isSeq){
if(isSeq || !(stack[stack.length-1] instanceof Array)) stack.push([]);
stack[stack.length-1].push(callback);
return self;
// a sequential step is just a parallel group of exactly one callback
},seq : function(callback){ return self.par(callback, true); },
// run the next group: invoke every callback, gather errors/results by index,
// and advance once they have all called back
_next : function(err, result){
var errors = [], results = [], callbacks = stack.shift() || [], nbReturn = callbacks.length, isSeq = nbReturn == 1;
for(var i = 0; i < callbacks.length; i++){
(function(fct, index){
fct(function(error, result){
errors[index] = error;
results[index] = result;
// for a single-callback (seq) group, unwrap the one-element arrays
if(--nbReturn == 0) self._next(isSeq?errors[0]:errors, isSeq?results[0]:results)
}, err, result)
})(callbacks[i], i);
}
}
}
};
(C) Æliens
04/09/2009
You may not copy or print any of this material without explicit permission of the author or the publisher.
In case of other copyright issues, contact the author.