1 /**
  2  * This file is part of the Web Enabled Audio and Sound Enhancement Library (aka the Weasel audio library), Copyright 2011 - 2013 Warren Willmey. It is covered by the GNU General Public License version 3 as published by the Free Software Foundation. You should have received a copy of the GNU General Public License along with this program; if not, see <http://www.gnu.org/licenses/>.
  3  */
  4 
  5 if( undefined == window.weasel ) window.weasel = {};
  6 
  7 // ---------------------------------------------------------------------------
  8 /** Object to abstract the different methods for playing audio in different browsers.
  9  * 
 10  * @constructor
 11  * @param {int} iPlaybackFrequency = The playback frequency in hertz to use.
 12  * @param {int} iIntervalMsRate = The expected interval rate between feeds in milliseconds.
 13  * 
 14  * @author Warren Willmey 2011
 15  */
 16 weasel.BrowserAudio = function( iPlaybackFrequency, iIntervalMsRate )
 17 {
 18 
 19 	this.iPlaybackFrequency	= iPlaybackFrequency;
 20 	this.iIntervalMsRate	= iIntervalMsRate;
 21 	this.iSamplesPerInterval= 0;
 22 	this.iSoundChannels	= 2;
 23 	this.iAudioType		= 0;
 24 	this.bPause = true;
 25 	this.iGlobalOverrideInterpolation = 0;
 26 
 27 	this.iAudioSubSystemLag = 0;
 28 	this.fPreBufferingInMS = 270;	// Try and load this amount of audio (in milliseconds) into the audio subsystem to stop crackles and pops in the audio. Extra large due to Firefox 15 :(
 29 	this.oAudio			= null;
 30 	/** @type {weasel.AudioBuffer} */
 31 	this.oAudioBuffer	= null;
 32 	this.iAudioBufferSizeDivider = 5;
 33 	
 34 	this.oAudioContext	= null;
 35 	/** @type {JavaScriptAudioNode} */
 36 	this.oAudioNode		= null;
 37 	this.iAudioContextBufferSize = 4096;
 38 	this.lOldPlayedSamples = 0;
 39 	
 40 	this.lAudioContextUsageMS = 0;
 41 	this.lProfileInMS = 0;
 42 
 43 	/** @type {weasel.UltimateSoundTracker121|weasel.UltimateSoundTracker18|weasel.DOCSoundTracker9|weasel.DOCSoundTracker22|weasel.TJCSoundTracker2|weasel.DefJamSoundTracker3|weasel.SpreadpointSoundTracker23|weasel.SpreadpointSoundTracker25|weasel.NoiseTracker11|weasel.NoiseTracker20|weasel.ProTrackerMK|weasel.FSTModule} */
 44 	this.oModule = null;
 45 	
 46 	this.__setSamplesPerInterval();
 47 	
 48 	/** @type {weasel.Base64Stream} */
 49 	this.oBase64Stream = null;
 50 	this.aHTML5AudioObjects = new Array( 2 );
 51 	this.iNextHTML5AudioObject = 0;
 52 	this.bNastyHackForHTMLAudioInFirefox3 = false;
 53 	this.iSyncHTML5AudioTimeBase = weasel.Helper.getHighRezTimer();
 54 	this.iSyncHTML5AudioTimeDelta = 0;
 55 	this.lHTML5AudioUsageMS = 0;
 56 	this.iHTML5AudioPushTimerID = null;
 57 	this.fHTML5AudioPushTimerInSeconds = 2.0;
 58 	this.bHTML5LowFidelityMode = true;
 59 	this.bIgnoreFilter = true;
 60 
 61 	this.__sniffAudioSupport();
 62 };
 63 
 64 // ---------------------------------------------------------------------------
 65 /**
 66  * Constants of the different browser audio sub systems.
 67  * @const
 68  * @enum
 69  */
 70 
 71 weasel.BrowserAudio.prototype.AudioType = {
 72 		  None			: 0
 73 		, HTML5Audio	: 1
 74 		, Mozilla		: 2
 75 		, AudioContext	: 3
 76 };
 77 
 78 // ---------------------------------------------------------------------------
 79 /**
 80  * Get list of supported browser audio types.
 81  * 
 82  * @return {weasel.BrowserAudio.AudioType} = The list of supported browser audio types.
 83  */
 84 weasel.BrowserAudio.prototype.getSupportedBrowserAudioTypes = function()
 85 {
 86 	return weasel.BrowserAudio.prototype.AudioType;
 87 };
 88 
 89 // ---------------------------------------------------------------------------
 90 /**
 91  * Set the number of samples per interval (between feeds).
 92  */
 93 weasel.BrowserAudio.prototype.__setSamplesPerInterval= function( )
 94 {
 95 	this.iSamplesPerInterval = ( this.iPlaybackFrequency / (1000 / this.iIntervalMsRate ) )|0;
 96 };
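
// Illustrative worked example, assuming a playback frequency of 44100Hz and a 20ms
// interval between feeds:
//	iSamplesPerInterval = (44100 / (1000 / 20))|0 = 882 samples per interval.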
 97 
 98 // ---------------------------------------------------------------------------
 99 /**
100  * The active browser audio that is/will be used for output.
101  * 
102  * @return {int} weasel.BrowserAudio.AudioType = The active browser audio type.
103  */
104 weasel.BrowserAudio.prototype.getAudioType = function( )
105 {
106 	return this.iAudioType;
107 };
108 
109 // ---------------------------------------------------------------------------
110 /**
111  * Get the current replay frequency.
112  * 
113  * @return {int} = The currently used replay frequency in Hz (e.g. 44100, 48000 etc).
114  */
115 weasel.BrowserAudio.prototype.getPlaybackFrequency = function( )
116 {
117 	return this.iPlaybackFrequency;
118 };
119 
120 // ---------------------------------------------------------------------------
121 /**
122  * Get the number of sound channels used for playback (not the number of channels in the mod)
123  * 
124  * @return {int} = The number of sound channels ( 1= mono, 2 = stereo).
125  */
126 weasel.BrowserAudio.prototype.getNumberOfSoundChannels = function( )
127 {
128 	return this.iSoundChannels;
129 };
130 
131 // ---------------------------------------------------------------------------
132 /**
133  * Get the expected interval rate between audio updates in Milliseconds.
134  * 
135  * @return {int} = The expected interval rate between audio updates in Milliseconds.
136  */
137 weasel.BrowserAudio.prototype.getIntervalRate = function( )
138 {
139 	return this.iIntervalMsRate;
140 };
141 
142 // ---------------------------------------------------------------------------
143 /**
144  * Get the number of samples per interval (this is the number of samples that need to be created per interval).
145  * 
146  * @return {int} = The number of samples per interval.
147  */
148 weasel.BrowserAudio.prototype.getSamplesPerInterval = function( )
149 {
150 	return this.iSamplesPerInterval;
151 };
152 
153 // ---------------------------------------------------------------------------
154 /**
155  * Get the *estimated* Audio sub system lag (not all sub systems support this). The value
156  * returned may not include the time taken to work its way through the operating system
157  * to the sound card and out of the speakers; different O/S and drivers return different values
158  * (Pulse+Alsa is different from Alsa alone, which is different from Windows XP). So it is best
159  * to give the USER control over how much pre-buffering they should have.
160  * 
161  * @return {int} = The lag in samples between written and played.
162  */
163 weasel.BrowserAudio.prototype.getAudioSubSystemLag = function( )
164 {
165 	return this.iAudioSubSystemLag;
166 };
167 
168 // ---------------------------------------------------------------------------
169 /**
170  * Get the current amount of pre buffering used on the audio sub system (applies to Mozilla audio; AudioContext is request based).
171  * 
172  * @return {float} = The target pre buffering in milliseconds.
173  */
174 weasel.BrowserAudio.prototype.getPreBufferingInMS = function( )
175 {
176 	return this.fPreBufferingInMS;
177 };
178 
179 // ---------------------------------------------------------------------------
180 /**
181  * Set the current pre buffering used on the audio sub system.
182  * 
183  * @param {float} fPreBufferingInMS = The target amount of pre buffering to use in the audio sub system (although anything more than 500ms-intervalMSRate is asking for trouble, expect "weird" behaviour!).
184  */
185 weasel.BrowserAudio.prototype.setPreBufferingInMS = function( fPreBufferingInMS )
186 {
187 	if( fPreBufferingInMS > 1000 )
188 		fPreBufferingInMS = 1000;
189 	if( fPreBufferingInMS < 0 )
190 		fPreBufferingInMS = 0;
191 
192 	if( weasel.BrowserAudio.prototype.AudioType.AudioContext == this.iAudioType )
193 	{
194 		// AudioContext has very limited control over its buffer size, which is basically a limited selection of 2^n.
195 		//
196 		var iBufferSize = ((this.iPlaybackFrequency / 1000.0) * fPreBufferingInMS) / 2;
197 		
198 		var iAllowedBufferSize = 256;
199 		while( iBufferSize > iAllowedBufferSize && iAllowedBufferSize < 16384 )
200 		{
201 			iAllowedBufferSize += iAllowedBufferSize;
202 		}
203 
204 		if( iAllowedBufferSize != this.iAudioContextBufferSize )
205 		{
206 			// Buffer size has actually changed, recreate AudioContext object.
207 			//
208 			this.iAudioContextBufferSize = iAllowedBufferSize;
209 
210 			this.__createAudioContextHooks();
211 		}
212 
213 		// AudioContext appears to have an additional audio buffer of equal size
214 		// in between you and the speakers, as doubling the size of the buffer
215 		// synchronises audio and oscilloscopes.
216 		//
217 		fPreBufferingInMS = ((this.iAudioContextBufferSize * 2) / this.iPlaybackFrequency) * 1000.0;
218 	}
219 
220 	this.fPreBufferingInMS = fPreBufferingInMS |0;
221 };
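
// Illustrative worked example, assuming 44100Hz playback and a request for 270ms of
// pre-buffering:
//	iBufferSize = ((44100 / 1000.0) * 270) / 2 = 5953.5
// The doubling loop above rounds the allowed buffer size up through 256, 512, ... to 8192,
// and the reported pre-buffering then becomes ((8192 * 2) / 44100) * 1000 ~= 371ms,
// reflecting the extra buffer AudioContext appears to keep between you and the speakers.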
222 
223 
224 
225 // ---------------------------------------------------------------------------
226 /**
227  * Get the current AudioBuffer object.
228  * 
229  * @return {weasel.AudioBuffer} = The AudioBuffer object.
230  */
231 weasel.BrowserAudio.prototype.getAudioBuffer = function( )
232 {
233 	return this.oAudioBuffer;
234 };
235 
236 // ---------------------------------------------------------------------------
237 /**
238  * Get the number of milliseconds it took to create the audio data for this 
239  * interval (it's like measuring in ice ages!!); be aware that the system clock 
240  * may not be that accurate either.
241  * 
242  * @return {int} = Profile time in Milliseconds taken to create audio.
243  */
244 weasel.BrowserAudio.prototype.getProfileInMS = function( )
245 {
246 	var lAudioUsageMS = this.lProfileInMS;
247 
248 	if( 0 == lAudioUsageMS )
249 	{
250 		// Date.now() - Date.now() = 0, but may have actually taken 0.99ms.
251 		// Should not be an issue when the High Resolution timer is used
252 		// but many older browsers don't have it.
253 		//
254 		lAudioUsageMS++;
255 	}
256 
257 	var iAudioType = this.getAudioType();
258 	if( iAudioType == weasel.BrowserAudio.prototype.AudioType.AudioContext )
259 	{
260 		// AudioContext also has some additional overheads.
261 		//
262 		lAudioUsageMS += this.lAudioContextUsageMS;
263 		this.lAudioContextUsageMS = 0;
264 	}
265 	else if( iAudioType == weasel.BrowserAudio.prototype.AudioType.HTML5Audio )
266 	{
267 		// HTML5 Audio also has some additional overheads.
268 		//
269 		lAudioUsageMS += this.lHTML5AudioUsageMS;
270 		this.lHTML5AudioUsageMS = 0;
271 	}
272 
273 	return lAudioUsageMS;
274 };
275 
276 
277 // ---------------------------------------------------------------------------
278 /**
279  * Get the current module playing.
280  * 
281  * @return {weasel.UltimateSoundTracker121|weasel.UltimateSoundTracker18|weasel.DOCSoundTracker9|weasel.DOCSoundTracker22|weasel.TJCSoundTracker2|weasel.DefJamSoundTracker3|weasel.SpreadpointSoundTracker23|weasel.SpreadpointSoundTracker25|weasel.NoiseTracker11|weasel.NoiseTracker20|weasel.ProTrackerMK|weasel.FSTModule} = The current module set to play.
282  */
283 weasel.BrowserAudio.prototype.getModule = function( )
284 {
285 	return this.oModule;
286 };
287 
288 // ---------------------------------------------------------------------------
289 /**
290  * Set the module to play.
291  * 
292  * @param {weasel.UltimateSoundTracker121|weasel.UltimateSoundTracker18|weasel.DOCSoundTracker9|weasel.DOCSoundTracker22|weasel.TJCSoundTracker2|weasel.DefJamSoundTracker3|weasel.SpreadpointSoundTracker23|weasel.SpreadpointSoundTracker25|weasel.NoiseTracker11|weasel.NoiseTracker20|weasel.ProTrackerMK|weasel.FSTModule} oModule = The Ultimate Soundtracker module to play.
293  */
294 weasel.BrowserAudio.prototype.setModule = function( oModule )
295 {
296 	if( oModule instanceof weasel.UltimateSoundTracker121 || oModule instanceof weasel.UltimateSoundTracker18 || oModule instanceof weasel.DOCSoundTracker9 || oModule instanceof weasel.DOCSoundTracker22 || oModule instanceof weasel.TJCSoundTracker2 || oModule instanceof weasel.DefJamSoundTracker3 || oModule instanceof weasel.SpreadpointSoundTracker23 || oModule instanceof weasel.SpreadpointSoundTracker25 || oModule instanceof weasel.NoiseTracker11 || oModule instanceof weasel.NoiseTracker20 || oModule instanceof weasel.ProTrackerMK || oModule instanceof weasel.FSTModule )
297 	{
298 		var oOldModule = this.oModule;
299 		this.oModule = oModule;
300 		this.getAudioBuffer().clearAudioBuffers();
301 
302 		this.setInterpolation( this.iGlobalOverrideInterpolation );
303 
304 		if( oOldModule )
305 		{
306 			// Copy over master volume setting.
307 			//
308 			oModule.setMasterVolume( oOldModule.getMasterVolume() );
309 		}
310 
311 		if( this.iPlaybackFrequency != oModule.getPlaybackFrequency() )
312 		{
313 			oModule.changePlaybackFrequency( this.iPlaybackFrequency );
314 		}
315 	}
316 	else
317 	{
318 		this.oModule = null;
319 	}
320 };
321 
322 // ---------------------------------------------------------------------------
323 /** Get the user agent string of the browser.
324  * 
325  * @return {string} The user agent string of the browser.
326  */
327 weasel.BrowserAudio.prototype.__getUserAgent = function( )
328 {
329 	return window.navigator.userAgent;
330 };
331 
332 // ---------------------------------------------------------------------------
333 /**
334  * Sniff for browser supported audio (WebkitAudioContext, AudioContext, Mozilla, HTML5 Audio etc).
335  * @private
336  */
337 weasel.BrowserAudio.prototype.__sniffAudioSupport = function( )
338 {
339 	this.iAudioType = weasel.BrowserAudio.prototype.AudioType.None;
340 
341 	if( window.webkitAudioContext && !window.AudioContext )
342 	{
343 		// Map webkitAudioContext to window.AudioContext.
344 		//
345 		window.AudioContext = window.webkitAudioContext;
346 	}
347 
348 	if( window.AudioContext )
349 	{
350 		if( !window.AudioContext.prototype.createScriptProcessor )
351 		{
352 			// The AudioContext createJavaScriptNode() instance method was renamed (see test),
353 			// it is now called createScriptProcessor().
354 			// Map the prior implementation to the new name on the prototype so instances pick it up.
355 			//
356 			window.AudioContext.prototype.createScriptProcessor = window.AudioContext.prototype.createJavaScriptNode;
357 		}
358 		
359 		this.iAudioType = weasel.BrowserAudio.prototype.AudioType.AudioContext;
360 
361 		return;
362 	}
363 	
364 	if( window.Audio )
365 	{
366 		var oAudio = new Audio();
367 
368 		if( oAudio.mozSetup && oAudio.mozCurrentSampleOffset && oAudio.mozWriteAudio )
369 		{
370 			// Mozilla Audio support.
371 			//
372 			this.iAudioType = weasel.BrowserAudio.prototype.AudioType.Mozilla;
373 			this.oAudio = oAudio;
374 
375 			return;
376 		}
377 		else
378 		{
379 			// HTML5 Audio support, which is a kludge at best...
380 			// Force feed with encoded .wav files within a dataURI.
381 			//
382 			this.iAudioType = weasel.BrowserAudio.prototype.AudioType.HTML5Audio;
383 			
384 			// Look for 'Gecko' to identify nasty Firefox < 4.0 Audio behaviour (can't be detected via object detection grrr ).
385 			// Which is that the HTMLAudio cannot be updated by just changing the Audio.src address; it also requires a call
386 			// to Audio.load(). This method works in Opera & Chromium but it is much slower.
387 			// This also affects other browsers that use the Gecko engine, e.g. Ice Weasel etc.
388 			// Midori 0.2.2 also requires the call to Audio.load(), even though it's WebKit based.
389 			// Epiphany or "Web" Browser 2.30.6, also requires the Audio.load() function called to play correctly.
390 			//
391 			var sBrowserUserAgent = this.__getUserAgent();
392 
393 			if( -1 != sBrowserUserAgent.search(/(gecko|midori|epiphany)/i) )
394 			{
395 				if( -1 == sBrowserUserAgent.search(/(webkit|khtml|opera)+/i) || -1 != sBrowserUserAgent.search(/(midori|epiphany)+/i) )
396 				{
397 					this.bNastyHackForHTMLAudioInFirefox3 = true;
398 				}
399 			}
400 
401 			return;
402 		}
403 	}
404 };
405 
406 // ---------------------------------------------------------------------------
407 /**
408  * Create the required feeder for the discovered browser audio system. You only need to call this
409  * function once to start the browser audio sub system; after that use stop() and start() to stop/pause/un-pause.
410  */
411 weasel.BrowserAudio.prototype.init = function( )
412 {
413 	switch( this.iAudioType )
414 	{
415 		case weasel.BrowserAudio.prototype.AudioType.AudioContext :
416 				this.__createAudioContextHooks();
417 		break;
418 
419 		case weasel.BrowserAudio.prototype.AudioType.Mozilla :
420 				this.__createMozillaAudioHooks();
421 		break;
422 
423 		case weasel.BrowserAudio.prototype.AudioType.HTML5Audio :
424 				this.__createHTML5AudioHooks();
425 		break;
426 
427 		case weasel.BrowserAudio.prototype.AudioType.None :
428 			this.__createNoneAudioHooks();
429 		break;
430 
431 	};
432 };
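
// Usage sketch (illustrative only; the 44100Hz/20ms values and the oModule variable,
// which stands for an instance of one of the supported module classes such as
// weasel.ProTrackerMK, are assumptions):
//
//	var oBrowserAudio = new weasel.BrowserAudio( 44100, 20 );
//	oBrowserAudio.init();
//	oBrowserAudio.setModule( oModule );
//	oBrowserAudio.start();
//
//	// Keep the audio sub system fed at the interval rate given to the constructor.
//	setInterval( function(){ oBrowserAudio.feedAudio(); }, oBrowserAudio.getIntervalRate() );
//
//	// Later: oBrowserAudio.stop() to pause, oBrowserAudio.start() to resume.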
433 
434 // ---------------------------------------------------------------------------
435 /**
436  * Stop audio (can be used as a pause).
437  */
438 weasel.BrowserAudio.prototype.stop = function( )
439 {
440 	this.bPause = true;
441 
442 	switch( this.iAudioType )
443 	{
444 		case weasel.BrowserAudio.prototype.AudioType.AudioContext :
445 
446 				if( this.oAudioNode )
447 				{
448 					this.oAudioNode.disconnect();
449 				}
450 
451 			break;
452 
453 		case weasel.BrowserAudio.prototype.AudioType.HTML5Audio :
454 
455 				this.aHTML5AudioObjects[ (this.iNextHTML5AudioObject + 1) & 1 ].pause();
456 
457 				if( null != this.iHTML5AudioPushTimerID )
458 				{
459 					clearInterval( this.iHTML5AudioPushTimerID );
460 					this.iHTML5AudioPushTimerID = null; 
461 				}
462 
463 			break;
464 	};
465 };
466 
467 // ---------------------------------------------------------------------------
468 /**
469  * Start/restart/un-pause stopped audio. 
470  */
471 weasel.BrowserAudio.prototype.start = function( )
472 {
473 	this.bPause = false;
474 
475 	switch( this.iAudioType )
476 	{
477 		case weasel.BrowserAudio.prototype.AudioType.AudioContext :
478 
479 				if( this.oAudioNode )
480 				{
481 					this.oAudioNode.connect( this.oAudioContext.destination );
482 				}
483 
484 			break;
485 
486 		case weasel.BrowserAudio.prototype.AudioType.HTML5Audio :
487 
488 				this.iSyncHTML5AudioTimeBase = weasel.Helper.getHighRezTimer();
489 				this.iSyncHTML5AudioTimeDelta = 0;
490 
491 				this.aHTML5AudioObjects[ (this.iNextHTML5AudioObject + 1) & 1 ].play();
492 
493 				if( null == this.iHTML5AudioPushTimerID )
494 				{
495 					var oSelf = this;
496 					this.iHTML5AudioPushTimerID = setInterval( function(){ oSelf.__pushAudioToHTML5(); }, this.fHTML5AudioPushTimerInSeconds * 1000 );
497 				}
498 
499 			break;
500 
501 	};
502 };
503 
504 // ---------------------------------------------------------------------------
505 /**
506  * Create all the required bits and pieces needed for audio. Even though we aren't
507  * actually feeding any Browser Audio sub system, this still allows us to see the audio visually.
508  * 
509  * @private
510  */
511 weasel.BrowserAudio.prototype.__createNoneAudioHooks = function( )
512 {
513 	this.oAudioBuffer = new weasel.AudioBuffer( this.iPlaybackFrequency, this.iSoundChannels, this.iSamplesPerInterval );
514 };
515 
516 // ---------------------------------------------------------------------------
517 /** Create all the bits and pieces needed for HTML5 Audio, which is played by
518  * buffering up 2 seconds of audio and creating a wav file, encoding the wav file
519  * to Base64 and playing via a DataURI. It is far from perfect.
520  * 
521  * @private
522  */
523 weasel.BrowserAudio.prototype.__createHTML5AudioHooks = function( )
524 {
525 	this.oAudioBuffer = new weasel.AudioBuffer( this.iPlaybackFrequency, this.iSoundChannels, this.iPlaybackFrequency * this.fHTML5AudioPushTimerInSeconds );
526 
527 	// 16 Bit Stereo mode for HTML5 is not advised for "Real Time" use, as the Audio() object
528 	// takes forever to decode and start playing the dataURI, causing everything to hang. :(
529 	//
530 	var iBitDepth = 16;
531 	var iSoundChannels = this.iSoundChannels;
532 
533 	if( this.bHTML5LowFidelityMode )
534 	{
535 		// Mono, 8 Bit audio. Uses 4 times less data in the dataURI than the 16bit Stereo version.
536 		// Meaning it's less likely to cause those nasty pauses every 2 seconds.
537 		//
538 		iBitDepth = 8;
539 		iSoundChannels = 1;
540 	}
541 
542 	var aWaveFileHeader = this.__createWaveFileHeader( this.iPlaybackFrequency, iSoundChannels, iBitDepth, this.iPlaybackFrequency * this.fHTML5AudioPushTimerInSeconds );
543 
544 	this.aHTML5AudioObjects[ 0 ] = new Audio();
545 	this.aHTML5AudioObjects[ 1 ] = new Audio();
546 
547 	// Pre-encode the wav file header into base64 and store it for re-use
548 	// as the header will not change (unless the replay frequency is changed).
549 	//
550 	this.oBase64Stream = new weasel.Base64Stream();
551 	this.oBase64Stream.prepend( 'data:audio/wav;base64,' );
552 	for( var iLength = aWaveFileHeader.length, iOffset = 0; iOffset < iLength; iOffset++ )
553 	{
554 		this.oBase64Stream.appendByte( aWaveFileHeader[ iOffset ] );
555 	}
556 	this.oBase64Stream.save();
557 
558 	this.iAudioSubSystemLag = (this.iPlaybackFrequency * this.fHTML5AudioPushTimerInSeconds)|0;
559 };
560 
561 //---------------------------------------------------------------------------
562 /** Generate an array of bytes representing a Wav file header, useful references:
563  * 		https://ccrma.stanford.edu/courses/422/projects/WaveFormat/
564  * 		http://www.codebase.es/riffwave/
565  * 		https://gist.github.com/1342081/d81ffcc84b20f765ddf493f97fd924ac80d1d34d
566  * 
567  * @param {int} iSampleFrequency = The sampling frequency to use in the header (e.g. 44100 ).
568  * @param {int} iNumberOfChannels = The number of sound channels, 2 for stereo.
569  * @param {int} iBitsPerSample = The bits per sample, e.g. 16 = 16 bit samples.
570  * @param {int} iNumberOfSamples = The number of samples to be indicated as present in the wav header.
571  * 
572  * @return {array} An array of bytes containing the Wav file header.
573  * 
574  *  @private
575  */
576 weasel.BrowserAudio.prototype.__createWaveFileHeader = function( iSampleFrequency, iNumberOfChannels, iBitsPerSample, iNumberOfSamples )
577 {
578 	var iWavNotRiffHeaderSizeBytes = 36;
579 	var iByteRate = ( ( iSampleFrequency * iNumberOfChannels * iBitsPerSample ) / 8) |0;
580 	var iBlockAlign = ( ( iNumberOfChannels * iBitsPerSample ) / 8) |0;
581 	var iSubchunk2Size = ( ( iNumberOfSamples * iNumberOfChannels * iBitsPerSample ) / 8 ) |0;
582 	var iChunkSize = iSubchunk2Size + iWavNotRiffHeaderSizeBytes;
583 	
584 	var WAVEHeader = {	
585 		  ChunkID:		[ 'RIFF'.charCodeAt(0), 'RIFF'.charCodeAt(1), 'RIFF'.charCodeAt(2), 'RIFF'.charCodeAt(3) ]
586 		, ChunkSize:	[ iChunkSize & 0xff, (iChunkSize >>> 8) &0xff, (iChunkSize >>> 16) &0xff, (iChunkSize >>> 24) &0xff ]
587 		, Format:		[ 'WAVE'.charCodeAt(0), 'WAVE'.charCodeAt(1), 'WAVE'.charCodeAt(2), 'WAVE'.charCodeAt(3) ]
588 		, Subchunk1ID:	[ 'fmt '.charCodeAt(0), 'fmt '.charCodeAt(1), 'fmt '.charCodeAt(2), 'fmt '.charCodeAt(3) ]
589 		, Subchunk1Size:[ 16, 0, 0, 0 ]
590 		, AudioFormat:	[ 1, 0 ]
591 		, NumChannels:	[ iNumberOfChannels & 0xff, (iNumberOfChannels >>> 8) &0xff ]
592 		, SampleRate:	[ iSampleFrequency & 0xff, (iSampleFrequency >>> 8) &0xff, (iSampleFrequency >>> 16) &0xff, (iSampleFrequency >>> 24) &0xff ]
593 		, ByteRate:		[ iByteRate & 0xff, (iByteRate >>> 8) &0xff, (iByteRate >>> 16) &0xff, (iByteRate >>> 24) &0xff ]
594 		, BlockAlign:	[ iBlockAlign & 0xff, (iBlockAlign >>> 8) &0xff ]
595 		, BitsPerSample:[ iBitsPerSample & 0xff, (iBitsPerSample >>> 8) &0xff ]
596 		, Subchunk2ID:	[ 'data'.charCodeAt(0), 'data'.charCodeAt(1), 'data'.charCodeAt(2), 'data'.charCodeAt(3) ]
597 		, Subchunk2Size:[ iSubchunk2Size & 0xff, (iSubchunk2Size >>> 8) &0xff, (iSubchunk2Size >>> 16) &0xff, (iSubchunk2Size >>> 24) &0xff ]
598 	};
599 
600 	var aWaveHeader = WAVEHeader.ChunkID.concat( 
601 			  WAVEHeader.ChunkSize
602 			, WAVEHeader.Format
603 			, WAVEHeader.Subchunk1ID
604 			, WAVEHeader.Subchunk1Size
605 			, WAVEHeader.AudioFormat
606 			, WAVEHeader.NumChannels
607 			, WAVEHeader.SampleRate
608 			, WAVEHeader.ByteRate
609 			, WAVEHeader.BlockAlign
610 			, WAVEHeader.BitsPerSample
611 			, WAVEHeader.Subchunk2ID
612 			, WAVEHeader.Subchunk2Size );
613 
614 	return	aWaveHeader;
615 };
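
// Illustrative worked example, assuming 44100Hz, 1 channel, 8 bits per sample and
// 88200 samples (i.e. 2 seconds of the low fidelity HTML5 audio):
//	iByteRate      = ((44100 * 1 * 8) / 8)|0  = 44100
//	iBlockAlign    = ((1 * 8) / 8)|0          = 1
//	iSubchunk2Size = ((88200 * 1 * 8) / 8)|0  = 88200
//	iChunkSize     = 88200 + 36               = 88236
// Each value is stored low byte first (little-endian) in the header byte array above.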
616 //---------------------------------------------------------------------------
617 /**
618  * In order to create a new AudioNode the existing one needs stopping and disconnecting.
619  * Firefox's (24) AudioContext implementation behaves differently from Webkit in that
620  * AudioNode.disconnect() is not enough to remove it. The AudioNode.onaudioprocess event
621  * needs clearing or it will continue to occur, causing the audio to muck up.
622  *  
623  * @private
624  */
625 weasel.BrowserAudio.prototype.__disconnectAudioNode = function()
626 {
627 	if( this.oAudioNode )
628 	{
629 		this.oAudioNode.disconnect();
630 		this.oAudioNode.onaudioprocess = null;
631 	}
632 };
633 
634 // ---------------------------------------------------------------------------
635 /**
636  * Create all the required AudioContext bits and pieces needed for audio. It should
637  *  be noted that the AudioContext playback frequency can't really be adjusted, so it is best
638  *  to use whatever it is currently playing at.
639  *  
640  * @private
641  */
642 weasel.BrowserAudio.prototype.__createAudioContextHooks = function(  )
643 {
644 	var iInputChannels	= 0;
645 	var iOutputChannels	= 2;
646 
647 	if( null == this.oAudioContext )
648 	{
649 		this.oAudioContext = new AudioContext();
650 	}
651 
652 	this.__disconnectAudioNode();
653 	this.oAudioNode = null;
654 
655 	this.oAudioNode = this.oAudioContext.createScriptProcessor( this.iAudioContextBufferSize, iInputChannels , iOutputChannels );
656 	var oSelf = this;
657 	this.oAudioNode.onaudioprocess = function( event ){ oSelf.__audioContextFeeder( event, oSelf ); };
658 
659 	if( !this.bPause )
660 	{
661 		// In case latency changes whilst audio is paused.
662 		//
663 		this.oAudioNode.connect( this.oAudioContext.destination );
664 	}
665 
666 	// Create the AudioBuffer (its size is quite different for AudioContext),
667 	// and change the sampling rate to what the AudioContext is actually using!
668 	//
669 	this.iPlaybackFrequency = this.oAudioContext.sampleRate;
670 	this.__setSamplesPerInterval();
671 	
672 	// Because the playback frequency can only be established AFTER the AudioContext object is created,
673 	// the pre-buffering value set earlier did not know the real playback frequency, so recalculate it here.
674 	//
675 	this.fPreBufferingInMS = (((this.iAudioContextBufferSize * 2) / this.iPlaybackFrequency) * 1000.0)|0;
676 	var fLagInSamples = (this.iPlaybackFrequency / 1000.0) * (this.fPreBufferingInMS);
677 	this.iAudioSubSystemLag = fLagInSamples |0;
678 
679 	var oOldAudioBuffer = this.oAudioBuffer;
680 	this.oAudioBuffer = new weasel.AudioBuffer( this.iPlaybackFrequency, this.iSoundChannels, this.oAudioNode.bufferSize );
681 
682 	if( oOldAudioBuffer )
683 	{
684 		// Preserve AudioBuffer settings.
685 		//
686 		this.oAudioBuffer.setMixerType( oOldAudioBuffer.getMixerType() );
687 		this.oAudioBuffer.setIgnoreCircularBuffer( oOldAudioBuffer.getIgnoreCircularBuffer() );
688 	}
689 };
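
// Illustrative worked example, assuming the default iAudioContextBufferSize of 4096
// and an AudioContext running at 44100Hz:
//	fPreBufferingInMS  = (((4096 * 2) / 44100) * 1000)|0 = 185
//	iAudioSubSystemLag = ((44100 / 1000.0) * 185)|0      = 8158 samples (roughly two buffers).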
690 
691 // ---------------------------------------------------------------------------
692 /**
693  * Feed the AudioContext with audio. 
694  * 
695  * @param {AudioProcessingEvent} oEvent = The Webkit/AudioContext audio event.
696  * @param {weasel.BrowserAudio} oBrowserAudio = The BrowserAudio object used to feed the audio event.
697  * 
698  * @private
699  */
700 weasel.BrowserAudio.prototype.__audioContextFeeder = function( oEvent, oBrowserAudio )
701 {
702 	var lStart = weasel.Helper.getHighRezTimer();
703 	var oAudioBuffer = oBrowserAudio.getAudioBuffer();
704 	var oCurrentModule = oBrowserAudio.getModule();
705 
706 	var aLeftBuffer		= oEvent.outputBuffer.getChannelData( 0 );
707 	var aRightBuffer	= oEvent.outputBuffer.getChannelData( 1 );
708 	var aSamples		= oAudioBuffer.getBuffer();
709 
710 	if( null != oCurrentModule )
711 	{
712 		// Finish creating any remaining samples that are needed, in case buffer is not full yet.
713 		//
714 		oCurrentModule.play( oAudioBuffer, this.bIgnoreFilter );
715 	}
716 	else
717 	{
718 		// Play silence if mod missing.
719 		//
720 		for( var iCount = aSamples.length; --iCount >= 0; )
721 		{
722 			aSamples[ iCount ] = 0.0;
723 		}
724 	}
725 
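	// De-interleave: the AudioBuffer holds interleaved left/right sample pairs, while
	// the Web Audio API output buffer wants a separate (planar) Float32Array per channel.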
726 	for( var iLength = aLeftBuffer.length, iDestination = 0, iSource = 0; iDestination < iLength;  )
727 	{
728 		aLeftBuffer[ iDestination ] = aSamples[ iSource++ ];
729 		aRightBuffer[ iDestination++ ] = aSamples[ iSource++ ];
730 	}
731 
732 	oAudioBuffer.removeSamples( oAudioBuffer.getBufferLengthInSamples() );
733 
734 	var lEnd = weasel.Helper.getHighRezTimer();
735 	oBrowserAudio.lAudioContextUsageMS = lEnd - lStart;
736 };
737 
738 // ---------------------------------------------------------------------------
739 /**
740  * Create all the required Mozilla bits and pieces needed for audio.
741  * 
742  * @private
743  */
744 weasel.BrowserAudio.prototype.__createMozillaAudioHooks = function( )
745 {
746 	this.oAudioBuffer = new weasel.AudioBuffer( this.iPlaybackFrequency, this.iSoundChannels, (this.iSamplesPerInterval / this.iAudioBufferSizeDivider) |0 );
747 	this.oAudio.mozSetup( this.iSoundChannels, this.iPlaybackFrequency );
748 };
749 
750 // ---------------------------------------------------------------------------
751 /**
752  * Change the replay frequency, if possible (not all audio subsystems allow this).
753  * Any additional modules (other than the one currently playing) will need to have their
754  * playback frequency changed with: oModule.changePlaybackFrequency( oBrowserAudio.getPlaybackFrequency() );
755  * 
756  * @param {int} iNewReplayFrequency = The new replay frequency in Hz (e.g. 48000).
757  */
758 weasel.BrowserAudio.prototype.changeReplayFrequency = function( iNewReplayFrequency )
759 {
760 	var iPreserveMixerType = this.oAudioBuffer.getMixerType();
761 	var bPreserveOutputBufferIgnored = this.oAudioBuffer.getIgnoreCircularBuffer();
762 
763 	if( this.iAudioType != weasel.BrowserAudio.prototype.AudioType.AudioContext )
764 	{
765 		this.iPlaybackFrequency = iNewReplayFrequency;
766 		this.__setSamplesPerInterval();
767 	}
768 
769 	switch( this.iAudioType )
770 	{
771 		case weasel.BrowserAudio.prototype.AudioType.Mozilla :
772 		
773 			this.__createMozillaAudioHooks();
774 			break;
775 			
776 		case weasel.BrowserAudio.prototype.AudioType.HTML5Audio :
777 			this.__createHTML5AudioHooks();
778 			break;
779 		
780 		case weasel.BrowserAudio.prototype.AudioType.None :
781 			this.oAudioBuffer = new weasel.AudioBuffer( this.iPlaybackFrequency, this.iSoundChannels, this.iSamplesPerInterval );
782 			break;
783 	};
784 
785 	var oModule = this.getModule();
786 	if( null != oModule && (this.iAudioType != weasel.BrowserAudio.prototype.AudioType.AudioContext) )
787 	{
788 		oModule.changePlaybackFrequency( this.getPlaybackFrequency() );
789 	}
790 
791 	this.oAudioBuffer.setMixerType( iPreserveMixerType );
792 	this.oAudioBuffer.setIgnoreCircularBuffer( bPreserveOutputBufferIgnored );
793 };
794 
795 
796 // ---------------------------------------------------------------------------
797 /**
798  * Feed the Mozilla Audio subsystem.
799  * 
800  * @return {int} = The number of samples written to the audio sub system.
801  * 
802  * @private
803  */
804 weasel.BrowserAudio.prototype.__feedMozillaAudio = function( )
805 {
806 	var oAudioBuffer = this.oAudioBuffer;
807 	var oAudio = this.oAudio;
808 	// Audio.mozCurrentSampleOffset() may report odd sample position (1,3,5,7,9 etc)
809 	// and not even numbers (2,4,6,8) on Windows (XP SP3 Realtek 660 Realtek High Definition Audio driver version 5.10.0.6029) if odd playback frequencies are used e.g. 32001Hz.
810 	//
811 	// An additional bug: Audio.mozCurrentSampleOffset() is supposed to return a Long (64 bit integer) but on Windows (XP SP3 Realtek 660 Realtek High Definition Audio driver version 5.10.0.6029)
812 	// the value wraps around to 0 at > 134,148,864. Which in hex is 0x7FEF300, that would suggest the loop occurs at 0x7ff0000:
813 	// 134,038,204 samples before breaking, at 192Khz Stereo that's ~5:50s of audio;
814 	// 134,132,354 at 96Khz that's ~11:38s before breaking.
815 	// However something else is odd about it, as Audio.mozCurrentSampleOffset() seems to stick at 0 for ~half a second.
816 	//
817 	var iBarrelMask = 0xfffff;				// Use a barrel mask as a work around for Audio.mozCurrentSampleOffset() not returning a 64 bit Long.
818 	var iBarrelLength = iBarrelMask + 1;
819 
820 	var lCurrentSampleOffset = oAudio.mozCurrentSampleOffset();
821 	var lPlayedSamples = ( lCurrentSampleOffset / oAudioBuffer.getSoundChannels() ) & iBarrelMask;
822 
823 	var lWrittenSamples = oAudioBuffer.getWrittenSamples() & iBarrelMask;
824 
825 	if( lWrittenSamples < lPlayedSamples )
826 	{
827 		lWrittenSamples += iBarrelLength;
828 	}
829 
830 	this.iAudioSubSystemLag = (lWrittenSamples - lPlayedSamples) & iBarrelMask;
831 
832 	var iPreBufferingInSamples = ((this.iPlaybackFrequency / 1000.0) * (this.fPreBufferingInMS + this.iIntervalMsRate)) |0;
833 	var iSamplesNeededThisFrame = ((( iPreBufferingInSamples + lPlayedSamples )) - lWrittenSamples);
834 
835 	if( this.lOldPlayedSamples == lPlayedSamples )
836 	{
837 		// Stinking LAG STALL detected.
838 		// Most likely need 500ms worth of sample data before the audio starts playing again :(
839 		//
840 		iSamplesNeededThisFrame = this.iSamplesPerInterval * 4;
841 	}
842 	this.lOldPlayedSamples = lPlayedSamples;
843 
844 	var iBufferItterations = (iSamplesNeededThisFrame / oAudioBuffer.getBufferLengthInSamples() ) | 0;
845 
846 	if( iBufferItterations <= 0 )
847 	{
848 		// Always generate something, but allow the play position to catch up to our current position.
849 		// After a bit of playing around it seems that a value of 80% of the audio buffer works quite well;
850 		// this seems to reduce the ping-pong effect that can occur once a lag stall has happened at low latency (<75ms).
851 		// (In that, in trying to get back to the correct low latency position, it gets missed and causes another lag stall.)
852 		
853 		iBufferItterations = ((this.iSamplesPerInterval * 0.80) / oAudioBuffer.getBufferLengthInSamples()) | 0;
854 	}
855 
856 	while( --iBufferItterations >= 0 )
857 	{
858 		var iWriteStallMaxRetries = 1;	// Sometimes writing audio gets blocked (looking at you Pulse Audio Subsystem....) and no audio is written at all :(
859 		var iSamplesPerBuffer = oAudioBuffer.getBufferLengthInSamples();
860 
861 		try
862 		{
863 			while( iSamplesPerBuffer > 0 && --iWriteStallMaxRetries >= 0 )
864 			{
865 				var lBytesWritten = (oAudio.mozWriteAudio( oAudioBuffer.getBuffer() ) / oAudioBuffer.getSoundChannels())|0;
866 				oAudioBuffer.removeSamples( lBytesWritten );
867 				this.getModule().play( oAudioBuffer, this.bIgnoreFilter );
868 				iSamplesPerBuffer -= lBytesWritten;
869 			}
870 		}catch( oException )
871 		{
872 			// On very slow machines (like the Raspberry PI) Audio.mozWriteAudio()
873 			// can generate exceptions "Component returned failure code 0x80004005 (NS_ERROR_FAILURE)."
874 		}
875 	}
876 
877 	return (((oAudioBuffer.getWrittenSamples() & iBarrelMask)  + iBarrelLength) - lWrittenSamples) & iBarrelMask;
878 };
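
// Illustrative worked example of the barrel mask handling above (the positions used
// are made up): with iBarrelMask = 0xfffff, suppose the masked written position has
// just wrapped around to 0x00010 while the masked played position is still at 0xfffa0.
// Because written < played, iBarrelLength (0x100000) is added back, giving a lag of
// (0x100010 - 0xfffa0) & 0xfffff = 0x70 samples instead of a huge bogus value.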
879 
880 
881 // ---------------------------------------------------------------------------
882 /**
883  * Feed the AudioContext subsystem, which is not actually done here as AudioContext
884  * is request based.
885  * 
886  * @return {int} = The number of samples created.
887  * 
888  * @private
889  */
890 weasel.BrowserAudio.prototype.__feedAudioContext = function( )
891 {
892 	var iSamplesPerFrame = this.iSamplesPerInterval;
893 	var iSamplesToFill = this.oAudioBuffer.samplesToFill();
894 
895 	this.getModule().play( this.oAudioBuffer, this.bIgnoreFilter, iSamplesPerFrame );
896 
897 	return iSamplesToFill - this.oAudioBuffer.samplesToFill();
898 };
899 
900 // ---------------------------------------------------------------------------
901 /**
902  * Feed the None audio subsystem, just so that visuals will work.
903  * 
904  * @return {int} = The number of samples created.
905  * 
906  * @private
907  */
908 weasel.BrowserAudio.prototype.__feedNoneAudio = function( )
909 {
910 	// Still allow module to be played and be "seen" instead of heard.. :(
911 	//
912 	var iSamplesPerFrame = this.iSamplesPerInterval;
913 	var iSamplesFilled = this.oAudioBuffer.samplesToFill();
914 
915 	this.getModule().play( this.oAudioBuffer, this.bIgnoreFilter, iSamplesPerFrame );
916 
917 	iSamplesFilled -= this.oAudioBuffer.samplesToFill();
918 	this.oAudioBuffer.removeSamples( iSamplesFilled );
919 
920 	return iSamplesFilled;
921 };
922 
923 // ---------------------------------------------------------------------------
924 /** Convert a portion of the audio buffer into base 64 (for the DataURI used in HTML5 audio),
925  * a rough conversion into an 8 bit mono wav file; samples are not clipped (they should not need to be).
926  * 2 Seconds of stereo 16bit 16Khz audio in base64 is a string 170,750 chars in length;
927  * using escaped chars instead it's over 340K for the same data
928  * (length is not consistent as different data encodes to different lengths). :(
929  * 
930  * @param {int} iAudioBufferStartPosition = The starting offset in the AudioBuffer, a stereo pair is ALWAYS converted (may cause problems if end-start is not even).
931  * @param {int} iAudioBufferEndPosition = The ending offset in the AudioBuffer.
932  * 
933  * @private
934  */
935 weasel.BrowserAudio.prototype.__convertAudioTo8BitBase64WavFile = function( iAudioBufferStartPosition, iAudioBufferEndPosition )
936 {
937 	var iByteQueue = 0;
938 	var iByteCount = 0;
939 	for( var iSource = iAudioBufferStartPosition, aSampleData = this.oAudioBuffer.getBuffer(), oBase64Stream = this.oBase64Stream; iSource < iAudioBufferEndPosition; )
940 	{
941 		iByteQueue <<= 8;
942 		iByteQueue |= ( (( (aSampleData[ iSource++ ] + aSampleData[ iSource++ ]) * 63.5 ) + 128.0 ) & 0xff );
943 
944 		// Queue up bytes to reduce call frequency to Base64Stream.appendByte().
945 		//
946 		if( 3 == ++iByteCount )
947 		{
948 			oBase64Stream.appendTriple( iByteQueue );
949 			iByteQueue = 0;
950 			iByteCount = 0;
951 		}
952 	}
953 
954 	// Handle any remaining bytes in the queue that need to be Base64 encoded.
955 	//
956 	if( 2 == iByteCount )
957 	{
958 		this.oBase64Stream.appendWord( iByteQueue );
959 	}
960 	else if( 1 == iByteCount )
961 	{
962 		this.oBase64Stream.appendByte( iByteQueue );
963 	}
964 };
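
// Illustrative worked example of the fold to 8 bit unsigned mono above,
// ((left + right) * 63.5) + 128, using assumed sample values:
//	left = right =  1.0  ->  ( 2.0 * 63.5) + 128 = 255
//	left = right =  0.0  ->  ( 0.0 * 63.5) + 128 = 128 (silence)
//	left = right = -1.0  ->  (-2.0 * 63.5) + 128 =   1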
965 
966 // ---------------------------------------------------------------------------
967 /** Convert a portion of the audio buffer into base 64 (for the DataURI used in HTML5 audio),
968  * as 16 bit stereo; samples are not clipped (they should not need to be).
969  * 
970  * @param {int} iAudioBufferStartPosition = The starting offset in the AudioBuffer.
971  * @param {int} iAudioBufferEndPosition = The ending offset in the AudioBuffer.
972  * 
973  * @private
974  */
975 weasel.BrowserAudio.prototype.__convertAudioTo16BitBase64WavFile = function( iAudioBufferStartPosition, iAudioBufferEndPosition )
976 {
977 	for( var iSource = iAudioBufferStartPosition, aSampleData = this.oAudioBuffer.getBuffer(), oBase64Stream = this.oBase64Stream; iSource < iAudioBufferEndPosition; )
978 	{
979 		var i16BitSample = ( aSampleData[ iSource++ ] * 32767 ) |0;
980 		oBase64Stream.appendWord( ((i16BitSample & 0xff) << 8) | ((i16BitSample >>> 8 ) & 0xff) );
981 	}
982 };
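
// Illustrative note on the byte swap above (it is assumed here that
// weasel.Base64Stream.appendWord() emits the high byte of its argument first):
// a sample of 0.5 becomes (0.5 * 32767)|0 = 16383 = 0x3FFF, and the swap passes
// 0xFF3F to appendWord() so the bytes land in the wav data low byte first
// (little-endian), as the wav format expects.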
983 
984 // ---------------------------------------------------------------------------
985 /** HTML5 Audio function that plays the module and converts the audio in the audio
986  * buffer to a wave file on the fly, as opposed to converting the whole lot once the audio buffer
987  * is full, which is far too time consuming at a time critical stage.
988  * 
989  * @param {int} iSamples = The number of samples to play and then convert to a wav file.
990  * 
991  * @private
992  */
993 weasel.BrowserAudio.prototype.__dynamicallyMakeWavFile = function( iSamples )
994 {
995 	var iAudioBufferStartPosition = this.oAudioBuffer.getBufferPosition();
996 	var oModule = this.getModule();
997 	
998 	if( oModule )
999 	{
1000 		oModule.play( this.oAudioBuffer, this.bIgnoreFilter, iSamples );
1001 	}
1002 
1003 	var iAudioBufferEndPosition = this.oAudioBuffer.getBufferPosition();
1004 
1005 	if( this.bHTML5LowFidelityMode )
1006 	{
1007 		this.__convertAudioTo8BitBase64WavFile( iAudioBufferStartPosition, iAudioBufferEndPosition );
1008 	}
1009 	else
1010 	{
1011 		this.__convertAudioTo16BitBase64WavFile( iAudioBufferStartPosition, iAudioBufferEndPosition );
1012 	}
1013 };
1014 
1015 // ---------------------------------------------------------------------------
1016 /** Feed the HTML5 Audio, although this is not actually done here, as it is not
1017  * really possible to create a DataURI at 60hz (the overheads are too great and
1018  * the audio clips do not line up well across browsers).
1019  * 
1022  * @private
1023  */
1024 weasel.BrowserAudio.prototype.__feedHTML5Audio = function( )
1025 {
1026 	var iCurrentAudioTime = weasel.Helper.getHighRezTimer();
1027 	var iDeltaAudioTime = iCurrentAudioTime - (this.iSyncHTML5AudioTimeBase + this.iSyncHTML5AudioTimeDelta);
1028 
1029 	var iSamplesPerFrame = this.iSamplesPerInterval;
1030 	var iSamplesFilled = this.oAudioBuffer.samplesToFill();
1031 
1032 	iSamplesPerFrame = iSamplesPerFrame + (( iSamplesPerFrame * (iDeltaAudioTime / this.iIntervalMsRate) ) |0 );
1033 
1034 	if( iSamplesPerFrame > this.iSamplesPerInterval * 2 )
1035 	{
1036 		// Don't hog the cpu, defer leftovers to subsequent frames.
1037 		//
1038 		iSamplesPerFrame = this.iSamplesPerInterval * 2;
1039 	}
1040 
1041 	if( iSamplesPerFrame < 0  )
1042 	{
1043 		iSamplesPerFrame = 0;
1044 	}
1045 	// Correct Audio Frame Counter to contain the extra catch up time or delay.
1046 	//
1047 	this.iSyncHTML5AudioTimeDelta += (this.iIntervalMsRate * (iSamplesPerFrame / this.iSamplesPerInterval)) |0;
1048 
1049 	this.__dynamicallyMakeWavFile( iSamplesPerFrame );
1050 
1051 	iSamplesFilled -= this.oAudioBuffer.samplesToFill();
1052 	return iSamplesFilled;
1053 };
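
// Illustrative worked example, assuming a 20ms interval, 882 samples per interval and
// a feed that arrives with iDeltaAudioTime = 30ms of unaccounted time:
//	iSamplesPerFrame = 882 + ((882 * (30 / 20))|0) = 2205, clamped to 882 * 2 = 1764
//	iSyncHTML5AudioTimeDelta advances by (20 * (1764 / 882))|0 = 40ms
// so the remaining shortfall is caught up on later frames.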
1054 
1055 
1056 // ---------------------------------------------------------------------------
1057 /** This is the function which actually plays the audio via HTML5, which is done
1058  * through a DataURI.
1059  * 
1060  * @private
1061  */
1062 weasel.BrowserAudio.prototype.__pushAudioToHTML5 = function( )
1063 {
1064 	var iStart =  weasel.Helper.getHighRezTimer();
1065 	this.iSyncHTML5AudioTimeBase = iStart;
1066 	this.iSyncHTML5AudioTimeDelta = 0;
1067 
1068 
1069 	// Fill audio buffer with remaining samples (in case it has not already been filled).
1070 	//
1071 	this.__dynamicallyMakeWavFile( undefined );
1072 	this.oAudioBuffer.removeSamples( this.oAudioBuffer.getBufferLengthInSamples() );
1073 
1074 	this.oBase64Stream.flush();
1075 	this.aHTML5AudioObjects[ this.iNextHTML5AudioObject ].src = this.oBase64Stream.getBase64EncodedString();			// Base64 encoded.
1076 
1077 	if( true == this.bNastyHackForHTMLAudioInFirefox3 )
1078 	{
1079 		// Firefox/Gecko engines < 4.0 do not play new dataURIs when the Audio.src is changed;
1080 		// you are expected to call the Audio.load() method to indicate that it has changed.
1081 		// However this is not required on other browsers (Opera/Webkit), and on Webkit it consumes 100ms+.
1082 		//
1083 		this.aHTML5AudioObjects[ this.iNextHTML5AudioObject ].load();
1084 	}
1085 
1086 	this.aHTML5AudioObjects[ this.iNextHTML5AudioObject ].play();
1087 	this.iNextHTML5AudioObject = (this.iNextHTML5AudioObject + 1) & 1;
1088 
1089 	this.oBase64Stream.load();
1090 
1091 	var iEnd = weasel.Helper.getHighRezTimer();
1092 	this.lHTML5AudioUsageMS = iEnd - iStart;
1093 
1094 };
1095 
1096 
1097 // ---------------------------------------------------------------------------
1098 /**
1099  * Feed the Audio subsystems.
1100  * 
1101  * @return {int} = The number of samples written to the audio sub system.
1102  */
1103 weasel.BrowserAudio.prototype.feedAudio = function( )
1104 {
1105 	var lProfileStart = weasel.Helper.getHighRezTimer();
1106 	var iSamplesWritten = 0;
1107 
1108 	// Module loaded?
1109 	//
1110 	if( null == this.getModule() || this.bPause )
1111 	{
1112 		this.lProfileInMS = weasel.Helper.getHighRezTimer() - lProfileStart;
1113 		return 0;
1114 	}
1115 
1116 	switch( this.iAudioType )
1117 	{
1118 		case weasel.BrowserAudio.prototype.AudioType.Mozilla :
1119 
1120 			iSamplesWritten = this.__feedMozillaAudio( );
1121 
1122 		break;
1123 
1124 		case weasel.BrowserAudio.prototype.AudioType.AudioContext :
1125 
1126 			iSamplesWritten = this.__feedAudioContext();
1127 
1128 		break;
1129 
1130 
1131 		case weasel.BrowserAudio.prototype.AudioType.HTML5Audio :
1132 
1133 			iSamplesWritten = this.__feedHTML5Audio();
1134 
1135 		break;
1136 
1137 		case weasel.BrowserAudio.prototype.AudioType.None :
1138 
1139 			iSamplesWritten = this.__feedNoneAudio();
1140 
1141 		break;
1142 	};
1143 
1144 	// In case of weirdness.
1145 	//
1146 	this.lProfileInMS = weasel.Helper.getHighRezTimer() - lProfileStart;
1147 	return iSamplesWritten;
1148 };
1149 
1150 // ---------------------------------------------------------------------------
1151 /** On some audio subsystems (Firefox) an audio buffer the size of a single interval can be used
1152  * in order to reduce the number of write calls to the browser audio api. This has the disadvantage
1153  * of poor latency response (because the audio buffer always has to be full before being passed
1154  * to the browser, when we might actually need slightly more or slightly less), but is quicker.
1155  * 
1156  * @param {bool} bLowerLatency = True : Lower the latency at the cost of some speed, False : Make things run a bit faster at the cost of higher latency.
1157  */
1158 weasel.BrowserAudio.prototype.tradeLowerLatencyForSpeed = function( bLowerLatency )
1159 {
1160 	if( true == bLowerLatency )
1161 	{
1162 		this.iAudioBufferSizeDivider = 5;
1163 	}
1164 	else
1165 	{
1166 		this.iAudioBufferSizeDivider = 1;
1167 	}
1168 
1169 	this.changeReplayFrequency( this.iPlaybackFrequency );
1170 
1171 };
1172 
1173 // ---------------------------------------------------------------------------
1174 /** Get the state of Low Latency vs Speed setting (currently only applies to Firefox).
1175  * 
1176  * @return {bool} = True : Lower latency is used at the cost of some speed, False : faster operation is used at the cost of higher latency.
1177  */
1178 weasel.BrowserAudio.prototype.getTradeLowerLatencyForSpeed = function( )
1179 {
1180 	return this.iAudioBufferSizeDivider == 1 ? false : true;
1181 };
1182 
1183 // ---------------------------------------------------------------------------
1184 /** Set HTML5 Audio to low fidelity mode, 8 bit mono, which is needed as all sample
1185  * data has to be encoded into a wav file, base64 encoded and stuck into a DataURI for
1186  * the browser to play. Unfortunately the browser takes a long time decoding the DataURI
1187  * and start playing it, causing annoying pauses (unless you have a really fast machine) on
1188  * the JavaScript side (a new audio clip is created every 2 seconds, which is when you'll see the pause).
1189  * 
1190  * @param {bool} bLowFidelity = true : Low fidelity mode is enabled (8 bit, mono), false : high fidelity mode (16 bit, stereo).
1191  */
1192 weasel.BrowserAudio.prototype.setHTML5LowFidelityMode = function( bLowFidelity )
1193 {
1194 	if( weasel.BrowserAudio.prototype.AudioType.HTML5Audio == this.getAudioType() )
1195 	{
1196 		this.bHTML5LowFidelityMode = bLowFidelity == true ? true : false;
1197 	
1198 		this.changeReplayFrequency( this.iPlaybackFrequency );
1199 	}
1200 };
1201 
1202 // ---------------------------------------------------------------------------
1203 /** Get the state of HTML5 Audio playback fidelity (8-bit, mono vs 16bit, stereo).
1204  * 
1205  * @return {bool} = true : Low fidelity mode is enabled (8 bit, mono), false : high fidelity mode (16 bit, stereo).
1206  */
1207 weasel.BrowserAudio.prototype.getHTML5LowFidelityMode = function( )
1208 {
1209 	return this.bHTML5LowFidelityMode;
1210 };
1211 
1212 // ---------------------------------------------------------------------------
1213 /** Set interpolation for ALL channels in the current module (you can also manually set the interpolation for each
1214  * channel to be different).
1215  * 
1216  * @param {weasel.Channel.SupportedInterpolationTypes} iInterpolationType = The interpolation type to use.
1217  */
1218 weasel.BrowserAudio.prototype.setInterpolation = function( iInterpolationType )
1219 {
1220 	var oModule = this.getModule();
1221 
1222 	if( oModule )
1223 	{
1224 		for( var iChannel = oModule.getNumberOfChannels(); --iChannel >= 0; )
1225 		{
1226 			oModule.getChannel( iChannel ).setChannelInterpolation( iInterpolationType );
1227 		}
1228 	}
1229 	
1230 	this.iGlobalOverrideInterpolation = iInterpolationType;
1231 
1232 };
1233 
1234 // ---------------------------------------------------------------------------
1235 /** Turn On/Off the ability to ignore the Amiga Filter; almost all modules were written without the filter,
1236  * which was not present in the original Amiga, being introduced with the Amiga 2000 and Amiga 500.
1237  * It should be noted that this does not turn On the actual filter; the filter gets turned On/Off
1238  * from within the module using the correct Effect Command (typically E-00 and E-01).
1239  * 
1240  * @param {bool} bIgnoreFilter = true : Ignore the Amiga Filter, false : Enable use of Amiga Filter, if module uses filter command.
1241  */
1242 weasel.BrowserAudio.prototype.setIgnoreFilter = function( bIgnoreFilter )
1243 {
1244 	this.bIgnoreFilter = bIgnoreFilter == true ? true : false;
1245 };
1246 
1247 // ---------------------------------------------------------------------------
1248 /** Is the Amiga filter command being ignored?
1249  * 
1250  * @return {bool} = true : Ignore the Amiga Filter, false : Enable use of Amiga Filter.
1251  */
1252 weasel.BrowserAudio.prototype.getIgnoreFilter = function( )
1253 {
1254 	return this.bIgnoreFilter;
1255 };
1256 
1257 // ---------------------------------------------------------------------------
1258 /** Is Browser Audio currently playing?
1259  * 
1260  * @return {bool} = true : Browser Audio is currently playing, false : Browser Audio is not currently playing.
1261  */
1262 weasel.BrowserAudio.prototype.playing = function( )
1263 {
1264 	return !this.bPause;
1265 };
1266