Javascript Audio APIs - Let's make some noise

An overview of the current state of realtime sound synthesis with JavaScript.


Jan Krutisch

August 20, 2011

Transcript

  1. Javascript Audio APIs Let's make some noise! APIs, Demos, Applications

    Jan Krutisch <jan.krutisch@mindmatters.de> for FrosCon 2011, St. Augustin
  3. Moi.

  4. Realtime audio?

  5. In/Out

  6. Applications?

  7. Music

  8. Collaboration

  9. Games

  10. >1 Sounds at once

  11. Spatial Audio

  12. Ahem. WebGL. Ahem.

  13. ( Build it and they will come )

  14. Status Quo.

  15. Nothing.

  16. Nothing serious.

  17. well, Flash.

  18. (insert rant on flash scripting bridge here)

  21. Mac: about:flags. Win, Linux: Canary builds

  25. Code or it didn't happen

  26. Audio Data API

  27. <audio/>

  28. reading

  29. <audio id="audio" src="song.ogg"></audio>
      <script>
        var audio = document.getElementById("audio");
        audio.addEventListener('MozAudioAvailable', audioAvailable, false);
        audio.addEventListener('loadedmetadata', audioInfo, false);
      </script>
  30. var channels, rate, frameBufferLength, samples;

      function audioInfo() {
        var audio = document.getElementById('audio');
        // After loadedmetadata event, following media element attributes are known:
        channels = audio.mozChannels;
        rate = audio.mozSampleRate;
        frameBufferLength = audio.mozFrameBufferLength;
      }

      function audioAvailable(event) {
        var samples = event.frameBuffer;
        var time = event.time;
        for (var i = 0; i < frameBufferLength; i++) {
          // Do something with the audio data as it is played.
          processSample(samples[i], channels, rate);
        }
      }
  31. writing

  32. <!DOCTYPE html>
      <html>
      <head>
        <title>JavaScript Audio Write Example</title>
      </head>
      <body>
        <input type="text" size="4" id="freq" value="440"><label for="hz">Hz</label>
        <button onclick="start()">play</button>
        <button onclick="stop()">stop</button>

        <script type="text/javascript">
          function AudioDataDestination(sampleRate, readFn) {
            // Initialize the audio output.
            var audio = new Audio();
            audio.mozSetup(1, sampleRate);

            var currentWritePosition = 0;
            var prebufferSize = sampleRate / 2; // buffer 500ms
            var tail = null;

            // The function called at a regular interval to populate
            // the audio output buffer.
            setInterval(function() {
              var written;
              // Check if some data was not written in previous attempts.
              if (tail) {
                written = audio.mozWriteAudio(tail);
                currentWritePosition += written;
                if (written < tail.length) {
                  // Not all the data was written, saving the tail...
                  tail = tail.slice(written);
                  return; // ... and exit the function.
                }
                tail = null;
              }

              // Check if we need to add some data to the audio output.
              var currentPosition = audio.mozCurrentSampleOffset();
              var available = currentPosition + prebufferSize - currentWritePosition;
              if (available > 0) {
                // Request some sound data from the callback function.
                var soundData = new Float32Array(available);
                readFn(soundData);

                // Writing the data.
                written = audio.mozWriteAudio(soundData);
                if (written < soundData.length) {
                  // Not all the data was written, saving the tail.
                  tail = soundData.slice(written);
                }
                currentWritePosition += written;
              }
            }, 100);
          }

          // Control and generate the sound.
          var frequency = 0, currentSoundSample;
          var sampleRate = 44100;

          function requestSoundData(soundData) {
            if (!frequency) {
              return; // no sound selected
            }
            var k = 2 * Math.PI * frequency / sampleRate;
            for (var i = 0, size = soundData.length; i < size; i++) {
              soundData[i] = Math.sin(k * currentSoundSample++);
            }
          }

          var audioDestination = new AudioDataDestination(sampleRate, requestSoundData);

          function start() {
            currentSoundSample = 0;
            frequency = parseFloat(document.getElementById("freq").value);
          }

          function stop() {
            frequency = 0;
          }
        </script>
  33. [...]
      var audio = new Audio();
      audio.mozSetup(1, sampleRate);
      [...]
      written = audio.mozWriteAudio(tail);
      [...]
  34. https://wiki.mozilla.org/Audio_Data_API#Writing_Audio

  35. Web Audio API

  36. reading

  37. var context = new webkitAudioContext();
      var source = context.createBufferSource();
      var jsNode = context.createJavaScriptNode(1024 * 8, 1, 1);
      jsNode.onaudioprocess = processAudio;
      source.connect(jsNode);
      jsNode.connect(context.destination);
      loadSample("data/funkydrummer.mp3");
  38. function loadSample(url) {
        // Load asynchronously
        var request = new XMLHttpRequest();
        request.open("GET", url, true);
        request.responseType = "arraybuffer";
        request.onload = function() {
          var buffer = context.createBuffer(request.response, false);
          source.buffer = buffer;
          source.looping = true;
          source.noteOn(0.0);
          console.log("Sample loaded");
        };
        request.send();
      }

  41. function processAudio(e) {
        console.log(e);
        var channels = e.inputBuffer.numberOfChannels;
        var loudness = 0.0;
        var i = 0;
        var c = 0;
        for (c = 0; c < channels; c++) {
          var inData = e.inputBuffer.getChannelData(c);
          var outData = e.outputBuffer.getChannelData(c);
          for (i = 0; i < inData.length; i++) {
            loudness += Math.abs(inData[i]);
            outData[i] = inData[i]; // pass the input through unchanged
          }
          loudness /= inData.length;
        }
        loudness /= channels;
      }

  43. writing

  44. var context = new webkitAudioContext();
      var source = context.createJavaScriptNode(8192, 0, 1);
      source.onaudioprocess = requestSoundData;
      source.connect(context.destination);
  45. var frequency = 0, offset = 0;
      var sampleRate = 44100;

      function requestSoundData(e) {
        var channels = e.outputBuffer.numberOfChannels;
        var k = 2 * Math.PI * frequency / sampleRate;
        var soundData;
        for (var c = 0; c < channels; c++) {
          soundData = e.outputBuffer.getChannelData(c);
          for (var i = 0; i < soundData.length; i++) {
            soundData[i] = frequency === 0 ? 0.0 : Math.sin(k * (offset + i));
          }
        }
        offset += soundData.length;
      }

  48. chaining

  49. var context = new webkitAudioContext();
      var source = context.createBufferSource();
      var filter = context.createBiquadFilter();

      filter.type = filter.LOWPASS;
      filter.frequency.value = 12000.0;
      filter.Q.value = 2;

      source.connect(filter);
      filter.connect(context.destination);

      loadSample("data/funkydrummer.mp3");

      setInterval(function() {
        filter.frequency.value = Math.random() * 12000;
        // console.log(filter.frequency);
      }, 200);
  50. Performance

  51. Software Synthesis in Javascript. Crazy?

  52. Not at all

  53. Depends heavily on

  54. JS runtime

  55. Hardware

  56. Breakthrough

  57. V8, Nitro, TraceMonkey, Carakan, Chakra

  58. Typed Arrays

  59. ( Invented for WebGL )
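
    Both APIs hand samples around as typed arrays: one Float32Array per channel, with one
    float per sample in the range -1.0 to 1.0. A minimal sketch (mine, not from the slides):

      var sampleRate = 44100;
      var buffer = new Float32Array(sampleRate);   // one second of mono audio
      for (var i = 0; i < buffer.length; i++) {
        buffer[i] = 0.5 * Math.sin(2 * Math.PI * 440 * i / sampleRate);
      }
      // Audio Data API (Firefox): audio.mozWriteAudio(buffer);
      // Web Audio API (Chrome):   e.outputBuffer.getChannelData(0) already returns such an array.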

  60. Thank you Mr. Moore.

  61. Curiosities

  62. On The Fly Data URL

  63. http://synth.bitsnbites.eu/play.html

  64. window.btoa()
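
    The demo linked above builds a complete WAV file inside a JavaScript string, base64-encodes
    it with window.btoa() and hands it to an <audio> element as a data: URL. A rough sketch of
    the idea (my own code, not the demo's):

      // Write little-endian integers as characters of a binary string.
      function u16(n) { return String.fromCharCode(n & 255, (n >> 8) & 255); }
      function u32(n) { return u16(n & 65535) + u16((n >> 16) & 65535); }

      // samples: floats in [-1, 1], encoded here as 8-bit unsigned mono PCM.
      function makeWavDataUrl(samples, sampleRate) {
        var data = "";
        for (var i = 0; i < samples.length; i++) {
          data += String.fromCharCode(Math.round(samples[i] * 127 + 128));
        }
        var wav =
          "RIFF" + u32(36 + data.length) + "WAVE" +
          "fmt " + u32(16) + u16(1) + u16(1) +   // PCM, one channel
          u32(sampleRate) + u32(sampleRate) +    // sample rate, byte rate (8-bit mono)
          u16(1) + u16(8) +                      // block align, bits per sample
          "data" + u32(data.length) + data;
        return "data:audio/wav;base64," + btoa(wav);
      }

      // One second of a 440Hz sine tone through a plain Audio element:
      var rate = 8000, samples = [];
      for (var i = 0; i < rate; i++) {
        samples.push(Math.sin(2 * Math.PI * 440 * i / rate));
      }
      new Audio(makeWavDataUrl(samples, rate)).play();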

  65. My little corner

  66. http://github.com/halfbyte/soundbridge.js

  67. Web Audio > Audio Data > Flash

  68. Only Mono/Stereo

  69. Only Output
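
    soundbridge.js picks whichever output backend is available, in the priority order above:
    Web Audio first, then the Audio Data API, then a Flash fallback. Roughly, the detection
    looks like this (an illustration of the idea, not the library's actual code):

      function detectAudioBackend() {
        if (typeof webkitAudioContext !== "undefined") {
          return "webaudio";     // Chrome / Safari (prefixed, 2011)
        }
        if (typeof (new Audio()).mozSetup === "function") {
          return "audiodata";    // Firefox 4+
        }
        return "flash";          // everything else gets the Flash shim
      }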

  70. SoundBridge(2, 44100, '..', function(soundbridge) {
        soundbridge.setCallback(calc);
        log("now starting to play.");
        soundbridge.play();
      });
  71. var absoluteBufferPos = 0;

      var calc = function(bridge, bufferSize, channels) {
        for (var b = 0; b < bufferSize; b++) {
          var period = ((absoluteBufferPos + b) % 100) / 100;
          period *= 2 * Math.PI;
          var val = Math.sin(period);
          bridge.addToBuffer(val, val);
        }
        absoluteBufferPos += bufferSize;
      };
  72. http://webloop.pixelpoke.de https://github.com/halfbyte/webloop

  73. Demo time

  74. Resources

  75. ‣ https://wiki.mozilla.org/Audio_Data_API
      ‣ https://dvcs.w3.org/hg/audio/raw-file/tip/webaudio/specification.html
      ‣ https://wiki.mozilla.org/Audio_Data_API#Working_Audio_Data_Demos
      ‣ http://chromium.googlecode.com/svn/trunk/samples/audio/index.html
      ‣ https://github.com/halfbyte/webloop
      ‣ https://github.com/halfbyte/soundbridge.js
  76. thanks. http://jan.krutisch.de/ http://twitter.com/halfbyte http://github.com/halfbyte http://soundcloud.com/halfbyte http://www.mindmatters.de/