Upgrade to Pro — share decks privately, control downloads, hide ads and more …

Javascript Audio APIs - Let's make some noise

Javascript Audio APIs - Let's make some noise

An overview of the current state of how to use JavaScript to synthesize sounds in realtime.

Jan Krutisch

August 20, 2011
Tweet

More Decks by Jan Krutisch

Other Decks in Technology

Transcript

  1. Javascript Audio APIs
    Let’s make some noise!
    APIs, Demos, Applications
    Jan Krutisch
    for FrosCon 2011, St. Augustin

    View Slide

  2. Javascript Audio APIs
    Let’s make some noise!
    APIs, Demos, Applications
    Jan Krutisch
    for FrosCon 2011, St. Augustin

    View Slide

  3. Moi.

    View Slide

  4. Realtime audio?

    View Slide

  5. In/Out

    View Slide

  6. Applications?

    View Slide

  7. Music

    View Slide

  8. Collaboration

    View Slide

  9. Games

    View Slide

  10. >1 Sounds at once

    View Slide

  11. Spatial Audio

    View Slide

  12. Ahem. WebGL. Ahem.

    View Slide

  13. (
    Build and they will come
    )

    View Slide

  14. Status Quo.

    View Slide

  15. Nothing.

    View Slide

  16. Nothing serious.

    View Slide

  17. well, Flash.

    View Slide

  18. (insert rant on flash
    scripting bridge here)

    View Slide

  19. View Slide

  20. View Slide

  21. Mac
    about:flags
    Win, Linux
    Canary builds

    View Slide

  22. View Slide

  23. View Slide

  24. View Slide

  25. Code or it didn’t happen

    View Slide

  26. Audio Data API

    View Slide


  27. View Slide

  28. reading

    View Slide


  29. <br/>var audio = document.getElementById("audio");<br/>audio.addEventListener('MozAudioAvailable', audioAvailable, false);<br/>audio.addEventListener('loadedmetadata', audioInfo, false);<br/>

    View Slide

  30. var channels,
    rate,
    frameBufferLength,
    samples;
    function audioInfo() {
    var audio = document.getElementById('audio');
    // After loadedmetadata event, following media element attributes are known:
    channels = audio.mozChannels;
    rate = audio.mozSampleRate;
    frameBufferLength = audio.mozFrameBufferLength;
    }
    function audioAvailable(event) {
    var samples = event.frameBuffer;
    var time = event.time;
    for (var i = 0; i < frameBufferLength; i++) {
    // Do something with the audio data as it is played.
    processSample(samples[i], channels, rate);
    }
    }

    View Slide

  31. writing

    View Slide




  32. JavaScript Audio Write Example


    Hz
    play
    stop
    <br/>function AudioDataDestination(sampleRate, readFn) {<br/>// Initialize the audio output.<br/>var audio = new Audio();<br/>audio.mozSetup(1, sampleRate);<br/>var currentWritePosition = 0;<br/>var prebufferSize = sampleRate / 2; // buffer 500ms<br/>var tail = null;<br/>// The function called with regular interval to populate<br/>// the audio output buffer.<br/>setInterval(function() {<br/>var written;<br/>// Check if some data was not written in previous attempts.<br/>if(tail) {<br/>written = audio.mozWriteAudio(tail);<br/>currentWritePosition += written;<br/>if(written < tail.length) {<br/>// Not all the data was written, saving the tail...<br/>tail = tail.slice(written);<br/>return; // ... and exit the function.<br/>}<br/>tail = null;<br/>}<br/>// Check if we need add some data to the audio output.<br/>var currentPosition = audio.mozCurrentSampleOffset();<br/>var available = currentPosition + prebufferSize - currentWritePosition;<br/>if(available > 0) {<br/>// Request some sound data from the callback function.<br/>var soundData = new Float32Array(available);<br/>readFn(soundData);<br/>// Writting the data.<br/>written = audio.mozWriteAudio(soundData);<br/>if(written < soundData.length) {<br/>// Not all the data was written, saving the tail.<br/>tail = soundData.slice(written);<br/>}<br/>currentWritePosition += written;<br/>}<br/>}, 100);<br/>}<br/>// Control and generate the sound.<br/>var frequency = 0, currentSoundSample;<br/>var sampleRate = 44100;<br/>function requestSoundData(soundData) {<br/>if (!frequency) {<br/>return; // no sound selected<br/>}<br/>var k = 2* Math.PI * frequency / sampleRate;<br/>for (var i=0, size=soundData.length; i<size; i++) {<br/>soundData[i] = Math.sin(k * currentSoundSample++);<br/>}<br/>}<br/>var audioDestination = new AudioDataDestination(sampleRate, requestSoundData);<br/>function start() {<br/>currentSoundSample = 0;<br/>frequency = parseFloat(document.getElementById("freq").value);<br/>}<br/>function stop() 
{<br/>frequency = 0;<br/>}<br/>

    View Slide

  33. [...]
    var audio = new Audio();
    audio.mozSetup(1, sampleRate);
    [...]
    written = audio.mozWriteAudio(tail);
    [...]

    View Slide

  34. https://wiki.mozilla.org/Audio_Data_API#Writing_Audio

    View Slide

  35. Web Audio API

    View Slide

  36. reading

    View Slide

  37. var context = new webkitAudioContext();
    var source = context.createBufferSource();
    var jsNode = context.createJavaScriptNode(1024 * 8, 1, 1);
    jsNode.onaudioprocess = processAudio;
    source.connect(jsNode);
    jsNode.connect(context.destination);
    loadSample("data/funkydrummer.mp3");

    View Slide

  38. function loadSample(url) {
    // Load asynchronously
    var request = new XMLHttpRequest();
    request.open("GET", url, true);
    request.responseType = "arraybuffer";
    request.onload = function() {
    var buffer = context.createBuffer(request.response, false);
    source.buffer = buffer;
    source.looping = true;
    source.noteOn(0.0);
    console.log("Sample loaded");
    }
    request.send();
    }

    View Slide

  39. function loadSample(url) {
    // Load asynchronously
    var request = new XMLHttpRequest();
    request.open("GET", url, true);
    request.responseType = "arraybuffer";
    request.onload = function() {
    var buffer = context.createBuffer(request.response, false);
    source.buffer = buffer;
    source.looping = true;
    source.noteOn(0.0);
    console.log("Sample loaded");
    }
    request.send();
    }

    View Slide

  40. function loadSample(url) {
    // Load asynchronously
    var request = new XMLHttpRequest();
    request.open("GET", url, true);
    request.responseType = "arraybuffer";
    request.onload = function() {
    var buffer = context.createBuffer(request.response, false);
    source.buffer = buffer;
    source.looping = true;
    source.noteOn(0.0);
    console.log("Sample loaded");
    }
    request.send();
    }

    View Slide

  41. function processAudio(e) {
    console.log(e);
    var channels = e.inputBuffer.numberOfChannels;
    var loudness = 0.0;
    var i = 0;
    var c = 0;
    for(c = 0; c < channels; c++) {
    var inData = event.inputBuffer.getChannelData(c);
    var outData = event.outputBuffer.getChannelData(c);
    for(i=0; i< inData.length; i++) {
    loudness += Math.abs(inData[i]);
    outData[i] = inData[i];
    }
    loudness /= inData.length;
    }
    loudness /= channels;
    }

    View Slide

  42. function processAudio(e) {
    console.log(e);
    var channels = e.inputBuffer.numberOfChannels;
    var loudness = 0.0;
    var i = 0;
    var c = 0;
    for(c = 0; c < channels; c++) {
    var inData = event.inputBuffer.getChannelData(c);
    var outData = event.outputBuffer.getChannelData(c);
    for(i=0; i< inData.length; i++) {
    loudness += Math.abs(inData[i]);
    outData[i] = inData[i];
    }
    loudness /= inData.length;
    }
    loudness /= channels;
    }

    View Slide

  43. writing

    View Slide

  44. ! var context = new webkitAudioContext();
    var source = context.createJavaScriptNode(8192, 0, 1);
    source.onaudioprocess = requestSoundData;
    source.connect(context.destination);

    View Slide

  45. var frequency = 0, offset = 0;
    var sampleRate = 44100;
    function requestSoundData(e) {
    var channels = e.outputBuffer.numberOfChannels;
    var k = 2* Math.PI * frequency / sampleRate;
    for (var c=0; c< channels; c++) {
    var soundData = e.outputBuffer.getChannelData(c);
    for(var i = 0;isoundData[i] = frequency === 0 ? 0.0 : Math.sin(k * (offset +
    i));
    }
    }
    offset += soundData.length;
    }

    View Slide

  46. var frequency = 0, offset = 0;
    var sampleRate = 44100;
    function requestSoundData(e) {
    var channels = e.outputBuffer.numberOfChannels;
    var k = 2* Math.PI * frequency / sampleRate;
    for (var c=0; c< channels; c++) {
    var soundData = e.outputBuffer.getChannelData(c);
    for(var i = 0;isoundData[i] = frequency === 0 ? 0.0 : Math.sin(k * (offset +
    i));
    }
    }
    offset += soundData.length;
    }

    View Slide

  47. var frequency = 0, offset = 0;
    var sampleRate = 44100;
    function requestSoundData(e) {
    var channels = e.outputBuffer.numberOfChannels;
    var k = 2* Math.PI * frequency / sampleRate;
    for (var c=0; c< channels; c++) {
    var soundData = e.outputBuffer.getChannelData(c);
    for(var i = 0;isoundData[i] = frequency === 0 ? 0.0 : Math.sin(k * (offset +
    i));
    }
    }
    offset += soundData.length;
    }

    View Slide

  48. chaining

    View Slide

  49. var context = new webkitAudioContext();
    var source = context.createBufferSource();
    var filter = context.createBiquadFilter();
    filter.type = filter.LOWPASS;
    filter.frequency.value = (12000.0);
    filter.Q = 2;
    source.connect(filter);
    filter.connect(context.destination);
    loadSample("data/funkydrummer.mp3");
    setInterval(function() {
    filter.frequency.value = Math.random() * 12000;
    //console.log(filter.frequency);
    }, 200)

    View Slide

  50. Performance

    View Slide

  51. Software Synthesis in
    Javascript. Crazy?

    View Slide

  52. Not at all

    View Slide

  53. Depends heavily on

    View Slide

  54. JS runtime

    View Slide

  55. Hardware

    View Slide

  56. Breakthrough

    View Slide

  57. V8 Nitro TraceMonkey
    Carakan Chakra

    View Slide

  58. Typed Arrays

    View Slide

  59. ( Invented for WebGL )

    View Slide

  60. Thank you Mr. Moore.

    View Slide

  61. Curiosities

    View Slide

  62. On The Fly Data URL

    View Slide

  63. http://synth.bitsnbites.eu/play.html

    View Slide

  64. window.btoa()

    View Slide

  65. My little corner

    View Slide

  66. http://github.com/halfbyte/soundbridge.js

    View Slide

  67. Web Audio > Audio Data > Flash

    View Slide

  68. Only Mono/Stereo

    View Slide

  69. Only Output

    View Slide

  70. SoundBridge(2, 44100, '..', function(soundbridge) {
    soundbridge.setCallback(calc);
    log("now stating to play.");
    soundbridge.play();
    });

    View Slide

  71. ! var absoluteBufferPos = 0;
    var calc = function(bridge, bufferSize, channels) {
    for(var b=0;bvar period = ((absoluteBufferPos + b) % 100) / 100;
    period *= 2 * Math.PI;
    var val = Math.sin(period);
    bridge.addToBuffer(val,val);
    }
    absoluteBufferPos += bufferSize;
    };

    View Slide

  72. http://webloop.pixelpoke.de
    https://github.com/halfbyte/webloop

    View Slide

  73. Demo time

    View Slide

  74. Resources

    View Slide

  75. ‣ https://wiki.mozilla.org/Audio_Data_API
    ‣ https://dvcs.w3.org/hg/audio/raw-file/tip/webaudio/specification.html
    ‣ https://wiki.mozilla.org/Audio_Data_API#Working_Audio_Data_Demos
    ‣ http://chromium.googlecode.com/svn/trunk/samples/audio/index.html
    ‣ https://github.com/halfbyte/webloop
    ‣ https://github.com/halfbyte/soundbridge.js

    View Slide

  76. thanks.
    http://jan.krutisch.de/
    http://twitter.com/halfbyte
    http://github.com/halfbyte
    http://soundcloud.com/halfbyte
    http://www.mindmatters.de/

    View Slide