Upgrade to Pro — share decks privately, control downloads, hide ads and more …

Javascript Audio APIs - Let's make some noise

Javascript Audio APIs - Let's make some noise

An overview of the current state of the art in using JavaScript to synthesize sounds in real time.

Jan Krutisch

August 20, 2011
Tweet

More Decks by Jan Krutisch

Other Decks in Technology

Transcript

  1. var channels, rate, frameBufferLength, samples; function audioInfo() { var audio

    = document.getElementById('audio'); // After loadedmetadata event, following media element attributes are known: channels = audio.mozChannels; rate = audio.mozSampleRate; frameBufferLength = audio.mozFrameBufferLength; } function audioAvailable(event) { var samples = event.frameBuffer; var time = event.time; for (var i = 0; i < frameBufferLength; i++) { // Do something with the audio data as it is played. processSample(samples[i], channels, rate); } }
  2. <!DOCTYPE html> <html> <head> <title>JavaScript Audio Write Example</title> </head> <body>

    <input type="text" size="4" id="freq" value="440"><label for="hz">Hz</label> <button onclick="start()">play</button> <button onclick="stop()">stop</button> <script type="text/javascript"> function AudioDataDestination(sampleRate, readFn) { // Initialize the audio output. var audio = new Audio(); audio.mozSetup(1, sampleRate); var currentWritePosition = 0; var prebufferSize = sampleRate / 2; // buffer 500ms var tail = null; // The function called with regular interval to populate // the audio output buffer. setInterval(function() { var written; // Check if some data was not written in previous attempts. if(tail) { written = audio.mozWriteAudio(tail); currentWritePosition += written; if(written < tail.length) { // Not all the data was written, saving the tail... tail = tail.slice(written); return; // ... and exit the function. } tail = null; } // Check if we need add some data to the audio output. var currentPosition = audio.mozCurrentSampleOffset(); var available = currentPosition + prebufferSize - currentWritePosition; if(available > 0) { // Request some sound data from the callback function. var soundData = new Float32Array(available); readFn(soundData); // Writting the data. written = audio.mozWriteAudio(soundData); if(written < soundData.length) { // Not all the data was written, saving the tail. tail = soundData.slice(written); } currentWritePosition += written; } }, 100); } // Control and generate the sound. var frequency = 0, currentSoundSample; var sampleRate = 44100; function requestSoundData(soundData) { if (!frequency) { return; // no sound selected } var k = 2* Math.PI * frequency / sampleRate; for (var i=0, size=soundData.length; i<size; i++) { soundData[i] = Math.sin(k * currentSoundSample++); } } var audioDestination = new AudioDataDestination(sampleRate, requestSoundData); function start() { currentSoundSample = 0; frequency = parseFloat(document.getElementById("freq").value); } function stop() { frequency = 0; } </script>
  3. var context = new webkitAudioContext(); var source = context.createBufferSource(); var

    jsNode = context.createJavaScriptNode(1024 * 8, 1, 1); jsNode.onaudioprocess = processAudio; source.connect(jsNode); jsNode.connect(context.destination); loadSample("data/funkydrummer.mp3");
  4. function loadSample(url) { // Load asynchronously var request = new

    XMLHttpRequest(); request.open("GET", url, true); request.responseType = "arraybuffer"; request.onload = function() { var buffer = context.createBuffer(request.response, false); source.buffer = buffer; source.looping = true; source.noteOn(0.0); console.log("Sample loaded"); } request.send(); }
  5. function loadSample(url) { // Load asynchronously var request = new

    XMLHttpRequest(); request.open("GET", url, true); request.responseType = "arraybuffer"; request.onload = function() { var buffer = context.createBuffer(request.response, false); source.buffer = buffer; source.looping = true; source.noteOn(0.0); console.log("Sample loaded"); } request.send(); }
  6. function loadSample(url) { // Load asynchronously var request = new

    XMLHttpRequest(); request.open("GET", url, true); request.responseType = "arraybuffer"; request.onload = function() { var buffer = context.createBuffer(request.response, false); source.buffer = buffer; source.looping = true; source.noteOn(0.0); console.log("Sample loaded"); } request.send(); }
  7. function processAudio(e) { console.log(e); var channels = e.inputBuffer.numberOfChannels; var loudness

    = 0.0; var i = 0; var c = 0; for(c = 0; c < channels; c++) { var inData = event.inputBuffer.getChannelData(c); var outData = event.outputBuffer.getChannelData(c); for(i=0; i< inData.length; i++) { loudness += Math.abs(inData[i]); outData[i] = inData[i]; } loudness /= inData.length; } loudness /= channels; }
  8. function processAudio(e) { console.log(e); var channels = e.inputBuffer.numberOfChannels; var loudness

    = 0.0; var i = 0; var c = 0; for(c = 0; c < channels; c++) { var inData = event.inputBuffer.getChannelData(c); var outData = event.outputBuffer.getChannelData(c); for(i=0; i< inData.length; i++) { loudness += Math.abs(inData[i]); outData[i] = inData[i]; } loudness /= inData.length; } loudness /= channels; }
  9. ! var context = new webkitAudioContext(); var source = context.createJavaScriptNode(8192,

    0, 1); source.onaudioprocess = requestSoundData; source.connect(context.destination);
  10. var frequency = 0, offset = 0; var sampleRate =

    44100; function requestSoundData(e) { var channels = e.outputBuffer.numberOfChannels; var k = 2* Math.PI * frequency / sampleRate; for (var c=0; c< channels; c++) { var soundData = e.outputBuffer.getChannelData(c); for(var i = 0;i<soundData.length;i++) { soundData[i] = frequency === 0 ? 0.0 : Math.sin(k * (offset + i)); } } offset += soundData.length; }
  11. var frequency = 0, offset = 0; var sampleRate =

    44100; function requestSoundData(e) { var channels = e.outputBuffer.numberOfChannels; var k = 2* Math.PI * frequency / sampleRate; for (var c=0; c< channels; c++) { var soundData = e.outputBuffer.getChannelData(c); for(var i = 0;i<soundData.length;i++) { soundData[i] = frequency === 0 ? 0.0 : Math.sin(k * (offset + i)); } } offset += soundData.length; }
  12. var frequency = 0, offset = 0; var sampleRate =

    44100; function requestSoundData(e) { var channels = e.outputBuffer.numberOfChannels; var k = 2* Math.PI * frequency / sampleRate; for (var c=0; c< channels; c++) { var soundData = e.outputBuffer.getChannelData(c); for(var i = 0;i<soundData.length;i++) { soundData[i] = frequency === 0 ? 0.0 : Math.sin(k * (offset + i)); } } offset += soundData.length; }
  13. var context = new webkitAudioContext(); var source = context.createBufferSource(); var

    filter = context.createBiquadFilter(); filter.type = filter.LOWPASS; filter.frequency.value = (12000.0); filter.Q = 2; source.connect(filter); filter.connect(context.destination); loadSample("data/funkydrummer.mp3"); setInterval(function() { filter.frequency.value = Math.random() * 12000; //console.log(filter.frequency); }, 200)
  14. ! var absoluteBufferPos = 0; var calc = function(bridge, bufferSize,

    channels) { for(var b=0;b<bufferSize;b++) { var period = ((absoluteBufferPos + b) % 100) / 100; period *= 2 * Math.PI; var val = Math.sin(period); bridge.addToBuffer(val,val); } absoluteBufferPos += bufferSize; };