Slide 1

Slide 1 text

JavaScript Audio APIs: Let's make some noise! APIs, Demos, Applications. Jan Krutisch for FrosCon 2011, St. Augustin

Slide 2

Slide 2 text

JavaScript Audio APIs: Let's make some noise! APIs, Demos, Applications. Jan Krutisch for FrosCon 2011, St. Augustin

Slide 3

Slide 3 text

Moi.

Slide 4

Slide 4 text

Realtime audio?

Slide 5

Slide 5 text

In/Out

Slide 6

Slide 6 text

Applications?

Slide 7

Slide 7 text

Music

Slide 8

Slide 8 text

Collaboration

Slide 9

Slide 9 text

Games

Slide 10

Slide 10 text

More than one sound at once

Slide 11

Slide 11 text

Spatial Audio

Slide 12

Slide 12 text

Ahem. WebGL. Ahem.

Slide 13

Slide 13 text

(Build it and they will come)

Slide 14

Slide 14 text

Status Quo.

Slide 15

Slide 15 text

Nothing.

Slide 16

Slide 16 text

Nothing serious.

Slide 17

Slide 17 text

well, Flash.

Slide 18

Slide 18 text

(insert rant on flash scripting bridge here)

Slide 19

Slide 19 text

No content

Slide 20

Slide 20 text

No content

Slide 21

Slide 21 text

Mac: enable via about:flags. Win, Linux: Canary builds.

Slide 22

Slide 22 text

No content

Slide 23

Slide 23 text

No content

Slide 24

Slide 24 text

No content

Slide 25

Slide 25 text

Code or it didn't happen

Slide 26

Slide 26 text

Audio Data API

Slide 27

Slide 27 text

Slide 28

Slide 28 text

reading

Slide 29

Slide 29 text

var audio = document.getElementById("audio");
audio.addEventListener('MozAudioAvailable', audioAvailable, false);
audio.addEventListener('loadedmetadata', audioInfo, false);

Slide 30

Slide 30 text

var channels, rate, frameBufferLength, samples;

function audioInfo() {
  var audio = document.getElementById('audio');
  // After the loadedmetadata event, the following media element attributes are known:
  channels = audio.mozChannels;
  rate = audio.mozSampleRate;
  frameBufferLength = audio.mozFrameBufferLength;
}

function audioAvailable(event) {
  var samples = event.frameBuffer;
  var time = event.time;
  for (var i = 0; i < frameBufferLength; i++) {
    // Do something with the audio data as it is played.
    processSample(samples[i], channels, rate);
  }
}

Slide 31

Slide 31 text

writing

Slide 32

Slide 32 text

JavaScript Audio Write Example

function AudioDataDestination(sampleRate, readFn) {
  // Initialize the audio output.
  var audio = new Audio();
  audio.mozSetup(1, sampleRate);

  var currentWritePosition = 0;
  var prebufferSize = sampleRate / 2; // buffer 500ms
  var tail = null;

  // The function called at a regular interval to populate
  // the audio output buffer.
  setInterval(function() {
    var written;
    // Check if some data was not written in previous attempts.
    if (tail) {
      written = audio.mozWriteAudio(tail);
      currentWritePosition += written;
      if (written < tail.length) {
        // Not all the data was written, saving the tail...
        tail = tail.slice(written);
        return; // ... and exit the function.
      }
      tail = null;
    }

    // Check if we need to add some data to the audio output.
    var currentPosition = audio.mozCurrentSampleOffset();
    var available = currentPosition + prebufferSize - currentWritePosition;
    if (available > 0) {
      // Request some sound data from the callback function.
      var soundData = new Float32Array(available);
      readFn(soundData);

      // Writing the data.
      written = audio.mozWriteAudio(soundData);
      if (written < soundData.length) {
        // Not all the data was written, saving the tail.
        tail = soundData.slice(written);
      }
      currentWritePosition += written;
    }
  }, 100);
}

// Control and generate the sound.
var frequency = 0, currentSoundSample;
var sampleRate = 44100;

function requestSoundData(soundData) {
  if (!frequency) {
    return; // no sound selected
  }
  var k = 2 * Math.PI * frequency / sampleRate;
  for (var i = 0, size = soundData.length; i < size; i++) {
    soundData[i] = Math.sin(k * currentSoundSample++);
  }
}

var audioDestination = new AudioDataDestination(sampleRate, requestSoundData);

function start() {
  currentSoundSample = 0;
  frequency = parseFloat(document.getElementById("freq").value);
}

function stop() {
  frequency = 0;
}

Slide 33

Slide 33 text

[...]
var audio = new Audio();
audio.mozSetup(1, sampleRate);
[...]
written = audio.mozWriteAudio(tail);
[...]

Slide 34

Slide 34 text

https://wiki.mozilla.org/Audio_Data_API#Writing_Audio

Slide 35

Slide 35 text

Web Audio API

Slide 36

Slide 36 text

reading

Slide 37

Slide 37 text

var context = new webkitAudioContext();
var source = context.createBufferSource();
var jsNode = context.createJavaScriptNode(1024 * 8, 1, 1);
jsNode.onaudioprocess = processAudio;
source.connect(jsNode);
jsNode.connect(context.destination);
loadSample("data/funkydrummer.mp3");

Slide 38

Slide 38 text

function loadSample(url) {
  // Load asynchronously
  var request = new XMLHttpRequest();
  request.open("GET", url, true);
  request.responseType = "arraybuffer";
  request.onload = function() {
    var buffer = context.createBuffer(request.response, false);
    source.buffer = buffer;
    source.looping = true;
    source.noteOn(0.0);
    console.log("Sample loaded");
  };
  request.send();
}

Slide 39

Slide 39 text

function loadSample(url) {
  // Load asynchronously
  var request = new XMLHttpRequest();
  request.open("GET", url, true);
  request.responseType = "arraybuffer";
  request.onload = function() {
    var buffer = context.createBuffer(request.response, false);
    source.buffer = buffer;
    source.looping = true;
    source.noteOn(0.0);
    console.log("Sample loaded");
  };
  request.send();
}

Slide 40

Slide 40 text

function loadSample(url) {
  // Load asynchronously
  var request = new XMLHttpRequest();
  request.open("GET", url, true);
  request.responseType = "arraybuffer";
  request.onload = function() {
    var buffer = context.createBuffer(request.response, false);
    source.buffer = buffer;
    source.looping = true;
    source.noteOn(0.0);
    console.log("Sample loaded");
  };
  request.send();
}

Slide 41

Slide 41 text

function processAudio(e) {
  console.log(e);
  var channels = e.inputBuffer.numberOfChannels;
  var loudness = 0.0;
  var i = 0;
  var c = 0;
  for (c = 0; c < channels; c++) {
    var inData = e.inputBuffer.getChannelData(c);
    var outData = e.outputBuffer.getChannelData(c);
    for (i = 0; i < inData.length; i++) {
      loudness += Math.abs(inData[i]);
      outData[i] = inData[i]; // pass the input through unchanged
    }
    loudness /= inData.length;
  }
  loudness /= channels;
}

Slide 42

Slide 42 text

function processAudio(e) {
  console.log(e);
  var channels = e.inputBuffer.numberOfChannels;
  var loudness = 0.0;
  var i = 0;
  var c = 0;
  for (c = 0; c < channels; c++) {
    var inData = e.inputBuffer.getChannelData(c);
    var outData = e.outputBuffer.getChannelData(c);
    for (i = 0; i < inData.length; i++) {
      loudness += Math.abs(inData[i]);
      outData[i] = inData[i]; // pass the input through unchanged
    }
    loudness /= inData.length;
  }
  loudness /= channels;
}

Slide 43

Slide 43 text

writing

Slide 44

Slide 44 text

var context = new webkitAudioContext();
var source = context.createJavaScriptNode(8192, 0, 1);
source.onaudioprocess = requestSoundData;
source.connect(context.destination);

Slide 45

Slide 45 text

var frequency = 0, offset = 0;
var sampleRate = 44100;

function requestSoundData(e) {
  var channels = e.outputBuffer.numberOfChannels;
  var k = 2 * Math.PI * frequency / sampleRate;
  for (var c = 0; c < channels; c++) {
    var soundData = e.outputBuffer.getChannelData(c);
    for (var i = 0; i < soundData.length; i++) {
      // Simple sine oscillator; offset carries the phase across callbacks.
      soundData[i] = Math.sin(k * (offset + i));
    }
  }
  offset += e.outputBuffer.length;
}

Slide 46

Slide 46 text

var frequency = 0, offset = 0;
var sampleRate = 44100;

function requestSoundData(e) {
  var channels = e.outputBuffer.numberOfChannels;
  var k = 2 * Math.PI * frequency / sampleRate;
  for (var c = 0; c < channels; c++) {
    var soundData = e.outputBuffer.getChannelData(c);
    for (var i = 0; i < soundData.length; i++) {
      // Simple sine oscillator; offset carries the phase across callbacks.
      soundData[i] = Math.sin(k * (offset + i));
    }
  }
  offset += e.outputBuffer.length;
}

Slide 47

Slide 47 text

var frequency = 0, offset = 0;
var sampleRate = 44100;

function requestSoundData(e) {
  var channels = e.outputBuffer.numberOfChannels;
  var k = 2 * Math.PI * frequency / sampleRate;
  for (var c = 0; c < channels; c++) {
    var soundData = e.outputBuffer.getChannelData(c);
    for (var i = 0; i < soundData.length; i++) {
      // Simple sine oscillator; offset carries the phase across callbacks.
      soundData[i] = Math.sin(k * (offset + i));
    }
  }
  offset += e.outputBuffer.length;
}

Slide 48

Slide 48 text

chaining

Slide 49

Slide 49 text

var context = new webkitAudioContext();
var source = context.createBufferSource();
var filter = context.createBiquadFilter();
filter.type = filter.LOWPASS;
filter.frequency.value = 12000.0;
filter.Q.value = 2;
source.connect(filter);
filter.connect(context.destination);
loadSample("data/funkydrummer.mp3");

setInterval(function() {
  filter.frequency.value = Math.random() * 12000;
  //console.log(filter.frequency);
}, 200);

Slide 50

Slide 50 text

Performance

Slide 51

Slide 51 text

Software Synthesis in JavaScript. Crazy?

Slide 52

Slide 52 text

Not at all

Slide 53

Slide 53 text

Depends heavily on

Slide 54

Slide 54 text

JS runtime

Slide 55

Slide 55 text

Hardware

Slide 56

Slide 56 text

Breakthrough

Slide 57

Slide 57 text

V8, Nitro, TraceMonkey, Carakan, Chakra

Slide 58

Slide 58 text

Typed Arrays
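
A minimal sketch of why typed arrays matter here: sample data sits in flat Float32Array buffers instead of generic JS arrays, so the engine can keep it unboxed. The 440 Hz tone and one-second length below are just placeholder values:

// Fill a Float32Array with one second of a 440 Hz sine at 44.1 kHz.
var sampleRate = 44100;
var samples = new Float32Array(sampleRate); // fixed-size, float-only storage
var k = 2 * Math.PI * 440 / sampleRate;
for (var i = 0; i < samples.length; i++) {
  samples[i] = Math.sin(k * i);
}
// Both mozWriteAudio() and the Web Audio API hand audio data around as
// Float32Arrays, so a buffer like this can be written out directly.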

Slide 59

Slide 59 text

(Invented for WebGL)

Slide 60

Slide 60 text

Thank you Mr. Moore.

Slide 61

Slide 61 text

Curiosities

Slide 62

Slide 62 text

On The Fly Data URL

Slide 63

Slide 63 text

http://synth.bitsnbites.eu/play.html

Slide 64

Slide 64 text

window.btoa()
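
A rough sketch of the trick behind that demo: render samples, wrap them in a WAV header inside a binary string, base64-encode it with window.btoa() and hand the result to an audio element as a data: URL. The helper name toWavDataUrl and the 8-bit mono format are simplifications of mine, not the code the linked synth actually uses:

function toWavDataUrl(samples, sampleRate) { // samples: floats in [-1, 1]
  var data = "";
  for (var i = 0; i < samples.length; i++) {
    // 8-bit unsigned PCM: map [-1, 1] to [0, 255].
    data += String.fromCharCode(Math.round((samples[i] + 1) / 2 * 255));
  }
  // Little-endian helpers for the RIFF/WAVE header fields.
  function u32(n) { return String.fromCharCode(n & 255, (n >> 8) & 255, (n >> 16) & 255, (n >> 24) & 255); }
  function u16(n) { return String.fromCharCode(n & 255, (n >> 8) & 255); }
  var header =
    "RIFF" + u32(36 + data.length) + "WAVE" +
    "fmt " + u32(16) + u16(1) /* PCM */ + u16(1) /* mono */ +
    u32(sampleRate) + u32(sampleRate) /* byte rate */ + u16(1) /* block align */ + u16(8) /* bits */ +
    "data" + u32(data.length);
  return "data:audio/wav;base64," + window.btoa(header + data);
}

// Usage: new Audio(toWavDataUrl(samples, 44100)).play();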

Slide 65

Slide 65 text

My little corner

Slide 66

Slide 66 text

http://github.com/halfbyte/soundbridge.js

Slide 67

Slide 67 text

Web Audio > Audio Data > Flash
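
Picking one of those backends boils down to feature detection, roughly like this. A sketch only; detectAudioBackend is a hypothetical helper, not soundbridge.js's actual code:

function detectAudioBackend() {
  if (window.webkitAudioContext || window.AudioContext) {
    return "webaudio";   // Web Audio API (WebKit/Chrome at the time)
  }
  if (typeof new Audio().mozSetup === "function") {
    return "audiodata";  // Mozilla Audio Data API (Firefox 4+)
  }
  return "flash";        // fall back to a Flash-based output
}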

Slide 68

Slide 68 text

Only Mono/Stereo

Slide 69

Slide 69 text

Only Output

Slide 70

Slide 70 text

SoundBridge(2, 44100, '..', function(soundbridge) {
  soundbridge.setCallback(calc);
  log("now starting to play.");
  soundbridge.play();
});

Slide 71

Slide 71 text

var absoluteBufferPos = 0;
var calc = function(bridge, bufferSize, channels) {
  for (var b = 0; b < bufferSize; b++) {
    // Generate one frame per buffer position (a 440 Hz sine as a stand-in signal).
    var sample = Math.sin(2 * Math.PI * 440 * absoluteBufferPos / 44100);
    // Assumption: addToBuffer(left, right) stands in for the library's write call; check soundbridge.js for the real API.
    bridge.addToBuffer(sample, sample);
    absoluteBufferPos++;
  }
};

Slide 72

Slide 72 text

http://webloop.pixelpoke.de
https://github.com/halfbyte/webloop

Slide 73

Slide 73 text

Demo time

Slide 74

Slide 74 text

Resources

Slide 75

Slide 75 text

‣ https://wiki.mozilla.org/Audio_Data_API
‣ https://dvcs.w3.org/hg/audio/raw-file/tip/webaudio/specification.html
‣ https://wiki.mozilla.org/Audio_Data_API#Working_Audio_Data_Demos
‣ http://chromium.googlecode.com/svn/trunk/samples/audio/index.html
‣ https://github.com/halfbyte/webloop
‣ https://github.com/halfbyte/soundbridge.js

Slide 76

Slide 76 text

thanks.

http://jan.krutisch.de/
http://twitter.com/halfbyte
http://github.com/halfbyte
http://soundcloud.com/halfbyte
http://www.mindmatters.de/