diff --git a/server/assets/js/audio.js b/server/assets/js/audio.js
new file mode 120000
index 0000000..f939c3f
--- /dev/null
+++ b/server/assets/js/audio.js
@@ -0,0 +1 @@
+../../../static/js/audio.js
\ No newline at end of file
diff --git a/static/index.html b/static/index.html
index 90ed225..3d03203 100755
--- a/static/index.html
+++ b/static/index.html
@@ -57,11 +57,13 @@
+
+
@@ -167,7 +170,7 @@
var code, canvas, gl, buffer, currentProgram, vertexPosition, screenVertexPosition, panButton,
parameters = { startTime: Date.now(), time: 0, mouseX: 0.5, mouseY: 0.5, screenWidth: 0, screenHeight: 0 },
surface = { centerX: 0, centerY: 0, width: 1, height: 1, isPanning: false, isZooming: false, lastX: 0, lastY: 0 },
- frontTarget, backTarget, screenProgram, getWebGL, resizer = {}, compileOnChangeCode = true;
+ frontTarget, backTarget, screenProgram, getWebGL, resizer = {}, compileOnChangeCode = true, audio;
init();
if (gl) { animate(); }
@@ -464,6 +467,8 @@
compileScreenProgram();
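+ // Start microphone capture; audio.fft holds the smoothed loudness values fed to the shader.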
+ audio = new Audio();
+
}
function isCodeVisible() {
@@ -615,6 +620,7 @@
// Cache uniforms
+ cacheUniformLocation( program, 'audio' );
cacheUniformLocation( program, 'time' );
cacheUniformLocation( program, 'mouse' );
cacheUniformLocation( program, 'resolution' );
@@ -873,11 +879,13 @@
if ( !currentProgram ) return;
parameters.time = Date.now() - parameters.startTime;
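+ // Refresh the smoothed loudness bins from the microphone analyser before drawing.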
+ audio.tick();
// Set uniforms for custom shader
gl.useProgram( currentProgram );
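+ // Forward the first three smoothed loudness bins to the shader as the vec3 'audio' uniform.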
+ gl.uniform3f( currentProgram.uniformsCache[ 'audio' ], audio.fft[0], audio.fft[1], audio.fft[2] );
gl.uniform1f( currentProgram.uniformsCache[ 'time' ], parameters.time / 1000 );
gl.uniform2f( currentProgram.uniformsCache[ 'mouse' ], parameters.mouseX, parameters.mouseY );
gl.uniform2f( currentProgram.uniformsCache[ 'resolution' ], parameters.screenWidth, parameters.screenHeight );
diff --git a/static/js/audio.js b/static/js/audio.js
new file mode 100644
index 0000000..1f5f2e9
--- /dev/null
+++ b/static/js/audio.js
@@ -0,0 +1,51 @@
+'use strict';
+
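+// Microphone analysis helper: requests mic access via getUserMedia, feeds the
+// stream through a Meyda analyzer extracting 'loudness', and exposes `fft`, an
+// array of numBins smoothed loudness values that the render loop forwards to
+// the 'audio' shader uniform each frame. (cutoff and scale are not used yet.)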
+function Audio(numBins, cutoff, smooth, scale) {
+
+  numBins = numBins || 3;
+  smooth = smooth || 0.4;
+
+  function tick() {
+    if (this.meyda) {
+      var features = this.meyda.get();
+      if (features) {
+        var reducer = (accumulator, currentValue) => accumulator + currentValue;
+
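+        // Average Meyda's per-band specific loudness into numBins coarse bins,
+        // then blend each bin with last frame's value for temporal smoothing.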
+        var spacing = Math.floor(features.loudness.specific.length / this.bins.length);
+        this.prevBins = this.bins.slice(0);
+
+        this.bins = this.bins.map((bin, index) =>
+          features.loudness.specific.slice(index * spacing, (index + 1) * spacing).reduce(reducer) / spacing
+        ).map((bin, index) =>
+          bin * (1.0 - smooth) + this.prevBins[index] * smooth);
+
+        this.fft = this.bins;
+      }
+    }
+  }
+
+  function init() {
+    this.bins = Array(numBins).fill(0);
+    this.prevBins = Array(numBins).fill(0);
+    this.fft = Array(numBins).fill(0);
+
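+    // Ask for microphone access, then wire the stream into a Meyda analyzer
+    // configured to extract the 'loudness' feature; tick() polls it each frame.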
+    window.navigator.mediaDevices.getUserMedia({ video: false, audio: true })
+      .then((stream) => {
+        var context = new AudioContext();
+        var audio_stream = context.createMediaStreamSource(stream);
+
+        this.meyda = Meyda.createMeydaAnalyzer({
+          audioContext: context,
+          source: audio_stream,
+          featureExtractors: [
+            'loudness',
+          ]
+        });
+      })
+      .catch((err) => console.log('ERROR', err));
+
+    return this;
+  }
+
+  return { init: init, tick: tick }.init();
+}