diff --git a/assets/data.json b/assets/data.json
index e38d8f9..e09ef3c 100644
--- a/assets/data.json
+++ b/assets/data.json
@@ -51,7 +51,7 @@
 	"dunesTextureDistort": [0.98828125, 1],
 	"titleCreditColor": 13,
 	"bodyCreditColor": 15,
-	"supported_demos": ["shading", "scanlines", "sphere"],
+	"supported_demos": ["shading", "scanlines", "sphere", "audioviz"],
 	"texture_packs": {
 		"indexed_color": {
 			"anisotropicLevels": 0,
diff --git a/glsl/terrain.glsl b/glsl/terrain.glsl
index c7a3c84..0022080 100644
--- a/glsl/terrain.glsl
+++ b/glsl/terrain.glsl
@@ -9,6 +9,7 @@ precision mediump float;
 #define DEMO_SHADING 0
 #define DEMO_SCANLINES 1
 #define DEMO_SPHERE 2
+#define DEMO_AUDIOVIZ 3
 
 #if defined(FRAGMENT_SHADER)
 #define attribute //attribute
@@ -31,6 +32,7 @@ varying vec2 vTexCoord;
 varying vec3 vBarycentrics;
 varying float vDepth, vBrightness, vSpotlight;
 varying float vPointyQuad;
+varying float vVolume;
 varying mat3 vBottomScanline, vTopScanline;
 varying float vScanlineCut;
 
@@ -47,6 +49,9 @@ uniform float birdsEyeView, lightingCutoff, limitDrawResolution;
 uniform float fogNear, fogFar;
 uniform vec2 repeatOffset;
 
+uniform float minDecibels, maxDecibels, binCount, audioStartTime;
+uniform float decibels[128];
+
 uniform sampler2D dunesTexture;
 uniform sampler2D cairnTexture;
 #ifdef CURSED
@@ -133,8 +138,30 @@ void vert() {
 	float wave = aWaveAmplitude * -10.0 * sin((time * 1.75 + aWavePhase) * PI * 2.0);
 	vec3 offset = vec3(quadCentroidLocalPosition - position.xy, wave);
 
+#if defined(DEMO_ID) && DEMO_ID == DEMO_AUDIOVIZ
+	float dist = length(-(aPosition.xy + offset.xy) - position.xy);
+	int i = int(dist * 0.01 + 1.0);
+
+	float audioTimeOffset = mix(1.5, 1.1, birdsEyeView);
+	dist = cos(dist * 0.003 - (time - audioStartTime + audioTimeOffset) * 0.605 * 4.0) * 0.5 + 0.5;
+	float strength = 40.0 / (dist + 0.0001);
+	strength = min(15000.0, strength);
+
+	float volume = (decibels[i] - minDecibels) / (maxDecibels - minDecibels);
+	// volume += 0.2;
+	volume = pow(volume, 3.0);
+	vVolume = volume;
+
+	vSpotlight += strength * volume * 0.0008;
+#endif
+
 	// Project position from local to world to screen
 	vec4 localPosition = vec4(aPosition + offset, 1.0);
+
+#if defined(DEMO_ID) && DEMO_ID == DEMO_AUDIOVIZ
+	localPosition.z += volume * -1.0 * (strength * 0.001 + 5.0);
+#endif
+
 	vec4 worldPosition = warpWorld(transform * localPosition);
 	vec4 screenPosition = camera * worldPosition;
@@ -359,7 +386,9 @@ void frag() {
 	// The quad border is smaller in birds' eye view
 	float quadBorder = quadBorder;
 	if (birdsEyeView == 1.0) {
-		quadBorder *= 2.0;
+		quadBorder *= 2.0 * (1.0 + vVolume * 4.0);
+	} else {
+		quadBorder *= (1.0 + max(0.0, (vVolume - 0.3) * 60.0));
 	}
 
 	if (quadBorder == 0.0) {
@@ -389,7 +418,7 @@
 	}
 
 	vec3 borderColor;
-	if (vSpotlight == 1.0) {
+	if (vSpotlight >= 1.0) {
 		// The quad beneath the camera gets a solid border color
 		borderColor = vec3(1.0, 0.8, 0.0);
 	} else {
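Note: the audioviz branch in vert() above boils down to a small amount of arithmetic. Here is the same volume mapping as a plain-JavaScript sketch, for reference only; normalizeVolume is not a function in this patch, and the clamp is an addition for safety (the shader itself does not clamp):

// Normalize a decibel reading into 0..1 against the analyser's range,
// then cube it so only strong signals visibly ripple the terrain.
const normalizeVolume = (db, minDecibels = -100, maxDecibels = -30) => {
	const volume = (db - minDecibels) / (maxDecibels - minDecibels);
	return Math.min(1, Math.max(0, volume)) ** 3;
};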
diff --git a/index.html b/index.html
index a8d3133..bfe7a59 100644
--- a/index.html
+++ b/index.html
@@ -234,11 +234,5 @@
-
diff --git a/js/audioanalyser.js b/js/audioanalyser.js
new file mode 100644
index 0000000..bd86db4
--- /dev/null
+++ b/js/audioanalyser.js
@@ -0,0 +1,135 @@
+// @fileoverview A system that plays and provides analyser data from a loaded audio file
+// @author Jeremy Sachs
+// @version 1.0.0
+
+// Web audio is still a working draft, and receives less attention than other features.
+// Chrome won't allow web audio to initialize without a user gesture.
+// Safari has implemented audio analysis of buffer sources, but not media element sources.
+// Firefox seems ahead of the curve on most fronts, but there is still
+// the matter of finding an audio format that all these browsers can support as well.
+
+// Consequently, there are more hurdles than usual to jump through in this code.
+
+const AudioContext = [window.AudioContext, window.webkitAudioContext].find(
+	(contextClass) => contextClass != null
+);
+
+const fromMediaElement = async (audioContext, analyser, path) => {
+	let audio = new Audio(path);
+	audio.loop = true;
+	let audioNode = audioContext.createMediaElementSource(audio);
+	audioNode.connect(analyser);
+
+	let playing = false;
+
+	const play = () => {
+		if (!playing) {
+			playing = true;
+			audio.currentTime = 0;
+			return audio.play();
+		}
+	};
+
+	const stop = () => {
+		if (playing) {
+			playing = false;
+			audio.pause();
+		}
+	};
+
+	audio.currentTime = 0;
+	try {
+		await play();
+		return { play, stop };
+	} catch (e) {
+		audioNode.disconnect();
+		audioNode = null;
+		audio = null;
+		throw e;
+	}
+};
+
+const fromBuffer = async (audioContext, analyser, path) => {
+	const arrayBuffer = await (await fetch(path)).arrayBuffer();
+	const audioBuffer = await new Promise((resolve, reject) =>
+		audioContext.decodeAudioData(arrayBuffer, resolve, reject)
+	);
+
+	let audio = null;
+
+	const play = () => {
+		if (audio == null) {
+			audio = audioContext.createBufferSource();
+			audio.connect(analyser);
+			audio.buffer = audioBuffer;
+			audio.loop = true;
+			audio.start();
+		}
+	};
+
+	const stop = () => {
+		if (audio != null) {
+			audio.stop();
+			audio.disconnect();
+			audio = null;
+		}
+	};
+
+	play();
+	return { play, stop };
+};
+
+export default class AudioAnalyser {
+	constructor(path) {
+		this.path = path;
+		this.playing = false;
+		this.binCount = 2 ** 7;
+		this.minDecibels = -100;
+		this.maxDecibels = -30;
+		this.data = new Float32Array(this.binCount);
+		this.data.fill(this.minDecibels);
+	}
+
+	update() {
+		if (this.playing && this.analyser != null) {
+			this.analyser.getFloatFrequencyData(this.data);
+			if (Math.abs(this.data[0]) === Infinity) {
+				this.data.fill(this.minDecibels);
+			}
+		}
+	}
+
+	stop() {
+		if (!this.playing) {
+			return;
+		}
+		this.playing = false;
+		this.scheme.then(({ stop }) => stop());
+		this.data.fill(this.minDecibels);
+	}
+
+	play() {
+		if (this.playing) {
+			return;
+		}
+		this.playing = true;
+		this.playStart = Date.now() / 1000;
+		if (this.scheme == null) {
+			const audioContext = new AudioContext();
+			this.analyser = audioContext.createAnalyser();
+			this.analyser.fftSize = this.binCount * 2;
+			this.analyser.minDecibels = this.minDecibels;
+			this.analyser.maxDecibels = this.maxDecibels;
+			this.analyser.smoothingTimeConstant = 0.85;
+			this.analyser.connect(audioContext.destination);
+			this.scheme = (async () => {
+				try {
+					return await fromMediaElement(audioContext, this.analyser, this.path);
+				} catch (e) {
+					return await fromBuffer(audioContext, this.analyser, this.path);
+				}
+			})();
+		}
+		this.scheme.then(({ play }) => play());
+	}
+}
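The analyser configuration above fixes the bin math: per the Web Audio API, an AnalyserNode's frequencyBinCount is always fftSize / 2, so fftSize = binCount * 2 = 256 yields exactly the 128 readings that terrain.glsl's decibels[128] uniform expects. A rough sketch of the frequency range each bin covers (the 44100 Hz sample rate is an assumption; the real value comes from the AudioContext):

// Bin i of getFloatFrequencyData() covers frequencies near i * sampleRate / fftSize.
const binWidth = (sampleRate, fftSize = 256) => sampleRate / fftSize;
console.log(binWidth(44100)); // ≈ 172.3 Hz per bin at 44.1 kHz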
diff --git a/js/gui.js b/js/gui.js
index 531bdfc..a6bd141 100644
--- a/js/gui.js
+++ b/js/gui.js
@@ -29,7 +29,6 @@ export default (async () => {
 	const settingsChangedEvent = new Event("settingsChanged");
 
 	const toolbar = document.querySelector("command-bar");
-	const music = document.querySelector("audio#music");
 	const aboutButton = toolbar.querySelector(".mso-button#about");
 	const aboutBox = document.querySelector("#about-box");
 	const screenshot = document.querySelector("screenshot");
@@ -97,12 +96,6 @@
 			}
 		}
 
-		if (settings.music && music.paused) {
-			music.play();
-		} else if (!settings.music && !music.paused) {
-			music.pause();
-		}
-
 		events.dispatchEvent(settingsChangedEvent);
 	};
 
diff --git a/js/renderer.js b/js/renderer.js
index cd794f5..6272603 100644
--- a/js/renderer.js
+++ b/js/renderer.js
@@ -1,6 +1,7 @@
 import Model from "./model.js";
 import GUI from "./gui.js";
 import Controls from "./controls.js";
+import AudioAnalyser from "./audioanalyser.js";
 import {
 	loadShaderSet,
 	loadBase64Shader,
@@ -14,6 +15,8 @@ export default (async () => {
 	const { data, terrain } = await Model;
 	const { update, controlData } = await Controls;
 
+	const audioAnalyser = new AudioAnalyser("assets/audio/lumbus_van_rees1.mp3");
+
 	const viewscreenCanvas = document.querySelector("viewscreen canvas");
 	const viewscreenImage = document.querySelector("viewscreen img");
 	const viewElement = settings.cursed ? viewscreenImage : viewscreenCanvas;
@@ -94,6 +97,12 @@
 		...indexedColorTextures,
 		...indexedShaderSet,
 
+		minDecibels: audioAnalyser.minDecibels,
+		maxDecibels: audioAnalyser.maxDecibels,
+		binCount: audioAnalyser.binCount,
+		decibels: audioAnalyser.data,
+		audioStartTime: 0,
+
 		colorTable,
 		linearColorTable,
 		colorTableWidth: colorTable.width,
@@ -133,6 +142,13 @@
 			state[key] = settings[key] ? 1 : 0;
 		}
 
+		if (settings.music && !audioAnalyser.playing) {
+			audioAnalyser.play();
+			state.audioStartTime = (Date.now() - startTime) / 1000;
+		} else if (!settings.music && audioAnalyser.playing) {
+			audioAnalyser.stop();
+		}
+
 		state.fogFar = data.rendering.fogFar * (settings.lightingCutoff ? 1 : 3);
 		state.quadBorder = settings.showQuadEdges ? data.rendering.quadBorder : 0;
 
@@ -263,6 +279,10 @@
 		const time = (Date.now() - startTime) / 1000;
 		const frameTime = time - lastTime;
 
+		if (settings.demo === "audioviz") {
+			audioAnalyser.update();
+		}
+
 		const mustResize = !vec2.equals(lastScreenSize, screenSize);
 		const mustDraw =
 			mustResize || !settings.limitDrawSpeed || frameTime >= targetFrameTime;
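For orientation, here is a hypothetical standalone use of AudioAnalyser, separate from the renderer integration above. The button selector and track path are illustrative, not part of this patch:

import AudioAnalyser from "./audioanalyser.js";

const audioAnalyser = new AudioAnalyser("assets/audio/track.mp3");

// Browsers block audio until a user gesture, so play() belongs in an input handler.
document.querySelector("#play").addEventListener("click", () => audioAnalyser.play());

const tick = () => {
	audioAnalyser.update(); // refreshes audioAnalyser.data (128 decibel floats) while playing
	requestAnimationFrame(tick);
};
requestAnimationFrame(tick);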