From 896d984573f5e7dc432408e35612f4dd5579ec8a Mon Sep 17 00:00:00 2001
From: Evan Sonderegger
Date: Thu, 31 Oct 2024 11:27:09 -0400
Subject: [PATCH] Fix examples for Safari and add pre-fader example (#57)

---
 README.md                          |   1 +
 docs/examples/audio.html           |  46 ++++++-----
 docs/examples/cleanup.html         |  46 ++++++-----
 docs/examples/nodom.html           |  46 ++++++-----
 docs/examples/osc.html             |  26 +++---
 docs/examples/pre-fader-video.html | 123 +++++++++++++++++++++++++++++
 docs/examples/variations.html      |  46 ++++++-----
 docs/examples/video.html           |  26 +++---
 docs/index.html                    |   1 +
 examples/audio.js                  |  23 +++---
 examples/cleanup.js                |  23 +++---
 examples/nodom.js                  |  23 +++---
 examples/osc.js                    |  13 +--
 examples/pre-fader-video.html      |  12 +++
 examples/pre-fader-video.js        |  39 +++++++++
 examples/pre-fader-video.md        |   3 +
 examples/variations.js             |  23 +++---
 examples/video.js                  |  13 +--
 18 files changed, 383 insertions(+), 150 deletions(-)
 create mode 100644 docs/examples/pre-fader-video.html
 create mode 100644 examples/pre-fader-video.html
 create mode 100644 examples/pre-fader-video.js
 create mode 100644 examples/pre-fader-video.md

diff --git a/README.md b/README.md
index 6d28162..83687f5 100644
--- a/README.md
+++ b/README.md
@@ -7,6 +7,7 @@ Customizable peak meters, using the web audio API. It can measure peak or true p
 - [Single audio element](https://esonderegger.github.io/web-audio-peak-meter/examples/audio.html)
 - [Single video element](https://esonderegger.github.io/web-audio-peak-meter/examples/video.html)
 - [An oscillator node](https://esonderegger.github.io/web-audio-peak-meter/examples/osc.html)
+- [Pre-fader metering](https://esonderegger.github.io/web-audio-peak-meter/examples/pre-fader-video.html)
 - [Variations using configuration](https://esonderegger.github.io/web-audio-peak-meter/examples/variations.html)
 - [Dynamic creation and cleanup](https://esonderegger.github.io/web-audio-peak-meter/examples/cleanup.html)
 - [Usage without a DOM node](https://esonderegger.github.io/web-audio-peak-meter/examples/nodom.html)
diff --git a/docs/examples/audio.html b/docs/examples/audio.html
index 85c62e8..f3e6a59 100644
--- a/docs/examples/audio.html
+++ b/docs/examples/audio.html
@@ -33,24 +33,27 @@ Javascript code
 const sourceNode = audioCtx.createMediaElementSource(audioElement);
 sourceNode.connect(audioCtx.destination);
 
+const ctxStatus = document.getElementById('ctx-status');
 const buttonElement = document.getElementById('ctx-button');
-buttonElement.addEventListener('click', () => {
-  if (audioCtx.state === 'suspended') {
-    audioCtx.resume();
-  } else {
-    audioCtx.suspend();
-  }
-});
 
-const ctxStatus = document.getElementById('ctx-status');
-setInterval(() => {
+function updateAudioCtxStatus() {
   ctxStatus.innerText = audioCtx.state;
   if (audioCtx.state === 'suspended') {
     buttonElement.innerText = 'Resume';
   } else {
     buttonElement.innerText = 'Suspend';
   }
-}, 100);
+}
+
+setInterval(updateAudioCtxStatus, 1000);
+
+buttonElement.addEventListener('click', () => {
+  if (audioCtx.state === 'suspended') {
+    audioCtx.resume().then(updateAudioCtxStatus);
+  } else {
+    audioCtx.suspend().then(updateAudioCtxStatus);
+  }
+});
 
 const test = new webAudioPeakMeter.WebAudioPeakMeter(sourceNode, meterElement);
@@ -63,24 +66,27 @@ Javascript code
 const sourceNode = audioCtx.createMediaElementSource(audioElement);
 sourceNode.connect(audioCtx.destination);
 
+const ctxStatus = document.getElementById('ctx-status');
 const buttonElement = document.getElementById('ctx-button');
-buttonElement.addEventListener('click', () => {
-  if (audioCtx.state === 'suspended') {
-    audioCtx.resume();
-  } else {
-    audioCtx.suspend();
-  }
-});
 
-const ctxStatus = document.getElementById('ctx-status');
-setInterval(() => {
+function updateAudioCtxStatus() {
   ctxStatus.innerText = audioCtx.state;
   if (audioCtx.state === 'suspended') {
     buttonElement.innerText = 'Resume';
   } else {
     buttonElement.innerText = 'Suspend';
   }
-}, 100);
+}
+
+setInterval(updateAudioCtxStatus, 1000);
+
+buttonElement.addEventListener('click', () => {
+  if (audioCtx.state === 'suspended') {
+    audioCtx.resume().then(updateAudioCtxStatus);
+  } else {
+    audioCtx.suspend().then(updateAudioCtxStatus);
+  }
+});
 
 const test = new webAudioPeakMeter.WebAudioPeakMeter(sourceNode, meterElement);
diff --git a/docs/examples/cleanup.html b/docs/examples/cleanup.html
index e630954..19b46db 100644
--- a/docs/examples/cleanup.html
+++ b/docs/examples/cleanup.html
@@ -46,24 +46,27 @@ Javascript code
 const sourceNode = audioCtx.createMediaElementSource(audioElement);
 sourceNode.connect(audioCtx.destination);
 
+const ctxStatus = document.getElementById('ctx-status');
 const buttonElement = document.getElementById('ctx-button');
-buttonElement.addEventListener('click', () => {
-  if (audioCtx.state === 'suspended') {
-    audioCtx.resume();
-  } else {
-    audioCtx.suspend();
-  }
-});
 
-const ctxStatus = document.getElementById('ctx-status');
-setInterval(() => {
+function updateAudioCtxStatus() {
   ctxStatus.innerText = audioCtx.state;
   if (audioCtx.state === 'suspended') {
     buttonElement.innerText = 'Resume';
   } else {
     buttonElement.innerText = 'Suspend';
   }
-}, 100);
+}
+
+setInterval(updateAudioCtxStatus, 1000);
+
+buttonElement.addEventListener('click', () => {
+  if (audioCtx.state === 'suspended') {
+    audioCtx.resume().then(updateAudioCtxStatus);
+  } else {
+    audioCtx.suspend().then(updateAudioCtxStatus);
+  }
+});
 
 let meterInstance = null;
@@ -88,24 +91,27 @@ Javascript code
 const sourceNode = audioCtx.createMediaElementSource(audioElement);
 sourceNode.connect(audioCtx.destination);
 
+const ctxStatus = document.getElementById('ctx-status');
 const buttonElement = document.getElementById('ctx-button');
-buttonElement.addEventListener('click', () => {
-  if (audioCtx.state === 'suspended') {
-    audioCtx.resume();
-  } else {
-    audioCtx.suspend();
-  }
-});
 
-const ctxStatus = document.getElementById('ctx-status');
-setInterval(() => {
+function updateAudioCtxStatus() {
   ctxStatus.innerText = audioCtx.state;
   if (audioCtx.state === 'suspended') {
     buttonElement.innerText = 'Resume';
   } else {
     buttonElement.innerText = 'Suspend';
   }
-}, 100);
+}
+
+setInterval(updateAudioCtxStatus, 1000);
+
+buttonElement.addEventListener('click', () => {
+  if (audioCtx.state === 'suspended') {
+    audioCtx.resume().then(updateAudioCtxStatus);
+  } else {
+    audioCtx.suspend().then(updateAudioCtxStatus);
+  }
+});
 
 let meterInstance = null;
diff --git a/docs/examples/nodom.html b/docs/examples/nodom.html
index 19a870e..d271ade 100644
--- a/docs/examples/nodom.html
+++ b/docs/examples/nodom.html
@@ -63,24 +63,27 @@ Javascript code
 const sourceNode = audioCtx.createMediaElementSource(audioElement);
 sourceNode.connect(audioCtx.destination);
 
+const ctxStatus = document.getElementById('ctx-status');
 const buttonElement = document.getElementById('ctx-button');
-buttonElement.addEventListener('click', () => {
-  if (audioCtx.state === 'suspended') {
-    audioCtx.resume();
-  } else {
-    audioCtx.suspend();
-  }
-});
 
-const ctxStatus = document.getElementById('ctx-status');
-setInterval(() => {
+function updateAudioCtxStatus() {
   ctxStatus.innerText = audioCtx.state;
   if (audioCtx.state === 'suspended') {
     buttonElement.innerText = 'Resume';
   } else {
     buttonElement.innerText = 'Suspend';
   }
-}, 100);
+}
+
+setInterval(updateAudioCtxStatus, 1000);
+
+buttonElement.addEventListener('click', () => {
+  if (audioCtx.state === 'suspended') {
+    audioCtx.resume().then(updateAudioCtxStatus);
+  } else {
+    audioCtx.suspend().then(updateAudioCtxStatus);
+  }
+});
 
 const meterInstance = new webAudioPeakMeter.WebAudioPeakMeter(sourceNode);
@@ -112,24 +115,27 @@ Javascript code
 const sourceNode = audioCtx.createMediaElementSource(audioElement);
 sourceNode.connect(audioCtx.destination);
 
+const ctxStatus = document.getElementById('ctx-status');
 const buttonElement = document.getElementById('ctx-button');
-buttonElement.addEventListener('click', () => {
-  if (audioCtx.state === 'suspended') {
-    audioCtx.resume();
-  } else {
-    audioCtx.suspend();
-  }
-});
 
-const ctxStatus = document.getElementById('ctx-status');
-setInterval(() => {
+function updateAudioCtxStatus() {
   ctxStatus.innerText = audioCtx.state;
   if (audioCtx.state === 'suspended') {
     buttonElement.innerText = 'Resume';
   } else {
     buttonElement.innerText = 'Suspend';
   }
-}, 100);
+}
+
+setInterval(updateAudioCtxStatus, 1000);
+
+buttonElement.addEventListener('click', () => {
+  if (audioCtx.state === 'suspended') {
+    audioCtx.resume().then(updateAudioCtxStatus);
+  } else {
+    audioCtx.suspend().then(updateAudioCtxStatus);
+  }
+});
 
 const meterInstance = new webAudioPeakMeter.WebAudioPeakMeter(sourceNode);
diff --git a/docs/examples/osc.html b/docs/examples/osc.html
index 37c3dcd..94fd6b2 100644
--- a/docs/examples/osc.html
+++ b/docs/examples/osc.html
@@ -39,21 +39,24 @@ Javascript code
const audioCtx = new AudioContext();
 
 const ctxStatus = document.getElementById('ctx-status');
-setInterval(() => {
+const buttonElement = document.getElementById('ctx-button');
+
+function updateAudioCtxStatus() {
   ctxStatus.innerText = audioCtx.state;
   if (audioCtx.state === 'suspended') {
     buttonElement.innerText = 'Resume';
   } else {
     buttonElement.innerText = 'Suspend';
   }
-}, 100);
+}
+
+setInterval(updateAudioCtxStatus, 1000);
 
-const buttonElement = document.getElementById('ctx-button');
 buttonElement.addEventListener('click', () => {
   if (audioCtx.state === 'suspended') {
-    audioCtx.resume();
+    audioCtx.resume().then(updateAudioCtxStatus);
   } else {
-    audioCtx.suspend();
+    audioCtx.suspend().then(updateAudioCtxStatus);
   }
 });
 
@@ -90,21 +93,24 @@ Javascript code
 const audioCtx = new AudioContext();
 
 const ctxStatus = document.getElementById('ctx-status');
-setInterval(() => {
+const buttonElement = document.getElementById('ctx-button');
+
+function updateAudioCtxStatus() {
   ctxStatus.innerText = audioCtx.state;
   if (audioCtx.state === 'suspended') {
     buttonElement.innerText = 'Resume';
   } else {
     buttonElement.innerText = 'Suspend';
   }
-}, 100);
+}
+
+setInterval(updateAudioCtxStatus, 1000);
 
-const buttonElement = document.getElementById('ctx-button');
 buttonElement.addEventListener('click', () => {
   if (audioCtx.state === 'suspended') {
-    audioCtx.resume();
+    audioCtx.resume().then(updateAudioCtxStatus);
   } else {
-    audioCtx.suspend();
+    audioCtx.suspend().then(updateAudioCtxStatus);
   }
 });
diff --git a/docs/examples/pre-fader-video.html b/docs/examples/pre-fader-video.html
new file mode 100644
index 0000000..daa76b5
--- /dev/null
+++ b/docs/examples/pre-fader-video.html
@@ -0,0 +1,123 @@
+Pre-Fade Metering with Video Element
+
+Sometimes it is useful to display levels for an audio node, even if that level is getting modified somewhere downstream.
+
+Working Example
+
+The web audio API context is loading.
+
+HTML code
<p>The web audio API context is <span id="ctx-status">loading</span>. <button id="ctx-button">Loading</button></p>
+<div class="demo-video">
+  <video controls id="the-video" crossorigin="anonymous" style="width: 100%;">
+    <source src="https://assets.rpy.xyz/testmedia/hoops.mp4" type="video/mp4">
+    Sorry, your browser doesn't support embedded videos.
+  </video>
+  <div>
+    <input type="range" id="gain" name="gain" min="0" max="1" value="0" step="0.05">
+    <label for="gain">Gain</label>
+  </div>
+  <div id="peak-meter" style="height: 72px"></div>
+</div>
+
+Javascript code
+
+const audioCtx = new AudioContext();
+
+const ctxStatus = document.getElementById('ctx-status');
+const buttonElement = document.getElementById('ctx-button');
+
+function updateAudioCtxStatus() {
+  ctxStatus.innerText = audioCtx.state;
+  if (audioCtx.state === 'suspended') {
+    buttonElement.innerText = 'Resume';
+  } else {
+    buttonElement.innerText = 'Suspend';
+  }
+}
+
+setInterval(updateAudioCtxStatus, 1000);
+
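+// resume() and suspend() return promises, so refresh the button label as soon
+// as they settle instead of waiting for the next poll of updateAudioCtxStatus.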
+buttonElement.addEventListener('click', () => {
+  if (audioCtx.state === 'suspended') {
+    audioCtx.resume().then(updateAudioCtxStatus);
+  } else {
+    audioCtx.suspend().then(updateAudioCtxStatus);
+  }
+});
+
+const videoElement = document.getElementById('the-video');
+const meterElement = document.getElementById('peak-meter');
+const sourceNode = audioCtx.createMediaElementSource(videoElement);
+const gainNode = audioCtx.createGain();
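+// Start the gain at zero to match the slider's initial value below.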
+gainNode.gain.setValueAtTime(0, audioCtx.currentTime);
+
+sourceNode.connect(gainNode);
+gainNode.connect(audioCtx.destination);
+
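+// The meter is attached to sourceNode, upstream of gainNode, so it reports
+// pre-fader levels regardless of the slider position.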
+const unused = new webAudioPeakMeter.WebAudioPeakMeter(sourceNode, meterElement);
+
+const gainSlider = document.getElementById('gain');
+gainSlider.addEventListener('change', (evt) => {
+  gainNode.gain.setValueAtTime(evt.target.value, audioCtx.currentTime);
+});
+
+
diff --git a/docs/examples/variations.html b/docs/examples/variations.html
index de2acf7..784e47c 100644
--- a/docs/examples/variations.html
+++ b/docs/examples/variations.html
@@ -50,24 +50,27 @@ HTML code
 Javascript code
const audioCtx = new AudioContext();
 
+const ctxStatus = document.getElementById('ctx-status');
 const buttonElement = document.getElementById('ctx-button');
-buttonElement.addEventListener('click', () => {
-  if (audioCtx.state === 'suspended') {
-    audioCtx.resume();
-  } else {
-    audioCtx.suspend();
-  }
-});
 
-const ctxStatus = document.getElementById('ctx-status');
-setInterval(() => {
+function updateAudioCtxStatus() {
   ctxStatus.innerText = audioCtx.state;
   if (audioCtx.state === 'suspended') {
     buttonElement.innerText = 'Resume';
   } else {
     buttonElement.innerText = 'Suspend';
   }
-}, 100);
+}
+
+setInterval(updateAudioCtxStatus, 1000);
+
+buttonElement.addEventListener('click', () => {
+  if (audioCtx.state === 'suspended') {
+    audioCtx.resume().then(updateAudioCtxStatus);
+  } else {
+    audioCtx.suspend().then(updateAudioCtxStatus);
+  }
+});
 
 const audioElementOne = document.getElementById('audio-one');
 const audioElementTwo = document.getElementById('audio-two');
@@ -95,24 +98,27 @@ Javascript code