
Commit 44b7477

Version 2.0.4

- Superpowered.getAudioContext(minimumSamplerate)
- Superpowered.getUserMediaForAudioAsync and Superpowered.createAudioNodeAsync
- Lower latency and transparent audio
- Updated example projects
- Localhost/HTTPS error handling
1 parent fefcae5 commit 44b7477

File tree

9 files changed: +159 −124 lines
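Taken together, the new helpers collapse the callback-based setup into one awaited flow. A minimal sketch of the 2.0.4 initialization path, assembled from the diffs below (the processor path, name, and message handling are illustrative):

    async function startAudio() {
        // AudioContext with a minimum sample rate; the actual rate may be equal or higher.
        let audioContext = Superpowered.getAudioContext(44100);

        // Microphone input with all input processing disabled ("fast and transparent" audio).
        let micStream = await Superpowered.getUserMediaForAudioAsync({ 'fastAndTransparentAudio': true })
            .catch((error) => console.log(error));
        if (!micStream) return; // reached even when permission was refused

        // Audio Worklet (or ScriptProcessorNode on older browsers) running a Superpowered processor.
        let audioNode = await Superpowered.createAudioNodeAsync(audioContext, '/processor.js', 'MyProcessor',
            (message) => console.log('Message from the audio node: ' + message));

        // microphone -> audioNode -> audio output
        audioContext.createMediaStreamSource(micStream).connect(audioNode);
        audioNode.connect(audioContext.destination);
    }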

README.md

Lines changed: 3 additions & 0 deletions
@@ -37,6 +37,9 @@ https://superpowered.com/js-wasm-sdk/example_timestretching/
 Real-time low-latency reverb and filter in the browser:\
 https://superpowered.com/js-wasm-sdk/example_effects/
 
+Real-time low-latency guitar distortion in the browser:\
+https://superpowered.com/js-wasm-sdk/example_guitardistortion/
+
 
 # Supported Web Browsers
 

docs.html

Lines changed: 26 additions & 1 deletion
@@ -90,14 +90,15 @@ <h3>Linear Memory</h3>
 
 <h3>Web Audio</h3>
 
+<p>Superpowered has helper functions for easier Web Audio initialization. The returned objects are standard Web Audio objects without any quirks. Please note that Web Audio requires a secure context: HTTPS or localhost.</p>
 <p>Visit the effects example project to see these in context for better understanding.</p>
 
 <pre><code class="language-js">
 // Returns with a standard AudioContext.
 // This function was made to help with browser-specific quirks.
 // Reference: https://developer.mozilla.org/en-US/docs/Web/API/AudioContext
 var audioContext = Superpowered.getAudioContext(
-    44100 // The sample rate to be used by the AudioContext.
+    44100 // The minimum sample rate of the AudioContext. The actual sample rate may be equal or higher.
 );
 
 // Prompts the user for permission to use a media input (typically the microphone) with an audio track and no video tracks. Has no return value.
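Since the argument is now only a minimum, code that assumes an exact rate should read it back from the returned context. A small check, not part of this commit, using the standard sampleRate property:

    var audioContext = Superpowered.getAudioContext(44100);
    // May log 44100, 48000 or higher, depending on the device.
    console.log('Actual sample rate: ' + audioContext.sampleRate);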
@@ -118,6 +119,18 @@ <h3>Web Audio</h3>
     }
 );
 
+// Asynchronous version of getUserMediaForAudio.
+// Returns with a standard MediaStream object or undefined on error.
+let audioInputStream = await Superpowered.getUserMediaForAudioAsync(
+    { // navigator.mediaDevices.getUserMedia constraints or "fastAndTransparentAudio" to disable all processing on the audio input
+        'fastAndTransparentAudio': true
+    },
+)
+.catch((error) => {
+    // Called when the user did not provide permission (typically for the microphone).
+});
+if (!audioInputStream) return; // Program flow will reach this point even on error.
+
 // Creates an Audio Worklet (for new browsers) or an audio processing ScriptProcessorNode (for older browsers).
 // This function was made to help with browser-specific quirks and to properly initialize Superpowered in an Audio Worklet context.
 var myAudioNode = null;
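The 'fastAndTransparentAudio' shortcut asks for an unprocessed input stream. In plain getUserMedia terms this presumably maps to constraints along the following lines; the mapping is an assumption, not taken from this commit:

    // Assumed plain-constraint equivalent of 'fastAndTransparentAudio':
    let stream = await navigator.mediaDevices.getUserMedia({
        audio: {
            echoCancellation: false, // no echo cancellation
            autoGainControl: false,  // no automatic gain control
            noiseSuppression: false  // no noise suppression
        },
        video: false
    });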
@@ -141,6 +154,18 @@ <h3>Web Audio</h3>
     });
     }
 );
+
+// Asynchronous version of createAudioNode.
+// Returns with a standard AudioNode or ScriptProcessorNode.
+let audioNode = await Superpowered.createAudioNodeAsync(
+    audioContext,                    // The standard AudioContext instance.
+    '/example_effects/processor.js', // The JavaScript module source of the node.
+    'MyProcessor',                   // The registered processor name.
+    function(message) {
+        // Runs in the main scope (main thread) when the audio node sends a message.
+        // message is a standard JavaScript object.
+    }
+);
 </code></pre>
 
 <h3>AudioWorkletProcessor</h3>
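The message callback passed to createAudioNodeAsync is the main-scope half of a two-way channel; the audio-scope half is not part of this diff, but presumably uses the processor's sendMessageToMainScope. A hedged sketch of that round trip, with a purely illustrative payload:

    // Audio scope (inside an AudioWorkletProcessor subclass) — assumed counterpart call:
    this.sendMessageToMainScope({ loaded: true });

    // Main scope: arrives in the callback passed to createAudioNodeAsync.
    function onMessageFromAudioScope(message) {
        if (message.loaded) console.log('The processor reported it is ready.');
    }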

example_effects/main.js

Lines changed: 17 additions & 35 deletions
@@ -12,48 +12,30 @@ function setState(newState) {
     document.getElementById('btn').innerText = state;
 }
 
-// called when the user provided permission for the microphone
-function onStreamCreated(micStream) {
-    let currentPath = window.location.pathname.substring(0, window.location.pathname.lastIndexOf('/'));
-
-    Superpowered.createAudioNode(audioContext, currentPath + '/processor.js', 'MyProcessor',
-        // runs after the audio node is created
-        function(newNode) {
-            // audioInput (microphone, etc.) -> audioNode -> audioContext.destination (audio output)
-            audioNode = newNode;
-            let audioInput = audioContext.createMediaStreamSource(micStream);
-            audioInput.connect(newNode);
-            newNode.connect(audioContext.destination);
-            setState(states.RUNNING);
-        },
-
-        // runs when the audio node sends a message
-        function(message) {
-            console.log('Message received from the audio node: ' + message);
-        }
-    );
-}
-
-// called when the user refused microphone permission
-function onStreamError(error) {
-    console.log(error);
-    setState(states.NOTRUNNING);
+function onMessageFromAudioScope(message) {
+    console.log('Message received from the audio node: ' + message);
 }
 
 // when the button is clicked
-function toggleAudio() {
+async function toggleAudio() {
     if (state == states.NOTRUNNING) {
         setState(states.INITIALIZING);
         audioContext = Superpowered.getAudioContext(44100);
 
-        // request microphone permission
-        Superpowered.getUserMediaForAudio(
-            { // navigator.mediaDevices.getUserMedia constraints
-                'echoCancellation': false
-            },
-            onStreamCreated,
-            onStreamError
-        );
+        let micStream = await Superpowered.getUserMediaForAudioAsync({ 'fastAndTransparentAudio': true })
+        .catch((error) => {
+            // called when the user refused microphone permission
+            console.log(error);
+            setState(states.NOTRUNNING);
+        });
+        if (!micStream) return;
+
+        let currentPath = window.location.pathname.substring(0, window.location.pathname.lastIndexOf('/'));
+        audioNode = await Superpowered.createAudioNodeAsync(audioContext, currentPath + '/processor.js', 'MyProcessor', onMessageFromAudioScope);
+        let audioInput = audioContext.createMediaStreamSource(micStream);
+        audioInput.connect(audioNode);
+        audioNode.connect(audioContext.destination);
+        setState(states.RUNNING);
     } else if (state == states.RUNNING) {
         // stop everything
         audioContext.close();
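main.js loads '/processor.js' under the name 'MyProcessor'; that file is unchanged in this commit and not shown here. For orientation only, the processor side follows the same AudioWorkletProcessor pattern as the new processor_live.js further down; a hedged sketch, assuming a 'Reverb' class is exposed the same way as 'GuitarDistortion' (the shipped processor.js may differ):

    import SuperpoweredModule from '../superpowered.js'

    var Superpowered = null;

    class MyProcessor extends SuperpoweredModule.AudioWorkletProcessor {
        onReady() {
            Superpowered = this.Superpowered;
            // 'Reverb' and its defaults are assumptions, mirroring the GuitarDistortion pattern.
            this.reverb = Superpowered.new('Reverb', Superpowered.samplerate);
            this.reverb.enabled = true;
        }

        processAudio(inputBuffer, outputBuffer, buffersize, parameters) {
            this.reverb.process(inputBuffer.pointer, outputBuffer.pointer, buffersize);
            return true;
        }
    }

    if (typeof AudioWorkletProcessor === 'function') registerProcessor('MyProcessor', MyProcessor);
    export default MyProcessor;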

example_guitardistortion/main.js

Lines changed: 48 additions & 38 deletions
@@ -84,18 +84,7 @@ function applyPreset(preset) {
     }
 }
 
-// we have the audio system created, let's display the UI and start playback
-function onAudioDecoded(buffer) {
-    // send the PCM audio to the audio node
-    audioNode.sendMessageToAudioScope({
-        left: buffer.getChannelData(0),
-        right: buffer.getChannelData(1) }
-    );
-
-    // audioNode -> audioContext.destination (audio output)
-    audioContext.suspend();
-    audioNode.connect(audioContext.destination);
-
+function startUserInterface() {
     // UI: innerHTML may be ugly but keeps this example relatively small
     content.innerHTML = '\
     <h3>Choose from these presets for A/B comparison:</h3>\
@@ -162,34 +151,54 @@ function onAudioDecoded(buffer) {
     applyPreset(presets.transparent);
 }
 
-// when the START button is clicked
-function start() {
+function onMessageFromAudioScope(message) {
+    console.log('Message received from the audio node: ' + message);
+}
+
+// when the START WITH GUITAR SAMPLE button is clicked
+async function startSample() {
     content.innerText = 'Creating the audio context and node...';
     audioContext = Superpowered.getAudioContext(44100);
     let currentPath = window.location.pathname.substring(0, window.location.pathname.lastIndexOf('/'));
+    audioNode = await Superpowered.createAudioNodeAsync(audioContext, currentPath + '/processor.js', 'MyProcessor', onMessageFromAudioScope);
 
-    Superpowered.createAudioNode(audioContext, currentPath + '/processor.js', 'MyProcessor',
-        // runs after the audio node is created
-        function(newNode) {
-            audioNode = newNode;
-            content.innerText = 'Downloading music...';
-
-            // downloading the music
-            let request = new XMLHttpRequest();
-            request.open('GET', 'track.wav', true);
-            request.responseType = 'arraybuffer';
-            request.onload = function() {
-                content.innerText = 'Decoding audio...';
-                audioContext.decodeAudioData(request.response, onAudioDecoded);
-            }
-            request.send();
-        },
-
-        // runs when the audio node sends a message
-        function(message) {
-            console.log('Message received from the audio node: ' + message);
-        }
-    );
+    content.innerText = 'Downloading music...';
+    let response = await fetch('track.wav');
+
+    content.innerText = 'Decoding audio...';
+    let rawData = await response.arrayBuffer();
+    audioContext.decodeAudioData(rawData, function(pcmData) { // Safari doesn't support await for decodeAudioData yet
+        // send the PCM audio to the audio node
+        audioNode.sendMessageToAudioScope({
+            left: pcmData.getChannelData(0),
+            right: pcmData.getChannelData(1) }
+        );
+
+        // audioNode -> audioContext.destination (audio output)
+        audioContext.suspend();
+        audioNode.connect(audioContext.destination);
+        startUserInterface();
+    });
+}
+
+// when the START WITH AUDIO INPUT button is clicked
+async function startInput() {
+    content.innerText = 'Creating the audio context and node...';
+    audioContext = Superpowered.getAudioContext(44100);
+
+    let micStream = await Superpowered.getUserMediaForAudioAsync({ 'fastAndTransparentAudio': true })
+    .catch((error) => {
+        // called when the user refused microphone permission
+        console.log(error);
+    });
+    if (!micStream) return;
+
+    let currentPath = window.location.pathname.substring(0, window.location.pathname.lastIndexOf('/'));
+    audioNode = await Superpowered.createAudioNodeAsync(audioContext, currentPath + '/processor_live.js', 'MyProcessor', onMessageFromAudioScope);
+    let audioInput = audioContext.createMediaStreamSource(micStream);
+    audioInput.connect(audioNode);
+    audioNode.connect(audioContext.destination);
+    startUserInterface();
 }
 
 Superpowered = SuperpoweredModule({
@@ -198,7 +207,8 @@ Superpowered = SuperpoweredModule({
 
     onReady: function() {
         content = document.getElementById('content');
-        content.innerHTML = '<button id="startButton">START</button>';
-        document.getElementById('startButton').addEventListener('click', start);
+        content.innerHTML = '<p>Use this if you just want to listen: <button id="startSample">START WITH GUITAR SAMPLE</button></p><p>Use this if you want to play the guitar live: <button id="startInput">START WITH AUDIO INPUT</button></p>';
+        document.getElementById('startSample').addEventListener('click', startSample);
+        document.getElementById('startInput').addEventListener('click', startInput);
     }
 });

example_guitardistortion/processor.js

Lines changed: 1 addition & 1 deletion
@@ -7,7 +7,7 @@ class MyProcessor extends SuperpoweredModule.AudioWorkletProcessor {
     onReady() {
         Superpowered = this.Superpowered;
         this.posFrames = -1;
-        // allocating some WASM memory for passing audio to the time stretcher
+        // allocating some WASM memory for passing audio to the effect
         this.pcm = Superpowered.createFloatArray(2048 * 2);
         // the star of the show
         this.distortion = Superpowered.new('GuitarDistortion', Superpowered.samplerate);
example_guitardistortion/processor_live.js (new file; the filename is not rendered in this view and is inferred from the '/processor_live.js' reference in main.js above)

Lines changed: 27 additions & 0 deletions
@@ -0,0 +1,27 @@
+import SuperpoweredModule from '../superpowered.js'
+
+var Superpowered = null;
+
+class MyProcessor extends SuperpoweredModule.AudioWorkletProcessor {
+    // runs after the constructor
+    onReady() {
+        Superpowered = this.Superpowered;
+        // the star of the show
+        this.distortion = Superpowered.new('GuitarDistortion', Superpowered.samplerate);
+        this.distortion.enabled = true;
+    }
+
+    onMessageFromMainScope(message) {
+        for (let property in message) {
+            if (typeof this.distortion[property] !== 'undefined') this.distortion[property] = message[property];
+        }
+    }
+
+    processAudio(inputBuffer, outputBuffer, buffersize, parameters) {
+        this.distortion.process(inputBuffer.pointer, outputBuffer.pointer, buffersize);
+        return true;
+    }
+}
+
+if (typeof AudioWorkletProcessor === 'function') registerProcessor('MyProcessor', MyProcessor);
+export default MyProcessor;
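Because onMessageFromMainScope copies any recognized property onto the distortion instance, the main scope can drive the effect with plain objects via sendMessageToAudioScope, as applyPreset does in main.js. A hedged sketch of the sending side; apart from enabled, the property names are illustrative:

    // Main scope: push parameter changes to the audio node.
    audioNode.sendMessageToAudioScope({
        enabled: true,   // processor_live.js sets this property in this commit
        gainDecibel: -6, // illustrative GuitarDistortion property
        drive: 0.5       // illustrative GuitarDistortion property
    });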

example_timestretching/main.js

Lines changed: 33 additions & 47 deletions
@@ -50,62 +50,48 @@ function togglePlayback(e) {
     }
 }
 
-// we have the audio system created, let's display the UI and start playback
-function onAudioDecoded(buffer) {
-    // send the PCM audio to the audio node
-    audioNode.sendMessageToAudioScope({
-        left: buffer.getChannelData(0),
-        right: buffer.getChannelData(1) }
-    );
-
-    // audioNode -> audioContext.destination (audio output)
-    audioContext.suspend();
-    audioNode.connect(audioContext.destination);
-
-    // UI: innerHTML may be ugly but keeps this example small
-    content.innerHTML = '\
-    <button id="playPause" value="0">PLAY</button>\
-    <p id="rateDisplay">original tempo</p>\
-    <input id="rateSlider" type="range" min="5000" max="20000" value="10000" style="width: 100%">\
-    <button id="pitchMinus" value="-1">-</button>\
-    <span id="pitchShiftDisplay"> pitch shift: 0 </span>\
-    <button id="pitchPlus" value="1">+</button>\
-    ';
-    document.getElementById('rateSlider').addEventListener('input', changeRate);
-    document.getElementById('rateSlider').addEventListener('dblclick', changeRateDbl);
-    document.getElementById('pitchMinus').addEventListener('click', changePitchShift);
-    document.getElementById('pitchPlus').addEventListener('click', changePitchShift);
-    document.getElementById('playPause').addEventListener('click', togglePlayback);
+function onMessageFromAudioScope(message) {
+    console.log('Message received from the audio node: ' + message);
 }
 
 // when the START button is clicked
-function start() {
+async function start() {
     content.innerText = 'Creating the audio context and node...';
     audioContext = Superpowered.getAudioContext(44100);
     let currentPath = window.location.pathname.substring(0, window.location.pathname.lastIndexOf('/'));
+    audioNode = await Superpowered.createAudioNodeAsync(audioContext, currentPath + '/processor.js', 'MyProcessor', onMessageFromAudioScope);
 
-    Superpowered.createAudioNode(audioContext, currentPath + '/processor.js', 'MyProcessor',
-        // runs after the audio node is created
-        function(newNode) {
-            audioNode = newNode;
-            content.innerText = 'Downloading music...';
+    content.innerText = 'Downloading music...';
+    let response = await fetch('track.wav');
 
-            // downloading the music
-            let request = new XMLHttpRequest();
-            request.open('GET', 'track.wav', true);
-            request.responseType = 'arraybuffer';
-            request.onload = function() {
-                content.innerText = 'Decoding audio...';
-                audioContext.decodeAudioData(request.response, onAudioDecoded);
-            }
-            request.send();
-        },
+    content.innerText = 'Decoding audio...'; console.log('new');
+    let rawData = await response.arrayBuffer();
+    audioContext.decodeAudioData(rawData, function(pcmData) { // Safari doesn't support await for decodeAudioData yet
+        // send the PCM audio to the audio node
+        audioNode.sendMessageToAudioScope({
+            left: pcmData.getChannelData(0),
+            right: pcmData.getChannelData(1) }
+        );
+
+        // audioNode -> audioContext.destination (audio output)
+        audioContext.suspend();
+        audioNode.connect(audioContext.destination);
 
-        // runs when the audio node sends a message
-        function(message) {
-            console.log('Message received from the audio node: ' + message);
-        }
-    );
+        // UI: innerHTML may be ugly but keeps this example small
+        content.innerHTML = '\
+        <button id="playPause" value="0">PLAY</button>\
+        <p id="rateDisplay">original tempo</p>\
+        <input id="rateSlider" type="range" min="5000" max="20000" value="10000" style="width: 100%">\
+        <button id="pitchMinus" value="-1">-</button>\
+        <span id="pitchShiftDisplay"> pitch shift: 0 </span>\
+        <button id="pitchPlus" value="1">+</button>\
+        ';
+        document.getElementById('rateSlider').addEventListener('input', changeRate);
+        document.getElementById('rateSlider').addEventListener('dblclick', changeRateDbl);
+        document.getElementById('pitchMinus').addEventListener('click', changePitchShift);
+        document.getElementById('pitchPlus').addEventListener('click', changePitchShift);
+        document.getElementById('playPause').addEventListener('click', togglePlayback);
+    });
 }
 
 Superpowered = SuperpoweredModule({
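The commit keeps the callback form of decodeAudioData because Safari doesn't support awaiting decodeAudioData yet. Where a fully awaited flow is preferred, the callback can be wrapped manually; a small sketch, not part of this commit:

    // Wrap the callback form so it can be awaited on every browser, including Safari.
    function decodeAudio(audioContext, rawData) {
        return new Promise(function(resolve, reject) {
            audioContext.decodeAudioData(rawData, resolve, reject);
        });
    }

    // usage inside start(): let pcmData = await decodeAudio(audioContext, rawData);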

superpowered-worker.js

Lines changed: 2 additions & 1 deletion
Generated file; the diff is not rendered.

superpowered.js

Lines changed: 2 additions & 1 deletion
Generated file; the diff is not rendered.

0 commit comments
