4 changes: 2 additions & 2 deletions Config/electron-launcher.json
@@ -2,12 +2,12 @@
"enableDevTools": true,
"devToolsWindowStartPosition": {
"x": 1912,
"y": -128
"y": -8
},
"devToolsWindowStartDimensions": {
"width": 1936,
"height": 1216
},
"maximizeDevToolsWindow": true,
"hideDevToolsWindowTitle": false
}
}
2 changes: 2 additions & 0 deletions Config/eslint.json
@@ -20,6 +20,8 @@
    },
    "globals": {
        "AddonOptionsFrame": "writable",
+       "AudioSource": "writable",
+       "AudioTrack": "writable",
        "assertApproximatelyEquals": "writable",
        "assertNotApproximatelyEquals": "writable",
        "assertTrue": "writable",
152 changes: 152 additions & 0 deletions Core/API/WebAudio/AudioSource.js
@@ -0,0 +1,152 @@
class AudioSource {
    static ERROR_INVALID_PLAYBACK_RATE = "Cannot set negative or zero playback rate; only positive values are supported";
    static ERROR_NEGATIVE_VOLUME_GAIN = "Cannot set negative volume gain; the lowest possible value is zero";
    static ERROR_NEGATIVE_TRANSITION_TIME = "Cannot set negative time value; the lowest possible value is zero";
    static ERROR_SOUND_NOT_READY = "Cannot play before audio is decoded; wait until decoding has finished or use playWhenReady(true)";

    constructor(filePath) {
        this.filePath = filePath;

        // BJS doesn't expose this, so we have to store it at this level (at the risk of being wrong)
        this.playbackRate = 1;

        // May be fetched from disk (TODO: It's blocking, use async?)
        const resource = C_Decoding.decodeFile(filePath);
        this.uniqueID = new UniqueID();
        // BJS uses WebAudio's context.decodeAudioData, which consumes the buffer, so we must copy it or the original will be empty (TODO: Test for this)
        function copy(src) {
            const dst = new ArrayBuffer(src.byteLength);
            new Uint8Array(dst).set(new Uint8Array(src));
            return dst;
        }
        const buffer = copy(resource.data);
        const currentScene = C_Rendering.getActiveScene();
        const sound = new BABYLON.Sound(this.getUniqueID(), buffer, currentScene);

        // Apply some sane defaults (can be overwritten at will)
        sound.spatialSound = true;
        sound.loop = false;
        sound.autoplay = false;
        sound.maxDistance = 100;
        sound.distanceModel = "linear";

        this.sound = sound;
        this.useHighQualityStereo(C_Settings.getValue("useHighQualityStereo"));
    }
    isUsingHighQualityStereo() {
        return this.isUsingHRTF;
    }
    useHighQualityStereo(enabledFlag) {
        // TODO Ensure only boolean can be passed
        if (!enabledFlag) this.sound.switchPanningModelToEqualPower();
        else this.sound.switchPanningModelToHRTF();

        this.isUsingHRTF = enabledFlag;
    }
    getDistanceModel() {
        return this.sound.distanceModel;
    }
    setDistanceModel(newDistanceModel) {
        // TODO Ensure it's a valid model string
        this.sound.distanceModel = newDistanceModel;
    }
    playWhenReady(shouldPlayOnLoad) {
        this.sound.autoplay = shouldPlayOnLoad;
    }
    setMaxDistance(distanceInWorldUnits) {
        this.sound.maxDistance = distanceInWorldUnits;
    }
    getMaxDistance() {
        return this.sound.maxDistance;
    }
    setLooping(isLooping) {
        return (this.sound.loop = isLooping);
    }
    isLooping() {
        return this.sound.loop;
    }
    isSpatialized() {
        return this.sound.spatialSound;
    }
    setSpatialized(isSoundSpatialized) {
        this.sound.spatialSound = isSoundSpatialized;
    }
    getFilePath() {
        return this.filePath;
    }
    startPlaying(transitionTimeInMilliseconds = 0) {
        this.playWhenReady(true); // Needed if the audio is still decoding (awkward...)
        this.sound.play(transitionTimeInMilliseconds / 1000); // ms to sec
    }
    stopPlaying(timeToFadeOutCompletelyInMilliseconds = 0) {
        this.sound.stop(timeToFadeOutCompletelyInMilliseconds / 1000); // ms to sec
    }
    isPaused() {
        return this.sound.isPaused;
    }
    isPlaying() {
        return this.sound.isPlaying;
    }
    setVolume(volumeGain, transitionTimeInMilliseconds = 0) {
        const usageError = "Usage: AudioSource.setVolume(Number volumeGain, Number transitionTimeInMilliseconds)";
        validateNumber(volumeGain, usageError);
        validateNumber(transitionTimeInMilliseconds, usageError);

        if (volumeGain < 0) throw new RangeError(AudioSource.ERROR_NEGATIVE_VOLUME_GAIN);
        if (transitionTimeInMilliseconds < 0) throw new RangeError(AudioSource.ERROR_NEGATIVE_TRANSITION_TIME);

        this.sound.setVolume(volumeGain, transitionTimeInMilliseconds / 1000); // ms to sec
    }
    getVolume() {
        return this.sound.getVolume();
    }
    pause() {
        this.sound.pause();
    }
    resume() {
        this.sound.play();
    }
    getCurrentTime() {
        return this.sound.currentTime * 1000; // s to ms
    }
    fadeOut(timeToFadeOutCompletelyInMilliseconds = 0) {
        const usageError = "Usage: AudioSource.fadeOut(Number timeToFadeOutCompletelyInMilliseconds)";
        validateNumber(timeToFadeOutCompletelyInMilliseconds, usageError);

        if (timeToFadeOutCompletelyInMilliseconds < 0) throw new RangeError(AudioSource.ERROR_NEGATIVE_TRANSITION_TIME);
        this.setVolume(0, timeToFadeOutCompletelyInMilliseconds);
    }
    fadeIn(volumeGain = 1, transitionTimeInMilliseconds = 0) {
        const usageError = "Usage: AudioSource.fadeIn(Number volumeGain, Number transitionTimeInMilliseconds)";
        validateNumber(volumeGain, usageError);
        validateNumber(transitionTimeInMilliseconds, usageError);

        if (volumeGain < 0) throw new RangeError(AudioSource.ERROR_NEGATIVE_VOLUME_GAIN);
        if (transitionTimeInMilliseconds < 0) throw new RangeError(AudioSource.ERROR_NEGATIVE_TRANSITION_TIME);

        this.setVolume(volumeGain, transitionTimeInMilliseconds);
    }
    getPlaybackRate() {
        return this.playbackRate;
    }
    setPlaybackRate(newPlaybackRate) {
        validateNumber(newPlaybackRate, "Usage: AudioSource.setPlaybackRate(Number newPlaybackRate)");

        if (newPlaybackRate <= 0) throw new RangeError(AudioSource.ERROR_INVALID_PLAYBACK_RATE);

        this.playbackRate = newPlaybackRate;
        this.sound.setPlaybackRate(newPlaybackRate);
    }
    getUniqueID() {
        return this.uniqueID.toString();
    }
    destroy() {
        if (this.sound === null) return false; // Already destroyed; all operations should fail

        this.sound.dispose();
        this.sound = null;

        return true;
    }
}
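
For quick reference, a minimal usage sketch of the AudioSource API added above; it is not part of the diff, and the asset path, distance, and fade values are illustrative placeholders only:

// Hypothetical usage of AudioSource (path and values are placeholder assumptions)
const footsteps = new AudioSource("Sounds/footsteps.ogg");
footsteps.setLooping(true);
footsteps.setMaxDistance(50); // inaudible beyond 50 world units with the default linear model
footsteps.playWhenReady(true); // start as soon as decoding has finished
footsteps.setVolume(0.8, 250); // fade to 80% gain over 250 ms
// Later, e.g. when the emitting entity despawns:
footsteps.fadeOut(500);
setTimeout(() => footsteps.destroy(), 500); // dispose only after the fadeout has finished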
162 changes: 162 additions & 0 deletions Core/API/WebAudio/AudioTrack.js
@@ -0,0 +1,162 @@
// var format = require("util").format;

class AudioTrack {
    // static ERROR_INVALID_VOICE_HANDLE = "Invalid voice handle";
    constructor() {
        this.volume = 1; // TODO set for each track in C_WebAudio
        this.isTrackMuted = false;
        this.useHRTF = C_Settings.getValue("useHighQualityStereo"); // TODO update via options

        const currentScene = C_Rendering.getActiveScene();
        this.soundtrack = new BABYLON.SoundTrack(currentScene);
    }
    getNumSounds() {
        return this.soundtrack.soundCollection.length;
    }
    getUniqueID() {
        return this.soundtrack.id;
    }
    addSound(sound) {
        this.soundtrack.addSound(sound);
    }
    removeSound(sound) {
        this.soundtrack.removeSound(sound);
    }
    connectAnalyzer(analyzer) {
        this.soundtrack.connectToAnalyser(analyzer);
    }
    destroy() {
        this.soundtrack.dispose();
    }
    setVolume(volumeGain) {
        this.volume = volumeGain;
        if (this.isTrackMuted) return; // Do not change the actual volume until it's unmuted

        this.soundtrack.setVolume(volumeGain);
    }
    isMuted() {
        return this.isTrackMuted;
    }
    mute() {
        this.isTrackMuted = true;
        this.soundtrack.setVolume(0);
    }
    unmute() {
        this.isTrackMuted = false;
        this.soundtrack.setVolume(this.volume);
    }
    useHighQualityStereo(enabledFlag) {
        if (enabledFlag) this.soundtrack.switchPanningModelToHRTF();
        else this.soundtrack.switchPanningModelToEqualPower();

        this.useHRTF = enabledFlag;
    }
    isUsingHighQualityStereo() {
        return this.useHRTF;
    }
    fadeOutStop(fadeoutTimeInMilliseconds = 500) {
        this.soundtrack.soundCollection.forEach((sound) => {
            DEBUG("Cleaning up audio source " + sound.name + " (track is being stopped)");
            sound.setVolume(0, fadeoutTimeInMilliseconds / 1000); // ms to s
            sound.stop(fadeoutTimeInMilliseconds / 1000);

            setTimeout(() => {
                // Defer cleanup so the fadeout has time to finish
                this.removeSound(sound);
                sound.dispose();
            }, fadeoutTimeInMilliseconds);
        });
    }
    // getAudioSource(soundHandleID) {
    // if (soundHandleID === undefined) throw new RangeError(AudioTrack.ERROR_INVALID_VOICE_HANDLE + ": " + soundHandleID);
    // return this.voices[soundHandleID];
    // }
    // addAudioSource(audioSource) {
    // const soundHandleID = audioSource.getUniqueID(); // New index at which the source will be inserted
    // audioSource.setVolume(this.volumeGain);
    // // Volumes need to be synchronized or some sounds will stick out like a sore thumb
    // this.voices[soundHandleID] = audioSource;
    // this.soundtrack.addSound(audioSource.sound);
    // audioSource.sound.onEndedObservable.add(() => {
    // DEBUG("Playback for audio source " + audioSource.uniqueID + " has ended");
    // this.removeAudioSource(soundHandleID);
    // });
    // this.numVoices++;
    // return soundHandleID;
    // }
    // removeAudioSource(soundHandleID) {
    // // ringbuffer, FIFO - size: music = 1, ambience = 10, sfx = 32, track.hasAudioSource to determine when it's removed? object pool?
    // DEBUG(format("Removing audio source %s", soundHandleID));
    // if (soundHandleID === undefined) {
    // throw new RangeError(AudioTrack.ERROR_INVALID_VOICE_HANDLE + ": " + soundHandleID);
    // }
    // const audioSource = this.voices[soundHandleID];
    // if (audioSource === undefined) return true;

    // audioSource.stopPlaying();
    // this.soundtrack.removeSound(audioSource.sound);
    // audioSource.destroy();

    // delete this.voices[soundHandleID];
    // this.numVoices--;

    // return audioSource;
    // }
    // purgeInactiveVoices() {
    // for (const soundHandleID of Object.keys(this.voices)) {
    // const audioSource = this.voices[soundHandleID];
    // if (!audioSource.isPlaying()) {
    // DEBUG(format("Purging inactive audio source %s (%s)", soundHandleID, audioSource.getFilePath()));
    // this.removeAudioSource(soundHandleID);
    // }
    // }
    // }
    // purgeAllVoices() {
    // for (const soundHandleID of Object.keys(this.voices)) {
    // const audioSource = this.voices[soundHandleID];
    // DEBUG(format("Purging audio source %s (%s)", soundHandleID, audioSource.getFilePath()));
    // this.removeAudioSource(soundHandleID);
    // }
    // }
    // fadeOutStop(fadeoutTimeInMilliseconds = 500) {
    // for (const soundHandleID of Object.keys(this.voices)) {
    // const audioSource = this.voices[soundHandleID];

    // audioSource.fadeOut(fadeoutTimeInMilliseconds);
    // audioSource.stopPlaying(fadeoutTimeInMilliseconds);
    // }
    // }
    // fadeInStart(fadeInTimeInMilliseconds = 500) {
    // for (const soundHandleID of Object.keys(this.voices)) {
    // const audioSource = this.voices[soundHandleID];
    // // Can't fade in properly if it starts blasting at a higher volume immediately
    // audioSource.setVolume(0);
    // audioSource.startPlaying();
    // audioSource.fadeIn(this.volumeGain, fadeInTimeInMilliseconds);
    // // audioSource.setVolume(this.volumeGain, timeToTransitionInMilliseconds);
    // }
    // }
    // getVolume() {
    // return this.volumeGain;
    // }
    // setVolume(volumeGain, timeToTransitionInMilliseconds = 0) {
    // this.volumeGain = volumeGain;
    // for (const soundHandleID of Object.keys(this.voices)) {
    // const audioSource = this.voices[soundHandleID];
    // audioSource.setVolume(volumeGain, timeToTransitionInMilliseconds);
    // }
    // }
    // getNumAudioSources() {
    // return this.numVoices;
    // }
    // getNumActiveVoices() {
    // let count = 0;

    // for (const soundHandleID of Object.keys(this.voices)) {
    // const audioSource = this.voices[soundHandleID];
    // if (audioSource.isPlaying()) count++;
    // }

    // return count;
    // }
}
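
Similarly, a short sketch of how AudioTrack might be combined with AudioSource instances; the names and values below are placeholders and not part of this PR:

// Hypothetical grouping of sounds onto a dedicated track (names and values are placeholders)
const musicTrack = new AudioTrack();
const theme = new AudioSource("Sounds/main-theme.ogg");
musicTrack.addSound(theme.sound); // AudioTrack wraps BABYLON.SoundTrack, which expects the underlying sound
theme.startPlaying();

musicTrack.setVolume(0.5); // remembered even while muted
musicTrack.mute();
musicTrack.unmute(); // restores the stored volume (0.5)
musicTrack.fadeOutStop(500); // fade out, stop, and dispose every sound on this track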