Add audio

bmathews 2025-11-15 16:33:18 -08:00
parent 077207ca27
commit 7a4792e4e8
7 changed files with 243 additions and 4 deletions

View file

@@ -9,6 +9,7 @@ import { ObserverControls } from "@/src/components/ObserverControls";
import { InspectorControls } from "@/src/components/InspectorControls";
import { SettingsProvider } from "@/src/components/SettingsProvider";
import { ObserverCamera } from "@/src/components/ObserverCamera";
import { AudioProvider } from "@/src/components/AudioContext";

// three.js has its own loaders for textures and models, but we need to load other
// stuff too, e.g. missions, terrains, and more. This client is used for those.

@@ -35,9 +36,11 @@ function MapInspector() {
    <main>
      <SettingsProvider>
        <Canvas shadows>
          <AudioProvider>
            <ObserverControls />
            <Mission key={missionName} name={missionName} />
            <ObserverCamera />
          </AudioProvider>
          <EffectComposer>
            <N8AO intensity={3} aoRadius={3} quality="performance" />
          </EffectComposer>

View file

@@ -0,0 +1,66 @@
import {
  createContext,
  useContext,
  useEffect,
  useState,
  ReactNode,
} from "react";
import { useThree } from "@react-three/fiber";
import { AudioListener, AudioLoader } from "three";

interface AudioContextType {
  audioLoader: AudioLoader | null;
  audioListener: AudioListener | null;
}

const AudioContext = createContext<AudioContextType | undefined>(undefined);

/**
 * AudioProvider initializes the AudioLoader and AudioListener for spatial audio.
 * Must be rendered inside the Canvas component.
 */
export function AudioProvider({ children }: { children: ReactNode }) {
  const { camera } = useThree();
  const [audioContext, setAudioContext] = useState<AudioContextType>({
    audioLoader: null,
    audioListener: null,
  });

  useEffect(() => {
    // Create audio loader
    const audioLoader = new AudioLoader();

    // Create listener on the camera if one isn't already present
    let listener = camera.children.find(
      (child) => child instanceof AudioListener
    ) as AudioListener | undefined;
    if (!listener) {
      listener = new AudioListener();
      camera.add(listener);
    }

    setAudioContext({
      audioLoader,
      audioListener: listener,
    });
  }, [camera]);

  return (
    <AudioContext.Provider value={audioContext}>
      {children}
    </AudioContext.Provider>
  );
}

/**
 * Hook to access audio resources (AudioLoader and AudioListener).
 * Must be used within an AudioProvider.
 */
export function useAudio(): AudioContextType {
  const context = useContext(AudioContext);
  if (context === undefined) {
    throw new Error("useAudio must be used within AudioProvider");
  }
  return context;
}
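
Not part of this commit, but for context: a minimal sketch of how another component rendered inside the Canvas could consume useAudio to play a one-shot, non-positional sound. The SoundTest component and the "ui/click.wav" file name are hypothetical, and the relative imports assume the file would sit in src/components alongside AudioEmitter.

import { useEffect } from "react";
import { Audio } from "three";
import { useAudio } from "./AudioContext";
import { audioToUrl } from "../loaders";

// Hypothetical example component; not part of this commit.
export function SoundTest() {
  const { audioLoader, audioListener } = useAudio();

  useEffect(() => {
    // Wait until AudioProvider has created the loader and listener
    if (!audioLoader || !audioListener) return;

    const sound = new Audio(audioListener);
    audioLoader.load(audioToUrl("ui/click.wav"), (buffer) => {
      sound.setBuffer(buffer);
      sound.setVolume(0.5);
      sound.play();
    });

    return () => {
      if (sound.isPlaying) sound.stop();
    };
  }, [audioLoader, audioListener]);

  return null;
}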

View file

@@ -0,0 +1,147 @@
import { useEffect, useRef } from "react";
import { useThree } from "@react-three/fiber";
import { PositionalAudio, Audio } from "three";
import { ConsoleObject, getPosition, getProperty } from "../mission";
import { audioToUrl } from "../loaders";
import { useAudio } from "./AudioContext";
import { useSettings } from "./SettingsProvider";

export function AudioEmitter({ object }: { object: ConsoleObject }) {
  const fileName = getProperty(object, "fileName")?.value ?? "";
  const volume = parseFloat(getProperty(object, "volume")?.value ?? "1");
  const minDistance = parseFloat(
    getProperty(object, "minDistance")?.value ?? "1"
  );
  const maxDistance = parseFloat(
    getProperty(object, "maxDistance")?.value ?? "1"
  );
  const minLoopGap = parseFloat(
    getProperty(object, "minLoopGap")?.value ?? "0"
  );
  const maxLoopGap = parseFloat(
    getProperty(object, "maxLoopGap")?.value ?? "0"
  );
  const is3D = parseInt(getProperty(object, "is3D")?.value ?? "0");
  const [z, y, x] = getPosition(object);

  const { scene } = useThree();
  const { audioLoader, audioListener } = useAudio();
  const { audioEnabled } = useSettings();

  const soundRef = useRef<PositionalAudio | Audio | null>(null);
  const loopTimerRef = useRef<NodeJS.Timeout | null>(null);
  const loopGapIntervalRef = useRef<NodeJS.Timeout | null>(null);

  useEffect(() => {
    if (!fileName || !audioLoader || !audioListener || !audioEnabled) {
      if (!fileName) {
        console.warn("AudioEmitter: No fileName provided");
      }
      if (!audioLoader) {
        console.warn("AudioEmitter: No audio loader available");
      }
      if (!audioListener) {
        console.warn("AudioEmitter: No audio listener available");
      }
      return;
    }

    const audioUrl = audioToUrl(fileName);
    let sound: PositionalAudio | Audio;

    // 3D emitters get positional audio with distance attenuation;
    // everything else plays globally at a capped volume
    if (is3D) {
      sound = new PositionalAudio(audioListener);
      sound.position.set(x - 1024, y, z - 1024);
      sound.setDistanceModel("exponential");
      sound.setRefDistance(minDistance / 25);
      sound.setMaxDistance(maxDistance / 50);
      sound.setVolume(volume);
    } else {
      sound = new Audio(audioListener);
      sound.setVolume(Math.min(volume, 0.5));
    }
    soundRef.current = sound;

    // Setup looping with a (possibly randomized) gap between plays
    const setupLooping = () => {
      if (minLoopGap > 0 || maxLoopGap > 0) {
        const gapMin = Math.max(0, minLoopGap);
        const gapMax = Math.max(gapMin, maxLoopGap);
        const gap =
          gapMin === gapMax
            ? gapMin
            : Math.random() * (gapMax - gapMin) + gapMin;
        sound.loop = false;

        // Poll until the audio ends, then restart after the gap.
        // (onEnded wasn't working.)
        const checkLoop = () => {
          if (sound.isPlaying === false) {
            loopTimerRef.current = setTimeout(() => {
              try {
                sound.play();
                setupLooping();
              } catch (err) {
                // Ignore playback errors (e.g. autoplay restrictions)
              }
            }, gap);
          } else {
            loopGapIntervalRef.current = setTimeout(checkLoop, 100);
          }
        };
        loopGapIntervalRef.current = setTimeout(checkLoop, 100);
      } else {
        sound.setLoop(true);
      }
    };

    // Load and play audio
    audioLoader.load(
      audioUrl,
      (audioBuffer: AudioBuffer) => {
        sound.setBuffer(audioBuffer);
        try {
          sound.play();
          setupLooping();
        } catch (err) {
          // Ignore playback errors (e.g. autoplay restrictions)
        }
      },
      undefined,
      (err: any) => {
        // Ignore load errors; the emitter simply stays silent
      }
    );

    // Add to scene
    scene.add(sound);

    return () => {
      if (loopTimerRef.current) {
        clearTimeout(loopTimerRef.current);
      }
      if (loopGapIntervalRef.current) {
        clearTimeout(loopGapIntervalRef.current);
      }
      try {
        sound.stop();
      } catch (e) {
        // May fail if already stopped
      }
      sound.disconnect();
      scene.remove(sound);
    };
  }, [
    fileName,
    volume,
    minLoopGap,
    maxLoopGap,
    is3D,
    minDistance,
    maxDistance,
    audioLoader,
    audioListener,
    audioEnabled,
    scene,
  ]);

  // Nothing visual to render; the audio objects are managed imperatively above
  return null;
}

View file

@@ -27,6 +27,8 @@ export function InspectorControls({
    setSpeedMultiplier,
    fov,
    setFov,
    audioEnabled,
    setAudioEnabled,
  } = useSettings();

  return (

@@ -56,6 +58,17 @@
        />
        <label htmlFor="fogInput">Fog?</label>
      </div>
      <div className="CheckboxField">
        <input
          id="audioInput"
          type="checkbox"
          checked={audioEnabled}
          onChange={(event) => {
            setAudioEnabled(event.target.checked);
          }}
        />
        <label htmlFor="audioInput">Audio?</label>
      </div>
      <div className="Field">
        <label htmlFor="fovInput">FOV</label>
        <input

View file

@@ -6,6 +6,7 @@ type PersistedSettings = {
  fogEnabled?: boolean;
  speedMultiplier?: number;
  fov?: number;
  audioEnabled?: boolean;
};

export function useSettings() {

@@ -16,6 +17,7 @@ export function SettingsProvider({ children }: { children: React.ReactNode }) {
  const [fogEnabled, setFogEnabled] = useState(true);
  const [speedMultiplier, setSpeedMultiplier] = useState(1);
  const [fov, setFov] = useState(90);
  const [audioEnabled, setAudioEnabled] = useState(false);

  const value = useMemo(
    () => ({

@@ -25,8 +27,10 @@ export function SettingsProvider({ children }: { children: React.ReactNode }) {
      setSpeedMultiplier,
      fov,
      setFov,
      audioEnabled,
      setAudioEnabled,
    }),
    [fogEnabled, speedMultiplier, fov, audioEnabled]
  );

  // Read persisted settings from localStorage.

View file

@@ -9,8 +9,10 @@ import { TSStatic } from "./TSStatic";
import { StaticShape } from "./StaticShape";
import { Item } from "./Item";
import { Turret } from "./Turret";
import { AudioEmitter } from "./AudioEmitter";

const componentMap = {
  AudioEmitter,
  InteriorInstance,
  Item,
  SimGroup,

View file

@@ -58,6 +58,10 @@ export function textureToUrl(name: string) {
  }
}

export function audioToUrl(fileName: string) {
  return getUrlForPath(`audio/${fileName}`);
}

export async function loadDetailMapList(name: string) {
  const url = getUrlForPath(`textures/${name}`);
  const res = await fetch(url);