diff --git a/src/app.ts b/src/app.ts index 4db81cd792..1555ff2b0b 100644 --- a/src/app.ts +++ b/src/app.ts @@ -1,6 +1,6 @@ -import { addEntity, createWorld, IWorld } from "bitecs"; +import { addComponent, addEntity, createWorld, IWorld } from "bitecs"; import "./aframe-to-bit-components"; -import { AEntity, Networked, Object3DTag, Owned } from "./bit-components"; +import { AEntity, AudioListenerTag, Networked, Object3DTag, Owned } from "./bit-components"; import MediaSearchStore from "./storage/media-search-store"; import Store from "./storage/store"; import qsTruthy from "./utils/qs_truthy"; @@ -10,17 +10,7 @@ import HubChannel from "./utils/hub-channel"; import MediaDevicesManager from "./utils/media-devices-manager"; import { EffectComposer, EffectPass } from "postprocessing"; -import { - Audio, - AudioListener, - Material, - Object3D, - PerspectiveCamera, - PositionalAudio, - Scene, - sRGBEncoding, - WebGLRenderer -} from "three"; +import { Camera, Material, Object3D, PerspectiveCamera, Scene, sRGBEncoding, WebGLRenderer } from "three"; import { AudioSettings, SourceType } from "./components/audio-params"; import { createEffectsComposer } from "./effects"; import { DialogAdapter } from "./naf-dialog-adapter"; @@ -28,8 +18,8 @@ import { mainTick } from "./systems/hubs-systems"; import { waitForPreloads } from "./utils/preload"; import SceneEntryManager from "./scene-entry-manager"; import { store } from "./utils/store-instance"; -import { addObject3DComponent } from "./utils/jsx-entity"; import { ElOrEid } from "./utils/bit-utils"; +import { addObject3DComponent } from "./utils/jsx-entity"; declare global { interface Window { @@ -79,7 +69,8 @@ export class App { mediaSearchStore = new MediaSearchStore(); - audios = new Map(); + audios = new Map(); + gains = new Map(); sourceType = new Map(); audioOverrides = new Map>(); zoneOverrides = new Map>(); @@ -91,6 +82,7 @@ export class App { audioDebugPanelOverrides = new Map>(); sceneAudioDefaults = new Map>(); moderatorAudioSource = new Set(); + audioSources = new Map(); world: HubsWorld = createWorld(); @@ -98,7 +90,8 @@ export class App { sid2str: Map; nextSid = 1; - audioListener: AudioListener; + audioCtx: AudioContext; + camera: Camera; dialog = new DialogAdapter(); @@ -124,6 +117,8 @@ export class App { this.world.deletedNids = new Set(); this.world.ignoredNids = new Set(); + this.audioCtx = new AudioContext(); + // used in aframe and networked aframe to avoid imports this.world.nameToComponent = { object3d: Object3DTag, @@ -166,7 +161,7 @@ export class App { canvas.dataset.aframeCanvas = "true"; // TODO this comes from aframe and prevents zoom on ipad. 
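// ----------------------------------------------------------------------------------------
// Editor's annotation (illustrative sketch, not part of the patch): app.ts now owns a single
// shared AudioContext (APP.audioCtx) and tracks each emitter's raw Web Audio nodes in the new
// `audios`, `gains`, and `audioSources` maps instead of wrapping them in THREE.Audio /
// PositionalAudio objects. The per-emitter routing the rest of this diff adopts looks roughly
// like the hypothetical helper below (`createEmitterChain` is not a function added by the diff):
function createEmitterChain(ctx: AudioContext, mediaEl: HTMLMediaElement, positional: boolean) {
  const source = ctx.createMediaElementSource(mediaEl);                           // kept in APP.audioSources
  const spatializer = positional ? ctx.createPanner() : ctx.createStereoPanner(); // kept in APP.audios
  const gain = ctx.createGain();                                                  // kept in APP.gains
  gain.gain.value = 0;                  // start silent so the audio systems can fade the volume in
  source.connect(spatializer);
  spatializer.connect(gain);            // the gain node is what gets passed to audioSystem.addAudio()
  return { source, spatializer, gain };
}
// ----------------------------------------------------------------------------------------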
- // This should alreeady be handleed by disable-ios-zoom but it does not appear to work + // This should already be handled by disable-ios-zoom but it does not appear to work canvas.addEventListener("touchmove", function (event) { event.preventDefault(); }); @@ -196,13 +191,11 @@ export class App { sceneEl.appendChild(renderer.domElement); const camera = new PerspectiveCamera(80, window.innerWidth / window.innerHeight, 0.05, 10000); - - const audioListener = new AudioListener(); - this.audioListener = audioListener; - const audioListenerEid = addEntity(this.world); - addObject3DComponent(this.world, audioListenerEid, this.audioListener); - - camera.add(audioListener); + const listenerEid = addEntity(this.world); + addObject3DComponent(this.world, listenerEid, camera); + addComponent(this.world, AudioListenerTag, listenerEid); + const audioListener = this.audioCtx.listener; + this.camera = camera; this.world.time = { delta: 0, @@ -215,7 +208,7 @@ export class App { this.world.scene = scene; resolvePromiseToScene(scene); - // We manually call scene.updateMatrixWolrd in mainTick + // We manually call scene.updateMatrixWorld in mainTick scene.autoUpdate = false; if (enablePostEffects) { diff --git a/src/bit-components.js b/src/bit-components.js index 97eb25a88e..5df3dcb19f 100644 --- a/src/bit-components.js +++ b/src/bit-components.js @@ -222,7 +222,6 @@ export const VideoMenu = defineComponent({ export const AudioEmitter = defineComponent({ flags: Types.ui8 }); -AudioEmitter.audios = new Map(); AudioEmitter.params = new Map(); export const AudioSettingsChanged = defineComponent(); export const Deletable = defineComponent(); @@ -341,3 +340,4 @@ export const LinearScale = defineComponent({ targetY: Types.f32, targetZ: Types.f32 }); +export const AudioListenerTag = defineComponent(); diff --git a/src/bit-systems/audio-debug-system.ts b/src/bit-systems/audio-debug-system.ts index ba624dc852..c4560fc055 100644 --- a/src/bit-systems/audio-debug-system.ts +++ b/src/bit-systems/audio-debug-system.ts @@ -5,7 +5,7 @@ import { getScene, HubsWorld } from "../app"; import { NavMesh } from "../bit-components"; import { DistanceModelType } from "../components/audio-params"; import { getWebGLVersion } from "../utils/webgl"; -import { AudioObject3D, isPositionalAudio } from "./audio-emitter-system"; +import { getAudioOrientation, getAudioPosition, isPositionalAudio } from "./audio-emitter-system"; import { Mesh, Material, Vector3, ShaderMaterial } from "three"; import { disposeMaterial } from "../utils/three-utils"; import { ElOrEid } from "../utils/bit-utils"; @@ -159,16 +159,17 @@ export function audioDebugSystem(world: HubsWorld) { isEnabled && addDebugMaterial(world, navEid); }); let idx = 0; - APP.audios.forEach((audio: AudioObject3D, audioEmitterId: ElOrEid) => { + APP.audios.forEach((audio: AudioNode, audioEmitterId: ElOrEid) => { if (APP.isAudioPaused.has(audioEmitterId) || APP.mutedState.has(audioEmitterId)) { return; } if (idx >= maxDebugEmitters) return; - audio.getWorldPosition(emitterPos); - audio.getWorldDirection(emitterDir); + const panner = isPositionalAudio(audio) ? audio : fakePanner; + const gain = APP.gains.get(audioEmitterId)!; - const panner = isPositionalAudio(audio) ? 
audio.panner : fakePanner; + getAudioPosition(audioEmitterId, emitterPos); + getAudioOrientation(audioEmitterId, emitterDir); uniforms.sourcePositions[idx] = emitterPos.clone(); uniforms.sourceOrientations[idx] = emitterDir.clone(); @@ -185,7 +186,7 @@ export function audioDebugSystem(world: HubsWorld) { uniforms.rolloffFactors[idx] = panner.rolloffFactor; uniforms.coneInnerAngles[idx] = panner.coneInnerAngle; uniforms.coneOuterAngles[idx] = panner.coneOuterAngle; - uniforms.gains[idx] = audio.gain.gain.value; + uniforms.gains[idx] = gain.gain.value; uniforms.clipped[idx] = APP.clippingState.has(audioEmitterId) ? 1 : 0; idx++; diff --git a/src/bit-systems/audio-emitter-system.ts b/src/bit-systems/audio-emitter-system.ts index 070997c344..d37ebac640 100644 --- a/src/bit-systems/audio-emitter-system.ts +++ b/src/bit-systems/audio-emitter-system.ts @@ -1,77 +1,145 @@ -import { addComponent, addEntity, defineQuery, removeComponent } from "bitecs"; -import { - PositionalAudio, - Audio as StereoAudio, - AudioListener as ThreeAudioListener, - MeshStandardMaterial, - Mesh -} from "three"; +import { addComponent, defineQuery, exitQuery, removeComponent } from "bitecs"; +import { MeshStandardMaterial, Mesh, Vector3, Object3D, Quaternion } from "three"; import { HubsWorld } from "../app"; -import { AudioEmitter, AudioSettingsChanged } from "../bit-components"; +import { AudioEmitter, AudioSettingsChanged, FloatyObject } from "../bit-components"; import { AudioType, SourceType } from "../components/audio-params"; import { AudioSystem } from "../systems/audio-system"; import { applySettings, getCurrentAudioSettings, updateAudioSettings } from "../update-audio-settings"; -import { addObject3DComponent, swapObject3DComponent } from "../utils/jsx-entity"; +import { EntityID } from "../utils/networking-types"; +import { ElOrEid, findAncestorWithComponent } from "../utils/bit-utils"; +import { BodyAtRest } from "../systems/floaty-object-system"; -export type AudioObject3D = StereoAudio | PositionalAudio; -type AudioConstructor = new (listener: ThreeAudioListener) => T; - -export const Emitter2Audio = (AudioEmitter as any).audios as Map; export const Emitter2Params = (AudioEmitter as any).params as Map; -export function isPositionalAudio(node: AudioObject3D): node is PositionalAudio { - return (node as any).panner !== undefined; +export function isPositionalAudio(node: AudioNode): node is PannerNode { + return node instanceof PannerNode; } -export function cleanupAudio(audio: AudioObject3D) { - const eid = audio.eid!; +export const getAudioPosition = (() => { + const _position = new Vector3(); + const _quaternion = new Quaternion(); + const _scale = new Vector3(); + return (eid: ElOrEid, position: Vector3) => { + const node = APP.audios.get(eid)!; + if (node instanceof PannerNode) { + const panner = node as PannerNode; + return position.set(panner.positionX.value, panner.positionY.value, panner.positionZ.value); + } else { + if (typeof eid !== "number") { + eid = eid.eid; + } + const obj = APP.world.eid2obj.get(eid)!; + obj.updateMatrixWorld(); + obj.matrixWorld.decompose(_position, _quaternion, _scale); + position.copy(_position); + } + }; +})(); + +export const getAudioOrientation = (() => { + const _position = new Vector3(); + const _quaternion = new Quaternion(); + const _scale = new Vector3(); + return (eid: ElOrEid, orientation: Vector3) => { + const node = APP.audios.get(eid)!; + if (node instanceof PannerNode) { + const panner = node as PannerNode; + return orientation.set(panner.orientationX.value, 
panner.orientationY.value, panner.orientationZ.value); + } else { + if (typeof eid !== "number") { + eid = eid.eid; + } + const obj = APP.world.eid2obj.get(eid)!; + obj.updateMatrixWorld(); + obj.matrixWorld.decompose(_position, _quaternion, _scale); + orientation.set(0, 0, -1).applyQuaternion(_quaternion); + } + }; +})(); + +export const updatePannerNode = (() => { + const _position = new Vector3(); + const _quaternion = new Quaternion(); + const _scale = new Vector3(); + const _orientation = new Vector3(); + return (audio: PannerNode, obj: Object3D) => { + obj.updateMatrices(); + obj.matrixWorld.decompose(_position, _quaternion, _scale); + _orientation.set(0, 0, -1).applyQuaternion(_quaternion); + audio.positionX.setValueAtTime(_position.x, 0); + audio.positionY.setValueAtTime(_position.y, 0); + audio.positionZ.setValueAtTime(_position.z, 0); + audio.orientationX.setValueAtTime(_orientation.x, 0); + audio.orientationY.setValueAtTime(_orientation.y, 0); + audio.orientationZ.setValueAtTime(_orientation.z, 0); + }; +})(); + +export const updateAudio = (elOrEid: ElOrEid, obj: Object3D) => { + const audio = APP.audios.get(elOrEid)!; + const muted = !!APP.mutedState.has(elOrEid); + const clipped = !!APP.clippingState.has(elOrEid); + const isAudioPaused = !!APP.isAudioPaused.has(elOrEid); + if (isPositionalAudio(audio) && !muted && !clipped && !isAudioPaused) { + updatePannerNode(audio, obj); + } +}; + +export function cleanupAudio(eid: EntityID, audioSystem: AudioSystem) { + const audio = APP.audios.get(eid)!; + const gain = APP.gains.get(eid)!; + gain.disconnect(); audio.disconnect(); - const audioSystem = APP.scene?.systems["hubs-systems"].audioSystem; APP.audios.delete(eid); + APP.gains.delete(eid); APP.supplementaryAttenuation.delete(eid); APP.audioOverrides.delete(eid); audioSystem.removeAudio({ node: audio }); + APP.audioSources.delete(eid); } -function swapAudioType( - world: HubsWorld, - audioSystem: AudioSystem, - eid: number, - NewType: AudioConstructor -) { - const audio = world.eid2obj.get(eid)! 
as AudioObject3D; +export function swapAudioType(elOrEid: ElOrEid) { + const { audioType } = getCurrentAudioSettings(elOrEid); + let audio = APP.audios.get(elOrEid)!; + const mediaElement = APP.audioSources.get(elOrEid)!; + const gain = APP.gains.get(elOrEid)!; audio.disconnect(); - APP.sourceType.set(eid, SourceType.MEDIA_VIDEO); - APP.supplementaryAttenuation.delete(eid); - APP.audios.delete(eid); - audioSystem.removeAudio({ node: audio }); - - const newAudio = new NewType(APP.audioListener); - newAudio.setNodeSource(audio.source!); - audioSystem.addAudio({ sourceType: SourceType.MEDIA_VIDEO, node: newAudio }); - APP.audios.set(eid, newAudio); - - audio.parent!.add(newAudio); - audio.removeFromParent(); - - swapObject3DComponent(world, eid, newAudio); + APP.audios.delete(elOrEid); + if (audioType === AudioType.PannerNode) { + audio = APP.audioCtx.createPanner(); + } else { + audio = APP.audioCtx.createStereoPanner(); + } + audio.connect(gain); + APP.audios.set(elOrEid, audio); + mediaElement.connect(audio); } -export function makeAudioEntity(world: HubsWorld, source: number, sourceType: SourceType, audioSystem: AudioSystem) { - const eid = addEntity(world); +export function makeAudioEntity( + world: HubsWorld, + eid: number, + sourceType: SourceType, + audioSystem: AudioSystem, + overrideAudioType?: AudioType +) { APP.sourceType.set(eid, sourceType); + let { audioType } = getCurrentAudioSettings(eid); + overrideAudioType && (audioType = overrideAudioType); let audio; - const { audioType } = getCurrentAudioSettings(eid); - const audioListener = APP.audioListener; if (audioType === AudioType.PannerNode) { - audio = new PositionalAudio(audioListener); + audio = APP.audioCtx.createPanner(); } else { - audio = new StereoAudio(audioListener); + audio = APP.audioCtx.createStereoPanner(); } + const gain = APP.audioCtx.createGain(); + gain.gain.value = 0; + audio.connect(gain); + APP.audios.set(eid, audio); + APP.gains.set(eid, gain); if (sourceType === SourceType.MEDIA_VIDEO) { - const videoObj = world.eid2obj.get(source) as Mesh; + const videoObj = world.eid2obj.get(eid) as Mesh; const video = (videoObj.material as MeshStandardMaterial).map!.image as HTMLVideoElement; if (video.paused) { APP.isAudioPaused.add(eid); @@ -79,38 +147,52 @@ export function makeAudioEntity(world: HubsWorld, source: number, sourceType: So APP.isAudioPaused.delete(eid); } const audioSrcEl = video; - audio.setMediaElementSource(audioSrcEl); + const mediaElement = APP.audioCtx.createMediaElementSource(audioSrcEl); + APP.audioSources.set(eid, mediaElement); + mediaElement.connect(audio); // Original audio source volume can now be restored as audio systems will take over audioSrcEl.volume = 1; - audio.gain.gain.value = 0; + updateAudio(eid, videoObj); } addComponent(world, AudioEmitter, eid); - addObject3DComponent(world, eid, audio); - - audioSystem.addAudio({ sourceType, node: audio }); - - APP.audios.set(eid, audio); + audioSystem.addAudio({ sourceType, node: gain }); updateAudioSettings(eid, audio); - - return eid; } const staleAudioEmittersQuery = defineQuery([AudioEmitter, AudioSettingsChanged]); -export function audioEmitterSystem(world: HubsWorld, audioSystem: AudioSystem) { +const audioEmitterQuery = defineQuery([AudioEmitter]); +const audioEmitterExit = exitQuery(audioEmitterQuery); +export function audioEmitterSystem(world: HubsWorld) { staleAudioEmittersQuery(world).forEach(function (eid) { - const audio = world.eid2obj.get(eid)! 
as PositionalAudio | StereoAudio; + let audio = APP.audios.get(eid)!; const settings = getCurrentAudioSettings(eid); - const isPannerNode = isPositionalAudio(audio); - - // TODO this needs more testing - if (!isPannerNode && settings.audioType === AudioType.PannerNode) { - swapAudioType(world, audioSystem, eid, PositionalAudio); - } else if (isPannerNode && settings.audioType === AudioType.Stereo) { - swapAudioType(world, audioSystem, eid, StereoAudio); + if ( + (!isPositionalAudio(audio) && settings.audioType === AudioType.PannerNode) || + (isPositionalAudio(audio) && settings.audioType === AudioType.Stereo) + ) { + swapAudioType(eid); + audio = APP.audios.get(eid)!; + if (isPositionalAudio(audio)) { + const obj = APP.world.eid2obj.get(eid)!; + updatePannerNode(audio, obj); + } } - applySettings(audio, settings); + applySettings(eid, settings); removeComponent(world, AudioSettingsChanged, eid); }); + audioEmitterExit(world).forEach(eid => { + const audioSystem = APP.scene?.systems["hubs-systems"].audioSystem; + cleanupAudio(eid, audioSystem); + }); + audioEmitterQuery(world).forEach(eid => { + // For now we are only interested in updating movable media + const isFloaty = findAncestorWithComponent(APP.world, FloatyObject, eid); + const isAtRest = findAncestorWithComponent(APP.world, BodyAtRest, eid); + if (isFloaty && !isAtRest) { + const obj = APP.world.eid2obj.get(eid)!; + updateAudio(eid, obj); + } + }); } diff --git a/src/bit-systems/audio-listener-system.ts b/src/bit-systems/audio-listener-system.ts new file mode 100644 index 0000000000..ae435efc8a --- /dev/null +++ b/src/bit-systems/audio-listener-system.ts @@ -0,0 +1,51 @@ +import { Quaternion, Vector3 } from "three"; +import { HubsWorld } from "../app"; +import { defineQuery } from "bitecs"; +import { AudioListenerTag } from "../bit-components"; + +const _position = new Vector3(); +const _quaternion = new Quaternion(); +const _scale = new Vector3(); +const _orientation = new Vector3(); +const lastPosition = new Vector3(); +const lastOrientation = new Vector3(); +const lastUp = new Vector3(); + +const audioListenerQuery = defineQuery([AudioListenerTag]); +export function audioListenerSystem(world: HubsWorld) { + audioListenerQuery(world).forEach(eid => { + const obj = APP.world.eid2obj.get(eid)!; + const listener = APP.audioCtx.listener; + + const up = obj.up; + + obj.matrixWorld.decompose(_position, _quaternion, _scale); + + _orientation.set(0, 0, -1).applyQuaternion(_quaternion); + + const positionUpdated = !lastPosition.equals(_position); + const orientationUpdated = !lastOrientation.equals(_orientation); + const lastUpUpdated = !lastUp.equals(up); + if (positionUpdated || orientationUpdated || lastUpUpdated) { + if (listener.positionX) { + // code path for Chrome (see #14393) + listener.positionX.setValueAtTime(_position.x, 0); + listener.positionY.setValueAtTime(_position.y, 0); + listener.positionZ.setValueAtTime(_position.z, 0); + listener.forwardX.setValueAtTime(_orientation.x, 0); + listener.forwardY.setValueAtTime(_orientation.y, 0); + listener.forwardZ.setValueAtTime(_orientation.z, 0); + listener.upX.setValueAtTime(up.x, 0); + listener.upY.setValueAtTime(up.y, 0); + listener.upZ.setValueAtTime(up.z, 0); + } else { + // Although these methods are deprecated they are currently the only way to set the orientation and position in Firefox. 
+ listener.setPosition(_position.x, _position.y, _position.z); + listener.setOrientation(_orientation.x, _orientation.y, _orientation.z, up.x, up.y, up.z); + } + } + lastPosition.copy(_position); + lastOrientation.copy(_orientation); + lastUp.copy(up); + }); +} diff --git a/src/bit-systems/audio-target-system.ts b/src/bit-systems/audio-target-system.ts index 2f6233941e..eb47d16840 100644 --- a/src/bit-systems/audio-target-system.ts +++ b/src/bit-systems/audio-target-system.ts @@ -1,13 +1,13 @@ import { AComponent, AElement } from "aframe"; import { addComponent, defineQuery, enterQuery, exitQuery } from "bitecs"; -import { Vector3, Object3D, LineSegments, WireframeGeometry, SphereBufferGeometry, PositionalAudio } from "three"; +import { Vector3, Object3D, LineSegments, WireframeGeometry, SphereBufferGeometry } from "three"; import { HubsWorld } from "../app"; import { AudioSettingsChanged, AudioSource, AudioTarget } from "../bit-components"; import { SourceType } from "../components/audio-params"; import { getMediaStream } from "../components/avatar-audio-source"; import { AUDIO_SOURCE_FLAGS } from "../inflators/audio-source"; import { AudioSystem } from "../systems/audio-system"; -import { Emitter2Audio, makeAudioEntity } from "./audio-emitter-system"; +import { makeAudioEntity } from "./audio-emitter-system"; const createWhiteNoise = (audioContext: AudioContext, gain: number): AudioBufferSourceNode => { const bufferSize = 2 * audioContext.sampleRate, @@ -26,57 +26,47 @@ const createWhiteNoise = (audioContext: AudioContext, gain: number): AudioBuffer }; const addSourceToAudioTarget = (audioSourceEid: number, source: AudioNode) => { + removeSourceFromAudioTarget(audioSourceEid); const audioTargetEids = source2Target.get(audioSourceEid); audioTargetEids?.forEach(audioTargetEid => { - const audioEid = Emitter2Audio.get(audioTargetEid)!; - const targetAudio = APP.audios.get(audioEid)!; - try { - // The WebAudio API doesn't support checking if an audio node is already connected so we always disconnect it but don't log as it's an expected behavior and the error is not meaningful. - targetAudio.disconnect(); - } catch (e) {} - if (targetAudio instanceof PositionalAudio) { - targetAudio.panner.connect(targetAudio.gain); - } - targetAudio.setNodeSource(source as AudioBufferSourceNode); - targetAudio.connect(); + const target = target2Node.get(audioTargetEid)!; + source.connect(target); }); + source2Node.set(audioSourceEid, source); }; const removeSourceFromAudioTarget = (audioSourceEid: number) => { - const audioTargetEids = source2Target.get(audioSourceEid); - audioTargetEids?.forEach(audioTargetEid => { - const audioEid = Emitter2Audio.get(audioTargetEid)!; - const targetAudio = APP.audios.get(audioEid)!; - try { - // The WebAudio API doesn't support checking if an audio node is already connected so we always disconnect it but don't log as it's an expected behavior and the error is not meaningful. 
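// ----------------------------------------------------------------------------------------
// Editor's annotation (illustrative sketch, not part of the patch): audio-target-system drops
// the Emitter2Audio / THREE.Audio indirection in favour of two plain maps — source2Node (the
// AudioNode feeding each zone source) and target2Node (each target's panner or delay node) —
// and connects a source to every registered target whenever both exist. A hedged sketch of
// that bookkeeping, with hypothetical local names that only mirror the diff:
const sourceNodes = new Map<number, AudioNode>();
const targetNodes = new Map<number, AudioNode>();
function connectSourceToTargetNodes(sourceEid: number, node: AudioNode, targetEids: number[]) {
  // The Web Audio API cannot be queried for existing connections, so always reset the source first.
  sourceNodes.get(sourceEid)?.disconnect();
  targetEids.forEach(targetEid => {
    const target = targetNodes.get(targetEid);
    target && node.connect(target);
  });
  sourceNodes.set(sourceEid, node);
}
// ----------------------------------------------------------------------------------------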
- targetAudio.disconnect(); - } catch (e) {} - if (targetAudio instanceof PositionalAudio) { - targetAudio.panner.connect(targetAudio.gain); - } - }); + const node = source2Node.get(audioSourceEid)!; + node && node.disconnect(); + if (AudioSource.flags[audioSourceEid] & AUDIO_SOURCE_FLAGS.DEBUG) { + const whiteNoise = source2Noise.get(audioSourceEid)!; + whiteNoise.disconnect(); + } + source2Node.delete(audioSourceEid); }; -const connectSourceToTarget = (audioSourceEid: number, audioTargetEid: number) => { +const connectSourceToTargets = (audioSourceEid: number, audioTargetEid: number) => { let targetEids = source2Target.get(audioSourceEid); if (!targetEids?.includes(audioTargetEid)) { !targetEids && (targetEids = new Array()); targetEids.push(audioTargetEid); source2Target.set(audioSourceEid, targetEids); - if (AudioSource.flags[audioSourceEid] & AUDIO_SOURCE_FLAGS.DEBUG) { - const whiteNoise = createWhiteNoise(APP.audioListener.context, 0.01); - source2Noise.set(audioSourceEid, whiteNoise); - addSourceToAudioTarget(audioSourceEid, whiteNoise); + const source = source2Node.get(audioSourceEid); + const target = target2Node.get(audioTargetEid); + if (source && target) { + source.connect(target); } } }; const source2Noise = new Map(); +const source2Node = new Map(); const source2Target = new Map>(); const source2Emitter = new Map(); const source2Radius = new Map(); const source2Debug = new Map(); const sourceWorldPos = new Vector3(); +const target2Node = new Map(); const audioTargetQuery = defineQuery([AudioTarget]); const audioTargetEnterQuery = enterQuery(audioTargetQuery); @@ -88,42 +78,51 @@ export function audioTargetSystem(world: HubsWorld, audioSystem: AudioSystem) { const audioTargetEids = audioTargetQuery(world); const audioSourceEids = audioSourceQuery(world); audioTargetEnterQuery(world).forEach(audioTargetEid => { - const ctx = APP.audioListener.context; - const audioEid = makeAudioEntity(world, audioTargetEid, SourceType.AUDIO_TARGET, audioSystem); - Emitter2Audio.set(audioTargetEid, audioEid); - const audioObj = world.eid2obj.get(audioEid)!; - const audioTarget = world.eid2obj.get(audioTargetEid)!; - audioTarget.add(audioObj); + const ctx = APP.audioCtx; + makeAudioEntity(world, audioTargetEid, SourceType.AUDIO_TARGET, audioSystem); + const audio = APP.audios.get(audioTargetEid)!; const maxDelay = AudioTarget.maxDelay[audioTargetEid]; const minDelay = AudioTarget.minDelay[audioTargetEid]; if (maxDelay > 0) { - const audio = APP.audios.get(audioEid)!; const delayNode = ctx.createDelay(maxDelay); delayNode.delayTime.value = THREE.MathUtils.randFloat(minDelay, maxDelay); - audio.setFilters([delayNode]); + audio.disconnect(); + audio.connect(delayNode); + audioSystem.addAudio({ node: delayNode, sourceType: SourceType.AUDIO_TARGET }); + target2Node.set(audioTargetEid, delayNode); + } else { + audioSystem.addAudio({ node: audio, sourceType: SourceType.AUDIO_TARGET }); + target2Node.set(audioTargetEid, audio); } const audioSourceEid = AudioTarget.source[audioTargetEid]; if (audioSourceEids.includes(audioSourceEid)) { - connectSourceToTarget(audioSourceEid, audioTargetEid); + connectSourceToTargets(audioSourceEid, audioTargetEid); } const audioSettings = APP.audioOverrides.get(audioTargetEid)!; - APP.audioOverrides.set(audioEid, audioSettings); - addComponent(world, AudioSettingsChanged, audioEid); + APP.audioOverrides.set(audioTargetEid, audioSettings); + addComponent(world, AudioSettingsChanged, audioTargetEid); }); audioTargetExitQuery(world).forEach(audioTargetEid => { - 
Emitter2Audio.delete(audioTargetEid); const audioSourceEid = AudioTarget.source[audioTargetEid]; source2Target.delete(audioSourceEid); + const node = target2Node.get(audioTargetEid)!; + node.disconnect(); + target2Node.delete(audioTargetEid); APP.audioOverrides.delete(audioTargetEid); }); audioSourceEnterQuery(world).forEach(audioSourceEid => { const audioTargetEid = audioTargetEids.find( audioTargetEid => AudioTarget.source[audioTargetEid] === audioSourceEid ); - audioTargetEid && connectSourceToTarget(audioSourceEid, audioTargetEid); + if (AudioSource.flags[audioSourceEid] & AUDIO_SOURCE_FLAGS.DEBUG) { + const whiteNoise = createWhiteNoise(APP.audioCtx, 0.01); + source2Noise.set(audioSourceEid, whiteNoise); + addSourceToAudioTarget(audioSourceEid, whiteNoise); + } + audioTargetEid && connectSourceToTargets(audioSourceEid, audioTargetEid); // TODO this should probably be using bounds similar to media-frames and trigger-volume. // Doing the simple thing for now since we only support avatar audio sources currently @@ -140,10 +139,9 @@ export function audioTargetSystem(world: HubsWorld, audioSystem: AudioSystem) { }); audioSourceExitQuery(world).forEach(audioSourceEid => { removeSourceFromAudioTarget(audioSourceEid); - source2Target.delete(audioSourceEid); - const noise = source2Noise.get(audioSourceEid); - noise?.disconnect(); source2Noise.delete(audioSourceEid); + source2Node.delete(audioSourceEid); + source2Target.delete(audioSourceEid); source2Radius.delete(audioSourceEid); source2Emitter.delete(audioSourceEid); if (AudioSource.flags[audioSourceEid] & AUDIO_SOURCE_FLAGS.DEBUG) { @@ -202,9 +200,7 @@ export function audioTargetSystem(world: HubsWorld, audioSystem: AudioSystem) { removeSourceFromAudioTarget(audioSourceEid); } else { getMediaStream(avatar).then(stream => { - const audioListener = APP.audioListener; - const ctx = audioListener.context; - const node = ctx.createMediaStreamSource(stream); + const node = APP.audioCtx.createMediaStreamSource(stream); addSourceToAudioTarget(audioSourceEid, node); }); } diff --git a/src/bit-systems/audio-zone-system.ts b/src/bit-systems/audio-zone-system.ts index c2d2b2d701..2cc31372ac 100644 --- a/src/bit-systems/audio-zone-system.ts +++ b/src/bit-systems/audio-zone-system.ts @@ -1,12 +1,12 @@ import { defineQuery, enterQuery, exitQuery } from "bitecs"; import { getScene, HubsWorld } from "../app"; -import { AudioZone } from "../bit-components"; +import { AudioListenerTag, AudioZone } from "../bit-components"; import { Box3, BoxGeometry, DoubleSide, MeshBasicMaterial, Object3D, Ray, Vector3, Mesh, BoxHelper } from "three"; import { AUDIO_ZONE_FLAGS } from "../inflators/audio-zone"; import { disposeMaterial, disposeNode } from "../utils/three-utils"; import { AudioSettings } from "../components/audio-params"; -import { AudioObject3D } from "./audio-emitter-system"; -import { ElOrEid } from "../utils/bit-utils"; +import { getAudioPosition } from "./audio-emitter-system"; +import { ElOrEid, anyEntityWith } from "../utils/bit-utils"; import { updateAudioSettings } from "../update-audio-settings"; const debugObjects = new Map(); @@ -105,12 +105,7 @@ const isUpdated = (currZones: Set, prevZones: Set) => { const getEmitterPosition = (() => { const pos = new Vector3(); return (emitterId: ElOrEid) => { - const audio = APP.audios.get(emitterId); - if (audio) { - audio.getWorldPosition(pos); - } else { - pos.set(0, 0, 0); - } + getAudioPosition(emitterId, pos); return pos; }; })(); @@ -281,7 +276,7 @@ export function audioZoneSystem(world: HubsWorld) { 
aabbs.delete(zoneEid); debugObjects.delete(zoneEid); - APP.audios.forEach((_: AudioObject3D, emitterId: ElOrEid) => { + APP.audios.forEach((_: AudioNode, emitterId: ElOrEid) => { restoreEmitterParams(emitterId); currZones.delete(emitterId); prevZones.delete(emitterId); @@ -294,22 +289,22 @@ export function audioZoneSystem(world: HubsWorld) { const zones = audioZoneQuery(world); if (!zones.length) return; - const listener = APP.audioListener.eid!; - - APP.audioListener.getWorldPosition(listenerPos); + const listenerEid = anyEntityWith(world, AudioListenerTag)!; + const listener = APP.world.eid2obj.get(listenerEid)!; + listener.getWorldPosition(listenerPos); zones.forEach(zoneEid => { - !currZones.has(listener) && currZones.set(listener, new Set()); - !prevZones.has(listener) && prevZones.set(listener, new Set()); - addOrRemoveZone(currZones.get(listener)!, zoneEid, listenerPos); - APP.audios.forEach((_: AudioObject3D, emitterId: ElOrEid) => { + !currZones.has(listenerEid) && currZones.set(listenerEid, new Set()); + !prevZones.has(listenerEid) && prevZones.set(listenerEid, new Set()); + addOrRemoveZone(currZones.get(listenerEid)!, zoneEid, listenerPos); + APP.audios.forEach((_: AudioNode, emitterId: ElOrEid) => { !currZones.has(emitterId) && currZones.set(emitterId, new Set()); !prevZones.has(emitterId) && prevZones.set(emitterId, new Set()); addOrRemoveZone(currZones.get(emitterId)!, zoneEid, getEmitterPosition(emitterId)); }); }); - const isListenerUpdated = isUpdated(currZones.get(listener)!, prevZones.get(listener)!); - APP.audios.forEach((_: AudioObject3D, emitterId: ElOrEid) => { + const isListenerUpdated = isUpdated(currZones.get(listenerEid)!, prevZones.get(listenerEid)!); + APP.audios.forEach((_: AudioNode, emitterId: ElOrEid) => { const isEmitterUpdated = isUpdated(currZones.get(emitterId)!, prevZones.get(emitterId)!); if (isListenerUpdated || isEmitterUpdated) { updateEmitter( @@ -317,11 +312,11 @@ export function audioZoneSystem(world: HubsWorld) { getEmitterPosition(emitterId), currZones.get(emitterId)!, listenerPos, - currZones.get(listener)! + currZones.get(listenerEid)! ); } }); - APP.audios.forEach((_: AudioObject3D, emitterId: ElOrEid) => clearEntity(emitterId)); - clearEntity(APP.audioListener.eid!); + APP.audios.forEach((_: AudioNode, emitterId: ElOrEid) => clearEntity(emitterId)); + clearEntity(listenerEid); } diff --git a/src/bit-systems/camera-tool.js b/src/bit-systems/camera-tool.js index 0ce7cf8411..4dfcbe0ff7 100644 --- a/src/bit-systems/camera-tool.js +++ b/src/bit-systems/camera-tool.js @@ -96,13 +96,11 @@ function createRecorder(captureAudio) { // if no audio comes through on the listener source. (Eg the room is otherwise silent.) // So for now, if we don't have a track, just disable audio capture. if (captureAudio && APP.dialog._micProducer?.track) { - const context = THREE.AudioContext.getContext(); - const destination = context.createMediaStreamDestination(); - if (APP.audioListener) { - // NOTE audio is not captured from camera vantage point for now. - APP.audioListener.getInput().connect(destination); - } - context.createMediaStreamSource(new MediaStream([APP.dialog._micProducer?.track])).connect(destination); + const destination = APP.audioCtx.createMediaStreamDestination(); + const audioSystem = APP.scene.systems["hubs-systems"].audioSystem; + // NOTE audio is not captured from camera vantage point for now. 
+ audioSystem.getListenerInput().connect(destination); + APP.audioCtx.createMediaStreamSource(new MediaStream([APP.dialog._micProducer?.track])).connect(destination); srcAudioTrack = destination.stream.getAudioTracks()[0]; } diff --git a/src/bit-systems/object-menu.ts b/src/bit-systems/object-menu.ts index 44840632df..420723e414 100644 --- a/src/bit-systems/object-menu.ts +++ b/src/bit-systems/object-menu.ts @@ -1,4 +1,4 @@ -import { defineQuery, enterQuery, entityExists, exitQuery, hasComponent } from "bitecs"; +import { addComponent, defineQuery, enterQuery, entityExists, exitQuery, hasComponent, removeComponent } from "bitecs"; import { Matrix4, Quaternion, Vector3 } from "three"; import type { HubsWorld } from "../app"; import { @@ -20,6 +20,7 @@ import { deleteTheDeletableAncestor } from "./delete-entity-system"; import { createMessageDatas, isPinned } from "./networking"; import { TRANSFORM_MODE } from "../components/transform-object-button"; import { ScalingHandler } from "../components/scale-button"; +import { BodyAtRest } from "../systems/floaty-object-system"; // Working variables. const _vec3_1 = new Vector3(); @@ -88,11 +89,13 @@ function startRotation(world: HubsWorld, targetEid: EntityID) { transformSystem.startTransform(world.eid2obj.get(targetEid)!, world.eid2obj.get(rightCursorEid)!, { mode: TRANSFORM_MODE.CURSOR }); + removeComponent(APP.world, BodyAtRest, targetEid); } -function stopRotation() { +function stopRotation(world: HubsWorld, targetEid: EntityID) { const transformSystem = APP.scene!.systems["transform-selected-object"]; transformSystem.stopTransform(); + addComponent(APP.world, BodyAtRest, targetEid); } function startScaling(world: HubsWorld, targetEid: EntityID) { @@ -188,7 +191,7 @@ function handleHeldEnter(world: HubsWorld, eid: EntityID, menuEid: EntityID) { function handleHeldExit(world: HubsWorld, eid: EntityID, menuEid: EntityID) { switch (eid) { case ObjectMenu.rotateButtonRef[menuEid]: - stopRotation(); + stopRotation(world, ObjectMenu.targetRef[menuEid]); break; case ObjectMenu.scaleButtonRef[menuEid]: stopScaling(world); diff --git a/src/bit-systems/video-menu-system.ts b/src/bit-systems/video-menu-system.ts index 880784e4e0..50497d7003 100644 --- a/src/bit-systems/video-menu-system.ts +++ b/src/bit-systems/video-menu-system.ts @@ -21,7 +21,6 @@ import { animate } from "../utils/animate"; import { coroutine } from "../utils/coroutine"; import { easeOutQuadratic } from "../utils/easing"; import { isFacingCamera } from "../utils/three-utils"; -import { Emitter2Audio } from "./audio-emitter-system"; const videoMenuQuery = defineQuery([VideoMenu]); const hoverRightVideoQuery = defineQuery([HoveredRemoteRight, MediaVideo]); @@ -93,16 +92,15 @@ export function videoMenuSystem(world: HubsWorld, userinput: any) { const playIndicatorObj = world.eid2obj.get(VideoMenu.playIndicatorRef[eid])!; const pauseIndicatorObj = world.eid2obj.get(VideoMenu.pauseIndicatorRef[eid])!; - const audioEid = Emitter2Audio.get(videoEid)!; if (video.paused) { video.play(); - APP.isAudioPaused.delete(audioEid); + APP.isAudioPaused.delete(videoEid); playIndicatorObj.visible = true; pauseIndicatorObj.visible = false; rightMenuIndicatorCoroutine = coroutine(animateIndicator(world, VideoMenu.playIndicatorRef[eid])); } else { video.pause(); - APP.isAudioPaused.add(audioEid); + APP.isAudioPaused.add(videoEid); playIndicatorObj.visible = false; pauseIndicatorObj.visible = true; rightMenuIndicatorCoroutine = coroutine(animateIndicator(world, VideoMenu.pauseIndicatorRef[eid])); diff --git 
a/src/bit-systems/video-system.ts b/src/bit-systems/video-system.ts index 8f0708ef2b..a7c9e40577 100644 --- a/src/bit-systems/video-system.ts +++ b/src/bit-systems/video-system.ts @@ -1,4 +1,4 @@ -import { addComponent, defineQuery, enterQuery, exitQuery, hasComponent } from "bitecs"; +import { addComponent, defineQuery, enterQuery, exitQuery, hasComponent, removeComponent } from "bitecs"; import { Mesh, MeshStandardMaterial } from "three"; import { HubsWorld } from "../app"; import { @@ -13,7 +13,7 @@ import { import { SourceType } from "../components/audio-params"; import { AudioSystem } from "../systems/audio-system"; import { findAncestorWithComponent } from "../utils/bit-utils"; -import { Emitter2Audio, Emitter2Params, makeAudioEntity } from "./audio-emitter-system"; +import { Emitter2Params, makeAudioEntity } from "./audio-emitter-system"; import { takeSoftOwnership } from "../utils/take-soft-ownership"; enum Flags { @@ -38,28 +38,21 @@ export function videoSystem(world: HubsWorld, audioSystem: AudioSystem) { console.error("Error auto-playing video."); }); } - const audioEid = makeAudioEntity(world, videoEid, SourceType.MEDIA_VIDEO, audioSystem); - Emitter2Audio.set(videoEid, audioEid); - const audio = world.eid2obj.get(audioEid)!; - videoObj.add(audio); - // Note in media-video we call updateMatrixWorld here to force PositionalAudio's updateMatrixWorld to run even - // if it has an invisible parent. We don't want to have invisible parents now. + makeAudioEntity(world, videoEid, SourceType.MEDIA_VIDEO, audioSystem); }); mediaLoadedQuery(world).forEach(videoEid => { const audioParamsEid = findAncestorWithComponent(world, AudioParams, videoEid); if (audioParamsEid) { const audioSettings = APP.audioOverrides.get(audioParamsEid)!; - const audioEid = Emitter2Audio.get(videoEid)!; - APP.audioOverrides.set(audioEid, audioSettings); + APP.audioOverrides.set(videoEid, audioSettings); Emitter2Params.set(videoEid, audioParamsEid); - addComponent(world, AudioSettingsChanged, audioEid); + addComponent(world, AudioSettingsChanged, videoEid); } }); mediaVideoExitQuery(world).forEach(videoEid => { const audioParamsEid = Emitter2Params.get(videoEid); audioParamsEid && APP.audioOverrides.delete(audioParamsEid); Emitter2Params.delete(videoEid); - Emitter2Audio.delete(videoEid); }); networkedVideoEnterQuery(world).forEach(function (eid) { diff --git a/src/components/audio-feedback.js b/src/components/audio-feedback.js index f99e0386be..dcab1d3beb 100644 --- a/src/components/audio-feedback.js +++ b/src/components/audio-feedback.js @@ -55,7 +55,7 @@ AFRAME.registerComponent("networked-audio-analyser", { this.el.addEventListener( "sound-source-set", event => { - const ctx = THREE.AudioContext.getContext(); + const ctx = APP.audioCtx; this.analyser = ctx.createAnalyser(); this.analyser.fftSize = 32; this.levels = new Uint8Array(this.analyser.fftSize); diff --git a/src/components/audio-zone-source.js b/src/components/audio-zone-source.js index ea3611b10d..77bd76e7e1 100644 --- a/src/components/audio-zone-source.js +++ b/src/components/audio-zone-source.js @@ -1,3 +1,4 @@ +import { getAudioPosition } from "../bit-systems/audio-emitter-system"; import { updateAudioSettings } from "../update-audio-settings"; AFRAME.registerComponent("audio-zone-source", { @@ -14,12 +15,7 @@ AFRAME.registerComponent("audio-zone-source", { getPosition: (() => { const sourcePos = new THREE.Vector3(); return function () { - const audio = APP.audios.get(this.el); - if (audio) { - audio.getWorldPosition(sourcePos); - } else { - 
sourcePos.set(0, 0, 0); - } + getAudioPosition(this.el, sourcePos); return sourcePos; }; })(), diff --git a/src/components/avatar-audio-source.js b/src/components/avatar-audio-source.js index 5c4e3dd3ee..4f6f4ce486 100644 --- a/src/components/avatar-audio-source.js +++ b/src/components/avatar-audio-source.js @@ -1,6 +1,8 @@ -import { SourceType, AudioType } from "./audio-params"; +import { AudioType, SourceType } from "./audio-params"; import { getCurrentAudioSettings, updateAudioSettings } from "../update-audio-settings"; import { isRoomOwner } from "../utils/hub-utils"; +import { updateAudio, updatePannerNode } from "../bit-systems/audio-emitter-system"; +import { findAncestorWithComponent } from "../utils/scene-graph"; const INFO_INIT_FAILED = "Failed to initialize avatar-audio-source."; const INFO_NO_NETWORKED_EL = "Could not find networked el."; const INFO_NO_OWNER = "Networked component has no owner."; @@ -45,8 +47,6 @@ export async function getMediaStream(el) { AFRAME.registerComponent("avatar-audio-source", { createAudio: async function () { - this.removeAudio(); - this.isCreatingAudio = true; const stream = await getMediaStream(this.el); this.isCreatingAudio = false; @@ -55,29 +55,30 @@ AFRAME.registerComponent("avatar-audio-source", { APP.sourceType.set(this.el, SourceType.AVATAR_AUDIO_SOURCE); const { audioType } = getCurrentAudioSettings(this.el); - const audioListener = this.el.sceneEl.audioListener; - let audio = this.el.getObject3D(this.attrName); + let audio = APP.audios.get(this.el); if (audioType === AudioType.PannerNode) { - audio = new THREE.PositionalAudio(audioListener); + audio = APP.audioCtx.createPanner(); + updatePannerNode(audio, this.el.object3D); } else { - audio = new THREE.Audio(audioListener); + audio = APP.audioCtx.createStereoPanner(); } - // Default to being quiet so it fades in when volume is set by audio systems - audio.gain.gain.value = 0; + const gain = APP.audioCtx.createGain(); + gain.gain.value = 0; + audio.connect(gain); - this.audioSystem.addAudio({ sourceType: SourceType.AVATAR_AUDIO_SOURCE, node: audio }); + this.audioSystem.addAudio({ sourceType: SourceType.AVATAR_AUDIO_SOURCE, node: gain }); if (SHOULD_CREATE_SILENT_AUDIO_ELS) { createSilentAudioEl(stream); // TODO: Do the audio els need to get cleaned up? 
} - this.destination = audio.context.createMediaStreamDestination(); - this.mediaStreamSource = audio.context.createMediaStreamSource(stream); - const destinationSource = audio.context.createMediaStreamSource(this.destination.stream); - this.mediaStreamSource.connect(this.destination); - audio.setNodeSource(destinationSource); - this.el.setObject3D(this.attrName, audio); - this.el.emit("sound-source-set", { soundSource: destinationSource }); + APP.audios.set(this.el, audio); + APP.gains.set(this.el, gain); + + const mediaStreamSource = APP.audioCtx.createMediaStreamSource(stream); + APP.audioSources.set(this.el, mediaStreamSource); + mediaStreamSource.connect(audio); + this.el.emit("sound-source-set", { soundSource: audio }); getOwnerId(this.el).then(async ownerId => { if (isRoomOwner(ownerId)) { @@ -85,17 +86,11 @@ AFRAME.registerComponent("avatar-audio-source", { } else { APP.moderatorAudioSource.delete(this.el); } - APP.audios.set(this.el, audio); updateAudioSettings(this.el, audio); - }); - }, - removeAudio() { - const audio = this.el.getObject3D(this.attrName); - if (audio) { - this.audioSystem.removeAudio({ node: audio }); - this.el.removeObject3D(this.attrName); - } + this.ikRootEl = findAncestorWithComponent(this.el, "ik-root"); + this.ikController = this.ikRootEl.querySelector(".AvatarRoot").components["ik-controller"]; + }); }, init() { @@ -108,28 +103,6 @@ AFRAME.registerComponent("avatar-audio-source", { APP.dialog.on("stream_updated", this._onStreamUpdated, this); this.createAudio(); - let { disableLeftRightPanning, audioPanningQuality } = APP.store.state.preferences; - this.onPreferenceChanged = () => { - const newDisableLeftRightPanning = APP.store.state.preferences.disableLeftRightPanning; - const newAudioPanningQuality = APP.store.state.preferences.audioPanningQuality; - - const shouldRecreateAudio = disableLeftRightPanning !== newDisableLeftRightPanning && !this.isCreatingAudio; - const shouldUpdateAudioSettings = audioPanningQuality !== newAudioPanningQuality; - - disableLeftRightPanning = newDisableLeftRightPanning; - audioPanningQuality = newAudioPanningQuality; - - if (shouldRecreateAudio) { - this.createAudio(); - } else if (shouldUpdateAudioSettings) { - // updateAudioSettings() is called in this.createAudio() - // so no need to call it if shouldRecreateAudio is true. 
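// ----------------------------------------------------------------------------------------
// Editor's annotation (illustrative sketch, not part of the patch): for avatars the chain starts
// from a MediaStreamAudioSourceNode rather than a media element, and the new tick() only refreshes
// the PannerNode pose (via updateAudio/updatePannerNode) when the IK controller reports a transform
// change. Minimal sketch assuming the shared APP.audioCtx; `wireAvatarStream` is a hypothetical
// helper, not one added by this diff:
function wireAvatarStream(ctx: AudioContext, stream: MediaStream) {
  const source = ctx.createMediaStreamSource(stream); // stored in APP.audioSources
  const panner = ctx.createPanner();                  // APP.audios (PannerNode case)
  const gain = ctx.createGain();                      // APP.gains, registered with the audio system
  gain.gain.value = 0;                                // silent until updateAudioSettings() fades it in
  source.connect(panner);
  panner.connect(gain);
  return { source, panner, gain };
}
// per frame: if (this.ikController && this.ikController.transformUpdated) updateAudio(this.el, this.el.object3D);
// ----------------------------------------------------------------------------------------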
- const audio = this.el.getObject3D(this.attrName); - updateAudioSettings(this.el, audio); - } - }; - APP.store.addEventListener("statechanged", this.onPreferenceChanged); - this.el.addEventListener("audio_type_changed", this.createAudio); APP.hubChannel.addEventListener("permissions_updated", this.onPermissionsUpdated); }, @@ -146,11 +119,6 @@ AFRAME.registerComponent("avatar-audio-source", { }, async _onStreamUpdated(peerId, kind) { - const audio = this.el.getObject3D(this.attrName); - if (!audio) return; - const stream = audio.source.mediaStream; - if (!stream) return; - getOwnerId(this.el).then(async ownerId => { if (ownerId === peerId && kind === "audio") { // The audio stream for this peer has been updated @@ -159,9 +127,12 @@ AFRAME.registerComponent("avatar-audio-source", { }); if (newStream) { - this.mediaStreamSource.disconnect(); - this.mediaStreamSource = audio.context.createMediaStreamSource(newStream); - this.mediaStreamSource.connect(this.destination); + let mediaStreamSource = APP.audioSources.get(this.el); + mediaStreamSource.disconnect(); + mediaStreamSource = APP.audioCtx.createMediaStreamSource(newStream); + const audio = APP.audios.get(this.el); + mediaStreamSource.connect(audio); + APP.audioSources.set(this.el, mediaStreamSource); } } }); @@ -171,26 +142,33 @@ AFRAME.registerComponent("avatar-audio-source", { APP.dialog.off("stream_updated", this._onStreamUpdated); APP.hubChannel.removeEventListener("permissions_updated", this.onPermissionsUpdated); - window.APP.store.removeEventListener("statechanged", this.onPreferenceChanged); - this.el.removeEventListener("audio_type_changed", this.createAudio); - + const gain = APP.gains.get(this.el); + this.audioSystem.removeAudio({ node: gain }); + APP.gains.delete(this.el); APP.audios.delete(this.el); + APP.audioSources.delete(this.el); + APP.isAudioPaused.delete(this.el); + APP.gainMultipliers.delete(this.el); APP.sourceType.delete(this.el); APP.supplementaryAttenuation.delete(this.el); + }, - this.removeAudio(); + tick: function () { + if (this.ikController && this.ikController.transformUpdated) { + updateAudio(this.el, this.el.object3D); + } } }); -function createWhiteNoise(audioContext, gain) { - const bufferSize = 2 * audioContext.sampleRate, - noiseBuffer = audioContext.createBuffer(1, bufferSize, audioContext.sampleRate), +function createWhiteNoise(gain) { + const bufferSize = 2 * APP.audioCtx.sampleRate, + noiseBuffer = APP.audioCtx.createBuffer(1, bufferSize, APP.audioCtx.sampleRate), gainFilter = noiseBuffer.getChannelData(0); for (let i = 0; i < bufferSize; i++) { gainFilter[i] = (Math.random() * 2 - 1) * gain; } - const whiteNoise = audioContext.createBufferSource(); + const whiteNoise = APP.audioCtx.createBufferSource(); whiteNoise.buffer = noiseBuffer; whiteNoise.loop = true; whiteNoise.start(0); @@ -215,11 +193,10 @@ AFRAME.registerComponent("zone-audio-source", { }, init() { - const audioListener = this.el.sceneEl.audioListener; - const ctx = audioListener.context; - this.gainFilter = ctx.createGain(); + this.gainFilter = APP.audioCtx.createGain(); + APP.audioSources.set(this.el, this.gainFilter); if (this.data.debug) { - this.whiteNoise = createWhiteNoise(ctx, 0.01); + this.whiteNoise = createWhiteNoise(0.01); this.setInput(this.whiteNoise); } @@ -239,7 +216,7 @@ AFRAME.registerComponent("zone-audio-source", { setInput(newInput) { if (this.input) { - this.input.disconnect(this.gainFilter); + this.input.disconnect(); this.input = null; } @@ -279,9 +256,7 @@ AFRAME.registerComponent("zone-audio-source", { 
this.setInput(null); } else { getMediaStream(this.trackingEl).then(stream => { - const audioListener = this.el.sceneEl.audioListener; - const ctx = audioListener.context; - const node = ctx.createMediaStreamSource(stream); + const node = APP.audioCtx.createMediaStreamSource(stream); this.setInput(node); }); } @@ -317,7 +292,6 @@ AFRAME.registerComponent("audio-target", { this.el.setAttribute("audio-zone-source"); this.createAudio = this.createAudio.bind(this); - this.el.addEventListener("audio_type_changed", this.createAudio); }, remove: function () { @@ -325,38 +299,48 @@ AFRAME.registerComponent("audio-target", { APP.audios.delete(this.el); APP.sourceType.delete(this.el); - this.removeAudio(); + const gain = APP.gains.get(this.el); + if (this.delayNode) { + this.audioSystem.removeAudio({ node: this.delayNode }); + } else { + this.audioSystem.removeAudio({ node: gain }); + } + APP.gains.delete(this.el); + APP.audios.delete(this.el); + APP.audioSources.delete(this.el); + APP.isAudioPaused.delete(this.el); + APP.gainMultipliers.delete(this.el); + APP.sourceType.delete(this.el); + APP.supplementaryAttenuation.delete(this.el); this.el.removeAttribute("audio-zone-source"); - this.el.removeEventListener("audio_type_changed", this.createAudio); }, createAudio: function () { - this.removeAudio(); - APP.sourceType.set(this.el, SourceType.AUDIO_TARGET); - const audioListener = this.el.sceneEl.audioListener; const { audioType } = getCurrentAudioSettings(this.el); - let audio = this.el.getObject3D(this.attrName); + let audio = APP.audios.get(this.el); if (audioType === AudioType.PannerNode) { - audio = new THREE.PositionalAudio(audioListener); + audio = APP.audioCtx.createPanner(); + updatePannerNode(audio, this.el.object3D); } else { - audio = new THREE.Audio(audioListener); + audio = APP.audioCtx.createStereoPanner(); } - this.audioSystem.addAudio({ sourceType: SourceType.AVATAR_AUDIO_SOURCE, node: audio }); + const gain = APP.audioCtx.createGain(); + gain.gain.value = 0; + audio.connect(gain); if (this.data.maxDelay > 0) { - const delayNode = audio.context.createDelay(this.data.maxDelay); - delayNode.delayTime.value = THREE.MathUtils.randFloat(this.data.minDelay, this.data.maxDelay); - audio.setFilters([delayNode]); + this.delayNode = APP.audioCtx.createDelay(this.data.maxDelay); + this.delayNode.delayTime.value = THREE.MathUtils.randFloat(this.data.minDelay, this.data.maxDelay); + gain.connect(this.delayNode); + this.audioSystem.addAudio({ sourceType: SourceType.AVATAR_AUDIO_SOURCE, node: this.delayNode }); + } else { + this.audioSystem.addAudio({ sourceType: SourceType.AVATAR_AUDIO_SOURCE, node: gain }); } - this.el.setObject3D(this.attrName, audio); - audio.matrixNeedsUpdate = true; - audio.updateMatrixWorld(); - - audio.updateMatrixWorld(); APP.audios.set(this.el, audio); + APP.gains.set(this.el, gain); updateAudioSettings(this.el, audio); }, @@ -365,20 +349,11 @@ AFRAME.registerComponent("audio-target", { const srcZone = srcEl && srcEl.components["zone-audio-source"]; const node = srcZone && srcZone.getGainFilter(); if (node) { - const audio = this.el.getObject3D(this.attrName); - if (audio) { - audio.setNodeSource(node); - } + const audio = APP.audios.get(this.el); + APP.audioSources.set(this.el, node); + node.connect(audio); } else { console.warn(`Failed to get audio from source for ${this.el.className}`, srcEl); } - }, - - removeAudio() { - const audio = this.el.getObject3D(this.attrName); - if (audio) { - this.audioSystem.removeAudio({ node: this.audio }); - 
this.el.removeObject3D(this.attrName); - } } }); diff --git a/src/components/body-helper.js b/src/components/body-helper.js index e7b17d63d7..94e521415a 100644 --- a/src/components/body-helper.js +++ b/src/components/body-helper.js @@ -26,7 +26,7 @@ AFRAME.registerComponent("body-helper", { default: ACTIVATION_STATE.ACTIVE_TAG, oneOf: ACTIVATION_STATES }, - type: { default: "dynamic", oneOf: [TYPE.STATIC, TYPE.DYNAMIC, TYPE.KINEMATIC] }, + type: { default: "static", oneOf: [TYPE.STATIC, TYPE.DYNAMIC, TYPE.KINEMATIC] }, emitCollisionEvents: { default: false }, disableCollision: { default: false }, collisionFilterGroup: { default: 1 }, //32-bit mask, diff --git a/src/components/ik-controller.js b/src/components/ik-controller.js index 3adf3a4d19..30872f1b4c 100644 --- a/src/components/ik-controller.js +++ b/src/components/ik-controller.js @@ -117,6 +117,8 @@ AFRAME.registerComponent("ik-controller", { this.isInView = true; this.hasConvergedHips = false; this.lastCameraTransform = new THREE.Matrix4(); + this.lastCameraWorldTransform = new THREE.Matrix4(); + this.transformUpdated = false; waitForDOMContentLoaded().then(() => { this.playerCamera = document.getElementById("viewing-camera").getObject3D("camera"); }); @@ -236,6 +238,11 @@ AFRAME.registerComponent("ik-controller", { if (this._hadFirstTick) { camera.object3D.updateMatrices(); + this.transformUpdated = !this.lastCameraWorldTransform.equals(camera.object3D.matrixWorld); + if (this.transformUpdated) { + this.lastCameraWorldTransform.copy(camera.object3D.matrixWorld); + } + avatar.updateMatrices(); // Note: Camera faces down -Z, avatar faces down +Z const yDelta = Math.PI - angleOnXZPlaneBetweenMatrixRotations(camera.object3D.matrixWorld, avatar.matrixWorld); diff --git a/src/components/media-video.js b/src/components/media-video.js index 04b3bf0ce5..17e90344b9 100644 --- a/src/components/media-video.js +++ b/src/components/media-video.js @@ -17,12 +17,16 @@ import semver from "semver"; import { createPlaneBufferGeometry } from "../utils/three-utils"; import HubsTextureLoader from "../loaders/HubsTextureLoader"; import { getCurrentAudioSettings, updateAudioSettings } from "../update-audio-settings"; -import { SourceType, AudioType } from "./audio-params"; +import { AudioType, SourceType } from "./audio-params"; import { errorTexture } from "../utils/error-texture"; import { scaleToAspectRatio } from "../utils/scale-to-aspect-ratio"; import { isSafari } from "../utils/detect-safari"; import { isIOS as detectIOS } from "../utils/is-mobile"; import { Layers } from "../camera-layers"; +import { updateAudio, updatePannerNode } from "../bit-systems/audio-emitter-system"; +import { hasComponent } from "bitecs"; +import { FloatyObject } from "../bit-components"; +import { BodyAtRest } from "../systems/floaty-object-system"; const ONCE_TRUE = { once: true }; const TYPE_IMG_PNG = { type: "image/png" }; @@ -156,30 +160,6 @@ AFRAME.registerComponent("media-video", { // Non-networked this.updatePlaybackState(); }); - - let { disableLeftRightPanning, audioPanningQuality } = APP.store.state.preferences; - this.onPreferenceChanged = () => { - const newDisableLeftRightPanning = APP.store.state.preferences.disableLeftRightPanning; - const newAudioPanningQuality = APP.store.state.preferences.audioPanningQuality; - - const shouldRecreateAudio = - disableLeftRightPanning !== newDisableLeftRightPanning && this.audio && this.mediaElementAudioSource; - const shouldUpdateAudioSettings = audioPanningQuality !== newAudioPanningQuality; - - disableLeftRightPanning 
= newDisableLeftRightPanning; - audioPanningQuality = newAudioPanningQuality; - - if (shouldRecreateAudio) { - this.setupAudio(); - } else if (shouldUpdateAudioSettings) { - // updateAudioSettings() is called in this.setupAudio() - // so no need to call it if shouldRecreateAudio is true. - updateAudioSettings(this.el, this.audio); - } - }; - - APP.store.addEventListener("statechanged", this.onPreferenceChanged); - this.el.addEventListener("audio_type_changed", this.setupAudio); }, play() { @@ -342,8 +322,6 @@ AFRAME.registerComponent("media-video", { }, setupAudio() { - this.removeAudio(); - APP.sourceType.set(this.el, SourceType.MEDIA_VIDEO); if (this.data.videoPaused) { @@ -353,28 +331,27 @@ AFRAME.registerComponent("media-video", { } const { audioType } = getCurrentAudioSettings(this.el); - const audioListener = this.el.sceneEl.audioListener; + let audio; if (audioType === AudioType.PannerNode) { - this.audio = new THREE.PositionalAudio(audioListener); + audio = APP.audioCtx.createPanner(); + updatePannerNode(audio, this.el.object3D); } else { - this.audio = new THREE.Audio(audioListener); + audio = APP.audioCtx.createStereoPanner(); } + const gain = APP.audioCtx.createGain(); + gain.gain.value = 0; + audio.connect(gain); // Default to being quiet so it fades in when volume is set by audio systems - this.audio.gain.gain.value = 0; - this.audioSystem.addAudio({ sourceType: SourceType.MEDIA_VIDEO, node: this.audio }); - - this.audio.setNodeSource(this.mediaElementAudioSource); - this.el.setObject3D("sound", this.audio); + this.audioSystem.addAudio({ sourceType: SourceType.MEDIA_VIDEO, node: gain }); - // Make sure that the audio is initialized to the right place. - // Its matrix may not update if this element is not visible. - // See https://github.com/mozilla/hubs/issues/2855 - this.audio.updateMatrixWorld(); + const mediaElementAudioSource = APP.audioSources.get(this.el); + mediaElementAudioSource.connect(audio); - APP.audios.set(this.el, this.audio); - updateAudioSettings(this.el, this.audio); + APP.audios.set(this.el, audio); + APP.gains.set(this.el, gain); + updateAudioSettings(this.el, audio); // Original audio source volume can now be restored as audio systems will take over - this.mediaElementAudioSource.mediaElement.volume = 1; + mediaElementAudioSource.mediaElement.volume = 1; }, async updateSrc(oldData) { @@ -407,14 +384,15 @@ AFRAME.registerComponent("media-video", { return; } - this.mediaElementAudioSource = null; + APP.audioSources.delete(this.el); if (!src.startsWith("hubs://")) { // iOS video audio is broken on ios safari < 13.1.2, see: https://github.com/mozilla/hubs/issues/1797 if (!isIOS || semver.satisfies(detect().version, ">=13.1.2")) { // TODO FF error here if binding mediastream: The captured HTMLMediaElement is playing a MediaStream. Applying volume or mute status is not currently supported -- not an issue since we have no audio atm in shared video. 
- this.mediaElementAudioSource = - linkedMediaElementAudioSource || - this.el.sceneEl.audioListener.context.createMediaElementSource(audioSourceEl); + APP.audioSources.set( + this.el, + linkedMediaElementAudioSource || APP.audioCtx.createMediaElementSource(audioSourceEl) + ); this.hasAudioTracks && this.setupAudio(); } @@ -746,6 +724,12 @@ AFRAME.registerComponent("media-video", { this.lastUpdate = now; } } + + const isFloaty = hasComponent(APP.world, FloatyObject, this.el.eid); + const isAtRest = hasComponent(APP.world, BodyAtRest, this.el.eid); + if (isFloaty && !isAtRest) { + updateAudio(this.el, this.el.object3D); + } }; })(), @@ -758,8 +742,6 @@ AFRAME.registerComponent("media-video", { remove() { this.cleanUp(); - APP.isAudioPaused.delete(this.el); - if (this.mesh) { this.el.removeObject3D("mesh"); } @@ -769,13 +751,16 @@ AFRAME.registerComponent("media-video", { this._audioSyncInterval = null; } - APP.gainMultipliers.delete(this.el); + const gain = APP.gains.get(this.el); + this.audioSystem.removeAudio({ node: gain }); + APP.gains.delete(this.el); APP.audios.delete(this.el); + APP.audioSources.delete(this.el); + APP.isAudioPaused.delete(this.el); + APP.gainMultipliers.delete(this.el); APP.sourceType.delete(this.el); APP.supplementaryAttenuation.delete(this.el); - this.removeAudio(); - if (this.networkedEl) { this.networkedEl.removeEventListener("pinned", this.updateHoverMenu); this.networkedEl.removeEventListener("unpinned", this.updateHoverMenu); @@ -797,16 +782,5 @@ AFRAME.registerComponent("media-video", { this.seekBackButton.object3D.removeEventListener("interact", this.seekBack); this.snapButton.object3D.removeEventListener("interact", this.snap); } - - window.APP.store.removeEventListener("statechanged", this.onPreferenceChanged); - this.el.addEventListener("audio_type_changed", this.setupAudio); - }, - - removeAudio() { - if (this.audio) { - this.el.removeObject3D("sound"); - this.audioSystem.removeAudio({ node: this.audio }); - delete this.audio; - } } }); diff --git a/src/components/transform-object-button.js b/src/components/transform-object-button.js index bdbcdd18fb..9ce8c4a29d 100644 --- a/src/components/transform-object-button.js +++ b/src/components/transform-object-button.js @@ -1,6 +1,8 @@ import { paths } from "../systems/userinput/paths"; import { waitForDOMContentLoaded } from "../utils/async-utils"; import { COLLISION_LAYERS } from "../constants"; +import { addComponent, removeComponent } from "bitecs"; +import { BodyAtRest } from "../systems/floaty-object-system"; const AMMO_BODY_ATTRIBUTES = { type: "kinematic", collisionFilterMask: COLLISION_LAYERS.HANDS }; export const TRANSFORM_MODE = { @@ -145,6 +147,7 @@ AFRAME.registerSystem("transform-selected-object", { pInv.invert(); this.target.quaternion.copy(pInv).multiply(q); this.target.matrixNeedsUpdate = true; + addComponent(APP.world, BodyAtRest, this.target.el.eid); } } }; @@ -241,6 +244,7 @@ AFRAME.registerSystem("transform-selected-object", { .premultiply(controllerOrientationDelta) .premultiply(controllerOrientationDelta); this.target.matrixNeedsUpdate = true; + removeComponent(APP.world, BodyAtRest, this.target.el.eid); }, cursorAxisOrScaleTick() { @@ -295,6 +299,7 @@ AFRAME.registerSystem("transform-selected-object", { q2.setFromAxisAngle(v, this.dxApplied); this.target.quaternion.premultiply(q).premultiply(q2); + removeComponent(APP.world, BodyAtRest, this.target.el.eid); } this.target.matrixNeedsUpdate = true; @@ -305,6 +310,7 @@ AFRAME.registerSystem("transform-selected-object", { 
this.target.quaternion.multiply(q.setFromAxisAngle(this.axis, -this.sign * this.dxApplied)); this.target.matrixNeedsUpdate = true; + removeComponent(APP.world, BodyAtRest, this.target.el.eid); } previousPointOnPlane.copy(currentPointOnPlane); @@ -323,6 +329,7 @@ AFRAME.registerSystem("transform-selected-object", { this.el.camera.getWorldPosition(CAMERA_WORLD_POSITION); this.target.lookAt(CAMERA_WORLD_POSITION); this.transforming = false; + removeComponent(APP.world, BodyAtRest, this.target.el.eid); return; } diff --git a/src/gltf-component-mappings.js b/src/gltf-component-mappings.js index b2c3875338..d7b2baf422 100644 --- a/src/gltf-component-mappings.js +++ b/src/gltf-component-mappings.js @@ -224,7 +224,7 @@ async function mediaInflator(el, componentName, componentData, components, fitTo // The way we are handling it is wrong. If a user created a scene with this old version // of the component, all of these parameters will be present whether the user explicitly set // the values for them or not. But really, they should only count as "overrides" if the user - // meant for them to take precendence over the app and scene defaults. + // meant for them to take precedence over the app and scene defaults. // TODO: Fix this issue. One option is to just ignore this component data, which might break old scenes // but simplifying the handling. Another option is to compare the component data here with // the "defaults" and only save the values that are different from the defaults. However, diff --git a/src/scene-entry-manager.js b/src/scene-entry-manager.js index fb171d3715..fa9e7a8ca9 100644 --- a/src/scene-entry-manager.js +++ b/src/scene-entry-manager.js @@ -544,7 +544,7 @@ export default class SceneEntryManager { if (isNaN(audioVolume)) { audioVolume = 1.0; } - const audioContext = THREE.AudioContext.getContext(); + const audioContext = APP.audioCtx; const audioSource = audioContext.createMediaStreamSource(audioStream); const audioDestination = audioContext.createMediaStreamDestination(); const gainNode = audioContext.createGain(); diff --git a/src/systems/audio-debug-system.js b/src/systems/audio-debug-system.js index 3e96d3b427..c267ed5894 100644 --- a/src/systems/audio-debug-system.js +++ b/src/systems/audio-debug-system.js @@ -3,6 +3,7 @@ import audioDebugFrag from "./audio-debug.frag"; import { DistanceModelType } from "../components/audio-params"; import { getWebGLVersion } from "../utils/webgl"; import { getMeshes } from "../utils/aframe-utils"; +import { getAudioOrientation, getAudioPosition, isPositionalAudio } from "../bit-systems/audio-emitter-system"; const fakePanner = { distanceModel: DistanceModelType.Inverse, @@ -117,12 +118,13 @@ AFRAME.registerSystem("audio-debug", { if (sourceNum >= this.maxDebugSources) continue; if (APP.isAudioPaused.has(el)) continue; - audio.getWorldPosition(sourcePos); - audio.getWorldDirection(sourceDir); - this.sourcePositions[sourceNum] = sourcePos; // TODO: Use Vector3 pool - this.sourceOrientations[sourceNum] = sourceDir; + const panner = isPositionalAudio(audio) ? 
audio : fakePanner; + const gain = APP.gains.get(el); - const panner = audio.panner || fakePanner; + getAudioPosition(el, sourcePos); + getAudioOrientation(el, sourceDir); + this.sourcePositions[sourceNum] = sourcePos.clone(); + this.sourceOrientations[sourceNum] = sourceDir.clone(); this.distanceModels[sourceNum] = 0; if (panner.distanceModel === DistanceModelType.Linear) { @@ -138,7 +140,7 @@ AFRAME.registerSystem("audio-debug", { this.coneInnerAngles[sourceNum] = panner.coneInnerAngle; this.coneOuterAngles[sourceNum] = panner.coneOuterAngle; - this.gains[sourceNum] = audio.gain.gain.value; + this.gains[sourceNum] = gain.gain.value; this.clipped[sourceNum] = APP.clippingState.has(el); sourceNum++; } diff --git a/src/systems/audio-gain-system.js b/src/systems/audio-gain-system.js index afb35324e8..06b74aedbe 100644 --- a/src/systems/audio-gain-system.js +++ b/src/systems/audio-gain-system.js @@ -1,8 +1,11 @@ +import { AudioListenerTag } from "../bit-components"; +import { getAudioPosition, isPositionalAudio } from "../bit-systems/audio-emitter-system"; import { getCurrentAudioSettings, shouldAddSupplementaryAttenuation, updateAudioSettings } from "../update-audio-settings"; +import { anyEntityWith } from "../utils/bit-utils"; const distanceModels = { linear: function (distance, rolloffFactor, refDistance, maxDistance) { @@ -20,15 +23,17 @@ const calculateAttenuation = (() => { const listenerPos = new THREE.Vector3(); const sourcePos = new THREE.Vector3(); return (el, audio) => { - APP.audioListener.getWorldPosition(listenerPos); - audio.getWorldPosition(sourcePos); + const listenerEid = anyEntityWith(APP.world, AudioListenerTag); + const listener = APP.world.eid2obj.get(listenerEid); + listener.getWorldPosition(listenerPos); + getAudioPosition(el, sourcePos); const distance = sourcePos.distanceTo(listenerPos); - if (audio.panner) { - return distanceModels[audio.panner.distanceModel]( + if (isPositionalAudio(audio)) { + return distanceModels[audio.distanceModel]( distance, - audio.panner.rolloffFactor, - audio.panner.refDistance, - audio.panner.maxDistance + audio.rolloffFactor, + audio.refDistance, + audio.maxDistance // TODO: Why are coneInnerAngle, coneOuterAngle and coneOuterGain not used? ); } else { diff --git a/src/systems/audio-system.js b/src/systems/audio-system.js index 0581d9adc0..17052d3ccd 100644 --- a/src/systems/audio-system.js +++ b/src/systems/audio-system.js @@ -1,5 +1,5 @@ import { LogMessageType } from "../react-components/room/ChatSidebar"; -import { GAIN_TIME_CONST, SourceType } from "../components/audio-params"; +import { AudioType, GAIN_TIME_CONST, SourceType } from "../components/audio-params"; let delayedReconnectTimeout = null; function performDelayedReconnect(gainNode) { @@ -19,15 +19,13 @@ function performDelayedReconnect(gainNode) { import * as sdpTransform from "sdp-transform"; import MediaDevicesManager from "../utils/media-devices-manager"; - -function isThreeAudio(node) { - return node instanceof THREE.Audio || node instanceof THREE.PositionalAudio; -} +import { isPositionalAudio, swapAudioType, updatePannerNode } from "../bit-systems/audio-emitter-system"; +import { getCurrentAudioSettings, updateAudioSettings } from "../update-audio-settings"; async function enableChromeAEC(gainNode) { /** * workaround for: https://bugs.chromium.org/p/chromium/issues/detail?id=687574 - * 1. grab the GainNode from the scene's THREE.AudioListener + * 1. grab the listener from the audio context * 2. 
disconnect the GainNode from the AudioDestinationNode (basically the audio out), this prevents hearing the audio twice. * 3. create a local webrtc connection between two RTCPeerConnections (see this example: https://webrtc.github.io/samples/src/content/peerconnection/pc1/) * 4. create a new MediaStreamDestination from the scene's THREE.AudioContext and connect the GainNode to it. @@ -40,7 +38,7 @@ async function enableChromeAEC(gainNode) { audioEl.setAttribute("autoplay", "autoplay"); audioEl.setAttribute("playsinline", "playsinline"); - const context = THREE.AudioContext.getContext(); + const context = APP.audioCtx; const loopbackDestination = context.createMediaStreamDestination(); const outboundPeerConnection = new RTCPeerConnection(); const inboundPeerConnection = new RTCPeerConnection(); @@ -124,9 +122,11 @@ export class AudioSystem { constructor(sceneEl) { this._sceneEl = sceneEl; - this.audioContext = THREE.AudioContext.getContext(); + this.audioContext = APP.audioCtx; this.audioNodes = new Map(); this.mediaStreamDestinationNode = this.audioContext.createMediaStreamDestination(); // Voice, camera, screenshare + this.destinationGain = APP.audioCtx.createGain(); + this.destinationGain.connect(APP.audioCtx.destination); this.audioDestination = this.audioContext.createMediaStreamDestination(); // Media elements this.outboundStream = this.mediaStreamDestinationNode.stream; this.outboundGainNode = this.audioContext.createGain(); @@ -144,9 +144,9 @@ export class AudioSystem { [SourceType.AUDIO_TARGET]: this.mediaGain, [SourceType.SFX]: this.audioContext.createGain() }; - this.mixer[SourceType.AVATAR_AUDIO_SOURCE].connect(this._sceneEl.audioListener.getInput()); - this.mixer[SourceType.MEDIA_VIDEO].connect(this._sceneEl.audioListener.getInput()); - this.mixer[SourceType.SFX].connect(this._sceneEl.audioListener.getInput()); + this.mixer[SourceType.AVATAR_AUDIO_SOURCE].connect(this.destinationGain); + this.mixer[SourceType.MEDIA_VIDEO].connect(this.destinationGain); + this.mixer[SourceType.SFX].connect(this.destinationGain); // Analyser to show the output audio level this.mixerAnalyser = this.audioContext.createAnalyser(); @@ -161,8 +161,11 @@ export class AudioSystem { document.body.addEventListener("touchend", this._resumeAudioContext, false); document.body.addEventListener("mouseup", this._resumeAudioContext, false); + const { disableLeftRightPanning, audioPanningQuality } = APP.store.state.preferences; + this.disableLeftRightPanning = disableLeftRightPanning; + this.audioPanningQuality = audioPanningQuality; this.onPrefsUpdated = this.updatePrefs.bind(this); - window.APP.store.addEventListener("statechanged", this.onPrefsUpdated); + APP.store.addEventListener("statechanged", this.onPrefsUpdated); } addStreamToOutboundAudio(id, mediaStream) { @@ -186,25 +189,21 @@ export class AudioSystem { } } - addAudio({ sourceType, node }) { - let outputNode = node; - if (isThreeAudio(node)) { - node.gain.disconnect(); - outputNode = node.gain; - } - outputNode.connect(this.mixer[sourceType]); + getListenerInput() { + return this.destinationGain; + } + + addAudio({ node, sourceType }) { + node.disconnect(); + node.connect(this.mixer[sourceType]); } removeAudio({ node }) { - let outputNode = node; - if (isThreeAudio(node)) { - outputNode = node.gain; - } - outputNode.disconnect(); + node.disconnect(); } updatePrefs() { - const { globalVoiceVolume, globalMediaVolume, globalSFXVolume } = window.APP.store.state.preferences; + const { globalVoiceVolume, globalMediaVolume, globalSFXVolume } = 
APP.store.state.preferences; let newGain = globalMediaVolume / 100; this.mixer[SourceType.MEDIA_VIDEO].gain.setTargetAtTime(newGain, this.audioContext.currentTime, GAIN_TIME_CONST); @@ -221,41 +220,71 @@ export class AudioSystem { if (MediaDevicesManager.isAudioOutputSelectEnabled && APP.mediaDevicesManager) { const sinkId = APP.mediaDevicesManager.selectedSpeakersDeviceId; const isDefault = sinkId === APP.mediaDevicesManager.defaultOutputDeviceId; - if ((!this.outputMediaAudio && isDefault) || sinkId === this.outputMediaAudio?.sinkId) return; - const sink = isDefault ? this._sceneEl.audioListener.getInput() : this.audioDestination; - this.mixer[SourceType.AVATAR_AUDIO_SOURCE].disconnect(); - this.mixer[SourceType.AVATAR_AUDIO_SOURCE].connect(sink); - this.mixer[SourceType.AVATAR_AUDIO_SOURCE].connect(this.mixerAnalyser); - this.mixer[SourceType.MEDIA_VIDEO].disconnect(); - this.mixer[SourceType.MEDIA_VIDEO].connect(sink); - this.mixer[SourceType.MEDIA_VIDEO].connect(this.mixerAnalyser); - this.mixer[SourceType.SFX].disconnect(); - this.mixer[SourceType.SFX].connect(sink); - this.mixer[SourceType.SFX].connect(this.mixerAnalyser); - if (isDefault) { - if (this.outputMediaAudio) { - this.outputMediaAudio.pause(); - this.outputMediaAudio.srcObject = null; - this.outputMediaAudio = null; - } - } else { - // Swithing the audio sync is only supported in Chrome at the time of writing this. - // It also seems to have some limitations and it only works on audio elements. We are piping all our media through the Audio Context - // and that doesn't seem to work. - // To workaround that we need to use a MediaStreamAudioDestinationNode that is set as the source of the audio element where we switch the sink. - // This is very hacky but there don't seem to have any better alternatives at the time of writing this. - // https://stackoverflow.com/a/67043782 - if (!this.outputMediaAudio) { - this.outputMediaAudio = new Audio(); - this.outputMediaAudio.srcObject = this.audioDestination.stream; - } - if (this.outputMediaAudio.sinkId !== sinkId) { - this.outputMediaAudio.setSinkId(sinkId).then(() => { - this.outputMediaAudio.play(); - }); + if ((this.outputMediaAudio || !isDefault) && sinkId !== this.outputMediaAudio?.sinkId) { + const sink = isDefault ? this.destinationGain : this.audioDestination; + this.mixer[SourceType.AVATAR_AUDIO_SOURCE].disconnect(); + this.mixer[SourceType.AVATAR_AUDIO_SOURCE].connect(sink); + this.mixer[SourceType.AVATAR_AUDIO_SOURCE].connect(this.mixerAnalyser); + this.mixer[SourceType.MEDIA_VIDEO].disconnect(); + this.mixer[SourceType.MEDIA_VIDEO].connect(sink); + this.mixer[SourceType.MEDIA_VIDEO].connect(this.mixerAnalyser); + this.mixer[SourceType.SFX].disconnect(); + this.mixer[SourceType.SFX].connect(sink); + this.mixer[SourceType.SFX].connect(this.mixerAnalyser); + if (isDefault) { + if (this.outputMediaAudio) { + this.outputMediaAudio.pause(); + this.outputMediaAudio.srcObject = null; + this.outputMediaAudio = null; + } + } else { + // Switching the audio sync is only supported in Chrome at the time of writing this. + // It also seems to have some limitations and it only works on audio elements. We are piping all our media through the Audio Context + // and that doesn't seem to work. + // To workaround that we need to use a MediaStreamAudioDestinationNode that is set as the source of the audio element where we switch the sink. + // This is very hacky but there don't seem to have any better alternatives at the time of writing this. 
+ // https://stackoverflow.com/a/67043782 + if (!this.outputMediaAudio) { + this.outputMediaAudio = new Audio(); + this.outputMediaAudio.srcObject = this.audioDestination.stream; + } + if (this.outputMediaAudio.sinkId !== sinkId) { + this.outputMediaAudio.setSinkId(sinkId).then(() => { + this.outputMediaAudio.play(); + }); + } } } } + + const newDisableLeftRightPanning = APP.store.state.preferences.disableLeftRightPanning; + const newAudioPanningQuality = APP.store.state.preferences.audioPanningQuality; + + const shouldRecreateAudio = this.disableLeftRightPanning !== newDisableLeftRightPanning; + const shouldUpdateAudioSettings = this.audioPanningQuality !== newAudioPanningQuality; + + this.disableLeftRightPanning = newDisableLeftRightPanning; + this.audioPanningQuality = newAudioPanningQuality; + + APP.audios.forEach((audio, elOrEid) => { + if (shouldRecreateAudio) { + const { audioType } = getCurrentAudioSettings(elOrEid); + if ( + (!isPositionalAudio(audio) && audioType === AudioType.PannerNode) || + (isPositionalAudio(audio) && audioType === AudioType.Stereo) + ) { + swapAudioType(elOrEid); + audio = APP.audios.get(elOrEid); + if (isPositionalAudio(audio)) { + const obj = APP.world.eid2obj.get(elOrEid.isEntity ? elOrEid.eid : elOrEid); + updatePannerNode(audio, obj); + } + } + } else if (shouldUpdateAudioSettings) { + const audio = APP.audios.get(elOrEid); + updateAudioSettings(elOrEid, audio); + } + }); } /** @@ -269,7 +298,7 @@ export class AudioSystem { if (this.audioContext.state === "running") { const disableAEC = window.APP.store.state.preferences.disableEchoCancellation; if (!AFRAME.utils.device.isMobile() && /chrome/i.test(navigator.userAgent) && !disableAEC) { - enableChromeAEC(this._sceneEl.audioListener.gain); + enableChromeAEC(this.destinationGain); } document.body.removeEventListener("touchend", this._resumeAudioContext, false); diff --git a/src/systems/audio-zones-system.js b/src/systems/audio-zones-system.js index ee5c4becc4..78ac641268 100644 --- a/src/systems/audio-zones-system.js +++ b/src/systems/audio-zones-system.js @@ -1,3 +1,6 @@ +import { AudioListenerTag } from "../bit-components"; +import { anyEntityWith } from "../utils/bit-utils"; + // We apply the most restrictive audio parameters function paramsReducer(acc, curr) { if (!curr && !acc) return {}; @@ -154,15 +157,18 @@ export class AudioZonesSystem { tick = (function () { const listenerPosition = new THREE.Vector3(); return function (scene) { - if (!scene.is("entered")) return; + if (!scene.is("entered") || this.zones.length === 0 || this.sources.length === 0) return; + + const listenerEid = anyEntityWith(APP.world, AudioListenerTag); + const listener = APP.world.eid2obj.get(listenerEid); + listener.getWorldPosition(listenerPosition); if (!this.didRegisterAudioListener) { this.didRegisterAudioListener = true; - this.registerEntity(scene.audioListener); + this.registerEntity(listener); } - const currListenerZones = this.currZones.get(scene.audioListener); - scene.audioListener.getWorldPosition(listenerPosition); + const currListenerZones = this.currZones.get(listener); this.zones.forEach(zone => { addOrRemoveZone(currListenerZones, zone, listenerPosition); this.sources.forEach(source => { @@ -170,7 +176,7 @@ export class AudioZonesSystem { }); }); - const isListenerUpdated = isUpdated(currListenerZones, this.prevZones.get(scene.audioListener)); + const isListenerUpdated = isUpdated(currListenerZones, this.prevZones.get(listener)); this.sources .filter(source => { return isListenerUpdated || 
isUpdated(this.currZones.get(source), this.prevZones.get(source)); diff --git a/src/systems/camera-system.js b/src/systems/camera-system.js index ee6cdbeee7..ac473f5fef 100644 --- a/src/systems/camera-system.js +++ b/src/systems/camera-system.js @@ -8,6 +8,9 @@ import { qsGet } from "../utils/qs_truthy"; const customFOV = qsGet("fov"); const enableThirdPersonMode = qsTruthy("thirdPerson"); import { Layers } from "../camera-layers"; +import { anyEntityWith } from "../utils/bit-utils"; +import { addComponent, removeComponent } from "bitecs"; +import { AudioListenerTag } from "../bit-components"; function getInspectableInHierarchy(el) { let inspectable = el; @@ -286,12 +289,14 @@ export class CameraSystem { if (this.snapshot.audio) { this.snapshot.audio.updateMatrices(); this.snapshot.audioTransform.copy(this.snapshot.audio.matrixWorld); - scene.audioListener.updateMatrices(); - this.audioSourceTargetTransform.makeTranslation(0, 0, -0.25).premultiply(scene.audioListener.matrixWorld); + const listenerEid = anyEntityWith(APP.world, AudioListenerTag); + const listener = APP.world.eid2obj.get(listenerEid); + listener.updateMatrices(); + this.audioSourceTargetTransform.makeTranslation(0, 0, -0.25).premultiply(listener.matrixWorld); setMatrixWorld(this.snapshot.audio, this.audioSourceTargetTransform); } - this.ensureListenerIsParentedCorrectly(scene); + this.ensureListenerIsParentedCorrectly(); moveRigSoCameraLooksAtPivot( this.viewingRig.object3D, @@ -352,17 +357,21 @@ export class CameraSystem { AFRAME.scenes[0].emit("inspect-lights-changed"); } - ensureListenerIsParentedCorrectly(scene) { - if (scene.audioListener && this.avatarPOV) { - if (this.mode === CAMERA_MODE_INSPECT && scene.audioListener.parent !== this.avatarPOV.object3D) { - this.avatarPOV.object3D.add(scene.audioListener); + ensureListenerIsParentedCorrectly() { + const listenerEid = anyEntityWith(APP.world, AudioListenerTag); + const listener = APP.world.eid2obj.get(listenerEid); + if (listener && this.avatarPOV) { + if (this.mode === CAMERA_MODE_INSPECT && this.avatarPOV.object3D !== listener) { + removeComponent(APP.world, AudioListenerTag, listenerEid); + addComponent(APP.world, AudioListenerTag, this.avatarPOV.eid); } else if ( (this.mode === CAMERA_MODE_FIRST_PERSON || this.mode === CAMERA_MODE_THIRD_PERSON_NEAR || this.mode === CAMERA_MODE_THIRD_PERSON_FAR) && - scene.audioListener.parent !== this.viewingCamera + this.viewingCamera !== listener ) { - this.viewingCamera.add(scene.audioListener); + removeComponent(APP.world, AudioListenerTag, listenerEid); + addComponent(APP.world, AudioListenerTag, this.viewingCamera.eid); } } } @@ -438,7 +447,7 @@ export class CameraSystem { this.nextMode(); } - this.ensureListenerIsParentedCorrectly(scene); + this.ensureListenerIsParentedCorrectly(); if (this.mode === CAMERA_MODE_FIRST_PERSON) { this.viewingCameraRotator.on = false; diff --git a/src/systems/capture-system.js b/src/systems/capture-system.js index cd6fe2a9a6..3ee26b4e75 100644 --- a/src/systems/capture-system.js +++ b/src/systems/capture-system.js @@ -39,11 +39,11 @@ AFRAME.registerSystem("capture-system", { }, _tryAddingAudioTrack() { - if (this._gotAudioTrack || !this.el.audioListener) return; + if (this._gotAudioTrack) return; - const listener = this.el.audioListener; - const destination = listener.context.createMediaStreamDestination(); - listener.getInput().connect(destination); + const destination = APP.audioCtx.createMediaStreamDestination(); + const audioSystem = APP.scene.systems["hubs-systems"].audioSystem; + 
audioSystem.getListenerInput().connect(destination); const audio = destination.stream.getAudioTracks()[0]; this._stream.addTrack(audio); diff --git a/src/systems/floaty-object-system.js b/src/systems/floaty-object-system.js index 11f02c7a1e..681bb9471b 100644 --- a/src/systems/floaty-object-system.js +++ b/src/systems/floaty-object-system.js @@ -20,6 +20,7 @@ import { } from "../bit-components"; export const MakeStaticWhenAtRest = defineComponent(); +export const BodyAtRest = defineComponent(); const makeStaticAtRestQuery = defineQuery([FloatyObject, Rigidbody, Not(Constraint), MakeStaticWhenAtRest]); function makeStaticAtRest(world) { @@ -44,6 +45,7 @@ function makeStaticAtRest(world) { }); physicsSystem.updateRigidBody(eid, bodyData.options); removeComponent(world, MakeStaticWhenAtRest, eid); + addComponent(world, BodyAtRest, eid); } }); } @@ -54,6 +56,7 @@ function makeKinematicOnRelease(world) { makeKinematicOnReleaseExitQuery(world).forEach(eid => { if (!entityExists(world, eid) || !hasComponent(world, Owned, eid)) return; physicsSystem.updateRigidBodyOptions(eid, { type: "kinematic" }); + addComponent(world, BodyAtRest, eid); }); } @@ -76,6 +79,7 @@ export const floatyObjectSystem = world => { type: "kinematic", gravity: { x: 0, y: 0, z: 0 } }); + addComponent(world, BodyAtRest, eid); }); enterHeldFloatyObjectsQuery(world).forEach(eid => { @@ -84,6 +88,7 @@ export const floatyObjectSystem = world => { type: "dynamic", collisionFilterMask: COLLISION_LAYERS.HANDS | COLLISION_LAYERS.MEDIA_FRAMES }); + removeComponent(world, BodyAtRest, eid); }); exitedHeldFloatyObjectsQuery(world).forEach(eid => { @@ -119,6 +124,7 @@ export const floatyObjectSystem = world => { collisionFilterMask: COLLISION_LAYERS.DEFAULT_INTERACTABLE, gravity: { x: 0, y: -9.8, z: 0 } }); + removeComponent(world, BodyAtRest, eid); } }); diff --git a/src/systems/hubs-systems.ts b/src/systems/hubs-systems.ts index 5ab5904a67..8f5ab0d589 100644 --- a/src/systems/hubs-systems.ts +++ b/src/systems/hubs-systems.ts @@ -78,6 +78,7 @@ import { scenePreviewCameraSystem } from "../bit-systems/scene-preview-camera-sy import { linearTransformSystem } from "../bit-systems/linear-transform"; import { mixerAnimatableSystem } from "../bit-systems/mixer-animatable"; import { loopAnimationSystem } from "../bit-systems/loop-animation"; +import { audioListenerSystem } from "../bit-systems/audio-listener-system"; declare global { interface Window { @@ -255,7 +256,8 @@ export function mainTick(xrFrame: XRFrame, renderer: WebGLRenderer, scene: Scene mediaFramesSystem(world, hubsSystems.physicsSystem); hubsSystems.audioZonesSystem.tick(hubsSystems.el); audioZoneSystem(world); - audioEmitterSystem(world, hubsSystems.audioSystem); + audioEmitterSystem(world); + audioListenerSystem(world); audioTargetSystem(world, hubsSystems.audioSystem); hubsSystems.gainSystem.tick(); hubsSystems.nameTagSystem.tick(); diff --git a/src/systems/physics-system.js b/src/systems/physics-system.js index a18ca8f724..4c8065fc11 100644 --- a/src/systems/physics-system.js +++ b/src/systems/physics-system.js @@ -159,6 +159,7 @@ export class PhysicsSystem { console.error("Physics body exists but object3D has no parent."); continue; } + if (type === TYPE.DYNAMIC) { matrix.fromArray( this.objectMatricesFloatArray, @@ -171,7 +172,10 @@ export class PhysicsSystem { object3D.matrixNeedsUpdate = true; } - object3D.updateMatrices(); + if (type !== TYPE.STATIC) { + object3D.updateMatrices(); + } + this.objectMatricesFloatArray.set( object3D.matrixWorld.elements, index * 
BUFFER_CONFIG.BODY_DATA_SIZE + BUFFER_CONFIG.MATRIX_OFFSET diff --git a/src/systems/remove-object3D-system.js b/src/systems/remove-object3D-system.js index a163834c5f..57a73d3d13 100644 --- a/src/systems/remove-object3D-system.js +++ b/src/systems/remove-object3D-system.js @@ -1,6 +1,5 @@ import { defineQuery, exitQuery, hasComponent, removeEntity } from "bitecs"; import { - AudioEmitter, EnvironmentSettings, GLTFModel, LightTag, @@ -22,7 +21,6 @@ import { gltfCache } from "../components/gltf-model-plus"; import { releaseTextureByKey } from "../utils/load-texture"; import { disposeMaterial, traverseSome, disposeNode } from "../utils/three-utils"; import { forEachMaterial } from "../utils/material-utils"; -import { cleanupAudio } from "../bit-systems/audio-emitter-system"; import { cleanupAudioDebugNavMesh } from "../bit-systems/audio-debug-system"; import { cleanupMediaFrame } from "./bit-media-frames"; @@ -53,7 +51,6 @@ const cleanupGLTFs = cleanupObjOnExit(GLTFModel, obj => { const cleanupLights = cleanupObjOnExit(LightTag, obj => obj.dispose()); const cleanupTexts = cleanupObjOnExit(TextTag, obj => obj.dispose()); const cleanupMediaFrames = cleanupObjOnExit(MediaFrame, cleanupMediaFrame); -const cleanupAudioEmitters = cleanupObjOnExit(AudioEmitter, cleanupAudio); const cleanupImages = cleanupObjOnExit(MediaImage, obj => { releaseTextureByKey(APP.getString(MediaImage.cacheKey[obj.eid])); obj.geometry.dispose(); @@ -134,7 +131,6 @@ export function removeObject3DSystem(world) { cleanupImages(world); cleanupVideos(world); cleanupEnvironmentSettings(world); - cleanupAudioEmitters(world); cleanupSkyboxes(world); cleanupSimpleWaters(world); cleanupMirrors(world); diff --git a/src/systems/sound-effects-system.js b/src/systems/sound-effects-system.js index 906594955d..234a021769 100644 --- a/src/systems/sound-effects-system.js +++ b/src/systems/sound-effects-system.js @@ -15,9 +15,9 @@ import URL_MEDIA_LOADED from "../assets/sfx/A_bendUp.mp3"; import URL_MEDIA_LOADING from "../assets/sfx/suspense.mp3"; import URL_SPAWN_EMOJI from "../assets/sfx/emoji.mp3"; import URL_SPEAKER_TONE from "../assets/sfx/tone.mp3"; -import { setMatrixWorld } from "../utils/three-utils"; import { SourceType } from "../components/audio-params"; import { getOverriddenPanningModelType } from "../update-audio-settings"; +import { isPositionalAudio, updatePannerNode } from "../bit-systems/audio-emitter-system"; let soundEnum = 0; export const SOUND_HOVER_OR_GRAB = soundEnum++; @@ -57,11 +57,11 @@ function decodeAudioData(audioContext, arrayBuffer) { export class SoundEffectsSystem { constructor(scene) { this.pendingAudioSourceNodes = []; - this.pendingPositionalAudios = []; this.positionalAudiosStationary = []; this.positionalAudiosFollowingObject3Ds = []; + this.positionalAudiosSources = new Map(); - this.audioContext = THREE.AudioContext.getContext(); + this.audioContext = APP.audioCtx; this.scene = scene; const soundsAndUrls = [ @@ -110,9 +110,9 @@ export class SoundEffectsSystem { }); }); - this.isDisabled = window.APP.store.state.preferences.disableSoundEffects; - window.APP.store.addEventListener("statechanged", () => { - const shouldBeDisabled = window.APP.store.state.preferences.disableSoundEffects; + this.isDisabled = APP.store.state.preferences.disableSoundEffects; + APP.store.addEventListener("statechanged", () => { + const shouldBeDisabled = APP.store.state.preferences.disableSoundEffects; if (shouldBeDisabled && !this.isDisabled) { this.stopAllPositionalAudios(); // TODO: Technically we should stop any other 
sounds that have been started, @@ -141,38 +141,50 @@ const audioBuffer = this.sounds.get(sound); if (!audioBuffer) return null; - const disablePositionalAudio = window.APP.store.state.preferences.disableLeftRightPanning; - const positionalAudio = disablePositionalAudio - ? new THREE.Audio(this.scene.audioListener) - : new THREE.PositionalAudio(this.scene.audioListener); - positionalAudio.setBuffer(audioBuffer); - positionalAudio.loop = loop; + const disablePositionalAudio = APP.store.state.preferences.disableLeftRightPanning; + let positionalAudio; + if (disablePositionalAudio) { + positionalAudio = APP.audioCtx.createStereoPanner(); + } else { + positionalAudio = APP.audioCtx.createPanner(); + } + const source = APP.audioCtx.createBufferSource(); + source.buffer = audioBuffer; + source.loop = loop; + source.connect(positionalAudio); if (!disablePositionalAudio) { const overriddenPanningModelType = getOverriddenPanningModelType(); if (overriddenPanningModelType !== null) { - positionalAudio.panner.panningModel = overriddenPanningModelType; + positionalAudio.panningModel = overriddenPanningModelType; } } - this.pendingPositionalAudios.push(positionalAudio); + this.pendingAudioSourceNodes.push(source); this.scene.systems["hubs-systems"].audioSystem.addAudio({ sourceType: SourceType.SFX, node: positionalAudio }); - return positionalAudio; + return { positionalAudio, source }; } playPositionalSoundAt(sound, position, loop) { - const positionalAudio = this.enqueuePositionalSound(sound, loop); + const { positionalAudio, source } = this.enqueuePositionalSound(sound, loop); if (!positionalAudio) return null; - positionalAudio.position.copy(position); - positionalAudio.matrixWorldNeedsUpdate = true; + if (positionalAudio instanceof PannerNode) { + positionalAudio.positionX.value = position.x; + positionalAudio.positionY.value = position.y; + positionalAudio.positionZ.value = position.z; + } + source.addEventListener("ended", () => this.stopPositionalAudio(positionalAudio)); this.positionalAudiosStationary.push(positionalAudio); + this.positionalAudiosSources.set(positionalAudio, source); } playPositionalSoundFollowing(sound, object3D, loop) { - const positionalAudio = this.enqueuePositionalSound(sound, loop); + const { positionalAudio, source } = this.enqueuePositionalSound(sound, loop); if (!positionalAudio) return null; + source.addEventListener("ended", () => this.stopPositionalAudio(positionalAudio)); this.positionalAudiosFollowingObject3Ds.push({ positionalAudio, object3D }); + this.positionalAudiosSources.set(positionalAudio, source); return positionalAudio; } @@ -203,24 +215,26 @@ const index = this.pendingAudioSourceNodes.indexOf(node); if (index !== -1) { this.pendingAudioSourceNodes.splice(index, 1); - } else { - node.stop(); - this.scene.systems["hubs-systems"].audioSystem.removeAudio({ node }); } + node.stop(); + this.scene.systems["hubs-systems"].audioSystem.removeAudio({ node }); } stopPositionalAudio(inPositionalAudio) { - const pendingIndex = this.pendingPositionalAudios.indexOf(inPositionalAudio); - if (pendingIndex !== -1) { - this.pendingPositionalAudios.splice(pendingIndex, 1); - } else { - if (inPositionalAudio.isPlaying) { - inPositionalAudio.stop(); - } - if (inPositionalAudio.parent) { - inPositionalAudio.removeFromParent(); - } + const source = this.positionalAudiosSources.get(inPositionalAudio); + if (source) { + source.stop(); + source.disconnect(); } + let index = 
this.positionalAudiosStationary.indexOf(inPositionalAudio); + if (index !== -1) { + this.positionalAudiosStationary.splice(index, 1); + } + index = this.positionalAudiosFollowingObject3Ds.indexOf(inPositionalAudio); + if (index !== -1) { + this.positionalAudiosFollowingObject3Ds.splice(index, 1); + } + this.positionalAudiosSources.delete(inPositionalAudio); this.positionalAudiosStationary = this.positionalAudiosStationary.filter( positionalAudio => positionalAudio !== inPositionalAudio ); @@ -249,33 +263,17 @@ } for (let i = 0; i < this.pendingAudioSourceNodes.length; i++) { - this.pendingAudioSourceNodes[i].start(); + const source = this.pendingAudioSourceNodes[i]; + source.start(); } this.pendingAudioSourceNodes.length = 0; - for (let i = 0; i < this.pendingPositionalAudios.length; i++) { - const pendingPositionalAudio = this.pendingPositionalAudios[i]; - this.scene.object3D.add(pendingPositionalAudio); - pendingPositionalAudio.play(); - } - this.pendingPositionalAudios.length = 0; - - for (let i = this.positionalAudiosStationary.length - 1; i >= 0; i--) { - const positionalAudio = this.positionalAudiosStationary[i]; - if (!positionalAudio.isPlaying) { - this.stopPositionalAudio(positionalAudio); - } - } - for (let i = this.positionalAudiosFollowingObject3Ds.length - 1; i >= 0; i--) { const positionalAudioAndObject3D = this.positionalAudiosFollowingObject3Ds[i]; const positionalAudio = positionalAudioAndObject3D.positionalAudio; const object3D = positionalAudioAndObject3D.object3D; - if (!positionalAudio.isPlaying || !object3D.parent) { - this.stopPositionalAudio(positionalAudio); - } else { - object3D.updateMatrices(); - setMatrixWorld(positionalAudio, object3D.matrixWorld); + if (isPositionalAudio(positionalAudio)) { + updatePannerNode(positionalAudio, object3D); } } } diff --git a/src/update-audio-settings.js b/src/update-audio-settings.js index 508d84f3e2..ee63cbb3cd 100644 --- a/src/update-audio-settings.js +++ b/src/update-audio-settings.js @@ -1,13 +1,14 @@ import { addComponent } from "bitecs"; import { AudioSettingsChanged } from "./bit-components"; import { - AudioType, SourceType, PanningModelType, MediaAudioDefaults, AvatarAudioDefaults, - TargetAudioDefaults + TargetAudioDefaults, + AudioType } from "./components/audio-params"; +import { isPositionalAudio, swapAudioType, updatePannerNode } from "./bit-systems/audio-emitter-system"; const defaultSettingsForSourceType = Object.freeze( new Map([ @@ -17,18 +18,20 @@ ]) ); -export function applySettings(audio, settings) { - if (audio.panner) { - audio.setDistanceModel(settings.distanceModel); - audio.setRolloffFactor(settings.rolloffFactor); - audio.setRefDistance(settings.refDistance); - audio.setMaxDistance(settings.maxDistance); - audio.panner.panningModel = settings.panningModel; - audio.panner.coneInnerAngle = settings.coneInnerAngle; - audio.panner.coneOuterAngle = settings.coneOuterAngle; - audio.panner.coneOuterGain = settings.coneOuterGain; +export function applySettings(elOrEid, settings) { + const audio = APP.audios.get(elOrEid); + if (isPositionalAudio(audio)) { + audio.distanceModel = settings.distanceModel; + audio.rolloffFactor = settings.rolloffFactor; + audio.refDistance = settings.refDistance; + audio.maxDistance = settings.maxDistance; + audio.panningModel = settings.panningModel; + 
audio.coneInnerAngle = settings.coneInnerAngle; + audio.coneOuterAngle = settings.coneOuterAngle; + audio.coneOuterGain = settings.coneOuterGain; } - audio.gain.gain.setTargetAtTime(settings.gain, audio.context.currentTime, 0.1); + const gain = APP.gains.get(elOrEid); + gain.gain.setTargetAtTime(settings.gain, audio.context.currentTime, 0.1); } export function getOverriddenPanningModelType() { @@ -97,7 +100,7 @@ export function getCurrentAudioSettingsForSourceType(sourceType) { } // Follow these rules and you'll have a good time: -// - If a THREE.Audio or THREE.PositionalAudio is created, call this function. +// - If a PannerNode or Stereo audio is created, call this function. // - If audio settings change, call this function. export function updateAudioSettings(elOrEid, audio) { if (!elOrEid.isEntity) { @@ -107,19 +110,24 @@ export function updateAudioSettings(elOrEid, audio) { const el = elOrEid; const settings = getCurrentAudioSettings(el); if ( - (audio.panner === undefined && settings.audioType === AudioType.PannerNode) || - (audio.panner !== undefined && settings.audioType === AudioType.Stereo) + (!isPositionalAudio(audio) && settings.audioType === AudioType.PannerNode) || + (isPositionalAudio(audio) && settings.audioType === AudioType.Stereo) ) { - el.emit("audio_type_changed"); + swapAudioType(elOrEid); + audio = APP.audios.get(elOrEid); + if (isPositionalAudio(audio)) { + const obj = APP.world.eid2obj.get(elOrEid.isEntity ? elOrEid.eid : elOrEid); + updatePannerNode(audio, obj); + } } - applySettings(audio, settings); + applySettings(elOrEid, settings); } } export function shouldAddSupplementaryAttenuation(el, audio) { // Never add supplemental attenuation to audios that have a panner node; // The panner node adds its own attenuation. - if (audio.panner) return false; + if (isPositionalAudio(audio)) return false; // This function must distinguish between Audios that are "incidentally" // not PositionalAudios from Audios that are "purposefully" not PositionalAudios: diff --git a/src/utils/audio-normalizer.js b/src/utils/audio-normalizer.js index 73497938fe..4e52ed2747 100644 --- a/src/utils/audio-normalizer.js +++ b/src/utils/audio-normalizer.js @@ -2,17 +2,21 @@ // Analyses audio source volume and adjusts gain value // to make it in a certain range. export class AudioNormalizer { - constructor(audio) { - this.audio = audio; - this.analyser = audio.context.createAnalyser(); - this.connected = false; + constructor(elOrEid) { + this.audio = APP.audios.get(elOrEid); + this.audioGain = APP.gains.get(elOrEid); + + this.analyser = APP.audioCtx.createAnalyser(); // To analyse volume, 32 fftsize may be good enough this.analyser.fftSize = 32; - this.gain = audio.context.createGain(); + this.gain = APP.audioCtx.createGain(); this.timeData = new Uint8Array(this.analyser.frequencyBinCount); this.volumes = []; this.volumeSum = 0; + + this.analyser.connect(this.gain); + this.connected = false; } apply() { @@ -59,31 +63,20 @@ export class AudioNormalizer { } connect() { - // Hacks. THREE.Audio connects audio nodes when source is set. - // If audio is not played yet, THREE.Audio.setFilters() doesn't - // reset connections. Then manually caling .connect()/disconnect() here. - // This might be a bug of Three.js and should be fixed in Three.js side? 
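// Illustrative sketch of how the rewritten AudioNormalizer splices into the new audio graph,
// assuming the APP.audios / APP.gains maps introduced by this change: the emitter node is routed
// through analyser -> normalizer gain -> per-emitter GainNode instead of THREE.Audio filters.
// The function name here is hypothetical; it mirrors what the constructor and connect() above do.
function sketchConnectNormalizer(elOrEid) {
  const audio = APP.audios.get(elOrEid); // PannerNode or StereoPannerNode
  const audioGain = APP.gains.get(elOrEid); // per-emitter GainNode
  const analyser = APP.audioCtx.createAnalyser();
  analyser.fftSize = 32; // a small FFT is enough for volume analysis
  const normalizerGain = APP.audioCtx.createGain();
  audio.disconnect(); // detach the direct audio -> audioGain connection
  audio.connect(analyser);
  analyser.connect(normalizerGain);
  normalizerGain.connect(audioGain); // reattach through the analysis chain
  return { analyser, normalizerGain };
}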
- if (this.audio.source && !this.audio.isPlaying) { + if (!this.connected) { this.audio.disconnect(); + this.audio.connect(this.analyser); + this.gain.connect(this.audioGain); + this.connected = true; } - const filters = this.audio.getFilters(); - filters.unshift(this.analyser, this.gain); - this.audio.setFilters(filters); - if (this.audio.source && !this.audio.isPlaying) { - this.audio.connect(); - } - this.connected = true; } disconnect() { - if (this.audio.source && !this.audio.isPlaying) { + if (this.connected) { + this.gain.disconnect(); this.audio.disconnect(); + this.audio.connect(this.audioGain); + this.connected = false; } - const filters = [this.analyser, this.gain]; - this.audio.setFilters(this.audio.getFilters().filter(filter => !filters.includes(filter))); - if (this.audio.source && !this.audio.isPlaying) { - this.audio.connect(); - } - this.connected = false; } } diff --git a/src/utils/render-target-recorder.js b/src/utils/render-target-recorder.js index 754c8e27a3..3b79b31d85 100644 --- a/src/utils/render-target-recorder.js +++ b/src/utils/render-target-recorder.js @@ -23,7 +23,7 @@ function blitFramebuffer(renderer, src, srcX0, srcY0, srcX1, srcY1, dest, dstX0, } } const createBlankAudioTrack = () => { - const context = THREE.AudioContext.getContext(); + const context = APP.audioCtx; const oscillator = context.createOscillator(); const gain = context.createGain(); const destination = context.createMediaStreamDestination(); diff --git a/types/three.d.ts b/types/three.d.ts index 39c6fd7838..9885927cc4 100644 --- a/types/three.d.ts +++ b/types/three.d.ts @@ -4,6 +4,7 @@ import { Object3D, Mesh, WebGLRenderer, Scene, Camera } from "three"; declare module "three" { interface Object3D { matrixNeedsUpdate: boolean; + matrixIsModified: boolean; childrenNeedMatrixWorldUpdate: boolean; eid?: number; el?: AElement;
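// Hypothetical sketch of the audio-listener-system referenced from hubs-systems.ts above; its
// actual implementation is not shown in this diff. Given the AudioListenerTag entity and the raw
// AudioContext this change introduces, such a system would plausibly copy the tagged Object3D's
// world transform onto AudioContext.listener once per frame, replacing the matrixWorld sync that
// THREE.AudioListener previously performed. Import paths mirror the other bit-systems files.
import { AudioListenerTag } from "../bit-components";
import { anyEntityWith } from "../utils/bit-utils";
import { Quaternion, Vector3 } from "three";

const position = new Vector3();
const rotation = new Quaternion();
const scale = new Vector3();
const forward = new Vector3();
const up = new Vector3();

export function audioListenerSystemSketch(world) {
  const listenerEid = anyEntityWith(world, AudioListenerTag);
  const obj = listenerEid !== null && world.eid2obj.get(listenerEid);
  if (!obj) return;

  obj.updateMatrices();
  obj.matrixWorld.decompose(position, rotation, scale);
  forward.set(0, 0, -1).applyQuaternion(rotation); // cameras look down -Z
  up.set(0, 1, 0).applyQuaternion(rotation);

  const listener = APP.audioCtx.listener;
  if (listener.positionX) {
    // AudioParam-based listener API (Chrome, Safari)
    listener.positionX.value = position.x;
    listener.positionY.value = position.y;
    listener.positionZ.value = position.z;
    listener.forwardX.value = forward.x;
    listener.forwardY.value = forward.y;
    listener.forwardZ.value = forward.z;
    listener.upX.value = up.x;
    listener.upY.value = up.y;
    listener.upZ.value = up.z;
  } else {
    // Fallback for browsers that only expose the older setter API (e.g. Firefox)
    listener.setPosition(position.x, position.y, position.z);
    listener.setOrientation(forward.x, forward.y, forward.z, up.x, up.y, up.z);
  }
}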