LibWeb: Implement AudioListener

This exposes BaseAudioContext.listener, which holds the AudioParams
describing the position and orientation of the listener in 3D space.
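
For reference, a minimal usage sketch of the new attribute from a page's
point of view (this mirrors the imported test below; the
OfflineAudioContext arguments are arbitrary):

const audioContext = new OfflineAudioContext(1, 5000, 44100);
const listener = audioContext.listener;

// Spec defaults: position (0, 0, 0), forward (0, 0, -1), up (0, 1, 0).
console.log(listener.positionX.value, listener.forwardZ.value, listener.upY.value);

// Every property is an a-rate AudioParam; the deprecated helpers below
// simply assign the corresponding .value fields.
listener.setPosition(-1, 2.5, -3);
listener.setOrientation(4, -5, 6, 7, -8, 9);
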
This commit is contained in:
Jelle Raaijmakers 2024-10-17 21:26:01 +02:00
parent eeee6ba3f5
commit 2a98f2a12d
Notes: github-actions[bot] 2024-10-18 07:40:03 +00:00
15 changed files with 228 additions and 1 deletion


@@ -35,6 +35,7 @@ static bool is_platform_object(Type const& type)
"AnimationTimeline"sv,
"Attr"sv,
"AudioBuffer"sv,
"AudioListener"sv,
"AudioNode"sv,
"AudioParam"sv,
"AudioScheduledSourceNode"sv,


@@ -12,6 +12,8 @@ source_set("WebAudio") {
"AudioContext.h",
"AudioDestinationNode.cpp",
"AudioDestinationNode.h",
"AudioListener.cpp",
"AudioListener.h",
"AudioNode.cpp",
"AudioNode.h",
"AudioParam.cpp",


@@ -351,6 +351,7 @@ standard_idl_files = [
"//Userland/Libraries/LibWeb/WebAudio/AudioBufferSourceNode.idl",
"//Userland/Libraries/LibWeb/WebAudio/AudioContext.idl",
"//Userland/Libraries/LibWeb/WebAudio/AudioDestinationNode.idl",
"//Userland/Libraries/LibWeb/WebAudio/AudioListener.idl",
"//Userland/Libraries/LibWeb/WebAudio/AudioNode.idl",
"//Userland/Libraries/LibWeb/WebAudio/AudioParam.idl",
"//Userland/Libraries/LibWeb/WebAudio/AudioScheduledSourceNode.idl",


@@ -0,0 +1,18 @@
[object AudioParam] current: 0, default: 0, min: -3.4028234663852886e+38, max: 3.4028234663852886e+38, rate: a-rate
[object AudioParam] current: 0, default: 0, min: -3.4028234663852886e+38, max: 3.4028234663852886e+38, rate: a-rate
[object AudioParam] current: 0, default: 0, min: -3.4028234663852886e+38, max: 3.4028234663852886e+38, rate: a-rate
[object AudioParam] current: 0, default: 0, min: -3.4028234663852886e+38, max: 3.4028234663852886e+38, rate: a-rate
[object AudioParam] current: 0, default: 0, min: -3.4028234663852886e+38, max: 3.4028234663852886e+38, rate: a-rate
[object AudioParam] current: -1, default: -1, min: -3.4028234663852886e+38, max: 3.4028234663852886e+38, rate: a-rate
[object AudioParam] current: 0, default: 0, min: -3.4028234663852886e+38, max: 3.4028234663852886e+38, rate: a-rate
[object AudioParam] current: 1, default: 1, min: -3.4028234663852886e+38, max: 3.4028234663852886e+38, rate: a-rate
[object AudioParam] current: 0, default: 0, min: -3.4028234663852886e+38, max: 3.4028234663852886e+38, rate: a-rate
[object AudioParam] current: -1, default: 0, min: -3.4028234663852886e+38, max: 3.4028234663852886e+38, rate: a-rate
[object AudioParam] current: 2.5, default: 0, min: -3.4028234663852886e+38, max: 3.4028234663852886e+38, rate: a-rate
[object AudioParam] current: -3, default: 0, min: -3.4028234663852886e+38, max: 3.4028234663852886e+38, rate: a-rate
[object AudioParam] current: 4, default: 0, min: -3.4028234663852886e+38, max: 3.4028234663852886e+38, rate: a-rate
[object AudioParam] current: -5, default: 0, min: -3.4028234663852886e+38, max: 3.4028234663852886e+38, rate: a-rate
[object AudioParam] current: 6, default: -1, min: -3.4028234663852886e+38, max: 3.4028234663852886e+38, rate: a-rate
[object AudioParam] current: 7, default: 0, min: -3.4028234663852886e+38, max: 3.4028234663852886e+38, rate: a-rate
[object AudioParam] current: -8, default: 1, min: -3.4028234663852886e+38, max: 3.4028234663852886e+38, rate: a-rate
[object AudioParam] current: 9, default: 0, min: -3.4028234663852886e+38, max: 3.4028234663852886e+38, rate: a-rate


@@ -15,6 +15,7 @@ AudioBuffer
AudioBufferSourceNode
AudioContext
AudioDestinationNode
AudioListener
AudioNode
AudioParam
AudioScheduledSourceNode


@@ -0,0 +1,29 @@
<script src="../include.js"></script>
<script>
    function dumpAudioParam(param) {
        println(`${param} current: ${param.value}, default: ${param.defaultValue}, min: ${param.minValue}, max: ${param.maxValue}, rate: ${param.automationRate}`);
    }

    function dumpListenerParams(listener) {
        dumpAudioParam(listener.positionX);
        dumpAudioParam(listener.positionY);
        dumpAudioParam(listener.positionZ);
        dumpAudioParam(listener.forwardX);
        dumpAudioParam(listener.forwardY);
        dumpAudioParam(listener.forwardZ);
        dumpAudioParam(listener.upX);
        dumpAudioParam(listener.upY);
        dumpAudioParam(listener.upZ);
    }

    test(() => {
        const audioContext = new OfflineAudioContext(1, 5000, 44100);
        dumpListenerParams(audioContext.listener);

        // Modify params
        audioContext.listener.setPosition(-1, 2.5, -3);
        audioContext.listener.setOrientation(4, -5, 6, 7, -8, 9);

        dumpListenerParams(audioContext.listener);
    });
</script>


@@ -725,6 +725,7 @@ set(SOURCES
WebAudio/AudioBufferSourceNode.cpp
WebAudio/AudioContext.cpp
WebAudio/AudioDestinationNode.cpp
WebAudio/AudioListener.cpp
WebAudio/AudioNode.cpp
WebAudio/AudioParam.cpp
WebAudio/AudioScheduledSourceNode.cpp


@@ -768,6 +768,7 @@ class AudioBuffer;
class AudioBufferSourceNode;
class AudioContext;
class AudioDestinationNode;
class AudioListener;
class AudioNode;
class AudioParam;
class AudioScheduledSourceNode;


@@ -0,0 +1,94 @@
/*
 * Copyright (c) 2024, Jelle Raaijmakers <jelle@ladybird.org>
 *
 * SPDX-License-Identifier: BSD-2-Clause
 */

#include <LibJS/Heap/CellAllocator.h>
#include <LibWeb/Bindings/Intrinsics.h>
#include <LibWeb/WebAudio/AudioListener.h>

namespace Web::WebAudio {

JS_DEFINE_ALLOCATOR(AudioListener);

AudioListener::AudioListener(JS::Realm& realm)
    : Bindings::PlatformObject(realm)
    , m_forward_x(AudioParam::create(realm, 0.f, NumericLimits<float>::lowest(), NumericLimits<float>::max(), Bindings::AutomationRate::ARate))
    , m_forward_y(AudioParam::create(realm, 0.f, NumericLimits<float>::lowest(), NumericLimits<float>::max(), Bindings::AutomationRate::ARate))
    , m_forward_z(AudioParam::create(realm, -1.f, NumericLimits<float>::lowest(), NumericLimits<float>::max(), Bindings::AutomationRate::ARate))
    , m_position_x(AudioParam::create(realm, 0.f, NumericLimits<float>::lowest(), NumericLimits<float>::max(), Bindings::AutomationRate::ARate))
    , m_position_y(AudioParam::create(realm, 0.f, NumericLimits<float>::lowest(), NumericLimits<float>::max(), Bindings::AutomationRate::ARate))
    , m_position_z(AudioParam::create(realm, 0.f, NumericLimits<float>::lowest(), NumericLimits<float>::max(), Bindings::AutomationRate::ARate))
    , m_up_x(AudioParam::create(realm, 0.f, NumericLimits<float>::lowest(), NumericLimits<float>::max(), Bindings::AutomationRate::ARate))
    , m_up_y(AudioParam::create(realm, 1.f, NumericLimits<float>::lowest(), NumericLimits<float>::max(), Bindings::AutomationRate::ARate))
    , m_up_z(AudioParam::create(realm, 0.f, NumericLimits<float>::lowest(), NumericLimits<float>::max(), Bindings::AutomationRate::ARate))
{
}

JS::NonnullGCPtr<AudioListener> AudioListener::create(JS::Realm& realm)
{
    return realm.vm().heap().allocate<AudioListener>(realm, realm);
}

AudioListener::~AudioListener() = default;

// https://webaudio.github.io/web-audio-api/#dom-audiolistener-setposition
WebIDL::ExceptionOr<void> AudioListener::set_position(float x, float y, float z)
{
    // This method is DEPRECATED. It is equivalent to setting positionX.value, positionY.value, and
    // positionZ.value directly with the given x, y, and z values, respectively.

    // FIXME: Consequently, if any of the positionX, positionY, and positionZ AudioParams for this
    // AudioListener have an automation curve set using setValueCurveAtTime() at the time this
    // method is called, a NotSupportedError MUST be thrown.

    m_position_x->set_value(x);
    m_position_y->set_value(y);
    m_position_z->set_value(z);

    return {};
}

// https://webaudio.github.io/web-audio-api/#dom-audiolistener-setorientation
WebIDL::ExceptionOr<void> AudioListener::set_orientation(float x, float y, float z, float x_up, float y_up, float z_up)
{
    // This method is DEPRECATED. It is equivalent to setting forwardX.value, forwardY.value,
    // forwardZ.value, upX.value, upY.value, and upZ.value directly with the given x, y, z, xUp,
    // yUp, and zUp values, respectively.

    // FIXME: Consequently, if any of the forwardX, forwardY, forwardZ, upX, upY and upZ
    // AudioParams have an automation curve set using setValueCurveAtTime() at the time this
    // method is called, a NotSupportedError MUST be thrown.

    m_forward_x->set_value(x);
    m_forward_y->set_value(y);
    m_forward_z->set_value(z);
    m_up_x->set_value(x_up);
    m_up_y->set_value(y_up);
    m_up_z->set_value(z_up);

    return {};
}

void AudioListener::initialize(JS::Realm& realm)
{
    Base::initialize(realm);
    WEB_SET_PROTOTYPE_FOR_INTERFACE(AudioListener);
}

void AudioListener::visit_edges(Cell::Visitor& visitor)
{
    Base::visit_edges(visitor);
    visitor.visit(m_forward_x);
    visitor.visit(m_forward_y);
    visitor.visit(m_forward_z);
    visitor.visit(m_position_x);
    visitor.visit(m_position_y);
    visitor.visit(m_position_z);
    visitor.visit(m_up_x);
    visitor.visit(m_up_y);
    visitor.visit(m_up_z);
}

}


@@ -0,0 +1,56 @@
/*
 * Copyright (c) 2024, Jelle Raaijmakers <jelle@ladybird.org>
 *
 * SPDX-License-Identifier: BSD-2-Clause
 */

#pragma once

#include <LibJS/Forward.h>
#include <LibWeb/Bindings/AudioListenerPrototype.h>
#include <LibWeb/Bindings/PlatformObject.h>
#include <LibWeb/WebAudio/AudioParam.h>
#include <LibWeb/WebIDL/ExceptionOr.h>

namespace Web::WebAudio {

// https://webaudio.github.io/web-audio-api/#AudioListener
class AudioListener final : public Bindings::PlatformObject {
    WEB_PLATFORM_OBJECT(AudioListener, Bindings::PlatformObject);
    JS_DECLARE_ALLOCATOR(AudioListener);

public:
    static JS::NonnullGCPtr<AudioListener> create(JS::Realm&);
    virtual ~AudioListener() override;

    JS::NonnullGCPtr<AudioParam> forward_x() const { return m_forward_x; }
    JS::NonnullGCPtr<AudioParam> forward_y() const { return m_forward_y; }
    JS::NonnullGCPtr<AudioParam> forward_z() const { return m_forward_z; }
    JS::NonnullGCPtr<AudioParam> position_x() const { return m_position_x; }
    JS::NonnullGCPtr<AudioParam> position_y() const { return m_position_y; }
    JS::NonnullGCPtr<AudioParam> position_z() const { return m_position_z; }
    JS::NonnullGCPtr<AudioParam> up_x() const { return m_up_x; }
    JS::NonnullGCPtr<AudioParam> up_y() const { return m_up_y; }
    JS::NonnullGCPtr<AudioParam> up_z() const { return m_up_z; }

    WebIDL::ExceptionOr<void> set_position(float x, float y, float z);
    WebIDL::ExceptionOr<void> set_orientation(float x, float y, float z, float x_up, float y_up, float z_up);

private:
    explicit AudioListener(JS::Realm&);

    virtual void initialize(JS::Realm&) override;
    virtual void visit_edges(Cell::Visitor&) override;

    JS::NonnullGCPtr<AudioParam> m_forward_x;
    JS::NonnullGCPtr<AudioParam> m_forward_y;
    JS::NonnullGCPtr<AudioParam> m_forward_z;
    JS::NonnullGCPtr<AudioParam> m_position_x;
    JS::NonnullGCPtr<AudioParam> m_position_y;
    JS::NonnullGCPtr<AudioParam> m_position_z;
    JS::NonnullGCPtr<AudioParam> m_up_x;
    JS::NonnullGCPtr<AudioParam> m_up_y;
    JS::NonnullGCPtr<AudioParam> m_up_z;
};

}


@@ -0,0 +1,15 @@
// https://webaudio.github.io/web-audio-api/#AudioListener
[Exposed=Window]
interface AudioListener {
    readonly attribute AudioParam positionX;
    readonly attribute AudioParam positionY;
    readonly attribute AudioParam positionZ;
    readonly attribute AudioParam forwardX;
    readonly attribute AudioParam forwardY;
    readonly attribute AudioParam forwardZ;
    readonly attribute AudioParam upX;
    readonly attribute AudioParam upY;
    readonly attribute AudioParam upZ;
    undefined setPosition (float x, float y, float z);
    undefined setOrientation (float x, float y, float z, float xUp, float yUp, float zUp);
};


@@ -28,6 +28,7 @@ BaseAudioContext::BaseAudioContext(JS::Realm& realm, float sample_rate)
    : DOM::EventTarget(realm)
    , m_destination(AudioDestinationNode::construct_impl(realm, *this))
    , m_sample_rate(sample_rate)
    , m_listener(AudioListener::create(realm))
{
}

@@ -44,6 +45,7 @@ void BaseAudioContext::visit_edges(Cell::Visitor& visitor)
    Base::visit_edges(visitor);
    visitor.visit(m_destination);
    visitor.visit(m_pending_promises);
    visitor.visit(m_listener);
}

void BaseAudioContext::set_onstatechange(WebIDL::CallbackType* event_handler)


@@ -10,6 +10,7 @@
#include <LibWeb/Bindings/BaseAudioContextPrototype.h>
#include <LibWeb/DOM/EventTarget.h>
#include <LibWeb/WebAudio/AudioListener.h>
#include <LibWeb/WebAudio/BiquadFilterNode.h>
#include <LibWeb/WebIDL/Types.h>

@@ -38,6 +39,7 @@ public:
    JS::NonnullGCPtr<AudioDestinationNode> destination() const { return m_destination; }
    float sample_rate() const { return m_sample_rate; }
    double current_time() const { return m_current_time; }
    JS::NonnullGCPtr<AudioListener> listener() const { return m_listener; }
    Bindings::AudioContextState state() const { return m_control_thread_state; }

    // https://webaudio.github.io/web-audio-api/#--nyquist-frequency

@@ -78,6 +80,8 @@ private:
    float m_sample_rate { 0 };
    double m_current_time { 0 };
    JS::NonnullGCPtr<AudioListener> m_listener;

    Bindings::AudioContextState m_control_thread_state = Bindings::AudioContextState::Suspended;
    Bindings::AudioContextState m_rendering_thread_state = Bindings::AudioContextState::Suspended;


@@ -3,6 +3,7 @@
#import <WebAudio/AudioBuffer.idl>
#import <WebAudio/AudioBufferSourceNode.idl>
#import <WebAudio/AudioDestinationNode.idl>
#import <WebAudio/AudioListener.idl>
#import <WebAudio/DynamicsCompressorNode.idl>
#import <WebAudio/GainNode.idl>
#import <WebAudio/OscillatorNode.idl>

@@ -21,7 +22,7 @@ interface BaseAudioContext : EventTarget {
    readonly attribute AudioDestinationNode destination;
    readonly attribute float sampleRate;
    readonly attribute double currentTime;
    [FIXME] readonly attribute AudioListener listener;
    readonly attribute AudioListener listener;
    readonly attribute AudioContextState state;
    // FIXME: [SameObject, SecureContext]
    [FIXME] readonly attribute AudioWorklet audioWorklet;


@@ -337,6 +337,7 @@ libweb_js_bindings(WebAudio/AudioBuffer)
libweb_js_bindings(WebAudio/AudioBufferSourceNode)
libweb_js_bindings(WebAudio/AudioContext)
libweb_js_bindings(WebAudio/AudioDestinationNode)
libweb_js_bindings(WebAudio/AudioListener)
libweb_js_bindings(WebAudio/AudioNode)
libweb_js_bindings(WebAudio/AudioParam)
libweb_js_bindings(WebAudio/AudioScheduledSourceNode)