WebAudio: Stub AudioDestinationNode

And expose it through `BaseAudioContext::destination`
bbb651 2024-07-27 16:38:09 +03:00 committed by Andreas Kling
parent 9c17398429
commit 779e3072f9
Notes: github-actions[bot] 2024-07-28 19:42:11 +00:00
8 changed files with 34 additions and 19 deletions
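For orientation, here is a minimal sketch (not part of the commit) of how LibWeb-internal code could use what this change wires up. The helper function example_use and its caller are hypothetical; destination(), construct_impl(), and max_channel_count() are the members touched by the diff below.

#include <LibWeb/WebAudio/AudioDestinationNode.h>
#include <LibWeb/WebAudio/BaseAudioContext.h>

// Hypothetical caller, for illustration only: any code holding a
// BaseAudioContext (e.g. an AudioContext) can now reach the destination node.
static void example_use(Web::WebAudio::BaseAudioContext& context)
{
    // After this commit the destination node is allocated eagerly in the
    // BaseAudioContext constructor via AudioDestinationNode::construct_impl(),
    // so the getter is a plain member read rather than a lazy allocation.
    auto destination = context.destination();

    // Still a stub: logs a FIXME and always reports 2 channels.
    auto max_channel_count = destination->max_channel_count();
    (void)max_channel_count;
}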

@@ -10,6 +10,8 @@ source_set("WebAudio") {
   "AudioBufferSourceNode.h",
   "AudioContext.cpp",
   "AudioContext.h",
+  "AudioDestinationNode.cpp",
+  "AudioDestinationNode.h",
   "AudioNode.cpp",
   "AudioNode.h",
   "AudioParam.cpp",

@@ -321,6 +321,7 @@ standard_idl_files = [
   "//Userland/Libraries/LibWeb/WebAudio/AudioBuffer.idl",
   "//Userland/Libraries/LibWeb/WebAudio/AudioBufferSourceNode.idl",
   "//Userland/Libraries/LibWeb/WebAudio/AudioContext.idl",
+  "//Userland/Libraries/LibWeb/WebAudio/AudioDestinationNode.idl",
   "//Userland/Libraries/LibWeb/WebAudio/AudioNode.idl",
   "//Userland/Libraries/LibWeb/WebAudio/AudioParam.idl",
   "//Userland/Libraries/LibWeb/WebAudio/AudioScheduledSourceNode.idl",

@@ -718,6 +718,7 @@ class AudioBuffer;
 class AudioBufferSourceNode;
 class AudioDestinationNode;
 class AudioContext;
+class AudioDestinationNode;
 class AudioNode;
 class AudioParam;
 class AudioScheduledSourceNode;

@@ -1,13 +1,15 @@
 /*
  * Copyright (c) 2024, Shannon Booth <shannon@serenityos.org>
+ * Copyright (c) 2024, Bar Yemini <bar.ye651@gmail.com>
  *
  * SPDX-License-Identifier: BSD-2-Clause
  */
 
 #include <LibWeb/Bindings/AudioDestinationNodePrototype.h>
 #include <LibWeb/Bindings/Intrinsics.h>
-#include <LibWeb/HTML/EventNames.h>
 #include <LibWeb/WebAudio/AudioDestinationNode.h>
+#include <LibWeb/WebAudio/AudioNode.h>
+#include <LibWeb/WebAudio/BaseAudioContext.h>
 
 namespace Web::WebAudio {
@@ -20,6 +22,18 @@ AudioDestinationNode::AudioDestinationNode(JS::Realm& realm, JS::NonnullGCPtr<Ba
 
 AudioDestinationNode::~AudioDestinationNode() = default;
 
+// https://webaudio.github.io/web-audio-api/#dom-audiodestinationnode-maxchannelcount
+WebIDL::UnsignedLong AudioDestinationNode::max_channel_count()
+{
+    dbgln("FIXME: Implement Audio::DestinationNode::max_channel_count()");
+    return 2;
+}
+
+JS::NonnullGCPtr<AudioDestinationNode> AudioDestinationNode::construct_impl(JS::Realm& realm, JS::NonnullGCPtr<BaseAudioContext> context)
+{
+    return realm.heap().allocate<AudioDestinationNode>(realm, realm, context);
+}
+
 void AudioDestinationNode::initialize(JS::Realm& realm)
 {
     Base::initialize(realm);

@@ -1,12 +1,16 @@
 /*
  * Copyright (c) 2024, Shannon Booth <shannon@serenityos.org>
+ * Copyright (c) 2024, Bar Yemini <bar.ye651@gmail.com>
  *
  * SPDX-License-Identifier: BSD-2-Clause
  */
 
 #pragma once
 
+#include <LibWeb/Bindings/AudioDestinationNodePrototype.h>
 #include <LibWeb/WebAudio/AudioNode.h>
+#include <LibWeb/WebAudio/BaseAudioContext.h>
+#include <LibWeb/WebIDL/Types.h>
 
 namespace Web::WebAudio {
@@ -18,6 +22,10 @@ class AudioDestinationNode : public AudioNode {
 public:
     virtual ~AudioDestinationNode() override;
 
+    WebIDL::UnsignedLong max_channel_count();
+
+    static JS::NonnullGCPtr<AudioDestinationNode> construct_impl(JS::Realm&, JS::NonnullGCPtr<BaseAudioContext>);
+
 protected:
     AudioDestinationNode(JS::Realm&, JS::NonnullGCPtr<BaseAudioContext>);

@@ -3,5 +3,5 @@
 // https://webaudio.github.io/web-audio-api/#AudioDestinationNode
 [Exposed=Window]
 interface AudioDestinationNode : AudioNode {
-    [FIXME] readonly attribute unsigned long maxChannelCount;
+    readonly attribute unsigned long maxChannelCount;
 };

@@ -21,6 +21,7 @@ namespace Web::WebAudio {
 BaseAudioContext::BaseAudioContext(JS::Realm& realm, float sample_rate)
     : DOM::EventTarget(realm)
+    , m_destination(AudioDestinationNode::construct_impl(realm, *this))
     , m_sample_rate(sample_rate)
 {
 }
@@ -39,18 +40,6 @@ void BaseAudioContext::visit_edges(Cell::Visitor& visitor)
     visitor.visit(m_destination);
 }
 
-// https://webaudio.github.io/web-audio-api/#dom-baseaudiocontext-destination
-JS::NonnullGCPtr<AudioDestinationNode> BaseAudioContext::destination()
-{
-    auto& realm = this->realm();
-
-    dbgln("FIXME: Properly implement BaseAudioContext::destination");
-
-    if (!m_destination)
-        m_destination = realm.heap().allocate<AudioDestinationNode>(realm, realm, *this);
-    return *m_destination;
-}
-
 void BaseAudioContext::set_onstatechange(WebIDL::CallbackType* event_handler)
 {
     set_event_handler_attribute(HTML::EventNames::statechange, event_handler);

@@ -9,6 +9,7 @@
 #include <LibWeb/Bindings/BaseAudioContextPrototype.h>
 #include <LibWeb/DOM/EventTarget.h>
+#include <LibWeb/WebAudio/AudioDestinationNode.h>
 #include <LibWeb/WebAudio/BiquadFilterNode.h>
 #include <LibWeb/WebIDL/Types.h>
@@ -32,6 +33,7 @@ public:
     static constexpr float MIN_SAMPLE_RATE { 8000 };
     static constexpr float MAX_SAMPLE_RATE { 192000 };
 
+    JS::NonnullGCPtr<AudioDestinationNode> destination() const { return m_destination; }
     float sample_rate() const { return m_sample_rate; }
     double current_time() const { return m_current_time; }
     Bindings::AudioContextState state() const { return m_control_thread_state; }
@@ -55,14 +57,13 @@ public:
     WebIDL::ExceptionOr<JS::NonnullGCPtr<DynamicsCompressorNode>> create_dynamics_compressor();
     JS::NonnullGCPtr<GainNode> create_gain();
 
-    JS::NonnullGCPtr<AudioDestinationNode> destination();
-
 protected:
     explicit BaseAudioContext(JS::Realm&, float m_sample_rate = 0);
 
+    JS::NonnullGCPtr<AudioDestinationNode> m_destination;
+
     virtual void initialize(JS::Realm&) override;
-    virtual void visit_edges(Cell::Visitor& visitor) override;
+    virtual void visit_edges(Cell::Visitor&) override;
 
 private:
     float m_sample_rate { 0 };
@@ -70,7 +71,6 @@ private:
     Bindings::AudioContextState m_control_thread_state = Bindings::AudioContextState::Suspended;
     Bindings::AudioContextState m_rendering_thread_state = Bindings::AudioContextState::Suspended;
 
-    JS::GCPtr<AudioDestinationNode> m_destination;
 };
 
 }