/src/serenity/Userland/Libraries/LibWeb/WebAudio/AudioBufferSourceNode.cpp
Line | Count | Source |
1 | | /* |
2 | | * Copyright (c) 2024, Bar Yemini <bar.ye651@gmail.com> |
3 | | * |
4 | | * SPDX-License-Identifier: BSD-2-Clause |
5 | | */ |
6 | | |
7 | | #include <LibWeb/Bindings/AudioScheduledSourceNodePrototype.h> |
8 | | #include <LibWeb/Bindings/Intrinsics.h> |
9 | | #include <LibWeb/WebAudio/AudioBuffer.h> |
10 | | #include <LibWeb/WebAudio/AudioBufferSourceNode.h> |
11 | | #include <LibWeb/WebAudio/AudioParam.h> |
12 | | #include <LibWeb/WebAudio/AudioScheduledSourceNode.h> |
13 | | |
14 | | namespace Web::WebAudio { |
15 | | |
16 | | JS_DEFINE_ALLOCATOR(AudioBufferSourceNode); |
17 | | |
// Constructs the node from the option bag: the (possibly null) buffer, the two
// automation parameters, and the loop configuration.
AudioBufferSourceNode::AudioBufferSourceNode(JS::Realm& realm, JS::NonnullGCPtr<BaseAudioContext> context, AudioBufferSourceOptions const& options)
    : AudioScheduledSourceNode(realm, context)
    , m_buffer(options.buffer)
    // playbackRate and detune are a-rate AudioParams; the full float range is
    // used as their nominal min/max, so no clamping is applied by the param.
    , m_playback_rate(AudioParam::create(realm, options.playback_rate, NumericLimits<float>::lowest(), NumericLimits<float>::max(), Bindings::AutomationRate::ARate))
    , m_detune(AudioParam::create(realm, options.detune, NumericLimits<float>::lowest(), NumericLimits<float>::max(), Bindings::AutomationRate::ARate))
    , m_loop(options.loop)
    , m_loop_start(options.loop_start)
    , m_loop_end(options.loop_end)
{
}

AudioBufferSourceNode::~AudioBufferSourceNode() = default;
30 | | |
// https://webaudio.github.io/web-audio-api/#dom-audiobuffersourcenode-buffer
WebIDL::ExceptionOr<void> AudioBufferSourceNode::set_buffer(JS::GCPtr<AudioBuffer> buffer)
{
    // FIXME: Per the spec, assigning a non-null buffer when one has already been
    //        set (the [[buffer set]] slot) must throw an InvalidStateError; that
    //        check is not implemented here yet.
    m_buffer = buffer;
    return {};
}
37 | | |
// https://webaudio.github.io/web-audio-api/#dom-audiobuffersourcenode-buffer
// Returns the currently assigned buffer, or null if none has been set.
JS::GCPtr<AudioBuffer> AudioBufferSourceNode::buffer() const
{
    return m_buffer;
}
43 | | |
// https://webaudio.github.io/web-audio-api/#dom-audiobuffersourcenode-playbackrate
// The a-rate AudioParam controlling the speed at which the buffer is played.
JS::NonnullGCPtr<AudioParam> AudioBufferSourceNode::playback_rate() const
{
    return m_playback_rate;
}
49 | | |
// https://webaudio.github.io/web-audio-api/#dom-audiobuffersourcenode-detune
// The a-rate AudioParam expressing an additional pitch adjustment in cents.
JS::NonnullGCPtr<AudioParam> AudioBufferSourceNode::detune() const
{
    return m_detune;
}
55 | | |
// https://webaudio.github.io/web-audio-api/#dom-audiobuffersourcenode-loop
// Setter never fails; the ExceptionOr return matches the generated IDL binding signature.
WebIDL::ExceptionOr<void> AudioBufferSourceNode::set_loop(bool loop)
{
    m_loop = loop;
    return {};
}
62 | | |
// https://webaudio.github.io/web-audio-api/#dom-audiobuffersourcenode-loop
// Whether playback should restart from loopStart after reaching loopEnd.
bool AudioBufferSourceNode::loop() const
{
    return m_loop;
}
68 | | |
// https://webaudio.github.io/web-audio-api/#dom-audiobuffersourcenode-loopstart
// Stored as-is; the spec applies no range validation to loopStart.
WebIDL::ExceptionOr<void> AudioBufferSourceNode::set_loop_start(double loop_start)
{
    m_loop_start = loop_start;
    return {};
}
75 | | |
// https://webaudio.github.io/web-audio-api/#dom-audiobuffersourcenode-loopstart
// Loop-region start position, in seconds.
double AudioBufferSourceNode::loop_start() const
{
    return m_loop_start;
}
81 | | |
// https://webaudio.github.io/web-audio-api/#dom-audiobuffersourcenode-loopend
// Stored as-is; the spec applies no range validation to loopEnd.
WebIDL::ExceptionOr<void> AudioBufferSourceNode::set_loop_end(double loop_end)
{
    m_loop_end = loop_end;
    return {};
}
88 | | |
// https://webaudio.github.io/web-audio-api/#dom-audiobuffersourcenode-loopend
// Loop-region end position, in seconds.
double AudioBufferSourceNode::loop_end() const
{
    return m_loop_end;
}
94 | | |
95 | | // https://webaudio.github.io/web-audio-api/#dom-audiobuffersourcenode-start` |
96 | | WebIDL::ExceptionOr<void> AudioBufferSourceNode::start(Optional<double> when, Optional<double> offset, Optional<double> duration) |
97 | 0 | { |
98 | 0 | (void)when; |
99 | 0 | (void)offset; |
100 | 0 | (void)duration; |
101 | 0 | dbgln("FIXME: Implement AudioBufferSourceNode::start(when, offset, duration)"); |
102 | 0 | return {}; |
103 | 0 | } |
104 | | |
105 | | WebIDL::ExceptionOr<JS::NonnullGCPtr<AudioBufferSourceNode>> AudioBufferSourceNode::create(JS::Realm& realm, JS::NonnullGCPtr<BaseAudioContext> context, AudioBufferSourceOptions const& options) |
106 | 0 | { |
107 | 0 | return construct_impl(realm, context, options); |
108 | 0 | } |
109 | | |
// https://webaudio.github.io/web-audio-api/#dom-audiobuffersourcenode-audiobuffersourcenode
WebIDL::ExceptionOr<JS::NonnullGCPtr<AudioBufferSourceNode>> AudioBufferSourceNode::construct_impl(JS::Realm& realm, JS::NonnullGCPtr<BaseAudioContext> context, AudioBufferSourceOptions const& options)
{
    // When the constructor is called with a BaseAudioContext c and an option object option, the user agent
    // MUST initialize the AudioNode this, with context and options as arguments.
    auto node = realm.vm().heap().allocate<AudioBufferSourceNode>(realm, realm, context, options);
    return node;
}
119 | | |
// Standard LibJS object initialization: chain to the base class, then install
// the AudioBufferSourceNode prototype for this realm.
void AudioBufferSourceNode::initialize(JS::Realm& realm)
{
    Base::initialize(realm);
    WEB_SET_PROTOTYPE_FOR_INTERFACE(AudioBufferSourceNode);
}
125 | | |
// GC tracing: visit every GC-managed member so the buffer and AudioParams stay
// alive while this node does. (m_loop/m_loop_start/m_loop_end are plain values
// and need no visiting.)
void AudioBufferSourceNode::visit_edges(Cell::Visitor& visitor)
{
    Base::visit_edges(visitor);
    visitor.visit(m_buffer);
    visitor.visit(m_playback_rate);
    visitor.visit(m_detune);
}
133 | | |
134 | | } |