/src/mozilla-central/dom/media/webaudio/AudioParam.h
Line | Count | Source (jump to first uncovered line) |
1 | | /* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */ |
2 | | /* vim:set ts=2 sw=2 sts=2 et cindent: */ |
3 | | /* This Source Code Form is subject to the terms of the Mozilla Public |
4 | | * License, v. 2.0. If a copy of the MPL was not distributed with this |
5 | | * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ |
6 | | |
7 | | #ifndef AudioParam_h_ |
8 | | #define AudioParam_h_ |
9 | | |
#include "AudioParamTimeline.h"
#include "nsWrapperCache.h"
#include "nsCycleCollectionParticipant.h"
#include "AudioNode.h"
#include "mozilla/dom/TypedArray.h"
#include "WebAudioUtils.h"
#include "js/TypeDecls.h"

#include <algorithm>  // std::max (event-time clamping below)
#include <limits>     // std::numeric_limits (default min/max param values)
17 | | |
18 | | namespace mozilla { |
19 | | |
20 | | namespace dom { |
21 | | |
22 | | class AudioParam final : public nsWrapperCache, |
23 | | public AudioParamTimeline |
24 | | { |
25 | | virtual ~AudioParam(); |
26 | | |
27 | | public: |
28 | | AudioParam(AudioNode* aNode, |
29 | | uint32_t aIndex, |
30 | | const char* aName, |
31 | | float aDefaultValue, |
32 | | float aMinValue = std::numeric_limits<float>::lowest(), |
33 | | float aMaxValue = std::numeric_limits<float>::max()); |
34 | | |
35 | | NS_IMETHOD_(MozExternalRefCountType) AddRef(void); |
36 | | NS_IMETHOD_(MozExternalRefCountType) Release(void); |
37 | | NS_DECL_CYCLE_COLLECTION_SCRIPT_HOLDER_NATIVE_CLASS(AudioParam) |
38 | | |
39 | | AudioContext* GetParentObject() const |
40 | | { |
41 | | return mNode->Context(); |
42 | | } |
43 | | |
44 | | JSObject* WrapObject(JSContext* aCx, JS::Handle<JSObject*> aGivenProto) override; |
45 | | |
46 | | // We override SetValueCurveAtTime to convert the Float32Array to the wrapper |
47 | | // object. |
48 | | AudioParam* SetValueCurveAtTime(const nsTArray<float>& aValues, |
49 | | double aStartTime, |
50 | | double aDuration, |
51 | | ErrorResult& aRv) |
52 | | { |
53 | | if (!WebAudioUtils::IsTimeValid(aStartTime)) { |
54 | | aRv.ThrowRangeError< |
55 | | MSG_INVALID_AUDIOPARAM_METHOD_START_TIME_ERROR>(); |
56 | | return this; |
57 | | } |
58 | | aStartTime = std::max(aStartTime, GetParentObject()->CurrentTime()); |
59 | | EventInsertionHelper(aRv, AudioTimelineEvent::SetValueCurve, |
60 | | aStartTime, 0.0f, 0.0f, aDuration, aValues.Elements(), |
61 | | aValues.Length()); |
62 | | |
63 | | return this; |
64 | | } |
65 | | |
66 | | void SetValue(float aValue) |
67 | | { |
68 | | AudioTimelineEvent event(AudioTimelineEvent::SetValue, 0.0f, aValue); |
69 | | |
70 | | ErrorResult rv; |
71 | | if (!ValidateEvent(event, rv)) { |
72 | | MOZ_ASSERT(false, "This should not happen, " |
73 | | "setting the value should always work"); |
74 | | return; |
75 | | } |
76 | | |
77 | | AudioParamTimeline::SetValue(aValue); |
78 | | |
79 | | SendEventToEngine(event); |
80 | | } |
81 | | |
82 | | AudioParam* SetValueAtTime(float aValue, double aStartTime, ErrorResult& aRv) |
83 | | { |
84 | | if (!WebAudioUtils::IsTimeValid(aStartTime)) { |
85 | | aRv.ThrowRangeError< |
86 | | MSG_INVALID_AUDIOPARAM_METHOD_START_TIME_ERROR>(); |
87 | | return this; |
88 | | } |
89 | | aStartTime = std::max(aStartTime, GetParentObject()->CurrentTime()); |
90 | | EventInsertionHelper(aRv, AudioTimelineEvent::SetValueAtTime, |
91 | | aStartTime, aValue); |
92 | | |
93 | | return this; |
94 | | } |
95 | | |
96 | | AudioParam* LinearRampToValueAtTime(float aValue, double aEndTime, |
97 | | ErrorResult& aRv) |
98 | | { |
99 | | if (!WebAudioUtils::IsTimeValid(aEndTime)) { |
100 | | aRv.ThrowRangeError< |
101 | | MSG_INVALID_AUDIOPARAM_METHOD_END_TIME_ERROR>(); |
102 | | return this; |
103 | | } |
104 | | aEndTime = std::max(aEndTime, GetParentObject()->CurrentTime()); |
105 | | EventInsertionHelper(aRv, AudioTimelineEvent::LinearRamp, aEndTime, aValue); |
106 | | return this; |
107 | | } |
108 | | |
109 | | AudioParam* ExponentialRampToValueAtTime(float aValue, double aEndTime, |
110 | | ErrorResult& aRv) |
111 | | { |
112 | | if (!WebAudioUtils::IsTimeValid(aEndTime)) { |
113 | | aRv.ThrowRangeError< |
114 | | MSG_INVALID_AUDIOPARAM_METHOD_END_TIME_ERROR>(); |
115 | | return this; |
116 | | } |
117 | | aEndTime = std::max(aEndTime, GetParentObject()->CurrentTime()); |
118 | | EventInsertionHelper(aRv, AudioTimelineEvent::ExponentialRamp, |
119 | | aEndTime, aValue); |
120 | | return this; |
121 | | } |
122 | | |
123 | | AudioParam* SetTargetAtTime(float aTarget, double aStartTime, |
124 | | double aTimeConstant, ErrorResult& aRv) |
125 | | { |
126 | | if (!WebAudioUtils::IsTimeValid(aStartTime) || |
127 | | !WebAudioUtils::IsTimeValid(aTimeConstant)) { |
128 | | aRv.ThrowRangeError< |
129 | | MSG_INVALID_AUDIOPARAM_METHOD_START_TIME_ERROR>(); |
130 | | return this; |
131 | | } |
132 | | aStartTime = std::max(aStartTime, GetParentObject()->CurrentTime()); |
133 | | EventInsertionHelper(aRv, AudioTimelineEvent::SetTarget, |
134 | | aStartTime, aTarget, |
135 | | aTimeConstant); |
136 | | |
137 | | return this; |
138 | | } |
139 | | |
140 | | AudioParam* CancelScheduledValues(double aStartTime, ErrorResult& aRv) |
141 | | { |
142 | | if (!WebAudioUtils::IsTimeValid(aStartTime)) { |
143 | | aRv.ThrowRangeError< |
144 | | MSG_INVALID_AUDIOPARAM_METHOD_START_TIME_ERROR>(); |
145 | | return this; |
146 | | } |
147 | | |
148 | | aStartTime = std::max(aStartTime, GetParentObject()->CurrentTime()); |
149 | | |
150 | | // Remove some events on the main thread copy. |
151 | | AudioEventTimeline::CancelScheduledValues(aStartTime); |
152 | | |
153 | | AudioTimelineEvent event(AudioTimelineEvent::Cancel, aStartTime, 0.0f); |
154 | | |
155 | | SendEventToEngine(event); |
156 | | |
157 | | return this; |
158 | | } |
159 | | |
160 | | uint32_t ParentNodeId() |
161 | | { |
162 | | return mNode->Id(); |
163 | | } |
164 | | |
165 | | void GetName(nsAString& aName) |
166 | | { |
167 | | aName.AssignASCII(mName); |
168 | | } |
169 | | |
170 | | float DefaultValue() const |
171 | | { |
172 | | return mDefaultValue; |
173 | | } |
174 | | |
175 | | float MinValue() const |
176 | | { |
177 | | return mMinValue; |
178 | | } |
179 | | |
180 | | float MaxValue() const |
181 | | { |
182 | | return mMaxValue; |
183 | | } |
184 | | |
185 | | const nsTArray<AudioNode::InputNode>& InputNodes() const |
186 | 0 | { |
187 | 0 | return mInputNodes; |
188 | 0 | } |
189 | | |
190 | | void RemoveInputNode(uint32_t aIndex) |
191 | 0 | { |
192 | 0 | mInputNodes.RemoveElementAt(aIndex); |
193 | 0 | } |
194 | | |
195 | | AudioNode::InputNode* AppendInputNode() |
196 | 0 | { |
197 | 0 | return mInputNodes.AppendElement(); |
198 | 0 | } |
199 | | |
200 | | // May create the stream if it doesn't exist |
201 | | MediaStream* Stream(); |
202 | | |
203 | | size_t SizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const override |
204 | 0 | { |
205 | 0 | size_t amount = AudioParamTimeline::SizeOfExcludingThis(aMallocSizeOf); |
206 | 0 | // Not owned: |
207 | 0 | // - mNode |
208 | 0 |
|
209 | 0 | // Just count the array, actual nodes are counted in mNode. |
210 | 0 | amount += mInputNodes.ShallowSizeOfExcludingThis(aMallocSizeOf); |
211 | 0 |
|
212 | 0 | if (mNodeStreamPort) { |
213 | 0 | amount += mNodeStreamPort->SizeOfIncludingThis(aMallocSizeOf); |
214 | 0 | } |
215 | 0 |
|
216 | 0 | return amount; |
217 | 0 | } |
218 | | |
219 | | size_t SizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const override |
220 | 0 | { |
221 | 0 | return aMallocSizeOf(this) + SizeOfExcludingThis(aMallocSizeOf); |
222 | 0 | } |
223 | | |
224 | | private: |
225 | | void EventInsertionHelper(ErrorResult& aRv, |
226 | | AudioTimelineEvent::Type aType, |
227 | | double aTime, float aValue, |
228 | | double aTimeConstant = 0.0, |
229 | | double aDuration = 0.0, |
230 | | const float* aCurve = nullptr, |
231 | | uint32_t aCurveLength = 0) |
232 | | { |
233 | | AudioTimelineEvent event(aType, aTime, aValue, |
234 | | aTimeConstant, aDuration, aCurve, aCurveLength); |
235 | | |
236 | | if (!ValidateEvent(event, aRv)) { |
237 | | return; |
238 | | } |
239 | | |
240 | | AudioEventTimeline::InsertEvent<double>(event); |
241 | | |
242 | | SendEventToEngine(event); |
243 | | |
244 | | CleanupOldEvents(); |
245 | | } |
246 | | |
247 | | void CleanupOldEvents(); |
248 | | |
249 | | void SendEventToEngine(const AudioTimelineEvent& aEvent); |
250 | | |
251 | | void DisconnectFromGraphAndDestroyStream(); |
252 | | |
253 | | nsCycleCollectingAutoRefCnt mRefCnt; |
254 | | NS_DECL_OWNINGTHREAD |
255 | | RefPtr<AudioNode> mNode; |
256 | | // For every InputNode, there is a corresponding entry in mOutputParams of the |
257 | | // InputNode's mInputNode. |
258 | | nsTArray<AudioNode::InputNode> mInputNodes; |
259 | | const char* mName; |
260 | | // The input port used to connect the AudioParam's stream to its node's stream |
261 | | RefPtr<MediaInputPort> mNodeStreamPort; |
262 | | const uint32_t mIndex; |
263 | | const float mDefaultValue; |
264 | | const float mMinValue; |
265 | | const float mMaxValue; |
266 | | }; |
267 | | |
268 | | } // namespace dom |
269 | | } // namespace mozilla |
270 | | |
271 | | #endif |
272 | | |