/src/mozilla-central/dom/media/webaudio/AudioContext.h
Line | Count | Source (jump to first uncovered line) |
1 | | /* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */ |
2 | | /* vim:set ts=2 sw=2 sts=2 et cindent: */ |
3 | | /* This Source Code Form is subject to the terms of the Mozilla Public |
4 | | * License, v. 2.0. If a copy of the MPL was not distributed with this |
5 | | * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ |
6 | | |
7 | | #ifndef AudioContext_h_ |
8 | | #define AudioContext_h_ |
9 | | |
10 | | #include "mozilla/dom/OfflineAudioContextBinding.h" |
11 | | #include "MediaBufferDecoder.h" |
12 | | #include "mozilla/Attributes.h" |
13 | | #include "mozilla/DOMEventTargetHelper.h" |
14 | | #include "mozilla/MemoryReporting.h" |
15 | | #include "mozilla/dom/TypedArray.h" |
16 | | #include "mozilla/RelativeTimeline.h" |
17 | | #include "mozilla/UniquePtr.h" |
18 | | #include "nsCOMPtr.h" |
19 | | #include "nsCycleCollectionParticipant.h" |
20 | | #include "nsHashKeys.h" |
21 | | #include "nsTHashtable.h" |
22 | | #include "js/TypeDecls.h" |
23 | | #include "nsIMemoryReporter.h" |
24 | | |
25 | | // X11 has a #define for CurrentTime. Unbelievable :-(. |
26 | | // See dom/media/DOMMediaStream.h for more fun! |
27 | | #ifdef CurrentTime |
28 | | #undef CurrentTime |
29 | | #endif |
30 | | |
31 | | namespace WebCore { |
32 | | class PeriodicWave; |
33 | | } // namespace WebCore |
34 | | |
35 | | class nsPIDOMWindowInner; |
36 | | |
37 | | namespace mozilla { |
38 | | |
39 | | class DOMMediaStream; |
40 | | class ErrorResult; |
41 | | class MediaStream; |
42 | | class MediaStreamGraph; |
43 | | class AudioNodeStream; |
44 | | |
45 | | namespace dom { |
46 | | |
47 | | enum class AudioContextState : uint8_t; |
48 | | class AnalyserNode; |
49 | | class AudioBuffer; |
50 | | class AudioBufferSourceNode; |
51 | | class AudioDestinationNode; |
52 | | class AudioListener; |
53 | | class AudioNode; |
54 | | class BiquadFilterNode; |
55 | | class ChannelMergerNode; |
56 | | class ChannelSplitterNode; |
57 | | class ConstantSourceNode; |
58 | | class ConvolverNode; |
59 | | class DelayNode; |
60 | | class DynamicsCompressorNode; |
61 | | class GainNode; |
62 | | class GlobalObject; |
63 | | class HTMLMediaElement; |
64 | | class IIRFilterNode; |
65 | | class MediaElementAudioSourceNode; |
66 | | class MediaStreamAudioDestinationNode; |
67 | | class MediaStreamAudioSourceNode; |
68 | | class OscillatorNode; |
69 | | class PannerNode; |
70 | | class ScriptProcessorNode; |
71 | | class StereoPannerNode; |
72 | | class WaveShaperNode; |
73 | | class Worklet; |
74 | | class PeriodicWave; |
75 | | struct PeriodicWaveConstraints; |
76 | | class Promise; |
77 | | enum class OscillatorType : uint8_t; |
78 | | |
79 | | // This is addrefed by the OscillatorNodeEngine on the main thread |
80 | | // and then used from the MSG thread. |
81 | | // It can be released either from the graph thread or the main thread. |
82 | | class BasicWaveFormCache |
83 | | { |
84 | | public: |
85 | | explicit BasicWaveFormCache(uint32_t aSampleRate); |
86 | | NS_INLINE_DECL_THREADSAFE_REFCOUNTING(BasicWaveFormCache) |
87 | | WebCore::PeriodicWave* GetBasicWaveForm(OscillatorType aType); |
88 | | private: |
89 | | ~BasicWaveFormCache(); |
90 | | RefPtr<WebCore::PeriodicWave> mSawtooth; |
91 | | RefPtr<WebCore::PeriodicWave> mSquare; |
92 | | RefPtr<WebCore::PeriodicWave> mTriangle; |
93 | | uint32_t mSampleRate; |
94 | | }; |
95 | | |
96 | | |
97 | | /* This runnable allows the MSG to notify the main thread when audio is actually |
98 | | * flowing */ |
99 | | class StateChangeTask final : public Runnable |
100 | | { |
101 | | public: |
102 | | /* This constructor should be used when this event is sent from the main |
103 | | * thread. */ |
104 | | StateChangeTask(AudioContext* aAudioContext, void* aPromise, AudioContextState aNewState); |
105 | | |
106 | | /* This constructor should be used when this event is sent from the audio |
107 | | * thread. */ |
108 | | StateChangeTask(AudioNodeStream* aStream, void* aPromise, AudioContextState aNewState); |
109 | | |
110 | | NS_IMETHOD Run() override; |
111 | | |
112 | | private: |
113 | | RefPtr<AudioContext> mAudioContext; |
114 | | void* mPromise; |
115 | | RefPtr<AudioNodeStream> mAudioNodeStream; |
116 | | AudioContextState mNewState; |
117 | | }; |
118 | | |
119 | | enum class AudioContextOperation { Suspend, Resume, Close }; |
120 | | struct AudioContextOptions; |
121 | | |
122 | | class AudioContext final : public DOMEventTargetHelper, |
123 | | public nsIMemoryReporter, |
124 | | public RelativeTimeline |
125 | | { |
126 | | AudioContext(nsPIDOMWindowInner* aParentWindow, |
127 | | bool aIsOffline, |
128 | | uint32_t aNumberOfChannels = 0, |
129 | | uint32_t aLength = 0, |
130 | | float aSampleRate = 0.0f); |
131 | | ~AudioContext(); |
132 | | |
133 | | nsresult Init(); |
134 | | |
135 | | public: |
136 | | typedef uint64_t AudioContextId; |
137 | | |
138 | | NS_DECL_ISUPPORTS_INHERITED |
139 | | NS_DECL_CYCLE_COLLECTION_CLASS_INHERITED(AudioContext, |
140 | | DOMEventTargetHelper) |
141 | | MOZ_DEFINE_MALLOC_SIZE_OF(MallocSizeOf) |
142 | | |
143 | | nsPIDOMWindowInner* GetParentObject() const |
144 | | { |
145 | | return GetOwner(); |
146 | | } |
147 | | |
148 | | virtual void DisconnectFromOwner() override; |
149 | | virtual void BindToOwner(nsIGlobalObject* aNew) override; |
150 | | |
151 | | void Shutdown(); // idempotent |
152 | | |
153 | | JSObject* WrapObject(JSContext* aCx, JS::Handle<JSObject*> aGivenProto) override; |
154 | | |
155 | | using DOMEventTargetHelper::DispatchTrustedEvent; |
156 | | |
157 | | // Constructor for regular AudioContext |
158 | | static already_AddRefed<AudioContext> |
159 | | Constructor(const GlobalObject& aGlobal, |
160 | | const AudioContextOptions& aOptions, |
161 | | ErrorResult& aRv); |
162 | | |
163 | | // Constructor for offline AudioContext with options object |
164 | | static already_AddRefed<AudioContext> |
165 | | Constructor(const GlobalObject& aGlobal, |
166 | | const OfflineAudioContextOptions& aOptions, |
167 | | ErrorResult& aRv); |
168 | | |
169 | | // Constructor for offline AudioContext |
170 | | static already_AddRefed<AudioContext> |
171 | | Constructor(const GlobalObject& aGlobal, |
172 | | uint32_t aNumberOfChannels, |
173 | | uint32_t aLength, |
174 | | float aSampleRate, |
175 | | ErrorResult& aRv); |
176 | | |
177 | | // AudioContext methods |
178 | | |
179 | | AudioDestinationNode* Destination() const |
180 | | { |
181 | | return mDestination; |
182 | | } |
183 | | |
184 | | float SampleRate() const |
185 | | { |
186 | | return mSampleRate; |
187 | | } |
188 | | |
189 | 0 | bool ShouldSuspendNewStream() const { return mSuspendCalled; } |
190 | | |
191 | | double CurrentTime(); |
192 | | |
193 | | AudioListener* Listener(); |
194 | | |
195 | | AudioContextState State() const { return mAudioContextState; } |
196 | | |
197 | | Worklet* GetAudioWorklet(ErrorResult& aRv); |
198 | | |
199 | | bool IsRunning() const; |
200 | | |
201 | | // Those three methods return a promise to content, that is resolved when an |
202 | | // (possibly long) operation is completed on the MSG (and possibly other) |
203 | | // thread(s). To avoid having to match the calls and asynchronous result when |
204 | | // the operation is completed, we keep a reference to the promises on the main |
205 | | // thread, and then send the promises pointers down the MSG thread, as a void* |
206 | | // (to make it very clear that the pointer is to merely be treated as an ID). |
207 | | // When back on the main thread, we can resolve or reject the promise, by |
208 | | // casting it back to a `Promise*` while asserting we're back on the main |
209 | | // thread and removing the reference we added. |
210 | | already_AddRefed<Promise> Suspend(ErrorResult& aRv); |
211 | | already_AddRefed<Promise> Resume(ErrorResult& aRv); |
212 | | already_AddRefed<Promise> Close(ErrorResult& aRv); |
213 | | IMPL_EVENT_HANDLER(statechange) |
214 | | |
215 | | already_AddRefed<AudioBufferSourceNode> CreateBufferSource(ErrorResult& aRv); |
216 | | |
217 | | already_AddRefed<ConstantSourceNode> CreateConstantSource(ErrorResult& aRv); |
218 | | |
219 | | already_AddRefed<AudioBuffer> |
220 | | CreateBuffer(uint32_t aNumberOfChannels, uint32_t aLength, float aSampleRate, |
221 | | ErrorResult& aRv); |
222 | | |
223 | | already_AddRefed<MediaStreamAudioDestinationNode> |
224 | | CreateMediaStreamDestination(ErrorResult& aRv); |
225 | | |
226 | | already_AddRefed<ScriptProcessorNode> |
227 | | CreateScriptProcessor(uint32_t aBufferSize, |
228 | | uint32_t aNumberOfInputChannels, |
229 | | uint32_t aNumberOfOutputChannels, |
230 | | ErrorResult& aRv); |
231 | | |
232 | | already_AddRefed<StereoPannerNode> |
233 | | CreateStereoPanner(ErrorResult& aRv); |
234 | | |
235 | | already_AddRefed<AnalyserNode> |
236 | | CreateAnalyser(ErrorResult& aRv); |
237 | | |
238 | | already_AddRefed<GainNode> |
239 | | CreateGain(ErrorResult& aRv); |
240 | | |
241 | | already_AddRefed<WaveShaperNode> |
242 | | CreateWaveShaper(ErrorResult& aRv); |
243 | | |
244 | | already_AddRefed<MediaElementAudioSourceNode> |
245 | | CreateMediaElementSource(HTMLMediaElement& aMediaElement, ErrorResult& aRv); |
246 | | already_AddRefed<MediaStreamAudioSourceNode> |
247 | | CreateMediaStreamSource(DOMMediaStream& aMediaStream, ErrorResult& aRv); |
248 | | |
249 | | already_AddRefed<DelayNode> |
250 | | CreateDelay(double aMaxDelayTime, ErrorResult& aRv); |
251 | | |
252 | | already_AddRefed<PannerNode> |
253 | | CreatePanner(ErrorResult& aRv); |
254 | | |
255 | | already_AddRefed<ConvolverNode> |
256 | | CreateConvolver(ErrorResult& aRv); |
257 | | |
258 | | already_AddRefed<ChannelSplitterNode> |
259 | | CreateChannelSplitter(uint32_t aNumberOfOutputs, ErrorResult& aRv); |
260 | | |
261 | | already_AddRefed<ChannelMergerNode> |
262 | | CreateChannelMerger(uint32_t aNumberOfInputs, ErrorResult& aRv); |
263 | | |
264 | | already_AddRefed<DynamicsCompressorNode> |
265 | | CreateDynamicsCompressor(ErrorResult& aRv); |
266 | | |
267 | | already_AddRefed<BiquadFilterNode> |
268 | | CreateBiquadFilter(ErrorResult& aRv); |
269 | | |
270 | | already_AddRefed<IIRFilterNode> |
271 | | CreateIIRFilter(const Sequence<double>& aFeedforward, |
272 | | const Sequence<double>& aFeedback, |
273 | | mozilla::ErrorResult& aRv); |
274 | | |
275 | | already_AddRefed<OscillatorNode> |
276 | | CreateOscillator(ErrorResult& aRv); |
277 | | |
278 | | already_AddRefed<PeriodicWave> |
279 | | CreatePeriodicWave(const Float32Array& aRealData, const Float32Array& aImagData, |
280 | | const PeriodicWaveConstraints& aConstraints, |
281 | | ErrorResult& aRv); |
282 | | |
283 | | already_AddRefed<Promise> |
284 | | DecodeAudioData(const ArrayBuffer& aBuffer, |
285 | | const Optional<OwningNonNull<DecodeSuccessCallback> >& aSuccessCallback, |
286 | | const Optional<OwningNonNull<DecodeErrorCallback> >& aFailureCallback, |
287 | | ErrorResult& aRv); |
288 | | |
289 | | // OfflineAudioContext methods |
290 | | already_AddRefed<Promise> StartRendering(ErrorResult& aRv); |
291 | | IMPL_EVENT_HANDLER(complete) |
292 | | unsigned long Length(); |
293 | | |
294 | | bool IsOffline() const { return mIsOffline; } |
295 | | |
296 | | MediaStreamGraph* Graph() const; |
297 | | MediaStream* DestinationStream() const; |
298 | | |
299 | | // Nodes register here if they will produce sound even if they have silent |
300 | | // or no input connections. The AudioContext will keep registered nodes |
301 | | // alive until the context is collected. This takes care of "playing" |
302 | | // references and "tail-time" references. |
303 | | void RegisterActiveNode(AudioNode* aNode); |
304 | | // Nodes unregister when they have finished producing sound for the |
305 | | // foreseeable future. |
306 | | // Do NOT call UnregisterActiveNode from an AudioNode destructor. |
307 | | // If the destructor is called, then the Node has already been unregistered. |
308 | | // The destructor may be called during hashtable enumeration, during which |
309 | | // unregistering would not be safe. |
310 | | void UnregisterActiveNode(AudioNode* aNode); |
311 | | |
312 | | uint32_t MaxChannelCount() const; |
313 | | |
314 | | uint32_t ActiveNodeCount() const; |
315 | | |
316 | | void Mute() const; |
317 | | void Unmute() const; |
318 | | |
319 | | JSObject* GetGlobalJSObject() const; |
320 | | |
321 | | void RegisterNode(AudioNode* aNode); |
322 | | void UnregisterNode(AudioNode* aNode); |
323 | | |
324 | | void OnStateChanged(void* aPromise, AudioContextState aNewState); |
325 | | |
326 | | BasicWaveFormCache* GetBasicWaveFormCache(); |
327 | | |
328 | | bool CheckClosed(ErrorResult& aRv); |
329 | | |
330 | | void Dispatch(already_AddRefed<nsIRunnable>&& aRunnable); |
331 | | |
332 | | private: |
333 | | void DisconnectFromWindow(); |
334 | | void RemoveFromDecodeQueue(WebAudioDecodeJob* aDecodeJob); |
335 | | void ShutdownDecoder(); |
336 | | |
337 | | size_t SizeOfIncludingThis(mozilla::MallocSizeOf aMallocSizeOf) const; |
338 | | NS_DECL_NSIMEMORYREPORTER |
339 | | |
340 | | friend struct ::mozilla::WebAudioDecodeJob; |
341 | | |
342 | | nsTArray<MediaStream*> GetAllStreams() const; |
343 | | |
344 | | // Request the prompt to ask for user's approval for autoplay. |
345 | | void EnsureAutoplayRequested(); |
346 | | |
347 | | void ResumeInternal(); |
348 | | void SuspendInternal(void* aPromise); |
349 | | |
350 | | // This event is used for testing only. |
351 | | void DispatchBlockedEvent(); |
352 | | |
353 | | private: |
354 | | // Each AudioContext has an id, that is passed down the MediaStreams that |
355 | | // back the AudioNodes, so we can easily compute the set of all the |
356 | | // MediaStreams for a given context, on the MediaStreamGraph side. |
357 | | const AudioContextId mId; |
358 | | // Note that it's important for mSampleRate to be initialized before |
359 | | // mDestination, as mDestination's constructor needs to access it! |
360 | | const float mSampleRate; |
361 | | AudioContextState mAudioContextState; |
362 | | RefPtr<AudioDestinationNode> mDestination; |
363 | | RefPtr<AudioListener> mListener; |
364 | | RefPtr<Worklet> mWorklet; |
365 | | nsTArray<UniquePtr<WebAudioDecodeJob> > mDecodeJobs; |
366 | | // This array is used to keep the suspend/close promises alive until |
367 | | // they are resolved, so we can safely pass them across threads. |
368 | | nsTArray<RefPtr<Promise>> mPromiseGripArray; |
369 | | // This array is used to only keep the resume promises alive until they are |
370 | | // resolved, so we can safely pass them across threads. If the audio context |
371 | | // is not allowed to play, the promise will remain pending in this array and |
372 | | // only be resolved once the audio context is allowed and resume() is called again. |
373 | | nsTArray<RefPtr<Promise>> mPendingResumePromises; |
374 | | // See RegisterActiveNode. These will keep the AudioContext alive while it |
375 | | // is rendering and the window remains alive. |
376 | | nsTHashtable<nsRefPtrHashKey<AudioNode> > mActiveNodes; |
377 | | // Raw (non-owning) references to all AudioNodes for this AudioContext. |
378 | | nsTHashtable<nsPtrHashKey<AudioNode> > mAllNodes; |
379 | | // Cache to avoid recomputing basic waveforms all the time. |
380 | | RefPtr<BasicWaveFormCache> mBasicWaveFormCache; |
381 | | // Number of channels passed in the OfflineAudioContext ctor. |
382 | | uint32_t mNumberOfChannels; |
383 | | bool mIsOffline; |
384 | | bool mIsStarted; |
385 | | bool mIsShutDown; |
386 | | // Close has been called, reject suspend and resume call. |
387 | | bool mCloseCalled; |
388 | | // Suspend has been called with no following resume. |
389 | | bool mSuspendCalled; |
390 | | bool mIsDisconnecting; |
391 | | }; |
392 | | |
393 | | static const dom::AudioContext::AudioContextId NO_AUDIO_CONTEXT = 0; |
394 | | |
395 | | } // namespace dom |
396 | | } // namespace mozilla |
397 | | |
398 | | #endif |
399 | | |