/src/mozilla-central/dom/html/HTMLMediaElement.cpp
Line | Count | Source (jump to first uncovered line) |
1 | | /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */ |
2 | | /* vim: set ts=8 sts=2 et sw=2 tw=80: */ |
3 | | /* This Source Code Form is subject to the terms of the Mozilla Public |
4 | | * License, v. 2.0. If a copy of the MPL was not distributed with this |
5 | | * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ |
6 | | |
7 | | #ifdef XP_WIN |
8 | | #include "objbase.h" |
9 | | // Some Windows header defines this, so undef it as it conflicts with our |
10 | | // function of the same name. |
11 | | #undef GetCurrentTime |
12 | | #endif |
13 | | |
14 | | #include "mozilla/dom/HTMLMediaElement.h" |
15 | | #include "AudioChannelService.h" |
16 | | #include "AudioStreamTrack.h" |
17 | | #include "AutoplayPolicy.h" |
18 | | #include "ChannelMediaDecoder.h" |
19 | | #include "DOMMediaStream.h" |
20 | | #include "DecoderDoctorDiagnostics.h" |
21 | | #include "DecoderDoctorLogger.h" |
22 | | #include "DecoderTraits.h" |
23 | | #include "FrameStatistics.h" |
24 | | #include "GMPCrashHelper.h" |
25 | | #ifdef MOZ_ANDROID_HLS_SUPPORT |
26 | | #include "HLSDecoder.h" |
27 | | #endif |
28 | | #include "HTMLMediaElement.h" |
29 | | #include "ImageContainer.h" |
30 | | #include "Layers.h" |
31 | | #include "MP4Decoder.h" |
32 | | #include "MediaContainerType.h" |
33 | | #include "MediaError.h" |
34 | | #include "MediaMetadataManager.h" |
35 | | #include "MediaResource.h" |
36 | | #include "MediaSourceDecoder.h" |
37 | | #include "MediaStreamError.h" |
38 | | #include "MediaStreamGraph.h" |
39 | | #include "MediaStreamListener.h" |
40 | | #include "MediaTrackList.h" |
41 | | #include "SVGObserverUtils.h" |
42 | | #include "TimeRanges.h" |
43 | | #include "VideoFrameContainer.h" |
44 | | #include "VideoStreamTrack.h" |
45 | | #include "base/basictypes.h" |
46 | | #include "jsapi.h" |
47 | | #include "mozilla/ArrayUtils.h" |
48 | | #include "mozilla/AsyncEventDispatcher.h" |
49 | | #include "mozilla/EMEUtils.h" |
50 | | #include "mozilla/EventDispatcher.h" |
51 | | #include "mozilla/EventStateManager.h" |
52 | | #include "mozilla/FloatingPoint.h" |
53 | | #include "mozilla/MathAlgorithms.h" |
54 | | #include "mozilla/NotNull.h" |
55 | | #include "mozilla/Preferences.h" |
56 | | #include "mozilla/Sprintf.h" |
57 | | #include "mozilla/StaticPrefs.h" |
58 | | #include "mozilla/Telemetry.h" |
59 | | #include "mozilla/dom/AudioTrack.h" |
60 | | #include "mozilla/dom/AudioTrackList.h" |
61 | | #include "mozilla/AutoplayPermissionManager.h" |
62 | | #include "mozilla/dom/BlobURLProtocolHandler.h" |
63 | | #include "mozilla/dom/ElementInlines.h" |
64 | | #include "mozilla/dom/HTMLAudioElement.h" |
65 | | #include "mozilla/dom/HTMLInputElement.h" |
66 | | #include "mozilla/dom/HTMLMediaElementBinding.h" |
67 | | #include "mozilla/dom/HTMLSourceElement.h" |
68 | | #include "mozilla/dom/HTMLVideoElement.h" |
69 | | #include "mozilla/dom/MediaEncryptedEvent.h" |
70 | | #include "mozilla/dom/MediaErrorBinding.h" |
71 | | #include "mozilla/dom/MediaSource.h" |
72 | | #include "mozilla/dom/PlayPromise.h" |
73 | | #include "mozilla/dom/Promise.h" |
74 | | #include "mozilla/dom/TextTrack.h" |
75 | | #include "mozilla/dom/VideoPlaybackQuality.h" |
76 | | #include "mozilla/dom/VideoTrack.h" |
77 | | #include "mozilla/dom/VideoTrackList.h" |
78 | | #include "mozilla/dom/WakeLock.h" |
79 | | #include "mozilla/dom/power/PowerManagerService.h" |
80 | | #include "nsAttrValueInlines.h" |
81 | | #include "nsContentPolicyUtils.h" |
82 | | #include "nsContentUtils.h" |
83 | | #include "nsCycleCollectionParticipant.h" |
84 | | #include "nsDisplayList.h" |
85 | | #include "nsDocShell.h" |
86 | | #include "nsError.h" |
87 | | #include "nsGenericHTMLElement.h" |
88 | | #include "nsGkAtoms.h" |
89 | | #include "nsIAsyncVerifyRedirectCallback.h" |
90 | | #include "nsIAutoplay.h" |
91 | | #include "nsICachingChannel.h" |
92 | | #include "nsICategoryManager.h" |
93 | | #include "nsIClassOfService.h" |
94 | | #include "nsIContentPolicy.h" |
95 | | #include "nsIContentSecurityPolicy.h" |
96 | | #include "nsIDocShell.h" |
97 | | #include "nsIDocument.h" |
98 | | #include "nsIFrame.h" |
99 | | #include "nsIObserverService.h" |
100 | | #include "nsIPermissionManager.h" |
101 | | #include "nsIPresShell.h" |
102 | | #include "nsIRequest.h" |
103 | | #include "nsIScriptError.h" |
104 | | #include "nsIScriptSecurityManager.h" |
105 | | #include "nsISupportsPrimitives.h" |
106 | | #include "nsIThreadInternal.h" |
107 | | #include "nsITimer.h" |
108 | | #include "nsIXPConnect.h" |
109 | | #include "nsJSUtils.h" |
110 | | #include "nsLayoutUtils.h" |
111 | | #include "nsMediaFragmentURIParser.h" |
112 | | #include "nsMimeTypes.h" |
113 | | #include "nsNetUtil.h" |
114 | | #include "nsNodeInfoManager.h" |
115 | | #include "nsPresContext.h" |
116 | | #include "nsQueryObject.h" |
117 | | #include "nsRange.h" |
118 | | #include "nsSize.h" |
119 | | #include "nsThreadUtils.h" |
120 | | #include "nsURIHashKey.h" |
121 | | #include "nsVideoFrame.h" |
122 | | #include "xpcpublic.h" |
123 | | #include <algorithm> |
124 | | #include <cmath> |
125 | | #include <limits> |
126 | | |
// Log modules for general media-element state ("nsMediaElement") and for the
// higher-volume event dispatch traffic ("nsMediaElementEvents").
mozilla::LazyLogModule gMediaElementLog("nsMediaElement");
static mozilla::LazyLogModule gMediaElementEventsLog("nsMediaElementEvents");

// Shared autoplay-permission log module; defined in another translation unit.
extern mozilla::LazyLogModule gAutoplayPermissionLog;
#define AUTOPLAY_LOG(msg, ...)                                                 \
  MOZ_LOG(gAutoplayPermissionLog, LogLevel::Debug, (msg, ##__VA_ARGS__))

#define LOG(type, msg) MOZ_LOG(gMediaElementLog, type, msg)
#define LOG_EVENT(type, msg) MOZ_LOG(gMediaElementEventsLog, type, msg)

using namespace mozilla::layers;
using mozilla::net::nsMediaFragmentURIParser;
using namespace mozilla::dom::HTMLMediaElement_Binding;
140 | | |
141 | | namespace mozilla { |
142 | | namespace dom { |
143 | | |
// Number of milliseconds between progress events as defined by spec
static const uint32_t PROGRESS_MS = 350;

// Number of milliseconds of no data before a stall event is fired as defined by
// spec
static const uint32_t STALL_MS = 3000;

// Used by AudioChannel for suppressing the volume to this ratio.
#define FADED_VOLUME_RATIO 0.25

// These constants are arbitrary
// Minimum playbackRate for a media
static const double MIN_PLAYBACKRATE = 1.0 / 16;
// Maximum playbackRate for a media
static const double MAX_PLAYBACKRATE = 16.0;
// These are the limits beyond which SoundTouch does not perform too well and
// when speech is hard to understand anyway. Threshold above which audio is
// muted
static const double THRESHOLD_HIGH_PLAYBACKRATE_AUDIO = 4.0;
// Threshold under which audio is muted
static const double THRESHOLD_LOW_PLAYBACKRATE_AUDIO = 0.5;

// Media error values. These need to match the ones in MediaError.webidl.
static const unsigned short MEDIA_ERR_ABORTED = 1;
static const unsigned short MEDIA_ERR_NETWORK = 2;
static const unsigned short MEDIA_ERR_DECODE = 3;
static const unsigned short MEDIA_ERR_SRC_NOT_SUPPORTED = 4;
171 | | |
172 | | static void |
173 | | ResolvePromisesWithUndefined(const nsTArray<RefPtr<PlayPromise>>& aPromises) |
174 | 0 | { |
175 | 0 | for (auto& promise : aPromises) { |
176 | 0 | promise->MaybeResolveWithUndefined(); |
177 | 0 | } |
178 | 0 | } |
179 | | |
180 | | static void |
181 | | RejectPromises(const nsTArray<RefPtr<PlayPromise>>& aPromises, nsresult aError) |
182 | 0 | { |
183 | 0 | for (auto& promise : aPromises) { |
184 | 0 | promise->MaybeReject(aError); |
185 | 0 | } |
186 | 0 | } |
187 | | |
188 | | // Under certain conditions there may be no-one holding references to |
189 | | // a media element from script, DOM parent, etc, but the element may still |
190 | | // fire meaningful events in the future so we can't destroy it yet: |
191 | | // 1) If the element is delaying the load event (or would be, if it were |
192 | | // in a document), then events up to loadeddata or error could be fired, |
193 | | // so we need to stay alive. |
194 | | // 2) If the element is not paused and playback has not ended, then |
195 | | // we will (or might) play, sending timeupdate and ended events and possibly |
196 | | // audio output, so we need to stay alive. |
197 | | // 3) if the element is seeking then we will fire seeking events and possibly |
198 | | // start playing afterward, so we need to stay alive. |
199 | | // 4) If autoplay could start playback in this element (if we got enough data), |
200 | | // then we need to stay alive. |
201 | | // 5) if the element is currently loading, not suspended, and its source is |
202 | | // not a MediaSource, then script might be waiting for progress events or a |
203 | | // 'stalled' or 'suspend' event, so we need to stay alive. |
204 | | // If we're already suspended then (all other conditions being met), |
205 | | // it's OK to just disappear without firing any more events, |
206 | | // since we have the freedom to remain suspended indefinitely. Note |
207 | | // that we could use this 'suspended' loophole to garbage-collect a suspended |
208 | | // element in case 4 even if it had 'autoplay' set, but we choose not to. |
209 | | // If someone throws away all references to a loading 'autoplay' element |
210 | | // sound should still eventually play. |
211 | | // 6) If the source is a MediaSource, most loading events will not fire unless |
212 | | // appendBuffer() is called on a SourceBuffer, in which case something is |
213 | | // already referencing the SourceBuffer, which keeps the associated media |
214 | | // element alive. Further, a MediaSource will never time out the resource |
215 | | // fetch, and so should not keep the media element alive if it is |
216 | | // unreferenced. A pending 'stalled' event keeps the media element alive. |
217 | | // |
218 | | // Media elements owned by inactive documents (i.e. documents not contained in |
219 | | // any document viewer) should never hold a self-reference because none of the |
220 | | // above conditions are allowed: the element will stop loading and playing |
221 | | // and never resume loading or playing unless its owner document changes to |
222 | | // an active document (which can only happen if there is an external reference |
223 | | // to the element). |
224 | | // Media elements with no owner doc should be able to hold a self-reference. |
225 | | // Something native must have created the element and may expect it to |
226 | | // stay alive to play. |
227 | | |
228 | | // It's very important that any change in state which could change the value of |
229 | | // needSelfReference in AddRemoveSelfReference be followed by a call to |
230 | | // AddRemoveSelfReference before this element could die! |
231 | | // It's especially important if needSelfReference would change to 'true', |
232 | | // since if we neglect to add a self-reference, this element might be |
233 | | // garbage collected while there are still event listeners that should |
234 | | // receive events. If we neglect to remove the self-reference then the element |
235 | | // just lives longer than it needs to. |
236 | | |
/**
 * Base class for main-thread runnables dispatched on behalf of a media
 * element. The element's current load ID is captured at construction time so
 * a stale event can be detected (see IsCancelled()) and dropped if a new
 * resource load has started before the runnable runs.
 */
class nsMediaEvent : public Runnable
{
public:
  explicit nsMediaEvent(const char* aName, HTMLMediaElement* aElement)
    : Runnable(aName)
    , mElement(aElement)
    , mLoadID(mElement->GetCurrentLoadID())
  {
  }
  ~nsMediaEvent() {}

  NS_IMETHOD Run() override = 0;

protected:
  // True if the element has started a new load since this event was created;
  // subclasses use this to silently drop stale events.
  bool IsCancelled() { return mElement->GetCurrentLoadID() != mLoadID; }

  // Strong reference keeps the element alive until the event has run.
  RefPtr<HTMLMediaElement> mElement;
  uint32_t mLoadID;
};
256 | | |
257 | | class HTMLMediaElement::nsAsyncEventRunner : public nsMediaEvent |
258 | | { |
259 | | private: |
260 | | nsString mName; |
261 | | |
262 | | public: |
263 | | nsAsyncEventRunner(const nsAString& aName, HTMLMediaElement* aElement) |
264 | | : nsMediaEvent("HTMLMediaElement::nsAsyncEventRunner", aElement) |
265 | | , mName(aName) |
266 | 0 | { |
267 | 0 | } |
268 | | |
269 | | NS_IMETHOD Run() override |
270 | 0 | { |
271 | 0 | // Silently cancel if our load has been cancelled. |
272 | 0 | if (IsCancelled()) |
273 | 0 | return NS_OK; |
274 | 0 | |
275 | 0 | return mElement->DispatchEvent(mName); |
276 | 0 | } |
277 | | }; |
278 | | |
279 | | /* |
280 | | * If no error is passed while constructing an instance, the instance will |
281 | | * resolve the passed promises with undefined; otherwise, the instance will |
282 | | * reject the passed promises with the passed error. |
283 | | * |
284 | | * The constructor appends the constructed instance into the passed media |
 285 | | * element's mPendingPlayPromisesRunners member and once the runner is run |
286 | | * (whether fulfilled or canceled), it removes itself from |
287 | | * mPendingPlayPromisesRunners. |
288 | | */ |
289 | | class HTMLMediaElement::nsResolveOrRejectPendingPlayPromisesRunner |
290 | | : public nsMediaEvent |
291 | | { |
292 | | nsTArray<RefPtr<PlayPromise>> mPromises; |
293 | | nsresult mError; |
294 | | |
295 | | public: |
296 | | nsResolveOrRejectPendingPlayPromisesRunner( |
297 | | HTMLMediaElement* aElement, |
298 | | nsTArray<RefPtr<PlayPromise>>&& aPromises, |
299 | | nsresult aError = NS_OK) |
300 | | : nsMediaEvent( |
301 | | "HTMLMediaElement::nsResolveOrRejectPendingPlayPromisesRunner", |
302 | | aElement) |
303 | | , mPromises(std::move(aPromises)) |
304 | | , mError(aError) |
305 | 0 | { |
306 | 0 | mElement->mPendingPlayPromisesRunners.AppendElement(this); |
307 | 0 | } |
308 | | |
309 | | void ResolveOrReject() |
310 | 0 | { |
311 | 0 | if (NS_SUCCEEDED(mError)) { |
312 | 0 | ResolvePromisesWithUndefined(mPromises); |
313 | 0 | } else { |
314 | 0 | RejectPromises(mPromises, mError); |
315 | 0 | } |
316 | 0 | } |
317 | | |
318 | | NS_IMETHOD Run() override |
319 | 0 | { |
320 | 0 | if (!IsCancelled()) { |
321 | 0 | ResolveOrReject(); |
322 | 0 | } |
323 | 0 |
|
324 | 0 | mElement->mPendingPlayPromisesRunners.RemoveElement(this); |
325 | 0 | return NS_OK; |
326 | 0 | } |
327 | | }; |
328 | | |
329 | | class HTMLMediaElement::nsNotifyAboutPlayingRunner |
330 | | : public nsResolveOrRejectPendingPlayPromisesRunner |
331 | | { |
332 | | public: |
333 | | nsNotifyAboutPlayingRunner( |
334 | | HTMLMediaElement* aElement, |
335 | | nsTArray<RefPtr<PlayPromise>>&& aPendingPlayPromises) |
336 | | : nsResolveOrRejectPendingPlayPromisesRunner(aElement, |
337 | | std::move(aPendingPlayPromises)) |
338 | 0 | { |
339 | 0 | } |
340 | | |
341 | | NS_IMETHOD Run() override |
342 | 0 | { |
343 | 0 | if (IsCancelled()) { |
344 | 0 | mElement->mPendingPlayPromisesRunners.RemoveElement(this); |
345 | 0 | return NS_OK; |
346 | 0 | } |
347 | 0 | |
348 | 0 | mElement->DispatchEvent(NS_LITERAL_STRING("playing")); |
349 | 0 | return nsResolveOrRejectPendingPlayPromisesRunner::Run(); |
350 | 0 | } |
351 | | }; |
352 | | |
/**
 * Dispatches a trusted, non-bubbling "error" event at a <source> child of
 * the media element (not at the element itself), for use when a candidate
 * source fails to load.
 */
class nsSourceErrorEventRunner : public nsMediaEvent
{
private:
  // The <source> node the error event is targeted at.
  nsCOMPtr<nsIContent> mSource;

public:
  nsSourceErrorEventRunner(HTMLMediaElement* aElement, nsIContent* aSource)
    : nsMediaEvent("dom::nsSourceErrorEventRunner", aElement)
    , mSource(aSource)
  {
  }

  NS_IMETHOD Run() override
  {
    // Silently cancel if our load has been cancelled.
    if (IsCancelled())
      return NS_OK;
    LOG_EVENT(LogLevel::Debug,
              ("%p Dispatching simple event source error", mElement.get()));
    return nsContentUtils::DispatchTrustedEvent(
      mElement->OwnerDoc(), mSource, NS_LITERAL_STRING("error"),
      CanBubble::eNo, Cancelable::eNo);
  }
};
377 | | |
378 | | /** |
379 | | * This listener observes the first video frame to arrive with a non-empty size, |
380 | | * and calls HTMLMediaElement::UpdateInitialMediaSize() with that size. |
381 | | */ |
class HTMLMediaElement::StreamSizeListener
  : public DirectMediaStreamTrackListener
{
public:
  explicit StreamSizeListener(HTMLMediaElement* aElement)
    : mElement(aElement)
    , mMainThreadEventTarget(aElement->MainThreadEventTarget())
    , mInitialSizeFound(false)
  {
    MOZ_ASSERT(mElement);
    MOZ_ASSERT(mMainThreadEventTarget);
  }

  // Drops the back-pointer to the element so no further calls reach it.
  // Main thread only.
  void Forget() { mElement = nullptr; }

  // Main-thread continuation of NotifyRealtimeTrackData(): forwards the
  // first non-empty frame size to the element, unless Forget() ran first.
  void ReceivedSize(gfx::IntSize aSize)
  {
    MOZ_ASSERT(NS_IsMainThread());

    if (!mElement) {
      return;
    }

    // Keep the element alive across the call below.
    RefPtr<HTMLMediaElement> deathGrip = mElement;
    deathGrip->UpdateInitialMediaSize(aSize);
  }

  void NotifyRealtimeTrackData(MediaStreamGraph* aGraph,
                               StreamTime aTrackOffset,
                               const MediaSegment& aMedia) override
  {
    // Only the first frame with a real size is interesting; afterwards this
    // listener is effectively inert.
    if (mInitialSizeFound) {
      return;
    }

    if (aMedia.GetType() != MediaSegment::VIDEO) {
      MOZ_ASSERT(false, "Should only lock on to a video track");
      return;
    }

    const VideoSegment& video = static_cast<const VideoSegment&>(aMedia);
    for (VideoSegment::ConstChunkIterator c(video); !c.IsEnded(); c.Next()) {
      if (c->mFrame.GetIntrinsicSize() != gfx::IntSize(0, 0)) {
        mInitialSizeFound = true;
        // This is fine to dispatch straight to main thread (instead of via
        // ...AfterStreamUpdate()) since it reflects state of the element,
        // not the stream. Events reflecting stream or track state should be
        // dispatched so their order is preserved.
        mMainThreadEventTarget->Dispatch(NewRunnableMethod<gfx::IntSize>(
          "dom::HTMLMediaElement::StreamSizeListener::ReceivedSize",
          this,
          &StreamSizeListener::ReceivedSize,
          c->mFrame.GetIntrinsicSize()));
        return;
      }
    }
  }

private:
  // These fields may only be accessed on the main thread
  HTMLMediaElement* mElement;
  // We hold mElement->MainThreadEventTarget() here because the mElement could
  // be reset in Forget().
  nsCOMPtr<nsISerialEventTarget> mMainThreadEventTarget;

  // These fields may only be accessed on the MSG's appending thread.
  // (this is a direct listener so we get called by whoever is producing
  // this track's data)
  bool mInitialSizeFound;
};
452 | | |
453 | | /** |
454 | | * There is a reference cycle involving this class: MediaLoadListener |
455 | | * holds a reference to the HTMLMediaElement, which holds a reference |
456 | | * to an nsIChannel, which holds a reference to this listener. |
457 | | * We break the reference cycle in OnStartRequest by clearing mElement. |
458 | | */ |
class HTMLMediaElement::MediaLoadListener final
  : public nsIStreamListener
  , public nsIChannelEventSink
  , public nsIInterfaceRequestor
  , public nsIObserver
  , public nsIThreadRetargetableStreamListener
{
  ~MediaLoadListener() {}

  NS_DECL_THREADSAFE_ISUPPORTS
  NS_DECL_NSIREQUESTOBSERVER
  NS_DECL_NSISTREAMLISTENER
  NS_DECL_NSICHANNELEVENTSINK
  NS_DECL_NSIOBSERVER
  NS_DECL_NSIINTERFACEREQUESTOR
  NS_DECL_NSITHREADRETARGETABLESTREAMLISTENER

public:
  explicit MediaLoadListener(HTMLMediaElement* aElement)
    : mElement(aElement)
    , mLoadID(aElement->GetCurrentLoadID())
  {
    MOZ_ASSERT(mElement, "Must pass an element to call back");
  }

private:
  // Cleared in OnStartRequest()/Observe() to break the reference cycle
  // described in the comment above this class.
  RefPtr<HTMLMediaElement> mElement;
  // Listener produced by InitializeDecoderForChannel(); stream callbacks are
  // forwarded to it once set.
  nsCOMPtr<nsIStreamListener> mNextListener;
  // The element's load ID at construction; a mismatch later means a newer
  // load has superseded this one.
  const uint32_t mLoadID;
};
489 | | |
// QueryInterface/AddRef/Release boilerplate covering every listener
// interface MediaLoadListener implements.
NS_IMPL_ISUPPORTS(HTMLMediaElement::MediaLoadListener,
                  nsIRequestObserver,
                  nsIStreamListener,
                  nsIChannelEventSink,
                  nsIInterfaceRequestor,
                  nsIObserver,
                  nsIThreadRetargetableStreamListener)
497 | | |
// nsIObserver callback. Presumably fired for the shutdown notification this
// listener registers for elsewhere — TODO confirm against the registration
// site; we unregister and drop the element reference here.
NS_IMETHODIMP
HTMLMediaElement::MediaLoadListener::Observe(nsISupports* aSubject,
                                             const char* aTopic,
                                             const char16_t* aData)
{
  nsContentUtils::UnregisterShutdownObserver(this);

  // Clear mElement to break cycle so we don't leak on shutdown
  mElement = nullptr;
  return NS_OK;
}
509 | | |
// First callback for the media resource load. Validates the request (load
// still current, request/HTTP status OK), then hands the channel to the
// element via InitializeDecoderForChannel() and chains all further stream
// callbacks to the listener that call produces. Returns NS_BINDING_ABORTED
// whenever this listener no longer wants the channel.
NS_IMETHODIMP
HTMLMediaElement::MediaLoadListener::OnStartRequest(nsIRequest* aRequest,
                                                    nsISupports* aContext)
{
  nsContentUtils::UnregisterShutdownObserver(this);

  if (!mElement) {
    // We've been notified by the shutdown observer, and are shutting down.
    return NS_BINDING_ABORTED;
  }

  // Media element playback is not currently supported when recording or
  // replaying. See bug 1304146.
  if (recordreplay::IsRecordingOrReplaying()) {
    mElement->ReportLoadError("Media elements not available when recording", nullptr, 0);
    return NS_ERROR_NOT_AVAILABLE;
  }

  // The element is only needed until we've had a chance to call
  // InitializeDecoderForChannel. So make sure mElement is cleared here.
  RefPtr<HTMLMediaElement> element;
  element.swap(mElement);

  AbstractThread::AutoEnter context(element->AbstractMainThread());

  if (mLoadID != element->GetCurrentLoadID()) {
    // The channel has been cancelled before we had a chance to create
    // a decoder. Abort, don't dispatch an "error" event, as the new load
    // may not be in an error state.
    return NS_BINDING_ABORTED;
  }

  // Don't continue to load if the request failed or has been canceled.
  nsresult status;
  nsresult rv = aRequest->GetStatus(&status);
  NS_ENSURE_SUCCESS(rv, rv);
  if (NS_FAILED(status)) {
    // NOTE(review): |element| is always non-null here — mElement was checked
    // above before being swapped in — so this test is redundant.
    if (element) {
      // Handle media not loading error because source was a tracking URL.
      // We make a note of this media node by including it in a dedicated
      // array of blocked tracking nodes under its parent document.
      if (status == NS_ERROR_TRACKING_URI) {
        nsIDocument* ownerDoc = element->OwnerDoc();
        if (ownerDoc) {
          ownerDoc->AddBlockedTrackingNode(element);
        }
      }
      element->NotifyLoadError(
        nsPrintfCString("%u: %s", uint32_t(status), "Request failed"));
    }
    return status;
  }

  // For HTTP channels, a transport-level success can still carry an HTTP
  // error response; report it and abort the load.
  nsCOMPtr<nsIHttpChannel> hc = do_QueryInterface(aRequest);
  bool succeeded;
  if (hc && NS_SUCCEEDED(hc->GetRequestSucceeded(&succeeded)) && !succeeded) {
    uint32_t responseStatus = 0;
    Unused << hc->GetResponseStatus(&responseStatus);
    nsAutoCString statusText;
    Unused << hc->GetResponseStatusText(statusText);
    element->NotifyLoadError(
      nsPrintfCString("%u: %s", responseStatus, statusText.get()));

    nsAutoString code;
    code.AppendInt(responseStatus);
    nsAutoString src;
    element->GetCurrentSrc(src);
    const char16_t* params[] = { code.get(), src.get() };
    element->ReportLoadError("MediaLoadHttpError", params, ArrayLength(params));
    return NS_BINDING_ABORTED;
  }

  nsCOMPtr<nsIChannel> channel = do_QueryInterface(aRequest);
  if (channel &&
      NS_SUCCEEDED(rv = element->InitializeDecoderForChannel(
        channel, getter_AddRefs(mNextListener))) &&
      mNextListener) {
    rv = mNextListener->OnStartRequest(aRequest, aContext);
  } else {
    // If InitializeDecoderForChannel() returned an error, fire a network error.
    if (NS_FAILED(rv) && !mNextListener) {
      // Load failed, attempt to load the next candidate resource. If there
      // are none, this will trigger a MEDIA_ERR_SRC_NOT_SUPPORTED error.
      element->NotifyLoadError(NS_LITERAL_CSTRING("Failed to init decoder"));
    }
    // If InitializeDecoderForChannel did not return a listener (but may
    // have otherwise succeeded), we abort the connection since we aren't
    // interested in keeping the channel alive ourselves.
    rv = NS_BINDING_ABORTED;
  }

  return rv;
}
603 | | |
604 | | NS_IMETHODIMP |
605 | | HTMLMediaElement::MediaLoadListener::OnStopRequest(nsIRequest* aRequest, |
606 | | nsISupports* aContext, |
607 | | nsresult aStatus) |
608 | 0 | { |
609 | 0 | if (mNextListener) { |
610 | 0 | return mNextListener->OnStopRequest(aRequest, aContext, aStatus); |
611 | 0 | } |
612 | 0 | return NS_OK; |
613 | 0 | } |
614 | | |
615 | | NS_IMETHODIMP |
616 | | HTMLMediaElement::MediaLoadListener::OnDataAvailable(nsIRequest* aRequest, |
617 | | nsISupports* aContext, |
618 | | nsIInputStream* aStream, |
619 | | uint64_t aOffset, |
620 | | uint32_t aCount) |
621 | 0 | { |
622 | 0 | if (!mNextListener) { |
623 | 0 | NS_ERROR("Must have a chained listener; OnStartRequest should have " |
624 | 0 | "canceled this request"); |
625 | 0 | return NS_BINDING_ABORTED; |
626 | 0 | } |
627 | 0 | return mNextListener->OnDataAvailable( |
628 | 0 | aRequest, aContext, aStream, aOffset, aCount); |
629 | 0 | } |
630 | | |
631 | | NS_IMETHODIMP |
632 | | HTMLMediaElement::MediaLoadListener::AsyncOnChannelRedirect( |
633 | | nsIChannel* aOldChannel, |
634 | | nsIChannel* aNewChannel, |
635 | | uint32_t aFlags, |
636 | | nsIAsyncVerifyRedirectCallback* cb) |
637 | 0 | { |
638 | 0 | // TODO is this really correct?? See bug #579329. |
639 | 0 | if (mElement) { |
640 | 0 | mElement->OnChannelRedirect(aOldChannel, aNewChannel, aFlags); |
641 | 0 | } |
642 | 0 | nsCOMPtr<nsIChannelEventSink> sink = do_QueryInterface(mNextListener); |
643 | 0 | if (sink) { |
644 | 0 | return sink->AsyncOnChannelRedirect(aOldChannel, aNewChannel, aFlags, cb); |
645 | 0 | } |
646 | 0 | cb->OnRedirectVerifyCallback(NS_OK); |
647 | 0 | return NS_OK; |
648 | 0 | } |
649 | | |
650 | | NS_IMETHODIMP |
651 | | HTMLMediaElement::MediaLoadListener::CheckListenerChain() |
652 | 0 | { |
653 | 0 | MOZ_ASSERT(mNextListener); |
654 | 0 | nsCOMPtr<nsIThreadRetargetableStreamListener> retargetable = |
655 | 0 | do_QueryInterface(mNextListener); |
656 | 0 | if (retargetable) { |
657 | 0 | return retargetable->CheckListenerChain(); |
658 | 0 | } |
659 | 0 | return NS_ERROR_NO_INTERFACE; |
660 | 0 | } |
661 | | |
// nsIInterfaceRequestor: we only hand out interfaces we implement ourselves.
NS_IMETHODIMP
HTMLMediaElement::MediaLoadListener::GetInterface(const nsIID& aIID,
                                                  void** aResult)
{
  return QueryInterface(aIID, aResult);
}
668 | | |
// Reports a localized media-load warning (DOM-properties message key |aMsg|
// formatted with |aParams|) to the web console of the owner document.
void
HTMLMediaElement::ReportLoadError(const char* aMsg,
                                  const char16_t** aParams,
                                  uint32_t aParamCount)
{
  nsContentUtils::ReportToConsole(nsIScriptError::warningFlag,
                                  NS_LITERAL_CSTRING("Media"),
                                  OwnerDoc(),
                                  nsContentUtils::eDOM_PROPERTIES,
                                  aMsg,
                                  aParams,
                                  aParamCount);
}
682 | | |
683 | | class HTMLMediaElement::AudioChannelAgentCallback final |
684 | | : public nsIAudioChannelAgentCallback |
685 | | { |
686 | | public: |
687 | | NS_DECL_CYCLE_COLLECTING_ISUPPORTS |
688 | | NS_DECL_CYCLE_COLLECTION_CLASS(AudioChannelAgentCallback) |
689 | | |
  // Wraps |aOwner| (must be non-null) for audio-channel bookkeeping and
  // attempts to create the audio channel agent up front.
  explicit AudioChannelAgentCallback(HTMLMediaElement* aOwner)
    : mOwner(aOwner)
    , mAudioChannelVolume(1.0)
    , mPlayingThroughTheAudioChannel(false)
    , mAudioCapturedByWindow(false)
    , mSuspended(nsISuspendedTypes::NONE_SUSPENDED)
    , mIsOwnerAudible(IsOwnerAudible())
    , mIsShutDown(false)
  {
    MOZ_ASSERT(mOwner);
    MaybeCreateAudioChannelAgent();
  }
702 | | |
703 | | void UpdateAudioChannelPlayingState(bool aForcePlaying = false) |
704 | 0 | { |
705 | 0 | MOZ_ASSERT(!mIsShutDown); |
706 | 0 | bool playingThroughTheAudioChannel = |
707 | 0 | aForcePlaying || IsPlayingThroughTheAudioChannel(); |
708 | 0 |
|
709 | 0 | if (playingThroughTheAudioChannel != mPlayingThroughTheAudioChannel) { |
710 | 0 | if (!MaybeCreateAudioChannelAgent()) { |
711 | 0 | return; |
712 | 0 | } |
713 | 0 | |
714 | 0 | mPlayingThroughTheAudioChannel = playingThroughTheAudioChannel; |
715 | 0 | NotifyAudioChannelAgent(mPlayingThroughTheAudioChannel); |
716 | 0 | } |
717 | 0 | } |
718 | | |
719 | | bool ShouldResetSuspend() const |
720 | 0 | { |
721 | 0 | // The disposable-pause should be clear after media starts playing. |
722 | 0 | if (!mOwner->Paused() && |
723 | 0 | mSuspended == nsISuspendedTypes::SUSPENDED_PAUSE_DISPOSABLE) { |
724 | 0 | return true; |
725 | 0 | } |
726 | 0 | |
727 | 0 | // If the blocked media is paused, we don't need to resume it. We reset the |
728 | 0 | // mSuspended in order to unregister the agent. |
729 | 0 | if (mOwner->Paused() && mSuspended == nsISuspendedTypes::SUSPENDED_BLOCK) { |
730 | 0 | return true; |
731 | 0 | } |
732 | 0 | |
733 | 0 | return false; |
734 | 0 | } |
735 | | |
  // Called when the element flips between playing and paused: clears any
  // stale suspend state first, then refreshes the agent's playing state.
  void NotifyPlayStateChanged()
  {
    MOZ_ASSERT(!mIsShutDown);
    if (ShouldResetSuspend()) {
      SetSuspended(nsISuspendedTypes::NONE_SUSPENDED);
      NotifyAudioPlaybackChanged(
        AudioChannelService::AudibleChangedReasons::ePauseStateChanged);
    }
    UpdateAudioChannelPlayingState();
  }
746 | | |
  // nsIAudioChannelAgentCallback: the owning window's effective volume or
  // mute state changed; propagate both onto the element.
  NS_IMETHODIMP WindowVolumeChanged(float aVolume, bool aMuted) override
  {
    MOZ_ASSERT(mAudioChannelAgent);

    MOZ_LOG(
      AudioChannelService::GetAudioChannelLog(),
      LogLevel::Debug,
      ("HTMLMediaElement::AudioChannelAgentCallback, WindowVolumeChanged, "
       "this = %p, aVolume = %f, aMuted = %s\n",
       this,
       aVolume,
       aMuted ? "true" : "false"));

    if (mAudioChannelVolume != aVolume) {
      mAudioChannelVolume = aVolume;
      mOwner->SetVolumeInternal();
    }

    // Only toggle the MUTED_BY_AUDIO_CHANNEL bit when the computed mute
    // state actually changes, leaving other mute bits untouched.
    const uint32_t muted = mOwner->mMuted;
    if (aMuted && !mOwner->ComputedMuted()) {
      mOwner->SetMutedInternal(muted | MUTED_BY_AUDIO_CHANNEL);
    } else if (!aMuted && mOwner->ComputedMuted()) {
      mOwner->SetMutedInternal(muted & ~MUTED_BY_AUDIO_CHANNEL);
    }

    return NS_OK;
  }
774 | | |
  // nsIAudioChannelAgentCallback: the window's suspend state changed.
  // Maps each suspend type onto the matching local operation
  // (Resume/Suspend/Stop); unknown types are only logged.
  NS_IMETHODIMP WindowSuspendChanged(SuspendTypes aSuspend) override
  {
    MOZ_ASSERT(mAudioChannelAgent);

    MOZ_LOG(
      AudioChannelService::GetAudioChannelLog(),
      LogLevel::Debug,
      ("HTMLMediaElement::AudioChannelAgentCallback, WindowSuspendChanged, "
       "this = %p, aSuspend = %s\n",
       this,
       SuspendTypeToStr(aSuspend)));

    switch (aSuspend) {
      case nsISuspendedTypes::NONE_SUSPENDED:
        Resume();
        break;
      case nsISuspendedTypes::SUSPENDED_PAUSE:
      case nsISuspendedTypes::SUSPENDED_PAUSE_DISPOSABLE:
      case nsISuspendedTypes::SUSPENDED_BLOCK:
        Suspend(aSuspend);
        break;
      case nsISuspendedTypes::SUSPENDED_STOP_DISPOSABLE:
        Stop();
        break;
      default:
        MOZ_LOG(
          AudioChannelService::GetAudioChannelLog(),
          LogLevel::Debug,
          ("HTMLMediaElement::AudioChannelAgentCallback, WindowSuspendChanged, "
           "this = %p, Error : unknown suspended type!\n",
           this));
    }
    return NS_OK;
  }
809 | | |
810 | | NS_IMETHODIMP WindowAudioCaptureChanged(bool aCapture) override |
811 | 0 | { |
812 | 0 | MOZ_ASSERT(mAudioChannelAgent); |
813 | 0 |
|
814 | 0 | if (mAudioCapturedByWindow != aCapture) { |
815 | 0 | mAudioCapturedByWindow = aCapture; |
816 | 0 | AudioCaptureStreamChangeIfNeeded(); |
817 | 0 | } |
818 | 0 | return NS_OK; |
819 | 0 | } |
820 | | |
821 | | void AudioCaptureStreamChangeIfNeeded() |
822 | 0 | { |
823 | 0 | MOZ_ASSERT(!mIsShutDown); |
824 | 0 | if (!IsPlayingStarted()) { |
825 | 0 | return; |
826 | 0 | } |
827 | 0 | |
828 | 0 | if (!mOwner->HasAudio()) { |
829 | 0 | return; |
830 | 0 | } |
831 | 0 | |
832 | 0 | mOwner->AudioCaptureStreamChange(mAudioCapturedByWindow); |
833 | 0 | } |
834 | | |
  // Recomputes the owner's audible state and, if it changed, reports the new
  // state (with aReason) to the audio channel agent. No-op before playback
  // has started through the agent.
  void NotifyAudioPlaybackChanged(AudibleChangedReasons aReason)
  {
    MOZ_ASSERT(!mIsShutDown);
    if (!IsPlayingStarted()) {
      return;
    }

    AudibleState newAudibleState = IsOwnerAudible();
    if (mIsOwnerAudible == newAudibleState) {
      return;
    }

    mIsOwnerAudible = newAudibleState;
    mAudioChannelAgent->NotifyStartedAudible(mIsOwnerAudible, aReason);
  }
850 | | |
  // Returns true when playback must be blocked because the owning tab has
  // never been activated.
  bool IsPlaybackBlocked()
  {
    MOZ_ASSERT(!mIsShutDown);
    // If the tab hasn't been activated yet, the media element in that tab
    // can't play until the tab goes to the foreground for the first time or
    // the user clicks the unblocking tab icon.
    if (!IsTabActivated()) {
      // Even though we haven't started playing yet, we still need to notify
      // the audio channel system because we need to receive the resume
      // notification later.
      UpdateAudioChannelPlayingState(true /* force to start */);
      return true;
    }

    return false;
  }
866 | | |
867 | | void Shutdown() |
868 | 0 | { |
869 | 0 | MOZ_ASSERT(!mIsShutDown); |
870 | 0 | if (mAudioChannelAgent) { |
871 | 0 | mAudioChannelAgent->NotifyStoppedPlaying(); |
872 | 0 | mAudioChannelAgent = nullptr; |
873 | 0 | } |
874 | 0 | mIsShutDown = true; |
875 | 0 | } |
876 | | |
877 | | float GetEffectiveVolume() const |
878 | 0 | { |
879 | 0 | MOZ_ASSERT(!mIsShutDown); |
880 | 0 | return mOwner->Volume() * mAudioChannelVolume; |
881 | 0 | } |
882 | | |
  // Current suspend state (see the mSuspended member documentation below).
  SuspendTypes GetSuspendType() const
  {
    MOZ_ASSERT(!mIsShutDown);
    return mSuspended;
  }
888 | | |
private:
  // Shutdown() must have been called before destruction.
  ~AudioChannelAgentCallback() { MOZ_ASSERT(mIsShutDown); };
891 | | |
  // Lazily creates and initializes the AudioChannelAgent against the owner's
  // inner window. Returns true when an agent is available, false if
  // initialization failed.
  bool MaybeCreateAudioChannelAgent()
  {
    if (mAudioChannelAgent) {
      return true;
    }

    mAudioChannelAgent = new AudioChannelAgent();
    nsresult rv =
      mAudioChannelAgent->Init(mOwner->OwnerDoc()->GetInnerWindow(), this);
    if (NS_WARN_IF(NS_FAILED(rv))) {
      // Drop the half-initialized agent so a later call can retry.
      mAudioChannelAgent = nullptr;
      MOZ_LOG(
        AudioChannelService::GetAudioChannelLog(),
        LogLevel::Debug,
        ("HTMLMediaElement::AudioChannelAgentCallback, Fail to initialize "
         "the audio channel agent, this = %p\n",
         this));
      return false;
    }

    return true;
  }
914 | | |
  // Registers (aPlaying = true) or unregisters (false) this element with the
  // audio channel agent. On registration the service hands back the window's
  // current volume/mute/suspend config, which is applied immediately.
  void NotifyAudioChannelAgent(bool aPlaying)
  {
    MOZ_ASSERT(mAudioChannelAgent);

    if (aPlaying) {
      AudioPlaybackConfig config;
      nsresult rv =
        mAudioChannelAgent->NotifyStartedPlaying(&config, IsOwnerAudible());
      if (NS_WARN_IF(NS_FAILED(rv))) {
        return;
      }

      // Apply the window state we were just handed (volume first, then
      // suspend, matching the callbacks' own ordering).
      WindowVolumeChanged(config.mVolume, config.mMuted);
      WindowSuspendChanged(config.mSuspend);
    } else {
      mAudioChannelAgent->NotifyStoppedPlaying();
    }
  }
933 | | |
  // Records the new suspend state (no-op when unchanged). Fires the
  // media-resumed observer notification first when applicable (see
  // MaybeNotifyMediaResumed).
  void SetSuspended(SuspendTypes aSuspend)
  {
    if (mSuspended == aSuspend) {
      return;
    }

    MaybeNotifyMediaResumed(aSuspend);
    mSuspended = aSuspend;
    MOZ_LOG(
      AudioChannelService::GetAudioChannelLog(),
      LogLevel::Debug,
      ("HTMLMediaElement::AudioChannelAgentCallback, SetAudioChannelSuspended, "
       "this = %p, aSuspend = %s\n",
       this,
       SuspendTypeToStr(aSuspend)));
  }
950 | | |
  // Resumes playback after the window suspension is lifted: clears the
  // suspend state, re-issues Play() on the owner (the returned promise is
  // deliberately ignored), and reports the audible-state change.
  void Resume()
  {
    if (!IsSuspended()) {
      MOZ_LOG(
        AudioChannelService::GetAudioChannelLog(),
        LogLevel::Debug,
        ("HTMLMediaElement::AudioChannelAgentCallback, ResumeFromAudioChannel, "
         "this = %p, don't need to be resumed!\n",
         this));
      return;
    }

    SetSuspended(nsISuspendedTypes::NONE_SUSPENDED);
    IgnoredErrorResult rv;
    RefPtr<Promise> toBeIgnored = mOwner->Play(rv);
    // A rejected play() promise must coincide with rv.Failed().
    MOZ_ASSERT_IF(toBeIgnored &&
                    toBeIgnored->State() == Promise::PromiseState::Rejected,
                  rv.Failed());
    if (rv.Failed()) {
      NS_WARNING("Not able to resume from AudioChannel.");
    }

    NotifyAudioPlaybackChanged(
      AudioChannelService::AudibleChangedReasons::ePauseStateChanged);
  }
976 | | |
  // Enters the given suspend state. For the pause-flavored suspend types the
  // owner element is also paused; SUSPENDED_BLOCK leaves the element's
  // paused state untouched.
  void Suspend(SuspendTypes aSuspend)
  {
    if (IsSuspended()) {
      return;
    }

    SetSuspended(aSuspend);
    if (aSuspend == nsISuspendedTypes::SUSPENDED_PAUSE ||
        aSuspend == nsISuspendedTypes::SUSPENDED_PAUSE_DISPOSABLE) {
      IgnoredErrorResult rv;
      mOwner->Pause(rv);
      if (NS_WARN_IF(rv.Failed())) {
        return;
      }
    }
    NotifyAudioPlaybackChanged(
      AudioChannelService::AudibleChangedReasons::ePauseStateChanged);
  }
995 | | |
  // Handles SUSPENDED_STOP_DISPOSABLE: clear any suspend state and pause the
  // owner. Playback can only be restarted via play().
  void Stop()
  {
    SetSuspended(nsISuspendedTypes::NONE_SUSPENDED);
    mOwner->Pause();
  }
1001 | | |
1002 | | bool IsPlayingStarted() |
1003 | 0 | { |
1004 | 0 | if (MaybeCreateAudioChannelAgent()) { |
1005 | 0 | return mAudioChannelAgent->IsPlayingStarted(); |
1006 | 0 | } |
1007 | 0 | return false; |
1008 | 0 | } |
1009 | | |
  // Dispatches a "media-playback-resumed" observer notification for this
  // element's window. In fennec, we should send the notification when media
  // is resumed from the pause-disposable which was called by media control.
  void MaybeNotifyMediaResumed(SuspendTypes aSuspend)
  {
    // Proceed when leaving SUSPENDED_PAUSE_DISPOSABLE or entering
    // NONE_SUSPENDED. NOTE(review): as written this also fires for
    // transitions into NONE_SUSPENDED from other suspend states — confirm
    // whether '&&' was meant to be '||' here.
    if (mSuspended != nsISuspendedTypes::SUSPENDED_PAUSE_DISPOSABLE &&
        aSuspend != nsISuspendedTypes::NONE_SUSPENDED) {
      return;
    }

    if (!IsPlayingStarted()) {
      return;
    }

    // Capture the window ID by value; the runnable must not touch |this|.
    uint64_t windowID = mAudioChannelAgent->WindowID();
    mOwner->MainThreadEventTarget()->Dispatch(NS_NewRunnableFunction(
      "dom::HTMLMediaElement::AudioChannelAgentCallback::"
      "MaybeNotifyMediaResumed",
      [windowID]() -> void {
        nsCOMPtr<nsIObserverService> observerService =
          services::GetObserverService();
        if (NS_WARN_IF(!observerService)) {
          return;
        }

        nsCOMPtr<nsISupportsPRUint64> wrapper =
          do_CreateInstance(NS_SUPPORTS_PRUINT64_CONTRACTID);
        if (NS_WARN_IF(!wrapper)) {
          return;
        }

        wrapper->SetData(windowID);
        observerService->NotifyObservers(
          wrapper, "media-playback-resumed", u"active");
      }));
  }
1045 | | |
1046 | | bool IsTabActivated() |
1047 | 0 | { |
1048 | 0 | if (MaybeCreateAudioChannelAgent()) { |
1049 | 0 | return !mAudioChannelAgent->ShouldBlockMedia(); |
1050 | 0 | } |
1051 | 0 | return false; |
1052 | 0 | } |
1053 | | |
1054 | | bool IsSuspended() const |
1055 | 0 | { |
1056 | 0 | return (mSuspended == nsISuspendedTypes::SUSPENDED_PAUSE || |
1057 | 0 | mSuspended == nsISuspendedTypes::SUSPENDED_PAUSE_DISPOSABLE || |
1058 | 0 | mSuspended == nsISuspendedTypes::SUSPENDED_BLOCK); |
1059 | 0 | } |
1060 | | |
1061 | | AudibleState IsOwnerAudible() const |
1062 | 0 | { |
1063 | 0 | // Muted or the volume should not be ~0 |
1064 | 0 | if (mOwner->mMuted || (std::fabs(mOwner->Volume()) <= 1e-7)) { |
1065 | 0 | return mOwner->HasAudio() |
1066 | 0 | ? AudioChannelService::AudibleState::eMaybeAudible |
1067 | 0 | : AudioChannelService::AudibleState::eNotAudible; |
1068 | 0 | } |
1069 | 0 |
|
1070 | 0 | // No audio track. |
1071 | 0 | if (!mOwner->HasAudio()) { |
1072 | 0 | return AudioChannelService::AudibleState::eNotAudible; |
1073 | 0 | } |
1074 | 0 | |
1075 | 0 | // Might be audible but not yet. |
1076 | 0 | if (mOwner->HasAudio() && !mOwner->mIsAudioTrackAudible) { |
1077 | 0 | return AudioChannelService::AudibleState::eMaybeAudible; |
1078 | 0 | } |
1079 | 0 | |
1080 | 0 | // Suspended or paused media doesn't produce any sound. |
1081 | 0 | if (mSuspended != nsISuspendedTypes::NONE_SUSPENDED || mOwner->mPaused) { |
1082 | 0 | return AudioChannelService::AudibleState::eNotAudible; |
1083 | 0 | } |
1084 | 0 | |
1085 | 0 | return AudioChannelService::AudibleState::eAudible; |
1086 | 0 | } |
1087 | | |
  // Decides whether this element should currently be registered with the
  // audio channel service. Checks run in order from disqualifying conditions
  // to qualifying ones.
  bool IsPlayingThroughTheAudioChannel() const
  {
    // If we have an error, we are not playing.
    if (mOwner->GetError()) {
      return false;
    }

    // We should consider any bfcached page or inactive document as non-playing.
    if (!mOwner->IsActive()) {
      return false;
    }

    // It might be resumed from remote, we should keep the audio channel agent.
    if (IsSuspended()) {
      return true;
    }

    // Are we paused
    if (mOwner->mPaused) {
      return false;
    }

    // No audio track
    if (!mOwner->HasAudio()) {
      return false;
    }

    // A loop always is playing
    if (mOwner->HasAttr(kNameSpaceID_None, nsGkAtoms::loop)) {
      return true;
    }

    // If we are actually playing...
    if (mOwner->IsCurrentlyPlaying()) {
      return true;
    }

    // If we are playing an external stream.
    if (mOwner->mSrcAttrStream) {
      return true;
    }

    return false;
  }
1132 | | |
1133 | | RefPtr<AudioChannelAgent> mAudioChannelAgent; |
1134 | | HTMLMediaElement* mOwner; |
1135 | | |
1136 | | // The audio channel volume |
1137 | | float mAudioChannelVolume; |
1138 | | // Is this media element playing? |
1139 | | bool mPlayingThroughTheAudioChannel; |
1140 | | // True if the sound is being captured by the window. |
1141 | | bool mAudioCapturedByWindow; |
1142 | | // We have different kinds of suspended cases, |
1143 | | // - SUSPENDED_PAUSE |
1144 | | // It's used when we temporary lost platform audio focus. MediaElement can |
1145 | | // only be resumed when we gain the audio focus again. |
1146 | | // - SUSPENDED_PAUSE_DISPOSABLE |
1147 | | // It's used when user press the pause button on the remote media-control. |
1148 | | // MediaElement can be resumed by remote media-control or via play(). |
1149 | | // - SUSPENDED_BLOCK |
1150 | | // It's used to reduce the power consumption, we won't play the auto-play |
1151 | | // audio/video in the page we have never visited before. MediaElement would |
1152 | | // be resumed when the page is active. See bug647429 for more details. |
1153 | | // - SUSPENDED_STOP_DISPOSABLE |
1154 | | // When we permanently lost platform audio focus, we should stop playing |
1155 | | // and stop the audio channel agent. MediaElement can only be restarted by |
1156 | | // play(). |
1157 | | SuspendTypes mSuspended; |
1158 | | // Indicate whether media element is audible for users. |
1159 | | AudibleState mIsOwnerAudible; |
1160 | | bool mIsShutDown; |
1161 | | }; |
1162 | | |
NS_IMPL_CYCLE_COLLECTION_CLASS(HTMLMediaElement::AudioChannelAgentCallback)

// Cycle collection: the only cycle-collectable member is the audio channel
// agent; traverse and unlink it so the callback can be reclaimed.
NS_IMPL_CYCLE_COLLECTION_TRAVERSE_BEGIN(
  HTMLMediaElement::AudioChannelAgentCallback)
  NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mAudioChannelAgent)
NS_IMPL_CYCLE_COLLECTION_TRAVERSE_END

NS_IMPL_CYCLE_COLLECTION_UNLINK_BEGIN(
  HTMLMediaElement::AudioChannelAgentCallback)
  NS_IMPL_CYCLE_COLLECTION_UNLINK(mAudioChannelAgent)
NS_IMPL_CYCLE_COLLECTION_UNLINK_END

// QI: exposes only nsIAudioChannelAgentCallback (plus CC plumbing).
NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION(
  HTMLMediaElement::AudioChannelAgentCallback)
  NS_INTERFACE_MAP_ENTRY(nsIAudioChannelAgentCallback)
NS_INTERFACE_MAP_END

NS_IMPL_CYCLE_COLLECTING_ADDREF(HTMLMediaElement::AudioChannelAgentCallback)
NS_IMPL_CYCLE_COLLECTING_RELEASE(HTMLMediaElement::AudioChannelAgentCallback)
1182 | | |
// ChannelLoader manages the necko channel used to fetch the media resource.
// It owns the channel only until either the load is cancelled (Cancel()) or
// a decoder takes over ownership of the underlying resource (Done()).
class HTMLMediaElement::ChannelLoader final
{
public:
  NS_INLINE_DECL_REFCOUNTING(ChannelLoader);

  // Actually opens the channel for aElement's current loading source. Runs
  // asynchronously (dispatched by Load()); bails out if Cancel() ran first.
  void LoadInternal(HTMLMediaElement* aElement)
  {
    if (mCancelled) {
      return;
    }

    // determine what security checks need to be performed in AsyncOpen2().
    nsSecurityFlags securityFlags =
      aElement->ShouldCheckAllowOrigin()
        ? nsILoadInfo::SEC_REQUIRE_CORS_DATA_INHERITS
        : nsILoadInfo::SEC_ALLOW_CROSS_ORIGIN_DATA_INHERITS;

    if (aElement->GetCORSMode() == CORS_USE_CREDENTIALS) {
      securityFlags |= nsILoadInfo::SEC_COOKIES_INCLUDE;
    }

    MOZ_ASSERT(
      aElement->IsAnyOfHTMLElements(nsGkAtoms::audio, nsGkAtoms::video));
    nsContentPolicyType contentPolicyType =
      aElement->IsHTMLElement(nsGkAtoms::audio)
        ? nsIContentPolicy::TYPE_INTERNAL_AUDIO
        : nsIContentPolicy::TYPE_INTERNAL_VIDEO;

    // If aElement has 'triggeringprincipal' attribute, we will use the value as
    // triggeringPrincipal for the channel, otherwise it will default to use
    // aElement->NodePrincipal().
    // This function returns true when aElement has 'triggeringprincipal', so if
    // setAttrs is true we will override the origin attributes on the channel
    // later.
    nsCOMPtr<nsIPrincipal> triggeringPrincipal;
    bool setAttrs = nsContentUtils::QueryTriggeringPrincipal(
      aElement,
      aElement->mLoadingSrcTriggeringPrincipal,
      getter_AddRefs(triggeringPrincipal));

    nsCOMPtr<nsILoadGroup> loadGroup = aElement->GetDocumentLoadGroup();
    nsCOMPtr<nsIChannel> channel;
    nsresult rv = NS_NewChannelWithTriggeringPrincipal(
      getter_AddRefs(channel),
      aElement->mLoadingSrc,
      static_cast<Element*>(aElement),
      triggeringPrincipal,
      securityFlags,
      contentPolicyType,
      nullptr, // aPerformanceStorage
      loadGroup,
      nullptr, // aCallbacks
      nsICachingChannel::LOAD_BYPASS_LOCAL_CACHE_IF_BUSY |
        nsIChannel::LOAD_MEDIA_SNIFFER_OVERRIDES_CONTENT_TYPE |
        nsIChannel::LOAD_CLASSIFY_URI | nsIChannel::LOAD_CALL_CONTENT_SNIFFERS);

    if (NS_FAILED(rv)) {
      // Notify load error so the element will try next resource candidate.
      aElement->NotifyLoadError(NS_LITERAL_CSTRING("Fail to create channel"));
      return;
    }

    if (setAttrs) {
      nsCOMPtr<nsILoadInfo> loadInfo = channel->GetLoadInfo();
      if (loadInfo) {
        // The function simply returns NS_OK, so we ignore the return value.
        Unused << loadInfo->SetOriginAttributes(
          triggeringPrincipal->OriginAttributesRef());
      }
    }

    nsCOMPtr<nsIClassOfService> cos(do_QueryInterface(channel));
    if (cos) {
      if (aElement->mUseUrgentStartForChannel) {
        cos->AddClassFlags(nsIClassOfService::UrgentStart);

        // Reset the flag to avoid loading again without initiated by user
        // interaction.
        aElement->mUseUrgentStartForChannel = false;
      }

      // Unconditionally disable throttling since we want the media to fluently
      // play even when we switch the tab to background.
      cos->AddClassFlags(nsIClassOfService::DontThrottle);
    }

    // The listener holds a strong reference to us. This creates a
    // reference cycle, once we've set mChannel, which is manually broken
    // in the listener's OnStartRequest method after it is finished with
    // the element. The cycle will also be broken if we get a shutdown
    // notification before OnStartRequest fires. Necko guarantees that
    // OnStartRequest will eventually fire if we don't shut down first.
    RefPtr<MediaLoadListener> loadListener = new MediaLoadListener(aElement);

    channel->SetNotificationCallbacks(loadListener);

    nsCOMPtr<nsIHttpChannel> hc = do_QueryInterface(channel);
    if (hc) {
      // Use a byte range request from the start of the resource.
      // This enables us to detect if the stream supports byte range
      // requests, and therefore seeking, early.
      rv = hc->SetRequestHeader(
        NS_LITERAL_CSTRING("Range"), NS_LITERAL_CSTRING("bytes=0-"), false);
      MOZ_ASSERT(NS_SUCCEEDED(rv));
      aElement->SetRequestHeaders(hc);
    }

    rv = channel->AsyncOpen2(loadListener);
    if (NS_FAILED(rv)) {
      // Notify load error so the element will try next resource candidate.
      aElement->NotifyLoadError(NS_LITERAL_CSTRING("Failed to open channel"));
      return;
    }

    // Else the channel must be open and starting to download. If it encounters
    // a non-catastrophic failure, it will set a new task to continue loading
    // another candidate. It's safe to set it as mChannel now.
    mChannel = channel;

    // loadListener will be unregistered either on shutdown or when
    // OnStartRequest for the channel we just opened fires.
    nsContentUtils::RegisterShutdownObserver(loadListener);
  }

  // Kicks off the load by dispatching LoadInternal to the main thread.
  nsresult Load(HTMLMediaElement* aElement)
  {
    MOZ_ASSERT(aElement);
    // Per bug 1235183 comment 8, we can't spin the event loop from stable
    // state. Defer NS_NewChannel() to a new regular runnable.
    return aElement->MainThreadEventTarget()->Dispatch(
      NewRunnableMethod<HTMLMediaElement*>("ChannelLoader::LoadInternal",
                                           this,
                                           &ChannelLoader::LoadInternal,
                                           aElement));
  }

  // Aborts the in-flight load (if any) and prevents a still-pending
  // LoadInternal from opening a new channel.
  void Cancel()
  {
    mCancelled = true;
    if (mChannel) {
      mChannel->Cancel(NS_BINDING_ABORTED);
      mChannel = nullptr;
    }
  }

  void Done()
  {
    MOZ_ASSERT(mChannel);
    // Decoder successfully created, the decoder now owns the MediaResource
    // which owns the channel.
    mChannel = nullptr;
  }

  // Swaps mChannel for the redirect target and forwards the Range header so
  // seekability detection survives redirects.
  nsresult Redirect(nsIChannel* aChannel,
                    nsIChannel* aNewChannel,
                    uint32_t aFlags)
  {
    NS_ASSERTION(aChannel == mChannel, "Channels should match!");
    mChannel = aNewChannel;

    // Handle forwarding of Range header so that the initial detection
    // of seeking support (via result code 206) works across redirects.
    nsCOMPtr<nsIHttpChannel> http = do_QueryInterface(aChannel);
    NS_ENSURE_STATE(http);

    NS_NAMED_LITERAL_CSTRING(rangeHdr, "Range");

    nsAutoCString rangeVal;
    if (NS_SUCCEEDED(http->GetRequestHeader(rangeHdr, rangeVal))) {
      NS_ENSURE_STATE(!rangeVal.IsEmpty());

      http = do_QueryInterface(aNewChannel);
      NS_ENSURE_STATE(http);

      nsresult rv = http->SetRequestHeader(rangeHdr, rangeVal, false);
      NS_ENSURE_SUCCESS(rv, rv);
    }

    return NS_OK;
  }

private:
  ~ChannelLoader() { MOZ_ASSERT(!mChannel); }
  // Holds a reference to the first channel we open to the media resource.
  // Once the decoder is created, control over the channel passes to the
  // decoder, and we null out this reference. We must store this in case
  // we need to cancel the channel before control of it passes to the decoder.
  nsCOMPtr<nsIChannel> mChannel;

  bool mCancelled = false;
};
1374 | | |
1375 | | class HTMLMediaElement::ErrorSink |
1376 | | { |
1377 | | public: |
1378 | | explicit ErrorSink(HTMLMediaElement* aOwner) |
1379 | | : mOwner(aOwner) |
1380 | 0 | { |
1381 | 0 | MOZ_ASSERT(mOwner); |
1382 | 0 | } |
1383 | | |
1384 | | void SetError(uint16_t aErrorCode, const nsACString& aErrorDetails) |
1385 | 0 | { |
1386 | 0 | // Since we have multiple paths calling into DecodeError, e.g. |
1387 | 0 | // MediaKeys::Terminated and EMEH264Decoder::Error. We should take the 1st |
1388 | 0 | // one only in order not to fire multiple 'error' events. |
1389 | 0 | if (mError) { |
1390 | 0 | return; |
1391 | 0 | } |
1392 | 0 | |
1393 | 0 | if (!IsValidErrorCode(aErrorCode)) { |
1394 | 0 | NS_ASSERTION(false, "Undefined MediaError codes!"); |
1395 | 0 | return; |
1396 | 0 | } |
1397 | 0 |
|
1398 | 0 |
|
1399 | 0 | mError = new MediaError(mOwner, aErrorCode, aErrorDetails); |
1400 | 0 | mOwner->DispatchAsyncEvent(NS_LITERAL_STRING("error")); |
1401 | 0 | if (mOwner->ReadyState() == HAVE_NOTHING && |
1402 | 0 | aErrorCode == MEDIA_ERR_ABORTED) { |
1403 | 0 | // https://html.spec.whatwg.org/multipage/embedded-content.html#media-data-processing-steps-list |
1404 | 0 | // "If the media data fetching process is aborted by the user" |
1405 | 0 | mOwner->DispatchAsyncEvent(NS_LITERAL_STRING("abort")); |
1406 | 0 | mOwner->ChangeNetworkState(NETWORK_EMPTY); |
1407 | 0 | mOwner->DispatchAsyncEvent(NS_LITERAL_STRING("emptied")); |
1408 | 0 | if (mOwner->mDecoder) { |
1409 | 0 | mOwner->ShutdownDecoder(); |
1410 | 0 | } |
1411 | 0 | } else if (aErrorCode == MEDIA_ERR_SRC_NOT_SUPPORTED) { |
1412 | 0 | mOwner->ChangeNetworkState(NETWORK_NO_SOURCE); |
1413 | 0 | } else { |
1414 | 0 | mOwner->ChangeNetworkState(NETWORK_IDLE); |
1415 | 0 | } |
1416 | 0 | } |
1417 | | |
1418 | | void ResetError() |
1419 | 0 | { |
1420 | 0 | mError = nullptr; |
1421 | 0 | } |
1422 | | |
1423 | | RefPtr<MediaError> mError; |
1424 | | |
1425 | | private: |
1426 | | bool IsValidErrorCode(const uint16_t& aErrorCode) const |
1427 | 0 | { |
1428 | 0 | return (aErrorCode == MEDIA_ERR_DECODE || aErrorCode == MEDIA_ERR_NETWORK || |
1429 | 0 | aErrorCode == MEDIA_ERR_ABORTED || |
1430 | 0 | aErrorCode == MEDIA_ERR_SRC_NOT_SUPPORTED); |
1431 | 0 | } |
1432 | | |
1433 | | // Media elememt's life cycle would be longer than error sink, so we use the |
1434 | | // raw pointer and this class would only be referenced by media element. |
1435 | | HTMLMediaElement* mOwner; |
1436 | | }; |
1437 | | |
NS_IMPL_CYCLE_COLLECTION_CLASS(HTMLMediaElement)

// Cycle collection traversal: report every strong, cycle-collectable edge
// the media element holds (sources, streams, tracks, keys, promises, ...).
NS_IMPL_CYCLE_COLLECTION_TRAVERSE_BEGIN_INHERITED(HTMLMediaElement,
                                                  nsGenericHTMLElement)
  NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mMediaSource)
  NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mSrcMediaSource)
  NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mSrcStream)
  NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mSrcAttrStream)
  NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mSourcePointer)
  NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mLoadBlockedDoc)
  NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mSourceLoadCandidate)
  NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mAudioChannelWrapper)
  NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mErrorSink->mError)
  // Output streams carry two traversable members each.
  for (uint32_t i = 0; i < tmp->mOutputStreams.Length(); ++i) {
    NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mOutputStreams[i].mStream)
    NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mOutputStreams[i].mPreCreatedTracks)
  }
  NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mPlayed);
  NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mTextTrackManager)
  NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mAudioTrackList)
  NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mVideoTrackList)
  NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mMediaKeys)
  NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mIncomingMediaKeys)
  NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mSelectedVideoStreamTrack)
  NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mPendingPlayPromises)
  NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mSeekDOMPromise)
  NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mSetMediaKeysDOMPromise)
NS_IMPL_CYCLE_COLLECTION_TRAVERSE_END

// Cycle collection unlink: drop the same edges, with extra teardown for the
// mutation observer, stream playback and the audio channel wrapper.
NS_IMPL_CYCLE_COLLECTION_UNLINK_BEGIN_INHERITED(HTMLMediaElement,
                                                nsGenericHTMLElement)
  tmp->RemoveMutationObserver(tmp);
  if (tmp->mSrcStream) {
    // Need to EndMediaStreamPlayback to clear mSrcStream and make sure
    // everything gets unhooked correctly.
    tmp->EndSrcMediaStreamPlayback();
  }
  NS_IMPL_CYCLE_COLLECTION_UNLINK(mSrcAttrStream)
  NS_IMPL_CYCLE_COLLECTION_UNLINK(mMediaSource)
  NS_IMPL_CYCLE_COLLECTION_UNLINK(mSrcMediaSource)
  NS_IMPL_CYCLE_COLLECTION_UNLINK(mSourcePointer)
  NS_IMPL_CYCLE_COLLECTION_UNLINK(mLoadBlockedDoc)
  NS_IMPL_CYCLE_COLLECTION_UNLINK(mSourceLoadCandidate)
  if (tmp->mAudioChannelWrapper) {
    tmp->mAudioChannelWrapper->Shutdown();
  }
  NS_IMPL_CYCLE_COLLECTION_UNLINK(mAudioChannelWrapper)
  NS_IMPL_CYCLE_COLLECTION_UNLINK(mErrorSink->mError)
  NS_IMPL_CYCLE_COLLECTION_UNLINK(mOutputStreams)
  NS_IMPL_CYCLE_COLLECTION_UNLINK(mPlayed)
  NS_IMPL_CYCLE_COLLECTION_UNLINK(mTextTrackManager)
  NS_IMPL_CYCLE_COLLECTION_UNLINK(mAudioTrackList)
  NS_IMPL_CYCLE_COLLECTION_UNLINK(mVideoTrackList)
  NS_IMPL_CYCLE_COLLECTION_UNLINK(mMediaKeys)
  NS_IMPL_CYCLE_COLLECTION_UNLINK(mIncomingMediaKeys)
  NS_IMPL_CYCLE_COLLECTION_UNLINK(mSelectedVideoStreamTrack)
  NS_IMPL_CYCLE_COLLECTION_UNLINK(mPendingPlayPromises)
  NS_IMPL_CYCLE_COLLECTION_UNLINK(mSeekDOMPromise)
  NS_IMPL_CYCLE_COLLECTION_UNLINK(mSetMediaKeysDOMPromise)
NS_IMPL_CYCLE_COLLECTION_UNLINK_END

NS_IMPL_ISUPPORTS_CYCLE_COLLECTION_INHERITED_0(HTMLMediaElement,
                                               nsGenericHTMLElement)
1501 | | |
1502 | | void |
1503 | | HTMLMediaElement::ContentRemoved(nsIContent* aChild, |
1504 | | nsIContent* aPreviousSibling) |
1505 | 0 | { |
1506 | 0 | if (aChild == mSourcePointer) { |
1507 | 0 | mSourcePointer = aPreviousSibling; |
1508 | 0 | } |
1509 | 0 | } |
1510 | | |
1511 | | already_AddRefed<MediaSource> |
1512 | | HTMLMediaElement::GetMozMediaSourceObject() const |
1513 | 0 | { |
1514 | 0 | RefPtr<MediaSource> source = mMediaSource; |
1515 | 0 | return source.forget(); |
1516 | 0 | } |
1517 | | |
1518 | | void |
1519 | | HTMLMediaElement::GetMozDebugReaderData(nsAString& aString) |
1520 | 0 | { |
1521 | 0 | if (mDecoder && !mSrcStream) { |
1522 | 0 | nsAutoCString result; |
1523 | 0 | mDecoder->GetMozDebugReaderData(result); |
1524 | 0 | CopyUTF8toUTF16(result, aString); |
1525 | 0 | } |
1526 | 0 | } |
1527 | | |
// Returns a promise that resolves with a human-readable debug dump:
// reader data, compositor dropped-frame count, EME info, and (when a
// decoder exists) the decoder's asynchronously-gathered debug info.
already_AddRefed<Promise>
HTMLMediaElement::MozRequestDebugInfo(ErrorResult& aRv)
{
  RefPtr<Promise> promise = CreateDOMPromise(aRv);
  if (NS_WARN_IF(aRv.Failed())) {
    return nullptr;
  }

  nsAutoString result;
  GetMozDebugReaderData(result);

  if (mVideoFrameContainer) {
    result.AppendPrintf(
      "Compositor dropped frame(including when element's invisible): %u\n",
      mVideoFrameContainer->GetDroppedImageCount());
  }

  if (mMediaKeys) {
    nsString EMEInfo;
    GetEMEInfo(EMEInfo);
    result.AppendLiteral("EME Info: ");
    result.Append(EMEInfo);
    result.AppendLiteral("\n");
  }

  if (mDecoder) {
    // Resolve with the synchronous data plus whatever the decoder reports.
    mDecoder->RequestDebugInfo()->Then(
      mAbstractMainThread,
      __func__,
      [promise, result](const nsACString& aString) {
        promise->MaybeResolve(result + NS_ConvertUTF8toUTF16(aString));
      },
      [promise, result]() { promise->MaybeResolve(result); });
  } else {
    promise->MaybeResolve(result);
  }

  return promise.forget();
}
1567 | | |
// Chrome-exposed static: turns on DecoderDoctor logging globally.
/* static */ void
HTMLMediaElement::MozEnableDebugLog(const GlobalObject&)
{
  DecoderDoctorLogger::EnableLogging();
}
1573 | | |
// Returns a promise resolved with this element's DecoderDoctor log messages,
// or rejected with the retrieval failure code.
already_AddRefed<Promise>
HTMLMediaElement::MozRequestDebugLog(ErrorResult& aRv)
{
  RefPtr<Promise> promise = CreateDOMPromise(aRv);
  if (NS_WARN_IF(aRv.Failed())) {
    return nullptr;
  }

  DecoderDoctorLogger::RetrieveMessages(this)->Then(
    mAbstractMainThread,
    __func__,
    [promise](const nsACString& aString) {
      promise->MaybeResolve(NS_ConvertUTF8toUTF16(aString));
    },
    [promise](nsresult rv) { promise->MaybeReject(rv); });

  return promise.forget();
}
1592 | | |
1593 | | already_AddRefed<Promise> |
1594 | | HTMLMediaElement::MozDumpDebugInfo() |
1595 | 0 | { |
1596 | 0 | ErrorResult rv; |
1597 | 0 | RefPtr<Promise> promise = CreateDOMPromise(rv); |
1598 | 0 | if (NS_WARN_IF(rv.Failed())) { |
1599 | 0 | return nullptr; |
1600 | 0 | } |
1601 | 0 | if (mDecoder) { |
1602 | 0 | mDecoder->DumpDebugInfo()->Then(mAbstractMainThread, |
1603 | 0 | __func__, |
1604 | 0 | promise.get(), |
1605 | 0 | &Promise::MaybeResolveWithUndefined); |
1606 | 0 | } else { |
1607 | 0 | promise->MaybeResolveWithUndefined(); |
1608 | 0 | } |
1609 | 0 | return promise.forget(); |
1610 | 0 | } |
1611 | | |
1612 | | void |
1613 | | HTMLMediaElement::SetVisible(bool aVisible) |
1614 | 0 | { |
1615 | 0 | mForcedHidden = !aVisible; |
1616 | 0 | if (mDecoder) { |
1617 | 0 | mDecoder->SetForcedHidden(!aVisible); |
1618 | 0 | } |
1619 | 0 | } |
1620 | | |
1621 | | already_AddRefed<layers::Image> |
1622 | | HTMLMediaElement::GetCurrentImage() |
1623 | 0 | { |
1624 | 0 | MarkAsTainted(); |
1625 | 0 |
|
1626 | 0 | // TODO: In bug 1345404, handle case when video decoder is already suspended. |
1627 | 0 | ImageContainer* container = GetImageContainer(); |
1628 | 0 | if (!container) { |
1629 | 0 | return nullptr; |
1630 | 0 | } |
1631 | 0 | |
1632 | 0 | AutoLockImage lockImage(container); |
1633 | 0 | RefPtr<layers::Image> image = lockImage.GetImage(TimeStamp::Now()); |
1634 | 0 | return image.forget(); |
1635 | 0 | } |
1636 | | |
bool
HTMLMediaElement::HasSuspendTaint() const
{
  // Returns whether suspend-taint has been set on this element. When a
  // decoder exists its taint flag must agree with the element's cached copy.
  MOZ_ASSERT(!mDecoder || (mDecoder->HasSuspendTaint() == mHasSuspendTaint));
  return mHasSuspendTaint;
}
1643 | | |
1644 | | already_AddRefed<DOMMediaStream> |
1645 | | HTMLMediaElement::GetSrcObject() const |
1646 | 0 | { |
1647 | 0 | NS_ASSERTION(!mSrcAttrStream || mSrcAttrStream->GetPlaybackStream(), |
1648 | 0 | "MediaStream should have been set up properly"); |
1649 | 0 | RefPtr<DOMMediaStream> stream = mSrcAttrStream; |
1650 | 0 | return stream.forget(); |
1651 | 0 | } |
1652 | | |
void
HTMLMediaElement::SetSrcObject(DOMMediaStream& aValue)
{
  // Reference overload; simply delegates to the pointer variant.
  SetSrcObject(&aValue);
}
1658 | | |
void
HTMLMediaElement::SetSrcObject(DOMMediaStream* aValue)
{
  // Swap in the new srcObject stream (may be null), refresh audio-channel
  // bookkeeping, then restart the load algorithm so playback switches to
  // the new source.
  mSrcAttrStream = aValue;
  UpdateAudioChannelPlayingState();
  DoLoad();
}
1666 | | |
1667 | | bool |
1668 | | HTMLMediaElement::Ended() |
1669 | 0 | { |
1670 | 0 | return (mDecoder && mDecoder->IsEnded()) || |
1671 | 0 | (mSrcStream && !mSrcStream->Active()); |
1672 | 0 | } |
1673 | | |
1674 | | void |
1675 | | HTMLMediaElement::GetCurrentSrc(nsAString& aCurrentSrc) |
1676 | 0 | { |
1677 | 0 | nsAutoCString src; |
1678 | 0 | GetCurrentSpec(src); |
1679 | 0 | CopyUTF8toUTF16(src, aCurrentSrc); |
1680 | 0 | } |
1681 | | |
nsresult
HTMLMediaElement::OnChannelRedirect(nsIChannel* aChannel,
                                    nsIChannel* aNewChannel,
                                    uint32_t aFlags)
{
  // Forward the network redirect to the channel loader that owns the media
  // channel. A loader must exist while a channel load is in flight.
  MOZ_ASSERT(mChannelLoader);
  return mChannelLoader->Redirect(aChannel, aNewChannel, aFlags);
}
1690 | | |
void
HTMLMediaElement::ShutdownDecoder()
{
  // Tear down the current decoder. The element must own a decoder when this
  // is called; the steps below are order-sensitive.
  RemoveMediaElementFromURITable();
  NS_ASSERTION(mDecoder, "Must have decoder to shut down");

  // Stop listening for "waiting for key" notifications from the decoder.
  mWaitingForKeyListener.DisconnectIfExists();
  // Give MSE a chance to complete any transactions still in flight before
  // the decoder goes away.
  if (mMediaSource) {
    mMediaSource->CompletePendingTransactions();
  }
  // For every output stream capturing this decoder, remember the highest
  // TrackID the decoder would hand out next, so a future decoder capturing
  // into the same stream doesn't reuse IDs.
  for (OutputMediaStream& out : mOutputStreams) {
    if (!out.mCapturingDecoder) {
      continue;
    }
    out.mNextAvailableTrackID = std::max<TrackID>(
      mDecoder->NextAvailableTrackIDFor(out.mStream->GetInputStream()),
      out.mNextAvailableTrackID);
  }
  mDecoder->Shutdown();
  DDUNLINKCHILD(mDecoder.get());
  mDecoder = nullptr;
  // Flush telemetry gathered while this decoder was playing audio.
  ReportAudioTrackSilenceProportionTelemetry();
}
1714 | | |
void
HTMLMediaElement::AbortExistingLoads()
{
  // Implements the teardown performed at the start of the media-element load
  // algorithm: cancel the in-flight load, destroy the current decoder/stream,
  // reset per-resource state, and fire abort/emptied events as needed.

  // Abort any already-running instance of the resource selection algorithm.
  mLoadWaitStatus = NOT_WAITING;

  // Set a new load ID. This will cause events which were enqueued
  // with a different load ID to silently be cancelled.
  mCurrentLoadID++;

  // Immediately reject or resolve the already-dispatched
  // nsResolveOrRejectPendingPlayPromisesRunners. These runners won't be
  // executed again later since the mCurrentLoadID had been changed.
  for (auto& runner : mPendingPlayPromisesRunners) {
    runner->ResolveOrReject();
  }
  mPendingPlayPromisesRunners.Clear();

  if (mChannelLoader) {
    mChannelLoader->Cancel();
    mChannelLoader = nullptr;
  }

  bool fireTimeUpdate = false;

  // We need to remove StreamSizeListener before VideoTracks get emptied.
  // NOTE(review): assumes mSelectedVideoStreamTrack is non-null whenever
  // mMediaStreamSizeListener is set — the listener is only installed on a
  // selected track in NotifyMediaTrackEnabled.
  if (mMediaStreamSizeListener) {
    mSelectedVideoStreamTrack->RemoveDirectListener(mMediaStreamSizeListener);
    mMediaStreamSizeListener->Forget();
    mMediaStreamSizeListener = nullptr;
  }

  // When aborting the existing loads, empty the objects in audio track list and
  // video track list, no events (in particular, no removetrack events) are
  // fired as part of this. Ending MediaStream sends track ended notifications,
  // so we empty the track lists prior.
  AudioTracks()->EmptyTracks();
  VideoTracks()->EmptyTracks();

  if (mDecoder) {
    // Remember whether playback had progressed; ShutdownDecoder() resets the
    // reported position to 0.
    fireTimeUpdate = mDecoder->GetCurrentTime() != 0.0;
    ShutdownDecoder();
  }
  if (mSrcStream) {
    EndSrcMediaStreamPlayback();
  }

  RemoveMediaElementFromURITable();
  mLoadingSrc = nullptr;
  mLoadingSrcTriggeringPrincipal = nullptr;
  DDLOG(DDLogCategory::Property, "loading_src", "");
  DDUNLINKCHILD(mMediaSource.get());
  mMediaSource = nullptr;

  if (mNetworkState == NETWORK_LOADING || mNetworkState == NETWORK_IDLE) {
    DispatchAsyncEvent(NS_LITERAL_STRING("abort"));
  }

  // Reset all per-resource state back to its defaults.
  mErrorSink->ResetError();
  mCurrentPlayRangeStart = -1.0;
  mLoadedDataFired = false;
  mAutoplaying = true;
  mIsLoadingFromSourceChildren = false;
  mSuspendedAfterFirstFrame = false;
  mAllowSuspendAfterFirstFrame = true;
  mHaveQueuedSelectResource = false;
  mSuspendedForPreloadNone = false;
  mDownloadSuspendedByCache = false;
  mMediaInfo = MediaInfo();
  mIsEncrypted = false;
  mPendingEncryptedInitData.Reset();
  mWaitingForKey = NOT_WAITING_FOR_KEY;
  mSourcePointer = nullptr;
  mBlockedAsWithoutMetadata = false;

  mTags = nullptr;
  mAudioTrackSilenceStartedTime = 0.0;

  if (mNetworkState != NETWORK_EMPTY) {
    NS_ASSERTION(!mDecoder && !mSrcStream,
                 "How did someone setup a new stream/decoder already?");
    // ChangeNetworkState() will call UpdateAudioChannelPlayingState()
    // indirectly which depends on mPaused. So we need to update mPaused first.
    if (!mPaused) {
      mPaused = true;
      DispatchAsyncEvent(NS_LITERAL_STRING("pause"));
      RejectPromises(TakePendingPlayPromises(), NS_ERROR_DOM_MEDIA_ABORT_ERR);
    }
    ChangeNetworkState(NETWORK_EMPTY);
    ChangeReadyState(HAVE_NOTHING);

    // TODO: Apply the rules for text track cue rendering Bug 865407
    if (mTextTrackManager) {
      mTextTrackManager->GetTextTracks()->SetCuesInactive();
    }

    if (fireTimeUpdate) {
      // Since we destroyed the decoder above, the current playback position
      // will now be reported as 0. The playback position was non-zero when
      // we destroyed the decoder, so fire a timeupdate event so that the
      // change will be reflected in the controls.
      FireTimeUpdate(false);
    }
    DispatchAsyncEvent(NS_LITERAL_STRING("emptied"));
    UpdateAudioChannelPlayingState();
  }

  // Disconnect requests for permission to play. We'll make a new request
  // if required should the new media resource try to play.
  mAutoplayPermissionRequest.DisconnectIfExists();

  // We may have changed mPaused, mAutoplaying, and other
  // things which can affect AddRemoveSelfReference
  AddRemoveSelfReference();

  mIsRunningSelectResource = false;

  if (mTextTrackManager) {
    mTextTrackManager->NotifyReset();
  }

  mEventDeliveryPaused = false;
  mPendingEvents.Clear();

  AssertReadyStateIsNothing();
}
1841 | | |
void
HTMLMediaElement::NoSupportedMediaSourceError(const nsACString& aErrorDetails)
{
  // Every candidate resource failed to load: tear down any decoder, surface
  // MEDIA_ERR_SRC_NOT_SUPPORTED on the element, and reject pending play()
  // promises with NotSupportedError.
  if (mDecoder) {
    ShutdownDecoder();
  }
  mErrorSink->SetError(MEDIA_ERR_SRC_NOT_SUPPORTED, aErrorDetails);
  ChangeDelayLoadStatus(false);
  UpdateAudioChannelPlayingState();
  RejectPromises(TakePendingPlayPromises(),
                 NS_ERROR_DOM_MEDIA_NOT_SUPPORTED_ERR);
}
1854 | | |
1855 | | typedef void (HTMLMediaElement::*SyncSectionFn)(); |
1856 | | |
1857 | | // Runs a "synchronous section", a function that must run once the event loop |
1858 | | // has reached a "stable state". See: |
1859 | | // http://www.whatwg.org/specs/web-apps/current-work/multipage/webappapis.html#synchronous-section |
1860 | | class nsSyncSection : public nsMediaEvent |
1861 | | { |
1862 | | private: |
1863 | | nsCOMPtr<nsIRunnable> mRunnable; |
1864 | | |
1865 | | public: |
1866 | | nsSyncSection(HTMLMediaElement* aElement, nsIRunnable* aRunnable) |
1867 | | : nsMediaEvent("dom::nsSyncSection", aElement) |
1868 | | , mRunnable(aRunnable) |
1869 | 0 | { |
1870 | 0 | } |
1871 | | |
1872 | | NS_IMETHOD Run() override |
1873 | 0 | { |
1874 | 0 | // Silently cancel if our load has been cancelled. |
1875 | 0 | if (IsCancelled()) |
1876 | 0 | return NS_OK; |
1877 | 0 | mRunnable->Run(); |
1878 | 0 | return NS_OK; |
1879 | 0 | } |
1880 | | }; |
1881 | | |
1882 | | void |
1883 | | HTMLMediaElement::RunInStableState(nsIRunnable* aRunnable) |
1884 | 0 | { |
1885 | 0 | if (mShuttingDown) { |
1886 | 0 | return; |
1887 | 0 | } |
1888 | 0 | |
1889 | 0 | nsCOMPtr<nsIRunnable> event = new nsSyncSection(this, aRunnable); |
1890 | 0 | nsContentUtils::RunInStableState(event.forget()); |
1891 | 0 | } |
1892 | | |
1893 | | void |
1894 | | HTMLMediaElement::QueueLoadFromSourceTask() |
1895 | 0 | { |
1896 | 0 | if (!mIsLoadingFromSourceChildren || mShuttingDown) { |
1897 | 0 | return; |
1898 | 0 | } |
1899 | 0 | |
1900 | 0 | if (mDecoder) { |
1901 | 0 | // Reset readyState to HAVE_NOTHING since we're going to load a new decoder. |
1902 | 0 | ShutdownDecoder(); |
1903 | 0 | ChangeReadyState(HAVE_NOTHING); |
1904 | 0 | } |
1905 | 0 |
|
1906 | 0 | AssertReadyStateIsNothing(); |
1907 | 0 |
|
1908 | 0 | ChangeDelayLoadStatus(true); |
1909 | 0 | ChangeNetworkState(NETWORK_LOADING); |
1910 | 0 | RefPtr<Runnable> r = |
1911 | 0 | NewRunnableMethod("HTMLMediaElement::LoadFromSourceChildren", |
1912 | 0 | this, |
1913 | 0 | &HTMLMediaElement::LoadFromSourceChildren); |
1914 | 0 | RunInStableState(r); |
1915 | 0 | } |
1916 | | |
1917 | | void |
1918 | | HTMLMediaElement::QueueSelectResourceTask() |
1919 | 0 | { |
1920 | 0 | // Don't allow multiple async select resource calls to be queued. |
1921 | 0 | if (mHaveQueuedSelectResource) |
1922 | 0 | return; |
1923 | 0 | mHaveQueuedSelectResource = true; |
1924 | 0 | ChangeNetworkState(NETWORK_NO_SOURCE); |
1925 | 0 | RefPtr<Runnable> r = |
1926 | 0 | NewRunnableMethod("HTMLMediaElement::SelectResourceWrapper", |
1927 | 0 | this, |
1928 | 0 | &HTMLMediaElement::SelectResourceWrapper); |
1929 | 0 | RunInStableState(r); |
1930 | 0 | } |
1931 | | |
1932 | | static bool |
1933 | | HasSourceChildren(nsIContent* aElement) |
1934 | 0 | { |
1935 | 0 | for (nsIContent* child = aElement->GetFirstChild(); child; |
1936 | 0 | child = child->GetNextSibling()) { |
1937 | 0 | if (child->IsHTMLElement(nsGkAtoms::source)) { |
1938 | 0 | return true; |
1939 | 0 | } |
1940 | 0 | } |
1941 | 0 | return false; |
1942 | 0 | } |
1943 | | |
1944 | | static nsCString |
1945 | | DocumentOrigin(nsIDocument* aDoc) |
1946 | 0 | { |
1947 | 0 | if (!aDoc) { |
1948 | 0 | return NS_LITERAL_CSTRING("null"); |
1949 | 0 | } |
1950 | 0 | nsCOMPtr<nsIPrincipal> principal = aDoc->NodePrincipal(); |
1951 | 0 | if (!principal) { |
1952 | 0 | return NS_LITERAL_CSTRING("null"); |
1953 | 0 | } |
1954 | 0 | nsCString origin; |
1955 | 0 | if (NS_FAILED(principal->GetOrigin(origin))) { |
1956 | 0 | return NS_LITERAL_CSTRING("null"); |
1957 | 0 | } |
1958 | 0 | return origin; |
1959 | 0 | } |
1960 | | |
void
HTMLMediaElement::Load()
{
  // Public load() entry point. Logs the element's current load-relevant
  // state, then delegates the actual work to DoLoad(); the explicit-load
  // flag distinguishes script-invoked load() from internal reloads.
  LOG(LogLevel::Debug,
      ("%p Load() hasSrcAttrStream=%d hasSrcAttr=%d hasSourceChildren=%d "
       "handlingInput=%d hasAutoplayAttr=%d IsAllowedToPlay=%d "
       "ownerDoc=%p (%s) ownerDocUserActivated=%d "
       "muted=%d volume=%f",
       this,
       !!mSrcAttrStream,
       HasAttr(kNameSpaceID_None, nsGkAtoms::src),
       HasSourceChildren(this),
       EventStateManager::IsHandlingUserInput(),
       HasAttr(kNameSpaceID_None, nsGkAtoms::autoplay),
       AutoplayPolicy::IsAllowedToPlay(*this),
       OwnerDoc(),
       DocumentOrigin(OwnerDoc()).get(),
       OwnerDoc() ? OwnerDoc()->HasBeenUserGestureActivated() : 0,
       mMuted,
       mVolume));

  // Guard against reentrancy while a load is already in progress.
  if (mIsRunningLoadMethod) {
    return;
  }

  mIsDoingExplicitLoad = true;
  DoLoad();
}
1989 | | |
void
HTMLMediaElement::DoLoad()
{
  // Core of the load algorithm: aborts any existing load, resets playback
  // rate and element state, and queues resource selection.

  // Check if media is allowed for the docshell.
  nsCOMPtr<nsIDocShell> docShell = OwnerDoc()->GetDocShell();
  if (docShell && !docShell->GetAllowMedia()) {
    LOG(LogLevel::Debug, ("%p Media not allowed", this));
    return;
  }

  // Guard against reentrancy (e.g. via events fired during
  // AbortExistingLoads() below).
  if (mIsRunningLoadMethod) {
    return;
  }

  if (EventStateManager::IsHandlingUserInput()) {
    // Detect if user has interacted with element so that play will not be
    // blocked when initiated by a script. This enables sites to capture user
    // intent to play by calling load() in the click handler of a "catalog
    // view" of a gallery of videos.
    mIsBlessed = true;
    // Mark the channel as urgent-start when autoplay so that it will play the
    // media from src after loading enough resource.
    if (HasAttr(kNameSpaceID_None, nsGkAtoms::autoplay)) {
      mUseUrgentStartForChannel = true;
    }
  }

  SetPlayedOrSeeked(false);
  mIsRunningLoadMethod = true;
  AbortExistingLoads();
  SetPlaybackRate(mDefaultPlaybackRate, IgnoreErrors());
  QueueSelectResourceTask();
  ResetState();
  mIsRunningLoadMethod = false;
}
2025 | | |
2026 | | void |
2027 | | HTMLMediaElement::ResetState() |
2028 | 0 | { |
2029 | 0 | // There might be a pending MediaDecoder::PlaybackPositionChanged() which |
2030 | 0 | // will overwrite |mMediaInfo.mVideo.mDisplay| in UpdateMediaSize() to give |
2031 | 0 | // staled videoWidth and videoHeight. We have to call ForgetElement() here |
2032 | 0 | // such that the staled callbacks won't reach us. |
2033 | 0 | if (mVideoFrameContainer) { |
2034 | 0 | mVideoFrameContainer->ForgetElement(); |
2035 | 0 | mVideoFrameContainer = nullptr; |
2036 | 0 | } |
2037 | 0 | } |
2038 | | |
2039 | | void |
2040 | | HTMLMediaElement::SelectResourceWrapper() |
2041 | 0 | { |
2042 | 0 | SelectResource(); |
2043 | 0 | mIsRunningSelectResource = false; |
2044 | 0 | mHaveQueuedSelectResource = false; |
2045 | 0 | mIsDoingExplicitLoad = false; |
2046 | 0 | } |
2047 | | |
void
HTMLMediaElement::SelectResource()
{
  // Resource selection algorithm: prefer srcObject, then the src attribute,
  // then the <source> children, in that order.
  if (!mSrcAttrStream && !HasAttr(kNameSpaceID_None, nsGkAtoms::src) &&
      !HasSourceChildren(this)) {
    // The media element has neither a src attribute nor any source
    // element children, abort the load.
    ChangeNetworkState(NETWORK_EMPTY);
    ChangeDelayLoadStatus(false);
    return;
  }

  ChangeDelayLoadStatus(true);

  ChangeNetworkState(NETWORK_LOADING);
  DispatchAsyncEvent(NS_LITERAL_STRING("loadstart"));

  // Delay setting mIsRunningSelectResource until after UpdatePreloadAction
  // so that we don't lose our state change by bailing out of the preload
  // state update
  UpdatePreloadAction();
  mIsRunningSelectResource = true;

  // If we have a 'src' attribute, use that exclusively.
  nsAutoString src;
  if (mSrcAttrStream) {
    SetupSrcMediaStreamPlayback(mSrcAttrStream);
  } else if (GetAttr(kNameSpaceID_None, nsGkAtoms::src, src)) {
    nsCOMPtr<nsIURI> uri;
    MediaResult rv = NewURIFromString(src, getter_AddRefs(uri));
    if (NS_SUCCEEDED(rv)) {
      LOG(
        LogLevel::Debug,
        ("%p Trying load from src=%s", this, NS_ConvertUTF16toUTF8(src).get()));
      NS_ASSERTION(
        !mIsLoadingFromSourceChildren,
        "Should think we're not loading from source children by default");

      RemoveMediaElementFromURITable();
      mLoadingSrc = uri;
      mLoadingSrcTriggeringPrincipal = mSrcAttrTriggeringPrincipal;
      DDLOG(DDLogCategory::Property,
            "loading_src",
            nsCString(NS_ConvertUTF16toUTF8(src)));
      mMediaSource = mSrcMediaSource;
      DDLINKCHILD("mediasource", mMediaSource.get());
      UpdatePreloadAction();
      if (mPreloadAction == HTMLMediaElement::PRELOAD_NONE && !mMediaSource) {
        // preload:none media, suspend the load here before we make any
        // network requests.
        SuspendLoad();
        return;
      }

      rv = LoadResource();
      if (NS_SUCCEEDED(rv)) {
        return;
      }
      // LoadResource() failed; fall through to report the unsupported source.
    } else {
      const char16_t* params[] = { src.get() };
      ReportLoadError("MediaLoadInvalidURI", params, ArrayLength(params));
      rv = MediaResult(rv.Code(), "MediaLoadInvalidURI");
    }
    // The media element has neither a src attribute nor a source element child:
    // set the networkState to NETWORK_EMPTY, and abort these steps; the
    // synchronous section ends.
    mMainThreadEventTarget->Dispatch(NewRunnableMethod<nsCString>(
      "HTMLMediaElement::NoSupportedMediaSourceError",
      this,
      &HTMLMediaElement::NoSupportedMediaSourceError,
      rv.Description()));
  } else {
    // Otherwise, the source elements will be used.
    mIsLoadingFromSourceChildren = true;
    LoadFromSourceChildren();
  }
}
2125 | | |
void
HTMLMediaElement::NotifyLoadError(const nsACString& aErrorDetails)
{
  // Called when the resource currently being loaded fails.
  if (!mIsLoadingFromSourceChildren) {
    // Loading from the src attribute: there are no fallback candidates,
    // so surface the unsupported-source error immediately.
    LOG(LogLevel::Debug, ("NotifyLoadError(), no supported media error"));
    NoSupportedMediaSourceError(aErrorDetails);
  } else if (mSourceLoadCandidate) {
    // Loading from <source> children: report the failed candidate and
    // queue an attempt at the next one.
    DispatchAsyncSourceError(mSourceLoadCandidate);
    QueueLoadFromSourceTask();
  } else {
    NS_WARNING("Should know the source we were loading from!");
  }
}
2139 | | |
void
HTMLMediaElement::NotifyMediaTrackEnabled(MediaTrack* aTrack)
{
  // Called when an audio track becomes enabled or a video track becomes
  // selected. Updates mute/video state and wires up stream-track rendering
  // and capture.
  MOZ_ASSERT(aTrack);
  if (!aTrack) {
    return;
  }
#ifdef DEBUG
  nsString id;
  aTrack->GetId(id);

  LOG(LogLevel::Debug,
      ("MediaElement %p %sTrack with id %s enabled",
       this,
       aTrack->AsAudioTrack() ? "Audio" : "Video",
       NS_ConvertUTF16toUTF8(id).get()));
#endif

  MOZ_ASSERT((aTrack->AsAudioTrack() && aTrack->AsAudioTrack()->Enabled()) ||
             (aTrack->AsVideoTrack() && aTrack->AsVideoTrack()->Selected()));

  if (aTrack->AsAudioTrack()) {
    // An audio track is audible again; clear the track-induced mute bit.
    SetMutedInternal(mMuted & ~MUTED_BY_AUDIO_TRACK);
  } else if (aTrack->AsVideoTrack()) {
    if (!IsVideo()) {
      MOZ_ASSERT(false);
      return;
    }
    mDisableVideo = false;
  } else {
    MOZ_ASSERT(false, "Unknown track type");
  }

  if (mSrcStream) {
    if (aTrack->AsVideoTrack()) {
      MOZ_ASSERT(!mSelectedVideoStreamTrack);
      MOZ_ASSERT(!mMediaStreamSizeListener);

      mSelectedVideoStreamTrack = aTrack->AsVideoTrack()->GetVideoStreamTrack();
      VideoFrameContainer* container = GetVideoFrameContainer();
      if (mSrcStreamIsPlaying && container) {
        mSelectedVideoStreamTrack->AddVideoOutput(container);
      }
      // Safe downcast: the IsVideo() check above guarantees this element is
      // an HTMLVideoElement.
      HTMLVideoElement* self = static_cast<HTMLVideoElement*>(this);
      if (self->VideoWidth() <= 1 && self->VideoHeight() <= 1) {
        // MediaInfo uses dummy values of 1 for width and height to
        // mark video as valid. We need a new stream size listener
        // if size is 0x0 or 1x1.
        mMediaStreamSizeListener = new StreamSizeListener(this);
        mSelectedVideoStreamTrack->AddDirectListener(mMediaStreamSizeListener);
      }
    }

    if (mReadyState == HAVE_NOTHING) {
      // No MediaStreamTracks are captured until we have metadata.
      return;
    }
    for (OutputMediaStream& ms : mOutputStreams) {
      if (aTrack->AsVideoTrack() && ms.mCapturingAudioOnly) {
        // If the output stream is for audio only we ignore video tracks.
        continue;
      }
      AddCaptureMediaTrackToOutputStream(aTrack, ms);
    }
  }
}
2206 | | |
2207 | | void |
2208 | | HTMLMediaElement::NotifyMediaTrackDisabled(MediaTrack* aTrack) |
2209 | 0 | { |
2210 | 0 | MOZ_ASSERT(aTrack); |
2211 | 0 | if (!aTrack) { |
2212 | 0 | return; |
2213 | 0 | } |
2214 | | #ifdef DEBUG |
2215 | | nsString id; |
2216 | | aTrack->GetId(id); |
2217 | | |
2218 | | LOG(LogLevel::Debug, |
2219 | | ("MediaElement %p %sTrack with id %s disabled", |
2220 | | this, |
2221 | | aTrack->AsAudioTrack() ? "Audio" : "Video", |
2222 | | NS_ConvertUTF16toUTF8(id).get())); |
2223 | | #endif |
2224 | | |
2225 | 0 | MOZ_ASSERT((!aTrack->AsAudioTrack() || !aTrack->AsAudioTrack()->Enabled()) && |
2226 | 0 | (!aTrack->AsVideoTrack() || !aTrack->AsVideoTrack()->Selected())); |
2227 | 0 |
|
2228 | 0 | if (aTrack->AsAudioTrack()) { |
2229 | 0 | // If we don't have any alive track , we don't need to mute MediaElement. |
2230 | 0 | if (AudioTracks()->Length() > 0) { |
2231 | 0 | bool shouldMute = true; |
2232 | 0 | for (uint32_t i = 0; i < AudioTracks()->Length(); ++i) { |
2233 | 0 | if ((*AudioTracks())[i]->Enabled()) { |
2234 | 0 | shouldMute = false; |
2235 | 0 | break; |
2236 | 0 | } |
2237 | 0 | } |
2238 | 0 |
|
2239 | 0 | if (shouldMute) { |
2240 | 0 | SetMutedInternal(mMuted | MUTED_BY_AUDIO_TRACK); |
2241 | 0 | } |
2242 | 0 | } |
2243 | 0 | } else if (aTrack->AsVideoTrack()) { |
2244 | 0 | if (mSrcStream) { |
2245 | 0 | MOZ_ASSERT(mSelectedVideoStreamTrack); |
2246 | 0 | if (mSelectedVideoStreamTrack && mMediaStreamSizeListener) { |
2247 | 0 | mSelectedVideoStreamTrack->RemoveDirectListener( |
2248 | 0 | mMediaStreamSizeListener); |
2249 | 0 | mMediaStreamSizeListener->Forget(); |
2250 | 0 | mMediaStreamSizeListener = nullptr; |
2251 | 0 | } |
2252 | 0 | VideoFrameContainer* container = GetVideoFrameContainer(); |
2253 | 0 | if (mSrcStreamIsPlaying && container) { |
2254 | 0 | mSelectedVideoStreamTrack->RemoveVideoOutput(container); |
2255 | 0 | } |
2256 | 0 | mSelectedVideoStreamTrack = nullptr; |
2257 | 0 | } |
2258 | 0 | } |
2259 | 0 |
|
2260 | 0 | if (mReadyState == HAVE_NOTHING) { |
2261 | 0 | // No MediaStreamTracks are captured until we have metadata, and code |
2262 | 0 | // below doesn't do anything for captured decoders. |
2263 | 0 | return; |
2264 | 0 | } |
2265 | 0 | |
2266 | 0 | for (OutputMediaStream& ms : mOutputStreams) { |
2267 | 0 | if (ms.mCapturingDecoder) { |
2268 | 0 | MOZ_ASSERT(!ms.mCapturingMediaStream); |
2269 | 0 | continue; |
2270 | 0 | } |
2271 | 0 | MOZ_ASSERT(ms.mCapturingMediaStream); |
2272 | 0 | for (int32_t i = ms.mTrackPorts.Length() - 1; i >= 0; --i) { |
2273 | 0 | if (ms.mTrackPorts[i].first() == aTrack->GetId()) { |
2274 | 0 | // The source of this track just ended. Force-notify that it ended. |
2275 | 0 | // If we bounce it to the MediaStreamGraph it might not be picked up, |
2276 | 0 | // for instance if the MediaInputPort was destroyed in the same |
2277 | 0 | // iteration as it was added. |
2278 | 0 | MediaStreamTrack* outputTrack = ms.mStream->FindOwnedDOMTrack( |
2279 | 0 | ms.mTrackPorts[i].second()->GetDestination(), |
2280 | 0 | ms.mTrackPorts[i].second()->GetDestinationTrackId()); |
2281 | 0 | MOZ_ASSERT(outputTrack); |
2282 | 0 | if (outputTrack) { |
2283 | 0 | mMainThreadEventTarget->Dispatch( |
2284 | 0 | NewRunnableMethod("MediaStreamTrack::OverrideEnded", |
2285 | 0 | outputTrack, |
2286 | 0 | &MediaStreamTrack::OverrideEnded)); |
2287 | 0 | } |
2288 | 0 |
|
2289 | 0 | ms.mTrackPorts[i].second()->Destroy(); |
2290 | 0 | ms.mTrackPorts.RemoveElementAt(i); |
2291 | 0 | break; |
2292 | 0 | } |
2293 | 0 | } |
2294 | | #ifdef DEBUG |
2295 | | for (auto pair : ms.mTrackPorts) { |
2296 | | MOZ_ASSERT(pair.first() != aTrack->GetId(), |
2297 | | "The same MediaTrack was forwarded to the output stream more " |
2298 | | "than once. This shouldn't happen."); |
2299 | | } |
2300 | | #endif |
2301 | | } |
2302 | 0 | } |
2303 | | |
2304 | | void |
2305 | | HTMLMediaElement::NotifyMediaStreamTracksAvailable(DOMMediaStream* aStream) |
2306 | 0 | { |
2307 | 0 | if (!mSrcStream || mSrcStream != aStream) { |
2308 | 0 | return; |
2309 | 0 | } |
2310 | 0 | |
2311 | 0 | LOG(LogLevel::Debug, ("MediaElement %p MediaStream tracks available", this)); |
2312 | 0 |
|
2313 | 0 | mSrcStreamTracksAvailable = true; |
2314 | 0 |
|
2315 | 0 | bool videoHasChanged = IsVideo() && HasVideo() != !VideoTracks()->IsEmpty(); |
2316 | 0 |
|
2317 | 0 | if (videoHasChanged) { |
2318 | 0 | // We are a video element and HasVideo() changed so update the screen |
2319 | 0 | // wakelock |
2320 | 0 | NotifyOwnerDocumentActivityChanged(); |
2321 | 0 | } |
2322 | 0 |
|
2323 | 0 | UpdateReadyStateInternal(); |
2324 | 0 | } |
2325 | | |
2326 | | void |
2327 | | HTMLMediaElement::DealWithFailedElement(nsIContent* aSourceElement) |
2328 | 0 | { |
2329 | 0 | if (mShuttingDown) { |
2330 | 0 | return; |
2331 | 0 | } |
2332 | 0 | |
2333 | 0 | DispatchAsyncSourceError(aSourceElement); |
2334 | 0 | mMainThreadEventTarget->Dispatch( |
2335 | 0 | NewRunnableMethod("HTMLMediaElement::QueueLoadFromSourceTask", |
2336 | 0 | this, |
2337 | 0 | &HTMLMediaElement::QueueLoadFromSourceTask)); |
2338 | 0 | } |
2339 | | |
2340 | | void |
2341 | | HTMLMediaElement::NotifyOutputTrackStopped(DOMMediaStream* aOwningStream, |
2342 | | TrackID aDestinationTrackID) |
2343 | 0 | { |
2344 | 0 | for (OutputMediaStream& ms : mOutputStreams) { |
2345 | 0 | if (!ms.mCapturingMediaStream) { |
2346 | 0 | continue; |
2347 | 0 | } |
2348 | 0 | |
2349 | 0 | if (ms.mStream != aOwningStream) { |
2350 | 0 | continue; |
2351 | 0 | } |
2352 | 0 | |
2353 | 0 | for (int32_t i = ms.mTrackPorts.Length() - 1; i >= 0; --i) { |
2354 | 0 | MediaInputPort* port = ms.mTrackPorts[i].second(); |
2355 | 0 | if (port->GetDestinationTrackId() != aDestinationTrackID) { |
2356 | 0 | continue; |
2357 | 0 | } |
2358 | 0 | |
2359 | 0 | port->Destroy(); |
2360 | 0 | ms.mTrackPorts.RemoveElementAt(i); |
2361 | 0 | return; |
2362 | 0 | } |
2363 | 0 | } |
2364 | 0 |
|
2365 | 0 | // An output track ended but its port is already gone. |
2366 | 0 | // It was probably cleared by the removal of the source MediaTrack. |
2367 | 0 | } |
2368 | | |
void
HTMLMediaElement::LoadFromSourceChildren()
{
  // Iterate the <source> children in order, attempting to load each
  // candidate until one succeeds, candidates are exhausted, or a candidate
  // fails validation (in which case the failure is reported and a new pass
  // is queued asynchronously).
  NS_ASSERTION(mDelayingLoadEvent,
               "Should delay load event (if in document) during load");
  NS_ASSERTION(mIsLoadingFromSourceChildren,
               "Must remember we're loading from source children");

  // Watch for <source> children appended while we wait for candidates.
  AddMutationObserverUnlessExists(this);

  while (true) {
    Element* child = GetNextSource();
    if (!child) {
      // Exhausted candidates, wait for more candidates to be appended to
      // the media element.
      mLoadWaitStatus = WAITING_FOR_SOURCE;
      ChangeNetworkState(NETWORK_NO_SOURCE);
      ChangeDelayLoadStatus(false);
      ReportLoadError("MediaLoadExhaustedCandidates");
      return;
    }

    // Must have src attribute.
    nsAutoString src;
    if (!child->GetAttr(kNameSpaceID_None, nsGkAtoms::src, src)) {
      ReportLoadError("MediaLoadSourceMissingSrc");
      DealWithFailedElement(child);
      return;
    }

    // If we have a type attribute, it must be a supported type.
    nsAutoString type;
    if (child->GetAttr(kNameSpaceID_None, nsGkAtoms::type, type)) {
      DecoderDoctorDiagnostics diagnostics;
      CanPlayStatus canPlay = GetCanPlay(type, &diagnostics);
      diagnostics.StoreFormatDiagnostics(
        OwnerDoc(), type, canPlay != CANPLAY_NO, __func__);
      if (canPlay == CANPLAY_NO) {
        const char16_t* params[] = { type.get(), src.get() };
        ReportLoadError(
          "MediaLoadUnsupportedTypeAttribute", params, ArrayLength(params));
        DealWithFailedElement(child);
        return;
      }
    }
    HTMLSourceElement* childSrc = HTMLSourceElement::FromNode(child);
    LOG(LogLevel::Debug,
        ("%p Trying load from <source>=%s type=%s",
         this,
         NS_ConvertUTF16toUTF8(src).get(),
         NS_ConvertUTF16toUTF8(type).get()));

    nsCOMPtr<nsIURI> uri;
    NewURIFromString(src, getter_AddRefs(uri));
    if (!uri) {
      const char16_t* params[] = { src.get() };
      ReportLoadError("MediaLoadInvalidURI", params, ArrayLength(params));
      DealWithFailedElement(child);
      return;
    }

    // Adopt this candidate as the loading source.
    RemoveMediaElementFromURITable();
    mLoadingSrc = uri;
    mLoadingSrcTriggeringPrincipal = childSrc->GetSrcTriggeringPrincipal();
    DDLOG(DDLogCategory::Property,
          "loading_src",
          nsCString(NS_ConvertUTF16toUTF8(src)));
    mMediaSource = childSrc->GetSrcMediaSource();
    DDLINKCHILD("mediasource", mMediaSource.get());
    NS_ASSERTION(mNetworkState == NETWORK_LOADING,
                 "Network state should be loading");

    if (mPreloadAction == HTMLMediaElement::PRELOAD_NONE && !mMediaSource) {
      // preload:none media, suspend the load here before we make any
      // network requests.
      SuspendLoad();
      return;
    }

    if (NS_SUCCEEDED(LoadResource())) {
      return;
    }

    // If we fail to load, loop back and try loading the next resource.
    DispatchAsyncSourceError(child);
  }
  MOZ_ASSERT_UNREACHABLE("Execution should not reach here!");
}
2457 | | |
2458 | | void |
2459 | | HTMLMediaElement::SuspendLoad() |
2460 | 0 | { |
2461 | 0 | mSuspendedForPreloadNone = true; |
2462 | 0 | ChangeNetworkState(NETWORK_IDLE); |
2463 | 0 | ChangeDelayLoadStatus(false); |
2464 | 0 | } |
2465 | | |
2466 | | void |
2467 | | HTMLMediaElement::ResumeLoad(PreloadAction aAction) |
2468 | 0 | { |
2469 | 0 | NS_ASSERTION(mSuspendedForPreloadNone, |
2470 | 0 | "Must be halted for preload:none to resume from preload:none " |
2471 | 0 | "suspended load."); |
2472 | 0 | mSuspendedForPreloadNone = false; |
2473 | 0 | mPreloadAction = aAction; |
2474 | 0 | ChangeDelayLoadStatus(true); |
2475 | 0 | ChangeNetworkState(NETWORK_LOADING); |
2476 | 0 | if (!mIsLoadingFromSourceChildren) { |
2477 | 0 | // We were loading from the element's src attribute. |
2478 | 0 | MediaResult rv = LoadResource(); |
2479 | 0 | if (NS_FAILED(rv)) { |
2480 | 0 | NoSupportedMediaSourceError(rv.Description()); |
2481 | 0 | } |
2482 | 0 | } else { |
2483 | 0 | // We were loading from a child <source> element. Try to resume the |
2484 | 0 | // load of that child, and if that fails, try the next child. |
2485 | 0 | if (NS_FAILED(LoadResource())) { |
2486 | 0 | LoadFromSourceChildren(); |
2487 | 0 | } |
2488 | 0 | } |
2489 | 0 | } |
2490 | | |
2491 | | bool |
2492 | | HTMLMediaElement::AllowedToPlay() const |
2493 | 0 | { |
2494 | 0 | return AutoplayPolicy::IsAllowedToPlay(*this); |
2495 | 0 | } |
2496 | | |
2497 | | void |
2498 | | HTMLMediaElement::UpdatePreloadAction() |
2499 | 0 | { |
2500 | 0 | PreloadAction nextAction = PRELOAD_UNDEFINED; |
2501 | 0 | // If autoplay is set, or we're playing, we should always preload data, |
2502 | 0 | // as we'll need it to play. |
2503 | 0 | if ((AutoplayPolicy::IsAllowedToPlay(*this) && |
2504 | 0 | HasAttr(kNameSpaceID_None, nsGkAtoms::autoplay)) || |
2505 | 0 | !mPaused) { |
2506 | 0 | nextAction = HTMLMediaElement::PRELOAD_ENOUGH; |
2507 | 0 | } else { |
2508 | 0 | // Find the appropriate preload action by looking at the attribute. |
2509 | 0 | const nsAttrValue* val = |
2510 | 0 | mAttrs.GetAttr(nsGkAtoms::preload, kNameSpaceID_None); |
2511 | 0 | // MSE doesn't work if preload is none, so it ignores the pref when src is |
2512 | 0 | // from MSE. |
2513 | 0 | uint32_t preloadDefault = |
2514 | 0 | mMediaSource |
2515 | 0 | ? HTMLMediaElement::PRELOAD_ATTR_METADATA |
2516 | 0 | : Preferences::GetInt("media.preload.default", |
2517 | 0 | HTMLMediaElement::PRELOAD_ATTR_METADATA); |
2518 | 0 | uint32_t preloadAuto = Preferences::GetInt( |
2519 | 0 | "media.preload.auto", HTMLMediaElement::PRELOAD_ENOUGH); |
2520 | 0 | if (!val) { |
2521 | 0 | // Attribute is not set. Use the preload action specified by the |
2522 | 0 | // media.preload.default pref, or just preload metadata if not present. |
2523 | 0 | nextAction = static_cast<PreloadAction>(preloadDefault); |
2524 | 0 | } else if (val->Type() == nsAttrValue::eEnum) { |
2525 | 0 | PreloadAttrValue attr = |
2526 | 0 | static_cast<PreloadAttrValue>(val->GetEnumValue()); |
2527 | 0 | if (attr == HTMLMediaElement::PRELOAD_ATTR_EMPTY || |
2528 | 0 | attr == HTMLMediaElement::PRELOAD_ATTR_AUTO) { |
2529 | 0 | nextAction = static_cast<PreloadAction>(preloadAuto); |
2530 | 0 | } else if (attr == HTMLMediaElement::PRELOAD_ATTR_METADATA) { |
2531 | 0 | nextAction = HTMLMediaElement::PRELOAD_METADATA; |
2532 | 0 | } else if (attr == HTMLMediaElement::PRELOAD_ATTR_NONE) { |
2533 | 0 | nextAction = HTMLMediaElement::PRELOAD_NONE; |
2534 | 0 | } |
2535 | 0 | } else { |
2536 | 0 | // Use the suggested "missing value default" of "metadata", or the value |
2537 | 0 | // specified by the media.preload.default, if present. |
2538 | 0 | nextAction = static_cast<PreloadAction>(preloadDefault); |
2539 | 0 | } |
2540 | 0 | } |
2541 | 0 |
|
2542 | 0 | if (nextAction == HTMLMediaElement::PRELOAD_NONE && mIsDoingExplicitLoad) { |
2543 | 0 | nextAction = HTMLMediaElement::PRELOAD_METADATA; |
2544 | 0 | } |
2545 | 0 |
|
2546 | 0 | mPreloadAction = nextAction; |
2547 | 0 |
|
2548 | 0 | if (nextAction == HTMLMediaElement::PRELOAD_ENOUGH) { |
2549 | 0 | if (mSuspendedForPreloadNone) { |
2550 | 0 | // Our load was previouly suspended due to the media having preload |
2551 | 0 | // value "none". The preload value has changed to preload:auto, so |
2552 | 0 | // resume the load. |
2553 | 0 | ResumeLoad(PRELOAD_ENOUGH); |
2554 | 0 | } else { |
2555 | 0 | // Preload as much of the video as we can, i.e. don't suspend after |
2556 | 0 | // the first frame. |
2557 | 0 | StopSuspendingAfterFirstFrame(); |
2558 | 0 | } |
2559 | 0 |
|
2560 | 0 | } else if (nextAction == HTMLMediaElement::PRELOAD_METADATA) { |
2561 | 0 | // Ensure that the video can be suspended after first frame. |
2562 | 0 | mAllowSuspendAfterFirstFrame = true; |
2563 | 0 | if (mSuspendedForPreloadNone) { |
2564 | 0 | // Our load was previouly suspended due to the media having preload |
2565 | 0 | // value "none". The preload value has changed to preload:metadata, so |
2566 | 0 | // resume the load. We'll pause the load again after we've read the |
2567 | 0 | // metadata. |
2568 | 0 | ResumeLoad(PRELOAD_METADATA); |
2569 | 0 | } |
2570 | 0 | } |
2571 | 0 | } |
2572 | | |
2573 | | MediaResult |
2574 | | HTMLMediaElement::LoadResource() |
2575 | 0 | { |
2576 | 0 | AbstractThread::AutoEnter context(AbstractMainThread()); |
2577 | 0 |
|
2578 | 0 | NS_ASSERTION(mDelayingLoadEvent, |
2579 | 0 | "Should delay load event (if in document) during load"); |
2580 | 0 |
|
2581 | 0 | if (mChannelLoader) { |
2582 | 0 | mChannelLoader->Cancel(); |
2583 | 0 | mChannelLoader = nullptr; |
2584 | 0 | } |
2585 | 0 |
|
2586 | 0 | // Set the media element's CORS mode only when loading a resource |
2587 | 0 | mCORSMode = AttrValueToCORSMode(GetParsedAttr(nsGkAtoms::crossorigin)); |
2588 | 0 |
|
2589 | 0 | HTMLMediaElement* other = LookupMediaElementURITable(mLoadingSrc); |
2590 | 0 | if (other && other->mDecoder) { |
2591 | 0 | // Clone it. |
2592 | 0 | // TODO: remove the cast by storing ChannelMediaDecoder in the URI table. |
2593 | 0 | nsresult rv = InitializeDecoderAsClone( |
2594 | 0 | static_cast<ChannelMediaDecoder*>(other->mDecoder.get())); |
2595 | 0 | if (NS_SUCCEEDED(rv)) |
2596 | 0 | return rv; |
2597 | 0 | } |
2598 | 0 | |
2599 | 0 | if (mMediaSource) { |
2600 | 0 | MediaDecoderInit decoderInit( |
2601 | 0 | this, |
2602 | 0 | mMuted ? 0.0 : mVolume, |
2603 | 0 | mPreservesPitch, |
2604 | 0 | mPlaybackRate, |
2605 | 0 | mPreloadAction == HTMLMediaElement::PRELOAD_METADATA, |
2606 | 0 | mHasSuspendTaint, |
2607 | 0 | HasAttr(kNameSpaceID_None, nsGkAtoms::loop), |
2608 | 0 | MediaContainerType(MEDIAMIMETYPE("application/x.mediasource"))); |
2609 | 0 |
|
2610 | 0 | RefPtr<MediaSourceDecoder> decoder = new MediaSourceDecoder(decoderInit); |
2611 | 0 | if (!mMediaSource->Attach(decoder)) { |
2612 | 0 | // TODO: Handle failure: run "If the media data cannot be fetched at |
2613 | 0 | // all, due to network errors, causing the user agent to give up |
2614 | 0 | // trying to fetch the resource" section of resource fetch algorithm. |
2615 | 0 | decoder->Shutdown(); |
2616 | 0 | return MediaResult(NS_ERROR_FAILURE, "Failed to attach MediaSource"); |
2617 | 0 | } |
2618 | 0 | ChangeDelayLoadStatus(false); |
2619 | 0 | nsresult rv = decoder->Load(mMediaSource->GetPrincipal()); |
2620 | 0 | if (NS_FAILED(rv)) { |
2621 | 0 | decoder->Shutdown(); |
2622 | 0 | LOG(LogLevel::Debug, |
2623 | 0 | ("%p Failed to load for decoder %p", this, decoder.get())); |
2624 | 0 | return MediaResult(rv, "Fail to load decoder"); |
2625 | 0 | } |
2626 | 0 | rv = FinishDecoderSetup(decoder); |
2627 | 0 | return MediaResult(rv, "Failed to set up decoder"); |
2628 | 0 | } |
2629 | 0 | |
2630 | 0 | AssertReadyStateIsNothing(); |
2631 | 0 |
|
2632 | 0 | RefPtr<ChannelLoader> loader = new ChannelLoader; |
2633 | 0 | nsresult rv = loader->Load(this); |
2634 | 0 | if (NS_SUCCEEDED(rv)) { |
2635 | 0 | mChannelLoader = loader.forget(); |
2636 | 0 | } |
2637 | 0 | return MediaResult(rv, "Failed to load channel"); |
2638 | 0 | } |
2639 | | |
2640 | | nsresult |
2641 | | HTMLMediaElement::LoadWithChannel(nsIChannel* aChannel, |
2642 | | nsIStreamListener** aListener) |
2643 | 0 | { |
2644 | 0 | NS_ENSURE_ARG_POINTER(aChannel); |
2645 | 0 | NS_ENSURE_ARG_POINTER(aListener); |
2646 | 0 |
|
2647 | 0 | *aListener = nullptr; |
2648 | 0 |
|
2649 | 0 | // Make sure we don't reenter during synchronous abort events. |
2650 | 0 | if (mIsRunningLoadMethod) |
2651 | 0 | return NS_OK; |
2652 | 0 | mIsRunningLoadMethod = true; |
2653 | 0 | AbortExistingLoads(); |
2654 | 0 | mIsRunningLoadMethod = false; |
2655 | 0 |
|
2656 | 0 | mLoadingSrcTriggeringPrincipal = nullptr; |
2657 | 0 | nsresult rv = aChannel->GetOriginalURI(getter_AddRefs(mLoadingSrc)); |
2658 | 0 | NS_ENSURE_SUCCESS(rv, rv); |
2659 | 0 |
|
2660 | 0 | ChangeDelayLoadStatus(true); |
2661 | 0 | rv = InitializeDecoderForChannel(aChannel, aListener); |
2662 | 0 | if (NS_FAILED(rv)) { |
2663 | 0 | ChangeDelayLoadStatus(false); |
2664 | 0 | return rv; |
2665 | 0 | } |
2666 | 0 | |
2667 | 0 | SetPlaybackRate(mDefaultPlaybackRate, IgnoreErrors()); |
2668 | 0 | DispatchAsyncEvent(NS_LITERAL_STRING("loadstart")); |
2669 | 0 |
|
2670 | 0 | return NS_OK; |
2671 | 0 | } |
2672 | | |
2673 | | bool |
2674 | | HTMLMediaElement::Seeking() const |
2675 | 0 | { |
2676 | 0 | return mDecoder && mDecoder->IsSeeking(); |
2677 | 0 | } |
2678 | | |
2679 | | double |
2680 | | HTMLMediaElement::CurrentTime() const |
2681 | 0 | { |
2682 | 0 | if (MediaStream* stream = GetSrcMediaStream()) { |
2683 | 0 | if (mSrcStreamPausedCurrentTime >= 0) { |
2684 | 0 | return mSrcStreamPausedCurrentTime; |
2685 | 0 | } |
2686 | 0 | return stream->StreamTimeToSeconds(stream->GetCurrentTime()); |
2687 | 0 | } |
2688 | 0 | |
2689 | 0 | if (mDefaultPlaybackStartPosition == 0.0 && mDecoder) { |
2690 | 0 | return mDecoder->GetCurrentTime(); |
2691 | 0 | } |
2692 | 0 | |
2693 | 0 | return mDefaultPlaybackStartPosition; |
2694 | 0 | } |
2695 | | |
2696 | | void |
2697 | | HTMLMediaElement::FastSeek(double aTime, ErrorResult& aRv) |
2698 | 0 | { |
2699 | 0 | LOG(LogLevel::Debug, ("%p FastSeek(%f) called by JS", this, aTime)); |
2700 | 0 | LOG(LogLevel::Debug, ("Reporting telemetry VIDEO_FASTSEEK_USED")); |
2701 | 0 | Telemetry::Accumulate(Telemetry::VIDEO_FASTSEEK_USED, 1); |
2702 | 0 | RefPtr<Promise> tobeDropped = Seek(aTime, SeekTarget::PrevSyncPoint, aRv); |
2703 | 0 | } |
2704 | | |
2705 | | already_AddRefed<Promise> |
2706 | | HTMLMediaElement::SeekToNextFrame(ErrorResult& aRv) |
2707 | 0 | { |
2708 | 0 | /* This will cause JIT code to be kept around longer, to help performance |
2709 | 0 | * when using SeekToNextFrame to iterate through every frame of a video. |
2710 | 0 | */ |
2711 | 0 | nsPIDOMWindowInner* win = OwnerDoc()->GetInnerWindow(); |
2712 | 0 |
|
2713 | 0 | if (win) { |
2714 | 0 | if (JSObject* obj = win->AsGlobal()->GetGlobalJSObject()) { |
2715 | 0 | js::NotifyAnimationActivity(obj); |
2716 | 0 | } |
2717 | 0 | } |
2718 | 0 |
|
2719 | 0 | return Seek(CurrentTime(), SeekTarget::NextFrame, aRv); |
2720 | 0 | } |
2721 | | |
2722 | | void |
2723 | | HTMLMediaElement::SetCurrentTime(double aCurrentTime, ErrorResult& aRv) |
2724 | 0 | { |
2725 | 0 | LOG(LogLevel::Debug, |
2726 | 0 | ("%p SetCurrentTime(%f) called by JS", this, aCurrentTime)); |
2727 | 0 | RefPtr<Promise> tobeDropped = Seek(aCurrentTime, SeekTarget::Accurate, aRv); |
2728 | 0 | } |
2729 | | |
2730 | | /** |
2731 | | * Check if aValue is inside a range of aRanges, and if so returns true |
2732 | | * and puts the range index in aIntervalIndex. If aValue is not |
2733 | | * inside a range, returns false, and aIntervalIndex |
2734 | | * is set to the index of the range which starts immediately after aValue |
2735 | | * (and can be aRanges.Length() if aValue is after the last range). |
2736 | | */ |
2737 | | static bool |
2738 | | IsInRanges(TimeRanges& aRanges, double aValue, uint32_t& aIntervalIndex) |
2739 | 0 | { |
2740 | 0 | uint32_t length = aRanges.Length(); |
2741 | 0 |
|
2742 | 0 | for (uint32_t i = 0; i < length; i++) { |
2743 | 0 | double start = aRanges.Start(i); |
2744 | 0 | if (start > aValue) { |
2745 | 0 | aIntervalIndex = i; |
2746 | 0 | return false; |
2747 | 0 | } |
2748 | 0 | double end = aRanges.End(i); |
2749 | 0 | if (aValue <= end) { |
2750 | 0 | aIntervalIndex = i; |
2751 | 0 | return true; |
2752 | 0 | } |
2753 | 0 | } |
2754 | 0 | aIntervalIndex = length; |
2755 | 0 | return false; |
2756 | 0 | } |
2757 | | |
2758 | | already_AddRefed<Promise> |
2759 | | HTMLMediaElement::Seek(double aTime, |
2760 | | SeekTarget::Type aSeekType, |
2761 | | ErrorResult& aRv) |
2762 | 0 | { |
2763 | 0 | // aTime should be non-NaN. |
2764 | 0 | MOZ_ASSERT(!mozilla::IsNaN(aTime)); |
2765 | 0 |
|
2766 | 0 | RefPtr<Promise> promise = CreateDOMPromise(aRv); |
2767 | 0 |
|
2768 | 0 | if (NS_WARN_IF(aRv.Failed())) { |
2769 | 0 | return nullptr; |
2770 | 0 | } |
2771 | 0 | |
2772 | 0 | // Detect if user has interacted with element by seeking so that |
2773 | 0 | // play will not be blocked when initiated by a script. |
2774 | 0 | if (EventStateManager::IsHandlingUserInput()) { |
2775 | 0 | mIsBlessed = true; |
2776 | 0 | } |
2777 | 0 |
|
2778 | 0 | StopSuspendingAfterFirstFrame(); |
2779 | 0 |
|
2780 | 0 | if (mSrcStream) { |
2781 | 0 | // do nothing since media streams have an empty Seekable range. |
2782 | 0 | promise->MaybeReject(NS_ERROR_DOM_INVALID_STATE_ERR); |
2783 | 0 | return promise.forget(); |
2784 | 0 | } |
2785 | 0 | |
2786 | 0 | if (mPlayed && mCurrentPlayRangeStart != -1.0) { |
2787 | 0 | double rangeEndTime = CurrentTime(); |
2788 | 0 | LOG(LogLevel::Debug, |
2789 | 0 | ("%p Adding \'played\' a range : [%f, %f]", |
2790 | 0 | this, |
2791 | 0 | mCurrentPlayRangeStart, |
2792 | 0 | rangeEndTime)); |
2793 | 0 | // Multiple seek without playing, or seek while playing. |
2794 | 0 | if (mCurrentPlayRangeStart != rangeEndTime) { |
2795 | 0 | mPlayed->Add(mCurrentPlayRangeStart, rangeEndTime); |
2796 | 0 | } |
2797 | 0 | // Reset the current played range start time. We'll re-set it once |
2798 | 0 | // the seek completes. |
2799 | 0 | mCurrentPlayRangeStart = -1.0; |
2800 | 0 | } |
2801 | 0 |
|
2802 | 0 | if (mReadyState == HAVE_NOTHING) { |
2803 | 0 | mDefaultPlaybackStartPosition = aTime; |
2804 | 0 | promise->MaybeReject(NS_ERROR_DOM_INVALID_STATE_ERR); |
2805 | 0 | return promise.forget(); |
2806 | 0 | } |
2807 | 0 | |
2808 | 0 | if (!mDecoder) { |
2809 | 0 | // mDecoder must always be set in order to reach this point. |
2810 | 0 | NS_ASSERTION(mDecoder, "SetCurrentTime failed: no decoder"); |
2811 | 0 | promise->MaybeReject(NS_ERROR_DOM_INVALID_STATE_ERR); |
2812 | 0 | return promise.forget(); |
2813 | 0 | } |
2814 | 0 |
|
2815 | 0 | // Clamp the seek target to inside the seekable ranges. |
2816 | 0 | media::TimeIntervals seekableIntervals = mDecoder->GetSeekable(); |
2817 | 0 | if (seekableIntervals.IsInvalid()) { |
2818 | 0 | aRv.Throw(NS_ERROR_DOM_INVALID_STATE_ERR); // This will reject the promise. |
2819 | 0 | return promise.forget(); |
2820 | 0 | } |
2821 | 0 | RefPtr<TimeRanges> seekable = |
2822 | 0 | new TimeRanges(ToSupports(OwnerDoc()), seekableIntervals); |
2823 | 0 | uint32_t length = seekable->Length(); |
2824 | 0 | if (length == 0) { |
2825 | 0 | promise->MaybeReject(NS_ERROR_DOM_INVALID_STATE_ERR); |
2826 | 0 | return promise.forget(); |
2827 | 0 | } |
2828 | 0 | |
2829 | 0 | // If the position we want to seek to is not in a seekable range, we seek |
2830 | 0 | // to the closest position in the seekable ranges instead. If two positions |
2831 | 0 | // are equally close, we seek to the closest position from the currentTime. |
2832 | 0 | // See seeking spec, point 7 : |
2833 | 0 | // http://www.whatwg.org/specs/web-apps/current-work/multipage/the-video-element.html#seeking |
2834 | 0 | uint32_t range = 0; |
2835 | 0 | bool isInRange = IsInRanges(*seekable, aTime, range); |
2836 | 0 | if (!isInRange) { |
2837 | 0 | if (range == 0) { |
2838 | 0 | // aTime is before the first range in |seekable|, the closest point we can |
2839 | 0 | // seek to is the start of the first range. |
2840 | 0 | aTime = seekable->Start(0); |
2841 | 0 | } else if (range == length) { |
2842 | 0 | // Seek target is after the end last range in seekable data. |
2843 | 0 | // Clamp the seek target to the end of the last seekable range. |
2844 | 0 | aTime = seekable->End(length - 1); |
2845 | 0 | } else { |
2846 | 0 | double leftBound = seekable->End(range - 1); |
2847 | 0 | double rightBound = seekable->Start(range); |
2848 | 0 | double distanceLeft = Abs(leftBound - aTime); |
2849 | 0 | double distanceRight = Abs(rightBound - aTime); |
2850 | 0 | if (distanceLeft == distanceRight) { |
2851 | 0 | double currentTime = CurrentTime(); |
2852 | 0 | distanceLeft = Abs(leftBound - currentTime); |
2853 | 0 | distanceRight = Abs(rightBound - currentTime); |
2854 | 0 | } |
2855 | 0 | aTime = (distanceLeft < distanceRight) ? leftBound : rightBound; |
2856 | 0 | } |
2857 | 0 | } |
2858 | 0 |
|
2859 | 0 | // TODO: The spec requires us to update the current time to reflect the |
2860 | 0 | // actual seek target before beginning the synchronous section, but |
2861 | 0 | // that requires changing all MediaDecoderReaders to support telling |
2862 | 0 | // us the fastSeek target, and it's currently not possible to get |
2863 | 0 | // this information as we don't yet control the demuxer for all |
2864 | 0 | // MediaDecoderReaders. |
2865 | 0 |
|
2866 | 0 | mPlayingBeforeSeek = IsPotentiallyPlaying(); |
2867 | 0 |
|
2868 | 0 | // If the audio track is silent before seeking, we should end current silence |
2869 | 0 | // range and start a new range after seeking. Since seek() could be called |
2870 | 0 | // multiple times before seekEnd() executed, we should only calculate silence |
2871 | 0 | // range when first time seek() called. Calculating on other seek() calls |
2872 | 0 | // would cause a wrong result. In order to get correct time, this checking |
2873 | 0 | // should be called before decoder->seek(). |
2874 | 0 | if (IsAudioTrackCurrentlySilent() && |
2875 | 0 | !mHasAccumulatedSilenceRangeBeforeSeekEnd) { |
2876 | 0 | AccumulateAudioTrackSilence(); |
2877 | 0 | mHasAccumulatedSilenceRangeBeforeSeekEnd = true; |
2878 | 0 | } |
2879 | 0 |
|
2880 | 0 | // The media backend is responsible for dispatching the timeupdate |
2881 | 0 | // event if it changes the playback position as a result of the seek. |
2882 | 0 | LOG(LogLevel::Debug, ("%p SetCurrentTime(%f) starting seek", this, aTime)); |
2883 | 0 | mDecoder->Seek(aTime, aSeekType); |
2884 | 0 |
|
2885 | 0 | // We changed whether we're seeking so we need to AddRemoveSelfReference. |
2886 | 0 | AddRemoveSelfReference(); |
2887 | 0 |
|
2888 | 0 | // Keep the DOM promise. |
2889 | 0 | mSeekDOMPromise = promise; |
2890 | 0 |
|
2891 | 0 | return promise.forget(); |
2892 | 0 | } |
2893 | | |
2894 | | double |
2895 | | HTMLMediaElement::Duration() const |
2896 | 0 | { |
2897 | 0 | if (mSrcStream) { |
2898 | 0 | return std::numeric_limits<double>::infinity(); |
2899 | 0 | } |
2900 | 0 | |
2901 | 0 | if (mDecoder) { |
2902 | 0 | return mDecoder->GetDuration(); |
2903 | 0 | } |
2904 | 0 | |
2905 | 0 | return std::numeric_limits<double>::quiet_NaN(); |
2906 | 0 | } |
2907 | | |
2908 | | already_AddRefed<TimeRanges> |
2909 | | HTMLMediaElement::Seekable() const |
2910 | 0 | { |
2911 | 0 | media::TimeIntervals seekable = |
2912 | 0 | mDecoder ? mDecoder->GetSeekable() : media::TimeIntervals(); |
2913 | 0 | RefPtr<TimeRanges> ranges = new TimeRanges(ToSupports(OwnerDoc()), seekable); |
2914 | 0 | return ranges.forget(); |
2915 | 0 | } |
2916 | | |
2917 | | already_AddRefed<TimeRanges> |
2918 | | HTMLMediaElement::Played() |
2919 | 0 | { |
2920 | 0 | RefPtr<TimeRanges> ranges = new TimeRanges(ToSupports(OwnerDoc())); |
2921 | 0 |
|
2922 | 0 | uint32_t timeRangeCount = 0; |
2923 | 0 | if (mPlayed) { |
2924 | 0 | timeRangeCount = mPlayed->Length(); |
2925 | 0 | } |
2926 | 0 | for (uint32_t i = 0; i < timeRangeCount; i++) { |
2927 | 0 | double begin = mPlayed->Start(i); |
2928 | 0 | double end = mPlayed->End(i); |
2929 | 0 | ranges->Add(begin, end); |
2930 | 0 | } |
2931 | 0 |
|
2932 | 0 | if (mCurrentPlayRangeStart != -1.0) { |
2933 | 0 | double now = CurrentTime(); |
2934 | 0 | if (mCurrentPlayRangeStart != now) { |
2935 | 0 | ranges->Add(mCurrentPlayRangeStart, now); |
2936 | 0 | } |
2937 | 0 | } |
2938 | 0 |
|
2939 | 0 | ranges->Normalize(); |
2940 | 0 | return ranges.forget(); |
2941 | 0 | } |
2942 | | |
2943 | | void |
2944 | | HTMLMediaElement::Pause(ErrorResult& aRv) |
2945 | 0 | { |
2946 | 0 | LOG(LogLevel::Debug, ("%p Pause() called by JS", this)); |
2947 | 0 | if (mNetworkState == NETWORK_EMPTY) { |
2948 | 0 | LOG(LogLevel::Debug, ("Loading due to Pause()")); |
2949 | 0 | DoLoad(); |
2950 | 0 | } else if (mDecoder) { |
2951 | 0 | mDecoder->Pause(); |
2952 | 0 | } |
2953 | 0 |
|
2954 | 0 | bool oldPaused = mPaused; |
2955 | 0 | mPaused = true; |
2956 | 0 | mAutoplaying = false; |
2957 | 0 | // We changed mPaused and mAutoplaying which can affect AddRemoveSelfReference |
2958 | 0 | AddRemoveSelfReference(); |
2959 | 0 | UpdateSrcMediaStreamPlaying(); |
2960 | 0 | if (mAudioChannelWrapper) { |
2961 | 0 | mAudioChannelWrapper->NotifyPlayStateChanged(); |
2962 | 0 | } |
2963 | 0 |
|
2964 | 0 | if (!oldPaused) { |
2965 | 0 | FireTimeUpdate(false); |
2966 | 0 | DispatchAsyncEvent(NS_LITERAL_STRING("pause")); |
2967 | 0 | AsyncRejectPendingPlayPromises(NS_ERROR_DOM_MEDIA_ABORT_ERR); |
2968 | 0 | } |
2969 | 0 | } |
2970 | | |
2971 | | void |
2972 | | HTMLMediaElement::SetVolume(double aVolume, ErrorResult& aRv) |
2973 | 0 | { |
2974 | 0 | LOG(LogLevel::Debug, ("%p SetVolume(%f) called by JS", this, aVolume)); |
2975 | 0 |
|
2976 | 0 | if (aVolume < 0.0 || aVolume > 1.0) { |
2977 | 0 | aRv.Throw(NS_ERROR_DOM_INDEX_SIZE_ERR); |
2978 | 0 | return; |
2979 | 0 | } |
2980 | 0 | |
2981 | 0 | if (aVolume == mVolume) |
2982 | 0 | return; |
2983 | 0 | |
2984 | 0 | mVolume = aVolume; |
2985 | 0 |
|
2986 | 0 | // Here we want just to update the volume. |
2987 | 0 | SetVolumeInternal(); |
2988 | 0 |
|
2989 | 0 | DispatchAsyncEvent(NS_LITERAL_STRING("volumechange")); |
2990 | 0 |
|
2991 | 0 | // We allow inaudible autoplay. But changing our volume may make this |
2992 | 0 | // media audible. So pause if we are no longer supposed to be autoplaying. |
2993 | 0 | PauseIfShouldNotBePlaying(); |
2994 | 0 | } |
2995 | | |
2996 | | void |
2997 | | HTMLMediaElement::MozGetMetadata(JSContext* cx, |
2998 | | JS::MutableHandle<JSObject*> aRetval, |
2999 | | ErrorResult& aRv) |
3000 | 0 | { |
3001 | 0 | if (mReadyState < HAVE_METADATA) { |
3002 | 0 | aRv.Throw(NS_ERROR_DOM_INVALID_STATE_ERR); |
3003 | 0 | return; |
3004 | 0 | } |
3005 | 0 | |
3006 | 0 | JS::Rooted<JSObject*> tags(cx, JS_NewPlainObject(cx)); |
3007 | 0 | if (!tags) { |
3008 | 0 | aRv.Throw(NS_ERROR_FAILURE); |
3009 | 0 | return; |
3010 | 0 | } |
3011 | 0 | if (mTags) { |
3012 | 0 | for (auto iter = mTags->ConstIter(); !iter.Done(); iter.Next()) { |
3013 | 0 | nsString wideValue; |
3014 | 0 | CopyUTF8toUTF16(iter.UserData(), wideValue); |
3015 | 0 | JS::Rooted<JSString*> string(cx, |
3016 | 0 | JS_NewUCStringCopyZ(cx, wideValue.Data())); |
3017 | 0 | if (!string || !JS_DefineProperty( |
3018 | 0 | cx, tags, iter.Key().Data(), string, JSPROP_ENUMERATE)) { |
3019 | 0 | NS_WARNING("couldn't create metadata object!"); |
3020 | 0 | aRv.Throw(NS_ERROR_FAILURE); |
3021 | 0 | return; |
3022 | 0 | } |
3023 | 0 | } |
3024 | 0 | } |
3025 | 0 |
|
3026 | 0 | aRetval.set(tags); |
3027 | 0 | } |
3028 | | |
3029 | | void |
3030 | | HTMLMediaElement::SetMutedInternal(uint32_t aMuted) |
3031 | 0 | { |
3032 | 0 | uint32_t oldMuted = mMuted; |
3033 | 0 | mMuted = aMuted; |
3034 | 0 |
|
3035 | 0 | if (!!aMuted == !!oldMuted) { |
3036 | 0 | return; |
3037 | 0 | } |
3038 | 0 | |
3039 | 0 | SetVolumeInternal(); |
3040 | 0 | } |
3041 | | |
3042 | | void |
3043 | | HTMLMediaElement::PauseIfShouldNotBePlaying() |
3044 | 0 | { |
3045 | 0 | if (GetPaused()) { |
3046 | 0 | return; |
3047 | 0 | } |
3048 | 0 | if (!AutoplayPolicy::IsAllowedToPlay(*this)) { |
3049 | 0 | AUTOPLAY_LOG("pause because not allowed to play, element=%p", this); |
3050 | 0 | ErrorResult rv; |
3051 | 0 | Pause(rv); |
3052 | 0 | OwnerDoc()->SetDocTreeHadPlayRevoked(); |
3053 | 0 | } |
3054 | 0 | } |
3055 | | |
3056 | | void |
3057 | | HTMLMediaElement::SetVolumeInternal() |
3058 | 0 | { |
3059 | 0 | float effectiveVolume = ComputedVolume(); |
3060 | 0 |
|
3061 | 0 | if (mDecoder) { |
3062 | 0 | mDecoder->SetVolume(effectiveVolume); |
3063 | 0 | } else if (MediaStream* stream = GetSrcMediaStream()) { |
3064 | 0 | if (mSrcStreamIsPlaying) { |
3065 | 0 | stream->SetAudioOutputVolume(this, effectiveVolume); |
3066 | 0 | } |
3067 | 0 | } |
3068 | 0 |
|
3069 | 0 | NotifyAudioPlaybackChanged( |
3070 | 0 | AudioChannelService::AudibleChangedReasons::eVolumeChanged); |
3071 | 0 | } |
3072 | | |
3073 | | void |
3074 | | HTMLMediaElement::SetMuted(bool aMuted) |
3075 | 0 | { |
3076 | 0 | LOG(LogLevel::Debug, ("%p SetMuted(%d) called by JS", this, aMuted)); |
3077 | 0 | if (aMuted == Muted()) { |
3078 | 0 | return; |
3079 | 0 | } |
3080 | 0 | |
3081 | 0 | if (aMuted) { |
3082 | 0 | SetMutedInternal(mMuted | MUTED_BY_CONTENT); |
3083 | 0 | } else { |
3084 | 0 | SetMutedInternal(mMuted & ~MUTED_BY_CONTENT); |
3085 | 0 | } |
3086 | 0 |
|
3087 | 0 | DispatchAsyncEvent(NS_LITERAL_STRING("volumechange")); |
3088 | 0 |
|
3089 | 0 | // We allow inaudible autoplay. But changing our mute status may make this |
3090 | 0 | // media audible. So pause if we are no longer supposed to be autoplaying. |
3091 | 0 | PauseIfShouldNotBePlaying(); |
3092 | 0 | } |
3093 | | |
3094 | | class HTMLMediaElement::StreamCaptureTrackSource |
3095 | | : public MediaStreamTrackSource |
3096 | | , public MediaStreamTrackSource::Sink |
3097 | | { |
3098 | | public: |
3099 | | NS_DECL_ISUPPORTS_INHERITED |
3100 | | NS_DECL_CYCLE_COLLECTION_CLASS_INHERITED(StreamCaptureTrackSource, |
3101 | | MediaStreamTrackSource) |
3102 | | |
3103 | | StreamCaptureTrackSource(HTMLMediaElement* aElement, |
3104 | | MediaStreamTrackSource* aCapturedTrackSource, |
3105 | | DOMMediaStream* aOwningStream, |
3106 | | TrackID aDestinationTrackID) |
3107 | | : MediaStreamTrackSource(aCapturedTrackSource->GetPrincipal(), nsString()) |
3108 | | , mElement(aElement) |
3109 | | , mCapturedTrackSource(aCapturedTrackSource) |
3110 | | , mOwningStream(aOwningStream) |
3111 | | , mDestinationTrackID(aDestinationTrackID) |
3112 | 0 | { |
3113 | 0 | MOZ_ASSERT(mElement); |
3114 | 0 | MOZ_ASSERT(mCapturedTrackSource); |
3115 | 0 | MOZ_ASSERT(mOwningStream); |
3116 | 0 | MOZ_ASSERT(IsTrackIDExplicit(mDestinationTrackID)); |
3117 | 0 |
|
3118 | 0 | mCapturedTrackSource->RegisterSink(this); |
3119 | 0 | } |
3120 | | |
3121 | | void Destroy() override |
3122 | 0 | { |
3123 | 0 | if (mCapturedTrackSource) { |
3124 | 0 | mCapturedTrackSource->UnregisterSink(this); |
3125 | 0 | mCapturedTrackSource = nullptr; |
3126 | 0 | } |
3127 | 0 | } |
3128 | | |
3129 | | MediaSourceEnum GetMediaSource() const override |
3130 | 0 | { |
3131 | 0 | return MediaSourceEnum::Other; |
3132 | 0 | } |
3133 | | |
3134 | | CORSMode GetCORSMode() const override |
3135 | 0 | { |
3136 | 0 | if (!mCapturedTrackSource) { |
3137 | 0 | // This could happen during shutdown. |
3138 | 0 | return CORS_NONE; |
3139 | 0 | } |
3140 | 0 | |
3141 | 0 | return mCapturedTrackSource->GetCORSMode(); |
3142 | 0 | } |
3143 | | |
3144 | | void Stop() override |
3145 | 0 | { |
3146 | 0 | if (mElement && mElement->mSrcStream) { |
3147 | 0 | // Only notify if we're still playing the source stream. GC might have |
3148 | 0 | // cleared it before the track sources. |
3149 | 0 | mElement->NotifyOutputTrackStopped(mOwningStream, mDestinationTrackID); |
3150 | 0 | } |
3151 | 0 | mElement = nullptr; |
3152 | 0 | mOwningStream = nullptr; |
3153 | 0 |
|
3154 | 0 | Destroy(); |
3155 | 0 | } |
3156 | | |
3157 | | /** |
3158 | | * Do not keep the track source alive. The source lifetime is controlled by |
3159 | | * its associated tracks. |
3160 | | */ |
3161 | 0 | bool KeepsSourceAlive() const override { return false; } |
3162 | | |
3163 | | /** |
3164 | | * Do not keep the track source on. It is controlled by its associated tracks. |
3165 | | */ |
3166 | 0 | bool Enabled() const override { return false; } |
3167 | | |
3168 | 0 | void Disable() override {} |
3169 | | |
3170 | 0 | void Enable() override {} |
3171 | | |
3172 | | void PrincipalChanged() override |
3173 | 0 | { |
3174 | 0 | if (!mCapturedTrackSource) { |
3175 | 0 | // This could happen during shutdown. |
3176 | 0 | return; |
3177 | 0 | } |
3178 | 0 | |
3179 | 0 | mPrincipal = mCapturedTrackSource->GetPrincipal(); |
3180 | 0 | MediaStreamTrackSource::PrincipalChanged(); |
3181 | 0 | } |
3182 | | |
3183 | | void MutedChanged(bool aNewState) override |
3184 | 0 | { |
3185 | 0 | if (!mCapturedTrackSource) { |
3186 | 0 | // This could happen during shutdown. |
3187 | 0 | return; |
3188 | 0 | } |
3189 | 0 | |
3190 | 0 | MediaStreamTrackSource::MutedChanged(aNewState); |
3191 | 0 | } |
3192 | | |
3193 | | private: |
3194 | 0 | virtual ~StreamCaptureTrackSource() {} |
3195 | | |
3196 | | RefPtr<HTMLMediaElement> mElement; |
3197 | | RefPtr<MediaStreamTrackSource> mCapturedTrackSource; |
3198 | | RefPtr<DOMMediaStream> mOwningStream; |
3199 | | TrackID mDestinationTrackID; |
3200 | | }; |
3201 | | |
3202 | | NS_IMPL_ADDREF_INHERITED(HTMLMediaElement::StreamCaptureTrackSource, |
3203 | | MediaStreamTrackSource) |
3204 | | NS_IMPL_RELEASE_INHERITED(HTMLMediaElement::StreamCaptureTrackSource, |
3205 | | MediaStreamTrackSource) |
3206 | 0 | NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION( |
3207 | 0 | HTMLMediaElement::StreamCaptureTrackSource) |
3208 | 0 | NS_INTERFACE_MAP_END_INHERITING(MediaStreamTrackSource) |
3209 | | NS_IMPL_CYCLE_COLLECTION_INHERITED(HTMLMediaElement::StreamCaptureTrackSource, |
3210 | | MediaStreamTrackSource, |
3211 | | mElement, |
3212 | | mCapturedTrackSource, |
3213 | | mOwningStream) |
3214 | | |
// A MediaStreamTrackSource for tracks captured from this element's decoder
// (mozCaptureStream() on a decoder-backed element). It mirrors the decoder's
// principal into the track source and forwards principal-change
// notifications to track listeners.
class HTMLMediaElement::DecoderCaptureTrackSource
  : public MediaStreamTrackSource
  , public DecoderPrincipalChangeObserver
{
public:
  NS_DECL_ISUPPORTS_INHERITED
  NS_DECL_CYCLE_COLLECTION_CLASS_INHERITED(DecoderCaptureTrackSource,
                                           MediaStreamTrackSource)

  // Seeds mPrincipal with the element's current principal and registers for
  // decoder principal-change notifications.
  explicit DecoderCaptureTrackSource(HTMLMediaElement* aElement)
    : MediaStreamTrackSource(
        nsCOMPtr<nsIPrincipal>(aElement->GetCurrentPrincipal()).get(),
        nsString())
    , mElement(aElement)
  {
    MOZ_ASSERT(mElement);
    mElement->AddDecoderPrincipalChangeObserver(this);
  }

  // Unregisters the principal-change observer and drops the element
  // reference. Safe to call multiple times; later calls are no-ops.
  void Destroy() override
  {
    if (mElement) {
      DebugOnly<bool> res =
        mElement->RemoveDecoderPrincipalChangeObserver(this);
      NS_ASSERTION(res,
                   "Removing decoder principal changed observer failed. "
                   "Had it already been removed?");
      mElement = nullptr;
    }
  }

  // Decoder-captured tracks are not tied to a capture device.
  MediaSourceEnum GetMediaSource() const override
  {
    return MediaSourceEnum::Other;
  }

  // Reports the element's CORS mode; CORS_NONE if Destroy() already ran
  // (which should not happen while the source is in use).
  CORSMode GetCORSMode() const override
  {
    if (!mElement) {
      MOZ_ASSERT(false, "Should always have an element if in use");
      return CORS_NONE;
    }

    return mElement->GetCORSMode();
  }

  void Stop() override
  {
    // We don't notify the source that a track was stopped since it will keep
    // producing tracks until the element ends. The decoder also needs the
    // tracks it created to be live at the source since the decoder's clock is
    // based on MediaStreams during capture.
  }

  // Disabling/enabling at the source is intentionally a no-op for decoder
  // capture; the decoder keeps feeding the tracks regardless.
  void Disable() override {}

  void Enable() override {}

  // DecoderPrincipalChangeObserver: fold the decoder's new principal into
  // mPrincipal, and notify listeners only if the combination changed it.
  void NotifyDecoderPrincipalChanged() override
  {
    nsCOMPtr<nsIPrincipal> newPrincipal = mElement->GetCurrentPrincipal();
    if (nsContentUtils::CombineResourcePrincipals(&mPrincipal, newPrincipal)) {
      PrincipalChanged();
    }
  }

protected:
  virtual ~DecoderCaptureTrackSource() {}

  // The captured element; cleared by Destroy().
  RefPtr<HTMLMediaElement> mElement;
};
3286 | | |
// XPCOM boilerplate for DecoderCaptureTrackSource: inherited refcounting and
// cycle collection covering the single RefPtr member (mElement).
NS_IMPL_ADDREF_INHERITED(HTMLMediaElement::DecoderCaptureTrackSource,
                         MediaStreamTrackSource)
NS_IMPL_RELEASE_INHERITED(HTMLMediaElement::DecoderCaptureTrackSource,
                          MediaStreamTrackSource)
NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION(
  HTMLMediaElement::DecoderCaptureTrackSource)
NS_INTERFACE_MAP_END_INHERITING(MediaStreamTrackSource)
NS_IMPL_CYCLE_COLLECTION_INHERITED(HTMLMediaElement::DecoderCaptureTrackSource,
                                   MediaStreamTrackSource,
                                   mElement)
3297 | | |
// Supplies MediaStreamTrackSources for tracks that appear in a captured
// output stream when the capture source is a decoder. For MediaStream
// sources the tracks are added explicitly on the main thread instead, so
// this getter should never be consulted in that case.
class HTMLMediaElement::CaptureStreamTrackSourceGetter
  : public MediaStreamTrackSourceGetter
{
public:
  NS_DECL_ISUPPORTS_INHERITED
  NS_DECL_CYCLE_COLLECTION_CLASS_INHERITED(CaptureStreamTrackSourceGetter,
                                           MediaStreamTrackSourceGetter)

  explicit CaptureStreamTrackSourceGetter(HTMLMediaElement* aElement)
    : mElement(aElement)
  {
  }

  // Returns a fresh DecoderCaptureTrackSource for the requested track, or
  // nullptr (with an assertion) when capturing a MediaStream source.
  already_AddRefed<dom::MediaStreamTrackSource> GetMediaStreamTrackSource(
    TrackID aInputTrackID) override
  {
    if (mElement && mElement->mSrcStream) {
      NS_ERROR("Captured media element playing a stream adds tracks explicitly "
               "on main thread.");
      return nullptr;
    }

    // We can return a new source each time here, even for different streams,
    // since the sources don't keep any internal state and all of them call
    // through to the same HTMLMediaElement.
    // If this changes (after implementing Stop()?) we'll have to ensure we
    // return the same source for all requests to the same TrackID, and only
    // have one getter.
    return do_AddRef(new DecoderCaptureTrackSource(mElement));
  }

protected:
  virtual ~CaptureStreamTrackSourceGetter() {}

  // The element whose decoder output is being captured.
  RefPtr<HTMLMediaElement> mElement;
};
3334 | | |
// XPCOM boilerplate for CaptureStreamTrackSourceGetter: inherited
// refcounting and cycle collection covering mElement.
NS_IMPL_ADDREF_INHERITED(HTMLMediaElement::CaptureStreamTrackSourceGetter,
                         MediaStreamTrackSourceGetter)
NS_IMPL_RELEASE_INHERITED(HTMLMediaElement::CaptureStreamTrackSourceGetter,
                          MediaStreamTrackSourceGetter)
NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION(
  HTMLMediaElement::CaptureStreamTrackSourceGetter)
NS_INTERFACE_MAP_END_INHERITING(MediaStreamTrackSourceGetter)
NS_IMPL_CYCLE_COLLECTION_INHERITED(
  HTMLMediaElement::CaptureStreamTrackSourceGetter,
  MediaStreamTrackSourceGetter,
  mElement)
3346 | | |
3347 | | void |
3348 | | HTMLMediaElement::SetCapturedOutputStreamsEnabled(bool aEnabled) |
3349 | 0 | { |
3350 | 0 | for (OutputMediaStream& ms : mOutputStreams) { |
3351 | 0 | if (ms.mCapturingDecoder) { |
3352 | 0 | MOZ_ASSERT(!ms.mCapturingMediaStream); |
3353 | 0 | continue; |
3354 | 0 | } |
3355 | 0 | for (auto pair : ms.mTrackPorts) { |
3356 | 0 | MediaStream* outputSource = ms.mStream->GetInputStream(); |
3357 | 0 | if (!outputSource) { |
3358 | 0 | NS_ERROR("No output source stream"); |
3359 | 0 | return; |
3360 | 0 | } |
3361 | 0 |
|
3362 | 0 | TrackID id = pair.second()->GetDestinationTrackId(); |
3363 | 0 | outputSource->SetTrackEnabled(id, |
3364 | 0 | aEnabled |
3365 | 0 | ? DisabledTrackMode::ENABLED |
3366 | 0 | : DisabledTrackMode::SILENCE_FREEZE); |
3367 | 0 |
|
3368 | 0 | LOG(LogLevel::Debug, |
3369 | 0 | ("%s track %d for captured MediaStream %p", |
3370 | 0 | aEnabled ? "Enabled" : "Disabled", |
3371 | 0 | id, |
3372 | 0 | ms.mStream.get())); |
3373 | 0 | } |
3374 | 0 | } |
3375 | 0 | } |
3376 | | |
3377 | | void |
3378 | | HTMLMediaElement::AddCaptureMediaTrackToOutputStream( |
3379 | | MediaTrack* aTrack, |
3380 | | OutputMediaStream& aOutputStream, |
3381 | | bool aAsyncAddtrack) |
3382 | 0 | { |
3383 | 0 | if (aOutputStream.mCapturingDecoder) { |
3384 | 0 | MOZ_ASSERT(!aOutputStream.mCapturingMediaStream); |
3385 | 0 | return; |
3386 | 0 | } |
3387 | 0 | aOutputStream.mCapturingMediaStream = true; |
3388 | 0 |
|
3389 | 0 | if (aOutputStream.mStream == mSrcStream) { |
3390 | 0 | // Cycle detected. This can happen since tracks are added async. |
3391 | 0 | // We avoid forwarding it to the output here or we'd get into an infloop. |
3392 | 0 | return; |
3393 | 0 | } |
3394 | 0 | |
3395 | 0 | MediaStream* outputSource = aOutputStream.mStream->GetInputStream(); |
3396 | 0 | if (!outputSource) { |
3397 | 0 | NS_ERROR("No output source stream"); |
3398 | 0 | return; |
3399 | 0 | } |
3400 | 0 |
|
3401 | 0 | ProcessedMediaStream* processedOutputSource = |
3402 | 0 | outputSource->AsProcessedStream(); |
3403 | 0 | if (!processedOutputSource) { |
3404 | 0 | NS_ERROR("Input stream not a ProcessedMediaStream"); |
3405 | 0 | return; |
3406 | 0 | } |
3407 | 0 |
|
3408 | 0 | if (!aTrack) { |
3409 | 0 | MOZ_ASSERT(false, "Bad MediaTrack"); |
3410 | 0 | return; |
3411 | 0 | } |
3412 | 0 |
|
3413 | 0 | MediaStreamTrack* inputTrack = mSrcStream->GetTrackById(aTrack->GetId()); |
3414 | 0 | MOZ_ASSERT(inputTrack); |
3415 | 0 | if (!inputTrack) { |
3416 | 0 | NS_ERROR("Input track not found in source stream"); |
3417 | 0 | return; |
3418 | 0 | } |
3419 | 0 |
|
3420 | | #if DEBUG |
3421 | | for (auto pair : aOutputStream.mTrackPorts) { |
3422 | | MOZ_ASSERT(pair.first() != aTrack->GetId(), |
3423 | | "Captured track already captured to output stream"); |
3424 | | } |
3425 | | #endif |
3426 | | |
3427 | 0 | TrackID destinationTrackID = aOutputStream.mNextAvailableTrackID++; |
3428 | 0 | RefPtr<MediaStreamTrackSource> source = new StreamCaptureTrackSource( |
3429 | 0 | this, &inputTrack->GetSource(), aOutputStream.mStream, destinationTrackID); |
3430 | 0 |
|
3431 | 0 | MediaSegment::Type type = inputTrack->AsAudioStreamTrack() |
3432 | 0 | ? MediaSegment::AUDIO |
3433 | 0 | : MediaSegment::VIDEO; |
3434 | 0 |
|
3435 | 0 | RefPtr<MediaStreamTrack> track = |
3436 | 0 | aOutputStream.mStream->CreateDOMTrack(destinationTrackID, type, source); |
3437 | 0 |
|
3438 | 0 | if (aAsyncAddtrack) { |
3439 | 0 | mMainThreadEventTarget->Dispatch( |
3440 | 0 | NewRunnableMethod<StoreRefPtrPassByPtr<MediaStreamTrack>>( |
3441 | 0 | "DOMMediaStream::AddTrackInternal", |
3442 | 0 | aOutputStream.mStream, |
3443 | 0 | &DOMMediaStream::AddTrackInternal, |
3444 | 0 | track)); |
3445 | 0 | } else { |
3446 | 0 | aOutputStream.mStream->AddTrackInternal(track); |
3447 | 0 | } |
3448 | 0 |
|
3449 | 0 | // Track is muted initially, so we don't leak data if it's added while paused |
3450 | 0 | // and an MSG iteration passes before the mute comes into effect. |
3451 | 0 | processedOutputSource->SetTrackEnabled(destinationTrackID, |
3452 | 0 | DisabledTrackMode::SILENCE_FREEZE); |
3453 | 0 | RefPtr<MediaInputPort> port = inputTrack->ForwardTrackContentsTo( |
3454 | 0 | processedOutputSource, destinationTrackID); |
3455 | 0 |
|
3456 | 0 | Pair<nsString, RefPtr<MediaInputPort>> p(aTrack->GetId(), port); |
3457 | 0 | aOutputStream.mTrackPorts.AppendElement(std::move(p)); |
3458 | 0 |
|
3459 | 0 | if (mSrcStreamIsPlaying) { |
3460 | 0 | processedOutputSource->SetTrackEnabled(destinationTrackID, |
3461 | 0 | DisabledTrackMode::ENABLED); |
3462 | 0 | } |
3463 | 0 |
|
3464 | 0 | LOG(LogLevel::Debug, |
3465 | 0 | ("Created %s track %p with id %d from track %p through MediaInputPort %p", |
3466 | 0 | inputTrack->AsAudioStreamTrack() ? "audio" : "video", |
3467 | 0 | track.get(), |
3468 | 0 | destinationTrackID, |
3469 | 0 | inputTrack, |
3470 | 0 | port.get())); |
3471 | 0 | } |
3472 | | |
3473 | | bool |
3474 | | HTMLMediaElement::CanBeCaptured(StreamCaptureType aCaptureType) |
3475 | 0 | { |
3476 | 0 | // Don't bother capturing when the document has gone away |
3477 | 0 | nsPIDOMWindowInner* window = OwnerDoc()->GetInnerWindow(); |
3478 | 0 | if (!window) { |
3479 | 0 | return false; |
3480 | 0 | } |
3481 | 0 | |
3482 | 0 | // Prevent capturing restricted video |
3483 | 0 | if (aCaptureType == StreamCaptureType::CAPTURE_ALL_TRACKS && |
3484 | 0 | ContainsRestrictedContent()) { |
3485 | 0 | return false; |
3486 | 0 | } |
3487 | 0 | return true; |
3488 | 0 | } |
3489 | | |
// Shared implementation behind mozCaptureStream()/mozCaptureStreamUntilEnded()
// and audio-only capture. Creates a new track-union DOMMediaStream in aGraph
// that mirrors this element's output, records it in mOutputStreams, and —
// once metadata is known — pre-creates/forwards the appropriate tracks.
// Returns nullptr on failure (graph mismatch, or audio-capture of a
// MediaStream source).
already_AddRefed<DOMMediaStream>
HTMLMediaElement::CaptureStreamInternal(StreamCaptureBehavior aFinishBehavior,
                                        StreamCaptureType aStreamCaptureType,
                                        MediaStreamGraph* aGraph)
{
  MOZ_RELEASE_ASSERT(aGraph);
  MOZ_ASSERT(CanBeCaptured(aStreamCaptureType));

  MarkAsContentSource(CallerAPI::CAPTURE_STREAM);
  MarkAsTainted();

  // We don't support routing to a different graph.
  if (!mOutputStreams.IsEmpty() &&
      aGraph != mOutputStreams[0].mStream->GetInputStream()->Graph()) {
    return nullptr;
  }

  OutputMediaStream* out = mOutputStreams.AppendElement();
  MediaStreamTrackSourceGetter* getter =
    new CaptureStreamTrackSourceGetter(this);
  nsPIDOMWindowInner* window = OwnerDoc()->GetInnerWindow();
  out->mStream =
    DOMMediaStream::CreateTrackUnionStreamAsInput(window, aGraph, getter);
  out->mStream->SetInactiveOnFinish();
  out->mFinishWhenEnded =
    aFinishBehavior == StreamCaptureBehavior::FINISH_WHEN_ENDED;
  out->mCapturingAudioOnly =
    aStreamCaptureType == StreamCaptureType::CAPTURE_AUDIO;

  if (aStreamCaptureType == StreamCaptureType::CAPTURE_AUDIO) {
    if (mSrcStream) {
      // We don't support applying volume and mute to the captured stream, when
      // capturing a MediaStream.
      nsContentUtils::ReportToConsole(
        nsIScriptError::errorFlag,
        NS_LITERAL_CSTRING("Media"),
        OwnerDoc(),
        nsContentUtils::eDOM_PROPERTIES,
        "MediaElementAudioCaptureOfMediaStreamError");
      return nullptr;
    }

    // mAudioCaptured tells the user that the audio played by this media element
    // is being routed to the captureStreams *instead* of being played to
    // speakers.
    mAudioCaptured = true;
  }

  if (mDecoder) {
    out->mCapturingDecoder = true;
    mDecoder->AddOutputStream(
      out->mStream->GetInputStream()->AsProcessedStream(),
      out->mNextAvailableTrackID,
      aFinishBehavior == StreamCaptureBehavior::FINISH_WHEN_ENDED);
  } else if (mSrcStream) {
    out->mCapturingMediaStream = true;
  }

  if (mReadyState == HAVE_NOTHING) {
    // Do not expose the tracks until we have metadata.
    RefPtr<DOMMediaStream> result = out->mStream;
    return result.forget();
  }

  // Decoder capture: pre-create one audio track and (for video elements not
  // capturing audio-only) one video track in the output stream.
  if (mDecoder) {
    if (HasAudio()) {
      TrackID audioTrackId = out->mNextAvailableTrackID++;
      RefPtr<MediaStreamTrackSource> trackSource =
        getter->GetMediaStreamTrackSource(audioTrackId);
      RefPtr<MediaStreamTrack> track = out->mStream->CreateDOMTrack(
        audioTrackId, MediaSegment::AUDIO, trackSource);
      out->mPreCreatedTracks.AppendElement(track);
      out->mStream->AddTrackInternal(track);
      LOG(LogLevel::Debug,
          ("Created audio track %d for captured decoder", audioTrackId));
    }
    if (IsVideo() && HasVideo() && !out->mCapturingAudioOnly) {
      TrackID videoTrackId = out->mNextAvailableTrackID++;
      RefPtr<MediaStreamTrackSource> trackSource =
        getter->GetMediaStreamTrackSource(videoTrackId);
      RefPtr<MediaStreamTrack> track = out->mStream->CreateDOMTrack(
        videoTrackId, MediaSegment::VIDEO, trackSource);
      out->mPreCreatedTracks.AppendElement(track);
      out->mStream->AddTrackInternal(track);
      LOG(LogLevel::Debug,
          ("Created video track %d for captured decoder", videoTrackId));
    }
  }

  // MediaStream capture: forward the currently enabled/selected tracks.
  if (mSrcStream) {
    for (size_t i = 0; i < AudioTracks()->Length(); ++i) {
      AudioTrack* t = (*AudioTracks())[i];
      if (t->Enabled()) {
        AddCaptureMediaTrackToOutputStream(t, *out, false);
      }
    }
    if (IsVideo() && !out->mCapturingAudioOnly) {
      // Only add video tracks if we're a video element and the output stream
      // wants video.
      for (size_t i = 0; i < VideoTracks()->Length(); ++i) {
        VideoTrack* t = (*VideoTracks())[i];
        if (t->Selected()) {
          AddCaptureMediaTrackToOutputStream(t, *out, false);
        }
      }
    }
  }
  RefPtr<DOMMediaStream> result = out->mStream;
  return result.forget();
}
3600 | | |
3601 | | already_AddRefed<DOMMediaStream> |
3602 | | HTMLMediaElement::CaptureAudio(ErrorResult& aRv, MediaStreamGraph* aGraph) |
3603 | 0 | { |
3604 | 0 | MOZ_RELEASE_ASSERT(aGraph); |
3605 | 0 |
|
3606 | 0 | if (!CanBeCaptured(StreamCaptureType::CAPTURE_AUDIO)) { |
3607 | 0 | aRv.Throw(NS_ERROR_FAILURE); |
3608 | 0 | return nullptr; |
3609 | 0 | } |
3610 | 0 | |
3611 | 0 | RefPtr<DOMMediaStream> stream = |
3612 | 0 | CaptureStreamInternal(StreamCaptureBehavior::CONTINUE_WHEN_ENDED, |
3613 | 0 | StreamCaptureType::CAPTURE_AUDIO, |
3614 | 0 | aGraph); |
3615 | 0 | if (!stream) { |
3616 | 0 | aRv.Throw(NS_ERROR_FAILURE); |
3617 | 0 | return nullptr; |
3618 | 0 | } |
3619 | 0 | |
3620 | 0 | return stream.forget(); |
3621 | 0 | } |
3622 | | |
3623 | | already_AddRefed<DOMMediaStream> |
3624 | | HTMLMediaElement::MozCaptureStream(ErrorResult& aRv) |
3625 | 0 | { |
3626 | 0 | MediaStreamGraph::GraphDriverType graphDriverType = |
3627 | 0 | HasAudio() ? MediaStreamGraph::AUDIO_THREAD_DRIVER |
3628 | 0 | : MediaStreamGraph::SYSTEM_THREAD_DRIVER; |
3629 | 0 |
|
3630 | 0 | nsPIDOMWindowInner* window = OwnerDoc()->GetInnerWindow(); |
3631 | 0 | if (!window) { |
3632 | 0 | aRv.Throw(NS_ERROR_FAILURE); |
3633 | 0 | return nullptr; |
3634 | 0 | } |
3635 | 0 | |
3636 | 0 | if (!CanBeCaptured(StreamCaptureType::CAPTURE_ALL_TRACKS)) { |
3637 | 0 | aRv.Throw(NS_ERROR_FAILURE); |
3638 | 0 | return nullptr; |
3639 | 0 | } |
3640 | 0 | |
3641 | 0 | MediaStreamGraph* graph = MediaStreamGraph::GetInstance( |
3642 | 0 | graphDriverType, window, MediaStreamGraph::REQUEST_DEFAULT_SAMPLE_RATE); |
3643 | 0 |
|
3644 | 0 | RefPtr<DOMMediaStream> stream = |
3645 | 0 | CaptureStreamInternal(StreamCaptureBehavior::CONTINUE_WHEN_ENDED, |
3646 | 0 | StreamCaptureType::CAPTURE_ALL_TRACKS, |
3647 | 0 | graph); |
3648 | 0 | if (!stream) { |
3649 | 0 | aRv.Throw(NS_ERROR_FAILURE); |
3650 | 0 | return nullptr; |
3651 | 0 | } |
3652 | 0 | |
3653 | 0 | return stream.forget(); |
3654 | 0 | } |
3655 | | |
3656 | | already_AddRefed<DOMMediaStream> |
3657 | | HTMLMediaElement::MozCaptureStreamUntilEnded(ErrorResult& aRv) |
3658 | 0 | { |
3659 | 0 | MediaStreamGraph::GraphDriverType graphDriverType = |
3660 | 0 | HasAudio() ? MediaStreamGraph::AUDIO_THREAD_DRIVER |
3661 | 0 | : MediaStreamGraph::SYSTEM_THREAD_DRIVER; |
3662 | 0 |
|
3663 | 0 | nsPIDOMWindowInner* window = OwnerDoc()->GetInnerWindow(); |
3664 | 0 | if (!window) { |
3665 | 0 | aRv.Throw(NS_ERROR_FAILURE); |
3666 | 0 | return nullptr; |
3667 | 0 | } |
3668 | 0 | |
3669 | 0 | if (!CanBeCaptured(StreamCaptureType::CAPTURE_ALL_TRACKS)) { |
3670 | 0 | aRv.Throw(NS_ERROR_FAILURE); |
3671 | 0 | return nullptr; |
3672 | 0 | } |
3673 | 0 | |
3674 | 0 | MediaStreamGraph* graph = MediaStreamGraph::GetInstance( |
3675 | 0 | graphDriverType, window, MediaStreamGraph::REQUEST_DEFAULT_SAMPLE_RATE); |
3676 | 0 |
|
3677 | 0 | RefPtr<DOMMediaStream> stream = |
3678 | 0 | CaptureStreamInternal(StreamCaptureBehavior::FINISH_WHEN_ENDED, |
3679 | 0 | StreamCaptureType::CAPTURE_ALL_TRACKS, |
3680 | 0 | graph); |
3681 | 0 | if (!stream) { |
3682 | 0 | aRv.Throw(NS_ERROR_FAILURE); |
3683 | 0 | return nullptr; |
3684 | 0 | } |
3685 | 0 | |
3686 | 0 | return stream.forget(); |
3687 | 0 | } |
3688 | | |
// Hashtable entry keyed on an nsIURI, collecting every media element that is
// currently loading that URI (used for decoder cloning; see gElementTable).
class MediaElementSetForURI : public nsURIHashKey
{
public:
  explicit MediaElementSetForURI(const nsIURI* aKey) : nsURIHashKey(aKey) {}
  MediaElementSetForURI(MediaElementSetForURI&& aOther)
    : nsURIHashKey(std::move(aOther))
    , mElements(std::move(aOther.mElements)) {}
  // Raw (non-owning) pointers: elements remove themselves via
  // RemoveMediaElementFromURITable() before they go away.
  nsTArray<HTMLMediaElement*> mElements;
};
3698 | | |
typedef nsTHashtable<MediaElementSetForURI> MediaElementURITable;
// Elements in this table must have non-null mDecoder and mLoadingSrc, and those
// can't change while the element is in the table. The table is keyed by
// the element's mLoadingSrc. Each entry has a list of all elements with the
// same mLoadingSrc.
// Lazily created in AddMediaElementToURITable() and destroyed again in
// RemoveMediaElementFromURITable() once the last entry is gone.
static MediaElementURITable* gElementTable;
3705 | | |
#ifdef DEBUG
// Compares two URIs for equality; null URIs are never equal, even to each
// other.
static bool
URISafeEquals(nsIURI* a1, nsIURI* a2)
{
  if (!a1 || !a2) {
    // Consider two empty URIs *not* equal!
    return false;
  }
  bool equal = false;
  return NS_SUCCEEDED(a1->Equals(a2, &equal)) && equal;
}
// Returns the number of times aElement appears in the media element table
// for aURI. If this returns other than 0 or 1, there's a bug somewhere!
static unsigned
MediaElementTableCount(HTMLMediaElement* aElement, nsIURI* aURI)
{
  if (!gElementTable || !aElement) {
    return 0;
  }
  uint32_t matchingUriCount = 0;
  uint32_t mismatchedUriCount = 0;
  for (auto it = gElementTable->ConstIter(); !it.Done(); it.Next()) {
    MediaElementSetForURI* entry = it.Get();
    // Count how often aElement occurs under this key.
    uint32_t occurrences = 0;
    for (const auto& elem : entry->mElements) {
      if (elem == aElement) {
        ++occurrences;
      }
    }
    if (URISafeEquals(aURI, entry->GetKey())) {
      matchingUriCount = occurrences;
    } else {
      mismatchedUriCount += occurrences;
    }
  }
  NS_ASSERTION(mismatchedUriCount == 0,
               "Should not have entries for unknown URIs");
  return matchingUriCount;
}
#endif
3746 | | |
// Registers this element in the global URI table under mLoadingSrc so other
// elements loading the same URI can clone our decoder. Creates the table on
// first use. Must only be called once per load (asserted in debug builds).
void
HTMLMediaElement::AddMediaElementToURITable()
{
  NS_ASSERTION(mDecoder, "Call this only with decoder Load called");
  NS_ASSERTION(
    MediaElementTableCount(this, mLoadingSrc) == 0,
    "Should not have entry for element in element table before addition");
  if (!gElementTable) {
    gElementTable = new MediaElementURITable();
  }
  MediaElementSetForURI* entry = gElementTable->PutEntry(mLoadingSrc);
  entry->mElements.AppendElement(this);
  NS_ASSERTION(
    MediaElementTableCount(this, mLoadingSrc) == 1,
    "Should have a single entry for element in element table after addition");
}
3763 | | |
// Undoes AddMediaElementToURITable(). No-op if we were never added (no
// decoder, no loading src, or no table). Deletes the global table once it
// becomes empty.
void
HTMLMediaElement::RemoveMediaElementFromURITable()
{
  if (!mDecoder || !mLoadingSrc || !gElementTable) {
    return;
  }
  MediaElementSetForURI* entry = gElementTable->GetEntry(mLoadingSrc);
  if (!entry) {
    return;
  }
  entry->mElements.RemoveElement(this);
  if (entry->mElements.IsEmpty()) {
    gElementTable->RemoveEntry(entry);
    if (gElementTable->Count() == 0) {
      delete gElementTable;
      gElementTable = nullptr;
    }
  }
  NS_ASSERTION(MediaElementTableCount(this, mLoadingSrc) == 0,
               "After remove, should no longer have an entry in element table");
}
3785 | | |
3786 | | HTMLMediaElement* |
3787 | | HTMLMediaElement::LookupMediaElementURITable(nsIURI* aURI) |
3788 | 0 | { |
3789 | 0 | if (!gElementTable) { |
3790 | 0 | return nullptr; |
3791 | 0 | } |
3792 | 0 | MediaElementSetForURI* entry = gElementTable->GetEntry(aURI); |
3793 | 0 | if (!entry) { |
3794 | 0 | return nullptr; |
3795 | 0 | } |
3796 | 0 | for (uint32_t i = 0; i < entry->mElements.Length(); ++i) { |
3797 | 0 | HTMLMediaElement* elem = entry->mElements[i]; |
3798 | 0 | bool equal; |
3799 | 0 | // Look for elements that have the same principal and CORS mode. |
3800 | 0 | // Ditto for anything else that could cause us to send different headers. |
3801 | 0 | if (NS_SUCCEEDED(elem->NodePrincipal()->Equals(NodePrincipal(), &equal)) && |
3802 | 0 | equal && elem->mCORSMode == mCORSMode) { |
3803 | 0 | // See SetupDecoder() below. We only add a element to the table when |
3804 | 0 | // mDecoder is a ChannelMediaDecoder. |
3805 | 0 | auto decoder = static_cast<ChannelMediaDecoder*>(elem->mDecoder.get()); |
3806 | 0 | NS_ASSERTION(decoder, "Decoder gone"); |
3807 | 0 | if (decoder->CanClone()) { |
3808 | 0 | return elem; |
3809 | 0 | } |
3810 | 0 | } |
3811 | 0 | } |
3812 | 0 | return nullptr; |
3813 | 0 | } |
3814 | | |
// Observes xpcom-shutdown on behalf of an HTMLMediaElement and forwards the
// notification via NotifyShutdownEvent(). Holds only a weak pointer; the
// element guarantees it outlives the subscription by calling Unsubscribe()
// in its destructor.
class HTMLMediaElement::ShutdownObserver : public nsIObserver
{
  // Lifecycle of the subscription; observations are ignored unless we are
  // currently Subscribed.
  enum class Phase : int8_t
  {
    Init,
    Subscribed,
    Unsubscribed
  };

public:
  NS_DECL_ISUPPORTS

  NS_IMETHOD Observe(nsISupports*, const char* aTopic, const char16_t*) override
  {
    if (mPhase != Phase::Subscribed) {
      // Bail out if we are not subscribed for this might be called even after
      // |nsContentUtils::UnregisterShutdownObserver(this)|.
      return NS_OK;
    }
    MOZ_DIAGNOSTIC_ASSERT(mWeak);
    if (strcmp(aTopic, NS_XPCOM_SHUTDOWN_OBSERVER_ID) == 0) {
      mWeak->NotifyShutdownEvent();
    }
    return NS_OK;
  }
  // Registers with the shutdown observer service; must be the first call.
  void Subscribe(HTMLMediaElement* aPtr)
  {
    MOZ_DIAGNOSTIC_ASSERT(mPhase == Phase::Init);
    MOZ_DIAGNOSTIC_ASSERT(!mWeak);
    mWeak = aPtr;
    nsContentUtils::RegisterShutdownObserver(this);
    mPhase = Phase::Subscribed;
  }
  // Unregisters and drops the weak element pointer; must be called before
  // destruction (asserted in the destructor).
  void Unsubscribe()
  {
    MOZ_DIAGNOSTIC_ASSERT(mPhase == Phase::Subscribed);
    MOZ_DIAGNOSTIC_ASSERT(mWeak);
    mWeak = nullptr;
    nsContentUtils::UnregisterShutdownObserver(this);
    mPhase = Phase::Unsubscribed;
  }
  // Proxies for taking/releasing a strong reference on the observed element.
  void AddRefMediaElement() { mWeak->AddRef(); }
  void ReleaseMediaElement() { mWeak->Release(); }

private:
  virtual ~ShutdownObserver()
  {
    MOZ_DIAGNOSTIC_ASSERT(mPhase == Phase::Unsubscribed);
    MOZ_DIAGNOSTIC_ASSERT(!mWeak);
  }
  // Guaranteed to be valid by HTMLMediaElement.
  HTMLMediaElement* mWeak = nullptr;
  Phase mPhase = Phase::Init;
};
3869 | | |
// ShutdownObserver only needs nsIObserver; it holds no strong references, so
// plain NS_IMPL_ISUPPORTS (no cycle collection) suffices.
NS_IMPL_ISUPPORTS(HTMLMediaElement::ShutdownObserver, nsIObserver)
3871 | | |
// Constructor: wires up main-thread targets, watches, the shutdown observer,
// and the initial volume. Note the init-list order must match member
// declaration order in the header.
HTMLMediaElement::HTMLMediaElement(
  already_AddRefed<mozilla::dom::NodeInfo>&& aNodeInfo)
  : nsGenericHTMLElement(std::move(aNodeInfo))
  , mWatchManager(this, OwnerDoc()->AbstractMainThreadFor(TaskCategory::Other))
  , mMainThreadEventTarget(OwnerDoc()->EventTargetFor(TaskCategory::Other))
  , mAbstractMainThread(OwnerDoc()->AbstractMainThreadFor(TaskCategory::Other))
  , mShutdownObserver(new ShutdownObserver)
  , mPlayed(new TimeRanges(ToSupports(OwnerDoc())))
  , mPaused(true, "HTMLMediaElement::mPaused")
  , mErrorSink(new ErrorSink(this))
  , mAudioChannelWrapper(new AudioChannelAgentCallback(this))
{
  MOZ_ASSERT(mMainThreadEventTarget);
  MOZ_ASSERT(mAbstractMainThread);

  DecoderDoctorLogger::LogConstruction(this);

  // Re-evaluate the wake lock whenever the paused state flips.
  mWatchManager.Watch(mPaused, &HTMLMediaElement::UpdateWakeLock);

  ErrorResult rv;

  // Initial volume comes from a pref, defaulting to full volume.
  double defaultVolume = Preferences::GetFloat("media.default_volume", 1.0);
  SetVolume(defaultVolume, rv);

  RegisterActivityObserver();
  NotifyOwnerDocumentActivityChanged();

  // Paired with Unsubscribe() in the destructor.
  mShutdownObserver->Subscribe(this);
}
3901 | | |
// Destructor: tears down, in order, the shutdown subscription, the video
// frame container link, activity observation, pending requests, the decoder,
// timers, stream playback, capture ports, the channel loader, and the audio
// channel wrapper. Teardown order matters; do not reorder casually.
HTMLMediaElement::~HTMLMediaElement()
{
  NS_ASSERTION(
    !mHasSelfReference,
    "How can we be destroyed if we're still holding a self reference?");

  mShutdownObserver->Unsubscribe();

  if (mVideoFrameContainer) {
    // Stop the container from calling back into this dying element.
    mVideoFrameContainer->ForgetElement();
  }
  UnregisterActivityObserver();

  // Cancel async requests that would otherwise resolve against a dead object.
  mSetCDMRequest.DisconnectIfExists();
  mAutoplayPermissionRequest.DisconnectIfExists();
  if (mDecoder) {
    ShutdownDecoder();
  }
  if (mProgressTimer) {
    StopProgress();
  }
  if (mVideoDecodeSuspendTimer) {
    mVideoDecodeSuspendTimer->Cancel();
    mVideoDecodeSuspendTimer = nullptr;
  }
  if (mSrcStream) {
    EndSrcMediaStreamPlayback();
  }

  if (mCaptureStreamPort) {
    mCaptureStreamPort->Destroy();
    mCaptureStreamPort = nullptr;
  }

  NS_ASSERTION(MediaElementTableCount(this, mLoadingSrc) == 0,
               "Destroyed media element should no longer be in element table");

  if (mChannelLoader) {
    mChannelLoader->Cancel();
  }

  if (mAudioChannelWrapper) {
    mAudioChannelWrapper->Shutdown();
    mAudioChannelWrapper = nullptr;
  }

  WakeLockRelease();

  DecoderDoctorLogger::LogDestruction(this);
}
3952 | | |
3953 | | void |
3954 | | HTMLMediaElement::StopSuspendingAfterFirstFrame() |
3955 | 0 | { |
3956 | 0 | mAllowSuspendAfterFirstFrame = false; |
3957 | 0 | if (!mSuspendedAfterFirstFrame) |
3958 | 0 | return; |
3959 | 0 | mSuspendedAfterFirstFrame = false; |
3960 | 0 | if (mDecoder) { |
3961 | 0 | mDecoder->Resume(); |
3962 | 0 | } |
3963 | 0 | } |
3964 | | |
3965 | | void |
3966 | | HTMLMediaElement::SetPlayedOrSeeked(bool aValue) |
3967 | 0 | { |
3968 | 0 | if (aValue == mHasPlayedOrSeeked) { |
3969 | 0 | return; |
3970 | 0 | } |
3971 | 0 | |
3972 | 0 | mHasPlayedOrSeeked = aValue; |
3973 | 0 |
|
3974 | 0 | // Force a reflow so that the poster frame hides or shows immediately. |
3975 | 0 | nsIFrame* frame = GetPrimaryFrame(); |
3976 | 0 | if (!frame) { |
3977 | 0 | return; |
3978 | 0 | } |
3979 | 0 | frame->PresShell()->FrameNeedsReflow( |
3980 | 0 | frame, nsIPresShell::eTreeChange, NS_FRAME_IS_DIRTY); |
3981 | 0 | } |
3982 | | |
// Tears down the decoder when XPCOM shuts down, since normal destruction
// order cannot be relied on at that point.
void
HTMLMediaElement::NotifyXPCOMShutdown()
{
  ShutdownDecoder();
}
3988 | | |
// True when the audio channel agent wants playback deferred (see Play():
// e.g. until the element's tab has been foregrounded); Play() then queues
// its promise instead of starting playback.
bool
HTMLMediaElement::AudioChannelAgentDelayingPlayback()
{
  return mAudioChannelWrapper && mAudioChannelWrapper->IsPlaybackBlocked();
}
3994 | | |
// Records telemetry about audible autoplay attempts and marks the doc tree
// as having had audible media. Called from the playback-start path.
void
HTMLMediaElement::UpdateHadAudibleAutoplayState()
{
  // If we're audible, and autoplaying...
  if ((Volume() > 0.0 && !Muted()) &&
      (!OwnerDoc()->HasBeenUserGestureActivated() || Autoplay())) {
    OwnerDoc()->SetDocTreeHadAudibleMedia();
    if (AutoplayPolicy::WouldBeAllowedToPlayIfAutoplayDisabled(*this)) {
      ScalarAdd(Telemetry::ScalarID::MEDIA_AUTOPLAY_WOULD_BE_ALLOWED_COUNT, 1);
      // Count allowed autoplay that turns out to have no audio track at all.
      if (mReadyState >= HAVE_METADATA && !HasAudio()) {
        ScalarAdd(Telemetry::ScalarID::MEDIA_ALLOWED_AUTOPLAY_NO_AUDIO_TRACK_COUNT, 1);
      }
    } else {
      // Blocked before metadata arrived: remember it so we can correct the
      // telemetry later if metadata shows it would have been inaudible.
      if (mReadyState < HAVE_METADATA) {
        mBlockedAsWithoutMetadata = true;
        ScalarAdd(Telemetry::ScalarID::MEDIA_BLOCKED_NO_METADATA, 1);
      }
      ScalarAdd(Telemetry::ScalarID::MEDIA_AUTOPLAY_WOULD_NOT_BE_ALLOWED_COUNT, 1);
    }
  }
}
4016 | | |
/**
 * Implements the HTML play() method (spec 4.8.12.8). Returns a promise that
 * is resolved once playback actually starts, or rejected when the source is
 * unsupported or playback is not allowed (possibly after prompting the user
 * for autoplay permission).
 */
already_AddRefed<Promise>
HTMLMediaElement::Play(ErrorResult& aRv)
{
  LOG(LogLevel::Debug,
      ("%p Play() called by JS readyState=%d", this, mReadyState));

  // 4.8.12.8
  // When the play() method on a media element is invoked, the user agent must
  // run the following steps.

  RefPtr<PlayPromise> promise = CreatePlayPromise(aRv);
  if (NS_WARN_IF(aRv.Failed())) {
    return nullptr;
  }

  // 4.8.12.8 - Step 1:
  // If the media element is not allowed to play, return a promise rejected
  // with a "NotAllowedError" DOMException and abort these steps.
  // NOTE: we may require requesting permission from the user, so we do the
  // "not allowed" check below.

  // 4.8.12.8 - Step 2:
  // If the media element's error attribute is not null and its code
  // attribute has the value MEDIA_ERR_SRC_NOT_SUPPORTED, return a promise
  // rejected with a "NotSupportedError" DOMException and abort these steps.
  if (GetError() && GetError()->Code() == MEDIA_ERR_SRC_NOT_SUPPORTED) {
    LOG(LogLevel::Debug,
        ("%p Play() promise rejected because source not supported.", this));
    promise->MaybeReject(NS_ERROR_DOM_MEDIA_NOT_SUPPORTED_ERR);
    return promise.forget();
  }

  // 4.8.12.8 - Step 3:
  // Let promise be a new promise and append promise to the list of pending
  // play promises.
  // Note: Promise appended to list of pending promises as needed below.

  if (AudioChannelAgentDelayingPlayback()) {
    // The audio channel agent may delay starting playback of a media resource
    // until the tab the media element is in has been in the foreground.
    // Save a reference to the promise, and return it. The AudioChannelAgent
    // will call Play() again if the tab is brought to the foreground, or the
    // audio tab indicator is clicked, which will resolve the promise if we end
    // up playing.
    LOG(LogLevel::Debug, ("%p Play() call delayed by AudioChannelAgent", this));
    MaybeDoLoad();
    mPendingPlayPromises.AppendElement(promise);
    return promise.forget();
  }

  if (AudioChannelAgentBlockedPlay()) {
    LOG(LogLevel::Debug, ("%p play blocked by AudioChannelAgent.", this));
    promise->MaybeReject(NS_ERROR_DOM_MEDIA_NOT_ALLOWED_ERR);
    DispatchEventsWhenPlayWasNotAllowed();
    return promise.forget();
  }

  // Record would-autoplay telemetry before the allowed-to-play decision.
  UpdateHadAudibleAutoplayState();

  const bool handlingUserInput = EventStateManager::IsHandlingUserInput();
  if (AutoplayPolicy::IsAllowedToPlay(*this)) {
    mPendingPlayPromises.AppendElement(promise);
    PlayInternal(handlingUserInput);
    UpdateCustomPolicyAfterPlayed();
  } else {
    // Prompt the user for permission to play.
    mPendingPlayPromises.AppendElement(promise);
    EnsureAutoplayRequested(handlingUserInput);
  }
  return promise.forget();
}
4088 | | |
/**
 * Requests autoplay permission from the user via a prompt, if not already
 * pending. On approval, playback is started and all pending play promises
 * are resolved through the normal PlayInternal() path; on denial, pending
 * play promises are rejected with NotAllowedError and a console warning is
 * reported.
 *
 * @param aHandlingUserInput true if the originating play() call ran inside
 *        a user input handler; forwarded to PlayInternal() on approval.
 */
void
HTMLMediaElement::EnsureAutoplayRequested(bool aHandlingUserInput)
{
  if (mAutoplayPermissionRequest.Exists()) {
    // Autoplay has already been requested in a previous play() call.
    // Await for the previous request to be approved or denied. This
    // play request's promise will be fulfilled with all other pending
    // promises when the permission prompt is resolved.
    AUTOPLAY_LOG("%p EnsureAutoplayRequested() existing request, bailing.", this);
    return;
  }

  RefPtr<AutoplayPermissionManager> request =
    AutoplayPolicy::RequestFor(*OwnerDoc());
  if (!request) {
    AsyncRejectPendingPlayPromises(NS_ERROR_DOM_INVALID_STATE_ERR);
    return;
  }
  // Both lambdas capture a strong reference to keep this element alive until
  // the prompt is resolved.
  RefPtr<HTMLMediaElement> self = this;
  request->RequestWithPrompt()
    ->Then(mAbstractMainThread,
           __func__,
           [ self, handlingUserInput = aHandlingUserInput, request ](
             bool aApproved) {
             self->mAutoplayPermissionRequest.Complete();
             AUTOPLAY_LOG("%p Autoplay request approved request=%p",
                          self.get(),
                          request.get());
             self->PlayInternal(handlingUserInput);
             self->UpdateCustomPolicyAfterPlayed();
           },
           [self, request](nsresult aError) {
             self->mAutoplayPermissionRequest.Complete();
             AUTOPLAY_LOG("%p Autoplay request denied request=%p",
                          self.get(),
                          request.get());
             LOG(LogLevel::Debug, ("%s rejecting play promises", __func__));
             self->AsyncRejectPendingPlayPromises(
               NS_ERROR_DOM_MEDIA_NOT_ALLOWED_ERR);
             nsContentUtils::ReportToConsole(nsIScriptError::warningFlag,
                                             NS_LITERAL_CSTRING("Media"),
                                             self->OwnerDoc(),
                                             nsContentUtils::eDOM_PROPERTIES,
                                             "BlockAutoplayError");
           })
    ->Track(mAutoplayPermissionRequest);
}
4136 | | |
4137 | | void |
4138 | | HTMLMediaElement::DispatchEventsWhenPlayWasNotAllowed() |
4139 | 0 | { |
4140 | 0 | if (StaticPrefs::MediaBlockEventEnabled()) { |
4141 | 0 | DispatchAsyncEvent(NS_LITERAL_STRING("blocked")); |
4142 | 0 | } |
4143 | | #if defined(MOZ_WIDGET_ANDROID) |
4144 | | RefPtr<AsyncEventDispatcher> asyncDispatcher = |
4145 | | new AsyncEventDispatcher(this, |
4146 | | NS_LITERAL_STRING("MozAutoplayMediaBlocked"), |
4147 | | CanBubble::eYes, |
4148 | | ChromeOnlyDispatch::eYes); |
4149 | | asyncDispatcher->PostDOMEvent(); |
4150 | | #endif |
4151 | | } |
4152 | | |
/**
 * Shared playback-start path used by Play() and by approved autoplay
 * permission requests. Runs steps 4-8 of the HTML play() algorithm:
 * starts or resumes the resource load, unpauses the decoder, fires
 * "play"/"waiting"/"playing" events, and resolves pending play promises
 * as appropriate for the current ready state.
 *
 * @param aHandlingUserInput whether this was triggered from a user input
 *        handler; if so, the element is "blessed" for future autoplay.
 */
void
HTMLMediaElement::PlayInternal(bool aHandlingUserInput)
{
  if (mPreloadAction == HTMLMediaElement::PRELOAD_NONE) {
    // The media load algorithm will be initiated by a user interaction.
    // We want to boost the channel priority for better responsiveness.
    // Note this must be done before UpdatePreloadAction() which will
    // update |mPreloadAction|.
    mUseUrgentStartForChannel = true;
  }

  StopSuspendingAfterFirstFrame();
  SetPlayedOrSeeked(true);

  // 4.8.12.8 - Step 4:
  // If the media element's networkState attribute has the value NETWORK_EMPTY,
  // invoke the media element's resource selection algorithm.
  MaybeDoLoad();
  if (mSuspendedForPreloadNone) {
    ResumeLoad(PRELOAD_ENOUGH);
  }

  // 4.8.12.8 - Step 5:
  // If the playback has ended and the direction of playback is forwards,
  // seek to the earliest possible position of the media resource.

  // Even if we just did Load() or ResumeLoad(), we could already have a decoder
  // here if we managed to clone an existing decoder.
  if (mDecoder) {
    if (mDecoder->IsEnded()) {
      SetCurrentTime(0);
    }
    if (!mPausedForInactiveDocumentOrChannel) {
      mDecoder->Play();
    }
  }

  if (mCurrentPlayRangeStart == -1.0) {
    mCurrentPlayRangeStart = CurrentTime();
  }

  const bool oldPaused = mPaused;
  mPaused = false;
  mAutoplaying = false;

  // We changed mPaused and mAutoplaying which can affect AddRemoveSelfReference
  // and our preload status.
  AddRemoveSelfReference();
  UpdatePreloadAction();
  UpdateSrcMediaStreamPlaying();

  // Once play() has been called in a user generated event handler,
  // it is allowed to autoplay. Note: we can reach here when not in
  // a user generated event handler if our readyState has not yet
  // reached HAVE_METADATA.
  mIsBlessed |= aHandlingUserInput;

  // TODO: If the playback has ended, then the user agent must set
  // seek to the effective start.

  // 4.8.12.8 - Step 6:
  // If the media element's paused attribute is true, run the following steps:
  if (oldPaused) {
    // 6.1. Change the value of paused to false. (Already done.)
    // This step is uplifted because the "block-media-playback" feature needs
    // the mPaused to be false before UpdateAudioChannelPlayingState() being
    // called.

    // 6.2. If the show poster flag is true, set the element's show poster flag
    //      to false and run the time marches on steps.

    // 6.3. Queue a task to fire a simple event named play at the element.
    DispatchAsyncEvent(NS_LITERAL_STRING("play"));

    // 6.4. If the media element's readyState attribute has the value
    //      HAVE_NOTHING, HAVE_METADATA, or HAVE_CURRENT_DATA, queue a task to
    //      fire a simple event named waiting at the element.
    //      Otherwise, the media element's readyState attribute has the value
    //      HAVE_FUTURE_DATA or HAVE_ENOUGH_DATA: notify about playing for the
    //      element.
    switch (mReadyState) {
      case HAVE_NOTHING:
        DispatchAsyncEvent(NS_LITERAL_STRING("waiting"));
        break;
      case HAVE_METADATA:
      case HAVE_CURRENT_DATA:
        FireTimeUpdate(false);
        DispatchAsyncEvent(NS_LITERAL_STRING("waiting"));
        break;
      case HAVE_FUTURE_DATA:
      case HAVE_ENOUGH_DATA:
        FireTimeUpdate(false);
        NotifyAboutPlaying();
        break;
    }
  } else if (mReadyState >= HAVE_FUTURE_DATA) {
    // 7. Otherwise, if the media element's readyState attribute has the value
    //    HAVE_FUTURE_DATA or HAVE_ENOUGH_DATA, take pending play promises and
    //    queue a task to resolve pending play promises with the result.
    AsyncResolvePendingPlayPromises();
  }

  // 8. Set the media element's autoplaying flag to false. (Already done.)

  // 9. Return promise.
  // (Done in caller.)
}
4260 | | |
4261 | | void |
4262 | | HTMLMediaElement::MaybeDoLoad() |
4263 | 0 | { |
4264 | 0 | if (mNetworkState == NETWORK_EMPTY) { |
4265 | 0 | DoLoad(); |
4266 | 0 | } |
4267 | 0 | } |
4268 | | |
4269 | | void |
4270 | | HTMLMediaElement::UpdateWakeLock() |
4271 | 0 | { |
4272 | 0 | MOZ_ASSERT(NS_IsMainThread()); |
4273 | 0 | // Ensure we have a wake lock if we're playing audibly. This ensures the |
4274 | 0 | // device doesn't sleep while playing. |
4275 | 0 | bool playing = !mPaused; |
4276 | 0 | bool isAudible = |
4277 | 0 | Volume() > 0.0 && !mMuted && mIsAudioTrackAudible; |
4278 | 0 | // WakeLock when playing audible media. |
4279 | 0 | if (playing && isAudible) { |
4280 | 0 | WakeLockCreate(); |
4281 | 0 | } else { |
4282 | 0 | WakeLockRelease(); |
4283 | 0 | } |
4284 | 0 | } |
4285 | | |
4286 | | void |
4287 | | HTMLMediaElement::WakeLockCreate() |
4288 | 0 | { |
4289 | 0 | if (!mWakeLock) { |
4290 | 0 | RefPtr<power::PowerManagerService> pmService = |
4291 | 0 | power::PowerManagerService::GetInstance(); |
4292 | 0 | NS_ENSURE_TRUE_VOID(pmService); |
4293 | 0 |
|
4294 | 0 | ErrorResult rv; |
4295 | 0 | mWakeLock = pmService->NewWakeLock( |
4296 | 0 | NS_LITERAL_STRING("audio-playing"), OwnerDoc()->GetInnerWindow(), rv); |
4297 | 0 | } |
4298 | 0 | } |
4299 | | |
4300 | | void |
4301 | | HTMLMediaElement::WakeLockRelease() |
4302 | 0 | { |
4303 | 0 | if (mWakeLock) { |
4304 | 0 | ErrorResult rv; |
4305 | 0 | mWakeLock->Unlock(rv); |
4306 | 0 | rv.SuppressException(); |
4307 | 0 | mWakeLock = nullptr; |
4308 | 0 | } |
4309 | 0 | } |
4310 | | |
// Initialize an output-stream wrapper: track IDs are allocated starting at
// 1, and no capture mode (audio-only / decoder / media-stream) is active
// until a capture source is attached.
HTMLMediaElement::OutputMediaStream::OutputMediaStream()
  : mNextAvailableTrackID(1)
  , mFinishWhenEnded(false)
  , mCapturingAudioOnly(false)
  , mCapturingDecoder(false)
  , mCapturingMediaStream(false)
{
}
4319 | | |
4320 | | HTMLMediaElement::OutputMediaStream::~OutputMediaStream() |
4321 | 0 | { |
4322 | 0 | for (auto pair : mTrackPorts) { |
4323 | 0 | pair.second()->Destroy(); |
4324 | 0 | } |
4325 | 0 | } |
4326 | | |
4327 | | void |
4328 | | HTMLMediaElement::GetEventTargetParent(EventChainPreVisitor& aVisitor) |
4329 | 0 | { |
4330 | 0 | if (!this->Controls() || !aVisitor.mEvent->mFlags.mIsTrusted) { |
4331 | 0 | nsGenericHTMLElement::GetEventTargetParent(aVisitor); |
4332 | 0 | return; |
4333 | 0 | } |
4334 | 0 | |
4335 | 0 | HTMLInputElement* el = nullptr; |
4336 | 0 | nsCOMPtr<nsINode> node; |
4337 | 0 |
|
4338 | 0 | // We will need to trap pointer, touch, and mouse events within the media |
4339 | 0 | // element, allowing media control exclusive consumption on these events, |
4340 | 0 | // and preventing the content from handling them. |
4341 | 0 | switch (aVisitor.mEvent->mMessage) { |
4342 | 0 | case ePointerDown: |
4343 | 0 | case ePointerUp: |
4344 | 0 | case eTouchEnd: |
4345 | 0 | // Always prevent touchmove captured in video element from being handled by |
4346 | 0 | // content, since we always do that for touchstart. |
4347 | 0 | case eTouchMove: |
4348 | 0 | case eTouchStart: |
4349 | 0 | case eMouseClick: |
4350 | 0 | case eMouseDoubleClick: |
4351 | 0 | case eMouseDown: |
4352 | 0 | case eMouseUp: |
4353 | 0 | aVisitor.mCanHandle = false; |
4354 | 0 | return; |
4355 | 0 |
|
4356 | 0 | // The *move events however are only comsumed when the range input is being |
4357 | 0 | // dragged. |
4358 | 0 | case ePointerMove: |
4359 | 0 | case eMouseMove: |
4360 | 0 | node = do_QueryInterface(aVisitor.mEvent->mOriginalTarget); |
4361 | 0 | if (node->IsInNativeAnonymousSubtree() || |
4362 | 0 | node->IsInUAWidget()) { |
4363 | 0 | if (node->IsHTMLElement(nsGkAtoms::input)) { |
4364 | 0 | // The node is a <input type="range"> |
4365 | 0 | el = static_cast<HTMLInputElement*>(node.get()); |
4366 | 0 | } else if (node->GetParentNode() && |
4367 | 0 | node->GetParentNode()->IsHTMLElement(nsGkAtoms::input)) { |
4368 | 0 | // The node is a child of <input type="range"> |
4369 | 0 | el = static_cast<HTMLInputElement*>(node->GetParentNode()); |
4370 | 0 | } |
4371 | 0 | } |
4372 | 0 | if (el && el->IsDraggingRange()) { |
4373 | 0 | aVisitor.mCanHandle = false; |
4374 | 0 | return; |
4375 | 0 | } |
4376 | 0 | nsGenericHTMLElement::GetEventTargetParent(aVisitor); |
4377 | 0 | return; |
4378 | 0 |
|
4379 | 0 | default: |
4380 | 0 | nsGenericHTMLElement::GetEventTargetParent(aVisitor); |
4381 | 0 | return; |
4382 | 0 | } |
4383 | 0 | } |
4384 | | |
4385 | | bool |
4386 | | HTMLMediaElement::ParseAttribute(int32_t aNamespaceID, |
4387 | | nsAtom* aAttribute, |
4388 | | const nsAString& aValue, |
4389 | | nsIPrincipal* aMaybeScriptedPrincipal, |
4390 | | nsAttrValue& aResult) |
4391 | 0 | { |
4392 | 0 | // Mappings from 'preload' attribute strings to an enumeration. |
4393 | 0 | static const nsAttrValue::EnumTable kPreloadTable[] = { |
4394 | 0 | { "", HTMLMediaElement::PRELOAD_ATTR_EMPTY }, |
4395 | 0 | { "none", HTMLMediaElement::PRELOAD_ATTR_NONE }, |
4396 | 0 | { "metadata", HTMLMediaElement::PRELOAD_ATTR_METADATA }, |
4397 | 0 | { "auto", HTMLMediaElement::PRELOAD_ATTR_AUTO }, |
4398 | 0 | { nullptr, 0 } |
4399 | 0 | }; |
4400 | 0 |
|
4401 | 0 | if (aNamespaceID == kNameSpaceID_None) { |
4402 | 0 | if (ParseImageAttribute(aAttribute, aValue, aResult)) { |
4403 | 0 | return true; |
4404 | 0 | } |
4405 | 0 | if (aAttribute == nsGkAtoms::crossorigin) { |
4406 | 0 | ParseCORSValue(aValue, aResult); |
4407 | 0 | return true; |
4408 | 0 | } |
4409 | 0 | if (aAttribute == nsGkAtoms::preload) { |
4410 | 0 | return aResult.ParseEnumValue(aValue, kPreloadTable, false); |
4411 | 0 | } |
4412 | 0 | } |
4413 | 0 | |
4414 | 0 | return nsGenericHTMLElement::ParseAttribute( |
4415 | 0 | aNamespaceID, aAttribute, aValue, aMaybeScriptedPrincipal, aResult); |
4416 | 0 | } |
4417 | | |
4418 | | void |
4419 | | HTMLMediaElement::DoneCreatingElement() |
4420 | 0 | { |
4421 | 0 | if (HasAttr(kNameSpaceID_None, nsGkAtoms::muted)) { |
4422 | 0 | mMuted |= MUTED_BY_CONTENT; |
4423 | 0 | } |
4424 | 0 | } |
4425 | | |
4426 | | bool |
4427 | | HTMLMediaElement::IsHTMLFocusable(bool aWithMouse, |
4428 | | bool* aIsFocusable, |
4429 | | int32_t* aTabIndex) |
4430 | 0 | { |
4431 | 0 | if (nsGenericHTMLElement::IsHTMLFocusable( |
4432 | 0 | aWithMouse, aIsFocusable, aTabIndex)) { |
4433 | 0 | return true; |
4434 | 0 | } |
4435 | 0 | |
4436 | 0 | *aIsFocusable = true; |
4437 | 0 | return false; |
4438 | 0 | } |
4439 | | |
// Media elements participate in the tab order by default (tabindex 0).
int32_t
HTMLMediaElement::TabIndexDefault()
{
  return 0;
}
4445 | | |
/**
 * Post-processes attribute changes: resolves media-source `src` values into
 * mSrcMediaSource, reacts to `autoplay`/`preload`/`loop` changes, and
 * notifies the UA Widget when `controls` toggles. AfterMaybeChangeAttr
 * (which may start a load via DoLoad) must run only after any update to
 * mSrcMediaSource.
 */
nsresult
HTMLMediaElement::AfterSetAttr(int32_t aNameSpaceID,
                               nsAtom* aName,
                               const nsAttrValue* aValue,
                               const nsAttrValue* aOldValue,
                               nsIPrincipal* aMaybeScriptedPrincipal,
                               bool aNotify)
{
  if (aNameSpaceID == kNameSpaceID_None) {
    if (aName == nsGkAtoms::src) {
      mSrcMediaSource = nullptr;
      mSrcAttrTriggeringPrincipal = nsContentUtils::GetAttrTriggeringPrincipal(
        this,
        aValue ? aValue->GetStringValue() : EmptyString(),
        aMaybeScriptedPrincipal);
      if (aValue) {
        nsString srcStr = aValue->GetStringValue();
        nsCOMPtr<nsIURI> uri;
        NewURIFromString(srcStr, getter_AddRefs(uri));
        // A `src` pointing at a MediaSource object URL is resolved to the
        // MediaSource itself; failure to resolve is reported to the console.
        if (uri && IsMediaSourceURI(uri)) {
          nsresult rv =
            NS_GetSourceForMediaSourceURI(uri, getter_AddRefs(mSrcMediaSource));
          if (NS_FAILED(rv)) {
            nsAutoString spec;
            GetCurrentSrc(spec);
            const char16_t* params[] = { spec.get() };
            ReportLoadError("MediaLoadInvalidURI", params, ArrayLength(params));
          }
        }
      }
    } else if (aName == nsGkAtoms::autoplay) {
      if (aNotify) {
        if (aValue) {
          StopSuspendingAfterFirstFrame();
          CheckAutoplayDataReady();
        }
        // This attribute can affect AddRemoveSelfReference
        AddRemoveSelfReference();
        UpdatePreloadAction();
      }
    } else if (aName == nsGkAtoms::preload) {
      UpdatePreloadAction();
    } else if (aName == nsGkAtoms::loop) {
      if (mDecoder) {
        mDecoder->SetLooping(!!aValue);
      }
    } else if (nsContentUtils::IsUAWidgetEnabled() &&
               aName == nsGkAtoms::controls &&
               IsInComposedDoc()) {
      AsyncEventDispatcher* dispatcher =
        new AsyncEventDispatcher(this,
                                 NS_LITERAL_STRING("UAWidgetAttributeChanged"),
                                 CanBubble::eYes,
                                 ChromeOnlyDispatch::eYes);
      // This has to happen at this tick so that UA Widget could respond
      // before returning to content script.
      dispatcher->RunDOMEventWhenSafe();
    }
  }

  // Since AfterMaybeChangeAttr may call DoLoad, make sure that it is called
  // *after* any possible changes to mSrcMediaSource.
  if (aValue) {
    AfterMaybeChangeAttr(aNameSpaceID, aName, aNotify);
  }

  return nsGenericHTMLElement::AfterSetAttr(
    aNameSpaceID, aName, aValue, aOldValue, aMaybeScriptedPrincipal, aNotify);
}
4515 | | |
4516 | | nsresult |
4517 | | HTMLMediaElement::OnAttrSetButNotChanged(int32_t aNamespaceID, |
4518 | | nsAtom* aName, |
4519 | | const nsAttrValueOrString& aValue, |
4520 | | bool aNotify) |
4521 | 0 | { |
4522 | 0 | AfterMaybeChangeAttr(aNamespaceID, aName, aNotify); |
4523 | 0 |
|
4524 | 0 | return nsGenericHTMLElement::OnAttrSetButNotChanged( |
4525 | 0 | aNamespaceID, aName, aValue, aNotify); |
4526 | 0 | } |
4527 | | |
4528 | | void |
4529 | | HTMLMediaElement::AfterMaybeChangeAttr(int32_t aNamespaceID, |
4530 | | nsAtom* aName, |
4531 | | bool aNotify) |
4532 | 0 | { |
4533 | 0 | if (aNamespaceID == kNameSpaceID_None) { |
4534 | 0 | if (aName == nsGkAtoms::src) { |
4535 | 0 | DoLoad(); |
4536 | 0 | } |
4537 | 0 | } |
4538 | 0 | } |
4539 | | |
/**
 * Called when the element is inserted into a tree. Sets up the UA Widget
 * shadow root for native controls (the bind event is dispatched eagerly on
 * Android, lazily on desktop unless `controls` is present), then refreshes
 * the preload action and decoder activity state.
 */
nsresult
HTMLMediaElement::BindToTree(nsIDocument* aDocument,
                             nsIContent* aParent,
                             nsIContent* aBindingParent)
{
  nsresult rv = nsGenericHTMLElement::BindToTree(
    aDocument, aParent, aBindingParent);

  if (nsContentUtils::IsUAWidgetEnabled() && IsInComposedDoc()) {
    // Construct Shadow Root so web content can be hidden in the DOM.
    AttachAndSetUAShadowRoot();
#ifdef ANDROID
    AsyncEventDispatcher* dispatcher =
      new AsyncEventDispatcher(this,
                               NS_LITERAL_STRING("UAWidgetBindToTree"),
                               CanBubble::eYes,
                               ChromeOnlyDispatch::eYes);
    dispatcher->RunDOMEventWhenSafe();
#else
    // We don't want to call into JS if the website never asks for native
    // video controls.
    // If controls attribute is set later, controls is constructed lazily
    // with the UAWidgetAttributeChanged event.
    // This only applies to Desktop because on Fennec we would need to show
    // an UI if the video is blocked.
    if (Controls()) {
      AsyncEventDispatcher* dispatcher =
        new AsyncEventDispatcher(this,
                                 NS_LITERAL_STRING("UAWidgetBindToTree"),
                                 CanBubble::eYes,
                                 ChromeOnlyDispatch::eYes);
      dispatcher->RunDOMEventWhenSafe();
    }
#endif
  }

  mUnboundFromTree = false;

  if (aDocument) {
    // The preload action depends on the value of the autoplay attribute.
    // It's value may have changed, so update it.
    UpdatePreloadAction();
  }

  NotifyDecoderActivityChanges();

  return rv;
}
4588 | | |
4589 | | /* static */ |
4590 | | void |
4591 | | HTMLMediaElement::VideoDecodeSuspendTimerCallback(nsITimer* aTimer, |
4592 | | void* aClosure) |
4593 | 0 | { |
4594 | 0 | MOZ_ASSERT(NS_IsMainThread()); |
4595 | 0 | auto element = static_cast<HTMLMediaElement*>(aClosure); |
4596 | 0 | element->mVideoDecodeSuspendTime.Start(); |
4597 | 0 | element->mVideoDecodeSuspendTimer = nullptr; |
4598 | 0 | } |
4599 | | |
4600 | | void |
4601 | | HTMLMediaElement::HiddenVideoStart() |
4602 | 0 | { |
4603 | 0 | MOZ_ASSERT(NS_IsMainThread()); |
4604 | 0 | mHiddenPlayTime.Start(); |
4605 | 0 | if (mVideoDecodeSuspendTimer) { |
4606 | 0 | // Already started, just keep it running. |
4607 | 0 | return; |
4608 | 0 | } |
4609 | 0 | NS_NewTimerWithFuncCallback( |
4610 | 0 | getter_AddRefs(mVideoDecodeSuspendTimer), |
4611 | 0 | VideoDecodeSuspendTimerCallback, |
4612 | 0 | this, |
4613 | 0 | StaticPrefs::MediaSuspendBkgndVideoDelayMs(), |
4614 | 0 | nsITimer::TYPE_ONE_SHOT, |
4615 | 0 | "HTMLMediaElement::VideoDecodeSuspendTimerCallback", |
4616 | 0 | mMainThreadEventTarget); |
4617 | 0 | } |
4618 | | |
4619 | | void |
4620 | | HTMLMediaElement::HiddenVideoStop() |
4621 | 0 | { |
4622 | 0 | MOZ_ASSERT(NS_IsMainThread()); |
4623 | 0 | mHiddenPlayTime.Pause(); |
4624 | 0 | mVideoDecodeSuspendTime.Pause(); |
4625 | 0 | if (!mVideoDecodeSuspendTimer) { |
4626 | 0 | return; |
4627 | 0 | } |
4628 | 0 | mVideoDecodeSuspendTimer->Cancel(); |
4629 | 0 | mVideoDecodeSuspendTimer = nullptr; |
4630 | 0 | } |
4631 | | |
/**
 * Accumulates video telemetry at teardown/unload time: the playback state
 * the video was in when the user left (ended/paused/stalled/seeking/other),
 * the dropped-frame proportion, total and hidden play time, hidden-play and
 * inferred decode-suspend percentages (keyed by audio presence and a
 * resolution bucket plus an "All" key), and inter-keyframe intervals.
 */
void
HTMLMediaElement::ReportTelemetry()
{
  // Report telemetry for videos when a page is unloaded. We
  // want to know data on what state the video is at when
  // the user has exited.
  enum UnloadedState
  {
    ENDED = 0,
    PAUSED = 1,
    STALLED = 2,
    SEEKING = 3,
    OTHER = 4
  };

  UnloadedState state = OTHER;
  if (Seeking()) {
    state = SEEKING;
  } else if (Ended()) {
    state = ENDED;
  } else if (Paused()) {
    state = PAUSED;
  } else {
    // For buffering we check if the current playback position is at the end
    // of a buffered range, within a margin of error. We also consider to be
    // buffering if the last frame status was buffering and the ready state is
    // HAVE_CURRENT_DATA to account for times where we are in a buffering state
    // regardless of what actual data we have buffered.
    bool stalled = false;
    RefPtr<TimeRanges> ranges = Buffered();
    const double errorMargin = 0.05;
    double t = CurrentTime();
    TimeRanges::index_type index = ranges->Find(t, errorMargin);
    stalled =
      index != TimeRanges::NoIndex && (ranges->End(index) - t) < errorMargin;
    stalled |= mDecoder &&
               NextFrameStatus() ==
                 MediaDecoderOwner::NEXT_FRAME_UNAVAILABLE_BUFFERING &&
               mReadyState == HAVE_CURRENT_DATA;
    if (stalled) {
      state = STALLED;
    }
  }

  Telemetry::Accumulate(Telemetry::VIDEO_UNLOAD_STATE, state);
  LOG(LogLevel::Debug, ("%p VIDEO_UNLOAD_STATE = %d", this, state));

  FrameStatisticsData data;

  if (HTMLVideoElement* vid = HTMLVideoElement::FromNodeOrNull(this)) {
    FrameStatistics* stats = vid->GetFrameStatistics();
    if (stats) {
      data = stats->GetFrameStatisticsData();
      if (data.mParsedFrames) {
        MOZ_ASSERT(data.mDroppedFrames <= data.mParsedFrames);
        // Dropped frames <= total frames, so 'percentage' cannot be higher than
        // 100 and therefore can fit in a uint32_t (that Telemetry takes).
        uint32_t percentage = 100 * data.mDroppedFrames / data.mParsedFrames;
        LOG(LogLevel::Debug,
            ("Reporting telemetry DROPPED_FRAMES_IN_VIDEO_PLAYBACK"));
        Telemetry::Accumulate(Telemetry::VIDEO_DROPPED_FRAMES_PROPORTION,
                              percentage);
      }
    }
  }

  if (mMediaInfo.HasVideo() && mMediaInfo.mVideo.mImage.height > 0) {
    // We have a valid video.
    double playTime = mPlayTime.Total();
    double hiddenPlayTime = mHiddenPlayTime.Total();
    double videoDecodeSuspendTime = mVideoDecodeSuspendTime.Total();

    Telemetry::Accumulate(Telemetry::VIDEO_PLAY_TIME_MS,
                          SECONDS_TO_MS(playTime));
    LOG(LogLevel::Debug, ("%p VIDEO_PLAY_TIME_MS = %f", this, playTime));

    Telemetry::Accumulate(Telemetry::VIDEO_HIDDEN_PLAY_TIME_MS,
                          SECONDS_TO_MS(hiddenPlayTime));
    LOG(LogLevel::Debug,
        ("%p VIDEO_HIDDEN_PLAY_TIME_MS = %f", this, hiddenPlayTime));

    if (playTime > 0.0) {
      // We have actually played something -> Report some valid-video telemetry.

      // Keyed by audio+video or video alone, and by a resolution range.
      nsCString key(mMediaInfo.HasAudio() ? "AV," : "V,");
      static const struct
      {
        int32_t mH;
        const char* mRes;
      } sResolutions[] = { { 240, "0<h<=240" }, { 480, "240<h<=480" },
                           { 576, "480<h<=576" }, { 720, "576<h<=720" },
                           { 1080, "720<h<=1080" }, { 2160, "1080<h<=2160" } };
      const char* resolution = "h>2160";
      int32_t height = mMediaInfo.mVideo.mImage.height;
      for (const auto& res : sResolutions) {
        if (height <= res.mH) {
          resolution = res.mRes;
          break;
        }
      }
      key.AppendASCII(resolution);

      uint32_t hiddenPercentage =
        uint32_t(hiddenPlayTime / playTime * 100.0 + 0.5);
      Telemetry::Accumulate(
        Telemetry::VIDEO_HIDDEN_PLAY_TIME_PERCENTAGE, key, hiddenPercentage);
      // Also accumulate all percentages in an "All" key.
      Telemetry::Accumulate(Telemetry::VIDEO_HIDDEN_PLAY_TIME_PERCENTAGE,
                            NS_LITERAL_CSTRING("All"),
                            hiddenPercentage);
      LOG(LogLevel::Debug,
          ("%p VIDEO_HIDDEN_PLAY_TIME_PERCENTAGE = %u, keys: '%s' and 'All'",
           this,
           hiddenPercentage,
           key.get()));

      uint32_t videoDecodeSuspendPercentage =
        uint32_t(videoDecodeSuspendTime / playTime * 100.0 + 0.5);
      Telemetry::Accumulate(Telemetry::VIDEO_INFERRED_DECODE_SUSPEND_PERCENTAGE,
                            key,
                            videoDecodeSuspendPercentage);
      Telemetry::Accumulate(Telemetry::VIDEO_INFERRED_DECODE_SUSPEND_PERCENTAGE,
                            NS_LITERAL_CSTRING("All"),
                            videoDecodeSuspendPercentage);
      LOG(LogLevel::Debug,
          ("%p VIDEO_INFERRED_DECODE_SUSPEND_PERCENTAGE = %u, keys: '%s' and "
           "'All'",
           this,
           videoDecodeSuspendPercentage,
           key.get()));

      if (data.mInterKeyframeCount != 0) {
        uint32_t average_ms = uint32_t(
          std::min<uint64_t>(double(data.mInterKeyframeSum_us) /
                               double(data.mInterKeyframeCount) / 1000.0 +
                               0.5,
                             UINT32_MAX));
        Telemetry::Accumulate(
          Telemetry::VIDEO_INTER_KEYFRAME_AVERAGE_MS, key, average_ms);
        Telemetry::Accumulate(Telemetry::VIDEO_INTER_KEYFRAME_AVERAGE_MS,
                              NS_LITERAL_CSTRING("All"),
                              average_ms);
        LOG(LogLevel::Debug,
            ("%p VIDEO_INTER_KEYFRAME_AVERAGE_MS = %u, keys: '%s' and 'All'",
             this,
             average_ms,
             key.get()));

        uint32_t max_ms = uint32_t(std::min<uint64_t>(
          (data.mInterKeyFrameMax_us + 500) / 1000, UINT32_MAX));
        Telemetry::Accumulate(
          Telemetry::VIDEO_INTER_KEYFRAME_MAX_MS, key, max_ms);
        Telemetry::Accumulate(Telemetry::VIDEO_INTER_KEYFRAME_MAX_MS,
                              NS_LITERAL_CSTRING("All"),
                              max_ms);
        LOG(LogLevel::Debug,
            ("%p VIDEO_INTER_KEYFRAME_MAX_MS = %u, keys: '%s' and 'All'",
             this,
             max_ms,
             key.get()));
      } else {
        // Here, we have played *some* of the video, but didn't get more than 1
        // keyframe. Report '0' if we have played for longer than the video-
        // decode-suspend delay (showing recovery would be difficult).
        uint32_t suspendDelay_ms = StaticPrefs::MediaSuspendBkgndVideoDelayMs();
        if (uint32_t(playTime * 1000.0) > suspendDelay_ms) {
          Telemetry::Accumulate(Telemetry::VIDEO_INTER_KEYFRAME_MAX_MS, key, 0);
          Telemetry::Accumulate(Telemetry::VIDEO_INTER_KEYFRAME_MAX_MS,
                                NS_LITERAL_CSTRING("All"),
                                0);
          LOG(LogLevel::Debug,
              ("%p VIDEO_INTER_KEYFRAME_MAX_MS = 0 (only 1 keyframe), keys: "
               "'%s' and 'All'",
               this,
               key.get()));
        }
      }
    }
  }
}
4813 | | |
/**
 * Called when the element is removed from a tree. Marks the element as
 * unbound/untracked, notifies the decoder of the visibility change, tells
 * the UA Widget to tear down, and queues a stable-state runnable that
 * pauses playback unless the element was re-bound in the meantime.
 */
void
HTMLMediaElement::UnbindFromTree(bool aDeep, bool aNullParent)
{
  mUnboundFromTree = true;
  mVisibilityState = Visibility::UNTRACKED;

  nsGenericHTMLElement::UnbindFromTree(aDeep, aNullParent);

  MOZ_ASSERT(IsHidden());
  NotifyDecoderActivityChanges();

  AsyncEventDispatcher* dispatcher =
    new AsyncEventDispatcher(this,
                             NS_LITERAL_STRING("UAWidgetUnbindFromTree"),
                             CanBubble::eYes,
                             ChromeOnlyDispatch::eYes);
  dispatcher->RunDOMEventWhenSafe();

  // Pause in a stable state; mUnboundFromTree is re-checked there in case
  // the element was re-inserted before the runnable ran.
  RefPtr<HTMLMediaElement> self(this);
  nsCOMPtr<nsIRunnable> task =
    NS_NewRunnableFunction("dom::HTMLMediaElement::UnbindFromTree", [self]() {
      if (self->mUnboundFromTree) {
        self->Pause();
      }
    });
  RunInStableState(task);
}
4841 | | |
4842 | | /* static */ |
4843 | | CanPlayStatus |
4844 | | HTMLMediaElement::GetCanPlay(const nsAString& aType, |
4845 | | DecoderDoctorDiagnostics* aDiagnostics) |
4846 | 0 | { |
4847 | 0 | Maybe<MediaContainerType> containerType = MakeMediaContainerType(aType); |
4848 | 0 | if (!containerType) { |
4849 | 0 | return CANPLAY_NO; |
4850 | 0 | } |
4851 | 0 | CanPlayStatus status = |
4852 | 0 | DecoderTraits::CanHandleContainerType(*containerType, aDiagnostics); |
4853 | 0 | if (status == CANPLAY_YES && |
4854 | 0 | (*containerType).ExtendedType().Codecs().IsEmpty()) { |
4855 | 0 | // Per spec: 'Generally, a user agent should never return "probably" for a |
4856 | 0 | // type that allows the `codecs` parameter if that parameter is not |
4857 | 0 | // present.' As all our currently-supported types allow for `codecs`, we can |
4858 | 0 | // do this check here. |
4859 | 0 | // TODO: Instead, missing `codecs` should be checked in each decoder's |
4860 | 0 | // `IsSupportedType` call from `CanHandleCodecsType()`. |
4861 | 0 | // See bug 1399023. |
4862 | 0 | return CANPLAY_MAYBE; |
4863 | 0 | } |
4864 | 0 | return status; |
4865 | 0 | } |
4866 | | |
4867 | | void |
4868 | | HTMLMediaElement::CanPlayType(const nsAString& aType, nsAString& aResult) |
4869 | 0 | { |
4870 | 0 | DecoderDoctorDiagnostics diagnostics; |
4871 | 0 | CanPlayStatus canPlay = GetCanPlay(aType, &diagnostics); |
4872 | 0 | diagnostics.StoreFormatDiagnostics( |
4873 | 0 | OwnerDoc(), aType, canPlay != CANPLAY_NO, __func__); |
4874 | 0 | switch (canPlay) { |
4875 | 0 | case CANPLAY_NO: |
4876 | 0 | aResult.Truncate(); |
4877 | 0 | break; |
4878 | 0 | case CANPLAY_YES: |
4879 | 0 | aResult.AssignLiteral("probably"); |
4880 | 0 | break; |
4881 | 0 | case CANPLAY_MAYBE: |
4882 | 0 | aResult.AssignLiteral("maybe"); |
4883 | 0 | break; |
4884 | 0 | default: |
4885 | 0 | MOZ_ASSERT_UNREACHABLE("Unexpected case."); |
4886 | 0 | break; |
4887 | 0 | } |
4888 | 0 |
|
4889 | 0 | LOG(LogLevel::Debug, |
4890 | 0 | ("%p CanPlayType(%s) = \"%s\"", |
4891 | 0 | this, |
4892 | 0 | NS_ConvertUTF16toUTF8(aType).get(), |
4893 | 0 | NS_ConvertUTF16toUTF8(aResult).get())); |
4894 | 0 | } |
4895 | | |
// Diagnostic helper: in MOZ_DIAGNOSTIC_ASSERT_ENABLED builds, deliberately
// crash with a snapshot of the element's load state when readyState is not
// HAVE_NOTHING. A no-op in all other builds.
void
HTMLMediaElement::AssertReadyStateIsNothing()
{
#ifdef MOZ_DIAGNOSTIC_ASSERT_ENABLED
  if (mReadyState != HAVE_NOTHING) {
    // Format the most relevant load-state fields into a buffer so the crash
    // annotation explains how we reached this state.
    char buf[1024];
    SprintfLiteral(buf,
                   "readyState=%d networkState=%d mLoadWaitStatus=%d "
                   "mSourceLoadCandidate=%d "
                   "mIsLoadingFromSourceChildren=%d mPreloadAction=%d "
                   "mSuspendedForPreloadNone=%d error=%d",
                   int(mReadyState),
                   int(mNetworkState),
                   int(mLoadWaitStatus),
                   !!mSourceLoadCandidate,
                   mIsLoadingFromSourceChildren,
                   int(mPreloadAction),
                   mSuspendedForPreloadNone,
                   GetError() ? GetError()->Code() : 0);
    MOZ_CRASH_UNSAFE_PRINTF("ReadyState should be HAVE_NOTHING! %s", buf);
  }
#endif
}
4919 | | |
4920 | | void |
4921 | | HTMLMediaElement::AttachAndSetUAShadowRoot() |
4922 | 0 | { |
4923 | 0 | if (GetShadowRoot()) { |
4924 | 0 | MOZ_ASSERT(GetShadowRoot()->IsUAWidget()); |
4925 | 0 | return; |
4926 | 0 | } |
4927 | 0 |
|
4928 | 0 | // Add a closed shadow root to host video controls |
4929 | 0 | RefPtr<ShadowRoot> shadowRoot = |
4930 | 0 | AttachShadowWithoutNameChecks(ShadowRootMode::Closed); |
4931 | 0 | shadowRoot->SetIsUAWidget(); |
4932 | 0 | } |
4933 | | |
4934 | | nsresult |
4935 | | HTMLMediaElement::InitializeDecoderAsClone(ChannelMediaDecoder* aOriginal) |
4936 | 0 | { |
4937 | 0 | NS_ASSERTION(mLoadingSrc, "mLoadingSrc must already be set"); |
4938 | 0 | NS_ASSERTION(mDecoder == nullptr, "Shouldn't have a decoder"); |
4939 | 0 | AssertReadyStateIsNothing(); |
4940 | 0 |
|
4941 | 0 | MediaDecoderInit decoderInit(this, |
4942 | 0 | mMuted ? 0.0 : mVolume, |
4943 | 0 | mPreservesPitch, |
4944 | 0 | mPlaybackRate, |
4945 | 0 | mPreloadAction == |
4946 | 0 | HTMLMediaElement::PRELOAD_METADATA, |
4947 | 0 | mHasSuspendTaint, |
4948 | 0 | HasAttr(kNameSpaceID_None, nsGkAtoms::loop), |
4949 | 0 | aOriginal->ContainerType()); |
4950 | 0 |
|
4951 | 0 | RefPtr<ChannelMediaDecoder> decoder = aOriginal->Clone(decoderInit); |
4952 | 0 | if (!decoder) |
4953 | 0 | return NS_ERROR_FAILURE; |
4954 | 0 | |
4955 | 0 | LOG(LogLevel::Debug, |
4956 | 0 | ("%p Cloned decoder %p from %p", this, decoder.get(), aOriginal)); |
4957 | 0 |
|
4958 | 0 | return FinishDecoderSetup(decoder); |
4959 | 0 | } |
4960 | | |
// Load the resource into aDecoder (forwarding aArgs to its Load() overload)
// and finish hooking the decoder up to this element. On load failure the
// decoder is shut down and the error is returned. For ChannelMediaDecoder
// only, a successful setup also registers this element in the media-element
// URI table so later loads of the same URI can clone the decoder.
template<typename DecoderType, typename... LoadArgs>
nsresult
HTMLMediaElement::SetupDecoder(DecoderType* aDecoder, LoadArgs&&... aArgs)
{
  LOG(LogLevel::Debug,
      ("%p Created decoder %p for type %s",
       this,
       aDecoder,
       aDecoder->ContainerType().OriginalString().Data()));

  nsresult rv = aDecoder->Load(std::forward<LoadArgs>(aArgs)...);
  if (NS_FAILED(rv)) {
    // The decoder never became usable; shut it down before bailing out.
    aDecoder->Shutdown();
    LOG(LogLevel::Debug, ("%p Failed to load for decoder %p", this, aDecoder));
    return rv;
  }

  rv = FinishDecoderSetup(aDecoder);
  // Only ChannelMediaDecoder supports resource cloning.
  if (IsSame<DecoderType, ChannelMediaDecoder>::value && NS_SUCCEEDED(rv)) {
    AddMediaElementToURITable();
    NS_ASSERTION(
      MediaElementTableCount(this, mLoadingSrc) == 1,
      "Media element should have single table entry if decode initialized");
  }

  return rv;
}
4989 | | |
// Create and set up a decoder that reads from aChannel. The channel's
// Content-Type selects the decoder (HLS on Android, otherwise a
// ChannelMediaDecoder); *aListener receives the stream listener the caller
// must attach to the channel. Reports can-play diagnostics either way.
nsresult
HTMLMediaElement::InitializeDecoderForChannel(nsIChannel* aChannel,
                                              nsIStreamListener** aListener)
{
  NS_ASSERTION(mLoadingSrc, "mLoadingSrc must already be set");
  AssertReadyStateIsNothing();

  DecoderDoctorDiagnostics diagnostics;

  nsAutoCString mimeType;
  aChannel->GetContentType(mimeType);
  NS_ASSERTION(!mimeType.IsEmpty(), "We should have the Content-Type.");
  NS_ConvertUTF8toUTF16 mimeUTF16(mimeType);

  // `self` keeps this element alive for the duration of the lambdas below;
  // `&` additionally captures diagnostics/mimeUTF16 by reference, which is
  // safe because reportCanPlay is only invoked synchronously in this call.
  RefPtr<HTMLMediaElement> self = this;
  auto reportCanPlay = [&, self](bool aCanPlay) {
    diagnostics.StoreFormatDiagnostics(
      self->OwnerDoc(), mimeUTF16, aCanPlay, __func__);
    if (!aCanPlay) {
      nsAutoString src;
      self->GetCurrentSrc(src);
      const char16_t* params[] = { mimeUTF16.get(), src.get() };
      self->ReportLoadError(
        "MediaLoadUnsupportedMimeType", params, ArrayLength(params));
    }
  };

  // Whatever the outcome, mark the channel loader as done on exit.
  auto onExit = MakeScopeExit([self] {
    if (self->mChannelLoader) {
      self->mChannelLoader->Done();
      self->mChannelLoader = nullptr;
    }
  });

  Maybe<MediaContainerType> containerType = MakeMediaContainerType(mimeType);
  if (!containerType) {
    reportCanPlay(false);
    return NS_ERROR_FAILURE;
  }

  MediaDecoderInit decoderInit(this,
                               mMuted ? 0.0 : mVolume,
                               mPreservesPitch,
                               mPlaybackRate,
                               mPreloadAction ==
                                 HTMLMediaElement::PRELOAD_METADATA,
                               mHasSuspendTaint,
                               HasAttr(kNameSpaceID_None, nsGkAtoms::loop),
                               *containerType);

#ifdef MOZ_ANDROID_HLS_SUPPORT
  // HLS is handled by a dedicated decoder on Android builds.
  if (HLSDecoder::IsSupportedType(*containerType)) {
    RefPtr<HLSDecoder> decoder = new HLSDecoder(decoderInit);
    reportCanPlay(true);
    return SetupDecoder(decoder.get(), aChannel);
  }
#endif

  RefPtr<ChannelMediaDecoder> decoder =
    ChannelMediaDecoder::Create(decoderInit, &diagnostics);
  if (!decoder) {
    reportCanPlay(false);
    return NS_ERROR_FAILURE;
  }

  reportCanPlay(true);
  bool isPrivateBrowsing = NodePrincipal()->GetPrivateBrowsingId() > 0;
  return SetupDecoder(decoder.get(), aChannel, isPrivateBrowsing, aListener);
}
5059 | | |
// Final wiring of a freshly created decoder to this element: network state,
// activity/principal notifications, captured output streams, EME proxy,
// channel-loader completion, and initial suspend/play decisions. The
// statement order below is significant; see the inline comments.
nsresult
HTMLMediaElement::FinishDecoderSetup(MediaDecoder* aDecoder)
{
  ChangeNetworkState(NETWORK_LOADING);

  // Set mDecoder now so if methods like GetCurrentSrc get called between
  // here and Load(), they work.
  SetDecoder(aDecoder);

  // Notify the decoder of the initial activity status.
  NotifyDecoderActivityChanges();

  // Update decoder principal before we start decoding, since it
  // can affect how we feed data to MediaStreams
  NotifyDecoderPrincipalChanged();

  // Attach the decoder to every captured output stream that is not already
  // capturing from a MediaStream source.
  for (OutputMediaStream& ms : mOutputStreams) {
    if (ms.mCapturingMediaStream) {
      MOZ_ASSERT(!ms.mCapturingDecoder);
      continue;
    }

    ms.mCapturingDecoder = true;
    aDecoder->AddOutputStream(ms.mStream->GetInputStream()->AsProcessedStream(),
                              ms.mNextAvailableTrackID,
                              ms.mFinishWhenEnded);
  }

  if (mMediaKeys) {
    if (mMediaKeys->GetCDMProxy()) {
      mDecoder->SetCDMProxy(mMediaKeys->GetCDMProxy());
    } else {
      // CDM must have crashed.
      ShutdownDecoder();
      return NS_ERROR_FAILURE;
    }
  }

  if (mChannelLoader) {
    mChannelLoader->Done();
    mChannelLoader = nullptr;
  }

  // We may want to suspend the new stream now.
  // This will also do an AddRemoveSelfReference.
  NotifyOwnerDocumentActivityChanged();

  if (mPausedForInactiveDocumentOrChannel) {
    mDecoder->Suspend();
  }

  if (!mPaused) {
    SetPlayedOrSeeked(true);
    // Only start playing if nothing forced us into a paused state above.
    if (!mPausedForInactiveDocumentOrChannel) {
      mDecoder->Play();
    }
  }

  return NS_OK;
}
5120 | | |
// Listener attached to the element's source MediaStream. Graph-thread
// notifications are trampolined to the main thread; mMutex guards only the
// cross-thread mPendingNotifyOutput flag, while the element pointer and
// state booleans are main-thread-only.
class HTMLMediaElement::StreamListener : public MediaStreamListener
{
public:
  StreamListener(HTMLMediaElement* aElement, const char* aName)
    : mElement(aElement)
    , mHaveCurrentData(false)
    , mFinished(false)
    , mMutex(aName)
    , mPendingNotifyOutput(false)
  {
  }
  // Detach from the element (main thread). Subsequent notifications become
  // no-ops; the element's ready state is re-evaluated once on detach.
  void Forget()
  {
    if (mElement) {
      HTMLMediaElement* element = mElement;
      mElement = nullptr;
      element->UpdateReadyStateInternal();
    }
  }

  // Main thread

  // Frame availability as seen by the ready-state machinery: frames are
  // available only while attached, after first data, and before finish.
  MediaDecoderOwner::NextFrameStatus NextFrameStatus()
  {
    if (!mElement || !mHaveCurrentData || mFinished) {
      return MediaDecoderOwner::NEXT_FRAME_UNAVAILABLE;
    }
    return MediaDecoderOwner::NEXT_FRAME_AVAILABLE;
  }

  // Main-thread half of NotifyOutput: clear the pending flag under the lock,
  // then fire a timeupdate if we are still attached and have data.
  void DoNotifyOutput()
  {
    {
      MutexAutoLock lock(mMutex);
      mPendingNotifyOutput = false;
    }
    if (mElement && mHaveCurrentData) {
      RefPtr<HTMLMediaElement> kungFuDeathGrip = mElement;
      kungFuDeathGrip->FireTimeUpdate(true);
    }
  }
  // Main-thread half of NotifyHasCurrentData: record that data exists and
  // let the element fire loadeddata/timeupdate machinery.
  void DoNotifyHaveCurrentData()
  {
    mHaveCurrentData = true;
    if (mElement) {
      RefPtr<HTMLMediaElement> kungFuDeathGrip = mElement;
      kungFuDeathGrip->FirstFrameLoaded();
      kungFuDeathGrip->UpdateReadyStateInternal();
    }
    DoNotifyOutput();
  }

  // These notifications run on the media graph thread so we need to
  // dispatch events to the main thread.
  virtual void NotifyHasCurrentData(MediaStreamGraph* aGraph) override
  {
    MutexAutoLock lock(mMutex);
    aGraph->DispatchToMainThreadAfterStreamStateUpdate(NewRunnableMethod(
      "dom::HTMLMediaElement::StreamListener::DoNotifyHaveCurrentData",
      this,
      &StreamListener::DoNotifyHaveCurrentData));
  }
  virtual void NotifyOutput(MediaStreamGraph* aGraph,
                            GraphTime aCurrentTime) override
  {
    MutexAutoLock lock(mMutex);
    // Coalesce output notifications: at most one main-thread runnable is in
    // flight at a time.
    if (mPendingNotifyOutput)
      return;
    mPendingNotifyOutput = true;
    aGraph->DispatchToMainThreadAfterStreamStateUpdate(
      NewRunnableMethod("dom::HTMLMediaElement::StreamListener::DoNotifyOutput",
                        this,
                        &StreamListener::DoNotifyOutput));
  }

private:
  // These fields may only be accessed on the main thread
  HTMLMediaElement* mElement;
  bool mHaveCurrentData;
  bool mFinished;

  // mMutex protects the fields below; they can be accessed on any thread
  Mutex mMutex;
  bool mPendingNotifyOutput;
};
5206 | | |
// One-shot callback fired when the source DOMMediaStream's tracks become
// available; forwards to the element. The element is held via WeakPtr, so a
// late notification after the element is gone is safely ignored.
class HTMLMediaElement::MediaStreamTracksAvailableCallback
  : public OnTracksAvailableCallback
{
public:
  explicit MediaStreamTracksAvailableCallback(HTMLMediaElement* aElement)
    : OnTracksAvailableCallback()
    , mElement(aElement)
  {
  }
  virtual void NotifyTracksAvailable(DOMMediaStream* aStream) override
  {
    NS_ASSERTION(NS_IsMainThread(), "Should be on main thread.");

    // The weak pointer may have been cleared if the element died.
    if (!mElement) {
      return;
    }
    mElement->NotifyMediaStreamTracksAvailable(aStream);
  }

private:
  WeakPtr<HTMLMediaElement> mElement;
};
5229 | | |
// Track-list listener for the element's source DOMMediaStream: forwards
// track add/remove and stream active/inactive transitions to the element.
// Holds a raw element pointer; the element unregisters this listener before
// it goes away (see EndSrcMediaStreamPlayback).
class HTMLMediaElement::MediaStreamTrackListener
  : public DOMMediaStream::TrackListener
{
public:
  explicit MediaStreamTrackListener(HTMLMediaElement* aElement)
    : mElement(aElement)
  {
  }

  void NotifyTrackAdded(const RefPtr<MediaStreamTrack>& aTrack) override
  {
    mElement->NotifyMediaStreamTrackAdded(aTrack);
  }

  void NotifyTrackRemoved(const RefPtr<MediaStreamTrack>& aTrack) override
  {
    mElement->NotifyMediaStreamTrackRemoved(aTrack);
  }

  void NotifyActive() override
  {
    LOG(LogLevel::Debug,
        ("%p, mSrcStream %p became active",
         mElement,
         mElement->mSrcStream.get()));
    mElement->CheckAutoplayDataReady();
  }

  void NotifyInactive() override
  {
    LOG(LogLevel::Debug,
        ("%p, mSrcStream %p became inactive",
         mElement,
         mElement->mSrcStream.get()));
    MOZ_ASSERT(!mElement->mSrcStream->Active());
    // Detach the stream listener first so stale data notifications cannot
    // arrive after playback has ended.
    if (mElement->mMediaStreamListener) {
      mElement->mMediaStreamListener->Forget();
    }
    mElement->PlaybackEnded();
  }

protected:
  HTMLMediaElement* const mElement;
};
5274 | | |
// Reconcile whether the source MediaStream should currently be feeding this
// element (listeners, audio output, video output, captured-stream mute
// state) with whether it actually is, attaching or detaching everything as
// needed. Pass REMOVING_SRC_STREAM in aFlags to force teardown.
void
HTMLMediaElement::UpdateSrcMediaStreamPlaying(uint32_t aFlags)
{
  if (!mSrcStream) {
    return;
  }
  // We might be in cycle collection with mSrcStream->GetPlaybackStream()
  // already returning null due to unlinking.

  MediaStream* stream = GetSrcMediaStream();
  bool shouldPlay = !(aFlags & REMOVING_SRC_STREAM) && !mPaused &&
                    !mPausedForInactiveDocumentOrChannel && stream;
  if (shouldPlay == mSrcStreamIsPlaying) {
    // Already in the desired state; nothing to attach or detach.
    return;
  }
  mSrcStreamIsPlaying = shouldPlay;

  LOG(LogLevel::Debug,
      ("MediaElement %p %s playback of DOMMediaStream %p",
       this,
       shouldPlay ? "Setting up" : "Removing",
       mSrcStream.get()));

  if (shouldPlay) {
    // -1 marks "not paused"; a real pause time is recorded on teardown.
    mSrcStreamPausedCurrentTime = -1;

    mMediaStreamListener =
      new StreamListener(this, "HTMLMediaElement::mMediaStreamListener");
    stream->AddListener(mMediaStreamListener);

    stream->AddAudioOutput(this);
    SetVolumeInternal();

    VideoFrameContainer* container = GetVideoFrameContainer();
    if (mSelectedVideoStreamTrack && container) {
      mSelectedVideoStreamTrack->AddVideoOutput(container);
    }

    SetCapturedOutputStreamsEnabled(true); // Unmute
    // If the input is a media stream, we don't check its data and always regard
    // it as audible when it's playing.
    SetAudibleState(true);
  } else {
    if (stream) {
      // Remember where we stopped so CurrentTime() stays stable while paused.
      mSrcStreamPausedCurrentTime = CurrentTime();

      stream->RemoveListener(mMediaStreamListener);

      stream->RemoveAudioOutput(this);
      VideoFrameContainer* container = GetVideoFrameContainer();
      if (mSelectedVideoStreamTrack && container) {
        mSelectedVideoStreamTrack->RemoveVideoOutput(container);
      }

      SetCapturedOutputStreamsEnabled(false); // Mute
    }
    // If stream is null, then DOMMediaStream::Destroy must have been
    // called and that will remove all listeners/outputs.

    mMediaStreamListener->Forget();
    mMediaStreamListener = nullptr;
  }
}
5338 | | |
// Begin playback from a DOMMediaStream source: store the stream, start
// playing if appropriate, mirror its existing tracks into our track lists,
// and register for track/principal change notifications. Metadata-style
// events are driven by the stream's data callbacks rather than a decoder.
void
HTMLMediaElement::SetupSrcMediaStreamPlayback(DOMMediaStream* aStream)
{
  NS_ASSERTION(!mSrcStream && !mMediaStreamListener &&
               !mMediaStreamSizeListener,
               "Should have been ended already");

  mSrcStream = aStream;

  // Without an inner window there is nothing to bind playback to.
  nsPIDOMWindowInner* window = OwnerDoc()->GetInnerWindow();
  if (!window) {
    return;
  }

  UpdateSrcMediaStreamPlaying();

  // If we pause this media element, track changes in the underlying stream
  // will continue to fire events at this element and alter its track list.
  // That's simpler than delaying the events, but probably confusing...
  nsTArray<RefPtr<MediaStreamTrack>> tracks;
  mSrcStream->GetTracks(tracks);
  for (const RefPtr<MediaStreamTrack>& track : tracks) {
    NotifyMediaStreamTrackAdded(track);
  }

  mSrcStream->OnTracksAvailable(new MediaStreamTracksAvailableCallback(this));
  mMediaStreamTrackListener = new MediaStreamTrackListener(this);
  mSrcStream->RegisterTrackListener(mMediaStreamTrackListener);

  mSrcStream->AddPrincipalChangeObserver(this);
  mSrcStreamVideoPrincipal = mSrcStream->GetVideoPrincipal();

  ChangeNetworkState(NETWORK_IDLE);
  ChangeDelayLoadStatus(false);
  CheckAutoplayDataReady();

  // FirstFrameLoaded() will be called when the stream has current data.
}
5377 | | |
// Tear down everything SetupSrcMediaStreamPlayback established: playback,
// size/track listeners, principal observation, and any ports feeding
// captured output streams. Leaves mSrcStream null.
void
HTMLMediaElement::EndSrcMediaStreamPlayback()
{
  MOZ_ASSERT(mSrcStream);

  UpdateSrcMediaStreamPlaying(REMOVING_SRC_STREAM);

  if (mMediaStreamSizeListener) {
    MOZ_ASSERT(mSelectedVideoStreamTrack);
    // Guarded anyway for release builds where the assert is compiled out.
    if (mSelectedVideoStreamTrack) {
      mSelectedVideoStreamTrack->RemoveDirectListener(mMediaStreamSizeListener);
    }
    mMediaStreamSizeListener->Forget();
  }
  mSelectedVideoStreamTrack = nullptr;
  mMediaStreamSizeListener = nullptr;

  mSrcStream->UnregisterTrackListener(mMediaStreamTrackListener);
  mMediaStreamTrackListener = nullptr;
  mSrcStreamTracksAvailable = false;

  mSrcStream->RemovePrincipalChangeObserver(this);
  mSrcStreamVideoPrincipal = nullptr;

  // Destroy the ports that connected the source stream's tracks to any
  // captured output streams.
  for (OutputMediaStream& ms : mOutputStreams) {
    for (auto pair : ms.mTrackPorts) {
      pair.second()->Destroy();
    }
    ms.mTrackPorts.Clear();
  }

  mSrcStream = nullptr;
}
5411 | | |
5412 | | static already_AddRefed<AudioTrack> |
5413 | | CreateAudioTrack(AudioStreamTrack* aStreamTrack, nsIGlobalObject* aOwnerGlobal) |
5414 | 0 | { |
5415 | 0 | nsAutoString id; |
5416 | 0 | nsAutoString label; |
5417 | 0 | aStreamTrack->GetId(id); |
5418 | 0 | aStreamTrack->GetLabel(label, CallerType::System); |
5419 | 0 |
|
5420 | 0 | return MediaTrackList::CreateAudioTrack( |
5421 | 0 | aOwnerGlobal, id, NS_LITERAL_STRING("main"), label, EmptyString(), true); |
5422 | 0 | } |
5423 | | |
5424 | | static already_AddRefed<VideoTrack> |
5425 | | CreateVideoTrack(VideoStreamTrack* aStreamTrack, nsIGlobalObject* aOwnerGlobal) |
5426 | 0 | { |
5427 | 0 | nsAutoString id; |
5428 | 0 | nsAutoString label; |
5429 | 0 | aStreamTrack->GetId(id); |
5430 | 0 | aStreamTrack->GetLabel(label, CallerType::System); |
5431 | 0 |
|
5432 | 0 | return MediaTrackList::CreateVideoTrack(aOwnerGlobal, |
5433 | 0 | id, |
5434 | 0 | NS_LITERAL_STRING("main"), |
5435 | 0 | label, |
5436 | 0 | EmptyString(), |
5437 | 0 | aStreamTrack); |
5438 | 0 | } |
5439 | | |
5440 | | void |
5441 | | HTMLMediaElement::NotifyMediaStreamTrackAdded( |
5442 | | const RefPtr<MediaStreamTrack>& aTrack) |
5443 | 0 | { |
5444 | 0 | MOZ_ASSERT(aTrack); |
5445 | 0 |
|
5446 | 0 | if (aTrack->Ended()) { |
5447 | 0 | return; |
5448 | 0 | } |
5449 | 0 | |
5450 | | #ifdef DEBUG |
5451 | | nsString id; |
5452 | | aTrack->GetId(id); |
5453 | | |
5454 | | LOG(LogLevel::Debug, |
5455 | | ("%p, Adding %sTrack with id %s", |
5456 | | this, |
5457 | | aTrack->AsAudioStreamTrack() ? "Audio" : "Video", |
5458 | | NS_ConvertUTF16toUTF8(id).get())); |
5459 | | #endif |
5460 | | |
5461 | 0 | if (AudioStreamTrack* t = aTrack->AsAudioStreamTrack()) { |
5462 | 0 | RefPtr<AudioTrack> audioTrack = |
5463 | 0 | CreateAudioTrack(t, AudioTracks()->GetOwnerGlobal()); |
5464 | 0 | AudioTracks()->AddTrack(audioTrack); |
5465 | 0 | } else if (VideoStreamTrack* t = aTrack->AsVideoStreamTrack()) { |
5466 | 0 | // TODO: Fix this per the spec on bug 1273443. |
5467 | 0 | if (!IsVideo()) { |
5468 | 0 | return; |
5469 | 0 | } |
5470 | 0 | RefPtr<VideoTrack> videoTrack = |
5471 | 0 | CreateVideoTrack(t, VideoTracks()->GetOwnerGlobal()); |
5472 | 0 | VideoTracks()->AddTrack(videoTrack); |
5473 | 0 | // New MediaStreamTrack added, set the new added video track as selected |
5474 | 0 | // video track when there is no selected track. |
5475 | 0 | if (VideoTracks()->SelectedIndex() == -1) { |
5476 | 0 | MOZ_ASSERT(!mSelectedVideoStreamTrack); |
5477 | 0 | videoTrack->SetEnabledInternal(true, MediaTrack::FIRE_NO_EVENTS); |
5478 | 0 | } |
5479 | 0 | } |
5480 | 0 |
|
5481 | 0 | UpdateReadyStateInternal(); |
5482 | 0 | } |
5483 | | |
5484 | | void |
5485 | | HTMLMediaElement::NotifyMediaStreamTrackRemoved( |
5486 | | const RefPtr<MediaStreamTrack>& aTrack) |
5487 | 0 | { |
5488 | 0 | MOZ_ASSERT(aTrack); |
5489 | 0 |
|
5490 | 0 | nsAutoString id; |
5491 | 0 | aTrack->GetId(id); |
5492 | 0 |
|
5493 | 0 | LOG(LogLevel::Debug, |
5494 | 0 | ("%p, Removing %sTrack with id %s", |
5495 | 0 | this, |
5496 | 0 | aTrack->AsAudioStreamTrack() ? "Audio" : "Video", |
5497 | 0 | NS_ConvertUTF16toUTF8(id).get())); |
5498 | 0 |
|
5499 | 0 | if (MediaTrack* t = AudioTracks()->GetTrackById(id)) { |
5500 | 0 | AudioTracks()->RemoveTrack(t); |
5501 | 0 | } else if (MediaTrack* t = VideoTracks()->GetTrackById(id)) { |
5502 | 0 | VideoTracks()->RemoveTrack(t); |
5503 | 0 | } else { |
5504 | 0 | NS_ASSERTION(aTrack->AsVideoStreamTrack() && !IsVideo(), |
5505 | 0 | "MediaStreamTrack ended but did not exist in track lists. " |
5506 | 0 | "This is only allowed if a video element ends and we are an " |
5507 | 0 | "audio element."); |
5508 | 0 | return; |
5509 | 0 | } |
5510 | 0 | } |
5511 | | |
5512 | | void |
5513 | | HTMLMediaElement::ProcessMediaFragmentURI() |
5514 | 0 | { |
5515 | 0 | nsMediaFragmentURIParser parser(mLoadingSrc); |
5516 | 0 |
|
5517 | 0 | if (mDecoder && parser.HasEndTime()) { |
5518 | 0 | mFragmentEnd = parser.GetEndTime(); |
5519 | 0 | } |
5520 | 0 |
|
5521 | 0 | if (parser.HasStartTime()) { |
5522 | 0 | SetCurrentTime(parser.GetStartTime()); |
5523 | 0 | mFragmentStart = parser.GetStartTime(); |
5524 | 0 | } |
5525 | 0 | } |
5526 | | |
// Decoder callback: metadata (duration, track info, tags) is now known.
// Advances readyState to HAVE_METADATA, fires durationchange/resize/
// loadedmetadata, handles media fragments and EME 'encrypted' events, and
// finally hooks enabled tracks into any captured output streams. Event and
// state-transition order here follows the HTML spec; do not reorder.
void
HTMLMediaElement::MetadataLoaded(const MediaInfo* aInfo,
                                 UniquePtr<const MetadataTags> aTags)
{
  MOZ_ASSERT(NS_IsMainThread());

  SetMediaInfo(*aInfo);

  mIsEncrypted =
    aInfo->IsEncrypted() || mPendingEncryptedInitData.IsEncrypted();
  mTags = std::move(aTags);
  mLoadedDataFired = false;
  ChangeReadyState(HAVE_METADATA);

  DispatchAsyncEvent(NS_LITERAL_STRING("durationchange"));
  if (IsVideo() && HasVideo()) {
    DispatchAsyncEvent(NS_LITERAL_STRING("resize"));
  }
  NS_ASSERTION(!HasVideo() || (mMediaInfo.mVideo.mDisplay.width > 0 &&
                               mMediaInfo.mVideo.mDisplay.height > 0),
               "Video resolution must be known on 'loadedmetadata'");
  DispatchAsyncEvent(NS_LITERAL_STRING("loadedmetadata"));

  // Telemetry: element was autoplay-blocked pending metadata but turned out
  // to have no audio track at all.
  if (mBlockedAsWithoutMetadata && !HasAudio()) {
    mBlockedAsWithoutMetadata = false;
    ScalarAdd(Telemetry::ScalarID::MEDIA_BLOCKED_NO_METADATA_ENDUP_NO_AUDIO_TRACK, 1);
  }

  if (mDecoder && mDecoder->IsTransportSeekable() &&
      mDecoder->IsMediaSeekable()) {
    ProcessMediaFragmentURI();
    mDecoder->SetFragmentEndTime(mFragmentEnd);
  }
  if (mIsEncrypted) {
    // We only support playback of encrypted content via MSE by default.
    if (!mMediaSource && Preferences::GetBool("media.eme.mse-only", true)) {
      DecodeError(
        MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
                    "Encrypted content not supported outside of MSE"));
      return;
    }

    // Dispatch a distinct 'encrypted' event for each initData we have.
    for (const auto& initData : mPendingEncryptedInitData.mInitDatas) {
      DispatchEncrypted(initData.mInitData, initData.mType);
    }
    mPendingEncryptedInitData.Reset();
  }

  if (IsVideo() && aInfo->HasVideo()) {
    // We are a video element playing video so update the screen wakelock
    NotifyOwnerDocumentActivityChanged();
  }

  // A pending #t= start position recorded before metadata was available.
  if (mDefaultPlaybackStartPosition != 0.0) {
    SetCurrentTime(mDefaultPlaybackStartPosition);
    mDefaultPlaybackStartPosition = 0.0;
  }

  UpdateReadyStateInternal();

  if (!mSrcStream) {
    return;
  }
  // MediaStream source: feed every enabled audio track (and, for video
  // elements, the selected video track) into each captured output stream.
  for (OutputMediaStream& ms : mOutputStreams) {
    for (size_t i = 0; i < AudioTracks()->Length(); ++i) {
      AudioTrack* t = (*AudioTracks())[i];
      if (t->Enabled()) {
        AddCaptureMediaTrackToOutputStream(t, ms);
      }
    }
    if (IsVideo() && !ms.mCapturingAudioOnly) {
      // Only add video tracks if we're a video element and the output stream
      // wants video.
      for (size_t i = 0; i < VideoTracks()->Length(); ++i) {
        VideoTrack* t = (*VideoTracks())[i];
        if (t->Selected()) {
          AddCaptureMediaTrackToOutputStream(t, ms);
        }
      }
    }
  }
}
5610 | | |
// Called once the first frame of data is available. Updates ready state,
// stops delaying the document load event, and — for preload=metadata with
// no pending play — suspends the decoder to save resources.
void
HTMLMediaElement::FirstFrameLoaded()
{
  LOG(LogLevel::Debug,
      ("%p, FirstFrameLoaded() mFirstFrameLoaded=%d mWaitingForKey=%d",
       this,
       mFirstFrameLoaded,
       mWaitingForKey));

  NS_ASSERTION(!mSuspendedAfterFirstFrame, "Should not have already suspended");

  // May be invoked more than once; only the first call flips the flag.
  if (!mFirstFrameLoaded) {
    mFirstFrameLoaded = true;
    UpdateReadyStateInternal();
  }

  ChangeDelayLoadStatus(false);

  // Suspend when we only needed metadata: paused, no autoplay attribute,
  // and preload action limited to metadata.
  if (mDecoder && mAllowSuspendAfterFirstFrame && mPaused &&
      !HasAttr(kNameSpaceID_None, nsGkAtoms::autoplay) &&
      mPreloadAction == HTMLMediaElement::PRELOAD_METADATA) {
    mSuspendedAfterFirstFrame = true;
    mDecoder->Suspend();
  }
}
5636 | | |
5637 | | void |
5638 | | HTMLMediaElement::NetworkError(const MediaResult& aError) |
5639 | 0 | { |
5640 | 0 | if (mReadyState == HAVE_NOTHING) { |
5641 | 0 | NoSupportedMediaSourceError(aError.Description()); |
5642 | 0 | } else { |
5643 | 0 | Error(MEDIA_ERR_NETWORK); |
5644 | 0 | } |
5645 | 0 | } |
5646 | | |
// Handle a fatal decode error reported by the decoder. Reports the failure
// to the console and decoder doctor, clears the track lists, and then either
// advances to the next <source> child, raises a "no supported source" error,
// or surfaces MEDIA_ERR_DECODE to script, depending on load state.
void
HTMLMediaElement::DecodeError(const MediaResult& aError)
{
  nsAutoString src;
  GetCurrentSrc(src);
  const char16_t* params[] = { src.get() };
  ReportLoadError("MediaLoadDecodeError", params, ArrayLength(params));

  DecoderDoctorDiagnostics diagnostics;
  diagnostics.StoreDecodeError(OwnerDoc(), aError, src, __func__);

  // The failed resource's tracks are gone; empty both lists.
  AudioTracks()->EmptyTracks();
  VideoTracks()->EmptyTracks();
  if (mIsLoadingFromSourceChildren) {
    // When loading from <source> children, a decode failure is not fatal to
    // the element: clear the error and try the next candidate.
    mErrorSink->ResetError();
    if (mSourceLoadCandidate) {
      DispatchAsyncSourceError(mSourceLoadCandidate);
      QueueLoadFromSourceTask();
    } else {
      NS_WARNING("Should know the source we were loading from!");
    }
  } else if (mReadyState == HAVE_NOTHING) {
    // Failed before any data was usable: treat as "no supported source".
    NoSupportedMediaSourceError(aError.Description());
  } else {
    Error(MEDIA_ERR_DECODE, aError.Description());
  }
}
5674 | | |
5675 | | void |
5676 | | HTMLMediaElement::DecodeWarning(const MediaResult& aError) |
5677 | 0 | { |
5678 | 0 | nsAutoString src; |
5679 | 0 | GetCurrentSrc(src); |
5680 | 0 | DecoderDoctorDiagnostics diagnostics; |
5681 | 0 | diagnostics.StoreDecodeWarning(OwnerDoc(), aError, src, __func__); |
5682 | 0 | } |
5683 | | |
5684 | | bool |
5685 | | HTMLMediaElement::HasError() const |
5686 | 0 | { |
5687 | 0 | return GetError(); |
5688 | 0 | } |
5689 | | |
5690 | | void |
5691 | | HTMLMediaElement::LoadAborted() |
5692 | 0 | { |
5693 | 0 | Error(MEDIA_ERR_ABORTED); |
5694 | 0 | } |
5695 | | |
// Record a media error of type aErrorCode (one of the MEDIA_ERR_* codes)
// with optional detail text, stop delaying the document load event, and
// update audio-channel playing state since an errored element is no longer
// considered playing.
void
HTMLMediaElement::Error(uint16_t aErrorCode, const nsACString& aErrorDetails)
{
  mErrorSink->SetError(aErrorCode, aErrorDetails);
  ChangeDelayLoadStatus(false);
  UpdateAudioChannelPlayingState();
}
5703 | | |
// Called when playback reaches the end of the resource. Cleans up finished
// output streams, handles the loop attribute (seek back to 0 instead of
// ending), pauses the element, and fires "durationchange"/"ended" as
// appropriate.
void
HTMLMediaElement::PlaybackEnded()
{
  // We changed state which can affect AddRemoveSelfReference
  AddRemoveSelfReference();

  NS_ASSERTION(!mDecoder || mDecoder->IsEnded(),
               "Decoder fired ended, but not in ended state");

  // Discard all output streams that have finished now.
  // Iterate backwards so RemoveElementAt() doesn't shift unvisited entries.
  for (int32_t i = mOutputStreams.Length() - 1; i >= 0; --i) {
    if (mOutputStreams[i].mFinishWhenEnded) {
      LOG(LogLevel::Debug,
          ("Playback ended. Removing output stream %p",
           mOutputStreams[i].mStream.get()));
      mOutputStreams.RemoveElementAt(i);
    }
  }

  if (mSrcStream) {
    LOG(LogLevel::Debug,
        ("%p, got duration by reaching the end of the resource", this));
    DispatchAsyncEvent(NS_LITERAL_STRING("durationchange"));
  }

  // With the loop attribute set we seek back to the start rather than
  // entering the ended state; no "ended" event fires in that case.
  if (HasAttr(kNameSpaceID_None, nsGkAtoms::loop)) {
    SetCurrentTime(0);
    return;
  }

  FireTimeUpdate(false);

  if (!mPaused) {
    Pause();
  }

  if (mSrcStream) {
    // A MediaStream that goes from inactive to active shall be eligible for
    // autoplay again according to the mediacapture-main spec.
    mAutoplaying = true;
  }

  DispatchAsyncEvent(NS_LITERAL_STRING("ended"));
}
5748 | | |
5749 | | void |
5750 | | HTMLMediaElement::SeekStarted() |
5751 | 0 | { |
5752 | 0 | DispatchAsyncEvent(NS_LITERAL_STRING("seeking")); |
5753 | 0 | } |
5754 | | |
// Called when a seek finishes. Fires "seeked", notifies the text-track
// manager, and restarts played-range and audio-silence-range bookkeeping.
void
HTMLMediaElement::SeekCompleted()
{
  mPlayingBeforeSeek = false;
  SetPlayedOrSeeked(true);
  if (mTextTrackManager) {
    // Let cue handling know the playback position jumped.
    mTextTrackManager->DidSeek();
  }
  FireTimeUpdate(false);
  DispatchAsyncEvent(NS_LITERAL_STRING("seeked"));
  // We changed whether we're seeking so we need to AddRemoveSelfReference
  AddRemoveSelfReference();
  if (mCurrentPlayRangeStart == -1.0) {
    // Begin a new "played" range at the seek target.
    mCurrentPlayRangeStart = CurrentTime();
  }

  // After seeking completed, if the audio track is silent, start another new
  // silence range.
  mHasAccumulatedSilenceRangeBeforeSeekEnd = false;
  if (IsAudioTrackCurrentlySilent()) {
    UpdateAudioTrackSilenceRange(mIsAudioTrackAudible);
  }
}
5778 | | |
5779 | | void |
5780 | | HTMLMediaElement::NotifySuspendedByCache(bool aSuspendedByCache) |
5781 | 0 | { |
5782 | 0 | mDownloadSuspendedByCache = aSuspendedByCache; |
5783 | 0 | UpdateReadyStateInternal(); |
5784 | 0 | } |
5785 | | |
5786 | | void |
5787 | | HTMLMediaElement::DownloadSuspended() |
5788 | 0 | { |
5789 | 0 | if (mNetworkState == NETWORK_LOADING) { |
5790 | 0 | DispatchAsyncEvent(NS_LITERAL_STRING("progress")); |
5791 | 0 | } |
5792 | 0 | ChangeNetworkState(NETWORK_IDLE); |
5793 | 0 | } |
5794 | | |
5795 | | void |
5796 | | HTMLMediaElement::DownloadResumed() |
5797 | 0 | { |
5798 | 0 | ChangeNetworkState(NETWORK_LOADING); |
5799 | 0 | } |
5800 | | |
// Periodic check, driven both by the progress timer and by new-data arrival
// (aHaveNewProgress == true), that fires "progress" events while download
// advances and a "stalled" event when no data has arrived for STALL_MS.
void
HTMLMediaElement::CheckProgress(bool aHaveNewProgress)
{
  MOZ_ASSERT(NS_IsMainThread());
  MOZ_ASSERT(mNetworkState == NETWORK_LOADING);

  TimeStamp now = TimeStamp::NowLoRes();

  if (aHaveNewProgress) {
    // Remember when data last arrived, for stall detection below.
    mDataTime = now;
  }

  // If this is the first progress, or PROGRESS_MS has passed since the last
  // progress event fired and more data has arrived since then, fire a
  // progress event.
  NS_ASSERTION((mProgressTime.IsNull() && !aHaveNewProgress) ||
                 !mDataTime.IsNull(),
               "null TimeStamp mDataTime should not be used in comparison");
  if (mProgressTime.IsNull()
        ? aHaveNewProgress
        : (now - mProgressTime >= TimeDuration::FromMilliseconds(PROGRESS_MS) &&
           mDataTime > mProgressTime)) {
    DispatchAsyncEvent(NS_LITERAL_STRING("progress"));
    // Resolution() ensures that future data will have now > mProgressTime,
    // and so will trigger another event. mDataTime is not reset because it
    // is still required to detect stalled; it is similarly offset by
    // resolution to indicate the new data has not yet arrived.
    mProgressTime = now - TimeDuration::Resolution();
    if (mDataTime > mProgressTime) {
      mDataTime = mProgressTime;
    }
    if (!mProgressTimer) {
      NS_ASSERTION(aHaveNewProgress,
                   "timer dispatched when there was no timer");
      // Were stalled. Restart timer.
      StartProgressTimer();
      if (!mLoadedDataFired) {
        ChangeDelayLoadStatus(true);
      }
    }
    // Download statistics may have been updated, force a recheck of the
    // readyState.
    UpdateReadyStateInternal();
  }

  if (now - mDataTime >= TimeDuration::FromMilliseconds(STALL_MS)) {
    // No data for STALL_MS: report "stalled" (MSE loads manage their own
    // data delivery, so only stop delaying the load event for those).
    if (!mMediaSource) {
      DispatchAsyncEvent(NS_LITERAL_STRING("stalled"));
    } else {
      ChangeDelayLoadStatus(false);
    }

    NS_ASSERTION(mProgressTimer, "detected stalled without timer");
    // Stop timer events, which prevents repeated stalled events until there
    // is more progress.
    StopProgress();
  }

  AddRemoveSelfReference();
}
5861 | | |
5862 | | /* static */ |
5863 | | void |
5864 | | HTMLMediaElement::ProgressTimerCallback(nsITimer* aTimer, void* aClosure) |
5865 | 0 | { |
5866 | 0 | auto decoder = static_cast<HTMLMediaElement*>(aClosure); |
5867 | 0 | decoder->CheckProgress(false); |
5868 | 0 | } |
5869 | | |
// Create the repeating timer that drives periodic progress/stall checks;
// it fires ProgressTimerCallback every PROGRESS_MS on the main thread.
void
HTMLMediaElement::StartProgressTimer()
{
  MOZ_ASSERT(NS_IsMainThread());
  MOZ_ASSERT(mNetworkState == NETWORK_LOADING);
  NS_ASSERTION(!mProgressTimer, "Already started progress timer.");

  NS_NewTimerWithFuncCallback(getter_AddRefs(mProgressTimer),
                              ProgressTimerCallback,
                              this,
                              PROGRESS_MS,
                              nsITimer::TYPE_REPEATING_SLACK,
                              "HTMLMediaElement::ProgressTimerCallback",
                              mMainThreadEventTarget);
}
5885 | | |
// Begin download-progress tracking; called when entering NETWORK_LOADING
// (see ChangeNetworkState).
void
HTMLMediaElement::StartProgress()
{
  // Record the time now for detecting stalled.
  mDataTime = TimeStamp::NowLoRes();
  // Reset mProgressTime so that mDataTime is not indicating bytes received
  // after the last progress event.
  mProgressTime = TimeStamp();
  StartProgressTimer();
}
5896 | | |
5897 | | void |
5898 | | HTMLMediaElement::StopProgress() |
5899 | 0 | { |
5900 | 0 | MOZ_ASSERT(NS_IsMainThread()); |
5901 | 0 | if (!mProgressTimer) { |
5902 | 0 | return; |
5903 | 0 | } |
5904 | 0 | |
5905 | 0 | mProgressTimer->Cancel(); |
5906 | 0 | mProgressTimer = nullptr; |
5907 | 0 | } |
5908 | | |
5909 | | void |
5910 | | HTMLMediaElement::DownloadProgressed() |
5911 | 0 | { |
5912 | 0 | if (mNetworkState != NETWORK_LOADING) { |
5913 | 0 | return; |
5914 | 0 | } |
5915 | 0 | CheckProgress(true); |
5916 | 0 | } |
5917 | | |
5918 | | bool |
5919 | | HTMLMediaElement::ShouldCheckAllowOrigin() |
5920 | 0 | { |
5921 | 0 | return mCORSMode != CORS_NONE; |
5922 | 0 | } |
5923 | | |
// True when the resource's data may be exposed to this element's origin:
// either the node principal subsumes the resource's principal, or CORS was
// in use for the load (ShouldCheckAllowOrigin()), in which case the load
// already passed its CORS checks.
bool
HTMLMediaElement::IsCORSSameOrigin()
{
  bool subsumes;
  RefPtr<nsIPrincipal> principal = GetCurrentPrincipal();
  return (NS_SUCCEEDED(NodePrincipal()->Subsumes(principal, &subsumes)) &&
          subsumes) ||
         ShouldCheckAllowOrigin();
}
5933 | | |
// Recompute mReadyState from the current decoder/stream state: metadata
// availability, next-frame status, EME key-waiting state, text-track load
// state and buffering, invoking ChangeReadyState() for the transition.
// Called whenever any of these inputs may have changed.
void
HTMLMediaElement::UpdateReadyStateInternal()
{
  if (!mDecoder && !mSrcStream) {
    // Not initialized - bail out.
    LOG(LogLevel::Debug,
        ("MediaElement %p UpdateReadyStateInternal() "
         "Not initialized",
         this));
    return;
  }

  if (mDecoder && mReadyState < HAVE_METADATA) {
    // aNextFrame might have a next frame because the decoder can advance
    // on its own thread before MetadataLoaded gets a chance to run.
    // The arrival of more data can't change us out of this readyState.
    LOG(LogLevel::Debug,
        ("MediaElement %p UpdateReadyStateInternal() "
         "Decoder ready state < HAVE_METADATA",
         this));
    return;
  }

  if (mSrcStream && mReadyState < HAVE_METADATA) {
    if (!mSrcStreamTracksAvailable) {
      LOG(LogLevel::Debug,
          ("MediaElement %p UpdateReadyStateInternal() "
           "MediaStreamTracks not available yet",
           this));
      return;
    }

    bool hasAudioTracks = !AudioTracks()->IsEmpty();
    bool hasVideoTracks = !VideoTracks()->IsEmpty();
    if (!hasAudioTracks && !hasVideoTracks) {
      LOG(LogLevel::Debug,
          ("MediaElement %p UpdateReadyStateInternal() "
           "Stream with no tracks",
           this));
      return;
    }

    // A <video> element playing a stream with a video track must wait for
    // an actual video frame before reporting metadata.
    if (IsVideo() && hasVideoTracks && !HasVideo()) {
      LOG(LogLevel::Debug,
          ("MediaElement %p UpdateReadyStateInternal() "
           "Stream waiting for video",
           this));
      return;
    }

    LOG(LogLevel::Debug,
        ("MediaElement %p UpdateReadyStateInternal() Stream has "
         "metadata; audioTracks=%d, videoTracks=%d, "
         "hasVideoFrame=%d",
         this,
         AudioTracks()->Length(),
         VideoTracks()->Length(),
         HasVideo()));

    // We are playing a stream that has video and a video frame is now set.
    // This means we have all metadata needed to change ready state.
    MediaInfo mediaInfo = mMediaInfo;
    if (hasAudioTracks) {
      mediaInfo.EnableAudio();
    }
    if (hasVideoTracks) {
      mediaInfo.EnableVideo();
    }
    MetadataLoaded(&mediaInfo, nullptr);
  }

  if (mMediaSource) {
    // readyState has changed, assuming it's following the pending mediasource
    // operations. Notify the Mediasource that the operations have completed.
    mMediaSource->CompletePendingTransactions();
  }

  enum NextFrameStatus nextFrameStatus = NextFrameStatus();
  if (mWaitingForKey == NOT_WAITING_FOR_KEY) {
    if (nextFrameStatus == NEXT_FRAME_UNAVAILABLE && mDecoder &&
        !mDecoder->IsEnded()) {
      nextFrameStatus = mDecoder->NextFrameBufferedStatus();
    }
  } else if (mWaitingForKey == WAITING_FOR_KEY) {
    if (nextFrameStatus == NEXT_FRAME_UNAVAILABLE ||
        nextFrameStatus == NEXT_FRAME_UNAVAILABLE_BUFFERING) {
      // http://w3c.github.io/encrypted-media/#wait-for-key
      // Continuing 7.3.4 Queue a "waitingforkey" Event
      // 4. Queue a task to fire a simple event named waitingforkey
      // at the media element.
      // 5. Set the readyState of media element to HAVE_METADATA.
      // NOTE: We'll change to HAVE_CURRENT_DATA or HAVE_METADATA
      // depending on whether we've loaded the first frame or not
      // below.
      // 6. Suspend playback.
      // Note: Playback will already be stalled, as the next frame is
      // unavailable.
      mWaitingForKey = WAITING_FOR_KEY_DISPATCHED;
      DispatchAsyncEvent(NS_LITERAL_STRING("waitingforkey"));
    }
  } else {
    MOZ_ASSERT(mWaitingForKey == WAITING_FOR_KEY_DISPATCHED);
    if (nextFrameStatus == NEXT_FRAME_AVAILABLE) {
      // We have new frames after dispatching "waitingforkey".
      // This means we've got the key and can reset mWaitingForKey now.
      mWaitingForKey = NOT_WAITING_FOR_KEY;
    }
  }

  if (nextFrameStatus == MediaDecoderOwner::NEXT_FRAME_UNAVAILABLE_SEEKING) {
    LOG(LogLevel::Debug,
        ("MediaElement %p UpdateReadyStateInternal() "
         "NEXT_FRAME_UNAVAILABLE_SEEKING; Forcing HAVE_METADATA",
         this));
    ChangeReadyState(HAVE_METADATA);
    return;
  }

  if (IsVideo() && HasVideo() && !IsPlaybackEnded() && GetImageContainer() &&
      !GetImageContainer()->HasCurrentImage()) {
    // Don't advance if we are playing video, but don't have a video frame.
    // Also, if video became available after advancing to HAVE_CURRENT_DATA
    // while we are still playing, we need to revert to HAVE_METADATA until
    // a video frame is available.
    LOG(LogLevel::Debug,
        ("MediaElement %p UpdateReadyStateInternal() "
         "Playing video but no video frame; Forcing HAVE_METADATA",
         this));
    ChangeReadyState(HAVE_METADATA);
    return;
  }

  if (!mFirstFrameLoaded) {
    // We haven't yet loaded the first frame, making us unable to determine
    // if we have enough valid data at the present stage.
    return;
  }

  if (nextFrameStatus == NEXT_FRAME_UNAVAILABLE_BUFFERING) {
    // Force HAVE_CURRENT_DATA when buffering.
    ChangeReadyState(HAVE_CURRENT_DATA);
    return;
  }

  // TextTracks must be loaded for the HAVE_ENOUGH_DATA and
  // HAVE_FUTURE_DATA.
  // So force HAVE_CURRENT_DATA if text tracks not loaded.
  if (mTextTrackManager && !mTextTrackManager->IsLoaded()) {
    ChangeReadyState(HAVE_CURRENT_DATA);
    return;
  }

  if (mDownloadSuspendedByCache && mDecoder && !mDecoder->IsEnded()) {
    // The decoder has signaled that the download has been suspended by the
    // media cache. So move readyState into HAVE_ENOUGH_DATA, in case there's
    // script waiting for a "canplaythrough" event; without this forced
    // transition, we will never fire the "canplaythrough" event if the
    // media cache is too small, and scripts are bound to fail. Don't force
    // this transition if the decoder is in ended state; the readyState
    // should remain at HAVE_CURRENT_DATA in this case.
    // Note that this state transition includes the case where we finished
    // downloaded the whole data stream.
    LOG(LogLevel::Debug,
        ("MediaElement %p UpdateReadyStateInternal() "
         "Decoder download suspended by cache",
         this));
    ChangeReadyState(HAVE_ENOUGH_DATA);
    return;
  }

  if (nextFrameStatus != MediaDecoderOwner::NEXT_FRAME_AVAILABLE) {
    LOG(LogLevel::Debug,
        ("MediaElement %p UpdateReadyStateInternal() "
         "Next frame not available",
         this));
    ChangeReadyState(HAVE_CURRENT_DATA);
    return;
  }

  if (mSrcStream) {
    LOG(LogLevel::Debug,
        ("MediaElement %p UpdateReadyStateInternal() "
         "Stream HAVE_ENOUGH_DATA",
         this));
    ChangeReadyState(HAVE_ENOUGH_DATA);
    return;
  }

  // Now see if we should set HAVE_ENOUGH_DATA.
  // If it's something we don't know the size of, then we can't
  // make a real estimate, so we go straight to HAVE_ENOUGH_DATA once
  // we've downloaded enough data that our download rate is considered
  // reliable. We have to move to HAVE_ENOUGH_DATA at some point or
  // autoplay elements for live streams will never play. Otherwise we
  // move to HAVE_ENOUGH_DATA if we can play through the entire media
  // without stopping to buffer.
  if (mDecoder->CanPlayThrough()) {
    LOG(LogLevel::Debug,
        ("MediaElement %p UpdateReadyStateInternal() "
         "Decoder can play through",
         this));
    ChangeReadyState(HAVE_ENOUGH_DATA);
    return;
  }
  LOG(LogLevel::Debug,
      ("MediaElement %p UpdateReadyStateInternal() "
       "Default; Decoder has future data",
       this));
  ChangeReadyState(HAVE_FUTURE_DATA);
}
6144 | | |
// Human-readable names for the ready-state values, indexed by state; keep
// in sync with the HAVE_* ready-state constants used in this file.
static const char* const gReadyStateToString[] = { "HAVE_NOTHING",
                                                   "HAVE_METADATA",
                                                   "HAVE_CURRENT_DATA",
                                                   "HAVE_FUTURE_DATA",
                                                   "HAVE_ENOUGH_DATA" };
6150 | | |
// Transition mReadyState to aState and fire the transition events required
// by the HTML spec ("waiting", "loadeddata", "canplay", "canplaythrough"),
// resuming a pending play() once enough data becomes available.
void
HTMLMediaElement::ChangeReadyState(nsMediaReadyState aState)
{
  if (mReadyState == aState) {
    return;
  }

  nsMediaReadyState oldState = mReadyState;
  mReadyState = aState;
  LOG(LogLevel::Debug,
      ("%p Ready state changed to %s", this, gReadyStateToString[aState]));

  DDLOG(DDLogCategory::Property, "ready_state", gReadyStateToString[aState]);

  // No transition events fire while the network state is still empty.
  if (mNetworkState == NETWORK_EMPTY) {
    return;
  }

  UpdateAudioChannelPlayingState();

  // Handle raising of "waiting" event during seek (see 4.8.10.9)
  // or
  // 4.8.12.7 Ready states:
  // "If the previous ready state was HAVE_FUTURE_DATA or more, and the new
  // ready state is HAVE_CURRENT_DATA or less
  // If the media element was potentially playing before its readyState
  // attribute changed to a value lower than HAVE_FUTURE_DATA, and the element
  // has not ended playback, and playback has not stopped due to errors,
  // paused for user interaction, or paused for in-band content, the user agent
  // must queue a task to fire a simple event named timeupdate at the element,
  // and queue a task to fire a simple event named waiting at the element."
  if (mPlayingBeforeSeek && mReadyState < HAVE_FUTURE_DATA) {
    DispatchAsyncEvent(NS_LITERAL_STRING("waiting"));
  } else if (oldState >= HAVE_FUTURE_DATA && mReadyState < HAVE_FUTURE_DATA &&
             !Paused() && !Ended() && !mErrorSink->mError) {
    FireTimeUpdate(false);
    DispatchAsyncEvent(NS_LITERAL_STRING("waiting"));
  }

  // "loadeddata" fires at most once per load (mLoadedDataFired latch).
  if (oldState < HAVE_CURRENT_DATA && mReadyState >= HAVE_CURRENT_DATA &&
      !mLoadedDataFired) {
    DispatchAsyncEvent(NS_LITERAL_STRING("loadeddata"));
    mLoadedDataFired = true;
  }

  if (oldState < HAVE_FUTURE_DATA && mReadyState >= HAVE_FUTURE_DATA) {
    DispatchAsyncEvent(NS_LITERAL_STRING("canplay"));
    if (!mPaused) {
      // A play was pending; now that data is available, actually start the
      // decoder and notify "playing".
      if (mDecoder && !mPausedForInactiveDocumentOrChannel) {
        MOZ_ASSERT(AutoplayPolicy::IsAllowedToPlay(*this));
        mDecoder->Play();
      }
      NotifyAboutPlaying();
    }
  }

  CheckAutoplayDataReady();

  if (oldState < HAVE_ENOUGH_DATA && mReadyState >= HAVE_ENOUGH_DATA) {
    DispatchAsyncEvent(NS_LITERAL_STRING("canplaythrough"));
  }
}
6213 | | |
// Human-readable names for the network-state values, indexed by state; keep
// in sync with the NETWORK_* constants used in this file.
static const char* const gNetworkStateToString[] = { "EMPTY",
                                                     "IDLE",
                                                     "LOADING",
                                                     "NO_SOURCE" };
6218 | | |
// Transition mNetworkState to aState, starting/stopping download-progress
// notifications and firing "suspend" when entering NETWORK_IDLE without an
// error.
void
HTMLMediaElement::ChangeNetworkState(nsMediaNetworkState aState)
{
  if (mNetworkState == aState) {
    return;
  }

  nsMediaNetworkState oldState = mNetworkState;
  mNetworkState = aState;
  LOG(LogLevel::Debug,
      ("%p Network state changed to %s", this, gNetworkStateToString[aState]));
  DDLOG(
    DDLogCategory::Property, "network_state", gNetworkStateToString[aState]);

  if (oldState == NETWORK_LOADING) {
    // Stop progress notification when exiting NETWORK_LOADING.
    StopProgress();
  }

  if (mNetworkState == NETWORK_LOADING) {
    // Start progress notification when entering NETWORK_LOADING.
    StartProgress();
  } else if (mNetworkState == NETWORK_IDLE && !mErrorSink->mError) {
    // Fire 'suspend' event when entering NETWORK_IDLE and no error presented.
    DispatchAsyncEvent(NS_LITERAL_STRING("suspend"));
  }

  // Changing mNetworkState affects AddRemoveSelfReference().
  AddRemoveSelfReference();
}
6249 | | |
6250 | | bool |
6251 | | HTMLMediaElement::CanActivateAutoplay() |
6252 | 0 | { |
6253 | 0 | // For stream inputs, we activate autoplay on HAVE_NOTHING because |
6254 | 0 | // this element itself might be blocking the stream from making progress by |
6255 | 0 | // being paused. We only check that it has data by checking its active state. |
6256 | 0 | // We also activate autoplay when playing a media source since the data |
6257 | 0 | // download is controlled by the script and there is no way to evaluate |
6258 | 0 | // MediaDecoder::CanPlayThrough(). |
6259 | 0 |
|
6260 | 0 | if (!HasAttr(kNameSpaceID_None, nsGkAtoms::autoplay)) { |
6261 | 0 | return false; |
6262 | 0 | } |
6263 | 0 | |
6264 | 0 | if (!mAutoplaying) { |
6265 | 0 | return false; |
6266 | 0 | } |
6267 | 0 | |
6268 | 0 | if (IsEditable()) { |
6269 | 0 | return false; |
6270 | 0 | } |
6271 | 0 | |
6272 | 0 | if (!mPaused) { |
6273 | 0 | return false; |
6274 | 0 | } |
6275 | 0 | |
6276 | 0 | if (mPausedForInactiveDocumentOrChannel) { |
6277 | 0 | return false; |
6278 | 0 | } |
6279 | 0 | |
6280 | 0 | // Static document is used for print preview and printing, should not be |
6281 | 0 | // autoplay |
6282 | 0 | if (OwnerDoc()->IsStaticDocument()) { |
6283 | 0 | return false; |
6284 | 0 | } |
6285 | 0 | |
6286 | 0 | if (mAudioChannelWrapper) { |
6287 | 0 | // Note: SUSPENDED_PAUSE and SUSPENDED_BLOCK will be merged into one single |
6288 | 0 | // state. |
6289 | 0 | if (mAudioChannelWrapper->GetSuspendType() == |
6290 | 0 | nsISuspendedTypes::SUSPENDED_PAUSE || |
6291 | 0 | mAudioChannelWrapper->GetSuspendType() == |
6292 | 0 | nsISuspendedTypes::SUSPENDED_BLOCK || |
6293 | 0 | mAudioChannelWrapper->IsPlaybackBlocked()) { |
6294 | 0 | return false; |
6295 | 0 | } |
6296 | 0 | } |
6297 | 0 | |
6298 | 0 | bool hasData = (mDecoder && mReadyState >= HAVE_ENOUGH_DATA) || |
6299 | 0 | (mSrcStream && mSrcStream->Active()); |
6300 | 0 |
|
6301 | 0 | return hasData; |
6302 | 0 | } |
6303 | | |
// If the preconditions for autoplay hold (see CanActivateAutoplay) and the
// autoplay policy permits playback, take the element out of the paused
// state, start the decoder, and fire "play" followed by "playing".
void
HTMLMediaElement::CheckAutoplayDataReady()
{
  if (!CanActivateAutoplay()) {
    return;
  }

  UpdateHadAudibleAutoplayState();
  if (!AutoplayPolicy::IsAllowedToPlay(*this)) {
    // Policy forbids playing now; request autoplay approval instead.
    EnsureAutoplayRequested(false);
    return;
  }

  mPaused = false;
  // We changed mPaused which can affect AddRemoveSelfReference
  AddRemoveSelfReference();
  UpdateSrcMediaStreamPlaying();
  UpdateAudioChannelPlayingState();

  if (mDecoder) {
    SetPlayedOrSeeked(true);
    if (mCurrentPlayRangeStart == -1.0) {
      // Begin a new "played" range at the current position.
      mCurrentPlayRangeStart = CurrentTime();
    }
    MOZ_ASSERT(!mPausedForInactiveDocumentOrChannel);
    mDecoder->Play();
  } else if (mSrcStream) {
    SetPlayedOrSeeked(true);
  }

  // For blocked media, the event would be pending until it is resumed.
  DispatchAsyncEvent(NS_LITERAL_STRING("play"));

  DispatchAsyncEvent(NS_LITERAL_STRING("playing"));
}
6339 | | |
6340 | | bool |
6341 | | HTMLMediaElement::IsActive() const |
6342 | 0 | { |
6343 | 0 | nsIDocument* ownerDoc = OwnerDoc(); |
6344 | 0 | return ownerDoc && ownerDoc->IsActive() && ownerDoc->IsVisible(); |
6345 | 0 | } |
6346 | | |
6347 | | bool |
6348 | | HTMLMediaElement::IsHidden() const |
6349 | 0 | { |
6350 | 0 | nsIDocument* ownerDoc; |
6351 | 0 | return mUnboundFromTree || !(ownerDoc = OwnerDoc()) || ownerDoc->Hidden(); |
6352 | 0 | } |
6353 | | |
6354 | | VideoFrameContainer* |
6355 | | HTMLMediaElement::GetVideoFrameContainer() |
6356 | 0 | { |
6357 | 0 | if (mShuttingDown) { |
6358 | 0 | return nullptr; |
6359 | 0 | } |
6360 | 0 | |
6361 | 0 | if (mVideoFrameContainer) |
6362 | 0 | return mVideoFrameContainer; |
6363 | 0 | |
6364 | 0 | // Only video frames need an image container. |
6365 | 0 | if (!IsVideo()) { |
6366 | 0 | return nullptr; |
6367 | 0 | } |
6368 | 0 | |
6369 | 0 | mVideoFrameContainer = new VideoFrameContainer( |
6370 | 0 | this, LayerManager::CreateImageContainer(ImageContainer::ASYNCHRONOUS)); |
6371 | 0 |
|
6372 | 0 | return mVideoFrameContainer; |
6373 | 0 | } |
6374 | | |
// DOMMediaStream principal-change callback. Folds the stream's new video
// principal into mSrcStreamVideoPrincipal; the principal actually applied
// is updated once frames under the new principal reach the
// VideoFrameContainer (see UpdateSrcStreamVideoPrincipal).
void
HTMLMediaElement::PrincipalChanged(DOMMediaStream* aStream)
{
  LOG(LogLevel::Info, ("HTMLMediaElement %p Stream principal changed.", this));
  nsContentUtils::CombineResourcePrincipals(&mSrcStreamVideoPrincipal,
                                            aStream->GetVideoPrincipal());

  LOG(LogLevel::Debug,
      ("HTMLMediaElement %p Stream video principal changed to "
       "%p. Waiting for it to reach VideoFrameContainer before "
       "setting.",
       this,
       aStream->GetVideoPrincipal()));
  if (mVideoFrameContainer) {
    UpdateSrcStreamVideoPrincipal(
      mVideoFrameContainer->GetLastPrincipalHandle());
  }
}
6393 | | |
6394 | | void |
6395 | | HTMLMediaElement::UpdateSrcStreamVideoPrincipal( |
6396 | | const PrincipalHandle& aPrincipalHandle) |
6397 | 0 | { |
6398 | 0 | nsTArray<RefPtr<VideoStreamTrack>> videoTracks; |
6399 | 0 | mSrcStream->GetVideoTracks(videoTracks); |
6400 | 0 |
|
6401 | 0 | PrincipalHandle handle(aPrincipalHandle); |
6402 | 0 | bool matchesTrackPrincipal = false; |
6403 | 0 | for (const RefPtr<VideoStreamTrack>& track : videoTracks) { |
6404 | 0 | if (PrincipalHandleMatches(handle, track->GetPrincipal()) && |
6405 | 0 | !track->Ended()) { |
6406 | 0 | // When the PrincipalHandle for the VideoFrameContainer changes to that of |
6407 | 0 | // a track in mSrcStream we know that a removed track was displayed but |
6408 | 0 | // is no longer so. |
6409 | 0 | matchesTrackPrincipal = true; |
6410 | 0 | LOG(LogLevel::Debug, |
6411 | 0 | ("HTMLMediaElement %p VideoFrameContainer's " |
6412 | 0 | "PrincipalHandle matches track %p. That's all we " |
6413 | 0 | "need.", |
6414 | 0 | this, |
6415 | 0 | track.get())); |
6416 | 0 | break; |
6417 | 0 | } |
6418 | 0 | } |
6419 | 0 |
|
6420 | 0 | if (matchesTrackPrincipal) { |
6421 | 0 | mSrcStreamVideoPrincipal = mSrcStream->GetVideoPrincipal(); |
6422 | 0 | } |
6423 | 0 | } |
6424 | | |
6425 | | void |
6426 | | HTMLMediaElement::PrincipalHandleChangedForVideoFrameContainer( |
6427 | | VideoFrameContainer* aContainer, |
6428 | | const PrincipalHandle& aNewPrincipalHandle) |
6429 | 0 | { |
6430 | 0 | MOZ_ASSERT(NS_IsMainThread()); |
6431 | 0 |
|
6432 | 0 | if (!mSrcStream) { |
6433 | 0 | return; |
6434 | 0 | } |
6435 | 0 | |
6436 | 0 | LOG(LogLevel::Debug, |
6437 | 0 | ("HTMLMediaElement %p PrincipalHandle changed in " |
6438 | 0 | "VideoFrameContainer.", |
6439 | 0 | this)); |
6440 | 0 |
|
6441 | 0 | UpdateSrcStreamVideoPrincipal(aNewPrincipalHandle); |
6442 | 0 | } |
6443 | | |
// Synchronously dispatch a trusted, non-bubbling, non-cancelable event named
// aName at this element. Events raised while in the bfcache are queued and
// replayed later by DispatchPendingMediaEvents().
nsresult
HTMLMediaElement::DispatchEvent(const nsAString& aName)
{
  LOG_EVENT(
    LogLevel::Debug,
    ("%p Dispatching event %s", this, NS_ConvertUTF16toUTF8(aName).get()));

  // Save events that occur while in the bfcache. These will be dispatched
  // if the page comes out of the bfcache.
  if (mEventDeliveryPaused) {
    mPendingEvents.AppendElement(aName);
    return NS_OK;
  }

  return nsContentUtils::DispatchTrustedEvent(
    OwnerDoc(), static_cast<nsIContent*>(this), aName,
    CanBubble::eNo,
    Cancelable::eNo);
}
6463 | | |
// Queue an asynchronous, trusted event named aName at this element, and
// update play-time / hidden-video accounting for the "play", "playing",
// "waiting" and "pause" events. Events raised while in the bfcache are
// saved and replayed by DispatchPendingMediaEvents().
void
HTMLMediaElement::DispatchAsyncEvent(const nsAString& aName)
{
  LOG_EVENT(LogLevel::Debug,
            ("%p Queuing event %s", this, NS_ConvertUTF16toUTF8(aName).get()));
  DDLOG(DDLogCategory::Event,
        "HTMLMediaElement",
        nsCString(NS_ConvertUTF16toUTF8(aName)));

  // Save events that occur while in the bfcache. These will be dispatched
  // if the page comes out of the bfcache.
  if (mEventDeliveryPaused) {
    mPendingEvents.AppendElement(aName);
    return;
  }

  nsCOMPtr<nsIRunnable> event;

  // "playing" uses a dedicated runner so pending play() promises are
  // resolved together with the event dispatch.
  if (aName.EqualsLiteral("playing")) {
    event = new nsNotifyAboutPlayingRunner(this, TakePendingPlayPromises());
  } else {
    event = new nsAsyncEventRunner(aName, this);
  }

  mMainThreadEventTarget->Dispatch(event.forget());

  if ((aName.EqualsLiteral("play") || aName.EqualsLiteral("playing"))) {
    mPlayTime.Start();
    if (IsHidden()) {
      HiddenVideoStart();
    }
  } else if (aName.EqualsLiteral("waiting")) {
    mPlayTime.Pause();
    HiddenVideoStop();
  } else if (aName.EqualsLiteral("pause")) {
    mPlayTime.Pause();
    HiddenVideoStop();
  }
}
6503 | | |
6504 | | nsresult |
6505 | | HTMLMediaElement::DispatchPendingMediaEvents() |
6506 | 0 | { |
6507 | 0 | NS_ASSERTION(!mEventDeliveryPaused, |
6508 | 0 | "Must not be in bfcache when dispatching pending media events"); |
6509 | 0 |
|
6510 | 0 | uint32_t count = mPendingEvents.Length(); |
6511 | 0 | for (uint32_t i = 0; i < count; ++i) { |
6512 | 0 | DispatchAsyncEvent(mPendingEvents[i]); |
6513 | 0 | } |
6514 | 0 | mPendingEvents.Clear(); |
6515 | 0 |
|
6516 | 0 | return NS_OK; |
6517 | 0 | } |
6518 | | |
6519 | | bool |
6520 | | HTMLMediaElement::IsPotentiallyPlaying() const |
6521 | 0 | { |
6522 | 0 | // TODO: |
6523 | 0 | // playback has not stopped due to errors, |
6524 | 0 | // and the element has not paused for user interaction |
6525 | 0 | return !mPaused && |
6526 | 0 | (mReadyState == HAVE_ENOUGH_DATA || mReadyState == HAVE_FUTURE_DATA) && |
6527 | 0 | !IsPlaybackEnded(); |
6528 | 0 | } |
6529 | | |
6530 | | bool |
6531 | | HTMLMediaElement::IsPlaybackEnded() const |
6532 | 0 | { |
6533 | 0 | // TODO: |
6534 | 0 | // the current playback position is equal to the effective end of the media |
6535 | 0 | // resource. See bug 449157. |
6536 | 0 | return mReadyState >= HAVE_METADATA && mDecoder && mDecoder->IsEnded(); |
6537 | 0 | } |
6538 | | |
6539 | | already_AddRefed<nsIPrincipal> |
6540 | | HTMLMediaElement::GetCurrentPrincipal() |
6541 | 0 | { |
6542 | 0 | if (mDecoder) { |
6543 | 0 | return mDecoder->GetCurrentPrincipal(); |
6544 | 0 | } |
6545 | 0 | if (mSrcStream) { |
6546 | 0 | nsCOMPtr<nsIPrincipal> principal = mSrcStream->GetPrincipal(); |
6547 | 0 | return principal.forget(); |
6548 | 0 | } |
6549 | 0 | return nullptr; |
6550 | 0 | } |
6551 | | |
6552 | | already_AddRefed<nsIPrincipal> |
6553 | | HTMLMediaElement::GetCurrentVideoPrincipal() |
6554 | 0 | { |
6555 | 0 | if (mDecoder) { |
6556 | 0 | return mDecoder->GetCurrentPrincipal(); |
6557 | 0 | } |
6558 | 0 | if (mSrcStream) { |
6559 | 0 | nsCOMPtr<nsIPrincipal> principal = mSrcStreamVideoPrincipal; |
6560 | 0 | return principal.forget(); |
6561 | 0 | } |
6562 | 0 | return nullptr; |
6563 | 0 | } |
6564 | | |
// Called when the decoder's principal changes: refreshes the decoder's
// same-origin status and forwards the notification to registered observers.
// NOTE(review): mDecoder is dereferenced unconditionally below; callers
// presumably only invoke this while a decoder exists -- confirm.
void
HTMLMediaElement::NotifyDecoderPrincipalChanged()
{
  RefPtr<nsIPrincipal> principal = GetCurrentPrincipal();

  // A null principal means "no restriction", hence same-origin.
  mDecoder->UpdateSameOriginStatus(!principal || IsCORSSameOrigin());

  for (DecoderPrincipalChangeObserver* observer :
       mDecoderPrincipalChangeObservers) {
    observer->NotifyDecoderPrincipalChanged();
  }
}
6577 | | |
// Registers an observer to be notified from NotifyDecoderPrincipalChanged().
void
HTMLMediaElement::AddDecoderPrincipalChangeObserver(
  DecoderPrincipalChangeObserver* aObserver)
{
  mDecoderPrincipalChangeObservers.AppendElement(aObserver);
}
6584 | | |
// Unregisters a previously added observer.
// @return true if the observer was found and removed.
bool
HTMLMediaElement::RemoveDecoderPrincipalChangeObserver(
  DecoderPrincipalChangeObserver* aObserver)
{
  return mDecoderPrincipalChangeObservers.RemoveElement(aObserver);
}
6591 | | |
// Invalidates the rendered video: triggers a reflow when the intrinsic size
// changed, repaints the frame or just its video layer, and notifies SVG
// rendering observers.
void
HTMLMediaElement::Invalidate(bool aImageSizeChanged,
                             Maybe<nsIntSize>& aNewIntrinsicSize,
                             bool aForceInvalidate)
{
  nsIFrame* frame = GetPrimaryFrame();
  // A new intrinsic size affects layout: record it and reflow the frame.
  if (aNewIntrinsicSize) {
    UpdateMediaSize(aNewIntrinsicSize.value());
    if (frame) {
      nsPresContext* presContext = frame->PresContext();
      nsIPresShell* presShell = presContext->PresShell();
      presShell->FrameNeedsReflow(
        frame, nsIPresShell::eStyleChange, NS_FRAME_IS_DIRTY);
    }
  }

  RefPtr<ImageContainer> imageContainer = GetImageContainer();
  // Async image containers composite new frames without a main-thread paint,
  // so the layer invalidation may be flagged async unless explicitly forced.
  bool asyncInvalidate =
    imageContainer && imageContainer->IsAsync() && !aForceInvalidate;
  if (frame) {
    if (aImageSizeChanged) {
      // Size change requires repainting the whole frame, not just the layer.
      frame->InvalidateFrame();
    } else {
      frame->InvalidateLayer(DisplayItemType::TYPE_VIDEO,
                             nullptr,
                             nullptr,
                             asyncInvalidate ? nsIFrame::UPDATE_IS_ASYNC : 0);
    }
  }

  // Anything observing our rendering (e.g. SVG filters) must repaint too.
  SVGObserverUtils::InvalidateDirectRenderingObservers(this);
}
6624 | | |
6625 | | void |
6626 | | HTMLMediaElement::UpdateMediaSize(const nsIntSize& aSize) |
6627 | 0 | { |
6628 | 0 | if (IsVideo() && mReadyState != HAVE_NOTHING && |
6629 | 0 | mMediaInfo.mVideo.mDisplay != aSize) { |
6630 | 0 | DispatchAsyncEvent(NS_LITERAL_STRING("resize")); |
6631 | 0 | } |
6632 | 0 |
|
6633 | 0 | mMediaInfo.mVideo.mDisplay = aSize; |
6634 | 0 | UpdateReadyStateInternal(); |
6635 | 0 | } |
6636 | | |
// Records the first known video size for a stream source, then detaches the
// one-shot size listener from the selected video track.
void
HTMLMediaElement::UpdateInitialMediaSize(const nsIntSize& aSize)
{
  // Only adopt the size if we don't already have video metadata.
  if (!mMediaInfo.HasVideo()) {
    UpdateMediaSize(aSize);
  }

  // The size listener exists solely to learn the initial frame size; once
  // that has arrived it is removed.
  if (!mMediaStreamSizeListener) {
    return;
  }

  if (!mSelectedVideoStreamTrack) {
    // Listener present without a selected track should be impossible.
    MOZ_ASSERT(false);
    return;
  }

  mSelectedVideoStreamTrack->RemoveDirectListener(mMediaStreamSizeListener);
  mMediaStreamSizeListener->Forget();
  mMediaStreamSizeListener = nullptr;
}
6657 | | |
// Suspends or resumes the element when its document becomes inactive/active
// (e.g. enters or leaves the bfcache).
// @param aPauseElement  true to suspend, false to resume.
// @param aSuspendEvents when suspending, queue events instead of firing them
//                       until resumed (see mEventDeliveryPaused).
void
HTMLMediaElement::SuspendOrResumeElement(bool aPauseElement,
                                         bool aSuspendEvents)
{
  LOG(LogLevel::Debug,
      ("%p SuspendOrResumeElement(pause=%d, suspendEvents=%d) hidden=%d",
       this,
       aPauseElement,
       aSuspendEvents,
       OwnerDoc()->Hidden()));

  // Only act on an actual state transition.
  if (aPauseElement != mPausedForInactiveDocumentOrChannel) {
    mPausedForInactiveDocumentOrChannel = aPauseElement;
    UpdateSrcMediaStreamPlaying();
    UpdateAudioChannelPlayingState();
    if (aPauseElement) {
      ReportTelemetry();

      // For EME content, we may force destruction of the CDM client (and CDM
      // instance if this is the last client for that CDM instance) and
      // the CDM's decoder. This ensures the CDM gets reliable and prompt
      // shutdown notifications, as it may have book-keeping it needs
      // to do on shutdown.
      if (mMediaKeys) {
        nsAutoString keySystem;
        mMediaKeys->GetKeySystem(keySystem);
        // NOTE(review): keySystem is fetched but never used afterwards;
        // looks like a leftover from removed key-system-specific handling
        // -- confirm before removing.
      }
      if (mDecoder) {
        mDecoder->Pause();
        mDecoder->Suspend();
      }
      mEventDeliveryPaused = aSuspendEvents;
    } else {
      if (mDecoder) {
        mDecoder->Resume();
        // Restart playback unless the user paused or playback has ended.
        if (!mPaused && !mDecoder->IsEnded()) {
          mDecoder->Play();
        }
      }
      // Replay any events that were queued while suspended.
      if (mEventDeliveryPaused) {
        mEventDeliveryPaused = false;
        DispatchPendingMediaEvents();
      }
    }
  }
}
6704 | | |
6705 | | bool |
6706 | | HTMLMediaElement::IsBeingDestroyed() |
6707 | 0 | { |
6708 | 0 | nsIDocument* ownerDoc = OwnerDoc(); |
6709 | 0 | nsIDocShell* docShell = ownerDoc ? ownerDoc->GetDocShell() : nullptr; |
6710 | 0 | bool isBeingDestroyed = false; |
6711 | 0 | if (docShell) { |
6712 | 0 | docShell->IsBeingDestroyed(&isBeingDestroyed); |
6713 | 0 | } |
6714 | 0 | return isBeingDestroyed; |
6715 | 0 | } |
6716 | | |
// Reacts to the owner document becoming visible/hidden or active/inactive:
// adjusts telemetry timers, pauses/resumes the element, shuts down EME state
// for inactive documents, and updates the self-reference keeping us alive.
void
HTMLMediaElement::NotifyOwnerDocumentActivityChanged()
{
  bool visible = !IsHidden();
  if (visible) {
    // Visible -> Just pause hidden play time (no-op if already paused).
    HiddenVideoStop();
  } else if (mPlayTime.IsStarted()) {
    // Not visible, play time is running -> Start hidden play time if needed.
    HiddenVideoStart();
  }

  // Skip while the docshell is tearing down; the decoder may be going away.
  if (mDecoder && !IsBeingDestroyed()) {
    NotifyDecoderActivityChanges();
  }

  bool pauseElement = ShouldElementBePaused();
  SuspendOrResumeElement(pauseElement, !IsActive());

  // If the owning document has become inactive we should shutdown the CDM.
  if (!OwnerDoc()->IsCurrentActiveDocument() && mMediaKeys) {
    mMediaKeys->Shutdown();
    DDUNLINKCHILD(mMediaKeys.get());
    mMediaKeys = nullptr;
    if (mDecoder) {
      ShutdownDecoder();
    }
  }

  AddRemoveSelfReference();
}
6748 | | |
// Adds or releases the self-reference (held via the shutdown observer) that
// keeps this element alive while it may still fire events or produce output.
void
HTMLMediaElement::AddRemoveSelfReference()
{
  // XXX we could release earlier here in many situations if we examined
  // which event listeners are attached. Right now we assume there is a
  // potential listener for every event. We would also have to keep the
  // element alive if it was playing and producing audio output --- right now
  // that's covered by the !mPaused check.
  nsIDocument* ownerDoc = OwnerDoc();

  // See the comment at the top of this file for the explanation of this
  // boolean expression.
  bool needSelfReference =
    !mShuttingDown && ownerDoc->IsActive() &&
    (mDelayingLoadEvent || (!mPaused && mDecoder && !mDecoder->IsEnded()) ||
     (!mPaused && mSrcStream && !mSrcStream->IsFinished()) ||
     (mDecoder && mDecoder->IsSeeking()) || CanActivateAutoplay() ||
     (mMediaSource ? mProgressTimer : mNetworkState == NETWORK_LOADING));

  if (needSelfReference != mHasSelfReference) {
    mHasSelfReference = needSelfReference;
    if (needSelfReference) {
      // The shutdown observer will hold a strong reference to us. This
      // will do to keep us alive. We need to know about shutdown so that
      // we can release our self-reference.
      mShutdownObserver->AddRefMediaElement();
    } else {
      // Dispatch Release asynchronously so that we don't destroy this object
      // inside a call stack of method calls on this object
      mMainThreadEventTarget->Dispatch(
        NewRunnableMethod("dom::HTMLMediaElement::DoRemoveSelfReference",
                          this,
                          &HTMLMediaElement::DoRemoveSelfReference));
    }
  }
}
6785 | | |
// Performs the actual release of the self-reference; dispatched
// asynchronously from AddRemoveSelfReference so that we never destroy
// ourselves while still on our own call stack.
void
HTMLMediaElement::DoRemoveSelfReference()
{
  mShutdownObserver->ReleaseMediaElement();
}
6791 | | |
// Called on XPCOM shutdown: tears down pending async work and drops the
// self-reference so the element can be destroyed.
void
HTMLMediaElement::NotifyShutdownEvent()
{
  mShuttingDown = true;
  // Since target thread had been shutdown, it's no chance to execute the Then()
  // afterward. Therefore, we should disconnect the request.
  mAutoplayPermissionRequest.DisconnectIfExists();
  ResetState();
  // mShuttingDown makes needSelfReference false, releasing the reference.
  AddRemoveSelfReference();
}
6802 | | |
6803 | | void |
6804 | | HTMLMediaElement::DispatchAsyncSourceError(nsIContent* aSourceElement) |
6805 | 0 | { |
6806 | 0 | LOG_EVENT(LogLevel::Debug, ("%p Queuing simple source error event", this)); |
6807 | 0 |
|
6808 | 0 | nsCOMPtr<nsIRunnable> event = |
6809 | 0 | new nsSourceErrorEventRunner(this, aSourceElement); |
6810 | 0 | mMainThreadEventTarget->Dispatch(event.forget()); |
6811 | 0 | } |
6812 | | |
6813 | | void |
6814 | | HTMLMediaElement::NotifyAddedSource() |
6815 | 0 | { |
6816 | 0 | // If a source element is inserted as a child of a media element |
6817 | 0 | // that has no src attribute and whose networkState has the value |
6818 | 0 | // NETWORK_EMPTY, the user agent must invoke the media element's |
6819 | 0 | // resource selection algorithm. |
6820 | 0 | if (!HasAttr(kNameSpaceID_None, nsGkAtoms::src) && |
6821 | 0 | mNetworkState == NETWORK_EMPTY) { |
6822 | 0 | AssertReadyStateIsNothing(); |
6823 | 0 | QueueSelectResourceTask(); |
6824 | 0 | } |
6825 | 0 |
|
6826 | 0 | // A load was paused in the resource selection algorithm, waiting for |
6827 | 0 | // a new source child to be added, resume the resource selection algorithm. |
6828 | 0 | if (mLoadWaitStatus == WAITING_FOR_SOURCE) { |
6829 | 0 | // Rest the flag so we don't queue multiple LoadFromSourceTask() when |
6830 | 0 | // multiple <source> are attached in an event loop. |
6831 | 0 | mLoadWaitStatus = NOT_WAITING; |
6832 | 0 | QueueLoadFromSourceTask(); |
6833 | 0 | } |
6834 | 0 | } |
6835 | | |
// Advances mSourcePointer through this element's children and returns the
// next <source> element candidate (also recorded in mSourceLoadCandidate),
// or null when the children are exhausted.
Element*
HTMLMediaElement::GetNextSource()
{
  mSourceLoadCandidate = nullptr;

  while (true) {
    // Reached the last child on a previous iteration: nothing left to try.
    if (mSourcePointer == nsINode::GetLastChild()) {
      return nullptr; // no more children
    }

    // Advance the cursor: start at the first child, else move to the next
    // sibling of the previously considered child.
    if (!mSourcePointer) {
      mSourcePointer = nsINode::GetFirstChild();
    } else {
      mSourcePointer = mSourcePointer->GetNextSibling();
    }
    nsIContent* child = mSourcePointer;

    // If child is a <source> element, it is the next candidate.
    if (child && child->IsHTMLElement(nsGkAtoms::source)) {
      mSourceLoadCandidate = child;
      return child->AsElement();
    }
    // Otherwise keep scanning; non-<source> children are skipped.
  }
  MOZ_ASSERT_UNREACHABLE("Execution should not reach here!");
  return nullptr;
}
6862 | | |
// Starts or stops blocking the owner document's "load" event while this
// media element is still loading enough data to fire loadeddata.
// @param aDelay true to block the document load event, false to unblock.
void
HTMLMediaElement::ChangeDelayLoadStatus(bool aDelay)
{
  // No state change: nothing to do.
  if (mDelayingLoadEvent == aDelay)
    return;

  mDelayingLoadEvent = aDelay;

  LOG(LogLevel::Debug,
      ("%p ChangeDelayLoadStatus(%d) doc=0x%p",
       this,
       aDelay,
       mLoadBlockedDoc.get()));
  if (mDecoder) {
    // While we delay the load event the load is in the foreground.
    mDecoder->SetLoadInBackground(!aDelay);
  }
  if (aDelay) {
    // Remember which document we blocked so we unblock the same one later,
    // even if we are moved between documents in the meantime.
    mLoadBlockedDoc = OwnerDoc();
    mLoadBlockedDoc->BlockOnload();
  } else {
    // mLoadBlockedDoc might be null due to GC unlinking
    if (mLoadBlockedDoc) {
      mLoadBlockedDoc->UnblockOnload(false);
      mLoadBlockedDoc = nullptr;
    }
  }

  // We changed mDelayingLoadEvent which can affect AddRemoveSelfReference
  AddRemoveSelfReference();
}
6893 | | |
6894 | | already_AddRefed<nsILoadGroup> |
6895 | | HTMLMediaElement::GetDocumentLoadGroup() |
6896 | 0 | { |
6897 | 0 | if (!OwnerDoc()->IsActive()) { |
6898 | 0 | NS_WARNING("Load group requested for media element in inactive document."); |
6899 | 0 | } |
6900 | 0 | return OwnerDoc()->GetDocumentLoadGroup(); |
6901 | 0 | } |
6902 | | |
6903 | | nsresult |
6904 | | HTMLMediaElement::CopyInnerTo(Element* aDest) |
6905 | 0 | { |
6906 | 0 | nsresult rv = nsGenericHTMLElement::CopyInnerTo(aDest); |
6907 | 0 | NS_ENSURE_SUCCESS(rv, rv); |
6908 | 0 | if (aDest->OwnerDoc()->IsStaticDocument()) { |
6909 | 0 | HTMLMediaElement* dest = static_cast<HTMLMediaElement*>(aDest); |
6910 | 0 | dest->SetMediaInfo(mMediaInfo); |
6911 | 0 | } |
6912 | 0 | return rv; |
6913 | 0 | } |
6914 | | |
6915 | | already_AddRefed<TimeRanges> |
6916 | | HTMLMediaElement::Buffered() const |
6917 | 0 | { |
6918 | 0 | media::TimeIntervals buffered = |
6919 | 0 | mDecoder ? mDecoder->GetBuffered() : media::TimeIntervals(); |
6920 | 0 | RefPtr<TimeRanges> ranges = new TimeRanges(ToSupports(OwnerDoc()), buffered); |
6921 | 0 | return ranges.forget(); |
6922 | 0 | } |
6923 | | |
// Configures the HTTP request headers (Accept, Accept-Encoding, Referer)
// used when fetching the media resource over the given channel.
void
HTMLMediaElement::SetRequestHeaders(nsIHttpChannel* aChannel)
{
  // Send Accept header for video and audio types only (Bug 489071)
  SetAcceptHeader(aChannel);

  // Apache doesn't send Content-Length when gzip transfer encoding is used,
  // which prevents us from estimating the video length (if explicit
  // Content-Duration and a length spec in the container are not present either)
  // and from seeking. So, disable the standard "Accept-Encoding: gzip,deflate"
  // that we usually send. See bug 614760.
  DebugOnly<nsresult> rv = aChannel->SetRequestHeader(
    NS_LITERAL_CSTRING("Accept-Encoding"), EmptyCString(), false);
  MOZ_ASSERT(NS_SUCCEEDED(rv));

  // Set the Referer header
  rv = aChannel->SetReferrerWithPolicy(OwnerDoc()->GetDocumentURI(),
                                       OwnerDoc()->GetReferrerPolicy());
  MOZ_ASSERT(NS_SUCCEEDED(rv));
}
6944 | | |
6945 | | void |
6946 | | HTMLMediaElement::FireTimeUpdate(bool aPeriodic) |
6947 | 0 | { |
6948 | 0 | NS_ASSERTION(NS_IsMainThread(), "Should be on main thread."); |
6949 | 0 |
|
6950 | 0 | TimeStamp now = TimeStamp::Now(); |
6951 | 0 | double time = CurrentTime(); |
6952 | 0 |
|
6953 | 0 | // Fire a timeupdate event if this is not a periodic update (i.e. it's a |
6954 | 0 | // timeupdate event mandated by the spec), or if it's a periodic update |
6955 | 0 | // and TIMEUPDATE_MS has passed since the last timeupdate event fired and |
6956 | 0 | // the time has changed. |
6957 | 0 | if (!aPeriodic || (mLastCurrentTime != time && |
6958 | 0 | (mTimeUpdateTime.IsNull() || |
6959 | 0 | now - mTimeUpdateTime >= |
6960 | 0 | TimeDuration::FromMilliseconds(TIMEUPDATE_MS)))) { |
6961 | 0 | DispatchAsyncEvent(NS_LITERAL_STRING("timeupdate")); |
6962 | 0 | mTimeUpdateTime = now; |
6963 | 0 | mLastCurrentTime = time; |
6964 | 0 | } |
6965 | 0 | if (mFragmentEnd >= 0.0 && time >= mFragmentEnd) { |
6966 | 0 | Pause(); |
6967 | 0 | mFragmentEnd = -1.0; |
6968 | 0 | mFragmentStart = -1.0; |
6969 | 0 | mDecoder->SetFragmentEndTime(mFragmentEnd); |
6970 | 0 | } |
6971 | 0 |
|
6972 | 0 | // Update the cues displaying on the video. |
6973 | 0 | // Here mTextTrackManager can be null if the cycle collector has unlinked |
6974 | 0 | // us before our parent. In that case UnbindFromTree will call us |
6975 | 0 | // when our parent is unlinked. |
6976 | 0 | if (mTextTrackManager) { |
6977 | 0 | mTextTrackManager->TimeMarchesOn(); |
6978 | 0 | } |
6979 | 0 | } |
6980 | | |
6981 | | MediaStream* |
6982 | | HTMLMediaElement::GetSrcMediaStream() const |
6983 | 0 | { |
6984 | 0 | if (!mSrcStream) { |
6985 | 0 | return nullptr; |
6986 | 0 | } |
6987 | 0 | return mSrcStream->GetPlaybackStream(); |
6988 | 0 | } |
6989 | | |
// Returns the element's current MediaError (null when no error occurred).
MediaError*
HTMLMediaElement::GetError() const
{
  return mErrorSink->mError;
}
6995 | | |
6996 | | void |
6997 | | HTMLMediaElement::GetCurrentSpec(nsCString& aString) |
6998 | 0 | { |
6999 | 0 | if (mLoadingSrc) { |
7000 | 0 | mLoadingSrc->GetSpec(aString); |
7001 | 0 | } else { |
7002 | 0 | aString.Truncate(); |
7003 | 0 | } |
7004 | 0 | } |
7005 | | |
7006 | | double |
7007 | | HTMLMediaElement::MozFragmentEnd() |
7008 | 0 | { |
7009 | 0 | double duration = Duration(); |
7010 | 0 |
|
7011 | 0 | // If there is no end fragment, or the fragment end is greater than the |
7012 | 0 | // duration, return the duration. |
7013 | 0 | return (mFragmentEnd < 0.0 || mFragmentEnd > duration) ? duration |
7014 | 0 | : mFragmentEnd; |
7015 | 0 | } |
7016 | | |
7017 | | static double |
7018 | | ClampPlaybackRate(double aPlaybackRate) |
7019 | 0 | { |
7020 | 0 | MOZ_ASSERT(aPlaybackRate >= 0.0); |
7021 | 0 |
|
7022 | 0 | if (aPlaybackRate == 0.0) { |
7023 | 0 | return aPlaybackRate; |
7024 | 0 | } |
7025 | 0 | if (aPlaybackRate < MIN_PLAYBACKRATE) { |
7026 | 0 | return MIN_PLAYBACKRATE; |
7027 | 0 | } |
7028 | 0 | if (aPlaybackRate > MAX_PLAYBACKRATE) { |
7029 | 0 | return MAX_PLAYBACKRATE; |
7030 | 0 | } |
7031 | 0 | return aPlaybackRate; |
7032 | 0 | } |
7033 | | |
7034 | | void |
7035 | | HTMLMediaElement::SetDefaultPlaybackRate(double aDefaultPlaybackRate, |
7036 | | ErrorResult& aRv) |
7037 | 0 | { |
7038 | 0 | if (aDefaultPlaybackRate < 0) { |
7039 | 0 | aRv.Throw(NS_ERROR_NOT_IMPLEMENTED); |
7040 | 0 | return; |
7041 | 0 | } |
7042 | 0 | |
7043 | 0 | mDefaultPlaybackRate = ClampPlaybackRate(aDefaultPlaybackRate); |
7044 | 0 | DispatchAsyncEvent(NS_LITERAL_STRING("ratechange")); |
7045 | 0 | } |
7046 | | |
// Sets the playback rate: rejects negative rates, mutes audio when the rate
// is outside the range where pitch-corrected audio is meaningful, forwards
// the (clamped) rate to the decoder, and fires "ratechange".
void
HTMLMediaElement::SetPlaybackRate(double aPlaybackRate, ErrorResult& aRv)
{
  // Changing the playback rate of a media that has more than two channels is
  // not supported.
  if (aPlaybackRate < 0) {
    aRv.Throw(NS_ERROR_DOM_NOT_SUPPORTED_ERR);
    return;
  }

  // No change: avoid a redundant "ratechange" event.
  if (mPlaybackRate == aPlaybackRate) {
    return;
  }

  mPlaybackRate = aPlaybackRate;

  // Rates far from 1.0 produce unintelligible audio; mute rather than play
  // garbled sound (tracked with a dedicated mute bit so it can be undone).
  if (mPlaybackRate != 0.0 &&
      (mPlaybackRate > THRESHOLD_HIGH_PLAYBACKRATE_AUDIO ||
       mPlaybackRate < THRESHOLD_LOW_PLAYBACKRATE_AUDIO)) {
    SetMutedInternal(mMuted | MUTED_BY_INVALID_PLAYBACK_RATE);
  } else {
    SetMutedInternal(mMuted & ~MUTED_BY_INVALID_PLAYBACK_RATE);
  }

  if (mDecoder) {
    // The decoder only receives the clamped rate; mPlaybackRate keeps the
    // author-specified value for the DOM attribute.
    mDecoder->SetPlaybackRate(ClampPlaybackRate(mPlaybackRate));
  }
  DispatchAsyncEvent(NS_LITERAL_STRING("ratechange"));
}
7076 | | |
7077 | | void |
7078 | | HTMLMediaElement::SetMozPreservesPitch(bool aPreservesPitch) |
7079 | 0 | { |
7080 | 0 | mPreservesPitch = aPreservesPitch; |
7081 | 0 | if (mDecoder) { |
7082 | 0 | mDecoder->SetPreservesPitch(mPreservesPitch); |
7083 | 0 | } |
7084 | 0 | } |
7085 | | |
7086 | | ImageContainer* |
7087 | | HTMLMediaElement::GetImageContainer() |
7088 | 0 | { |
7089 | 0 | VideoFrameContainer* container = GetVideoFrameContainer(); |
7090 | 0 | return container ? container->GetImageContainer() : nullptr; |
7091 | 0 | } |
7092 | | |
7093 | | void |
7094 | | HTMLMediaElement::UpdateAudioChannelPlayingState(bool aForcePlaying) |
7095 | 0 | { |
7096 | 0 | if (mAudioChannelWrapper) { |
7097 | 0 | mAudioChannelWrapper->UpdateAudioChannelPlayingState(aForcePlaying); |
7098 | 0 | } |
7099 | 0 | } |
7100 | | |
7101 | | bool |
7102 | | HTMLMediaElement::AudioChannelAgentBlockedPlay() |
7103 | 0 | { |
7104 | 0 | if (!mAudioChannelWrapper) { |
7105 | 0 | // If the mAudioChannelWrapper doesn't exist that means the CC happened. |
7106 | 0 | LOG(LogLevel::Debug, |
7107 | 0 | ("%p AudioChannelAgentBlockedPlay() returning true due to null " |
7108 | 0 | "AudioChannelAgent.", |
7109 | 0 | this)); |
7110 | 0 | return true; |
7111 | 0 | } |
7112 | 0 |
|
7113 | 0 | // Note: SUSPENDED_PAUSE and SUSPENDED_BLOCK will be merged into one single |
7114 | 0 | // state. |
7115 | 0 | const auto suspendType = mAudioChannelWrapper->GetSuspendType(); |
7116 | 0 | return suspendType == nsISuspendedTypes::SUSPENDED_PAUSE || |
7117 | 0 | suspendType == nsISuspendedTypes::SUSPENDED_BLOCK; |
7118 | 0 | } |
7119 | | |
7120 | | static const char* |
7121 | | VisibilityString(Visibility aVisibility) |
7122 | 0 | { |
7123 | 0 | switch (aVisibility) { |
7124 | 0 | case Visibility::UNTRACKED: { |
7125 | 0 | return "UNTRACKED"; |
7126 | 0 | } |
7127 | 0 | case Visibility::APPROXIMATELY_NONVISIBLE: { |
7128 | 0 | return "APPROXIMATELY_NONVISIBLE"; |
7129 | 0 | } |
7130 | 0 | case Visibility::APPROXIMATELY_VISIBLE: { |
7131 | 0 | return "APPROXIMATELY_VISIBLE"; |
7132 | 0 | } |
7133 | 0 | } |
7134 | 0 | |
7135 | 0 | return "NAN"; |
7136 | 0 | } |
7137 | | |
// Reacts to an approximate-visibility change of our frame: records the new
// state, maintains the hidden-video telemetry timer, and informs the
// decoder (which may suspend/resume decoding accordingly).
void
HTMLMediaElement::OnVisibilityChange(Visibility aNewVisibility)
{
  LOG(LogLevel::Debug,
      ("OnVisibilityChange(): %s\n", VisibilityString(aNewVisibility)));

  mVisibilityState = aNewVisibility;

  // Without a decoder there is no suspend/telemetry work to do.
  if (!mDecoder) {
    return;
  }

  switch (aNewVisibility) {
    case Visibility::UNTRACKED: {
      MOZ_ASSERT_UNREACHABLE("Shouldn't notify for untracked visibility");
      return;
    }
    case Visibility::APPROXIMATELY_NONVISIBLE: {
      if (mPlayTime.IsStarted()) {
        // Not visible, play time is running -> Start hidden play time if
        // needed.
        HiddenVideoStart();
      }
      break;
    }
    case Visibility::APPROXIMATELY_VISIBLE: {
      // Visible -> Just pause hidden play time (no-op if already paused).
      HiddenVideoStop();
      break;
    }
  }

  NotifyDecoderActivityChanges();
}
7172 | | |
// Returns the MediaKeys (EME) object associated with this element, or null.
MediaKeys*
HTMLMediaElement::GetMediaKeys() const
{
  return mMediaKeys;
}
7178 | | |
// The element carries DRM-restricted content exactly when MediaKeys are set.
bool
HTMLMediaElement::ContainsRestrictedContent()
{
  return GetMediaKeys() != nullptr;
}
7184 | | |
7185 | | void |
7186 | | HTMLMediaElement::SetCDMProxyFailure(const MediaResult& aResult) |
7187 | 0 | { |
7188 | 0 | LOG(LogLevel::Debug, ("%s", __func__)); |
7189 | 0 | MOZ_ASSERT(mSetMediaKeysDOMPromise); |
7190 | 0 |
|
7191 | 0 | ResetSetMediaKeysTempVariables(); |
7192 | 0 |
|
7193 | 0 | mSetMediaKeysDOMPromise->MaybeReject(aResult.Code(), aResult.Message()); |
7194 | 0 | } |
7195 | | |
// Severs the association between this element and its MediaKeys object.
void
HTMLMediaElement::RemoveMediaKeys()
{
  LOG(LogLevel::Debug, ("%s", __func__));
  // 5.2.3 Stop using the CDM instance represented by the mediaKeys attribute
  // to decrypt media data and remove the association with the media element.
  if (mMediaKeys) {
    mMediaKeys->Unbind();
  }
  mMediaKeys = nullptr;
}
7207 | | |
// Attempts to detach the current MediaKeys/CDM from this element. With a
// decoder this happens asynchronously (SetCDMProxy(nullptr)) and continues
// the setMediaKeys() algorithm from its callbacks.
// @return true if the removal completed synchronously; false if it is in
//         flight (or failed) and the promise will be settled later.
bool
HTMLMediaElement::TryRemoveMediaKeysAssociation()
{
  MOZ_ASSERT(mMediaKeys);
  LOG(LogLevel::Debug, ("%s", __func__));
  // 5.2.1 If the user agent or CDM do not support removing the association,
  // let this object's attaching media keys value be false and reject promise
  // with a new DOMException whose name is NotSupportedError.
  // 5.2.2 If the association cannot currently be removed, let this object's
  // attaching media keys value be false and reject promise with a new
  // DOMException whose name is InvalidStateError.
  if (mDecoder) {
    // Keep ourselves alive across the async boundary.
    RefPtr<HTMLMediaElement> self = this;
    mDecoder->SetCDMProxy(nullptr)
      ->Then(mAbstractMainThread,
             __func__,
             [self]() {
               self->mSetCDMRequest.Complete();

               // Detach succeeded: continue by attaching the incoming keys.
               self->RemoveMediaKeys();
               if (self->AttachNewMediaKeys()) {
                 // No incoming MediaKeys object or MediaDecoder is not created
                 // yet.
                 self->MakeAssociationWithCDMResolved();
               }
             },
             [self](const MediaResult& aResult) {
               self->mSetCDMRequest.Complete();
               // 5.2.4 If the preceding step failed, let this object's
               // attaching media keys value be false and reject promise with a
               // new DOMException whose name is the appropriate error name.
               self->SetCDMProxyFailure(aResult);
             })
      ->Track(mSetCDMRequest);
    return false;
  }

  // No decoder: the association can be dropped synchronously.
  RemoveMediaKeys();
  return true;
}
7248 | | |
// First phase of setMediaKeys(): validates the incoming MediaKeys and, if a
// MediaKeys object is currently attached, starts detaching it.
// @return true if processing can continue synchronously; false if the
//         promise has been rejected or the detach is asynchronous.
bool
HTMLMediaElement::DetachExistingMediaKeys()
{
  LOG(LogLevel::Debug, ("%s", __func__));
  MOZ_ASSERT(mSetMediaKeysDOMPromise);
  // 5.1 If mediaKeys is not null, CDM instance represented by mediaKeys is
  // already in use by another media element, and the user agent is unable
  // to use it with this element, let this object's attaching media keys
  // value be false and reject promise with a new DOMException whose name
  // is QuotaExceededError.
  if (mIncomingMediaKeys && mIncomingMediaKeys->IsBoundToMediaElement()) {
    SetCDMProxyFailure(MediaResult(
      NS_ERROR_DOM_QUOTA_EXCEEDED_ERR,
      "MediaKeys object is already bound to another HTMLMediaElement"));
    return false;
  }

  // 5.2 If the mediaKeys attribute is not null, run the following steps:
  if (mMediaKeys) {
    return TryRemoveMediaKeysAssociation();
  }
  return true;
}
7272 | | |
// Final phase of setMediaKeys(): commits the incoming MediaKeys as the
// current one, clears transient state, and resolves the DOM promise.
void
HTMLMediaElement::MakeAssociationWithCDMResolved()
{
  LOG(LogLevel::Debug, ("%s", __func__));
  MOZ_ASSERT(mSetMediaKeysDOMPromise);

  // 5.4 Set the mediaKeys attribute to mediaKeys.
  mMediaKeys = mIncomingMediaKeys;
  // 5.5 Let this object's attaching media keys value be false.
  ResetSetMediaKeysTempVariables();
  // 5.6 Resolve promise.
  mSetMediaKeysDOMPromise->MaybeResolveWithUndefined();
  mSetMediaKeysDOMPromise = nullptr;
}
7287 | | |
// Hands the CDM proxy to the decoder so decryption can start. With a
// decoder this is asynchronous and the promise is settled from the
// callbacks; without one the association is trivially complete.
// @return true if the association completed synchronously.
bool
HTMLMediaElement::TryMakeAssociationWithCDM(CDMProxy* aProxy)
{
  LOG(LogLevel::Debug, ("%s", __func__));
  MOZ_ASSERT(aProxy);

  // 5.3.3 Queue a task to run the "Attempt to Resume Playback If Necessary"
  // algorithm on the media element.
  // Note: Setting the CDMProxy on the MediaDecoder will unblock playback.
  if (mDecoder) {
    // CDMProxy is set asynchronously in MediaFormatReader, once it's done,
    // HTMLMediaElement should resolve or reject the DOM promise.
    RefPtr<HTMLMediaElement> self = this;
    mDecoder->SetCDMProxy(aProxy)
      ->Then(mAbstractMainThread,
             __func__,
             [self]() {
               self->mSetCDMRequest.Complete();
               self->MakeAssociationWithCDMResolved();
             },
             [self](const MediaResult& aResult) {
               self->mSetCDMRequest.Complete();
               self->SetCDMProxyFailure(aResult);
             })
      ->Track(mSetCDMRequest);
    return false;
  }
  return true;
}
7317 | | |
7318 | | bool |
7319 | | HTMLMediaElement::AttachNewMediaKeys() |
7320 | 0 | { |
7321 | 0 | LOG(LogLevel::Debug, |
7322 | 0 | ("%s incoming MediaKeys(%p)", __func__, mIncomingMediaKeys.get())); |
7323 | 0 | MOZ_ASSERT(mSetMediaKeysDOMPromise); |
7324 | 0 |
|
7325 | 0 | // 5.3. If mediaKeys is not null, run the following steps: |
7326 | 0 | if (mIncomingMediaKeys) { |
7327 | 0 | auto cdmProxy = mIncomingMediaKeys->GetCDMProxy(); |
7328 | 0 | if (!cdmProxy) { |
7329 | 0 | SetCDMProxyFailure(MediaResult( |
7330 | 0 | NS_ERROR_DOM_INVALID_STATE_ERR, |
7331 | 0 | "CDM crashed before binding MediaKeys object to HTMLMediaElement")); |
7332 | 0 | return false; |
7333 | 0 | } |
7334 | 0 | |
7335 | 0 | // 5.3.1 Associate the CDM instance represented by mediaKeys with the |
7336 | 0 | // media element for decrypting media data. |
7337 | 0 | if (NS_FAILED(mIncomingMediaKeys->Bind(this))) { |
7338 | 0 | // 5.3.2 If the preceding step failed, run the following steps: |
7339 | 0 |
|
7340 | 0 | // 5.3.2.1 Set the mediaKeys attribute to null. |
7341 | 0 | mMediaKeys = nullptr; |
7342 | 0 | // 5.3.2.2 Let this object's attaching media keys value be false. |
7343 | 0 | // 5.3.2.3 Reject promise with a new DOMException whose name is |
7344 | 0 | // the appropriate error name. |
7345 | 0 | SetCDMProxyFailure( |
7346 | 0 | MediaResult(NS_ERROR_DOM_INVALID_STATE_ERR, |
7347 | 0 | "Failed to bind MediaKeys object to HTMLMediaElement")); |
7348 | 0 | return false; |
7349 | 0 | } |
7350 | 0 | return TryMakeAssociationWithCDM(cdmProxy); |
7351 | 0 | } |
7352 | 0 | return true; |
7353 | 0 | } |
7354 | | |
7355 | | void |
7356 | | HTMLMediaElement::ResetSetMediaKeysTempVariables() |
7357 | 0 | { |
7358 | 0 | mAttachingMediaKey = false; |
7359 | 0 | mIncomingMediaKeys = nullptr; |
7360 | 0 | } |
7361 | | |
// WebIDL implementation of HTMLMediaElement.setMediaKeys(mediaKeys).
// Follows the numbered steps of the EME specification's "setMediaKeys"
// algorithm (comments below cite the step numbers). Returns the DOM promise
// that resolves once the MediaKeys/CDM association completes, or rejects on
// failure. aRv is only thrown for infrastructure failures (no window,
// audio-captured element, promise creation failure).
already_AddRefed<Promise>
HTMLMediaElement::SetMediaKeys(mozilla::dom::MediaKeys* aMediaKeys,
                               ErrorResult& aRv)
{
  LOG(LogLevel::Debug,
      ("%p SetMediaKeys(%p) mMediaKeys=%p mDecoder=%p",
       this,
       aMediaKeys,
       mMediaKeys.get(),
       mDecoder.get()));

  // EME is not supported on elements whose audio is being captured.
  if (MozAudioCaptured()) {
    aRv.Throw(NS_ERROR_DOM_NOT_SUPPORTED_ERR);
    return nullptr;
  }

  nsPIDOMWindowInner* win = OwnerDoc()->GetInnerWindow();
  if (!win) {
    aRv.Throw(NS_ERROR_UNEXPECTED);
    return nullptr;
  }
  RefPtr<DetailedPromise> promise = DetailedPromise::Create(
    win->AsGlobal(), aRv, NS_LITERAL_CSTRING("HTMLMediaElement.setMediaKeys"));
  if (aRv.Failed()) {
    return nullptr;
  }

  // 1. If mediaKeys and the mediaKeys attribute are the same object,
  // return a resolved promise.
  if (mMediaKeys == aMediaKeys) {
    promise->MaybeResolveWithUndefined();
    return promise.forget();
  }

  // 2. If this object's attaching media keys value is true, return a
  // promise rejected with a new DOMException whose name is InvalidStateError.
  if (mAttachingMediaKey) {
    promise->MaybeReject(
      NS_ERROR_DOM_INVALID_STATE_ERR,
      NS_LITERAL_CSTRING("A MediaKeys object is in attaching operation."));
    return promise.forget();
  }

  // 3. Let this object's attaching media keys value be true.
  mAttachingMediaKey = true;
  mIncomingMediaKeys = aMediaKeys;

  // 4. Let promise be a new promise.
  mSetMediaKeysDOMPromise = promise;

  // 5. Run the following steps in parallel:

  // 5.1 & 5.2 & 5.3
  // On failure these helpers have already rejected mSetMediaKeysDOMPromise;
  // they also return false when completion is asynchronous, in which case
  // the promise is settled later from the MozPromise callbacks.
  if (!DetachExistingMediaKeys() || !AttachNewMediaKeys()) {
    return promise.forget();
  }

  // 5.4, 5.5, 5.6
  MakeAssociationWithCDMResolved();

  // 6. Return promise.
  return promise.forget();
}
7425 | | |
// WebIDL getter for the `onencrypted` event handler attribute.
EventHandlerNonNull*
HTMLMediaElement::GetOnencrypted()
{
  return EventTarget::GetEventHandler(nsGkAtoms::onencrypted);
}
7431 | | |
// WebIDL setter for the `onencrypted` event handler attribute.
void
HTMLMediaElement::SetOnencrypted(EventHandlerNonNull* aCallback)
{
  EventTarget::SetEventHandler(nsGkAtoms::onencrypted, aCallback);
}
7437 | | |
// WebIDL getter for the `onwaitingforkey` event handler attribute.
EventHandlerNonNull*
HTMLMediaElement::GetOnwaitingforkey()
{
  return EventTarget::GetEventHandler(nsGkAtoms::onwaitingforkey);
}
7443 | | |
// WebIDL setter for the `onwaitingforkey` event handler attribute.
void
HTMLMediaElement::SetOnwaitingforkey(EventHandlerNonNull* aCallback)
{
  EventTarget::SetEventHandler(nsGkAtoms::onwaitingforkey, aCallback);
}
7449 | | |
// Dispatches (or queues) the EME `encrypted` event carrying aInitData.
// If metadata hasn't loaded yet the init data is stashed in
// mPendingEncryptedInitData and dispatched later from MetadataLoaded.
// Init data is only exposed to same-origin (CORS-clean) content; otherwise
// an event without data is fired.
void
HTMLMediaElement::DispatchEncrypted(const nsTArray<uint8_t>& aInitData,
                                    const nsAString& aInitDataType)
{
  LOG(LogLevel::Debug,
      ("%p DispatchEncrypted initDataType='%s'",
       this,
       NS_ConvertUTF16toUTF8(aInitDataType).get()));

  if (mReadyState == HAVE_NOTHING) {
    // Ready state not HAVE_METADATA (yet), don't dispatch encrypted now.
    // Queueing for later dispatch in MetadataLoaded.
    mPendingEncryptedInitData.AddInitData(aInitDataType, aInitData);
    return;
  }

  RefPtr<MediaEncryptedEvent> event;
  if (IsCORSSameOrigin()) {
    event = MediaEncryptedEvent::Constructor(this, aInitDataType, aInitData);
  } else {
    // Cross-origin: don't leak init data; fire a data-less event.
    event = MediaEncryptedEvent::Constructor(this);
  }

  RefPtr<AsyncEventDispatcher> asyncDispatcher =
    new AsyncEventDispatcher(this, event);
  asyncDispatcher->PostDOMEvent();
}
7477 | | |
7478 | | bool |
7479 | | HTMLMediaElement::IsEventAttributeNameInternal(nsAtom* aName) |
7480 | 0 | { |
7481 | 0 | return aName == nsGkAtoms::onencrypted || |
7482 | 0 | nsGenericHTMLElement::IsEventAttributeNameInternal(aName); |
7483 | 0 | } |
7484 | | |
7485 | | already_AddRefed<nsIPrincipal> |
7486 | | HTMLMediaElement::GetTopLevelPrincipal() |
7487 | 0 | { |
7488 | 0 | RefPtr<nsIPrincipal> principal; |
7489 | 0 | nsCOMPtr<nsPIDOMWindowInner> window = OwnerDoc()->GetInnerWindow(); |
7490 | 0 | if (!window) { |
7491 | 0 | return nullptr; |
7492 | 0 | } |
7493 | 0 | // XXXkhuey better hope we always have an outer ... |
7494 | 0 | nsCOMPtr<nsPIDOMWindowOuter> top = window->GetOuterWindow()->GetTop(); |
7495 | 0 | if (!top) { |
7496 | 0 | return nullptr; |
7497 | 0 | } |
7498 | 0 | nsIDocument* doc = top->GetExtantDoc(); |
7499 | 0 | if (!doc) { |
7500 | 0 | return nullptr; |
7501 | 0 | } |
7502 | 0 | principal = doc->NodePrincipal(); |
7503 | 0 | return principal.forget(); |
7504 | 0 | } |
7505 | | |
// Called by the decoder when playback is blocked waiting for a decryption
// key. Implements the "Queue a waitingforkey Event" steps of the EME spec;
// the actual event firing happens later in UpdateReadyStateInternal() once
// all already-decoded data has been consumed.
void
HTMLMediaElement::NotifyWaitingForKey()
{
  LOG(LogLevel::Debug, ("%p, NotifyWaitingForKey()", this));

  // http://w3c.github.io/encrypted-media/#wait-for-key
  // 7.3.4 Queue a "waitingforkey" Event
  // 1. Let the media element be the specified HTMLMediaElement object.
  // 2. If the media element's waiting for key value is true, abort these steps.
  if (mWaitingForKey == NOT_WAITING_FOR_KEY) {
    // 3. Set the media element's waiting for key value to true.
    // Note: algorithm continues in UpdateReadyStateInternal() when all decoded
    // data enqueued in the MDSM is consumed.
    mWaitingForKey = WAITING_FOR_KEY;
    UpdateReadyStateInternal();
  }
}
7523 | | |
7524 | | AudioTrackList* |
7525 | | HTMLMediaElement::AudioTracks() |
7526 | 0 | { |
7527 | 0 | if (!mAudioTrackList) { |
7528 | 0 | nsCOMPtr<nsPIDOMWindowInner> window = |
7529 | 0 | do_QueryInterface(OwnerDoc()->GetParentObject()); |
7530 | 0 | mAudioTrackList = new AudioTrackList(window, this); |
7531 | 0 | } |
7532 | 0 | return mAudioTrackList; |
7533 | 0 | } |
7534 | | |
7535 | | VideoTrackList* |
7536 | | HTMLMediaElement::VideoTracks() |
7537 | 0 | { |
7538 | 0 | if (!mVideoTrackList) { |
7539 | 0 | nsCOMPtr<nsPIDOMWindowInner> window = |
7540 | 0 | do_QueryInterface(OwnerDoc()->GetParentObject()); |
7541 | 0 | mVideoTrackList = new VideoTrackList(window, this); |
7542 | 0 | } |
7543 | 0 | return mVideoTrackList; |
7544 | 0 | } |
7545 | | |
// WebIDL getter for `textTracks`; creates the TextTrackManager on demand.
TextTrackList*
HTMLMediaElement::GetTextTracks()
{
  return GetOrCreateTextTrackManager()->GetTextTracks();
}
7551 | | |
// WebIDL implementation of addTextTrack(kind, label, language).
// Per spec, script-created tracks start in Hidden mode and are considered
// Loaded (they have no out-of-band resource to fetch).
already_AddRefed<TextTrack>
HTMLMediaElement::AddTextTrack(TextTrackKind aKind,
                               const nsAString& aLabel,
                               const nsAString& aLanguage)
{
  return GetOrCreateTextTrackManager()->AddTextTrack(
    aKind,
    aLabel,
    aLanguage,
    TextTrackMode::Hidden,
    TextTrackReadyState::Loaded,
    TextTrackSource::AddTextTrack);
}
7565 | | |
7566 | | void |
7567 | | HTMLMediaElement::PopulatePendingTextTrackList() |
7568 | 0 | { |
7569 | 0 | if (mTextTrackManager) { |
7570 | 0 | mTextTrackManager->PopulatePendingList(); |
7571 | 0 | } |
7572 | 0 | } |
7573 | | |
7574 | | TextTrackManager* |
7575 | | HTMLMediaElement::GetOrCreateTextTrackManager() |
7576 | 0 | { |
7577 | 0 | if (!mTextTrackManager) { |
7578 | 0 | mTextTrackManager = new TextTrackManager(this); |
7579 | 0 | mTextTrackManager->AddListeners(); |
7580 | 0 | } |
7581 | 0 | return mTextTrackManager; |
7582 | 0 | } |
7583 | | |
7584 | | MediaDecoderOwner::NextFrameStatus |
7585 | | HTMLMediaElement::NextFrameStatus() |
7586 | 0 | { |
7587 | 0 | if (mDecoder) { |
7588 | 0 | return mDecoder->NextFrameStatus(); |
7589 | 0 | } else if (mMediaStreamListener) { |
7590 | 0 | return mMediaStreamListener->NextFrameStatus(); |
7591 | 0 | } |
7592 | 0 | return NEXT_FRAME_UNINITIALIZED; |
7593 | 0 | } |
7594 | | |
7595 | | void |
7596 | | HTMLMediaElement::SetDecoder(MediaDecoder* aDecoder) |
7597 | 0 | { |
7598 | 0 | MOZ_ASSERT(aDecoder); // Use ShutdownDecoder() to clear. |
7599 | 0 | if (mDecoder) { |
7600 | 0 | ShutdownDecoder(); |
7601 | 0 | } |
7602 | 0 | mDecoder = aDecoder; |
7603 | 0 | DDLINKCHILD("decoder", mDecoder.get()); |
7604 | 0 | if (mDecoder && mForcedHidden) { |
7605 | 0 | mDecoder->SetForcedHidden(mForcedHidden); |
7606 | 0 | } |
7607 | 0 | } |
7608 | | |
7609 | | float |
7610 | | HTMLMediaElement::ComputedVolume() const |
7611 | 0 | { |
7612 | 0 | return mMuted |
7613 | 0 | ? 0.0f |
7614 | 0 | : mAudioChannelWrapper ? mAudioChannelWrapper->GetEffectiveVolume() |
7615 | 0 | : mVolume; |
7616 | 0 | } |
7617 | | |
// True only when the mute originates from the audio channel service;
// mMuted is a bitmask and other mute reasons do not count here.
bool
HTMLMediaElement::ComputedMuted() const
{
  return (mMuted & MUTED_BY_AUDIO_CHANNEL);
}
7623 | | |
7624 | | nsSuspendedTypes |
7625 | | HTMLMediaElement::ComputedSuspended() const |
7626 | 0 | { |
7627 | 0 | return mAudioChannelWrapper ? mAudioChannelWrapper->GetSuspendType() |
7628 | 0 | : nsISuspendedTypes::NONE_SUSPENDED; |
7629 | 0 | } |
7630 | | |
// Whether the element is actively producing frames: it must have at least
// current-position data and not have reached the end of playback.
bool
HTMLMediaElement::IsCurrentlyPlaying() const
{
  // We have playable data, but we still need to check whether data is "real"
  // current data.
  return mReadyState >= HAVE_CURRENT_DATA && !IsPlaybackEnded();
}
7638 | | |
// Records whether the audio track is currently producing audible output.
// On a transition, updates the accumulated silence ranges (for telemetry)
// and notifies the audio channel service of the audibility change.
void
HTMLMediaElement::SetAudibleState(bool aAudible)
{
  if (mIsAudioTrackAudible != aAudible) {
    // Update silence bookkeeping before flipping the flag — it needs the
    // previous state's start time.
    UpdateAudioTrackSilenceRange(aAudible);
    mIsAudioTrackAudible = aAudible;
    NotifyAudioPlaybackChanged(
      AudioChannelService::AudibleChangedReasons::eDataAudibleChanged);
  }
}
7649 | | |
// True when the element has an audio track but it is producing silence.
// Elements without audio are never considered "silent".
bool
HTMLMediaElement::IsAudioTrackCurrentlySilent() const
{
  return HasAudio() && !mIsAudioTrackAudible;
}
7655 | | |
7656 | | void |
7657 | | HTMLMediaElement::UpdateAudioTrackSilenceRange(bool aAudible) |
7658 | 0 | { |
7659 | 0 | if (!HasAudio()) { |
7660 | 0 | return; |
7661 | 0 | } |
7662 | 0 | |
7663 | 0 | if (!aAudible) { |
7664 | 0 | mAudioTrackSilenceStartedTime = CurrentTime(); |
7665 | 0 | return; |
7666 | 0 | } |
7667 | 0 | |
7668 | 0 | AccumulateAudioTrackSilence(); |
7669 | 0 | } |
7670 | | |
// Adds the interval [mAudioTrackSilenceStartedTime, CurrentTime()] to the
// set of silent ranges. Skips inverted intervals, which can occur after a
// backwards seek while silent.
void
HTMLMediaElement::AccumulateAudioTrackSilence()
{
  MOZ_ASSERT(HasAudio());
  const double current = CurrentTime();
  if (current < mAudioTrackSilenceStartedTime) {
    return;
  }
  const auto start = media::TimeUnit::FromSeconds(mAudioTrackSilenceStartedTime);
  const auto end = media::TimeUnit::FromSeconds(current);
  mSilenceTimeRanges += media::TimeInterval(start, end);
}
7683 | | |
// Reports, via telemetry, the percentage of played time during which the
// audio track was silent. Does nothing when there is no audio, nothing was
// played, or no silence was recorded.
void
HTMLMediaElement::ReportAudioTrackSilenceProportionTelemetry()
{
  if (!HasAudio()) {
    return;
  }

  // Add last silence range to our ranges set.
  if (!mIsAudioTrackAudible) {
    AccumulateAudioTrackSilence();
  }

  RefPtr<TimeRanges> ranges = Played();
  const uint32_t lengthPlayedRange = ranges->Length();
  const uint32_t lengthSilenceRange = mSilenceTimeRanges.Length();
  if (!lengthPlayedRange || !lengthSilenceRange) {
    return;
  }

  // Sum the played and silent durations across their respective range sets.
  double playedTime = 0.0, silenceTime = 0.0;
  for (uint32_t idx = 0; idx < lengthPlayedRange; idx++) {
    playedTime += ranges->End(idx) - ranges->Start(idx);
  }

  for (uint32_t idx = 0; idx < lengthSilenceRange; idx++) {
    silenceTime +=
      mSilenceTimeRanges.End(idx).ToSeconds() - mSilenceTimeRanges.Start(idx).ToSeconds();
  }

  double silenceProportion = (silenceTime / playedTime) * 100;
  // silenceProportion should be in the range [0, 100]
  silenceProportion = std::min(100.0, std::max(silenceProportion, 0.0));
  Telemetry::Accumulate(Telemetry::AUDIO_TRACK_SILENCE_PROPORTION,
                        silenceProportion);
}
7719 | | |
// Forwards an audibility change to the audio channel service and refreshes
// the wake lock, which is only held while audible media is playing.
void
HTMLMediaElement::NotifyAudioPlaybackChanged(AudibleChangedReasons aReason)
{
  if (mAudioChannelWrapper) {
    mAudioChannelWrapper->NotifyAudioPlaybackChanged(aReason);
  }
  // only request wake lock for audible media.
  UpdateWakeLock();
}
7729 | | |
7730 | | bool |
7731 | | HTMLMediaElement::ShouldElementBePaused() |
7732 | 0 | { |
7733 | 0 | // Bfcached page or inactive document. |
7734 | 0 | if (!IsActive()) { |
7735 | 0 | return true; |
7736 | 0 | } |
7737 | 0 | |
7738 | 0 | return false; |
7739 | 0 | } |
7740 | | |
7741 | | void |
7742 | | HTMLMediaElement::SetMediaInfo(const MediaInfo& aInfo) |
7743 | 0 | { |
7744 | 0 | const bool oldHasAudio = mMediaInfo.HasAudio(); |
7745 | 0 | mMediaInfo = aInfo; |
7746 | 0 | if (aInfo.HasAudio() != oldHasAudio) { |
7747 | 0 | UpdateAudioChannelPlayingState(); |
7748 | 0 | NotifyAudioPlaybackChanged( |
7749 | 0 | AudioChannelService::AudibleChangedReasons::eDataAudibleChanged); |
7750 | 0 | } |
7751 | 0 | if (mAudioChannelWrapper) { |
7752 | 0 | mAudioChannelWrapper->AudioCaptureStreamChangeIfNeeded(); |
7753 | 0 | } |
7754 | 0 | } |
7755 | | |
7756 | | void |
7757 | | HTMLMediaElement::AudioCaptureStreamChange(bool aCapture) |
7758 | 0 | { |
7759 | 0 | // No need to capture a silence media element. |
7760 | 0 | if (!HasAudio()) { |
7761 | 0 | return; |
7762 | 0 | } |
7763 | 0 | |
7764 | 0 | if (aCapture && !mCaptureStreamPort) { |
7765 | 0 | nsCOMPtr<nsPIDOMWindowInner> window = OwnerDoc()->GetInnerWindow(); |
7766 | 0 | if (!OwnerDoc()->GetInnerWindow()) { |
7767 | 0 | return; |
7768 | 0 | } |
7769 | 0 | |
7770 | 0 | uint64_t id = window->WindowID(); |
7771 | 0 | MediaStreamGraph* msg = MediaStreamGraph::GetInstance( |
7772 | 0 | MediaStreamGraph::AUDIO_THREAD_DRIVER, |
7773 | 0 | window, |
7774 | 0 | MediaStreamGraph::REQUEST_DEFAULT_SAMPLE_RATE); |
7775 | 0 |
|
7776 | 0 | if (GetSrcMediaStream()) { |
7777 | 0 | mCaptureStreamPort = msg->ConnectToCaptureStream(id, GetSrcMediaStream()); |
7778 | 0 | } else { |
7779 | 0 | RefPtr<DOMMediaStream> stream = |
7780 | 0 | CaptureStreamInternal(StreamCaptureBehavior::CONTINUE_WHEN_ENDED, |
7781 | 0 | StreamCaptureType::CAPTURE_AUDIO, |
7782 | 0 | msg); |
7783 | 0 | mCaptureStreamPort = |
7784 | 0 | msg->ConnectToCaptureStream(id, stream->GetPlaybackStream()); |
7785 | 0 | } |
7786 | 0 | } else if (!aCapture && mCaptureStreamPort) { |
7787 | 0 | if (mDecoder) { |
7788 | 0 | ProcessedMediaStream* ps = |
7789 | 0 | mCaptureStreamPort->GetSource()->AsProcessedStream(); |
7790 | 0 | MOZ_ASSERT(ps); |
7791 | 0 |
|
7792 | 0 | for (uint32_t i = 0; i < mOutputStreams.Length(); i++) { |
7793 | 0 | if (mOutputStreams[i].mStream->GetPlaybackStream() == ps) { |
7794 | 0 | mOutputStreams.RemoveElementAt(i); |
7795 | 0 | break; |
7796 | 0 | } |
7797 | 0 | } |
7798 | 0 | mDecoder->RemoveOutputStream(ps); |
7799 | 0 | } |
7800 | 0 | mCaptureStreamPort->Destroy(); |
7801 | 0 | mCaptureStreamPort = nullptr; |
7802 | 0 | } |
7803 | 0 | } |
7804 | | |
7805 | | void |
7806 | | HTMLMediaElement::NotifyCueDisplayStatesChanged() |
7807 | 0 | { |
7808 | 0 | if (!mTextTrackManager) { |
7809 | 0 | return; |
7810 | 0 | } |
7811 | 0 | |
7812 | 0 | mTextTrackManager->DispatchUpdateCueDisplay(); |
7813 | 0 | } |
7814 | | |
7815 | | void |
7816 | | HTMLMediaElement::MarkAsContentSource(CallerAPI aAPI) |
7817 | 0 | { |
7818 | 0 | const bool isVisible = mVisibilityState == Visibility::APPROXIMATELY_VISIBLE; |
7819 | 0 |
|
7820 | 0 | if (isVisible) { |
7821 | 0 | // 0 = ALL_VISIBLE |
7822 | 0 | Telemetry::Accumulate(Telemetry::VIDEO_AS_CONTENT_SOURCE, 0); |
7823 | 0 | } else { |
7824 | 0 | // 1 = ALL_INVISIBLE |
7825 | 0 | Telemetry::Accumulate(Telemetry::VIDEO_AS_CONTENT_SOURCE, 1); |
7826 | 0 |
|
7827 | 0 | if (IsInComposedDoc()) { |
7828 | 0 | // 0 = ALL_IN_TREE |
7829 | 0 | Telemetry::Accumulate(Telemetry::VIDEO_AS_CONTENT_SOURCE_IN_TREE_OR_NOT, |
7830 | 0 | 0); |
7831 | 0 | } else { |
7832 | 0 | // 1 = ALL_NOT_IN_TREE |
7833 | 0 | Telemetry::Accumulate(Telemetry::VIDEO_AS_CONTENT_SOURCE_IN_TREE_OR_NOT, |
7834 | 0 | 1); |
7835 | 0 | } |
7836 | 0 | } |
7837 | 0 |
|
7838 | 0 | switch (aAPI) { |
7839 | 0 | case CallerAPI::DRAW_IMAGE: { |
7840 | 0 | if (isVisible) { |
7841 | 0 | // 2 = drawImage_VISIBLE |
7842 | 0 | Telemetry::Accumulate(Telemetry::VIDEO_AS_CONTENT_SOURCE, 2); |
7843 | 0 | } else { |
7844 | 0 | // 3 = drawImage_INVISIBLE |
7845 | 0 | Telemetry::Accumulate(Telemetry::VIDEO_AS_CONTENT_SOURCE, 3); |
7846 | 0 |
|
7847 | 0 | if (IsInComposedDoc()) { |
7848 | 0 | // 2 = drawImage_IN_TREE |
7849 | 0 | Telemetry::Accumulate( |
7850 | 0 | Telemetry::VIDEO_AS_CONTENT_SOURCE_IN_TREE_OR_NOT, 2); |
7851 | 0 | } else { |
7852 | 0 | // 3 = drawImage_NOT_IN_TREE |
7853 | 0 | Telemetry::Accumulate( |
7854 | 0 | Telemetry::VIDEO_AS_CONTENT_SOURCE_IN_TREE_OR_NOT, 3); |
7855 | 0 | } |
7856 | 0 | } |
7857 | 0 | break; |
7858 | 0 | } |
7859 | 0 | case CallerAPI::CREATE_PATTERN: { |
7860 | 0 | if (isVisible) { |
7861 | 0 | // 4 = createPattern_VISIBLE |
7862 | 0 | Telemetry::Accumulate(Telemetry::VIDEO_AS_CONTENT_SOURCE, 4); |
7863 | 0 | } else { |
7864 | 0 | // 5 = createPattern_INVISIBLE |
7865 | 0 | Telemetry::Accumulate(Telemetry::VIDEO_AS_CONTENT_SOURCE, 5); |
7866 | 0 |
|
7867 | 0 | if (IsInComposedDoc()) { |
7868 | 0 | // 4 = createPattern_IN_TREE |
7869 | 0 | Telemetry::Accumulate( |
7870 | 0 | Telemetry::VIDEO_AS_CONTENT_SOURCE_IN_TREE_OR_NOT, 4); |
7871 | 0 | } else { |
7872 | 0 | // 5 = createPattern_NOT_IN_TREE |
7873 | 0 | Telemetry::Accumulate( |
7874 | 0 | Telemetry::VIDEO_AS_CONTENT_SOURCE_IN_TREE_OR_NOT, 5); |
7875 | 0 | } |
7876 | 0 | } |
7877 | 0 | break; |
7878 | 0 | } |
7879 | 0 | case CallerAPI::CREATE_IMAGEBITMAP: { |
7880 | 0 | if (isVisible) { |
7881 | 0 | // 6 = createImageBitmap_VISIBLE |
7882 | 0 | Telemetry::Accumulate(Telemetry::VIDEO_AS_CONTENT_SOURCE, 6); |
7883 | 0 | } else { |
7884 | 0 | // 7 = createImageBitmap_INVISIBLE |
7885 | 0 | Telemetry::Accumulate(Telemetry::VIDEO_AS_CONTENT_SOURCE, 7); |
7886 | 0 |
|
7887 | 0 | if (IsInComposedDoc()) { |
7888 | 0 | // 6 = createImageBitmap_IN_TREE |
7889 | 0 | Telemetry::Accumulate( |
7890 | 0 | Telemetry::VIDEO_AS_CONTENT_SOURCE_IN_TREE_OR_NOT, 6); |
7891 | 0 | } else { |
7892 | 0 | // 7 = createImageBitmap_NOT_IN_TREE |
7893 | 0 | Telemetry::Accumulate( |
7894 | 0 | Telemetry::VIDEO_AS_CONTENT_SOURCE_IN_TREE_OR_NOT, 7); |
7895 | 0 | } |
7896 | 0 | } |
7897 | 0 | break; |
7898 | 0 | } |
7899 | 0 | case CallerAPI::CAPTURE_STREAM: { |
7900 | 0 | if (isVisible) { |
7901 | 0 | // 8 = captureStream_VISIBLE |
7902 | 0 | Telemetry::Accumulate(Telemetry::VIDEO_AS_CONTENT_SOURCE, 8); |
7903 | 0 | } else { |
7904 | 0 | // 9 = captureStream_INVISIBLE |
7905 | 0 | Telemetry::Accumulate(Telemetry::VIDEO_AS_CONTENT_SOURCE, 9); |
7906 | 0 |
|
7907 | 0 | if (IsInComposedDoc()) { |
7908 | 0 | // 8 = captureStream_IN_TREE |
7909 | 0 | Telemetry::Accumulate( |
7910 | 0 | Telemetry::VIDEO_AS_CONTENT_SOURCE_IN_TREE_OR_NOT, 8); |
7911 | 0 | } else { |
7912 | 0 | // 9 = captureStream_NOT_IN_TREE |
7913 | 0 | Telemetry::Accumulate( |
7914 | 0 | Telemetry::VIDEO_AS_CONTENT_SOURCE_IN_TREE_OR_NOT, 9); |
7915 | 0 | } |
7916 | 0 | } |
7917 | 0 | break; |
7918 | 0 | } |
7919 | 0 | } |
7920 | 0 |
|
7921 | 0 | LOG(LogLevel::Debug, |
7922 | 0 | ("%p Log VIDEO_AS_CONTENT_SOURCE: visibility = %u, API: '%d' and 'All'", |
7923 | 0 | this, |
7924 | 0 | isVisible, |
7925 | 0 | static_cast<int>(aAPI))); |
7926 | 0 |
|
7927 | 0 | if (!isVisible) { |
7928 | 0 | LOG(LogLevel::Debug, |
7929 | 0 | ("%p Log VIDEO_AS_CONTENT_SOURCE_IN_TREE_OR_NOT: inTree = %u, API: " |
7930 | 0 | "'%d' and 'All'", |
7931 | 0 | this, |
7932 | 0 | IsInComposedDoc(), |
7933 | 0 | static_cast<int>(aAPI))); |
7934 | 0 | } |
7935 | 0 | } |
7936 | | |
// Informs the audio channel service that the play state changed so any
// custom (per-tab) audio policy can be re-applied.
void
HTMLMediaElement::UpdateCustomPolicyAfterPlayed()
{
  if (mAudioChannelWrapper) {
    mAudioChannelWrapper->NotifyPlayStateChanged();
  }
}
7944 | | |
// Accessor for the element's main-thread AbstractThread used for async
// dispatch; asserted non-null because it is set at construction.
AbstractThread*
HTMLMediaElement::AbstractMainThread() const
{
  MOZ_ASSERT(mAbstractMainThread);

  return mAbstractMainThread;
}
7952 | | |
// Transfers ownership of all queued play() promises to the caller, leaving
// the member array empty.
nsTArray<RefPtr<PlayPromise>>
HTMLMediaElement::TakePendingPlayPromises()
{
  return std::move(mPendingPlayPromises);
}
7958 | | |
// Fires the "playing" event.
void
HTMLMediaElement::NotifyAboutPlaying()
{
  // Stick to the DispatchAsyncEvent() call path for now because we want to
  // trigger some telemetry-related codes in the DispatchAsyncEvent() method.
  DispatchAsyncEvent(NS_LITERAL_STRING("playing"));
}
7966 | | |
7967 | | already_AddRefed<PlayPromise> |
7968 | | HTMLMediaElement::CreatePlayPromise(ErrorResult& aRv) const |
7969 | 0 | { |
7970 | 0 | nsPIDOMWindowInner* win = OwnerDoc()->GetInnerWindow(); |
7971 | 0 |
|
7972 | 0 | if (!win) { |
7973 | 0 | aRv.Throw(NS_ERROR_UNEXPECTED); |
7974 | 0 | return nullptr; |
7975 | 0 | } |
7976 | 0 | |
7977 | 0 | RefPtr<PlayPromise> promise = PlayPromise::Create(win->AsGlobal(), aRv); |
7978 | 0 | LOG(LogLevel::Debug, ("%p created PlayPromise %p", this, promise.get())); |
7979 | 0 |
|
7980 | 0 | return promise.forget(); |
7981 | 0 | } |
7982 | | |
7983 | | already_AddRefed<Promise> |
7984 | | HTMLMediaElement::CreateDOMPromise(ErrorResult& aRv) const |
7985 | 0 | { |
7986 | 0 | nsPIDOMWindowInner* win = OwnerDoc()->GetInnerWindow(); |
7987 | 0 |
|
7988 | 0 | if (!win) { |
7989 | 0 | aRv.Throw(NS_ERROR_UNEXPECTED); |
7990 | 0 | return nullptr; |
7991 | 0 | } |
7992 | 0 | |
7993 | 0 | return Promise::Create(win->AsGlobal(), aRv); |
7994 | 0 | } |
7995 | | |
// Queues a runnable that resolves every pending play() promise. Also
// disconnects any outstanding autoplay-permission request, since playback
// is proceeding regardless. No-op while shutting down.
void
HTMLMediaElement::AsyncResolvePendingPlayPromises()
{
  // Disconnect requests for permission to play. We're playing either way,
  // so there's no point keeping the promise connected. Note: the front
  // end permission prompt code will detect that we've started playing, and
  // hide the permission prompt.
  mAutoplayPermissionRequest.DisconnectIfExists();

  if (mShuttingDown) {
    return;
  }

  // The runnable takes ownership of the pending promises and resolves them
  // asynchronously on the main thread.
  nsCOMPtr<nsIRunnable> event = new nsResolveOrRejectPendingPlayPromisesRunner(
    this, TakePendingPlayPromises());

  mMainThreadEventTarget->Dispatch(event.forget());
}
8014 | | |
// Queues a runnable that rejects every pending play() promise with aError.
// Transitions the element to the paused state (with a "pause" event) if it
// wasn't already, and fires the blocked-autoplay notifications when the
// rejection is due to the autoplay policy. No-op while shutting down.
void
HTMLMediaElement::AsyncRejectPendingPlayPromises(nsresult aError)
{
  mAutoplayPermissionRequest.DisconnectIfExists();

  if (!mPaused) {
    mPaused = true;
    DispatchAsyncEvent(NS_LITERAL_STRING("pause"));
  }

  if (mShuttingDown) {
    return;
  }

  if (aError == NS_ERROR_DOM_MEDIA_NOT_ALLOWED_ERR) {
    // Playback was blocked by the autoplay policy; let the front end know.
    DispatchEventsWhenPlayWasNotAllowed();
  }

  // The runnable takes ownership of the pending promises and rejects them
  // asynchronously on the main thread.
  nsCOMPtr<nsIRunnable> event = new nsResolveOrRejectPendingPlayPromisesRunner(
    this, TakePendingPlayPromises(), aError);

  mMainThreadEventTarget->Dispatch(event.forget());
}
8038 | | |
8039 | | void |
8040 | | HTMLMediaElement::GetEMEInfo(nsString& aEMEInfo) |
8041 | 0 | { |
8042 | 0 | if (!mMediaKeys) { |
8043 | 0 | return; |
8044 | 0 | } |
8045 | 0 | |
8046 | 0 | nsString keySystem; |
8047 | 0 | mMediaKeys->GetKeySystem(keySystem); |
8048 | 0 |
|
8049 | 0 | nsString sessionsInfo; |
8050 | 0 | mMediaKeys->GetSessionsInfo(sessionsInfo); |
8051 | 0 |
|
8052 | 0 | aEMEInfo.AppendLiteral("Key System="); |
8053 | 0 | aEMEInfo.Append(keySystem); |
8054 | 0 | aEMEInfo.AppendLiteral(" SessionsInfo="); |
8055 | 0 | aEMEInfo.Append(sessionsInfo); |
8056 | 0 | } |
8057 | | |
// Forwards the element's current activity/visibility state to the decoder
// so it can adjust its behavior (e.g. suspend decoding when hidden).
void
HTMLMediaElement::NotifyDecoderActivityChanges() const
{
  if (mDecoder) {
    mDecoder->NotifyOwnerActivityChanged(
      !IsHidden(), mVisibilityState, IsInComposedDoc());
  }
}
8066 | | |
// MediaDecoderOwner interface: the document that owns this element.
nsIDocument*
HTMLMediaElement::GetDocument() const
{
  return OwnerDoc();
}
8072 | | |
// Builds the AudioTrack/VideoTrack objects for the tracks described by
// aInfo and adds them to this element's track lists. Idempotent: guarded by
// mMediaTracksConstructed so repeated metadata notifications don't create
// duplicate tracks.
void
HTMLMediaElement::ConstructMediaTracks(const MediaInfo* aInfo)
{
  if (mMediaTracksConstructed || !aInfo) {
    return;
  }

  mMediaTracksConstructed = true;

  AudioTrackList* audioList = AudioTracks();
  if (audioList && aInfo->HasAudio()) {
    const TrackInfo& info = aInfo->mAudio;
    RefPtr<AudioTrack> track =
      MediaTrackList::CreateAudioTrack(audioList->GetOwnerGlobal(),
                                       info.mId,
                                       info.mKind,
                                       info.mLabel,
                                       info.mLanguage,
                                       info.mEnabled);

    audioList->AddTrack(track);
  }

  VideoTrackList* videoList = VideoTracks();
  if (videoList && aInfo->HasVideo()) {
    const TrackInfo& info = aInfo->mVideo;
    RefPtr<VideoTrack> track =
      MediaTrackList::CreateVideoTrack(videoList->GetOwnerGlobal(),
                                       info.mId,
                                       info.mKind,
                                       info.mLabel,
                                       info.mLanguage);

    videoList->AddTrack(track);
    // Set enabled state after insertion, without firing change events.
    track->SetEnabledInternal(info.mEnabled, MediaTrack::FIRE_NO_EVENTS);
  }
}
8110 | | |
// Removes all audio/video tracks (e.g. when the media resource is torn
// down) and marks any pre-created capture-stream tracks as ended so
// captured DOMMediaStreams observe the end of the source.
void
HTMLMediaElement::RemoveMediaTracks()
{
  if (mAudioTrackList) {
    mAudioTrackList->RemoveTracks();
  }

  if (mVideoTrackList) {
    mVideoTrackList->RemoveTracks();
  }

  // Allow ConstructMediaTracks() to run again for the next resource.
  mMediaTracksConstructed = false;

  for (OutputMediaStream& ms : mOutputStreams) {
    if (!ms.mCapturingDecoder) {
      continue;
    }
    for (RefPtr<MediaStreamTrack>& t : ms.mPreCreatedTracks) {
      if (t->Ended()) {
        continue;
      }
      // End the track asynchronously on the main thread.
      mAbstractMainThread->Dispatch(NewRunnableMethod(
        "dom::HTMLMediaElement::RemoveMediaTracks",
        t, &MediaStreamTrack::OverrideEnded));
    }
    ms.mPreCreatedTracks.Clear();
  }
}
8139 | | |
// Helper that lets the GMP (plugin) crash machinery find the window to
// dispatch PluginCrashed events to. Holds only a WeakPtr to the element so
// it never extends the element's lifetime; main-thread only.
class MediaElementGMPCrashHelper : public GMPCrashHelper
{
public:
  explicit MediaElementGMPCrashHelper(HTMLMediaElement* aElement)
    : mElement(aElement)
  {
    MOZ_ASSERT(NS_IsMainThread()); // WeakPtr isn't thread safe.
  }
  // Returns the element's inner window, or nullptr if the element has
  // already been destroyed (weak pointer cleared).
  already_AddRefed<nsPIDOMWindowInner> GetPluginCrashedEventTarget() override
  {
    MOZ_ASSERT(NS_IsMainThread()); // WeakPtr isn't thread safe.
    if (!mElement) {
      return nullptr;
    }
    return do_AddRef(mElement->OwnerDoc()->GetInnerWindow());
  }

private:
  // Weak back-reference; cleared automatically when the element dies.
  WeakPtr<HTMLMediaElement> mElement;
};
8160 | | |
// MediaDecoderOwner interface: creates the crash helper used to route GMP
// crash notifications back to this element's window.
already_AddRefed<GMPCrashHelper>
HTMLMediaElement::CreateGMPCrashHelper()
{
  return MakeAndAddRef<MediaElementGMPCrashHelper>(this);
}
8166 | | |
// Marks this element as having a "suspend taint" (it was used in a way that
// makes decode suspension unsafe) and propagates the taint to the decoder.
void
HTMLMediaElement::MarkAsTainted()
{
  mHasSuspendTaint = true;

  if (mDecoder) {
    mDecoder->SetSuspendTaint(true);
  }
}
8176 | | |
// WebIDL [Func] helper: true when the calling script holds either the
// "debugger" or the "tabs" permission, gating privileged media APIs.
bool
HasDebuggerOrTabsPrivilege(JSContext* aCx, JSObject* aObj)
{
  return nsContentUtils::CallerHasPermission(aCx, nsGkAtoms::debugger) ||
         nsContentUtils::CallerHasPermission(aCx, nsGkAtoms::tabs);
}
8183 | | |
// Resolves the pending seek promise (if any) asynchronously, clearing the
// member first so a re-entrant seek can install a new promise. The lambda
// captures the promise by copy, keeping it alive until dispatch runs.
void
HTMLMediaElement::AsyncResolveSeekDOMPromiseIfExists()
{
  MOZ_ASSERT(NS_IsMainThread());
  if (mSeekDOMPromise) {
    RefPtr<dom::Promise> promise = mSeekDOMPromise.forget();
    nsCOMPtr<nsIRunnable> r = NS_NewRunnableFunction(
      "dom::HTMLMediaElement::AsyncResolveSeekDOMPromiseIfExists",
      [=]() { promise->MaybeResolveWithUndefined(); });
    mAbstractMainThread->Dispatch(r.forget());
    mSeekDOMPromise = nullptr;
  }
}
8197 | | |
// Rejects the pending seek promise (if any) with AbortError asynchronously,
// mirroring AsyncResolveSeekDOMPromiseIfExists (e.g. when a new seek
// supersedes the current one).
void
HTMLMediaElement::AsyncRejectSeekDOMPromiseIfExists()
{
  MOZ_ASSERT(NS_IsMainThread());
  if (mSeekDOMPromise) {
    RefPtr<dom::Promise> promise = mSeekDOMPromise.forget();
    nsCOMPtr<nsIRunnable> r = NS_NewRunnableFunction(
      "dom::HTMLMediaElement::AsyncRejectSeekDOMPromiseIfExists",
      [=]() { promise->MaybeReject(NS_ERROR_DOM_ABORT_ERR); });
    mAbstractMainThread->Dispatch(r.forget());
    mSeekDOMPromise = nullptr;
  }
}
8211 | | |
8212 | | void |
8213 | | HTMLMediaElement::ReportCanPlayTelemetry() |
8214 | 0 | { |
8215 | 0 | LOG(LogLevel::Debug, ("%s", __func__)); |
8216 | 0 |
|
8217 | 0 | RefPtr<nsIThread> thread; |
8218 | 0 | nsresult rv = NS_NewNamedThread("MediaTelemetry", getter_AddRefs(thread)); |
8219 | 0 | if (NS_WARN_IF(NS_FAILED(rv))) { |
8220 | 0 | return; |
8221 | 0 | } |
8222 | 0 | |
8223 | 0 | RefPtr<AbstractThread> abstractThread = mAbstractMainThread; |
8224 | 0 |
|
8225 | 0 | thread->Dispatch( |
8226 | 0 | NS_NewRunnableFunction( |
8227 | 0 | "dom::HTMLMediaElement::ReportCanPlayTelemetry", |
8228 | 0 | [thread, abstractThread]() { |
8229 | | #if XP_WIN |
8230 | | // Windows Media Foundation requires MSCOM to be inited. |
8231 | | DebugOnly<HRESULT> hr = CoInitializeEx(0, COINIT_MULTITHREADED); |
8232 | | MOZ_ASSERT(hr == S_OK); |
8233 | | #endif |
8234 | | bool aac = MP4Decoder::IsSupportedType( |
8235 | 0 | MediaContainerType(MEDIAMIMETYPE(AUDIO_MP4)), nullptr); |
8236 | 0 | bool h264 = MP4Decoder::IsSupportedType( |
8237 | 0 | MediaContainerType(MEDIAMIMETYPE(VIDEO_MP4)), nullptr); |
8238 | | #if XP_WIN |
8239 | | CoUninitialize(); |
8240 | | #endif |
8241 | | abstractThread->Dispatch(NS_NewRunnableFunction( |
8242 | 0 | "dom::HTMLMediaElement::ReportCanPlayTelemetry", |
8243 | 0 | [thread, aac, h264]() { |
8244 | 0 | LOG(LogLevel::Debug, ("MediaTelemetry aac=%d h264=%d", aac, h264)); |
8245 | 0 | Telemetry::Accumulate( |
8246 | 0 | Telemetry::HistogramID::VIDEO_CAN_CREATE_AAC_DECODER, aac); |
8247 | 0 | Telemetry::Accumulate( |
8248 | 0 | Telemetry::HistogramID::VIDEO_CAN_CREATE_H264_DECODER, h264); |
8249 | 0 | thread->AsyncShutdown(); |
8250 | 0 | })); |
8251 | 0 | }), |
8252 | 0 | NS_DISPATCH_NORMAL); |
8253 | 0 | } |
8254 | | |
8255 | | } // namespace dom |
8256 | | } // namespace mozilla |
8257 | | |
8258 | | #undef LOG |
8259 | | #undef LOG_EVENT |