Coverage Report

Created: 2018-09-25 14:53

/src/mozilla-central/dom/media/webaudio/AudioDestinationNode.cpp
Source listing. Every instrumented line below reported an execution count of 0; no line of this file was covered in this run.
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim:set ts=2 sw=2 sts=2 et cindent: */
/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#include "AudioDestinationNode.h"
#include "AudioContext.h"
#include "AlignmentUtils.h"
#include "mozilla/dom/AudioDestinationNodeBinding.h"
#include "mozilla/dom/OfflineAudioCompletionEvent.h"
#include "mozilla/dom/ScriptSettings.h"
#include "mozilla/dom/BaseAudioContextBinding.h"
#include "mozilla/Services.h"
#include "AudioChannelAgent.h"
#include "AudioChannelService.h"
#include "AudioNodeEngine.h"
#include "AudioNodeStream.h"
#include "MediaStreamGraph.h"
#include "nsContentUtils.h"
#include "nsIInterfaceRequestorUtils.h"
#include "nsIDocShell.h"
#include "nsIPermissionManager.h"
#include "nsIScriptObjectPrincipal.h"
#include "nsServiceManagerUtils.h"
#include "mozilla/dom/Promise.h"

namespace mozilla {
namespace dom {

static uint8_t gWebAudioOutputKey;

class OfflineDestinationNodeEngine final : public AudioNodeEngine
{
public:
  OfflineDestinationNodeEngine(AudioDestinationNode* aNode,
                               uint32_t aNumberOfChannels,
                               uint32_t aLength,
                               float aSampleRate)
    : AudioNodeEngine(aNode)
    , mWriteIndex(0)
    , mNumberOfChannels(aNumberOfChannels)
    , mLength(aLength)
    , mSampleRate(aSampleRate)
    , mBufferAllocated(false)
  {
  }

  void ProcessBlock(AudioNodeStream* aStream,
                    GraphTime aFrom,
                    const AudioBlock& aInput,
                    AudioBlock* aOutput,
                    bool* aFinished) override
  {
    // Do this just for the sake of political correctness; this output
    // will not go anywhere.
    *aOutput = aInput;

    // The output buffer is allocated lazily, on the rendering thread, when
    // non-null input is received.
    if (!mBufferAllocated && !aInput.IsNull()) {
      // These allocations might fail if content provides a huge number of
      // channels or size, but it's OK since we'll deal with the failure
      // gracefully.
      mBuffer = ThreadSharedFloatArrayBufferList::
        Create(mNumberOfChannels, mLength, fallible);
      if (mBuffer && mWriteIndex) {
        // Zero leading for any null chunks that were skipped.
        for (uint32_t i = 0; i < mNumberOfChannels; ++i) {
          float* channelData = mBuffer->GetDataForWrite(i);
          PodZero(channelData, mWriteIndex);
        }
      }

      mBufferAllocated = true;
    }

    // Skip copying if there is no buffer.
    uint32_t outputChannelCount = mBuffer ? mNumberOfChannels : 0;

    // Record our input buffer
    MOZ_ASSERT(mWriteIndex < mLength, "How did this happen?");
    const uint32_t duration = std::min(WEBAUDIO_BLOCK_SIZE, mLength - mWriteIndex);
    const uint32_t inputChannelCount = aInput.ChannelCount();
    for (uint32_t i = 0; i < outputChannelCount; ++i) {
      float* outputData = mBuffer->GetDataForWrite(i) + mWriteIndex;
      if (aInput.IsNull() || i >= inputChannelCount) {
        PodZero(outputData, duration);
      } else {
        const float* inputBuffer = static_cast<const float*>(aInput.mChannelData[i]);
        if (duration == WEBAUDIO_BLOCK_SIZE && IS_ALIGNED16(inputBuffer)) {
          // Use the optimized version of the copy with scale operation
          AudioBlockCopyChannelWithScale(inputBuffer, aInput.mVolume,
                                         outputData);
        } else {
          if (aInput.mVolume == 1.0f) {
            PodCopy(outputData, inputBuffer, duration);
          } else {
            for (uint32_t j = 0; j < duration; ++j) {
              outputData[j] = aInput.mVolume * inputBuffer[j];
            }
          }
        }
      }
    }
    mWriteIndex += duration;

    if (mWriteIndex >= mLength) {
      NS_ASSERTION(mWriteIndex == mLength, "Overshot length");
      // Go to finished state. When the graph's current time eventually reaches
      // the end of the stream, then the main thread will be notified and we'll
      // shut down the AudioContext.
      *aFinished = true;
    }
  }

  bool IsActive() const override
  {
    // Keep processing to track stream time, which is used for all timelines
    // associated with the same AudioContext.
    return true;
  }

  class OnCompleteTask final : public Runnable
  {
  public:
    OnCompleteTask(AudioContext* aAudioContext, AudioBuffer* aRenderedBuffer)
      : Runnable("dom::OfflineDestinationNodeEngine::OnCompleteTask")
      , mAudioContext(aAudioContext)
      , mRenderedBuffer(aRenderedBuffer)
    {}

    NS_IMETHOD Run() override
    {
      OfflineAudioCompletionEventInit param;
      param.mRenderedBuffer = mRenderedBuffer;

      RefPtr<OfflineAudioCompletionEvent> event =
          OfflineAudioCompletionEvent::Constructor(mAudioContext,
                                                   NS_LITERAL_STRING("complete"),
                                                   param);
      mAudioContext->DispatchTrustedEvent(event);

      return NS_OK;
    }
  private:
    RefPtr<AudioContext> mAudioContext;
    RefPtr<AudioBuffer> mRenderedBuffer;
  };

  void FireOfflineCompletionEvent(AudioDestinationNode* aNode)
  {
    AudioContext* context = aNode->Context();
    context->Shutdown();
    // Shutdown drops self reference, but the context is still referenced by aNode,
    // which is strongly referenced by the runnable that called
    // AudioDestinationNode::FireOfflineCompletionEvent.

    // Create the input buffer
    ErrorResult rv;
    RefPtr<AudioBuffer> renderedBuffer =
      AudioBuffer::Create(context->GetOwner(), mNumberOfChannels, mLength,
                          mSampleRate, mBuffer.forget(), rv);
    if (rv.Failed()) {
      rv.SuppressException();
      return;
    }

    aNode->ResolvePromise(renderedBuffer);

    context->Dispatch(do_AddRef(new OnCompleteTask(context, renderedBuffer)));

    context->OnStateChanged(nullptr, AudioContextState::Closed);
  }

  size_t SizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const override
  {
    size_t amount = AudioNodeEngine::SizeOfExcludingThis(aMallocSizeOf);
    if (mBuffer) {
      amount += mBuffer->SizeOfIncludingThis(aMallocSizeOf);
    }
    return amount;
  }

  size_t SizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const override
  {
    return aMallocSizeOf(this) + SizeOfExcludingThis(aMallocSizeOf);
  }

private:
  // The input to the destination node is recorded in mBuffer.
  // When this buffer fills up with mLength frames, the buffered input is sent
  // to the main thread in order to dispatch OfflineAudioCompletionEvent.
  RefPtr<ThreadSharedFloatArrayBufferList> mBuffer;
  // An index representing the next offset in mBuffer to be written to.
  uint32_t mWriteIndex;
  uint32_t mNumberOfChannels;
  // How many frames the OfflineAudioContext intends to produce.
  uint32_t mLength;
  float mSampleRate;
  bool mBufferAllocated;
};
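
Aside (not part of AudioDestinationNode.cpp): the uncovered ProcessBlock() above implements the offline recording path: lazily allocate the output buffer on the first non-null block, zero any leading silent blocks that were skipped, then copy each channel with the block's volume applied until mLength frames have been written. The sketch below restates that logic with hypothetical standard-library stand-ins (Block, OfflineRecorder, kBlockSize are illustrative names) in place of the Gecko types (AudioBlock, ThreadSharedFloatArrayBufferList, PodZero/PodCopy, AudioBlockCopyChannelWithScale); it illustrates the algorithm, not the actual engine.

#include <algorithm>
#include <cstdint>
#include <vector>

// Hypothetical stand-ins for the Gecko types used above; names are illustrative.
constexpr uint32_t kBlockSize = 128;  // plays the role of WEBAUDIO_BLOCK_SIZE

struct Block {
  std::vector<std::vector<float>> channels;  // empty => "null" (silent) block
  float volume = 1.0f;
  bool IsNull() const { return channels.empty(); }
};

class OfflineRecorder {
public:
  OfflineRecorder(uint32_t aChannels, uint32_t aLength)
    : mChannels(aChannels), mLength(aLength) {}

  // Mirrors the shape of ProcessBlock(): records up to one block of input and
  // returns true once mLength frames have been written (the *aFinished case).
  bool Record(const Block& aInput) {
    // Allocate lazily on the first non-null block; zero-initialization covers
    // the "zero leading for any null chunks that were skipped" step.
    if (mBuffer.empty() && !aInput.IsNull()) {
      mBuffer.assign(mChannels, std::vector<float>(mLength, 0.0f));
    }

    // Clamp the final block so we never write past mLength frames.
    const uint32_t duration = std::min(kBlockSize, mLength - mWriteIndex);
    if (!mBuffer.empty()) {
      for (uint32_t c = 0; c < mChannels; ++c) {
        float* out = mBuffer[c].data() + mWriteIndex;
        if (aInput.IsNull() || c >= aInput.channels.size()) {
          std::fill(out, out + duration, 0.0f);  // PodZero
        } else {
          const float* in = aInput.channels[c].data();
          for (uint32_t j = 0; j < duration; ++j) {
            out[j] = aInput.volume * in[j];  // copy with volume scale applied
          }
        }
      }
    }
    mWriteIndex += duration;
    return mWriteIndex >= mLength;
  }

private:
  uint32_t mChannels;
  uint32_t mLength;
  uint32_t mWriteIndex = 0;
  std::vector<std::vector<float>> mBuffer;  // the recorded output, per channel
};

Clamping the per-block duration to std::min(kBlockSize, remaining) is what keeps the final, partial block from overrunning mLength, the same role the duration computation plays in the engine above.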

class InputMutedRunnable final : public Runnable
{
public:
  InputMutedRunnable(AudioNodeStream* aStream, bool aInputMuted)
    : Runnable("dom::InputMutedRunnable")
    , mStream(aStream)
    , mInputMuted(aInputMuted)
  {
  }

  NS_IMETHOD Run() override
  {
    MOZ_ASSERT(NS_IsMainThread());
    RefPtr<AudioNode> node = mStream->Engine()->NodeMainThread();

    if (node) {
      RefPtr<AudioDestinationNode> destinationNode =
        static_cast<AudioDestinationNode*>(node.get());
      destinationNode->InputMuted(mInputMuted);
    }
    return NS_OK;
  }

private:
  RefPtr<AudioNodeStream> mStream;
  bool mInputMuted;
};

class DestinationNodeEngine final : public AudioNodeEngine
{
public:
  explicit DestinationNodeEngine(AudioDestinationNode* aNode)
    : AudioNodeEngine(aNode)
    , mVolume(1.0f)
    , mLastInputMuted(true)
    , mSuspended(false)
  {
    MOZ_ASSERT(aNode);
  }

  void ProcessBlock(AudioNodeStream* aStream,
                    GraphTime aFrom,
                    const AudioBlock& aInput,
                    AudioBlock* aOutput,
                    bool* aFinished) override
  {
    *aOutput = aInput;
    aOutput->mVolume *= mVolume;

    if (mSuspended) {
      return;
    }

    bool newInputMuted = aInput.IsNull() || aInput.IsMuted();
    if (newInputMuted != mLastInputMuted) {
      mLastInputMuted = newInputMuted;

      RefPtr<InputMutedRunnable> runnable =
        new InputMutedRunnable(aStream, newInputMuted);
      aStream->Graph()->DispatchToMainThreadAfterStreamStateUpdate(
        runnable.forget());
    }
  }

  bool IsActive() const override
  {
    // Keep processing to track stream time, which is used for all timelines
    // associated with the same AudioContext.  If there are no other engines
    // for the AudioContext, then this could return false to suspend the
    // stream, but the stream is blocked anyway through
    // AudioDestinationNode::SetIsOnlyNodeForContext().
    return true;
  }

  void SetDoubleParameter(uint32_t aIndex, double aParam) override
  {
    if (aIndex == VOLUME) {
      mVolume = aParam;
    }
  }

  void SetInt32Parameter(uint32_t aIndex, int32_t aParam) override
  {
    if (aIndex == SUSPENDED) {
      mSuspended = !!aParam;
      if (mSuspended) {
        mLastInputMuted = true;
      }
    }
  }

  enum Parameters {
    VOLUME,
    SUSPENDED,
  };

  size_t SizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const override
  {
    return aMallocSizeOf(this) + SizeOfExcludingThis(aMallocSizeOf);
  }

private:
  float mVolume;
  bool mLastInputMuted;
  bool mSuspended;
};
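
Aside (not part of AudioDestinationNode.cpp): DestinationNodeEngine above only tells the main thread about the input-muted state when that state actually changes, and SetInt32Parameter(SUSPENDED, 1) forces mLastInputMuted back to true so the first audible block after a resume triggers a fresh notification. Below is a minimal, self-contained sketch of that edge-triggered pattern, using a hypothetical MuteTracker class with a callback standing in for dispatching InputMutedRunnable via DispatchToMainThreadAfterStreamStateUpdate.

#include <functional>
#include <utility>

// Hypothetical, standalone illustration of the edge-triggered notification in
// DestinationNodeEngine; the callback stands in for posting an
// InputMutedRunnable to the main thread.
class MuteTracker {
public:
  explicit MuteTracker(std::function<void(bool)> aNotifyMainThread)
    : mNotify(std::move(aNotifyMainThread)) {}

  // Mirrors SetInt32Parameter(SUSPENDED, ...): while suspended nothing is
  // reported, and the muted flag is reset so the next audible block after a
  // resume produces a fresh notification.
  void SetSuspended(bool aSuspended) {
    mSuspended = aSuspended;
    if (mSuspended) {
      mLastInputMuted = true;
    }
  }

  // Mirrors the tail of ProcessBlock(): notify only on a state change.
  void ProcessBlock(bool aInputIsNullOrMuted) {
    if (mSuspended) {
      return;
    }
    if (aInputIsNullOrMuted != mLastInputMuted) {
      mLastInputMuted = aInputIsNullOrMuted;
      mNotify(aInputIsNullOrMuted);
    }
  }

private:
  std::function<void(bool)> mNotify;
  bool mLastInputMuted = true;  // matches the engine's initial state
  bool mSuspended = false;
};

Because the initial state is "muted", a graph that never produces audible output never sends a notification at all, which is what lets AudioDestinationNode::InputMuted() below defer creating the AudioChannelAgent until sound is actually heard.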

NS_IMPL_CYCLE_COLLECTION_INHERITED(AudioDestinationNode, AudioNode,
                                   mAudioChannelAgent,
                                   mOfflineRenderingPromise)

NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION(AudioDestinationNode)
  NS_INTERFACE_MAP_ENTRY(nsIAudioChannelAgentCallback)
NS_INTERFACE_MAP_END_INHERITING(AudioNode)

NS_IMPL_ADDREF_INHERITED(AudioDestinationNode, AudioNode)
NS_IMPL_RELEASE_INHERITED(AudioDestinationNode, AudioNode)

AudioDestinationNode::AudioDestinationNode(AudioContext* aContext,
                                           bool aIsOffline,
                                           bool aAllowedToStart,
                                           uint32_t aNumberOfChannels,
                                           uint32_t aLength,
                                           float aSampleRate)
  : AudioNode(aContext, aNumberOfChannels,
              ChannelCountMode::Explicit, ChannelInterpretation::Speakers)
  , mFramesToProduce(aLength)
  , mIsOffline(aIsOffline)
  , mAudioChannelSuspended(false)
  , mCaptured(false)
  , mAudible(AudioChannelService::AudibleState::eAudible)
  , mCreatedTime(TimeStamp::Now())
{
  nsPIDOMWindowInner* window = aContext->GetParentObject();
  MediaStreamGraph* graph =
    aIsOffline
      ? MediaStreamGraph::CreateNonRealtimeInstance(aSampleRate, window)
      : MediaStreamGraph::GetInstance(
          MediaStreamGraph::AUDIO_THREAD_DRIVER, window, aSampleRate);
  AudioNodeEngine* engine = aIsOffline ?
                            new OfflineDestinationNodeEngine(this, aNumberOfChannels,
                                                             aLength, aSampleRate) :
                            static_cast<AudioNodeEngine*>(new DestinationNodeEngine(this));

  AudioNodeStream::Flags flags =
    AudioNodeStream::NEED_MAIN_THREAD_CURRENT_TIME |
    AudioNodeStream::NEED_MAIN_THREAD_FINISHED |
    AudioNodeStream::EXTERNAL_OUTPUT;
  mStream = AudioNodeStream::Create(aContext, engine, flags, graph);
  mStream->AddMainThreadListener(this);
  mStream->AddAudioOutput(&gWebAudioOutputKey);

  if (!aIsOffline && aAllowedToStart) {
    graph->NotifyWhenGraphStarted(mStream);
  }
}

AudioDestinationNode::~AudioDestinationNode()
{
}

size_t
AudioDestinationNode::SizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const
{
  size_t amount = AudioNode::SizeOfExcludingThis(aMallocSizeOf);
  // Might be useful in the future:
  // - mAudioChannelAgent
  return amount;
}

size_t
AudioDestinationNode::SizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const
{
  return aMallocSizeOf(this) + SizeOfExcludingThis(aMallocSizeOf);
}

void
AudioDestinationNode::DestroyAudioChannelAgent()
{
  if (mAudioChannelAgent && !Context()->IsOffline()) {
    mAudioChannelAgent->NotifyStoppedPlaying();
    mAudioChannelAgent = nullptr;
    // Reset the state; from here on the node is always regarded as audible.
    mAudible = AudioChannelService::AudibleState::eAudible;
  }
}

void
AudioDestinationNode::DestroyMediaStream()
{
  DestroyAudioChannelAgent();

  if (!mStream)
    return;

  mStream->RemoveMainThreadListener(this);
  MediaStreamGraph* graph = mStream->Graph();
  if (graph->IsNonRealtime()) {
    MediaStreamGraph::DestroyNonRealtimeInstance(graph);
  }
  AudioNode::DestroyMediaStream();
}

void
AudioDestinationNode::NotifyMainThreadStreamFinished()
{
  MOZ_ASSERT(NS_IsMainThread());
  MOZ_ASSERT(mStream->IsFinished());

  if (mIsOffline) {
    AbstractMainThread()->Dispatch(
      NewRunnableMethod("dom::AudioDestinationNode::FireOfflineCompletionEvent",
                        this,
                        &AudioDestinationNode::FireOfflineCompletionEvent));
  }
}

void
AudioDestinationNode::FireOfflineCompletionEvent()
{
  OfflineDestinationNodeEngine* engine =
    static_cast<OfflineDestinationNodeEngine*>(Stream()->Engine());
  engine->FireOfflineCompletionEvent(this);
}

void
AudioDestinationNode::ResolvePromise(AudioBuffer* aRenderedBuffer)
{
  MOZ_ASSERT(NS_IsMainThread());
  MOZ_ASSERT(mIsOffline);
  mOfflineRenderingPromise->MaybeResolve(aRenderedBuffer);
}

uint32_t
AudioDestinationNode::MaxChannelCount() const
{
  return Context()->MaxChannelCount();
}

void
AudioDestinationNode::SetChannelCount(uint32_t aChannelCount, ErrorResult& aRv)
{
  if (aChannelCount > MaxChannelCount()) {
    aRv.Throw(NS_ERROR_DOM_INDEX_SIZE_ERR);
    return;
  }

  AudioNode::SetChannelCount(aChannelCount, aRv);
}

void
AudioDestinationNode::Mute()
{
  MOZ_ASSERT(Context() && !Context()->IsOffline());
  SendDoubleParameterToStream(DestinationNodeEngine::VOLUME, 0.0f);
}

void
AudioDestinationNode::Unmute()
{
  MOZ_ASSERT(Context() && !Context()->IsOffline());
  SendDoubleParameterToStream(DestinationNodeEngine::VOLUME, 1.0f);
}

void
AudioDestinationNode::Suspend()
{
  DestroyAudioChannelAgent();
  SendInt32ParameterToStream(DestinationNodeEngine::SUSPENDED, 1);
}

void
AudioDestinationNode::Resume()
{
  CreateAudioChannelAgent();
  SendInt32ParameterToStream(DestinationNodeEngine::SUSPENDED, 0);
}

void
AudioDestinationNode::OfflineShutdown()
{
  MOZ_ASSERT(Context() && Context()->IsOffline(),
             "Should only be called on a valid OfflineAudioContext");

  MediaStreamGraph::DestroyNonRealtimeInstance(mStream->Graph());
  mOfflineRenderingRef.Drop(this);
}

JSObject*
AudioDestinationNode::WrapObject(JSContext* aCx, JS::Handle<JSObject*> aGivenProto)
{
  return AudioDestinationNode_Binding::Wrap(aCx, this, aGivenProto);
}

void
AudioDestinationNode::StartRendering(Promise* aPromise)
{
  mOfflineRenderingPromise = aPromise;
  mOfflineRenderingRef.Take(this);
  mStream->Graph()->StartNonRealtimeProcessing(mFramesToProduce);
}

NS_IMETHODIMP
AudioDestinationNode::WindowVolumeChanged(float aVolume, bool aMuted)
{
  if (!mStream) {
    return NS_OK;
  }

  MOZ_LOG(AudioChannelService::GetAudioChannelLog(), LogLevel::Debug,
         ("AudioDestinationNode, WindowVolumeChanged, "
          "this = %p, aVolume = %f, aMuted = %s\n",
          this, aVolume, aMuted ? "true" : "false"));

  float volume = aMuted ? 0.0 : aVolume;
  mStream->SetAudioOutputVolume(&gWebAudioOutputKey, volume);

  AudioChannelService::AudibleState audible = volume > 0.0 ?
    AudioChannelService::AudibleState::eAudible :
    AudioChannelService::AudibleState::eNotAudible;
  if (mAudible != audible) {
    mAudible = audible;
    mAudioChannelAgent->NotifyStartedAudible(mAudible,
                                             AudioChannelService::AudibleChangedReasons::eVolumeChanged);
  }
  return NS_OK;
}

NS_IMETHODIMP
AudioDestinationNode::WindowSuspendChanged(nsSuspendedTypes aSuspend)
{
  if (!mStream) {
    return NS_OK;
  }

  bool suspended = (aSuspend != nsISuspendedTypes::NONE_SUSPENDED);
  if (mAudioChannelSuspended == suspended) {
    return NS_OK;
  }

  MOZ_LOG(AudioChannelService::GetAudioChannelLog(), LogLevel::Debug,
         ("AudioDestinationNode, WindowSuspendChanged, "
          "this = %p, aSuspend = %s\n", this, SuspendTypeToStr(aSuspend)));

  mAudioChannelSuspended = suspended;

  DisabledTrackMode disabledMode = suspended ? DisabledTrackMode::SILENCE_BLACK
                                             : DisabledTrackMode::ENABLED;
  mStream->SetTrackEnabled(AudioNodeStream::AUDIO_TRACK, disabledMode);

  AudioChannelService::AudibleState audible =
    aSuspend == nsISuspendedTypes::NONE_SUSPENDED ?
      AudioChannelService::AudibleState::eAudible :
      AudioChannelService::AudibleState::eNotAudible;
  if (mAudible != audible) {
    mAudible = audible;
    mAudioChannelAgent->NotifyStartedAudible(audible,
                                             AudioChannelService::AudibleChangedReasons::ePauseStateChanged);
  }
  return NS_OK;
}

NS_IMETHODIMP
AudioDestinationNode::WindowAudioCaptureChanged(bool aCapture)
{
  MOZ_ASSERT(mAudioChannelAgent);

  if (!mStream || Context()->IsOffline()) {
    return NS_OK;
  }

  nsCOMPtr<nsPIDOMWindowInner> ownerWindow = GetOwner();
  if (!ownerWindow) {
    return NS_OK;
  }

  if (aCapture != mCaptured) {
    if (aCapture) {
      nsCOMPtr<nsPIDOMWindowInner> window = Context()->GetParentObject();
      uint64_t id = window->WindowID();
      mCaptureStreamPort =
        mStream->Graph()->ConnectToCaptureStream(id, mStream);
    } else {
      mCaptureStreamPort->Destroy();
    }
    mCaptured = aCapture;
  }

  return NS_OK;
}

nsresult
AudioDestinationNode::CreateAudioChannelAgent()
{
  if (mIsOffline || mAudioChannelAgent) {
    return NS_OK;
  }

  mAudioChannelAgent = new AudioChannelAgent();
  nsresult rv = mAudioChannelAgent->InitWithWeakCallback(GetOwner(), this);
  if (NS_WARN_IF(NS_FAILED(rv))) {
    return rv;
  }

  return NS_OK;
}

void
AudioDestinationNode::InputMuted(bool aMuted)
{
  MOZ_ASSERT(Context() && !Context()->IsOffline());

  if (!mAudioChannelAgent) {
    if (aMuted) {
      return;
    }
    CreateAudioChannelAgent();
  }

  if (aMuted) {
    mAudioChannelAgent->NotifyStoppedPlaying();
    // Reset the state; from here on the node is always regarded as audible.
    mAudible = AudioChannelService::AudibleState::eAudible;
    return;
  }

  if (mDurationBeforeFirstTimeAudible.IsZero()) {
    MOZ_ASSERT(!aMuted);
    mDurationBeforeFirstTimeAudible = TimeStamp::Now() - mCreatedTime;
    Telemetry::Accumulate(Telemetry::WEB_AUDIO_BECOMES_AUDIBLE_TIME,
                          mDurationBeforeFirstTimeAudible.ToSeconds());
  }

  AudioPlaybackConfig config;
  nsresult rv = mAudioChannelAgent->NotifyStartedPlaying(&config,
                                                         mAudible);
  if (NS_WARN_IF(NS_FAILED(rv))) {
    return;
  }

  WindowVolumeChanged(config.mVolume, config.mMuted);
  WindowSuspendChanged(config.mSuspend);
}

} // namespace dom
} // namespace mozilla