/src/mozilla-central/dom/media/webrtc/MediaEngineWebRTC.h
Line | Count | Source (jump to first uncovered line) |
1 | | /* This Source Code Form is subject to the terms of the Mozilla Public |
2 | | * License, v. 2.0. If a copy of the MPL was not distributed with this file, |
3 | | * You can obtain one at http://mozilla.org/MPL/2.0/. */ |
4 | | |
5 | | #ifndef MEDIAENGINEWEBRTC_H_ |
6 | | #define MEDIAENGINEWEBRTC_H_ |
7 | | |
8 | | #include "AudioPacketizer.h" |
9 | | #include "AudioSegment.h" |
10 | | #include "AudioDeviceInfo.h" |
11 | | #include "CamerasChild.h" |
12 | | #include "cubeb/cubeb.h" |
13 | | #include "CubebUtils.h" |
14 | | #include "DOMMediaStream.h" |
15 | | #include "ipc/IPCMessageUtils.h" |
16 | | #include "MediaEngine.h" |
17 | | #include "MediaEnginePrefs.h" |
18 | | #include "MediaEngineSource.h" |
19 | | #include "MediaEngineWrapper.h" |
20 | | #include "MediaStreamGraph.h" |
21 | | #include "mozilla/dom/File.h" |
22 | | #include "mozilla/dom/MediaStreamTrackBinding.h" |
23 | | #include "mozilla/Mutex.h" |
24 | | #include "mozilla/Mutex.h" |
25 | | #include "mozilla/Sprintf.h" |
26 | | #include "mozilla/StaticMutex.h" |
27 | | #include "mozilla/UniquePtr.h" |
28 | | #include "nsAutoPtr.h" |
29 | | #include "nsComponentManagerUtils.h" |
30 | | #include "nsCOMPtr.h" |
31 | | #include "nsDirectoryServiceDefs.h" |
32 | | #include "nsIThread.h" |
33 | | #include "nsIRunnable.h" |
34 | | #include "nsRefPtrHashtable.h" |
35 | | #include "nsThreadUtils.h" |
36 | | #include "NullTransport.h" |
37 | | #include "prcvar.h" |
38 | | #include "prthread.h" |
39 | | #include "StreamTracks.h" |
40 | | #include "VideoSegment.h" |
41 | | #include "VideoUtils.h" |
42 | | |
43 | | // WebRTC library includes follow |
44 | | // Audio Engine |
45 | | #include "webrtc/voice_engine/include/voe_base.h" |
46 | | #include "webrtc/voice_engine/include/voe_codec.h" |
47 | | #include "webrtc/voice_engine/include/voe_network.h" |
48 | | #include "webrtc/voice_engine/include/voe_audio_processing.h" |
49 | | #include "webrtc/voice_engine/include/voe_volume_control.h" |
50 | | #include "webrtc/voice_engine/include/voe_external_media.h" |
51 | | #include "webrtc/voice_engine/include/voe_audio_processing.h" |
52 | | #include "webrtc/modules/audio_device/include/audio_device.h" |
53 | | #include "webrtc/modules/audio_processing/include/audio_processing.h" |
54 | | // Video Engine |
55 | | // conflicts with #include of scoped_ptr.h |
56 | | #undef FF |
57 | | #include "webrtc/modules/video_capture/video_capture_defines.h" |
58 | | |
59 | | namespace mozilla { |
60 | | |
61 | | class MediaEngineWebRTCMicrophoneSource; |
62 | | |
// Source for capturing the audio of a window ("AudioCapture" media source).
// Most MediaEngineSource methods are stubs here because the actual capture
// plumbing is managed in MediaManager.cpp.
class MediaEngineWebRTCAudioCaptureSource : public MediaEngineSource
{
public:
  // NOTE(review): aUuid is not stored or used by this constructor — confirm
  // the UUID returned by GetUUID() is produced elsewhere.
  explicit MediaEngineWebRTCAudioCaptureSource(const char* aUuid)
  {
  }
  nsString GetName() const override;
  nsCString GetUUID() const override;
  // Allocation is a no-op for this source; a null handle is returned to
  // signal that there is nothing to deallocate.
  nsresult Allocate(const dom::MediaTrackConstraints &aConstraints,
                    const MediaEnginePrefs &aPrefs,
                    const nsString& aDeviceId,
                    const ipc::PrincipalInfo& aPrincipalInfo,
                    AllocationHandle** aOutHandle,
                    const char** aOutBadConstraint) override
  {
    // Nothing to do here, everything is managed in MediaManager.cpp
    *aOutHandle = nullptr;
    return NS_OK;
  }
  nsresult Deallocate(const RefPtr<const AllocationHandle>& aHandle) override
  {
    // Nothing to do here, everything is managed in MediaManager.cpp
    // Allocate() always hands back a null handle, so none is expected here.
    MOZ_ASSERT(!aHandle);
    return NS_OK;
  }
  nsresult SetTrack(const RefPtr<const AllocationHandle>& aHandle,
                    const RefPtr<SourceMediaStream>& aStream,
                    TrackID aTrackID,
                    const PrincipalHandle& aPrincipal) override;
  nsresult Start(const RefPtr<const AllocationHandle>& aHandle) override;
  nsresult Stop(const RefPtr<const AllocationHandle>& aHandle) override;
  nsresult Reconfigure(const RefPtr<AllocationHandle>& aHandle,
                       const dom::MediaTrackConstraints& aConstraints,
                       const MediaEnginePrefs& aPrefs,
                       const nsString& aDeviceId,
                       const char** aOutBadConstraint) override;

  // No-op: this source does not supply data through Pull().
  void Pull(const RefPtr<const AllocationHandle>& aHandle,
            const RefPtr<SourceMediaStream>& aStream,
            TrackID aTrackID,
            StreamTime aDesiredTime,
            const PrincipalHandle& aPrincipalHandle) override
  {}

  dom::MediaSourceEnum GetMediaSource() const override
  {
    return dom::MediaSourceEnum::AudioCapture;
  }

  // Photo capture does not apply to an audio source.
  nsresult TakePhoto(MediaEnginePhotoCallback* aCallback) override
  {
    return NS_ERROR_NOT_IMPLEMENTED;
  }

  uint32_t GetBestFitnessDistance(
      const nsTArray<const NormalizedConstraintSet*>& aConstraintSets,
      const nsString& aDeviceId) const override;

protected:
  // Protected: instances are refcounted and must die through Release().
  virtual ~MediaEngineWebRTCAudioCaptureSource() = default;
};
124 | | |
// This class implements a cache for accessing the audio input device list.
// It can be accessed on any thread.
class CubebDeviceEnumerator final
{
public:
  CubebDeviceEnumerator();
  ~CubebDeviceEnumerator();
  // This method returns, in aOutDevices, a list of all the audio input
  // devices available on this machine.
  // This method is safe to call from all threads.
  void EnumerateAudioInputDevices(nsTArray<RefPtr<AudioDeviceInfo>>& aOutDevices);
  // From a cubeb device id, return the info for this device, if it's still a
  // valid id, or nullptr otherwise.
  // This method is safe to call from any thread.
  already_AddRefed<AudioDeviceInfo>
  DeviceInfoFromID(CubebUtils::AudioDeviceID aID);

protected:

  // Static function called by cubeb when the audio input device list changes
  // (i.e. when a new device is made available, or becomes unavailable). This
  // re-binds to the CubebDeviceEnumerator passed as aUser, and simply calls
  // `AudioDeviceListChanged` below.
  static void AudioDeviceListChanged_s(cubeb* aContext, void* aUser);
  // Invalidates the cached audio input device list, can be called on any
  // thread.
  void AudioDeviceListChanged();

private:
  // Synchronizes access to mDevices.
  Mutex mMutex;
  // Cached list of audio input devices; guarded by mMutex.
  nsTArray<RefPtr<AudioDeviceInfo>> mDevices;
  // If mManualInvalidation is true, then it is necessary to query the device
  // list each time instead of relying on automatic invalidation of the cache by
  // cubeb itself. Set in the constructor and then can be accessed on any thread.
  bool mManualInvalidation;
};
162 | | |
// This class is instantiated on the MediaManager thread, and is then sent to
// and only ever accessed again on the MediaStreamGraph.
class WebRTCAudioDataListener : public AudioDataListener
{
protected:
  // Protected destructor, to discourage deletion outside of Release():
  virtual ~WebRTCAudioDataListener() {}

public:
  explicit WebRTCAudioDataListener(MediaEngineWebRTCMicrophoneSource* aAudioSource)
    : mAudioSource(aAudioSource)
  {}

  // AudioDataListenerInterface methods.
  // NOTE(review): bodies live in the .cpp; presumably they forward the graph
  // callbacks to mAudioSource, which implements the same interface — confirm.
  void NotifyOutputData(MediaStreamGraphImpl* aGraph,
                        AudioDataValue* aBuffer,
                        size_t aFrames,
                        TrackRate aRate,
                        uint32_t aChannels) override;

  void NotifyInputData(MediaStreamGraphImpl* aGraph,
                       const AudioDataValue* aBuffer,
                       size_t aFrames,
                       TrackRate aRate,
                       uint32_t aChannels) override;

  uint32_t RequestedInputChannelCount(MediaStreamGraphImpl* aGraph) override;

  void DeviceChanged(MediaStreamGraphImpl* aGraph) override;

  void Disconnect(MediaStreamGraphImpl* aGraph) override;

private:
  // Keeps the source alive for as long as the graph holds this listener.
  RefPtr<MediaEngineWebRTCMicrophoneSource> mAudioSource;
};
198 | | |
// Microphone capture source. Implements AudioDataListenerInterface so it can
// receive input/output audio callbacks from the MediaStreamGraph, optionally
// runs the input through webrtc::AudioProcessing, and appends the result to
// the streams of its allocations.
class MediaEngineWebRTCMicrophoneSource : public MediaEngineSource,
                                          public AudioDataListenerInterface
{
public:
  MediaEngineWebRTCMicrophoneSource(RefPtr<AudioDeviceInfo> aInfo,
                                    const nsString& name,
                                    const nsCString& uuid,
                                    uint32_t maxChannelCount,
                                    bool aDelayAgnostic,
                                    bool aExtendedFilter);

  // This source never requires being shared between requests.
  bool RequiresSharing() const override
  {
    return false;
  }

  nsString GetName() const override;
  nsCString GetUUID() const override;

  nsresult Allocate(const dom::MediaTrackConstraints &aConstraints,
                    const MediaEnginePrefs& aPrefs,
                    const nsString& aDeviceId,
                    const ipc::PrincipalInfo& aPrincipalInfo,
                    AllocationHandle** aOutHandle,
                    const char** aOutBadConstraint) override;
  nsresult Deallocate(const RefPtr<const AllocationHandle>& aHandle) override;
  nsresult SetTrack(const RefPtr<const AllocationHandle>& aHandle,
                    const RefPtr<SourceMediaStream>& aStream,
                    TrackID aTrackID,
                    const PrincipalHandle& aPrincipal) override;
  nsresult Start(const RefPtr<const AllocationHandle>& aHandle) override;
  nsresult Stop(const RefPtr<const AllocationHandle>& aHandle) override;
  nsresult Reconfigure(const RefPtr<AllocationHandle>& aHandle,
                       const dom::MediaTrackConstraints& aConstraints,
                       const MediaEnginePrefs& aPrefs,
                       const nsString& aDeviceId,
                       const char** aOutBadConstraint) override;

  /**
   * Assigns the current settings of the capture to aOutSettings.
   * Main thread only.
   */
  void GetSettings(dom::MediaTrackSettings& aOutSettings) const override;

  void Pull(const RefPtr<const AllocationHandle>& aHandle,
            const RefPtr<SourceMediaStream>& aStream,
            TrackID aTrackID,
            StreamTime aDesiredTime,
            const PrincipalHandle& aPrincipalHandle) override;

  // AudioDataListenerInterface methods
  void NotifyOutputData(MediaStreamGraphImpl* aGraph,
                        AudioDataValue* aBuffer, size_t aFrames,
                        TrackRate aRate, uint32_t aChannels) override;
  void NotifyInputData(MediaStreamGraphImpl* aGraph,
                       const AudioDataValue* aBuffer, size_t aFrames,
                       TrackRate aRate, uint32_t aChannels) override;

  void DeviceChanged(MediaStreamGraphImpl* aGraph) override;

  uint32_t RequestedInputChannelCount(MediaStreamGraphImpl* aGraph) override
  {
    return GetRequestedInputChannelCount(aGraph);
  }

  void Disconnect(MediaStreamGraphImpl* aGraph) override;

  dom::MediaSourceEnum GetMediaSource() const override
  {
    return dom::MediaSourceEnum::Microphone;
  }

  // Photo capture does not apply to an audio source.
  nsresult TakePhoto(MediaEnginePhotoCallback* aCallback) override
  {
    return NS_ERROR_NOT_IMPLEMENTED;
  }

  uint32_t GetBestFitnessDistance(
      const nsTArray<const NormalizedConstraintSet*>& aConstraintSets,
      const nsString& aDeviceId) const override;

  void Shutdown() override;

protected:
  // Protected: instances are refcounted and must die through Release().
  ~MediaEngineWebRTCMicrophoneSource() {}

private:
  /**
   * Representation of data tied to an AllocationHandle rather than to the source.
   */
  struct Allocation {
    Allocation() = delete;
    explicit Allocation(const RefPtr<AllocationHandle>& aHandle);
    ~Allocation();

#ifdef DEBUG
    // The MSGImpl::IterationEnd() of the last time we appended data from an
    // audio callback.
    // Guarded by MediaEngineWebRTCMicrophoneSource::mMutex.
    GraphTime mLastCallbackAppendTime = 0;
#endif
    // Set to false by Start(). Becomes true after the first time we append real
    // audio frames from the audio callback.
    // Guarded by MediaEngineWebRTCMicrophoneSource::mMutex.
    bool mLiveFramesAppended = false;

    // Set to false by Start(). Becomes true after the first time we append
    // silence *after* the first audio callback has appended real frames.
    // Guarded by MediaEngineWebRTCMicrophoneSource::mMutex.
    bool mLiveSilenceAppended = false;

    // The handle this allocation is keyed on; never changes after construction.
    const RefPtr<AllocationHandle> mHandle;
    // Stream and track the captured audio is appended to.
    RefPtr<SourceMediaStream> mStream;
    TrackID mTrackID = TRACK_NONE;
    PrincipalHandle mPrincipal = PRINCIPAL_HANDLE_NONE;
    bool mEnabled = false;
  };

  /**
   * Used with nsTArray<Allocation>::IndexOf to locate an Allocation by a handle.
   */
  class AllocationHandleComparator {
  public:
    bool Equals(const Allocation& aAllocation,
                const RefPtr<const AllocationHandle>& aHandle) const
    {
      return aHandle == aAllocation.mHandle;
    }
  };

  /**
   * Reevaluates the aggregated constraints of all allocations and restarts the
   * underlying device if necessary.
   *
   * If the given AllocationHandle was already registered, its constraints will
   * be updated before reevaluation. If not, they will be added before
   * reevaluation.
   */
  nsresult ReevaluateAllocation(const RefPtr<AllocationHandle>& aHandle,
                                const NormalizedConstraints* aConstraintsUpdate,
                                const MediaEnginePrefs& aPrefs,
                                const nsString& aDeviceId,
                                const char** aOutBadConstraint);

  /**
   * Updates the underlying (single) device with the aggregated constraints
   * aNetConstraints. If the chosen settings for the device changes based on
   * these new constraints, and capture is active, the device will be restarted.
   */
  nsresult UpdateSingleSource(const RefPtr<const AllocationHandle>& aHandle,
                              const NormalizedConstraints& aNetConstraints,
                              const MediaEnginePrefs& aPrefs,
                              const nsString& aDeviceId,
                              const char** aOutBadConstraint);


  // Reconfigure the audio-processing submodules (echo cancellation, automatic
  // gain control, noise suppression) only when the requested state differs.
  void UpdateAECSettingsIfNeeded(bool aEnable, webrtc::EcModes aMode);
  void UpdateAGCSettingsIfNeeded(bool aEnable, webrtc::AgcModes aMode);
  void UpdateNSSettingsIfNeeded(bool aEnable, webrtc::NsModes aMode);

  void ApplySettings(const MediaEnginePrefs& aPrefs,
                     RefPtr<MediaStreamGraphImpl> aGraph);

  // Whether at least one allocation currently has its track enabled.
  bool HasEnabledTrack() const;

  // Append aFrames frames of aChannels-channel audio from aBuffer to the
  // streams of the allocations.
  template<typename T>
  void InsertInGraph(const T* aBuffer,
                     size_t aFrames,
                     uint32_t aChannels);

  // Full processing path for input audio: packetize, run through the
  // processing passes, and insert into the graph.
  void PacketizeAndProcess(MediaStreamGraphImpl* aGraph,
                           const AudioDataValue* aBuffer,
                           size_t aFrames,
                           TrackRate aRate,
                           uint32_t aChannels);


  // This is true when all processing is disabled, we can skip
  // packetization, resampling and other processing passes.
  // Graph thread only.
  bool PassThrough(MediaStreamGraphImpl* aGraphImpl) const;

  // Graph thread only.
  void SetPassThrough(bool aPassThrough);
  uint32_t GetRequestedInputChannelCount(MediaStreamGraphImpl* aGraphImpl);
  void SetRequestedInputChannelCount(uint32_t aRequestedInputChannelCount);

  // mListener is created on the MediaManager thread, and then sent to the MSG
  // thread. On shutdown, we send this pointer to the MSG thread again, telling
  // it to clean up.
  RefPtr<WebRTCAudioDataListener> mListener;

  // Can be shared on any thread.
  const RefPtr<AudioDeviceInfo> mDeviceInfo;

  // webrtc.org audio processing pipeline (AEC/AGC/NS); set at construction.
  const UniquePtr<webrtc::AudioProcessing> mAudioProcessing;

  // accessed from the GraphDriver thread except for deletion.
  nsAutoPtr<AudioPacketizer<AudioDataValue, float>> mPacketizerInput;
  nsAutoPtr<AudioPacketizer<AudioDataValue, float>> mPacketizerOutput;

  // mMutex protects some of our members off the owning thread.
  Mutex mMutex;

  // We append an allocation in Allocate() and remove it in Deallocate().
  // Both the array and the Allocation members are modified under mMutex on
  // the owning thread. Accessed under one of the two.
  nsTArray<Allocation> mAllocations;

  // Current state of the shared resource for this source. Written on the
  // owning thread, read on either the owning thread or the MSG thread.
  Atomic<MediaEngineSourceState> mState;

  // Constructor arguments for the audio-processing configuration.
  bool mDelayAgnostic;
  bool mExtendedFilter;
  bool mStarted;

  const nsString mDeviceName;
  const nsCString mDeviceUUID;

  // The current settings for the underlying device.
  // Member access is main thread only after construction.
  const nsMainThreadPtrHandle<media::Refcountable<dom::MediaTrackSettings>> mSettings;

  // The number of channels asked for by content, after clamping to the range of
  // legal channel count for this particular device. This is the number of
  // channels of the input buffer passed as parameter in NotifyInputData.
  uint32_t mRequestedInputChannelCount;
  // Frame counters, presumably for rate-limited logging — see mLastLogFrames.
  uint64_t mTotalFrames;
  uint64_t mLastLogFrames;

  // mSkipProcessing is true if none of the processing passes are enabled,
  // because of prefs or constraints. This allows simply copying the audio into
  // the MSG, skipping resampling and the whole webrtc.org code.
  // This is read and written to only on the MSG thread.
  bool mSkipProcessing;

  // To only update microphone when needed, we keep track of the prefs
  // representing the currently applied settings for this source. This is the
  // net result of the prefs across all allocations.
  // Owning thread only.
  MediaEnginePrefs mNetPrefs;

  // Stores the mixed audio output for the reverse-stream of the AEC.
  AlignedFloatBuffer mOutputBuffer;

  // Scratch buffers for the input-processing path.
  AlignedFloatBuffer mInputBuffer;
  AlignedFloatBuffer mDeinterleavedBuffer;
  AlignedFloatBuffer mInputDownmixBuffer;
};
449 | | |
450 | | class MediaEngineWebRTC : public MediaEngine |
451 | | { |
452 | | typedef MediaEngine Super; |
453 | | public: |
454 | | explicit MediaEngineWebRTC(MediaEnginePrefs& aPrefs); |
455 | | |
456 | | virtual void SetFakeDeviceChangeEvents() override; |
457 | | |
458 | | // Clients should ensure to clean-up sources video/audio sources |
459 | | // before invoking Shutdown on this class. |
460 | | void Shutdown() override; |
461 | | |
462 | | // Returns whether the host supports duplex audio stream. |
463 | | bool SupportsDuplex(); |
464 | | |
465 | | void EnumerateDevices(uint64_t aWindowId, |
466 | | dom::MediaSourceEnum, |
467 | | MediaSinkEnum, |
468 | | nsTArray<RefPtr<MediaDevice>>*) override; |
469 | | void ReleaseResourcesForWindow(uint64_t aWindowId) override; |
470 | | private: |
471 | 0 | ~MediaEngineWebRTC() = default; |
472 | | void EnumerateVideoDevices(uint64_t aWindowId, |
473 | | dom::MediaSourceEnum, |
474 | | nsTArray<RefPtr<MediaDevice>>*); |
475 | | void EnumerateMicrophoneDevices(uint64_t aWindowId, |
476 | | nsTArray<RefPtr<MediaDevice>>*); |
477 | | void EnumerateSpeakerDevices(uint64_t aWindowId, |
478 | | nsTArray<RefPtr<MediaDevice> >*); |
479 | | |
480 | | // gUM runnables can e.g. Enumerate from multiple threads |
481 | | Mutex mMutex; |
482 | | UniquePtr<mozilla::CubebDeviceEnumerator> mEnumerator; |
483 | | const bool mDelayAgnostic; |
484 | | const bool mExtendedFilter; |
485 | | // This also is set in the ctor and then never changed, but we can't make it |
486 | | // const because we pass it to a function that takes bool* in the ctor. |
487 | | bool mHasTabVideoSource; |
488 | | |
489 | | // Maps WindowID to a map of device uuid to their MediaEngineSource, |
490 | | // separately for audio and video. |
491 | | nsClassHashtable<nsUint64HashKey, |
492 | | nsRefPtrHashtable<nsStringHashKey, |
493 | | MediaEngineSource>> mVideoSources; |
494 | | nsClassHashtable<nsUint64HashKey, |
495 | | nsRefPtrHashtable<nsStringHashKey, |
496 | | MediaEngineSource>> mAudioSources; |
497 | | }; |
498 | | |
499 | | } |
500 | | |
#endif /* MEDIAENGINEWEBRTC_H_ */