/src/mozilla-central/dom/media/webrtc/MediaEngineRemoteVideoSource.cpp
Line | Count | Source (jump to first uncovered line) |
1 | | /* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- |
2 | | * This Source Code Form is subject to the terms of the Mozilla Public |
3 | | * License, v. 2.0. If a copy of the MPL was not distributed with this file, |
4 | | * You can obtain one at http://mozilla.org/MPL/2.0/. */ |
5 | | |
6 | | #include "MediaEngineRemoteVideoSource.h" |
7 | | |
8 | | #include "AllocationHandle.h" |
9 | | #include "CamerasChild.h" |
10 | | #include "MediaManager.h" |
11 | | #include "MediaTrackConstraints.h" |
12 | | #include "mozilla/ErrorNames.h" |
13 | | #include "mozilla/RefPtr.h" |
14 | | #include "nsIPrefService.h" |
15 | | #include "Tracing.h" |
16 | | #include "VideoFrameUtils.h" |
17 | | #include "VideoUtils.h" |
18 | | #include "webrtc/common_video/include/video_frame_buffer.h" |
19 | | #include "webrtc/common_video/libyuv/include/webrtc_libyuv.h" |
20 | | |
21 | | mozilla::LogModule* GetMediaManagerLog(); |
22 | 0 | #define LOG(msg) MOZ_LOG(GetMediaManagerLog(), mozilla::LogLevel::Debug, msg) |
23 | | #define LOGFRAME(msg) MOZ_LOG(GetMediaManagerLog(), mozilla::LogLevel::Verbose, msg) |
24 | | |
25 | | namespace mozilla { |
26 | | |
27 | | using dom::ConstrainLongRange; |
28 | | using dom::MediaSourceEnum; |
29 | | using dom::MediaTrackConstraints; |
30 | | using dom::MediaTrackConstraintSet; |
31 | | using dom::MediaTrackSettings; |
32 | | using dom::VideoFacingModeEnum; |
33 | | |
// Constructs a camera-backed video source for capture device `aIndex` on
// `aCapEngine`. The initializer list only stores arguments and constructs
// members; the IPC query for the device's name/unique id happens in Init().
MediaEngineRemoteVideoSource::MediaEngineRemoteVideoSource(
  int aIndex,
  camera::CaptureEngine aCapEngine,
  MediaSourceEnum aMediaSource,
  bool aScary)
  : mCaptureIndex(aIndex)
  , mMediaSource(aMediaSource)
  , mCapEngine(aCapEngine)
  , mScary(aScary)
  , mMutex("MediaEngineRemoteVideoSource::mMutex")
  , mRescalingBufferPool(/* zero_initialize */ false,
                         /* max_number_of_buffers */ 1)
  , mSettingsUpdatedByFrame(MakeAndAddRef<media::Refcountable<AtomicBool>>())
  , mSettings(MakeAndAddRef<media::Refcountable<MediaTrackSettings>>())
{
  MOZ_ASSERT(aMediaSource != MediaSourceEnum::Other);
  // Pre-construct the optional settings members so later code (Start(),
  // DeliverFrame()) can assign through .Value() without a presence check.
  mSettings->mWidth.Construct(0);
  mSettings->mHeight.Construct(0);
  mSettings->mFrameRate.Construct(0);
  Init();
}
55 | | |
56 | | void |
57 | | MediaEngineRemoteVideoSource::Init() |
58 | 0 | { |
59 | 0 | LOG((__PRETTY_FUNCTION__)); |
60 | 0 | AssertIsOnOwningThread(); |
61 | 0 |
|
62 | 0 | char deviceName[kMaxDeviceNameLength]; |
63 | 0 | char uniqueId[kMaxUniqueIdLength]; |
64 | 0 | if (camera::GetChildAndCall(&camera::CamerasChild::GetCaptureDevice, |
65 | 0 | mCapEngine, mCaptureIndex, |
66 | 0 | deviceName, kMaxDeviceNameLength, |
67 | 0 | uniqueId, kMaxUniqueIdLength, nullptr)) { |
68 | 0 | LOG(("Error initializing RemoteVideoSource (GetCaptureDevice)")); |
69 | 0 | return; |
70 | 0 | } |
71 | 0 |
|
72 | 0 | SetName(NS_ConvertUTF8toUTF16(deviceName)); |
73 | 0 | SetUUID(uniqueId); |
74 | 0 |
|
75 | 0 | mInitDone = true; |
76 | 0 | } |
77 | | |
// Tears the source down from whatever state it is in: stops capture if
// started, deallocates if allocated/stopped, and marks the source
// uninitialized. Ends with mState == kReleased.
void
MediaEngineRemoteVideoSource::Shutdown()
{
  LOG((__PRETTY_FUNCTION__));
  AssertIsOnOwningThread();

  if (!mInitDone) {
    // Already shut down
    return;
  }

  // Allocate always returns a null AllocationHandle.
  // We can safely pass nullptr here.
  if (mState == kStarted) {
    Stop(nullptr);
  }
  if (mState == kAllocated || mState == kStopped) {
    Deallocate(nullptr);
  }
  MOZ_ASSERT(mState == kReleased);

  mInitDone = false;
}
101 | | |
// Stores the device name and derives the camera's facing mode from
// platform-specific device-name conventions (see per-OS comments below).
// The derived facing mode string is cached in mFacingMode for constraint
// matching (GetFitnessDistance/GetFeasibilityDistance).
void
MediaEngineRemoteVideoSource::SetName(nsString aName)
{
  LOG((__PRETTY_FUNCTION__));
  AssertIsOnOwningThread();

  mDeviceName = std::move(aName);
  bool hasFacingMode = false;
  VideoFacingModeEnum facingMode = VideoFacingModeEnum::User;

  // Set facing mode based on device name.
#if defined(ANDROID)
  // Names are generated. Example: "Camera 0, Facing back, Orientation 90"
  //
  // See media/webrtc/trunk/webrtc/modules/video_capture/android/java/src/org/
  // webrtc/videoengine/VideoCaptureDeviceInfoAndroid.java

  if (mDeviceName.Find(NS_LITERAL_STRING("Facing back")) != kNotFound) {
    hasFacingMode = true;
    facingMode = VideoFacingModeEnum::Environment;
  } else if (mDeviceName.Find(NS_LITERAL_STRING("Facing front")) != kNotFound) {
    hasFacingMode = true;
    facingMode = VideoFacingModeEnum::User;
  }
#endif // ANDROID
#ifdef XP_MACOSX
  // Kludge to test user-facing cameras on OSX.
  if (mDeviceName.Find(NS_LITERAL_STRING("Face")) != -1) {
    hasFacingMode = true;
    facingMode = VideoFacingModeEnum::User;
  }
#endif
#ifdef XP_WIN
  // The cameras' name of Surface book are "Microsoft Camera Front" and
  // "Microsoft Camera Rear" respectively.

  if (mDeviceName.Find(NS_LITERAL_STRING("Front")) != kNotFound) {
    hasFacingMode = true;
    facingMode = VideoFacingModeEnum::User;
  } else if (mDeviceName.Find(NS_LITERAL_STRING("Rear")) != kNotFound) {
    hasFacingMode = true;
    facingMode = VideoFacingModeEnum::Environment;
  }
#endif // WINDOWS
  if (hasFacingMode) {
    // Convert the enum to its spec string form via the WebIDL string table.
    mFacingMode.Assign(NS_ConvertUTF8toUTF16(
        dom::VideoFacingModeEnumValues::strings[uint32_t(facingMode)].value));
  } else {
    mFacingMode.Truncate();
  }
}
153 | | |
// Returns the human-readable device name recorded by Init()/SetName().
nsString
MediaEngineRemoteVideoSource::GetName() const
{
  AssertIsOnOwningThread();

  return mDeviceName;
}
161 | | |
// Records the backend-provided unique device id (used as the IPC key in
// Allocate/NumCapabilities/GetCapability).
void
MediaEngineRemoteVideoSource::SetUUID(const char* aUUID)
{
  AssertIsOnOwningThread();

  mUniqueId.Assign(aUUID);
}
169 | | |
// Returns the unique device id recorded by Init()/SetUUID().
nsCString
MediaEngineRemoteVideoSource::GetUUID() const
{
  AssertIsOnOwningThread();

  return mUniqueId;
}
177 | | |
178 | | nsresult |
179 | | MediaEngineRemoteVideoSource::Allocate( |
180 | | const MediaTrackConstraints& aConstraints, |
181 | | const MediaEnginePrefs& aPrefs, |
182 | | const nsString& aDeviceId, |
183 | | const mozilla::ipc::PrincipalInfo& aPrincipalInfo, |
184 | | AllocationHandle** aOutHandle, |
185 | | const char** aOutBadConstraint) |
186 | 0 | { |
187 | 0 | LOG((__PRETTY_FUNCTION__)); |
188 | 0 | AssertIsOnOwningThread(); |
189 | 0 |
|
190 | 0 | MOZ_ASSERT(mState == kReleased); |
191 | 0 |
|
192 | 0 | if (!mInitDone) { |
193 | 0 | LOG(("Init not done")); |
194 | 0 | return NS_ERROR_FAILURE; |
195 | 0 | } |
196 | 0 |
|
197 | 0 | NormalizedConstraints constraints(aConstraints); |
198 | 0 | webrtc::CaptureCapability newCapability; |
199 | 0 | LOG(("ChooseCapability(kFitness) for mCapability (Allocate) ++")); |
200 | 0 | if (!ChooseCapability(constraints, aPrefs, aDeviceId, newCapability, kFitness)) { |
201 | 0 | *aOutBadConstraint = |
202 | 0 | MediaConstraintsHelper::FindBadConstraint(constraints, this, aDeviceId); |
203 | 0 | return NS_ERROR_FAILURE; |
204 | 0 | } |
205 | 0 | LOG(("ChooseCapability(kFitness) for mCapability (Allocate) --")); |
206 | 0 |
|
207 | 0 | if (camera::GetChildAndCall(&camera::CamerasChild::AllocateCaptureDevice, |
208 | 0 | mCapEngine, mUniqueId.get(), |
209 | 0 | kMaxUniqueIdLength, mCaptureIndex, |
210 | 0 | aPrincipalInfo)) { |
211 | 0 | return NS_ERROR_FAILURE; |
212 | 0 | } |
213 | 0 | |
214 | 0 | *aOutHandle = nullptr; |
215 | 0 |
|
216 | 0 | { |
217 | 0 | MutexAutoLock lock(mMutex); |
218 | 0 | mState = kAllocated; |
219 | 0 | mCapability = newCapability; |
220 | 0 | } |
221 | 0 |
|
222 | 0 | LOG(("Video device %d allocated", mCaptureIndex)); |
223 | 0 | return NS_OK; |
224 | 0 | } |
225 | | |
// Releases the capture device and resets all per-allocation state. Requires
// the source to be stopped (or never started). Ends with mState == kReleased.
nsresult
MediaEngineRemoteVideoSource::Deallocate(const RefPtr<const AllocationHandle>& aHandle)
{
  LOG((__PRETTY_FUNCTION__));
  AssertIsOnOwningThread();

  MOZ_ASSERT(mState == kStopped || mState == kAllocated);

  // End the track (outside the lock) before clearing mStream/mTrackID.
  if (mStream && IsTrackIDExplicit(mTrackID)) {
    mStream->EndTrack(mTrackID);
  }

  {
    MutexAutoLock lock(mMutex);

    mStream = nullptr;
    mTrackID = TRACK_NONE;
    mPrincipal = PRINCIPAL_HANDLE_NONE;
    mState = kReleased;
  }

  // Stop() has stopped capture synchronously on the media thread before we get
  // here, so there are no longer any callbacks on an IPC thread accessing
  // mImageContainer or mRescalingBufferPool.
  mImageContainer = nullptr;
  mRescalingBufferPool.Release();

  LOG(("Video device %d deallocated", mCaptureIndex));

  if (camera::GetChildAndCall(&camera::CamerasChild::ReleaseCaptureDevice,
                              mCapEngine, mCaptureIndex)) {
    MOZ_ASSERT_UNREACHABLE("Couldn't release allocated device");
  }
  return NS_OK;
}
261 | | |
// Binds this source to a SourceMediaStream track; called after Allocate()
// and before Start(). The (stream, trackID, principal) triple is published
// under the mutex because other threads read it (e.g. Pull()).
nsresult
MediaEngineRemoteVideoSource::SetTrack(const RefPtr<const AllocationHandle>& aHandle,
                                       const RefPtr<SourceMediaStream>& aStream,
                                       TrackID aTrackID,
                                       const PrincipalHandle& aPrincipal)
{
  LOG((__PRETTY_FUNCTION__));
  AssertIsOnOwningThread();

  MOZ_ASSERT(mState == kAllocated);
  MOZ_ASSERT(!mStream);
  MOZ_ASSERT(mTrackID == TRACK_NONE);
  MOZ_ASSERT(aStream);
  MOZ_ASSERT(IsTrackIDExplicit(aTrackID));

  // Lazily create the image container that DeliverFrame() allocates its
  // PlanarYCbCr images from.
  if (!mImageContainer) {
    mImageContainer = layers::LayerManager::CreateImageContainer(
      layers::ImageContainer::ASYNCHRONOUS);
  }

  {
    MutexAutoLock lock(mMutex);
    mStream = aStream;
    mTrackID = aTrackID;
    mPrincipal = aPrincipal;
  }
  aStream->AddTrack(aTrackID, 0, new VideoSegment(),
                    SourceMediaStream::ADDTRACK_QUEUED);
  return NS_OK;
}
292 | | |
// Starts capture with the previously chosen mCapability, and asynchronously
// publishes the resulting MediaTrackSettings on the main thread.
nsresult
MediaEngineRemoteVideoSource::Start(const RefPtr<const AllocationHandle>& aHandle)
{
  LOG((__PRETTY_FUNCTION__));
  AssertIsOnOwningThread();

  MOZ_ASSERT(mState == kAllocated || mState == kStopped);
  MOZ_ASSERT(mInitDone);
  MOZ_ASSERT(mStream);
  MOZ_ASSERT(IsTrackIDExplicit(mTrackID));

  {
    // Flip to kStarted before StartCapture so frame callbacks observe a
    // consistent state (DeliverFrame asserts mState == kStarted).
    MutexAutoLock lock(mMutex);
    mState = kStarted;
  }

  // Reset so the first delivered frame takes over width/height reporting.
  mSettingsUpdatedByFrame->mValue = false;

  if (camera::GetChildAndCall(&camera::CamerasChild::StartCapture,
                              mCapEngine, mCaptureIndex, mCapability, this)) {
    LOG(("StartCapture failed"));
    MutexAutoLock lock(mMutex);
    mState = kStopped;
    return NS_ERROR_FAILURE;
  }

  NS_DispatchToMainThread(NS_NewRunnableFunction(
    "MediaEngineRemoteVideoSource::SetLastCapability",
    [settings = mSettings,
     updated = mSettingsUpdatedByFrame,
     source = mMediaSource,
     cap = mCapability]() mutable {
    switch (source) {
      case dom::MediaSourceEnum::Screen:
      case dom::MediaSourceEnum::Window:
      case dom::MediaSourceEnum::Application:
        // Undo the hack where ideal and max constraints are crammed together
        // in mCapability for consumption by low-level code. We don't actually
        // know the real resolution yet, so report min(ideal, max) for now.
        // TODO: This can be removed in bug 1453269.
        cap.width = std::min(cap.width >> 16, cap.width & 0xffff);
        cap.height = std::min(cap.height >> 16, cap.height & 0xffff);
        break;
      default:
        break;
    }

    // Don't clobber dimensions a delivered frame has already reported.
    if (!updated->mValue) {
      settings->mWidth.Value() = cap.width;
      settings->mHeight.Value() = cap.height;
    }
    settings->mFrameRate.Value() = cap.maxFPS;
  }));

  return NS_OK;
}
349 | | |
350 | | nsresult |
351 | | MediaEngineRemoteVideoSource::FocusOnSelectedSource(const RefPtr<const AllocationHandle>& aHandle) |
352 | 0 | { |
353 | 0 | LOG((__PRETTY_FUNCTION__)); |
354 | 0 | AssertIsOnOwningThread(); |
355 | 0 |
|
356 | 0 | int result; |
357 | 0 | result = camera::GetChildAndCall(&camera::CamerasChild::FocusOnSelectedSource, |
358 | 0 | mCapEngine, mCaptureIndex); |
359 | 0 | return result == 0 ? NS_OK : NS_ERROR_FAILURE; |
360 | 0 | } |
361 | | |
362 | | nsresult |
363 | | MediaEngineRemoteVideoSource::Stop(const RefPtr<const AllocationHandle>& aHandle) |
364 | 0 | { |
365 | 0 | LOG((__PRETTY_FUNCTION__)); |
366 | 0 | AssertIsOnOwningThread(); |
367 | 0 |
|
368 | 0 | if (mState == kStopped || mState == kAllocated) { |
369 | 0 | return NS_OK; |
370 | 0 | } |
371 | 0 | |
372 | 0 | MOZ_ASSERT(mState == kStarted); |
373 | 0 |
|
374 | 0 | if (camera::GetChildAndCall(&camera::CamerasChild::StopCapture, |
375 | 0 | mCapEngine, mCaptureIndex)) { |
376 | 0 | MOZ_DIAGNOSTIC_ASSERT(false, "Stopping a started capture failed"); |
377 | 0 | return NS_ERROR_FAILURE; |
378 | 0 | } |
379 | 0 | |
380 | 0 | { |
381 | 0 | MutexAutoLock lock(mMutex); |
382 | 0 | mState = kStopped; |
383 | 0 |
|
384 | 0 | // Drop any cached image so we don't start with a stale image on next |
385 | 0 | // usage. Also, gfx gets very upset if these are held until this object |
386 | 0 | // is gc'd in final-cc during shutdown (bug 1374164) |
387 | 0 | mImage = nullptr; |
388 | 0 | } |
389 | 0 |
|
390 | 0 | return NS_OK; |
391 | 0 | } |
392 | | |
393 | | nsresult |
394 | | MediaEngineRemoteVideoSource::Reconfigure(const RefPtr<AllocationHandle>& aHandle, |
395 | | const MediaTrackConstraints& aConstraints, |
396 | | const MediaEnginePrefs& aPrefs, |
397 | | const nsString& aDeviceId, |
398 | | const char** aOutBadConstraint) |
399 | 0 | { |
400 | 0 | LOG((__PRETTY_FUNCTION__)); |
401 | 0 | AssertIsOnOwningThread(); |
402 | 0 |
|
403 | 0 | MOZ_ASSERT(mInitDone); |
404 | 0 |
|
405 | 0 | NormalizedConstraints constraints(aConstraints); |
406 | 0 | webrtc::CaptureCapability newCapability; |
407 | 0 | LOG(("ChooseCapability(kFitness) for mTargetCapability (Reconfigure) ++")); |
408 | 0 | if (!ChooseCapability(constraints, aPrefs, aDeviceId, newCapability, kFitness)) { |
409 | 0 | *aOutBadConstraint = |
410 | 0 | MediaConstraintsHelper::FindBadConstraint(constraints, this, aDeviceId); |
411 | 0 | return NS_ERROR_INVALID_ARG; |
412 | 0 | } |
413 | 0 | LOG(("ChooseCapability(kFitness) for mTargetCapability (Reconfigure) --")); |
414 | 0 |
|
415 | 0 | if (mCapability == newCapability) { |
416 | 0 | return NS_OK; |
417 | 0 | } |
418 | 0 | |
419 | 0 | bool started = mState == kStarted; |
420 | 0 | if (started) { |
421 | 0 | // Allocate always returns a null AllocationHandle. |
422 | 0 | // We can safely pass nullptr below. |
423 | 0 | nsresult rv = Stop(nullptr); |
424 | 0 | if (NS_WARN_IF(NS_FAILED(rv))) { |
425 | 0 | nsAutoCString name; |
426 | 0 | GetErrorName(rv, name); |
427 | 0 | LOG(("Video source %p for video device %d Reconfigure() failed " |
428 | 0 | "unexpectedly in Stop(). rv=%s", this, mCaptureIndex, name.Data())); |
429 | 0 | return NS_ERROR_UNEXPECTED; |
430 | 0 | } |
431 | 0 | } |
432 | 0 |
|
433 | 0 | { |
434 | 0 | MutexAutoLock lock(mMutex); |
435 | 0 | // Start() applies mCapability on the device. |
436 | 0 | mCapability = newCapability; |
437 | 0 | } |
438 | 0 |
|
439 | 0 | if (started) { |
440 | 0 | nsresult rv = Start(nullptr); |
441 | 0 | if (NS_WARN_IF(NS_FAILED(rv))) { |
442 | 0 | nsAutoCString name; |
443 | 0 | GetErrorName(rv, name); |
444 | 0 | LOG(("Video source %p for video device %d Reconfigure() failed " |
445 | 0 | "unexpectedly in Start(). rv=%s", this, mCaptureIndex, name.Data())); |
446 | 0 | return NS_ERROR_UNEXPECTED; |
447 | 0 | } |
448 | 0 | } |
449 | 0 |
|
450 | 0 | return NS_OK; |
451 | 0 | } |
452 | | |
453 | | size_t |
454 | | MediaEngineRemoteVideoSource::NumCapabilities() const |
455 | 0 | { |
456 | 0 | AssertIsOnOwningThread(); |
457 | 0 |
|
458 | 0 | mHardcodedCapabilities.Clear(); |
459 | 0 | int num = camera::GetChildAndCall(&camera::CamerasChild::NumberOfCapabilities, |
460 | 0 | mCapEngine, mUniqueId.get()); |
461 | 0 |
|
462 | 0 | if (num >= 1) { |
463 | 0 | return num; |
464 | 0 | } |
465 | 0 | |
466 | 0 | // The default for devices that don't return discrete capabilities: treat |
467 | 0 | // them as supporting all capabilities orthogonally. E.g. screensharing. |
468 | 0 | // CaptureCapability defaults key values to 0, which means accept any value. |
469 | 0 | mHardcodedCapabilities.AppendElement(webrtc::CaptureCapability()); |
470 | 0 | return mHardcodedCapabilities.Length(); // 1 |
471 | 0 | } |
472 | | |
473 | | webrtc::CaptureCapability |
474 | | MediaEngineRemoteVideoSource::GetCapability(size_t aIndex) const |
475 | 0 | { |
476 | 0 | AssertIsOnOwningThread(); |
477 | 0 | webrtc::CaptureCapability result; |
478 | 0 | if (!mHardcodedCapabilities.IsEmpty()) { |
479 | 0 | MOZ_ASSERT(aIndex < mHardcodedCapabilities.Length()); |
480 | 0 | result = mHardcodedCapabilities.SafeElementAt(aIndex, webrtc::CaptureCapability()); |
481 | 0 | } |
482 | 0 | camera::GetChildAndCall(&camera::CamerasChild::GetCaptureCapability, |
483 | 0 | mCapEngine, mUniqueId.get(), aIndex, result); |
484 | 0 | return result; |
485 | 0 | } |
486 | | |
487 | | void |
488 | | MediaEngineRemoteVideoSource::Pull(const RefPtr<const AllocationHandle>& aHandle, |
489 | | const RefPtr<SourceMediaStream>& aStream, |
490 | | TrackID aTrackID, |
491 | | StreamTime aDesiredTime, |
492 | | const PrincipalHandle& aPrincipalHandle) |
493 | 0 | { |
494 | 0 | TRACE_AUDIO_CALLBACK_COMMENT("SourceMediaStream %p track %i", |
495 | 0 | aStream.get(), aTrackID); |
496 | 0 | MutexAutoLock lock(mMutex); |
497 | 0 | if (mState == kReleased) { |
498 | 0 | // We end the track before deallocating, so this is safe. |
499 | 0 | return; |
500 | 0 | } |
501 | 0 | |
502 | 0 | MOZ_ASSERT(mState == kStarted || mState == kStopped); |
503 | 0 |
|
504 | 0 | StreamTime delta = aDesiredTime - aStream->GetEndOfAppendedData(aTrackID); |
505 | 0 | if (delta <= 0) { |
506 | 0 | return; |
507 | 0 | } |
508 | 0 | |
509 | 0 | VideoSegment segment; |
510 | 0 | RefPtr<layers::Image> image = mImage; |
511 | 0 | if (mState == kStarted) { |
512 | 0 | MOZ_ASSERT(!image || mImageSize == image->GetSize()); |
513 | 0 | segment.AppendFrame(image.forget(), delta, mImageSize, aPrincipalHandle); |
514 | 0 | } else { |
515 | 0 | // nullptr images are allowed, but we force it to black and retain the size. |
516 | 0 | segment.AppendFrame(image.forget(), delta, mImageSize, aPrincipalHandle, true); |
517 | 0 | } |
518 | 0 |
|
519 | 0 | // This is safe from any thread, and is safe if the track is Finished |
520 | 0 | // or Destroyed. |
521 | 0 | // This can fail if either a) we haven't added the track yet, or b) |
522 | 0 | // we've removed or finished the track. |
523 | 0 | aStream->AppendToTrack(aTrackID, &segment); |
524 | 0 | } |
525 | | |
// Frame callback, invoked on the Cameras IPC thread with a raw I420 buffer.
// Computes the destination size from the requested capability (which packs
// ideal<<16 | max per dimension until bug 1453269), optionally downscales,
// copies the result into a layers image and caches it in mImage for the next
// Pull(). Returns 0 on every path.
int
MediaEngineRemoteVideoSource::DeliverFrame(uint8_t* aBuffer,
                                           const camera::VideoFrameProperties& aProps)
{
  // Cameras IPC thread - take great care with accessing members!

  int32_t req_max_width;
  int32_t req_max_height;
  int32_t req_ideal_width;
  int32_t req_ideal_height;
  {
    MutexAutoLock lock(mMutex);
    MOZ_ASSERT(mState == kStarted);
    // TODO: These can be removed in bug 1453269.
    req_max_width = mCapability.width & 0xffff;
    req_max_height = mCapability.height & 0xffff;
    req_ideal_width = (mCapability.width >> 16) & 0xffff;
    req_ideal_height = (mCapability.height >> 16) & 0xffff;
  }

  // This is only used in the case of screen sharing, see bug 1453269.
  const int32_t target_width = aProps.width();
  const int32_t target_height = aProps.height();

  if (aProps.rotation() == 90 || aProps.rotation() == 270) {
    // This frame is rotated, so what was negotiated as width is now height,
    // and vice versa.
    std::swap(req_max_width, req_max_height);
    std::swap(req_ideal_width, req_ideal_height);
  }

  // Clamp the requested maxima to the actual frame size; 0 means "no max".
  int32_t dst_max_width = req_max_width == 0 ? aProps.width() :
    std::min(req_max_width, aProps.width());
  int32_t dst_max_height = req_max_height == 0 ? aProps.height() :
    std::min(req_max_height, aProps.height());
  // This logic works for both camera and screen sharing case.
  // for camera case, req_ideal_width and req_ideal_height is 0.
  // The following snippet will set dst_width to dst_max_width and dst_height to dst_max_height
  int32_t dst_width = std::min(req_ideal_width > 0 ? req_ideal_width : aProps.width(), dst_max_width);
  int32_t dst_height = std::min(req_ideal_height > 0 ? req_ideal_height : aProps.height(), dst_max_height);

  // Apply scaling for screen sharing, see bug 1453269.
  switch (mMediaSource) {
    case MediaSourceEnum::Screen:
    case MediaSourceEnum::Window:
    case MediaSourceEnum::Application: {
      // scale to average of portrait and landscape
      float scale_width = (float)dst_width / (float)aProps.width();
      float scale_height = (float)dst_height / (float)aProps.height();
      float scale = (scale_width + scale_height) / 2;
      dst_width = (int)(scale * target_width);
      dst_height = (int)(scale * target_height);

      // if scaled rectangle exceeds max rectangle, scale to minimum of portrait and landscape
      if (dst_width > dst_max_width || dst_height > dst_max_height) {
        scale_width = (float)dst_max_width / (float)dst_width;
        scale_height = (float)dst_max_height / (float)dst_height;
        scale = std::min(scale_width, scale_height);
        dst_width = (int32_t)(scale * dst_width);
        dst_height = (int32_t)(scale * dst_height);
      }
      break;
    }
    default: {
      break;
    }
  }

  // Wrap the incoming raw planes without copying; the no-op callback means
  // no ownership is taken of aBuffer.
  rtc::Callback0<void> callback_unused;
  rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer =
    new rtc::RefCountedObject<webrtc::WrappedI420Buffer>(
      aProps.width(),
      aProps.height(),
      aBuffer,
      aProps.yStride(),
      aBuffer + aProps.yAllocatedSize(),
      aProps.uStride(),
      aBuffer + aProps.yAllocatedSize() + aProps.uAllocatedSize(),
      aProps.vStride(),
      callback_unused);

  if ((dst_width != aProps.width() || dst_height != aProps.height()) &&
      dst_width <= aProps.width() &&
      dst_height <= aProps.height()) {
    // Destination resolution is smaller than source buffer. We'll rescale.
    rtc::scoped_refptr<webrtc::I420Buffer> scaledBuffer =
      mRescalingBufferPool.CreateBuffer(dst_width, dst_height);
    if (!scaledBuffer) {
      MOZ_ASSERT_UNREACHABLE("We might fail to allocate a buffer, but with this "
                             "being a recycling pool that shouldn't happen");
      return 0;
    }
    scaledBuffer->CropAndScaleFrom(*buffer);
    buffer = scaledBuffer;
  }

  // Describe the (possibly rescaled) planes for the layers image copy.
  layers::PlanarYCbCrData data;
  data.mYChannel = const_cast<uint8_t*>(buffer->DataY());
  data.mYSize = IntSize(buffer->width(), buffer->height());
  data.mYStride = buffer->StrideY();
  MOZ_ASSERT(buffer->StrideU() == buffer->StrideV());
  data.mCbCrStride = buffer->StrideU();
  data.mCbChannel = const_cast<uint8_t*>(buffer->DataU());
  data.mCrChannel = const_cast<uint8_t*>(buffer->DataV());
  data.mCbCrSize = IntSize((buffer->width() + 1) / 2,
                           (buffer->height() + 1) / 2);
  data.mPicX = 0;
  data.mPicY = 0;
  data.mPicSize = IntSize(buffer->width(), buffer->height());

  RefPtr<layers::PlanarYCbCrImage> image =
    mImageContainer->CreatePlanarYCbCrImage();
  if (!image->CopyData(data)) {
    MOZ_ASSERT_UNREACHABLE("We might fail to allocate a buffer, but with this "
                           "being a recycling container that shouldn't happen");
    return 0;
  }

#ifdef DEBUG
  static uint32_t frame_num = 0;
  LOGFRAME(("frame %d (%dx%d)->(%dx%d); rotation %d, timeStamp %u, "
            "ntpTimeMs %" PRIu64 ", renderTimeMs %" PRIu64,
            frame_num++, aProps.width(), aProps.height(), dst_width, dst_height,
            aProps.rotation(), aProps.timeStamp(), aProps.ntpTimeMs(),
            aProps.renderTimeMs()));
#endif

  // Report a size change to the main thread so MediaTrackSettings stay
  // current; the flag tells Start()'s runnable not to overwrite these.
  if (mImageSize.width != dst_width || mImageSize.height != dst_height) {
    NS_DispatchToMainThread(NS_NewRunnableFunction(
      "MediaEngineRemoteVideoSource::FrameSizeChange",
      [settings = mSettings,
       updated = mSettingsUpdatedByFrame,
       dst_width,
       dst_height]() mutable {
      settings->mWidth.Value() = dst_width;
      settings->mHeight.Value() = dst_height;
      updated->mValue = true;
    }));
  }

  {
    MutexAutoLock lock(mMutex);
    // implicitly releases last image
    mImage = image.forget();
    mImageSize = mImage->GetSize();
  }

  // We'll push the frame into the MSG on the next Pull. This will avoid
  // swamping the MSG with frames should it be taking longer than normal to run
  // an iteration.

  return 0;
}
679 | | |
680 | | uint32_t |
681 | | MediaEngineRemoteVideoSource::GetDistance( |
682 | | const webrtc::CaptureCapability& aCandidate, |
683 | | const NormalizedConstraintSet &aConstraints, |
684 | | const nsString& aDeviceId, |
685 | | const DistanceCalculation aCalculate) const |
686 | 0 | { |
687 | 0 | if (aCalculate == kFeasibility) { |
688 | 0 | return GetFeasibilityDistance(aCandidate, aConstraints, aDeviceId); |
689 | 0 | } |
690 | 0 | return GetFitnessDistance(aCandidate, aConstraints, aDeviceId); |
691 | 0 | } |
692 | | |
693 | | uint32_t |
694 | | MediaEngineRemoteVideoSource::GetFitnessDistance( |
695 | | const webrtc::CaptureCapability& aCandidate, |
696 | | const NormalizedConstraintSet& aConstraints, |
697 | | const nsString& aDeviceId) const |
698 | 0 | { |
699 | 0 | AssertIsOnOwningThread(); |
700 | 0 |
|
701 | 0 | // Treat width|height|frameRate == 0 on capability as "can do any". |
702 | 0 | // This allows for orthogonal capabilities that are not in discrete steps. |
703 | 0 |
|
704 | 0 | typedef MediaConstraintsHelper H; |
705 | 0 | uint64_t distance = |
706 | 0 | uint64_t(H::FitnessDistance(aDeviceId, aConstraints.mDeviceId)) + |
707 | 0 | uint64_t(H::FitnessDistance(mFacingMode, aConstraints.mFacingMode)) + |
708 | 0 | uint64_t(aCandidate.width ? H::FitnessDistance(int32_t(aCandidate.width), |
709 | 0 | aConstraints.mWidth) : 0) + |
710 | 0 | uint64_t(aCandidate.height ? H::FitnessDistance(int32_t(aCandidate.height), |
711 | 0 | aConstraints.mHeight) : 0) + |
712 | 0 | uint64_t(aCandidate.maxFPS ? H::FitnessDistance(double(aCandidate.maxFPS), |
713 | 0 | aConstraints.mFrameRate) : 0); |
714 | 0 | return uint32_t(std::min(distance, uint64_t(UINT32_MAX))); |
715 | 0 | } |
716 | | |
717 | | uint32_t |
718 | | MediaEngineRemoteVideoSource::GetFeasibilityDistance( |
719 | | const webrtc::CaptureCapability& aCandidate, |
720 | | const NormalizedConstraintSet& aConstraints, |
721 | | const nsString& aDeviceId) const |
722 | 0 | { |
723 | 0 | AssertIsOnOwningThread(); |
724 | 0 |
|
725 | 0 | // Treat width|height|frameRate == 0 on capability as "can do any". |
726 | 0 | // This allows for orthogonal capabilities that are not in discrete steps. |
727 | 0 |
|
728 | 0 | typedef MediaConstraintsHelper H; |
729 | 0 | uint64_t distance = |
730 | 0 | uint64_t(H::FitnessDistance(aDeviceId, aConstraints.mDeviceId)) + |
731 | 0 | uint64_t(H::FitnessDistance(mFacingMode, aConstraints.mFacingMode)) + |
732 | 0 | uint64_t(aCandidate.width ? H::FeasibilityDistance(int32_t(aCandidate.width), |
733 | 0 | aConstraints.mWidth) : 0) + |
734 | 0 | uint64_t(aCandidate.height ? H::FeasibilityDistance(int32_t(aCandidate.height), |
735 | 0 | aConstraints.mHeight) : 0) + |
736 | 0 | uint64_t(aCandidate.maxFPS ? H::FeasibilityDistance(double(aCandidate.maxFPS), |
737 | 0 | aConstraints.mFrameRate) : 0); |
738 | 0 | return uint32_t(std::min(distance, uint64_t(UINT32_MAX))); |
739 | 0 | } |
740 | | |
741 | | // Find best capability by removing inferiors. May leave >1 of equal distance |
742 | | |
743 | | /* static */ void |
744 | | MediaEngineRemoteVideoSource::TrimLessFitCandidates(nsTArray<CapabilityCandidate>& aSet) |
745 | 0 | { |
746 | 0 | uint32_t best = UINT32_MAX; |
747 | 0 | for (auto& candidate : aSet) { |
748 | 0 | if (best > candidate.mDistance) { |
749 | 0 | best = candidate.mDistance; |
750 | 0 | } |
751 | 0 | } |
752 | 0 | for (size_t i = 0; i < aSet.Length();) { |
753 | 0 | if (aSet[i].mDistance > best) { |
754 | 0 | aSet.RemoveElementAt(i); |
755 | 0 | } else { |
756 | 0 | ++i; |
757 | 0 | } |
758 | 0 | } |
759 | 0 | MOZ_ASSERT(aSet.Length()); |
760 | 0 | } |
761 | | |
// Returns the smallest fitness distance any of this device's capabilities
// achieves across all given constraint sets, or UINT32_MAX if no capability
// satisfies every set. Only the first set determines the reported distance;
// later sets merely prune incompatible candidates.
uint32_t
MediaEngineRemoteVideoSource::GetBestFitnessDistance(
    const nsTArray<const NormalizedConstraintSet*>& aConstraintSets,
    const nsString& aDeviceId) const
{
  AssertIsOnOwningThread();

  size_t num = NumCapabilities();
  nsTArray<CapabilityCandidate> candidateSet;
  for (size_t i = 0; i < num; i++) {
    candidateSet.AppendElement(CapabilityCandidate(GetCapability(i)));
  }

  bool first = true;
  for (const NormalizedConstraintSet* ns : aConstraintSets) {
    for (size_t i = 0; i < candidateSet.Length(); ) {
      auto& candidate = candidateSet[i];
      uint32_t distance =
        GetFitnessDistance(candidate.mCapability, *ns, aDeviceId);
      if (distance == UINT32_MAX) {
        // Incompatible with this constraint set; drop the candidate.
        candidateSet.RemoveElementAt(i);
      } else {
        ++i;
        if (first) {
          candidate.mDistance = distance;
        }
      }
    }
    first = false;
  }
  if (!candidateSet.Length()) {
    return UINT32_MAX;
  }
  TrimLessFitCandidates(candidateSet);
  return candidateSet[0].mDistance;
}
798 | | |
// Debug helper: logs min/max (and ideal, when present) for the width,
// height, and frameRate ranges of a normalized constraint set.
static void
LogConstraints(const NormalizedConstraintSet& aConstraints)
{
  auto& c = aConstraints;
  if (c.mWidth.mIdeal.isSome()) {
    LOG(("Constraints: width: { min: %d, max: %d, ideal: %d }",
         c.mWidth.mMin, c.mWidth.mMax,
         c.mWidth.mIdeal.valueOr(0)));
  } else {
    LOG(("Constraints: width: { min: %d, max: %d }",
         c.mWidth.mMin, c.mWidth.mMax));
  }
  if (c.mHeight.mIdeal.isSome()) {
    LOG(("             height: { min: %d, max: %d, ideal: %d }",
         c.mHeight.mMin, c.mHeight.mMax,
         c.mHeight.mIdeal.valueOr(0)));
  } else {
    LOG(("             height: { min: %d, max: %d }",
         c.mHeight.mMin, c.mHeight.mMax));
  }
  if (c.mFrameRate.mIdeal.isSome()) {
    LOG(("             frameRate: { min: %f, max: %f, ideal: %f }",
         c.mFrameRate.mMin, c.mFrameRate.mMax,
         c.mFrameRate.mIdeal.valueOr(0)));
  } else {
    LOG(("             frameRate: { min: %f, max: %f }",
         c.mFrameRate.mMin, c.mFrameRate.mMax));
  }
}
828 | | |
829 | | static void |
830 | | LogCapability(const char* aHeader, |
831 | | const webrtc::CaptureCapability &aCapability, |
832 | | uint32_t aDistance) |
833 | 0 | { |
834 | 0 | // RawVideoType and VideoCodecType media/webrtc/trunk/webrtc/common_types.h |
835 | 0 | static const char* const types[] = { |
836 | 0 | "I420", |
837 | 0 | "YV12", |
838 | 0 | "YUY2", |
839 | 0 | "UYVY", |
840 | 0 | "IYUV", |
841 | 0 | "ARGB", |
842 | 0 | "RGB24", |
843 | 0 | "RGB565", |
844 | 0 | "ARGB4444", |
845 | 0 | "ARGB1555", |
846 | 0 | "MJPEG", |
847 | 0 | "NV12", |
848 | 0 | "NV21", |
849 | 0 | "BGRA", |
850 | 0 | "Unknown type" |
851 | 0 | }; |
852 | 0 |
|
853 | 0 | static const char* const codec[] = { |
854 | 0 | "VP8", |
855 | 0 | "VP9", |
856 | 0 | "H264", |
857 | 0 | "I420", |
858 | 0 | "RED", |
859 | 0 | "ULPFEC", |
860 | 0 | "Generic codec", |
861 | 0 | "Unknown codec" |
862 | 0 | }; |
863 | 0 |
|
864 | 0 | LOG(("%s: %4u x %4u x %2u maxFps, %s, %s. Distance = %" PRIu32, |
865 | 0 | aHeader, aCapability.width, aCapability.height, aCapability.maxFPS, |
866 | 0 | types[std::min(std::max(uint32_t(0), uint32_t(aCapability.rawType)), |
867 | 0 | uint32_t(sizeof(types) / sizeof(*types) - 1))], |
868 | 0 | codec[std::min(std::max(uint32_t(0), uint32_t(aCapability.codecType)), |
869 | 0 | uint32_t(sizeof(codec) / sizeof(*codec) - 1))], |
870 | 0 | aDistance)); |
871 | 0 | } |
872 | | |
873 | | bool |
874 | | MediaEngineRemoteVideoSource::ChooseCapability( |
875 | | const NormalizedConstraints& aConstraints, |
876 | | const MediaEnginePrefs& aPrefs, |
877 | | const nsString& aDeviceId, |
878 | | webrtc::CaptureCapability& aCapability, |
879 | | const DistanceCalculation aCalculate) |
880 | 0 | { |
881 | 0 | LOG((__PRETTY_FUNCTION__)); |
882 | 0 | AssertIsOnOwningThread(); |
883 | 0 |
|
884 | 0 | if (MOZ_LOG_TEST(GetMediaManagerLog(), LogLevel::Debug)) { |
885 | 0 | LOG(("ChooseCapability: prefs: %dx%d @%dfps", |
886 | 0 | aPrefs.GetWidth(), aPrefs.GetHeight(), |
887 | 0 | aPrefs.mFPS)); |
888 | 0 | LogConstraints(aConstraints); |
889 | 0 | if (!aConstraints.mAdvanced.empty()) { |
890 | 0 | LOG(("Advanced array[%zu]:", aConstraints.mAdvanced.size())); |
891 | 0 | for (auto& advanced : aConstraints.mAdvanced) { |
892 | 0 | LogConstraints(advanced); |
893 | 0 | } |
894 | 0 | } |
895 | 0 | } |
896 | 0 |
|
897 | 0 | switch (mMediaSource) { |
898 | 0 | case MediaSourceEnum::Screen: |
899 | 0 | case MediaSourceEnum::Window: |
900 | 0 | case MediaSourceEnum::Application: { |
901 | 0 | FlattenedConstraints c(aConstraints); |
902 | 0 | // The actual resolution to constrain around is not easy to find ahead of |
903 | 0 | // time (and may in fact change over time), so as a hack, we push ideal |
904 | 0 | // and max constraints down to desktop_capture_impl.cc and finish the |
905 | 0 | // algorithm there. |
906 | 0 | // TODO: This can be removed in bug 1453269. |
907 | 0 | aCapability.width = |
908 | 0 | (std::min(0xffff, c.mWidth.mIdeal.valueOr(0)) & 0xffff) << 16 | |
909 | 0 | (std::min(0xffff, c.mWidth.mMax) & 0xffff); |
910 | 0 | aCapability.height = |
911 | 0 | (std::min(0xffff, c.mHeight.mIdeal.valueOr(0)) & 0xffff) << 16 | |
912 | 0 | (std::min(0xffff, c.mHeight.mMax) & 0xffff); |
913 | 0 | aCapability.maxFPS = |
914 | 0 | c.mFrameRate.Clamp(c.mFrameRate.mIdeal.valueOr(aPrefs.mFPS)); |
915 | 0 | return true; |
916 | 0 | } |
917 | 0 | default: |
918 | 0 | break; |
919 | 0 | } |
920 | 0 | |
921 | 0 | nsTArray<CapabilityCandidate> candidateSet; |
922 | 0 | size_t num = NumCapabilities(); |
923 | 0 | for (size_t i = 0; i < num; i++) { |
924 | 0 | candidateSet.AppendElement(CapabilityCandidate(GetCapability(i))); |
925 | 0 | } |
926 | 0 |
|
927 | 0 | if (!mHardcodedCapabilities.IsEmpty() && |
928 | 0 | mMediaSource == MediaSourceEnum::Camera) { |
929 | 0 | // We have a hardcoded capability, which means this camera didn't report |
930 | 0 | // discrete capabilities. It might still allow a ranged capability, so we |
931 | 0 | // add a couple of default candidates based on prefs and constraints. |
932 | 0 | // The chosen candidate will be propagated to StartCapture() which will fail |
933 | 0 | // for an invalid candidate. |
934 | 0 | MOZ_DIAGNOSTIC_ASSERT(mHardcodedCapabilities.Length() == 1); |
935 | 0 | MOZ_DIAGNOSTIC_ASSERT(candidateSet.Length() == 1); |
936 | 0 | candidateSet.Clear(); |
937 | 0 |
|
938 | 0 | FlattenedConstraints c(aConstraints); |
939 | 0 | // Reuse the code across both the low-definition (`false`) pref and |
940 | 0 | // the high-definition (`true`) pref. |
941 | 0 | // If there are constraints we try to satisfy them but we default to prefs. |
942 | 0 | // Note that since constraints are from content and can literally be |
943 | 0 | // anything we put (rather generous) caps on them. |
944 | 0 | for (bool isHd : {false, true}) { |
945 | 0 | webrtc::CaptureCapability cap; |
946 | 0 | int32_t prefWidth = aPrefs.GetWidth(isHd); |
947 | 0 | int32_t prefHeight = aPrefs.GetHeight(isHd); |
948 | 0 |
|
949 | 0 | cap.width = c.mWidth.Get(prefWidth); |
950 | 0 | cap.width = std::max(0, std::min(cap.width, 7680)); |
951 | 0 |
|
952 | 0 | cap.height = c.mHeight.Get(prefHeight); |
953 | 0 | cap.height = std::max(0, std::min(cap.height, 4320)); |
954 | 0 |
|
955 | 0 | cap.maxFPS = c.mFrameRate.Get(aPrefs.mFPS); |
956 | 0 | cap.maxFPS = std::max(0, std::min(cap.maxFPS, 480)); |
957 | 0 |
|
958 | 0 | if (cap.width != prefWidth) { |
959 | 0 | // Width was affected by constraints. |
960 | 0 | // We'll adjust the height too so the aspect ratio is retained. |
961 | 0 | cap.height = cap.width * prefHeight / prefWidth; |
962 | 0 | } else if (cap.height != prefHeight) { |
963 | 0 | // Height was affected by constraints but not width. |
964 | 0 | // We'll adjust the width too so the aspect ratio is retained. |
965 | 0 | cap.width = cap.height * prefWidth / prefHeight; |
966 | 0 | } |
967 | 0 |
|
968 | 0 | if (candidateSet.Contains(cap, CapabilityComparator())) { |
969 | 0 | continue; |
970 | 0 | } |
971 | 0 | LogCapability("Hardcoded capability", cap, 0); |
972 | 0 | candidateSet.AppendElement(CapabilityCandidate(std::move(cap))); |
973 | 0 | } |
974 | 0 | } |
975 | 0 |
|
976 | 0 | // First, filter capabilities by required constraints (min, max, exact). |
977 | 0 |
|
978 | 0 | for (size_t i = 0; i < candidateSet.Length();) { |
979 | 0 | auto& candidate = candidateSet[i]; |
980 | 0 | candidate.mDistance = |
981 | 0 | GetDistance(candidate.mCapability, aConstraints, aDeviceId, aCalculate); |
982 | 0 | LogCapability("Capability", candidate.mCapability, candidate.mDistance); |
983 | 0 | if (candidate.mDistance == UINT32_MAX) { |
984 | 0 | candidateSet.RemoveElementAt(i); |
985 | 0 | } else { |
986 | 0 | ++i; |
987 | 0 | } |
988 | 0 | } |
989 | 0 |
|
990 | 0 | if (candidateSet.IsEmpty()) { |
991 | 0 | LOG(("failed to find capability match from %zu choices", candidateSet.Length())); |
992 | 0 | return false; |
993 | 0 | } |
994 | 0 |
|
995 | 0 | // Filter further with all advanced constraints (that don't overconstrain). |
996 | 0 |
|
997 | 0 | for (const auto &cs : aConstraints.mAdvanced) { |
998 | 0 | nsTArray<CapabilityCandidate> rejects; |
999 | 0 | for (size_t i = 0; i < candidateSet.Length();) { |
1000 | 0 | if (GetDistance(candidateSet[i].mCapability, |
1001 | 0 | cs, aDeviceId, aCalculate) == UINT32_MAX) { |
1002 | 0 | rejects.AppendElement(candidateSet[i]); |
1003 | 0 | candidateSet.RemoveElementAt(i); |
1004 | 0 | } else { |
1005 | 0 | ++i; |
1006 | 0 | } |
1007 | 0 | } |
1008 | 0 | if (!candidateSet.Length()) { |
1009 | 0 | candidateSet.AppendElements(std::move(rejects)); |
1010 | 0 | } |
1011 | 0 | } |
1012 | 0 | MOZ_ASSERT(candidateSet.Length(), |
1013 | 0 | "advanced constraints filtering step can't reduce candidates to zero"); |
1014 | 0 |
|
1015 | 0 | // Remaining algorithm is up to the UA. |
1016 | 0 |
|
1017 | 0 | TrimLessFitCandidates(candidateSet); |
1018 | 0 |
|
1019 | 0 | // Any remaining multiples all have the same distance. A common case of this |
1020 | 0 | // occurs when no ideal is specified. Lean toward defaults. |
1021 | 0 | uint32_t sameDistance = candidateSet[0].mDistance; |
1022 | 0 | { |
1023 | 0 | MediaTrackConstraintSet prefs; |
1024 | 0 | prefs.mWidth.SetAsLong() = aPrefs.GetWidth(); |
1025 | 0 | prefs.mHeight.SetAsLong() = aPrefs.GetHeight(); |
1026 | 0 | prefs.mFrameRate.SetAsDouble() = aPrefs.mFPS; |
1027 | 0 | NormalizedConstraintSet normPrefs(prefs, false); |
1028 | 0 |
|
1029 | 0 | for (auto& candidate : candidateSet) { |
1030 | 0 | candidate.mDistance = |
1031 | 0 | GetDistance(candidate.mCapability, normPrefs, aDeviceId, aCalculate); |
1032 | 0 | } |
1033 | 0 | TrimLessFitCandidates(candidateSet); |
1034 | 0 | } |
1035 | 0 |
|
1036 | 0 | // Any remaining multiples all have the same distance, but may vary on |
1037 | 0 | // format. Some formats are more desirable for certain use like WebRTC. |
1038 | 0 | // E.g. I420 over RGB24 can remove a needless format conversion. |
1039 | 0 |
|
1040 | 0 | bool found = false; |
1041 | 0 | for (auto& candidate : candidateSet) { |
1042 | 0 | const webrtc::CaptureCapability& cap = candidate.mCapability; |
1043 | 0 | if (cap.rawType == webrtc::RawVideoType::kVideoI420 || |
1044 | 0 | cap.rawType == webrtc::RawVideoType::kVideoYUY2 || |
1045 | 0 | cap.rawType == webrtc::RawVideoType::kVideoYV12) { |
1046 | 0 | aCapability = cap; |
1047 | 0 | found = true; |
1048 | 0 | break; |
1049 | 0 | } |
1050 | 0 | } |
1051 | 0 | if (!found) { |
1052 | 0 | aCapability = candidateSet[0].mCapability; |
1053 | 0 | } |
1054 | 0 |
|
1055 | 0 | LogCapability("Chosen capability", aCapability, sameDistance); |
1056 | 0 | return true; |
1057 | 0 | } |
1058 | | |
// Copies the current track settings snapshot into aOutSettings.
// NOTE(review): no lock or thread assertion here; presumably mSettings is
// safe to read on the caller's thread — confirm against the header.
void
MediaEngineRemoteVideoSource::GetSettings(MediaTrackSettings& aOutSettings) const
{
  aOutSettings = *mSettings;
}
1064 | | |
1065 | | void |
1066 | | MediaEngineRemoteVideoSource::Refresh(int aIndex) |
1067 | 0 | { |
1068 | 0 | LOG((__PRETTY_FUNCTION__)); |
1069 | 0 | AssertIsOnOwningThread(); |
1070 | 0 |
|
1071 | 0 | // NOTE: mCaptureIndex might have changed when allocated! |
1072 | 0 | // Use aIndex to update information, but don't change mCaptureIndex!! |
1073 | 0 | // Caller looked up this source by uniqueId, so it shouldn't change |
1074 | 0 | char deviceName[kMaxDeviceNameLength]; |
1075 | 0 | char uniqueId[kMaxUniqueIdLength]; |
1076 | 0 |
|
1077 | 0 | if (camera::GetChildAndCall(&camera::CamerasChild::GetCaptureDevice, |
1078 | 0 | mCapEngine, aIndex, deviceName, |
1079 | 0 | sizeof(deviceName), uniqueId, sizeof(uniqueId), |
1080 | 0 | nullptr)) { |
1081 | 0 | return; |
1082 | 0 | } |
1083 | 0 | |
1084 | 0 | SetName(NS_ConvertUTF8toUTF16(deviceName)); |
1085 | 0 | MOZ_ASSERT(mUniqueId.Equals(uniqueId)); |
1086 | 0 | } |
1087 | | |
1088 | | } // namespace mozilla |