/src/mozilla-central/dom/media/VideoUtils.cpp
Line | Count | Source (jump to first uncovered line) |
1 | | /* This Source Code Form is subject to the terms of the Mozilla Public |
2 | | * License, v. 2.0. If a copy of the MPL was not distributed with this |
3 | | * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ |
4 | | |
5 | | #include "VideoUtils.h" |
6 | | |
7 | | #include <functional> |
8 | | #include <stdint.h> |
9 | | |
10 | | #include "CubebUtils.h" |
11 | | #include "ImageContainer.h" |
12 | | #include "MediaContainerType.h" |
13 | | #include "MediaResource.h" |
14 | | #include "TimeUnits.h" |
15 | | #include "VorbisUtils.h" |
16 | | #include "mozilla/Base64.h" |
17 | | #include "mozilla/SharedThreadPool.h" |
18 | | #include "mozilla/StaticPrefs.h" |
19 | | #include "mozilla/SystemGroup.h" |
20 | | #include "mozilla/TaskCategory.h" |
21 | | #include "mozilla/TaskQueue.h" |
22 | | #include "mozilla/Telemetry.h" |
23 | | #include "nsCharSeparatedTokenizer.h" |
24 | | #include "nsContentTypeParser.h" |
25 | | #include "nsIConsoleService.h" |
26 | | #include "nsIRandomGenerator.h" |
27 | | #include "nsIServiceManager.h" |
28 | | #include "nsMathUtils.h" |
29 | | #include "nsServiceManagerUtils.h" |
30 | | #include "nsThreadUtils.h" |
31 | | |
32 | | namespace mozilla { |
33 | | |
// Well-known EME key-system identifier strings, shared across media code.
NS_NAMED_LITERAL_CSTRING(kEMEKeySystemClearkey, "org.w3.clearkey");
NS_NAMED_LITERAL_CSTRING(kEMEKeySystemWidevine, "com.widevine.alpha");

using layers::PlanarYCbCrImage;
using media::TimeUnit;
39 | | |
40 | 0 | CheckedInt64 SaferMultDiv(int64_t aValue, uint64_t aMul, uint64_t aDiv) { |
41 | 0 | if (aMul > INT64_MAX || aDiv > INT64_MAX) { |
42 | 0 | return CheckedInt64(INT64_MAX) + 1; // Return an invalid checked int. |
43 | 0 | } |
44 | 0 | int64_t mul = aMul; |
45 | 0 | int64_t div = aDiv; |
46 | 0 | int64_t major = aValue / div; |
47 | 0 | int64_t remainder = aValue % div; |
48 | 0 | return CheckedInt64(remainder) * mul / div + CheckedInt64(major) * mul; |
49 | 0 | } |
50 | | |
51 | | // Converts from number of audio frames to microseconds, given the specified |
52 | | // audio rate. |
53 | 0 | CheckedInt64 FramesToUsecs(int64_t aFrames, uint32_t aRate) { |
54 | 0 | return SaferMultDiv(aFrames, USECS_PER_S, aRate); |
55 | 0 | } |
56 | | |
57 | 0 | TimeUnit FramesToTimeUnit(int64_t aFrames, uint32_t aRate) { |
58 | 0 | int64_t major = aFrames / aRate; |
59 | 0 | int64_t remainder = aFrames % aRate; |
60 | 0 | return TimeUnit::FromMicroseconds(major) * USECS_PER_S + |
61 | 0 | (TimeUnit::FromMicroseconds(remainder) * USECS_PER_S) / aRate; |
62 | 0 | } |
63 | | |
64 | | // Converts from microseconds to number of audio frames, given the specified |
65 | | // audio rate. |
66 | 0 | CheckedInt64 UsecsToFrames(int64_t aUsecs, uint32_t aRate) { |
67 | 0 | return SaferMultDiv(aUsecs, aRate, USECS_PER_S); |
68 | 0 | } |
69 | | |
70 | | // Format TimeUnit as number of frames at given rate. |
71 | 0 | CheckedInt64 TimeUnitToFrames(const TimeUnit& aTime, uint32_t aRate) { |
72 | 0 | return UsecsToFrames(aTime.ToMicroseconds(), aRate); |
73 | 0 | } |
74 | | |
75 | 0 | nsresult SecondsToUsecs(double aSeconds, int64_t& aOutUsecs) { |
76 | 0 | if (aSeconds * double(USECS_PER_S) > INT64_MAX) { |
77 | 0 | return NS_ERROR_FAILURE; |
78 | 0 | } |
79 | 0 | aOutUsecs = int64_t(aSeconds * double(USECS_PER_S)); |
80 | 0 | return NS_OK; |
81 | 0 | } |
82 | | |
83 | | static int32_t ConditionDimension(float aValue) |
84 | 0 | { |
85 | 0 | // This will exclude NaNs and too-big values. |
86 | 0 | if (aValue > 1.0 && aValue <= INT32_MAX) |
87 | 0 | return int32_t(NS_round(aValue)); |
88 | 0 | return 0; |
89 | 0 | } |
90 | | |
91 | | void |
92 | | ScaleDisplayByAspectRatio(gfx::IntSize& aDisplay, float aAspectRatio) |
93 | 0 | { |
94 | 0 | if (aAspectRatio > 1.0) { |
95 | 0 | // Increase the intrinsic width |
96 | 0 | aDisplay.width = ConditionDimension(aAspectRatio * aDisplay.width); |
97 | 0 | } else { |
98 | 0 | // Increase the intrinsic height |
99 | 0 | aDisplay.height = ConditionDimension(aDisplay.height / aAspectRatio); |
100 | 0 | } |
101 | 0 | } |
102 | | |
103 | 0 | static int64_t BytesToTime(int64_t offset, int64_t length, int64_t durationUs) { |
104 | 0 | NS_ASSERTION(length > 0, "Must have positive length"); |
105 | 0 | double r = double(offset) / double(length); |
106 | 0 | if (r > 1.0) |
107 | 0 | r = 1.0; |
108 | 0 | return int64_t(double(durationUs) * r); |
109 | 0 | } |
110 | | |
// Estimates the buffered time ranges of |aStream| by walking its cached byte
// ranges and mapping them linearly onto the media duration (constant-bitrate
// assumption). Returns an empty set for a null stream, non-positive duration,
// or unknown resource length.
media::TimeIntervals GetEstimatedBufferedTimeRanges(mozilla::MediaResource* aStream,
                                                    int64_t aDurationUsecs)
{
  media::TimeIntervals buffered;
  // Nothing to cache if the media takes 0us to play.
  if (aDurationUsecs <= 0 || !aStream)
    return buffered;

  // Special case completely cached files. This also handles local files.
  if (aStream->IsDataCachedToEndOfResource(0)) {
    buffered +=
      media::TimeInterval(TimeUnit::Zero(),
                          TimeUnit::FromMicroseconds(aDurationUsecs));
    return buffered;
  }

  int64_t totalBytes = aStream->GetLength();

  // If we can't determine the total size, pretend that we have nothing
  // buffered. This will put us in a state of eternally-low-on-undecoded-data
  // which is not great, but about the best we can do.
  if (totalBytes <= 0)
    return buffered;

  // Walk each contiguous cached byte range, converting it to a time range.
  int64_t startOffset = aStream->GetNextCachedData(0);
  while (startOffset >= 0) {
    int64_t endOffset = aStream->GetCachedDataEnd(startOffset);
    // Bytes [startOffset..endOffset] are cached.
    NS_ASSERTION(startOffset >= 0, "Integer underflow in GetBuffered");
    NS_ASSERTION(endOffset >= 0, "Integer underflow in GetBuffered");

    int64_t startUs = BytesToTime(startOffset, totalBytes, aDurationUsecs);
    int64_t endUs = BytesToTime(endOffset, totalBytes, aDurationUsecs);
    // Skip ranges that round to an empty time interval.
    if (startUs != endUs) {
      buffered +=
        media::TimeInterval(TimeUnit::FromMicroseconds(startUs),
                            TimeUnit::FromMicroseconds(endUs));
    }
    startOffset = aStream->GetNextCachedData(endOffset);
  }
  return buffered;
}
153 | | |
// In-place downmix of an interleaved stereo buffer: each L/R pair is
// replaced by their average, so both channels carry the same (mono) signal.
// aFrames is the number of sample pairs, not the number of samples.
void DownmixStereoToMono(mozilla::AudioDataValue* aBuffer,
                         uint32_t aFrames)
{
  MOZ_ASSERT(aBuffer);
  const int channels = 2;
  for (uint32_t fIdx = 0; fIdx < aFrames; ++fIdx) {
#ifdef MOZ_SAMPLE_TYPE_FLOAT32
    float sample = 0.0;
#else
    int sample = 0;
#endif
    // The sample of the buffer would be interleaved.
    sample = (aBuffer[fIdx*channels] + aBuffer[fIdx*channels + 1]) * 0.5;
    aBuffer[fIdx*channels] = aBuffer[fIdx*channels + 1] = sample;
  }
}
170 | | |
// Chooses how many channels to open for playback: forced mono or forced
// stereo when the corresponding prefs are set, otherwise the track's own
// channel count.
uint32_t
DecideAudioPlaybackChannels(const AudioInfo& info)
{
  if (StaticPrefs::accessibility_monoaudio_enable()) {
    // Accessibility pref forces mono output.
    return 1;
  }

  if (StaticPrefs::MediaForcestereoEnabled()) {
    // media.forcestereo pref forces stereo output.
    return 2;
  }

  return info.mChannels;
}
184 | | |
// True when the default audio output device reports a maximum of one
// channel, per cubeb.
bool
IsDefaultPlaybackDeviceMono()
{
  return CubebUtils::MaxNumberOfChannels() == 1;
}
190 | | |
191 | | bool |
192 | | IsVideoContentType(const nsCString& aContentType) |
193 | 0 | { |
194 | 0 | NS_NAMED_LITERAL_CSTRING(video, "video"); |
195 | 0 | if (FindInReadable(video, aContentType)) { |
196 | 0 | return true; |
197 | 0 | } |
198 | 0 | return false; |
199 | 0 | } |
200 | | |
201 | | bool |
202 | | IsValidVideoRegion(const gfx::IntSize& aFrame, |
203 | | const gfx::IntRect& aPicture, |
204 | | const gfx::IntSize& aDisplay) |
205 | 0 | { |
206 | 0 | return |
207 | 0 | aFrame.width <= PlanarYCbCrImage::MAX_DIMENSION && |
208 | 0 | aFrame.height <= PlanarYCbCrImage::MAX_DIMENSION && |
209 | 0 | aFrame.width * aFrame.height <= MAX_VIDEO_WIDTH * MAX_VIDEO_HEIGHT && |
210 | 0 | aFrame.width * aFrame.height != 0 && |
211 | 0 | aPicture.width <= PlanarYCbCrImage::MAX_DIMENSION && |
212 | 0 | aPicture.x < PlanarYCbCrImage::MAX_DIMENSION && |
213 | 0 | aPicture.x + aPicture.width < PlanarYCbCrImage::MAX_DIMENSION && |
214 | 0 | aPicture.height <= PlanarYCbCrImage::MAX_DIMENSION && |
215 | 0 | aPicture.y < PlanarYCbCrImage::MAX_DIMENSION && |
216 | 0 | aPicture.y + aPicture.height < PlanarYCbCrImage::MAX_DIMENSION && |
217 | 0 | aPicture.width * aPicture.height <= MAX_VIDEO_WIDTH * MAX_VIDEO_HEIGHT && |
218 | 0 | aPicture.width * aPicture.height != 0 && |
219 | 0 | aDisplay.width <= PlanarYCbCrImage::MAX_DIMENSION && |
220 | 0 | aDisplay.height <= PlanarYCbCrImage::MAX_DIMENSION && |
221 | 0 | aDisplay.width * aDisplay.height <= MAX_VIDEO_WIDTH * MAX_VIDEO_HEIGHT && |
222 | 0 | aDisplay.width * aDisplay.height != 0; |
223 | 0 | } |
224 | | |
// Returns the shared thread pool for the given media thread type, keyed by
// a per-type name (so repeated calls for the same type share one pool).
// Unknown types assert in debug builds and fall through to the playback
// pool. Platform-decoder pools are given a larger minimum thread stack.
already_AddRefed<SharedThreadPool> GetMediaThreadPool(MediaThreadType aType)
{
  const char *name;
  switch (aType) {
    case MediaThreadType::PLATFORM_DECODER:
      name = "MediaPDecoder";
      break;
    case MediaThreadType::MSG_CONTROL:
      name = "MSGControl";
      break;
    case MediaThreadType::WEBRTC_DECODER:
      name = "WebRTCPD";
      break;
    default:
      // Deliberate fallthrough: unexpected types get the playback pool.
      MOZ_FALLTHROUGH_ASSERT("Unexpected MediaThreadType");
    case MediaThreadType::PLAYBACK:
      name = "MediaPlayback";
      break;
  }

  static const uint32_t kMediaThreadPoolDefaultCount = 4;
  RefPtr<SharedThreadPool> pool = SharedThreadPool::
    Get(nsDependentCString(name), kMediaThreadPoolDefaultCount);

  // Ensure a larger stack for platform decoder threads
  if (aType == MediaThreadType::PLATFORM_DECODER) {
    const uint32_t minStackSize = 512*1024;
    uint32_t stackSize;
    MOZ_ALWAYS_SUCCEEDS(pool->GetThreadStackSize(&stackSize));
    if (stackSize < minStackSize) {
      MOZ_ALWAYS_SUCCEEDS(pool->SetThreadStackSize(minStackSize));
    }
  }

  return pool.forget();
}
261 | | |
262 | | bool |
263 | | ExtractVPXCodecDetails(const nsAString& aCodec, |
264 | | uint8_t& aProfile, |
265 | | uint8_t& aLevel, |
266 | | uint8_t& aBitDepth) |
267 | 0 | { |
268 | 0 | uint8_t dummyChromaSubsampling = 1; |
269 | 0 | VideoColorSpace dummyColorspace; |
270 | 0 | return ExtractVPXCodecDetails(aCodec, |
271 | 0 | aProfile, |
272 | 0 | aLevel, |
273 | 0 | aBitDepth, |
274 | 0 | dummyChromaSubsampling, |
275 | 0 | dummyColorspace); |
276 | 0 | } |
277 | | |
// Parses a VP8/VP9 codecs string of the form
// <4CC>.<profile>.<level>.<bitDepth>[.<chromaSubsampling>.<colourPrimaries>.
// <transferCharacteristics>.<matrixCoefficients>.<videoFullRangeFlag>]
// into its components, validating every field against the VP9 ISO media
// binding. Returns false on any parse or validation failure; on failure the
// out-params may be partially written.
bool ExtractVPXCodecDetails(const nsAString& aCodec,
                            uint8_t& aProfile,
                            uint8_t& aLevel,
                            uint8_t& aBitDepth,
                            uint8_t& aChromaSubsampling,
                            VideoColorSpace& aColorSpace)
{
  // Assign default value.
  aChromaSubsampling = 1;
  auto splitter = aCodec.Split(u'.');
  auto fieldsItr = splitter.begin();
  auto fourCC = *fieldsItr;

  if (!fourCC.EqualsLiteral("vp09") && !fourCC.EqualsLiteral("vp08")) {
    // Invalid 4CC
    return false;
  }
  ++fieldsItr;
  // Destinations for the up-to-8 numeric fields, in the order they appear
  // in the codecs string.
  uint8_t *fields[] = { &aProfile, &aLevel, &aBitDepth, &aChromaSubsampling,
                        &aColorSpace.mPrimaryId, &aColorSpace.mTransferId,
                        &aColorSpace.mMatrixId, &aColorSpace.mRangeId };
  int fieldsCount = 0;
  nsresult rv;
  for (; fieldsItr != splitter.end(); ++fieldsItr, ++fieldsCount) {
    if (fieldsCount > 7) {
      // No more than 8 fields are expected.
      return false;
    }
    *(fields[fieldsCount]) =
      static_cast<uint8_t>(PromiseFlatString((*fieldsItr)).ToInteger(&rv, 10));
    // We got invalid field value, parsing error.
    NS_ENSURE_SUCCESS(rv, false);
  }
  // Mandatory Fields
  // <sample entry 4CC>.<profile>.<level>.<bitDepth>.
  // Optional Fields
  // <chromaSubsampling>.<colourPrimaries>.<transferCharacteristics>.
  // <matrixCoefficients>.<videoFullRangeFlag>
  // First three fields are mandatory(we have parsed 4CC).
  if (fieldsCount < 3) {
    // Invalid number of fields.
    return false;
  }
  // Start to validate the parsing value.

  // profile should be 0,1,2 or 3.
  // See https://www.webmproject.org/vp9/profiles/
  // We don't support more than profile 2
  if (aProfile > 2) {
    // Invalid profile.
    return false;
  }

  // level, See https://www.webmproject.org/vp9/mp4/#semantics_1
  switch (aLevel) {
    case 10:
    case 11:
    case 20:
    case 21:
    case 30:
    case 31:
    case 40:
    case 41:
    case 50:
    case 51:
    case 52:
    case 60:
    case 61:
    case 62:
      break;
    default:
      // Invalid level.
      return false;
  }

  if (aBitDepth != 8 && aBitDepth != 10 && aBitDepth != 12) {
    // Invalid bitDepth:
    return false;
  }

  if (fieldsCount == 3) {
    // No more options.
    return true;
  }

  // chromaSubsampling should be 0,1,2,3...4~7 are reserved.
  if (aChromaSubsampling > 3) {
    return false;
  }

  if (fieldsCount == 4) {
    // No more options.
    return true;
  }

  // It is an integer that is defined by the "Colour primaries"
  // section of ISO/IEC 23001-8:2016 Table 2.
  // We treat reserved value as false case.
  const auto& primaryId = aColorSpace.mPrimaryId;
  if (primaryId == 0 || primaryId == 3 || primaryId > 22) {
    // reserved value.
    return false;
  }
  if (primaryId > 12 && primaryId < 22) {
    // 13~21 are reserved values.
    return false;
  }

  if (fieldsCount == 5) {
    // No more options.
    return true;
  }

  // It is an integer that is defined by the
  // "Transfer characteristics" section of ISO/IEC 23001-8:2016 Table 3.
  // We treat reserved value as false case.
  const auto& transferId = aColorSpace.mTransferId;
  if (transferId == 0 || transferId == 3 || transferId > 18) {
    // reserved value.
    return false;
  }

  if (fieldsCount == 6) {
    // No more options.
    return true;
  }

  // It is an integer that is defined by the
  // "Matrix coefficients" section of ISO/IEC 23001-8:2016 Table 4.
  // We treat reserved value as false case.
  const auto& matrixId = aColorSpace.mMatrixId;
  if (matrixId == 3 || matrixId > 11) {
    return false;
  }

  // If matrixCoefficients is 0 (RGB), then chroma subsampling MUST be 3 (4:4:4).
  if (matrixId == 0 && aChromaSubsampling != 3) {
    return false;
  }

  if (fieldsCount == 7) {
    // No more options.
    return true;
  }

  // videoFullRangeFlag indicates the black level and range of the luma and
  // chroma signals. 0 = legal range (e.g. 16-235 for 8 bit sample depth);
  // 1 = full range (e.g. 0-255 for 8-bit sample depth).
  const auto& rangeId = aColorSpace.mRangeId;
  return rangeId <= 1;
}
429 | | |
// Parses an H.264 codecs string of the exact form "avcN.PPCCLL" (hex bytes)
// into profile_idc, constraint_set flags and level_idc, and records
// telemetry about the values seen. Returns false when the string is
// malformed; on failure out-params may be partially written.
bool
ExtractH264CodecDetails(const nsAString& aCodec,
                        uint8_t& aProfile,
                        uint8_t& aConstraint,
                        uint8_t& aLevel)
{
  // H.264 codecs parameters have a type defined as avcN.PPCCLL, where
  // N = avc type. avc3 is avcc with SPS & PPS implicit (within stream)
  // PP = profile_idc, CC = constraint_set flags, LL = level_idc.
  // We ignore the constraint_set flags, as it's not clear from any
  // documentation what constraints the platform decoders support.
  // See http://blog.pearce.org.nz/2013/11/what-does-h264avc1-codecs-parameters.html
  // for more details.
  if (aCodec.Length() != strlen("avc1.PPCCLL")) {
    return false;
  }

  // Verify the codec starts with "avc1." or "avc3.".
  const nsAString& sample = Substring(aCodec, 0, 5);
  if (!sample.EqualsASCII("avc1.") && !sample.EqualsASCII("avc3.")) {
    return false;
  }

  // Extract the profile_idc, constraint_flags and level_idc.
  nsresult rv = NS_OK;
  aProfile = PromiseFlatString(Substring(aCodec, 5, 2)).ToInteger(&rv, 16);
  NS_ENSURE_SUCCESS(rv, false);

  // Constraint flags are stored on the 6 most significant bits, first two bits
  // are reserved_zero_2bits.
  aConstraint = PromiseFlatString(Substring(aCodec, 7, 2)).ToInteger(&rv, 16);
  NS_ENSURE_SUCCESS(rv, false);

  aLevel = PromiseFlatString(Substring(aCodec, 9, 2)).ToInteger(&rv, 16);
  NS_ENSURE_SUCCESS(rv, false);

  // Normalize the level: 9 denotes level 1b; other single-digit values are
  // whole levels, scaled by ten (e.g. 3 -> 30 meaning level 3.0).
  if (aLevel == 9) {
    aLevel = H264_LEVEL_1_b;
  } else if (aLevel <= 5) {
    aLevel *= 10;
  }

  // We only make sure constraints is above 4 for collection perspective
  // otherwise collect 0 for unknown.
  Telemetry::Accumulate(Telemetry::VIDEO_CANPLAYTYPE_H264_CONSTRAINT_SET_FLAG,
                        aConstraint >= 4 ? aConstraint : 0);
  // 244 is the highest meaningful profile value (High 4:4:4 Intra Profile)
  // that can be represented as single hex byte, otherwise collect 0 for unknown.
  Telemetry::Accumulate(Telemetry::VIDEO_CANPLAYTYPE_H264_PROFILE,
                        aProfile <= 244 ? aProfile : 0);

  // Make sure aLevel represents a value between levels 1 and 5.2,
  // otherwise collect 0 for unknown.
  Telemetry::Accumulate(Telemetry::VIDEO_CANPLAYTYPE_H264_LEVEL,
                        (aLevel >= 10 && aLevel <= 52) ? aLevel : 0);

  return true;
}
488 | | |
489 | | nsresult |
490 | | GenerateRandomName(nsCString& aOutSalt, uint32_t aLength) |
491 | 0 | { |
492 | 0 | nsresult rv; |
493 | 0 | nsCOMPtr<nsIRandomGenerator> rg = |
494 | 0 | do_GetService("@mozilla.org/security/random-generator;1", &rv); |
495 | 0 | if (NS_FAILED(rv)) return rv; |
496 | 0 | |
497 | 0 | // For each three bytes of random data we will get four bytes of ASCII. |
498 | 0 | const uint32_t requiredBytesLength = |
499 | 0 | static_cast<uint32_t>((aLength + 3) / 4 * 3); |
500 | 0 |
|
501 | 0 | uint8_t* buffer; |
502 | 0 | rv = rg->GenerateRandomBytes(requiredBytesLength, &buffer); |
503 | 0 | if (NS_FAILED(rv)) return rv; |
504 | 0 | |
505 | 0 | nsAutoCString temp; |
506 | 0 | nsDependentCSubstring randomData(reinterpret_cast<const char*>(buffer), |
507 | 0 | requiredBytesLength); |
508 | 0 | rv = Base64Encode(randomData, temp); |
509 | 0 | free(buffer); |
510 | 0 | buffer = nullptr; |
511 | 0 | if (NS_FAILED (rv)) return rv; |
512 | 0 | |
513 | 0 | aOutSalt = temp; |
514 | 0 | return NS_OK; |
515 | 0 | } |
516 | | |
517 | | nsresult |
518 | | GenerateRandomPathName(nsCString& aOutSalt, uint32_t aLength) |
519 | 0 | { |
520 | 0 | nsresult rv = GenerateRandomName(aOutSalt, aLength); |
521 | 0 | if (NS_FAILED(rv)) return rv; |
522 | 0 | |
523 | 0 | // Base64 characters are alphanumeric (a-zA-Z0-9) and '+' and '/', so we need |
524 | 0 | // to replace illegal characters -- notably '/' |
525 | 0 | aOutSalt.ReplaceChar(FILE_PATH_SEPARATOR FILE_ILLEGAL_CHARACTERS, '_'); |
526 | 0 | return NS_OK; |
527 | 0 | } |
528 | | |
529 | | already_AddRefed<TaskQueue> |
530 | | CreateMediaDecodeTaskQueue(const char* aName) |
531 | 0 | { |
532 | 0 | RefPtr<TaskQueue> queue = new TaskQueue( |
533 | 0 | GetMediaThreadPool(MediaThreadType::PLATFORM_DECODER), aName); |
534 | 0 | return queue.forget(); |
535 | 0 | } |
536 | | |
// Cancels the pending timer (if any) and drops the task. The DEBUG block
// asserts this is called on the thread the timer targets.
void
SimpleTimer::Cancel() {
  if (mTimer) {
#ifdef DEBUG
    nsCOMPtr<nsIEventTarget> target;
    mTimer->GetTarget(getter_AddRefs(target));
    bool onCurrent;
    nsresult rv = target->IsOnCurrentThread(&onCurrent);
    MOZ_ASSERT(NS_SUCCEEDED(rv) && onCurrent);
#endif
    mTimer->Cancel();
    mTimer = nullptr;
  }
  // Drop the task even if no timer was armed.
  mTask = nullptr;
}
552 | | |
553 | | NS_IMETHODIMP |
554 | 0 | SimpleTimer::Notify(nsITimer *timer) { |
555 | 0 | RefPtr<SimpleTimer> deathGrip(this); |
556 | 0 | if (mTask) { |
557 | 0 | mTask->Run(); |
558 | 0 | mTask = nullptr; |
559 | 0 | } |
560 | 0 | return NS_OK; |
561 | 0 | } |
562 | | |
// nsINamed: identifies this callback in timer diagnostics.
NS_IMETHODIMP
SimpleTimer::GetName(nsACString& aName)
{
  aName.AssignLiteral("SimpleTimer");
  return NS_OK;
}
569 | | |
570 | | nsresult |
571 | | SimpleTimer::Init(nsIRunnable* aTask, uint32_t aTimeoutMs, nsIEventTarget* aTarget) |
572 | 0 | { |
573 | 0 | nsresult rv; |
574 | 0 |
|
575 | 0 | // Get target thread first, so we don't have to cancel the timer if it fails. |
576 | 0 | nsCOMPtr<nsIEventTarget> target; |
577 | 0 | if (aTarget) { |
578 | 0 | target = aTarget; |
579 | 0 | } else { |
580 | 0 | target = GetMainThreadEventTarget(); |
581 | 0 | if (!target) { |
582 | 0 | return NS_ERROR_NOT_AVAILABLE; |
583 | 0 | } |
584 | 0 | } |
585 | 0 | |
586 | 0 | rv = NS_NewTimerWithCallback(getter_AddRefs(mTimer), |
587 | 0 | this, aTimeoutMs, nsITimer::TYPE_ONE_SHOT, |
588 | 0 | target); |
589 | 0 | if (NS_FAILED(rv)) { |
590 | 0 | return rv; |
591 | 0 | } |
592 | 0 | |
593 | 0 | mTask = aTask; |
594 | 0 | return NS_OK; |
595 | 0 | } |
596 | | |
// XPCOM boilerplate: QueryInterface/AddRef/Release for the timer callback.
NS_IMPL_ISUPPORTS(SimpleTimer, nsITimerCallback, nsINamed)
598 | | |
599 | | already_AddRefed<SimpleTimer> |
600 | | SimpleTimer::Create(nsIRunnable* aTask, uint32_t aTimeoutMs, nsIEventTarget* aTarget) |
601 | 0 | { |
602 | 0 | RefPtr<SimpleTimer> t(new SimpleTimer()); |
603 | 0 | if (NS_FAILED(t->Init(aTask, aTimeoutMs, aTarget))) { |
604 | 0 | return nullptr; |
605 | 0 | } |
606 | 0 | return t.forget(); |
607 | 0 | } |
608 | | |
// Logs |aMsg| to the browser console. Callable from any thread; off the
// main thread the call re-dispatches itself (with a copied message) to the
// main thread and returns immediately.
void
LogToBrowserConsole(const nsAString& aMsg)
{
  if (!NS_IsMainThread()) {
    // Copy the message so the lambda owns it past the caller's lifetime.
    nsString msg(aMsg);
    nsCOMPtr<nsIRunnable> task = NS_NewRunnableFunction(
      "LogToBrowserConsole", [msg]() { LogToBrowserConsole(msg); });
    SystemGroup::Dispatch(TaskCategory::Other, task.forget());
    return;
  }
  nsCOMPtr<nsIConsoleService> console(
    do_GetService("@mozilla.org/consoleservice;1"));
  if (!console) {
    NS_WARNING("Failed to log message to console.");
    return;
  }
  nsAutoString msg(aMsg);
  console->LogStringMessage(msg.get());
}
628 | | |
629 | | bool |
630 | | ParseCodecsString(const nsAString& aCodecs, nsTArray<nsString>& aOutCodecs) |
631 | 0 | { |
632 | 0 | aOutCodecs.Clear(); |
633 | 0 | bool expectMoreTokens = false; |
634 | 0 | nsCharSeparatedTokenizer tokenizer(aCodecs, ','); |
635 | 0 | while (tokenizer.hasMoreTokens()) { |
636 | 0 | const nsAString& token = tokenizer.nextToken(); |
637 | 0 | expectMoreTokens = tokenizer.separatorAfterCurrentToken(); |
638 | 0 | aOutCodecs.AppendElement(token); |
639 | 0 | } |
640 | 0 | if (expectMoreTokens) { |
641 | 0 | // Last codec name was empty |
642 | 0 | return false; |
643 | 0 | } |
644 | 0 | return true; |
645 | 0 | } |
646 | | |
647 | | bool |
648 | | ParseMIMETypeString(const nsAString& aMIMEType, |
649 | | nsString& aOutContainerType, |
650 | | nsTArray<nsString>& aOutCodecs) |
651 | 0 | { |
652 | 0 | nsContentTypeParser parser(aMIMEType); |
653 | 0 | nsresult rv = parser.GetType(aOutContainerType); |
654 | 0 | if (NS_FAILED(rv)) { |
655 | 0 | return false; |
656 | 0 | } |
657 | 0 | |
658 | 0 | nsString codecsStr; |
659 | 0 | parser.GetParameter("codecs", codecsStr); |
660 | 0 | return ParseCodecsString(codecsStr, aOutCodecs); |
661 | 0 | } |
662 | | |
663 | | template <int N> |
664 | | static bool |
665 | | StartsWith(const nsACString& string, const char (&prefix)[N]) |
666 | 0 | { |
667 | 0 | if (N - 1 > string.Length()) { |
668 | 0 | return false; |
669 | 0 | } |
670 | 0 | return memcmp(string.Data(), prefix, N - 1) == 0; |
671 | 0 | } Unexecuted instantiation: Unified_cpp_dom_media11.cpp:bool mozilla::StartsWith<5>(nsTSubstring<char> const&, char const (&) [5]) Unexecuted instantiation: Unified_cpp_dom_media11.cpp:bool mozilla::StartsWith<7>(nsTSubstring<char> const&, char const (&) [7]) |
672 | | |
673 | | bool |
674 | | IsH264CodecString(const nsAString& aCodec) |
675 | 0 | { |
676 | 0 | uint8_t profile = 0; |
677 | 0 | uint8_t constraint = 0; |
678 | 0 | uint8_t level = 0; |
679 | 0 | return ExtractH264CodecDetails(aCodec, profile, constraint, level); |
680 | 0 | } |
681 | | |
682 | | bool |
683 | | IsAACCodecString(const nsAString& aCodec) |
684 | 0 | { |
685 | 0 | return |
686 | 0 | aCodec.EqualsLiteral("mp4a.40.2") || // MPEG4 AAC-LC |
687 | 0 | aCodec.EqualsLiteral("mp4a.40.02") || // MPEG4 AAC-LC(for compatibility) |
688 | 0 | aCodec.EqualsLiteral("mp4a.40.5") || // MPEG4 HE-AAC |
689 | 0 | aCodec.EqualsLiteral("mp4a.40.05") || // MPEG4 HE-AAC(for compatibility) |
690 | 0 | aCodec.EqualsLiteral("mp4a.67") || // MPEG2 AAC-LC |
691 | 0 | aCodec.EqualsLiteral("mp4a.40.29"); // MPEG4 HE-AACv2 |
692 | 0 | } |
693 | | |
694 | | bool |
695 | | IsVP8CodecString(const nsAString& aCodec) |
696 | 0 | { |
697 | 0 | uint8_t profile = 0; |
698 | 0 | uint8_t level = 0; |
699 | 0 | uint8_t bitDepth = 0; |
700 | 0 | return aCodec.EqualsLiteral("vp8") || |
701 | 0 | aCodec.EqualsLiteral("vp8.0") || |
702 | 0 | (StartsWith(NS_ConvertUTF16toUTF8(aCodec), "vp08") && |
703 | 0 | ExtractVPXCodecDetails(aCodec, profile, level, bitDepth)); |
704 | 0 | } |
705 | | |
706 | | bool |
707 | | IsVP9CodecString(const nsAString& aCodec) |
708 | 0 | { |
709 | 0 | uint8_t profile = 0; |
710 | 0 | uint8_t level = 0; |
711 | 0 | uint8_t bitDepth = 0; |
712 | 0 | return aCodec.EqualsLiteral("vp9") || |
713 | 0 | aCodec.EqualsLiteral("vp9.0") || |
714 | 0 | (StartsWith(NS_ConvertUTF16toUTF8(aCodec), "vp09") && |
715 | 0 | ExtractVPXCodecDetails(aCodec, profile, level, bitDepth)); |
716 | 0 | } |
717 | | |
718 | | bool |
719 | | IsAV1CodecString(const nsAString& aCodec) |
720 | 0 | { |
721 | 0 | return aCodec.EqualsLiteral("av1") || |
722 | 0 | StartsWith(NS_ConvertUTF16toUTF8(aCodec), "av01"); |
723 | 0 | } |
724 | | |
725 | | UniquePtr<TrackInfo> |
726 | | CreateTrackInfoWithMIMEType(const nsACString& aCodecMIMEType) |
727 | 0 | { |
728 | 0 | UniquePtr<TrackInfo> trackInfo; |
729 | 0 | if (StartsWith(aCodecMIMEType, "audio/")) { |
730 | 0 | trackInfo.reset(new AudioInfo()); |
731 | 0 | trackInfo->mMimeType = aCodecMIMEType; |
732 | 0 | } else if (StartsWith(aCodecMIMEType, "video/")) { |
733 | 0 | trackInfo.reset(new VideoInfo()); |
734 | 0 | trackInfo->mMimeType = aCodecMIMEType; |
735 | 0 | } |
736 | 0 | return trackInfo; |
737 | 0 | } |
738 | | |
// Builds a TrackInfo from a codec MIME type, then overrides image/display
// dimensions (video) or channels/sample rate (audio) with any positive
// values carried in the container type's extended parameters. Returns
// nullptr for non-audio/video MIME types.
UniquePtr<TrackInfo>
CreateTrackInfoWithMIMETypeAndContainerTypeExtraParameters(
  const nsACString& aCodecMIMEType,
  const MediaContainerType& aContainerType)
{
  UniquePtr<TrackInfo> trackInfo = CreateTrackInfoWithMIMEType(aCodecMIMEType);
  if (trackInfo) {
    VideoInfo* videoInfo = trackInfo->GetAsVideoInfo();
    if (videoInfo) {
      // Only positive explicit dimensions override the defaults.
      Maybe<int32_t> maybeWidth = aContainerType.ExtendedType().GetWidth();
      if (maybeWidth && *maybeWidth > 0) {
        videoInfo->mImage.width = *maybeWidth;
        videoInfo->mDisplay.width = *maybeWidth;
      }
      Maybe<int32_t> maybeHeight = aContainerType.ExtendedType().GetHeight();
      if (maybeHeight && *maybeHeight > 0) {
        videoInfo->mImage.height = *maybeHeight;
        videoInfo->mDisplay.height = *maybeHeight;
      }
    } else if (trackInfo->GetAsAudioInfo()) {
      AudioInfo* audioInfo = trackInfo->GetAsAudioInfo();
      // Only positive explicit channel/rate values override the defaults.
      Maybe<int32_t> maybeChannels =
        aContainerType.ExtendedType().GetChannels();
      if (maybeChannels && *maybeChannels > 0) {
        audioInfo->mChannels = *maybeChannels;
      }
      Maybe<int32_t> maybeSamplerate =
        aContainerType.ExtendedType().GetSamplerate();
      if (maybeSamplerate && *maybeSamplerate > 0) {
        audioInfo->mRate = *maybeSamplerate;
      }
    }
  }
  return trackInfo;
}
774 | | |
775 | | } // end namespace mozilla |