/src/mozilla-central/dom/media/webaudio/AudioBuffer.cpp
Line | Count | Source (jump to first uncovered line) |
1 | | /* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */ |
2 | | /* vim:set ts=2 sw=2 sts=2 et cindent: */ |
3 | | /* This Source Code Form is subject to the terms of the Mozilla Public |
4 | | * License, v. 2.0. If a copy of the MPL was not distributed with this |
5 | | * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ |
6 | | |
7 | | #include "AudioBuffer.h" |
8 | | #include "mozilla/dom/AudioBufferBinding.h" |
9 | | #include "jsfriendapi.h" |
10 | | #include "mozilla/ErrorResult.h" |
11 | | #include "AudioSegment.h" |
12 | | #include "AudioChannelFormat.h" |
13 | | #include "mozilla/PodOperations.h" |
14 | | #include "mozilla/CheckedInt.h" |
15 | | #include "mozilla/MemoryReporting.h" |
16 | | #include "AudioNodeEngine.h" |
17 | | |
18 | | namespace mozilla { |
19 | | namespace dom { |
20 | | |
// Cycle-collection participation for AudioBuffer.  AudioBuffer is a
// natively refcounted class (ROOT/UNROOT via AddRef/Release below) whose
// only GC edges are the JS Float32Arrays in mJSChannels, so traversal has
// nothing to report and tracing walks mJSChannels.
NS_IMPL_CYCLE_COLLECTION_CLASS(AudioBuffer)

// Unlink: drop the JS channel arrays and the preserved wrapper.  The
// explicit ClearJSChannels() call empties mJSChannels after the macro has
// unlinked the individual entries.
NS_IMPL_CYCLE_COLLECTION_UNLINK_BEGIN(AudioBuffer)
  NS_IMPL_CYCLE_COLLECTION_UNLINK(mJSChannels)
  NS_IMPL_CYCLE_COLLECTION_UNLINK_PRESERVED_WRAPPER
  tmp->ClearJSChannels();
NS_IMPL_CYCLE_COLLECTION_UNLINK_END

// No strong references to other cycle-collected C++ objects to traverse.
NS_IMPL_CYCLE_COLLECTION_TRAVERSE_BEGIN(AudioBuffer)
NS_IMPL_CYCLE_COLLECTION_TRAVERSE_END

// Trace the JS typed arrays so the GC keeps them alive while we hold them.
NS_IMPL_CYCLE_COLLECTION_TRACE_BEGIN(AudioBuffer)
  NS_IMPL_CYCLE_COLLECTION_TRACE_PRESERVED_WRAPPER
  for (uint32_t i = 0; i < tmp->mJSChannels.Length(); ++i) {
    NS_IMPL_CYCLE_COLLECTION_TRACE_JS_MEMBER_CALLBACK(mJSChannels[i])
  }
NS_IMPL_CYCLE_COLLECTION_TRACE_END

NS_IMPL_CYCLE_COLLECTION_ROOT_NATIVE(AudioBuffer, AddRef)
NS_IMPL_CYCLE_COLLECTION_UNROOT_NATIVE(AudioBuffer, Release)
41 | | |
/**
 * AudioBuffers can be shared between AudioContexts, so we need a separate
 * mechanism to track their memory usage. This thread-safe class keeps track of
 * all the AudioBuffers, and gets called back by the memory reporting system
 * when a memory report is needed, reporting how much memory is used by the
 * buffers backing AudioBuffer objects.
 *
 * The tracker is a lazily-created singleton: it comes into existence when the
 * first AudioBuffer registers (GetInstance) and is dropped again when the
 * last one unregisters (UnregisterAudioBuffer). */
class AudioBufferMemoryTracker : public nsIMemoryReporter
{
  NS_DECL_THREADSAFE_ISUPPORTS
  NS_DECL_NSIMEMORYREPORTER

private:
  // Construction/destruction is private: lifetime is managed through
  // sSingleton by the static Register/Unregister entry points.
  AudioBufferMemoryTracker();
  virtual ~AudioBufferMemoryTracker();

public:
  /* Those methods can be called on any thread. */
  static void RegisterAudioBuffer(const AudioBuffer* aAudioBuffer);
  static void UnregisterAudioBuffer(const AudioBuffer* aAudioBuffer);
private:
  /* Returns the singleton, creating and initializing it on first use.
   * Must be called with sMutex held. */
  static AudioBufferMemoryTracker* GetInstance();
  /* Those methods must be called with the lock held. */
  void RegisterAudioBufferInternal(const AudioBuffer* aAudioBuffer);
  /* Returns the number of buffers still present in the hash table. */
  uint32_t UnregisterAudioBufferInternal(const AudioBuffer* aAudioBuffer);
  /* Registers |this| as a weak memory reporter; called once after creation. */
  void Init();

  /* This protects all members of this class. */
  static StaticMutex sMutex;
  static StaticRefPtr<AudioBufferMemoryTracker> sSingleton;
  /* Set of all live AudioBuffers, keyed by pointer (non-owning). */
  nsTHashtable<nsPtrHashKey<const AudioBuffer>> mBuffers;
};
74 | | |
// Storage for the tracker's static members.  The singleton is created lazily
// in GetInstance() and released in UnregisterAudioBuffer() when the last
// buffer goes away.
StaticRefPtr<AudioBufferMemoryTracker> AudioBufferMemoryTracker::sSingleton;
StaticMutex AudioBufferMemoryTracker::sMutex;

NS_IMPL_ISUPPORTS(AudioBufferMemoryTracker, nsIMemoryReporter);
79 | | |
80 | | AudioBufferMemoryTracker* AudioBufferMemoryTracker::GetInstance() |
81 | 0 | { |
82 | 0 | sMutex.AssertCurrentThreadOwns(); |
83 | 0 | if (!sSingleton) { |
84 | 0 | sSingleton = new AudioBufferMemoryTracker(); |
85 | 0 | sSingleton->Init(); |
86 | 0 | } |
87 | 0 | return sSingleton; |
88 | 0 | } |
89 | | |
90 | | AudioBufferMemoryTracker::AudioBufferMemoryTracker() |
91 | 0 | { |
92 | 0 | } |
93 | | |
/* Register this object as a weak memory reporter.
 * NOTE(review): presumably kept separate from the constructor so that
 * registration happens only after sSingleton holds a strong reference to the
 * new object — confirm against RegisterWeakMemoryReporter's requirements. */
void
AudioBufferMemoryTracker::Init()
{
  RegisterWeakMemoryReporter(this);
}
99 | | |
AudioBufferMemoryTracker::~AudioBufferMemoryTracker()
{
  // Mirror of Init(): stop the memory-reporter service from calling back
  // into an object that is going away.
  UnregisterWeakMemoryReporter(this);
}
104 | | |
105 | | void |
106 | | AudioBufferMemoryTracker::RegisterAudioBuffer(const AudioBuffer* aAudioBuffer) |
107 | 0 | { |
108 | 0 | StaticMutexAutoLock lock(sMutex); |
109 | 0 | AudioBufferMemoryTracker* tracker = AudioBufferMemoryTracker::GetInstance(); |
110 | 0 | tracker->RegisterAudioBufferInternal(aAudioBuffer); |
111 | 0 | } |
112 | | |
113 | | void |
114 | | AudioBufferMemoryTracker::UnregisterAudioBuffer(const AudioBuffer* aAudioBuffer) |
115 | 0 | { |
116 | 0 | StaticMutexAutoLock lock(sMutex); |
117 | 0 | AudioBufferMemoryTracker* tracker = AudioBufferMemoryTracker::GetInstance(); |
118 | 0 | uint32_t count; |
119 | 0 | count = tracker->UnregisterAudioBufferInternal(aAudioBuffer); |
120 | 0 | if (count == 0) { |
121 | 0 | sSingleton = nullptr; |
122 | 0 | } |
123 | 0 | } |
124 | | |
/* Insert the buffer into the hash table.  Caller must hold sMutex. */
void
AudioBufferMemoryTracker::RegisterAudioBufferInternal(const AudioBuffer* aAudioBuffer)
{
  sMutex.AssertCurrentThreadOwns();
  mBuffers.PutEntry(aAudioBuffer);
}
131 | | |
/* Remove the buffer and return how many buffers remain, so the caller can
 * tear the singleton down when the count reaches zero.  Caller must hold
 * sMutex. */
uint32_t
AudioBufferMemoryTracker::UnregisterAudioBufferInternal(const AudioBuffer* aAudioBuffer)
{
  sMutex.AssertCurrentThreadOwns();
  mBuffers.RemoveEntry(aAudioBuffer);
  return mBuffers.Count();
}
139 | | |
// Malloc-size-of function used to attribute AudioBuffer heap memory to this
// reporter in about:memory.
MOZ_DEFINE_MALLOC_SIZE_OF(AudioBufferMemoryTrackerMallocSizeOf)

// nsIMemoryReporter callback: sum the heap footprint of every registered
// AudioBuffer and report it under explicit/webaudio/audiobuffer.
// NOTE(review): mBuffers is iterated here without holding sMutex, while
// Register/Unregister mutate it from any thread (see the class comment that
// sMutex "protects all members").  Confirm report collection cannot race with
// (un)registration, or take the lock here — being careful about lock ordering
// with the memory-reporter manager.
NS_IMETHODIMP
AudioBufferMemoryTracker::CollectReports(nsIHandleReportCallback* aHandleReport,
                                         nsISupports* aData, bool)
{
  size_t amount = 0;

  for (auto iter = mBuffers.Iter(); !iter.Done(); iter.Next()) {
    amount += iter.Get()->GetKey()->SizeOfIncludingThis(AudioBufferMemoryTrackerMallocSizeOf);
  }

  MOZ_COLLECT_REPORT(
    "explicit/webaudio/audiobuffer", KIND_HEAP, UNITS_BYTES, amount,
    "Memory used by AudioBuffer objects (Web Audio).");

  return NS_OK;
}
158 | | |
/**
 * Construct an AudioBuffer of aNumberOfChannels channels, each holding
 * aLength sample-frames at aSampleRate.
 *
 * On invalid arguments (rate out of [MinSampleRate, MaxSampleRate], too many
 * channels, zero length, or length > INT32_MAX) throws
 * NS_ERROR_DOM_NOT_SUPPORTED_ERR via aRv and leaves the object empty.
 * On success the shared chunk records the length as its duration and the
 * per-channel JS Float32Arrays are left null, to be materialized lazily by
 * RestoreJSChannelData().
 */
AudioBuffer::AudioBuffer(nsPIDOMWindowInner* aWindow,
                         uint32_t aNumberOfChannels,
                         uint32_t aLength,
                         float aSampleRate,
                         ErrorResult& aRv)
  : mOwnerWindow(do_GetWeakReference(aWindow)),
    mSampleRate(aSampleRate)
{
  // Note that a buffer with zero channels is permitted here for the sake of
  // AudioProcessingEvent, where channel counts must match parameters passed
  // to createScriptProcessor(), one of which may be zero.
  if (aSampleRate < WebAudioUtils::MinSampleRate ||
      aSampleRate > WebAudioUtils::MaxSampleRate ||
      aNumberOfChannels > WebAudioUtils::MaxChannelCount ||
      !aLength || aLength > INT32_MAX) {
    aRv.Throw(NS_ERROR_DOM_NOT_SUPPORTED_ERR);
    return;
  }

  mSharedChannels.mDuration = aLength;
  mJSChannels.SetLength(aNumberOfChannels);
  // The JS objects stored in mJSChannels must be traced; see the
  // cycle-collection trace implementation above.
  mozilla::HoldJSObjects(this);
  AudioBufferMemoryTracker::RegisterAudioBuffer(this);
}
183 | | |
AudioBuffer::~AudioBuffer()
{
  // Stop memory reporting first; this may release the tracker singleton if
  // we were the last live AudioBuffer.
  AudioBufferMemoryTracker::UnregisterAudioBuffer(this);
  // Drop the JS typed arrays before telling the GC to stop tracing us.
  ClearJSChannels();
  mozilla::DropJSObjects(this);
}
190 | | |
191 | | /* static */ already_AddRefed<AudioBuffer> |
192 | | AudioBuffer::Constructor(const GlobalObject& aGlobal, |
193 | | const AudioBufferOptions& aOptions, |
194 | | ErrorResult& aRv) |
195 | 0 | { |
196 | 0 | if (!aOptions.mNumberOfChannels) { |
197 | 0 | aRv.Throw(NS_ERROR_DOM_NOT_SUPPORTED_ERR); |
198 | 0 | return nullptr; |
199 | 0 | } |
200 | 0 | |
201 | 0 | nsCOMPtr<nsPIDOMWindowInner> window = |
202 | 0 | do_QueryInterface(aGlobal.GetAsSupports()); |
203 | 0 |
|
204 | 0 | return Create(window, aOptions.mNumberOfChannels, aOptions.mLength, |
205 | 0 | aOptions.mSampleRate, aRv); |
206 | 0 | } |
207 | | |
/* Drop all references to the per-channel JS Float32Arrays. */
void
AudioBuffer::ClearJSChannels()
{
  mJSChannels.Clear();
}
213 | | |
214 | | void |
215 | | AudioBuffer::SetSharedChannels( |
216 | | already_AddRefed<ThreadSharedFloatArrayBufferList> aBuffer) |
217 | 0 | { |
218 | 0 | RefPtr<ThreadSharedFloatArrayBufferList> buffer = aBuffer; |
219 | 0 | uint32_t channelCount = buffer->GetChannels(); |
220 | 0 | mSharedChannels.mChannelData.SetLength(channelCount); |
221 | 0 | for (uint32_t i = 0; i < channelCount; ++i) { |
222 | 0 | mSharedChannels.mChannelData[i] = buffer->GetData(i); |
223 | 0 | } |
224 | 0 | mSharedChannels.mBuffer = buffer.forget(); |
225 | 0 | mSharedChannels.mBufferFormat = AUDIO_FORMAT_FLOAT32; |
226 | 0 | } |
227 | | |
228 | | /* static */ already_AddRefed<AudioBuffer> |
229 | | AudioBuffer::Create(nsPIDOMWindowInner* aWindow, uint32_t aNumberOfChannels, |
230 | | uint32_t aLength, float aSampleRate, |
231 | | already_AddRefed<ThreadSharedFloatArrayBufferList> |
232 | | aInitialContents, |
233 | | ErrorResult& aRv) |
234 | 0 | { |
235 | 0 | RefPtr<ThreadSharedFloatArrayBufferList> initialContents = aInitialContents; |
236 | 0 | RefPtr<AudioBuffer> buffer = |
237 | 0 | new AudioBuffer(aWindow, aNumberOfChannels, aLength, aSampleRate, aRv); |
238 | 0 | if (aRv.Failed()) { |
239 | 0 | return nullptr; |
240 | 0 | } |
241 | 0 | |
242 | 0 | if (initialContents) { |
243 | 0 | MOZ_ASSERT(initialContents->GetChannels() == aNumberOfChannels); |
244 | 0 | buffer->SetSharedChannels(initialContents.forget()); |
245 | 0 | } |
246 | 0 |
|
247 | 0 | return buffer.forget(); |
248 | 0 | } |
249 | | |
250 | | /* static */ already_AddRefed<AudioBuffer> |
251 | | AudioBuffer::Create(nsPIDOMWindowInner* aWindow, float aSampleRate, |
252 | | AudioChunk&& aInitialContents) |
253 | 0 | { |
254 | 0 | AudioChunk initialContents = aInitialContents; |
255 | 0 | ErrorResult rv; |
256 | 0 | RefPtr<AudioBuffer> buffer = |
257 | 0 | new AudioBuffer(aWindow, initialContents.ChannelCount(), |
258 | 0 | initialContents.mDuration, aSampleRate, rv); |
259 | 0 | if (rv.Failed()) { |
260 | 0 | return nullptr; |
261 | 0 | } |
262 | 0 | buffer->mSharedChannels = std::move(aInitialContents); |
263 | 0 |
|
264 | 0 | return buffer.forget(); |
265 | 0 | } |
266 | | |
/* Standard WebIDL wrapper creation for the AudioBuffer binding. */
JSObject*
AudioBuffer::WrapObject(JSContext* aCx, JS::Handle<JSObject*> aGivenProto)
{
  return AudioBuffer_Binding::Wrap(aCx, this, aGivenProto);
}
272 | | |
273 | | static void |
274 | | CopyChannelDataToFloat(const AudioChunk& aChunk, uint32_t aChannel, |
275 | | uint32_t aSrcOffset, float* aOutput, uint32_t aLength) |
276 | 0 | { |
277 | 0 | MOZ_ASSERT(aChunk.mVolume == 1.0f); |
278 | 0 | if (aChunk.mBufferFormat == AUDIO_FORMAT_FLOAT32) { |
279 | 0 | mozilla::PodCopy(aOutput, |
280 | 0 | aChunk.ChannelData<float>()[aChannel] + aSrcOffset, |
281 | 0 | aLength); |
282 | 0 | } else { |
283 | 0 | MOZ_ASSERT(aChunk.mBufferFormat == AUDIO_FORMAT_S16); |
284 | 0 | ConvertAudioSamples(aChunk.ChannelData<int16_t>()[aChannel] + aSrcOffset, |
285 | 0 | aOutput, aLength); |
286 | 0 | } |
287 | 0 | } |
288 | | |
/**
 * Ensure every entry of mJSChannels holds a Float32Array of Length() frames,
 * copying data out of mSharedChannels where present, then reset
 * mSharedChannels to a null chunk of the same duration (the JS arrays become
 * the authoritative copy).
 *
 * Returns false on JS allocation failure; channels materialized before the
 * failure are kept.
 */
bool
AudioBuffer::RestoreJSChannelData(JSContext* aJSContext)
{
  for (uint32_t i = 0; i < mJSChannels.Length(); ++i) {
    if (mJSChannels[i]) {
      // Already have data in JS array.
      continue;
    }

    // The following code first zeroes the array and then copies our data
    // into it. We could avoid this with additional JS APIs to construct
    // an array (or ArrayBuffer) containing initial data.
    JS::Rooted<JSObject*> array(aJSContext,
                                JS_NewFloat32Array(aJSContext, Length()));
    if (!array) {
      return false;
    }
    if (!mSharedChannels.IsNull()) {
      // "4. Attach ArrayBuffers containing copies of the data to the
      // AudioBuffer, to be returned by the next call to getChannelData."
      JS::AutoCheckCannotGC nogc;
      bool isShared;
      float* jsData = JS_GetFloat32ArrayData(array, &isShared, nogc);
      MOZ_ASSERT(!isShared); // Was created as unshared above
      CopyChannelDataToFloat(mSharedChannels, i, 0, jsData, Length());
    }
    mJSChannels[i] = array;
  }

  // All channel data now lives in the JS arrays; drop the shared copy.
  mSharedChannels.SetNull(Length());

  return true;
}
322 | | |
/**
 * Implements AudioBuffer.copyFromChannel(): copy aDestination.Length()
 * samples of channel aChannelNumber, starting at aStartInChannel, into
 * aDestination.
 *
 * Throws IndexSizeError via aRv when the channel index is out of range, the
 * requested span does not fit in the buffer (including uint32 overflow of
 * start + length), or the channel's JS ArrayBuffer has been detached.
 */
void
AudioBuffer::CopyFromChannel(const Float32Array& aDestination, uint32_t aChannelNumber,
                             uint32_t aStartInChannel, ErrorResult& aRv)
{
  aDestination.ComputeLengthAndData();

  uint32_t length = aDestination.Length();
  // CheckedInt guards against aStartInChannel + length wrapping around.
  CheckedInt<uint32_t> end = aStartInChannel;
  end += length;
  if (aChannelNumber >= NumberOfChannels() ||
      !end.isValid() || end.value() > Length()) {
    aRv.Throw(NS_ERROR_DOM_INDEX_SIZE_ERR);
    return;
  }

  JS::AutoCheckCannotGC nogc;
  // Fast path: the channel has been materialized as a JS array
  // (by getChannelData / copyToChannel) — copy straight out of it.
  JSObject* channelArray = mJSChannels[aChannelNumber];
  if (channelArray) {
    if (JS_GetTypedArrayLength(channelArray) != Length()) {
      // The array's buffer was detached.
      aRv.Throw(NS_ERROR_DOM_INDEX_SIZE_ERR);
      return;
    }

    bool isShared = false;
    const float* sourceData =
      JS_GetFloat32ArrayData(channelArray, &isShared, nogc);
    // The sourceData arrays should all have originated in
    // RestoreJSChannelData, where they are created unshared.
    MOZ_ASSERT(!isShared);
    PodMove(aDestination.Data(), sourceData + aStartInChannel, length);
    return;
  }

  // No JS array: the data may still live in the shared chunk.
  if (!mSharedChannels.IsNull()) {
    CopyChannelDataToFloat(mSharedChannels, aChannelNumber, aStartInChannel,
                           aDestination.Data(), length);
    return;
  }

  // Neither representation holds data: the buffer is silent, so zero-fill.
  PodZero(aDestination.Data(), length);
}
365 | | |
/**
 * Implements AudioBuffer.copyToChannel(): copy aSource.Length() samples from
 * aSource into channel aChannelNumber, starting at aStartInChannel.
 *
 * Throws IndexSizeError via aRv on an out-of-range channel, a span that does
 * not fit (including uint32 overflow of start + length), or a detached
 * ArrayBuffer; throws OOM if materializing the JS channel arrays fails.
 */
void
AudioBuffer::CopyToChannel(JSContext* aJSContext, const Float32Array& aSource,
                           uint32_t aChannelNumber, uint32_t aStartInChannel,
                           ErrorResult& aRv)
{
  aSource.ComputeLengthAndData();

  uint32_t length = aSource.Length();
  // CheckedInt guards against aStartInChannel + length wrapping around.
  CheckedInt<uint32_t> end = aStartInChannel;
  end += length;
  if (aChannelNumber >= NumberOfChannels() ||
      !end.isValid() || end.value() > Length()) {
    aRv.Throw(NS_ERROR_DOM_INDEX_SIZE_ERR);
    return;
  }

  // Writing goes through the JS arrays, so make sure they exist (moving any
  // data out of mSharedChannels first).
  if (!RestoreJSChannelData(aJSContext)) {
    aRv.Throw(NS_ERROR_OUT_OF_MEMORY);
    return;
  }

  JS::AutoCheckCannotGC nogc;
  JSObject* channelArray = mJSChannels[aChannelNumber];
  if (JS_GetTypedArrayLength(channelArray) != Length()) {
    // The array's buffer was detached.
    aRv.Throw(NS_ERROR_DOM_INDEX_SIZE_ERR);
    return;
  }

  bool isShared = false;
  float* channelData = JS_GetFloat32ArrayData(channelArray, &isShared, nogc);
  // The channelData arrays should all have originated in
  // RestoreJSChannelData, where they are created unshared.
  MOZ_ASSERT(!isShared);
  PodMove(channelData + aStartInChannel, aSource.Data(), length);
}
402 | | |
/**
 * Implements AudioBuffer.getChannelData(): return the Float32Array backing
 * channel aChannel, first materializing all channel arrays if the data
 * currently lives in the shared chunk.
 *
 * Throws IndexSizeError for an out-of-range channel and OOM if the JS
 * arrays cannot be allocated.
 */
void
AudioBuffer::GetChannelData(JSContext* aJSContext, uint32_t aChannel,
                            JS::MutableHandle<JSObject*> aRetval,
                            ErrorResult& aRv)
{
  if (aChannel >= NumberOfChannels()) {
    aRv.Throw(NS_ERROR_DOM_INDEX_SIZE_ERR);
    return;
  }

  if (!RestoreJSChannelData(aJSContext)) {
    aRv.Throw(NS_ERROR_OUT_OF_MEMORY);
    return;
  }

  aRetval.set(mJSChannels[aChannel]);
}
420 | | |
/**
 * Move the sample data out of the JS channel arrays into a thread-shared
 * float buffer list, detaching the underlying ArrayBuffers in the process.
 *
 * Returns nullptr — leaving mJSChannels untouched — when any channel array
 * is missing or already detached, or when stealing the ArrayBuffer contents
 * fails.  On success all mJSChannels entries are nulled out.
 */
already_AddRefed<ThreadSharedFloatArrayBufferList>
AudioBuffer::StealJSArrayDataIntoSharedChannels(JSContext* aJSContext)
{
  // "1. If any of the AudioBuffer's ArrayBuffer have been detached, abort
  // these steps, and return a zero-length channel data buffers to the
  // invoker."
  for (uint32_t i = 0; i < mJSChannels.Length(); ++i) {
    JSObject* channelArray = mJSChannels[i];
    if (!channelArray || Length() != JS_GetTypedArrayLength(channelArray)) {
      // Either empty buffer or one of the arrays' buffers was detached.
      return nullptr;
    }
  }

  // "2. Detach all ArrayBuffers for arrays previously returned by
  // getChannelData on this AudioBuffer."
  // "3. Retain the underlying data buffers from those ArrayBuffers and return
  // references to them to the invoker."
  RefPtr<ThreadSharedFloatArrayBufferList> result =
    new ThreadSharedFloatArrayBufferList(mJSChannels.Length());
  for (uint32_t i = 0; i < mJSChannels.Length(); ++i) {
    JS::Rooted<JSObject*> arrayBufferView(aJSContext, mJSChannels[i]);
    bool isSharedMemory;
    JS::Rooted<JSObject*> arrayBuffer(aJSContext,
                                      JS_GetArrayBufferViewBuffer(aJSContext,
                                                                  arrayBufferView,
                                                                  &isSharedMemory));
    // The channel data arrays should all have originated in
    // RestoreJSChannelData, where they are created unshared.
    MOZ_ASSERT(!isSharedMemory);
    // Detach the ArrayBuffer and take ownership of its malloc'd contents;
    // js_free is registered as the matching deallocator.
    auto stolenData =
      arrayBuffer ? static_cast<float*>(
                      JS_StealArrayBufferContents(aJSContext, arrayBuffer))
                  : nullptr;
    if (stolenData) {
      result->SetData(i, stolenData, js_free, stolenData);
    } else {
      NS_ASSERTION(i == 0, "some channels lost when contents not acquired");
      return nullptr;
    }
  }

  // The JS arrays are now detached shells; drop our references.
  for (uint32_t i = 0; i < mJSChannels.Length(); ++i) {
    mJSChannels[i] = nullptr;
  }

  return result.forget();
}
469 | | |
470 | | const AudioChunk& |
471 | | AudioBuffer::GetThreadSharedChannelsForRate(JSContext* aJSContext) |
472 | 0 | { |
473 | 0 | if (mSharedChannels.IsNull()) { |
474 | 0 | // mDuration is set in constructor |
475 | 0 | RefPtr<ThreadSharedFloatArrayBufferList> buffer = |
476 | 0 | StealJSArrayDataIntoSharedChannels(aJSContext); |
477 | 0 |
|
478 | 0 | if (buffer) { |
479 | 0 | SetSharedChannels(buffer.forget()); |
480 | 0 | } |
481 | 0 | } |
482 | 0 |
|
483 | 0 | return mSharedChannels; |
484 | 0 | } |
485 | | |
486 | | size_t |
487 | | AudioBuffer::SizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const |
488 | 0 | { |
489 | 0 | size_t amount = aMallocSizeOf(this); |
490 | 0 | amount += mJSChannels.ShallowSizeOfExcludingThis(aMallocSizeOf); |
491 | 0 | amount += mSharedChannels.SizeOfExcludingThis(aMallocSizeOf, false); |
492 | 0 | return amount; |
493 | 0 | } |
494 | | |
495 | | } // namespace dom |
496 | | } // namespace mozilla |