Coverage Report

Created: 2018-09-25 14:53

/src/mozilla-central/xpcom/base/CycleCollectedJSRuntime.cpp
Line
Count
Source (jump to first uncovered line)
1
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
2
/* vim: set ts=8 sts=2 et sw=2 tw=80: */
3
/* This Source Code Form is subject to the terms of the Mozilla Public
4
 * License, v. 2.0. If a copy of the MPL was not distributed with this
5
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
6
7
// We're dividing JS objects into 3 categories:
8
//
9
// 1. "real" roots, held by the JS engine itself or rooted through the root
10
//    and lock JS APIs. Roots from this category are considered black in the
11
//    cycle collector, any cycle they participate in is uncollectable.
12
//
13
// 2. certain roots held by C++ objects that are guaranteed to be alive.
14
//    Roots from this category are considered black in the cycle collector,
15
//    and any cycle they participate in is uncollectable. These roots are
16
//    traced from TraceNativeBlackRoots.
17
//
18
// 3. all other roots held by C++ objects that participate in cycle
19
//    collection, held by us (see TraceNativeGrayRoots). Roots from this
20
//    category are considered grey in the cycle collector; whether or not
21
//    they are collected depends on the objects that hold them.
22
//
23
// Note that if a root is in multiple categories, the fact that it is in
24
// category 1 or 2 takes precedence, so it will be considered black.
25
//
26
// During garbage collection we switch to an additional mark color (gray)
27
// when tracing inside TraceNativeGrayRoots. This allows us to walk those
28
// roots later on and add all objects reachable only from them to the
29
// cycle collector.
30
//
31
// Phases:
32
//
33
// 1. marking of the roots in category 1 by having the JS GC do its marking
34
// 2. marking of the roots in category 2 by having the JS GC call us back
35
//    (via JS_SetExtraGCRootsTracer) and running TraceNativeBlackRoots
36
// 3. marking of the roots in category 3 by TraceNativeGrayRoots using an
37
//    additional color (gray).
38
// 4. end of GC, GC can sweep its heap
39
//
40
// At some later point, when the cycle collector runs:
41
//
42
// 5. walk gray objects and add them to the cycle collector, cycle collect
43
//
44
// JS objects that are part of cycles the cycle collector breaks will be
45
// collected by the next JS GC.
46
//
47
// If WantAllTraces() is false the cycle collector will not traverse roots
48
// from category 1 or any JS objects held by them. Any JS objects they hold
49
// will already be marked by the JS GC and will thus be colored black
50
// themselves. Any C++ objects they hold will have a missing (untraversed)
51
// edge from the JS object to the C++ object and so it will be marked black
52
// too. This decreases the number of objects that the cycle collector has to
53
// deal with.
54
// To improve debugging, if WantAllTraces() is true all JS objects are
55
// traversed.
56
57
#include "mozilla/CycleCollectedJSRuntime.h"
58
#include <algorithm>
59
#include "mozilla/ArrayUtils.h"
60
#include "mozilla/AutoRestore.h"
61
#include "mozilla/CycleCollectedJSContext.h"
62
#include "mozilla/Move.h"
63
#include "mozilla/MemoryReporting.h"
64
#include "mozilla/Sprintf.h"
65
#include "mozilla/Telemetry.h"
66
#include "mozilla/TimelineConsumers.h"
67
#include "mozilla/TimelineMarker.h"
68
#include "mozilla/Unused.h"
69
#include "mozilla/DebuggerOnGCRunnable.h"
70
#include "mozilla/dom/DOMJSClass.h"
71
#include "mozilla/dom/ProfileTimelineMarkerBinding.h"
72
#include "mozilla/dom/Promise.h"
73
#include "mozilla/dom/PromiseBinding.h"
74
#include "mozilla/dom/PromiseDebugging.h"
75
#include "mozilla/dom/ScriptSettings.h"
76
#include "js/Debug.h"
77
#include "js/GCAPI.h"
78
#include "nsContentUtils.h"
79
#include "nsCycleCollectionNoteRootCallback.h"
80
#include "nsCycleCollectionParticipant.h"
81
#include "nsCycleCollector.h"
82
#include "nsDOMJSUtils.h"
83
#include "nsExceptionHandler.h"
84
#include "nsJSUtils.h"
85
#include "nsWrapperCache.h"
86
#include "nsStringBuffer.h"
87
#include "GeckoProfiler.h"
88
89
#ifdef MOZ_GECKO_PROFILER
90
#include "ProfilerMarkerPayload.h"
91
#endif
92
93
#include "nsIException.h"
94
#include "nsThread.h"
95
#include "nsThreadUtils.h"
96
#include "xpcpublic.h"
97
98
#ifdef NIGHTLY_BUILD
99
// For performance reasons, we make the JS Dev Error Interceptor a Nightly-only feature.
100
#define MOZ_JS_DEV_ERROR_INTERCEPTOR = 1
101
#endif // NIGHTLY_BUILD
102
103
using namespace mozilla;
104
using namespace mozilla::dom;
105
106
namespace mozilla {
107
108
struct DeferredFinalizeFunctionHolder
109
{
110
  DeferredFinalizeFunction run;
111
  void* data;
112
};
113
114
class IncrementalFinalizeRunnable : public CancelableRunnable
115
{
116
  typedef AutoTArray<DeferredFinalizeFunctionHolder, 16> DeferredFinalizeArray;
117
  typedef CycleCollectedJSRuntime::DeferredFinalizerTable DeferredFinalizerTable;
118
119
  CycleCollectedJSRuntime* mRuntime;
120
  DeferredFinalizeArray mDeferredFinalizeFunctions;
121
  uint32_t mFinalizeFunctionToRun;
122
  bool mReleasing;
123
124
  static const PRTime SliceMillis = 5; /* ms */
125
126
public:
127
  IncrementalFinalizeRunnable(CycleCollectedJSRuntime* aRt,
128
                              DeferredFinalizerTable& aFinalizerTable);
129
  virtual ~IncrementalFinalizeRunnable();
130
131
  void ReleaseNow(bool aLimited);
132
133
  NS_DECL_NSIRUNNABLE
134
};
135
136
} // namespace mozilla
137
138
struct NoteWeakMapChildrenTracer : public JS::CallbackTracer
139
{
140
  NoteWeakMapChildrenTracer(JSRuntime* aRt,
141
                            nsCycleCollectionNoteRootCallback& aCb)
142
    : JS::CallbackTracer(aRt), mCb(aCb), mTracedAny(false), mMap(nullptr),
143
      mKey(nullptr), mKeyDelegate(nullptr)
144
0
  {
145
0
    setCanSkipJsids(true);
146
0
  }
147
  void onChild(const JS::GCCellPtr& aThing) override;
148
  nsCycleCollectionNoteRootCallback& mCb;
149
  bool mTracedAny;
150
  JSObject* mMap;
151
  JS::GCCellPtr mKey;
152
  JSObject* mKeyDelegate;
153
};
154
155
void
156
NoteWeakMapChildrenTracer::onChild(const JS::GCCellPtr& aThing)
157
0
{
158
0
  if (aThing.is<JSString>()) {
159
0
    return;
160
0
  }
161
0
162
0
  if (!JS::GCThingIsMarkedGray(aThing) && !mCb.WantAllTraces()) {
163
0
    return;
164
0
  }
165
0
166
0
  if (AddToCCKind(aThing.kind())) {
167
0
    mCb.NoteWeakMapping(mMap, mKey, mKeyDelegate, aThing);
168
0
    mTracedAny = true;
169
0
  } else {
170
0
    JS::TraceChildren(this, aThing);
171
0
  }
172
0
}
173
174
struct NoteWeakMapsTracer : public js::WeakMapTracer
175
{
176
  NoteWeakMapsTracer(JSRuntime* aRt, nsCycleCollectionNoteRootCallback& aCccb)
177
    : js::WeakMapTracer(aRt), mCb(aCccb), mChildTracer(aRt, aCccb)
178
0
  {
179
0
  }
180
  void trace(JSObject* aMap, JS::GCCellPtr aKey, JS::GCCellPtr aValue) override;
181
  nsCycleCollectionNoteRootCallback& mCb;
182
  NoteWeakMapChildrenTracer mChildTracer;
183
};
184
185
void
186
NoteWeakMapsTracer::trace(JSObject* aMap, JS::GCCellPtr aKey,
187
                          JS::GCCellPtr aValue)
188
0
{
189
0
  // If nothing that could be held alive by this entry is marked gray, return.
190
0
  if ((!aKey || !JS::GCThingIsMarkedGray(aKey)) &&
191
0
      MOZ_LIKELY(!mCb.WantAllTraces())) {
192
0
    if (!aValue || !JS::GCThingIsMarkedGray(aValue) || aValue.is<JSString>()) {
193
0
      return;
194
0
    }
195
0
  }
196
0
197
0
  // The cycle collector can only properly reason about weak maps if it can
198
0
  // reason about the liveness of their keys, which in turn requires that
199
0
  // the key can be represented in the cycle collector graph.  All existing
200
0
  // uses of weak maps use either objects or scripts as keys, which are okay.
201
0
  MOZ_ASSERT(AddToCCKind(aKey.kind()));
202
0
203
0
  // As an emergency fallback for non-debug builds, if the key is not
204
0
  // representable in the cycle collector graph, we treat it as marked.  This
205
0
  // can cause leaks, but is preferable to ignoring the binding, which could
206
0
  // cause the cycle collector to free live objects.
207
0
  if (!AddToCCKind(aKey.kind())) {
208
0
    aKey = nullptr;
209
0
  }
210
0
211
0
  JSObject* kdelegate = nullptr;
212
0
  if (aKey.is<JSObject>()) {
213
0
    kdelegate = js::GetWeakmapKeyDelegate(&aKey.as<JSObject>());
214
0
  }
215
0
216
0
  if (AddToCCKind(aValue.kind())) {
217
0
    mCb.NoteWeakMapping(aMap, aKey, kdelegate, aValue);
218
0
  } else {
219
0
    mChildTracer.mTracedAny = false;
220
0
    mChildTracer.mMap = aMap;
221
0
    mChildTracer.mKey = aKey;
222
0
    mChildTracer.mKeyDelegate = kdelegate;
223
0
224
0
    if (!aValue.is<JSString>()) {
225
0
      JS::TraceChildren(&mChildTracer, aValue);
226
0
    }
227
0
228
0
    // The delegate could hold alive the key, so report something to the CC
229
0
    // if we haven't already.
230
0
    if (!mChildTracer.mTracedAny &&
231
0
        aKey && JS::GCThingIsMarkedGray(aKey) && kdelegate) {
232
0
      mCb.NoteWeakMapping(aMap, aKey, kdelegate, nullptr);
233
0
    }
234
0
  }
235
0
}
236
237
// Report whether the key or value of a weak mapping entry are gray but need to
238
// be marked black.
239
static void
240
ShouldWeakMappingEntryBeBlack(JSObject* aMap, JS::GCCellPtr aKey, JS::GCCellPtr aValue,
241
                              bool* aKeyShouldBeBlack, bool* aValueShouldBeBlack)
242
0
{
243
0
  *aKeyShouldBeBlack = false;
244
0
  *aValueShouldBeBlack = false;
245
0
246
0
  // If nothing that could be held alive by this entry is marked gray, return.
247
0
  bool keyMightNeedMarking = aKey && JS::GCThingIsMarkedGray(aKey);
248
0
  bool valueMightNeedMarking = aValue && JS::GCThingIsMarkedGray(aValue) &&
249
0
    aValue.kind() != JS::TraceKind::String;
250
0
  if (!keyMightNeedMarking && !valueMightNeedMarking) {
251
0
    return;
252
0
  }
253
0
254
0
  if (!AddToCCKind(aKey.kind())) {
255
0
    aKey = nullptr;
256
0
  }
257
0
258
0
  if (keyMightNeedMarking && aKey.is<JSObject>()) {
259
0
    JSObject* kdelegate = js::GetWeakmapKeyDelegate(&aKey.as<JSObject>());
260
0
    if (kdelegate && !JS::ObjectIsMarkedGray(kdelegate) &&
261
0
        (!aMap || !JS::ObjectIsMarkedGray(aMap)))
262
0
    {
263
0
      *aKeyShouldBeBlack = true;
264
0
    }
265
0
  }
266
0
267
0
  if (aValue && JS::GCThingIsMarkedGray(aValue) &&
268
0
      (!aKey || !JS::GCThingIsMarkedGray(aKey)) &&
269
0
      (!aMap || !JS::ObjectIsMarkedGray(aMap)) &&
270
0
      aValue.kind() != JS::TraceKind::Shape) {
271
0
    *aValueShouldBeBlack = true;
272
0
  }
273
0
}
274
275
struct FixWeakMappingGrayBitsTracer : public js::WeakMapTracer
276
{
277
  explicit FixWeakMappingGrayBitsTracer(JSRuntime* aRt)
278
    : js::WeakMapTracer(aRt)
279
0
  {
280
0
  }
281
282
  void
283
  FixAll()
284
0
  {
285
0
    do {
286
0
      mAnyMarked = false;
287
0
      js::TraceWeakMaps(this);
288
0
    } while (mAnyMarked);
289
0
  }
290
291
  void trace(JSObject* aMap, JS::GCCellPtr aKey, JS::GCCellPtr aValue) override
292
0
  {
293
0
    bool keyShouldBeBlack;
294
0
    bool valueShouldBeBlack;
295
0
    ShouldWeakMappingEntryBeBlack(aMap, aKey, aValue,
296
0
                                  &keyShouldBeBlack, &valueShouldBeBlack);
297
0
    if (keyShouldBeBlack && JS::UnmarkGrayGCThingRecursively(aKey)) {
298
0
      mAnyMarked = true;
299
0
    }
300
0
301
0
    if (valueShouldBeBlack && JS::UnmarkGrayGCThingRecursively(aValue)) {
302
0
      mAnyMarked = true;
303
0
    }
304
0
  }
305
306
  MOZ_INIT_OUTSIDE_CTOR bool mAnyMarked;
307
};
308
309
#ifdef DEBUG
310
// Check whether weak maps are marked correctly according to the logic above.
311
struct CheckWeakMappingGrayBitsTracer : public js::WeakMapTracer
312
{
313
  explicit CheckWeakMappingGrayBitsTracer(JSRuntime* aRt)
314
    : js::WeakMapTracer(aRt), mFailed(false)
315
  {
316
  }
317
318
  static bool
319
  Check(JSRuntime* aRt)
320
  {
321
    CheckWeakMappingGrayBitsTracer tracer(aRt);
322
    js::TraceWeakMaps(&tracer);
323
    return !tracer.mFailed;
324
  }
325
326
  void trace(JSObject* aMap, JS::GCCellPtr aKey, JS::GCCellPtr aValue) override
327
  {
328
    bool keyShouldBeBlack;
329
    bool valueShouldBeBlack;
330
    ShouldWeakMappingEntryBeBlack(aMap, aKey, aValue,
331
                                  &keyShouldBeBlack, &valueShouldBeBlack);
332
333
    if (keyShouldBeBlack) {
334
      fprintf(stderr, "Weak mapping key %p of map %p should be black\n",
335
              aKey.asCell(), aMap);
336
      mFailed = true;
337
    }
338
339
    if (valueShouldBeBlack) {
340
      fprintf(stderr, "Weak mapping value %p of map %p should be black\n",
341
              aValue.asCell(), aMap);
342
      mFailed = true;
343
    }
344
  }
345
346
  bool mFailed;
347
};
348
#endif // DEBUG
349
350
static void
351
CheckParticipatesInCycleCollection(JS::GCCellPtr aThing, const char* aName,
352
                                   void* aClosure)
353
0
{
354
0
  bool* cycleCollectionEnabled = static_cast<bool*>(aClosure);
355
0
356
0
  if (*cycleCollectionEnabled) {
357
0
    return;
358
0
  }
359
0
360
0
  if (AddToCCKind(aThing.kind()) && JS::GCThingIsMarkedGray(aThing)) {
361
0
    *cycleCollectionEnabled = true;
362
0
  }
363
0
}
364
365
NS_IMETHODIMP
366
JSGCThingParticipant::TraverseNative(void* aPtr,
367
                                     nsCycleCollectionTraversalCallback& aCb)
368
0
{
369
0
  auto runtime = reinterpret_cast<CycleCollectedJSRuntime*>(
370
0
    reinterpret_cast<char*>(this) - offsetof(CycleCollectedJSRuntime,
371
0
                                             mGCThingCycleCollectorGlobal));
372
0
373
0
  JS::GCCellPtr cellPtr(aPtr, JS::GCThingTraceKind(aPtr));
374
0
  runtime->TraverseGCThing(CycleCollectedJSRuntime::TRAVERSE_FULL, cellPtr, aCb);
375
0
  return NS_OK;
376
0
}
377
378
// NB: This is only used to initialize the participant in
379
// CycleCollectedJSRuntime. It should never be used directly.
380
static JSGCThingParticipant sGCThingCycleCollectorGlobal;
381
382
NS_IMETHODIMP
383
JSZoneParticipant::TraverseNative(void* aPtr,
384
                                  nsCycleCollectionTraversalCallback& aCb)
385
0
{
386
0
  auto runtime = reinterpret_cast<CycleCollectedJSRuntime*>(
387
0
    reinterpret_cast<char*>(this) - offsetof(CycleCollectedJSRuntime,
388
0
                                             mJSZoneCycleCollectorGlobal));
389
0
390
0
  MOZ_ASSERT(!aCb.WantAllTraces());
391
0
  JS::Zone* zone = static_cast<JS::Zone*>(aPtr);
392
0
393
0
  runtime->TraverseZone(zone, aCb);
394
0
  return NS_OK;
395
0
}
396
397
struct TraversalTracer : public JS::CallbackTracer
398
{
399
  TraversalTracer(JSRuntime* aRt, nsCycleCollectionTraversalCallback& aCb)
400
    : JS::CallbackTracer(aRt, DoNotTraceWeakMaps), mCb(aCb)
401
0
  {
402
0
    setCanSkipJsids(true);
403
0
  }
404
  void onChild(const JS::GCCellPtr& aThing) override;
405
  nsCycleCollectionTraversalCallback& mCb;
406
};
407
408
void
409
TraversalTracer::onChild(const JS::GCCellPtr& aThing)
410
0
{
411
0
  // Checking strings and symbols for being gray is rather slow, and we don't
412
0
  // need either of them for the cycle collector.
413
0
  if (aThing.is<JSString>() || aThing.is<JS::Symbol>()) {
414
0
    return;
415
0
  }
416
0
417
0
  // Don't traverse non-gray objects, unless we want all traces.
418
0
  if (!JS::GCThingIsMarkedGray(aThing) && !mCb.WantAllTraces()) {
419
0
    return;
420
0
  }
421
0
422
0
  /*
423
0
   * This function needs to be careful to avoid stack overflow. Normally, when
424
0
   * AddToCCKind is true, the recursion terminates immediately as we just add
425
0
   * |thing| to the CC graph. So overflow is only possible when there are long
426
0
   * or cyclic chains of non-AddToCCKind GC things. Places where this can occur
427
0
   * use special APIs to handle such chains iteratively.
428
0
   */
429
0
  if (AddToCCKind(aThing.kind())) {
430
0
    if (MOZ_UNLIKELY(mCb.WantDebugInfo())) {
431
0
      char buffer[200];
432
0
      getTracingEdgeName(buffer, sizeof(buffer));
433
0
      mCb.NoteNextEdgeName(buffer);
434
0
    }
435
0
    mCb.NoteJSChild(aThing);
436
0
  } else if (aThing.is<js::Shape>()) {
437
0
    // The maximum depth of traversal when tracing a Shape is unbounded, due to
438
0
    // the parent pointers on the shape.
439
0
    JS_TraceShapeCycleCollectorChildren(this, aThing);
440
0
  } else if (aThing.is<js::ObjectGroup>()) {
441
0
    // The maximum depth of traversal when tracing an ObjectGroup is unbounded,
442
0
    // due to information attached to the groups which can lead other groups to
443
0
    // be traced.
444
0
    JS_TraceObjectGroupCycleCollectorChildren(this, aThing);
445
0
  } else {
446
0
    JS::TraceChildren(this, aThing);
447
0
  }
448
0
}
449
450
static void
451
NoteJSChildGrayWrapperShim(void* aData, JS::GCCellPtr aThing)
452
0
{
453
0
  TraversalTracer* trc = static_cast<TraversalTracer*>(aData);
454
0
  trc->onChild(aThing);
455
0
}
456
457
/*
458
 * The cycle collection participant for a Zone is intended to produce the same
459
 * results as if all of the gray GCthings in a zone were merged into a single node,
460
 * except for self-edges. This avoids the overhead of representing all of the GCthings in
461
 * the zone in the cycle collector graph, which should be much faster if many of
462
 * the GCthings in the zone are gray.
463
 *
464
 * Zone merging should not always be used, because it is a conservative
465
 * approximation of the true cycle collector graph that can incorrectly identify some
466
 * garbage objects as being live. For instance, consider two cycles that pass through a
467
 * zone, where one is garbage and the other is live. If we merge the entire
468
 * zone, the cycle collector will think that both are alive.
469
 *
470
 * We don't have to worry about losing track of a garbage cycle, because any such garbage
471
 * cycle incorrectly identified as live must contain at least one C++ to JS edge, and
472
 * XPConnect will always add the C++ object to the CC graph. (This is in contrast to pure
473
 * C++ garbage cycles, which must always be properly identified, because we clear the
474
 * purple buffer during every CC, which may contain the last reference to a garbage
475
 * cycle.)
476
 */
477
478
// NB: This is only used to initialize the participant in
479
// CycleCollectedJSRuntime. It should never be used directly.
480
static const JSZoneParticipant sJSZoneCycleCollectorGlobal;
481
482
static
483
void JSObjectsTenuredCb(JSContext* aContext, void* aData)
484
1
{
485
1
  static_cast<CycleCollectedJSRuntime*>(aData)->JSObjectsTenured();
486
1
}
487
488
static void
489
MozCrashWarningReporter(JSContext*, JSErrorReport*)
490
0
{
491
0
  MOZ_CRASH("Why is someone touching JSAPI without an AutoJSAPI?");
492
0
}
493
494
CycleCollectedJSRuntime::CycleCollectedJSRuntime(JSContext* aCx)
495
  : mGCThingCycleCollectorGlobal(sGCThingCycleCollectorGlobal)
496
  , mJSZoneCycleCollectorGlobal(sJSZoneCycleCollectorGlobal)
497
  , mJSRuntime(JS_GetRuntime(aCx))
498
  , mHasPendingIdleGCTask(false)
499
  , mPrevGCSliceCallback(nullptr)
500
  , mPrevGCNurseryCollectionCallback(nullptr)
501
  , mJSHolderMap(256)
502
  , mOutOfMemoryState(OOMState::OK)
503
  , mLargeAllocationFailureState(OOMState::OK)
504
#ifdef DEBUG
505
  , mShutdownCalled(false)
506
#endif
507
3
{
508
3
  MOZ_COUNT_CTOR(CycleCollectedJSRuntime);
509
3
  MOZ_ASSERT(aCx);
510
3
  MOZ_ASSERT(mJSRuntime);
511
3
512
3
  if (!JS_AddExtraGCRootsTracer(aCx, TraceBlackJS, this)) {
513
0
    MOZ_CRASH("JS_AddExtraGCRootsTracer failed");
514
0
  }
515
3
  JS_SetGrayGCRootsTracer(aCx, TraceGrayJS, this);
516
3
  JS_SetGCCallback(aCx, GCCallback, this);
517
3
  mPrevGCSliceCallback = JS::SetGCSliceCallback(aCx, GCSliceCallback);
518
3
519
3
  if (NS_IsMainThread()) {
520
3
    // We would like to support all threads here, but the way timeline consumers
521
3
    // are set up currently, you can either add a marker for one specific
522
3
    // docshell, or for every consumer globally. We would like to add a marker
523
3
    // for every consumer observing anything on this thread, but that is not
524
3
    // currently possible. For now, add global markers only when we are on the
525
3
    // main thread, since the UI for this tracing data only displays data
526
3
    // relevant to the main-thread.
527
3
    mPrevGCNurseryCollectionCallback = JS::SetGCNurseryCollectionCallback(
528
3
      aCx, GCNurseryCollectionCallback);
529
3
  }
530
3
531
3
  JS_SetObjectsTenuredCallback(aCx, JSObjectsTenuredCb, this);
532
3
  JS::SetOutOfMemoryCallback(aCx, OutOfMemoryCallback, this);
533
3
  JS_SetExternalStringSizeofCallback(aCx, SizeofExternalStringCallback);
534
3
  JS::SetWarningReporter(aCx, MozCrashWarningReporter);
535
3
536
3
  js::AutoEnterOOMUnsafeRegion::setAnnotateOOMAllocationSizeCallback(
537
3
    CrashReporter::AnnotateOOMAllocationSize);
538
3
539
3
  static js::DOMCallbacks DOMcallbacks = {
540
3
    InstanceClassHasProtoAtDepth
541
3
  };
542
3
  SetDOMCallbacks(aCx, &DOMcallbacks);
543
3
  js::SetScriptEnvironmentPreparer(aCx, &mEnvironmentPreparer);
544
3
545
3
  JS::dbg::SetDebuggerMallocSizeOf(aCx, moz_malloc_size_of);
546
3
547
3
#ifdef MOZ_JS_DEV_ERROR_INTERCEPTOR
548
3
  JS_SetErrorInterceptorCallback(mJSRuntime, &mErrorInterceptor);
549
3
#endif // MOZ_JS_DEV_ERROR_INTERCEPTOR
550
3
}
551
552
void
553
CycleCollectedJSRuntime::Shutdown(JSContext* cx)
554
0
{
555
0
#ifdef MOZ_JS_DEV_ERROR_INTERCEPTOR
556
0
  mErrorInterceptor.Shutdown(mJSRuntime);
557
0
#endif // MOZ_JS_DEV_ERROR_INTERCEPTOR
558
0
  JS_RemoveExtraGCRootsTracer(cx, TraceBlackJS, this);
559
0
  JS_RemoveExtraGCRootsTracer(cx, TraceGrayJS, this);
560
#ifdef DEBUG
561
  mShutdownCalled = true;
562
#endif
563
}
564
565
CycleCollectedJSRuntime::~CycleCollectedJSRuntime()
566
0
{
567
0
  MOZ_COUNT_DTOR(CycleCollectedJSRuntime);
568
0
  MOZ_ASSERT(!mDeferredFinalizerTable.Count());
569
0
  MOZ_ASSERT(mShutdownCalled);
570
0
}
571
572
void
573
CycleCollectedJSRuntime::AddContext(CycleCollectedJSContext* aContext)
574
3
{
575
3
  mContexts.insertBack(aContext);
576
3
}
577
578
void
579
CycleCollectedJSRuntime::RemoveContext(CycleCollectedJSContext* aContext)
580
0
{
581
0
  aContext->removeFrom(mContexts);
582
0
}
583
584
size_t
585
CycleCollectedJSRuntime::SizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const
586
0
{
587
0
  size_t n = 0;
588
0
589
0
  // We're deliberately not measuring anything hanging off the entries in
590
0
  // mJSHolders.
591
0
  n += mJSHolders.SizeOfExcludingThis(aMallocSizeOf);
592
0
  n += mJSHolderMap.ShallowSizeOfExcludingThis(aMallocSizeOf);
593
0
594
0
  return n;
595
0
}
596
597
void
598
CycleCollectedJSRuntime::UnmarkSkippableJSHolders()
599
0
{
600
0
  // Prevent nsWrapperCaches accessed under CanSkip from adding recorded events
601
0
  // which might not replay in the same order.
602
0
  recordreplay::AutoDisallowThreadEvents disallow;
603
0
604
0
  for (auto iter = mJSHolders.Iter(); !iter.Done(); iter.Next()) {
605
0
    void* holder = iter.Get().mHolder;
606
0
    nsScriptObjectTracer* tracer = iter.Get().mTracer;
607
0
    tracer->CanSkip(holder, true);
608
0
  }
609
0
}
610
611
void
612
CycleCollectedJSRuntime::DescribeGCThing(bool aIsMarked, JS::GCCellPtr aThing,
613
                                         nsCycleCollectionTraversalCallback& aCb) const
614
0
{
615
0
  if (!aCb.WantDebugInfo()) {
616
0
    aCb.DescribeGCedNode(aIsMarked, "JS Object");
617
0
    return;
618
0
  }
619
0
620
0
  char name[72];
621
0
  uint64_t compartmentAddress = 0;
622
0
  if (aThing.is<JSObject>()) {
623
0
    JSObject* obj = &aThing.as<JSObject>();
624
0
    compartmentAddress = (uint64_t)js::GetObjectCompartment(obj);
625
0
    const js::Class* clasp = js::GetObjectClass(obj);
626
0
627
0
    // Give the subclass a chance to do something
628
0
    if (DescribeCustomObjects(obj, clasp, name)) {
629
0
      // Nothing else to do!
630
0
    } else if (js::IsFunctionObject(obj)) {
631
0
      JSFunction* fun = JS_GetObjectFunction(obj);
632
0
      JSString* str = JS_GetFunctionDisplayId(fun);
633
0
      if (str) {
634
0
        JSFlatString* flat = JS_ASSERT_STRING_IS_FLAT(str);
635
0
        nsAutoString chars;
636
0
        AssignJSFlatString(chars, flat);
637
0
        NS_ConvertUTF16toUTF8 fname(chars);
638
0
        SprintfLiteral(name, "JS Object (Function - %s)", fname.get());
639
0
      } else {
640
0
        SprintfLiteral(name, "JS Object (Function)");
641
0
      }
642
0
    } else {
643
0
      SprintfLiteral(name, "JS Object (%s)", clasp->name);
644
0
    }
645
0
  } else {
646
0
    SprintfLiteral(name, "JS %s", JS::GCTraceKindToAscii(aThing.kind()));
647
0
  }
648
0
649
0
  // Disable printing global for objects while we figure out ObjShrink fallout.
650
0
  aCb.DescribeGCedNode(aIsMarked, name, compartmentAddress);
651
0
}
652
653
void
654
CycleCollectedJSRuntime::NoteGCThingJSChildren(JS::GCCellPtr aThing,
655
                                               nsCycleCollectionTraversalCallback& aCb) const
656
0
{
657
0
  TraversalTracer trc(mJSRuntime, aCb);
658
0
  JS::TraceChildren(&trc, aThing);
659
0
}
660
661
void
662
CycleCollectedJSRuntime::NoteGCThingXPCOMChildren(const js::Class* aClasp,
663
                                                  JSObject* aObj,
664
                                                  nsCycleCollectionTraversalCallback& aCb) const
665
0
{
666
0
  MOZ_ASSERT(aClasp);
667
0
  MOZ_ASSERT(aClasp == js::GetObjectClass(aObj));
668
0
669
0
  if (NoteCustomGCThingXPCOMChildren(aClasp, aObj, aCb)) {
670
0
    // Nothing else to do!
671
0
    return;
672
0
  }
673
0
  // XXX This test does seem fragile, we should probably whitelist classes
674
0
  //     that do hold a strong reference, but that might not be possible.
675
0
  else if (aClasp->flags & JSCLASS_HAS_PRIVATE &&
676
0
           aClasp->flags & JSCLASS_PRIVATE_IS_NSISUPPORTS) {
677
0
    NS_CYCLE_COLLECTION_NOTE_EDGE_NAME(aCb, "js::GetObjectPrivate(obj)");
678
0
    aCb.NoteXPCOMChild(static_cast<nsISupports*>(js::GetObjectPrivate(aObj)));
679
0
  } else {
680
0
    const DOMJSClass* domClass = GetDOMClass(aObj);
681
0
    if (domClass) {
682
0
      NS_CYCLE_COLLECTION_NOTE_EDGE_NAME(aCb, "UnwrapDOMObject(obj)");
683
0
      // It's possible that our object is an unforgeable holder object, in
684
0
      // which case it doesn't actually have a C++ DOM object associated with
685
0
      // it.  Use UnwrapPossiblyNotInitializedDOMObject, which produces null in
686
0
      // that case, since NoteXPCOMChild/NoteNativeChild are null-safe.
687
0
      if (domClass->mDOMObjectIsISupports) {
688
0
        aCb.NoteXPCOMChild(UnwrapPossiblyNotInitializedDOMObject<nsISupports>(aObj));
689
0
      } else if (domClass->mParticipant) {
690
0
        aCb.NoteNativeChild(UnwrapPossiblyNotInitializedDOMObject<void>(aObj),
691
0
                            domClass->mParticipant);
692
0
      }
693
0
    }
694
0
  }
695
0
}
696
697
void
698
CycleCollectedJSRuntime::TraverseGCThing(TraverseSelect aTs, JS::GCCellPtr aThing,
699
                                         nsCycleCollectionTraversalCallback& aCb)
700
0
{
701
0
  bool isMarkedGray = JS::GCThingIsMarkedGray(aThing);
702
0
703
0
  if (aTs == TRAVERSE_FULL) {
704
0
    DescribeGCThing(!isMarkedGray, aThing, aCb);
705
0
  }
706
0
707
0
  // If this object is alive, then all of its children are alive. For JS objects,
708
0
  // the black-gray invariant ensures the children are also marked black. For C++
709
0
  // objects, the ref count from this object will keep them alive. Thus we don't
710
0
  // need to trace our children, unless we are debugging using WantAllTraces.
711
0
  if (!isMarkedGray && !aCb.WantAllTraces()) {
712
0
    return;
713
0
  }
714
0
715
0
  if (aTs == TRAVERSE_FULL) {
716
0
    NoteGCThingJSChildren(aThing, aCb);
717
0
  }
718
0
719
0
  if (aThing.is<JSObject>()) {
720
0
    JSObject* obj = &aThing.as<JSObject>();
721
0
    NoteGCThingXPCOMChildren(js::GetObjectClass(obj), obj, aCb);
722
0
  }
723
0
}
724
725
struct TraverseObjectShimClosure
726
{
727
  nsCycleCollectionTraversalCallback& cb;
728
  CycleCollectedJSRuntime* self;
729
};
730
731
void
732
CycleCollectedJSRuntime::TraverseZone(JS::Zone* aZone,
733
                                      nsCycleCollectionTraversalCallback& aCb)
734
0
{
735
0
  /*
736
0
   * We treat the zone as being gray. We handle non-gray GCthings in the
737
0
   * zone by not reporting their children to the CC. The black-gray invariant
738
0
   * ensures that any JS children will also be non-gray, and thus don't need to be
739
0
   * added to the graph. For C++ children, not representing the edge from the
740
0
   * non-gray JS GCthings to the C++ object will keep the child alive.
741
0
   *
742
0
   * We don't allow zone merging in a WantAllTraces CC, because then these
743
0
   * assumptions don't hold.
744
0
   */
745
0
  aCb.DescribeGCedNode(false, "JS Zone");
746
0
747
0
  /*
748
0
   * Every JS child of everything in the zone is either in the zone
749
0
   * or is a cross-compartment wrapper. In the former case, we don't need to
750
0
   * represent these edges in the CC graph because JS objects are not ref counted.
751
0
   * In the latter case, the JS engine keeps a map of these wrappers, which we
752
0
   * iterate over. Edges between compartments in the same zone will add
753
0
   * unnecessary loop edges to the graph (bug 842137).
754
0
   */
755
0
  TraversalTracer trc(mJSRuntime, aCb);
756
0
  js::VisitGrayWrapperTargets(aZone, NoteJSChildGrayWrapperShim, &trc);
757
0
758
0
  /*
759
0
   * To find C++ children of things in the zone, we scan every JS Object in
760
0
   * the zone. Only JS Objects can have C++ children.
761
0
   */
762
0
  TraverseObjectShimClosure closure = { aCb, this };
763
0
  js::IterateGrayObjects(aZone, TraverseObjectShim, &closure);
764
0
}
765
766
/* static */ void
767
CycleCollectedJSRuntime::TraverseObjectShim(void* aData, JS::GCCellPtr aThing)
768
0
{
769
0
  TraverseObjectShimClosure* closure =
770
0
    static_cast<TraverseObjectShimClosure*>(aData);
771
0
772
0
  MOZ_ASSERT(aThing.is<JSObject>());
773
0
  closure->self->TraverseGCThing(CycleCollectedJSRuntime::TRAVERSE_CPP,
774
0
                                 aThing, closure->cb);
775
0
}
776
777
void
778
CycleCollectedJSRuntime::TraverseNativeRoots(nsCycleCollectionNoteRootCallback& aCb)
779
0
{
780
0
  // NB: This is here just to preserve the existing XPConnect order. I doubt it
781
0
  // would hurt to do this after the JS holders.
782
0
  TraverseAdditionalNativeRoots(aCb);
783
0
784
0
  for (auto iter = mJSHolders.Iter(); !iter.Done(); iter.Next()) {
785
0
    void* holder = iter.Get().mHolder;
786
0
    nsScriptObjectTracer* tracer = iter.Get().mTracer;
787
0
788
0
    bool noteRoot = false;
789
0
    if (MOZ_UNLIKELY(aCb.WantAllTraces())) {
790
0
      noteRoot = true;
791
0
    } else {
792
0
      tracer->Trace(holder,
793
0
                    TraceCallbackFunc(CheckParticipatesInCycleCollection),
794
0
                    &noteRoot);
795
0
    }
796
0
797
0
    if (noteRoot) {
798
0
      aCb.NoteNativeRoot(holder, tracer);
799
0
    }
800
0
  }
801
0
}
802
803
/* static */ void
804
CycleCollectedJSRuntime::TraceBlackJS(JSTracer* aTracer, void* aData)
805
18
{
806
18
  CycleCollectedJSRuntime* self = static_cast<CycleCollectedJSRuntime*>(aData);
807
18
808
18
  self->TraceNativeBlackRoots(aTracer);
809
18
}
810
811
/* static */ void
812
CycleCollectedJSRuntime::TraceGrayJS(JSTracer* aTracer, void* aData)
813
18
{
814
18
  CycleCollectedJSRuntime* self = static_cast<CycleCollectedJSRuntime*>(aData);
815
18
816
18
  // Mark these roots as gray so the CC can walk them later.
817
18
  self->TraceNativeGrayRoots(aTracer);
818
18
}
819
820
/* static */ void
821
CycleCollectedJSRuntime::GCCallback(JSContext* aContext,
822
                                    JSGCStatus aStatus,
823
                                    void* aData)
824
36
{
825
36
  CycleCollectedJSRuntime* self = static_cast<CycleCollectedJSRuntime*>(aData);
826
36
827
36
  MOZ_ASSERT(CycleCollectedJSContext::Get()->Context() == aContext);
828
36
  MOZ_ASSERT(CycleCollectedJSContext::Get()->Runtime() == self);
829
36
830
36
  self->OnGC(aContext, aStatus);
831
36
}
832
833
/* static */ void
CycleCollectedJSRuntime::GCSliceCallback(JSContext* aContext,
                                         JS::GCProgress aProgress,
                                         const JS::GCDescription& aDesc)
{
  CycleCollectedJSRuntime* self = CycleCollectedJSRuntime::Get();
  MOZ_ASSERT(CycleCollectedJSContext::Get()->Context() == aContext);

#ifdef MOZ_GECKO_PROFILER
  // Record profiler markers for completed GC cycles and slices.
  if (profiler_is_active()) {
    if (aProgress == JS::GC_CYCLE_END) {
      profiler_add_marker(
        "GCMajor",
        MakeUnique<GCMajorMarkerPayload>(aDesc.startTime(aContext),
                                         aDesc.endTime(aContext),
                                         aDesc.summaryToJSON(aContext)));
    } else if (aProgress == JS::GC_SLICE_END) {
      profiler_add_marker(
        "GCSlice",
        MakeUnique<GCSliceMarkerPayload>(aDesc.lastSliceStart(aContext),
                                         aDesc.lastSliceEnd(aContext),
                                         aDesc.sliceToJSON(aContext)));
    }
  }
#endif

  // At the end of a GC cycle, notify any Debugger onGarbageCollection hooks.
  // A failed enqueue is only worth warning about when we are not in one of
  // the shutdown-related GC reasons.
  if (aProgress == JS::GC_CYCLE_END &&
      JS::dbg::FireOnGarbageCollectionHookRequired(aContext)) {
    JS::gcreason::Reason reason = aDesc.reason_;
    Unused <<
      NS_WARN_IF(NS_FAILED(DebuggerOnGCRunnable::Enqueue(aContext, aDesc)) &&
                 reason != JS::gcreason::SHUTDOWN_CC &&
                 reason != JS::gcreason::DESTROY_RUNTIME &&
                 reason != JS::gcreason::XPCONNECT_SHUTDOWN);
  }

  // Chain to any slice callback that was installed before ours.
  if (self->mPrevGCSliceCallback) {
    self->mPrevGCSliceCallback(aContext, aProgress, aDesc);
  }
}
873
874
// Timeline marker recording the start or end of a minor (nursery) GC for the
// devtools performance timeline.
class MinorGCMarker : public TimelineMarker
{
private:
  JS::gcreason::Reason mReason;  // why this minor GC was triggered

public:
  MinorGCMarker(MarkerTracingType aTracingType,
                JS::gcreason::Reason aReason)
    : TimelineMarker("MinorGC",
                     aTracingType,
                     MarkerStackRequest::NO_STACK)
    , mReason(aReason)
    {
      // Only START/END pairs make sense for this marker type.
      MOZ_ASSERT(aTracingType == MarkerTracingType::START ||
                 aTracingType == MarkerTracingType::END);
    }

  // Convenience constructor mapping a nursery progress notification onto a
  // START or END marker.
  MinorGCMarker(JS::GCNurseryProgress aProgress,
                JS::gcreason::Reason aReason)
    : TimelineMarker("MinorGC",
                     aProgress == JS::GCNurseryProgress::GC_NURSERY_COLLECTION_START
                       ? MarkerTracingType::START
                       : MarkerTracingType::END,
                     MarkerStackRequest::NO_STACK)
    , mReason(aReason)
  { }

  virtual void
  AddDetails(JSContext* aCx,
             dom::ProfileTimelineMarker& aMarker) override
  {
    TimelineMarker::AddDetails(aCx, aMarker);

    // Only the START marker carries the GC reason as its cause name.
    if (GetTracingType() == MarkerTracingType::START) {
      auto reason = JS::gcreason::ExplainReason(mReason);
      aMarker.mCauseName.Construct(NS_ConvertUTF8toUTF16(reason));
    }
  }

  virtual UniquePtr<AbstractTimelineMarker>
  Clone() override
  {
    // Preserve the original timestamp on the clone.
    auto clone = MakeUnique<MinorGCMarker>(GetTracingType(), mReason);
    clone->SetCustomTime(GetTime());
    return UniquePtr<AbstractTimelineMarker>(std::move(clone));
  }
};
921
922
/* static */ void
CycleCollectedJSRuntime::GCNurseryCollectionCallback(JSContext* aContext,
                                                     JS::GCNurseryProgress aProgress,
                                                     JS::gcreason::Reason aReason)
{
  CycleCollectedJSRuntime* self = CycleCollectedJSRuntime::Get();
  MOZ_ASSERT(CycleCollectedJSContext::Get()->Context() == aContext);
  MOZ_ASSERT(NS_IsMainThread());

  // Emit a devtools timeline marker if any docshell is being observed.
  RefPtr<TimelineConsumers> timelines = TimelineConsumers::Get();
  if (timelines && !timelines->IsEmpty()) {
    UniquePtr<AbstractTimelineMarker> abstractMarker(
      MakeUnique<MinorGCMarker>(aProgress, aReason));
    timelines->AddMarkerForAllObservedDocShells(abstractMarker);
  }

  if (aProgress == JS::GCNurseryProgress::GC_NURSERY_COLLECTION_START) {
    // Remember the start time so the END notification can report duration.
    self->mLatestNurseryCollectionStart = TimeStamp::Now();
  }
#ifdef MOZ_GECKO_PROFILER
  else if (aProgress == JS::GCNurseryProgress::GC_NURSERY_COLLECTION_END &&
           profiler_is_active())
  {
    profiler_add_marker(
      "GCMinor",
      MakeUnique<GCMinorMarkerPayload>(self->mLatestNurseryCollectionStart,
                                       TimeStamp::Now(),
                                       JS::MinorGcToJSON(aContext)));
  }
#endif

  // Chain to any nursery-collection callback installed before ours.
  if (self->mPrevGCNurseryCollectionCallback) {
    self->mPrevGCNurseryCollectionCallback(aContext, aProgress, aReason);
  }
}
957
958
959
/* static */ void
960
CycleCollectedJSRuntime::OutOfMemoryCallback(JSContext* aContext,
961
                                             void* aData)
962
0
{
963
0
  CycleCollectedJSRuntime* self = static_cast<CycleCollectedJSRuntime*>(aData);
964
0
965
0
  MOZ_ASSERT(CycleCollectedJSContext::Get()->Context() == aContext);
966
0
  MOZ_ASSERT(CycleCollectedJSContext::Get()->Runtime() == self);
967
0
968
0
  self->OnOutOfMemory();
969
0
}
970
971
/* static */ size_t
CycleCollectedJSRuntime::SizeofExternalStringCallback(JSString* aStr,
                                                      MallocSizeOf aMallocSizeOf)
{
  // We promised the JS engine we would not GC.  Enforce that:
  JS::AutoCheckCannotGC autoCannotGC;

  if (!XPCStringConvert::IsDOMString(aStr)) {
    // Might be a literal or something we don't understand.  Just claim 0.
    return 0;
  }

  const char16_t* data = JS_GetTwoByteExternalStringChars(aStr);
  const nsStringBuffer* stringBuffer = nsStringBuffer::FromData((void*)data);
  // The external string owns the entire buffer, so report it including the
  // header -- but only when unshared; a shared buffer has no single owner we
  // could attribute it to.
  return stringBuffer->SizeOfIncludingThisIfUnshared(aMallocSizeOf);
}
990
991
// TraceCallbacks implementation that forwards every traced edge to a JSTracer
// passed via aClosure; used by TraceNativeGrayRoots/TraceScriptHolder to let
// the GC trace JS things held by native objects.
struct JsGcTracer : public TraceCallbacks
{
  virtual void Trace(JS::Heap<JS::Value>* aPtr, const char* aName,
                     void* aClosure) const override
  {
    JS::TraceEdge(static_cast<JSTracer*>(aClosure), aPtr, aName);
  }
  virtual void Trace(JS::Heap<jsid>* aPtr, const char* aName,
                     void* aClosure) const override
  {
    JS::TraceEdge(static_cast<JSTracer*>(aClosure), aPtr, aName);
  }
  virtual void Trace(JS::Heap<JSObject*>* aPtr, const char* aName,
                     void* aClosure) const override
  {
    JS::TraceEdge(static_cast<JSTracer*>(aClosure), aPtr, aName);
  }
  virtual void Trace(JSObject** aPtr, const char* aName,
                     void* aClosure) const override
  {
    // Raw JSObject* slots carry no write barrier wrapper, so the
    // manually-barriered tracing entry point is required here.
    js::UnsafeTraceManuallyBarrieredEdge(static_cast<JSTracer*>(aClosure), aPtr, aName);
  }
  virtual void Trace(JS::TenuredHeap<JSObject*>* aPtr, const char* aName,
                     void* aClosure) const override
  {
    JS::TraceEdge(static_cast<JSTracer*>(aClosure), aPtr, aName);
  }
  virtual void Trace(JS::Heap<JSString*>* aPtr, const char* aName,
                     void* aClosure) const override
  {
    JS::TraceEdge(static_cast<JSTracer*>(aClosure), aPtr, aName);
  }
  virtual void Trace(JS::Heap<JSScript*>* aPtr, const char* aName,
                     void* aClosure) const override
  {
    JS::TraceEdge(static_cast<JSTracer*>(aClosure), aPtr, aName);
  }
  virtual void Trace(JS::Heap<JSFunction*>* aPtr, const char* aName,
                     void* aClosure) const override
  {
    JS::TraceEdge(static_cast<JSTracer*>(aClosure), aPtr, aName);
  }
};
1034
1035
void
1036
mozilla::TraceScriptHolder(nsISupports* aHolder, JSTracer* aTracer)
1037
0
{
1038
0
  nsXPCOMCycleCollectionParticipant* participant = nullptr;
1039
0
  CallQueryInterface(aHolder, &participant);
1040
0
  participant->Trace(aHolder, JsGcTracer(), aTracer);
1041
0
}
1042
1043
void
1044
CycleCollectedJSRuntime::TraceNativeGrayRoots(JSTracer* aTracer)
1045
18
{
1046
18
  // NB: This is here just to preserve the existing XPConnect order. I doubt it
1047
18
  // would hurt to do this after the JS holders.
1048
18
  TraceAdditionalNativeGrayRoots(aTracer);
1049
18
1050
36
  for (auto iter = mJSHolders.Iter(); !iter.Done(); iter.Next()) {
1051
18
    void* holder = iter.Get().mHolder;
1052
18
    nsScriptObjectTracer* tracer = iter.Get().mTracer;
1053
18
    tracer->Trace(holder, JsGcTracer(), aTracer);
1054
18
  }
1055
18
}
1056
1057
void
CycleCollectedJSRuntime::AddJSHolder(void* aHolder, nsScriptObjectTracer* aTracer)
{
  // Register (or re-register) a native object whose JS members must be
  // traced. mJSHolderMap indexes into the dense mJSHolders vector.
  auto entry = mJSHolderMap.LookupForAdd(aHolder);
  if (entry) {
    // Already registered: just refresh the tracer.
    JSHolderInfo* info = entry.Data();
    MOZ_ASSERT(info->mHolder == aHolder);
    info->mTracer = aTracer;
    return;
  }

  // Append first so GetLast() points at the new element when the map entry
  // is created.
  mJSHolders.InfallibleAppend(JSHolderInfo {aHolder, aTracer});
  entry.OrInsert([&] {return &mJSHolders.GetLast();});
}
1071
1072
// TraceCallbacks implementation that nulls out every traced member (undefined
// for values, JSID_VOID for ids, nullptr for GC-thing pointers). Used by
// RemoveJSHolder to sever a holder's JS references on unregistration.
struct ClearJSHolder : public TraceCallbacks
{
  virtual void Trace(JS::Heap<JS::Value>* aPtr, const char*, void*) const override
  {
    aPtr->setUndefined();
  }

  virtual void Trace(JS::Heap<jsid>* aPtr, const char*, void*) const override
  {
    *aPtr = JSID_VOID;
  }

  virtual void Trace(JS::Heap<JSObject*>* aPtr, const char*, void*) const override
  {
    *aPtr = nullptr;
  }

  virtual void Trace(JSObject** aPtr, const char* aName,
                     void* aClosure) const override
  {
    *aPtr = nullptr;
  }

  virtual void Trace(JS::TenuredHeap<JSObject*>* aPtr, const char*, void*) const override
  {
    *aPtr = nullptr;
  }

  virtual void Trace(JS::Heap<JSString*>* aPtr, const char*, void*) const override
  {
    *aPtr = nullptr;
  }

  virtual void Trace(JS::Heap<JSScript*>* aPtr, const char*, void*) const override
  {
    *aPtr = nullptr;
  }

  virtual void Trace(JS::Heap<JSFunction*>* aPtr, const char*, void*) const override
  {
    *aPtr = nullptr;
  }
};
1115
1116
void
CycleCollectedJSRuntime::RemoveJSHolder(void* aHolder)
{
  // Unregister a JS holder, clearing its JS members first. No-op if the
  // holder was never registered.
  auto entry = mJSHolderMap.Lookup(aHolder);
  if (entry) {
    JSHolderInfo* info = entry.Data();
    MOZ_ASSERT(info->mHolder == aHolder);
    // Null out the holder's JS references so nothing dangles after removal.
    info->mTracer->Trace(aHolder, ClearJSHolder(), nullptr);

    // mJSHolders is kept dense: move the last element into the vacated slot
    // and pop the tail; the moved element's map entry is fixed up below.
    JSHolderInfo* lastInfo = &mJSHolders.GetLast();
    bool updateLast = (info != lastInfo);
    if (updateLast) {
      *info = *lastInfo;
    }

    mJSHolders.PopLast();
    entry.Remove();

    if (updateLast) {
      // We have to do this after removing the entry above to ensure that we
      // don't trip over the hashtable generation number assertion.
      mJSHolderMap.Put(info->mHolder, info);
    }
  }
}
1141
1142
#ifdef DEBUG
1143
bool
CycleCollectedJSRuntime::IsJSHolder(void* aHolder)
{
  // Debug-only: true if aHolder is currently registered via AddJSHolder.
  return mJSHolderMap.Get(aHolder, nullptr);
}
1148
1149
// Trace callback used by AssertNoObjectsToTrace: any live traced edge at this
// point is a bug, so assert the cell pointer is null.
static void
AssertNoGcThing(JS::GCCellPtr aGCThing, const char* aName, void* aClosure)
{
  MOZ_ASSERT(!aGCThing);
}
1154
1155
void
CycleCollectedJSRuntime::AssertNoObjectsToTrace(void* aPossibleJSHolder)
{
  // Debug-only: if aPossibleJSHolder is a registered holder, verify it no
  // longer holds any live JS things (each traced edge asserts via
  // AssertNoGcThing).
  JSHolderInfo* info = nullptr;
  if (!mJSHolderMap.Get(aPossibleJSHolder, &info)) {
    // Not a registered holder; nothing to check.
    return;
  }

  MOZ_ASSERT(info->mHolder == aPossibleJSHolder);
  info->mTracer->Trace(aPossibleJSHolder, TraceCallbackFunc(AssertNoGcThing), nullptr);
}
1166
#endif
1167
1168
nsCycleCollectionParticipant*
CycleCollectedJSRuntime::GCThingParticipant()
{
  // Cycle-collector participant used for individual JS GC things.
  return &mGCThingCycleCollectorGlobal;
}
1173
1174
nsCycleCollectionParticipant*
CycleCollectedJSRuntime::ZoneParticipant()
{
  // Cycle-collector participant used for whole JS zones (zone merging).
  return &mJSZoneCycleCollectorGlobal;
}
1179
1180
nsresult
CycleCollectedJSRuntime::TraverseRoots(nsCycleCollectionNoteRootCallback& aCb)
{
  // Report native gray roots, then JS weak-map entries, to the cycle
  // collector's graph builder.
  TraverseNativeRoots(aCb);

  NoteWeakMapsTracer trc(mJSRuntime, aCb);
  js::TraceWeakMaps(&trc);

  return NS_OK;
}
1190
1191
bool
CycleCollectedJSRuntime::UsefulToMergeZones() const
{
  // Base implementation never opts into zone merging; subclasses may
  // override.
  return false;
}
1196
1197
void
CycleCollectedJSRuntime::FixWeakMappingGrayBits() const
{
  // Run the weak-map gray-bit fixup tracer over the whole runtime. Must not
  // race with an in-progress GC.
  MOZ_ASSERT(!JS::IsIncrementalGCInProgress(mJSRuntime),
             "Don't call FixWeakMappingGrayBits during a GC.");
  FixWeakMappingGrayBitsTracer fixer(mJSRuntime);
  fixer.FixAll();
}
1205
1206
void
CycleCollectedJSRuntime::CheckGrayBits() const
{
  // Debug validation of gray marking state and weak-map gray bits.
  MOZ_ASSERT(!JS::IsIncrementalGCInProgress(mJSRuntime),
             "Don't call CheckGrayBits during a GC.");

#ifndef ANDROID
  // Bug 1346874 - The gray state check is expensive. Android tests are already
  // slow enough that this check can easily push them over the threshold to a
  // timeout.

  MOZ_ASSERT(js::CheckGrayMarkingState(mJSRuntime));
  MOZ_ASSERT(CheckWeakMappingGrayBitsTracer::Check(mJSRuntime));
#endif
}
1221
1222
bool
CycleCollectedJSRuntime::AreGCGrayBitsValid() const
{
  // Thin wrapper over the JS engine's gray-bit validity query.
  return js::AreGCGrayBitsValid(mJSRuntime);
}
1227
1228
void
1229
CycleCollectedJSRuntime::GarbageCollect(uint32_t aReason) const
1230
0
{
1231
0
  MOZ_ASSERT(aReason < JS::gcreason::NUM_REASONS);
1232
0
  JS::gcreason::Reason gcreason = static_cast<JS::gcreason::Reason>(aReason);
1233
0
1234
0
  JSContext* cx = CycleCollectedJSContext::Get()->Context();
1235
0
  JS::PrepareForFullGC(cx);
1236
0
  JS::NonIncrementalGC(cx, GC_NORMAL, gcreason);
1237
0
}
1238
1239
void
CycleCollectedJSRuntime::JSObjectsTenured()
{
  // Called after a minor GC: any tracked wrapper that did not get tenured
  // died in the nursery, so finalize it manually here.
  for (auto iter = mNurseryObjects.Iter(); !iter.Done(); iter.Next()) {
    nsWrapperCache* cache = iter.Get();
    JSObject* wrapper = cache->GetWrapperMaybeDead();
    // NOTE(review): wrapper may be null when replaying (see the assert); the
    // ObjectIsTenured call below presumably tolerates that — confirm.
    MOZ_DIAGNOSTIC_ASSERT(wrapper || recordreplay::IsReplaying());
    if (!JS::ObjectIsTenured(wrapper)) {
      MOZ_ASSERT(!cache->PreservingWrapper());
      const JSClass* jsClass = js::GetObjectJSClass(wrapper);
      jsClass->doFinalize(nullptr, wrapper);
    }
  }

#ifdef DEBUG
  // Every preserved nursery wrapper must have survived into the tenured heap.
  for (auto iter = mPreservedNurseryObjects.Iter(); !iter.Done(); iter.Next()) {
    MOZ_ASSERT(JS::ObjectIsTenured(iter.Get().get()));
  }
#endif

  mNurseryObjects.Clear();
  mPreservedNurseryObjects.Clear();
}
1262
1263
void
CycleCollectedJSRuntime::NurseryWrapperAdded(nsWrapperCache* aCache)
{
  // Track a wrapper cache whose wrapper still lives in the nursery so
  // JSObjectsTenured() can finalize it if the wrapper dies in a minor GC.
  MOZ_ASSERT(aCache);
  MOZ_ASSERT(aCache->GetWrapperMaybeDead());
  MOZ_ASSERT(!JS::ObjectIsTenured(aCache->GetWrapperMaybeDead()));
  mNurseryObjects.InfallibleAppend(aCache);
}
1271
1272
void
CycleCollectedJSRuntime::NurseryWrapperPreserved(JSObject* aWrapper)
{
  // Root a preserved nursery wrapper; the root is dropped in
  // JSObjectsTenured() once the wrapper has been tenured.
  mPreservedNurseryObjects.InfallibleAppend(
    JS::PersistentRooted<JSObject*>(mJSRuntime, aWrapper));
}
1278
1279
void
CycleCollectedJSRuntime::DeferredFinalize(DeferredFinalizeAppendFunction aAppendFunc,
                                          DeferredFinalizeFunction aFunc,
                                          void* aThing)
{
  // Queue aThing for deferred finalization, batched per finalize function.
  // The first thing queued for a given aFunc creates the table entry (append
  // with null data); later things are appended to the existing entry's data.
  if (auto entry = mDeferredFinalizerTable.LookupForAdd(aFunc)) {
    aAppendFunc(entry.Data(), aThing);
  } else {
    entry.OrInsert(
      [aAppendFunc, aThing] () { return aAppendFunc(nullptr, aThing); });
  }
}
1291
1292
void
1293
CycleCollectedJSRuntime::DeferredFinalize(nsISupports* aSupports)
1294
7.50M
{
1295
7.50M
  typedef DeferredFinalizerImpl<nsISupports> Impl;
1296
7.50M
  DeferredFinalize(Impl::AppendDeferredFinalizePointer, Impl::DeferredFinalize,
1297
7.50M
                   aSupports);
1298
7.50M
}
1299
1300
void
1301
CycleCollectedJSRuntime::DumpJSHeap(FILE* aFile)
1302
0
{
1303
0
  JSContext* cx = CycleCollectedJSContext::Get()->Context();
1304
0
  js::DumpHeap(cx, aFile, js::CollectNurseryBeforeDump);
1305
0
}
1306
1307
IncrementalFinalizeRunnable::IncrementalFinalizeRunnable(CycleCollectedJSRuntime* aRt,
                                                         DeferredFinalizerTable& aFinalizers)
  : CancelableRunnable("IncrementalFinalizeRunnable")
  , mRuntime(aRt)
  , mFinalizeFunctionToRun(0)
  , mReleasing(false)
{
  // Steal every (function, data) pair from the runtime's table; the table is
  // left empty and this runnable now owns the pending finalizations.
  for (auto iter = aFinalizers.Iter(); !iter.Done(); iter.Next()) {
    DeferredFinalizeFunction& function = iter.Key();
    void*& data = iter.Data();

    DeferredFinalizeFunctionHolder* holder =
      mDeferredFinalizeFunctions.AppendElement();
    holder->run = function;
    holder->data = data;

    iter.Remove();
  }
}
1326
1327
IncrementalFinalizeRunnable::~IncrementalFinalizeRunnable()
{
  // The runtime's reference (cleared at the end of ReleaseNow) must already
  // be gone by the time we are destroyed.
  MOZ_ASSERT(this != mRuntime->mFinalizeRunnable);
}
1331
1332
void
IncrementalFinalizeRunnable::ReleaseNow(bool aLimited)
{
  // Run the queued finalize functions, either completely (aLimited == false)
  // or in a time-budgeted slice (aLimited == true). Guard against reentrancy:
  // a finalizer could end up calling back into here.
  if (mReleasing) {
    NS_WARNING("Re-entering ReleaseNow");
    return;
  }
  {
    mozilla::AutoRestore<bool> ar(mReleasing);
    mReleasing = true;
    MOZ_ASSERT(mDeferredFinalizeFunctions.Length() != 0,
               "We should have at least ReleaseSliceNow to run");
    MOZ_ASSERT(mFinalizeFunctionToRun < mDeferredFinalizeFunctions.Length(),
               "No more finalizers to run?");
    if (recordreplay::IsRecordingOrReplaying()) {
      // Time-based slicing is nondeterministic; run everything at once so
      // recording and replay behave identically.
      aLimited = false;
    }

    TimeDuration sliceTime = TimeDuration::FromMilliseconds(SliceMillis);
    TimeStamp started = aLimited ? TimeStamp::Now() : TimeStamp();
    bool timeout = false;
    do {
      const DeferredFinalizeFunctionHolder& function =
        mDeferredFinalizeFunctions[mFinalizeFunctionToRun];
      if (aLimited) {
        bool done = false;
        while (!timeout && !done) {
          /*
           * We don't want to read the clock too often, so we try to
           * release slices of 100 items.
           */
          done = function.run(100, function.data);
          timeout = TimeStamp::Now() - started >= sliceTime;
        }
        if (done) {
          ++mFinalizeFunctionToRun;
        }
        if (timeout) {
          // Budget exhausted; the caller (Run) re-dispatches us to finish.
          break;
        }
      } else {
        // Unlimited: drain this function's work completely.
        while (!function.run(UINT32_MAX, function.data));
        ++mFinalizeFunctionToRun;
      }
    } while (mFinalizeFunctionToRun < mDeferredFinalizeFunctions.Length());
  }

  if (mFinalizeFunctionToRun == mDeferredFinalizeFunctions.Length()) {
    // All work done: drop the runtime's reference to us.
    MOZ_ASSERT(mRuntime->mFinalizeRunnable == this);
    mDeferredFinalizeFunctions.Clear();
    // NB: This may delete this!
    mRuntime->mFinalizeRunnable = nullptr;
  }
}
1386
1387
NS_IMETHODIMP
IncrementalFinalizeRunnable::Run()
{
  AUTO_PROFILER_LABEL("IncrementalFinalizeRunnable::Run", GCCC);

  if (mRuntime->mFinalizeRunnable != this) {
    /* These items were already processed synchronously in JSGC_END. */
    MOZ_ASSERT(!mDeferredFinalizeFunctions.Length());
    return NS_OK;
  }

  TimeStamp start = TimeStamp::Now();
  // Do one time-limited slice of finalization.
  ReleaseNow(true);

  if (mDeferredFinalizeFunctions.Length()) {
    // Work remains: reschedule ourselves; if dispatch fails, finish
    // synchronously rather than leak the queued finalizations.
    nsresult rv = NS_DispatchToCurrentThread(this);
    if (NS_FAILED(rv)) {
      ReleaseNow(false);
    }
  }

  // Record how long this slice took.
  uint32_t duration = (uint32_t)((TimeStamp::Now() - start).ToMilliseconds());
  Telemetry::Accumulate(Telemetry::DEFERRED_FINALIZE_ASYNC, duration);

  return NS_OK;
}
1413
1414
void
CycleCollectedJSRuntime::FinalizeDeferredThings(CycleCollectedJSContext::DeferredFinalizeType aType)
{
  /*
   * If the previous GC created a runnable to finalize objects
   * incrementally, and if it hasn't finished yet, finish it now. We
   * don't want these to build up. We also don't want to allow any
   * existing incremental finalize runnables to run after a
   * non-incremental GC, since they are often used to detect leaks.
   */
  if (mFinalizeRunnable) {
    mFinalizeRunnable->ReleaseNow(false);
    if (mFinalizeRunnable) {
      // If we re-entered ReleaseNow, we couldn't delete mFinalizeRunnable and
      // we need to just continue processing it.
      return;
    }
  }

  // When recording or replaying, execute triggers that were activated recently
  // by mozilla::DeferredFinalize. This will populate the deferred finalizer
  // table with a consistent set of entries between the recording and replay.
  if (recordreplay::IsRecordingOrReplaying()) {
    recordreplay::ExecuteTriggers();
  }

  if (mDeferredFinalizerTable.Count() == 0) {
    // Nothing was queued for deferred finalization.
    return;
  }

  // The runnable's constructor drains mDeferredFinalizerTable.
  mFinalizeRunnable = new IncrementalFinalizeRunnable(this,
                                                      mDeferredFinalizerTable);

  // Everything should be gone now.
  MOZ_ASSERT(mDeferredFinalizerTable.Count() == 0);

  if (aType == CycleCollectedJSContext::FinalizeIncrementally) {
    // Spread the work over idle time (2500ms dispatch timeout).
    NS_IdleDispatchToCurrentThread(do_AddRef(mFinalizeRunnable), 2500);
  } else {
    // Non-incremental: finalize everything right now.
    mFinalizeRunnable->ReleaseNow(false);
    MOZ_ASSERT(!mFinalizeRunnable);
  }
}
1457
1458
const char*
CycleCollectedJSRuntime::OOMStateToString(const OOMState aOomState) const
{
  // Human-readable name for an OOMState; used as a crash-report annotation
  // value in AnnotateAndSetOutOfMemory.
  switch (aOomState) {
    case OOMState::OK:
      return "OK";
    case OOMState::Reporting:
      return "Reporting";
    case OOMState::Reported:
      return "Reported";
    case OOMState::Recovered:
      return "Recovered";
    default:
      MOZ_ASSERT_UNREACHABLE("OOMState holds an invalid value");
      return "Unknown";
  }
}
1475
1476
void
1477
CycleCollectedJSRuntime::AnnotateAndSetOutOfMemory(OOMState* aStatePtr,
1478
                                                   OOMState aNewState)
1479
0
{
1480
0
  *aStatePtr = aNewState;
1481
0
  CrashReporter::Annotation annotation = (aStatePtr == &mOutOfMemoryState)
1482
0
    ? CrashReporter::Annotation::JSOutOfMemory
1483
0
    : CrashReporter::Annotation::JSLargeAllocationFailure;
1484
0
1485
0
  CrashReporter::AnnotateCrashReport(
1486
0
    annotation, nsDependentCString(OOMStateToString(aNewState)));
1487
0
}
1488
1489
void
CycleCollectedJSRuntime::OnGC(JSContext* aContext,
                              JSGCStatus aStatus)
{
  switch (aStatus) {
    case JSGC_BEGIN:
      nsCycleCollector_prepareForGarbageCollection();
      mZonesWaitingForGC.Clear();
      break;
    case JSGC_END: {
      // A completed GC may mean we recovered from a previously-reported OOM;
      // update the crash annotations accordingly.
      if (mOutOfMemoryState == OOMState::Reported) {
        AnnotateAndSetOutOfMemory(&mOutOfMemoryState, OOMState::Recovered);
      }
      if (mLargeAllocationFailureState == OOMState::Reported) {
        AnnotateAndSetOutOfMemory(&mLargeAllocationFailureState, OOMState::Recovered);
      }

      // Do any deferred finalization of native objects. Normally we do this
      // incrementally for an incremental GC, and immediately for a
      // non-incremental GC, on the basis that the type of GC reflects how
      // urgently resources should be destroyed. However under some circumstances
      // (such as in js::InternalCallOrConstruct) we can end up running a
      // non-incremental GC when there is a pending exception, and the finalizers
      // are not set up to handle that. In that case, just run them later, after
      // we've returned to the event loop.
      bool finalizeIncrementally = JS::WasIncrementalGC(mJSRuntime) || JS_IsExceptionPending(aContext);
      FinalizeDeferredThings(finalizeIncrementally
                             ? CycleCollectedJSContext::FinalizeIncrementally
                             : CycleCollectedJSContext::FinalizeNow);

      break;
    }
    default:
      MOZ_CRASH();
  }

  // Let subclasses react to the GC status change.
  CustomGCCallback(aStatus);
}
1527
1528
void
CycleCollectedJSRuntime::OnOutOfMemory()
{
  // Annotate Reporting before the custom handler runs and Reported after, so
  // a crash during handling is attributed to the in-progress OOM.
  AnnotateAndSetOutOfMemory(&mOutOfMemoryState, OOMState::Reporting);
  CustomOutOfMemoryCallback();
  AnnotateAndSetOutOfMemory(&mOutOfMemoryState, OOMState::Reported);
}
1535
1536
void
CycleCollectedJSRuntime::SetLargeAllocationFailure(OOMState aNewState)
{
  // Record the large-allocation-failure state (and crash annotation).
  AnnotateAndSetOutOfMemory(&mLargeAllocationFailureState, aNewState);
}
1541
1542
void
1543
CycleCollectedJSRuntime::PrepareWaitingZonesForGC()
1544
0
{
1545
0
  JSContext* cx = CycleCollectedJSContext::Get()->Context();
1546
0
  if (mZonesWaitingForGC.Count() == 0) {
1547
0
    JS::PrepareForFullGC(cx);
1548
0
  } else {
1549
0
    for (auto iter = mZonesWaitingForGC.Iter(); !iter.Done(); iter.Next()) {
1550
0
      JS::PrepareZoneForGC(iter.Get()->GetKey());
1551
0
    }
1552
0
    mZonesWaitingForGC.Clear();
1553
0
  }
1554
0
}
1555
1556
void
CycleCollectedJSRuntime::EnvironmentPreparer::invoke(JS::HandleObject global,
                                                     js::ScriptEnvironmentPreparer::Closure& closure)
{
  // Run a JS-engine-initiated closure inside a properly-entered script
  // environment for the given global.
  MOZ_ASSERT(JS_IsGlobalObject(global));
  nsIGlobalObject* nativeGlobal = xpc::NativeGlobal(global);

  // Not much we can do if we simply don't have a usable global here...
  NS_ENSURE_TRUE_VOID(nativeGlobal && nativeGlobal->GetGlobalJSObject());

  AutoEntryScript aes(nativeGlobal, "JS-engine-initiated execution");

  MOZ_ASSERT(!JS_IsExceptionPending(aes.cx()));

  DebugOnly<bool> ok = closure(aes.cx());

  MOZ_ASSERT_IF(ok, !JS_IsExceptionPending(aes.cx()));

  // The AutoEntryScript will check for JS_IsExceptionPending on the
  // JSContext and report it as needed as it comes off the stack.
}
1577
1578
/* static */ CycleCollectedJSRuntime*
CycleCollectedJSRuntime::Get()
{
  // Resolve the runtime through the current thread's context; null when no
  // context exists.
  auto* context = CycleCollectedJSContext::Get();
  if (!context) {
    return nullptr;
  }
  return context->Runtime();
}
1587
1588
#ifdef MOZ_JS_DEV_ERROR_INTERCEPTOR
1589
1590
namespace js {
1591
extern void DumpValue(const JS::Value& val);
1592
}
1593
1594
void
1595
CycleCollectedJSRuntime::ErrorInterceptor::Shutdown(JSRuntime* rt)
1596
0
{
1597
0
  JS_SetErrorInterceptorCallback(rt, nullptr);
1598
0
  mThrownError.reset();
1599
0
}
1600
1601
// Capture the first interesting developer error (reference/syntax/type errors
// thrown by chrome code) so it can later be surfaced via GetRecentDevError.
/* virtual */ void
CycleCollectedJSRuntime::ErrorInterceptor::interceptError(JSContext* cx, const JS::Value& exn)
{
  if (mThrownError) {
    // We already have an error, we don't need anything more.
    return;
  }

  if (!nsContentUtils::ThreadsafeIsSystemCaller(cx)) {
    // We are only interested in chrome code.
    return;
  }

  const auto type = JS_GetErrorType(exn);
  if (!type) {
    // This is not one of the primitive error types.
    return;
  }

  switch (*type) {
    case JSExnType::JSEXN_REFERENCEERR:
    case JSExnType::JSEXN_SYNTAXERR:
    case JSExnType::JSEXN_TYPEERR:
      break;
    default:
      // Not one of the errors we are interested in.
      return;
  }

  // Now copy the details of the exception locally.
  // While copying the details of an exception could be expensive, in most runs,
  // this will be done at most once during the execution of the process, so the
  // total cost should be reasonable.
  JS::RootedValue value(cx, exn);

  ErrorDetails details;
  details.mType = *type;
  // If `exn` isn't an exception object, `ExtractErrorValues` could end up calling
  // `toString()`, which could in turn end up throwing an error. While this should
  // work, we want to avoid that complex use case.
  // Fortunately, we have already checked above that `exn` is an exception object,
  // so nothing such should happen.
  nsContentUtils::ExtractErrorValues(cx, value, details.mFilename, &details.mLine, &details.mColumn, details.mMessage);

  JS::UniqueChars buf = JS::FormatStackDump(cx, /* showArgs = */ false, /* showLocals = */ false, /* showThisProps = */ false);
  CopyUTF8toUTF16(mozilla::MakeStringSpan(buf.get()), details.mStack);

  mThrownError.emplace(std::move(details));
}
1650
1651
void
CycleCollectedJSRuntime::ClearRecentDevError()
{
  // Discard the last error captured by the error interceptor.
  mErrorInterceptor.mThrownError.reset();
}
1656
1657
bool
CycleCollectedJSRuntime::GetRecentDevError(JSContext* cx, JS::MutableHandle<JS::Value> error)
{
  // Surface the most recent captured developer error as a plain JS object
  // ({message, fileName, lineNumber, stack}). Returns true with `error`
  // untouched when no error was captured; false only on OOM/JS failure.
  if (!mErrorInterceptor.mThrownError) {
    return true;
  }

  // Create a copy of the exception.
  JS::RootedObject obj(cx, JS_NewPlainObject(cx));
  if (!obj) {
    return false;
  }

  JS::RootedValue message(cx);
  JS::RootedValue filename(cx);
  JS::RootedValue stack(cx);
  if (!ToJSValue(cx, mErrorInterceptor.mThrownError->mMessage, &message) ||
      !ToJSValue(cx, mErrorInterceptor.mThrownError->mFilename, &filename) ||
      !ToJSValue(cx, mErrorInterceptor.mThrownError->mStack, &stack)) {
    return false;
  }

  // Build the object.
  const auto FLAGS = JSPROP_READONLY | JSPROP_ENUMERATE | JSPROP_PERMANENT;
  if (!JS_DefineProperty(cx, obj, "message", message, FLAGS) ||
      !JS_DefineProperty(cx, obj, "fileName", filename, FLAGS) ||
      !JS_DefineProperty(cx, obj, "lineNumber", mErrorInterceptor.mThrownError->mLine, FLAGS) ||
      !JS_DefineProperty(cx, obj, "stack", stack, FLAGS)) {
    return false;
  }

  // Pass the result.
  error.setObject(*obj);
  return true;
}
1692
#endif // MOZ_JS_DEV_ERROR_INTERCEPTOR
1693
1694
#undef MOZ_JS_DEV_ERROR_INTERCEPTOR