Coverage Report

Created: 2025-12-11 06:40

next uncovered line (L), next uncovered region (R), next uncovered branch (B)
/src/hermes/lib/VM/Runtime.cpp
Line
Count
Source
1
/*
2
 * Copyright (c) Meta Platforms, Inc. and affiliates.
3
 *
4
 * This source code is licensed under the MIT license found in the
5
 * LICENSE file in the root directory of this source tree.
6
 */
7
8
#include "hermes/Support/StackOverflowGuard.h"
9
#define DEBUG_TYPE "vm"
10
#include "hermes/VM/Runtime.h"
11
12
#include "hermes/AST/SemValidate.h"
13
#include "hermes/BCGen/HBC/BytecodeDataProvider.h"
14
#include "hermes/BCGen/HBC/BytecodeProviderFromSrc.h"
15
#include "hermes/BCGen/HBC/SimpleBytecodeBuilder.h"
16
#include "hermes/FrontEndDefs/Builtins.h"
17
#include "hermes/InternalBytecode/InternalBytecode.h"
18
#include "hermes/Platform/Logging.h"
19
#include "hermes/Support/OSCompat.h"
20
#include "hermes/Support/PerfSection.h"
21
#include "hermes/VM/AlignedStorage.h"
22
#include "hermes/VM/BuildMetadata.h"
23
#include "hermes/VM/Callable.h"
24
#include "hermes/VM/CodeBlock.h"
25
#include "hermes/VM/Domain.h"
26
#include "hermes/VM/FillerCell.h"
27
#include "hermes/VM/HeapRuntime.h"
28
#include "hermes/VM/IdentifierTable.h"
29
#include "hermes/VM/JSArray.h"
30
#include "hermes/VM/JSError.h"
31
#include "hermes/VM/JSLib.h"
32
#include "hermes/VM/JSLib/JSLibStorage.h"
33
#include "hermes/VM/Operations.h"
34
#include "hermes/VM/OrderedHashMap.h"
35
#include "hermes/VM/PredefinedStringIDs.h"
36
#include "hermes/VM/Profiler/CodeCoverageProfiler.h"
37
#include "hermes/VM/Profiler/SamplingProfiler.h"
38
#include "hermes/VM/StackFrame-inline.h"
39
#include "hermes/VM/StackTracesTree.h"
40
#include "hermes/VM/StringView.h"
41
42
#ifndef HERMESVM_LEAN
43
#include "hermes/Support/MemoryBuffer.h"
44
#endif
45
46
#include "llvh/ADT/Hashing.h"
47
#include "llvh/ADT/ScopeExit.h"
48
#include "llvh/Support/Debug.h"
49
#include "llvh/Support/raw_ostream.h"
50
51
#ifdef HERMESVM_PROFILER_BB
52
#include "hermes/VM/IterationKind.h"
53
#include "hermes/VM/JSArray.h"
54
#include "hermes/VM/Profiler/InlineCacheProfiler.h"
55
#include "llvh/ADT/DenseMap.h"
56
#endif
57
58
#include <future>
59
#pragma GCC diagnostic push
60
61
#ifdef HERMES_COMPILER_SUPPORTS_WSHORTEN_64_TO_32
62
#pragma GCC diagnostic ignored "-Wshorten-64-to-32"
63
#endif
64
65
#ifdef __EMSCRIPTEN__
66
/// Provide implementations with weak linkage that can serve as the default in a
67
/// wasm build, while allowing them to be overridden depending on the target.
68
/// Since Emscripten will effectively LTO, these should be inlined.
69
__attribute__((__weak__)) extern "C" bool test_wasm_host_timeout() {
70
  return false;
71
}
72
__attribute__((__weak__)) extern "C" bool test_and_clear_wasm_host_timeout() {
73
  return false;
74
}
75
#endif
76
77
namespace hermes {
78
namespace vm {
79
80
namespace {
81
82
/// The maximum number of registers that can be requested in a RuntimeConfig.
83
static constexpr uint32_t kMaxSupportedNumRegisters =
84
    UINT32_MAX / sizeof(PinnedHermesValue);
85
86
#ifdef HERMES_CHECK_NATIVE_STACK
87
/// The minimum stack gap allowed from RuntimeConfig.
88
static constexpr uint32_t kMinSupportedNativeStackGap =
89
#if LLVM_ADDRESS_SANITIZER_BUILD
90
    256 * 1024;
91
#else
92
    64 * 1024;
93
#endif
94
#endif
95
96
// Only track I/O for buffers > 64 kB (which excludes things like
97
// Runtime::generateSpecialRuntimeBytecode).
98
static constexpr size_t MIN_IO_TRACKING_SIZE = 64 * 1024;
99
100
static const Predefined::Str fixedPropCacheNames[(size_t)PropCacheID::_COUNT] =
101
    {
102
#define V(id, predef) predef,
103
        PROP_CACHE_IDS(V)
104
#undef V
105
};
106
107
} // namespace
108
109
// Minidumps include stack memory, not heap memory.  If we want to be
110
// able to inspect the Runtime object in a minidump, we can do that by
111
// arranging for it to be allocated on a stack.  No existing stack is
112
// a good candidate, so we achieve this by creating a thread just to
113
// hold the Runtime.
114
class Runtime::StackRuntime {
115
 public:
116
  StackRuntime(const vm::RuntimeConfig &runtimeConfig)
117
0
      : thread_(runtimeMemoryThread, this) {
118
0
    startup_.get_future().get();
119
0
    new (runtime_) Runtime(StorageProvider::mmapProvider(), runtimeConfig);
120
0
  }
121
122
0
  ~StackRuntime() {
123
0
    runtime_->~Runtime();
124
0
    shutdown_.set_value();
125
0
    thread_.join();
126
0
  }
127
128
0
  static std::shared_ptr<Runtime> create(const RuntimeConfig &runtimeConfig) {
129
0
    auto srt = std::make_shared<StackRuntime>(runtimeConfig);
130
0
    return std::shared_ptr<Runtime>(srt, srt->runtime_);
131
0
  }
132
133
 private:
134
0
  static void runtimeMemoryThread(StackRuntime *stack) {
135
0
    ::hermes::oscompat::set_thread_name("hermes-runtime-memorythread");
136
0
    std::aligned_storage<sizeof(Runtime)>::type rt;
137
0
    stack->runtime_ = reinterpret_cast<Runtime *>(&rt);
138
0
    stack->startup_.set_value();
139
0
    stack->shutdown_.get_future().get();
140
0
  }
141
142
  // The order here matters.
143
  // * Set up the promises
144
  // * Initialize runtime_ to null
145
  // * Start the thread which uses them
146
  // * Initialize runtime_ from that thread
147
  std::promise<void> startup_;
148
  std::promise<void> shutdown_;
149
  Runtime *runtime_{nullptr};
150
  std::thread thread_;
151
};
152
153
/* static */
154
94
std::shared_ptr<Runtime> Runtime::create(const RuntimeConfig &runtimeConfig) {
155
94
#if defined(HERMESVM_CONTIGUOUS_HEAP)
156
94
  uint64_t maxHeapSize = runtimeConfig.getGCConfig().getMaxHeapSize();
157
  // Allow some extra segments for the runtime, and as a buffer for the GC.
158
94
  uint64_t providerSize =
159
94
      std::min<uint64_t>(1ULL << 32, maxHeapSize + AlignedStorage::size() * 4);
160
94
  std::shared_ptr<StorageProvider> sp =
161
94
      StorageProvider::contiguousVAProvider(providerSize);
162
94
  auto rt = HeapRuntime<Runtime>::create(sp);
163
94
  new (rt.get()) Runtime(std::move(sp), runtimeConfig);
164
94
  return rt;
165
#elif defined(HERMES_FACEBOOK_BUILD) && !defined(HERMES_FBCODE_BUILD) && \
166
    !defined(__EMSCRIPTEN__)
167
  // TODO (T84179835): Disable this once it is no longer useful for debugging.
168
  return StackRuntime::create(runtimeConfig);
169
#else
170
  return std::shared_ptr<Runtime>{
171
      new Runtime(StorageProvider::mmapProvider(), runtimeConfig)};
172
#endif
173
94
}
174
175
CallResult<PseudoHandle<>> Runtime::getNamed(
176
    Handle<JSObject> obj,
177
0
    PropCacheID id) {
178
0
  CompressedPointer clazzPtr{obj->getClassGCPtr()};
179
0
  auto *cacheEntry = &fixedPropCache_[static_cast<int>(id)];
180
0
  if (LLVM_LIKELY(cacheEntry->clazz == clazzPtr)) {
181
    // The slot is cached, so it is safe to use the Internal function.
182
0
    return createPseudoHandle(
183
0
        JSObject::getNamedSlotValueUnsafe<PropStorage::Inline::Yes>(
184
0
            *obj, *this, cacheEntry->slot)
185
0
            .unboxToHV(*this));
186
0
  }
187
0
  auto sym = Predefined::getSymbolID(fixedPropCacheNames[static_cast<int>(id)]);
188
0
  NamedPropertyDescriptor desc;
189
  // Check writable and internalSetter flags since the cache slot is shared for
190
  // get/put.
191
0
  if (LLVM_LIKELY(
192
0
          JSObject::tryGetOwnNamedDescriptorFast(*obj, *this, sym, desc)) &&
193
0
      !desc.flags.accessor && desc.flags.writable &&
194
0
      !desc.flags.internalSetter) {
195
0
    HiddenClass *clazz = vmcast<HiddenClass>(clazzPtr.getNonNull(*this));
196
0
    if (LLVM_LIKELY(!clazz->isDictionary())) {
197
      // Cache the class, id and property slot.
198
0
      cacheEntry->clazz = clazzPtr;
199
0
      cacheEntry->slot = desc.slot;
200
0
    }
201
0
    return JSObject::getNamedSlotValue(createPseudoHandle(*obj), *this, desc);
202
0
  }
203
0
  return JSObject::getNamed_RJS(obj, *this, sym);
204
0
}
205
206
ExecutionStatus Runtime::putNamedThrowOnError(
207
    Handle<JSObject> obj,
208
    PropCacheID id,
209
0
    SmallHermesValue shv) {
210
0
  CompressedPointer clazzPtr{obj->getClassGCPtr()};
211
0
  auto *cacheEntry = &fixedPropCache_[static_cast<int>(id)];
212
0
  if (LLVM_LIKELY(cacheEntry->clazz == clazzPtr)) {
213
0
    JSObject::setNamedSlotValueUnsafe<PropStorage::Inline::Yes>(
214
0
        *obj, *this, cacheEntry->slot, shv);
215
0
    return ExecutionStatus::RETURNED;
216
0
  }
217
0
  auto sym = Predefined::getSymbolID(fixedPropCacheNames[static_cast<int>(id)]);
218
0
  NamedPropertyDescriptor desc;
219
0
  if (LLVM_LIKELY(
220
0
          JSObject::tryGetOwnNamedDescriptorFast(*obj, *this, sym, desc)) &&
221
0
      !desc.flags.accessor && desc.flags.writable &&
222
0
      !desc.flags.internalSetter) {
223
0
    HiddenClass *clazz = vmcast<HiddenClass>(clazzPtr.getNonNull(*this));
224
0
    if (LLVM_LIKELY(!clazz->isDictionary())) {
225
      // Cache the class and property slot.
226
0
      cacheEntry->clazz = clazzPtr;
227
0
      cacheEntry->slot = desc.slot;
228
0
    }
229
0
    JSObject::setNamedSlotValueUnsafe(*obj, *this, desc.slot, shv);
230
0
    return ExecutionStatus::RETURNED;
231
0
  }
232
0
  Handle<> value = makeHandle(shv.unboxToHV(*this));
233
0
  return JSObject::putNamed_RJS(
234
0
             obj, *this, sym, value, PropOpFlags().plusThrowOnError())
235
0
      .getStatus();
236
0
}
237
238
Runtime::Runtime(
239
    std::shared_ptr<StorageProvider> provider,
240
    const RuntimeConfig &runtimeConfig)
241
    // The initial heap size can't be larger than the max.
242
94
    : enableEval(runtimeConfig.getEnableEval()),
243
94
      verifyEvalIR(runtimeConfig.getVerifyEvalIR()),
244
94
      optimizedEval(runtimeConfig.getOptimizedEval()),
245
94
      asyncBreakCheckInEval(runtimeConfig.getAsyncBreakCheckInEval()),
246
94
      enableBlockScopingInEval(runtimeConfig.getEnableBlockScoping()),
247
94
      traceMode(runtimeConfig.getSynthTraceMode()),
248
94
      heapStorage_(
249
94
          *this,
250
94
          *this,
251
94
          runtimeConfig.getGCConfig(),
252
94
          runtimeConfig.getCrashMgr(),
253
94
          std::move(provider),
254
94
          runtimeConfig.getVMExperimentFlags()),
255
94
      hasES6Promise_(runtimeConfig.getES6Promise()),
256
94
      hasES6Proxy_(runtimeConfig.getES6Proxy()),
257
94
      hasES6Class_(runtimeConfig.getES6Class()),
258
94
      hasIntl_(runtimeConfig.getIntl()),
259
94
      hasArrayBuffer_(runtimeConfig.getArrayBuffer()),
260
94
      hasMicrotaskQueue_(runtimeConfig.getMicrotaskQueue()),
261
94
      shouldRandomizeMemoryLayout_(runtimeConfig.getRandomizeMemoryLayout()),
262
94
      bytecodeWarmupPercent_(runtimeConfig.getBytecodeWarmupPercent()),
263
94
      trackIO_(runtimeConfig.getTrackIO()),
264
94
      vmExperimentFlags_(runtimeConfig.getVMExperimentFlags()),
265
94
      jsLibStorage_(createJSLibStorage()),
266
      stackPointer_(),
267
94
      crashMgr_(runtimeConfig.getCrashMgr()),
268
#ifdef HERMES_CHECK_NATIVE_STACK
269
      overflowGuard_(
270
94
          StackOverflowGuard::nativeStackGuard(
271
94
              std::max(
272
94
                  runtimeConfig.getNativeStackGap(),
273
94
                  kMinSupportedNativeStackGap))),
274
#else
275
      overflowGuard_(
276
          StackOverflowGuard::depthCounterGuard(
277
              Runtime::MAX_NATIVE_CALL_FRAME_DEPTH)),
278
#endif
279
      crashCallbackKey_(
280
94
          crashMgr_->registerCallback([this](int fd) { crashCallback(fd); })),
281
94
      codeCoverageProfiler_(std::make_unique<CodeCoverageProfiler>(*this)),
282
94
      gcEventCallback_(runtimeConfig.getGCConfig().getCallback()) {
283
94
  assert(
284
94
      (void *)this == (void *)(HandleRootOwner *)this &&
285
94
      "cast to HandleRootOwner should be no-op");
286
#ifdef HERMES_FACEBOOK_BUILD
287
  const bool isSnapshot = std::strstr(__FILE__, "hermes-snapshot");
288
  crashMgr_->setCustomData("HermesIsSnapshot", isSnapshot ? "true" : "false");
289
#endif
290
94
  crashMgr_->registerMemory(this, sizeof(Runtime));
291
94
  auto maxNumRegisters = runtimeConfig.getMaxNumRegisters();
292
94
  if (LLVM_UNLIKELY(maxNumRegisters > kMaxSupportedNumRegisters)) {
293
0
    hermes_fatal("RuntimeConfig maxNumRegisters too big");
294
0
  }
295
94
  registerStackStart_ = runtimeConfig.getRegisterStack();
296
94
  if (!registerStackStart_) {
297
    // registerStackAllocation_ should not be allocated with new, because then
298
    // default constructors would run for the whole stack space.
299
    // Round up to page size as required by vm_allocate.
300
94
    const uint32_t numBytesForRegisters = llvh::alignTo(
301
94
        sizeof(PinnedHermesValue) * maxNumRegisters, oscompat::page_size());
302
94
    auto result = oscompat::vm_allocate(numBytesForRegisters);
303
94
    if (!result) {
304
0
      hermes_fatal("Failed to allocate register stack", result.getError());
305
0
    }
306
94
    registerStackStart_ = static_cast<PinnedHermesValue *>(result.get());
307
94
    registerStackAllocation_ = {registerStackStart_, numBytesForRegisters};
308
94
    crashMgr_->registerMemory(registerStackStart_, numBytesForRegisters);
309
94
  }
310
311
94
  registerStackEnd_ = registerStackStart_ + maxNumRegisters;
312
94
  if (shouldRandomizeMemoryLayout_) {
313
0
    const unsigned bytesOff = std::random_device()() % oscompat::page_size();
314
0
    registerStackStart_ += bytesOff / sizeof(PinnedHermesValue);
315
0
    assert(
316
0
        registerStackEnd_ >= registerStackStart_ && "register stack too small");
317
0
  }
318
94
  stackPointer_ = registerStackStart_;
319
320
  // Setup the "root" stack frame.
321
94
  setCurrentFrameToTopOfStack();
322
  // Allocate the "reserved" registers in the root frame.
323
94
  allocStack(
324
94
      StackFrameLayout::CalleeExtraRegistersAtStart,
325
94
      HermesValue::encodeUndefinedValue());
326
327
  // Initialize Predefined Strings.
328
  // This function does not do any allocations.
329
94
  initPredefinedStrings();
330
  // Initialize special code blocks pointing to their own runtime module.
331
  // specialCodeBlockRuntimeModule_ will be owned by runtimeModuleList_.
332
94
  RuntimeModuleFlags flags;
333
94
  flags.hidesEpilogue = true;
334
94
  specialCodeBlockDomain_ = Domain::create(*this).getHermesValue();
335
94
  specialCodeBlockRuntimeModule_ = RuntimeModule::createUninitialized(
336
94
      *this, Handle<Domain>::vmcast(&specialCodeBlockDomain_), flags);
337
94
  assert(
338
94
      &runtimeModuleList_.back() == specialCodeBlockRuntimeModule_ &&
339
94
      "specialCodeBlockRuntimeModule_ not added to runtimeModuleList_");
340
341
  // At this point, allocations can begin, as all the roots are markable.
342
343
  // Initialize the pre-allocated character strings.
344
94
  initCharacterStrings();
345
346
94
  GCScope scope(*this);
347
348
  // Explicitly initialize the specialCodeBlockRuntimeModule_ without CJS
349
  // modules.
350
94
  specialCodeBlockRuntimeModule_->initializeWithoutCJSModulesMayAllocate(
351
94
      hbc::BCProviderFromBuffer::createBCProviderFromBuffer(
352
94
          generateSpecialRuntimeBytecode())
353
94
          .first);
354
94
  emptyCodeBlock_ = specialCodeBlockRuntimeModule_->getCodeBlockMayAllocate(0);
355
94
  returnThisCodeBlock_ =
356
94
      specialCodeBlockRuntimeModule_->getCodeBlockMayAllocate(1);
357
358
  // Initialize the root hidden class and its variants.
359
94
  {
360
94
    MutableHandle<HiddenClass> clazz(
361
94
        *this,
362
94
        vmcast<HiddenClass>(
363
94
            ignoreAllocationFailure(HiddenClass::createRoot(*this))));
364
94
    rootClazzes_[0] = clazz.getHermesValue();
365
752
    for (unsigned i = 1; i <= InternalProperty::NumAnonymousInternalProperties;
366
658
         ++i) {
367
658
      auto addResult = HiddenClass::reserveSlot(clazz, *this);
368
658
      assert(
369
658
          addResult != ExecutionStatus::EXCEPTION &&
370
658
          "Could not possibly grow larger than the limit");
371
658
      clazz = *addResult->first;
372
658
      rootClazzes_[i] = clazz.getHermesValue();
373
658
    }
374
94
  }
375
376
94
  global_ =
377
94
      JSObject::create(*this, makeNullHandle<JSObject>()).getHermesValue();
378
379
94
  JSLibFlags jsLibFlags{};
380
94
  jsLibFlags.enableHermesInternal = runtimeConfig.getEnableHermesInternal();
381
94
  jsLibFlags.enableHermesInternalTestMethods =
382
94
      runtimeConfig.getEnableHermesInternalTestMethods();
383
94
  initGlobalObject(*this, jsLibFlags);
384
385
  // Once the global object has been initialized, populate native builtins to
386
  // the builtins table.
387
94
  initNativeBuiltins();
388
389
  // Set the prototype of the global object to the standard object prototype,
390
  // which has now been defined.
391
94
  ignoreAllocationFailure(
392
94
      JSObject::setParent(
393
94
          vmcast<JSObject>(global_),
394
94
          *this,
395
94
          vmcast<JSObject>(objectPrototype),
396
94
          PropOpFlags().plusThrowOnError()));
397
398
94
  symbolRegistry_.init(*this);
399
400
  // BB Profiler need to be ready before running internal bytecode.
401
#ifdef HERMESVM_PROFILER_BB
402
  inlineCacheProfiler_.setHiddenClassArray(
403
      ignoreAllocationFailure(JSArray::create(*this, 4, 4)).get());
404
#endif
405
406
94
  codeCoverageProfiler_->disable();
407
  // Execute our internal bytecode.
408
94
  auto jsBuiltinsObj = runInternalBytecode();
409
94
  codeCoverageProfiler_->restore();
410
411
  // Populate JS builtins returned from internal bytecode to the builtins table.
412
94
  initJSBuiltins(builtins_, jsBuiltinsObj);
413
414
94
#if HERMESVM_SAMPLING_PROFILER_AVAILABLE
415
94
  if (runtimeConfig.getEnableSampleProfiling())
416
0
    samplingProfiler = SamplingProfiler::create(*this);
417
94
#endif // HERMESVM_SAMPLING_PROFILER_AVAILABLE
418
419
94
  LLVM_DEBUG(llvh::dbgs() << "Runtime initialized\n");
420
94
}
421
422
94
Runtime::~Runtime() {
423
94
#if HERMESVM_SAMPLING_PROFILER_AVAILABLE
424
94
  samplingProfiler.reset();
425
94
#endif // HERMESVM_SAMPLING_PROFILER_AVAILABLE
426
427
94
  getHeap().finalizeAll();
428
  // Remove inter-module dependencies so we can delete them in any order.
429
269
  for (auto &module : runtimeModuleList_) {
430
269
    module.prepareForDestruction();
431
269
  }
432
  // All RuntimeModules must be destroyed before the next assertion, to untrack
433
  // all native IDs related to it (e.g., CodeBlock).
434
363
  while (!runtimeModuleList_.empty()) {
435
    // Calling delete will automatically remove it from the list.
436
269
    delete &runtimeModuleList_.back();
437
269
  }
438
  // Now that all objects are finalized, there shouldn't be any native memory
439
  // keys left in the ID tracker for memory profiling. Assert that the only IDs
440
  // left are JS heap pointers.
441
94
  assert(
442
94
      !getHeap().getIDTracker().hasNativeIDs() &&
443
94
      "A pointer is left in the ID tracker that is from non-JS memory. "
444
94
      "Was untrackNative called?");
445
94
  crashMgr_->unregisterCallback(crashCallbackKey_);
446
94
  if (!registerStackAllocation_.empty()) {
447
94
    crashMgr_->unregisterMemory(registerStackAllocation_.data());
448
94
    oscompat::vm_free(
449
94
        registerStackAllocation_.data(), registerStackAllocation_.size());
450
94
  }
451
452
94
  assert(
453
94
      !formattingStackTrace_ &&
454
94
      "Runtime is being destroyed while exception is being formatted");
455
456
  // Unwatch the runtime from the time limit monitor in case the latter still
457
  // has any references to this.
458
94
  if (timeLimitMonitor) {
459
94
    timeLimitMonitor->unwatchRuntime(*this);
460
94
  }
461
462
94
  crashMgr_->unregisterMemory(this);
463
94
}
464
465
/// A helper class used to measure the duration of GC marking different roots.
466
/// It accumulates the times in \c Runtime::markRootsPhaseTimes[] and \c
467
/// Runtime::totalMarkRootsTime.
468
class Runtime::MarkRootsPhaseTimer {
469
 public:
470
  MarkRootsPhaseTimer(Runtime &rt, RootAcceptor::Section section)
471
1.51k
      : rt_(rt), section_(section), start_(std::chrono::steady_clock::now()) {
472
1.51k
    if (static_cast<unsigned>(section) == 0) {
473
      // The first phase; record the start as the start of markRoots.
474
101
      rt_.startOfMarkRoots_ = start_;
475
101
    }
476
1.51k
  }
477
1.51k
  ~MarkRootsPhaseTimer() {
478
1.51k
    auto tp = std::chrono::steady_clock::now();
479
1.51k
    std::chrono::duration<double> elapsed = (tp - start_);
480
1.51k
    start_ = tp;
481
1.51k
    unsigned index = static_cast<unsigned>(section_);
482
1.51k
    rt_.markRootsPhaseTimes_[index] += elapsed.count();
483
1.51k
    if (index + 1 ==
484
1.51k
        static_cast<unsigned>(RootAcceptor::Section::NumSections)) {
485
101
      std::chrono::duration<double> totalElapsed = (tp - rt_.startOfMarkRoots_);
486
101
      rt_.totalMarkRootsTime_ += totalElapsed.count();
487
101
    }
488
1.51k
  }
489
490
 private:
491
  Runtime &rt_;
492
  RootAcceptor::Section section_;
493
  std::chrono::time_point<std::chrono::steady_clock> start_;
494
};
495
496
void Runtime::markRoots(
497
    RootAndSlotAcceptorWithNames &acceptor,
498
101
    bool markLongLived) {
499
  // The body of markRoots should be sequence of blocks, each of which starts
500
  // with the declaration of an appropriate RootSection instance.
501
101
  {
502
101
    MarkRootsPhaseTimer timer(*this, RootAcceptor::Section::Registers);
503
101
    acceptor.beginRootSection(RootAcceptor::Section::Registers);
504
283k
    for (auto *p = registerStackStart_, *e = stackPointer_; p != e; ++p)
505
283k
      acceptor.accept(*p);
506
101
    acceptor.endRootSection();
507
101
  }
508
509
101
  {
510
101
    MarkRootsPhaseTimer timer(
511
101
        *this, RootAcceptor::Section::RuntimeInstanceVars);
512
101
    acceptor.beginRootSection(RootAcceptor::Section::RuntimeInstanceVars);
513
101
    for (auto &clazz : rootClazzes_)
514
808
      acceptor.accept(clazz, "rootClass");
515
505
#define RUNTIME_HV_FIELD_INSTANCE(name) acceptor.accept((name), #name);
516
101
#include "hermes/VM/RuntimeHermesValueFields.def"
517
101
#undef RUNTIME_HV_FIELD_INSTANCE
518
101
    acceptor.endRootSection();
519
101
  }
520
521
101
  {
522
101
    MarkRootsPhaseTimer timer(*this, RootAcceptor::Section::RuntimeModules);
523
101
    acceptor.beginRootSection(RootAcceptor::Section::RuntimeModules);
524
101
#define RUNTIME_HV_FIELD_RUNTIMEMODULE(name) acceptor.accept(name);
525
101
#include "hermes/VM/RuntimeHermesValueFields.def"
526
101
#undef RUNTIME_HV_FIELD_RUNTIMEMODULE
527
101
    for (auto &rm : runtimeModuleList_)
528
575
      rm.markRoots(acceptor, markLongLived);
529
101
    acceptor.endRootSection();
530
101
  }
531
532
101
  {
533
101
    MarkRootsPhaseTimer timer(*this, RootAcceptor::Section::CharStrings);
534
101
    acceptor.beginRootSection(RootAcceptor::Section::CharStrings);
535
101
    if (markLongLived) {
536
68
      for (auto &hv : charStrings_)
537
17.4k
        acceptor.accept(hv);
538
68
    }
539
101
    acceptor.endRootSection();
540
101
  }
541
542
101
  {
543
101
    MarkRootsPhaseTimer timer(
544
101
        *this, RootAcceptor::Section::StringCycleCheckVisited);
545
101
    acceptor.beginRootSection(RootAcceptor::Section::StringCycleCheckVisited);
546
101
    for (auto *&ptr : stringCycleCheckVisited_)
547
0
      acceptor.acceptPtr(ptr);
548
101
    acceptor.endRootSection();
549
101
  }
550
551
101
  {
552
101
    MarkRootsPhaseTimer timer(*this, RootAcceptor::Section::Builtins);
553
101
    acceptor.beginRootSection(RootAcceptor::Section::Builtins);
554
101
    for (Callable *&f : builtins_)
555
5.35k
      acceptor.acceptPtr(f);
556
101
    acceptor.endRootSection();
557
101
  }
558
559
101
  {
560
101
    MarkRootsPhaseTimer timer(*this, RootAcceptor::Section::Jobs);
561
101
    acceptor.beginRootSection(RootAcceptor::Section::Jobs);
562
101
    for (Callable *&f : jobQueue_)
563
0
      acceptor.acceptPtr(f);
564
101
    acceptor.endRootSection();
565
101
  }
566
567
#ifdef MARK
568
#error "Shouldn't have defined mark already"
569
#endif
570
8.58k
#define MARK(field) acceptor.accept((field), #field)
571
101
  {
572
101
    MarkRootsPhaseTimer timer(*this, RootAcceptor::Section::Prototypes);
573
101
    acceptor.beginRootSection(RootAcceptor::Section::Prototypes);
574
    // Prototypes.
575
8.58k
#define RUNTIME_HV_FIELD_PROTOTYPE(name) MARK(name);
576
101
#include "hermes/VM/RuntimeHermesValueFields.def"
577
101
#undef RUNTIME_HV_FIELD_PROTOTYPE
578
101
    acceptor.acceptPtr(objectPrototypeRawPtr, "objectPrototype");
579
101
    acceptor.acceptPtr(functionPrototypeRawPtr, "functionPrototype");
580
101
#undef MARK
581
101
    acceptor.endRootSection();
582
101
  }
583
584
101
  {
585
101
    MarkRootsPhaseTimer timer(*this, RootAcceptor::Section::IdentifierTable);
586
101
    if (markLongLived) {
587
68
#ifdef HERMES_MEMORY_INSTRUMENTATION
588
      // Need to add nodes before the root section, and edges during the root
589
      // section.
590
68
      acceptor.provideSnapshot([this](HeapSnapshot &snap) {
591
0
        identifierTable_.snapshotAddNodes(snap);
592
0
      });
593
68
#endif
594
68
      acceptor.beginRootSection(RootAcceptor::Section::IdentifierTable);
595
68
      identifierTable_.markIdentifiers(acceptor, getHeap());
596
68
#ifdef HERMES_MEMORY_INSTRUMENTATION
597
68
      acceptor.provideSnapshot([this](HeapSnapshot &snap) {
598
0
        identifierTable_.snapshotAddEdges(snap);
599
0
      });
600
68
#endif
601
68
      acceptor.endRootSection();
602
68
    }
603
101
  }
604
605
101
  {
606
101
    MarkRootsPhaseTimer timer(*this, RootAcceptor::Section::GCScopes);
607
101
    acceptor.beginRootSection(RootAcceptor::Section::GCScopes);
608
101
    markGCScopes(acceptor);
609
101
    acceptor.endRootSection();
610
101
  }
611
612
101
  {
613
101
    MarkRootsPhaseTimer timer(*this, RootAcceptor::Section::SymbolRegistry);
614
101
    acceptor.beginRootSection(RootAcceptor::Section::SymbolRegistry);
615
101
    symbolRegistry_.markRoots(acceptor);
616
101
    acceptor.endRootSection();
617
101
  }
618
101
#if HERMESVM_SAMPLING_PROFILER_AVAILABLE
619
101
  {
620
101
    MarkRootsPhaseTimer timer(*this, RootAcceptor::Section::SamplingProfiler);
621
101
    acceptor.beginRootSection(RootAcceptor::Section::SamplingProfiler);
622
101
    if (samplingProfiler) {
623
0
      samplingProfiler->markRoots(acceptor);
624
0
    }
625
101
    acceptor.endRootSection();
626
101
  }
627
101
#endif
628
629
101
  {
630
101
    MarkRootsPhaseTimer timer(
631
101
        *this, RootAcceptor::Section::CodeCoverageProfiler);
632
101
    acceptor.beginRootSection(RootAcceptor::Section::CodeCoverageProfiler);
633
101
    if (codeCoverageProfiler_) {
634
101
      codeCoverageProfiler_->markRoots(acceptor);
635
101
    }
636
#ifdef HERMESVM_PROFILER_BB
637
    auto *&hiddenClassArray = inlineCacheProfiler_.getHiddenClassArray();
638
    if (hiddenClassArray) {
639
      acceptor.acceptPtr(hiddenClassArray);
640
    }
641
#endif
642
101
    acceptor.endRootSection();
643
101
  }
644
645
101
  {
646
101
    MarkRootsPhaseTimer timer(*this, RootAcceptor::Section::Custom);
647
    // Define nodes before the root section starts.
648
101
    for (auto &fn : customSnapshotNodeFuncs_) {
649
101
      acceptor.provideSnapshot(fn);
650
101
    }
651
101
    acceptor.beginRootSection(RootAcceptor::Section::Custom);
652
101
    for (auto &fn : customMarkRootFuncs_)
653
101
      fn(&getHeap(), acceptor);
654
    // Define edges while inside the root section.
655
101
    for (auto &fn : customSnapshotEdgeFuncs_) {
656
101
      acceptor.provideSnapshot(fn);
657
101
    }
658
101
    acceptor.endRootSection();
659
101
  }
660
101
}
661
662
101
void Runtime::markWeakRoots(WeakRootAcceptor &acceptor, bool markLongLived) {
663
101
  MarkRootsPhaseTimer timer(*this, RootAcceptor::Section::WeakRefs);
664
101
  acceptor.beginRootSection(RootAcceptor::Section::WeakRefs);
665
  // Call this first so that it can remove RuntimeModules whose owning Domain is
666
  // dead from runtimeModuleList_, before marking long-lived WeakRoots in them.
667
101
  markDomainRefInRuntimeModules(acceptor);
668
101
  if (markLongLived) {
669
68
    for (auto &entry : fixedPropCache_) {
670
68
      acceptor.acceptWeak(entry.clazz);
671
68
    }
672
68
    for (auto &rm : runtimeModuleList_)
673
340
      rm.markLongLivedWeakRoots(acceptor);
674
68
  }
675
101
  for (auto &fn : customMarkWeakRootFuncs_)
676
101
    fn(&getHeap(), acceptor);
677
101
  acceptor.endRootSection();
678
101
}
679
680
101
void Runtime::markDomainRefInRuntimeModules(WeakRootAcceptor &acceptor) {
681
101
  std::vector<RuntimeModule *> modulesToDelete;
682
575
  for (auto &rm : runtimeModuleList_) {
683
575
    rm.markDomainRef(acceptor);
684
    // If the owning domain is dead, store the RuntimeModule pointer for
685
    // destruction later.
686
575
    if (LLVM_UNLIKELY(rm.isOwningDomainDead())) {
687
      // Prepare these RuntimeModules for destruction so that we don't rely on
688
      // their relative order in runtimeModuleList_.
689
136
      rm.prepareForDestruction();
690
136
      modulesToDelete.push_back(&rm);
691
136
    }
692
575
  }
693
694
  // We need to destroy these RuntimeModules after we call
695
  // prepareForDestruction() on all of them, otherwise, it may cause
696
  // use-after-free when checking the ownership of a CodeBlock in the destructor
697
  // of a RuntimeModule (which may refer a CodeBlock that is owned and deleted
698
  // by another RuntimeModule).
699
136
  for (auto *rm : modulesToDelete) {
700
    // Calling delete will automatically remove it from the list.
701
136
    delete rm;
702
136
  }
703
101
}
704
705
void Runtime::markRootsForCompleteMarking(
706
1
    RootAndSlotAcceptorWithNames &acceptor) {
707
1
#if HERMESVM_SAMPLING_PROFILER_AVAILABLE
708
1
  MarkRootsPhaseTimer timer(*this, RootAcceptor::Section::SamplingProfiler);
709
1
  acceptor.beginRootSection(RootAcceptor::Section::SamplingProfiler);
710
1
  if (samplingProfiler) {
711
0
    samplingProfiler->markRootsForCompleteMarking(acceptor);
712
0
  }
713
1
#endif // HERMESVM_SAMPLING_PROFILER_AVAILABLE
714
1
  acceptor.endRootSection();
715
1
}
716
717
void Runtime::visitIdentifiers(
718
0
    const std::function<void(SymbolID, const StringPrimitive *)> &acceptor) {
719
0
  identifierTable_.visitIdentifiers(acceptor);
720
0
}
721
722
0
std::string Runtime::convertSymbolToUTF8(SymbolID id) {
723
0
  return identifierTable_.convertSymbolToUTF8(id);
724
0
}
725
726
0
void Runtime::printRuntimeGCStats(JSONEmitter &json) const {
727
0
  const unsigned kNumPhases =
728
0
      static_cast<unsigned>(RootAcceptor::Section::NumSections);
729
0
#define ROOT_SECTION(phase) "MarkRoots_" #phase,
730
0
  static const char *markRootsPhaseNames[kNumPhases] = {
731
0
#include "hermes/VM/RootSections.def"
732
0
  };
733
0
#undef ROOT_SECTION
734
0
  json.emitKey("runtime");
735
0
  json.openDict();
736
0
  json.emitKeyValue("totalMarkRootsTime", formatSecs(totalMarkRootsTime_).secs);
737
0
  for (unsigned phaseNum = 0; phaseNum < kNumPhases; phaseNum++) {
738
0
    json.emitKeyValue(
739
0
        std::string(markRootsPhaseNames[phaseNum]) + "Time",
740
0
        formatSecs(markRootsPhaseTimes_[phaseNum]).secs);
741
0
  }
742
0
  json.closeDict();
743
0
}
744
745
0
void Runtime::printHeapStats(llvh::raw_ostream &os) {
746
0
  getHeap().printAllCollectedStats(os);
747
0
#ifndef NDEBUG
748
0
  printArrayCensus(os);
749
0
#endif
750
0
  if (trackIO_) {
751
0
    getIOTrackingInfoJSON(os);
752
0
  }
753
0
}
754
755
0
void Runtime::getIOTrackingInfoJSON(llvh::raw_ostream &os) {
756
0
  JSONEmitter json(os);
757
0
  json.openArray();
758
0
  for (auto &module : getRuntimeModules()) {
759
0
    auto tracker = module.getBytecode()->getPageAccessTracker();
760
0
    if (tracker) {
761
0
      json.openDict();
762
0
      json.emitKeyValue("url", module.getSourceURL());
763
0
      json.emitKey("tracking_info");
764
0
      tracker->getJSONStats(json);
765
0
      json.closeDict();
766
0
    }
767
0
  }
768
0
  json.closeArray();
769
0
}
770
771
405
void Runtime::removeRuntimeModule(RuntimeModule *rm) {
772
405
#ifdef HERMES_ENABLE_DEBUGGER
773
405
  debugger_.willUnloadModule(rm);
774
405
#endif
775
405
  runtimeModuleList_.remove(*rm);
776
405
}
777
778
#ifndef NDEBUG
779
0
void Runtime::printArrayCensus(llvh::raw_ostream &os) {
780
  // Do array capacity histogram.
781
  // Map from array size to number of arrays that are that size.
782
  // Arrays includes ArrayStorage and SegmentedArray.
783
0
  std::map<std::pair<size_t, size_t>, std::pair<size_t, size_t>>
784
0
      arraySizeToCountAndWastedSlots;
785
0
  auto printTable = [&os](
786
0
                        const std::map<
787
0
                            std::pair<size_t, size_t>,
788
0
                            std::pair<size_t, size_t>>
789
0
                            &arraySizeToCountAndWastedSlots) {
790
0
    os << llvh::format(
791
0
        "%8s %8s %8s %10s %15s %15s %15s %20s %25s\n",
792
0
        (const char *)"Capacity",
793
0
        (const char *)"Sizeof",
794
0
        (const char *)"Count",
795
0
        (const char *)"Count %",
796
0
        (const char *)"Cum Count %",
797
0
        (const char *)"Bytes %",
798
0
        (const char *)"Cum Bytes %",
799
0
        (const char *)"Wasted Slots %",
800
0
        (const char *)"Cum Wasted Slots %");
801
0
    size_t totalBytes = 0;
802
0
    size_t totalCount = 0;
803
0
    size_t totalWastedSlots = 0;
804
0
    for (const auto &p : arraySizeToCountAndWastedSlots) {
805
0
      totalBytes += p.first.second * p.second.first;
806
0
      totalCount += p.second.first;
807
0
      totalWastedSlots += p.second.second;
808
0
    }
809
0
    size_t cumulativeBytes = 0;
810
0
    size_t cumulativeCount = 0;
811
0
    size_t cumulativeWastedSlots = 0;
812
0
    for (const auto &p : arraySizeToCountAndWastedSlots) {
813
0
      cumulativeBytes += p.first.second * p.second.first;
814
0
      cumulativeCount += p.second.first;
815
0
      cumulativeWastedSlots += p.second.second;
816
0
      os << llvh::format(
817
0
          "%8d %8d %8d %9.2f%% %14.2f%% %14.2f%% %14.2f%% %19.2f%% %24.2f%%\n",
818
0
          p.first.first,
819
0
          p.first.second,
820
0
          p.second.first,
821
0
          p.second.first * 100.0 / totalCount,
822
0
          cumulativeCount * 100.0 / totalCount,
823
0
          p.first.second * p.second.first * 100.0 / totalBytes,
824
0
          cumulativeBytes * 100.0 / totalBytes,
825
0
          totalWastedSlots ? p.second.second * 100.0 / totalWastedSlots : 100.0,
826
0
          totalWastedSlots ? cumulativeWastedSlots * 100.0 / totalWastedSlots
827
0
                           : 100.0);
828
0
    }
829
0
    os << "\n";
830
0
  };
831
832
0
  os << "Array Census for ArrayStorage:\n";
833
0
  getHeap().forAllObjs([&arraySizeToCountAndWastedSlots](GCCell *cell) {
834
0
    if (cell->getKind() == CellKind::ArrayStorageKind) {
835
0
      ArrayStorage *arr = vmcast<ArrayStorage>(cell);
836
0
      const auto key = std::make_pair(arr->capacity(), arr->getAllocatedSize());
837
0
      arraySizeToCountAndWastedSlots[key].first++;
838
0
      arraySizeToCountAndWastedSlots[key].second +=
839
0
          arr->capacity() - arr->size();
840
0
    }
841
0
  });
842
0
  if (arraySizeToCountAndWastedSlots.empty()) {
843
0
    os << "\tNo ArrayStorages\n\n";
844
0
  } else {
845
0
    printTable(arraySizeToCountAndWastedSlots);
846
0
  }
847
848
0
  os << "Array Census for SegmentedArray:\n";
849
0
  arraySizeToCountAndWastedSlots.clear();
850
0
  getHeap().forAllObjs([&arraySizeToCountAndWastedSlots, this](GCCell *cell) {
851
0
    if (cell->getKind() == CellKind::SegmentedArrayKind) {
852
0
      SegmentedArray *arr = vmcast<SegmentedArray>(cell);
853
0
      const auto key =
854
0
          std::make_pair(arr->totalCapacityOfSpine(), arr->getAllocatedSize());
855
0
      arraySizeToCountAndWastedSlots[key].first++;
856
0
      arraySizeToCountAndWastedSlots[key].second +=
857
0
          arr->totalCapacityOfSpine() - arr->size(*this);
858
0
    }
859
0
  });
860
0
  if (arraySizeToCountAndWastedSlots.empty()) {
861
0
    os << "\tNo SegmentedArrays\n\n";
862
0
  } else {
863
0
    printTable(arraySizeToCountAndWastedSlots);
864
0
  }
865
866
0
  os << "Array Census for Segment:\n";
867
0
  arraySizeToCountAndWastedSlots.clear();
868
0
  getHeap().forAllObjs([&arraySizeToCountAndWastedSlots](GCCell *cell) {
869
0
    if (cell->getKind() == CellKind::SegmentKind) {
870
0
      SegmentedArray::Segment *seg = vmcast<SegmentedArray::Segment>(cell);
871
0
      const auto key = std::make_pair(seg->length(), seg->getAllocatedSize());
872
0
      arraySizeToCountAndWastedSlots[key].first++;
873
0
      arraySizeToCountAndWastedSlots[key].second +=
874
0
          SegmentedArray::Segment::kMaxLength - seg->length();
875
0
    }
876
0
  });
877
0
  if (arraySizeToCountAndWastedSlots.empty()) {
878
0
    os << "\tNo Segments\n\n";
879
0
  } else {
880
0
    printTable(arraySizeToCountAndWastedSlots);
881
0
  }
882
883
0
  os << "Array Census for JSArray:\n";
884
0
  arraySizeToCountAndWastedSlots.clear();
885
0
  getHeap().forAllObjs([&arraySizeToCountAndWastedSlots, this](GCCell *cell) {
886
0
    if (JSArray *arr = dyn_vmcast<JSArray>(cell)) {
887
0
      JSArray::StorageType *storage = arr->getIndexedStorage(*this);
888
0
      const auto capacity = storage ? storage->totalCapacityOfSpine() : 0;
889
0
      const auto sz = storage ? storage->size(*this) : 0;
890
0
      const auto key = std::make_pair(capacity, arr->getAllocatedSize());
891
0
      arraySizeToCountAndWastedSlots[key].first++;
892
0
      arraySizeToCountAndWastedSlots[key].second += capacity - sz;
893
0
    }
894
0
  });
895
0
  if (arraySizeToCountAndWastedSlots.empty()) {
896
0
    os << "\tNo JSArrays\n\n";
897
0
  } else {
898
0
    printTable(arraySizeToCountAndWastedSlots);
899
0
  }
900
901
0
  os << "\n";
902
0
}
903
#endif
904
905
2
unsigned Runtime::getSymbolsEnd() const {
906
2
  return identifierTable_.getSymbolsEnd();
907
2
}
908
909
1
void Runtime::unmarkSymbols() {
910
1
  identifierTable_.unmarkSymbols();
911
1
}
912
913
1
void Runtime::freeSymbols(const llvh::BitVector &markedSymbols) {
914
1
  identifierTable_.freeUnmarkedSymbols(markedSymbols, getHeap().getIDTracker());
915
1
}
916
917
#ifdef HERMES_SLOW_DEBUG
918
480k
bool Runtime::isSymbolLive(SymbolID id) {
919
480k
  return identifierTable_.isSymbolLive(id);
920
480k
}
921
922
480k
const void *Runtime::getStringForSymbol(SymbolID id) {
923
480k
  return identifierTable_.getStringForSymbol(id);
924
480k
}
925
#endif
926
927
0
size_t Runtime::mallocSize() const {
928
  // Register stack uses mmap and RuntimeModules are tracked by their owning
929
  // Domains. So this only considers IdentifierTable size.
930
0
  return sizeof(IdentifierTable) + identifierTable_.additionalMemorySize();
931
0
}
932
933
#ifdef HERMESVM_SANITIZE_HANDLES
934
void Runtime::potentiallyMoveHeap() {
935
  // Do a dummy allocation which could force a heap move if handle sanitization
936
  // is on.
937
  FillerCell::create(
938
      *this,
939
      std::max<size_t>(
940
          heapAlignSize(sizeof(FillerCell)), GC::minAllocationSize()));
941
}
942
#endif
943
944
LLVM_ATTRIBUTE_NOINLINE
945
static CallResult<HermesValue> interpretFunctionWithRandomStack(
946
    Runtime &runtime,
947
0
    CodeBlock *globalCode) {
948
0
  static void *volatile dummy;
949
0
  const unsigned amount = std::random_device()() % oscompat::page_size();
950
  // Prevent compiler from optimizing alloca away by assigning to volatile
951
0
  dummy = alloca(amount);
952
0
  (void)dummy;
953
0
  return runtime.interpretFunction(globalCode);
954
0
}
955
956
CallResult<HermesValue> Runtime::run(
957
    llvh::StringRef code,
958
    llvh::StringRef sourceURL,
959
0
    const hbc::CompileFlags &compileFlags) {
960
#ifdef HERMESVM_LEAN
961
  return raiseEvalUnsupported(code);
962
#else
963
0
  std::unique_ptr<hermes::Buffer> buffer;
964
0
  if (compileFlags.lazy) {
965
0
    buffer.reset(new hermes::OwnedMemoryBuffer(
966
0
        llvh::MemoryBuffer::getMemBufferCopy(code)));
967
0
  } else {
968
0
    buffer.reset(
969
0
        new hermes::OwnedMemoryBuffer(llvh::MemoryBuffer::getMemBuffer(code)));
970
0
  }
971
0
  return run(std::move(buffer), sourceURL, compileFlags);
972
0
#endif
973
0
}
974
975
CallResult<HermesValue> Runtime::run(
976
    std::unique_ptr<hermes::Buffer> code,
977
    llvh::StringRef sourceURL,
978
0
    const hbc::CompileFlags &compileFlags) {
979
#ifdef HERMESVM_LEAN
980
  auto buffer = code.get();
981
  return raiseEvalUnsupported(
982
      llvh::StringRef(
983
          reinterpret_cast<const char *>(buffer->data()), buffer->size()));
984
#else
985
0
  std::unique_ptr<hbc::BCProviderFromSrc> bytecode;
986
0
  {
987
0
    PerfSection loading("Loading new JavaScript code");
988
0
    loading.addArg("url", sourceURL);
989
0
    auto bytecode_err = hbc::BCProviderFromSrc::createBCProviderFromSrc(
990
0
        std::move(code), sourceURL, compileFlags);
991
0
    if (!bytecode_err.first) {
992
0
      return raiseSyntaxError(TwineChar16(bytecode_err.second));
993
0
    }
994
0
    bytecode = std::move(bytecode_err.first);
995
0
  }
996
997
0
  PerfSection loading("Executing global function");
998
0
  RuntimeModuleFlags rmflags;
999
0
  rmflags.persistent = true;
1000
0
  return runBytecode(
1001
0
      std::move(bytecode), rmflags, sourceURL, makeNullHandle<Environment>());
1002
0
#endif
1003
0
}
1004
1005
CallResult<HermesValue> Runtime::runBytecode(
1006
    std::shared_ptr<hbc::BCProvider> &&bytecode,
1007
    RuntimeModuleFlags flags,
1008
    llvh::StringRef sourceURL,
1009
    Handle<Environment> environment,
1010
231
    Handle<> thisArg) {
1011
231
  clearThrownValue();
1012
1013
231
  auto globalFunctionIndex = bytecode->getGlobalFunctionIndex();
1014
1015
231
  if (bytecode->getBytecodeOptions().staticBuiltins && !builtinsFrozen_) {
1016
0
    if (assertBuiltinsUnmodified() == ExecutionStatus::EXCEPTION) {
1017
0
      return ExecutionStatus::EXCEPTION;
1018
0
    }
1019
0
    freezeBuiltins();
1020
0
    assert(builtinsFrozen_ && "Builtins must be frozen by now.");
1021
0
  }
1022
1023
231
  if (bytecode->getBytecodeOptions().hasAsync && !hasES6Promise_) {
1024
0
    return raiseTypeError(
1025
0
        "Cannot execute a bytecode having async functions when Promise is disabled.");
1026
0
  }
1027
1028
231
  if (flags.persistent) {
1029
151
    persistentBCProviders_.push_back(bytecode);
1030
151
    if (bytecodeWarmupPercent_ > 0) {
1031
      // Start the warmup thread for this bytecode if it's a buffer.
1032
0
      bytecode->startWarmup(bytecodeWarmupPercent_);
1033
0
    }
1034
151
    if (getVMExperimentFlags() & experiments::MAdviseRandom) {
1035
0
      bytecode->madvise(oscompat::MAdvice::Random);
1036
151
    } else if (getVMExperimentFlags() & experiments::MAdviseSequential) {
1037
0
      bytecode->madvise(oscompat::MAdvice::Sequential);
1038
0
    }
1039
151
    if (getVMExperimentFlags() & experiments::VerifyBytecodeChecksum) {
1040
0
      llvh::ArrayRef<uint8_t> buf = bytecode->getRawBuffer();
1041
      // buf is empty for non-buffer providers
1042
0
      if (!buf.empty()) {
1043
0
        if (!hbc::BCProviderFromBuffer::bytecodeHashIsValid(buf)) {
1044
0
          const char *msg = "Bytecode checksum verification failed";
1045
0
          hermesLog("Hermes", "%s", msg);
1046
0
          hermes_fatal(msg);
1047
0
        }
1048
0
      }
1049
0
    }
1050
151
  }
1051
  // Only track I/O for buffers > 64 kB (which excludes things like
1052
  // Runtime::generateSpecialRuntimeBytecode).
1053
231
  if (flags.persistent && trackIO_ &&
1054
0
      bytecode->getRawBuffer().size() > MIN_IO_TRACKING_SIZE) {
1055
0
    bytecode->startPageAccessTracker();
1056
0
    if (!bytecode->getPageAccessTracker()) {
1057
0
      hermesLog(
1058
0
          "Hermes",
1059
0
          "Failed to start bytecode I/O instrumentation, "
1060
0
          "maybe not supported on this platform.");
1061
0
    }
1062
0
  }
1063
1064
231
  GCScope scope(*this);
1065
1066
231
  Handle<Domain> domain = makeHandle(Domain::create(*this));
1067
1068
231
  auto runtimeModuleRes = RuntimeModule::create(
1069
231
      *this, domain, nextScriptId_++, std::move(bytecode), flags, sourceURL);
1070
231
  if (LLVM_UNLIKELY(runtimeModuleRes == ExecutionStatus::EXCEPTION)) {
1071
0
    return ExecutionStatus::EXCEPTION;
1072
0
  }
1073
231
  auto runtimeModule = *runtimeModuleRes;
1074
231
  auto globalCode = runtimeModule->getCodeBlockMayAllocate(globalFunctionIndex);
1075
1076
231
#ifdef HERMES_ENABLE_DEBUGGER
1077
  // If the debugger is configured to pause on load, give it a chance to pause.
1078
231
  getDebugger().willExecuteModule(runtimeModule, globalCode);
1079
231
#endif
1080
1081
231
  if (runtimeModule->hasCJSModules()) {
1082
0
    auto requireContext = RequireContext::create(
1083
0
        *this, domain, getPredefinedStringHandle(Predefined::dotSlash));
1084
0
    return runRequireCall(
1085
0
        *this,
1086
0
        requireContext,
1087
0
        domain,
1088
0
        *domain->getCJSModuleOffset(*this, domain->getCJSEntryModuleID()));
1089
231
  } else if (runtimeModule->hasCJSModulesStatic()) {
1090
0
    return runRequireCall(
1091
0
        *this,
1092
0
        makeNullHandle<RequireContext>(),
1093
0
        domain,
1094
0
        *domain->getCJSModuleOffset(*this, domain->getCJSEntryModuleID()));
1095
231
  } else {
1096
    // Create a JSFunction which will reference count the runtime module.
1097
    // Note that its handle gets registered in the scope, so we don't need to
1098
    // save it. Also note that environment will often be null here, except if
1099
    // this is local eval.
1100
231
    auto func = JSFunction::create(
1101
231
        *this,
1102
231
        domain,
1103
231
        Handle<JSObject>::vmcast(&functionPrototype),
1104
231
        environment,
1105
231
        globalCode);
1106
1107
231
    ScopedNativeCallFrame newFrame{
1108
231
        *this,
1109
231
        0,
1110
231
        func.getHermesValue(),
1111
231
        HermesValue::encodeUndefinedValue(),
1112
231
        *thisArg};
1113
231
    if (LLVM_UNLIKELY(newFrame.overflowed()))
1114
0
      return raiseStackOverflow(StackOverflowKind::NativeStack);
1115
231
    return shouldRandomizeMemoryLayout_
1116
231
        ? interpretFunctionWithRandomStack(*this, globalCode)
1117
231
        : interpretFunction(globalCode);
1118
231
  }
1119
231
}
1120
1121
ExecutionStatus Runtime::loadSegment(
1122
    std::shared_ptr<hbc::BCProvider> &&bytecode,
1123
    Handle<RequireContext> requireContext,
1124
0
    RuntimeModuleFlags flags) {
1125
0
  GCScopeMarkerRAII marker{*this};
1126
0
  auto domain = makeHandle(RequireContext::getDomain(*this, *requireContext));
1127
1128
0
  if (LLVM_UNLIKELY(
1129
0
          RuntimeModule::create(
1130
0
              *this, domain, nextScriptId_++, std::move(bytecode), flags, "") ==
1131
0
          ExecutionStatus::EXCEPTION)) {
1132
0
    return ExecutionStatus::EXCEPTION;
1133
0
  }
1134
1135
0
  return ExecutionStatus::RETURNED;
1136
0
}
1137
1138
94
Handle<JSObject> Runtime::runInternalBytecode() {
1139
94
  auto module = getInternalBytecode();
1140
94
  std::pair<std::unique_ptr<hbc::BCProvider>, std::string> bcResult =
1141
94
      hbc::BCProviderFromBuffer::createBCProviderFromBuffer(
1142
94
          std::make_unique<Buffer>(module.data(), module.size()));
1143
94
  if (LLVM_UNLIKELY(!bcResult.first)) {
1144
0
    hermes_fatal((llvh::Twine("Error running internal bytecode: ") +
1145
0
                  bcResult.second.c_str())
1146
0
                     .str());
1147
0
  }
1148
  // The bytes backing our buffer are immortal, so we can be persistent.
1149
94
  RuntimeModuleFlags flags;
1150
94
  flags.persistent = true;
1151
94
  flags.hidesEpilogue = true;
1152
94
  auto res = runBytecode(
1153
94
      std::move(bcResult.first),
1154
94
      flags,
1155
94
      /*sourceURL*/ "InternalBytecode.js",
1156
94
      makeNullHandle<Environment>());
1157
  // It is a fatal error for the internal bytecode to throw an exception.
1158
94
  assert(
1159
94
      res != ExecutionStatus::EXCEPTION && "Internal bytecode threw exception");
1160
94
  assert(
1161
94
      res->isObject() &&
1162
94
      "Completion value of internal bytecode must be an object");
1163
1164
94
  return makeHandle<JSObject>(*res);
1165
94
}
1166
1167
0
void Runtime::printException(llvh::raw_ostream &os, Handle<> valueHandle) {
1168
0
  os << "Uncaught ";
1169
0
  clearThrownValue();
1170
1171
  // Try to fetch the stack trace.
1172
0
  CallResult<PseudoHandle<>> propRes{ExecutionStatus::EXCEPTION};
1173
0
  if (auto objHandle = Handle<JSObject>::dyn_vmcast(valueHandle)) {
1174
0
    if (LLVM_UNLIKELY(
1175
0
            (propRes = JSObject::getNamed_RJS(
1176
0
                 objHandle,
1177
0
                 *this,
1178
0
                 Predefined::getSymbolID(Predefined::stack))) ==
1179
0
            ExecutionStatus::EXCEPTION)) {
1180
      // Suppress prepareStackTrace using the recursion-breaking flag and retry.
1181
0
      bool wasFormattingStackTrace = formattingStackTrace();
1182
0
      if (LLVM_LIKELY(!wasFormattingStackTrace)) {
1183
0
        setFormattingStackTrace(true);
1184
0
      }
1185
0
      const auto &guard =
1186
0
          llvh::make_scope_exit([this, wasFormattingStackTrace]() {
1187
0
            if (formattingStackTrace() != wasFormattingStackTrace) {
1188
0
              setFormattingStackTrace(wasFormattingStackTrace);
1189
0
            }
1190
0
          });
1191
0
      (void)guard;
1192
1193
0
      if (LLVM_UNLIKELY(
1194
0
              (propRes = JSObject::getNamed_RJS(
1195
0
                   objHandle,
1196
0
                   *this,
1197
0
                   Predefined::getSymbolID(Predefined::stack))) ==
1198
0
              ExecutionStatus::EXCEPTION)) {
1199
0
        os << "exception thrown while getting stack trace\n";
1200
0
        return;
1201
0
      }
1202
0
    }
1203
0
  }
1204
0
  SmallU16String<32> tmp;
1205
0
  if (LLVM_UNLIKELY(
1206
0
          propRes == ExecutionStatus::EXCEPTION || (*propRes)->isUndefined())) {
1207
    // If stack trace is unavailable, we just print error.toString.
1208
0
    auto strRes = toString_RJS(*this, valueHandle);
1209
0
    if (LLVM_UNLIKELY(strRes == ExecutionStatus::EXCEPTION)) {
1210
0
      os << "exception thrown in toString of original exception\n";
1211
0
      return;
1212
0
    }
1213
1214
0
    strRes->get()->appendUTF16String(tmp);
1215
0
    os << tmp << "\n";
1216
0
    return;
1217
0
  }
1218
  // stack trace is available, try to convert it to string.
1219
0
  auto strRes = toString_RJS(*this, makeHandle(std::move(*propRes)));
1220
0
  if (LLVM_UNLIKELY(strRes == ExecutionStatus::EXCEPTION)) {
1221
0
    os << "exception thrown in toString of stack trace\n";
1222
0
    return;
1223
0
  }
1224
0
  PseudoHandle<StringPrimitive> str = std::move(*strRes);
1225
0
  if (str->getStringLength() == 0) {
1226
0
    str.invalidate();
1227
    // If the final value is the empty string,
1228
    // fall back to just printing the error.toString directly.
1229
0
    auto errToStringRes = toString_RJS(*this, valueHandle);
1230
0
    if (LLVM_UNLIKELY(errToStringRes == ExecutionStatus::EXCEPTION)) {
1231
0
      os << "exception thrown in toString of original exception\n";
1232
0
      return;
1233
0
    }
1234
0
    str = std::move(*errToStringRes);
1235
0
  }
1236
0
  str->appendUTF16String(tmp);
1237
0
  os << tmp << "\n";
1238
0
}
1239
1240
7.32k
Handle<JSObject> Runtime::getGlobal() {
1241
7.32k
  return Handle<JSObject>::vmcast(&global_);
1242
7.32k
}
1243
1244
0
std::vector<llvh::ArrayRef<uint8_t>> Runtime::getEpilogues() {
1245
0
  std::vector<llvh::ArrayRef<uint8_t>> result;
1246
0
  for (const auto &m : runtimeModuleList_) {
1247
0
    if (!m.hidesEpilogue()) {
1248
0
      result.push_back(m.getEpilogue());
1249
0
    }
1250
0
  }
1251
0
  return result;
1252
0
}
1253
1254
#ifdef HERMES_ENABLE_DEBUGGER
1255
1256
llvh::Optional<Runtime::StackFrameInfo> Runtime::stackFrameInfoByIndex(
1257
0
    uint32_t frameIdx) const {
1258
  // Locate the frame.
1259
0
  auto frames = getStackFrames();
1260
0
  auto it = frames.begin();
1261
0
  for (; frameIdx && it != frames.end(); ++it, --frameIdx) {
1262
0
  }
1263
0
  if (it == frames.end())
1264
0
    return llvh::None;
1265
1266
0
  StackFrameInfo info;
1267
0
  info.frame = *it++;
1268
0
  info.isGlobal = it == frames.end();
1269
0
  return info;
1270
0
}
1271
1272
/// Calculate and \return the offset between the location of the specified
1273
/// frame and the start of the stack. This value increases with every nested
1274
/// call.
1275
0
uint32_t Runtime::calcFrameOffset(ConstStackFrameIterator it) const {
1276
0
  assert(it != getStackFrames().end() && "invalid frame");
1277
0
  return it->ptr() - registerStackStart_;
1278
0
}
1279
1280
/// \return the offset between the location of the current frame and the
1281
///   start of the stack. This value increases with every nested call.
1282
0
uint32_t Runtime::getCurrentFrameOffset() const {
1283
0
  return calcFrameOffset(getStackFrames().begin());
1284
0
}
1285
1286
#endif
1287
1288
static ExecutionStatus
1289
30
raisePlaceholder(Runtime &runtime, Handle<JSError> errorObj, Handle<> message) {
1290
30
  JSError::recordStackTrace(errorObj, runtime);
1291
30
  JSError::setMessage(errorObj, runtime, message);
1292
30
  return runtime.setThrownValue(errorObj.getHermesValue());
1293
30
}
1294
1295
/// A placeholder used to construct a Error Object that takes in a the specified
1296
/// message.
1297
static ExecutionStatus raisePlaceholder(
1298
    Runtime &runtime,
1299
    Handle<JSObject> prototype,
1300
30
    Handle<> message) {
1301
30
  GCScopeMarkerRAII gcScope{runtime};
1302
1303
  // Create the error object, initialize stack property and set message.
1304
30
  auto errorObj = runtime.makeHandle(JSError::create(runtime, prototype));
1305
30
  return raisePlaceholder(runtime, errorObj, message);
1306
30
}
1307
1308
/// A placeholder used to construct a Error Object that takes in a const
1309
/// message. A new StringPrimitive is created each time.
1310
// TODO: Predefine each error message.
1311
static ExecutionStatus raisePlaceholder(
1312
    Runtime &runtime,
1313
    Handle<JSObject> prototype,
1314
30
    const TwineChar16 &msg) {
1315
  // Since this happens unexpectedly and rarely, don't rely on the parent
1316
  // GCScope.
1317
30
  GCScope gcScope{runtime};
1318
1319
30
  SmallU16String<64> buf;
1320
30
  msg.toVector(buf);
1321
1322
30
  auto strRes = StringPrimitive::create(runtime, buf);
1323
30
  if (strRes == ExecutionStatus::EXCEPTION) {
1324
0
    return ExecutionStatus::EXCEPTION;
1325
0
  }
1326
30
  auto str = runtime.makeHandle<StringPrimitive>(*strRes);
1327
30
  LLVM_DEBUG(llvh::errs() << buf.arrayRef() << "\n");
1328
30
  return raisePlaceholder(runtime, prototype, str);
1329
30
}
1330
1331
0
ExecutionStatus Runtime::raiseError(const TwineChar16 &msg) {
1332
0
  return raisePlaceholder(
1333
0
      *this, Handle<JSObject>::vmcast(&ErrorPrototype), msg);
1334
0
}
1335
1336
0
ExecutionStatus Runtime::raiseTypeError(Handle<> message) {
1337
  // Since this happens unexpectedly and rarely, don't rely on the parent
1338
  // GCScope.
1339
0
  GCScope gcScope{*this};
1340
0
  return raisePlaceholder(
1341
0
      *this, Handle<JSObject>::vmcast(&TypeErrorPrototype), message);
1342
0
}
1343
1344
ExecutionStatus Runtime::raiseTypeErrorForValue(
1345
    const TwineChar16 &msg1,
1346
    Handle<> value,
1347
1
    const TwineChar16 &msg2) {
1348
1
  switch (value->getTag()) {
1349
0
    case HermesValue::Tag::Object:
1350
0
      return raiseTypeError(msg1 + "Object" + msg2);
1351
0
    case HermesValue::Tag::Str:
1352
0
      return raiseTypeError(
1353
0
          msg1 + "'" + vmcast<StringPrimitive>(*value) + "'" + msg2);
1354
0
    case HermesValue::Tag::BoolSymbol:
1355
0
      if (value->isBool()) {
1356
0
        if (value->getBool()) {
1357
0
          return raiseTypeError(msg1 + "true" + msg2);
1358
0
        } else {
1359
0
          return raiseTypeError(msg1 + "false" + msg2);
1360
0
        }
1361
0
      }
1362
0
      return raiseTypeError(
1363
0
          msg1 + "Symbol(" + getStringPrimFromSymbolID(value->getSymbol()) +
1364
0
          ")" + msg2);
1365
1
    case HermesValue::Tag::UndefinedNull:
1366
1
      if (value->isUndefined())
1367
1
        return raiseTypeError(msg1 + "undefined" + msg2);
1368
0
      else
1369
0
        return raiseTypeError(msg1 + "null" + msg2);
1370
0
    default:
1371
0
      if (value->isNumber()) {
1372
0
        char buf[hermes::NUMBER_TO_STRING_BUF_SIZE];
1373
0
        size_t len = hermes::numberToString(
1374
0
            value->getNumber(), buf, hermes::NUMBER_TO_STRING_BUF_SIZE);
1375
0
        return raiseTypeError(msg1 + llvh::StringRef{buf, len} + msg2);
1376
0
      }
1377
1
  }
1378
0
  return raiseTypeError(msg1 + "Value" + msg2);
1379
1
}
1380
1381
1
ExecutionStatus Runtime::raiseTypeErrorForCallable(Handle<> callable) {
1382
1
  if (CodeBlock *curCodeBlock = getCurrentFrame().getCalleeCodeBlock(*this)) {
1383
1
    if (OptValue<uint32_t> textifiedCalleeOffset =
1384
1
            curCodeBlock->getTextifiedCalleeOffset()) {
1385
      // Look up the textified callee for the current IP in the debug
1386
      // information. If one is available, use that in the error message.
1387
1
      OptValue<llvh::StringRef> tCallee =
1388
1
          curCodeBlock->getRuntimeModule()
1389
1
              ->getBytecode()
1390
1
              ->getDebugInfo()
1391
1
              ->getTextifiedCalleeUTF8(
1392
1
                  *textifiedCalleeOffset,
1393
1
                  curCodeBlock->getOffsetOf(getCurrentIP()));
1394
1
      if (tCallee.hasValue()) {
1395
        // The textified callee is UTF8, so it may need to be converted to
1396
        // UTF16.
1397
0
        if (isAllASCII(tCallee->begin(), tCallee->end())) {
1398
          // All ASCII means no conversion is needed.
1399
0
          return raiseTypeErrorForValue(
1400
0
              TwineChar16(*tCallee) + " is not a function (it is ",
1401
0
              callable,
1402
0
              ")");
1403
0
        }
1404
1405
        // Convert UTF8 to UTF16 before creating the Error.
1406
0
        llvh::SmallVector<char16_t, 16> tCalleeUTF16;
1407
0
        convertUTF8WithSurrogatesToUTF16(
1408
0
            std::back_inserter(tCalleeUTF16), tCallee->begin(), tCallee->end());
1409
0
        return raiseTypeErrorForValue(
1410
0
            TwineChar16(tCalleeUTF16) + " is not a function (it is ",
1411
0
            callable,
1412
0
            ")");
1413
0
      }
1414
1
    }
1415
1
  }
1416
1417
1
  return raiseTypeErrorForValue(callable, " is not a function");
1418
1
}
1419
1420
1
ExecutionStatus Runtime::raiseTypeError(const TwineChar16 &msg) {
1421
1
  return raisePlaceholder(
1422
1
      *this, Handle<JSObject>::vmcast(&TypeErrorPrototype), msg);
1423
1
}
1424
1425
4
ExecutionStatus Runtime::raiseSyntaxError(const TwineChar16 &msg) {
1426
4
  return raisePlaceholder(
1427
4
      *this, Handle<JSObject>::vmcast(&SyntaxErrorPrototype), msg);
1428
4
}
1429
1430
0
ExecutionStatus Runtime::raiseRangeError(const TwineChar16 &msg) {
1431
0
  return raisePlaceholder(
1432
0
      *this, Handle<JSObject>::vmcast(&RangeErrorPrototype), msg);
1433
0
}
1434
1435
25
ExecutionStatus Runtime::raiseReferenceError(const TwineChar16 &msg) {
1436
25
  return raisePlaceholder(
1437
25
      *this, Handle<JSObject>::vmcast(&ReferenceErrorPrototype), msg);
1438
25
}
1439
1440
0
ExecutionStatus Runtime::raiseURIError(const TwineChar16 &msg) {
1441
0
  return raisePlaceholder(
1442
0
      *this, Handle<JSObject>::vmcast(&URIErrorPrototype), msg);
1443
0
}
1444
1445
0
ExecutionStatus Runtime::raiseStackOverflow(StackOverflowKind kind) {
1446
0
  const char *msg;
1447
0
  switch (kind) {
1448
0
    case StackOverflowKind::JSRegisterStack:
1449
0
      msg = "Maximum call stack size exceeded";
1450
0
      break;
1451
0
    case StackOverflowKind::NativeStack:
1452
0
      msg = "Maximum call stack size exceeded (native stack depth)";
1453
0
      break;
1454
0
    case StackOverflowKind::JSONParser:
1455
0
      msg = "Maximum nesting level in JSON parser exceeded";
1456
0
      break;
1457
0
    case StackOverflowKind::JSONStringify:
1458
0
      msg = "Maximum nesting level in JSON stringifyer exceeded";
1459
0
      break;
1460
0
  }
1461
0
  return raisePlaceholder(
1462
0
      *this, Handle<JSObject>::vmcast(&RangeErrorPrototype), msg);
1463
0
}
1464
1465
0
ExecutionStatus Runtime::raiseQuitError() {
1466
0
  return raiseUncatchableError(
1467
0
      Handle<JSObject>::vmcast(&QuitErrorPrototype), "Quit");
1468
0
}
1469
1470
0
ExecutionStatus Runtime::raiseTimeoutError() {
1471
0
  return raiseUncatchableError(
1472
0
      Handle<JSObject>::vmcast(&TimeoutErrorPrototype),
1473
0
      "Javascript execution has timed out.");
1474
0
}
1475
1476
ExecutionStatus Runtime::raiseUncatchableError(
1477
    Handle<JSObject> prototype,
1478
0
    llvh::StringRef errMessage) {
1479
0
  Handle<JSError> err =
1480
0
      makeHandle(JSError::createUncatchable(*this, prototype));
1481
0
  auto res = StringPrimitive::create(
1482
0
      *this, llvh::ASCIIRef{errMessage.begin(), errMessage.end()});
1483
0
  if (res == ExecutionStatus::EXCEPTION) {
1484
0
    return ExecutionStatus::EXCEPTION;
1485
0
  }
1486
0
  auto str = makeHandle(*res);
1487
0
  return raisePlaceholder(*this, err, str);
1488
0
}
1489
1490
0
ExecutionStatus Runtime::raiseEvalUnsupported(llvh::StringRef code) {
1491
0
  return raiseSyntaxError(
1492
0
      TwineChar16("Parsing source code unsupported: ") + code.substr(0, 32));
1493
0
}
1494
1495
91
bool Runtime::insertVisitedObject(JSObject *obj) {
1496
91
  if (llvh::find(stringCycleCheckVisited_, obj) !=
1497
91
      stringCycleCheckVisited_.end())
1498
0
    return true;
1499
91
  stringCycleCheckVisited_.push_back(obj);
1500
91
  return false;
1501
91
}
1502
1503
91
void Runtime::removeVisitedObject(JSObject *obj) {
1504
91
  (void)obj;
1505
91
  assert(
1506
91
      stringCycleCheckVisited_.back() == obj && "string cycle stack corrupted");
1507
91
  stringCycleCheckVisited_.pop_back();
1508
91
}
1509
1510
94
std::unique_ptr<Buffer> Runtime::generateSpecialRuntimeBytecode() {
1511
94
  hbc::SimpleBytecodeBuilder builder;
1512
94
  {
1513
94
    hbc::BytecodeInstructionGenerator bcGen;
1514
94
    bcGen.emitLoadConstUndefined(0);
1515
94
    bcGen.emitRet(0);
1516
94
    builder.addFunction(1, 1, bcGen.acquireBytecode());
1517
94
  }
1518
94
  {
1519
94
    hbc::BytecodeInstructionGenerator bcGen;
1520
94
    bcGen.emitGetGlobalObject(0);
1521
94
    bcGen.emitRet(0);
1522
94
    builder.addFunction(1, 1, bcGen.acquireBytecode());
1523
94
  }
1524
94
  auto buffer = builder.generateBytecodeBuffer();
1525
94
  assert(buffer->size() < MIN_IO_TRACKING_SIZE);
1526
94
  return buffer;
1527
94
}
1528
1529
94
void Runtime::initPredefinedStrings() {
1530
94
  assert(!getTopGCScope() && "There shouldn't be any handles allocated yet");
1531
1532
94
  auto buffer = predefStringAndSymbolChars;
1533
94
  auto propLengths = predefPropertyLengths;
1534
94
  auto strLengths = predefStringLengths;
1535
94
  auto symLengths = predefSymbolLengths;
1536
1537
94
  static const uint32_t hashes[] = {
1538
51.3k
#define STR(name, string) constexprHashString(string),
1539
94
#include "hermes/VM/PredefinedStrings.def"
1540
94
  };
1541
1542
94
  uint32_t offset = 0;
1543
94
  uint32_t registered = 0;
1544
94
  (void)registered;
1545
94
  const uint32_t strCount = Predefined::NumStrings;
1546
94
  const uint32_t symCount = Predefined::NumSymbols;
1547
94
  identifierTable_.reserve(Predefined::_IPROP_AFTER_LAST + strCount + symCount);
1548
1549
1.41k
  for (uint32_t idx = 0; idx < Predefined::_IPROP_AFTER_LAST; ++idx) {
1550
1.31k
    SymbolID sym = identifierTable_.createNotUniquedLazySymbol(
1551
1.31k
        ASCIIRef{&buffer[offset], propLengths[idx]});
1552
1553
1.31k
    assert(sym == Predefined::getSymbolID((Predefined::IProp)registered++));
1554
1.31k
    (void)sym;
1555
1556
1.31k
    offset += propLengths[idx];
1557
1.31k
  }
1558
1559
94
  assert(
1560
94
      strCount == sizeof hashes / sizeof hashes[0] &&
1561
94
      "Arrays should have same length");
1562
51.4k
  for (uint32_t idx = 0; idx < strCount; idx++) {
1563
51.3k
    SymbolID sym = identifierTable_.registerLazyIdentifier(
1564
51.3k
        ASCIIRef{&buffer[offset], strLengths[idx]}, hashes[idx]);
1565
1566
51.3k
    assert(sym == Predefined::getSymbolID((Predefined::Str)registered++));
1567
51.3k
    (void)sym;
1568
1569
51.3k
    offset += strLengths[idx];
1570
51.3k
  }
1571
1572
1.03k
  for (uint32_t idx = 0; idx < symCount; ++idx) {
1573
940
    SymbolID sym = identifierTable_.createNotUniquedLazySymbol(
1574
940
        ASCIIRef{&buffer[offset], symLengths[idx]});
1575
1576
940
    assert(sym == Predefined::getSymbolID((Predefined::Sym)registered++));
1577
940
    (void)sym;
1578
1579
940
    offset += symLengths[idx];
1580
940
  }
1581
1582
94
  assert(
1583
94
      !getTopGCScope() &&
1584
94
      "There shouldn't be any handles allocated during initializing the predefined strings");
1585
94
}
1586
1587
94
void Runtime::initCharacterStrings() {
1588
94
  GCScope gc(*this);
1589
94
  auto marker = gc.createMarker();
1590
94
  charStrings_.reserve(256);
1591
24.1k
  for (char16_t ch = 0; ch < 256; ++ch) {
1592
24.0k
    gc.flushToMarker(marker);
1593
24.0k
    charStrings_.push_back(allocateCharacterString(ch).getHermesValue());
1594
24.0k
  }
1595
94
}
1596
1597
24.0k
Handle<StringPrimitive> Runtime::allocateCharacterString(char16_t ch) {
1598
  // This can in theory throw when called out of initialization phase.
1599
  // However there is only that many character strings and in practice this
1600
  // is not a problem.  Note that we allocate these as "long-lived" objects,
1601
  // so we don't have to scan the charStrings_ array in
1602
  // young-generation collections.
1603
1604
24.0k
  PinnedHermesValue strRes;
1605
24.0k
  if (LLVM_LIKELY(ch < 128)) {
1606
12.0k
    strRes = ignoreAllocationFailure(
1607
12.0k
        StringPrimitive::createLongLived(*this, ASCIIRef(ch)));
1608
12.0k
  } else {
1609
12.0k
    strRes = ignoreAllocationFailure(
1610
12.0k
        StringPrimitive::createLongLived(*this, UTF16Ref(ch)));
1611
12.0k
  }
1612
24.0k
  return makeHandle<StringPrimitive>(strRes);
1613
24.0k
}
1614
1615
126k
Handle<StringPrimitive> Runtime::getCharacterString(char16_t ch) {
1616
126k
  if (LLVM_LIKELY(ch < 256))
1617
126k
    return Handle<StringPrimitive>::vmcast(&charStrings_[ch]);
1618
1619
0
  return makeHandle<StringPrimitive>(
1620
0
      ignoreAllocationFailure(StringPrimitive::create(*this, UTF16Ref(ch))));
1621
126k
}
1622
1623
// Store all object and symbol ids in a static table to conserve code size.
1624
static const struct {
1625
  uint16_t object, method;
1626
#ifndef NDEBUG
1627
  const char *name;
1628
#define BUILTIN_METHOD(object, method) \
1629
  {(uint16_t)Predefined::object,       \
1630
   (uint16_t)Predefined::method,       \
1631
   #object "::" #method},
1632
#else
1633
#define BUILTIN_METHOD(object, method) \
1634
  {(uint16_t)Predefined::object, (uint16_t)Predefined::method},
1635
#endif
1636
#define PRIVATE_BUILTIN(name)
1637
#define JS_BUILTIN(name)
1638
} publicNativeBuiltins[] = {
1639
#include "hermes/FrontEndDefs/Builtins.def"
1640
};
1641
1642
static_assert(
1643
    sizeof(publicNativeBuiltins) / sizeof(publicNativeBuiltins[0]) ==
1644
        BuiltinMethod::_firstPrivate,
1645
    "builtin method table mismatch");
1646
1647
ExecutionStatus Runtime::forEachPublicNativeBuiltin(
1648
    const std::function<ExecutionStatus(
1649
        unsigned methodIndex,
1650
        Predefined::Str objectName,
1651
        Handle<JSObject> &object,
1652
94
        SymbolID methodID)> &callback) {
1653
94
  MutableHandle<JSObject> lastObject{*this};
1654
94
  Predefined::Str lastObjectName = Predefined::_STRING_AFTER_LAST;
1655
1656
3.57k
  for (unsigned methodIndex = 0; methodIndex < BuiltinMethod::_firstPrivate;
1657
3.47k
       ++methodIndex) {
1658
3.47k
    GCScopeMarkerRAII marker{*this};
1659
3.47k
    LLVM_DEBUG(llvh::dbgs() << publicNativeBuiltins[methodIndex].name << "\n");
1660
    // Find the object first, if it changed.
1661
3.47k
    auto objectName = (Predefined::Str)publicNativeBuiltins[methodIndex].object;
1662
3.47k
    if (objectName != lastObjectName) {
1663
564
      auto objectID = Predefined::getSymbolID(objectName);
1664
      // Avoid running any JS here to avoid modifying the builtins while
1665
      // iterating them.
1666
564
      NamedPropertyDescriptor desc;
1667
      // Check if the builtin is overridden.
1668
564
      if (!JSObject::getOwnNamedDescriptor(
1669
564
              getGlobal(), *this, objectID, desc)) {
1670
0
        return raiseTypeError(
1671
0
            TwineChar16{
1672
0
                "Cannot execute a bytecode compiled with -fstatic-builtins: "} +
1673
0
            getPredefinedString(objectName) + " was deleted");
1674
0
      }
1675
      // Doesn't run accessors.
1676
564
      auto cr = JSObject::getNamedSlotValue(getGlobal(), *this, desc);
1677
564
      if (LLVM_UNLIKELY(cr == ExecutionStatus::EXCEPTION)) {
1678
0
        return ExecutionStatus::EXCEPTION;
1679
0
      }
1680
      // This is known to not be PropertyAccessor, so check that casting it to
1681
      // JSObject is allowed.
1682
564
      if (LLVM_UNLIKELY(!vmisa<JSObject>(cr->get()))) {
1683
0
        return raiseTypeError(
1684
0
            TwineChar16{
1685
0
                "Cannot execute a bytecode compiled with -fstatic-builtins: "} +
1686
0
            getPredefinedString(objectName) + " is not an object");
1687
0
      }
1688
1689
564
      lastObject = vmcast<JSObject>(cr->get());
1690
564
      lastObjectName = objectName;
1691
564
    }
1692
1693
    // Find the method.
1694
3.47k
    auto methodName = (Predefined::Str)publicNativeBuiltins[methodIndex].method;
1695
3.47k
    auto methodID = Predefined::getSymbolID(methodName);
1696
1697
3.47k
    ExecutionStatus status =
1698
3.47k
        callback(methodIndex, objectName, lastObject, methodID);
1699
3.47k
    if (status != ExecutionStatus::RETURNED) {
1700
0
      return ExecutionStatus::EXCEPTION;
1701
0
    }
1702
3.47k
  }
1703
94
  return ExecutionStatus::RETURNED;
1704
94
}
1705
1706
94
void Runtime::initNativeBuiltins() {
1707
94
  GCScopeMarkerRAII gcScope{*this};
1708
1709
94
  builtins_.resize(BuiltinMethod::_count);
1710
1711
94
  (void)forEachPublicNativeBuiltin([this](
1712
94
                                       unsigned methodIndex,
1713
94
                                       Predefined::Str /* objectName */,
1714
94
                                       Handle<JSObject> &currentObject,
1715
3.47k
                                       SymbolID methodID) {
1716
3.47k
    auto cr = JSObject::getNamed_RJS(currentObject, *this, methodID);
1717
3.47k
    assert(
1718
3.47k
        cr.getStatus() != ExecutionStatus::EXCEPTION &&
1719
3.47k
        "getNamed() of builtin method failed");
1720
3.47k
    assert(
1721
3.47k
        vmisa<NativeFunction>(cr->get()) &&
1722
3.47k
        "getNamed() of builtin method must be a NativeFunction");
1723
3.47k
    builtins_[methodIndex] = vmcast<NativeFunction>(cr->get());
1724
3.47k
    return ExecutionStatus::RETURNED;
1725
3.47k
  });
1726
1727
  // Now add the private native builtins.
1728
94
  createHermesBuiltins(*this, builtins_);
1729
94
#ifndef NDEBUG
1730
  // Make sure native builtins are all defined.
1731
4.98k
  for (unsigned i = 0; i < BuiltinMethod::_firstJS; ++i) {
1732
4.88k
    assert(builtins_[i] && "native builtin not initialized");
1733
4.88k
  }
1734
94
#endif
1735
94
}
1736
1737
static const struct JSBuiltin {
1738
  uint16_t symID, builtinIndex;
1739
} jsBuiltins[] = {
1740
#define BUILTIN_METHOD(object, method)
1741
#define PRIVATE_BUILTIN(name)
1742
#define JS_BUILTIN(name) \
1743
  {(uint16_t)Predefined::name, (uint16_t)BuiltinMethod::HermesBuiltin##_##name}
1744
#include "hermes/FrontEndDefs/Builtins.def"
1745
};
1746
1747
void Runtime::initJSBuiltins(
1748
    llvh::MutableArrayRef<Callable *> builtins,
1749
94
    Handle<JSObject> jsBuiltinsObj) {
1750
94
  for (const JSBuiltin &jsBuiltin : jsBuiltins) {
1751
94
    auto symID = jsBuiltin.symID;
1752
94
    auto builtinIndex = jsBuiltin.builtinIndex;
1753
1754
    // Try to get the JS function from jsBuiltinsObj.
1755
94
    auto getRes = JSObject::getNamed_RJS(
1756
94
        jsBuiltinsObj, *this, Predefined::getSymbolID((Predefined::Str)symID));
1757
94
    assert(getRes == ExecutionStatus::RETURNED && "Failed to get JS builtin.");
1758
94
    JSFunction *jsFunc = vmcast<JSFunction>(getRes->getHermesValue());
1759
1760
94
    builtins[builtinIndex] = jsFunc;
1761
94
  }
1762
94
}
1763
1764
0
ExecutionStatus Runtime::assertBuiltinsUnmodified() {
1765
0
  assert(!builtinsFrozen_ && "Builtins are already frozen.");
1766
0
  GCScope gcScope(*this);
1767
0
  NoRJSScope noRJS{*this};
1768
1769
0
  return forEachPublicNativeBuiltin([this](
1770
0
                                        unsigned methodIndex,
1771
0
                                        Predefined::Str objectName,
1772
0
                                        Handle<JSObject> &currentObject,
1773
0
                                        SymbolID methodID) {
1774
    // Avoid running any JS here to avoid modifying the builtins while iterating
1775
    // them.
1776
0
    NamedPropertyDescriptor desc;
1777
    // Check if the builtin is overridden.
1778
    // Need to check for flags which could result in JS execution.
1779
0
    if (!JSObject::getOwnNamedDescriptor(
1780
0
            currentObject, *this, methodID, desc) ||
1781
0
        desc.flags.proxyObject || desc.flags.hostObject) {
1782
0
      return raiseTypeError(
1783
0
          TwineChar16{
1784
0
              "Cannot execute a bytecode compiled with -fstatic-builtins: "} +
1785
0
          getPredefinedString(objectName) + "." +
1786
0
          getStringPrimFromSymbolID(methodID) + " has been modified");
1787
0
    }
1788
0
    auto cr = JSObject::getNamedSlotValue(currentObject, *this, desc);
1789
0
    if (LLVM_UNLIKELY(cr == ExecutionStatus::EXCEPTION)) {
1790
0
      return ExecutionStatus::EXCEPTION;
1791
0
    }
1792
0
    auto currentBuiltin = dyn_vmcast<NativeFunction>(cr->get());
1793
0
    if (!currentBuiltin || currentBuiltin != builtins_[methodIndex]) {
1794
0
      return raiseTypeError(
1795
0
          TwineChar16{
1796
0
              "Cannot execute a bytecode compiled with -fstatic-builtins: "} +
1797
0
          getPredefinedString(objectName) + "." +
1798
0
          getStringPrimFromSymbolID(methodID) + " has been modified");
1799
0
    }
1800
0
    return ExecutionStatus::RETURNED;
1801
0
  });
1802
0
}
1803
1804
0
void Runtime::freezeBuiltins() {
1805
0
  assert(!builtinsFrozen_ && "Builtins are already frozen.");
1806
0
  GCScope gcScope{*this};
1807
1808
  // A list storing all the object ids that we will freeze on the global object
1809
  // in the end.
1810
0
  std::vector<SymbolID> objectList;
1811
  // A list storing all the method ids on the same object that we will freeze on
1812
  // each object.
1813
0
  std::vector<SymbolID> methodList;
1814
1815
  // Masks for setting the property a static builtin and read-only.
1816
0
  PropertyFlags clearFlags;
1817
0
  clearFlags.configurable = 1;
1818
0
  clearFlags.writable = 1;
1819
0
  PropertyFlags setFlags;
1820
0
  setFlags.staticBuiltin = 1;
1821
1822
0
  (void)forEachPublicNativeBuiltin(
1823
0
      [this, &objectList, &methodList, &clearFlags, &setFlags](
1824
0
          unsigned methodIndex,
1825
0
          Predefined::Str objectName,
1826
0
          Handle<JSObject> &currentObject,
1827
0
          SymbolID methodID) {
1828
0
        methodList.push_back(methodID);
1829
        // This is the last method on current object.
1830
0
        if (methodIndex + 1 == BuiltinMethod::_publicCount ||
1831
0
            objectName != publicNativeBuiltins[methodIndex + 1].object) {
1832
          // Store the object id in the object set.
1833
0
          SymbolID objectID = Predefined::getSymbolID(objectName);
1834
0
          objectList.push_back(objectID);
1835
          // Freeze all methods and mark them as static builtins on the current
1836
          // object.
1837
0
          JSObject::updatePropertyFlagsWithoutTransitions(
1838
0
              currentObject,
1839
0
              *this,
1840
0
              clearFlags,
1841
0
              setFlags,
1842
0
              llvh::ArrayRef<SymbolID>(methodList));
1843
0
          methodList.clear();
1844
0
        }
1845
0
        return ExecutionStatus::RETURNED;
1846
0
      });
1847
1848
  // Freeze all builtin objects and mark them as static builtins on the global
1849
  // object.
1850
0
  JSObject::updatePropertyFlagsWithoutTransitions(
1851
0
      getGlobal(),
1852
0
      *this,
1853
0
      clearFlags,
1854
0
      setFlags,
1855
0
      llvh::ArrayRef<SymbolID>(objectList));
1856
1857
0
  builtinsFrozen_ = true;
1858
0
}
1859
1860
0
ExecutionStatus Runtime::drainJobs() {
1861
0
  GCScope gcScope{*this};
1862
0
  MutableHandle<Callable> job{*this};
1863
  // Note that new jobs can be enqueued during the draining.
1864
0
  while (!jobQueue_.empty()) {
1865
0
    GCScopeMarkerRAII marker{gcScope};
1866
1867
0
    job = jobQueue_.front();
1868
0
    jobQueue_.pop_front();
1869
1870
    // Jobs are guaranteed to behave as thunks.
1871
0
    auto callRes =
1872
0
        Callable::executeCall0(job, *this, Runtime::getUndefinedValue());
1873
1874
    // Early return to signal the caller. Note that the exceptional job has been
1875
    // popped, so re-invocation would pick up from the next available job.
1876
0
    if (LLVM_UNLIKELY(callRes == ExecutionStatus::EXCEPTION)) {
1877
0
      return ExecutionStatus::EXCEPTION;
1878
0
    }
1879
0
  }
1880
0
  return ExecutionStatus::RETURNED;
1881
0
}
1882
1883
0
ExecutionStatus Runtime::addToKeptObjects(Handle<JSObject> obj) {
1884
  // Lazily create the map for keptObjects_
1885
0
  if (keptObjects_.isUndefined()) {
1886
0
    auto mapRes = OrderedHashMap::create(*this);
1887
0
    if (LLVM_UNLIKELY(mapRes == ExecutionStatus::EXCEPTION)) {
1888
0
      return ExecutionStatus::EXCEPTION;
1889
0
    }
1890
0
    keptObjects_ = mapRes->getHermesValue();
1891
0
  }
1892
0
  auto mapHandle = Handle<OrderedHashMap>::vmcast(&keptObjects_);
1893
0
  return OrderedHashMap::insert(mapHandle, *this, obj, obj);
1894
0
}
1895
1896
0
void Runtime::clearKeptObjects() {
1897
0
  keptObjects_ = HermesValue::encodeUndefinedValue();
1898
0
}
1899
1900
0
uint64_t Runtime::gcStableHashHermesValue(Handle<HermesValue> value) {
1901
0
  switch (value->getTag()) {
1902
0
    case HermesValue::Tag::Object: {
1903
      // For objects, because pointers can move, we need a unique ID
1904
      // that does not change for each object.
1905
0
      auto id = JSObject::getObjectID(vmcast<JSObject>(*value), *this);
1906
0
      return llvh::hash_value(id);
1907
0
    }
1908
0
    case HermesValue::Tag::BigInt: {
1909
      // For bigints, we hash the string content.
1910
0
      auto bytes = Handle<BigIntPrimitive>::vmcast(value)->getRawDataCompact();
1911
0
      return llvh::hash_combine_range(bytes.begin(), bytes.end());
1912
0
    }
1913
0
    case HermesValue::Tag::Str: {
1914
      // For strings, we hash the string content.
1915
0
      auto strView = StringPrimitive::createStringView(
1916
0
          *this, Handle<StringPrimitive>::vmcast(value));
1917
0
      return llvh::hash_combine_range(strView.begin(), strView.end());
1918
0
    }
1919
0
    default:
1920
0
      assert(!value->isPointer() && "Unhandled pointer type");
1921
0
      if (value->isNumber()) {
1922
        // We need to check for NaNs because they may differ in the sign bit,
1923
        // but they should have the same hash value.
1924
0
        if (LLVM_UNLIKELY(value->isNaN()))
1925
0
          return llvh::hash_value(HermesValue::encodeNaNValue().getRaw());
1926
        // To normalize -0 to 0.
1927
0
        if (value->getNumber() == 0)
1928
0
          return 0;
1929
0
      }
1930
      // For everything else, we just take advantage of HermesValue.
1931
0
      return llvh::hash_value(value->getRaw());
1932
0
  }
1933
0
}
1934
1935
356
bool Runtime::symbolEqualsToStringPrim(SymbolID id, StringPrimitive *strPrim) {
1936
356
  auto view = identifierTable_.getStringView(*this, id);
1937
356
  return strPrim->equals(view);
1938
356
}
1939
1940
LLVM_ATTRIBUTE_NOINLINE
1941
140k
void Runtime::allocStack(uint32_t count, HermesValue initValue) {
1942
  // Note: it is important that allocStack be defined out-of-line. If inline,
1943
  // constants are propagated into initValue, which enables clang to use
1944
  // memset_pattern_16. This ends up being a significant loss as it is an
1945
  // indirect call.
1946
140k
  auto *oldStackPointer = stackPointer_;
1947
140k
  allocUninitializedStack(count);
1948
  // Initialize the new registers.
1949
140k
  std::uninitialized_fill_n(oldStackPointer, count, initValue);
1950
140k
}
1951
1952
0
void Runtime::dumpCallFrames(llvh::raw_ostream &OS) {
1953
0
  OS << "== Call Frames ==\n";
1954
0
  const PinnedHermesValue *next = getStackPointer();
1955
0
  unsigned i = 0;
1956
0
  for (StackFramePtr sf : getStackFrames()) {
1957
0
    OS << i++ << " ";
1958
0
    if (auto *closure = dyn_vmcast<Callable>(sf.getCalleeClosureOrCBRef())) {
1959
0
      OS << cellKindStr(closure->getKind()) << " ";
1960
0
    }
1961
0
    if (auto *cb = sf.getCalleeCodeBlock(*this)) {
1962
0
      OS << formatSymbolID(cb->getNameMayAllocate()) << " ";
1963
0
    }
1964
0
    dumpStackFrame(sf, OS, next);
1965
0
    next = sf.ptr();
1966
0
  }
1967
0
}
1968
1969
LLVM_ATTRIBUTE_NOINLINE
1970
0
void Runtime::dumpCallFrames() {
1971
0
  dumpCallFrames(llvh::errs());
1972
0
}
1973
1974
/// Serialize a SymbolID.
1975
llvh::raw_ostream &operator<<(
1976
    llvh::raw_ostream &OS,
1977
0
    Runtime::FormatSymbolID format) {
1978
0
  if (!format.symbolID.isValid())
1979
0
    return OS << "SymbolID(INVALID)";
1980
1981
0
  OS << "SymbolID("
1982
0
     << (format.symbolID.isNotUniqued() ? "(External)" : "(Internal)")
1983
0
     << format.symbolID.unsafeGetIndex() << " \"";
1984
1985
0
  OS << format.runtime.getIdentifierTable().convertSymbolToUTF8(
1986
0
      format.symbolID);
1987
0
  return OS << "\")";
1988
0
}
1989
1990
/****************************************************************************
1991
 * WARNING: This code is run after a crash. Avoid walking data structures,
1992
 *          doing memory allocation, or using libc etc. as much as possible
1993
 ****************************************************************************/
1994
0
void Runtime::crashCallback(int fd) {
1995
0
  llvh::raw_fd_ostream jsonStream(fd, false);
1996
0
  JSONEmitter json(jsonStream);
1997
1998
  // Temporary buffer for pointers converted to strings. 20 bytes is enough,
1999
  // since an 8 byte pointer is 16 characters, plus the "0x" and the null
2000
  // terminator.
2001
0
  char hexBuf[20];
2002
0
  auto writeHex = [&hexBuf](void *ptr) {
2003
0
    unsigned len = snprintf(hexBuf, sizeof(hexBuf), "%p", ptr);
2004
0
    assert(len < sizeof(hexBuf) && "Need more chars than expected");
2005
0
    return llvh::StringRef{hexBuf, len};
2006
0
  };
2007
2008
0
  json.openDict();
2009
0
  json.emitKeyValue("type", "runtime");
2010
0
  json.emitKeyValue("address", writeHex(this));
2011
0
  json.emitKeyValue(
2012
0
      "registerStackAllocation", writeHex(registerStackAllocation_.data()));
2013
0
  json.emitKeyValue("registerStackStart", writeHex(registerStackStart_));
2014
0
  json.emitKeyValue("registerStackPointer", writeHex(stackPointer_));
2015
0
  json.emitKeyValue("registerStackEnd", writeHex(registerStackEnd_));
2016
0
  json.emitKey("callstack");
2017
0
  crashWriteCallStack(json);
2018
0
  json.closeDict();
2019
0
}
2020
2021
/****************************************************************************
2022
 * WARNING: This code is run after a crash. Avoid walking data structures,
2023
 *          doing memory allocation, or using libc etc. as much as possible
2024
 ****************************************************************************/
2025
0
void Runtime::crashWriteCallStack(JSONEmitter &json) {
2026
0
  json.openArray();
2027
0
  for (auto frame : getStackFrames()) {
2028
0
    json.openDict();
2029
0
    json.emitKeyValue(
2030
0
        "StackFrameRegOffs", (uint32_t)(frame.ptr() - registerStackStart_));
2031
0
    auto codeBlock = frame.getSavedCodeBlock();
2032
0
    if (codeBlock) {
2033
0
      json.emitKeyValue("FunctionID", codeBlock->getFunctionID());
2034
0
      auto bytecodeOffs = codeBlock->getOffsetOf(frame.getSavedIP());
2035
0
      json.emitKeyValue("ByteCodeOffset", bytecodeOffs);
2036
0
      auto blockSourceCode = codeBlock->getDebugSourceLocationsOffset();
2037
0
      const RuntimeModule *runtimeModule = codeBlock->getRuntimeModule();
2038
0
      if (blockSourceCode.hasValue()) {
2039
0
        auto debugInfo = runtimeModule->getBytecode()->getDebugInfo();
2040
0
        auto sourceLocation = debugInfo->getLocationForAddress(
2041
0
            blockSourceCode.getValue(), bytecodeOffs);
2042
0
        if (sourceLocation) {
2043
0
          auto file = debugInfo->getFilenameByID(sourceLocation->filenameId);
2044
0
          char buf[256];
2045
0
          unsigned len = snprintf(
2046
0
              buf,
2047
0
              sizeof(buf),
2048
0
              "%s:%d:%d",
2049
0
              file.c_str(),
2050
0
              sourceLocation->line,
2051
0
              sourceLocation->column);
2052
          // The length is either the return value of snprintf, or the buffer
2053
          // size without the null terminator, whichever is smaller.
2054
0
          llvh::StringRef str{buf, std::min<size_t>(len, sizeof(buf) - 1)};
2055
0
          json.emitKeyValue("SourceLocation", str);
2056
0
        }
2057
0
      }
2058
0
      uint32_t segmentID = runtimeModule->getBytecode()->getSegmentID();
2059
0
      llvh::StringRef sourceURL = runtimeModule->getSourceURL();
2060
0
      json.emitKeyValue("SegmentID", segmentID);
2061
0
      json.emitKeyValue("SourceURL", sourceURL);
2062
0
    } else {
2063
0
      json.emitKeyValue("NativeCode", true);
2064
0
    }
2065
0
    json.closeDict(); // frame
2066
0
  }
2067
0
  json.closeArray(); // frames
2068
0
}
2069
2070
0
std::string Runtime::getCallStackNoAlloc(const Inst *ip) {
2071
0
  NoAllocScope noAlloc(*this);
2072
0
  std::string res;
2073
  // Note that the order of iteration is youngest (leaf) frame to oldest.
2074
0
  for (auto frame : getStackFrames()) {
2075
0
    auto codeBlock = frame->getCalleeCodeBlock(*this);
2076
0
    if (codeBlock) {
2077
0
      res += codeBlock->getNameString(*this);
2078
      // Default to the function entrypoint, this
2079
      // ensures source location is provided for leaf frame even
2080
      // if ip is not available.
2081
0
      const uint32_t bytecodeOffs =
2082
0
          ip != nullptr ? codeBlock->getOffsetOf(ip) : 0;
2083
0
      auto blockSourceCode = codeBlock->getDebugSourceLocationsOffset();
2084
0
      if (blockSourceCode.hasValue()) {
2085
0
        auto debugInfo =
2086
0
            codeBlock->getRuntimeModule()->getBytecode()->getDebugInfo();
2087
0
        auto sourceLocation = debugInfo->getLocationForAddress(
2088
0
            blockSourceCode.getValue(), bytecodeOffs);
2089
0
        if (sourceLocation) {
2090
0
          auto file = debugInfo->getFilenameByID(sourceLocation->filenameId);
2091
0
          res += ": " + file + ":" + std::to_string(sourceLocation->line) +
2092
0
              ":" + std::to_string(sourceLocation->column);
2093
0
        }
2094
0
      }
2095
0
      res += "\n";
2096
0
    } else {
2097
0
      res += "<Native code>\n";
2098
0
    }
2099
    // Get the ip of the caller frame -- which will then be correct for the
2100
    // next iteration.
2101
0
    ip = frame.getSavedIP();
2102
0
  }
2103
0
  return res;
2104
0
}
2105
2106
68
void Runtime::onGCEvent(GCEventKind kind, const std::string &extraInfo) {
2107
68
#if HERMESVM_SAMPLING_PROFILER_AVAILABLE
2108
68
  if (samplingProfiler) {
2109
0
    switch (kind) {
2110
0
      case GCEventKind::CollectionStart:
2111
0
        samplingProfiler->suspend(
2112
0
            SamplingProfiler::SuspendFrameInfo::Kind::GC, extraInfo);
2113
0
        break;
2114
0
      case GCEventKind::CollectionEnd:
2115
0
        samplingProfiler->resume();
2116
0
        break;
2117
0
      default:
2118
0
        llvm_unreachable("unknown GCEventKind");
2119
0
    }
2120
0
  }
2121
68
#endif // HERMESVM_SAMPLING_PROFILER_AVAILABLE
2122
68
  if (gcEventCallback_) {
2123
0
    gcEventCallback_(kind, extraInfo.c_str());
2124
0
  }
2125
68
}
2126
2127
#ifdef HERMESVM_PROFILER_BB
2128
2129
llvh::Optional<std::tuple<std::string, uint32_t, uint32_t>>
2130
Runtime::getIPSourceLocation(const CodeBlock *codeBlock, const Inst *ip) {
2131
  auto bytecodeOffs = codeBlock->getOffsetOf(ip);
2132
  auto blockSourceCode = codeBlock->getDebugSourceLocationsOffset();
2133
2134
  if (!blockSourceCode) {
2135
    return llvh::None;
2136
  }
2137
  auto debugInfo = codeBlock->getRuntimeModule()->getBytecode()->getDebugInfo();
2138
  auto sourceLocation = debugInfo->getLocationForAddress(
2139
      blockSourceCode.getValue(), bytecodeOffs);
2140
  if (!sourceLocation) {
2141
    return llvh::None;
2142
  }
2143
  auto filename = debugInfo->getFilenameByID(sourceLocation->filenameId);
2144
  auto line = sourceLocation->line;
2145
  auto col = sourceLocation->column;
2146
  return std::make_tuple(filename, line, col);
2147
}
2148
2149
void Runtime::preventHCGC(HiddenClass *hc) {
2150
  auto &classIdToIdxMap = inlineCacheProfiler_.getClassIdtoIndexMap();
2151
  auto &hcIdx = inlineCacheProfiler_.getHiddenClassArrayIndex();
2152
  auto ret = classIdToIdxMap.insert(
2153
      std::pair<ClassId, uint32_t>(getHeap().getObjectID(hc), hcIdx));
2154
  if (ret.second) {
2155
    auto *hiddenClassArray = inlineCacheProfiler_.getHiddenClassArray();
2156
    JSArray::setElementAt(
2157
        makeHandle(hiddenClassArray), *this, hcIdx++, makeHandle(hc));
2158
  }
2159
}
2160
2161
void Runtime::recordHiddenClass(
2162
    CodeBlock *codeBlock,
2163
    const Inst *cacheMissInst,
2164
    SymbolID symbolID,
2165
    HiddenClass *objectHiddenClass,
2166
    HiddenClass *cachedHiddenClass) {
2167
  auto offset = codeBlock->getOffsetOf(cacheMissInst);
2168
2169
  // inline caching hit
2170
  if (objectHiddenClass == cachedHiddenClass) {
2171
    inlineCacheProfiler_.insertICHit(codeBlock, offset);
2172
    return;
2173
  }
2174
2175
  // inline caching miss
2176
  assert(objectHiddenClass != nullptr && "object hidden class should exist");
2177
  // prevent object hidden class from being GC-ed
2178
  preventHCGC(objectHiddenClass);
2179
  ClassId objectHiddenClassId = getHeap().getObjectID(objectHiddenClass);
2180
  // prevent cached hidden class from being GC-ed
2181
  ClassId cachedHiddenClassId =
2182
      static_cast<ClassId>(GCBase::IDTracker::kInvalidNode);
2183
  if (cachedHiddenClass != nullptr) {
2184
    preventHCGC(cachedHiddenClass);
2185
    cachedHiddenClassId = getHeap().getObjectID(cachedHiddenClass);
2186
  }
2187
  // add the record to inline caching profiler
2188
  inlineCacheProfiler_.insertICMiss(
2189
      codeBlock, offset, symbolID, objectHiddenClassId, cachedHiddenClassId);
2190
}
2191
2192
void Runtime::getInlineCacheProfilerInfo(llvh::raw_ostream &ostream) {
2193
  inlineCacheProfiler_.dumpRankedInlineCachingMisses(*this, ostream);
2194
}
2195
2196
HiddenClass *Runtime::resolveHiddenClassId(ClassId classId) {
2197
  if (classId == static_cast<ClassId>(GCBase::IDTracker::kInvalidNode)) {
2198
    return nullptr;
2199
  }
2200
  auto &classIdToIdxMap = inlineCacheProfiler_.getClassIdtoIndexMap();
2201
  auto *hiddenClassArray = inlineCacheProfiler_.getHiddenClassArray();
2202
  auto hcHermesVal =
2203
      hiddenClassArray->at(*this, classIdToIdxMap[classId]).unboxToHV(*this);
2204
  return vmcast<HiddenClass>(hcHermesVal);
2205
}
2206
2207
#endif
2208
2209
0
ExecutionStatus Runtime::notifyTimeout() {
2210
  // TODO: allow a vector of callbacks.
2211
0
  return raiseTimeoutError();
2212
0
}
2213
2214
#ifdef HERMES_MEMORY_INSTRUMENTATION
2215
2216
std::pair<const CodeBlock *, const inst::Inst *>
2217
80
Runtime::getCurrentInterpreterLocation(const inst::Inst *ip) {
2218
80
  assert(ip && "IP being null implies we're not currently in the interpreter.");
2219
80
  auto callFrames = getStackFrames();
2220
80
  const CodeBlock *codeBlock = nullptr;
2221
160
  for (auto frameIt = callFrames.begin(); frameIt != callFrames.end();
2222
160
       ++frameIt) {
2223
160
    codeBlock = frameIt->getCalleeCodeBlock(*this);
2224
160
    if (codeBlock) {
2225
80
      break;
2226
80
    } else {
2227
80
      ip = frameIt->getSavedIP();
2228
80
    }
2229
160
  }
2230
80
  assert(codeBlock && "Could not find CodeBlock.");
2231
80
  return {codeBlock, ip};
2232
80
}
2233
2234
StackTracesTreeNode *Runtime::getCurrentStackTracesTreeNode(
2235
0
    const inst::Inst *ip) {
2236
0
  assert(stackTracesTree_ && "Runtime not configured to track alloc stacks");
2237
0
  assert(
2238
0
      (getHeap().getAllocationLocationTracker().isEnabled() ||
2239
0
       getHeap().getSamplingAllocationTracker().isEnabled()) &&
2240
0
      "AllocationLocationTracker not enabled");
2241
0
  if (!ip) {
2242
0
    return nullptr;
2243
0
  }
2244
0
  const CodeBlock *codeBlock;
2245
0
  std::tie(codeBlock, ip) = getCurrentInterpreterLocation(ip);
2246
0
  return stackTracesTree_->getStackTrace(*this, codeBlock, ip);
2247
0
}
2248
2249
void Runtime::enableAllocationLocationTracker(
2250
    std::function<void(
2251
        uint64_t,
2252
        std::chrono::microseconds,
2253
        std::vector<GCBase::AllocationLocationTracker::HeapStatsUpdate>)>
2254
0
        fragmentCallback) {
2255
0
  if (!stackTracesTree_) {
2256
0
    stackTracesTree_ = std::make_unique<StackTracesTree>();
2257
0
  }
2258
0
  stackTracesTree_->syncWithRuntimeStack(*this);
2259
0
  getHeap().enableHeapProfiler(std::move(fragmentCallback));
2260
0
}
2261
2262
0
void Runtime::disableAllocationLocationTracker(bool clearExistingTree) {
2263
0
  getHeap().disableHeapProfiler();
2264
0
  if (clearExistingTree) {
2265
0
    stackTracesTree_.reset();
2266
0
  }
2267
0
}
2268
2269
void Runtime::enableSamplingHeapProfiler(
2270
    size_t samplingInterval,
2271
0
    int64_t seed) {
2272
0
  if (!stackTracesTree_) {
2273
0
    stackTracesTree_ = std::make_unique<StackTracesTree>();
2274
0
  }
2275
0
  stackTracesTree_->syncWithRuntimeStack(*this);
2276
0
  getHeap().enableSamplingHeapProfiler(samplingInterval, seed);
2277
0
}
2278
2279
0
void Runtime::disableSamplingHeapProfiler(llvh::raw_ostream &os) {
2280
0
  getHeap().disableSamplingHeapProfiler(os);
2281
0
  stackTracesTree_.reset();
2282
0
}
2283
2284
0
void Runtime::popCallStackImpl() {
2285
0
  assert(stackTracesTree_ && "Runtime not configured to track alloc stacks");
2286
0
  stackTracesTree_->popCallStack();
2287
0
}
2288
2289
void Runtime::pushCallStackImpl(
2290
    const CodeBlock *codeBlock,
2291
0
    const inst::Inst *ip) {
2292
0
  assert(stackTracesTree_ && "Runtime not configured to track alloc stacks");
2293
0
  stackTracesTree_->pushCallStack(*this, codeBlock, ip);
2294
0
}
2295
2296
#endif // HERMES_MEMORY_INSTRUMENTATION
2297
2298
0
void ScopedNativeDepthReducer::staticAsserts() {
2299
0
#ifdef HERMES_CHECK_NATIVE_STACK
2300
0
  static_assert(
2301
0
      kReducedNativeStackGap < kMinSupportedNativeStackGap,
2302
0
      "kMinSupportedNativeStackGap too low, must be reduced in the reducer");
2303
0
#endif
2304
0
}
2305
2306
} // namespace vm
2307
} // namespace hermes
2308
2309
#undef DEBUG_TYPE