Coverage Report

Created: 2025-12-11 06:40

/src/hermes/lib/VM/StackTracesTree.cpp
Every executable line in this file reports an execution count of 0; none of StackTracesTree.cpp was executed in this run. The file's source follows.
/*
 * Copyright (c) Meta Platforms, Inc. and affiliates.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 */

#include "hermes/VM/StackTracesTree-NoRuntime.h"

#ifdef HERMES_MEMORY_INSTRUMENTATION

#include "hermes/VM/Callable.h"
#include "hermes/VM/StackFrame-inline.h"
#include "hermes/VM/StackTracesTree.h"
#include "hermes/VM/StringPrimitive.h"
#include "hermes/VM/StringView.h"
#pragma GCC diagnostic push

#ifdef HERMES_COMPILER_SUPPORTS_WSHORTEN_64_TO_32
#pragma GCC diagnostic ignored "-Wshorten-64-to-32"
#endif
namespace hermes {
namespace vm {

StackTracesTreeNode *StackTracesTreeNode::findChild(
    const CodeBlock *codeBlock,
    uint32_t bytecodeOffset) const {
  auto matchingCodeBlockChildren = codeBlockToChildMap_.find(codeBlock);
  if (matchingCodeBlockChildren != codeBlockToChildMap_.end()) {
    auto matchingOffset =
        matchingCodeBlockChildren->getSecond().find(bytecodeOffset);
    if (matchingOffset != matchingCodeBlockChildren->getSecond().end()) {
      return children_[matchingOffset->getSecond()];
    }
  }
  return nullptr;
}

OptValue<uint32_t> StackTracesTreeNode::findChildIndex(
    const SourceLoc &sourceLoc) const {
  auto matchingChild = sourceLocToChildMap_.find(sourceLoc);
  if (matchingChild != sourceLocToChildMap_.end()) {
    return matchingChild->getSecond();
  }
  return llvh::None;
}

StackTracesTreeNode *StackTracesTreeNode::findChild(
    const SourceLoc &sourceLoc) const {
  auto optIndex = findChildIndex(sourceLoc);
  if (!optIndex.hasValue())
    return nullptr;
  return children_[*optIndex];
}

void StackTracesTreeNode::addChild(
    StackTracesTreeNode *child,
    const CodeBlock *codeBlock,
    uint32_t bytecodeOffset,
    SourceLoc sourceLoc) {
  uint32_t childIndex = children_.size();
  children_.push_back(child);
  bool inserted =
      sourceLocToChildMap_.try_emplace(sourceLoc, childIndex).second;
  (void)inserted;
  assert(inserted && "Tried to add a node for the same sourceLoc twice.");
  addMapping(codeBlock, bytecodeOffset, childIndex);
}

void StackTracesTreeNode::addMapping(
    const CodeBlock *codeBlock,
    uint32_t bytecodeOffset,
    uint32_t childIndex) {
  auto matchingCodeBlockChildren = codeBlockToChildMap_.find(codeBlock);
  if (matchingCodeBlockChildren == codeBlockToChildMap_.end()) {
    ChildBytecodeMap newBytecodeMapping;
    newBytecodeMapping.try_emplace(bytecodeOffset, childIndex);
    codeBlockToChildMap_.try_emplace(
        static_cast<const void *>(codeBlock), std::move(newBytecodeMapping));
  } else {
    auto &bytecodeMapping = matchingCodeBlockChildren->getSecond();
    assert(
        bytecodeMapping.find(bytecodeOffset) == bytecodeMapping.end() &&
        "Tried to add a node for the same codeLoc twice");
    bytecodeMapping.try_emplace(bytecodeOffset, childIndex);
  }
}
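StackTracesTreeNode keeps a two-level index over its children: codeBlockToChildMap_ maps a code block pointer to a per-block map from bytecode offset to an index into children_, while sourceLocToChildMap_ maps a SourceLoc to the same indices. A minimal standalone sketch of that shape, using plain standard-library containers and made-up stand-in names (FakeCodeBlock, ChildLookup) rather than the real Hermes types:

#include <cassert>
#include <cstdint>
#include <unordered_map>
#include <vector>

struct FakeCodeBlock {}; // stand-in for hermes::vm::CodeBlock

struct ChildLookup {
  // Mirrors children_: child payloads addressed by index.
  std::vector<int> children;
  // Mirrors codeBlockToChildMap_: code block -> (bytecode offset -> child index).
  std::unordered_map<const void *, std::unordered_map<uint32_t, uint32_t>>
      byCodeLoc;

  void addMapping(const FakeCodeBlock *cb, uint32_t offset, uint32_t idx) {
    auto &offsets = byCodeLoc[static_cast<const void *>(cb)];
    assert(offsets.count(offset) == 0 && "same codeLoc added twice");
    offsets.emplace(offset, idx);
  }

  const int *findChild(const FakeCodeBlock *cb, uint32_t offset) const {
    auto it = byCodeLoc.find(static_cast<const void *>(cb));
    if (it == byCodeLoc.end())
      return nullptr;
    auto offsetIt = it->second.find(offset);
    return offsetIt == it->second.end() ? nullptr : &children[offsetIt->second];
  }
};

int main() {
  FakeCodeBlock cb;
  ChildLookup lookup;
  lookup.children.push_back(42);                 // child at index 0
  lookup.addMapping(&cb, /*offset*/ 7, /*idx*/ 0);
  assert(lookup.findChild(&cb, 7) && *lookup.findChild(&cb, 7) == 42);
  assert(lookup.findChild(&cb, 8) == nullptr);   // unknown offset -> no child
  return 0;
}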

StackTracesTree::StackTracesTree()
    : strings_(std::make_shared<StringSetVector>()),
      rootFunctionID_(strings_->insert("(root)")),
      rootScriptNameID_(strings_->insert("")),
      nativeFunctionID_(strings_->insert("(native)")),
      anonymousFunctionID_(strings_->insert("(anonymous)")),
      head_(root_.get()) {}

void StackTracesTree::syncWithRuntimeStack(Runtime &runtime) {
  head_ = root_.get();

  const StackFramePtr framesEnd = *runtime.getStackFrames().end();
  std::vector<std::pair<CodeBlock *, const Inst *>> stack;

  // Walk the current stack, and call pushCallStack for each JS frame (not
  // native frames). The current frame is not included, because any allocs after
  // this point will call pushCallStack which will get the most recent IP. Each
  // stack frame tracks information about the caller.
  for (StackFramePtr cf : runtime.getStackFrames()) {
    CodeBlock *savedCodeBlock = cf.getSavedCodeBlock();
    const Inst *savedIP = cf.getSavedIP();
    // Go up one frame and get the callee code block but use the current
    // frame's saved IP. This also allows us to account for bound functions,
    // which have savedCodeBlock == nullptr in order to allow proper returns in
    // the interpreter.
    StackFramePtr prev = cf.getPreviousFrame();
    if (prev != framesEnd) {
      if (CodeBlock *parentCB = prev.getCalleeCodeBlock(runtime)) {
        assert(
            (!savedCodeBlock || savedCodeBlock == parentCB) &&
            "If savedCodeBlock is non-null, it should match the parent's "
            "callee code block");
        savedCodeBlock = parentCB;
      }
    } else {
      // The last frame is the entry into the global function, use the callee
      // code block instead of the caller.
      // TODO: This leaves an extra global call frame that doesn't make any
      // sense laying around. But that matches the behavior of enabling from the
      // beginning. When a fix for the non-synced version is found, remove this
      // branch as well.
      savedCodeBlock = cf.getCalleeCodeBlock(runtime);
      savedIP = savedCodeBlock->getOffsetPtr(0);
    }
    stack.emplace_back(savedCodeBlock, savedIP);
  }

  // Iterate over the stack in reverse to push calls.
  for (auto it = stack.rbegin(); it != stack.rend(); ++it) {
    // Check that both the code block and ip are non-null, which means it was a
    // JS frame, and not a native frame.
    if (it->first && it->second) {
      pushCallStack(runtime, it->first, it->second);
    }
  }
}
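The stack walk above collects (code block, IP) pairs innermost frame first, but the tree has to be grown caller-before-callee, hence the reverse iteration when replaying the pushes. A small illustration of that ordering with made-up frame data (the real code stores CodeBlock and Inst pointers, not strings):

#include <cstdio>
#include <utility>
#include <vector>

int main() {
  // Frames as a stack walk reports them: innermost (most recent) call first.
  std::vector<std::pair<const char *, int>> stack = {
      {"inner", 7}, {"middle", 3}, {"outer", 0}};

  // Replaying in reverse pushes callers before callees, which is the order a
  // stack-traces tree needs: root -> outer -> middle -> inner.
  for (auto it = stack.rbegin(); it != stack.rend(); ++it) {
    std::printf("pushCallStack(%s, ip=%d)\n", it->first, it->second);
  }
  return 0;
}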

StackTracesTreeNode *StackTracesTree::getRootNode() const {
  return root_.get();
}

void StackTracesTree::popCallStack() {
  if (head_->duplicatePushDepth_) {
    head_->duplicatePushDepth_--;
    return;
  }
  head_ = head_->parent;
  assert(head_ && "Pop'ed too far up tree");
}

StackTracesTreeNode::SourceLoc StackTracesTree::computeSourceLoc(
    Runtime &runtime,
    const CodeBlock *codeBlock,
    uint32_t bytecodeOffset) {
  auto location = codeBlock->getSourceLocation(bytecodeOffset);
  // Get filename. If we have a source location, use the filename from
  // that location; otherwise use the RuntimeModule's sourceURL; otherwise
  // report unknown.
  RuntimeModule *runtimeModule = codeBlock->getRuntimeModule();
  std::string scriptName;
  auto scriptID = runtimeModule->getScriptID();
  int32_t lineNo, columnNo;
  if (location) {
    scriptName = runtimeModule->getBytecode()->getDebugInfo()->getFilenameByID(
        location->filenameId);
    lineNo = location->line;
    columnNo = location->column;
  } else {
    auto sourceURL = runtimeModule->getSourceURL();
    scriptName = sourceURL.empty() ? "unknown" : sourceURL;
    // Lines and columns in SourceLoc are 1-based.
    lineNo = runtimeModule->getBytecode()->getSegmentID() + 1;
    // Note the +1 for columnNo! This is *unlike* Error.prototype.stack (etc)
    // where, for legacy reasons, we print columns as 1-based but virtual
    // offsets as 0-based. Here we prefer to expose a simpler API at the cost
    // of consistency with other places we surface this information.
    columnNo = codeBlock->getVirtualOffset() + bytecodeOffset + 1;
  }
  return {strings_->insert(scriptName), scriptID, lineNo, columnNo};
}
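When no debug location is available, the fallback above packs the bytecode position into the 1-based line/column fields: the line becomes the segment ID plus one, and the column becomes the code block's virtual offset plus the bytecode offset plus one. A minimal sketch of that arithmetic with made-up values (the comments name the getters used above):

#include <cstdint>
#include <cstdio>

int main() {
  // Hypothetical inputs standing in for the values read in the else-branch.
  uint32_t segmentID = 0;       // getSegmentID()
  uint32_t virtualOffset = 120; // codeBlock->getVirtualOffset()
  uint32_t bytecodeOffset = 14; // offset of the current instruction

  int32_t lineNo = segmentID + 1;                        // 1-based line
  int32_t columnNo = virtualOffset + bytecodeOffset + 1; // 1-based column
  std::printf("line %d, column %d\n", lineNo, columnNo); // prints "line 1, column 135"
  return 0;
}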

void StackTracesTree::pushCallStack(
    Runtime &runtime,
    const CodeBlock *codeBlock,
    const Inst *ip) {
  assert(codeBlock && ip && "Code block and IP must be known");

  /// This collapses together multiple calls apparently from the same codeBlock
  /// + IP into one node. This can happen with bound functions, or anything
  /// else where C++ code makes calls into the interpreter without executing
  /// further bytecode. This depth will then be depleted in calls to
  /// \c popCallStack() .
  if (head_->codeBlock_ == codeBlock && head_->ip_ == ip) {
    head_->duplicatePushDepth_++;
    return;
  }

  auto bytecodeOffset = codeBlock->getOffsetOf(ip);
  // Quick-path: Node already exists in tree, and we have a cached mapping for
  // this codeLoc.
  if (auto existingNode = head_->findChild(codeBlock, bytecodeOffset)) {
    head_ = existingNode;
    return;
  }

  // Node exists in tree but doesn't have a pre-computed mapping for this
  // codeBlock + ip. In this case we need to compute the SourceLoc and add
  // a mapping, but can return before we create a new tree node.
  auto sourceLoc = computeSourceLoc(runtime, codeBlock, bytecodeOffset);
  if (OptValue<uint32_t> existingNodeIndex = head_->findChildIndex(sourceLoc)) {
    auto existingNode = head_->children_[*existingNodeIndex];
    assert(existingNode->parent && "Stack trace tree node has no parent");
    existingNode->parent->addMapping(
        codeBlock, bytecodeOffset, *existingNodeIndex);
    head_ = existingNode;
    return;
  }

  // Full-path: Create a new node.

  // TODO: Getting the name in this way works in most cases, but not for things
  // like functions which are dynamically renamed using accessors. E.g.:
  //
  //   function foo() {
  //     return new Object();
  //   }
  //   Object.defineProperty(foo, 'name', {writable:true, value: 'bar'});
  //
  auto nameStr = codeBlock->getNameString(runtime.getHeap().getCallbacks());
  auto nameID =
      nameStr.empty() ? anonymousFunctionID_ : strings_->insert(nameStr);

  auto newNode = std::make_unique<StackTracesTreeNode>(
      nextNodeID_++, head_, sourceLoc, codeBlock, ip, nameID);
  auto newNodePtr = newNode.get();
  nodes_.emplace_back(std::move(newNode));
  head_->addChild(newNodePtr, codeBlock, bytecodeOffset, sourceLoc);
  head_ = newNodePtr;
}
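pushCallStack and popCallStack are strictly paired: when the codeBlock/IP being pushed is already at head_, the push only bumps duplicatePushDepth_, and the matching pop drains that counter before head_ ever moves back to its parent. A toy model of that bookkeeping, using a hypothetical Node type rather than the real StackTracesTreeNode:

#include <cassert>

struct Node {
  Node *parent = nullptr;
  int duplicatePushDepth = 0;
};

// Mirrors the shape of StackTracesTree::popCallStack(): duplicate pushes are
// drained before the head actually moves up the tree.
void pop(Node *&head) {
  if (head->duplicatePushDepth) {
    --head->duplicatePushDepth;
    return;
  }
  head = head->parent;
  assert(head && "popped past the root");
}

int main() {
  Node root;
  Node child;
  child.parent = &root;

  Node *head = &child;        // as if one real push landed on `child`
  ++head->duplicatePushDepth; // as if the same codeBlock/IP was pushed again

  pop(head);                  // drains the duplicate; head stays on `child`
  assert(head == &child && child.duplicatePushDepth == 0);
  pop(head);                  // real pop; back to the root
  assert(head == &root);
  return 0;
}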

StackTracesTreeNode *StackTracesTree::getStackTrace(
    Runtime &runtime,
    const CodeBlock *codeBlock,
    const Inst *ip) {
  if (!codeBlock || !ip) {
    return getRootNode();
  }
  pushCallStack(runtime, codeBlock, ip);
  auto res = head_;
  popCallStack();
  return res;
}

} // namespace vm
} // namespace hermes

#endif // HERMES_MEMORY_INSTRUMENTATION