Line data Source code
1 : // Copyright 2017 the V8 project authors. All rights reserved.
2 : // Use of this source code is governed by a BSD-style license that can be
3 : // found in the LICENSE file.
4 :
5 : #include "src/compiler/escape-analysis.h"
6 :
7 : #include "src/bootstrapper.h"
8 : #include "src/compiler/linkage.h"
9 : #include "src/compiler/node-matchers.h"
10 : #include "src/compiler/operator-properties.h"
11 : #include "src/compiler/simplified-operator.h"
12 : #include "src/handles-inl.h"
13 : #include "src/objects/map-inl.h"
14 :
15 : #ifdef DEBUG
16 : #define TRACE(...) \
17 : do { \
18 : if (FLAG_trace_turbo_escape) PrintF(__VA_ARGS__); \
19 : } while (false)
20 : #else
21 : #define TRACE(...)
22 : #endif
23 :
24 : namespace v8 {
25 : namespace internal {
26 : namespace compiler {
27 :
28 : template <class T>
29 : class Sidetable {
30 : public:
31 : explicit Sidetable(Zone* zone) : map_(zone) {}
32 125273942 : T& operator[](const Node* node) {
33 : NodeId id = node->id();
34 375821830 : if (id >= map_.size()) {
35 2433418 : map_.resize(id + 1);
36 : }
37 125273946 : return map_[id];
38 : }
39 :
40 : private:
41 : ZoneVector<T> map_;
42 : };
43 :
44 : template <class T>
45 : class SparseSidetable {
46 : public:
47 : explicit SparseSidetable(Zone* zone, T def_value = T())
48 913389 : : def_value_(std::move(def_value)), map_(zone) {}
49 73897907 : void Set(const Node* node, T value) {
50 135990092 : auto iter = map_.find(node->id());
51 67995006 : if (iter != map_.end()) {
52 2269751 : iter->second = std::move(value);
53 65725271 : } else if (value != def_value_) {
54 6126028 : map_.insert(iter, std::make_pair(node->id(), std::move(value)));
55 : }
56 67995021 : }
57 182659032 : const T& Get(const Node* node) const {
58 365301946 : auto iter = map_.find(node->id());
59 182642914 : return iter != map_.end() ? iter->second : def_value_;
60 : }
61 :
62 : private:
63 : T def_value_;
64 : ZoneUnorderedMap<NodeId, T> map_;
65 : };
66 :
67 : // Keeps track of the changes to the current node during reduction.
68 : // Encapsulates the current state of the IR graph and the reducer state like
69 : // side-tables. All access to the IR and the reducer state should happen through
70 : // a ReduceScope to ensure that changes and dependencies are tracked and all
71 : // necessary node revisitations happen.
72 : class ReduceScope {
73 : public:
74 : typedef EffectGraphReducer::Reduction Reduction;
75 : explicit ReduceScope(Node* node, Reduction* reduction)
76 33998126 : : current_node_(node), reduction_(reduction) {}
77 :
78 : protected:
79 : Node* current_node() const { return current_node_; }
80 : Reduction* reduction() { return reduction_; }
81 :
82 : private:
83 : Node* current_node_;
84 : Reduction* reduction_;
85 : };
86 :
87 : // A VariableTracker object keeps track of the values of variables at all points
88 : // of the effect chain and introduces new phi nodes when necessary.
89 : // Initially and by default, variables are mapped to nullptr, which means that
90 : // the variable allocation point does not dominate the current point on the
91 : // effect chain. We map variables that represent uninitialized memory to the
92 : // Dead node to ensure it is not read.
93 : // Unmapped values are impossible by construction: a PersistentMap cannot
94 : // distinguish a missing element from one mapped to the default element.
95 : class VariableTracker {
96 : private:
97 : // The state of all variables at one point in the effect chain.
98 : class State {
99 : typedef PersistentMap<Variable, Node*> Map;
100 :
101 : public:
102 : explicit State(Zone* zone) : map_(zone) {}
103 15139235 : Node* Get(Variable var) const {
104 15139235 : CHECK(var != Variable::Invalid());
105 15139235 : return map_.Get(var);
106 : }
107 6931178 : void Set(Variable var, Node* node) {
108 6931178 : CHECK(var != Variable::Invalid());
109 6931178 : return map_.Set(var, node);
110 : }
111 388249 : Map::iterator begin() const { return map_.begin(); }
112 388254 : Map::iterator end() const { return map_.end(); }
113 65975663 : bool operator!=(const State& other) const { return map_ != other.map_; }
114 :
115 : private:
116 : Map map_;
117 : };
118 :
119 : public:
120 : VariableTracker(JSGraph* graph, EffectGraphReducer* reducer, Zone* zone);
121 752378 : Variable NewVariable() { return Variable(next_variable_++); }
122 729967 : Node* Get(Variable var, Node* effect) { return table_.Get(effect).Get(var); }
123 : Zone* zone() { return zone_; }
124 :
125 : class Scope : public ReduceScope {
126 : public:
127 : Scope(VariableTracker* tracker, Node* node, Reduction* reduction);
128 : ~Scope();
129 27029 : Maybe<Node*> Get(Variable var) {
130 27029 : Node* node = current_state_.Get(var);
131 46943 : if (node && node->opcode() == IrOpcode::kDead) {
132 : // TODO(tebbi): We use {Dead} as a sentinel for uninitialized memory.
133 : // Reading uninitialized memory can only happen in unreachable code. In
134 : // this case, we have to mark the object as escaping to avoid dead nodes
135 : // in the graph. This is a workaround that should be removed once we can
136 : // handle dead nodes everywhere.
137 : return Nothing<Node*>();
138 : }
139 : return Just(node);
140 : }
141 2601735 : void Set(Variable var, Node* node) { current_state_.Set(var, node); }
142 :
143 : private:
144 : VariableTracker* states_;
145 : State current_state_;
146 : };
147 :
148 : private:
149 : State MergeInputs(Node* effect_phi);
150 : Zone* zone_;
151 : JSGraph* graph_;
152 : SparseSidetable<State> table_;
153 : ZoneVector<Node*> buffer_;
154 : EffectGraphReducer* reducer_;
155 : int next_variable_ = 0;
156 :
157 : DISALLOW_COPY_AND_ASSIGN(VariableTracker);
158 : };
159 :
160 : // Encapsulates the current state of the escape analysis reducer to preserve
161 : // invariants regarding changes and re-visitation.
162 : class EscapeAnalysisTracker : public ZoneObject {
163 : public:
164 456695 : EscapeAnalysisTracker(JSGraph* jsgraph, EffectGraphReducer* reducer,
165 : Zone* zone)
166 : : virtual_objects_(zone),
167 : replacements_(zone),
168 : variable_states_(jsgraph, reducer, zone),
169 : jsgraph_(jsgraph),
170 456692 : zone_(zone) {}
171 :
172 : class Scope : public VariableTracker::Scope {
173 : public:
174 : Scope(EffectGraphReducer* reducer, EscapeAnalysisTracker* tracker,
175 : Node* node, Reduction* reduction)
176 : : VariableTracker::Scope(&tracker->variable_states_, node, reduction),
177 : tracker_(tracker),
178 33997764 : reducer_(reducer) {}
179 5292117 : const VirtualObject* GetVirtualObject(Node* node) {
180 5292117 : VirtualObject* vobject = tracker_->virtual_objects_.Get(node);
181 5292120 : if (vobject) vobject->AddDependency(current_node());
182 5292115 : return vobject;
183 : }
184 : // Create or retrieve a virtual object for the current node.
185 296613 : const VirtualObject* InitVirtualObject(int size) {
186 : DCHECK_EQ(IrOpcode::kAllocate, current_node()->opcode());
187 537178 : VirtualObject* vobject = tracker_->virtual_objects_.Get(current_node());
188 296613 : if (vobject) {
189 130039 : CHECK(vobject->size() == size);
190 : } else {
191 166574 : vobject = tracker_->NewVirtualObject(size);
192 : }
193 296613 : if (vobject) vobject->AddDependency(current_node());
194 296612 : vobject_ = vobject;
195 296612 : return vobject;
196 : }
197 :
198 : void SetVirtualObject(Node* object) {
199 340791 : vobject_ = tracker_->virtual_objects_.Get(object);
200 : }
201 :
202 22436229 : void SetEscaped(Node* node) {
203 22436229 : if (VirtualObject* object = tracker_->virtual_objects_.Get(node)) {
204 23785419 : if (object->HasEscaped()) return;
205 : TRACE("Setting %s#%d to escaped because of use by %s#%d\n",
206 : node->op()->mnemonic(), node->id(),
207 : current_node()->op()->mnemonic(), current_node()->id());
208 : object->SetEscaped();
209 83158 : object->RevisitDependants(reducer_);
210 : }
211 : }
212 : // The inputs of the current node have to be accessed through the scope to
213 : // ensure that they respect the node replacements.
214 21765508 : Node* ValueInput(int i) {
215 : return tracker_->ResolveReplacement(
216 43531053 : NodeProperties::GetValueInput(current_node(), i));
217 : }
218 3840215 : Node* ContextInput() {
219 : return tracker_->ResolveReplacement(
220 7680433 : NodeProperties::GetContextInput(current_node()));
221 : }
222 :
223 1015389 : void SetReplacement(Node* replacement) {
224 1015389 : replacement_ = replacement;
225 : vobject_ =
226 1015389 : replacement ? tracker_->virtual_objects_.Get(replacement) : nullptr;
227 : if (replacement) {
228 : TRACE("Set %s#%d as replacement.\n", replacement->op()->mnemonic(),
229 : replacement->id());
230 : } else {
231 : TRACE("Set nullptr as replacement.\n");
232 : }
233 1015380 : }
234 :
235 993917 : void MarkForDeletion() { SetReplacement(tracker_->jsgraph_->Dead()); }
236 :
237 67994831 : ~Scope() {
238 166959285 : if (replacement_ != tracker_->replacements_[current_node()] ||
239 64967068 : vobject_ != tracker_->virtual_objects_.Get(current_node())) {
240 : reduction()->set_value_changed();
241 : }
242 33997406 : tracker_->replacements_[current_node()] = replacement_;
243 67994786 : tracker_->virtual_objects_.Set(current_node(), vobject_);
244 33997379 : }
245 :
246 : private:
247 : EscapeAnalysisTracker* tracker_;
248 : EffectGraphReducer* reducer_;
249 : VirtualObject* vobject_ = nullptr;
250 : Node* replacement_ = nullptr;
251 : };
252 :
253 57286395 : Node* GetReplacementOf(Node* node) { return replacements_[node]; }
254 : Node* ResolveReplacement(Node* node) {
255 25605763 : if (Node* replacement = GetReplacementOf(node)) {
256 : return replacement;
257 : }
258 : return node;
259 : }
260 :
261 : private:
262 : friend class EscapeAnalysisResult;
263 : static const size_t kMaxTrackedObjects = 100;
264 :
265 166574 : VirtualObject* NewVirtualObject(int size) {
266 166574 : if (next_object_id_ >= kMaxTrackedObjects) return nullptr;
267 : return new (zone_)
268 221052 : VirtualObject(&variable_states_, next_object_id_++, size);
269 : }
270 :
271 : SparseSidetable<VirtualObject*> virtual_objects_;
272 : Sidetable<Node*> replacements_;
273 : VariableTracker variable_states_;
274 : VirtualObject::Id next_object_id_ = 0;
275 : JSGraph* const jsgraph_;
276 : Zone* const zone_;
277 :
278 : DISALLOW_COPY_AND_ASSIGN(EscapeAnalysisTracker);
279 : };
280 :
281 456697 : EffectGraphReducer::EffectGraphReducer(
282 : Graph* graph, std::function<void(Node*, Reduction*)> reduce, Zone* zone)
283 : : graph_(graph),
284 : state_(graph, kNumStates),
285 : revisit_(zone),
286 : stack_(zone),
287 913394 : reduce_(std::move(reduce)) {}
288 :
289 456693 : void EffectGraphReducer::ReduceFrom(Node* node) {
290 : // Perform DFS and eagerly trigger revisitation as soon as possible.
291 : // A stack element {node, i} indicates that input i of node should be visited
292 : // next.
293 : DCHECK(stack_.empty());
294 913725 : stack_.push({node, 0});
295 145734870 : while (!stack_.empty()) {
296 144821142 : Node* current = stack_.top().node;
297 : int& input_index = stack_.top().input_index;
298 289642284 : if (input_index < current->InputCount()) {
299 : Node* input = current->InputAt(input_index);
300 110823298 : input_index++;
301 110822748 : switch (state_.Get(input)) {
302 : case State::kVisited:
303 : // The input is already reduced.
304 : break;
305 : case State::kOnStack:
306 : // The input is on the DFS stack right now, so it will be revisited
307 : // later anyway.
308 : break;
309 : case State::kUnvisited:
310 : case State::kRevisit: {
311 : state_.Set(input, State::kOnStack);
312 62313458 : stack_.push({input, 0});
313 31156818 : break;
314 : }
315 : }
316 : } else {
317 : stack_.pop();
318 33997844 : Reduction reduction;
319 33997844 : reduce_(current, &reduction);
320 284373622 : for (Edge edge : current->use_edges()) {
321 : // Mark uses for revisitation.
322 : Node* use = edge.from();
323 108189263 : if (NodeProperties::IsEffectEdge(edge)) {
324 15598661 : if (reduction.effect_changed()) Revisit(use);
325 : } else {
326 92592727 : if (reduction.value_changed()) Revisit(use);
327 : }
328 : }
329 : state_.Set(current, State::kVisited);
330 : // Process the revisitation buffer immediately. This improves performance
331 : // of escape analysis. Using a stack for {revisit_} reverses the order in
332 : // which the revisitation happens. This also seems to improve performance.
333 36382720 : while (!revisit_.empty()) {
334 2384939 : Node* revisit = revisit_.top();
335 2384945 : if (state_.Get(revisit) == State::kRevisit) {
336 : state_.Set(revisit, State::kOnStack);
337 4770041 : stack_.push({revisit, 0});
338 : }
339 : revisit_.pop();
340 : }
341 : }
342 : }
343 456696 : }
344 :
345 10687900 : void EffectGraphReducer::Revisit(Node* node) {
346 21375798 : if (state_.Get(node) == State::kVisited) {
347 : TRACE(" Queueing for revisit: %s#%d\n", node->op()->mnemonic(),
348 : node->id());
349 : state_.Set(node, State::kRevisit);
350 : revisit_.push(node);
351 : }
352 10687875 : }
353 :
354 0 : VariableTracker::VariableTracker(JSGraph* graph, EffectGraphReducer* reducer,
355 : Zone* zone)
356 : : zone_(zone),
357 : graph_(graph),
358 : table_(zone, State(zone)),
359 : buffer_(zone),
360 913386 : reducer_(reducer) {}
361 :
362 67996252 : VariableTracker::Scope::Scope(VariableTracker* states, Node* node,
363 : Reduction* reduction)
364 : : ReduceScope(node, reduction),
365 : states_(states),
366 33998126 : current_state_(states->zone_) {
367 33998126 : switch (node->opcode()) {
368 : case IrOpcode::kEffectPhi:
369 388267 : current_state_ = states_->MergeInputs(node);
370 388261 : break;
371 : default:
372 33609859 : int effect_inputs = node->op()->EffectInputCount();
373 33609859 : if (effect_inputs == 1) {
374 : current_state_ =
375 14563346 : states_->table_.Get(NodeProperties::GetEffectInput(node, 0));
376 : } else {
377 : DCHECK_EQ(0, effect_inputs);
378 : }
379 : }
380 33997925 : }
381 :
382 33997252 : VariableTracker::Scope::~Scope() {
383 101992547 : if (!reduction()->effect_changed() &&
384 33997337 : states_->table_.Get(current_node()) != current_state_) {
385 : reduction()->set_effect_changed();
386 : }
387 33997605 : states_->table_.Set(current_node(), current_state_);
388 33997376 : }
389 :
390 388276 : VariableTracker::State VariableTracker::MergeInputs(Node* effect_phi) {
391 : // A variable that is mapped to [nullptr] was not assigned a value on every
392 : // execution path to the current effect phi. Relying on the invariant that
393 : // every variable is initialized (at least with a sentinel like the Dead
394 : // node), this means that the variable initialization does not dominate the
395 : // current point. So for loop effect phis, we can keep nullptr for a variable
396 : // as long as the first input of the loop has nullptr for this variable. For
397 : // non-loop effect phis, we can even keep it nullptr as long as any input has
398 : // nullptr.
399 : DCHECK_EQ(IrOpcode::kEffectPhi, effect_phi->opcode());
400 388276 : int arity = effect_phi->op()->EffectInputCount();
401 388276 : Node* control = NodeProperties::GetControlInput(effect_phi, 0);
402 : TRACE("control: %s#%d\n", control->op()->mnemonic(), control->id());
403 776530 : bool is_loop = control->opcode() == IrOpcode::kLoop;
404 1471806 : buffer_.reserve(arity + 1);
405 :
406 388260 : State first_input = table_.Get(NodeProperties::GetEffectInput(effect_phi, 0));
407 388249 : State result = first_input;
408 9078048 : for (std::pair<Variable, Node*> var_value : first_input) {
409 4344449 : if (Node* value = var_value.second) {
410 4344278 : Variable var = var_value.first;
411 : TRACE("var %i:\n", var.id_);
412 : buffer_.clear();
413 4344278 : buffer_.push_back(value);
414 : bool identical_inputs = true;
415 : int num_defined_inputs = 1;
416 : TRACE(" input 0: %s#%d\n", value->op()->mnemonic(), value->id());
417 14558296 : for (int i = 1; i < arity; ++i) {
418 : Node* next_value =
419 10223412 : table_.Get(NodeProperties::GetEffectInput(effect_phi, i)).Get(var);
420 10184233 : if (next_value != value) identical_inputs = false;
421 10184233 : if (next_value != nullptr) {
422 8983336 : num_defined_inputs++;
423 : TRACE(" input %i: %s#%d\n", i, next_value->op()->mnemonic(),
424 : next_value->id());
425 : } else {
426 : TRACE(" input %i: nullptr\n", i);
427 : }
428 10184233 : buffer_.push_back(next_value);
429 : }
430 :
431 6248881 : Node* old_value = table_.Get(effect_phi).Get(var);
432 : if (old_value) {
433 : TRACE(" old: %s#%d\n", old_value->op()->mnemonic(), old_value->id());
434 : } else {
435 : TRACE(" old: nullptr\n");
436 : }
437 : // Reuse a previously created phi node if possible.
438 6708520 : if (old_value && old_value->opcode() == IrOpcode::kPhi &&
439 460442 : NodeProperties::GetControlInput(old_value, 0) == control) {
440 : // Since a phi node can never dominate its control node,
441 : // [old_value] cannot originate from the inputs. Thus [old_value]
442 : // must have been created by a previous reduction of this [effect_phi].
443 1083541 : for (int i = 0; i < arity; ++i) {
444 : NodeProperties::ReplaceValueInput(
445 2521804 : old_value, buffer_[i] ? buffer_[i] : graph_->Dead(), i);
446 : // This change cannot affect the rest of the reducer, so there is no
447 : // need to trigger additional revisitations.
448 : }
449 170844 : result.Set(var, old_value);
450 : } else {
451 4163237 : if (num_defined_inputs == 1 && is_loop) {
452 : // For loop effect phis, the variable initialization dominates iff it
453 : // dominates the first input.
454 : DCHECK_EQ(2, arity);
455 : DCHECK_EQ(value, buffer_[0]);
456 326874 : result.Set(var, value);
457 3836363 : } else if (num_defined_inputs < arity) {
458 : // If the variable is undefined on some input of this non-loop effect
459 : // phi, then its initialization does not dominate this point.
460 234117 : result.Set(var, nullptr);
461 : } else {
462 : DCHECK_EQ(num_defined_inputs, arity);
463 : // We only create a phi if the values are different.
464 3602246 : if (identical_inputs) {
465 3424885 : result.Set(var, value);
466 : } else {
467 : TRACE("Creating new phi\n");
468 177361 : buffer_.push_back(control);
469 : Node* phi = graph_->graph()->NewNode(
470 : graph_->common()->Phi(MachineRepresentation::kTagged, arity),
471 532083 : arity + 1, &buffer_.front());
472 : // TODO(tebbi): Computing precise types here is tricky, because of
473 : // the necessary revisitations. If we really need this, we should
474 : // probably do it afterwards.
475 : NodeProperties::SetType(phi, Type::Any());
476 177361 : reducer_->AddRoot(phi);
477 177361 : result.Set(var, phi);
478 : }
479 : }
480 : }
481 : #ifdef DEBUG
482 : if (Node* result_node = result.Get(var)) {
483 : TRACE(" result: %s#%d\n", result_node->op()->mnemonic(),
484 : result_node->id());
485 : } else {
486 : TRACE(" result: nullptr\n");
487 : }
488 : #endif
489 : }
490 : }
491 388264 : return result;
492 : }
493 :
494 : namespace {
495 :
496 : int OffsetOfFieldAccess(const Operator* op) {
497 : DCHECK(op->opcode() == IrOpcode::kLoadField ||
498 : op->opcode() == IrOpcode::kStoreField);
499 966601 : FieldAccess access = FieldAccessOf(op);
500 : return access.offset;
501 : }
502 :
503 : int OffsetOfElementAt(ElementAccess const& access, int index) {
504 : DCHECK_GE(index, 0);
505 : DCHECK_GE(ElementSizeLog2Of(access.machine_type.representation()),
506 : kTaggedSizeLog2);
507 44399 : return access.header_size +
508 44531 : (index << ElementSizeLog2Of(access.machine_type.representation()));
509 : }
510 :
511 43898 : Maybe<int> OffsetOfElementsAccess(const Operator* op, Node* index_node) {
512 : DCHECK(op->opcode() == IrOpcode::kLoadElement ||
513 : op->opcode() == IrOpcode::kStoreElement);
514 43898 : Type index_type = NodeProperties::GetType(index_node);
515 43898 : if (!index_type.Is(Type::OrderedNumber())) return Nothing<int>();
516 43898 : double max = index_type.Max();
517 43898 : double min = index_type.Min();
518 43898 : int index = static_cast<int>(min);
519 43898 : if (index < 0 || index != min || index != max) return Nothing<int>();
520 42892 : return Just(OffsetOfElementAt(ElementAccessOf(op), index));
521 : }
522 :
523 98 : Node* LowerCompareMapsWithoutLoad(Node* checked_map,
524 : ZoneHandleSet<Map> const& checked_against,
525 98 : JSGraph* jsgraph) {
526 98 : Node* true_node = jsgraph->TrueConstant();
527 98 : Node* false_node = jsgraph->FalseConstant();
528 : Node* replacement = false_node;
529 196 : for (Handle<Map> map : checked_against) {
530 98 : Node* map_node = jsgraph->HeapConstant(map);
531 : // We cannot create a HeapConstant type here as we are off-thread.
532 : NodeProperties::SetType(map_node, Type::Internal());
533 : Node* comparison = jsgraph->graph()->NewNode(
534 98 : jsgraph->simplified()->ReferenceEqual(), checked_map, map_node);
535 : NodeProperties::SetType(comparison, Type::Boolean());
536 98 : if (replacement == false_node) {
537 : replacement = comparison;
538 : } else {
539 : replacement = jsgraph->graph()->NewNode(
540 : jsgraph->common()->Select(MachineRepresentation::kTaggedPointer),
541 0 : comparison, true_node, replacement);
542 : NodeProperties::SetType(replacement, Type::Boolean());
543 : }
544 : }
545 98 : return replacement;
546 : }
547 :
548 76231699 : void ReduceNode(const Operator* op, EscapeAnalysisTracker::Scope* current,
549 443 : JSGraph* jsgraph) {
550 33997778 : switch (op->opcode()) {
551 : case IrOpcode::kAllocate: {
552 296612 : NumberMatcher size(current->ValueInput(0));
553 296612 : if (!size.HasValue()) break;
554 296613 : int size_int = static_cast<int>(size.Value());
555 296613 : if (size_int != size.Value()) break;
556 296613 : if (const VirtualObject* vobject = current->InitVirtualObject(size_int)) {
557 : // Initialize with dead nodes as a sentinel for uninitialized memory.
558 2093279 : for (Variable field : *vobject) {
559 1613478 : current->Set(field, jsgraph->Dead());
560 : }
561 : }
562 : break;
563 : }
564 : case IrOpcode::kFinishRegion:
565 310100 : current->SetVirtualObject(current->ValueInput(0));
566 : break;
567 : case IrOpcode::kStoreField: {
568 3340811 : Node* object = current->ValueInput(0);
569 3340810 : Node* value = current->ValueInput(1);
570 5066281 : const VirtualObject* vobject = current->GetVirtualObject(object);
571 : Variable var;
572 11747887 : if (vobject && !vobject->HasEscaped() &&
573 5234801 : vobject->FieldAt(OffsetOfFieldAccess(op)).To(&var)) {
574 : current->Set(var, value);
575 947009 : current->MarkForDeletion();
576 : } else {
577 2393803 : current->SetEscaped(object);
578 2393803 : current->SetEscaped(value);
579 : }
580 : break;
581 : }
582 : case IrOpcode::kStoreElement: {
583 98224 : Node* object = current->ValueInput(0);
584 98224 : Node* index = current->ValueInput(1);
585 98224 : Node* value = current->ValueInput(2);
586 185584 : const VirtualObject* vobject = current->GetVirtualObject(object);
587 : int offset;
588 : Variable var;
589 283808 : if (vobject && !vobject->HasEscaped() &&
590 324236 : OffsetOfElementsAccess(op, index).To(&offset) &&
591 140810 : vobject->FieldAt(offset).To(&var)) {
592 : current->Set(var, value);
593 42586 : current->MarkForDeletion();
594 : } else {
595 55638 : current->SetEscaped(value);
596 55638 : current->SetEscaped(object);
597 : }
598 : break;
599 : }
600 : case IrOpcode::kLoadField: {
601 1337109 : Node* object = current->ValueInput(0);
602 1434069 : const VirtualObject* vobject = current->GetVirtualObject(object);
603 : Variable var;
604 : Node* value;
605 2771175 : if (vobject && !vobject->HasEscaped() &&
606 2732996 : vobject->FieldAt(OffsetOfFieldAccess(op)).To(&var) &&
607 1356687 : current->Get(var).To(&value)) {
608 19580 : current->SetReplacement(value);
609 : } else {
610 1317527 : current->SetEscaped(object);
611 : }
612 : break;
613 : }
614 : case IrOpcode::kLoadElement: {
615 28694 : Node* object = current->ValueInput(0);
616 28694 : Node* index = current->ValueInput(1);
617 32752 : const VirtualObject* vobject = current->GetVirtualObject(object);
618 : int offset;
619 : Variable var;
620 : Node* value;
621 59566 : if (vobject && !vobject->HasEscaped() &&
622 31594 : OffsetOfElementsAccess(op, index).To(&offset) &&
623 29596 : vobject->FieldAt(offset).To(&var) && current->Get(var).To(&value)) {
624 298 : current->SetReplacement(value);
625 30276 : } else if (vobject && !vobject->HasEscaped()) {
626 : // Compute the known length (aka the number of elements) of {object}
627 : // based on the virtual object information.
628 999 : ElementAccess const& access = ElementAccessOf(op);
629 : int const length =
630 999 : (vobject->size() - access.header_size) >>
631 1442 : ElementSizeLog2Of(access.machine_type.representation());
632 : Variable var0, var1;
633 : Node* value0;
634 : Node* value1;
635 1998 : if (length == 1 &&
636 1395 : vobject->FieldAt(OffsetOfElementAt(access, 0)).To(&var) &&
637 2262 : current->Get(var).To(&value) &&
638 88 : (value == nullptr ||
639 1087 : NodeProperties::GetType(value).Is(access.type))) {
640 : // The {object} has exactly one element, and we know that the
641 : // LoadElement {index} must be within bounds, thus it must always
642 : // yield this one element of {object}.
643 132 : current->SetReplacement(value);
644 132 : break;
645 1734 : } else if (length == 2 &&
646 3138 : vobject->FieldAt(OffsetOfElementAt(access, 0)).To(&var0) &&
647 2381 : current->Get(var0).To(&value0) &&
648 450 : (value0 == nullptr ||
649 2067 : NodeProperties::GetType(value0).Is(access.type)) &&
650 2367 : vobject->FieldAt(OffsetOfElementAt(access, 1)).To(&var1) &&
651 3234 : current->Get(var1).To(&value1) &&
652 443 : (value1 == nullptr ||
653 1310 : NodeProperties::GetType(value1).Is(access.type))) {
654 750 : if (value0 && value1) {
655 : // The {object} has exactly two elements, so the LoadElement
656 : // must return one of them (i.e. either the element at index
657 : // 0 or the one at index 1). So we can turn the LoadElement
658 : // into a Select operation instead (still allowing the {object}
659 : // to be scalar replaced). We must however mark the elements
660 : // of the {object} itself as escaping.
661 : Node* check =
662 : jsgraph->graph()->NewNode(jsgraph->simplified()->NumberEqual(),
663 886 : index, jsgraph->ZeroConstant());
664 : NodeProperties::SetType(check, Type::Boolean());
665 : Node* select = jsgraph->graph()->NewNode(
666 : jsgraph->common()->Select(access.machine_type.representation()),
667 443 : check, value0, value1);
668 : NodeProperties::SetType(select, access.type);
669 443 : current->SetReplacement(select);
670 443 : current->SetEscaped(value0);
671 443 : current->SetEscaped(value1);
672 443 : break;
673 : } else {
674 : // If the variables have no values, we have
675 : // not reached the fixed-point yet.
676 : break;
677 : }
678 : }
679 : }
680 27812 : current->SetEscaped(object);
681 27812 : break;
682 : }
683 : case IrOpcode::kTypeGuard: {
684 30694 : current->SetVirtualObject(current->ValueInput(0));
685 : break;
686 : }
687 : case IrOpcode::kReferenceEqual: {
688 196463 : Node* left = current->ValueInput(0);
689 196463 : Node* right = current->ValueInput(1);
690 197509 : const VirtualObject* left_object = current->GetVirtualObject(left);
691 197925 : const VirtualObject* right_object = current->GetVirtualObject(right);
692 : Node* replacement = nullptr;
693 197275 : if (left_object && !left_object->HasEscaped()) {
694 724 : if (right_object && !right_object->HasEscaped() &&
695 : left_object->id() == right_object->id()) {
696 4 : replacement = jsgraph->TrueConstant();
697 : } else {
698 252 : replacement = jsgraph->FalseConstant();
699 : }
700 197201 : } else if (right_object && !right_object->HasEscaped()) {
701 469 : replacement = jsgraph->FalseConstant();
702 : }
703 196463 : if (replacement) {
704 : // TODO(tebbi) This is a workaround for uninhabited types. If we
705 : // replaced a value of uninhabited type with a constant, we would
706 : // widen the type of the node. This could produce inconsistent
707 : // types (which might confuse representation selection). We get
708 : // around this by refusing to constant-fold and escape-analyze
709 : // if the type is not inhabited.
710 725 : if (!NodeProperties::GetType(left).IsNone() &&
711 : !NodeProperties::GetType(right).IsNone()) {
712 725 : current->SetReplacement(replacement);
713 : } else {
714 0 : current->SetEscaped(left);
715 0 : current->SetEscaped(right);
716 : }
717 : }
718 : break;
719 : }
720 : case IrOpcode::kCheckMaps: {
721 77067 : CheckMapsParameters params = CheckMapsParametersOf(op);
722 77067 : Node* checked = current->ValueInput(0);
723 93061 : const VirtualObject* vobject = current->GetVirtualObject(checked);
724 : Variable map_field;
725 : Node* map;
726 170128 : if (vobject && !vobject->HasEscaped() &&
727 170355 : vobject->FieldAt(HeapObject::kMapOffset).To(&map_field) &&
728 82474 : current->Get(map_field).To(&map)) {
729 5407 : if (map) {
730 4314 : Type const map_type = NodeProperties::GetType(map);
731 : AllowHandleDereference handle_dereference;
732 8619 : if (map_type.IsHeapConstant() &&
733 : params.maps().contains(
734 4305 : Handle<Map>::cast(map_type.AsHeapConstant()->Value()))) {
735 4217 : current->MarkForDeletion();
736 4217 : break;
737 : }
738 : } else {
739 : // If the variable has no value, we have not reached the fixed-point
740 : // yet.
741 : break;
742 : }
743 : }
744 71757 : current->SetEscaped(checked);
745 71757 : break;
746 : }
747 : case IrOpcode::kCompareMaps: {
748 9245 : Node* object = current->ValueInput(0);
749 9444 : const VirtualObject* vobject = current->GetVirtualObject(object);
750 : Variable map_field;
751 : Node* object_map;
752 18689 : if (vobject && !vobject->HasEscaped() &&
753 18805 : vobject->FieldAt(HeapObject::kMapOffset).To(&map_field) &&
754 9350 : current->Get(map_field).To(&object_map)) {
755 105 : if (object_map) {
756 : current->SetReplacement(LowerCompareMapsWithoutLoad(
757 98 : object_map, CompareMapsParametersOf(op).maps(), jsgraph));
758 98 : break;
759 : } else {
760 : // If the variable has no value, we have not reached the fixed-point
761 : // yet.
762 : break;
763 : }
764 : }
765 9140 : current->SetEscaped(object);
766 9140 : break;
767 : }
768 : case IrOpcode::kCheckHeapObject: {
769 34530 : Node* checked = current->ValueInput(0);
770 34530 : switch (checked->opcode()) {
771 : case IrOpcode::kAllocate:
772 : case IrOpcode::kFinishRegion:
773 : case IrOpcode::kHeapConstant:
774 197 : current->SetReplacement(checked);
775 197 : break;
776 : default:
777 34333 : current->SetEscaped(checked);
778 34333 : break;
779 : }
780 : break;
781 : }
782 : case IrOpcode::kMapGuard: {
783 8073 : Node* object = current->ValueInput(0);
784 8271 : const VirtualObject* vobject = current->GetVirtualObject(object);
785 8271 : if (vobject && !vobject->HasEscaped()) {
786 104 : current->MarkForDeletion();
787 : }
788 : break;
789 : }
790 : case IrOpcode::kStateValues:
791 : case IrOpcode::kFrameState:
792 : // These uses are always safe.
793 : break;
794 : default: {
795 : // For unknown nodes, treat all value inputs as escaping.
796 : int value_input_count = op->ValueInputCount();
797 31692494 : for (int i = 0; i < value_input_count; ++i) {
798 12235754 : Node* input = current->ValueInput(i);
799 12235794 : current->SetEscaped(input);
800 : }
801 19456740 : if (OperatorProperties::HasContextInput(op)) {
802 7680437 : current->SetEscaped(current->ContextInput());
803 : }
804 : break;
805 : }
806 : }
807 33997701 : }
808 :
809 : } // namespace
810 :
811 67995253 : void EscapeAnalysis::Reduce(Node* node, Reduction* reduction) {
812 : const Operator* op = node->op();
813 : TRACE("Reducing %s#%d\n", op->mnemonic(), node->id());
814 :
815 33997764 : EscapeAnalysisTracker::Scope current(this, tracker_, node, reduction);
816 33997489 : ReduceNode(op, &current, jsgraph());
817 33997373 : }
818 :
819 456690 : EscapeAnalysis::EscapeAnalysis(JSGraph* jsgraph, Zone* zone)
820 : : EffectGraphReducer(
821 : jsgraph->graph(),
822 33997747 : [this](Node* node, Reduction* reduction) { Reduce(node, reduction); },
823 : zone),
824 456695 : tracker_(new (zone) EscapeAnalysisTracker(jsgraph, this, zone)),
825 1370084 : jsgraph_(jsgraph) {}
826 :
827 31680643 : Node* EscapeAnalysisResult::GetReplacementOf(Node* node) {
828 31680643 : Node* replacement = tracker_->GetReplacementOf(node);
829 : // Replacements cannot have replacements. This is important to ensure
830 : // re-visitation: If a replacement is replaced, then all nodes accessing
831 : // the replacement have to be updated.
832 : if (replacement) DCHECK_NULL(tracker_->GetReplacementOf(replacement));
833 31680788 : return replacement;
834 : }
835 :
836 729967 : Node* EscapeAnalysisResult::GetVirtualObjectField(const VirtualObject* vobject,
837 : int field, Node* effect) {
838 : return tracker_->variable_states_.Get(vobject->FieldAt(field).FromJust(),
839 2189901 : effect);
840 : }
841 :
842 56749138 : const VirtualObject* EscapeAnalysisResult::GetVirtualObject(Node* node) {
843 56749138 : return tracker_->virtual_objects_.Get(node);
844 : }
845 :
846 221052 : VirtualObject::VirtualObject(VariableTracker* var_states, VirtualObject::Id id,
847 : int size)
848 110526 : : Dependable(var_states->zone()), id_(id), fields_(var_states->zone()) {
849 : DCHECK(IsAligned(size, kTaggedSize));
850 : TRACE("Creating VirtualObject id:%d size:%d\n", id, size);
851 110526 : int num_fields = size / kTaggedSize;
852 110526 : fields_.reserve(num_fields);
853 862904 : for (int i = 0; i < num_fields; ++i) {
854 1504756 : fields_.push_back(var_states->NewVariable());
855 : }
856 110526 : }
857 :
858 : #undef TRACE
859 :
860 : } // namespace compiler
861 : } // namespace internal
862 178779 : } // namespace v8
|