Line data Source code
1 : // Copyright 2015 the V8 project authors. All rights reserved.
2 : // Use of this source code is governed by a BSD-style license that can be
3 : // found in the LICENSE file.
4 :
5 : #include "src/compiler/escape-analysis.h"
6 :
7 : #include <limits>
8 :
9 : #include "src/base/flags.h"
10 : #include "src/bootstrapper.h"
11 : #include "src/compilation-dependencies.h"
12 : #include "src/compiler/common-operator.h"
13 : #include "src/compiler/graph-reducer.h"
14 : #include "src/compiler/js-operator.h"
15 : #include "src/compiler/linkage.h"
16 : #include "src/compiler/node-matchers.h"
17 : #include "src/compiler/node-properties.h"
18 : #include "src/compiler/node.h"
19 : #include "src/compiler/operator-properties.h"
20 : #include "src/compiler/simplified-operator.h"
21 : #include "src/compiler/type-cache.h"
22 : #include "src/objects-inl.h"
23 :
24 : namespace v8 {
25 : namespace internal {
26 : namespace compiler {
27 :
28 : typedef NodeId Alias;
29 :
30 : #ifdef DEBUG
31 : #define TRACE(...) \
32 : do { \
33 : if (FLAG_trace_turbo_escape) PrintF(__VA_ARGS__); \
34 : } while (false)
35 : #else
36 : #define TRACE(...)
37 : #endif
38 :
39 : // EscapeStatusAnalysis determines for each allocation whether it escapes.
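// Roughly: AssignAliases() gives every tracked allocation a dense alias id,
// and RunStatusAnalysis() runs a worklist fixpoint over status_stack_ that
// propagates the kTracked/kEscaped bits below through uses and inputs.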
40 : class EscapeStatusAnalysis : public ZoneObject {
41 : public:
42 : enum Status {
43 : kUnknown = 0u,
44 : kTracked = 1u << 0,
45 : kEscaped = 1u << 1,
46 : kOnStack = 1u << 2,
47 : kVisited = 1u << 3,
48 : // A node is dangling if it is a load of some kind and does not have
49 : // an effect successor.
50 : kDanglingComputed = 1u << 4,
51 : kDangling = 1u << 5,
52 : // A node is an effect branch point if it has more than one non-dangling
53 : // effect successor.
54 : kBranchPointComputed = 1u << 6,
55 : kBranchPoint = 1u << 7,
56 : kInQueue = 1u << 8
57 : };
58 : typedef base::Flags<Status, uint16_t> StatusFlags;
59 :
60 : void RunStatusAnalysis();
61 :
62 : bool IsVirtual(Node* node);
63 : bool IsEscaped(Node* node);
64 : bool IsAllocation(Node* node);
65 :
66 : bool IsInQueue(NodeId id);
67 : void SetInQueue(NodeId id, bool on_stack);
68 :
69 : void DebugPrint();
70 :
71 : EscapeStatusAnalysis(EscapeAnalysis* object_analysis, Graph* graph,
72 : Zone* zone);
73 : void EnqueueForStatusAnalysis(Node* node);
74 : bool SetEscaped(Node* node);
75 : bool IsEffectBranchPoint(Node* node);
76 : bool IsDanglingEffectNode(Node* node);
77 : void ResizeStatusVector();
78 : size_t GetStatusVectorSize();
79 : bool IsVirtual(NodeId id);
80 :
81 : Graph* graph() const { return graph_; }
82 : void AssignAliases();
83 35392408 : Alias GetAlias(NodeId id) const { return aliases_[id]; }
84 : const ZoneVector<Alias>& GetAliasMap() const { return aliases_; }
85 : Alias AliasCount() const { return next_free_alias_; }
86 : static const Alias kNotReachable;
87 : static const Alias kUntrackable;
88 :
89 : bool IsNotReachable(Node* node);
90 :
91 : private:
92 : void Process(Node* node);
93 : void ProcessAllocate(Node* node);
94 : void ProcessFinishRegion(Node* node);
95 : void ProcessStoreField(Node* node);
96 : void ProcessStoreElement(Node* node);
97 : bool CheckUsesForEscape(Node* node, bool phi_escaping = false) {
98 697329 : return CheckUsesForEscape(node, node, phi_escaping);
99 : }
100 : bool CheckUsesForEscape(Node* node, Node* rep, bool phi_escaping = false);
101 : void RevisitUses(Node* node);
102 : void RevisitInputs(Node* node);
103 :
104 115359 : Alias NextAlias() { return next_free_alias_++; }
105 :
106 : bool HasEntry(Node* node);
107 :
108 : bool IsAllocationPhi(Node* node);
109 :
110 : ZoneVector<Node*> stack_;
111 : EscapeAnalysis* object_analysis_;
112 : Graph* const graph_;
113 : ZoneVector<StatusFlags> status_;
114 : Alias next_free_alias_;
115 : ZoneVector<Node*> status_stack_;
116 : ZoneVector<Alias> aliases_;
117 :
118 : DISALLOW_COPY_AND_ASSIGN(EscapeStatusAnalysis);
119 : };
120 :
121 : DEFINE_OPERATORS_FOR_FLAGS(EscapeStatusAnalysis::StatusFlags)
122 :
123 : const Alias EscapeStatusAnalysis::kNotReachable =
124 : std::numeric_limits<Alias>::max();
125 : const Alias EscapeStatusAnalysis::kUntrackable =
126 : std::numeric_limits<Alias>::max() - 1;
127 :
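// A VirtualObject models the field contents of one tracked allocation within
// a particular VirtualState: fields_[i] caches the node last stored to field i
// (or nullptr if unknown), and phi_[i] records whether that value is a Phi
// created by the analysis itself.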
128 : class VirtualObject : public ZoneObject {
129 : public:
130 : enum Status {
131 : kInitial = 0,
132 : kTracked = 1u << 0,
133 : kInitialized = 1u << 1,
134 : kCopyRequired = 1u << 2,
135 : };
136 : typedef base::Flags<Status, unsigned char> StatusFlags;
137 :
138 : VirtualObject(NodeId id, VirtualState* owner, Zone* zone)
139 : : id_(id),
140 : status_(kInitial),
141 : fields_(zone),
142 : phi_(zone),
143 : object_state_(nullptr),
144 0 : owner_(owner) {}
145 :
146 272802 : VirtualObject(VirtualState* owner, const VirtualObject& other)
147 : : id_(other.id_),
148 : status_(other.status_ & ~kCopyRequired),
149 : fields_(other.fields_),
150 : phi_(other.phi_),
151 : object_state_(other.object_state_),
152 818406 : owner_(owner) {}
153 :
154 476067 : VirtualObject(NodeId id, VirtualState* owner, Zone* zone, size_t field_number,
155 : bool initialized)
156 : : id_(id),
157 : status_(kTracked | (initialized ? kInitialized : kInitial)),
158 : fields_(zone),
159 : phi_(zone),
160 : object_state_(nullptr),
161 952134 : owner_(owner) {
162 476067 : fields_.resize(field_number);
163 476067 : phi_.resize(field_number, false);
164 476067 : }
165 :
166 1062419665 : Node* GetField(size_t offset) { return fields_[offset]; }
167 :
168 1861859 : bool IsCreatedPhi(size_t offset) { return phi_[offset]; }
169 :
170 4190266 : void SetField(size_t offset, Node* node, bool created_phi = false) {
171 8380532 : fields_[offset] = node;
172 4190266 : phi_[offset] = created_phi;
173 4190266 : }
174 : bool IsTracked() const { return status_ & kTracked; }
175 360708 : bool IsInitialized() const { return status_ & kInitialized; }
176 : bool SetInitialized() { return status_ |= kInitialized; }
177 : VirtualState* owner() const { return owner_; }
178 :
179 : Node** fields_array() { return &fields_.front(); }
180 24123522 : size_t field_count() { return fields_.size(); }
181 339013 : bool ResizeFields(size_t field_count) {
182 678026 : if (field_count > fields_.size()) {
183 0 : fields_.resize(field_count);
184 0 : phi_.resize(field_count);
185 0 : return true;
186 : }
187 : return false;
188 : }
189 81244 : void ClearAllFields() {
190 1507456 : for (size_t i = 0; i < fields_.size(); ++i) {
191 1426212 : fields_[i] = nullptr;
192 672484 : phi_[i] = false;
193 : }
194 81244 : }
195 : bool AllFieldsClear() {
196 29029704 : for (size_t i = 0; i < fields_.size(); ++i) {
197 32657228 : if (fields_[i] != nullptr) {
198 : return false;
199 : }
200 : }
201 : return true;
202 : }
203 : bool UpdateFrom(const VirtualObject& other);
204 : bool MergeFrom(MergeCache* cache, Node* at, Graph* graph,
205 : CommonOperatorBuilder* common, bool initialMerge);
206 8835 : void SetObjectState(Node* node) { object_state_ = node; }
207 : Node* GetObjectState() const { return object_state_; }
208 : bool IsCopyRequired() const { return status_ & kCopyRequired; }
209 : void SetCopyRequired() { status_ |= kCopyRequired; }
210 : bool NeedCopyForModification() {
211 1529465 : if (!IsCopyRequired() || !IsInitialized()) {
212 : return false;
213 : }
214 : return true;
215 : }
216 :
217 : NodeId id() const { return id_; }
218 : void id(NodeId id) { id_ = id; }
219 :
220 : private:
221 : bool MergeFields(size_t i, Node* at, MergeCache* cache, Graph* graph,
222 : CommonOperatorBuilder* common);
223 :
224 : NodeId id_;
225 : StatusFlags status_;
226 : ZoneVector<Node*> fields_;
227 : ZoneVector<bool> phi_;
228 : Node* object_state_;
229 : VirtualState* owner_;
230 :
231 : DISALLOW_COPY_AND_ASSIGN(VirtualObject);
232 : };
233 :
234 : DEFINE_OPERATORS_FOR_FLAGS(VirtualObject::StatusFlags)
235 :
236 89668 : bool VirtualObject::UpdateFrom(const VirtualObject& other) {
237 89668 : bool changed = status_ != other.status_;
238 89668 : status_ = other.status_;
239 : phi_ = other.phi_;
240 1373010 : if (fields_.size() != other.fields_.size()) {
241 : fields_ = other.fields_;
242 0 : return true;
243 : }
244 1104006 : for (size_t i = 0; i < fields_.size(); ++i) {
245 1014338 : if (fields_[i] != other.fields_[i]) {
246 : changed = true;
247 45498 : fields_[i] = other.fields_[i];
248 : }
249 : }
250 : return changed;
251 : }
252 :
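// A VirtualState is the per-effect-point snapshot of the analysis: it maps
// each alias to its VirtualObject (or nullptr once the object can no longer
// be tracked) and is owned by the node whose effect output it describes.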
253 : class VirtualState : public ZoneObject {
254 : public:
255 267427 : VirtualState(Node* owner, Zone* zone, size_t size)
256 : : info_(size, nullptr, zone),
257 : initialized_(static_cast<int>(size), zone),
258 267427 : owner_(owner) {}
259 :
260 126519 : VirtualState(Node* owner, const VirtualState& state)
261 : : info_(state.info_.size(), nullptr, state.info_.get_allocator().zone()),
262 : initialized_(state.initialized_.length(),
263 : state.info_.get_allocator().zone()),
264 3035756 : owner_(owner) {
265 5818474 : for (size_t i = 0; i < info_.size(); ++i) {
266 2782718 : if (state.info_[i]) {
267 3244282 : info_[i] = state.info_[i];
268 : }
269 : }
270 126519 : }
271 :
272 : VirtualObject* VirtualObjectFromAlias(size_t alias);
273 : void SetVirtualObject(Alias alias, VirtualObject* state);
274 : bool UpdateFrom(VirtualState* state, Zone* zone);
275 : bool MergeFrom(MergeCache* cache, Zone* zone, Graph* graph,
276 : CommonOperatorBuilder* common, Node* at);
277 72833684 : size_t size() const { return info_.size(); }
278 : Node* owner() const { return owner_; }
279 : VirtualObject* Copy(VirtualObject* obj, Alias alias);
280 : void SetCopyRequired() {
281 77040842 : for (VirtualObject* obj : info_) {
282 37347270 : if (obj) obj->SetCopyRequired();
283 : }
284 : }
285 :
286 : private:
287 : ZoneVector<VirtualObject*> info_;
288 : BitVector initialized_;
289 : Node* owner_;
290 :
291 : DISALLOW_COPY_AND_ASSIGN(VirtualState);
292 : };
293 :
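// MergeCache holds scratch vectors that are reused while merging the incoming
// VirtualStates and VirtualObjects at (effect) phi nodes.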
294 : class MergeCache : public ZoneObject {
295 : public:
296 39769 : explicit MergeCache(Zone* zone)
297 : : states_(zone), objects_(zone), fields_(zone) {
298 39769 : states_.reserve(5);
299 39769 : objects_.reserve(5);
300 39769 : fields_.reserve(5);
301 39769 : }
302 : ZoneVector<VirtualState*>& states() { return states_; }
303 : ZoneVector<VirtualObject*>& objects() { return objects_; }
304 : ZoneVector<Node*>& fields() { return fields_; }
305 : void Clear() {
306 : states_.clear();
307 : objects_.clear();
308 : fields_.clear();
309 : }
310 : size_t LoadVirtualObjectsFromStatesFor(Alias alias);
311 : void LoadVirtualObjectsForFieldsFrom(VirtualState* state,
312 : const ZoneVector<Alias>& aliases);
313 : Node* GetFields(size_t pos);
314 :
315 : private:
316 : ZoneVector<VirtualState*> states_;
317 : ZoneVector<VirtualObject*> objects_;
318 : ZoneVector<Node*> fields_;
319 :
320 : DISALLOW_COPY_AND_ASSIGN(MergeCache);
321 : };
322 :
323 0 : size_t MergeCache::LoadVirtualObjectsFromStatesFor(Alias alias) {
324 : objects_.clear();
325 : DCHECK_GT(states_.size(), 0u);
326 0 : size_t min = std::numeric_limits<size_t>::max();
327 0 : for (VirtualState* state : states_) {
328 0 : if (VirtualObject* obj = state->VirtualObjectFromAlias(alias)) {
329 0 : objects_.push_back(obj);
330 0 : min = std::min(obj->field_count(), min);
331 : }
332 : }
333 0 : return min;
334 : }
335 :
336 15964 : void MergeCache::LoadVirtualObjectsForFieldsFrom(
337 : VirtualState* state, const ZoneVector<Alias>& aliases) {
338 : objects_.clear();
339 : size_t max_alias = state->size();
340 47892 : for (Node* field : fields_) {
341 31928 : Alias alias = aliases[field->id()];
342 15964 : if (alias >= max_alias) continue;
343 0 : if (VirtualObject* obj = state->VirtualObjectFromAlias(alias)) {
344 0 : objects_.push_back(obj);
345 : }
346 : }
347 15964 : }
348 :
349 3318215 : Node* MergeCache::GetFields(size_t pos) {
350 : fields_.clear();
351 3318215 : Node* rep = pos >= objects_.front()->field_count()
352 : ? nullptr
353 3318215 : : objects_.front()->GetField(pos);
354 18339934 : for (VirtualObject* obj : objects_) {
355 11703504 : if (pos >= obj->field_count()) continue;
356 11703504 : Node* field = obj->GetField(pos);
357 11703504 : if (field) {
358 7263236 : fields_.push_back(field);
359 : }
360 11703504 : if (field != rep) {
361 : rep = nullptr;
362 : }
363 : }
364 3318215 : return rep;
365 : }
366 :
367 321597 : VirtualObject* VirtualState::Copy(VirtualObject* obj, Alias alias) {
368 321597 : if (obj->owner() == this) return obj;
369 : VirtualObject* new_obj =
370 272380 : new (info_.get_allocator().zone()) VirtualObject(this, *obj);
371 : TRACE("At state %p, alias @%d (#%d), copying virtual object from %p to %p\n",
372 : static_cast<void*>(this), alias, obj->id(), static_cast<void*>(obj),
373 : static_cast<void*>(new_obj));
374 544760 : info_[alias] = new_obj;
375 272380 : return new_obj;
376 : }
377 :
378 0 : VirtualObject* VirtualState::VirtualObjectFromAlias(size_t alias) {
379 110626468 : return info_[alias];
380 : }
381 :
382 0 : void VirtualState::SetVirtualObject(Alias alias, VirtualObject* obj) {
383 14580192 : info_[alias] = obj;
384 476489 : if (obj) initialized_.Add(alias);
385 0 : }
386 :
387 689904 : bool VirtualState::UpdateFrom(VirtualState* from, Zone* zone) {
388 689904 : if (from == this) return false;
389 : bool changed = false;
390 800826 : for (Alias alias = 0; alias < size(); ++alias) {
391 : VirtualObject* ls = VirtualObjectFromAlias(alias);
392 : VirtualObject* rs = from->VirtualObjectFromAlias(alias);
393 :
394 382855 : if (ls == rs || rs == nullptr) continue;
395 :
396 89668 : if (ls == nullptr) {
397 0 : ls = new (zone) VirtualObject(this, *rs);
398 : SetVirtualObject(alias, ls);
399 : changed = true;
400 : continue;
401 : }
402 :
403 : TRACE(" Updating fields of @%d\n", alias);
404 :
405 89668 : changed = ls->UpdateFrom(*rs) || changed;
406 : }
407 : return false;
408 : }
409 :
410 : namespace {
411 :
412 14 : bool IsEquivalentPhi(Node* node1, Node* node2) {
413 7 : if (node1 == node2) return true;
414 7 : if (node1->opcode() != IrOpcode::kPhi || node2->opcode() != IrOpcode::kPhi ||
415 0 : node1->op()->ValueInputCount() != node2->op()->ValueInputCount()) {
416 : return false;
417 : }
418 0 : for (int i = 0; i < node1->op()->ValueInputCount(); ++i) {
419 0 : Node* input1 = NodeProperties::GetValueInput(node1, i);
420 0 : Node* input2 = NodeProperties::GetValueInput(node2, i);
421 0 : if (!IsEquivalentPhi(input1, input2)) {
422 : return false;
423 : }
424 : }
425 : return true;
426 : }
427 :
428 0 : bool IsEquivalentPhi(Node* phi, ZoneVector<Node*>& inputs) {
429 0 : if (phi->opcode() != IrOpcode::kPhi) return false;
430 0 : if (static_cast<size_t>(phi->op()->ValueInputCount()) != inputs.size()) {
431 : return false;
432 : }
433 0 : for (size_t i = 0; i < inputs.size(); ++i) {
434 0 : Node* input = NodeProperties::GetValueInput(phi, static_cast<int>(i));
435 0 : if (!IsEquivalentPhi(input, inputs[i])) {
436 : return false;
437 : }
438 : }
439 : return true;
440 : }
441 : } // namespace
442 :
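// MergeFields handles a single field whose incoming values disagree: it either
// creates a new Phi at the merge point or updates the inputs of a Phi created
// earlier. MergeFrom drives this per field and clears fields for which the
// incoming states do not provide a value on every path.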
443 55848 : bool VirtualObject::MergeFields(size_t i, Node* at, MergeCache* cache,
444 63318 : Graph* graph, CommonOperatorBuilder* common) {
445 : bool changed = false;
446 111696 : int value_input_count = static_cast<int>(cache->fields().size());
447 : Node* rep = GetField(i);
448 108631 : if (!rep || !IsCreatedPhi(i)) {
449 : Type* phi_type = Type::None();
450 150279 : for (Node* input : cache->fields()) {
451 63318 : CHECK_NOT_NULL(input);
452 63318 : CHECK(!input->IsDead());
453 : Type* input_type = NodeProperties::GetType(input);
454 63318 : phi_type = Type::Union(phi_type, input_type, graph->zone());
455 : }
456 23643 : Node* control = NodeProperties::GetControlInput(at);
457 23643 : cache->fields().push_back(control);
458 : Node* phi = graph->NewNode(
459 : common->Phi(MachineRepresentation::kTagged, value_input_count),
460 23643 : value_input_count + 1, &cache->fields().front());
461 : NodeProperties::SetType(phi, phi_type);
462 23643 : SetField(i, phi, true);
463 :
464 : #ifdef DEBUG
465 : if (FLAG_trace_turbo_escape) {
466 : PrintF(" Creating Phi #%d as merge of", phi->id());
467 : for (int i = 0; i < value_input_count; i++) {
468 : PrintF(" #%d (%s)", cache->fields()[i]->id(),
469 : cache->fields()[i]->op()->mnemonic());
470 : }
471 : PrintF("\n");
472 : }
473 : #endif
474 : changed = true;
475 : } else {
476 : DCHECK(rep->opcode() == IrOpcode::kPhi);
477 91510 : for (int n = 0; n < value_input_count; ++n) {
478 91510 : Node* old = NodeProperties::GetValueInput(rep, n);
479 183020 : if (old != cache->fields()[n]) {
480 : changed = true;
481 21079 : NodeProperties::ReplaceValueInput(rep, cache->fields()[n], n);
482 : }
483 : }
484 : }
485 55848 : return changed;
486 : }
487 :
488 2214706 : bool VirtualObject::MergeFrom(MergeCache* cache, Node* at, Graph* graph,
489 : CommonOperatorBuilder* common,
490 : bool initialMerge) {
491 : DCHECK(at->opcode() == IrOpcode::kEffectPhi ||
492 : at->opcode() == IrOpcode::kPhi);
493 : bool changed = false;
494 9787536 : for (size_t i = 0; i < field_count(); ++i) {
495 6125299 : if (!initialMerge && GetField(i) == nullptr) continue;
496 3318215 : Node* field = cache->GetFields(i);
497 5127291 : if (field && !IsCreatedPhi(i)) {
498 2908210 : changed = changed || GetField(i) != field;
499 1803652 : SetField(i, field);
500 : TRACE(" Field %zu agree on rep #%d\n", i, field->id());
501 : } else {
502 : size_t arity = at->opcode() == IrOpcode::kEffectPhi
503 1514563 : ? at->op()->EffectInputCount()
504 3029126 : : at->op()->ValueInputCount();
505 3029126 : if (cache->fields().size() == arity) {
506 55848 : changed = MergeFields(i, at, cache, graph, common) || changed;
507 : } else {
508 1458715 : if (GetField(i) != nullptr) {
509 : TRACE(" Field %zu cleared\n", i);
510 : changed = true;
511 : }
512 1458715 : SetField(i, nullptr);
513 : }
514 : }
515 : }
516 700143 : return changed;
517 : }
518 :
519 421631 : bool VirtualState::MergeFrom(MergeCache* cache, Zone* zone, Graph* graph,
520 : CommonOperatorBuilder* common, Node* at) {
521 : DCHECK_GT(cache->states().size(), 0u);
522 : bool changed = false;
523 15870762 : for (Alias alias = 0; alias < size(); ++alias) {
524 : cache->objects().clear();
525 : VirtualObject* mergeObject = VirtualObjectFromAlias(alias);
526 : bool copy_merge_object = false;
527 7513750 : size_t fields = std::numeric_limits<size_t>::max();
528 33563502 : for (VirtualState* state : cache->states()) {
529 18536002 : if (VirtualObject* obj = state->VirtualObjectFromAlias(alias)) {
530 2872787 : cache->objects().push_back(obj);
531 2872787 : if (mergeObject == obj) {
532 : copy_merge_object = true;
533 : }
534 5745574 : fields = std::min(obj->field_count(), fields);
535 : }
536 : }
537 30055000 : if (cache->objects().size() == cache->states().size() &&
538 721416 : (mergeObject || !initialized_.Contains(alias))) {
539 : bool initialMerge = false;
540 700143 : if (!mergeObject) {
541 : initialMerge = true;
542 : VirtualObject* obj = new (zone)
543 360708 : VirtualObject(cache->objects().front()->id(), this, zone, fields,
544 721416 : cache->objects().front()->IsInitialized());
545 : SetVirtualObject(alias, obj);
546 : mergeObject = obj;
547 : changed = true;
548 339435 : } else if (copy_merge_object) {
549 422 : VirtualObject* obj = new (zone) VirtualObject(this, *mergeObject);
550 : SetVirtualObject(alias, obj);
551 : mergeObject = obj;
552 : changed = true;
553 : } else {
554 339013 : changed = mergeObject->ResizeFields(fields) || changed;
555 : }
556 : #ifdef DEBUG
557 : if (FLAG_trace_turbo_escape) {
558 : PrintF(" Alias @%d, merging into %p virtual objects", alias,
559 : static_cast<void*>(mergeObject));
560 : for (size_t i = 0; i < cache->objects().size(); i++) {
561 : PrintF(" %p", static_cast<void*>(cache->objects()[i]));
562 : }
563 : PrintF("\n");
564 : }
565 : #endif // DEBUG
566 : changed =
567 700143 : mergeObject->MergeFrom(cache, at, graph, common, initialMerge) ||
568 : changed;
569 : } else {
570 6813607 : if (mergeObject) {
571 : TRACE(" Alias %d, virtual object removed\n", alias);
572 : changed = true;
573 : }
574 : SetVirtualObject(alias, nullptr);
575 : }
576 : }
577 421631 : return changed;
578 : }
579 :
580 0 : EscapeStatusAnalysis::EscapeStatusAnalysis(EscapeAnalysis* object_analysis,
581 : Graph* graph, Zone* zone)
582 : : stack_(zone),
583 : object_analysis_(object_analysis),
584 : graph_(graph),
585 : status_(zone),
586 : next_free_alias_(0),
587 : status_stack_(zone),
588 786404 : aliases_(zone) {}
589 :
590 697329 : bool EscapeStatusAnalysis::HasEntry(Node* node) {
591 697329 : return status_[node->id()] & (kTracked | kEscaped);
592 : }
593 :
594 0 : bool EscapeStatusAnalysis::IsVirtual(Node* node) {
595 0 : return IsVirtual(node->id());
596 : }
597 :
598 0 : bool EscapeStatusAnalysis::IsVirtual(NodeId id) {
599 5213428 : return (status_[id] & kTracked) && !(status_[id] & kEscaped);
600 : }
601 :
602 6050294 : bool EscapeStatusAnalysis::IsEscaped(Node* node) {
603 6050312 : return status_[node->id()] & kEscaped;
604 : }
605 :
606 1022210 : bool EscapeStatusAnalysis::IsAllocation(Node* node) {
607 1597824 : return node->opcode() == IrOpcode::kAllocate ||
608 0 : node->opcode() == IrOpcode::kFinishRegion;
609 : }
610 :
611 5047975 : bool EscapeStatusAnalysis::SetEscaped(Node* node) {
612 5047975 : bool changed = !(status_[node->id()] & kEscaped);
613 : status_[node->id()] |= kEscaped | kTracked;
614 0 : return changed;
615 : }
616 :
617 0 : bool EscapeStatusAnalysis::IsInQueue(NodeId id) {
618 5416518 : return status_[id] & kInQueue;
619 : }
620 :
621 0 : void EscapeStatusAnalysis::SetInQueue(NodeId id, bool on_stack) {
622 0 : if (on_stack) {
623 5048293 : status_[id] |= kInQueue;
624 : } else {
625 5048293 : status_[id] &= ~kInQueue;
626 : }
627 0 : }
628 :
629 1534540 : void EscapeStatusAnalysis::ResizeStatusVector() {
630 2301810 : if (status_.size() <= graph()->NodeCount()) {
631 393202 : status_.resize(graph()->NodeCount() * 1.1, kUnknown);
632 : }
633 767270 : }
634 :
635 2503200 : size_t EscapeStatusAnalysis::GetStatusVectorSize() { return status_.size(); }
636 :
637 39769 : void EscapeStatusAnalysis::RunStatusAnalysis() {
638 : // TODO(tebbi): This checks for faulty VirtualObject states, which can happen
639 : // due to bug https://bugs.chromium.org/p/v8/issues/detail?id=6302. As a
640 : // workaround, we set everything to escaped if such a faulty state was
641 : // detected.
642 39769 : bool all_objects_complete = object_analysis_->AllObjectsComplete();
643 39769 : ResizeStatusVector();
644 9209655 : while (!status_stack_.empty()) {
645 27390351 : Node* node = status_stack_.back();
646 : status_stack_.pop_back();
647 18260234 : status_[node->id()] &= ~kOnStack;
648 9130117 : Process(node);
649 9130117 : status_[node->id()] |= kVisited;
650 9130117 : if (!all_objects_complete) SetEscaped(node);
651 : }
652 39769 : }
653 :
654 115359 : void EscapeStatusAnalysis::EnqueueForStatusAnalysis(Node* node) {
655 : DCHECK_NOT_NULL(node);
656 576795 : if (!(status_[node->id()] & kOnStack)) {
657 115359 : status_stack_.push_back(node);
658 230718 : status_[node->id()] |= kOnStack;
659 : }
660 115359 : }
661 :
662 785757 : void EscapeStatusAnalysis::RevisitInputs(Node* node) {
663 1882273 : for (Edge edge : node->input_edges()) {
664 1096516 : Node* input = edge.to();
665 5070840 : if (!(status_[input->id()] & kOnStack)) {
666 890646 : status_stack_.push_back(input);
667 1781292 : status_[input->id()] |= kOnStack;
668 : }
669 : }
670 785757 : }
671 :
672 1756328 : void EscapeStatusAnalysis::RevisitUses(Node* node) {
673 13101724 : for (Edge edge : node->use_edges()) {
674 11345395 : Node* use = edge.from();
675 58419801 : if (!(status_[use->id()] & kOnStack) && !IsNotReachable(use)) {
676 8124149 : status_stack_.push_back(use);
677 16248300 : status_[use->id()] |= kOnStack;
678 : }
679 : }
680 1756329 : }
681 :
682 9130125 : void EscapeStatusAnalysis::Process(Node* node) {
683 9130125 : switch (node->opcode()) {
684 : case IrOpcode::kAllocate:
685 182775 : ProcessAllocate(node);
686 182775 : break;
687 : case IrOpcode::kFinishRegion:
688 176389 : ProcessFinishRegion(node);
689 176389 : break;
690 : case IrOpcode::kStoreField:
691 2315081 : ProcessStoreField(node);
692 2315077 : break;
693 : case IrOpcode::kStoreElement:
694 435438 : ProcessStoreElement(node);
695 435438 : break;
696 : case IrOpcode::kLoadField:
697 : case IrOpcode::kLoadElement: {
698 549333 : if (Node* rep = object_analysis_->GetReplacement(node)) {
699 17162 : if (IsAllocation(rep) && CheckUsesForEscape(node, rep)) {
700 1 : RevisitInputs(rep);
701 1 : RevisitUses(rep);
702 : }
703 : } else {
704 532171 : Node* from = NodeProperties::GetValueInput(node, 0);
705 532171 : from = object_analysis_->ResolveReplacement(from);
706 532171 : if (SetEscaped(from)) {
707 : TRACE("Setting #%d (%s) to escaped because of unresolved load #%i\n",
708 : from->id(), from->op()->mnemonic(), node->id());
709 44659 : RevisitInputs(from);
710 44659 : RevisitUses(from);
711 : }
712 : }
713 549333 : RevisitUses(node);
714 549333 : break;
715 : }
716 : case IrOpcode::kPhi:
717 338165 : if (!HasEntry(node)) {
718 : status_[node->id()] |= kTracked;
719 125847 : RevisitUses(node);
720 : }
721 676330 : if (!IsAllocationPhi(node) && SetEscaped(node)) {
722 125847 : RevisitInputs(node);
723 125847 : RevisitUses(node);
724 : }
725 : CheckUsesForEscape(node);
726 : default:
727 : break;
728 : }
729 9130121 : }
730 :
731 338165 : bool EscapeStatusAnalysis::IsAllocationPhi(Node* node) {
732 624757 : for (Edge edge : node->input_edges()) {
733 624757 : Node* input = edge.to();
734 767658 : if (input->opcode() == IrOpcode::kPhi && !IsEscaped(input)) continue;
735 575614 : if (IsAllocation(input)) continue;
736 : return false;
737 : }
738 0 : return true;
739 : }
740 :
741 2315080 : void EscapeStatusAnalysis::ProcessStoreField(Node* node) {
742 : DCHECK_EQ(node->opcode(), IrOpcode::kStoreField);
743 2315080 : Node* to = NodeProperties::GetValueInput(node, 0);
744 2315080 : Node* val = NodeProperties::GetValueInput(node, 1);
745 4856832 : if ((IsEscaped(to) || !IsAllocation(to)) && SetEscaped(val)) {
746 423206 : RevisitUses(val);
747 423206 : RevisitInputs(val);
748 : TRACE("Setting #%d (%s) to escaped because of store to field of #%d\n",
749 : val->id(), val->op()->mnemonic(), to->id());
750 : }
751 2315077 : }
752 :
753 435438 : void EscapeStatusAnalysis::ProcessStoreElement(Node* node) {
754 : DCHECK_EQ(node->opcode(), IrOpcode::kStoreElement);
755 435438 : Node* to = NodeProperties::GetValueInput(node, 0);
756 435438 : Node* val = NodeProperties::GetValueInput(node, 2);
757 872325 : if ((IsEscaped(to) || !IsAllocation(to)) && SetEscaped(val)) {
758 136193 : RevisitUses(val);
759 136193 : RevisitInputs(val);
760 : TRACE("Setting #%d (%s) to escaped because of store to field of #%d\n",
761 : val->id(), val->op()->mnemonic(), to->id());
762 : }
763 435438 : }
764 :
765 182775 : void EscapeStatusAnalysis::ProcessAllocate(Node* node) {
766 : DCHECK_EQ(node->opcode(), IrOpcode::kAllocate);
767 182775 : if (!HasEntry(node)) {
768 : status_[node->id()] |= kTracked;
769 : TRACE("Created status entry for node #%d (%s)\n", node->id(),
770 : node->op()->mnemonic());
771 : NumberMatcher size(node->InputAt(0));
772 : DCHECK(node->InputAt(0)->opcode() != IrOpcode::kInt32Constant &&
773 : node->InputAt(0)->opcode() != IrOpcode::kInt64Constant &&
774 : node->InputAt(0)->opcode() != IrOpcode::kFloat32Constant &&
775 : node->InputAt(0)->opcode() != IrOpcode::kFloat64Constant);
776 115356 : RevisitUses(node);
777 115356 : if (!size.HasValue() && SetEscaped(node)) {
778 : TRACE("Setting #%d to escaped because of non-const alloc\n", node->id());
779 : // This node is already known to escape, so its uses do not have to be
780 : // checked for escape.
781 182775 : return;
782 : }
783 : }
784 182775 : if (CheckUsesForEscape(node, true)) {
785 72235 : RevisitUses(node);
786 : }
787 : }
788 :
789 754143 : bool EscapeStatusAnalysis::CheckUsesForEscape(Node* uses, Node* rep,
790 : bool phi_escaping) {
791 4694132 : for (Edge edge : uses->use_edges()) {
792 7995562 : Node* use = edge.from();
793 4122514 : if (IsNotReachable(use)) continue;
794 8238649 : if (edge.index() >= use->op()->ValueInputCount() +
795 : OperatorProperties::GetContextInputCount(use->op()))
796 : continue;
797 3876237 : switch (use->opcode()) {
798 : case IrOpcode::kPhi:
799 638715 : if (phi_escaping && SetEscaped(rep)) {
800 : TRACE(
801 : "Setting #%d (%s) to escaped because of use by phi node "
802 : "#%d (%s)\n",
803 : rep->id(), rep->op()->mnemonic(), use->id(),
804 : use->op()->mnemonic());
805 : return true;
806 : }
807 : // Fallthrough.
808 : case IrOpcode::kStoreField:
809 : case IrOpcode::kLoadField:
810 : case IrOpcode::kStoreElement:
811 : case IrOpcode::kLoadElement:
812 : case IrOpcode::kFrameState:
813 : case IrOpcode::kStateValues:
814 : case IrOpcode::kReferenceEqual:
815 : case IrOpcode::kFinishRegion:
816 : case IrOpcode::kCheckMaps:
817 4543556 : if (IsEscaped(use) && SetEscaped(rep)) {
818 : TRACE(
819 : "Setting #%d (%s) to escaped because of use by escaping node "
820 : "#%d (%s)\n",
821 : rep->id(), rep->op()->mnemonic(), use->id(),
822 : use->op()->mnemonic());
823 : return true;
824 : }
825 : break;
826 : case IrOpcode::kObjectIsSmi:
827 152 : if (!IsAllocation(rep) && SetEscaped(rep)) {
828 : TRACE("Setting #%d (%s) to escaped because of use by #%d (%s)\n",
829 : rep->id(), rep->op()->mnemonic(), use->id(),
830 : use->op()->mnemonic());
831 : return true;
832 : }
833 : break;
834 : case IrOpcode::kSelect:
835 : // TODO(mstarzinger): The following list of operators will eventually be
836 : // handled by the EscapeAnalysisReducer (similar to ObjectIsSmi).
837 : case IrOpcode::kConvertTaggedHoleToUndefined:
838 : case IrOpcode::kStringEqual:
839 : case IrOpcode::kStringLessThan:
840 : case IrOpcode::kStringLessThanOrEqual:
841 : case IrOpcode::kTypeGuard:
842 : case IrOpcode::kPlainPrimitiveToNumber:
843 : case IrOpcode::kPlainPrimitiveToWord32:
844 : case IrOpcode::kPlainPrimitiveToFloat64:
845 : case IrOpcode::kStringCharAt:
846 : case IrOpcode::kStringCharCodeAt:
847 : case IrOpcode::kStringIndexOf:
848 : case IrOpcode::kObjectIsDetectableCallable:
849 : case IrOpcode::kObjectIsNaN:
850 : case IrOpcode::kObjectIsNonCallable:
851 : case IrOpcode::kObjectIsNumber:
852 : case IrOpcode::kObjectIsReceiver:
853 : case IrOpcode::kObjectIsString:
854 : case IrOpcode::kObjectIsSymbol:
855 : case IrOpcode::kObjectIsUndetectable:
856 1155 : if (SetEscaped(rep)) {
857 : TRACE("Setting #%d (%s) to escaped because of use by #%d (%s)\n",
858 : rep->id(), rep->op()->mnemonic(), use->id(),
859 : use->op()->mnemonic());
860 : return true;
861 : }
862 : break;
863 : default:
864 1472095 : if (use->op()->EffectInputCount() == 0 &&
865 783325 : uses->op()->EffectInputCount() > 0 &&
866 : !IrOpcode::IsJsOpcode(use->opcode())) {
867 : V8_Fatal(__FILE__, __LINE__,
868 : "Encountered unaccounted use by #%d (%s)\n", use->id(),
869 0 : use->op()->mnemonic());
870 : }
871 708829 : if (SetEscaped(rep)) {
872 : TRACE("Setting #%d (%s) to escaped because of use by #%d (%s)\n",
873 : rep->id(), rep->op()->mnemonic(), use->id(),
874 : use->op()->mnemonic());
875 : return true;
876 : }
877 : }
878 : }
879 571618 : return false;
880 : }
881 :
882 176389 : void EscapeStatusAnalysis::ProcessFinishRegion(Node* node) {
883 : DCHECK_EQ(node->opcode(), IrOpcode::kFinishRegion);
884 176389 : if (!HasEntry(node)) {
885 : status_[node->id()] |= kTracked;
886 107805 : RevisitUses(node);
887 : }
888 176389 : if (CheckUsesForEscape(node, true)) {
889 55851 : RevisitInputs(node);
890 55851 : RevisitUses(node);
891 : }
892 176389 : }
893 :
894 0 : void EscapeStatusAnalysis::DebugPrint() {
895 0 : for (NodeId id = 0; id < status_.size(); id++) {
896 0 : if (status_[id] & kTracked) {
897 : PrintF("Node #%d is %s\n", id,
898 0 : (status_[id] & kEscaped) ? "escaping" : "virtual");
899 : }
900 : }
901 0 : }
902 :
903 393202 : EscapeAnalysis::EscapeAnalysis(Graph* graph, CommonOperatorBuilder* common,
904 : Zone* zone)
905 : : zone_(zone),
906 393202 : slot_not_analyzed_(graph->NewNode(common->NumberConstant(0x1c0debad))),
907 : common_(common),
908 : status_analysis_(new (zone) EscapeStatusAnalysis(this, graph, zone)),
909 : virtual_states_(zone),
910 : replacements_(zone),
911 : cycle_detection_(zone),
912 1572808 : cache_(nullptr) {
913 : // Type slot_not_analyzed_ manually.
914 786404 : double v = OpParameter<double>(slot_not_analyzed_);
915 393202 : NodeProperties::SetType(slot_not_analyzed_, Type::Range(v, v, zone));
916 393202 : }
917 :
918 786403 : EscapeAnalysis::~EscapeAnalysis() {}
919 :
920 432971 : bool EscapeAnalysis::Run() {
921 786404 : replacements_.resize(graph()->NodeCount());
922 786404 : status_analysis_->AssignAliases();
923 786404 : if (status_analysis_->AliasCount() > 0) {
924 39769 : cache_ = new (zone()) MergeCache(zone());
925 79538 : replacements_.resize(graph()->NodeCount());
926 39769 : status_analysis_->ResizeStatusVector();
927 39769 : RunObjectAnalysis();
928 39769 : status_analysis_->RunStatusAnalysis();
929 39769 : return true;
930 : } else {
931 : return false;
932 : }
933 : }
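// Rough usage sketch (hypothetical client code; in V8 the EscapeAnalysisReducer
// plays this role): construct the analysis over a graph, call Run(), and query
// replacements for loads from non-escaping objects, e.g.
//
//   EscapeAnalysis escape_analysis(graph, common, zone);
//   if (escape_analysis.Run()) {
//     if (Node* rep = escape_analysis.GetReplacement(load)) {
//       // use rep in place of the load
//     }
//   }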
934 :
935 1966010 : void EscapeStatusAnalysis::AssignAliases() {
936 393202 : size_t max_size = 1024;
937 393202 : size_t min_size = 32;
938 : size_t stack_size =
939 1179606 : std::min(std::max(graph()->NodeCount() / 5, min_size), max_size);
940 393202 : stack_.reserve(stack_size);
941 393202 : ResizeStatusVector();
942 786404 : stack_.push_back(graph()->end());
943 393202 : CHECK_LT(graph()->NodeCount(), kUntrackable);
944 112984293 : aliases_.resize(graph()->NodeCount(), kNotReachable);
945 1179606 : aliases_[graph()->end()->id()] = kUntrackable;
946 393202 : status_stack_.reserve(8);
947 : TRACE("Discovering trackable nodes");
948 29521869 : while (!stack_.empty()) {
949 57701633 : Node* node = stack_.back();
950 : stack_.pop_back();
951 28735458 : switch (node->opcode()) {
952 : case IrOpcode::kAllocate:
953 230718 : if (aliases_[node->id()] >= kUntrackable) {
954 2 : aliases_[node->id()] = NextAlias();
955 : TRACE(" @%d:%s#%u", aliases_[node->id()], node->op()->mnemonic(),
956 : node->id());
957 2 : EnqueueForStatusAnalysis(node);
958 : }
959 : break;
960 : case IrOpcode::kFinishRegion: {
961 120066 : Node* allocate = NodeProperties::GetValueInput(node, 0);
962 : DCHECK_NOT_NULL(allocate);
963 470847 : if (allocate->opcode() == IrOpcode::kAllocate) {
964 230716 : if (aliases_[allocate->id()] >= kUntrackable) {
965 115357 : if (aliases_[allocate->id()] == kNotReachable) {
966 115353 : stack_.push_back(allocate);
967 : }
968 346071 : aliases_[allocate->id()] = NextAlias();
969 : TRACE(" @%d:%s#%u", aliases_[allocate->id()],
970 : allocate->op()->mnemonic(), allocate->id());
971 115357 : EnqueueForStatusAnalysis(allocate);
972 : }
973 461432 : aliases_[node->id()] = aliases_[allocate->id()];
974 : TRACE(" @%d:%s#%u", aliases_[node->id()], node->op()->mnemonic(),
975 : node->id());
976 : }
977 : break;
978 : }
979 : default:
980 : DCHECK_EQ(aliases_[node->id()], kUntrackable);
981 : break;
982 : }
983 112244898 : for (Edge edge : node->input_edges()) {
984 83509447 : Node* input = edge.to();
985 278755351 : if (aliases_[input->id()] == kNotReachable) {
986 28227029 : stack_.push_back(input);
987 84681030 : aliases_[input->id()] = kUntrackable;
988 : }
989 : }
990 : }
991 : TRACE("\n");
992 393202 : }
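// After AssignAliases() every node id maps to one of three things: kNotReachable
// (not reachable from graph()->end()), kUntrackable (reachable but not a tracked
// allocation), or a dense alias shared by an Allocate node and its FinishRegion.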
993 :
994 20231586 : bool EscapeStatusAnalysis::IsNotReachable(Node* node) {
995 56733804 : if (node->id() >= aliases_.size()) {
996 : return false;
997 : }
998 28074693 : return aliases_[node->id()] == kNotReachable;
999 : }
1000 :
1001 39769 : bool EscapeAnalysis::AllObjectsComplete() {
1002 12113949 : for (VirtualState* state : virtual_states_) {
1003 12049898 : if (state) {
1004 110765760 : for (size_t i = 0; i < state->size(); ++i) {
1005 54034320 : if (VirtualObject* object = state->VirtualObjectFromAlias(i)) {
1006 5150528 : if (!object->AllFieldsClear()) {
1007 1037904080 : for (size_t i = 0; i < object->field_count(); ++i) {
1008 1037919567 : if (object->GetField(i) == nullptr) {
1009 : return false;
1010 : }
1011 : }
1012 : }
1013 : }
1014 : }
1015 : }
1016 : }
1017 : return true;
1018 : }
1019 :
1020 119307 : void EscapeAnalysis::RunObjectAnalysis() {
1021 79538 : virtual_states_.resize(graph()->NodeCount());
1022 : ZoneDeque<Node*> queue(zone());
1023 79538 : queue.push_back(graph()->start());
1024 : ZoneVector<Node*> danglers(zone());
1025 5088059 : while (!queue.empty()) {
1026 10096583 : Node* node = queue.back();
1027 : queue.pop_back();
1028 5048293 : status_analysis_->SetInQueue(node->id(), false);
1029 5048293 : if (Process(node)) {
1030 37139318 : for (Edge edge : node->use_edges()) {
1031 16109072 : Node* use = edge.from();
1032 43051286 : if (status_analysis_->IsNotReachable(use)) {
1033 9941 : continue;
1034 : }
1035 16099131 : if (NodeProperties::IsEffectEdge(edge)) {
1036 : // Iteration order: depth first, but delay phis.
1037 : // We need DFS to avoid some duplication of VirtualStates and
1038 : // VirtualObjects, and we want to delay phis to improve performance.
1039 10833248 : if (use->opcode() == IrOpcode::kEffectPhi) {
1040 1659482 : if (!status_analysis_->IsInQueue(use->id())) {
1041 : status_analysis_->SetInQueue(use->id(), true);
1042 421631 : queue.push_front(use);
1043 : }
1044 8638168 : } else if ((use->opcode() != IrOpcode::kLoadField &&
1045 5139815 : use->opcode() != IrOpcode::kLoadElement) ||
1046 552932 : !status_analysis_->IsDanglingEffectNode(use)) {
1047 13760331 : if (!status_analysis_->IsInQueue(use->id())) {
1048 : status_analysis_->SetInQueue(use->id(), true);
1049 4586777 : queue.push_back(use);
1050 : }
1051 : } else {
1052 106 : danglers.push_back(use);
1053 : }
1054 : }
1055 : }
1056 : // Danglers need to be processed immediately, even if they are
1057 : // on the stack. Since they do not have effect outputs,
1058 : // we don't have to track whether they are on the stack.
1059 4921174 : queue.insert(queue.end(), danglers.begin(), danglers.end());
1060 : danglers.clear();
1061 : }
1062 : }
1063 :
1064 : #ifdef DEBUG
1065 : if (FLAG_trace_turbo_escape) {
1066 : DebugPrint();
1067 : }
1068 : #endif
1069 39769 : }
1070 :
1071 1886660 : bool EscapeStatusAnalysis::IsDanglingEffectNode(Node* node) {
1072 2434019 : if (status_[node->id()] & kDanglingComputed) {
1073 547359 : return status_[node->id()] & kDangling;
1074 : }
1075 1339300 : if (node->op()->EffectInputCount() == 0 ||
1076 892867 : node->op()->EffectOutputCount() == 0 ||
1077 446434 : (node->op()->EffectInputCount() == 1 &&
1078 446434 : NodeProperties::GetEffectInput(node)->opcode() == IrOpcode::kStart)) {
1079 : // The start node is used as a sentinel for nodes that are effectful in
1080 : // general, but for which the analysis has determined that they produce no
1081 : // effects in this instance. We don't consider these nodes dangling.
1082 0 : status_[node->id()] |= kDanglingComputed;
1083 0 : return false;
1084 : }
1085 2156828 : for (Edge edge : node->use_edges()) {
1086 1078361 : Node* use = edge.from();
1087 2156722 : if (aliases_[use->id()] == kNotReachable) continue;
1088 1078353 : if (NodeProperties::IsEffectEdge(edge)) {
1089 446329 : status_[node->id()] |= kDanglingComputed;
1090 446329 : return false;
1091 : }
1092 : }
1093 106 : status_[node->id()] |= kDanglingComputed | kDangling;
1094 106 : return true;
1095 : }
1096 :
1097 7487958 : bool EscapeStatusAnalysis::IsEffectBranchPoint(Node* node) {
1098 11384950 : if (status_[node->id()] & kBranchPointComputed) {
1099 306027 : return status_[node->id()] & kBranchPoint;
1100 : }
1101 : int count = 0;
1102 28101374 : for (Edge edge : node->use_edges()) {
1103 16499608 : Node* use = edge.from();
1104 24946020 : if (aliases_[use->id()] == kNotReachable) continue;
1105 12463864 : if (NodeProperties::IsEffectEdge(edge)) {
1106 7624144 : if ((use->opcode() == IrOpcode::kLoadField ||
1107 3585736 : use->opcode() == IrOpcode::kLoadElement ||
1108 4467461 : use->opcode() == IrOpcode::kLoad) &&
1109 440863 : IsDanglingEffectNode(use))
1110 : continue;
1111 4026577 : if (++count > 1) {
1112 435612 : status_[node->id()] |= kBranchPointComputed | kBranchPoint;
1113 435612 : return true;
1114 : }
1115 : }
1116 : }
1117 3155354 : status_[node->id()] |= kBranchPointComputed;
1118 3155354 : return false;
1119 : }
1120 :
1121 : namespace {
1122 :
1123 4626654 : bool HasFrameStateInput(const Operator* op) {
1124 4626654 : if (op->opcode() == IrOpcode::kCall || op->opcode() == IrOpcode::kTailCall) {
1125 277600 : const CallDescriptor* d = CallDescriptorOf(op);
1126 277600 : return d->NeedsFrameState();
1127 : } else {
1128 4349054 : return OperatorProperties::HasFrameStateInput(op);
1129 : }
1130 : }
1131 :
1132 : } // namespace
1133 :
1134 11279636 : bool EscapeAnalysis::Process(Node* node) {
1135 5048298 : switch (node->opcode()) {
1136 : case IrOpcode::kAllocate:
1137 136036 : ProcessAllocation(node);
1138 136036 : break;
1139 : case IrOpcode::kBeginRegion:
1140 140515 : ForwardVirtualState(node);
1141 140515 : break;
1142 : case IrOpcode::kFinishRegion:
1143 140515 : ProcessFinishRegion(node);
1144 140515 : break;
1145 : case IrOpcode::kStoreField:
1146 1090966 : ProcessStoreField(node);
1147 1090962 : break;
1148 : case IrOpcode::kLoadField:
1149 535601 : ProcessLoadField(node);
1150 535601 : break;
1151 : case IrOpcode::kStoreElement:
1152 156038 : ProcessStoreElement(node);
1153 156038 : break;
1154 : case IrOpcode::kLoadElement:
1155 17331 : ProcessLoadElement(node);
1156 17331 : break;
1157 : case IrOpcode::kCheckMaps:
1158 41445 : ProcessCheckMaps(node);
1159 41445 : break;
1160 : case IrOpcode::kStart:
1161 39769 : ProcessStart(node);
1162 39769 : break;
1163 : case IrOpcode::kEffectPhi:
1164 421631 : return ProcessEffectPhi(node);
1165 : break;
1166 : default:
1167 4656902 : if (node->op()->EffectInputCount() > 0) {
1168 2328451 : ForwardVirtualState(node);
1169 : }
1170 2328452 : ProcessAllocationUsers(node);
1171 2328445 : break;
1172 : }
1173 4626657 : if (HasFrameStateInput(node->op())) {
1174 3209362 : virtual_states_[node->id()]->SetCopyRequired();
1175 : }
1176 : return true;
1177 : }
1178 :
1179 10608889 : void EscapeAnalysis::ProcessAllocationUsers(Node* node) {
1180 12448219 : for (Edge edge : node->input_edges()) {
1181 : Node* input = edge.to();
1182 10119774 : Node* use = edge.from();
1183 20239547 : if (edge.index() >= use->op()->ValueInputCount() +
1184 : OperatorProperties::GetContextInputCount(use->op()))
1185 : continue;
1186 4140220 : switch (node->opcode()) {
1187 : case IrOpcode::kStoreField:
1188 : case IrOpcode::kLoadField:
1189 : case IrOpcode::kStoreElement:
1190 : case IrOpcode::kLoadElement:
1191 : case IrOpcode::kFrameState:
1192 : case IrOpcode::kStateValues:
1193 : case IrOpcode::kReferenceEqual:
1194 : case IrOpcode::kFinishRegion:
1195 : case IrOpcode::kObjectIsSmi:
1196 : break;
1197 : case IrOpcode::kCheckMaps: {
1198 0 : CheckMapsParameters params = CheckMapsParametersOf(node->op());
1199 0 : if (params.flags() == CheckMapsFlag::kNone) break;
1200 : } // Fallthrough.
1201 : default:
1202 8280442 : VirtualState* state = virtual_states_[node->id()];
1203 4140219 : if (VirtualObject* obj =
1204 4140221 : GetVirtualObject(state, ResolveReplacement(input))) {
1205 346051 : if (!obj->AllFieldsClear()) {
1206 79129 : obj = CopyForModificationAt(obj, state, node);
1207 79129 : obj->ClearAllFields();
1208 : TRACE("Cleared all fields of @%d:#%d\n",
1209 : status_analysis_->GetAlias(obj->id()), obj->id());
1210 : }
1211 : }
1212 : break;
1213 : }
1214 : }
1215 2328445 : }
1216 :
1217 136253 : VirtualState* EscapeAnalysis::CopyForModificationAt(VirtualState* state,
1218 253038 : Node* node) {
1219 136253 : if (state->owner() != node) {
1220 126519 : VirtualState* new_state = new (zone()) VirtualState(node, *state);
1221 253038 : virtual_states_[node->id()] = new_state;
1222 : TRACE("Copying virtual state %p to new state %p at node %s#%d\n",
1223 : static_cast<void*>(state), static_cast<void*>(new_state),
1224 : node->op()->mnemonic(), node->id());
1225 126519 : return new_state;
1226 : }
1227 : return state;
1228 : }
1229 :
1230 1085247 : VirtualObject* EscapeAnalysis::CopyForModificationAt(VirtualObject* obj,
1231 : VirtualState* state,
1232 : Node* node) {
1233 985500 : if (obj->NeedCopyForModification()) {
1234 99747 : state = CopyForModificationAt(state, node);
1235 : // TODO(tebbi): this copies the complete virtual state. Replace with a more
1236 : // precise analysis of which objects are actually affected by the change.
1237 99747 : Alias changed_alias = status_analysis_->GetAlias(obj->id());
1238 3531430 : for (Alias alias = 0; alias < state->size(); ++alias) {
1239 1665968 : if (VirtualObject* next_obj = state->VirtualObjectFromAlias(alias)) {
1240 544483 : if (alias != changed_alias && next_obj->NeedCopyForModification()) {
1241 221850 : state->Copy(next_obj, alias);
1242 : }
1243 : }
1244 : }
1245 99747 : return state->Copy(obj, changed_alias);
1246 : }
1247 : return obj;
1248 : }
1249 :
1250 10605313 : void EscapeAnalysis::ForwardVirtualState(Node* node) {
1251 : DCHECK_EQ(node->op()->EffectInputCount(), 1);
1252 : #ifdef DEBUG
1253 : if (node->opcode() != IrOpcode::kLoadField &&
1254 : node->opcode() != IrOpcode::kLoadElement &&
1255 : node->opcode() != IrOpcode::kLoad &&
1256 : status_analysis_->IsDanglingEffectNode(node)) {
1257 : PrintF("Dangeling effect node: #%d (%s)\n", node->id(),
1258 : node->op()->mnemonic());
1259 : UNREACHABLE();
1260 : }
1261 : #endif // DEBUG
1262 9173788 : Node* effect = NodeProperties::GetEffectInput(node);
1263 : DCHECK_NOT_NULL(virtual_states_[effect->id()]);
1264 9915409 : if (virtual_states_[node->id()]) {
1265 689904 : virtual_states_[node->id()]->UpdateFrom(virtual_states_[effect->id()],
1266 689904 : zone());
1267 : } else {
1268 7793980 : virtual_states_[node->id()] = virtual_states_[effect->id()];
1269 : TRACE("Forwarding object state %p from %s#%d to %s#%d",
1270 : static_cast<void*>(virtual_states_[effect->id()]),
1271 : effect->op()->mnemonic(), effect->id(), node->op()->mnemonic(),
1272 : node->id());
1273 3896990 : if (status_analysis_->IsEffectBranchPoint(effect)) {
1274 1483242 : virtual_states_[node->id()]->SetCopyRequired();
1275 : TRACE(", effect input %s#%d is branch point", effect->op()->mnemonic(),
1276 : effect->id());
1277 : }
1278 : TRACE("\n");
1279 : }
1280 4586895 : }
1281 :
1282 79538 : void EscapeAnalysis::ProcessStart(Node* node) {
1283 : DCHECK_EQ(node->opcode(), IrOpcode::kStart);
1284 39769 : virtual_states_[node->id()] =
1285 119307 : new (zone()) VirtualState(node, zone(), status_analysis_->AliasCount());
1286 39769 : }
1287 :
1288 3641860 : bool EscapeAnalysis::ProcessEffectPhi(Node* node) {
1289 : DCHECK_EQ(node->opcode(), IrOpcode::kEffectPhi);
1290 : bool changed = false;
1291 :
1292 2570940 : VirtualState* mergeState = virtual_states_[node->id()];
1293 421631 : if (!mergeState) {
1294 : mergeState =
1295 450088 : new (zone()) VirtualState(node, zone(), status_analysis_->AliasCount());
1296 444860 : virtual_states_[node->id()] = mergeState;
1297 : changed = true;
1298 : TRACE("Effect Phi #%d got new virtual state %p.\n", node->id(),
1299 : static_cast<void*>(mergeState));
1300 : }
1301 :
1302 421631 : cache_->Clear();
1303 :
1304 : TRACE("At Effect Phi #%d, merging states into %p:", node->id(),
1305 : static_cast<void*>(mergeState));
1306 :
1307 5764953 : for (int i = 0; i < node->op()->EffectInputCount(); ++i) {
1308 1500020 : Node* input = NodeProperties::GetEffectInput(node, i);
1309 3000040 : VirtualState* state = virtual_states_[input->id()];
1310 1500020 : if (state) {
1311 1206250 : cache_->states().push_back(state);
1312 1206250 : if (state == mergeState) {
1313 : mergeState = new (zone())
1314 15684 : VirtualState(node, zone(), status_analysis_->AliasCount());
1315 10456 : virtual_states_[node->id()] = mergeState;
1316 : changed = true;
1317 : }
1318 : }
1319 : TRACE(" %p (from %d %s)", static_cast<void*>(state), input->id(),
1320 : input->op()->mnemonic());
1321 : }
1322 : TRACE("\n");
1323 :
1324 843262 : if (cache_->states().size() == 0) {
1325 : return changed;
1326 : }
1327 :
1328 : changed =
1329 421631 : mergeState->MergeFrom(cache_, zone(), graph(), common(), node) || changed;
1330 :
1331 : TRACE("Merge %s the node.\n", changed ? "changed" : "did not change");
1332 :
1333 421631 : if (changed) {
1334 294530 : status_analysis_->ResizeStatusVector();
1335 : }
1336 421631 : return changed;
1337 : }
1338 :
1339 387431 : void EscapeAnalysis::ProcessAllocation(Node* node) {
1340 : DCHECK_EQ(node->opcode(), IrOpcode::kAllocate);
1341 136036 : ForwardVirtualState(node);
1342 387431 : VirtualState* state = virtual_states_[node->id()];
1343 136036 : Alias alias = status_analysis_->GetAlias(node->id());
1344 :
1345 : // Check if we have already processed this node.
1346 272072 : if (state->VirtualObjectFromAlias(alias)) {
1347 136036 : return;
1348 : }
1349 :
1350 230718 : if (state->owner()->opcode() == IrOpcode::kEffectPhi) {
1351 36506 : state = CopyForModificationAt(state, node);
1352 : }
1353 :
1354 : NumberMatcher size(node->InputAt(0));
1355 : DCHECK(node->InputAt(0)->opcode() != IrOpcode::kInt32Constant &&
1356 : node->InputAt(0)->opcode() != IrOpcode::kInt64Constant &&
1357 : node->InputAt(0)->opcode() != IrOpcode::kFloat32Constant &&
1358 : node->InputAt(0)->opcode() != IrOpcode::kFloat64Constant);
1359 115359 : if (size.HasValue()) {
1360 : VirtualObject* obj = new (zone()) VirtualObject(
1361 230718 : node->id(), state, zone(), size.Value() / kPointerSize, false);
1362 : state->SetVirtualObject(alias, obj);
1363 : } else {
1364 : state->SetVirtualObject(
1365 : alias, new (zone()) VirtualObject(node->id(), state, zone()));
1366 : }
1367 : }
1368 :
1369 276550 : void EscapeAnalysis::ProcessFinishRegion(Node* node) {
1370 : DCHECK_EQ(node->opcode(), IrOpcode::kFinishRegion);
1371 140515 : ForwardVirtualState(node);
1372 140515 : Node* allocation = NodeProperties::GetValueInput(node, 0);
1373 140515 : if (allocation->opcode() == IrOpcode::kAllocate) {
1374 272070 : VirtualState* state = virtual_states_[node->id()];
1375 : VirtualObject* obj =
1376 272070 : state->VirtualObjectFromAlias(status_analysis_->GetAlias(node->id()));
1377 : DCHECK_NOT_NULL(obj);
1378 : obj->SetInitialized();
1379 : }
1380 140515 : }
1381 :
1382 8222296 : Node* EscapeAnalysis::replacement(Node* node) {
1383 16497446 : if (node->id() >= replacements_.size()) return nullptr;
1384 8262110 : return replacements_[node->id()];
1385 : }
1386 :
1387 520037 : bool EscapeAnalysis::SetReplacement(Node* node, Node* rep) {
1388 521642 : bool changed = replacements_[node->id()] != rep;
1389 521248 : replacements_[node->id()] = rep;
1390 394 : return changed;
1391 : }
1392 :
1393 0 : bool EscapeAnalysis::UpdateReplacement(VirtualState* state, Node* node,
1394 : Node* rep) {
1395 0 : if (SetReplacement(node, rep)) {
1396 : if (rep) {
1397 : TRACE("Replacement of #%d is #%d (%s)\n", node->id(), rep->id(),
1398 : rep->op()->mnemonic());
1399 : } else {
1400 : TRACE("Replacement of #%d cleared\n", node->id());
1401 : }
1402 : return true;
1403 : }
1404 0 : return false;
1405 : }
1406 :
1407 7619718 : Node* EscapeAnalysis::ResolveReplacement(Node* node) {
1408 15274740 : while (replacement(node)) {
1409 : node = replacement(node);
1410 : }
1411 7619718 : return node;
1412 : }
1413 :
1414 549724 : Node* EscapeAnalysis::GetReplacement(Node* node) {
1415 : Node* result = nullptr;
1416 1116998 : while (replacement(node)) {
1417 : node = result = replacement(node);
1418 : }
1419 549724 : return result;
1420 : }
1421 :
1422 2503182 : bool EscapeAnalysis::IsVirtual(Node* node) {
1423 5006364 : if (node->id() >= status_analysis_->GetStatusVectorSize()) {
1424 : return false;
1425 : }
1426 2503182 : return status_analysis_->IsVirtual(node);
1427 : }
1428 :
1429 18 : bool EscapeAnalysis::IsEscaped(Node* node) {
1430 36 : if (node->id() >= status_analysis_->GetStatusVectorSize()) {
1431 : return false;
1432 : }
1433 18 : return status_analysis_->IsEscaped(node);
1434 : }
1435 :
1436 7 : bool EscapeAnalysis::CompareVirtualObjects(Node* left, Node* right) {
1437 : DCHECK(IsVirtual(left) && IsVirtual(right));
1438 7 : left = ResolveReplacement(left);
1439 7 : right = ResolveReplacement(right);
1440 7 : if (IsEquivalentPhi(left, right)) {
1441 : return true;
1442 : }
1443 7 : return false;
1444 : }
1445 :
1446 : namespace {
1447 :
1448 : bool IsOffsetForFieldAccessCorrect(const FieldAccess& access) {
1449 : #if V8_TARGET_LITTLE_ENDIAN
1450 15964 : return (access.offset % kPointerSize) == 0;
1451 : #else
1452 : return ((access.offset +
1453 : (1 << ElementSizeLog2Of(access.machine_type.representation()))) %
1454 : kPointerSize) == 0;
1455 : #endif
1456 : }
1457 :
1458 1001069 : int OffsetForFieldAccess(Node* node) {
1459 1001069 : FieldAccess access = FieldAccessOf(node->op());
1460 : DCHECK(IsOffsetForFieldAccessCorrect(access));
1461 1001069 : return access.offset / kPointerSize;
1462 : }
1463 :
1464 148680 : int OffsetForElementAccess(Node* node, int index) {
1465 148680 : ElementAccess access = ElementAccessOf(node->op());
1466 : DCHECK_GE(ElementSizeLog2Of(access.machine_type.representation()),
1467 : kPointerSizeLog2);
1468 : DCHECK_EQ(access.header_size % kPointerSize, 0);
1469 148680 : return access.header_size / kPointerSize + index;
1470 : }
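// For example, with 8-byte pointers (kPointerSize == 8) a FieldAccess at byte
// offset 24 maps to field index 3, and an ElementAccess with a 16-byte header
// at element index 2 maps to field index 16 / 8 + 2 == 4.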
1471 :
1472 : } // namespace
1473 :
1474 47892 : void EscapeAnalysis::ProcessLoadFromPhi(int offset, Node* from, Node* load,
1475 0 : VirtualState* state) {
1476 : TRACE("Load #%d from phi #%d", load->id(), from->id());
1477 :
1478 15964 : cache_->fields().clear();
1479 95784 : for (int i = 0; i < load->op()->ValueInputCount(); ++i) {
1480 15964 : Node* input = NodeProperties::GetValueInput(load, i);
1481 15964 : cache_->fields().push_back(input);
1482 : }
1483 :
1484 : cache_->LoadVirtualObjectsForFieldsFrom(state,
1485 15964 : status_analysis_->GetAliasMap());
1486 47892 : if (cache_->objects().size() == cache_->fields().size()) {
1487 0 : cache_->GetFields(offset);
1488 0 : if (cache_->fields().size() == cache_->objects().size()) {
1489 : Node* rep = replacement(load);
1490 0 : if (!rep || !IsEquivalentPhi(rep, cache_->fields())) {
1491 0 : int value_input_count = static_cast<int>(cache_->fields().size());
1492 : Type* phi_type = Type::None();
1493 0 : for (Node* input : cache_->fields()) {
1494 : Type* input_type = NodeProperties::GetType(input);
1495 0 : phi_type = Type::Union(phi_type, input_type, graph()->zone());
1496 : }
1497 0 : cache_->fields().push_back(NodeProperties::GetControlInput(from));
1498 : Node* phi = graph()->NewNode(
1499 : common()->Phi(MachineRepresentation::kTagged, value_input_count),
1500 0 : value_input_count + 1, &cache_->fields().front());
1501 : NodeProperties::SetType(phi, phi_type);
1502 0 : status_analysis_->ResizeStatusVector();
1503 : SetReplacement(load, phi);
1504 : TRACE(" got phi created.\n");
1505 : } else {
1506 : TRACE(" has already phi #%d.\n", rep->id());
1507 : }
1508 : } else {
1509 : TRACE(" has incomplete field info.\n");
1510 : }
1511 : } else {
1512 : TRACE(" has incomplete virtual object info.\n");
1513 : }
1514 15964 : }
1515 :
1516 1087166 : void EscapeAnalysis::ProcessLoadField(Node* node) {
1517 : DCHECK_EQ(node->opcode(), IrOpcode::kLoadField);
1518 535601 : ForwardVirtualState(node);
1519 945395 : Node* from = ResolveReplacement(NodeProperties::GetValueInput(node, 0));
1520 1071202 : VirtualState* state = virtual_states_[node->id()];
1521 535601 : if (VirtualObject* object = GetVirtualObject(state, from)) {
1522 125807 : if (!object->IsTracked()) return;
1523 : int offset = OffsetForFieldAccess(node);
1524 251614 : if (static_cast<size_t>(offset) >= object->field_count()) {
1525 : // We have a load from a field that is not inside the {object}. This
1526 : // can only happen with conflicting type feedback and for dead {node}s.
1527 : // For now, we just mark the {object} as escaping.
1528 : // TODO(turbofan): Consider introducing an Undefined or None operator
1529 : // that we can replace this load with, since we know it's dead code.
1530 17 : if (status_analysis_->SetEscaped(from)) {
1531 : TRACE(
1532 : "Setting #%d (%s) to escaped because load field #%d from "
1533 : "offset %d outside of object\n",
1534 : from->id(), from->op()->mnemonic(), node->id(), offset);
1535 : }
1536 : return;
1537 : }
1538 : Node* value = object->GetField(offset);
1539 125790 : if (value) {
1540 23054 : value = ResolveReplacement(value);
1541 : }
1542 : // Record that the load has this alias.
1543 : UpdateReplacement(state, node, value);
1544 425758 : } else if (from->opcode() == IrOpcode::kPhi &&
1545 15964 : IsOffsetForFieldAccessCorrect(FieldAccessOf(node->op()))) {
1546 : int offset = OffsetForFieldAccess(node);
1547 : // Only binary phis are supported for now.
1548 15964 : ProcessLoadFromPhi(offset, from, node, state);
1549 : } else {
1550 : UpdateReplacement(state, node, nullptr);
1551 : }
1552 : }
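// A compact sketch of the decision made in ProcessLoadField above: if the
// object being read is a tracked virtual object and the offset lies inside it,
// the remembered field value (if any) becomes the load's replacement, while an
// offset outside the object forces it to escape. TrackedObject,
// LoadFieldAction and DecideLoadField are assumed names for illustration only.
#include <cstddef>
#include <optional>
#include <vector>

struct TrackedObject {
  std::vector<std::optional<int>> fields;  // int as a stand-in node id
};

enum class LoadFieldAction { kReplaceWithValue, kNoReplacement, kMarkEscaped };

LoadFieldAction DecideLoadField(const TrackedObject* object, size_t offset,
                                std::optional<int>* replacement_out) {
  if (object == nullptr) return LoadFieldAction::kNoReplacement;
  if (offset >= object->fields.size()) {
    // Conflicting type feedback or dead code: give up on the object.
    return LoadFieldAction::kMarkEscaped;
  }
  *replacement_out = object->fields[offset];
  return LoadFieldAction::kReplaceWithValue;
}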
1553 :
1554 41565 : void EscapeAnalysis::ProcessCheckMaps(Node* node) {
1555 : DCHECK_EQ(node->opcode(), IrOpcode::kCheckMaps);
1556 41445 : ForwardVirtualState(node);
1557 41445 : Node* checked = ResolveReplacement(NodeProperties::GetValueInput(node, 0));
1558 41445 : if (FLAG_turbo_experimental) {
1559 120 : VirtualState* state = virtual_states_[node->id()];
1560 60 : if (VirtualObject* object = GetVirtualObject(state, checked)) {
1561 60 : if (!object->IsTracked()) {
1562 0 : if (status_analysis_->SetEscaped(node)) {
1563 : TRACE(
1564 : "Setting #%d (%s) to escaped because checked object #%i is not "
1565 : "tracked\n",
1566 : node->id(), node->op()->mnemonic(), object->id());
1567 : }
1568 60 : return;
1569 : }
1570 60 : CheckMapsParameters params = CheckMapsParametersOf(node->op());
1571 :
1572 60 : Node* value = object->GetField(HeapObject::kMapOffset / kPointerSize);
1573 60 : if (value) {
1574 60 : value = ResolveReplacement(value);
1575 : // TODO(tebbi): We want to extend this beyond constant folding with a
1576 : // CheckMapsValue operator that takes the load-eliminated map value as
1577 : // input.
1578 180 : if (value->opcode() == IrOpcode::kHeapConstant &&
1579 : params.maps().contains(ZoneHandleSet<Map>(
1580 180 : Handle<Map>::cast(OpParameter<Handle<HeapObject>>(value))))) {
1581 : TRACE("CheckMaps #%i seems to be redundant (until now).\n",
1582 : node->id());
1583 : return;
1584 : }
1585 : }
1586 : }
1587 : }
1588 41385 : if (status_analysis_->SetEscaped(node)) {
1589 : TRACE("Setting #%d (%s) to escaped (checking #%i)\n", node->id(),
1590 : node->op()->mnemonic(), checked->id());
1591 : }
1592 : }
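// A small sketch of the map-check folding above (behind FLAG_turbo_experimental):
// when the tracked map field of the checked object has been constant-folded and
// that constant is a member of the map set the check expects, the CheckMaps is
// redundant. The set<int> of map ids is a simplification of ZoneHandleSet<Map>,
// and CheckMapsIsRedundant is an assumed helper name.
#include <optional>
#include <set>

bool CheckMapsIsRedundant(const std::set<int>& expected_map_ids,
                          std::optional<int> known_map_id) {
  if (!known_map_id) return false;  // no folded map value: cannot prove anything
  return expected_map_ids.count(*known_map_id) != 0;
}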
1593 :
1594 34662 : void EscapeAnalysis::ProcessLoadElement(Node* node) {
1595 : DCHECK_EQ(node->opcode(), IrOpcode::kLoadElement);
1596 17331 : ForwardVirtualState(node);
1597 18542 : Node* from = ResolveReplacement(NodeProperties::GetValueInput(node, 0));
1598 34662 : VirtualState* state = virtual_states_[node->id()];
1599 : Node* index_node = node->InputAt(1);
1600 : NumberMatcher index(index_node);
1601 : DCHECK(index_node->opcode() != IrOpcode::kInt32Constant &&
1602 : index_node->opcode() != IrOpcode::kInt64Constant &&
1603 : index_node->opcode() != IrOpcode::kFloat32Constant &&
1604 : index_node->opcode() != IrOpcode::kFloat64Constant);
1605 17331 : if (index.HasValue()) {
1606 1234 : if (VirtualObject* object = GetVirtualObject(state, from)) {
1607 23 : if (!object->IsTracked()) return;
1608 23 : int offset = OffsetForElementAccess(node, index.Value());
1609 46 : if (static_cast<size_t>(offset) >= object->field_count()) return;
1610 : Node* value = object->GetField(offset);
1611 23 : if (value) {
1612 0 : value = ResolveReplacement(value);
1613 : }
1614 : // Record that the load has this alias.
1615 : UpdateReplacement(state, node, value);
1616 1211 : } else if (from->opcode() == IrOpcode::kPhi) {
1617 0 : int offset = OffsetForElementAccess(node, index.Value());
1618 0 : ProcessLoadFromPhi(offset, from, node, state);
1619 : } else {
1620 : UpdateReplacement(state, node, nullptr);
1621 : }
1622 : } else {
1623 : // We have a load from a non-const index; we cannot eliminate the object.
1624 16097 : if (status_analysis_->SetEscaped(from)) {
1625 : TRACE(
1626 : "Setting #%d (%s) to escaped because load element #%d from non-const "
1627 : "index #%d (%s)\n",
1628 : from->id(), from->op()->mnemonic(), node->id(), index_node->id(),
1629 : index_node->op()->mnemonic());
1630 : }
1631 : }
1632 : }
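// A sketch of the element-load handling above: a constant index can be mapped
// to a slot of the virtual object (assumed here to be a fixed header size plus
// the index), while a non-constant index means the object must be treated as
// escaping. kHeaderSlots, ElementLoadDecision and ClassifyElementLoad are
// illustrative assumptions, not values taken from this file.
#include <cstddef>
#include <optional>

constexpr size_t kHeaderSlots = 2;  // assumed header size in slots

enum class ElementLoadDecision { kUseFieldSlot, kEscape };

ElementLoadDecision ClassifyElementLoad(std::optional<size_t> constant_index,
                                        size_t* slot_out) {
  if (!constant_index) return ElementLoadDecision::kEscape;  // unknown index
  *slot_out = kHeaderSlots + *constant_index;
  return ElementLoadDecision::kUseFieldSlot;
}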
1633 :
1634 2181932 : void EscapeAnalysis::ProcessStoreField(Node* node) {
1635 : DCHECK_EQ(node->opcode(), IrOpcode::kStoreField);
1636 1090966 : ForwardVirtualState(node);
1637 1090966 : Node* to = ResolveReplacement(NodeProperties::GetValueInput(node, 0));
1638 2181932 : VirtualState* state = virtual_states_[node->id()];
1639 1090966 : if (VirtualObject* object = GetVirtualObject(state, to)) {
1640 859298 : if (!object->IsTracked()) return;
1641 : int offset = OffsetForFieldAccess(node);
1642 1718596 : if (static_cast<size_t>(offset) >= object->field_count()) {
1643 : // We have a store to a field that is not inside the {object}. This
1644 : // can only happen with conflicting type feedback and for dead {node}s.
1645 : // For now, we just mark the {object} as escaping.
1646 : // TODO(turbofan): Consider just eliminating the store in the reducer
1647 : // pass, as it's dead code anyways.
1648 0 : if (status_analysis_->SetEscaped(to)) {
1649 : TRACE(
1650 : "Setting #%d (%s) to escaped because store field #%d to "
1651 : "offset %d outside of object\n",
1652 : to->id(), to->op()->mnemonic(), node->id(), offset);
1653 : }
1654 : return;
1655 : }
1656 859298 : Node* val = ResolveReplacement(NodeProperties::GetValueInput(node, 1));
1657 : // TODO(mstarzinger): The following is a workaround that avoids tracking some
1658 : // well-known raw fields. We only ever store default initial values into these
1659 : // fields, which are also hard-coded in {TranslatedState::MaterializeAt}.
1660 859298 : if (val->opcode() == IrOpcode::kInt32Constant ||
1661 : val->opcode() == IrOpcode::kInt64Constant) {
1662 : DCHECK(FieldAccessOf(node->op()).offset == JSFunction::kCodeEntryOffset ||
1663 : FieldAccessOf(node->op()).offset == Name::kHashFieldOffset);
1664 0 : val = slot_not_analyzed_;
1665 : }
1666 859298 : if (object->GetField(offset) != val) {
1667 756135 : object = CopyForModificationAt(object, state, node);
1668 756135 : object->SetField(offset, val);
1669 : }
1670 : }
1671 : }
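// A sketch of the copy-on-write update in ProcessStoreField above: a store only
// mutates a copy of the virtual object that is private to the current program
// point, so states recorded at earlier effect nodes stay intact. The
// shared_ptr-based copy here is only an analogy for CopyForModificationAt;
// CowObject and StoreFieldSketch are assumed names.
#include <cstddef>
#include <memory>
#include <vector>

struct CowObject {
  std::vector<int> fields;  // int as a stand-in node id
};

// Returns the (possibly copied) object that now holds the new field value.
std::shared_ptr<CowObject> StoreFieldSketch(std::shared_ptr<CowObject> object,
                                            size_t offset, int value) {
  if (offset >= object->fields.size()) return object;  // ignore out of bounds
  if (object->fields[offset] == value) return object;  // nothing to change
  if (object.use_count() > 1) {
    object = std::make_shared<CowObject>(*object);  // copy for modification
  }
  object->fields[offset] = value;
  return object;
}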
1672 :
1673 156038 : void EscapeAnalysis::ProcessStoreElement(Node* node) {
1674 : DCHECK_EQ(node->opcode(), IrOpcode::kStoreElement);
1675 156038 : ForwardVirtualState(node);
1676 156038 : Node* to = ResolveReplacement(NodeProperties::GetValueInput(node, 0));
1677 : Node* index_node = node->InputAt(1);
1678 : NumberMatcher index(index_node);
1679 : DCHECK(index_node->opcode() != IrOpcode::kInt32Constant &&
1680 : index_node->opcode() != IrOpcode::kInt64Constant &&
1681 : index_node->opcode() != IrOpcode::kFloat32Constant &&
1682 : index_node->opcode() != IrOpcode::kFloat64Constant);
1683 312076 : VirtualState* state = virtual_states_[node->id()];
1684 156038 : if (index.HasValue()) {
1685 148706 : if (VirtualObject* object = GetVirtualObject(state, to)) {
1686 148657 : if (!object->IsTracked()) return;
1687 148657 : int offset = OffsetForElementAccess(node, index.Value());
1688 297314 : if (static_cast<size_t>(offset) >= object->field_count()) return;
1689 148657 : Node* val = ResolveReplacement(NodeProperties::GetValueInput(node, 2));
1690 148657 : if (object->GetField(offset) != val) {
1691 148121 : object = CopyForModificationAt(object, state, node);
1692 148121 : object->SetField(offset, val);
1693 : }
1694 : }
1695 : } else {
1696 : // We have a store to a non-const index; we cannot eliminate the object.
1697 7332 : if (status_analysis_->SetEscaped(to)) {
1698 : TRACE(
1699 : "Setting #%d (%s) to escaped because store element #%d to non-const "
1700 : "index #%d (%s)\n",
1701 : to->id(), to->op()->mnemonic(), node->id(), index_node->id(),
1702 : index_node->op()->mnemonic());
1703 : }
1704 7332 : if (VirtualObject* object = GetVirtualObject(state, to)) {
1705 4773 : if (!object->IsTracked()) return;
1706 4773 : if (!object->AllFieldsClear()) {
1707 2115 : object = CopyForModificationAt(object, state, node);
1708 2115 : object->ClearAllFields();
1709 : TRACE("Cleared all fields of @%d:#%d\n",
1710 : status_analysis_->GetAlias(object->id()), object->id());
1711 : }
1712 : }
1713 : }
1714 : }
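// A sketch of the store-element handling above: a store with a constant index
// updates exactly one remembered slot, while a store whose index is unknown
// could hit any slot, so every remembered field value is invalidated (the code
// above additionally marks the object as escaping). ApplyElementStoreSketch is
// an assumed helper name.
#include <cstddef>
#include <optional>
#include <vector>

void ApplyElementStoreSketch(std::vector<std::optional<int>>* fields,
                             std::optional<size_t> constant_index,
                             int stored_value) {
  if (constant_index && *constant_index < fields->size()) {
    (*fields)[*constant_index] = stored_value;  // precise update
    return;
  }
  // Unknown index: forget everything we knew about the object's slots.
  for (auto& slot : *fields) slot = std::nullopt;
}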
1715 :
1716 79500 : Node* EscapeAnalysis::GetOrCreateObjectState(Node* effect, Node* node) {
1717 95897 : if ((node->opcode() == IrOpcode::kFinishRegion ||
1718 70972 : node->opcode() == IrOpcode::kAllocate) &&
1719 15349 : IsVirtual(node)) {
1720 30084 : if (VirtualObject* vobj = GetVirtualObject(virtual_states_[effect->id()],
1721 30084 : ResolveReplacement(node))) {
1722 15042 : if (Node* object_state = vobj->GetObjectState()) {
1723 : return object_state;
1724 : } else {
1725 8835 : cache_->fields().clear();
1726 103124 : for (size_t i = 0; i < vobj->field_count(); ++i) {
1727 42727 : if (Node* field = vobj->GetField(i)) {
1728 85454 : cache_->fields().push_back(ResolveReplacement(field));
1729 : } else {
1730 : return nullptr;
1731 : }
1732 : }
1733 17670 : int input_count = static_cast<int>(cache_->fields().size());
1734 : Node* new_object_state =
1735 : graph()->NewNode(common()->ObjectState(input_count), input_count,
1736 17670 : &cache_->fields().front());
1737 : NodeProperties::SetType(new_object_state, Type::OtherInternal());
1738 : vobj->SetObjectState(new_object_state);
1739 : TRACE(
1740 : "Creating object state #%d for vobj %p (from node #%d) at effect "
1741 : "#%d\n",
1742 : new_object_state->id(), static_cast<void*>(vobj), node->id(),
1743 : effect->id());
1744 : // Now fix uses of other objects.
1745 103124 : for (size_t i = 0; i < vobj->field_count(); ++i) {
1746 42727 : if (Node* field = vobj->GetField(i)) {
1747 42727 : if (Node* field_object_state =
1748 42727 : GetOrCreateObjectState(effect, field)) {
1749 : NodeProperties::ReplaceValueInput(
1750 2146 : new_object_state, field_object_state, static_cast<int>(i));
1751 : }
1752 : }
1753 : }
1754 : return new_object_state;
1755 : }
1756 : }
1757 : }
1758 : return nullptr;
1759 : }
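// A simplified sketch of GetOrCreateObjectState above: the state of a virtual
// object is assembled from its field values once and then cached on the
// object; any unknown field means no state can be built. The recursive fix-up
// of fields that are themselves virtual objects is omitted here, and
// VirtualNode and BuildStateSketch are assumed names.
#include <optional>
#include <vector>

struct VirtualNode {
  std::vector<VirtualNode*> fields;                       // nullptr = unknown
  std::optional<std::vector<VirtualNode*>> cached_state;  // memoized result
};

// Returns the field list that would back an ObjectState node, or nullopt if
// some field is unknown.
std::optional<std::vector<VirtualNode*>> BuildStateSketch(VirtualNode* object) {
  if (object->cached_state) return object->cached_state;  // reuse cached state
  std::vector<VirtualNode*> state;
  for (VirtualNode* field : object->fields) {
    if (field == nullptr) return std::nullopt;  // incomplete: no state
    state.push_back(field);
  }
  object->cached_state = state;
  return state;
}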
1760 :
1761 111235 : bool EscapeAnalysis::IsCyclicObjectState(Node* effect, Node* node) {
1762 170658 : if ((node->opcode() == IrOpcode::kFinishRegion ||
1763 111708 : node->opcode() == IrOpcode::kAllocate) &&
1764 17586 : IsVirtual(node)) {
1765 34226 : if (VirtualObject* vobj = GetVirtualObject(virtual_states_[effect->id()],
1766 34226 : ResolveReplacement(node))) {
1767 34226 : if (cycle_detection_.find(vobj) != cycle_detection_.end()) return true;
1768 : cycle_detection_.insert(vobj);
1769 : bool cycle_detected = false;
1770 196678 : for (size_t i = 0; i < vobj->field_count(); ++i) {
1771 81226 : if (Node* field = vobj->GetField(i)) {
1772 81226 : if (IsCyclicObjectState(effect, field)) cycle_detected = true;
1773 : }
1774 : }
1775 : cycle_detection_.erase(vobj);
1776 17113 : return cycle_detected;
1777 : }
1778 : }
1779 : return false;
1780 : }
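// A standalone sketch of the cycle check above: depth-first search over the
// field graph with a "currently on the DFS stack" set; revisiting an object
// that is still on the stack means its object state would be cyclic. As in the
// code above, the search keeps scanning the remaining fields even after a
// cycle has been found. GraphNode and HasCycleSketch are assumed names.
#include <set>
#include <vector>

struct GraphNode {
  std::vector<GraphNode*> fields;  // edges to other virtual objects
};

bool HasCycleSketch(GraphNode* node, std::set<GraphNode*>* on_stack) {
  if (on_stack->count(node)) return true;  // back edge: cycle
  on_stack->insert(node);
  bool cycle_detected = false;
  for (GraphNode* field : node->fields) {
    if (field != nullptr && HasCycleSketch(field, on_stack)) {
      cycle_detected = true;
    }
  }
  on_stack->erase(node);
  return cycle_detected;
}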
1781 :
1782 0 : void EscapeAnalysis::DebugPrintState(VirtualState* state) {
1783 0 : PrintF("Dumping virtual state %p\n", static_cast<void*>(state));
1784 0 : for (Alias alias = 0; alias < status_analysis_->AliasCount(); ++alias) {
1785 0 : if (VirtualObject* object = state->VirtualObjectFromAlias(alias)) {
1786 : PrintF(" Alias @%d: Object #%d with %zu fields\n", alias, object->id(),
1787 0 : object->field_count());
1788 0 : for (size_t i = 0; i < object->field_count(); ++i) {
1789 0 : if (Node* f = object->GetField(i)) {
1790 0 : PrintF(" Field %zu = #%d (%s)\n", i, f->id(), f->op()->mnemonic());
1791 : }
1792 : }
1793 : }
1794 : }
1795 0 : }
1796 :
1797 0 : void EscapeAnalysis::DebugPrint() {
1798 : ZoneVector<VirtualState*> object_states(zone());
1799 0 : for (NodeId id = 0; id < virtual_states_.size(); id++) {
1800 0 : if (VirtualState* states = virtual_states_[id]) {
1801 0 : if (std::find(object_states.begin(), object_states.end(), states) ==
1802 : object_states.end()) {
1803 0 : object_states.push_back(states);
1804 : }
1805 : }
1806 : }
1807 0 : for (size_t n = 0; n < object_states.size(); n++) {
1808 0 : DebugPrintState(object_states[n]);
1809 : }
1810 0 : }
1811 :
1812 5956265 : VirtualObject* EscapeAnalysis::GetVirtualObject(VirtualState* state,
1813 5956265 : Node* node) {
1814 11912530 : if (node->id() >= status_analysis_->GetAliasMap().size()) return nullptr;
1815 : Alias alias = status_analysis_->GetAlias(node->id());
1816 11903452 : if (alias >= state->size()) return nullptr;
1817 1516824 : return state->VirtualObjectFromAlias(alias);
1818 : }
1819 :
1820 39769 : bool EscapeAnalysis::ExistsVirtualAllocate() {
1821 28756374 : for (size_t id = 0; id < status_analysis_->GetAliasMap().size(); ++id) {
1822 : Alias alias = status_analysis_->GetAlias(static_cast<NodeId>(id));
1823 14348523 : if (alias < EscapeStatusAnalysis::kUntrackable) {
1824 166059 : if (status_analysis_->IsVirtual(static_cast<int>(id))) {
1825 : return true;
1826 : }
1827 : }
1828 : }
1829 : return false;
1830 : }
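// A sketch of the alias-based lookup used by GetVirtualObject and
// ExistsVirtualAllocate above: node ids map to dense alias numbers, and a
// state stores at most one virtual object per alias, so out-of-range node ids
// or aliases simply yield no object. SketchObject and LookupByAlias are
// assumed names; the sentinel values of the real alias map are not modeled.
#include <cstddef>
#include <vector>

struct SketchObject { int id; };

const SketchObject* LookupByAlias(
    const std::vector<size_t>& alias_of_node,
    const std::vector<const SketchObject*>& objects_by_alias, size_t node_id) {
  if (node_id >= alias_of_node.size()) return nullptr;
  size_t alias = alias_of_node[node_id];
  if (alias >= objects_by_alias.size()) return nullptr;
  return objects_by_alias[alias];
}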
1831 :
1832 942975 : Graph* EscapeAnalysis::graph() const { return status_analysis_->graph(); }
1833 :
1834 : } // namespace compiler
1835 : } // namespace internal
1836 : } // namespace v8
|