Line data Source code
1 : // Copyright 2017 the V8 project authors. All rights reserved.
2 : // Use of this source code is governed by a BSD-style license that can be
3 : // found in the LICENSE file.
4 :
5 : #include "src/compiler/escape-analysis-reducer.h"
6 :
7 : #include "src/compiler/all-nodes.h"
8 : #include "src/compiler/simplified-operator.h"
9 : #include "src/compiler/type-cache.h"
10 : #include "src/frame-constants.h"
11 :
12 : namespace v8 {
13 : namespace internal {
14 : namespace compiler {
15 :
16 : #ifdef DEBUG
17 : #define TRACE(...) \
18 : do { \
19 : if (FLAG_trace_turbo_escape) PrintF(__VA_ARGS__); \
20 : } while (false)
21 : #else
22 : #define TRACE(...)
23 : #endif // DEBUG
24 :
// Constructs the reducer. {analysis_result} is the (already computed) result
// of the escape-analysis phase; {zone} backs all temporary allocations
// (object-id cache, node hash cache, argument-elements worklist).
EscapeAnalysisReducer::EscapeAnalysisReducer(
    Editor* editor, JSGraph* jsgraph, EscapeAnalysisResult analysis_result,
    Zone* zone)
    : AdvancedReducer(editor),
      jsgraph_(jsgraph),
      analysis_result_(analysis_result),
      object_id_cache_(zone),
      node_cache_(jsgraph->graph(), zone),
      arguments_elements_(zone),
      zone_(zone) {}
35 :
36 66443 : Node* EscapeAnalysisReducer::MaybeGuard(Node* original, Node* replacement) {
37 : // We might need to guard the replacement if the type of the {replacement}
38 : // node is not in a sub-type relation to the type of the the {original} node.
39 : Type* const replacement_type = NodeProperties::GetType(replacement);
40 : Type* const original_type = NodeProperties::GetType(original);
41 27383 : if (!replacement_type->Is(original_type)) {
42 19530 : Node* const control = NodeProperties::GetControlInput(original);
43 : replacement = jsgraph()->graph()->NewNode(
44 39060 : jsgraph()->common()->TypeGuard(original_type), replacement, control);
45 : NodeProperties::SetType(replacement, original_type);
46 : }
47 27383 : return replacement;
48 : }
49 :
50 : namespace {
51 :
52 48831150 : Node* SkipTypeGuards(Node* node) {
53 48831150 : while (node->opcode() == IrOpcode::kTypeGuard) {
54 317 : node = NodeProperties::GetValueInput(node, 0);
55 : }
56 : return node;
57 : }
58 :
59 : } // namespace
60 :
61 22082 : Node* EscapeAnalysisReducer::ObjectIdNode(const VirtualObject* vobject) {
62 : VirtualObject::Id id = vobject->id();
63 48696 : if (id >= object_id_cache_.size()) object_id_cache_.resize(id + 1);
64 15062 : if (!object_id_cache_[id]) {
65 7020 : Node* node = jsgraph()->graph()->NewNode(jsgraph()->common()->ObjectId(id));
66 : NodeProperties::SetType(node, Type::Object());
67 3510 : object_id_cache_[id] = node;
68 : }
69 15062 : return object_id_cache_[id];
70 : }
71 :
Reduction EscapeAnalysisReducer::Reduce(Node* node) {
  // If escape analysis computed a replacement value for {node}, rewire uses
  // to it (type-guarded if needed) and detach {node} from effect/control.
  if (Node* replacement = analysis_result().GetReplacementOf(node)) {
    DCHECK(node->opcode() != IrOpcode::kAllocate &&
           node->opcode() != IrOpcode::kFinishRegion);
    DCHECK_NE(replacement, node);
    // Dead replacements are not guarded; dead nodes carry no useful type.
    if (replacement != jsgraph()->Dead()) {
      replacement = MaybeGuard(node, replacement);
    }
    RelaxEffectsAndControls(node);
    return Replace(replacement);
  }

  switch (node->opcode()) {
    case IrOpcode::kAllocate: {
      // Non-escaping allocations are taken out of the effect chain; their
      // fields have been virtualized by the analysis.
      const VirtualObject* vobject = analysis_result().GetVirtualObject(node);
      if (vobject && !vobject->HasEscaped()) {
        RelaxEffectsAndControls(node);
      }
      return NoChange();
    }
    case IrOpcode::kFinishRegion: {
      // Remove empty Begin/FinishRegion pairs from the effect chain.
      Node* effect = NodeProperties::GetEffectInput(node, 0);
      if (effect->opcode() == IrOpcode::kBeginRegion) {
        RelaxEffectsAndControls(effect);
        RelaxEffectsAndControls(node);
      }
      return NoChange();
    }
    case IrOpcode::kNewArgumentsElements:
      // Collected here; actually rewritten in Finalize() once all uses are
      // known.
      arguments_elements_.insert(node);
      return NoChange();
    default: {
      // TODO(sigurds): Change this to GetFrameStateInputCount once
      // it is working. For now we use EffectInputCount > 0 to determine
      // whether a node might have a frame state input.
      if (node->op()->EffectInputCount() > 0) {
        ReduceFrameStateInputs(node);
      }
      return NoChange();
    }
  }
}
114 :
115 : // While doing DFS on the FrameState tree, we have to recognize duplicate
116 : // occurrences of virtual objects.
117 : class Deduplicator {
118 : public:
119 : explicit Deduplicator(Zone* zone) : is_duplicate_(zone) {}
120 80233 : bool SeenBefore(const VirtualObject* vobject) {
121 : VirtualObject::Id id = vobject->id();
122 240699 : if (id >= is_duplicate_.size()) {
123 55896 : is_duplicate_.resize(id + 1);
124 : }
125 : bool is_duplicate = is_duplicate_[id];
126 : is_duplicate_[id] = true;
127 80233 : return is_duplicate;
128 : }
129 :
130 : private:
131 : ZoneVector<bool> is_duplicate_;
132 : };
133 :
134 15558669 : void EscapeAnalysisReducer::ReduceFrameStateInputs(Node* node) {
135 : DCHECK_GE(node->op()->EffectInputCount(), 1);
136 102599172 : for (int i = 0; i < node->InputCount(); ++i) {
137 41059071 : Node* input = node->InputAt(i);
138 41059071 : if (input->opcode() == IrOpcode::kFrameState) {
139 : Deduplicator deduplicator(zone());
140 5318222 : if (Node* ret = ReduceDeoptState(input, node, &deduplicator)) {
141 5318290 : node->ReplaceInput(i, ret);
142 : }
143 : }
144 : }
145 10240515 : }
146 :
147 106080296 : Node* EscapeAnalysisReducer::ReduceDeoptState(Node* node, Node* effect,
148 387792 : Deduplicator* deduplicator) {
149 66690625 : if (node->opcode() == IrOpcode::kFrameState) {
150 5645732 : NodeHashCache::Constructor new_node(&node_cache_, node);
151 : // This input order is important to match the DFS traversal used in the
152 : // instruction selector. Otherwise, the instruction selector might find a
153 : // duplicate node before the original one.
154 39519648 : for (int input_id : {kFrameStateOuterStateInput, kFrameStateFunctionInput,
155 : kFrameStateParametersInput, kFrameStateContextInput,
156 33873952 : kFrameStateLocalsInput, kFrameStateStackInput}) {
157 : Node* input = node->InputAt(input_id);
158 : new_node.ReplaceInput(ReduceDeoptState(input, effect, deduplicator),
159 33873952 : input_id);
160 : }
161 5645696 : return new_node.Get();
162 61044893 : } else if (node->opcode() == IrOpcode::kStateValues) {
163 12213992 : NodeHashCache::Constructor new_node(&node_cache_, node);
164 118169013 : for (int i = 0; i < node->op()->ValueInputCount(); ++i) {
165 27175686 : Node* input = NodeProperties::GetValueInput(node, i);
166 : new_node.ReplaceValueInput(ReduceDeoptState(input, effect, deduplicator),
167 27175684 : i);
168 : }
169 12213985 : return new_node.Get();
170 48830771 : } else if (const VirtualObject* vobject =
171 48830833 : analysis_result().GetVirtualObject(SkipTypeGuards(node))) {
172 1048905 : if (vobject->HasEscaped()) return node;
173 80233 : if (deduplicator->SeenBefore(vobject)) {
174 15062 : return ObjectIdNode(vobject);
175 : } else {
176 : std::vector<Node*> inputs;
177 775584 : for (int offset = 0; offset < vobject->size(); offset += kPointerSize) {
178 : Node* field =
179 322621 : analysis_result().GetVirtualObjectField(vobject, offset, effect);
180 322621 : CHECK_NOT_NULL(field);
181 322621 : if (field != jsgraph()->Dead()) {
182 645242 : inputs.push_back(ReduceDeoptState(field, effect, deduplicator));
183 : }
184 : }
185 130342 : int num_inputs = static_cast<int>(inputs.size());
186 : NodeHashCache::Constructor new_node(
187 : &node_cache_,
188 : jsgraph()->common()->ObjectState(vobject->id(), num_inputs),
189 65171 : num_inputs, &inputs.front(), NodeProperties::GetType(node));
190 65171 : return new_node.Get();
191 : }
192 : } else {
193 : return node;
194 : }
195 : }
196 :
197 886718 : void EscapeAnalysisReducer::VerifyReplacement() const {
198 886718 : AllNodes all(zone(), jsgraph()->graph());
199 29436825 : for (Node* node : all.reachable) {
200 28550073 : if (node->opcode() == IrOpcode::kAllocate) {
201 94691 : if (const VirtualObject* vobject =
202 94723 : analysis_result().GetVirtualObject(node)) {
203 93179 : if (!vobject->HasEscaped()) {
204 : V8_Fatal(__FILE__, __LINE__,
205 : "Escape analysis failed to remove node %s#%d\n",
206 0 : node->op()->mnemonic(), node->id());
207 : }
208 : }
209 : }
210 : }
211 443360 : }
212 :
// Post-pass over all collected NewArgumentsElements nodes: where the backing
// store provably does not escape, replace it (and the loads from it) by
// direct reads from the stack frame plus deoptimizer state nodes.
void EscapeAnalysisReducer::Finalize() {
  for (Node* node : arguments_elements_) {
    DCHECK_EQ(IrOpcode::kNewArgumentsElements, node->opcode());
    int mapped_count = OpParameter<int>(node);

    // Only the canonical ArgumentsFrame/ArgumentsLength input shape is
    // handled; anything else is left untouched.
    Node* arguments_frame = NodeProperties::GetValueInput(node, 0);
    if (arguments_frame->opcode() != IrOpcode::kArgumentsFrame) continue;
    Node* arguments_length = NodeProperties::GetValueInput(node, 1);
    if (arguments_length->opcode() != IrOpcode::kArgumentsLength) continue;

    // If mapped arguments are specified, then their number is always equal to
    // the number of formal parameters. This allows to use just the three-value
    // {ArgumentsStateType} enum because the deoptimizer can reconstruct the
    // value of {mapped_count} from the number of formal parameters.
    DCHECK_IMPLIES(
        mapped_count != 0,
        mapped_count == FormalParameterCountOf(arguments_length->op()));
    ArgumentsStateType type = IsRestLengthOf(arguments_length->op())
                                  ? ArgumentsStateType::kRestParameter
                                  : (mapped_count == 0)
                                        ? ArgumentsStateType::kUnmappedArguments
                                        : ArgumentsStateType::kMappedArguments;

    // Uses of the length inside deopt state are redirected to a lazily
    // created ArgumentsLengthState node (shared across all such uses).
    Node* arguments_length_state = nullptr;
    for (Edge edge : arguments_length->use_edges()) {
      Node* use = edge.from();
      switch (use->opcode()) {
        case IrOpcode::kObjectState:
        case IrOpcode::kTypedObjectState:
        case IrOpcode::kStateValues:
        case IrOpcode::kTypedStateValues:
          if (!arguments_length_state) {
            arguments_length_state = jsgraph()->graph()->NewNode(
                jsgraph()->common()->ArgumentsLengthState(type));
            NodeProperties::SetType(arguments_length_state,
                                    Type::OtherInternal());
          }
          edge.UpdateTo(arguments_length_state);
          break;
        default:
          break;
      }
    }

    // Classify all value uses of the elements node: deopt-state uses and
    // recognized loads are fine; anything else makes the allocation escape.
    bool escaping_use = false;
    ZoneVector<Node*> loads(zone());
    for (Edge edge : node->use_edges()) {
      Node* use = edge.from();
      if (!NodeProperties::IsValueEdge(edge)) continue;
      if (use->use_edges().empty()) {
        // A node without uses is dead, so we don't have to care about it.
        continue;
      }
      switch (use->opcode()) {
        case IrOpcode::kStateValues:
        case IrOpcode::kTypedStateValues:
        case IrOpcode::kObjectState:
        case IrOpcode::kTypedObjectState:
          break;
        case IrOpcode::kLoadElement:
          // Element loads can only be redirected to the stack for unmapped
          // arguments.
          if (mapped_count == 0) {
            loads.push_back(use);
          } else {
            escaping_use = true;
          }
          break;
        case IrOpcode::kLoadField:
          if (FieldAccessOf(use->op()).offset == FixedArray::kLengthOffset) {
            loads.push_back(use);
          } else {
            escaping_use = true;
          }
          break;
        default:
          // If the arguments elements node is used by an unhandled node,
          // then we cannot remove this allocation.
          escaping_use = true;
          break;
      }
      if (escaping_use) break;
    }
    if (!escaping_use) {
      // Replace the allocation by a deoptimizer state node and rewrite the
      // collected loads to read directly from the stack frame.
      Node* arguments_elements_state = jsgraph()->graph()->NewNode(
          jsgraph()->common()->ArgumentsElementsState(type));
      NodeProperties::SetType(arguments_elements_state, Type::OtherInternal());
      ReplaceWithValue(node, arguments_elements_state);

      ElementAccess stack_access;
      stack_access.base_is_tagged = BaseTaggedness::kUntaggedBase;
      // Reduce base address by {kPointerSize} such that (length - index)
      // resolves to the right position.
      stack_access.header_size =
          CommonFrameConstants::kFixedFrameSizeAboveFp - kPointerSize;
      stack_access.type = Type::NonInternal();
      stack_access.machine_type = MachineType::AnyTagged();
      stack_access.write_barrier_kind = WriteBarrierKind::kNoWriteBarrier;
      const Operator* load_stack_op =
          jsgraph()->simplified()->LoadElement(stack_access);

      for (Node* load : loads) {
        switch (load->opcode()) {
          case IrOpcode::kLoadElement: {
            Node* index = NodeProperties::GetValueInput(load, 1);
            // {offset} is a reverted index starting from 1. The base address is
            // adapted to allow offsets starting from 1.
            Node* offset = jsgraph()->graph()->NewNode(
                jsgraph()->simplified()->NumberSubtract(), arguments_length,
                index);
            NodeProperties::SetType(offset,
                                    TypeCache::Get().kArgumentsLengthType);
            NodeProperties::ReplaceValueInput(load, arguments_frame, 0);
            NodeProperties::ReplaceValueInput(load, offset, 1);
            NodeProperties::ChangeOp(load, load_stack_op);
            break;
          }
          case IrOpcode::kLoadField: {
            DCHECK_EQ(FieldAccessOf(load->op()).offset,
                      FixedArray::kLengthOffset);
            // The length field load is answered by the ArgumentsLength node.
            Node* length = NodeProperties::GetValueInput(node, 1);
            ReplaceWithValue(load, length);
            break;
          }
          default:
            UNREACHABLE();
        }
      }
    }
  }
}
342 :
343 0 : Node* NodeHashCache::Query(Node* node) {
344 : auto it = cache_.find(node);
345 17924802 : if (it != cache_.end()) {
346 124026 : return *it;
347 : } else {
348 : return nullptr;
349 : }
350 : }
351 :
352 65171 : NodeHashCache::Constructor::Constructor(NodeHashCache* cache,
353 : const Operator* op, int input_count,
354 : Node** inputs, Type* type)
355 65171 : : node_cache_(cache), from_(nullptr) {
356 130342 : if (node_cache_->temp_nodes_.size() > 0) {
357 18376 : tmp_ = node_cache_->temp_nodes_.back();
358 : node_cache_->temp_nodes_.pop_back();
359 18376 : int tmp_input_count = tmp_->InputCount();
360 18376 : if (input_count <= tmp_input_count) {
361 7009 : tmp_->TrimInputCount(input_count);
362 : }
363 91517 : for (int i = 0; i < input_count; ++i) {
364 91517 : if (i < tmp_input_count) {
365 66575 : tmp_->ReplaceInput(i, inputs[i]);
366 : } else {
367 24942 : tmp_->AppendInput(node_cache_->graph_->zone(), inputs[i]);
368 : }
369 : }
370 18376 : NodeProperties::ChangeOp(tmp_, op);
371 : } else {
372 46795 : tmp_ = node_cache_->graph_->NewNode(op, input_count, inputs);
373 : }
374 65171 : NodeProperties::SetType(tmp_, type);
375 65171 : }
376 :
377 17924824 : Node* NodeHashCache::Constructor::Get() {
378 : DCHECK(tmp_ || from_);
379 : Node* node;
380 17924824 : if (!tmp_) {
381 17706654 : node = node_cache_->Query(from_);
382 17706632 : if (!node) node = from_;
383 : } else {
384 218170 : node = node_cache_->Query(tmp_);
385 218170 : if (node) {
386 121057 : node_cache_->temp_nodes_.push_back(tmp_);
387 : } else {
388 97113 : node = tmp_;
389 97113 : node_cache_->Insert(node);
390 : }
391 : }
392 17924802 : tmp_ = from_ = nullptr;
393 17924802 : return node;
394 : }
395 :
396 529228 : Node* NodeHashCache::Constructor::MutableNode() {
397 : DCHECK(tmp_ || from_);
398 529228 : if (!tmp_) {
399 305998 : if (node_cache_->temp_nodes_.empty()) {
400 268979 : tmp_ = node_cache_->graph_->CloneNode(from_);
401 : } else {
402 100774 : tmp_ = node_cache_->temp_nodes_.back();
403 : node_cache_->temp_nodes_.pop_back();
404 100774 : int from_input_count = from_->InputCount();
405 100774 : int tmp_input_count = tmp_->InputCount();
406 100774 : if (from_input_count <= tmp_input_count) {
407 68088 : tmp_->TrimInputCount(from_input_count);
408 : }
409 486298 : for (int i = 0; i < from_input_count; ++i) {
410 486298 : if (i < tmp_input_count) {
411 740636 : tmp_->ReplaceInput(i, from_->InputAt(i));
412 : } else {
413 347940 : tmp_->AppendInput(node_cache_->graph_->zone(), from_->InputAt(i));
414 : }
415 : }
416 201548 : NodeProperties::SetType(tmp_, NodeProperties::GetType(from_));
417 201548 : NodeProperties::ChangeOp(tmp_, from_->op());
418 : }
419 : }
420 529228 : return tmp_;
421 : }
422 :
423 : #undef TRACE
424 :
425 : } // namespace compiler
426 : } // namespace internal
427 : } // namespace v8
|