// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_COMPILER_MEMORY_OPTIMIZER_H_
#define V8_COMPILER_MEMORY_OPTIMIZER_H_

#include "src/compiler/graph-assembler.h"
#include "src/zone/zone-containers.h"

namespace v8 {
namespace internal {
namespace compiler {

// Forward declarations.
class CommonOperatorBuilder;
struct ElementAccess;
class Graph;
class JSGraph;
class MachineOperatorBuilder;
class Node;
class Operator;

// NodeIds are identifying numbers for nodes that can be used to index auxiliary
// out-of-line data associated with each node.
typedef uint32_t NodeId;

// Lowers all simplified memory access and allocation related nodes (i.e.
// Allocate, LoadField, StoreField and friends) to machine operators.
// Performs allocation folding and store write barrier elimination
// implicitly.
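//
// A minimal usage sketch; |jsgraph| and |zone| are assumed to be supplied by
// the surrounding compilation pipeline and are not defined in this header:
//
//   MemoryOptimizer optimizer(jsgraph, zone);
//   optimizer.Optimize();  // Drains the work queue of effect-chain tokens.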
class MemoryOptimizer final {
 public:
  MemoryOptimizer(JSGraph* jsgraph, Zone* zone);
  ~MemoryOptimizer() {}

  void Optimize();

 private:
  // An allocation group represents a set of allocations that have been folded
  // together.
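  //
  // Illustrative reading (an interpretation, not stated in this file): when
  // two consecutive new-space Allocate nodes are folded, the second object is
  // carved out of the first reservation by bumping its top pointer, and both
  // objects are recorded in the same group so that stores into them can have
  // their write barriers eliminated.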
  class AllocationGroup final : public ZoneObject {
   public:
    AllocationGroup(Node* node, PretenureFlag pretenure, Zone* zone);
    AllocationGroup(Node* node, PretenureFlag pretenure, Node* size,
                    Zone* zone);
    ~AllocationGroup() {}

    void Add(Node* object);
    bool Contains(Node* object) const;
    bool IsNewSpaceAllocation() const { return pretenure() == NOT_TENURED; }

    PretenureFlag pretenure() const { return pretenure_; }
    Node* size() const { return size_; }

   private:
    ZoneSet<NodeId> node_ids_;
    PretenureFlag const pretenure_;
    Node* const size_;

    DISALLOW_IMPLICIT_CONSTRUCTORS(AllocationGroup);
  };

  // An allocation state is propagated on the effect paths through the graph.
  class AllocationState final : public ZoneObject {
   public:
    static AllocationState const* Empty(Zone* zone) {
      return new (zone) AllocationState();
    }
    static AllocationState const* Closed(AllocationGroup* group, Zone* zone) {
      return new (zone) AllocationState(group);
    }
    static AllocationState const* Open(AllocationGroup* group, int size,
                                       Node* top, Zone* zone) {
      return new (zone) AllocationState(group, size, top);
    }
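
    // How the three factories are meant to be read (an assumption drawn from
    // their signatures, not documented here): Empty() means nothing is known
    // on this effect path; Open() means there is a current group with a known
    // |top| node and reserved |size|, so a following allocation may be folded
    // into it; Closed() means the group is still tracked for write barrier
    // elimination, but no further folding is possible.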

    bool IsNewSpaceAllocation() const;

    AllocationGroup* group() const { return group_; }
    Node* top() const { return top_; }
    int size() const { return size_; }

   private:
    AllocationState();
    explicit AllocationState(AllocationGroup* group);
    AllocationState(AllocationGroup* group, int size, Node* top);

    AllocationGroup* const group_;
    // The upper bound of the combined allocated object size on the current path
    // (max int if allocation folding is impossible on this path).
    int const size_;
    Node* const top_;

    DISALLOW_COPY_AND_ASSIGN(AllocationState);
  };

  // An array of allocation states used to collect states on merges.
  typedef ZoneVector<AllocationState const*> AllocationStates;

  // We thread through tokens to represent the current state on a given effect
  // path through the graph.
  struct Token {
    Node* node;
    AllocationState const* state;
  };
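
  // Rough picture of the traversal (an assumption based on the declarations
  // below, not spelled out in this header): Optimize() drains |tokens_|,
  // calling VisitNode() for each token's node with the token's state;
  // EnqueueUses()/EnqueueUse() push follow-up tokens, and EnqueueMerge()
  // parks states in |pending_| until all effect inputs of a merge have been
  // seen, at which point MergeStates() combines them.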

  void VisitNode(Node*, AllocationState const*);
  void VisitAllocate(Node*, AllocationState const*);
  void VisitCall(Node*, AllocationState const*);
  void VisitLoadElement(Node*, AllocationState const*);
  void VisitLoadField(Node*, AllocationState const*);
  void VisitStoreElement(Node*, AllocationState const*);
  void VisitStoreField(Node*, AllocationState const*);
  void VisitOtherEffect(Node*, AllocationState const*);

  Node* ComputeIndex(ElementAccess const&, Node*);
  WriteBarrierKind ComputeWriteBarrierKind(Node* object,
                                           AllocationState const* state,
                                           WriteBarrierKind);
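
  // Note on ComputeWriteBarrierKind() above (an assumption about its intent,
  // not documented in this header): when |object| is known to belong to
  // |state|'s new-space allocation group, the requested write barrier can be
  // weakened to kNoWriteBarrier, since stores into young-generation objects
  // never need to be recorded.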

  AllocationState const* MergeStates(AllocationStates const& states);

  void EnqueueMerge(Node*, int, AllocationState const*);
  void EnqueueUses(Node*, AllocationState const*);
  void EnqueueUse(Node*, int, AllocationState const*);

  AllocationState const* empty_state() const { return empty_state_; }
  Graph* graph() const;
  Isolate* isolate() const;
  JSGraph* jsgraph() const { return jsgraph_; }
  CommonOperatorBuilder* common() const;
  MachineOperatorBuilder* machine() const;
  Zone* zone() const { return zone_; }
  GraphAssembler* gasm() { return &graph_assembler_; }

  SetOncePointer<const Operator> allocate_operator_;
  JSGraph* const jsgraph_;
  AllocationState const* const empty_state_;
  ZoneMap<NodeId, AllocationStates> pending_;
  ZoneQueue<Token> tokens_;
  Zone* const zone_;
  GraphAssembler graph_assembler_;

  DISALLOW_IMPLICIT_CONSTRUCTORS(MemoryOptimizer);
};

}  // namespace compiler
}  // namespace internal
}  // namespace v8

#endif  // V8_COMPILER_MEMORY_OPTIMIZER_H_