// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_CRANKSHAFT_X64_LITHIUM_CODEGEN_X64_H_
#define V8_CRANKSHAFT_X64_LITHIUM_CODEGEN_X64_H_


#include "src/ast/scopes.h"
#include "src/base/logging.h"
#include "src/crankshaft/lithium-codegen.h"
#include "src/crankshaft/x64/lithium-gap-resolver-x64.h"
#include "src/crankshaft/x64/lithium-x64.h"
#include "src/deoptimizer.h"
#include "src/safepoint-table.h"
#include "src/utils.h"

namespace v8 {
namespace internal {

// Forward declarations.
class LDeferredCode;
class SafepointGenerator;

class LCodeGen: public LCodeGenBase {
 public:
  LCodeGen(LChunk* chunk, MacroAssembler* assembler, CompilationInfo* info)
      : LCodeGenBase(chunk, assembler, info),
        jump_table_(4, info->zone()),
        scope_(info->scope()),
        deferred_(8, info->zone()),
        frame_is_built_(false),
        safepoints_(info->zone()),
        resolver_(this),
        expected_safepoint_kind_(Safepoint::kSimple) {
    PopulateDeoptimizationLiteralsWithInlinedFunctions();
  }

  int LookupDestination(int block_id) const {
    return chunk()->LookupDestination(block_id);
  }

  bool IsNextEmittedBlock(int block_id) const {
    return LookupDestination(block_id) == GetNextEmittedBlock();
  }

  bool NeedsEagerFrame() const {
    return HasAllocatedStackSlots() || info()->is_non_deferred_calling() ||
           !info()->IsStub() || info()->requires_frame();
  }
  bool NeedsDeferredFrame() const {
    return !NeedsEagerFrame() && info()->is_deferred_calling();
  }

  // Support for converting LOperands to assembler types.
  Register ToRegister(LOperand* op) const;
  XMMRegister ToDoubleRegister(LOperand* op) const;
  bool IsInteger32Constant(LConstantOperand* op) const;
  bool IsExternalConstant(LConstantOperand* op) const;
  bool IsDehoistedKeyConstant(LConstantOperand* op) const;
  bool IsSmiConstant(LConstantOperand* op) const;
  int32_t ToRepresentation(LConstantOperand* op, const Representation& r) const;
  int32_t ToInteger32(LConstantOperand* op) const;
  Smi* ToSmi(LConstantOperand* op) const;
  double ToDouble(LConstantOperand* op) const;
  ExternalReference ToExternalReference(LConstantOperand* op) const;
  Handle<Object> ToHandle(LConstantOperand* op) const;
  Operand ToOperand(LOperand* op) const;

  // Try to generate code for the entire chunk, but it may fail if the
  // chunk contains constructs we cannot handle. Returns true if the
  // code generation attempt succeeded.
  bool GenerateCode();

  // Finish the code by setting stack height, safepoint, and bailout
  // information on it.
  void FinishCode(Handle<Code> code);

  // Deferred code support.
  void DoDeferredNumberTagD(LNumberTagD* instr);

  enum IntegerSignedness { SIGNED_INT32, UNSIGNED_INT32 };
  void DoDeferredNumberTagIU(LInstruction* instr,
                             LOperand* value,
                             LOperand* temp1,
                             LOperand* temp2,
                             IntegerSignedness signedness);

  void DoDeferredTaggedToI(LTaggedToI* instr, Label* done);
  void DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr);
  void DoDeferredStackCheck(LStackCheck* instr);
  void DoDeferredMaybeGrowElements(LMaybeGrowElements* instr);
  void DoDeferredStringCharCodeAt(LStringCharCodeAt* instr);
  void DoDeferredStringCharFromCode(LStringCharFromCode* instr);
  void DoDeferredAllocate(LAllocate* instr);
  void DoDeferredInstanceMigration(LCheckMaps* instr, Register object);
  void DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr,
                                   Register object,
                                   Register index);

  // Parallel move support.
  void DoParallelMove(LParallelMove* move);
  void DoGap(LGap* instr);

  // Emit frame translation commands for an environment.
  void WriteTranslation(LEnvironment* environment, Translation* translation);

  // Declare methods that deal with the individual node types.
#define DECLARE_DO(type) void Do##type(L##type* node);
  LITHIUM_CONCRETE_INSTRUCTION_LIST(DECLARE_DO)
#undef DECLARE_DO
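
  // The macro above expands to one handler declaration per concrete Lithium
  // instruction. Illustrative expansion only, using an instruction type that
  // already appears in this header:
  //
  //   void DoNumberTagD(LNumberTagD* node);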

 private:
  LPlatformChunk* chunk() const { return chunk_; }
  Scope* scope() const { return scope_; }
  HGraph* graph() const { return chunk()->graph(); }

  XMMRegister double_scratch0() const { return kScratchDoubleReg; }

  void EmitClassOfTest(Label* if_true, Label* if_false,
                       Handle<String> class_name, Register input,
                       Register temporary, Register scratch);

  bool HasAllocatedStackSlots() const {
    return chunk()->HasAllocatedStackSlots();
  }
  int GetStackSlotCount() const { return chunk()->GetSpillSlotCount(); }
  int GetTotalFrameSlotCount() const {
    return chunk()->GetTotalFrameSlotCount();
  }

  void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone()); }


  void SaveCallerDoubles();
  void RestoreCallerDoubles();

  // Code generation passes. Returns true if code generation should
  // continue.
  void GenerateBodyInstructionPre(LInstruction* instr) override;
  void GenerateBodyInstructionPost(LInstruction* instr) override;
  bool GeneratePrologue();
  bool GenerateDeferredCode();
  bool GenerateJumpTable();
  bool GenerateSafepointTable();

  // Generates the custom OSR entrypoint and sets the osr_pc_offset.
  void GenerateOsrPrologue();

  enum SafepointMode {
    RECORD_SIMPLE_SAFEPOINT,
    RECORD_SAFEPOINT_WITH_REGISTERS
  };

  void CallCodeGeneric(Handle<Code> code,
                       RelocInfo::Mode mode,
                       LInstruction* instr,
                       SafepointMode safepoint_mode,
                       int argc);


  void CallCode(Handle<Code> code,
                RelocInfo::Mode mode,
                LInstruction* instr);

  void CallRuntime(const Runtime::Function* function,
                   int num_arguments,
                   LInstruction* instr,
                   SaveFPRegsMode save_doubles = kDontSaveFPRegs);

  void CallRuntime(Runtime::FunctionId id,
                   int num_arguments,
                   LInstruction* instr) {
    const Runtime::Function* function = Runtime::FunctionForId(id);
    CallRuntime(function, num_arguments, instr);
  }

  void CallRuntime(Runtime::FunctionId id, LInstruction* instr) {
    const Runtime::Function* function = Runtime::FunctionForId(id);
    CallRuntime(function, function->nargs, instr);
  }

  void CallRuntimeFromDeferred(Runtime::FunctionId id,
                               int argc,
                               LInstruction* instr,
                               LOperand* context);

  void LoadContextFromDeferred(LOperand* context);

  void PrepareForTailCall(const ParameterCount& actual, Register scratch1,
                          Register scratch2, Register scratch3);

  // Generate a direct call to a known function. Expects the function
  // to be in rdi.
  void CallKnownFunction(Handle<JSFunction> function,
                         int formal_parameter_count, int arity,
                         bool is_tail_call, LInstruction* instr);

  void RecordSafepointWithLazyDeopt(LInstruction* instr,
                                    SafepointMode safepoint_mode,
                                    int argc);
  void RegisterEnvironmentForDeoptimization(LEnvironment* environment,
                                            Safepoint::DeoptMode mode);
  void DeoptimizeIf(Condition cc, LInstruction* instr,
                    DeoptimizeReason deopt_reason,
                    Deoptimizer::BailoutType bailout_type);
  void DeoptimizeIf(Condition cc, LInstruction* instr,
                    DeoptimizeReason deopt_reason);

  bool DeoptEveryNTimes() {
    return FLAG_deopt_every_n_times != 0 && !info()->IsStub();
  }

  void AddToTranslation(LEnvironment* environment,
                        Translation* translation,
                        LOperand* op,
                        bool is_tagged,
                        bool is_uint32,
                        int* object_index_pointer,
                        int* dematerialized_index_pointer);

  Register ToRegister(int index) const;
  XMMRegister ToDoubleRegister(int index) const;
  Operand BuildFastArrayOperand(
      LOperand* elements_pointer,
      LOperand* key,
      Representation key_representation,
      ElementsKind elements_kind,
      uint32_t base_offset);

  Operand BuildSeqStringOperand(Register string,
                                LOperand* index,
                                String::Encoding encoding);

  void EmitIntegerMathAbs(LMathAbs* instr);
  void EmitSmiMathAbs(LMathAbs* instr);

  // Support for recording safepoint information.
  void RecordSafepoint(LPointerMap* pointers,
                       Safepoint::Kind kind,
                       int arguments,
                       Safepoint::DeoptMode mode);
  void RecordSafepoint(LPointerMap* pointers, Safepoint::DeoptMode mode);
  void RecordSafepoint(Safepoint::DeoptMode mode);
  void RecordSafepointWithRegisters(LPointerMap* pointers,
                                    int arguments,
                                    Safepoint::DeoptMode mode);

  static Condition TokenToCondition(Token::Value op, bool is_unsigned);
  void EmitGoto(int block);

  // EmitBranch expects to be the last instruction of a block.
  template<class InstrType>
  void EmitBranch(InstrType instr, Condition cc);
  template <class InstrType>
  void EmitTrueBranch(InstrType instr, Condition cc);
  template <class InstrType>
  void EmitFalseBranch(InstrType instr, Condition cc);
  void EmitNumberUntagD(LNumberUntagD* instr, Register input,
                        XMMRegister result, NumberUntagDMode mode);

  // Emits optimized code for typeof x == "y". Modifies input register.
  // Returns the condition on which a final split to
  // true and false label should be made, to optimize fallthrough.
  Condition EmitTypeofIs(LTypeofIsAndBranch* instr, Register input);

  // Emits optimized code for %_IsString(x). Preserves input register.
  // Returns the condition on which a final split to
  // true and false label should be made, to optimize fallthrough.
  Condition EmitIsString(Register input,
                         Register temp1,
                         Label* is_not_string,
                         SmiCheck check_needed);

  // Emits code for pushing either a tagged constant, a (non-double)
  // register, or a stack slot operand.
  void EmitPushTaggedOperand(LOperand* operand);

  // Emits optimized code to deep-copy the contents of statically known
  // object graphs (e.g. object literal boilerplate).
  void EmitDeepCopy(Handle<JSObject> object,
                    Register result,
                    Register source,
                    int* offset,
                    AllocationSiteMode mode);

  void EnsureSpaceForLazyDeopt(int space_needed) override;
  void DoLoadKeyedExternalArray(LLoadKeyed* instr);
  void DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr);
  void DoLoadKeyedFixedArray(LLoadKeyed* instr);
  void DoStoreKeyedExternalArray(LStoreKeyed* instr);
  void DoStoreKeyedFixedDoubleArray(LStoreKeyed* instr);
  void DoStoreKeyedFixedArray(LStoreKeyed* instr);

  template <class T>
  void EmitVectorLoadICRegisters(T* instr);

#ifdef _MSC_VER
  // On windows, you may not access the stack more than one page below
  // the most recently mapped page. To make the allocated area randomly
  // accessible, we write an arbitrary value to each page in range
  // rsp + offset - page_size .. rsp in turn.
  void MakeSureStackPagesMapped(int offset);
#endif
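
  // Illustrative sketch of the page-touching loop described above (an
  // assumption about the .cc implementation, not a copy of it; the page size,
  // the rax scratch register and the movp call are placeholders):
  //
  //   const int kPageSize = 4 * KB;
  //   for (int touched = offset - kPageSize; touched > 0;
  //        touched -= kPageSize) {
  //     __ movp(Operand(rsp, touched), rax);
  //   }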

  ZoneList<Deoptimizer::JumpTableEntry> jump_table_;
  Scope* const scope_;
  ZoneList<LDeferredCode*> deferred_;
  bool frame_is_built_;

  // Builder that keeps track of safepoints in the code. The table
  // itself is emitted at the end of the generated code.
  SafepointTableBuilder safepoints_;

  // Compiles a set of parallel moves into a sequential list of moves.
  LGapResolver resolver_;

  Safepoint::Kind expected_safepoint_kind_;

  class PushSafepointRegistersScope final BASE_EMBEDDED {
   public:
    explicit PushSafepointRegistersScope(LCodeGen* codegen)
        : codegen_(codegen) {
      DCHECK(codegen_->info()->is_calling());
      DCHECK(codegen_->expected_safepoint_kind_ == Safepoint::kSimple);
      codegen_->masm_->PushSafepointRegisters();
      codegen_->expected_safepoint_kind_ = Safepoint::kWithRegisters;
    }

    ~PushSafepointRegistersScope() {
      DCHECK(codegen_->expected_safepoint_kind_ == Safepoint::kWithRegisters);
      codegen_->masm_->PopSafepointRegisters();
      codegen_->expected_safepoint_kind_ = Safepoint::kSimple;
    }

   private:
    LCodeGen* codegen_;
  };
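
  // Illustrative use of the scope (a sketch; the runtime-call arguments and
  // the instr/context operands are assumptions, not the exact sequence from
  // the .cc file):
  //
  //   {
  //     PushSafepointRegistersScope scope(this);
  //     CallRuntimeFromDeferred(id, argc, instr, instr->context());
  //   }  // Registers popped; expected_safepoint_kind_ back to kSimple.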

  friend class LDeferredCode;
  friend class LEnvironment;
  friend class SafepointGenerator;
  DISALLOW_COPY_AND_ASSIGN(LCodeGen);
};


class LDeferredCode: public ZoneObject {
 public:
  explicit LDeferredCode(LCodeGen* codegen)
      : codegen_(codegen),
        external_exit_(NULL),
        instruction_index_(codegen->current_instruction_) {
    codegen->AddDeferredCode(this);
  }

  virtual ~LDeferredCode() {}
  virtual void Generate() = 0;
  virtual LInstruction* instr() = 0;

  void SetExit(Label* exit) { external_exit_ = exit; }
  Label* entry() { return &entry_; }
  Label* exit() { return external_exit_ != NULL ? external_exit_ : &exit_; }
  Label* done() { return codegen_->NeedsDeferredFrame() ? &done_ : exit(); }
  int instruction_index() const { return instruction_index_; }

 protected:
  LCodeGen* codegen() const { return codegen_; }
  MacroAssembler* masm() const { return codegen_->masm(); }

 private:
  LCodeGen* codegen_;
  Label entry_;
  Label exit_;
  Label done_;
  Label* external_exit_;
  int instruction_index_;
};
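
// Each deferred slow path is expressed as a concrete subclass in the .cc
// file. Illustrative sketch only (the class name and members are assumptions
// based on the DoDeferred* hooks declared above):
//
//   class DeferredStackCheck final : public LDeferredCode {
//    public:
//     DeferredStackCheck(LCodeGen* codegen, LStackCheck* instr)
//         : LDeferredCode(codegen), instr_(instr) {}
//     void Generate() override { codegen()->DoDeferredStackCheck(instr_); }
//     LInstruction* instr() override { return instr_; }
//
//    private:
//     LStackCheck* instr_;
//   };
//
// The main code-generation pass binds the entry() and exit() labels around
// the generated slow path and jumps to entry() when the fast path bails out.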

}  // namespace internal
}  // namespace v8

#endif  // V8_CRANKSHAFT_X64_LITHIUM_CODEGEN_X64_H_