Line data Source code
1 : // Copyright 2012 the V8 project authors. All rights reserved.
2 : // Use of this source code is governed by a BSD-style license that can be
3 : // found in the LICENSE file.
4 :
5 : #include "src/compiler-dispatcher/optimizing-compile-dispatcher.h"
6 :
7 : #include "src/base/atomicops.h"
8 : #include "src/base/template-utils.h"
9 : #include "src/cancelable-task.h"
10 : #include "src/compiler.h"
11 : #include "src/counters.h"
12 : #include "src/isolate.h"
13 : #include "src/log.h"
14 : #include "src/objects-inl.h"
15 : #include "src/optimized-compilation-info.h"
16 : #include "src/tracing/trace-event.h"
17 : #include "src/v8.h"
18 :
19 : namespace v8 {
20 : namespace internal {
21 :
22 : namespace {
23 :
// Deletes |job| and, if |restore_function_code| is set, resets the closure's
// code back to its SharedFunctionInfo code and clears any pending
// optimization marker, so the function no longer appears queued.
void DisposeCompilationJob(OptimizedCompilationJob* job,
                           bool restore_function_code) {
  if (restore_function_code) {
    Handle<JSFunction> function = job->compilation_info()->closure();
    function->set_code(function->shared()->GetCode());
    if (function->IsInOptimizationQueue()) {
      function->ClearOptimizationMarker();
    }
    // TODO(mvstanton): We can't call EnsureFeedbackVector here due to
    // allocation, but we probably shouldn't call set_code either, as this
    // sometimes runs on the worker thread!
    // JSFunction::EnsureFeedbackVector(function);
  }
  delete job;
}
39 :
40 : } // namespace
41 :
// Background task that pulls the next job off the dispatcher's input queue
// and executes it on a worker thread. The dispatcher's ref_count_ tracks how
// many of these tasks are alive so Flush()/Stop() can wait for them to drain.
class OptimizingCompileDispatcher::CompileTask : public CancelableTask {
 public:
  explicit CompileTask(Isolate* isolate,
                       OptimizingCompileDispatcher* dispatcher)
      : CancelableTask(isolate),
        isolate_(isolate),
        worker_thread_runtime_call_stats_(
            isolate->counters()->worker_thread_runtime_call_stats()),
        dispatcher_(dispatcher) {
    // Register this task with the dispatcher under the ref-count lock.
    base::MutexGuard lock_guard(&dispatcher_->ref_count_mutex_);
    ++dispatcher_->ref_count_;
  }

  ~CompileTask() override = default;

 private:
  // v8::Task overrides.
  void RunInternal() override {
    // Compilation runs off the main thread: no heap allocation, handle
    // allocation, or handle dereference is allowed in this scope.
    DisallowHeapAllocation no_allocation;
    DisallowHandleAllocation no_handles;
    DisallowHandleDereference no_deref;

    {
      WorkerThreadRuntimeCallStatsScope runtime_call_stats_scope(
          worker_thread_runtime_call_stats_);
      RuntimeCallTimerScope runtimeTimer(
          runtime_call_stats_scope.Get(),
          RuntimeCallCounterId::kRecompileConcurrent);

      TimerEventScope<TimerEventRecompileConcurrent> timer(isolate_);
      TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.compile"),
                   "V8.RecompileConcurrent");

      // Optional artificial delay (set via flag) that widens the window in
      // which concurrent recompilation is observably in flight.
      if (dispatcher_->recompilation_delay_ != 0) {
        base::OS::Sleep(base::TimeDelta::FromMilliseconds(
            dispatcher_->recompilation_delay_));
      }

      dispatcher_->CompileNext(dispatcher_->NextInput(true));
    }
    {
      // Deregister; notify a waiter (Flush/Stop) when the last task exits.
      base::MutexGuard lock_guard(&dispatcher_->ref_count_mutex_);
      if (--dispatcher_->ref_count_ == 0) {
        dispatcher_->ref_count_zero_.NotifyOne();
      }
    }
  }

  Isolate* isolate_;
  WorkerThreadRuntimeCallStats* worker_thread_runtime_call_stats_;
  OptimizingCompileDispatcher* dispatcher_;

  DISALLOW_COPY_AND_ASSIGN(CompileTask);
};
96 :
OptimizingCompileDispatcher::~OptimizingCompileDispatcher() {
#ifdef DEBUG
  {
    // No CompileTask may still be running at destruction time.
    base::MutexGuard lock_guard(&ref_count_mutex_);
    DCHECK_EQ(0, ref_count_);
  }
#endif
  // The input queue must already have been drained (see Stop()).
  DCHECK_EQ(0, input_queue_length_);
  DeleteArray(input_queue_);
}
107 :
// Pops the next job from the front of the circular input queue, or returns
// nullptr if the queue is empty. When |check_if_flushing| is set and the
// dispatcher is in FLUSH mode, the dequeued job is disposed (restoring the
// function's code) and nullptr is returned instead.
OptimizedCompilationJob* OptimizingCompileDispatcher::NextInput(
    bool check_if_flushing) {
  base::MutexGuard access_input_queue_(&input_queue_mutex_);
  if (input_queue_length_ == 0) return nullptr;
  OptimizedCompilationJob* job = input_queue_[InputQueueIndex(0)];
  DCHECK_NOT_NULL(job);
  input_queue_shift_ = InputQueueIndex(1);  // Advance the ring-buffer head.
  input_queue_length_--;
  if (check_if_flushing) {
    if (mode_ == FLUSH) {
      AllowHandleDereference allow_handle_dereference;
      DisposeCompilationJob(job, true);
      return nullptr;
    }
  }
  return job;
}
125 :
// Executes |job| on the calling thread and pushes it onto the output queue
// for later installation on the main thread. No-op for a null |job| (empty
// input queue, or the job was flushed by NextInput).
void OptimizingCompileDispatcher::CompileNext(OptimizedCompilationJob* job) {
  if (!job) return;

  // The function may have already been optimized by OSR. Simply continue.
  CompilationJob::Status status = job->ExecuteJob();
  USE(status);  // Prevent an unused-variable error.

  // Use a mutex to make sure that functions marked for install
  // are always also queued.
  base::MutexGuard access_output_queue_(&output_queue_mutex_);
  output_queue_.push(job);
  isolate_->stack_guard()->RequestInstallCode();
}
140 :
141 89599 : void OptimizingCompileDispatcher::FlushOutputQueue(bool restore_function_code) {
142 : for (;;) {
143 : OptimizedCompilationJob* job = nullptr;
144 : {
145 89701 : base::MutexGuard access_output_queue_(&output_queue_mutex_);
146 179300 : if (output_queue_.empty()) return;
147 102 : job = output_queue_.front();
148 : output_queue_.pop();
149 : }
150 :
151 102 : DisposeCompilationJob(job, restore_function_code);
152 102 : }
153 : }
154 :
// Drops all queued compilation jobs, restoring each affected function's
// code. With kDontBlock, only the queues are cleared and jobs already
// running on worker threads are left to finish on their own. Otherwise the
// dispatcher enters FLUSH mode — making workers dispose jobs they dequeue —
// and waits until all CompileTasks have exited before draining the output
// queue.
void OptimizingCompileDispatcher::Flush(BlockingBehavior blocking_behavior) {
  if (blocking_behavior == BlockingBehavior::kDontBlock) {
    if (FLAG_block_concurrent_recompilation) Unblock();
    base::MutexGuard access_input_queue_(&input_queue_mutex_);
    // Dequeue and dispose every job still waiting in the input queue.
    while (input_queue_length_ > 0) {
      OptimizedCompilationJob* job = input_queue_[InputQueueIndex(0)];
      DCHECK_NOT_NULL(job);
      input_queue_shift_ = InputQueueIndex(1);
      input_queue_length_--;
      DisposeCompilationJob(job, true);
    }
    FlushOutputQueue(true);
    if (FLAG_trace_concurrent_recompilation) {
      PrintF(" ** Flushed concurrent recompilation queues (not blocking).\n");
    }
    return;
  }
  mode_ = FLUSH;
  if (FLAG_block_concurrent_recompilation) Unblock();
  {
    base::MutexGuard lock_guard(&ref_count_mutex_);
    // Wait for in-flight CompileTasks to drain the input queue and exit.
    while (ref_count_ > 0) ref_count_zero_.Wait(&ref_count_mutex_);
    mode_ = COMPILE;
  }
  FlushOutputQueue(true);
  if (FLAG_trace_concurrent_recompilation) {
    PrintF(" ** Flushed concurrent recompilation queues.\n");
  }
}
184 :
// Shuts the dispatcher down: waits (in FLUSH mode, so workers dispose any
// jobs they still dequeue) for all running CompileTasks to finish, then
// disposes whatever is left in the output queue without restoring function
// code. With a recompilation delay configured, remaining input jobs are
// instead compiled and installed synchronously.
void OptimizingCompileDispatcher::Stop() {
  mode_ = FLUSH;
  if (FLAG_block_concurrent_recompilation) Unblock();
  {
    base::MutexGuard lock_guard(&ref_count_mutex_);
    while (ref_count_ > 0) ref_count_zero_.Wait(&ref_count_mutex_);
    mode_ = COMPILE;
  }

  if (recompilation_delay_ != 0) {
    // At this point the optimizing compiler thread's event loop has stopped.
    // There is no need for a mutex when reading input_queue_length_.
    while (input_queue_length_ > 0) CompileNext(NextInput());
    InstallOptimizedFunctions();
  } else {
    FlushOutputQueue(false);
  }
}
203 :
// Main-thread entry point: drains the output queue, finalizing each job so
// the generated code is installed on its function. Jobs whose function
// already has optimized code are aborted and disposed instead.
void OptimizingCompileDispatcher::InstallOptimizedFunctions() {
  HandleScope handle_scope(isolate_);

  for (;;) {
    OptimizedCompilationJob* job = nullptr;
    {
      base::MutexGuard access_output_queue_(&output_queue_mutex_);
      if (output_queue_.empty()) return;
      job = output_queue_.front();
      output_queue_.pop();
    }
    OptimizedCompilationInfo* info = job->compilation_info();
    Handle<JSFunction> function(*info->closure(), isolate_);
    if (function->HasOptimizedCode()) {
      // The function was optimized in the meantime; this result is stale.
      if (FLAG_trace_concurrent_recompilation) {
        PrintF(" ** Aborting compilation for ");
        function->ShortPrint();
        PrintF(" as it has already been optimized.\n");
      }
      DisposeCompilationJob(job, false);
    } else {
      Compiler::FinalizeOptimizedCompilationJob(job, isolate_);
    }
  }
}
229 :
230 7170 : void OptimizingCompileDispatcher::QueueForOptimization(
231 7170 : OptimizedCompilationJob* job) {
232 : DCHECK(IsQueueAvailable());
233 : {
234 : // Add job to the back of the input queue.
235 7170 : base::MutexGuard access_input_queue(&input_queue_mutex_);
236 : DCHECK_LT(input_queue_length_, input_queue_capacity_);
237 14340 : input_queue_[InputQueueIndex(input_queue_length_)] = job;
238 7170 : input_queue_length_++;
239 : }
240 7170 : if (FLAG_block_concurrent_recompilation) {
241 76 : blocked_jobs_++;
242 : } else {
243 7094 : V8::GetCurrentPlatform()->CallOnWorkerThread(
244 28376 : base::make_unique<CompileTask>(isolate_, this));
245 : }
246 7170 : }
247 :
248 211 : void OptimizingCompileDispatcher::Unblock() {
249 498 : while (blocked_jobs_ > 0) {
250 76 : V8::GetCurrentPlatform()->CallOnWorkerThread(
251 304 : base::make_unique<CompileTask>(isolate_, this));
252 76 : blocked_jobs_--;
253 : }
254 211 : }
255 :
256 : } // namespace internal
257 178779 : } // namespace v8
|