/src/brpc/src/bthread/stack_inl.h
Line | Count | Source |
1 | | // Licensed to the Apache Software Foundation (ASF) under one |
2 | | // or more contributor license agreements. See the NOTICE file |
3 | | // distributed with this work for additional information |
4 | | // regarding copyright ownership. The ASF licenses this file |
5 | | // to you under the Apache License, Version 2.0 (the |
6 | | // "License"); you may not use this file except in compliance |
7 | | // with the License. You may obtain a copy of the License at |
8 | | // |
9 | | // http://www.apache.org/licenses/LICENSE-2.0 |
10 | | // |
11 | | // Unless required by applicable law or agreed to in writing, |
12 | | // software distributed under the License is distributed on an |
13 | | // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY |
14 | | // KIND, either express or implied. See the License for the |
15 | | // specific language governing permissions and limitations |
16 | | // under the License. |
17 | | |
18 | | // bthread - An M:N threading library to make applications more concurrent. |
19 | | |
20 | | // Date: Sun Sep 7 22:37:39 CST 2014 |
21 | | |
22 | | #ifndef BTHREAD_ALLOCATE_STACK_INL_H |
23 | | #define BTHREAD_ALLOCATE_STACK_INL_H |
24 | | |
25 | | DECLARE_int32(guard_page_size); |
26 | | DECLARE_int32(tc_stack_small); |
27 | | DECLARE_int32(tc_stack_normal); |
28 | | |
29 | | namespace bthread { |
30 | | |
31 | | #ifdef BUTIL_USE_ASAN |
32 | | namespace internal { |
33 | | |
34 | | BUTIL_FORCE_INLINE void ASanPoisonMemoryRegion(const StackStorage& storage) { |
35 | | if (NULL == storage.bottom) { |
36 | | return; |
37 | | } |
38 | | |
39 | | CHECK_GT((void*)storage.bottom, |
40 | | reinterpret_cast<void*>(storage.stacksize + + storage.guardsize)); |
41 | | BUTIL_ASAN_POISON_MEMORY_REGION( |
42 | | (char*)storage.bottom - storage.stacksize, storage.stacksize); |
43 | | } |
44 | | |
45 | | BUTIL_FORCE_INLINE void ASanUnpoisonMemoryRegion(const StackStorage& storage) { |
46 | | if (NULL == storage.bottom) { |
47 | | return; |
48 | | } |
49 | | CHECK_GT(storage.bottom, |
50 | | reinterpret_cast<void*>(storage.stacksize + storage.guardsize)); |
51 | | BUTIL_ASAN_UNPOISON_MEMORY_REGION( |
52 | | (char*)storage.bottom - storage.stacksize, storage.stacksize); |
53 | | } |
54 | | |
55 | | |
56 | | BUTIL_FORCE_INLINE void StartSwitchFiber(void** fake_stack_save, StackStorage& storage) { |
57 | | if (NULL == storage.bottom) { |
58 | | return; |
59 | | } |
60 | | RELEASE_ASSERT(storage.bottom > |
61 | | reinterpret_cast<void*>(storage.stacksize + storage.guardsize)); |
62 | | // Lowest address of this stack. |
63 | | void* asan_stack_bottom = (char*)storage.bottom - storage.stacksize; |
64 | | BUTIL_ASAN_START_SWITCH_FIBER(fake_stack_save, asan_stack_bottom, storage.stacksize); |
65 | | } |
66 | | |
// Tells ASan that the fiber switch begun by StartSwitchFiber() completed,
// restoring the saved fake-stack handle. The two NULLs mean we do not ask
// ASan to report the bounds of the stack we switched away from.
BUTIL_FORCE_INLINE void FinishSwitchFiber(void* fake_stack_save) {
    BUTIL_ASAN_FINISH_SWITCH_FIBER(fake_stack_save, NULL, NULL);
}
70 | | |
71 | | class ScopedASanFiberSwitcher { |
72 | | public: |
73 | | ScopedASanFiberSwitcher(StackStorage& next_storage) { |
74 | | StartSwitchFiber(&_fake_stack, next_storage); |
75 | | } |
76 | | |
77 | | ~ScopedASanFiberSwitcher() { |
78 | | FinishSwitchFiber(_fake_stack); |
79 | | } |
80 | | |
81 | | DISALLOW_COPY_AND_ASSIGN(ScopedASanFiberSwitcher); |
82 | | |
83 | | private: |
84 | | void* _fake_stack{NULL}; |
85 | | }; |
86 | | |
// Poison/unpoison the stack held by a StackStorage for ASan.
#define BTHREAD_ASAN_POISON_MEMORY_REGION(storage) \
    ::bthread::internal::ASanPoisonMemoryRegion(storage)

#define BTHREAD_ASAN_UNPOISON_MEMORY_REGION(storage) \
    ::bthread::internal::ASanUnpoisonMemoryRegion(storage)

// Declares a local RAII object named `switcher' that brackets the upcoming
// fiber switch onto `storage' with ASan start/finish annotations.
#define BTHREAD_SCOPED_ASAN_FIBER_SWITCHER(storage) \
    ::bthread::internal::ScopedASanFiberSwitcher switcher(storage)

} // namespace internal
#else

// If ASan is not in use, the annotations are no-ops.
#define BTHREAD_ASAN_POISON_MEMORY_REGION(storage) ((void)(storage))
#define BTHREAD_ASAN_UNPOISON_MEMORY_REGION(storage) ((void)(storage))
#define BTHREAD_SCOPED_ASAN_FIBER_SWITCHER(storage) ((void)(storage))

#endif // BUTIL_USE_ASAN
105 | | |
// Tag types selecting a stack-size class for StackFactory below.
// MainStackClass denotes the worker pthread's native stack, so it carries
// no size flag of its own.
struct MainStackClass {};

struct SmallStackClass {
    // Dereferenced by StackFactory to obtain the stack size in bytes;
    // presumably points at the corresponding gflag — initialized elsewhere.
    static int* stack_size_flag;
    // Older gcc does not allow static const enum, use int instead.
    static const int stacktype = (int)STACK_TYPE_SMALL;
};

struct NormalStackClass {
    static int* stack_size_flag;
    static const int stacktype = (int)STACK_TYPE_NORMAL;
};

struct LargeStackClass {
    static int* stack_size_flag;
    static const int stacktype = (int)STACK_TYPE_LARGE;
};
123 | | |
124 | | template <typename StackClass> struct StackFactory { |
125 | | struct Wrapper : public ContextualStack { |
126 | 0 | explicit Wrapper(void (*entry)(intptr_t)) { |
127 | 0 | if (allocate_stack_storage(&storage, *StackClass::stack_size_flag, |
128 | 0 | FLAGS_guard_page_size) != 0) { |
129 | 0 | storage.zeroize(); |
130 | 0 | context = NULL; |
131 | 0 | return; |
132 | 0 | } |
133 | 0 | context = bthread_make_fcontext(storage.bottom, storage.stacksize, entry); |
134 | 0 | stacktype = (StackType)StackClass::stacktype; |
135 | | // It's poisoned prior to use. |
136 | 0 | BTHREAD_ASAN_POISON_MEMORY_REGION(storage); |
137 | 0 | } Unexecuted instantiation: bthread::StackFactory<bthread::SmallStackClass>::Wrapper::Wrapper(void (*)(long)) Unexecuted instantiation: bthread::StackFactory<bthread::NormalStackClass>::Wrapper::Wrapper(void (*)(long)) Unexecuted instantiation: bthread::StackFactory<bthread::LargeStackClass>::Wrapper::Wrapper(void (*)(long)) |
138 | 0 | ~Wrapper() { |
139 | 0 | if (context) { |
140 | 0 | context = NULL; |
141 | | // Unpoison to avoid affecting other allocator. |
142 | 0 | BTHREAD_ASAN_UNPOISON_MEMORY_REGION(storage); |
143 | 0 | deallocate_stack_storage(&storage); |
144 | 0 | storage.zeroize(); |
145 | 0 | } |
146 | 0 | } Unexecuted instantiation: bthread::StackFactory<bthread::SmallStackClass>::Wrapper::~Wrapper() Unexecuted instantiation: bthread::StackFactory<bthread::NormalStackClass>::Wrapper::~Wrapper() Unexecuted instantiation: bthread::StackFactory<bthread::LargeStackClass>::Wrapper::~Wrapper() |
147 | | }; |
148 | | |
149 | 0 | static ContextualStack* get_stack(void (*entry)(intptr_t)) { |
150 | 0 | ContextualStack* cs = butil::get_object<Wrapper>(entry); |
151 | | // Marks stack as addressable. |
152 | 0 | BTHREAD_ASAN_UNPOISON_MEMORY_REGION(cs->storage); |
153 | 0 | return cs; |
154 | 0 | } Unexecuted instantiation: bthread::StackFactory<bthread::SmallStackClass>::get_stack(void (*)(long)) Unexecuted instantiation: bthread::StackFactory<bthread::NormalStackClass>::get_stack(void (*)(long)) Unexecuted instantiation: bthread::StackFactory<bthread::LargeStackClass>::get_stack(void (*)(long)) |
155 | | |
156 | 0 | static void return_stack(ContextualStack* cs) { |
157 | | // Marks stack as unaddressable. |
158 | 0 | BTHREAD_ASAN_POISON_MEMORY_REGION(cs->storage); |
159 | 0 | butil::return_object(static_cast<Wrapper*>(cs)); |
160 | 0 | } Unexecuted instantiation: bthread::StackFactory<bthread::SmallStackClass>::return_stack(bthread::ContextualStack*) Unexecuted instantiation: bthread::StackFactory<bthread::NormalStackClass>::return_stack(bthread::ContextualStack*) Unexecuted instantiation: bthread::StackFactory<bthread::LargeStackClass>::return_stack(bthread::ContextualStack*) |
161 | | }; |
162 | | |
163 | | template <> struct StackFactory<MainStackClass> { |
164 | 0 | static ContextualStack* get_stack(void (*)(intptr_t)) { |
165 | 0 | ContextualStack* s = new (std::nothrow) ContextualStack; |
166 | 0 | if (NULL == s) { |
167 | 0 | return NULL; |
168 | 0 | } |
169 | 0 | s->context = NULL; |
170 | 0 | s->stacktype = STACK_TYPE_MAIN; |
171 | 0 | s->storage.zeroize(); |
172 | 0 | return s; |
173 | 0 | } |
174 | | |
175 | 0 | static void return_stack(ContextualStack* s) { |
176 | 0 | delete s; |
177 | 0 | } |
178 | | }; |
179 | | |
180 | 0 | inline ContextualStack* get_stack(StackType type, void (*entry)(intptr_t)) { |
181 | 0 | switch (type) { |
182 | 0 | case STACK_TYPE_PTHREAD: |
183 | 0 | return NULL; |
184 | 0 | case STACK_TYPE_SMALL: |
185 | 0 | return StackFactory<SmallStackClass>::get_stack(entry); |
186 | 0 | case STACK_TYPE_NORMAL: |
187 | 0 | return StackFactory<NormalStackClass>::get_stack(entry); |
188 | 0 | case STACK_TYPE_LARGE: |
189 | 0 | return StackFactory<LargeStackClass>::get_stack(entry); |
190 | 0 | case STACK_TYPE_MAIN: |
191 | 0 | return StackFactory<MainStackClass>::get_stack(entry); |
192 | 0 | } |
193 | 0 | return NULL; |
194 | 0 | } |
195 | | |
196 | 0 | inline void return_stack(ContextualStack* s) { |
197 | 0 | if (NULL == s) { |
198 | 0 | return; |
199 | 0 | } |
200 | 0 | switch (s->stacktype) { |
201 | 0 | case STACK_TYPE_PTHREAD: |
202 | 0 | assert(false); |
203 | 0 | return; |
204 | 0 | case STACK_TYPE_SMALL: |
205 | 0 | return StackFactory<SmallStackClass>::return_stack(s); |
206 | 0 | case STACK_TYPE_NORMAL: |
207 | 0 | return StackFactory<NormalStackClass>::return_stack(s); |
208 | 0 | case STACK_TYPE_LARGE: |
209 | 0 | return StackFactory<LargeStackClass>::return_stack(s); |
210 | 0 | case STACK_TYPE_MAIN: |
211 | 0 | return StackFactory<MainStackClass>::return_stack(s); |
212 | 0 | } |
213 | 0 | } |
214 | | |
// Saves the current execution context into `from->context' and resumes
// execution at `to->context'. Control comes back (returning through this
// call) when some other fiber later jumps to `from'.
inline void jump_stack(ContextualStack* from, ContextualStack* to) {
    bthread_jump_fcontext(&from->context, to->context, 0/*not skip remained*/);
}
218 | | |
219 | | } // namespace bthread |
220 | | |
221 | | namespace butil { |
222 | | |
// Cap the number of stack wrappers per ObjectPool block for every size
// class — presumably to bound the memory committed by a single block,
// since each wrapper owns a whole stack.
template <> struct ObjectPoolBlockMaxItem<
    bthread::StackFactory<bthread::LargeStackClass>::Wrapper> {
    static const size_t value = 64;
};
template <> struct ObjectPoolBlockMaxItem<
    bthread::StackFactory<bthread::NormalStackClass>::Wrapper> {
    static const size_t value = 64;
};

template <> struct ObjectPoolBlockMaxItem<
    bthread::StackFactory<bthread::SmallStackClass>::Wrapper> {
    static const size_t value = 64;
};
236 | | |
237 | | template <> struct ObjectPoolFreeChunkMaxItem< |
238 | | bthread::StackFactory<bthread::SmallStackClass>::Wrapper> { |
239 | 0 | inline static size_t value() { |
240 | 0 | return (FLAGS_tc_stack_small <= 0 ? 0 : FLAGS_tc_stack_small); |
241 | 0 | } |
242 | | }; |
243 | | |
244 | | template <> struct ObjectPoolFreeChunkMaxItem< |
245 | | bthread::StackFactory<bthread::NormalStackClass>::Wrapper> { |
246 | 0 | inline static size_t value() { |
247 | 0 | return (FLAGS_tc_stack_normal <= 0 ? 0 : FLAGS_tc_stack_normal); |
248 | 0 | } |
249 | | }; |
250 | | |
251 | | template <> struct ObjectPoolFreeChunkMaxItem< |
252 | | bthread::StackFactory<bthread::LargeStackClass>::Wrapper> { |
253 | 0 | inline static size_t value() { return 1UL; } |
254 | | }; |
255 | | |
// Pool validators: a Wrapper whose constructor failed to allocate a stack
// is left with context == NULL and must not be handed out by get_object().
template <> struct ObjectPoolValidator<
    bthread::StackFactory<bthread::LargeStackClass>::Wrapper> {
    inline static bool validate(
        const bthread::StackFactory<bthread::LargeStackClass>::Wrapper* w) {
        return w->context != NULL;
    }
};

template <> struct ObjectPoolValidator<
    bthread::StackFactory<bthread::NormalStackClass>::Wrapper> {
    inline static bool validate(
        const bthread::StackFactory<bthread::NormalStackClass>::Wrapper* w) {
        return w->context != NULL;
    }
};

template <> struct ObjectPoolValidator<
    bthread::StackFactory<bthread::SmallStackClass>::Wrapper> {
    inline static bool validate(
        const bthread::StackFactory<bthread::SmallStackClass>::Wrapper* w) {
        return w->context != NULL;
    }
};
279 | | |
280 | | } // namespace butil |
281 | | |
282 | | #endif // BTHREAD_ALLOCATE_STACK_INL_H |