/src/php-src/ext/opcache/jit/ir/ir_check.c
Line | Count | Source |
1 | | /* |
2 | | * IR - Lightweight JIT Compilation Framework |
3 | | * (IR verification) |
4 | | * Copyright (C) 2022 Zend by Perforce. |
5 | | * Authors: Dmitry Stogov <dmitry@php.net> |
6 | | */ |
7 | | |
8 | | #include "ir.h" |
9 | | #include "ir_private.h" |
10 | | |
11 | | void ir_consistency_check(void) |
12 | 0 | { |
13 | 0 | IR_ASSERT(IR_UNUSED == 0); |
14 | 0 | IR_ASSERT(IR_NOP == 0); |
15 | |
|
16 | 0 | IR_ASSERT((int)IR_BOOL == (int)IR_C_BOOL); |
17 | 0 | IR_ASSERT((int)IR_U8 == (int)IR_C_U8); |
18 | 0 | IR_ASSERT((int)IR_U16 == (int)IR_C_U16); |
19 | 0 | IR_ASSERT((int)IR_U32 == (int)IR_C_U32); |
20 | 0 | IR_ASSERT((int)IR_U64 == (int)IR_C_U64); |
21 | 0 | IR_ASSERT((int)IR_ADDR == (int)IR_C_ADDR); |
22 | 0 | IR_ASSERT((int)IR_CHAR == (int)IR_C_CHAR); |
23 | 0 | IR_ASSERT((int)IR_I8 == (int)IR_C_I8); |
24 | 0 | IR_ASSERT((int)IR_I16 == (int)IR_C_I16); |
25 | 0 | IR_ASSERT((int)IR_I32 == (int)IR_C_I32); |
26 | 0 | IR_ASSERT((int)IR_I64 == (int)IR_C_I64); |
27 | 0 | IR_ASSERT((int)IR_DOUBLE == (int)IR_C_DOUBLE); |
28 | 0 | IR_ASSERT((int)IR_FLOAT == (int)IR_C_FLOAT); |
29 | |
|
30 | 0 | IR_ASSERT((IR_EQ ^ 1) == IR_NE); |
31 | 0 | IR_ASSERT((IR_LT ^ 3) == IR_GT); |
32 | 0 | IR_ASSERT((IR_GT ^ 3) == IR_LT); |
33 | 0 | IR_ASSERT((IR_LE ^ 3) == IR_GE); |
34 | 0 | IR_ASSERT((IR_GE ^ 3) == IR_LE); |
35 | 0 | IR_ASSERT((IR_ULT ^ 3) == IR_UGT); |
36 | 0 | IR_ASSERT((IR_UGT ^ 3) == IR_ULT); |
37 | 0 | IR_ASSERT((IR_ULE ^ 3) == IR_UGE); |
38 | 0 | IR_ASSERT((IR_UGE ^ 3) == IR_ULE); |
39 | 0 | IR_ASSERT((IR_ORDERED ^ 1) == IR_UNORDERED); |
40 | |
|
41 | 0 | IR_ASSERT(IR_ADD + 1 == IR_SUB); |
42 | 0 | } |
43 | | |
/* Scratch state threaded through the verification helpers.
 * The bit-sets accelerate membership tests on long use/input lists and
 * are allocated lazily from `arena`; everything is released in one shot
 * by ir_arena_free() at the end of ir_check(). */
typedef struct {
	ir_arena *arena;      /* lazily created arena backing the two tables below */
	ir_bitset *use_set;   /* per-instruction bit-set of its users (built on demand) */
	ir_bitset *input_set; /* per-instruction bit-set of its inputs (built on demand) */
} ir_check_ctx;
49 | | |
50 | | static bool ir_check_use_list(ir_check_ctx *check_ctx, const ir_ctx *ctx, ir_ref from, ir_ref to) |
51 | 0 | { |
52 | 0 | ir_ref n, *p; |
53 | 0 | ir_use_list *use_list = &ctx->use_lists[from]; |
54 | |
|
55 | 0 | n = use_list->count; |
56 | 0 | if (n > 16) { |
57 | | /* Avoid quadratic complexity by maintaining a temporary bit-set */ |
58 | 0 | ir_bitset set; |
59 | |
|
60 | 0 | if (!check_ctx->use_set || !(set = check_ctx->use_set[from])) { |
61 | 0 | if (!check_ctx->arena) { |
62 | 0 | check_ctx->arena = ir_arena_create(sizeof(ir_arena) + |
63 | 0 | ctx->insns_count * sizeof(ir_bitset) + |
64 | 0 | ir_bitset_len(ctx->insns_count) * sizeof(ir_bitset_base_t)); |
65 | 0 | } |
66 | 0 | if (!check_ctx->use_set) { |
67 | 0 | check_ctx->use_set = ir_arena_alloc(&check_ctx->arena, ctx->insns_count * sizeof(ir_bitset)); |
68 | 0 | memset(check_ctx->use_set, 0, ctx->insns_count * sizeof(ir_bitset)); |
69 | 0 | } |
70 | 0 | check_ctx->use_set[from] = set = (ir_bitset)ir_arena_alloc(&check_ctx->arena, |
71 | 0 | ir_bitset_len(ctx->insns_count) * sizeof(ir_bitset_base_t)); |
72 | 0 | memset(set, 0, ir_bitset_len(ctx->insns_count) * sizeof(ir_bitset_base_t)); |
73 | 0 | for (p = &ctx->use_edges[use_list->refs]; n > 0; p++, n--) { |
74 | 0 | ir_bitset_incl(set, *p); |
75 | 0 | } |
76 | 0 | } |
77 | 0 | return ir_bitset_in(set, to); |
78 | 0 | } |
79 | 0 | for (p = &ctx->use_edges[use_list->refs]; n > 0; p++, n--) { |
80 | 0 | if (*p == to) { |
81 | 0 | return 1; |
82 | 0 | } |
83 | 0 | } |
84 | 0 | return 0; |
85 | 0 | } |
86 | | |
87 | | static bool ir_check_input_list(ir_check_ctx *check_ctx, const ir_ctx *ctx, ir_ref from, ir_ref to) |
88 | 0 | { |
89 | 0 | ir_insn *insn = &ctx->ir_base[to]; |
90 | 0 | ir_ref n, j, *p; |
91 | |
|
92 | 0 | n = ir_input_edges_count(ctx, insn); |
93 | 0 | if (n > 16) { |
94 | | /* Avoid quadratic complexity by maintaining a temporary bit-set */ |
95 | 0 | ir_bitset set; |
96 | |
|
97 | 0 | if (!check_ctx->input_set || !(set = check_ctx->input_set[to])) { |
98 | 0 | if (!check_ctx->arena) { |
99 | 0 | check_ctx->arena = ir_arena_create(sizeof(ir_arena) + |
100 | 0 | ctx->insns_count * sizeof(ir_bitset) + |
101 | 0 | ir_bitset_len(ctx->insns_count) * sizeof(ir_bitset_base_t)); |
102 | 0 | } |
103 | 0 | if (!check_ctx->input_set) { |
104 | 0 | check_ctx->input_set = ir_arena_alloc(&check_ctx->arena, ctx->insns_count * sizeof(ir_bitset)); |
105 | 0 | memset(check_ctx->input_set, 0, ctx->insns_count * sizeof(ir_bitset)); |
106 | 0 | } |
107 | 0 | check_ctx->input_set[to] = set = (ir_bitset)ir_arena_alloc(&check_ctx->arena, |
108 | 0 | ir_bitset_len(ctx->insns_count) * sizeof(ir_bitset_base_t)); |
109 | 0 | memset(set, 0, ir_bitset_len(ctx->insns_count) * sizeof(ir_bitset_base_t)); |
110 | 0 | for (j = 1, p = insn->ops + 1; j <= n; j++, p++) { |
111 | 0 | if (*p > 0) ir_bitset_incl(set, *p); |
112 | 0 | } |
113 | 0 | } |
114 | 0 | return ir_bitset_in(set, from); |
115 | 0 | } |
116 | 0 | for (j = 1, p = insn->ops + 1; j <= n; j++, p++) { |
117 | 0 | if (*p == from) { |
118 | 0 | return 1; |
119 | 0 | } |
120 | 0 | } |
121 | 0 | return 0; |
122 | 0 | } |
123 | | |
124 | | static bool ir_check_domination(const ir_ctx *ctx, ir_ref def, ir_ref use) |
125 | 0 | { |
126 | 0 | uint32_t b1 = ctx->cfg_map[def]; |
127 | 0 | uint32_t b2 = ctx->cfg_map[use]; |
128 | 0 | ir_block *blocks = ctx->cfg_blocks; |
129 | 0 | uint32_t b1_depth = blocks[b1].dom_depth; |
130 | 0 | const ir_block *bb2 = &blocks[b2]; |
131 | |
|
132 | 0 | if (b1 == b2) { |
133 | 0 | return def < use; |
134 | 0 | } |
135 | 0 | while (bb2->dom_depth > b1_depth) { |
136 | 0 | b2 = bb2->dom_parent; |
137 | 0 | bb2 = &blocks[b2]; |
138 | 0 | } |
139 | 0 | return b1 == b2; |
140 | 0 | } |
141 | | |
142 | | bool ir_check(const ir_ctx *ctx) |
143 | 0 | { |
144 | 0 | ir_ref i, j, n, *p, use; |
145 | 0 | ir_insn *insn, *use_insn; |
146 | 0 | ir_type type; |
147 | 0 | uint32_t flags; |
148 | 0 | bool ok = 1; |
149 | 0 | ir_check_ctx check_ctx; |
150 | |
|
151 | 0 | check_ctx.arena = NULL; |
152 | 0 | check_ctx.use_set = NULL; |
153 | 0 | check_ctx.input_set = NULL; |
154 | |
|
155 | 0 | for (i = IR_UNUSED + 1, insn = ctx->ir_base + i; i < ctx->insns_count;) { |
156 | 0 | if (insn->op >= IR_LAST_OP) { |
157 | 0 | fprintf(stderr, "ir_base[%d].op invalid opcode (%d)\n", i, insn->op); |
158 | 0 | ok = 0; |
159 | 0 | break; |
160 | 0 | } |
161 | 0 | flags = ir_op_flags[insn->op]; |
162 | 0 | n = ir_input_edges_count(ctx, insn); |
163 | 0 | for (j = 1, p = insn->ops + 1; j <= n; j++, p++) { |
164 | 0 | use = *p; |
165 | 0 | if (use != IR_UNUSED) { |
166 | 0 | if (IR_IS_CONST_REF(use)) { |
167 | 0 | if (IR_OPND_KIND(flags, j) != IR_OPND_DATA) { |
168 | 0 | fprintf(stderr, "ir_base[%d].ops[%d] reference (%d) must not be constant\n", i, j, use); |
169 | 0 | ok = 0; |
170 | 0 | } else if (use >= ctx->consts_count) { |
171 | 0 | fprintf(stderr, "ir_base[%d].ops[%d] constant reference (%d) is out of range\n", i, j, use); |
172 | 0 | ok = 0; |
173 | 0 | } |
174 | 0 | } else { |
175 | 0 | if (use >= ctx->insns_count) { |
176 | 0 | fprintf(stderr, "ir_base[%d].ops[%d] insn reference (%d) is out of range\n", i, j, use); |
177 | 0 | ok = 0; |
178 | 0 | } |
179 | 0 | use_insn = &ctx->ir_base[use]; |
180 | 0 | switch (IR_OPND_KIND(flags, j)) { |
181 | 0 | case IR_OPND_DATA: |
182 | 0 | if (!(ir_op_flags[use_insn->op] & IR_OP_FLAG_DATA)) { |
183 | 0 | if (!(ir_op_flags[use_insn->op] & IR_OP_FLAG_MEM) |
184 | 0 | || use_insn->type == IR_VOID) { |
185 | 0 | fprintf(stderr, "ir_base[%d].ops[%d] reference (%d) must be DATA\n", i, j, use); |
186 | 0 | ok = 0; |
187 | 0 | } |
188 | 0 | } |
189 | 0 | if ((ctx->flags2 & IR_LINEAR) |
190 | 0 | && use >= i |
191 | 0 | && !(insn->op == IR_PHI && ctx->ir_base[insn->op1].op == IR_LOOP_BEGIN)) { |
192 | 0 | fprintf(stderr, "ir_base[%d].ops[%d] invalid forward reference (%d)\n", i, j, use); |
193 | 0 | ok = 0; |
194 | 0 | } |
195 | 0 | if (flags & IR_OP_FLAG_DATA) { |
196 | 0 | switch (insn->op) { |
197 | 0 | case IR_COND: |
198 | 0 | if (j == 1) { |
199 | 0 | break; |
200 | 0 | } |
201 | 0 | IR_FALLTHROUGH; |
202 | 0 | case IR_ADD: |
203 | 0 | case IR_SUB: |
204 | 0 | case IR_MUL: |
205 | 0 | case IR_DIV: |
206 | 0 | case IR_MOD: |
207 | 0 | case IR_NEG: |
208 | 0 | case IR_ABS: |
209 | 0 | case IR_ADD_OV: |
210 | 0 | case IR_SUB_OV: |
211 | 0 | case IR_MUL_OV: |
212 | 0 | case IR_NOT: |
213 | 0 | case IR_OR: |
214 | 0 | case IR_AND: |
215 | 0 | case IR_XOR: |
216 | 0 | case IR_SHL: |
217 | 0 | case IR_SHR: |
218 | 0 | case IR_SAR: |
219 | 0 | case IR_ROL: |
220 | 0 | case IR_ROR: |
221 | 0 | case IR_BSWAP: |
222 | 0 | case IR_MIN: |
223 | 0 | case IR_MAX: |
224 | 0 | case IR_PHI: |
225 | 0 | case IR_COPY: |
226 | 0 | case IR_PI: |
227 | 0 | if (insn->type != use_insn->type) { |
228 | 0 | if (j == 2 |
229 | 0 | && (insn->op == IR_SHL |
230 | 0 | || insn->op == IR_SHR |
231 | 0 | || insn->op == IR_SAR |
232 | 0 | || insn->op == IR_ROL |
233 | 0 | || insn->op == IR_ROR) |
234 | 0 | && ir_type_size[use_insn->type] < ir_type_size[insn->type]) { |
235 | | /* second argument of SHIFT may be incompatible with result */ |
236 | 0 | break; |
237 | 0 | } |
238 | 0 | if (insn->op == IR_NOT && insn->type == IR_BOOL) { |
239 | | /* boolean not */ |
240 | 0 | break; |
241 | 0 | } |
242 | 0 | if (insn->type == IR_ADDR && (use_insn->type == IR_UINTPTR_T || use_insn->type == IR_INTPTR_T)) { |
243 | 0 | break; |
244 | 0 | } else if (use_insn->type == IR_ADDR && (insn->type == IR_UINTPTR_T || insn->type == IR_INTPTR_T)) { |
245 | 0 | break; |
246 | 0 | } |
247 | 0 | fprintf(stderr, "ir_base[%d].ops[%d] (%d) type is incompatible with result type (%d != %d)\n", |
248 | 0 | i, j, use, use_insn->type, insn->type); |
249 | 0 | ok = 0; |
250 | 0 | } |
251 | 0 | break; |
252 | 0 | } |
253 | 0 | } |
254 | 0 | if ((ctx->flags2 & IR_LINEAR) |
255 | 0 | && ctx->cfg_map |
256 | 0 | && insn->op != IR_PHI |
257 | 0 | && !ir_check_domination(ctx, use, i)) { |
258 | 0 | fprintf(stderr, "ir_base[%d].ops[%d] -> %d, %d doesn't dominate %d\n", i, j, use, use, i); |
259 | 0 | ok = 0; |
260 | 0 | } |
261 | 0 | break; |
262 | 0 | case IR_OPND_CONTROL: |
263 | 0 | if (flags & IR_OP_FLAG_BB_START) { |
264 | 0 | if (!(ir_op_flags[use_insn->op] & IR_OP_FLAG_BB_END)) { |
265 | 0 | fprintf(stderr, "ir_base[%d].ops[%d] reference (%d) must be BB_END\n", i, j, use); |
266 | 0 | ok = 0; |
267 | 0 | } |
268 | 0 | } else { |
269 | 0 | if (ir_op_flags[use_insn->op] & IR_OP_FLAG_BB_END) { |
270 | 0 | fprintf(stderr, "ir_base[%d].ops[%d] reference (%d) must not be BB_END\n", i, j, use); |
271 | 0 | ok = 0; |
272 | 0 | } |
273 | 0 | } |
274 | 0 | if ((ctx->flags2 & IR_LINEAR) |
275 | 0 | && use >= i |
276 | 0 | && !(insn->op == IR_LOOP_BEGIN)) { |
277 | 0 | fprintf(stderr, "ir_base[%d].ops[%d] invalid forward reference (%d)\n", i, j, use); |
278 | 0 | ok = 0; |
279 | 0 | } |
280 | 0 | break; |
281 | 0 | case IR_OPND_CONTROL_DEP: |
282 | 0 | if ((ctx->flags2 & IR_LINEAR) |
283 | 0 | && use >= i) { |
284 | 0 | fprintf(stderr, "ir_base[%d].ops[%d] invalid forward reference (%d)\n", i, j, use); |
285 | 0 | ok = 0; |
286 | 0 | } else if (insn->op == IR_PHI) { |
287 | 0 | ir_insn *merge_insn = &ctx->ir_base[insn->op1]; |
288 | 0 | if (merge_insn->op != IR_MERGE && merge_insn->op != IR_LOOP_BEGIN) { |
289 | 0 | fprintf(stderr, "ir_base[%d].ops[%d] reference (%d) must be MERGE or LOOP_BEGIN\n", i, j, use); |
290 | 0 | ok = 0; |
291 | 0 | } |
292 | 0 | } |
293 | 0 | break; |
294 | 0 | case IR_OPND_CONTROL_REF: |
295 | 0 | if (!(ir_op_flags[use_insn->op] & IR_OP_FLAG_CONTROL)) { |
296 | 0 | fprintf(stderr, "ir_base[%d].ops[%d] reference (%d) must be CONTROL\n", i, j, use); |
297 | 0 | ok = 0; |
298 | 0 | } |
299 | 0 | break; |
300 | 0 | default: |
301 | 0 | fprintf(stderr, "ir_base[%d].ops[%d] reference (%d) of unsupported kind\n", i, j, use); |
302 | 0 | ok = 0; |
303 | 0 | } |
304 | 0 | } |
305 | 0 | } else if ((insn->op == IR_RETURN || insn->op == IR_UNREACHABLE) && j == 2) { |
306 | | /* pass (function returns void) */ |
307 | 0 | } else if (insn->op == IR_BEGIN && j == 1) { |
308 | | /* pass (start of unreachable basic block) */ |
309 | 0 | } else if (IR_OPND_KIND(flags, j) != IR_OPND_CONTROL_REF |
310 | 0 | && (insn->op != IR_SNAPSHOT || j == 1)) { |
311 | 0 | fprintf(stderr, "ir_base[%d].ops[%d] missing reference (%d)\n", i, j, use); |
312 | 0 | ok = 0; |
313 | 0 | } |
314 | 0 | if (ctx->use_lists |
315 | 0 | && use > 0 |
316 | 0 | && !ir_check_use_list(&check_ctx, ctx, use, i)) { |
317 | 0 | fprintf(stderr, "ir_base[%d].ops[%d] is not in use list (%d)\n", i, j, use); |
318 | 0 | ok = 0; |
319 | 0 | } |
320 | 0 | } |
321 | | |
322 | 0 | switch (insn->op) { |
323 | 0 | case IR_PHI: |
324 | 0 | if (insn->inputs_count != ctx->ir_base[insn->op1].inputs_count + 1) { |
325 | 0 | fprintf(stderr, "ir_base[%d] inconsistent PHI inputs_count (%d != %d)\n", |
326 | 0 | i, insn->inputs_count, ctx->ir_base[insn->op1].inputs_count + 1); |
327 | 0 | ok = 0; |
328 | 0 | } |
329 | 0 | break; |
330 | 0 | case IR_LOAD: |
331 | 0 | case IR_STORE: |
332 | 0 | type = ctx->ir_base[insn->op2].type; |
333 | 0 | if (type != IR_ADDR |
334 | 0 | && (!IR_IS_TYPE_INT(type) || ir_type_size[type] != ir_type_size[IR_ADDR])) { |
335 | 0 | fprintf(stderr, "ir_base[%d].op2 must have ADDR type (%s)\n", |
336 | 0 | i, ir_type_name[type]); |
337 | 0 | ok = 0; |
338 | 0 | } |
339 | 0 | break; |
340 | 0 | case IR_VLOAD: |
341 | 0 | case IR_VSTORE: |
342 | 0 | if (ctx->ir_base[insn->op2].op != IR_VAR) { |
343 | 0 | fprintf(stderr, "ir_base[%d].op2 must be 'VAR' (%s)\n", |
344 | 0 | i, ir_op_name[ctx->ir_base[insn->op2].op]); |
345 | 0 | ok = 0; |
346 | 0 | } |
347 | 0 | break; |
348 | 0 | case IR_RETURN: |
349 | 0 | if (ctx->ret_type != (insn->op2 ? ctx->ir_base[insn->op2].type : IR_VOID)) { |
350 | 0 | fprintf(stderr, "ir_base[%d].type incompatible return type\n", i); |
351 | 0 | ok = 0; |
352 | 0 | } |
353 | 0 | break; |
354 | 0 | case IR_TAILCALL: |
355 | 0 | if (ctx->ret_type != insn->type) { |
356 | 0 | fprintf(stderr, "ir_base[%d].type incompatible return type\n", i); |
357 | 0 | ok = 0; |
358 | 0 | } |
359 | 0 | break; |
360 | 0 | case IR_PARAM: |
361 | 0 | if (i > 2 && ctx->ir_base[i - 1].op != IR_PARAM) { |
362 | 0 | fprintf(stderr, "ir_base[%d].op PARAMs must be used only right after START\n", i); |
363 | 0 | ok = 0; |
364 | 0 | } |
365 | 0 | break; |
366 | 0 | } |
367 | | |
368 | 0 | if (ctx->use_lists) { |
369 | 0 | ir_use_list *use_list = &ctx->use_lists[i]; |
370 | 0 | ir_ref count, n = use_list->count; |
371 | |
|
372 | 0 | for (p = &ctx->use_edges[use_list->refs]; n > 0; p++, n--) { |
373 | 0 | use = *p; |
374 | 0 | if (!ir_check_input_list(&check_ctx, ctx, i, use)) { |
375 | 0 | fprintf(stderr, "ir_base[%d] is in use list of ir_base[%d]\n", use, i); |
376 | 0 | ok = 0; |
377 | 0 | } |
378 | 0 | } |
379 | |
|
380 | 0 | if ((flags & IR_OP_FLAG_CONTROL) && !(flags & IR_OP_FLAG_MEM)) { |
381 | 0 | switch (insn->op) { |
382 | 0 | case IR_SWITCH: |
383 | | /* may have many successors */ |
384 | 0 | if (use_list->count < 1) { |
385 | 0 | fprintf(stderr, "ir_base[%d].op (SWITCH) must have at least 1 successor (%d)\n", i, use_list->count); |
386 | 0 | ok = 0; |
387 | 0 | } |
388 | 0 | break; |
389 | 0 | case IR_IF: |
390 | 0 | if (use_list->count != 2) { |
391 | 0 | fprintf(stderr, "ir_base[%d].op (IF) must have 2 successors (%d)\n", i, use_list->count); |
392 | 0 | ok = 0; |
393 | 0 | } |
394 | 0 | break; |
395 | 0 | case IR_UNREACHABLE: |
396 | 0 | case IR_RETURN: |
397 | 0 | if (use_list->count == 1) { |
398 | | /* UNREACHABLE and RETURN may be linked with the following ENTRY by a fake edge */ |
399 | 0 | if (ctx->ir_base[ctx->use_edges[use_list->refs]].op == IR_ENTRY) { |
400 | 0 | break; |
401 | 0 | } |
402 | 0 | } |
403 | 0 | IR_FALLTHROUGH; |
404 | 0 | case IR_IJMP: |
405 | 0 | if (use_list->count != 0) { |
406 | 0 | fprintf(stderr, "ir_base[%d].op (%s) must not have successors (%d)\n", |
407 | 0 | i, ir_op_name[insn->op], use_list->count); |
408 | 0 | ok = 0; |
409 | 0 | } |
410 | 0 | break; |
411 | 0 | default: |
412 | | /* skip data references */ |
413 | 0 | count = n = use_list->count; |
414 | 0 | for (p = &ctx->use_edges[use_list->refs]; n > 0; p++, n--) { |
415 | 0 | use = *p; |
416 | 0 | if (!(ir_op_flags[ctx->ir_base[use].op] & IR_OP_FLAG_CONTROL)) { |
417 | 0 | count--; |
418 | 0 | } |
419 | 0 | } |
420 | 0 | if (count != 1) { |
421 | 0 | if (insn->op == IR_CALL && count == 2) { |
422 | | /* result of CALL may be used as data in control instruction */ |
423 | 0 | break; |
424 | 0 | } |
425 | 0 | if ((insn->op == IR_LOOP_END || insn->op == IR_END) && count == 2) { |
426 | | /* LOOP_END/END may be linked with the following ENTRY by a fake edge */ |
427 | 0 | if (ctx->ir_base[ctx->use_edges[use_list->refs]].op == IR_ENTRY) { |
428 | 0 | count--; |
429 | 0 | } |
430 | 0 | if (ctx->ir_base[ctx->use_edges[use_list->refs + 1]].op == IR_ENTRY) { |
431 | 0 | count--; |
432 | 0 | } |
433 | 0 | if (count == 1) { |
434 | 0 | break; |
435 | 0 | } |
436 | 0 | } |
437 | 0 | if (count == 0 && (insn->op == IR_END || insn->op == IR_LOOP_END)) { |
438 | | /* Dead block */ |
439 | 0 | break; |
440 | 0 | } |
441 | 0 | fprintf(stderr, "ir_base[%d].op (%s) must have 1 successor (%d)\n", |
442 | 0 | i, ir_op_name[insn->op], count); |
443 | 0 | ok = 0; |
444 | 0 | } |
445 | 0 | break; |
446 | 0 | } |
447 | 0 | } |
448 | 0 | } |
449 | 0 | n = ir_insn_inputs_to_len(n); |
450 | 0 | i += n; |
451 | 0 | insn += n; |
452 | 0 | } |
453 | | |
454 | 0 | if (check_ctx.arena) { |
455 | 0 | ir_arena_free(check_ctx.arena); |
456 | 0 | } |
457 | | |
458 | | // if (!ok) { |
459 | | // ir_dump_codegen(ctx, stderr); |
460 | | // } |
461 | 0 | IR_ASSERT(ok); |
462 | 0 | return ok; |
463 | 0 | } |