Coverage Report

Created: 2025-09-05 07:10

/src/cpython/Python/flowgraph.c
Line
Count
Source
1
#include "Python.h"
2
#include "opcode.h"
3
#include "pycore_c_array.h"       // _Py_CArray_EnsureCapacity
4
#include "pycore_flowgraph.h"
5
#include "pycore_compile.h"
6
#include "pycore_intrinsics.h"
7
#include "pycore_pymem.h"         // _PyMem_IsPtrFreed()
8
#include "pycore_long.h"          // _PY_IS_SMALL_INT()
9
10
#include "pycore_opcode_utils.h"
11
#include "pycore_opcode_metadata.h" // OPCODE_HAS_ARG, etc
12
13
#include <stdbool.h>
14
15
16
#undef SUCCESS
17
#undef ERROR
18
1.17M
#define SUCCESS 0
19
6.74k
#define ERROR -1
20
21
#define RETURN_IF_ERROR(X)  \
22
1.83M
    if ((X) == -1) {        \
23
0
        return ERROR;       \
24
0
    }
25
26
392k
#define DEFAULT_BLOCK_SIZE 16
27
28
typedef _Py_SourceLocation location;
29
typedef _PyJumpTargetLabel jump_target_label;
30
31
typedef struct _PyCfgInstruction {
32
    int i_opcode;
33
    int i_oparg;
34
    _Py_SourceLocation i_loc;
35
    struct _PyCfgBasicblock *i_target; /* target block (if jump instruction) */
36
    struct _PyCfgBasicblock *i_except; /* target block when exception is raised */
37
} cfg_instr;
38
39
typedef struct _PyCfgBasicblock {
40
    /* Each basicblock in a compilation unit is linked via b_list in the
41
       reverse order in which the blocks are allocated.  b_list points to the next
42
       block in this list, not to be confused with b_next, which is next by
43
       control flow. */
44
    struct _PyCfgBasicblock *b_list;
45
    /* The label of this block if it is a jump target, -1 otherwise */
46
    _PyJumpTargetLabel b_label;
47
    /* Exception stack at start of block, used by assembler to create the exception handling table */
48
    struct _PyCfgExceptStack *b_exceptstack;
49
    /* pointer to an array of instructions, initially NULL */
50
    cfg_instr *b_instr;
51
    /* If b_next is non-NULL, it is a pointer to the next
52
       block reached by normal control flow. */
53
    struct _PyCfgBasicblock *b_next;
54
    /* number of instructions used */
55
    int b_iused;
56
    /* length of instruction array (b_instr) */
57
    int b_ialloc;
58
    /* Used by add_checks_for_loads_of_unknown_variables */
59
    uint64_t b_unsafe_locals_mask;
60
    /* Number of predecessors that a block has. */
61
    int b_predecessors;
62
    /* depth of stack upon entry of block, computed by stackdepth() */
63
    int b_startdepth;
64
    /* Basic block is an exception handler that preserves lasti */
65
    unsigned b_preserve_lasti : 1;
66
    /* Used by compiler passes to mark whether they have visited a basic block. */
67
    unsigned b_visited : 1;
68
    /* b_except_handler is used by the cold-detection algorithm to mark exception targets */
69
    unsigned b_except_handler : 1;
70
    /* b_cold is true if this block is not perf critical (like an exception handler) */
71
    unsigned b_cold : 1;
72
    /* b_warm is used by the cold-detection algorithm to mark blocks which are definitely not cold */
73
    unsigned b_warm : 1;
74
} basicblock;
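
To make the two linkages concrete, here is a minimal standalone sketch (invented, simplified types; not the real basicblock) that threads three blocks on both an allocation chain (like b_list) and a control-flow chain (like b_next):

#include <stdio.h>
#include <stdlib.h>

/* Simplified stand-ins for basicblock's two linkages (names invented). */
typedef struct block {
    struct block *list;   /* allocation chain, newest first, like b_list */
    struct block *next;   /* control-flow successor, like b_next */
    int id;
} block;

static block *new_block(block **all, int id)
{
    block *b = calloc(1, sizeof(block));
    if (b == NULL) {
        return NULL;
    }
    b->id = id;
    b->list = *all;       /* prepend, so the chain is in reverse allocation order */
    *all = b;
    return b;
}

int main(void)
{
    block *all = NULL;
    block *b0 = new_block(&all, 0);
    block *b1 = new_block(&all, 1);
    block *b2 = new_block(&all, 2);
    if (b0 == NULL || b1 == NULL || b2 == NULL) {
        return 1;
    }
    b0->next = b1;        /* control flow: 0 -> 1 -> 2 */
    b1->next = b2;
    printf("allocation order:");
    for (block *b = all; b != NULL; b = b->list) {
        printf(" %d", b->id);                /* 2 1 0 */
    }
    printf("\ncontrol flow:    ");
    for (block *b = b0; b != NULL; b = b->next) {
        printf(" %d", b->id);                /* 0 1 2 */
    }
    printf("\n");
    while (all != NULL) {                    /* free via the allocation chain */
        block *n = all->list;
        free(all);
        all = n;
    }
    return 0;
}
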
75
76
77
struct _PyCfgBuilder {
78
    /* The entryblock, at which control flow begins. All blocks of the
79
       CFG are reachable through the b_next links */
80
    struct _PyCfgBasicblock *g_entryblock;
81
    /* Pointer to the most recently allocated block.  By following
82
       b_list links, you can reach all allocated blocks. */
83
    struct _PyCfgBasicblock *g_block_list;
84
    /* pointer to the block currently being constructed */
85
    struct _PyCfgBasicblock *g_curblock;
86
    /* label for the next instruction to be placed */
87
    _PyJumpTargetLabel g_current_label;
88
};
89
90
typedef struct _PyCfgBuilder cfg_builder;
91
92
402k
#define SAME_LABEL(L1, L2) ((L1).id == (L2).id)
93
402k
#define IS_LABEL(L) (!SAME_LABEL((L), (NO_LABEL)))
94
95
#define LOCATION(LNO, END_LNO, COL, END_COL) \
96
354
    ((const _Py_SourceLocation){(LNO), (END_LNO), (COL), (END_COL)})
97
98
static inline int
99
is_block_push(cfg_instr *i)
100
1.71M
{
101
1.71M
    assert(OPCODE_HAS_ARG(i->i_opcode) || !IS_BLOCK_PUSH_OPCODE(i->i_opcode));
102
1.71M
    return IS_BLOCK_PUSH_OPCODE(i->i_opcode);
103
1.71M
}
104
105
static inline int
106
is_jump(cfg_instr *i)
107
1.56M
{
108
1.56M
    return OPCODE_HAS_JUMP(i->i_opcode);
109
1.56M
}
110
111
/* One arg */
112
#define INSTR_SET_OP1(I, OP, ARG) \
113
40.4k
    do { \
114
40.4k
        assert(OPCODE_HAS_ARG(OP)); \
115
40.4k
        cfg_instr *_instr__ptr_ = (I); \
116
40.4k
        _instr__ptr_->i_opcode = (OP); \
117
40.4k
        _instr__ptr_->i_oparg = (ARG); \
118
40.4k
    } while (0);
119
120
/* No args */
121
#define INSTR_SET_OP0(I, OP) \
122
82.3k
    do { \
123
82.3k
        assert(!OPCODE_HAS_ARG(OP)); \
124
82.3k
        cfg_instr *_instr__ptr_ = (I); \
125
82.3k
        _instr__ptr_->i_opcode = (OP); \
126
82.3k
        _instr__ptr_->i_oparg = 0; \
127
82.3k
    } while (0);
128
129
#define INSTR_SET_LOC(I, LOC) \
130
4.93k
    do { \
131
4.93k
        cfg_instr *_instr__ptr_ = (I); \
132
4.93k
        _instr__ptr_->i_loc = (LOC); \
133
4.93k
    } while (0);
134
135
/***** Blocks *****/
136
137
/* Returns the offset of the next instruction in the current block's
138
   b_instr array.  Resizes b_instr as necessary.
139
   Returns -1 on failure.
140
*/
141
static int
142
basicblock_next_instr(basicblock *b)
143
392k
{
144
392k
    assert(b != NULL);
145
392k
    _Py_c_array_t array = {
146
392k
        .array = (void*)b->b_instr,
147
392k
        .allocated_entries = b->b_ialloc,
148
392k
        .item_size = sizeof(cfg_instr),
149
392k
        .initial_num_entries = DEFAULT_BLOCK_SIZE,
150
392k
    };
151
152
392k
    RETURN_IF_ERROR(_Py_CArray_EnsureCapacity(&array, b->b_iused + 1));
153
392k
    b->b_instr = array.array;
154
392k
    b->b_ialloc = array.allocated_entries;
155
392k
    return b->b_iused++;
156
392k
}
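
The function above delegates the growth policy to _Py_CArray_EnsureCapacity; a toy version of the same append-with-doubling idiom (illustrative only, not the _Py_CArray API; all names invented) looks like this:

#include <stdlib.h>
#include <string.h>

typedef struct { int opcode; int oparg; } toy_instr;

typedef struct {
    toy_instr *items;
    int used;
    int allocated;
} toy_block;

/* Return the index of a fresh slot, growing the array as needed; -1 on failure. */
static int toy_next_instr(toy_block *b)
{
    if (b->used >= b->allocated) {
        int new_alloc = b->allocated ? b->allocated * 2 : 16;   /* cf. DEFAULT_BLOCK_SIZE */
        toy_instr *p = realloc(b->items, (size_t)new_alloc * sizeof(toy_instr));
        if (p == NULL) {
            return -1;
        }
        /* zero the tail so new slots start out in a known state */
        memset(p + b->allocated, 0,
               (size_t)(new_alloc - b->allocated) * sizeof(toy_instr));
        b->items = p;
        b->allocated = new_alloc;
    }
    return b->used++;
}

int main(void)
{
    toy_block b = {0};
    for (int i = 0; i < 100; i++) {
        int off = toy_next_instr(&b);
        if (off < 0) {
            free(b.items);
            return 1;
        }
        b.items[off].opcode = i;
    }
    free(b.items);
    return 0;
}
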
157
158
static cfg_instr *
159
1.45M
basicblock_last_instr(const basicblock *b) {
160
1.45M
    assert(b->b_iused >= 0);
161
1.45M
    if (b->b_iused > 0) {
162
1.34M
        assert(b->b_instr != NULL);
163
1.34M
        return &b->b_instr[b->b_iused - 1];
164
1.34M
    }
165
114k
    return NULL;
166
1.45M
}
167
168
/* Allocate a new block and return a pointer to it.
169
   Returns NULL on error.
170
*/
171
172
static basicblock *
173
cfg_builder_new_block(cfg_builder *g)
174
45.8k
{
175
45.8k
    basicblock *b = (basicblock *)PyMem_Calloc(1, sizeof(basicblock));
176
45.8k
    if (b == NULL) {
177
0
        PyErr_NoMemory();
178
0
        return NULL;
179
0
    }
180
    /* Extend the singly linked list of blocks with new block. */
181
45.8k
    b->b_list = g->g_block_list;
182
45.8k
    g->g_block_list = b;
183
45.8k
    b->b_label = NO_LABEL;
184
45.8k
    return b;
185
45.8k
}
186
187
static int
188
basicblock_addop(basicblock *b, int opcode, int oparg, location loc)
189
383k
{
190
383k
    assert(IS_WITHIN_OPCODE_RANGE(opcode));
191
383k
    assert(!IS_ASSEMBLER_OPCODE(opcode));
192
383k
    assert(OPCODE_HAS_ARG(opcode) || HAS_TARGET(opcode) || oparg == 0);
193
383k
    assert(0 <= oparg && oparg < (1 << 30));
194
195
383k
    int off = basicblock_next_instr(b);
196
383k
    if (off < 0) {
197
0
        return ERROR;
198
0
    }
199
383k
    cfg_instr *i = &b->b_instr[off];
200
383k
    i->i_opcode = opcode;
201
383k
    i->i_oparg = oparg;
202
383k
    i->i_loc = loc;
203
    // memory is already zero initialized
204
383k
    assert(i->i_target == NULL);
205
383k
    assert(i->i_except == NULL);
206
207
383k
    return SUCCESS;
208
383k
}
209
210
static int
211
basicblock_add_jump(basicblock *b, int opcode, basicblock *target, location loc)
212
1.43k
{
213
1.43k
    cfg_instr *last = basicblock_last_instr(b);
214
1.43k
    if (last && is_jump(last)) {
215
0
        return ERROR;
216
0
    }
217
218
1.43k
    RETURN_IF_ERROR(
219
1.43k
        basicblock_addop(b, opcode, target->b_label.id, loc));
220
1.43k
    last = basicblock_last_instr(b);
221
1.43k
    assert(last && last->i_opcode == opcode);
222
1.43k
    last->i_target = target;
223
1.43k
    return SUCCESS;
224
1.43k
}
225
226
static inline int
227
basicblock_append_instructions(basicblock *to, basicblock *from)
228
3.10k
{
229
8.18k
    for (int i = 0; i < from->b_iused; i++) {
230
5.07k
        int n = basicblock_next_instr(to);
231
5.07k
        if (n < 0) {
232
0
            return ERROR;
233
0
        }
234
5.07k
        to->b_instr[n] = from->b_instr[i];
235
5.07k
    }
236
3.10k
    return SUCCESS;
237
3.10k
}
238
239
static inline int
240
454k
basicblock_nofallthrough(const basicblock *b) {
241
454k
    cfg_instr *last = basicblock_last_instr(b);
242
454k
    return (last &&
243
454k
            (IS_SCOPE_EXIT_OPCODE(last->i_opcode) ||
244
430k
             IS_UNCONDITIONAL_JUMP_OPCODE(last->i_opcode)));
245
454k
}
246
247
#define BB_NO_FALLTHROUGH(B) (basicblock_nofallthrough(B))
248
734k
#define BB_HAS_FALLTHROUGH(B) (!basicblock_nofallthrough(B))
249
250
static basicblock *
251
copy_basicblock(cfg_builder *g, basicblock *block)
252
614
{
253
    /* Cannot copy a block if it has a fallthrough, since
254
     * a block can only have one fallthrough predecessor.
255
     */
256
614
    assert(BB_NO_FALLTHROUGH(block));
257
614
    basicblock *result = cfg_builder_new_block(g);
258
614
    if (result == NULL) {
259
0
        return NULL;
260
0
    }
261
614
    if (basicblock_append_instructions(result, block) < 0) {
262
0
        return NULL;
263
0
    }
264
614
    return result;
265
614
}
266
267
static int
268
3.56k
basicblock_insert_instruction(basicblock *block, int pos, cfg_instr *instr) {
269
3.56k
    RETURN_IF_ERROR(basicblock_next_instr(block));
270
65.2k
    for (int i = block->b_iused - 1; i > pos; i--) {
271
61.7k
        block->b_instr[i] = block->b_instr[i-1];
272
61.7k
    }
273
3.56k
    block->b_instr[pos] = *instr;
274
3.56k
    return SUCCESS;
275
3.56k
}
276
277
/* For debugging purposes only */
278
#if 0
279
static void
280
dump_instr(cfg_instr *i)
281
{
282
    const char *jump = is_jump(i) ? "jump " : "";
283
284
    char arg[128];
285
286
    *arg = '\0';
287
    if (OPCODE_HAS_ARG(i->i_opcode)) {
288
        sprintf(arg, "arg: %d ", i->i_oparg);
289
    }
290
    if (HAS_TARGET(i->i_opcode)) {
291
        sprintf(arg, "target: %p [%d] ", i->i_target, i->i_oparg);
292
    }
293
    fprintf(stderr, "line: %d, %s (%d)  %s%s\n",
294
                    i->i_loc.lineno, _PyOpcode_OpName[i->i_opcode], i->i_opcode, arg, jump);
295
}
296
297
static inline int
298
basicblock_returns(const basicblock *b) {
299
    cfg_instr *last = basicblock_last_instr(b);
300
    return last && IS_RETURN_OPCODE(last->i_opcode);
301
}
302
303
static void
304
dump_basicblock(const basicblock *b, bool highlight)
305
{
306
    const char *b_return = basicblock_returns(b) ? "return " : "";
307
    if (highlight) {
308
        fprintf(stderr, ">>> ");
309
    }
310
    fprintf(stderr, "%d: [EH=%d CLD=%d WRM=%d NO_FT=%d %p] used: %d, depth: %d, preds: %d %s\n",
311
        b->b_label.id, b->b_except_handler, b->b_cold, b->b_warm, BB_NO_FALLTHROUGH(b), b, b->b_iused,
312
        b->b_startdepth, b->b_predecessors, b_return);
313
    int depth = b->b_startdepth;
314
    if (b->b_instr) {
315
        int i;
316
        for (i = 0; i < b->b_iused; i++) {
317
            fprintf(stderr, "  [%02d] depth: %d ", i, depth);
318
            dump_instr(b->b_instr + i);
319
320
            int popped = _PyOpcode_num_popped(b->b_instr[i].i_opcode, b->b_instr[i].i_oparg);
321
            int pushed = _PyOpcode_num_pushed(b->b_instr[i].i_opcode, b->b_instr[i].i_oparg);
322
            depth += (pushed - popped);
323
        }
324
    }
325
}
326
327
void
328
_PyCfgBuilder_DumpGraph(const basicblock *entryblock, const basicblock *mark)
329
{
330
    for (const basicblock *b = entryblock; b != NULL; b = b->b_next) {
331
        dump_basicblock(b, b == mark);
332
    }
333
}
334
335
#endif
336
337
338
/***** CFG construction and modification *****/
339
340
static basicblock *
341
cfg_builder_use_next_block(cfg_builder *g, basicblock *block)
342
37.7k
{
343
37.7k
    assert(block != NULL);
344
37.7k
    g->g_curblock->b_next = block;
345
37.7k
    g->g_curblock = block;
346
37.7k
    return block;
347
37.7k
}
348
349
static inline int
350
86.9k
basicblock_exits_scope(const basicblock *b) {
351
86.9k
    cfg_instr *last = basicblock_last_instr(b);
352
86.9k
    return last && IS_SCOPE_EXIT_OPCODE(last->i_opcode);
353
86.9k
}
354
355
static inline int
356
55.6k
basicblock_has_eval_break(const basicblock *b) {
357
273k
    for (int i = 0; i < b->b_iused; i++) {
358
240k
        if (OPCODE_HAS_EVAL_BREAK(b->b_instr[i].i_opcode)) {
359
22.3k
            return true;
360
22.3k
        }
361
240k
    }
362
33.2k
    return false;
363
55.6k
}
364
365
static bool
366
cfg_builder_current_block_is_terminated(cfg_builder *g)
367
389k
{
368
389k
    cfg_instr *last = basicblock_last_instr(g->g_curblock);
369
389k
    if (last && IS_TERMINATOR_OPCODE(last->i_opcode)) {
370
32.0k
        return true;
371
32.0k
    }
372
357k
    if (IS_LABEL(g->g_current_label)) {
373
5.66k
        if (last || IS_LABEL(g->g_curblock->b_label)) {
374
5.66k
            return true;
375
5.66k
        }
376
0
        else {
377
            /* current block is empty, label it */
378
0
            g->g_curblock->b_label = g->g_current_label;
379
0
            g->g_current_label = NO_LABEL;
380
0
        }
381
5.66k
    }
382
351k
    return false;
383
357k
}
384
385
static int
386
cfg_builder_maybe_start_new_block(cfg_builder *g)
387
389k
{
388
389k
    if (cfg_builder_current_block_is_terminated(g)) {
389
37.7k
        basicblock *b = cfg_builder_new_block(g);
390
37.7k
        if (b == NULL) {
391
0
            return ERROR;
392
0
        }
393
37.7k
        b->b_label = g->g_current_label;
394
37.7k
        g->g_current_label = NO_LABEL;
395
37.7k
        cfg_builder_use_next_block(g, b);
396
37.7k
    }
397
389k
    return SUCCESS;
398
389k
}
399
400
#ifndef NDEBUG
401
static bool
402
cfg_builder_check(cfg_builder *g)
403
{
404
    assert(g->g_entryblock->b_iused > 0);
405
    for (basicblock *block = g->g_block_list; block != NULL; block = block->b_list) {
406
        assert(!_PyMem_IsPtrFreed(block));
407
        if (block->b_instr != NULL) {
408
            assert(block->b_ialloc > 0);
409
            assert(block->b_iused >= 0);
410
            assert(block->b_ialloc >= block->b_iused);
411
        }
412
        else {
413
            assert (block->b_iused == 0);
414
            assert (block->b_ialloc == 0);
415
        }
416
    }
417
    return true;
418
}
419
#endif
420
421
static int
422
init_cfg_builder(cfg_builder *g)
423
6.74k
{
424
6.74k
    g->g_block_list = NULL;
425
6.74k
    basicblock *block = cfg_builder_new_block(g);
426
6.74k
    if (block == NULL) {
427
0
        return ERROR;
428
0
    }
429
6.74k
    g->g_curblock = g->g_entryblock = block;
430
6.74k
    g->g_current_label = NO_LABEL;
431
6.74k
    return SUCCESS;
432
6.74k
}
433
434
cfg_builder *
435
_PyCfgBuilder_New(void)
436
6.74k
{
437
6.74k
    cfg_builder *g = PyMem_Malloc(sizeof(cfg_builder));
438
6.74k
    if (g == NULL) {
439
0
        PyErr_NoMemory();
440
0
        return NULL;
441
0
    }
442
6.74k
    memset(g, 0, sizeof(cfg_builder));
443
6.74k
    if (init_cfg_builder(g) < 0) {
444
0
        PyMem_Free(g);
445
0
        return NULL;
446
0
    }
447
6.74k
    return g;
448
6.74k
}
449
450
void
451
_PyCfgBuilder_Free(cfg_builder *g)
452
6.74k
{
453
6.74k
    if (g == NULL) {
454
0
        return;
455
0
    }
456
6.74k
    assert(cfg_builder_check(g));
457
6.74k
    basicblock *b = g->g_block_list;
458
52.6k
    while (b != NULL) {
459
45.8k
        if (b->b_instr) {
460
45.8k
            PyMem_Free((void *)b->b_instr);
461
45.8k
        }
462
45.8k
        basicblock *next = b->b_list;
463
45.8k
        PyMem_Free((void *)b);
464
45.8k
        b = next;
465
45.8k
    }
466
6.74k
    PyMem_Free(g);
467
6.74k
}
468
469
int
470
_PyCfgBuilder_CheckSize(cfg_builder *g)
471
6.74k
{
472
6.74k
    int nblocks = 0;
473
51.2k
    for (basicblock *b = g->g_block_list; b != NULL; b = b->b_list) {
474
44.4k
        nblocks++;
475
44.4k
    }
476
6.74k
    if ((size_t)nblocks > SIZE_MAX / sizeof(basicblock *)) {
477
0
        PyErr_NoMemory();
478
0
        return ERROR;
479
0
    }
480
6.74k
    return SUCCESS;
481
6.74k
}
482
483
int
484
_PyCfgBuilder_UseLabel(cfg_builder *g, jump_target_label lbl)
485
18.7k
{
486
18.7k
    g->g_current_label = lbl;
487
18.7k
    return cfg_builder_maybe_start_new_block(g);
488
18.7k
}
489
490
int
491
_PyCfgBuilder_Addop(cfg_builder *g, int opcode, int oparg, location loc)
492
370k
{
493
370k
    RETURN_IF_ERROR(cfg_builder_maybe_start_new_block(g));
494
370k
    return basicblock_addop(g->g_curblock, opcode, oparg, loc);
495
370k
}
496
497
498
static basicblock *
499
next_nonempty_block(basicblock *b)
500
75.6k
{
501
78.7k
    while (b && b->b_iused == 0) {
502
3.13k
        b = b->b_next;
503
3.13k
    }
504
75.6k
    return b;
505
75.6k
}
506
507
/***** debugging helpers *****/
508
509
#ifndef NDEBUG
510
static int remove_redundant_nops(cfg_builder *g);
511
512
static bool
513
no_redundant_nops(cfg_builder *g) {
514
    if (remove_redundant_nops(g) != 0) {
515
        return false;
516
    }
517
    return true;
518
}
519
520
static bool
521
no_redundant_jumps(cfg_builder *g) {
522
    for (basicblock *b = g->g_entryblock; b != NULL; b = b->b_next) {
523
        cfg_instr *last = basicblock_last_instr(b);
524
        if (last != NULL) {
525
            if (IS_UNCONDITIONAL_JUMP_OPCODE(last->i_opcode)) {
526
                basicblock *next = next_nonempty_block(b->b_next);
527
                basicblock *jump_target = next_nonempty_block(last->i_target);
528
                if (jump_target == next) {
529
                    assert(next);
530
                    if (last->i_loc.lineno == next->b_instr[0].i_loc.lineno) {
531
                        assert(0);
532
                        return false;
533
                    }
534
                }
535
            }
536
        }
537
    }
538
    return true;
539
}
540
#endif
541
542
/***** CFG preprocessing (jump targets and exceptions) *****/
543
544
static int
545
45.8k
normalize_jumps_in_block(cfg_builder *g, basicblock *b) {
546
45.8k
    cfg_instr *last = basicblock_last_instr(b);
547
45.8k
    if (last == NULL || !IS_CONDITIONAL_JUMP_OPCODE(last->i_opcode)) {
548
34.3k
        return SUCCESS;
549
34.3k
    }
550
11.4k
    assert(!IS_ASSEMBLER_OPCODE(last->i_opcode));
551
552
11.4k
    bool is_forward = last->i_target->b_visited == 0;
553
11.4k
    if (is_forward) {
554
10.7k
        RETURN_IF_ERROR(
555
10.7k
            basicblock_addop(b, NOT_TAKEN, 0, last->i_loc));
556
10.7k
        return SUCCESS;
557
10.7k
    }
558
559
741
    int reversed_opcode = 0;
560
741
    switch(last->i_opcode) {
561
29
        case POP_JUMP_IF_NOT_NONE:
562
29
            reversed_opcode = POP_JUMP_IF_NONE;
563
29
            break;
564
36
        case POP_JUMP_IF_NONE:
565
36
            reversed_opcode = POP_JUMP_IF_NOT_NONE;
566
36
            break;
567
611
        case POP_JUMP_IF_FALSE:
568
611
            reversed_opcode = POP_JUMP_IF_TRUE;
569
611
            break;
570
65
        case POP_JUMP_IF_TRUE:
571
65
            reversed_opcode = POP_JUMP_IF_FALSE;
572
65
            break;
573
741
    }
574
    /* transform 'conditional jump T' to
575
     * 'reversed_jump b_next' followed by 'jump_backwards T'
576
     */
577
578
741
    basicblock *target = last->i_target;
579
741
    basicblock *backwards_jump = cfg_builder_new_block(g);
580
741
    if (backwards_jump == NULL) {
581
0
        return ERROR;
582
0
    }
583
741
    RETURN_IF_ERROR(
584
741
        basicblock_addop(backwards_jump, NOT_TAKEN, 0, last->i_loc));
585
741
    RETURN_IF_ERROR(
586
741
        basicblock_add_jump(backwards_jump, JUMP, target, last->i_loc));
587
741
    backwards_jump->b_startdepth = target->b_startdepth;
588
741
    last->i_opcode = reversed_opcode;
589
741
    last->i_target = b->b_next;
590
591
741
    backwards_jump->b_cold = b->b_cold;
592
741
    backwards_jump->b_next = b->b_next;
593
741
    b->b_next = backwards_jump;
594
741
    return SUCCESS;
595
741
}
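
The backward-jump rewrite above can be pictured with a toy opcode set; this sketch (invented opcode names, not CPython's) shows the reversal table and the resulting block shape:

enum toy_op { T_JUMP_IF_FALSE, T_JUMP_IF_TRUE, T_JUMP_IF_NONE, T_JUMP_IF_NOT_NONE };

static int toy_reverse(int op)
{
    switch (op) {
        case T_JUMP_IF_FALSE:    return T_JUMP_IF_TRUE;
        case T_JUMP_IF_TRUE:     return T_JUMP_IF_FALSE;
        case T_JUMP_IF_NONE:     return T_JUMP_IF_NOT_NONE;
        case T_JUMP_IF_NOT_NONE: return T_JUMP_IF_NONE;
    }
    return -1;   /* not a reversible conditional jump */
}

int main(void)
{
    /* before:  B:  ...; T_JUMP_IF_FALSE L      (L is a backward target)
       after:   B:  ...; T_JUMP_IF_TRUE  B2     (forward, reversed condition)
                B2: NOT_TAKEN; JUMP L           (single backward edge left) */
    return toy_reverse(T_JUMP_IF_FALSE) == T_JUMP_IF_TRUE ? 0 : 1;
}
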
596
597
598
static int
599
normalize_jumps(cfg_builder *g)
600
6.74k
{
601
6.74k
    basicblock *entryblock = g->g_entryblock;
602
51.8k
    for (basicblock *b = entryblock; b != NULL; b = b->b_next) {
603
45.1k
        b->b_visited = 0;
604
45.1k
    }
605
52.6k
    for (basicblock *b = entryblock; b != NULL; b = b->b_next) {
606
45.8k
        b->b_visited = 1;
607
45.8k
        RETURN_IF_ERROR(normalize_jumps_in_block(g, b));
608
45.8k
    }
609
6.74k
    return SUCCESS;
610
6.74k
}
611
612
static int
613
6.74k
check_cfg(cfg_builder *g) {
614
51.2k
    for (basicblock *b = g->g_entryblock; b != NULL; b = b->b_next) {
615
        /* Raise SystemError if a jump or exit is not the last instruction in the block. */
616
415k
        for (int i = 0; i < b->b_iused; i++) {
617
370k
            int opcode = b->b_instr[i].i_opcode;
618
370k
            assert(!IS_ASSEMBLER_OPCODE(opcode));
619
370k
            if (IS_TERMINATOR_OPCODE(opcode)) {
620
38.7k
                if (i != b->b_iused - 1) {
621
0
                    PyErr_SetString(PyExc_SystemError, "malformed control flow graph.");
622
0
                    return ERROR;
623
0
                }
624
38.7k
            }
625
370k
        }
626
44.4k
    }
627
6.74k
    return SUCCESS;
628
6.74k
}
629
630
static int
631
get_max_label(basicblock *entryblock)
632
26.2k
{
633
26.2k
    int lbl = -1;
634
204k
    for (basicblock *b = entryblock; b != NULL; b = b->b_next) {
635
178k
        if (b->b_label.id > lbl) {
636
68.3k
            lbl = b->b_label.id;
637
68.3k
        }
638
178k
    }
639
26.2k
    return lbl;
640
26.2k
}
641
642
/* Calculate the actual jump target from the target_label */
643
static int
644
translate_jump_labels_to_targets(basicblock *entryblock)
645
6.74k
{
646
6.74k
    int max_label = get_max_label(entryblock);
647
6.74k
    size_t mapsize = sizeof(basicblock *) * (max_label + 1);
648
6.74k
    basicblock **label2block = (basicblock **)PyMem_Malloc(mapsize);
649
6.74k
    if (!label2block) {
650
0
        PyErr_NoMemory();
651
0
        return ERROR;
652
0
    }
653
6.74k
    memset(label2block, 0, mapsize);
654
51.2k
    for (basicblock *b = entryblock; b != NULL; b = b->b_next) {
655
44.4k
        if (b->b_label.id >= 0) {
656
18.7k
            label2block[b->b_label.id] = b;
657
18.7k
        }
658
44.4k
    }
659
51.2k
    for (basicblock *b = entryblock; b != NULL; b = b->b_next) {
660
415k
        for (int i = 0; i < b->b_iused; i++) {
661
370k
            cfg_instr *instr = &b->b_instr[i];
662
370k
            assert(instr->i_target == NULL);
663
370k
            if (HAS_TARGET(instr->i_opcode)) {
664
23.1k
                int lbl = instr->i_oparg;
665
23.1k
                assert(lbl >= 0 && lbl <= max_label);
666
23.1k
                instr->i_target = label2block[lbl];
667
23.1k
                assert(instr->i_target != NULL);
668
23.1k
                assert(instr->i_target->b_label.id == lbl);
669
23.1k
            }
670
370k
        }
671
44.4k
    }
672
6.74k
    PyMem_Free(label2block);
673
6.74k
    return SUCCESS;
674
6.74k
}
675
676
static int
677
6.74k
mark_except_handlers(basicblock *entryblock) {
678
#ifndef NDEBUG
679
    for (basicblock *b = entryblock; b != NULL; b = b->b_next) {
680
        assert(!b->b_except_handler);
681
    }
682
#endif
683
51.2k
    for (basicblock *b = entryblock; b != NULL; b = b->b_next) {
684
415k
        for (int i=0; i < b->b_iused; i++) {
685
370k
            cfg_instr *instr = &b->b_instr[i];
686
370k
            if (is_block_push(instr)) {
687
2.60k
                instr->i_target->b_except_handler = 1;
688
2.60k
            }
689
370k
        }
690
44.4k
    }
691
6.74k
    return SUCCESS;
692
6.74k
}
693
694
695
struct _PyCfgExceptStack {
696
    basicblock *handlers[CO_MAXBLOCKS+2];
697
    int depth;
698
};
699
700
701
static basicblock *
702
2.59k
push_except_block(struct _PyCfgExceptStack *stack, cfg_instr *setup) {
703
2.59k
    assert(is_block_push(setup));
704
2.59k
    int opcode = setup->i_opcode;
705
2.59k
    basicblock * target = setup->i_target;
706
2.59k
    if (opcode == SETUP_WITH || opcode == SETUP_CLEANUP) {
707
1.49k
        target->b_preserve_lasti = 1;
708
1.49k
    }
709
2.59k
    assert(stack->depth <= CO_MAXBLOCKS);
710
2.59k
    stack->handlers[++stack->depth] = target;
711
2.59k
    return target;
712
2.59k
}
713
714
static basicblock *
715
2.08k
pop_except_block(struct _PyCfgExceptStack *stack) {
716
2.08k
    assert(stack->depth > 0);
717
2.08k
    return stack->handlers[--stack->depth];
718
2.08k
}
719
720
static basicblock *
721
37.9k
except_stack_top(struct _PyCfgExceptStack *stack) {
722
37.9k
    return stack->handlers[stack->depth];
723
37.9k
}
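
Before label_exception_targets below puts these helpers to work, the stack discipline itself can be seen in isolation; a toy walk (invented encoding, not CPython opcodes) that pushes a handler on SETUP, pops on POP_BLOCK, and tags every instruction with the active handler:

#include <stdio.h>

enum { T_SETUP, T_POP_BLOCK, T_OTHER };
typedef struct { int kind; int handler; } titem;

int main(void)
{
    titem code[] = {
        { T_OTHER, 0 }, { T_SETUP, 7 }, { T_OTHER, 0 },
        { T_SETUP, 9 }, { T_OTHER, 0 }, { T_POP_BLOCK, 0 },
        { T_OTHER, 0 }, { T_POP_BLOCK, 0 }, { T_OTHER, 0 },
    };
    int handlers[8];
    int depth = 0;
    handlers[0] = -1;     /* -1: no active handler */
    for (size_t i = 0; i < sizeof(code) / sizeof(code[0]); i++) {
        if (code[i].kind == T_SETUP) {
            handlers[++depth] = code[i].handler;
        }
        else if (code[i].kind == T_POP_BLOCK) {
            depth--;
        }
        printf("instr %zu: handler %d\n", i, handlers[depth]);
    }
    return 0;
}
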
724
725
static struct _PyCfgExceptStack *
726
6.74k
make_except_stack(void) {
727
6.74k
    struct _PyCfgExceptStack *new = PyMem_Malloc(sizeof(struct _PyCfgExceptStack));
728
6.74k
    if (new == NULL) {
729
0
        PyErr_NoMemory();
730
0
        return NULL;
731
0
    }
732
6.74k
    new->depth = 0;
733
6.74k
    new->handlers[0] = NULL;
734
6.74k
    return new;
735
6.74k
}
736
737
static struct _PyCfgExceptStack *
738
14.0k
copy_except_stack(struct _PyCfgExceptStack *stack) {
739
14.0k
    struct _PyCfgExceptStack *copy = PyMem_Malloc(sizeof(struct _PyCfgExceptStack));
740
14.0k
    if (copy == NULL) {
741
0
        PyErr_NoMemory();
742
0
        return NULL;
743
0
    }
744
14.0k
    memcpy(copy, stack, sizeof(struct _PyCfgExceptStack));
745
14.0k
    return copy;
746
14.0k
}
747
748
static basicblock**
749
50.8k
make_cfg_traversal_stack(basicblock *entryblock) {
750
50.8k
    int nblocks = 0;
751
406k
    for (basicblock *b = entryblock; b != NULL; b = b->b_next) {
752
355k
        b->b_visited = 0;
753
355k
        nblocks++;
754
355k
    }
755
50.8k
    basicblock **stack = (basicblock **)PyMem_Malloc(sizeof(basicblock *) * nblocks);
756
50.8k
    if (!stack) {
757
0
        PyErr_NoMemory();
758
0
    }
759
50.8k
    return stack;
760
50.8k
}
761
762
/* Compute the stack effects of opcode with argument oparg.
763
764
   Some opcodes have a different stack effect when jumping to the target
765
   than when not jumping. The 'jump' parameter specifies the case:
766
767
   * 0 -- not jumping
768
   * 1 -- jumping
769
   * -1 -- maximal of the two
770
 */
771
typedef struct {
772
    /* The stack effect of the instruction. */
773
    int net;
774
} stack_effects;
775
776
Py_LOCAL(int)
777
get_stack_effects(int opcode, int oparg, int jump, stack_effects *effects)
778
345k
{
779
345k
    if (opcode < 0) {
780
0
        return -1;
781
0
    }
782
345k
    if ((opcode <= MAX_REAL_OPCODE) && (_PyOpcode_Deopt[opcode] != opcode)) {
783
        // Specialized instructions are not supported.
784
0
        return -1;
785
0
    }
786
345k
    int popped = _PyOpcode_num_popped(opcode, oparg);
787
345k
    int pushed = _PyOpcode_num_pushed(opcode, oparg);
788
345k
    if (popped < 0 || pushed < 0) {
789
0
        return -1;
790
0
    }
791
345k
    if (IS_BLOCK_PUSH_OPCODE(opcode) && !jump) {
792
2.59k
        effects->net = 0;
793
2.59k
        return 0;
794
2.59k
    }
795
342k
    effects->net = pushed - popped;
796
342k
    return 0;
797
345k
}
798
799
Py_LOCAL_INLINE(int)
800
stackdepth_push(basicblock ***sp, basicblock *b, int depth)
801
46.7k
{
802
46.7k
    if (!(b->b_startdepth < 0 || b->b_startdepth == depth)) {
803
0
        PyErr_Format(PyExc_ValueError, "Invalid CFG, inconsistent stackdepth");
804
0
        return ERROR;
805
0
    }
806
46.7k
    if (b->b_startdepth < depth && b->b_startdepth < 100) {
807
37.8k
        assert(b->b_startdepth < 0);
808
37.8k
        b->b_startdepth = depth;
809
37.8k
        *(*sp)++ = b;
810
37.8k
    }
811
46.7k
    return SUCCESS;
812
46.7k
}
813
814
/* Find the flow path that needs the largest stack.  We assume that
815
 * cycles in the flow graph have no net effect on the stack depth.
816
 */
817
static int
818
calculate_stackdepth(cfg_builder *g)
819
6.74k
{
820
6.74k
    basicblock *entryblock = g->g_entryblock;
821
51.8k
    for (basicblock *b = entryblock; b != NULL; b = b->b_next) {
822
45.1k
        b->b_startdepth = INT_MIN;
823
45.1k
    }
824
6.74k
    basicblock **stack = make_cfg_traversal_stack(entryblock);
825
6.74k
    if (!stack) {
826
0
        return ERROR;
827
0
    }
828
829
830
6.74k
    int stackdepth = -1;
831
6.74k
    int maxdepth = 0;
832
6.74k
    basicblock **sp = stack;
833
6.74k
    if (stackdepth_push(&sp, entryblock, 0) < 0) {
834
0
        goto error;
835
0
    }
836
44.6k
    while (sp != stack) {
837
37.8k
        basicblock *b = *--sp;
838
37.8k
        int depth = b->b_startdepth;
839
37.8k
        assert(depth >= 0);
840
37.8k
        basicblock *next = b->b_next;
841
344k
        for (int i = 0; i < b->b_iused; i++) {
842
325k
            cfg_instr *instr = &b->b_instr[i];
843
325k
            stack_effects effects;
844
325k
            if (get_stack_effects(instr->i_opcode, instr->i_oparg, 0, &effects) < 0) {
845
0
                PyErr_Format(PyExc_SystemError,
846
0
                             "Invalid stack effect for opcode=%d, arg=%i",
847
0
                             instr->i_opcode, instr->i_oparg);
848
0
                goto error;
849
0
            }
850
325k
            int new_depth = depth + effects.net;
851
325k
            if (new_depth < 0) {
852
0
                PyErr_Format(PyExc_ValueError,
853
0
                             "Invalid CFG, stack underflow");
854
0
                goto error;
855
0
            }
856
325k
            maxdepth = Py_MAX(maxdepth, depth);
857
325k
            if (HAS_TARGET(instr->i_opcode) && instr->i_opcode != END_ASYNC_FOR) {
858
20.2k
                if (get_stack_effects(instr->i_opcode, instr->i_oparg, 1, &effects) < 0) {
859
0
                    PyErr_Format(PyExc_SystemError,
860
0
                                 "Invalid stack effect for opcode=%d, arg=%i",
861
0
                                 instr->i_opcode, instr->i_oparg);
862
0
                    goto error;
863
0
                }
864
20.2k
                int target_depth = depth + effects.net;
865
20.2k
                assert(target_depth >= 0); /* invalid code or bug in stackdepth() */
866
20.2k
                maxdepth = Py_MAX(maxdepth, depth);
867
20.2k
                if (stackdepth_push(&sp, instr->i_target, target_depth) < 0) {
868
0
                    goto error;
869
0
                }
870
20.2k
            }
871
325k
            depth = new_depth;
872
325k
            assert(!IS_ASSEMBLER_OPCODE(instr->i_opcode));
873
325k
            if (IS_UNCONDITIONAL_JUMP_OPCODE(instr->i_opcode) ||
874
325k
                IS_SCOPE_EXIT_OPCODE(instr->i_opcode))
875
18.0k
            {
876
                /* remaining code is dead */
877
18.0k
                next = NULL;
878
18.0k
                break;
879
18.0k
            }
880
325k
        }
881
37.8k
        if (next != NULL) {
882
19.7k
            assert(BB_HAS_FALLTHROUGH(b));
883
19.7k
            if (stackdepth_push(&sp, next, depth) < 0) {
884
0
                goto error;
885
0
            }
886
19.7k
        }
887
37.8k
    }
888
6.74k
    stackdepth = maxdepth;
889
6.74k
error:
890
6.74k
    PyMem_Free(stack);
891
6.74k
    return stackdepth;
892
6.74k
}
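
The worklist computation above can be exercised on a toy three-block CFG; a minimal sketch with an invented instruction encoding (net stack effect plus an optional jump target), not CPython opcodes:

#include <stdio.h>

typedef struct { int net; int jump_to; } tinstr;   /* jump_to: block index or -1 */
typedef struct { tinstr code[4]; int ninstr; int next; } tblock;

#define NBLOCKS 3

int main(void)
{
    tblock blocks[NBLOCKS] = {
        /* push, push, cond-jump to block 2 (pops one either way) */
        { .code = { {+1, -1}, {+1, -1}, {-1, 2} }, .ninstr = 3, .next = 1 },
        /* push, pop */
        { .code = { {+1, -1}, {-1, -1} }, .ninstr = 2, .next = 2 },
        /* pop */
        { .code = { {-1, -1} }, .ninstr = 1, .next = -1 },
    };
    int startdepth[NBLOCKS] = { 0, -1, -1 };   /* -1: not yet visited */
    int stack[NBLOCKS], sp = 0, maxdepth = 0;
    stack[sp++] = 0;
    while (sp > 0) {
        int b = stack[--sp];
        int depth = startdepth[b];
        for (int i = 0; i < blocks[b].ninstr; i++) {
            tinstr *in = &blocks[b].code[i];
            if (in->jump_to >= 0 && startdepth[in->jump_to] < 0) {
                startdepth[in->jump_to] = depth + in->net;  /* depth on the taken path */
                stack[sp++] = in->jump_to;
            }
            depth += in->net;
            if (depth > maxdepth) {
                maxdepth = depth;
            }
        }
        if (blocks[b].next >= 0 && startdepth[blocks[b].next] < 0) {
            startdepth[blocks[b].next] = depth;              /* fallthrough */
            stack[sp++] = blocks[b].next;
        }
    }
    printf("max stack depth: %d\n", maxdepth);   /* 2 for this toy CFG */
    return 0;
}
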
893
894
static int
895
6.74k
label_exception_targets(basicblock *entryblock) {
896
6.74k
    basicblock **todo_stack = make_cfg_traversal_stack(entryblock);
897
6.74k
    if (todo_stack == NULL) {
898
0
        return ERROR;
899
0
    }
900
6.74k
    struct _PyCfgExceptStack *except_stack = make_except_stack();
901
6.74k
    if (except_stack == NULL) {
902
0
        PyMem_Free(todo_stack);
903
0
        PyErr_NoMemory();
904
0
        return ERROR;
905
0
    }
906
6.74k
    except_stack->depth = 0;
907
6.74k
    todo_stack[0] = entryblock;
908
6.74k
    entryblock->b_visited = 1;
909
6.74k
    entryblock->b_exceptstack = except_stack;
910
6.74k
    basicblock **todo = &todo_stack[1];
911
6.74k
    basicblock *handler = NULL;
912
44.7k
    while (todo > todo_stack) {
913
37.9k
        todo--;
914
37.9k
        basicblock *b = todo[0];
915
37.9k
        assert(b->b_visited == 1);
916
37.9k
        except_stack = b->b_exceptstack;
917
37.9k
        assert(except_stack != NULL);
918
37.9k
        b->b_exceptstack = NULL;
919
37.9k
        handler = except_stack_top(except_stack);
920
37.9k
        int last_yield_except_depth = -1;
921
395k
        for (int i = 0; i < b->b_iused; i++) {
922
357k
            cfg_instr *instr = &b->b_instr[i];
923
357k
            if (is_block_push(instr)) {
924
2.59k
                if (!instr->i_target->b_visited) {
925
2.59k
                    struct _PyCfgExceptStack *copy = copy_except_stack(except_stack);
926
2.59k
                    if (copy == NULL) {
927
0
                        goto error;
928
0
                    }
929
2.59k
                    instr->i_target->b_exceptstack = copy;
930
2.59k
                    todo[0] = instr->i_target;
931
2.59k
                    instr->i_target->b_visited = 1;
932
2.59k
                    todo++;
933
2.59k
                }
934
2.59k
                handler = push_except_block(except_stack, instr);
935
2.59k
            }
936
354k
            else if (instr->i_opcode == POP_BLOCK) {
937
2.08k
                handler = pop_except_block(except_stack);
938
2.08k
                INSTR_SET_OP0(instr, NOP);
939
2.08k
            }
940
352k
            else if (is_jump(instr)) {
941
19.3k
                instr->i_except = handler;
942
19.3k
                assert(i == b->b_iused -1);
943
19.3k
                if (!instr->i_target->b_visited) {
944
13.9k
                    if (BB_HAS_FALLTHROUGH(b)) {
945
11.4k
                        struct _PyCfgExceptStack *copy = copy_except_stack(except_stack);
946
11.4k
                        if (copy == NULL) {
947
0
                            goto error;
948
0
                        }
949
11.4k
                        instr->i_target->b_exceptstack = copy;
950
11.4k
                    }
951
2.42k
                    else {
952
2.42k
                        instr->i_target->b_exceptstack = except_stack;
953
2.42k
                        except_stack = NULL;
954
2.42k
                    }
955
13.9k
                    todo[0] = instr->i_target;
956
13.9k
                    instr->i_target->b_visited = 1;
957
13.9k
                    todo++;
958
13.9k
                }
959
19.3k
            }
960
333k
            else if (instr->i_opcode == YIELD_VALUE) {
961
476
                instr->i_except = handler;
962
476
                last_yield_except_depth = except_stack->depth;
963
476
            }
964
333k
            else if (instr->i_opcode == RESUME) {
965
7.22k
                instr->i_except = handler;
966
7.22k
                if (instr->i_oparg != RESUME_AT_FUNC_START) {
967
476
                    assert(last_yield_except_depth >= 0);
968
476
                    if (last_yield_except_depth == 1) {
969
410
                        instr->i_oparg |= RESUME_OPARG_DEPTH1_MASK;
970
410
                    }
971
476
                    last_yield_except_depth = -1;
972
476
                }
973
7.22k
            }
974
325k
            else {
975
325k
                instr->i_except = handler;
976
325k
            }
977
357k
        }
978
37.9k
        if (BB_HAS_FALLTHROUGH(b) && !b->b_next->b_visited) {
979
14.6k
            assert(except_stack != NULL);
980
14.6k
            b->b_next->b_exceptstack = except_stack;
981
14.6k
            todo[0] = b->b_next;
982
14.6k
            b->b_next->b_visited = 1;
983
14.6k
            todo++;
984
14.6k
        }
985
23.2k
        else if (except_stack != NULL) {
986
20.8k
           PyMem_Free(except_stack);
987
20.8k
        }
988
37.9k
    }
989
#ifdef Py_DEBUG
990
    for (basicblock *b = entryblock; b != NULL; b = b->b_next) {
991
        assert(b->b_exceptstack == NULL);
992
    }
993
#endif
994
6.74k
    PyMem_Free(todo_stack);
995
6.74k
    return SUCCESS;
996
0
error:
997
0
    PyMem_Free(todo_stack);
998
0
    PyMem_Free(except_stack);
999
0
    return ERROR;
1000
6.74k
}
1001
1002
/***** CFG optimizations *****/
1003
1004
static int
1005
13.4k
remove_unreachable(basicblock *entryblock) {
1006
103k
    for (basicblock *b = entryblock; b != NULL; b = b->b_next) {
1007
89.5k
        b->b_predecessors = 0;
1008
89.5k
    }
1009
13.4k
    basicblock **stack = make_cfg_traversal_stack(entryblock);
1010
13.4k
    if (stack == NULL) {
1011
0
        return ERROR;
1012
0
    }
1013
13.4k
    basicblock **sp = stack;
1014
13.4k
    entryblock->b_predecessors = 1;
1015
13.4k
    *sp++ = entryblock;
1016
13.4k
    entryblock->b_visited = 1;
1017
88.6k
    while (sp > stack) {
1018
75.1k
        basicblock *b = *(--sp);
1019
75.1k
        if (b->b_next && BB_HAS_FALLTHROUGH(b)) {
1020
37.7k
            if (!b->b_next->b_visited) {
1021
32.2k
                assert(b->b_next->b_predecessors == 0);
1022
32.2k
                *sp++ = b->b_next;
1023
32.2k
                b->b_next->b_visited = 1;
1024
32.2k
            }
1025
37.7k
            b->b_next->b_predecessors++;
1026
37.7k
        }
1027
766k
        for (int i = 0; i < b->b_iused; i++) {
1028
691k
            basicblock *target;
1029
691k
            cfg_instr *instr = &b->b_instr[i];
1030
691k
            if (is_jump(instr) || is_block_push(instr)) {
1031
41.8k
                target = instr->i_target;
1032
41.8k
                if (!target->b_visited) {
1033
29.3k
                    *sp++ = target;
1034
29.3k
                    target->b_visited = 1;
1035
29.3k
                }
1036
41.8k
                target->b_predecessors++;
1037
41.8k
            }
1038
691k
        }
1039
75.1k
    }
1040
13.4k
    PyMem_Free(stack);
1041
1042
    /* Delete unreachable instructions */
1043
103k
    for (basicblock *b = entryblock; b != NULL; b = b->b_next) {
1044
89.5k
       if (b->b_predecessors == 0) {
1045
14.4k
            b->b_iused = 0;
1046
14.4k
            b->b_except_handler = 0;
1047
14.4k
       }
1048
89.5k
    }
1049
13.4k
    return SUCCESS;
1050
13.4k
}
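
The mark phase above is a plain depth-first reachability pass; the same idea on an adjacency-list toy CFG (illustrative encoding, not the basicblock layout):

#include <stdio.h>

#define N 5

int main(void)
{
    /* succ[b][k]: successors of block b, -1-terminated; block 4 is unreachable */
    int succ[N][3] = {
        {1, 2, -1}, {3, -1, -1}, {3, -1, -1}, {-1, -1, -1}, {0, -1, -1},
    };
    int visited[N] = {0};
    int stack[N], sp = 0;
    stack[sp++] = 0;          /* entry block */
    visited[0] = 1;
    while (sp > 0) {
        int b = stack[--sp];
        for (int k = 0; k < 3 && succ[b][k] >= 0; k++) {
            int t = succ[b][k];
            if (!visited[t]) {
                visited[t] = 1;
                stack[sp++] = t;
            }
        }
    }
    for (int b = 0; b < N; b++) {
        printf("block %d: %s\n", b,
               visited[b] ? "reachable" : "dead (b_iused would be zeroed)");
    }
    return 0;
}
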
1051
1052
static int
1053
247k
basicblock_remove_redundant_nops(basicblock *bb) {
1054
    /* Remove NOPs when legal to do so. */
1055
247k
    int dest = 0;
1056
247k
    int prev_lineno = -1;
1057
1.97M
    for (int src = 0; src < bb->b_iused; src++) {
1058
1.72M
        int lineno = bb->b_instr[src].i_loc.lineno;
1059
1.72M
        if (bb->b_instr[src].i_opcode == NOP) {
1060
            /* Eliminate no-op if it doesn't have a line number */
1061
41.0k
            if (lineno < 0) {
1062
5.83k
                continue;
1063
5.83k
            }
1064
            /* or, if the previous instruction had the same line number. */
1065
35.2k
            if (prev_lineno == lineno) {
1066
28.5k
                continue;
1067
28.5k
            }
1068
            /* or, if the next instruction has the same line number or no line number */
1069
6.67k
            if (src < bb->b_iused - 1) {
1070
5.76k
                int next_lineno = bb->b_instr[src+1].i_loc.lineno;
1071
5.76k
                if (next_lineno == lineno) {
1072
3.84k
                    continue;
1073
3.84k
                }
1074
1.91k
                if (next_lineno < 0) {
1075
0
                    bb->b_instr[src+1].i_loc = bb->b_instr[src].i_loc;
1076
0
                    continue;
1077
0
                }
1078
1.91k
            }
1079
913
            else {
1080
913
                basicblock *next = next_nonempty_block(bb->b_next);
1081
                /* or, if it is the last instruction in the BB and the next BB has the same line number */
1082
913
                if (next) {
1083
913
                    location next_loc = NO_LOCATION;
1084
913
                    for (int next_i=0; next_i < next->b_iused; next_i++) {
1085
913
                        cfg_instr *instr = &next->b_instr[next_i];
1086
913
                        if (instr->i_opcode == NOP && instr->i_loc.lineno < 0) {
1087
                            /* Skip over NOPs without a location, they will be removed */
1088
0
                            continue;
1089
0
                        }
1090
913
                        next_loc = instr->i_loc;
1091
913
                        break;
1092
913
                    }
1093
913
                    if (lineno == next_loc.lineno) {
1094
7
                        continue;
1095
7
                    }
1096
913
                }
1097
913
            }
1098
1099
6.67k
        }
1100
1.69M
        if (dest != src) {
1101
159k
            bb->b_instr[dest] = bb->b_instr[src];
1102
159k
        }
1103
1.69M
        dest++;
1104
1.69M
        prev_lineno = lineno;
1105
1.69M
    }
1106
247k
    assert(dest <= bb->b_iused);
1107
247k
    int num_removed = bb->b_iused - dest;
1108
247k
    bb->b_iused = dest;
1109
247k
    memset(&bb->b_instr[dest], 0, sizeof(cfg_instr) * num_removed);
1110
247k
    return num_removed;
1111
247k
}
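
Stripped of the line-number heuristics, the dest/src loop above is the classic in-place filter; a reduced sketch (invented instruction type):

#include <assert.h>
#include <string.h>

typedef struct { int opcode; } sinstr;
#define S_NOP 0

/* Remove all NOPs in place; return how many were removed. */
static int compact(sinstr *instr, int used)
{
    int dest = 0;
    for (int src = 0; src < used; src++) {
        if (instr[src].opcode == S_NOP) {
            continue;               /* drop it */
        }
        if (dest != src) {
            instr[dest] = instr[src];
        }
        dest++;
    }
    int removed = used - dest;
    memset(&instr[dest], 0, sizeof(sinstr) * (size_t)removed);
    return removed;
}

int main(void)
{
    sinstr code[] = { {1}, {S_NOP}, {2}, {S_NOP}, {3} };
    int removed = compact(code, 5);
    assert(removed == 2 && code[0].opcode == 1
           && code[1].opcode == 2 && code[2].opcode == 3);
    return 0;
}
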
1112
1113
static int
1114
23.1k
remove_redundant_nops(cfg_builder *g) {
1115
23.1k
    int changes = 0;
1116
225k
    for (basicblock *b = g->g_entryblock; b != NULL; b = b->b_next) {
1117
202k
        int change = basicblock_remove_redundant_nops(b);
1118
202k
        RETURN_IF_ERROR(change);
1119
202k
        changes += change;
1120
202k
    }
1121
23.1k
    return changes;
1122
23.1k
}
1123
1124
static int
1125
remove_redundant_nops_and_pairs(basicblock *entryblock)
1126
6.74k
{
1127
6.74k
    bool done = false;
1128
1129
13.4k
    while (! done) {
1130
6.74k
        done = true;
1131
6.74k
        cfg_instr *prev_instr = NULL;
1132
6.74k
        cfg_instr *instr = NULL;
1133
51.8k
        for (basicblock *b = entryblock; b != NULL; b = b->b_next) {
1134
45.0k
            RETURN_IF_ERROR(basicblock_remove_redundant_nops(b));
1135
45.0k
            if (IS_LABEL(b->b_label)) {
1136
                /* this block is a jump target, forget instr */
1137
19.3k
                instr = NULL;
1138
19.3k
            }
1139
377k
            for (int i = 0; i < b->b_iused; i++) {
1140
332k
                prev_instr = instr;
1141
332k
                instr = &b->b_instr[i];
1142
332k
                int prev_opcode = prev_instr ? prev_instr->i_opcode : 0;
1143
332k
                int prev_oparg = prev_instr ? prev_instr->i_oparg : 0;
1144
332k
                int opcode = instr->i_opcode;
1145
332k
                bool is_redundant_pair = false;
1146
332k
                if (opcode == POP_TOP) {
1147
7.56k
                   if (prev_opcode == LOAD_CONST || prev_opcode == LOAD_SMALL_INT) {
1148
0
                       is_redundant_pair = true;
1149
0
                   }
1150
7.56k
                   else if (prev_opcode == COPY && prev_oparg == 1) {
1151
0
                       is_redundant_pair = true;
1152
0
                   }
1153
7.56k
                }
1154
332k
                if (is_redundant_pair) {
1155
0
                    INSTR_SET_OP0(prev_instr, NOP);
1156
0
                    INSTR_SET_OP0(instr, NOP);
1157
0
                    done = false;
1158
0
                }
1159
332k
            }
1160
45.0k
            if ((instr && is_jump(instr)) || !BB_HAS_FALLTHROUGH(b)) {
1161
32.4k
                instr = NULL;
1162
32.4k
            }
1163
45.0k
        }
1164
6.74k
    }
1165
6.74k
    return SUCCESS;
1166
6.74k
}
1167
1168
static int
1169
16.4k
remove_redundant_jumps(cfg_builder *g) {
1170
    /* If a non-empty block ends with a jump instruction, check if the next
1171
     * non-empty block reached through normal flow control is the target
1172
     * of that jump. If it is, then the jump instruction is redundant and
1173
     * can be deleted.
1174
     *
1175
     * Return the number of changes applied, or -1 on error.
1176
     */
1177
1178
16.4k
    int changes = 0;
1179
173k
    for (basicblock *b = g->g_entryblock; b != NULL; b = b->b_next) {
1180
157k
        cfg_instr *last = basicblock_last_instr(b);
1181
157k
        if (last == NULL) {
1182
22.0k
            continue;
1183
22.0k
        }
1184
135k
        assert(!IS_ASSEMBLER_OPCODE(last->i_opcode));
1185
135k
        if (IS_UNCONDITIONAL_JUMP_OPCODE(last->i_opcode)) {
1186
19.0k
            basicblock* jump_target = next_nonempty_block(last->i_target);
1187
19.0k
            if (jump_target == NULL) {
1188
0
                PyErr_SetString(PyExc_SystemError, "jump with NULL target");
1189
0
                return ERROR;
1190
0
            }
1191
19.0k
            basicblock *next = next_nonempty_block(b->b_next);
1192
19.0k
            if (jump_target == next) {
1193
635
                changes++;
1194
635
                INSTR_SET_OP0(last, NOP);
1195
635
            }
1196
19.0k
        }
1197
135k
    }
1198
1199
16.4k
    return changes;
1200
16.4k
}
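
In effect the pass turns a trailing JUMP into a NOP whenever its target is the next non-empty block in layout order anyway; a toy predicate for that test (invented block type, not the real basicblock):

#include <stdio.h>

/* Blocks hold only a trailing jump target here (NULL = fall through). */
typedef struct jb { struct jb *next; struct jb *target; int empty; } jb;

static jb *skip_empty(jb *b)
{
    while (b && b->empty) {
        b = b->next;
    }
    return b;
}

/* Return 1 if b's trailing jump is redundant: its target is just the
   next non-empty block in layout order, mirroring the test above. */
static int jump_is_redundant(jb *b)
{
    return b->target != NULL && skip_empty(b->target) == skip_empty(b->next);
}

int main(void)
{
    jb c = { NULL, NULL, 0 };          /* final block */
    jb e = { &c, NULL, 1 };            /* empty block between b and c */
    jb b = { &e, &c, 0 };              /* ends with JUMP c */
    printf("redundant: %d\n", jump_is_redundant(&b));   /* prints 1 */
    return 0;
}
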
1201
1202
static inline bool
1203
53.7k
basicblock_has_no_lineno(basicblock *b) {
1204
63.9k
    for (int i = 0; i < b->b_iused; i++) {
1205
58.9k
        if (b->b_instr[i].i_loc.lineno >= 0) {
1206
48.6k
            return false;
1207
48.6k
        }
1208
58.9k
    }
1209
5.07k
    return true;
1210
53.7k
}
1211
1212
/* Maximum size of basic block that should be copied in optimizer */
1213
2.16k
#define MAX_COPY_SIZE 4
1214
1215
/* If this block ends with an unconditional jump to a small exit block or
1216
 * a block that has no line numbers (and no fallthrough), then
1217
 * remove the jump and extend this block with the target.
1218
 * Returns 1 if extended, 0 if no change, and -1 on error.
1219
 */
1220
static int
1221
69.3k
basicblock_inline_small_or_no_lineno_blocks(basicblock *bb) {
1222
69.3k
    cfg_instr *last = basicblock_last_instr(bb);
1223
69.3k
    if (last == NULL) {
1224
0
        return 0;
1225
0
    }
1226
69.3k
    if (!IS_UNCONDITIONAL_JUMP_OPCODE(last->i_opcode)) {
1227
57.2k
        return 0;
1228
57.2k
    }
1229
12.0k
    basicblock *target = last->i_target;
1230
12.0k
    bool small_exit_block = (basicblock_exits_scope(target) &&
1231
12.0k
                             target->b_iused <= MAX_COPY_SIZE);
1232
12.0k
    bool no_lineno_no_fallthrough = (basicblock_has_no_lineno(target) &&
1233
12.0k
                                     !BB_HAS_FALLTHROUGH(target));
1234
12.0k
    if (small_exit_block || no_lineno_no_fallthrough) {
1235
2.49k
        assert(is_jump(last));
1236
2.49k
        int removed_jump_opcode = last->i_opcode;
1237
2.49k
        INSTR_SET_OP0(last, NOP);
1238
2.49k
        RETURN_IF_ERROR(basicblock_append_instructions(bb, target));
1239
2.49k
        if (no_lineno_no_fallthrough) {
1240
2.10k
            last = basicblock_last_instr(bb);
1241
2.10k
            if (IS_UNCONDITIONAL_JUMP_OPCODE(last->i_opcode) &&
1242
2.10k
                removed_jump_opcode == JUMP)
1243
52
            {
1244
                /* Make sure we don't lose eval breaker checks */
1245
52
                last->i_opcode = JUMP;
1246
52
            }
1247
2.10k
        }
1248
2.49k
        target->b_predecessors--;
1249
2.49k
        return 1;
1250
2.49k
    }
1251
9.60k
    return 0;
1252
12.0k
}
1253
1254
static int
1255
6.74k
inline_small_or_no_lineno_blocks(basicblock *entryblock) {
1256
6.74k
    bool changes;
1257
7.82k
    do {
1258
7.82k
        changes = false;
1259
77.1k
        for (basicblock *b = entryblock; b != NULL; b = b->b_next) {
1260
69.3k
            int res = basicblock_inline_small_or_no_lineno_blocks(b);
1261
69.3k
            RETURN_IF_ERROR(res);
1262
69.3k
            if (res) {
1263
2.49k
                changes = true;
1264
2.49k
            }
1265
69.3k
        }
1266
7.82k
    } while(changes); /* every change removes a jump, ensuring convergence */
1267
6.74k
    return changes;
1268
6.74k
}
1269
1270
// Attempt to eliminate jumps to jumps by updating inst to jump to
1271
// target->i_target using the provided opcode. Return whether or not the
1272
// optimization was successful.
1273
static bool
1274
jump_thread(basicblock *bb, cfg_instr *inst, cfg_instr *target, int opcode)
1275
696
{
1276
696
    assert(is_jump(inst));
1277
696
    assert(is_jump(target));
1278
696
    assert(inst == basicblock_last_instr(bb));
1279
    // bpo-45773: If inst->i_target == target->i_target, then nothing actually
1280
    // changes (and we fall into an infinite loop):
1281
696
    if (inst->i_target != target->i_target) {
1282
        /* Change inst to NOP and append a jump to target->i_target. The
1283
         * NOP will be removed later if it's not needed for the lineno.
1284
         */
1285
696
        INSTR_SET_OP0(inst, NOP);
1286
1287
696
        RETURN_IF_ERROR(
1288
696
            basicblock_add_jump(
1289
696
                bb, opcode, target->i_target, target->i_loc));
1290
1291
696
        return true;
1292
696
    }
1293
0
    return false;
1294
696
}
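
Threading repeatedly retargets through blocks that do nothing but jump; the guard against inst->i_target == target->i_target is what stops a self-loop. A toy chain-follow with the same guard (sketch; cycles longer than one block are not handled here):

#include <stdio.h>

int main(void)
{
    /* trampoline[i]: block that a bare "JUMP"-only block i forwards to
       (-1 = block with real code). */
    int trampoline[] = { 3, -1, 1, 2 };
    int target = 0;
    for (;;) {
        int next = trampoline[target];
        if (next < 0 || next == target) {
            break;                      /* real code, or a self-loop: stop */
        }
        target = next;
    }
    printf("threaded target: %d\n", target);   /* 1, via 0 -> 3 -> 2 -> 1 */
    return 0;
}
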
1295
1296
static int
1297
loads_const(int opcode)
1298
17.9k
{
1299
17.9k
    return OPCODE_HAS_CONST(opcode) || opcode == LOAD_SMALL_INT;
1300
17.9k
}
1301
1302
/* Returns new reference */
1303
static PyObject*
1304
get_const_value(int opcode, int oparg, PyObject *co_consts)
1305
73.3k
{
1306
73.3k
    PyObject *constant = NULL;
1307
73.3k
    assert(loads_const(opcode));
1308
73.3k
    if (opcode == LOAD_CONST) {
1309
72.5k
        constant = PyList_GET_ITEM(co_consts, oparg);
1310
72.5k
    }
1311
73.3k
    if (opcode == LOAD_SMALL_INT) {
1312
757
        return PyLong_FromLong(oparg);
1313
757
    }
1314
1315
72.5k
    if (constant == NULL) {
1316
0
        PyErr_SetString(PyExc_SystemError,
1317
0
                        "Internal error: failed to get value of a constant");
1318
0
        return NULL;
1319
0
    }
1320
72.5k
    return Py_NewRef(constant);
1321
72.5k
}
1322
1323
// Steals a reference to newconst.
1324
static int
1325
add_const(PyObject *newconst, PyObject *consts, PyObject *const_cache)
1326
2.29k
{
1327
2.29k
    if (_PyCompile_ConstCacheMergeOne(const_cache, &newconst) < 0) {
1328
0
        Py_DECREF(newconst);
1329
0
        return -1;
1330
0
    }
1331
1332
2.29k
    Py_ssize_t index;
1333
104k
    for (index = 0; index < PyList_GET_SIZE(consts); index++) {
1334
102k
        if (PyList_GET_ITEM(consts, index) == newconst) {
1335
687
            break;
1336
687
        }
1337
102k
    }
1338
2.29k
    if (index == PyList_GET_SIZE(consts)) {
1339
1.60k
        if ((size_t)index >= (size_t)INT_MAX - 1) {
1340
0
            PyErr_SetString(PyExc_OverflowError, "too many constants");
1341
0
            Py_DECREF(newconst);
1342
0
            return -1;
1343
0
        }
1344
1.60k
        if (PyList_Append(consts, newconst)) {
1345
0
            Py_DECREF(newconst);
1346
0
            return -1;
1347
0
        }
1348
1.60k
    }
1349
2.29k
    Py_DECREF(newconst);
1350
2.29k
    return (int)index;
1351
2.29k
}
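
add_const scans for an identical object before appending, so each distinct constant gets exactly one slot; a toy equivalent over plain ints (sketch, not the PyList API):

#include <stdio.h>

#define MAXCONSTS 16

/* Return the index of v in consts, appending it first if absent; -1 if full. */
static int intern_const(int *consts, int *n, int v)
{
    for (int i = 0; i < *n; i++) {
        if (consts[i] == v) {
            return i;           /* already interned: reuse the slot */
        }
    }
    if (*n >= MAXCONSTS) {
        return -1;
    }
    consts[*n] = v;
    return (*n)++;
}

int main(void)
{
    int consts[MAXCONSTS], n = 0;
    int a = intern_const(consts, &n, 42);
    int b = intern_const(consts, &n, 7);
    int c = intern_const(consts, &n, 42);
    printf("%d %d %d\n", a, b, c);   /* prints "0 1 0": 42 is reused */
    return 0;
}
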
1352
1353
/*
1354
  Traverse the instructions of the basic block backwards from index "start", skipping over NOPs.
1355
  Try to collect "size" number of consecutive instructions that load constants into the array "instrs".
1356
  Caller must make sure that the length of "instrs" is sufficient to hold at least "size" instructions.
1357
1358
  Return a boolean indicating whether "size" such instructions were found.
1359
*/
1360
static bool
1361
get_const_loading_instrs(basicblock *bb, int start, cfg_instr **instrs, int size)
1362
11.7k
{
1363
11.7k
    assert(start < bb->b_iused);
1364
11.7k
    assert(size >= 0);
1365
11.7k
    assert(size <= _PY_STACK_USE_GUIDELINE);
1366
1367
20.1k
    for (; start >= 0 && size > 0; start--) {
1368
17.9k
        cfg_instr *instr = &bb->b_instr[start];
1369
17.9k
        if (instr->i_opcode == NOP) {
1370
266
            continue;
1371
266
        }
1372
17.7k
        if (!loads_const(instr->i_opcode)) {
1373
9.54k
            return false;
1374
9.54k
        }
1375
8.16k
        instrs[--size] = instr;
1376
8.16k
    }
1377
1378
2.22k
    return size == 0;
1379
11.7k
}
1380
1381
/*
1382
  Change every instruction in "instrs" to NOP and set its location to NO_LOCATION.
1383
  Caller must make sure "instrs" has at least "size" elements.
1384
*/
1385
static void
1386
nop_out(cfg_instr **instrs, int size)
1387
2.36k
{
1388
7.17k
    for (int i = 0; i < size; i++) {
1389
4.80k
        cfg_instr *instr = instrs[i];
1390
4.80k
        assert(instr->i_opcode != NOP);
1391
4.80k
        INSTR_SET_OP0(instr, NOP);
1392
4.80k
        INSTR_SET_LOC(instr, NO_LOCATION);
1393
4.80k
    }
1394
2.36k
}
1395
1396
/* Does not steal reference to "newconst".
1397
   Return 1 if changed instruction to LOAD_SMALL_INT.
1398
   Return 0 if could not change instruction to LOAD_SMALL_INT.
1399
   Return -1 on error.
1400
*/
1401
static int
1402
maybe_instr_make_load_smallint(cfg_instr *instr, PyObject *newconst,
1403
                               PyObject *consts, PyObject *const_cache)
1404
68.9k
{
1405
68.9k
    if (PyLong_CheckExact(newconst)) {
1406
23.9k
        int overflow;
1407
23.9k
        long val = PyLong_AsLongAndOverflow(newconst, &overflow);
1408
23.9k
        if (val == -1 && PyErr_Occurred()) {
1409
0
            return -1;
1410
0
        }
1411
23.9k
        if (!overflow && _PY_IS_SMALL_INT(val)) {
1412
18.5k
            assert(_Py_IsImmortal(newconst));
1413
18.5k
            INSTR_SET_OP1(instr, LOAD_SMALL_INT, (int)val);
1414
18.5k
            return 1;
1415
18.5k
        }
1416
23.9k
    }
1417
50.3k
    return 0;
1418
68.9k
}
1419
1420
1421
/* Steals reference to "newconst" */
1422
static int
1423
instr_make_load_const(cfg_instr *instr, PyObject *newconst,
1424
                      PyObject *consts, PyObject *const_cache)
1425
2.00k
{
1426
2.00k
    int res = maybe_instr_make_load_smallint(instr, newconst, consts, const_cache);
1427
2.00k
    if (res < 0) {
1428
0
        Py_DECREF(newconst);
1429
0
        return ERROR;
1430
0
    }
1431
2.00k
    if (res > 0) {
1432
9
        return SUCCESS;
1433
9
    }
1434
2.00k
    int oparg = add_const(newconst, consts, const_cache);
1435
2.00k
    RETURN_IF_ERROR(oparg);
1436
2.00k
    INSTR_SET_OP1(instr, LOAD_CONST, oparg);
1437
2.00k
    return SUCCESS;
1438
2.00k
}
1439
1440
/* Replace LOAD_CONST c1, LOAD_CONST c2 ... LOAD_CONST cn, BUILD_TUPLE n
1441
   with    LOAD_CONST (c1, c2, ... cn).
1442
   The consts table must still be in list form so that the
1443
   new constant (c1, c2, ... cn) can be appended.
1444
   Called with "i" being the index of the BUILD_TUPLE instruction.
1445
*/
1446
static int
1447
fold_tuple_of_constants(basicblock *bb, int i, PyObject *consts, PyObject *const_cache)
1448
3.82k
{
1449
    /* Pre-conditions */
1450
3.82k
    assert(PyDict_CheckExact(const_cache));
1451
3.82k
    assert(PyList_CheckExact(consts));
1452
1453
3.82k
    cfg_instr *instr = &bb->b_instr[i];
1454
3.82k
    assert(instr->i_opcode == BUILD_TUPLE);
1455
1456
3.82k
    int seq_size = instr->i_oparg;
1457
3.82k
    if (seq_size > _PY_STACK_USE_GUIDELINE) {
1458
0
        return SUCCESS;
1459
0
    }
1460
1461
3.82k
    cfg_instr *const_instrs[_PY_STACK_USE_GUIDELINE];
1462
3.82k
    if (!get_const_loading_instrs(bb, i-1, const_instrs, seq_size)) {
1463
        /* not a const sequence */
1464
2.40k
        return SUCCESS;
1465
2.40k
    }
1466
1467
1.42k
    PyObject *const_tuple = PyTuple_New((Py_ssize_t)seq_size);
1468
1.42k
    if (const_tuple == NULL) {
1469
0
        return ERROR;
1470
0
    }
1471
1472
3.84k
    for (int i = 0; i < seq_size; i++) {
1473
2.42k
        cfg_instr *inst = const_instrs[i];
1474
2.42k
        assert(loads_const(inst->i_opcode));
1475
2.42k
        PyObject *element = get_const_value(inst->i_opcode, inst->i_oparg, consts);
1476
2.42k
        if (element == NULL) {
1477
0
            Py_DECREF(const_tuple);
1478
0
            return ERROR;
1479
0
        }
1480
2.42k
        PyTuple_SET_ITEM(const_tuple, i, element);
1481
2.42k
    }
1482
1483
1.42k
    nop_out(const_instrs, seq_size);
1484
1.42k
    return instr_make_load_const(instr, const_tuple, consts, const_cache);
1485
1.42k
}
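
The net effect of the fold is easiest to see on a toy instruction stream (invented opcodes; the co_consts index used below is hypothetical):

#include <stdio.h>

enum { P_LOAD_CONST, P_BUILD_TUPLE, P_NOP };
typedef struct { int op; int arg; } pinstr;

int main(void)
{
    /* LOAD_CONST 1; LOAD_CONST 2; BUILD_TUPLE 2 -> NOP; NOP; LOAD_CONST 3,
       where 3 stands for a co_consts slot holding the prebuilt tuple (1, 2). */
    pinstr code[] = { {P_LOAD_CONST, 1}, {P_LOAD_CONST, 2}, {P_BUILD_TUPLE, 2} };
    int i = 2;                         /* position of BUILD_TUPLE */
    int n = code[i].arg;
    int foldable = (i >= n);
    for (int k = i - n; foldable && k < i; k++) {
        foldable = (code[k].op == P_LOAD_CONST);   /* all inputs constant? */
    }
    if (foldable) {
        for (int k = i - n; k < i; k++) {
            code[k].op = P_NOP;        /* cf. nop_out() */
        }
        code[i].op = P_LOAD_CONST;     /* cf. instr_make_load_const() */
        code[i].arg = 3;
    }
    for (int k = 0; k <= i; k++) {
        printf("op=%d arg=%d\n", code[k].op, code[k].arg);
    }
    return 0;
}
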
1486
1487
/* Replace:
1488
    BUILD_LIST 0
1489
    LOAD_CONST c1
1490
    LIST_APPEND 1
1491
    LOAD_CONST c2
1492
    LIST_APPEND 1
1493
    ...
1494
    LOAD_CONST cN
1495
    LIST_APPEND 1
1496
    CALL_INTRINSIC_1 INTRINSIC_LIST_TO_TUPLE
1497
   with:
1498
    LOAD_CONST (c1, c2, ... cN)
1499
*/
1500
static int
1501
fold_constant_intrinsic_list_to_tuple(basicblock *bb, int i,
1502
                                      PyObject *consts, PyObject *const_cache)
1503
99
{
1504
99
    assert(PyDict_CheckExact(const_cache));
1505
99
    assert(PyList_CheckExact(consts));
1506
99
    assert(i >= 0);
1507
99
    assert(i < bb->b_iused);
1508
1509
99
    cfg_instr *intrinsic = &bb->b_instr[i];
1510
99
    assert(intrinsic->i_opcode == CALL_INTRINSIC_1);
1511
99
    assert(intrinsic->i_oparg == INTRINSIC_LIST_TO_TUPLE);
1512
1513
99
    int consts_found = 0;
1514
99
    bool expect_append = true;
1515
1516
282
    for (int pos = i - 1; pos >= 0; pos--) {
1517
282
        cfg_instr *instr = &bb->b_instr[pos];
1518
282
        int opcode = instr->i_opcode;
1519
282
        int oparg = instr->i_oparg;
1520
1521
282
        if (opcode == NOP) {
1522
0
            continue;
1523
0
        }
1524
1525
282
        if (opcode == BUILD_LIST && oparg == 0) {
1526
2
            if (!expect_append) {
1527
                /* Not a sequence start. */
1528
0
                return SUCCESS;
1529
0
            }
1530
1531
            /* Sequence start, we are done. */
1532
2
            PyObject *newconst = PyTuple_New((Py_ssize_t)consts_found);
1533
2
            if (newconst == NULL) {
1534
0
                return ERROR;
1535
0
            }
1536
1537
184
            for (int newpos = i - 1; newpos >= pos; newpos--) {
1538
182
                instr = &bb->b_instr[newpos];
1539
182
                if (instr->i_opcode == NOP) {
1540
0
                    continue;
1541
0
                }
1542
182
                if (loads_const(instr->i_opcode)) {
1543
90
                    PyObject *constant = get_const_value(instr->i_opcode, instr->i_oparg, consts);
1544
90
                    if (constant == NULL) {
1545
0
                        Py_DECREF(newconst);
1546
0
                        return ERROR;
1547
0
                    }
1548
90
                    assert(consts_found > 0);
1549
90
                    PyTuple_SET_ITEM(newconst, --consts_found, constant);
1550
90
                }
1551
182
                nop_out(&instr, 1);
1552
182
            }
1553
2
            assert(consts_found == 0);
1554
2
            return instr_make_load_const(intrinsic, newconst, consts, const_cache);
1555
2
        }
1556
1557
280
        if (expect_append) {
1558
188
            if (opcode != LIST_APPEND || oparg != 1) {
1559
96
                return SUCCESS;
1560
96
            }
1561
188
        }
1562
92
        else {
1563
92
            if (!loads_const(opcode)) {
1564
1
                return SUCCESS;
1565
1
            }
1566
91
            consts_found++;
1567
91
        }
1568
1569
183
        expect_append = !expect_append;
1570
183
    }
1571
1572
    /* Did not find sequence start. */
1573
0
    return SUCCESS;
1574
99
}
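The backward scan above is compact enough to model directly. A hedged Python
sketch (the helper and its (opcode, oparg) instruction encoding are
hypothetical, for illustration only, not part of CPython):

    def match_const_append_run(instrs, i):
        # Starting just below CALL_INTRINSIC_1 at index i, expect
        # alternating LIST_APPEND(1) / constant-load pairs down to a
        # BUILD_LIST(0). Return the number of constants collected, or
        # None if the pattern breaks.
        consts_found, expect_append = 0, True
        for pos in range(i - 1, -1, -1):
            op, arg = instrs[pos]
            if op == "NOP":
                continue
            if op == "BUILD_LIST" and arg == 0:
                return consts_found if expect_append else None
            if expect_append:
                if (op, arg) != ("LIST_APPEND", 1):
                    return None
            else:
                if op not in ("LOAD_CONST", "LOAD_SMALL_INT"):
                    return None
                consts_found += 1
            expect_append = not expect_append
        return None   # never found the BUILD_LIST(0) sequence start

    run = [("BUILD_LIST", 0), ("LOAD_CONST", 0), ("LIST_APPEND", 1),
           ("LOAD_SMALL_INT", 2), ("LIST_APPEND", 1)]
    assert match_const_append_run(run, len(run)) == 2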
1575
1576
2.56k
#define MIN_CONST_SEQUENCE_SIZE 3
1577
/*
1578
Optimize lists and sets for:
1579
    1. "for" loop, comprehension or "in"/"not in" tests:
1580
           Change literal list or set of constants into constant
1581
           tuple or frozenset respectively. Change list of
1582
           non-constants into tuple.
1583
    2. Constant literal lists/sets with length >= MIN_CONST_SEQUENCE_SIZE:
1584
           Replace LOAD_CONST c1, LOAD_CONST c2 ... LOAD_CONST cN, BUILD_LIST N
1585
           with BUILD_LIST 0, LOAD_CONST (c1, c2, ... cN), LIST_EXTEND 1,
1586
           or BUILD_SET & SET_UPDATE respectively.
1587
*/
1588
static int
1589
optimize_lists_and_sets(basicblock *bb, int i, int nextop,
1590
                        PyObject *consts, PyObject *const_cache)
1591
1.28k
{
1592
1.28k
    assert(PyDict_CheckExact(const_cache));
1593
1.28k
    assert(PyList_CheckExact(consts));
1594
1595
1.28k
    cfg_instr *instr = &bb->b_instr[i];
1596
1.28k
    assert(instr->i_opcode == BUILD_LIST || instr->i_opcode == BUILD_SET);
1597
1598
1.28k
    bool contains_or_iter = nextop == GET_ITER || nextop == CONTAINS_OP;
1599
1.28k
    int seq_size = instr->i_oparg;
1600
1.28k
    if (seq_size > _PY_STACK_USE_GUIDELINE ||
1601
1.28k
        (seq_size < MIN_CONST_SEQUENCE_SIZE && !contains_or_iter))
1602
1.04k
    {
1603
1.04k
        return SUCCESS;
1604
1.04k
    }
1605
1606
234
    cfg_instr *const_instrs[_PY_STACK_USE_GUIDELINE];
1607
234
    if (!get_const_loading_instrs(bb, i-1, const_instrs, seq_size)) {  /* not a const sequence */
1608
56
        if (contains_or_iter && instr->i_opcode == BUILD_LIST) {
1609
            /* iterate over a tuple instead of list */
1610
0
            INSTR_SET_OP1(instr, BUILD_TUPLE, instr->i_oparg);
1611
0
        }
1612
56
        return SUCCESS;
1613
56
    }
1614
1615
178
    PyObject *const_result = PyTuple_New((Py_ssize_t)seq_size);
1616
178
    if (const_result == NULL) {
1617
0
        return ERROR;
1618
0
    }
1619
1620
1.76k
    for (int i = 0; i < seq_size; i++) {
1621
1.58k
        cfg_instr *inst = const_instrs[i];
1622
1.58k
        assert(loads_const(inst->i_opcode));
1623
1.58k
        PyObject *element = get_const_value(inst->i_opcode, inst->i_oparg, consts);
1624
1.58k
        if (element == NULL) {
1625
0
            Py_DECREF(const_result);
1626
0
            return ERROR;
1627
0
        }
1628
1.58k
        PyTuple_SET_ITEM(const_result, i, element);
1629
1.58k
    }
1630
1631
178
    if (instr->i_opcode == BUILD_SET) {
1632
51
        PyObject *frozenset = PyFrozenSet_New(const_result);
1633
51
        if (frozenset == NULL) {
1634
0
            Py_DECREF(const_result);
1635
0
            return ERROR;
1636
0
        }
1637
51
        Py_SETREF(const_result, frozenset);
1638
51
    }
1639
1640
178
    int index = add_const(const_result, consts, const_cache);
1641
178
    RETURN_IF_ERROR(index);
1642
178
    nop_out(const_instrs, seq_size);
1643
1644
178
    if (contains_or_iter) {
1645
51
        INSTR_SET_OP1(instr, LOAD_CONST, index);
1646
51
    }
1647
127
    else {
1648
127
        assert(i >= 2);
1649
127
        assert(instr->i_opcode == BUILD_LIST || instr->i_opcode == BUILD_SET);
1650
1651
127
        INSTR_SET_LOC(&bb->b_instr[i-2], instr->i_loc);
1652
1653
127
        INSTR_SET_OP1(&bb->b_instr[i-2], instr->i_opcode, 0);
1654
127
        INSTR_SET_OP1(&bb->b_instr[i-1], LOAD_CONST, index);
1655
127
        INSTR_SET_OP1(&bb->b_instr[i], instr->i_opcode == BUILD_LIST ? LIST_EXTEND : SET_UPDATE, 1);
1656
127
    }
1657
178
    return SUCCESS;
1658
178
}
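Both cases are visible from Python with dis (a sketch; exact opcodes and
whether a given literal is folded depend on the CPython version):

    import dis

    # Case 1: membership tests build no list or set at run time -- the
    # operand becomes a constant tuple / frozenset.
    dis.dis(compile("x in [1, 2, 3]", "<example>", "exec"))
    dis.dis(compile("x in {1, 2, 3}", "<example>", "exec"))

    # Case 2: a materialized constant list of length >= 3 is built as
    # BUILD_LIST 0 / LOAD_CONST (1, 2, 3) / LIST_EXTEND 1, replacing
    # one constant load per element with a single load.
    dis.dis(compile("v = [1, 2, 3]", "<example>", "exec"))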
1659
1660
/* Check whether the total number of items in the (possibly nested) collection obj exceeds
1661
 * limit. Return a negative number if it does, and a non-negative number otherwise.
1662
 * Used to avoid creating constants which are slow to hash.
1663
 */
1664
static Py_ssize_t
1665
const_folding_check_complexity(PyObject *obj, Py_ssize_t limit)
1666
0
{
1667
0
    if (PyTuple_Check(obj)) {
1668
0
        Py_ssize_t i;
1669
0
        limit -= PyTuple_GET_SIZE(obj);
1670
0
        for (i = 0; limit >= 0 && i < PyTuple_GET_SIZE(obj); i++) {
1671
0
            limit = const_folding_check_complexity(PyTuple_GET_ITEM(obj, i), limit);
1672
0
            if (limit < 0) {
1673
0
                return limit;
1674
0
            }
1675
0
        }
1676
0
    }
1677
0
    return limit;
1678
0
}
1679
1680
32
#define MAX_INT_SIZE           128  /* bits */
1681
0
#define MAX_COLLECTION_SIZE    256  /* items */
1682
10
#define MAX_STR_SIZE          4096  /* characters */
1683
0
#define MAX_TOTAL_ITEMS       1024  /* including nested collections */
1684
1685
static PyObject *
1686
const_folding_safe_multiply(PyObject *v, PyObject *w)
1687
23
{
1688
23
    if (PyLong_Check(v) && PyLong_Check(w) &&
1689
23
        !_PyLong_IsZero((PyLongObject *)v) && !_PyLong_IsZero((PyLongObject *)w)
1690
23
    ) {
1691
3
        int64_t vbits = _PyLong_NumBits(v);
1692
3
        int64_t wbits = _PyLong_NumBits(w);
1693
3
        assert(vbits >= 0);
1694
3
        assert(wbits >= 0);
1695
3
        if (vbits + wbits > MAX_INT_SIZE) {
1696
0
            return NULL;
1697
0
        }
1698
3
    }
1699
20
    else if (PyLong_Check(v) && PyTuple_Check(w)) {
1700
0
        Py_ssize_t size = PyTuple_GET_SIZE(w);
1701
0
        if (size) {
1702
0
            long n = PyLong_AsLong(v);
1703
0
            if (n < 0 || n > MAX_COLLECTION_SIZE / size) {
1704
0
                return NULL;
1705
0
            }
1706
0
            if (n && const_folding_check_complexity(w, MAX_TOTAL_ITEMS / n) < 0) {
1707
0
                return NULL;
1708
0
            }
1709
0
        }
1710
0
    }
1711
20
    else if (PyLong_Check(v) && (PyUnicode_Check(w) || PyBytes_Check(w))) {
1712
10
        Py_ssize_t size = PyUnicode_Check(w) ? PyUnicode_GET_LENGTH(w) :
1713
10
                                               PyBytes_GET_SIZE(w);
1714
10
        if (size) {
1715
10
            long n = PyLong_AsLong(v);
1716
10
            if (n < 0 || n > MAX_STR_SIZE / size) {
1717
3
                return NULL;
1718
3
            }
1719
10
        }
1720
10
    }
1721
10
    else if (PyLong_Check(w) &&
1722
10
             (PyTuple_Check(v) || PyUnicode_Check(v) || PyBytes_Check(v)))
1723
10
    {
1724
10
        return const_folding_safe_multiply(w, v);
1725
10
    }
1726
1727
10
    return PyNumber_Multiply(v, w);
1728
23
}
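The guards above only bound the size of the folded result. A sketch of the
observable effect (the 4096-character cutoff is an internal implementation
detail and may change between versions):

    import dis

    dis.dis(compile("s = 'x' * 4096", "<example>", "exec"))    # may fold
    dis.dis(compile("s = 'x' * 100000", "<example>", "exec"))  # multiply kept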
1729
1730
static PyObject *
1731
const_folding_safe_power(PyObject *v, PyObject *w)
1732
8
{
1733
8
    if (PyLong_Check(v) && PyLong_Check(w) &&
1734
8
        !_PyLong_IsZero((PyLongObject *)v) && _PyLong_IsPositive((PyLongObject *)w)
1735
8
    ) {
1736
8
        int64_t vbits = _PyLong_NumBits(v);
1737
8
        size_t wbits = PyLong_AsSize_t(w);
1738
8
        assert(vbits >= 0);
1739
8
        if (wbits == (size_t)-1) {
1740
0
            return NULL;
1741
0
        }
1742
8
        if ((uint64_t)vbits > MAX_INT_SIZE / wbits) {
1743
0
            return NULL;
1744
0
        }
1745
8
    }
1746
1747
8
    return PyNumber_Power(v, w, Py_None);
1748
8
}
1749
1750
static PyObject *
1751
const_folding_safe_lshift(PyObject *v, PyObject *w)
1752
7
{
1753
7
    if (PyLong_Check(v) && PyLong_Check(w) &&
1754
7
        !_PyLong_IsZero((PyLongObject *)v) && !_PyLong_IsZero((PyLongObject *)w)
1755
7
    ) {
1756
7
        int64_t vbits = _PyLong_NumBits(v);
1757
7
        size_t wbits = PyLong_AsSize_t(w);
1758
7
        assert(vbits >= 0);
1759
7
        if (wbits == (size_t)-1) {
1760
0
            return NULL;
1761
0
        }
1762
7
        if (wbits > MAX_INT_SIZE || (uint64_t)vbits > MAX_INT_SIZE - wbits) {
1763
0
            return NULL;
1764
0
        }
1765
7
    }
1766
1767
7
    return PyNumber_Lshift(v, w);
1768
7
}
1769
1770
static PyObject *
1771
const_folding_safe_mod(PyObject *v, PyObject *w)
1772
0
{
1773
0
    if (PyUnicode_Check(v) || PyBytes_Check(v)) {
1774
0
        return NULL;
1775
0
    }
1776
1777
0
    return PyNumber_Remainder(v, w);
1778
0
}
1779
1780
static PyObject *
1781
eval_const_binop(PyObject *left, int op, PyObject *right)
1782
42
{
1783
42
    assert(left != NULL && right != NULL);
1784
42
    assert(op >= 0 && op <= NB_OPARG_LAST);
1785
1786
42
    PyObject *result = NULL;
1787
42
    switch (op) {
1788
4
        case NB_ADD:
1789
4
            result = PyNumber_Add(left, right);
1790
4
            break;
1791
0
        case NB_SUBTRACT:
1792
0
            result = PyNumber_Subtract(left, right);
1793
0
            break;
1794
13
        case NB_MULTIPLY:
1795
13
            result = const_folding_safe_multiply(left, right);
1796
13
            break;
1797
1
        case NB_TRUE_DIVIDE:
1798
1
            result = PyNumber_TrueDivide(left, right);
1799
1
            break;
1800
0
        case NB_FLOOR_DIVIDE:
1801
0
            result = PyNumber_FloorDivide(left, right);
1802
0
            break;
1803
0
        case NB_REMAINDER:
1804
0
            result = const_folding_safe_mod(left, right);
1805
0
            break;
1806
8
        case NB_POWER:
1807
8
            result = const_folding_safe_power(left, right);
1808
8
            break;
1809
7
        case NB_LSHIFT:
1810
7
            result = const_folding_safe_lshift(left, right);
1811
7
            break;
1812
0
        case NB_RSHIFT:
1813
0
            result = PyNumber_Rshift(left, right);
1814
0
            break;
1815
0
        case NB_OR:
1816
0
            result = PyNumber_Or(left, right);
1817
0
            break;
1818
0
        case NB_XOR:
1819
0
            result = PyNumber_Xor(left, right);
1820
0
            break;
1821
0
        case NB_AND:
1822
0
            result = PyNumber_And(left, right);
1823
0
            break;
1824
9
        case NB_SUBSCR:
1825
9
            result = PyObject_GetItem(left, right);
1826
9
            break;
1827
0
        case NB_MATRIX_MULTIPLY:
1828
            // No builtin constants implement matrix multiplication
1829
0
            break;
1830
0
        default:
1831
0
            Py_UNREACHABLE();
1832
42
    }
1833
42
    return result;
1834
42
}
1835
1836
static int
1837
fold_const_binop(basicblock *bb, int i, PyObject *consts, PyObject *const_cache)
1838
7.01k
{
1839
7.05k
    #define BINOP_OPERAND_COUNT 2
1840
7.01k
    assert(PyDict_CheckExact(const_cache));
1841
7.01k
    assert(PyList_CheckExact(consts));
1842
1843
7.01k
    cfg_instr *binop = &bb->b_instr[i];
1844
7.01k
    assert(binop->i_opcode == BINARY_OP);
1845
1846
7.01k
    cfg_instr *operands_instrs[BINOP_OPERAND_COUNT];
1847
7.01k
    if (!get_const_loading_instrs(bb, i-1, operands_instrs, BINOP_OPERAND_COUNT)) {
1848
        /* not a const sequence */
1849
6.97k
        return SUCCESS;
1850
6.97k
    }
1851
1852
42
    cfg_instr *lhs_instr = operands_instrs[0];
1853
42
    assert(loads_const(lhs_instr->i_opcode));
1854
42
    PyObject *lhs = get_const_value(lhs_instr->i_opcode, lhs_instr->i_oparg, consts);
1855
42
    if (lhs == NULL) {
1856
0
        return ERROR;
1857
0
    }
1858
1859
42
    cfg_instr *rhs_instr = operands_instrs[1];
1860
42
    assert(loads_const(rhs_instr->i_opcode));
1861
42
    PyObject *rhs = get_const_value(rhs_instr->i_opcode, rhs_instr->i_oparg, consts);
1862
42
    if (rhs == NULL) {
1863
0
        Py_DECREF(lhs);
1864
0
        return ERROR;
1865
0
    }
1866
1867
42
    PyObject *newconst = eval_const_binop(lhs, binop->i_oparg, rhs);
1868
42
    Py_DECREF(lhs);
1869
42
    Py_DECREF(rhs);
1870
42
    if (newconst == NULL) {
1871
3
        if (PyErr_ExceptionMatches(PyExc_KeyboardInterrupt)) {
1872
0
            return ERROR;
1873
0
        }
1874
3
        PyErr_Clear();
1875
3
        return SUCCESS;
1876
3
    }
1877
1878
39
    nop_out(operands_instrs, BINOP_OPERAND_COUNT);
1879
39
    return instr_make_load_const(binop, newconst, consts, const_cache);
1880
42
}
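From Python, a fully constant BINARY_OP chain folds to a single constant load
(some of this folding may already happen at the AST level, but the effect is
the same), while one non-constant operand leaves the operation in place --
the common case, as the 6.97k-of-7.01k early returns in the counts above
show. A sketch:

    import dis

    dis.dis(compile("day = 60 * 60 * 24", "<example>", "exec"))  # one load, 86400
    dis.dis(compile("n = 60 * m", "<example>", "exec"))          # BINARY_OP survives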
1881
1882
static PyObject *
1883
eval_const_unaryop(PyObject *operand, int opcode, int oparg)
1884
542
{
1885
542
    assert(operand != NULL);
1886
542
    assert(
1887
542
        opcode == UNARY_NEGATIVE ||
1888
542
        opcode == UNARY_INVERT ||
1889
542
        opcode == UNARY_NOT ||
1890
542
        (opcode == CALL_INTRINSIC_1 && oparg == INTRINSIC_UNARY_POSITIVE)
1891
542
    );
1892
542
    PyObject *result;
1893
542
    switch (opcode) {
1894
542
        case UNARY_NEGATIVE:
1895
542
            result = PyNumber_Negative(operand);
1896
542
            break;
1897
0
        case UNARY_INVERT:
1898
            // XXX: This should be removed once the ~bool deprecation expires.
1899
0
            if (PyBool_Check(operand)) {
1900
0
                return NULL;
1901
0
            }
1902
0
            result = PyNumber_Invert(operand);
1903
0
            break;
1904
0
        case UNARY_NOT: {
1905
0
            int r = PyObject_IsTrue(operand);
1906
0
            if (r < 0) {
1907
0
                return NULL;
1908
0
            }
1909
0
            result = PyBool_FromLong(!r);
1910
0
            break;
1911
0
        }
1912
0
        case CALL_INTRINSIC_1:
1913
0
            if (oparg != INTRINSIC_UNARY_POSITIVE) {
1914
0
                Py_UNREACHABLE();
1915
0
            }
1916
0
            result = PyNumber_Positive(operand);
1917
0
            break;
1918
0
        default:
1919
0
            Py_UNREACHABLE();
1920
542
    }
1921
542
    return result;
1922
542
}
1923
1924
static int
1925
fold_const_unaryop(basicblock *bb, int i, PyObject *consts, PyObject *const_cache)
1926
689
{
1927
1.23k
    #define UNARYOP_OPERAND_COUNT 1
1928
689
    assert(PyDict_CheckExact(const_cache));
1929
689
    assert(PyList_CheckExact(consts));
1930
689
    cfg_instr *unaryop = &bb->b_instr[i];
1931
1932
689
    cfg_instr *operand_instr;
1933
689
    if (!get_const_loading_instrs(bb, i-1, &operand_instr, UNARYOP_OPERAND_COUNT)) {
1934
        /* not a const */
1935
147
        return SUCCESS;
1936
147
    }
1937
1938
542
    assert(loads_const(operand_instr->i_opcode));
1939
542
    PyObject *operand = get_const_value(
1940
542
        operand_instr->i_opcode,
1941
542
        operand_instr->i_oparg,
1942
542
        consts
1943
542
    );
1944
542
    if (operand == NULL) {
1945
0
        return ERROR;
1946
0
    }
1947
1948
542
    PyObject *newconst = eval_const_unaryop(operand, unaryop->i_opcode, unaryop->i_oparg);
1949
542
    Py_DECREF(operand);
1950
542
    if (newconst == NULL) {
1951
0
        if (PyErr_ExceptionMatches(PyExc_KeyboardInterrupt)) {
1952
0
            return ERROR;
1953
0
        }
1954
0
        PyErr_Clear();
1955
0
        return SUCCESS;
1956
0
    }
1957
1958
542
    if (unaryop->i_opcode == UNARY_NOT) {
1959
0
        assert(PyBool_Check(newconst));
1960
0
    }
1961
542
    nop_out(&operand_instr, UNARYOP_OPERAND_COUNT);
1962
542
    return instr_make_load_const(unaryop, newconst, consts, const_cache);
1963
542
}
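A sketch of the unary folds from Python (whether the fold happens here or
earlier in the AST optimizer, the observable result is a single constant
load):

    import dis

    dis.dis(compile("n = -5", "<example>", "exec"))   # folds to a load of -5
    dis.dis(compile("m = ~16", "<example>", "exec"))  # UNARY_INVERT folds too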
1964
1965
1.66k
#define VISITED (-1)
1966
1967
// Replace an arbitrary run of SWAPs and NOPs with an optimal one that has the
1968
// same effect.
1969
static int
1970
swaptimize(basicblock *block, int *ix)
1971
1.67k
{
1972
    // NOTE: "./python -m test test_patma" serves as a good, quick stress test
1973
    // for this function. Make sure to blow away cached *.pyc files first!
1974
1.67k
    assert(*ix < block->b_iused);
1975
1.67k
    cfg_instr *instructions = &block->b_instr[*ix];
1976
    // Find the length of the current sequence of SWAPs and NOPs, and record the
1977
    // maximum depth of the stack manipulations:
1978
1.67k
    assert(instructions[0].i_opcode == SWAP);
1979
1.67k
    int depth = instructions[0].i_oparg;
1980
1.67k
    int len = 0;
1981
1.67k
    int more = false;
1982
1.67k
    int limit = block->b_iused - *ix;
1983
1.88k
    while (++len < limit) {
1984
1.88k
        int opcode = instructions[len].i_opcode;
1985
1.88k
        if (opcode == SWAP) {
1986
128
            depth = Py_MAX(depth, instructions[len].i_oparg);
1987
128
            more = true;
1988
128
        }
1989
1.75k
        else if (opcode != NOP) {
1990
1.67k
            break;
1991
1.67k
        }
1992
1.88k
    }
1993
    // It's already optimal if there's only one SWAP:
1994
1.67k
    if (!more) {
1995
1.55k
        return SUCCESS;
1996
1.55k
    }
1997
    // Create an array with elements {0, 1, 2, ..., depth - 1}:
1998
128
    int *stack = PyMem_Malloc(depth * sizeof(int));
1999
128
    if (stack == NULL) {
2000
0
        PyErr_NoMemory();
2001
0
        return ERROR;
2002
0
    }
2003
512
    for (int i = 0; i < depth; i++) {
2004
384
        stack[i] = i;
2005
384
    }
2006
    // Simulate the combined effect of these instructions by "running" them on
2007
    // our "stack":
2008
384
    for (int i = 0; i < len; i++) {
2009
256
        if (instructions[i].i_opcode == SWAP) {
2010
256
            int oparg = instructions[i].i_oparg;
2011
256
            int top = stack[0];
2012
            // SWAPs are 1-indexed:
2013
256
            stack[0] = stack[oparg - 1];
2014
256
            stack[oparg - 1] = top;
2015
256
        }
2016
256
    }
2017
    // Now we can begin! Our approach here is based on a solution to a closely
2018
    // related problem (https://cs.stackexchange.com/a/13938). It's easiest to
2019
    // think of this algorithm as determining the steps needed to efficiently
2020
    // "un-shuffle" our stack. By performing the moves in *reverse* order,
2021
    // though, we can efficiently *shuffle* it! For this reason, we will be
2022
    // replacing instructions starting from the *end* of the run. Since the
2023
    // solution is optimal, we don't need to worry about running out of space:
2024
128
    int current = len - 1;
2025
512
    for (int i = 0; i < depth; i++) {
2026
        // Skip items that have already been visited, or just happen to be in
2027
        // the correct location:
2028
384
        if (stack[i] == VISITED || stack[i] == i) {
2029
256
            continue;
2030
256
        }
2031
        // Okay, we've found an item that hasn't been visited. It forms a cycle
2032
        // with other items; traversing the cycle and swapping each item with
2033
        // the next will put them all in the correct place. The weird
2034
        // loop-and-a-half is necessary to insert 0 into every cycle, since we
2035
        // can only swap from that position:
2036
128
        int j = i;
2037
512
        while (true) {
2038
            // Skip the actual swap if our item is zero, since swapping the top
2039
            // item with itself is pointless:
2040
512
            if (j) {
2041
256
                assert(0 <= current);
2042
                // SWAPs are 1-indexed:
2043
256
                instructions[current].i_opcode = SWAP;
2044
256
                instructions[current--].i_oparg = j + 1;
2045
256
            }
2046
512
            if (stack[j] == VISITED) {
2047
                // Completed the cycle:
2048
128
                assert(j == i);
2049
128
                break;
2050
128
            }
2051
384
            int next_j = stack[j];
2052
384
            stack[j] = VISITED;
2053
384
            j = next_j;
2054
384
        }
2055
128
    }
2056
    // NOP out any unused instructions:
2057
128
    while (0 <= current) {
2058
0
        INSTR_SET_OP0(&instructions[current--], NOP);
2059
0
    }
2060
128
    PyMem_Free(stack);
2061
128
    *ix += len - 1;
2062
128
    return SUCCESS;
2063
128
}
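The cycle decomposition is small enough to model directly. A hedged Python
sketch of the core loop (hypothetical helper mirroring the C code above;
`stack` is the simulated permutation of 0..n-1):

    VISITED = -1

    def swap_opargs(stack):
        # Return the SWAP opargs in the order the C code writes them
        # (back to front), threading position 0 through each cycle.
        ops = []
        for i in range(len(stack)):
            if stack[i] == VISITED or stack[i] == i:
                continue          # already visited or already in place
            j = i
            while True:
                if j:             # SWAPs are 1-indexed
                    ops.append(j + 1)
                if stack[j] == VISITED:
                    break         # completed the cycle
                stack[j], j = VISITED, stack[j]
        return ops

    # Three consecutive SWAP(2)s simulate to the permutation [1, 0];
    # the model replaces the whole run with a single SWAP(2).
    assert swap_opargs([1, 0]) == [2]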
2064
2065
2066
// This list is pretty small, since it's only okay to reorder opcodes that:
2067
// - can't affect control flow (like jumping or raising exceptions)
2068
// - can't invoke arbitrary code (besides finalizers)
2069
// - only touch the TOS (and pop it when finished)
2070
#define SWAPPABLE(opcode) \
2071
2.82k
    ((opcode) == STORE_FAST || \
2072
2.82k
     (opcode) == STORE_FAST_MAYBE_NULL || \
2073
2.82k
     (opcode) == POP_TOP)
2074
2075
#define STORES_TO(instr) \
2076
396
    (((instr).i_opcode == STORE_FAST || \
2077
396
      (instr).i_opcode == STORE_FAST_MAYBE_NULL) \
2078
396
     ? (instr).i_oparg : -1)
2079
2080
static int
2081
next_swappable_instruction(basicblock *block, int i, int lineno)
2082
2.78k
{
2083
2.85k
    while (++i < block->b_iused) {
2084
2.83k
        cfg_instr *instruction = &block->b_instr[i];
2085
2.83k
        if (0 <= lineno && instruction->i_loc.lineno != lineno) {
2086
            // Optimizing across this instruction could cause user-visible
2087
            // changes in the names bound between line tracing events!
2088
12
            return -1;
2089
12
        }
2090
2.82k
        if (instruction->i_opcode == NOP) {
2091
74
            continue;
2092
74
        }
2093
2.74k
        if (SWAPPABLE(instruction->i_opcode)) {
2094
1.28k
            return i;
2095
1.28k
        }
2096
1.46k
        return -1;
2097
2.74k
    }
2098
20
    return -1;
2099
2.78k
}
2100
2101
// Attempt to apply SWAPs statically by swapping *instructions* rather than
2102
// stack items. For example, we can replace SWAP(2), POP_TOP, STORE_FAST(42)
2103
// with the more efficient NOP, STORE_FAST(42), POP_TOP.
2104
static void
2105
apply_static_swaps(basicblock *block, int i)
2106
1.67k
{
2107
    // SWAPs are to our left, and potential swaperands are to our right:
2108
1.93k
    for (; 0 <= i; i--) {
2109
1.83k
        assert(i < block->b_iused);
2110
1.83k
        cfg_instr *swap = &block->b_instr[i];
2111
1.83k
        if (swap->i_opcode != SWAP) {
2112
158
            if (swap->i_opcode == NOP || SWAPPABLE(swap->i_opcode)) {
2113
                // Nope, but we know how to handle these. Keep looking:
2114
79
                continue;
2115
79
            }
2116
            // We can't reason about what this instruction does. Bail:
2117
79
            return;
2118
158
        }
2119
1.67k
        int j = next_swappable_instruction(block, i, -1);
2120
1.67k
        if (j < 0) {
2121
816
            return;
2122
816
        }
2123
863
        int k = j;
2124
863
        int lineno = block->b_instr[j].i_loc.lineno;
2125
1.28k
        for (int count = swap->i_oparg - 1; 0 < count; count--) {
2126
1.10k
            k = next_swappable_instruction(block, k, lineno);
2127
1.10k
            if (k < 0) {
2128
683
                return;
2129
683
            }
2130
1.10k
        }
2131
        // The reordering is not safe if the two instructions to be swapped
2132
        // store to the same location, or if any intervening instruction stores
2133
        // to the same location as either of them.
2134
180
        int store_j = STORES_TO(block->b_instr[j]);
2135
180
        int store_k = STORES_TO(block->b_instr[k]);
2136
180
        if (store_j >= 0 || store_k >= 0) {
2137
180
            if (store_j == store_k) {
2138
0
                return;
2139
0
            }
2140
216
            for (int idx = j + 1; idx < k; idx++) {
2141
36
                int store_idx = STORES_TO(block->b_instr[idx]);
2142
36
                if (store_idx >= 0 && (store_idx == store_j || store_idx == store_k)) {
2143
0
                    return;
2144
0
                }
2145
36
            }
2146
180
        }
2147
2148
        // Success!
2149
180
        INSTR_SET_OP0(swap, NOP);
2150
180
        cfg_instr temp = block->b_instr[j];
2151
180
        block->b_instr[j] = block->b_instr[k];
2152
180
        block->b_instr[k] = temp;
2153
180
    }
2154
1.67k
}
2155
2156
static int
2157
basicblock_optimize_load_const(PyObject *const_cache, basicblock *bb, PyObject *consts)
2158
45.0k
{
2159
45.0k
    assert(PyDict_CheckExact(const_cache));
2160
45.0k
    assert(PyList_CheckExact(consts));
2161
45.0k
    int opcode = 0;
2162
45.0k
    int oparg = 0;
2163
405k
    for (int i = 0; i < bb->b_iused; i++) {
2164
360k
        cfg_instr *inst = &bb->b_instr[i];
2165
360k
        if (inst->i_opcode == LOAD_CONST) {
2166
66.9k
            PyObject *constant = get_const_value(inst->i_opcode, inst->i_oparg, consts);
2167
66.9k
            int res = maybe_instr_make_load_smallint(inst, constant, consts, const_cache);
2168
66.9k
            Py_DECREF(constant);
2169
66.9k
            if (res < 0) {
2170
0
                return ERROR;
2171
0
            }
2172
66.9k
        }
2173
360k
        bool is_copy_of_load_const = (opcode == LOAD_CONST &&
2174
360k
                                      inst->i_opcode == COPY &&
2175
360k
                                      inst->i_oparg == 1);
2176
360k
        if (! is_copy_of_load_const) {
2177
360k
            opcode = inst->i_opcode;
2178
360k
            oparg = inst->i_oparg;
2179
360k
        }
2180
360k
        assert(!IS_ASSEMBLER_OPCODE(opcode));
2181
360k
        if (opcode != LOAD_CONST && opcode != LOAD_SMALL_INT) {
2182
293k
            continue;
2183
293k
        }
2184
67.0k
        int nextop = i+1 < bb->b_iused ? bb->b_instr[i+1].i_opcode : 0;
2185
67.0k
        switch(nextop) {
2186
110
            case POP_JUMP_IF_FALSE:
2187
113
            case POP_JUMP_IF_TRUE:
2188
113
            case JUMP_IF_FALSE:
2189
113
            case JUMP_IF_TRUE:
2190
113
            {
2191
                /* Remove LOAD_CONST const; conditional jump */
2192
113
                PyObject* cnt = get_const_value(opcode, oparg, consts);
2193
113
                if (cnt == NULL) {
2194
0
                    return ERROR;
2195
0
                }
2196
113
                int is_true = PyObject_IsTrue(cnt);
2197
113
                Py_DECREF(cnt);
2198
113
                if (is_true == -1) {
2199
0
                    return ERROR;
2200
0
                }
2201
113
                if (PyCompile_OpcodeStackEffect(nextop, 0) == -1) {
2202
                    /* POP_JUMP_IF_FALSE or POP_JUMP_IF_TRUE */
2203
113
                    INSTR_SET_OP0(inst, NOP);
2204
113
                }
2205
113
                int jump_if_true = (nextop == POP_JUMP_IF_TRUE || nextop == JUMP_IF_TRUE);
2206
113
                if (is_true == jump_if_true) {
2207
1
                    bb->b_instr[i+1].i_opcode = JUMP;
2208
1
                }
2209
112
                else {
2210
112
                    INSTR_SET_OP0(&bb->b_instr[i + 1], NOP);
2211
112
                }
2212
113
                break;
2213
113
            }
2214
1.39k
            case IS_OP:
2215
1.39k
            {
2216
                // Fold to POP_JUMP_IF_NONE:
2217
                // - LOAD_CONST(None) IS_OP(0) POP_JUMP_IF_TRUE
2218
                // - LOAD_CONST(None) IS_OP(1) POP_JUMP_IF_FALSE
2219
                // - LOAD_CONST(None) IS_OP(0) TO_BOOL POP_JUMP_IF_TRUE
2220
                // - LOAD_CONST(None) IS_OP(1) TO_BOOL POP_JUMP_IF_FALSE
2221
                // Fold to POP_JUMP_IF_NOT_NONE:
2222
                // - LOAD_CONST(None) IS_OP(0) POP_JUMP_IF_FALSE
2223
                // - LOAD_CONST(None) IS_OP(1) POP_JUMP_IF_TRUE
2224
                // - LOAD_CONST(None) IS_OP(0) TO_BOOL POP_JUMP_IF_FALSE
2225
                // - LOAD_CONST(None) IS_OP(1) TO_BOOL POP_JUMP_IF_TRUE
2226
1.39k
                PyObject *cnt = get_const_value(opcode, oparg, consts);
2227
1.39k
                if (cnt == NULL) {
2228
0
                    return ERROR;
2229
0
                }
2230
1.39k
                if (!Py_IsNone(cnt)) {
2231
18
                    Py_DECREF(cnt);
2232
18
                    break;
2233
18
                }
2234
1.37k
                if (bb->b_iused <= i + 2) {
2235
7
                    break;
2236
7
                }
2237
1.36k
                cfg_instr *is_instr = &bb->b_instr[i + 1];
2238
1.36k
                cfg_instr *jump_instr = &bb->b_instr[i + 2];
2239
                // Get rid of TO_BOOL regardless:
2240
1.36k
                if (jump_instr->i_opcode == TO_BOOL) {
2241
1.33k
                    INSTR_SET_OP0(jump_instr, NOP);
2242
1.33k
                    if (bb->b_iused <= i + 3) {
2243
0
                        break;
2244
0
                    }
2245
1.33k
                    jump_instr = &bb->b_instr[i + 3];
2246
1.33k
                }
2247
1.36k
                bool invert = is_instr->i_oparg;
2248
1.36k
                if (jump_instr->i_opcode == POP_JUMP_IF_FALSE) {
2249
1.29k
                    invert = !invert;
2250
1.29k
                }
2251
74
                else if (jump_instr->i_opcode != POP_JUMP_IF_TRUE) {
2252
25
                    break;
2253
25
                }
2254
1.34k
                INSTR_SET_OP0(inst, NOP);
2255
1.34k
                INSTR_SET_OP0(is_instr, NOP);
2256
1.34k
                jump_instr->i_opcode = invert ? POP_JUMP_IF_NOT_NONE
2257
1.34k
                                              : POP_JUMP_IF_NONE;
2258
1.34k
                break;
2259
1.36k
            }
2260
113
            case TO_BOOL:
2261
113
            {
2262
113
                PyObject *cnt = get_const_value(opcode, oparg, consts);
2263
113
                if (cnt == NULL) {
2264
0
                    return ERROR;
2265
0
                }
2266
113
                int is_true = PyObject_IsTrue(cnt);
2267
113
                Py_DECREF(cnt);
2268
113
                if (is_true == -1) {
2269
0
                    return ERROR;
2270
0
                }
2271
113
                cnt = PyBool_FromLong(is_true);
2272
113
                int index = add_const(cnt, consts, const_cache);
2273
113
                if (index < 0) {
2274
0
                    return ERROR;
2275
0
                }
2276
113
                INSTR_SET_OP0(inst, NOP);
2277
113
                INSTR_SET_OP1(&bb->b_instr[i + 1], LOAD_CONST, index);
2278
113
                break;
2279
113
            }
2280
67.0k
        }
2281
67.0k
    }
2282
45.0k
    return SUCCESS;
2283
45.0k
}
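The IS_OP case is the one most visible from ordinary Python code: every
`x is None` / `x is not None` test collapses into one specialized jump. A
sketch (which of POP_JUMP_IF_NONE / POP_JUMP_IF_NOT_NONE appears depends on
the jump polarity):

    import dis

    dis.dis(compile("if x is not None:\n    y = 1", "<example>", "exec"))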
2284
2285
static int
2286
6.74k
optimize_load_const(PyObject *const_cache, cfg_builder *g, PyObject *consts) {
2287
51.8k
    for (basicblock *b = g->g_entryblock; b != NULL; b = b->b_next) {
2288
45.0k
        RETURN_IF_ERROR(basicblock_optimize_load_const(const_cache, b, consts));
2289
45.0k
    }
2290
6.74k
    return SUCCESS;
2291
6.74k
}
2292
2293
static int
2294
optimize_basic_block(PyObject *const_cache, basicblock *bb, PyObject *consts)
2295
45.0k
{
2296
45.0k
    assert(PyDict_CheckExact(const_cache));
2297
45.0k
    assert(PyList_CheckExact(consts));
2298
45.0k
    cfg_instr nop;
2299
45.0k
    INSTR_SET_OP0(&nop, NOP);
2300
406k
    for (int i = 0; i < bb->b_iused; i++) {
2301
361k
        cfg_instr *inst = &bb->b_instr[i];
2302
361k
        cfg_instr *target;
2303
361k
        int opcode = inst->i_opcode;
2304
361k
        int oparg = inst->i_oparg;
2305
361k
        if (HAS_TARGET(opcode)) {
2306
22.1k
            assert(inst->i_target->b_iused > 0);
2307
22.1k
            target = &inst->i_target->b_instr[0];
2308
22.1k
            assert(!IS_ASSEMBLER_OPCODE(target->i_opcode));
2309
22.1k
        }
2310
339k
        else {
2311
339k
            target = &nop;
2312
339k
        }
2313
361k
        int nextop = i+1 < bb->b_iused ? bb->b_instr[i+1].i_opcode : 0;
2314
361k
        assert(!IS_ASSEMBLER_OPCODE(opcode));
2315
361k
        switch (opcode) {
2316
            /* Try to fold tuples of constants.
2317
               Skip over BUILD_TUPLE(1) UNPACK_SEQUENCE(1).
2318
               Replace BUILD_TUPLE(2) UNPACK_SEQUENCE(2) with SWAP(2).
2319
               Replace BUILD_TUPLE(3) UNPACK_SEQUENCE(3) with SWAP(3). */
2320
3.91k
            case BUILD_TUPLE:
2321
3.91k
                if (nextop == UNPACK_SEQUENCE && oparg == bb->b_instr[i+1].i_oparg) {
2322
91
                    switch(oparg) {
2323
0
                        case 1:
2324
0
                            INSTR_SET_OP0(inst, NOP);
2325
0
                            INSTR_SET_OP0(&bb->b_instr[i + 1], NOP);
2326
0
                            continue;
2327
76
                        case 2:
2328
87
                        case 3:
2329
87
                            INSTR_SET_OP0(inst, NOP);
2330
87
                            bb->b_instr[i+1].i_opcode = SWAP;
2331
87
                            continue;
2332
91
                    }
2333
91
                }
2334
3.82k
                RETURN_IF_ERROR(fold_tuple_of_constants(bb, i, consts, const_cache));
2335
3.82k
                break;
2336
1.18k
            case BUILD_LIST:
2337
1.28k
            case BUILD_SET:
2338
1.28k
                RETURN_IF_ERROR(optimize_lists_and_sets(bb, i, nextop, consts, const_cache));
2339
1.28k
                break;
2340
707
            case POP_JUMP_IF_NOT_NONE:
2341
1.40k
            case POP_JUMP_IF_NONE:
2342
1.40k
                switch (target->i_opcode) {
2343
68
                    case JUMP:
2344
68
                        i -= jump_thread(bb, inst, target, inst->i_opcode);
2345
1.40k
                }
2346
1.40k
                break;
2347
8.58k
            case POP_JUMP_IF_FALSE:
2348
8.58k
                switch (target->i_opcode) {
2349
581
                    case JUMP:
2350
581
                        i -= jump_thread(bb, inst, target, POP_JUMP_IF_FALSE);
2351
8.58k
                }
2352
8.58k
                break;
2353
8.58k
            case POP_JUMP_IF_TRUE:
2354
1.65k
                switch (target->i_opcode) {
2355
47
                    case JUMP:
2356
47
                        i -= jump_thread(bb, inst, target, POP_JUMP_IF_TRUE);
2357
1.65k
                }
2358
1.65k
                break;
2359
1.65k
            case JUMP_IF_FALSE:
2360
283
                switch (target->i_opcode) {
2361
0
                    case JUMP:
2362
0
                    case JUMP_IF_FALSE:
2363
0
                        i -= jump_thread(bb, inst, target, JUMP_IF_FALSE);
2364
0
                        continue;
2365
3
                    case JUMP_IF_TRUE:
2366
                        // No need to check for loops here, a block's b_next
2367
                        // cannot point to itself.
2368
3
                        assert(inst->i_target != inst->i_target->b_next);
2369
3
                        inst->i_target = inst->i_target->b_next;
2370
3
                        i--;
2371
3
                        continue;
2372
283
                }
2373
280
                break;
2374
280
            case JUMP_IF_TRUE:
2375
273
                switch (target->i_opcode) {
2376
0
                    case JUMP:
2377
0
                    case JUMP_IF_TRUE:
2378
0
                        i -= jump_thread(bb, inst, target, JUMP_IF_TRUE);
2379
0
                        continue;
2380
5
                    case JUMP_IF_FALSE:
2381
                        // No need to check for loops here, a block's b_next
2382
                        // cannot point to itself.
2383
5
                        assert(inst->i_target != inst->i_target->b_next);
2384
5
                        inst->i_target = inst->i_target->b_next;
2385
5
                        i--;
2386
5
                        continue;
2387
273
                }
2388
268
                break;
2389
3.41k
            case JUMP:
2390
5.70k
            case JUMP_NO_INTERRUPT:
2391
5.70k
                switch (target->i_opcode) {
2392
0
                    case JUMP:
2393
0
                        i -= jump_thread(bb, inst, target, JUMP);
2394
0
                        continue;
2395
0
                    case JUMP_NO_INTERRUPT:
2396
0
                        i -= jump_thread(bb, inst, target, opcode);
2397
0
                        continue;
2398
5.70k
                }
2399
5.70k
                break;
2400
5.70k
            case FOR_ITER:
2401
1.57k
                if (target->i_opcode == JUMP) {
2402
                    /* This will not work now because the jump (at target) could
2403
                     * be forward or backward and FOR_ITER only jumps forward. We
2404
                     * can re-enable this if ever we implement a backward version
2405
                     * of FOR_ITER.
2406
                     */
2407
                    /*
2408
                    i -= jump_thread(bb, inst, target, FOR_ITER);
2409
                    */
2410
0
                }
2411
1.57k
                break;
2412
12.9k
            case STORE_FAST:
2413
12.9k
                if (opcode == nextop &&
2414
12.9k
                    oparg == bb->b_instr[i+1].i_oparg &&
2415
12.9k
                    bb->b_instr[i].i_loc.lineno == bb->b_instr[i+1].i_loc.lineno) {
2416
31
                    bb->b_instr[i].i_opcode = POP_TOP;
2417
31
                    bb->b_instr[i].i_oparg = 0;
2418
31
                }
2419
12.9k
                break;
2420
1.80k
            case SWAP:
2421
1.80k
                if (oparg == 1) {
2422
0
                    INSTR_SET_OP0(inst, NOP);
2423
0
                }
2424
1.80k
                break;
2425
16.1k
            case LOAD_GLOBAL:
2426
16.1k
                if (nextop == PUSH_NULL && (oparg & 1) == 0) {
2427
8.01k
                    INSTR_SET_OP1(inst, LOAD_GLOBAL, oparg | 1);
2428
8.01k
                    INSTR_SET_OP0(&bb->b_instr[i + 1], NOP);
2429
8.01k
                }
2430
16.1k
                break;
2431
5.66k
            case COMPARE_OP:
2432
5.66k
                if (nextop == TO_BOOL) {
2433
2.60k
                    INSTR_SET_OP0(inst, NOP);
2434
2.60k
                    INSTR_SET_OP1(&bb->b_instr[i + 1], COMPARE_OP, oparg | 16);
2435
2.60k
                    continue;
2436
2.60k
                }
2437
3.06k
                break;
2438
3.06k
            case CONTAINS_OP:
2439
3.71k
            case IS_OP:
2440
3.71k
                if (nextop == TO_BOOL) {
2441
1.71k
                    INSTR_SET_OP0(inst, NOP);
2442
1.71k
                    INSTR_SET_OP1(&bb->b_instr[i + 1], opcode, oparg);
2443
1.71k
                    continue;
2444
1.71k
                }
2445
1.99k
                if (nextop == UNARY_NOT) {
2446
0
                    INSTR_SET_OP0(inst, NOP);
2447
0
                    int inverted = oparg ^ 1;
2448
0
                    assert(inverted == 0 || inverted == 1);
2449
0
                    INSTR_SET_OP1(&bb->b_instr[i + 1], opcode, inverted);
2450
0
                    continue;
2451
0
                }
2452
1.99k
                break;
2453
4.43k
            case TO_BOOL:
2454
4.43k
                if (nextop == TO_BOOL) {
2455
0
                    INSTR_SET_OP0(inst, NOP);
2456
0
                    continue;
2457
0
                }
2458
4.43k
                break;
2459
4.43k
            case UNARY_NOT:
2460
59
                if (nextop == TO_BOOL) {
2461
0
                    INSTR_SET_OP0(inst, NOP);
2462
0
                    INSTR_SET_OP0(&bb->b_instr[i + 1], UNARY_NOT);
2463
0
                    continue;
2464
0
                }
2465
59
                if (nextop == UNARY_NOT) {
2466
0
                    INSTR_SET_OP0(inst, NOP);
2467
0
                    INSTR_SET_OP0(&bb->b_instr[i + 1], NOP);
2468
0
                    continue;
2469
0
                }
2470
59
                _Py_FALLTHROUGH;
2471
99
            case UNARY_INVERT:
2472
687
            case UNARY_NEGATIVE:
2473
687
                RETURN_IF_ERROR(fold_const_unaryop(bb, i, consts, const_cache));
2474
687
                break;
2475
476
            case CALL_INTRINSIC_1:
2476
476
                if (oparg == INTRINSIC_LIST_TO_TUPLE) {
2477
99
                    if (nextop == GET_ITER) {
2478
0
                        INSTR_SET_OP0(inst, NOP);
2479
0
                    }
2480
99
                    else {
2481
99
                        RETURN_IF_ERROR(fold_constant_intrinsic_list_to_tuple(bb, i, consts, const_cache));
2482
99
                    }
2483
99
                }
2484
377
                else if (oparg == INTRINSIC_UNARY_POSITIVE) {
2485
2
                    RETURN_IF_ERROR(fold_const_unaryop(bb, i, consts, const_cache));
2486
2
                }
2487
476
                break;
2488
7.01k
            case BINARY_OP:
2489
7.01k
                RETURN_IF_ERROR(fold_const_binop(bb, i, consts, const_cache));
2490
7.01k
                break;
2491
361k
        }
2492
361k
    }
2493
2494
406k
    for (int i = 0; i < bb->b_iused; i++) {
2495
361k
        cfg_instr *inst = &bb->b_instr[i];
2496
361k
        if (inst->i_opcode == SWAP) {
2497
1.67k
            if (swaptimize(bb, &i) < 0) {
2498
0
                goto error;
2499
0
            }
2500
1.67k
            apply_static_swaps(bb, i);
2501
1.67k
        }
2502
361k
    }
2503
45.0k
    return SUCCESS;
2504
0
error:
2505
0
    return ERROR;
2506
45.0k
}
2507
2508
static int resolve_line_numbers(cfg_builder *g, int firstlineno);
2509
2510
static int
2511
remove_redundant_nops_and_jumps(cfg_builder *g)
2512
14.5k
{
2513
14.5k
    int removed_nops, removed_jumps;
2514
16.4k
    do {
2515
        /* Convergence is guaranteed because the number of
2516
         * redundant jumps and nops only decreases.
2517
         */
2518
16.4k
        removed_nops = remove_redundant_nops(g);
2519
16.4k
        RETURN_IF_ERROR(removed_nops);
2520
16.4k
        removed_jumps = remove_redundant_jumps(g);
2521
16.4k
        RETURN_IF_ERROR(removed_jumps);
2522
16.4k
    } while(removed_nops + removed_jumps > 0);
2523
14.5k
    return SUCCESS;
2524
14.5k
}
2525
2526
/* Perform optimizations on a control flow graph.
2527
   The consts object should still be in list form to allow new constants
2528
   to be appended.
2529
2530
   Code transformations that reduce code size initially fill the gaps with
2531
   NOPs.  Later those NOPs are removed.
2532
*/
2533
static int
2534
optimize_cfg(cfg_builder *g, PyObject *consts, PyObject *const_cache, int firstlineno)
2535
6.74k
{
2536
6.74k
    assert(PyDict_CheckExact(const_cache));
2537
6.74k
    RETURN_IF_ERROR(check_cfg(g));
2538
6.74k
    RETURN_IF_ERROR(inline_small_or_no_lineno_blocks(g->g_entryblock));
2539
6.74k
    RETURN_IF_ERROR(remove_unreachable(g->g_entryblock));
2540
6.74k
    RETURN_IF_ERROR(resolve_line_numbers(g, firstlineno));
2541
6.74k
    RETURN_IF_ERROR(optimize_load_const(const_cache, g, consts));
2542
51.8k
    for (basicblock *b = g->g_entryblock; b != NULL; b = b->b_next) {
2543
45.0k
        RETURN_IF_ERROR(optimize_basic_block(const_cache, b, consts));
2544
45.0k
    }
2545
6.74k
    RETURN_IF_ERROR(remove_redundant_nops_and_pairs(g->g_entryblock));
2546
6.74k
    RETURN_IF_ERROR(remove_unreachable(g->g_entryblock));
2547
6.74k
    RETURN_IF_ERROR(remove_redundant_nops_and_jumps(g));
2548
6.74k
    assert(no_redundant_jumps(g));
2549
6.74k
    return SUCCESS;
2550
6.74k
}
2551
2552
static void
2553
make_super_instruction(cfg_instr *inst1, cfg_instr *inst2, int super_op)
2554
12.1k
{
2555
12.1k
    int32_t line1 = inst1->i_loc.lineno;
2556
12.1k
    int32_t line2 = inst2->i_loc.lineno;
2557
    /* Skip if instructions are on different lines */
2558
12.1k
    if (line1 >= 0 && line2 >= 0 && line1 != line2) {
2559
4.42k
        return;
2560
4.42k
    }
2561
7.74k
    if (inst1->i_oparg >= 16 || inst2->i_oparg >= 16) {
2562
784
        return;
2563
784
    }
2564
6.96k
    INSTR_SET_OP1(inst1, super_op, (inst1->i_oparg << 4) | inst2->i_oparg);
2565
6.96k
    INSTR_SET_OP0(inst2, NOP);
2566
6.96k
}
2567
2568
static int
2569
insert_superinstructions(cfg_builder *g)
2570
6.74k
{
2571
51.8k
    for (basicblock *b = g->g_entryblock; b != NULL; b = b->b_next) {
2572
2573
376k
        for (int i = 0; i < b->b_iused; i++) {
2574
331k
            cfg_instr *inst = &b->b_instr[i];
2575
331k
            int nextop = i+1 < b->b_iused ? b->b_instr[i+1].i_opcode : 0;
2576
331k
            switch(inst->i_opcode) {
2577
42.1k
                case LOAD_FAST:
2578
42.1k
                    if (nextop == LOAD_FAST) {
2579
6.40k
                        make_super_instruction(inst, &b->b_instr[i + 1], LOAD_FAST_LOAD_FAST);
2580
6.40k
                    }
2581
42.1k
                    break;
2582
11.7k
                case STORE_FAST:
2583
11.7k
                    switch (nextop) {
2584
4.46k
                        case LOAD_FAST:
2585
4.46k
                            make_super_instruction(inst, &b->b_instr[i + 1], STORE_FAST_LOAD_FAST);
2586
4.46k
                            break;
2587
1.30k
                        case STORE_FAST:
2588
1.30k
                            make_super_instruction(inst, &b->b_instr[i + 1], STORE_FAST_STORE_FAST);
2589
1.30k
                            break;
2590
11.7k
                    }
2591
11.7k
                    break;
2592
331k
            }
2593
331k
        }
2594
45.0k
    }
2595
6.74k
    int res = remove_redundant_nops(g);
2596
6.74k
    assert(no_redundant_nops(g));
2597
6.74k
    return res;
2598
6.74k
}
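The two 4-bit opargs share one byte, which is why make_super_instruction()
bails when either operand is >= 16. A sketch of the packing (hypothetical
pack/unpack helpers, mirroring `(inst1->i_oparg << 4) | inst2->i_oparg`
here and the `oparg >> 4` / `oparg & 15` reads in optimize_load_fast below):

    def pack(first, second):
        assert 0 <= first < 16 and 0 <= second < 16
        return (first << 4) | second

    def unpack(oparg):
        return oparg >> 4, oparg & 15

    assert unpack(pack(3, 7)) == (3, 7)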
2599
2600
#define NOT_LOCAL -1
2601
12.5k
#define DUMMY_INSTR -1
2602
2603
typedef struct {
2604
    // Index of instruction that produced the reference or DUMMY_INSTR.
2605
    int instr;
2606
2607
    // The local to which the reference refers or NOT_LOCAL.
2608
    int local;
2609
} ref;
2610
2611
typedef struct {
2612
    ref *refs;
2613
    Py_ssize_t size;
2614
    Py_ssize_t capacity;
2615
} ref_stack;
2616
2617
static int
2618
ref_stack_push(ref_stack *stack, ref r)
2619
277k
{
2620
277k
    if (stack->size == stack->capacity) {
2621
6.74k
        Py_ssize_t new_cap = Py_MAX(32, stack->capacity * 2);
2622
6.74k
        ref *refs = PyMem_Realloc(stack->refs, sizeof(*stack->refs) * new_cap);
2623
6.74k
        if (refs == NULL) {
2624
0
            PyErr_NoMemory();
2625
0
            return -1;
2626
0
        }
2627
6.74k
        stack->refs = refs;
2628
6.74k
        stack->capacity = new_cap;
2629
6.74k
    }
2630
277k
    stack->refs[stack->size] = r;
2631
277k
    stack->size++;
2632
277k
    return 0;
2633
277k
}
2634
2635
static ref
2636
ref_stack_pop(ref_stack *stack)
2637
239k
{
2638
239k
    assert(stack->size > 0);
2639
239k
    stack->size--;
2640
239k
    ref r = stack->refs[stack->size];
2641
239k
    return r;
2642
239k
}
2643
2644
static void
2645
ref_stack_swap_top(ref_stack *stack, Py_ssize_t off)
2646
1.08k
{
2647
1.08k
    Py_ssize_t idx = stack->size - off;
2648
1.08k
    assert(idx >= 0 && idx < stack->size);
2649
1.08k
    ref tmp = stack->refs[idx];
2650
1.08k
    stack->refs[idx] = stack->refs[stack->size - 1];
2651
1.08k
    stack->refs[stack->size - 1] = tmp;
2652
1.08k
}
2653
2654
static ref
2655
ref_stack_at(ref_stack *stack, Py_ssize_t idx)
2656
51.8k
{
2657
51.8k
    assert(idx >= 0 && idx < stack->size);
2658
51.8k
    return stack->refs[idx];
2659
51.8k
}
2660
2661
static void
2662
ref_stack_clear(ref_stack *stack)
2663
33.4k
{
2664
33.4k
    stack->size = 0;
2665
33.4k
}
2666
2667
static void
2668
ref_stack_fini(ref_stack *stack)
2669
6.74k
{
2670
6.74k
    if (stack->refs != NULL) {
2671
6.74k
        PyMem_Free(stack->refs);
2672
6.74k
    }
2673
6.74k
    stack->refs = NULL;
2674
6.74k
    stack->capacity = 0;
2675
6.74k
    stack->size = 0;
2676
6.74k
}
2677
2678
typedef enum {
2679
    // The loaded reference is still on the stack when the local is killed
2680
    SUPPORT_KILLED  = 1,
2681
    // The loaded reference is stored into a local
2682
    STORED_AS_LOCAL = 2,
2683
    // The loaded reference is still on the stack at the end of the basic block
2684
    REF_UNCONSUMED  = 4,
2685
} LoadFastInstrFlag;
2686
2687
static void
2688
kill_local(uint8_t *instr_flags, ref_stack *refs, int local)
2689
12.8k
{
2690
25.2k
    for (Py_ssize_t i = 0; i < refs->size; i++) {
2691
12.4k
        ref r = ref_stack_at(refs, i);
2692
12.4k
        if (r.local == local) {
2693
12
            assert(r.instr >= 0);
2694
12
            instr_flags[r.instr] |= SUPPORT_KILLED;
2695
12
        }
2696
12.4k
    }
2697
12.8k
}
2698
2699
static void
2700
store_local(uint8_t *instr_flags, ref_stack *refs, int local, ref r)
2701
12.5k
{
2702
12.5k
    kill_local(instr_flags, refs, local);
2703
12.5k
    if (r.instr != DUMMY_INSTR) {
2704
10.8k
        instr_flags[r.instr] |= STORED_AS_LOCAL;
2705
10.8k
    }
2706
12.5k
}
2707
2708
static void
2709
load_fast_push_block(basicblock ***sp, basicblock *target,
2710
                     Py_ssize_t start_depth)
2711
34.9k
{
2712
34.9k
    assert(target->b_startdepth >= 0 && target->b_startdepth == start_depth);
2713
34.9k
    if (!target->b_visited) {
2714
26.6k
        target->b_visited = 1;
2715
26.6k
        *(*sp)++ = target;
2716
26.6k
    }
2717
34.9k
}
2718
2719
/*
2720
 * Strength reduce LOAD_FAST{_LOAD_FAST} instructions into faster variants that
2721
 * load borrowed references onto the operand stack.
2722
 *
2723
 * This is only safe when we can prove that the reference in the frame outlives
2724
 * the borrowed reference produced by the instruction. We make this tractable
2725
 * by enforcing the following lifetimes:
2726
 *
2727
 * 1. Borrowed references loaded onto the operand stack live until the end of
2728
 *    the instruction that consumes them from the stack. Any borrowed
2729
 *    references that would escape into the heap (e.g. into frame objects or
2730
 *    generators) are converted into new, strong references.
2731
 *
2732
 * 2. Locals live until they are either killed by an instruction
2733
 *    (e.g. STORE_FAST) or the frame is unwound. Any local that is overwritten
2734
 *    via `f_locals` is added to a tuple owned by the frame object.
2735
 *
2736
 * To simplify the problem of detecting which supporting references in the
2737
 * frame are killed by instructions that overwrite locals, we only allow
2738
 * borrowed references to be stored as a local in the frame if they were passed
2739
 * as an argument. {RETURN,YIELD}_VALUE convert borrowed references into new,
2740
 * strong references.
2741
 *
2742
 * Using the above, we can optimize any LOAD_FAST{_LOAD_FAST} instructions
2743
 * that meet the following criteria:
2744
 *
2745
 * 1. The produced reference must be consumed from the stack before the
2746
 *    supporting reference in the frame is killed.
2747
 *
2748
 * 2. The produced reference cannot be stored as a local.
2749
 *
2750
 * We use abstract interpretation to identify instructions that meet these
2751
 * criteria. For each basic block, we simulate the effect the bytecode has on a
2752
 * stack of abstract references and note any instructions that violate the
2753
 * criteria above. Once we've processed all the instructions in a block, any
2754
 * non-violating LOAD_FAST{_LOAD_FAST} can be optimized.
2755
 */
2756
static int
2757
optimize_load_fast(cfg_builder *g)
2758
6.74k
{
2759
6.74k
    int status;
2760
6.74k
    ref_stack refs = {0};
2761
6.74k
    int max_instrs = 0;
2762
6.74k
    basicblock *entryblock = g->g_entryblock;
2763
52.6k
    for (basicblock *b = entryblock; b != NULL; b = b->b_next) {
2764
45.8k
        max_instrs = Py_MAX(max_instrs, b->b_iused);
2765
45.8k
    }
2766
6.74k
    size_t instr_flags_size = max_instrs * sizeof(uint8_t);
2767
6.74k
    uint8_t *instr_flags = PyMem_Malloc(instr_flags_size);
2768
6.74k
    if (instr_flags == NULL) {
2769
0
        PyErr_NoMemory();
2770
0
        return ERROR;
2771
0
    }
2772
6.74k
    basicblock **blocks = make_cfg_traversal_stack(entryblock);
2773
6.74k
    if (blocks == NULL) {
2774
0
        status = ERROR;
2775
0
        goto done;
2776
0
    }
2777
6.74k
    basicblock **sp = blocks;
2778
6.74k
    *sp = entryblock;
2779
6.74k
    sp++;
2780
6.74k
    entryblock->b_startdepth = 0;
2781
6.74k
    entryblock->b_visited = 1;
2782
2783
6.74k
    #define PUSH_REF(instr, local)                \
2784
277k
        do {                                      \
2785
277k
            if (ref_stack_push(&refs, (ref){(instr), (local)}) < 0) { \
2786
0
                status = ERROR;                   \
2787
0
                goto done;                        \
2788
0
            }                                     \
2789
277k
        } while(0)
2790
2791
40.1k
    while (sp != blocks) {
2792
33.4k
        basicblock *block = *--sp;
2793
33.4k
        assert(block->b_startdepth > -1);
2794
2795
        // Reset per-block state.
2796
33.4k
        memset(instr_flags, 0, block->b_iused * sizeof(*instr_flags));
2797
2798
        // Reset the stack of refs. We don't track references on the stack
2799
        // across basic blocks, but the bytecode will expect their
2800
        // presence. Add dummy references as necessary.
2801
33.4k
        ref_stack_clear(&refs);
2802
64.7k
        for (int i = 0; i < block->b_startdepth; i++) {
2803
31.3k
            PUSH_REF(DUMMY_INSTR, NOT_LOCAL);
2804
31.3k
        }
2805
2806
349k
        for (int i = 0; i < block->b_iused; i++) {
2807
315k
            cfg_instr *instr = &block->b_instr[i];
2808
315k
            int opcode = instr->i_opcode;
2809
315k
            int oparg = instr->i_oparg;
2810
315k
            assert(opcode != EXTENDED_ARG);
2811
315k
            switch (opcode) {
2812
                // Opcodes that load and store locals
2813
2
                case DELETE_FAST: {
2814
2
                    kill_local(instr_flags, &refs, oparg);
2815
2
                    break;
2816
0
                }
2817
2818
36.7k
                case LOAD_FAST: {
2819
36.7k
                    PUSH_REF(i, oparg);
2820
36.7k
                    break;
2821
36.7k
                }
2822
2823
36.7k
                case LOAD_FAST_AND_CLEAR: {
2824
292
                    kill_local(instr_flags, &refs, oparg);
2825
292
                    PUSH_REF(i, oparg);
2826
292
                    break;
2827
292
                }
2828
2829
5.45k
                case LOAD_FAST_LOAD_FAST: {
2830
5.45k
                    PUSH_REF(i, oparg >> 4);
2831
5.45k
                    PUSH_REF(i, oparg & 15);
2832
5.45k
                    break;
2833
5.45k
                }
2834
2835
10.0k
                case STORE_FAST: {
2836
10.0k
                    ref r = ref_stack_pop(&refs);
2837
10.0k
                    store_local(instr_flags, &refs, oparg, r);
2838
10.0k
                    break;
2839
5.45k
                }
2840
2841
207
                case STORE_FAST_LOAD_FAST: {
2842
                    // STORE_FAST
2843
207
                    ref r = ref_stack_pop(&refs);
2844
207
                    store_local(instr_flags, &refs, oparg >> 4, r);
2845
                    // LOAD_FAST
2846
207
                    PUSH_REF(i, oparg & 15);
2847
207
                    break;
2848
207
                }
2849
2850
1.11k
                case STORE_FAST_STORE_FAST: {
2851
                    // STORE_FAST
2852
1.11k
                    ref r = ref_stack_pop(&refs);
2853
1.11k
                    store_local(instr_flags, &refs, oparg >> 4, r);
2854
                    // STORE_FAST
2855
1.11k
                    r = ref_stack_pop(&refs);
2856
1.11k
                    store_local(instr_flags, &refs, oparg & 15, r);
2857
1.11k
                    break;
2858
207
                }
2859
2860
                // Opcodes that shuffle values on the stack
2861
1.26k
                case COPY: {
2862
1.26k
                    assert(oparg > 0);
2863
1.26k
                    Py_ssize_t idx = refs.size - oparg;
2864
1.26k
                    ref r = ref_stack_at(&refs, idx);
2865
1.26k
                    PUSH_REF(r.instr, r.local);
2866
1.26k
                    break;
2867
1.26k
                }
2868
2869
1.26k
                case SWAP: {
2870
1.08k
                    assert(oparg >= 2);
2871
1.08k
                    ref_stack_swap_top(&refs, oparg);
2872
1.08k
                    break;
2873
1.26k
                }
2874
2875
                // We treat opcodes that do not consume all of their inputs on
2876
                // a case-by-case basis, as we have no generic way of knowing
2877
                // how many inputs should be left on the stack.
2878
2879
                // Opcodes that consume no inputs
2880
1.27k
                case FORMAT_SIMPLE:
2881
1.27k
                case GET_ANEXT:
2882
2.75k
                case GET_ITER:
2883
2.75k
                case GET_LEN:
2884
2.78k
                case GET_YIELD_FROM_ITER:
2885
3.32k
                case IMPORT_FROM:
2886
3.32k
                case MATCH_KEYS:
2887
3.32k
                case MATCH_MAPPING:
2888
3.32k
                case MATCH_SEQUENCE:
2889
3.32k
                case WITH_EXCEPT_START: {
2890
3.32k
                    int num_popped = _PyOpcode_num_popped(opcode, oparg);
2891
3.32k
                    int num_pushed = _PyOpcode_num_pushed(opcode, oparg);
2892
3.32k
                    int net_pushed = num_pushed - num_popped;
2893
3.32k
                    assert(net_pushed >= 0);
2894
5.34k
                    for (int i = 0; i < net_pushed; i++) {
2895
2.01k
                        PUSH_REF(i, NOT_LOCAL);
2896
2.01k
                    }
2897
3.32k
                    break;
2898
3.32k
                }
2899
2900
                // Opcodes that consume some inputs and push no new values
2901
3.32k
                case DICT_MERGE:
2902
790
                case DICT_UPDATE:
2903
1.32k
                case LIST_APPEND:
2904
1.53k
                case LIST_EXTEND:
2905
12.9k
                case MAP_ADD:
2906
12.9k
                case RERAISE:
2907
13.1k
                case SET_ADD:
2908
13.1k
                case SET_UPDATE: {
2909
13.1k
                    int num_popped = _PyOpcode_num_popped(opcode, oparg);
2910
13.1k
                    int num_pushed = _PyOpcode_num_pushed(opcode, oparg);
2911
13.1k
                    int net_popped = num_popped - num_pushed;
2912
13.1k
                    assert(net_popped > 0);
2913
37.6k
                    for (int i = 0; i < net_popped; i++) {
2914
24.5k
                        ref_stack_pop(&refs);
2915
24.5k
                    }
2916
13.1k
                    break;
2917
13.1k
                }
2918
2919
45
                case END_SEND:
2920
1.80k
                case SET_FUNCTION_ATTRIBUTE: {
2921
1.80k
                    assert(_PyOpcode_num_popped(opcode, oparg) == 2);
2922
1.80k
                    assert(_PyOpcode_num_pushed(opcode, oparg) == 1);
2923
1.80k
                    ref tos = ref_stack_pop(&refs);
2924
1.80k
                    ref_stack_pop(&refs);
2925
1.80k
                    PUSH_REF(tos.instr, tos.local);
2926
1.80k
                    break;
2927
1.80k
                }
2928
2929
                // Opcodes that consume some inputs and push new values
2930
1.80k
                case CHECK_EXC_MATCH: {
2931
0
                    ref_stack_pop(&refs);
2932
0
                    PUSH_REF(i, NOT_LOCAL);
2933
0
                    break;
2934
0
                }
2935
2936
1.53k
                case FOR_ITER: {
2937
1.53k
                    load_fast_push_block(&sp, instr->i_target, refs.size + 1);
2938
1.53k
                    PUSH_REF(i, NOT_LOCAL);
2939
1.53k
                    break;
2940
1.53k
                }
2941
2942
19.4k
                case LOAD_ATTR:
2943
19.6k
                case LOAD_SUPER_ATTR: {
2944
19.6k
                    ref self = ref_stack_pop(&refs);
2945
19.6k
                    if (opcode == LOAD_SUPER_ATTR) {
2946
212
                        ref_stack_pop(&refs);
2947
212
                        ref_stack_pop(&refs);
2948
212
                    }
2949
19.6k
                    PUSH_REF(i, NOT_LOCAL);
2950
19.6k
                    if (oparg & 1) {
2951
                        // A method call; conservatively assume that self is pushed
2952
                        // back onto the stack
2953
7.69k
                        PUSH_REF(self.instr, self.local);
2954
7.69k
                    }
2955
19.6k
                    break;
2956
19.6k
                }
2957
2958
19.6k
                case LOAD_SPECIAL:
2959
188
                case PUSH_EXC_INFO: {
2960
188
                    ref tos = ref_stack_pop(&refs);
2961
188
                    PUSH_REF(i, NOT_LOCAL);
2962
188
                    PUSH_REF(tos.instr, tos.local);
2963
188
                    break;
2964
188
                }
2965
2966
188
                case SEND: {
2967
45
                    load_fast_push_block(&sp, instr->i_target, refs.size);
2968
45
                    ref_stack_pop(&refs);
2969
45
                    PUSH_REF(i, NOT_LOCAL);
2970
45
                    break;
2971
45
                }
2972
2973
                // Opcodes that consume all of their inputs
2974
219k
                default: {
2975
219k
                    int num_popped = _PyOpcode_num_popped(opcode, oparg);
2976
219k
                    int num_pushed = _PyOpcode_num_pushed(opcode, oparg);
2977
219k
                    if (HAS_TARGET(instr->i_opcode)) {
2978
14.9k
                        load_fast_push_block(&sp, instr->i_target, refs.size - num_popped + num_pushed);
2979
14.9k
                    }
2980
219k
                    if (!IS_BLOCK_PUSH_OPCODE(instr->i_opcode)) {
2981
                        // Block push opcodes only affect the stack when jumping
2982
                        // to the target.
2983
398k
                        for (int j = 0; j < num_popped; j++) {
2984
178k
                            ref_stack_pop(&refs);
2985
178k
                        }
2986
383k
                        for (int j = 0; j < num_pushed; j++) {
2987
163k
                            PUSH_REF(i, NOT_LOCAL);
2988
163k
                        }
2989
219k
                    }
2990
219k
                    break;
2991
219k
                }
2992
315k
            }
2993
315k
        }
2994
2995
        // Push fallthrough block
2996
33.4k
        if (BB_HAS_FALLTHROUGH(block)) {
2997
18.3k
            assert(block->b_next != NULL);
2998
18.3k
            load_fast_push_block(&sp, block->b_next, refs.size);
2999
18.3k
        }
3000
3001
        // Mark instructions that produce values that are on the stack at the
3002
        // end of the basic block
3003
71.5k
        for (Py_ssize_t i = 0; i < refs.size; i++) {
3004
38.0k
            ref r = ref_stack_at(&refs, i);
3005
38.0k
            if (r.instr != -1) {
3006
16.3k
                instr_flags[r.instr] |= REF_UNCONSUMED;
3007
16.3k
            }
3008
38.0k
        }
3009
3010
        // Optimize instructions
3011
349k
        for (int i = 0; i < block->b_iused; i++) {
3012
315k
            if (!instr_flags[i]) {
3013
290k
                cfg_instr *instr = &block->b_instr[i];
3014
290k
                switch (instr->i_opcode) {
3015
35.2k
                    case LOAD_FAST:
3016
35.2k
                        instr->i_opcode = LOAD_FAST_BORROW;
3017
35.2k
                        break;
3018
5.36k
                    case LOAD_FAST_LOAD_FAST:
3019
5.36k
                        instr->i_opcode = LOAD_FAST_BORROW_LOAD_FAST_BORROW;
3020
5.36k
                        break;
3021
249k
                    default:
3022
249k
                        break;
3023
290k
                }
3024
290k
            }
3025
315k
        }
3026
33.4k
    }
3027
3028
6.74k
    #undef PUSH_REF
3029
3030
6.74k
    status = SUCCESS;
3031
3032
6.74k
done:
3033
6.74k
    ref_stack_fini(&refs);
3034
6.74k
    PyMem_Free(instr_flags);
3035
6.74k
    PyMem_Free(blocks);
3036
6.74k
    return status;
3037
6.74k
}
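
/*
 * A minimal, self-contained sketch of the pass above (all names here,
 * mini_instr, run_borrow_pass and friends, are made up for illustration
 * and are not CPython API).  It models a single basic block and only one
 * of the rules: a LOAD_FAST whose pushed reference is consumed before the
 * end of the block may borrow, while a reference still on the stack at
 * the end of the block forces its producer to keep a strong reference.
 * Kills (DELETE_FAST etc.), stores into locals, jumps and the seeding of
 * successor blocks with dummy refs are omitted.
 */
#include <stdio.h>

enum { MINI_LOAD_FAST, MINI_STORE_FAST, MINI_BINARY_ADD };

typedef struct { int opcode; int oparg; } mini_instr;

static void
run_borrow_pass(const mini_instr *code, int n)
{
    int stack[32];               /* producing instruction index per slot */
    int sp = 0;
    int unconsumed[32] = {0};    /* per-instruction flag, like REF_UNCONSUMED */

    for (int i = 0; i < n; i++) {
        switch (code[i].opcode) {
            case MINI_LOAD_FAST:
                stack[sp++] = i;         /* pushes the ref it produced */
                break;
            case MINI_STORE_FAST:
                sp--;                    /* consumes the top ref */
                break;
            case MINI_BINARY_ADD:
                sp -= 2;                 /* consumes two refs ... */
                stack[sp++] = i;         /* ... and pushes its result */
                break;
        }
    }
    while (sp > 0) {
        /* still live at the end of the block: cannot be a borrow */
        unconsumed[stack[--sp]] = 1;
    }
    for (int i = 0; i < n; i++) {
        if (code[i].opcode == MINI_LOAD_FAST) {
            printf("instr %d: LOAD_FAST %d -> %s\n", i, code[i].oparg,
                   unconsumed[i] ? "keeps a strong ref" : "can borrow");
        }
    }
}

int
main(void)
{
    /* roughly: y = x + x, then leave y on the stack at the block end */
    const mini_instr code[] = {
        {MINI_LOAD_FAST, 0}, {MINI_LOAD_FAST, 0}, {MINI_BINARY_ADD, 0},
        {MINI_STORE_FAST, 1}, {MINI_LOAD_FAST, 1},
    };
    run_borrow_pass(code, 5);   /* instrs 0 and 1 borrow; instr 4 does not */
    return 0;
}
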
3038
3039
// helper functions for add_checks_for_loads_of_unknown_variables
3040
static inline void
3041
maybe_push(basicblock *b, uint64_t unsafe_mask, basicblock ***sp)
3042
150k
{
3043
    // Push b if the unsafe mask is giving us any new information.
3044
    // To avoid overflowing the stack, only allow each block once.
3045
    // Use b->b_visited=1 to mean that b is currently on the stack.
3046
150k
    uint64_t both = b->b_unsafe_locals_mask | unsafe_mask;
3047
150k
    if (b->b_unsafe_locals_mask != both) {
3048
25.0k
        b->b_unsafe_locals_mask = both;
3049
        // More work left to do.
3050
25.0k
        if (!b->b_visited) {
3051
            // not on the stack, so push it.
3052
24.8k
            *(*sp)++ = b;
3053
24.8k
            b->b_visited = 1;
3054
24.8k
        }
3055
25.0k
    }
3056
150k
}
3057
3058
static void
3059
scan_block_for_locals(basicblock *b, basicblock ***sp)
3060
66.5k
{
3061
    // bit i is set if local i is potentially uninitialized
3062
66.5k
    uint64_t unsafe_mask = b->b_unsafe_locals_mask;
3063
461k
    for (int i = 0; i < b->b_iused; i++) {
3064
395k
        cfg_instr *instr = &b->b_instr[i];
3065
395k
        assert(instr->i_opcode != EXTENDED_ARG);
3066
395k
        if (instr->i_except != NULL) {
3067
77.6k
            maybe_push(instr->i_except, unsafe_mask, sp);
3068
77.6k
        }
3069
395k
        if (instr->i_oparg >= 64) {
3070
26.9k
            continue;
3071
26.9k
        }
3072
368k
        assert(instr->i_oparg >= 0);
3073
368k
        uint64_t bit = (uint64_t)1 << instr->i_oparg;
3074
368k
        switch (instr->i_opcode) {
3075
345
            case DELETE_FAST:
3076
939
            case LOAD_FAST_AND_CLEAR:
3077
2.12k
            case STORE_FAST_MAYBE_NULL:
3078
2.12k
                unsafe_mask |= bit;
3079
2.12k
                break;
3080
25.0k
            case STORE_FAST:
3081
25.0k
                unsafe_mask &= ~bit;
3082
25.0k
                break;
3083
111
            case LOAD_FAST_CHECK:
3084
                // If this doesn't raise, then the local is defined.
3085
111
                unsafe_mask &= ~bit;
3086
111
                break;
3087
81.6k
            case LOAD_FAST:
3088
81.6k
                if (unsafe_mask & bit) {
3089
111
                    instr->i_opcode = LOAD_FAST_CHECK;
3090
111
                }
3091
81.6k
                unsafe_mask &= ~bit;
3092
81.6k
                break;
3093
368k
        }
3094
368k
    }
3095
66.5k
    if (b->b_next && BB_HAS_FALLTHROUGH(b)) {
3096
35.4k
        maybe_push(b->b_next, unsafe_mask, sp);
3097
35.4k
    }
3098
66.5k
    cfg_instr *last = basicblock_last_instr(b);
3099
66.5k
    if (last && is_jump(last)) {
3100
32.2k
        assert(last->i_target != NULL);
3101
32.2k
        maybe_push(last->i_target, unsafe_mask, sp);
3102
32.2k
    }
3103
66.5k
}
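
/*
 * Standalone sketch of the bitmask dataflow above, for a single block
 * with made-up opcode names: bit i of unsafe_mask is set while local i
 * may be uninitialized.  STORE_FAST clears the bit, DELETE_FAST sets it,
 * and a LOAD_FAST that sees its bit set would be rewritten to
 * LOAD_FAST_CHECK (and then clears the bit, since a load that did not
 * raise proves the local is bound).
 */
#include <stdint.h>
#include <stdio.h>

enum { TOY_LOAD, TOY_STORE, TOY_DELETE };
typedef struct { int op; int local; } toy_instr;

static void
scan(const toy_instr *code, int n, uint64_t unsafe_mask)
{
    for (int i = 0; i < n; i++) {
        uint64_t bit = (uint64_t)1 << code[i].local;
        switch (code[i].op) {
            case TOY_STORE:
                unsafe_mask &= ~bit;    /* definitely initialized now */
                break;
            case TOY_DELETE:
                unsafe_mask |= bit;     /* may be unbound again */
                break;
            case TOY_LOAD:
                printf("load of local %d needs check: %s\n", code[i].local,
                       (unsafe_mask & bit) ? "yes (LOAD_FAST_CHECK)" : "no");
                unsafe_mask &= ~bit;    /* if it didn't raise, it's bound */
                break;
        }
    }
}

int
main(void)
{
    /* two non-parameter locals, both unsafe on entry to the block */
    const toy_instr code[] = {
        {TOY_STORE, 0}, {TOY_LOAD, 0},   /* no: just stored */
        {TOY_LOAD, 1},                   /* yes: never stored */
        {TOY_DELETE, 0}, {TOY_LOAD, 0},  /* yes: unbound again after del */
    };
    scan(code, 5, ((uint64_t)1 << 0) | ((uint64_t)1 << 1));
    return 0;
}
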
3104
3105
static int
3106
fast_scan_many_locals(basicblock *entryblock, int nlocals)
3107
0
{
3108
0
    assert(nlocals > 64);
3109
0
    Py_ssize_t *states = PyMem_Calloc(nlocals - 64, sizeof(Py_ssize_t));
3110
0
    if (states == NULL) {
3111
0
        PyErr_NoMemory();
3112
0
        return ERROR;
3113
0
    }
3114
0
    Py_ssize_t blocknum = 0;
3115
    // states[i - 64] == blocknum if local i is guaranteed to
3116
    // be initialized, i.e., if it has had a previous LOAD_FAST or
3117
    // STORE_FAST within that basicblock (not followed by
3118
    // DELETE_FAST/LOAD_FAST_AND_CLEAR/STORE_FAST_MAYBE_NULL).
3119
0
    for (basicblock *b = entryblock; b != NULL; b = b->b_next) {
3120
0
        blocknum++;
3121
0
        for (int i = 0; i < b->b_iused; i++) {
3122
0
            cfg_instr *instr = &b->b_instr[i];
3123
0
            assert(instr->i_opcode != EXTENDED_ARG);
3124
0
            int arg = instr->i_oparg;
3125
0
            if (arg < 64) {
3126
0
                continue;
3127
0
            }
3128
0
            assert(arg >= 0);
3129
0
            switch (instr->i_opcode) {
3130
0
                case DELETE_FAST:
3131
0
                case LOAD_FAST_AND_CLEAR:
3132
0
                case STORE_FAST_MAYBE_NULL:
3133
0
                    states[arg - 64] = blocknum - 1;
3134
0
                    break;
3135
0
                case STORE_FAST:
3136
0
                    states[arg - 64] = blocknum;
3137
0
                    break;
3138
0
                case LOAD_FAST:
3139
0
                    if (states[arg - 64] != blocknum) {
3140
0
                        instr->i_opcode = LOAD_FAST_CHECK;
3141
0
                    }
3142
0
                    states[arg - 64] = blocknum;
3143
0
                    break;
3144
0
                    Py_UNREACHABLE();
3145
0
            }
3146
0
        }
3147
0
    }
3148
0
    PyMem_Free(states);
3149
0
    return SUCCESS;
3150
0
}
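
/*
 * Sketch of the block-numbering trick above (made-up data): instead of
 * clearing one "definitely initialized in this block" flag per local at
 * every block boundary, store the current block number and treat any
 * stale value as "maybe uninitialized".  Resetting the state is then
 * O(1) per block instead of O(nlocals).
 */
#include <stdio.h>

int
main(void)
{
    long states[1] = {0};      /* one slot per high-numbered local */
    long blocknum = 0;

    blocknum++;                /* enter block 1 */
    states[0] = blocknum;      /* STORE_FAST of the local */
    printf("block 1 load: %s\n",
           states[0] == blocknum ? "safe" : "needs LOAD_FAST_CHECK");

    blocknum++;                /* enter block 2: nothing to reset */
    printf("block 2 load: %s\n",
           states[0] == blocknum ? "safe" : "needs LOAD_FAST_CHECK");
    return 0;
}
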
3151
3152
static int
3153
remove_unused_consts(basicblock *entryblock, PyObject *consts)
3154
6.74k
{
3155
6.74k
    assert(PyList_CheckExact(consts));
3156
6.74k
    Py_ssize_t nconsts = PyList_GET_SIZE(consts);
3157
6.74k
    if (nconsts == 0) {
3158
0
        return SUCCESS;  /* nothing to do */
3159
0
    }
3160
3161
6.74k
    Py_ssize_t *index_map = NULL;
3162
6.74k
    Py_ssize_t *reverse_index_map = NULL;
3163
6.74k
    int err = ERROR;
3164
3165
6.74k
    index_map = PyMem_Malloc(nconsts * sizeof(Py_ssize_t));
3166
6.74k
    if (index_map == NULL) {
3167
0
        goto end;
3168
0
    }
3169
51.7k
    for (Py_ssize_t i = 1; i < nconsts; i++) {
3170
44.9k
        index_map[i] = -1;
3171
44.9k
    }
3172
    // The first constant may be the docstring; always keep it.
3173
6.74k
    index_map[0] = 0;
3174
3175
    /* mark used consts */
3176
51.8k
    for (basicblock *b = entryblock; b != NULL; b = b->b_next) {
3177
376k
        for (int i = 0; i < b->b_iused; i++) {
3178
331k
            int opcode = b->b_instr[i].i_opcode;
3179
331k
            if (OPCODE_HAS_CONST(opcode)) {
3180
45.0k
                int index = b->b_instr[i].i_oparg;
3181
45.0k
                index_map[index] = index;
3182
45.0k
            }
3183
331k
        }
3184
45.0k
    }
3185
    /* now index_map[i] == i if consts[i] is used, -1 otherwise */
3186
    /* condense consts */
3187
6.74k
    Py_ssize_t n_used_consts = 0;
3188
58.4k
    for (Py_ssize_t i = 0; i < nconsts; i++) {
3189
51.7k
        if (index_map[i] != -1) {
3190
38.6k
            assert(index_map[i] == i);
3191
38.6k
            index_map[n_used_consts++] = index_map[i];
3192
38.6k
        }
3193
51.7k
    }
3194
6.74k
    if (n_used_consts == nconsts) {
3195
        /* nothing to do */
3196
2.75k
        err = SUCCESS;
3197
2.75k
        goto end;
3198
2.75k
    }
3199
3200
    /* move all used consts to the beginning of the consts list */
3201
3.99k
    assert(n_used_consts < nconsts);
3202
34.1k
    for (Py_ssize_t i = 0; i < n_used_consts; i++) {
3203
30.1k
        Py_ssize_t old_index = index_map[i];
3204
30.1k
        assert(i <= old_index && old_index < nconsts);
3205
30.1k
        if (i != old_index) {
3206
22.3k
            PyObject *value = PyList_GET_ITEM(consts, index_map[i]);
3207
22.3k
            assert(value != NULL);
3208
22.3k
            PyList_SetItem(consts, i, Py_NewRef(value));
3209
22.3k
        }
3210
30.1k
    }
3211
3212
    /* truncate the consts list at its new size */
3213
3.99k
    if (PyList_SetSlice(consts, n_used_consts, nconsts, NULL) < 0) {
3214
0
        goto end;
3215
0
    }
3216
    /* adjust const indices in the bytecode */
3217
3.99k
    reverse_index_map = PyMem_Malloc(nconsts * sizeof(Py_ssize_t));
3218
3.99k
    if (reverse_index_map == NULL) {
3219
0
        goto end;
3220
0
    }
3221
47.1k
    for (Py_ssize_t i = 0; i < nconsts; i++) {
3222
43.1k
        reverse_index_map[i] = -1;
3223
43.1k
    }
3224
34.1k
    for (Py_ssize_t i = 0; i < n_used_consts; i++) {
3225
30.1k
        assert(index_map[i] != -1);
3226
30.1k
        assert(reverse_index_map[index_map[i]] == -1);
3227
30.1k
        reverse_index_map[index_map[i]] = i;
3228
30.1k
    }
3229
3230
36.3k
    for (basicblock *b = entryblock; b != NULL; b = b->b_next) {
3231
288k
        for (int i = 0; i < b->b_iused; i++) {
3232
256k
            int opcode = b->b_instr[i].i_opcode;
3233
256k
            if (OPCODE_HAS_CONST(opcode)) {
3234
35.8k
                int index = b->b_instr[i].i_oparg;
3235
35.8k
                assert(reverse_index_map[index] >= 0);
3236
35.8k
                assert(reverse_index_map[index] < n_used_consts);
3237
35.8k
                b->b_instr[i].i_oparg = (int)reverse_index_map[index];
3238
35.8k
            }
3239
256k
        }
3240
32.3k
    }
3241
3242
3.99k
    err = SUCCESS;
3243
6.74k
end:
3244
6.74k
    PyMem_Free(index_map);
3245
6.74k
    PyMem_Free(reverse_index_map);
3246
6.74k
    return err;
3247
3.99k
}
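
/*
 * Standalone sketch of the two index maps above, on made-up data: mark
 * the used slots, condense them to the front, then build the reverse
 * map that rewrites each old const index in the bytecode to its new one.
 */
#include <stdio.h>

int
main(void)
{
    /* six consts; only 0, 2 and 5 are referenced by the bytecode */
    long index_map[6] = {0, -1, 2, -1, -1, 5};
    long reverse[6];
    long n_used = 0;

    for (int i = 0; i < 6; i++) {
        if (index_map[i] != -1) {
            index_map[n_used++] = index_map[i];   /* condense */
        }
    }
    for (int i = 0; i < 6; i++) {
        reverse[i] = -1;
    }
    for (long i = 0; i < n_used; i++) {
        reverse[index_map[i]] = i;        /* old index -> new index */
    }
    /* an instruction whose i_oparg was 5 is rewritten to reverse[5] */
    printf("old 0 -> %ld, old 2 -> %ld, old 5 -> %ld\n",
           reverse[0], reverse[2], reverse[5]);   /* 0, 1, 2 */
    return 0;
}
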
3248
3249
3250
3251
static int
3252
add_checks_for_loads_of_uninitialized_variables(basicblock *entryblock,
3253
                                                int nlocals,
3254
                                                int nparams)
3255
6.74k
{
3256
6.74k
    if (nlocals == 0) {
3257
1.65k
        return SUCCESS;
3258
1.65k
    }
3259
5.08k
    if (nlocals > 64) {
3260
        // To avoid O(nlocals**2) compilation, locals beyond the first
3261
        // 64 are only analyzed one basicblock at a time: initialization
3262
        // info is not passed between basicblocks.
3263
0
        if (fast_scan_many_locals(entryblock, nlocals) < 0) {
3264
0
            return ERROR;
3265
0
        }
3266
0
        nlocals = 64;
3267
0
    }
3268
5.08k
    basicblock **stack = make_cfg_traversal_stack(entryblock);
3269
5.08k
    if (stack == NULL) {
3270
0
        return ERROR;
3271
0
    }
3272
5.08k
    basicblock **sp = stack;
3273
3274
    // First origin of being uninitialized:
3275
    // The non-parameter locals in the entry block.
3276
5.08k
    uint64_t start_mask = 0;
3277
12.7k
    for (int i = nparams; i < nlocals; i++) {
3278
7.70k
        start_mask |= (uint64_t)1 << i;
3279
7.70k
    }
3280
5.08k
    maybe_push(entryblock, start_mask, &sp);
3281
3282
    // Second origin of being uninitialized:
3283
    // There could be DELETE_FAST somewhere, so
3284
    // be sure to scan each basicblock at least once.
3285
46.7k
    for (basicblock *b = entryblock; b != NULL; b = b->b_next) {
3286
41.6k
        scan_block_for_locals(b, &sp);
3287
41.6k
    }
3288
    // Now propagate the uncertainty from the origins we found: Use
3289
    // LOAD_FAST_CHECK for any LOAD_FAST where the local could be undefined.
3290
29.9k
    while (sp > stack) {
3291
24.8k
        basicblock *b = *--sp;
3292
        // mark as no longer on stack
3293
24.8k
        b->b_visited = 0;
3294
24.8k
        scan_block_for_locals(b, &sp);
3295
24.8k
    }
3296
5.08k
    PyMem_Free(stack);
3297
5.08k
    return SUCCESS;
3298
5.08k
}
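
/*
 * Sketch of the worklist fixpoint above on a hypothetical two-block
 * mini-CFG (toy_block and friends are not CPython types).  A block is
 * pushed only when its unsafe mask gains new bits, and only if it is
 * not already queued, so the loop terminates even for cyclic CFGs.
 */
#include <stdint.h>
#include <stdio.h>

typedef struct toy_block {
    uint64_t unsafe_mask;
    int visited;                 /* 1 while on the worklist */
    struct toy_block *succ;      /* single successor, for brevity */
} toy_block;

static void
toy_maybe_push(toy_block *b, uint64_t mask, toy_block ***sp)
{
    uint64_t both = b->unsafe_mask | mask;
    if (b->unsafe_mask != both) {
        b->unsafe_mask = both;
        if (!b->visited) {
            *(*sp)++ = b;
            b->visited = 1;
        }
    }
}

int
main(void)
{
    toy_block body = {0, 0, NULL};
    toy_block entry = {0, 0, &body};
    body.succ = &body;           /* the loop body jumps back to itself */

    toy_block *stack[8], **sp = stack;
    toy_maybe_push(&entry, 0x3, &sp);   /* locals 0 and 1 unsafe at entry */
    while (sp > stack) {
        toy_block *b = *--sp;
        b->visited = 0;
        /* pretend the body stores local 0, leaving only local 1 unsafe */
        uint64_t out = (b == &body) ? (b->unsafe_mask & ~(uint64_t)1)
                                    : b->unsafe_mask;
        toy_maybe_push(b->succ, out, &sp);
    }
    /* prints 0x3: local 0 is still unsafe on the body's first entry */
    printf("body entry mask at fixpoint: 0x%llx\n",
           (unsigned long long)body.unsafe_mask);
    return 0;
}
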
3299
3300
3301
static int
3302
6.00k
mark_warm(basicblock *entryblock) {
3303
6.00k
    basicblock **stack = make_cfg_traversal_stack(entryblock);
3304
6.00k
    if (stack == NULL) {
3305
0
        return ERROR;
3306
0
    }
3307
6.00k
    basicblock **sp = stack;
3308
3309
6.00k
    *sp++ = entryblock;
3310
6.00k
    entryblock->b_visited = 1;
3311
37.7k
    while (sp > stack) {
3312
31.7k
        basicblock *b = *(--sp);
3313
31.7k
        assert(!b->b_except_handler);
3314
31.7k
        b->b_warm = 1;
3315
31.7k
        basicblock *next = b->b_next;
3316
31.7k
        if (next && BB_HAS_FALLTHROUGH(b) && !next->b_visited) {
3317
15.1k
            *sp++ = next;
3318
15.1k
            next->b_visited = 1;
3319
15.1k
        }
3320
281k
        for (int i = 0; i < b->b_iused; i++) {
3321
250k
            cfg_instr *instr = &b->b_instr[i];
3322
250k
            if (is_jump(instr) && !instr->i_target->b_visited) {
3323
10.5k
                *sp++ = instr->i_target;
3324
10.5k
                instr->i_target->b_visited = 1;
3325
10.5k
            }
3326
250k
        }
3327
31.7k
    }
3328
6.00k
    PyMem_Free(stack);
3329
6.00k
    return SUCCESS;
3330
6.00k
}
3331
3332
static int
3333
6.00k
mark_cold(basicblock *entryblock) {
3334
50.3k
    for (basicblock *b = entryblock; b != NULL; b = b->b_next) {
3335
44.3k
        assert(!b->b_cold && !b->b_warm);
3336
44.3k
    }
3337
6.00k
    if (mark_warm(entryblock) < 0) {
3338
0
        return ERROR;
3339
0
    }
3340
3341
6.00k
    basicblock **stack = make_cfg_traversal_stack(entryblock);
3342
6.00k
    if (stack == NULL) {
3343
0
        return ERROR;
3344
0
    }
3345
3346
6.00k
    basicblock **sp = stack;
3347
50.3k
    for (basicblock *b = entryblock; b != NULL; b = b->b_next) {
3348
44.3k
        if (b->b_except_handler) {
3349
2.59k
            assert(!b->b_warm);
3350
2.59k
            *sp++ = b;
3351
2.59k
            b->b_visited = 1;
3352
2.59k
        }
3353
44.3k
    }
3354
3355
11.0k
    while (sp > stack) {
3356
5.09k
        basicblock *b = *(--sp);
3357
5.09k
        b->b_cold = 1;
3358
5.09k
        basicblock *next = b->b_next;
3359
5.09k
        if (next && BB_HAS_FALLTHROUGH(b)) {
3360
1.39k
            if (!next->b_warm && !next->b_visited) {
3361
1.29k
                *sp++ = next;
3362
1.29k
                next->b_visited = 1;
3363
1.29k
            }
3364
1.39k
        }
3365
27.3k
        for (int i = 0; i < b->b_iused; i++) {
3366
22.2k
            cfg_instr *instr = &b->b_instr[i];
3367
22.2k
            if (is_jump(instr)) {
3368
1.74k
                assert(i == b->b_iused - 1);
3369
1.74k
                basicblock *target = b->b_instr[i].i_target;
3370
1.74k
                if (!target->b_warm && !target->b_visited) {
3371
1.20k
                    *sp++ = target;
3372
1.20k
                    target->b_visited = 1;
3373
1.20k
                }
3374
1.74k
            }
3375
22.2k
        }
3376
5.09k
    }
3377
6.00k
    PyMem_Free(stack);
3378
6.00k
    return SUCCESS;
3379
6.00k
}
3380
3381
3382
static int
3383
6.74k
push_cold_blocks_to_end(cfg_builder *g) {
3384
6.74k
    basicblock *entryblock = g->g_entryblock;
3385
6.74k
    if (entryblock->b_next == NULL) {
3386
        /* single basicblock, no need to reorder */
3387
741
        return SUCCESS;
3388
741
    }
3389
6.00k
    RETURN_IF_ERROR(mark_cold(entryblock));
3390
3391
6.00k
    int next_lbl = get_max_label(g->g_entryblock) + 1;
3392
3393
    /* If we have a cold block with fallthrough to a warm block, add */
3394
    /* an explicit jump instead of fallthrough */
3395
50.3k
    for (basicblock *b = entryblock; b != NULL; b = b->b_next) {
3396
44.3k
        if (b->b_cold && BB_HAS_FALLTHROUGH(b) && b->b_next && b->b_next->b_warm) {
3397
45
            basicblock *explicit_jump = cfg_builder_new_block(g);
3398
45
            if (explicit_jump == NULL) {
3399
0
                return ERROR;
3400
0
            }
3401
45
            if (!IS_LABEL(b->b_next->b_label)) {
3402
0
                b->b_next->b_label.id = next_lbl++;
3403
0
            }
3404
45
            basicblock_addop(explicit_jump, JUMP_NO_INTERRUPT, b->b_next->b_label.id,
3405
45
                             NO_LOCATION);
3406
45
            explicit_jump->b_cold = 1;
3407
45
            explicit_jump->b_next = b->b_next;
3408
45
            explicit_jump->b_predecessors = 1;
3409
45
            b->b_next = explicit_jump;
3410
3411
            /* set target */
3412
45
            cfg_instr *last = basicblock_last_instr(explicit_jump);
3413
45
            last->i_target = explicit_jump->b_next;
3414
45
        }
3415
44.3k
    }
3416
3417
6.00k
    assert(!entryblock->b_cold);  /* First block can't be cold */
3418
6.00k
    basicblock *cold_blocks = NULL;
3419
6.00k
    basicblock *cold_blocks_tail = NULL;
3420
3421
6.00k
    basicblock *b = entryblock;
3422
7.97k
    while (b->b_next) {
3423
7.97k
        assert(!b->b_cold);
3424
41.2k
        while (b->b_next && !b->b_next->b_cold) {
3425
33.2k
            b = b->b_next;
3426
33.2k
        }
3427
7.97k
        if (b->b_next == NULL) {
3428
            /* no more cold blocks */
3429
6.00k
            break;
3430
6.00k
        }
3431
3432
        /* b->b_next is the beginning of a cold streak */
3433
1.97k
        assert(!b->b_cold && b->b_next->b_cold);
3434
3435
1.97k
        basicblock *b_end = b->b_next;
3436
5.13k
        while (b_end->b_next && b_end->b_next->b_cold) {
3437
3.16k
            b_end = b_end->b_next;
3438
3.16k
        }
3439
3440
        /* b_end is the end of the cold streak */
3441
1.97k
        assert(b_end && b_end->b_cold);
3442
1.97k
        assert(b_end->b_next == NULL || !b_end->b_next->b_cold);
3443
3444
1.97k
        if (cold_blocks == NULL) {
3445
1.05k
            cold_blocks = b->b_next;
3446
1.05k
        }
3447
917
        else {
3448
917
            cold_blocks_tail->b_next = b->b_next;
3449
917
        }
3450
1.97k
        cold_blocks_tail = b_end;
3451
1.97k
        b->b_next = b_end->b_next;
3452
1.97k
        b_end->b_next = NULL;
3453
1.97k
    }
3454
6.00k
    assert(b != NULL && b->b_next == NULL);
3455
6.00k
    b->b_next = cold_blocks;
3456
3457
6.00k
    if (cold_blocks != NULL) {
3458
1.05k
        RETURN_IF_ERROR(remove_redundant_nops_and_jumps(g));
3459
1.05k
    }
3460
6.00k
    return SUCCESS;
3461
6.00k
}
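
/*
 * The streak splice above, reduced to a plain singly linked list with
 * made-up node types: detach each maximal run of "cold" nodes, chain
 * the runs together, and append the whole chain after the last "warm"
 * node, preserving the relative order inside both partitions.
 */
#include <stdio.h>

typedef struct node { int id; int cold; struct node *next; } node;

int
main(void)
{
    node n4 = {4, 0, NULL};
    node n3 = {3, 1, &n4};
    node n2 = {2, 1, &n3};
    node n1 = {1, 0, &n2};
    node n0 = {0, 0, &n1};       /* the head is never cold */

    node *cold_head = NULL, *cold_tail = NULL;
    node *b = &n0;
    while (b->next) {
        while (b->next && !b->next->cold) {
            b = b->next;
        }
        if (b->next == NULL) {
            break;               /* no more cold runs */
        }
        node *end = b->next;     /* b->next starts a cold run */
        while (end->next && end->next->cold) {
            end = end->next;
        }
        if (cold_head == NULL) {
            cold_head = b->next;
        }
        else {
            cold_tail->next = b->next;
        }
        cold_tail = end;
        b->next = end->next;     /* unlink the run */
        end->next = NULL;
    }
    b->next = cold_head;         /* append all cold runs at the end */

    for (node *p = &n0; p != NULL; p = p->next) {
        printf("%d%s", p->id, p->next ? " -> " : "\n");   /* 0 1 4 2 3 */
    }
    return 0;
}
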
3462
3463
static int
3464
convert_pseudo_conditional_jumps(cfg_builder *g)
3465
6.74k
{
3466
6.74k
    basicblock *entryblock = g->g_entryblock;
3467
51.8k
    for (basicblock *b = entryblock; b != NULL; b = b->b_next) {
3468
369k
        for (int i = 0; i < b->b_iused; i++) {
3469
324k
            cfg_instr *instr = &b->b_instr[i];
3470
324k
            if (instr->i_opcode == JUMP_IF_FALSE || instr->i_opcode == JUMP_IF_TRUE) {
3471
548
                assert(i == b->b_iused - 1);
3472
548
                instr->i_opcode = instr->i_opcode == JUMP_IF_FALSE ?
3473
280
                                          POP_JUMP_IF_FALSE : POP_JUMP_IF_TRUE;
3474
548
                location loc = instr->i_loc;
3475
548
                basicblock *except = instr->i_except;
3476
548
                cfg_instr copy = {
3477
548
                            .i_opcode = COPY,
3478
548
                            .i_oparg = 1,
3479
548
                            .i_loc = loc,
3480
548
                            .i_target = NULL,
3481
548
                            .i_except = except,
3482
548
                };
3483
548
                RETURN_IF_ERROR(basicblock_insert_instruction(b, i++, &copy));
3484
548
                cfg_instr to_bool = {
3485
548
                            .i_opcode = TO_BOOL,
3486
548
                            .i_oparg = 0,
3487
548
                            .i_loc = loc,
3488
548
                            .i_target = NULL,
3489
548
                            .i_except = except,
3490
548
                };
3491
548
                RETURN_IF_ERROR(basicblock_insert_instruction(b, i++, &to_bool));
3492
548
            }
3493
324k
        }
3494
45.1k
    }
3495
6.74k
    return SUCCESS;
3496
6.74k
}
3497
3498
static int
3499
convert_pseudo_ops(cfg_builder *g)
3500
6.74k
{
3501
6.74k
    basicblock *entryblock = g->g_entryblock;
3502
51.8k
    for (basicblock *b = entryblock; b != NULL; b = b->b_next) {
3503
372k
        for (int i = 0; i < b->b_iused; i++) {
3504
327k
            cfg_instr *instr = &b->b_instr[i];
3505
327k
            if (is_block_push(instr)) {
3506
2.59k
                INSTR_SET_OP0(instr, NOP);
3507
2.59k
            }
3508
325k
            else if (instr->i_opcode == LOAD_CLOSURE) {
3509
1.65k
                assert(is_pseudo_target(LOAD_CLOSURE, LOAD_FAST));
3510
1.65k
                instr->i_opcode = LOAD_FAST;
3511
1.65k
            }
3512
323k
            else if (instr->i_opcode == STORE_FAST_MAYBE_NULL) {
3513
594
                assert(is_pseudo_target(STORE_FAST_MAYBE_NULL, STORE_FAST));
3514
594
                instr->i_opcode = STORE_FAST;
3515
594
            }
3516
327k
        }
3517
45.1k
    }
3518
6.74k
    return remove_redundant_nops_and_jumps(g);
3519
6.74k
}
3520
3521
static inline bool
3522
74.8k
is_exit_or_eval_check_without_lineno(basicblock *b) {
3523
74.8k
    if (basicblock_exits_scope(b) || basicblock_has_eval_break(b)) {
3524
41.6k
        return basicblock_has_no_lineno(b);
3525
41.6k
    }
3526
33.2k
    else {
3527
33.2k
        return false;
3528
33.2k
    }
3529
74.8k
}
3530
3531
3532
/* PEP 626 mandates that the f_lineno of a frame is correct
3533
 * after a frame terminates. It would be prohibitively expensive
3534
 * to continuously update the f_lineno field at runtime,
3535
 * so we make sure that all exiting instructions (raises and returns)
3536
 * have a valid line number, allowing us to compute f_lineno lazily.
3537
 * We can do this by duplicating the exit blocks without a line number
3538
 * so that none have more than one predecessor. We can then safely
3539
 * copy the line number from the sole predecessor block.
3540
 */
3541
static int
3542
duplicate_exits_without_lineno(cfg_builder *g)
3543
13.4k
{
3544
13.4k
    int next_lbl = get_max_label(g->g_entryblock) + 1;
3545
3546
    /* Copy all exit blocks without a line number that are targets of a jump.
3547
     */
3548
13.4k
    basicblock *entryblock = g->g_entryblock;
3549
103k
    for (basicblock *b = entryblock; b != NULL; b = b->b_next) {
3550
90.1k
        cfg_instr *last = basicblock_last_instr(b);
3551
90.1k
        if (last == NULL) {
3552
14.4k
            continue;
3553
14.4k
        }
3554
75.7k
        if (is_jump(last)) {
3555
36.5k
            basicblock *target = next_nonempty_block(last->i_target);
3556
36.5k
            if (is_exit_or_eval_check_without_lineno(target) && target->b_predecessors > 1) {
3557
614
                basicblock *new_target = copy_basicblock(g, target);
3558
614
                if (new_target == NULL) {
3559
0
                    return ERROR;
3560
0
                }
3561
614
                new_target->b_instr[0].i_loc = last->i_loc;
3562
614
                last->i_target = new_target;
3563
614
                target->b_predecessors--;
3564
614
                new_target->b_predecessors = 1;
3565
614
                new_target->b_next = target->b_next;
3566
614
                new_target->b_label.id = next_lbl++;
3567
614
                target->b_next = new_target;
3568
614
            }
3569
36.5k
        }
3570
75.7k
    }
3571
3572
    /* Any remaining reachable exit blocks without a line number can only be reached by
3573
     * fall through, and thus can only have a single predecessor */
3574
103k
    for (basicblock *b = entryblock; b != NULL; b = b->b_next) {
3575
90.1k
        if (BB_HAS_FALLTHROUGH(b) && b->b_next && b->b_iused > 0) {
3576
38.3k
            if (is_exit_or_eval_check_without_lineno(b->b_next)) {
3577
772
                cfg_instr *last = basicblock_last_instr(b);
3578
772
                assert(last != NULL);
3579
772
                b->b_next->b_instr[0].i_loc = last->i_loc;
3580
772
            }
3581
38.3k
        }
3582
90.1k
    }
3583
13.4k
    return SUCCESS;
3584
13.4k
}
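
/*
 * A toy version of the duplication above (exit_node and jump are
 * invented types, not the real basicblock machinery): two jumps share
 * an exit that has no line number; copying the exit for all but one
 * predecessor leaves every copy with a single predecessor, whose
 * location it can then inherit.
 */
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

typedef struct { int lineno; int predecessors; } exit_node;
typedef struct { int lineno; exit_node *target; } jump;

int
main(void)
{
    exit_node *shared = malloc(sizeof(exit_node));
    if (shared == NULL) {
        return 1;
    }
    shared->lineno = -1;         /* -1 stands for NO_LOCATION */
    shared->predecessors = 2;
    jump jumps[2] = { {3, shared}, {7, shared} };

    for (int i = 0; i < 2; i++) {
        exit_node *t = jumps[i].target;
        if (t->lineno == -1 && t->predecessors > 1) {
            exit_node *copy = malloc(sizeof(exit_node));
            if (copy == NULL) {
                return 1;
            }
            memcpy(copy, t, sizeof(exit_node));
            copy->predecessors = 1;
            copy->lineno = jumps[i].lineno;   /* sole predecessor's line */
            t->predecessors--;
            jumps[i].target = copy;
        }
        else if (t->lineno == -1) {
            t->lineno = jumps[i].lineno;      /* single predecessor left */
        }
    }
    printf("jump 0 exits at line %d\n", jumps[0].target->lineno);  /* 3 */
    printf("jump 1 exits at line %d\n", jumps[1].target->lineno);  /* 7 */
    free(jumps[0].target);
    free(jumps[1].target);
    return 0;
}
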
3585
3586
3587
/* If an instruction has no line number, but its predecessor in the BB does,
3588
 * then copy the line number. If a successor block has no line number, and only
3589
 * one predecessor, then inherit the line number.
3590
 * This ensures that all exit blocks (with one predecessor) receive a line number.
3591
 * Also reduces the size of the line number table,
3592
 * but has no impact on the generated line number events.
3593
 */
3594
static void
3595
13.4k
propagate_line_numbers(basicblock *entryblock) {
3596
103k
    for (basicblock *b = entryblock; b != NULL; b = b->b_next) {
3597
90.1k
        cfg_instr *last = basicblock_last_instr(b);
3598
90.1k
        if (last == NULL) {
3599
14.4k
            continue;
3600
14.4k
        }
3601
3602
75.7k
        location prev_location = NO_LOCATION;
3603
760k
        for (int i = 0; i < b->b_iused; i++) {
3604
684k
            if (b->b_instr[i].i_loc.lineno == NO_LOCATION.lineno) {
3605
38.0k
                b->b_instr[i].i_loc = prev_location;
3606
38.0k
            }
3607
646k
            else {
3608
646k
                prev_location = b->b_instr[i].i_loc;
3609
646k
            }
3610
684k
        }
3611
75.7k
        if (BB_HAS_FALLTHROUGH(b) && b->b_next->b_predecessors == 1) {
3612
27.5k
            if (b->b_next->b_iused > 0) {
3613
27.5k
                if (b->b_next->b_instr[0].i_loc.lineno == NO_LOCATION.lineno) {
3614
710
                    b->b_next->b_instr[0].i_loc = prev_location;
3615
710
                }
3616
27.5k
            }
3617
27.5k
        }
3618
75.7k
        if (is_jump(last)) {
3619
36.5k
            basicblock *target = last->i_target;
3620
36.5k
            if (target->b_predecessors == 1) {
3621
16.8k
                if (target->b_instr[0].i_loc.lineno == NO_LOCATION.lineno) {
3622
2.88k
                    target->b_instr[0].i_loc = prev_location;
3623
2.88k
                }
3624
16.8k
            }
3625
36.5k
        }
3626
75.7k
    }
3627
13.4k
}
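
/*
 * The in-block propagation above, reduced to an array of line numbers
 * (made-up data, -1 standing in for NO_LOCATION): walk the instructions
 * once, remembering the last real location and assigning it to any
 * instruction that has none.
 */
#include <stdio.h>

int
main(void)
{
    int lineno[6] = {10, -1, -1, 12, -1, -1};
    int prev = -1;

    for (int i = 0; i < 6; i++) {
        if (lineno[i] == -1) {
            lineno[i] = prev;    /* inherit from the previous instruction */
        }
        else {
            prev = lineno[i];
        }
    }
    for (int i = 0; i < 6; i++) {
        printf("%d ", lineno[i]);    /* 10 10 10 12 12 12 */
    }
    printf("\n");
    return 0;
}
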
3628
3629
static int
3630
resolve_line_numbers(cfg_builder *g, int firstlineno)
3631
13.4k
{
3632
13.4k
    RETURN_IF_ERROR(duplicate_exits_without_lineno(g));
3633
13.4k
    propagate_line_numbers(g->g_entryblock);
3634
13.4k
    return SUCCESS;
3635
13.4k
}
3636
3637
int
3638
_PyCfg_OptimizeCodeUnit(cfg_builder *g, PyObject *consts, PyObject *const_cache,
3639
                        int nlocals, int nparams, int firstlineno)
3640
6.74k
{
3641
6.74k
    assert(cfg_builder_check(g));
3642
    /** Preprocessing **/
3643
    /* Map labels to targets and mark exception handlers */
3644
6.74k
    RETURN_IF_ERROR(translate_jump_labels_to_targets(g->g_entryblock));
3645
6.74k
    RETURN_IF_ERROR(mark_except_handlers(g->g_entryblock));
3646
6.74k
    RETURN_IF_ERROR(label_exception_targets(g->g_entryblock));
3647
3648
    /** Optimization **/
3649
6.74k
    RETURN_IF_ERROR(optimize_cfg(g, consts, const_cache, firstlineno));
3650
6.74k
    RETURN_IF_ERROR(remove_unused_consts(g->g_entryblock, consts));
3651
6.74k
    RETURN_IF_ERROR(
3652
6.74k
        add_checks_for_loads_of_uninitialized_variables(
3653
6.74k
            g->g_entryblock, nlocals, nparams));
3654
6.74k
    RETURN_IF_ERROR(insert_superinstructions(g));
3655
3656
6.74k
    RETURN_IF_ERROR(push_cold_blocks_to_end(g));
3657
6.74k
    RETURN_IF_ERROR(resolve_line_numbers(g, firstlineno));
3658
    // temporarily remove assert. See https://github.com/python/cpython/issues/125845
3659
    // assert(all_exits_have_lineno(g->g_entryblock));
3660
6.74k
    return SUCCESS;
3661
6.74k
}
3662
3663
static int *
3664
build_cellfixedoffsets(_PyCompile_CodeUnitMetadata *umd)
3665
6.74k
{
3666
6.74k
    int nlocals = (int)PyDict_GET_SIZE(umd->u_varnames);
3667
6.74k
    int ncellvars = (int)PyDict_GET_SIZE(umd->u_cellvars);
3668
6.74k
    int nfreevars = (int)PyDict_GET_SIZE(umd->u_freevars);
3669
3670
6.74k
    int noffsets = ncellvars + nfreevars;
3671
6.74k
    int *fixed = PyMem_New(int, noffsets);
3672
6.74k
    if (fixed == NULL) {
3673
0
        PyErr_NoMemory();
3674
0
        return NULL;
3675
0
    }
3676
8.92k
    for (int i = 0; i < noffsets; i++) {
3677
2.17k
        fixed[i] = nlocals + i;
3678
2.17k
    }
3679
3680
6.74k
    PyObject *varname, *cellindex;
3681
6.74k
    Py_ssize_t pos = 0;
3682
7.99k
    while (PyDict_Next(umd->u_cellvars, &pos, &varname, &cellindex)) {
3683
1.25k
        PyObject *varindex;
3684
1.25k
        if (PyDict_GetItemRef(umd->u_varnames, varname, &varindex) < 0) {
3685
0
            goto error;
3686
0
        }
3687
1.25k
        if (varindex == NULL) {
3688
879
            continue;
3689
879
        }
3690
3691
373
        int argoffset = PyLong_AsInt(varindex);
3692
373
        Py_DECREF(varindex);
3693
373
        if (argoffset == -1 && PyErr_Occurred()) {
3694
0
            goto error;
3695
0
        }
3696
3697
373
        int oldindex = PyLong_AsInt(cellindex);
3698
373
        if (oldindex == -1 && PyErr_Occurred()) {
3699
0
            goto error;
3700
0
        }
3701
373
        fixed[oldindex] = argoffset;
3702
373
    }
3703
6.74k
    return fixed;
3704
3705
0
error:
3706
0
    PyMem_Free(fixed);
3707
0
    return NULL;
3708
6.74k
}
3709
3710
#define IS_GENERATOR(CF) \
3711
6.74k
    ((CF) & (CO_GENERATOR | CO_COROUTINE | CO_ASYNC_GENERATOR))
3712
3713
static int
3714
insert_prefix_instructions(_PyCompile_CodeUnitMetadata *umd, basicblock *entryblock,
3715
                           int *fixed, int nfreevars, int code_flags)
3716
6.74k
{
3717
6.74k
    assert(umd->u_firstlineno > 0);
3718
3719
    /* Add the generator prefix instructions. */
3720
6.74k
    if (IS_GENERATOR(code_flags)) {
3721
        /* Note that RETURN_GENERATOR + POP_TOP have a net stack effect
3722
         * of 0. This is because RETURN_GENERATOR pushes an element
3723
         * with _PyFrame_StackPush before switching stacks.
3724
         */
3725
3726
354
        location loc = LOCATION(umd->u_firstlineno, umd->u_firstlineno, -1, -1);
3727
354
        cfg_instr make_gen = {
3728
354
            .i_opcode = RETURN_GENERATOR,
3729
354
            .i_oparg = 0,
3730
354
            .i_loc = loc,
3731
354
            .i_target = NULL,
3732
354
            .i_except = NULL,
3733
354
        };
3734
354
        RETURN_IF_ERROR(basicblock_insert_instruction(entryblock, 0, &make_gen));
3735
354
        cfg_instr pop_top = {
3736
354
            .i_opcode = POP_TOP,
3737
354
            .i_oparg = 0,
3738
354
            .i_loc = loc,
3739
354
            .i_target = NULL,
3740
354
            .i_except = NULL,
3741
354
        };
3742
354
        RETURN_IF_ERROR(basicblock_insert_instruction(entryblock, 1, &pop_top));
3743
354
    }
3744
3745
    /* Set up cells for any variable that escapes, to be put in a closure. */
3746
6.74k
    const int ncellvars = (int)PyDict_GET_SIZE(umd->u_cellvars);
3747
6.74k
    if (ncellvars) {
3748
        // umd->u_cellvars has the cells out of order so we sort them
3749
        // before adding the MAKE_CELL instructions.  Note that we
3750
        // adjust for arg cells, which come first.
3751
823
        const int nvars = ncellvars + (int)PyDict_GET_SIZE(umd->u_varnames);
3752
823
        int *sorted = PyMem_RawCalloc(nvars, sizeof(int));
3753
823
        if (sorted == NULL) {
3754
0
            PyErr_NoMemory();
3755
0
            return ERROR;
3756
0
        }
3757
2.07k
        for (int i = 0; i < ncellvars; i++) {
3758
1.25k
            sorted[fixed[i]] = i + 1;
3759
1.25k
        }
3760
2.65k
        for (int i = 0, ncellsused = 0; ncellsused < ncellvars; i++) {
3761
1.83k
            int oldindex = sorted[i] - 1;
3762
1.83k
            if (oldindex == -1) {
3763
580
                continue;
3764
580
            }
3765
1.25k
            cfg_instr make_cell = {
3766
1.25k
                .i_opcode = MAKE_CELL,
3767
                // This will get fixed in fix_cell_offsets().
3768
1.25k
                .i_oparg = oldindex,
3769
1.25k
                .i_loc = NO_LOCATION,
3770
1.25k
                .i_target = NULL,
3771
1.25k
                .i_except = NULL,
3772
1.25k
            };
3773
1.25k
            if (basicblock_insert_instruction(entryblock, ncellsused, &make_cell) < 0) {
3774
0
                PyMem_RawFree(sorted);
3775
0
                return ERROR;
3776
0
            }
3777
1.25k
            ncellsused += 1;
3778
1.25k
        }
3779
823
        PyMem_RawFree(sorted);
3780
823
    }
3781
3782
6.74k
    if (nfreevars) {
3783
512
        cfg_instr copy_frees = {
3784
512
            .i_opcode = COPY_FREE_VARS,
3785
512
            .i_oparg = nfreevars,
3786
512
            .i_loc = NO_LOCATION,
3787
512
            .i_target = NULL,
3788
512
            .i_except = NULL,
3789
512
        };
3790
512
        RETURN_IF_ERROR(basicblock_insert_instruction(entryblock, 0, &copy_frees));
3791
512
    }
3792
3793
6.74k
    return SUCCESS;
3794
6.74k
}
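
/*
 * Sketch of the scatter sort above (made-up numbers): to emit MAKE_CELL
 * instructions in localsplus order, each cell's old index is scattered
 * into a slot keyed by its fixed offset, stored off by one so that 0
 * marks an empty slot, and the slots are then scanned in order.
 */
#include <stdio.h>

int
main(void)
{
    /* three localsplus slots; cell 0 lives at offset 2, cell 1 at 0 */
    int fixed[2] = {2, 0};
    int sorted[3] = {0, 0, 0};

    for (int i = 0; i < 2; i++) {
        sorted[fixed[i]] = i + 1;      /* +1 so that 0 means "no cell" */
    }
    for (int i = 0, used = 0; used < 2; i++) {
        int oldindex = sorted[i] - 1;
        if (oldindex == -1) {
            continue;                  /* a plain local, no cell here */
        }
        printf("MAKE_CELL for old cell %d emitted at position %d\n",
               oldindex, used);        /* cell 1 first, then cell 0 */
        used++;
    }
    return 0;
}
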
3795
3796
static int
3797
fix_cell_offsets(_PyCompile_CodeUnitMetadata *umd, basicblock *entryblock, int *fixedmap)
3798
6.74k
{
3799
6.74k
    int nlocals = (int)PyDict_GET_SIZE(umd->u_varnames);
3800
6.74k
    int ncellvars = (int)PyDict_GET_SIZE(umd->u_cellvars);
3801
6.74k
    int nfreevars = (int)PyDict_GET_SIZE(umd->u_freevars);
3802
6.74k
    int noffsets = ncellvars + nfreevars;
3803
3804
    // First deal with duplicates (arg cells).
3805
6.74k
    int numdropped = 0;
3806
8.92k
    for (int i = 0; i < noffsets ; i++) {
3807
2.17k
        if (fixedmap[i] == i + nlocals) {
3808
1.80k
            fixedmap[i] -= numdropped;
3809
1.80k
        }
3810
373
        else {
3811
            // It was a duplicate (cell/arg).
3812
373
            numdropped += 1;
3813
373
        }
3814
2.17k
    }
3815
3816
    // Then update offsets, either relative to locals or by cell2arg.
3817
51.8k
    for (basicblock *b = entryblock; b != NULL; b = b->b_next) {
3818
372k
        for (int i = 0; i < b->b_iused; i++) {
3819
327k
            cfg_instr *inst = &b->b_instr[i];
3820
            // This is called before extended args are generated.
3821
327k
            assert(inst->i_opcode != EXTENDED_ARG);
3822
327k
            int oldoffset = inst->i_oparg;
3823
327k
            switch(inst->i_opcode) {
3824
1.25k
                case MAKE_CELL:
3825
2.90k
                case LOAD_CLOSURE:
3826
4.88k
                case LOAD_DEREF:
3827
5.79k
                case STORE_DEREF:
3828
5.79k
                case DELETE_DEREF:
3829
5.79k
                case LOAD_FROM_DICT_OR_DEREF:
3830
5.79k
                    assert(oldoffset >= 0);
3831
5.79k
                    assert(oldoffset < noffsets);
3832
5.79k
                    assert(fixedmap[oldoffset] >= 0);
3833
5.79k
                    inst->i_oparg = fixedmap[oldoffset];
3834
327k
            }
3835
327k
        }
3836
45.1k
    }
3837
3838
6.74k
    return numdropped;
3839
6.74k
}
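
/*
 * The duplicate adjustment above on made-up numbers: a cell that
 * aliases an argument keeps the argument's offset, and every offset
 * that still points past the locals is shifted down by the number of
 * duplicates dropped so far.
 */
#include <stdio.h>

int
main(void)
{
    int nlocals = 3;
    /* cell 0 aliases argument 1; cells 1 and 2 are fresh */
    int fixedmap[3] = {1, 3 + 1, 3 + 2};
    int numdropped = 0;

    for (int i = 0; i < 3; i++) {
        if (fixedmap[i] == i + nlocals) {
            fixedmap[i] -= numdropped;   /* close the gap, keep the order */
        }
        else {
            numdropped += 1;             /* a cell/arg duplicate */
        }
    }
    printf("fixedmap: %d %d %d (dropped %d)\n",
           fixedmap[0], fixedmap[1], fixedmap[2], numdropped);  /* 1 3 4 (1) */
    return 0;
}
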
3840
3841
static int
3842
prepare_localsplus(_PyCompile_CodeUnitMetadata *umd, cfg_builder *g, int code_flags)
3843
6.74k
{
3844
6.74k
    assert(PyDict_GET_SIZE(umd->u_varnames) < INT_MAX);
3845
6.74k
    assert(PyDict_GET_SIZE(umd->u_cellvars) < INT_MAX);
3846
6.74k
    assert(PyDict_GET_SIZE(umd->u_freevars) < INT_MAX);
3847
6.74k
    int nlocals = (int)PyDict_GET_SIZE(umd->u_varnames);
3848
6.74k
    int ncellvars = (int)PyDict_GET_SIZE(umd->u_cellvars);
3849
6.74k
    int nfreevars = (int)PyDict_GET_SIZE(umd->u_freevars);
3850
6.74k
    assert(INT_MAX - nlocals - ncellvars > 0);
3851
6.74k
    assert(INT_MAX - nlocals - ncellvars - nfreevars > 0);
3852
6.74k
    int nlocalsplus = nlocals + ncellvars + nfreevars;
3853
6.74k
    int* cellfixedoffsets = build_cellfixedoffsets(umd);
3854
6.74k
    if (cellfixedoffsets == NULL) {
3855
0
        return ERROR;
3856
0
    }
3857
3858
    // This must be called before fix_cell_offsets().
3859
6.74k
    if (insert_prefix_instructions(umd, g->g_entryblock, cellfixedoffsets, nfreevars, code_flags)) {
3860
0
        PyMem_Free(cellfixedoffsets);
3861
0
        return ERROR;
3862
0
    }
3863
3864
6.74k
    int numdropped = fix_cell_offsets(umd, g->g_entryblock, cellfixedoffsets);
3865
6.74k
    PyMem_Free(cellfixedoffsets);  // At this point we're done with it.
3866
6.74k
    cellfixedoffsets = NULL;
3867
6.74k
    if (numdropped < 0) {
3868
0
        return ERROR;
3869
0
    }
3870
3871
6.74k
    nlocalsplus -= numdropped;
3872
6.74k
    return nlocalsplus;
3873
6.74k
}
3874
3875
cfg_builder *
3876
_PyCfg_FromInstructionSequence(_PyInstructionSequence *seq)
3877
6.74k
{
3878
6.74k
    if (_PyInstructionSequence_ApplyLabelMap(seq) < 0) {
3879
0
        return NULL;
3880
0
    }
3881
6.74k
    cfg_builder *g = _PyCfgBuilder_New();
3882
6.74k
    if (g == NULL) {
3883
0
        return NULL;
3884
0
    }
3885
377k
    for (int i = 0; i < seq->s_used; i++) {
3886
371k
        seq->s_instrs[i].i_target = 0;
3887
371k
    }
3888
377k
    for (int i = 0; i < seq->s_used; i++) {
3889
371k
        _PyInstruction *instr = &seq->s_instrs[i];
3890
371k
        if (HAS_TARGET(instr->i_opcode)) {
3891
23.1k
            assert(instr->i_oparg >= 0 && instr->i_oparg < seq->s_used);
3892
23.1k
            seq->s_instrs[instr->i_oparg].i_target = 1;
3893
23.1k
        }
3894
371k
    }
3895
6.74k
    int offset = 0;
3896
377k
    for (int i = 0; i < seq->s_used; i++) {
3897
371k
        _PyInstruction *instr = &seq->s_instrs[i];
3898
371k
        if (instr->i_opcode == ANNOTATIONS_PLACEHOLDER) {
3899
363
            if (seq->s_annotations_code != NULL) {
3900
1
                assert(seq->s_annotations_code->s_labelmap_size == 0
3901
1
                    && seq->s_annotations_code->s_nested == NULL);
3902
4
                for (int j = 0; j < seq->s_annotations_code->s_used; j++) {
3903
3
                    _PyInstruction *ann_instr = &seq->s_annotations_code->s_instrs[j];
3904
3
                    assert(!HAS_TARGET(ann_instr->i_opcode));
3905
3
                    if (_PyCfgBuilder_Addop(g, ann_instr->i_opcode, ann_instr->i_oparg, ann_instr->i_loc) < 0) {
3906
0
                        goto error;
3907
0
                    }
3908
3
                }
3909
1
                offset += seq->s_annotations_code->s_used - 1;
3910
1
            }
3911
362
            else {
3912
362
                offset -= 1;
3913
362
            }
3914
363
            continue;
3915
363
        }
3916
370k
        if (instr->i_target) {
3917
18.7k
            jump_target_label lbl_ = {i + offset};
3918
18.7k
            if (_PyCfgBuilder_UseLabel(g, lbl_) < 0) {
3919
0
                goto error;
3920
0
            }
3921
18.7k
        }
3922
370k
        int opcode = instr->i_opcode;
3923
370k
        int oparg = instr->i_oparg;
3924
370k
        if (HAS_TARGET(opcode)) {
3925
23.1k
            oparg += offset;
3926
23.1k
        }
3927
370k
        if (_PyCfgBuilder_Addop(g, opcode, oparg, instr->i_loc) < 0) {
3928
0
            goto error;
3929
0
        }
3930
370k
    }
3931
6.74k
    if (_PyCfgBuilder_CheckSize(g) < 0) {
3932
0
        goto error;
3933
0
    }
3934
6.74k
    return g;
3935
0
error:
3936
0
    _PyCfgBuilder_Free(g);
3937
0
    return NULL;
3938
6.74k
}
3939
3940
int
3941
_PyCfg_ToInstructionSequence(cfg_builder *g, _PyInstructionSequence *seq)
3942
6.74k
{
3943
6.74k
    int lbl = 0;
3944
52.6k
    for (basicblock *b = g->g_entryblock; b != NULL; b = b->b_next) {
3945
45.8k
        b->b_label = (jump_target_label){lbl};
3946
45.8k
        lbl += 1;
3947
45.8k
    }
3948
52.6k
    for (basicblock *b = g->g_entryblock; b != NULL; b = b->b_next) {
3949
45.8k
        RETURN_IF_ERROR(_PyInstructionSequence_UseLabel(seq, b->b_label.id));
3950
383k
        for (int i = 0; i < b->b_iused; i++) {
3951
338k
            cfg_instr *instr = &b->b_instr[i];
3952
338k
            if (HAS_TARGET(instr->i_opcode)) {
3953
                /* Set oparg to the label id (it will later be mapped to an offset) */
3954
18.3k
                instr->i_oparg = instr->i_target->b_label.id;
3955
18.3k
            }
3956
338k
            RETURN_IF_ERROR(
3957
338k
                _PyInstructionSequence_Addop(
3958
338k
                    seq, instr->i_opcode, instr->i_oparg, instr->i_loc));
3959
3960
338k
            _PyExceptHandlerInfo *hi = &seq->s_instrs[seq->s_used-1].i_except_handler_info;
3961
338k
            if (instr->i_except != NULL) {
3962
41.7k
                hi->h_label = instr->i_except->b_label.id;
3963
41.7k
                hi->h_startdepth = instr->i_except->b_startdepth;
3964
41.7k
                hi->h_preserve_lasti = instr->i_except->b_preserve_lasti;
3965
41.7k
            }
3966
296k
            else {
3967
296k
                hi->h_label = -1;
3968
296k
            }
3969
338k
        }
3970
45.8k
    }
3971
6.74k
    if (_PyInstructionSequence_ApplyLabelMap(seq) < 0) {
3972
0
        return ERROR;
3973
0
    }
3974
6.74k
    return SUCCESS;
3975
6.74k
}
3976
3977
3978
int
3979
_PyCfg_OptimizedCfgToInstructionSequence(cfg_builder *g,
3980
                                     _PyCompile_CodeUnitMetadata *umd, int code_flags,
3981
                                     int *stackdepth, int *nlocalsplus,
3982
                                     _PyInstructionSequence *seq)
3983
6.74k
{
3984
6.74k
    RETURN_IF_ERROR(convert_pseudo_conditional_jumps(g));
3985
3986
6.74k
    *stackdepth = calculate_stackdepth(g);
3987
6.74k
    if (*stackdepth < 0) {
3988
0
        return ERROR;
3989
0
    }
3990
3991
    /* prepare_localsplus adds instructions for generators that push
3992
     * and pop an item on the stack. This assertion makes sure there
3993
     * is space on the stack for that.
3994
     * It should always be true, because a generator must have at
3995
     * least one expression or call to INTRINSIC_STOPITERATION_ERROR,
3996
     * which requires stack space.
3997
     */
3998
6.74k
    assert(!(IS_GENERATOR(code_flags) && *stackdepth == 0));
3999
4000
6.74k
    *nlocalsplus = prepare_localsplus(umd, g, code_flags);
4001
6.74k
    if (*nlocalsplus < 0) {
4002
0
        return ERROR;
4003
0
    }
4004
4005
6.74k
    RETURN_IF_ERROR(convert_pseudo_ops(g));
4006
4007
    /* Order of basic blocks must have been determined by now */
4008
4009
6.74k
    RETURN_IF_ERROR(normalize_jumps(g));
4010
6.74k
    assert(no_redundant_jumps(g));
4011
4012
    /* Can't modify the bytecode after inserting instructions that produce
4013
     * borrowed references.
4014
     */
4015
6.74k
    RETURN_IF_ERROR(optimize_load_fast(g));
4016
4017
    /* Can't modify the bytecode after computing jump offsets. */
4018
6.74k
    if (_PyCfg_ToInstructionSequence(g, seq) < 0) {
4019
0
        return ERROR;
4020
0
    }
4021
4022
6.74k
    return SUCCESS;
4023
6.74k
}
4024
4025
/* This is used by _PyCompile_Assemble to fill in the jump and exception
4026
 * targets in a synthetic CFG (which is not the output of the builtin compiler).
4027
 */
4028
int
4029
_PyCfg_JumpLabelsToTargets(cfg_builder *g)
4030
0
{
4031
0
    RETURN_IF_ERROR(translate_jump_labels_to_targets(g->g_entryblock));
4032
0
    RETURN_IF_ERROR(label_exception_targets(g->g_entryblock));
4033
0
    return SUCCESS;
4034
0
}
4035
4036
/* Exported API functions */
4037
4038
int
4039
PyCompile_OpcodeStackEffectWithJump(int opcode, int oparg, int jump)
4040
0
{
4041
0
    stack_effects effs;
4042
0
    if (get_stack_effects(opcode, oparg, jump, &effs) < 0) {
4043
0
        return PY_INVALID_STACK_EFFECT;
4044
0
    }
4045
0
    return effs.net;
4046
0
}
4047
4048
int
4049
PyCompile_OpcodeStackEffect(int opcode, int oparg)
4050
113
{
4051
113
    stack_effects effs;
4052
113
    if (get_stack_effects(opcode, oparg, -1, &effs) < 0) {
4053
0
        return PY_INVALID_STACK_EFFECT;
4054
0
    }
4055
113
    return effs.net;
4056
113
}
4057
4058
/* Access to compiler optimizations for unit tests.
4059
4060
 * _PyCompile_OptimizeCfg takes an instruction list, constructs
4061
 * a CFG, optimizes it, and converts it back to an instruction list.
4062
 */
4063
4064
static PyObject *
4065
cfg_to_instruction_sequence(cfg_builder *g)
4066
0
{
4067
0
    _PyInstructionSequence *seq = (_PyInstructionSequence *)_PyInstructionSequence_New();
4068
0
    if (seq == NULL) {
4069
0
        return NULL;
4070
0
    }
4071
0
    if (_PyCfg_ToInstructionSequence(g, seq) < 0) {
4072
0
        PyInstructionSequence_Fini(seq);
4073
0
        return NULL;
4074
0
    }
4075
0
    return (PyObject*)seq;
4076
0
}
4077
4078
PyObject *
4079
_PyCompile_OptimizeCfg(PyObject *seq, PyObject *consts, int nlocals)
4080
0
{
4081
0
    if (!_PyInstructionSequence_Check(seq)) {
4082
0
        PyErr_SetString(PyExc_ValueError, "expected an instruction sequence");
4083
0
        return NULL;
4084
0
    }
4085
0
    PyObject *const_cache = PyDict_New();
4086
0
    if (const_cache == NULL) {
4087
0
        return NULL;
4088
0
    }
4089
4090
0
    PyObject *res = NULL;
4091
0
    cfg_builder *g = _PyCfg_FromInstructionSequence((_PyInstructionSequence*)seq);
4092
0
    if (g == NULL) {
4093
0
        goto error;
4094
0
    }
4095
0
    int nparams = 0, firstlineno = 1;
4096
0
    if (_PyCfg_OptimizeCodeUnit(g, consts, const_cache, nlocals,
4097
0
                                nparams, firstlineno) < 0) {
4098
0
        goto error;
4099
0
    }
4100
4101
0
    if (calculate_stackdepth(g) == ERROR) {
4102
0
        goto error;
4103
0
    }
4104
4105
0
    if (optimize_load_fast(g) != SUCCESS) {
4106
0
        goto error;
4107
0
    }
4108
4109
0
    res = cfg_to_instruction_sequence(g);
4110
0
error:
4111
0
    Py_DECREF(const_cache);
4112
0
    _PyCfgBuilder_Free(g);
4113
0
    return res;
4114
0
}