Coverage Report

Created: 2025-07-11 06:59

/src/Python-3.8.3/Python/peephole.c

 Line| Count|Source
    1|      |/* Peephole optimizations for bytecode compiler. */
    2|      |
    3|      |#include "Python.h"
    4|      |
    5|      |#include "Python-ast.h"
    6|      |#include "node.h"
    7|      |#include "ast.h"
    8|      |#include "code.h"
    9|      |#include "symtable.h"
   10|      |#include "opcode.h"
   11|      |#include "wordcode_helpers.h"
   12|      |
   13|   240|#define UNCONDITIONAL_JUMP(op)  (op==JUMP_ABSOLUTE || op==JUMP_FORWARD)
   14|     0|#define CONDITIONAL_JUMP(op) (op==POP_JUMP_IF_FALSE || op==POP_JUMP_IF_TRUE \
   15|     0|    || op==JUMP_IF_FALSE_OR_POP || op==JUMP_IF_TRUE_OR_POP)
   16|   214|#define ABSOLUTE_JUMP(op) (op==JUMP_ABSOLUTE \
   17|   214|    || op==POP_JUMP_IF_FALSE || op==POP_JUMP_IF_TRUE \
   18|   214|    || op==JUMP_IF_FALSE_OR_POP || op==JUMP_IF_TRUE_OR_POP)
   19|     0|#define JUMPS_ON_TRUE(op) (op==POP_JUMP_IF_TRUE || op==JUMP_IF_TRUE_OR_POP)
   20|   214|#define GETJUMPTGT(arr, i) (get_arg(arr, i) / sizeof(_Py_CODEUNIT) + \
   21|   214|        (ABSOLUTE_JUMP(_Py_OPCODE(arr[i])) ? 0 : i+1))
   22|      |#define ISBASICBLOCK(blocks, start, end) \
   23|    60|    (blocks[start]==blocks[end])
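Note on units: these macros work on 2-byte wordcode units (_Py_CODEUNIT), while jump opargs are encoded as byte offsets, which is why GETJUMPTGT divides by sizeof(_Py_CODEUNIT) to turn a byte offset into an instruction index. A minimal sketch to observe this from Python, assuming a CPython 3.8 interpreter:

    import dis

    def f(x):
        if x:              # POP_JUMP_IF_FALSE carries a byte offset
            x = 1
        return x

    for ins in dis.get_instructions(f):
        # ins.offset is in bytes; dividing by 2 (sizeof(_Py_CODEUNIT))
        # gives the instruction index this file works with.
        print(ins.offset, ins.opname, ins.arg)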
   24|      |
   25|      |
   26|      |/* Scans back N consecutive LOAD_CONST instructions, skipping NOPs,
   27|      |   returns index of the Nth last's LOAD_CONST's EXTENDED_ARG prefix.
   28|      |   Callers are responsible to check CONST_STACK_LEN beforehand.
   29|      |*/
   30|      |static Py_ssize_t
   31|      |lastn_const_start(const _Py_CODEUNIT *codestr, Py_ssize_t i, Py_ssize_t n)
   32|     0|{
   33|     0|    assert(n > 0);
   34|     0|    for (;;) {
   35|     0|        i--;
   36|     0|        assert(i >= 0);
   37|     0|        if (_Py_OPCODE(codestr[i]) == LOAD_CONST) {
   38|     0|            if (!--n) {
   39|     0|                while (i > 0 && _Py_OPCODE(codestr[i-1]) == EXTENDED_ARG) {
   40|     0|                    i--;
   41|     0|                }
   42|     0|                return i;
   43|     0|            }
   44|     0|        }
   45|     0|        else {
   46|     0|            assert(_Py_OPCODE(codestr[i]) == EXTENDED_ARG);
   47|     0|        }
   48|     0|    }
   49|     0|}
   50|      |
   51|      |/* Scans through EXTENDED ARGs, seeking the index of the effective opcode */
   52|      |static Py_ssize_t
   53|      |find_op(const _Py_CODEUNIT *codestr, Py_ssize_t codelen, Py_ssize_t i)
   54|   106|{
   55|   106|    while (i < codelen && _Py_OPCODE(codestr[i]) == EXTENDED_ARG) {
   56|     0|        i++;
   57|     0|    }
   58|   106|    return i;
   59|   106|}
   60|      |
   61|      |/* Given the index of the effective opcode,
   62|      |   scan back to construct the oparg with EXTENDED_ARG */
   63|      |static unsigned int
   64|      |get_arg(const _Py_CODEUNIT *codestr, Py_ssize_t i)
   65|   228|{
   66|   228|    _Py_CODEUNIT word;
   67|   228|    unsigned int oparg = _Py_OPARG(codestr[i]);
   68|   228|    if (i >= 1 && _Py_OPCODE(word = codestr[i-1]) == EXTENDED_ARG) {
   69|     0|        oparg |= _Py_OPARG(word) << 8;
   70|     0|        if (i >= 2 && _Py_OPCODE(word = codestr[i-2]) == EXTENDED_ARG) {
   71|     0|            oparg |= _Py_OPARG(word) << 16;
   72|     0|            if (i >= 3 && _Py_OPCODE(word = codestr[i-3]) == EXTENDED_ARG) {
   73|     0|                oparg |= _Py_OPARG(word) << 24;
   74|     0|            }
   75|     0|        }
   76|     0|    }
   77|   228|    return oparg;
   78|   228|}
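get_arg reassembles an oparg from up to three EXTENDED_ARG prefixes, each contributing 8 more bits. A sketch that forces EXTENDED_ARG to appear, assuming CPython 3.8 (any code object with more than 256 constants will do):

    import dis

    # Build a function with over 256 distinct constants so that later
    # LOAD_CONSTs need an oparg > 255, i.e. an EXTENDED_ARG prefix.
    src = "def f():\n" + "".join(f"    v{i} = {i}.5\n" for i in range(300))
    ns = {}
    exec(src, ns)
    dis.dis(ns["f"])   # later lines show EXTENDED_ARG before LOAD_CONST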
   79|      |
   80|      |/* Fill the region with NOPs. */
   81|      |static void
   82|      |fill_nops(_Py_CODEUNIT *codestr, Py_ssize_t start, Py_ssize_t end)
   83|    16|{
   84|    16|    memset(codestr + start, NOP, (end - start) * sizeof(_Py_CODEUNIT));
   85|    16|}
   86|      |
   87|      |/* Given the index of the effective opcode,
   88|      |   attempt to replace the argument, taking into account EXTENDED_ARG.
   89|      |   Returns -1 on failure, or the new op index on success */
   90|      |static Py_ssize_t
   91|      |set_arg(_Py_CODEUNIT *codestr, Py_ssize_t i, unsigned int oparg)
   92|     0|{
   93|     0|    unsigned int curarg = get_arg(codestr, i);
   94|     0|    int curilen, newilen;
   95|     0|    if (curarg == oparg)
   96|     0|        return i;
   97|     0|    curilen = instrsize(curarg);
   98|     0|    newilen = instrsize(oparg);
   99|     0|    if (curilen < newilen) {
  100|     0|        return -1;
  101|     0|    }
  102|      |
  103|     0|    write_op_arg(codestr + i + 1 - curilen, _Py_OPCODE(codestr[i]), oparg, newilen);
  104|     0|    fill_nops(codestr, i + 1 - curilen + newilen, i + 1);
  105|     0|    return i-curilen+newilen;
  106|     0|}
  107|      |
  108|      |/* Attempt to write op/arg at end of specified region of memory.
  109|      |   Preceding memory in the region is overwritten with NOPs.
  110|      |   Returns -1 on failure, op index on success */
  111|      |static Py_ssize_t
  112|      |copy_op_arg(_Py_CODEUNIT *codestr, Py_ssize_t i, unsigned char op,
  113|      |            unsigned int oparg, Py_ssize_t maxi)
  114|    10|{
  115|    10|    int ilen = instrsize(oparg);
  116|    10|    if (i + ilen > maxi) {
  117|     0|        return -1;
  118|     0|    }
  119|    10|    write_op_arg(codestr + maxi - ilen, op, oparg, ilen);
  120|    10|    fill_nops(codestr, i, maxi - ilen);
  121|    10|    return maxi - 1;
  122|    10|}
  123|      |
  124|      |/* Replace LOAD_CONST c1, LOAD_CONST c2 ... LOAD_CONST cn, BUILD_TUPLE n
  125|      |   with    LOAD_CONST (c1, c2, ... cn).
  126|      |   The consts table must still be in list form so that the
  127|      |   new constant (c1, c2, ... cn) can be appended.
  128|      |   Called with codestr pointing to the first LOAD_CONST.
  129|      |*/
  130|      |static Py_ssize_t
  131|      |fold_tuple_on_constants(_Py_CODEUNIT *codestr, Py_ssize_t codelen,
  132|      |                        Py_ssize_t c_start, Py_ssize_t opcode_end,
  133|      |                        PyObject *consts, int n)
  134|     0|{
  135|      |    /* Pre-conditions */
  136|     0|    assert(PyList_CheckExact(consts));
  137|      |
  138|      |    /* Buildup new tuple of constants */
  139|     0|    PyObject *newconst = PyTuple_New(n);
  140|     0|    if (newconst == NULL) {
  141|     0|        return -1;
  142|     0|    }
  143|      |
  144|     0|    for (Py_ssize_t i = 0, pos = c_start; i < n; i++, pos++) {
  145|     0|        assert(pos < opcode_end);
  146|     0|        pos = find_op(codestr, codelen, pos);
  147|     0|        assert(_Py_OPCODE(codestr[pos]) == LOAD_CONST);
  148|      |
  149|     0|        unsigned int arg = get_arg(codestr, pos);
  150|     0|        PyObject *constant = PyList_GET_ITEM(consts, arg);
  151|     0|        Py_INCREF(constant);
  152|     0|        PyTuple_SET_ITEM(newconst, i, constant);
  153|     0|    }
  154|      |
  155|     0|    Py_ssize_t index = PyList_GET_SIZE(consts);
  156|     0|#if SIZEOF_SIZE_T > SIZEOF_INT
  157|     0|    if ((size_t)index >= UINT_MAX - 1) {
  158|     0|        Py_DECREF(newconst);
  159|     0|        PyErr_SetString(PyExc_OverflowError, "too many constants");
  160|     0|        return -1;
  161|     0|    }
  162|     0|#endif
  163|      |
  164|      |    /* Append folded constant onto consts */
  165|     0|    if (PyList_Append(consts, newconst)) {
  166|     0|        Py_DECREF(newconst);
  167|     0|        return -1;
  168|     0|    }
  169|     0|    Py_DECREF(newconst);
  170|      |
  171|     0|    return copy_op_arg(codestr, c_start, LOAD_CONST,
  172|     0|                       (unsigned int)index, opcode_end);
  173|     0|}
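The zero counts above are consistent with constant tuples usually being folded earlier, by the AST optimizer (since 3.7), before the peephole ever sees them. Either way, the folded result is easy to observe; a sketch, assuming CPython 3.8:

    import dis

    dis.dis(compile("t = (1, 2, 3)", "<demo>", "exec"))
    # Expected shape: one LOAD_CONST of the tuple (1, 2, 3) and a
    # STORE_NAME, instead of three LOAD_CONSTs plus BUILD_TUPLE 3.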
  174|      |
  175|      |static unsigned int *
  176|      |markblocks(_Py_CODEUNIT *code, Py_ssize_t len)
  177|    22|{
  178|    22|    unsigned int *blocks = PyMem_New(unsigned int, len);
  179|    22|    int i, j, opcode, blockcnt = 0;
  180|      |
  181|    22|    if (blocks == NULL) {
  182|     0|        PyErr_NoMemory();
  183|     0|        return NULL;
  184|     0|    }
  185|    22|    memset(blocks, 0, len*sizeof(int));
  186|      |
  187|      |    /* Mark labels in the first pass */
  188| 1.10k|    for (i = 0; i < len; i++) {
  189| 1.08k|        opcode = _Py_OPCODE(code[i]);
  190| 1.08k|        switch (opcode) {
  191|     8|            case FOR_ITER:
  192|    50|            case JUMP_FORWARD:
  193|    50|            case JUMP_IF_FALSE_OR_POP:
  194|    50|            case JUMP_IF_TRUE_OR_POP:
  195|    78|            case POP_JUMP_IF_FALSE:
  196|    84|            case POP_JUMP_IF_TRUE:
  197|    92|            case JUMP_ABSOLUTE:
  198|   124|            case SETUP_FINALLY:
  199|   124|            case SETUP_WITH:
  200|   124|            case SETUP_ASYNC_WITH:
  201|   124|            case CALL_FINALLY:
  202|   124|                j = GETJUMPTGT(code, i);
  203|   124|                assert(j < len);
  204|   124|                blocks[j] = 1;
  205|   124|                break;
  206| 1.08k|        }
  207| 1.08k|    }
  208|      |    /* Build block numbers in the second pass */
  209| 1.10k|    for (i = 0; i < len; i++) {
  210| 1.08k|        blockcnt += blocks[i];          /* increment blockcnt over labels */
  211| 1.08k|        blocks[i] = blockcnt;
  212| 1.08k|    }
  213|    22|    return blocks;
  214|    22|}
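markblocks marks every jump target in a first pass, then turns the marks into cumulative block numbers, so ISBASICBLOCK(blocks, start, end) is a cheap "no label in between" test. The targets it marks are the positions dis flags with ">>"; a sketch, assuming CPython 3.8:

    import dis

    def f(x):
        if x:
            return 1
        return 2

    dis.dis(f)   # ">>" marks the labels recorded here; instructions
                 # between two marks share one block number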
  215|      |
  216|      |/* Perform basic peephole optimizations to components of a code object.
  217|      |   The consts object should still be in list form to allow new constants
  218|      |   to be appended.
  219|      |
  220|      |   To keep the optimizer simple, it bails when the lineno table has complex
  221|      |   encoding for gaps >= 255.
  222|      |
  223|      |   Optimizations are restricted to simple transformations occurring within a
  224|      |   single basic block.  All transformations keep the code size the same or
  225|      |   smaller.  For those that reduce size, the gaps are initially filled with
  226|      |   NOPs.  Later those NOPs are removed and the jump addresses retargeted in
  227|      |   a single pass. */
  228|      |
  229|      |PyObject *
  230|      |PyCode_Optimize(PyObject *code, PyObject* consts, PyObject *names,
  231|      |                PyObject *lnotab_obj)
  232|    22|{
  233|    22|    Py_ssize_t h, i, nexti, op_start, tgt;
  234|    22|    unsigned int j, nops;
  235|    22|    unsigned char opcode, nextop;
  236|    22|    _Py_CODEUNIT *codestr = NULL;
  237|    22|    unsigned char *lnotab;
  238|    22|    unsigned int cum_orig_offset, last_offset;
  239|    22|    Py_ssize_t tabsiz;
  240|      |    // Count runs of consecutive LOAD_CONSTs
  241|    22|    unsigned int cumlc = 0, lastlc = 0;
  242|    22|    unsigned int *blocks = NULL;
  243|      |
  244|      |    /* Bail out if an exception is set */
  245|    22|    if (PyErr_Occurred())
  246|     0|        goto exitError;
  247|      |
  248|      |    /* Bypass optimization when the lnotab table is too complex */
  249|    22|    assert(PyBytes_Check(lnotab_obj));
  250|    22|    lnotab = (unsigned char*)PyBytes_AS_STRING(lnotab_obj);
  251|    22|    tabsiz = PyBytes_GET_SIZE(lnotab_obj);
  252|    22|    assert(tabsiz == 0 || Py_REFCNT(lnotab_obj) == 1);
  253|      |
  254|      |    /* Don't optimize if lnotab contains instruction pointer delta larger
  255|      |       than +255 (encoded as multiple bytes), just to keep the peephole optimizer
  256|      |       simple. The optimizer leaves line number deltas unchanged. */
  257|      |
  258|   176|    for (i = 0; i < tabsiz; i += 2) {
  259|   154|        if (lnotab[i] == 255) {
  260|     0|            goto exitUnchanged;
  261|     0|        }
  262|   154|    }
  263|      |
  264|    22|    assert(PyBytes_Check(code));
  265|    22|    Py_ssize_t codesize = PyBytes_GET_SIZE(code);
  266|    22|    assert(codesize % sizeof(_Py_CODEUNIT) == 0);
  267|    22|    Py_ssize_t codelen = codesize / sizeof(_Py_CODEUNIT);
  268|    22|    if (codelen > INT_MAX) {
  269|      |        /* Python assembler is limited to INT_MAX: see assembler.a_offset in
  270|      |           compile.c. */
  271|     0|        goto exitUnchanged;
  272|     0|    }
  273|      |
  274|      |    /* Make a modifiable copy of the code string */
  275|    22|    codestr = (_Py_CODEUNIT *)PyMem_Malloc(codesize);
  276|    22|    if (codestr == NULL) {
  277|     0|        PyErr_NoMemory();
  278|     0|        goto exitError;
  279|     0|    }
  280|    22|    memcpy(codestr, PyBytes_AS_STRING(code), codesize);
  281|      |
  282|    22|    blocks = markblocks(codestr, codelen);
  283|    22|    if (blocks == NULL)
  284|     0|        goto exitError;
  285|    22|    assert(PyList_Check(consts));
  286|      |
  287| 1.09k|    for (i=find_op(codestr, codelen, 0) ; i<codelen ; i=nexti) {
  288| 1.07k|        opcode = _Py_OPCODE(codestr[i]);
  289| 1.07k|        op_start = i;
  290| 1.07k|        while (op_start >= 1 && _Py_OPCODE(codestr[op_start-1]) == EXTENDED_ARG) {
  291|     0|            op_start--;
  292|     0|        }
  293|      |
  294| 1.07k|        nexti = i + 1;
  295| 1.07k|        while (nexti < codelen && _Py_OPCODE(codestr[nexti]) == EXTENDED_ARG)
  296|     0|            nexti++;
  297| 1.07k|        nextop = nexti < codelen ? _Py_OPCODE(codestr[nexti]) : 0;
  298|      |
  299| 1.07k|        lastlc = cumlc;
  300| 1.07k|        cumlc = 0;
  301|      |
  302| 1.07k|        switch (opcode) {
  303|      |                /* Skip over LOAD_CONST trueconst
  304|      |                   POP_JUMP_IF_FALSE xx.  This improves
  305|      |                   "while 1" performance.  */
  306|   106|            case LOAD_CONST:
  307|   106|                cumlc = lastlc + 1;
  308|   106|                if (nextop != POP_JUMP_IF_FALSE  ||
  309|   106|                    !ISBASICBLOCK(blocks, op_start, i + 1)) {
  310|   106|                    break;
  311|   106|                }
  312|     0|                PyObject* cnt = PyList_GET_ITEM(consts, get_arg(codestr, i));
  313|     0|                int is_true = PyObject_IsTrue(cnt);
  314|     0|                if (is_true == -1) {
  315|     0|                    goto exitError;
  316|     0|                }
  317|     0|                if (is_true == 1) {
  318|     0|                    fill_nops(codestr, op_start, nexti + 1);
  319|     0|                    cumlc = 0;
  320|     0|                }
  321|     0|                break;
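This is the case that turns "while 1"/"while True" loops into a bare backward jump; the zero counts only mean this particular coverage run never exercised it. A sketch, assuming CPython 3.8:

    import dis

    def f():
        while True:    # LOAD_CONST True / POP_JUMP_IF_FALSE gets NOPped
            pass

    dis.dis(f)   # expected on 3.8: the loop is a lone JUMP_ABSOLUTE 0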
  322|      |
  323|      |                /* Try to fold tuples of constants.
  324|      |                   Skip over BUILD_SEQN 1 UNPACK_SEQN 1.
  325|      |                   Replace BUILD_SEQN 2 UNPACK_SEQN 2 with ROT2.
  326|      |                   Replace BUILD_SEQN 3 UNPACK_SEQN 3 with ROT3 ROT2. */
  327|    12|            case BUILD_TUPLE:
  328|    12|                j = get_arg(codestr, i);
  329|    12|                if (j > 0 && lastlc >= j) {
  330|     0|                    h = lastn_const_start(codestr, op_start, j);
  331|     0|                    if (ISBASICBLOCK(blocks, h, op_start)) {
  332|     0|                        h = fold_tuple_on_constants(codestr, codelen,
  333|     0|                                                    h, i+1, consts, j);
  334|     0|                        break;
  335|     0|                    }
  336|     0|                }
  337|    12|                if (nextop != UNPACK_SEQUENCE  ||
  338|    12|                    !ISBASICBLOCK(blocks, op_start, i + 1) ||
  339|    12|                    j != get_arg(codestr, nexti))
  340|    10|                    break;
  341|     2|                if (j < 2) {
  342|     0|                    fill_nops(codestr, op_start, nexti + 1);
  343|     2|                } else if (j == 2) {
  344|     2|                    codestr[op_start] = PACKOPARG(ROT_TWO, 0);
  345|     2|                    fill_nops(codestr, op_start + 1, nexti + 1);
  346|     2|                } else if (j == 3) {
  347|     0|                    codestr[op_start] = PACKOPARG(ROT_THREE, 0);
  348|     0|                    codestr[op_start + 1] = PACKOPARG(ROT_TWO, 0);
  349|     0|                    fill_nops(codestr, op_start + 2, nexti + 1);
  350|     0|                }
  351|     2|                break;
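The ROT_TWO branch (the only one executed in this run, count 2) is what lets "a, b = b, a" swap without building a real tuple. A sketch, assuming CPython 3.8:

    import dis

    def swap(a, b):
        a, b = b, a    # BUILD_TUPLE 2 / UNPACK_SEQUENCE 2 -> ROT_TWO
        return a, b    # this BUILD_TUPLE 2 survives (no UNPACK follows)

    dis.dis(swap)  # expected: LOAD_FAST, LOAD_FAST, ROT_TWO, two STORE_FASTs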
  352|      |
  353|      |                /* Simplify conditional jump to conditional jump where the
  354|      |                   result of the first test implies the success of a similar
  355|      |                   test or the failure of the opposite test.
  356|      |                   Arises in code like:
  357|      |                   "a and b or c"
  358|      |                   "(a and b) and c"
  359|      |                   "(a or b) or c"
  360|      |                   "(a or b) and c"
  361|      |                   x:JUMP_IF_FALSE_OR_POP y   y:JUMP_IF_FALSE_OR_POP z
  362|      |                      -->  x:JUMP_IF_FALSE_OR_POP z
  363|      |                   x:JUMP_IF_FALSE_OR_POP y   y:JUMP_IF_TRUE_OR_POP z
  364|      |                      -->  x:POP_JUMP_IF_FALSE y+1
  365|      |                   where y+1 is the instruction following the second test.
  366|      |                */
  367|     0|            case JUMP_IF_FALSE_OR_POP:
  368|     0|            case JUMP_IF_TRUE_OR_POP:
  369|     0|                h = get_arg(codestr, i) / sizeof(_Py_CODEUNIT);
  370|     0|                tgt = find_op(codestr, codelen, h);
  371|      |
  372|     0|                j = _Py_OPCODE(codestr[tgt]);
  373|     0|                if (CONDITIONAL_JUMP(j)) {
  374|      |                    /* NOTE: all possible jumps here are absolute. */
  375|     0|                    if (JUMPS_ON_TRUE(j) == JUMPS_ON_TRUE(opcode)) {
  376|      |                        /* The second jump will be taken iff the first is.
  377|      |                           The current opcode inherits its target's
  378|      |                           stack effect */
  379|     0|                        h = set_arg(codestr, i, get_arg(codestr, tgt));
  380|     0|                    } else {
  381|      |                        /* The second jump is not taken if the first is (so
  382|      |                           jump past it), and all conditional jumps pop their
  383|      |                           argument when they're not taken (so change the
  384|      |                           first jump to pop its argument when it's taken). */
  385|     0|                        Py_ssize_t arg = (tgt + 1);
  386|      |                        /* cannot overflow: codelen <= INT_MAX */
  387|     0|                        assert((size_t)arg <= UINT_MAX / sizeof(_Py_CODEUNIT));
  388|     0|                        arg *= sizeof(_Py_CODEUNIT);
  389|     0|                        h = set_arg(codestr, i, (unsigned int)arg);
  390|     0|                        j = opcode == JUMP_IF_TRUE_OR_POP ?
  391|     0|                            POP_JUMP_IF_TRUE : POP_JUMP_IF_FALSE;
  392|     0|                    }
  393|      |
  394|     0|                    if (h >= 0) {
  395|     0|                        nexti = h;
  396|     0|                        codestr[nexti] = PACKOPARG(j, _Py_OPARG(codestr[nexti]));
  397|     0|                        break;
  398|     0|                    }
  399|     0|                }
  400|      |                /* Intentional fallthrough */
  401|      |
  402|      |                /* Replace jumps to unconditional jumps */
  403|    28|            case POP_JUMP_IF_FALSE:
  404|    34|            case POP_JUMP_IF_TRUE:
  405|    72|            case JUMP_FORWARD:
  406|    80|            case JUMP_ABSOLUTE:
  407|    80|                h = GETJUMPTGT(codestr, i);
  408|    80|                tgt = find_op(codestr, codelen, h);
  409|      |                /* Replace JUMP_* to a RETURN into just a RETURN */
  410|    80|                if (UNCONDITIONAL_JUMP(opcode) &&
  411|    80|                    _Py_OPCODE(codestr[tgt]) == RETURN_VALUE) {
  412|     0|                    codestr[op_start] = PACKOPARG(RETURN_VALUE, 0);
  413|     0|                    fill_nops(codestr, op_start + 1, i + 1);
  414|    80|                } else if (UNCONDITIONAL_JUMP(_Py_OPCODE(codestr[tgt]))) {
  415|    10|                    size_t arg = GETJUMPTGT(codestr, tgt);
  416|    10|                    if (opcode == JUMP_FORWARD) { /* JMP_ABS can go backwards */
  417|    10|                        opcode = JUMP_ABSOLUTE;
  418|    10|                    } else if (!ABSOLUTE_JUMP(opcode)) {
  419|     0|                        if (arg < (size_t)(i + 1)) {
  420|     0|                            break;           /* No backward relative jumps */
  421|     0|                        }
  422|     0|                        arg -= i + 1;          /* Calc relative jump addr */
  423|     0|                    }
  424|      |                    /* cannot overflow: codelen <= INT_MAX */
  425|    10|                    assert(arg <= (UINT_MAX / sizeof(_Py_CODEUNIT)));
  426|    10|                    arg *= sizeof(_Py_CODEUNIT);
  427|    10|                    copy_op_arg(codestr, op_start, opcode,
  428|    10|                                (unsigned int)arg, i + 1);
  429|    10|                }
  430|    80|                break;
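The branch that did execute here (count 10) retargets a jump whose destination is itself an unconditional jump; a JUMP_FORWARD that lands on a loop's closing JUMP_ABSOLUTE is the typical producer. A sketch, assuming CPython 3.8:

    import dis

    def f(items):
        r = 0
        for x in items:
            if x:
                r = 1   # this branch ends in a JUMP_FORWARD whose target
            else:       # is the loop's closing JUMP_ABSOLUTE; the peephole
                r = 2   # should rewrite it to jump straight to FOR_ITER
        return r

    dis.dis(f)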
  431|      |
  432|      |                /* Remove unreachable ops after RETURN */
  433|    80|            case RETURN_VALUE:
  434|    26|                h = i + 1;
  435|      |                /* END_FINALLY should be kept since it denotes the end of
  436|      |                   the 'finally' block in frame_setlineno() in frameobject.c.
  437|      |                   SETUP_FINALLY should be kept for balancing.
  438|      |                 */
  439|    34|                while (h < codelen && ISBASICBLOCK(blocks, i, h) &&
  440|    34|                       _Py_OPCODE(codestr[h]) != END_FINALLY)
  441|     8|                {
  442|     8|                    if (_Py_OPCODE(codestr[h]) == SETUP_FINALLY) {
  443|     0|                        while (h > i + 1 &&
  444|     0|                               _Py_OPCODE(codestr[h - 1]) == EXTENDED_ARG)
  445|     0|                        {
  446|     0|                            h--;
  447|     0|                        }
  448|     0|                        break;
  449|     0|                    }
  450|     8|                    h++;
  451|     8|                }
  452|    26|                if (h > i + 1) {
  453|     4|                    fill_nops(codestr, i + 1, h);
  454|     4|                    nexti = find_op(codestr, codelen, h);
  455|     4|                }
  456|    26|                break;
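A sketch of the dead-code removal (it fired 4 times in this run), assuming CPython 3.8:

    import dis

    def f():
        return 1
        x = 2   # unreachable: NOPped here, then dropped by the final pass

    dis.dis(f)  # expected on 3.8: only LOAD_CONST 1 and RETURN_VALUE remain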
  457| 1.07k|        }
  458| 1.07k|    }
  459|      |
  460|      |    /* Fixup lnotab */
  461| 1.10k|    for (i = 0, nops = 0; i < codelen; i++) {
  462| 1.08k|        size_t block = (size_t)i - nops;
  463|      |        /* cannot overflow: codelen <= INT_MAX */
  464| 1.08k|        assert(block <= UINT_MAX);
  465|      |        /* original code offset => new code offset */
  466| 1.08k|        blocks[i] = (unsigned int)block;
  467| 1.08k|        if (_Py_OPCODE(codestr[i]) == NOP) {
  468|    10|            nops++;
  469|    10|        }
  470| 1.08k|    }
  471|    22|    cum_orig_offset = 0;
  472|    22|    last_offset = 0;
  473|   176|    for (i=0 ; i < tabsiz ; i+=2) {
  474|   154|        unsigned int offset_delta, new_offset;
  475|   154|        cum_orig_offset += lnotab[i];
  476|   154|        assert(cum_orig_offset % sizeof(_Py_CODEUNIT) == 0);
  477|   154|        new_offset = blocks[cum_orig_offset / sizeof(_Py_CODEUNIT)] *
  478|   154|                sizeof(_Py_CODEUNIT);
  479|   154|        offset_delta = new_offset - last_offset;
  480|   154|        assert(offset_delta <= 255);
  481|   154|        lnotab[i] = (unsigned char)offset_delta;
  482|   154|        last_offset = new_offset;
  483|   154|    }
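The lnotab is a byte string of (bytecode-offset delta, line-number delta) pairs; this loop rewrites only the offset halves, mapping each original offset through blocks[]. The raw pairs can be inspected from Python; a sketch, assuming CPython 3.8 (co_lnotab is deprecated in later releases):

    def f():
        a = 1
        b = 2
        return a + b

    raw = f.__code__.co_lnotab
    print(list(zip(raw[::2], raw[1::2])))
    # expected shape: something like [(4, 1), (4, 1)] -- every 4 bytecode
    # bytes, advance one source line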
  484|      |
  485|      |    /* Remove NOPs and fixup jump targets */
  486| 1.10k|    for (op_start = i = h = 0; i < codelen; i++, op_start = i) {
  487| 1.08k|        j = _Py_OPARG(codestr[i]);
  488| 1.08k|        while (_Py_OPCODE(codestr[i]) == EXTENDED_ARG) {
  489|     0|            i++;
  490|     0|            j = j<<8 | _Py_OPARG(codestr[i]);
  491|     0|        }
  492| 1.08k|        opcode = _Py_OPCODE(codestr[i]);
  493| 1.08k|        switch (opcode) {
  494|    10|            case NOP:continue;
  495|      |
  496|    18|            case JUMP_ABSOLUTE:
  497|    46|            case POP_JUMP_IF_FALSE:
  498|    52|            case POP_JUMP_IF_TRUE:
  499|    52|            case JUMP_IF_FALSE_OR_POP:
  500|    52|            case JUMP_IF_TRUE_OR_POP:
  501|    52|                j = blocks[j / sizeof(_Py_CODEUNIT)] * sizeof(_Py_CODEUNIT);
  502|    52|                break;
  503|      |
  504|     8|            case FOR_ITER:
  505|    36|            case JUMP_FORWARD:
  506|    68|            case SETUP_FINALLY:
  507|    68|            case SETUP_WITH:
  508|    68|            case SETUP_ASYNC_WITH:
  509|    68|            case CALL_FINALLY:
  510|    68|                j = blocks[j / sizeof(_Py_CODEUNIT) + i + 1] - blocks[i] - 1;
  511|    68|                j *= sizeof(_Py_CODEUNIT);
  512|    68|                break;
  513| 1.08k|        }
  514| 1.07k|        Py_ssize_t ilen = i - op_start + 1;
  515| 1.07k|        if (instrsize(j) > ilen) {
  516|     0|            goto exitUnchanged;
  517|     0|        }
  518| 1.07k|        assert(ilen <= INT_MAX);
  519|      |        /* If instrsize(j) < ilen, we'll emit EXTENDED_ARG 0 */
  520| 1.07k|        write_op_arg(codestr + h, opcode, j, (int)ilen);
  521| 1.07k|        h += ilen;
  522| 1.07k|    }
  523|    22|    assert(h + (Py_ssize_t)nops == codelen);
  524|      |
  525|    22|    PyMem_Free(blocks);
  526|    22|    code = PyBytes_FromStringAndSize((char *)codestr, h * sizeof(_Py_CODEUNIT));
  527|    22|    PyMem_Free(codestr);
  528|    22|    return code;
  529|      |
  530|     0| exitError:
  531|     0|    code = NULL;
  532|      |
  533|     0| exitUnchanged:
  534|     0|    Py_XINCREF(code);
  535|     0|    PyMem_Free(blocks);
  536|     0|    PyMem_Free(codestr);
  537|     0|    return code;
  538|     0|}
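For context: PyCode_Optimize has no Python-level entry point; in 3.8 it is called from the compiler's assembler (makecode() in compile.c) once per assembled code object, which matches the entry count of 22 in this run. A sketch of triggering it indirectly, assuming CPython 3.8:

    import dis

    # compile() assembles a code object and runs the peephole pass on it;
    # the nested function's code sits in co_consts.
    code = compile("def g():\n    return 1\n    dead = 2\n", "<demo>", "exec")
    dis.dis(code.co_consts[0])   # g's code, already peephole-optimized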