Coverage Report

Created: 2025-11-24 06:11

/src/cpython/Objects/tupleobject.c
Line | Count | Source
1
/* Tuple object implementation */
2
3
#include "Python.h"
4
#include "pycore_abstract.h"      // _PyIndex_Check()
5
#include "pycore_ceval.h"         // _PyEval_GetBuiltin()
6
#include "pycore_freelist.h"      // _Py_FREELIST_PUSH()
7
#include "pycore_gc.h"            // _PyObject_GC_IS_TRACKED()
8
#include "pycore_list.h"          // _Py_memory_repeat()
9
#include "pycore_modsupport.h"    // _PyArg_NoKwnames()
10
#include "pycore_object.h"        // _PyObject_GC_TRACK()
11
#include "pycore_stackref.h"      // PyStackRef_AsPyObjectSteal()
12
#include "pycore_tuple.h"         // _PyTupleIterObject
13
14
15
/*[clinic input]
16
class tuple "PyTupleObject *" "&PyTuple_Type"
17
[clinic start generated code]*/
18
/*[clinic end generated code: output=da39a3ee5e6b4b0d input=f051ba3cfdf9a189]*/
19
20
#include "clinic/tupleobject.c.h"
21
22
23
static inline int maybe_freelist_push(PyTupleObject *);
24
25
26
/* Allocate an uninitialized tuple object. Before making it public, the following
27
   steps must be done:
28
29
   - Initialize its items.
30
   - Call _PyObject_GC_TRACK() on it.
31
32
   Because the empty tuple is always reused and it's already tracked by GC,
33
   this function must not be called with size == 0 (unless from PyTuple_New()
34
   which wraps this function).
35
*/
36
static PyTupleObject *
37
tuple_alloc(Py_ssize_t size)
38
497M
{
39
497M
    if (size < 0) {
40
0
        PyErr_BadInternalCall();
41
0
        return NULL;
42
0
    }
43
497M
    assert(size != 0);    // The empty tuple is statically allocated.
44
497M
    Py_ssize_t index = size - 1;
45
497M
    if (index < PyTuple_MAXSAVESIZE) {
46
492M
        PyTupleObject *op = _Py_FREELIST_POP(PyTupleObject, tuples[index]);
47
492M
        if (op != NULL) {
48
378M
            _PyTuple_RESET_HASH_CACHE(op);
49
378M
            return op;
50
378M
        }
51
492M
    }
52
    /* Check for overflow */
53
118M
    if ((size_t)size > ((size_t)PY_SSIZE_T_MAX - (sizeof(PyTupleObject) -
54
118M
                sizeof(PyObject *))) / sizeof(PyObject *)) {
55
0
        return (PyTupleObject *)PyErr_NoMemory();
56
0
    }
57
118M
    PyTupleObject *result = PyObject_GC_NewVar(PyTupleObject, &PyTuple_Type, size);
58
118M
    if (result != NULL) {
59
118M
        _PyTuple_RESET_HASH_CACHE(result);
60
118M
    }
61
118M
    return result;
62
118M
}
63
64
// The empty tuple singleton is not tracked by the GC.
65
// It does not contain any Python object.
66
// Note that tuple subclasses have their own empty instances.
67
68
static inline PyObject *
69
tuple_get_empty(void)
70
78.0M
{
71
78.0M
    return (PyObject *)&_Py_SINGLETON(tuple_empty);
72
78.0M
}
73
74
PyObject *
75
PyTuple_New(Py_ssize_t size)
76
59.1M
{
77
59.1M
    PyTupleObject *op;
78
59.1M
    if (size == 0) {
79
605k
        return tuple_get_empty();
80
605k
    }
81
58.5M
    op = tuple_alloc(size);
82
58.5M
    if (op == NULL) {
83
0
        return NULL;
84
0
    }
85
1.43G
    for (Py_ssize_t i = 0; i < size; i++) {
86
1.37G
        op->ob_item[i] = NULL;
87
1.37G
    }
88
58.5M
    _PyObject_GC_TRACK(op);
89
58.5M
    return (PyObject *) op;
90
58.5M
}
91
92
Py_ssize_t
93
PyTuple_Size(PyObject *op)
94
15.7M
{
95
15.7M
    if (!PyTuple_Check(op)) {
96
0
        PyErr_BadInternalCall();
97
0
        return -1;
98
0
    }
99
15.7M
    else
100
15.7M
        return Py_SIZE(op);
101
15.7M
}
102
103
PyObject *
104
PyTuple_GetItem(PyObject *op, Py_ssize_t i)
105
47.9M
{
106
47.9M
    if (!PyTuple_Check(op)) {
107
0
        PyErr_BadInternalCall();
108
0
        return NULL;
109
0
    }
110
47.9M
    if (i < 0 || i >= Py_SIZE(op)) {
111
0
        PyErr_SetString(PyExc_IndexError, "tuple index out of range");
112
0
        return NULL;
113
0
    }
114
47.9M
    return ((PyTupleObject *)op) -> ob_item[i];
115
47.9M
}
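
Illustrative sketch (not part of the measured source): the caller-side pattern for the two accessors above. The helper name print_tuple_items is hypothetical. PyTuple_GetItem returns a borrowed reference, so the caller must not release it.

#include <Python.h>

/* Print every item of a tuple; assumes an initialized interpreter. */
static int
print_tuple_items(PyObject *t)
{
    Py_ssize_t n = PyTuple_Size(t);               /* -1 and an exception on error */
    if (n < 0) {
        return -1;
    }
    for (Py_ssize_t i = 0; i < n; i++) {
        PyObject *item = PyTuple_GetItem(t, i);   /* borrowed reference */
        if (item == NULL) {
            return -1;
        }
        if (PyObject_Print(item, stdout, 0) < 0) {
            return -1;
        }
    }
    return 0;
}
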
116
117
int
118
PyTuple_SetItem(PyObject *op, Py_ssize_t i, PyObject *newitem)
119
96.4k
{
120
96.4k
    PyObject **p;
121
96.4k
    if (!PyTuple_Check(op) || !_PyObject_IsUniquelyReferenced(op)) {
122
0
        Py_XDECREF(newitem);
123
0
        PyErr_BadInternalCall();
124
0
        return -1;
125
0
    }
126
96.4k
    if (i < 0 || i >= Py_SIZE(op)) {
127
0
        Py_XDECREF(newitem);
128
0
        PyErr_SetString(PyExc_IndexError,
129
0
                        "tuple assignment index out of range");
130
0
        return -1;
131
0
    }
132
96.4k
    p = ((PyTupleObject *)op) -> ob_item + i;
133
96.4k
    Py_XSETREF(*p, newitem);
134
96.4k
    return 0;
135
96.4k
}
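
Illustrative sketch (not part of the measured source): how a tuple is normally built through the public API. As the check above shows, PyTuple_SetItem only accepts a tuple that is not yet shared, and it steals the reference to its item. The helper name make_pair is hypothetical.

#include <Python.h>

static PyObject *
make_pair(long a, long b)
{
    PyObject *t = PyTuple_New(2);                 /* items start out as NULL */
    if (t == NULL) {
        return NULL;
    }
    PyObject *first = PyLong_FromLong(a);
    if (first == NULL || PyTuple_SetItem(t, 0, first) < 0) {
        /* On SetItem failure the item reference has already been released. */
        Py_DECREF(t);
        return NULL;
    }
    PyObject *second = PyLong_FromLong(b);
    if (second == NULL || PyTuple_SetItem(t, 1, second) < 0) {
        Py_DECREF(t);
        return NULL;
    }
    return t;
}
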
136
137
void
138
_PyTuple_MaybeUntrack(PyObject *op)
139
96.2M
{
140
96.2M
    PyTupleObject *t;
141
96.2M
    Py_ssize_t i, n;
142
143
96.2M
    if (!PyTuple_CheckExact(op) || !_PyObject_GC_IS_TRACKED(op))
144
0
        return;
145
96.2M
    t = (PyTupleObject *) op;
146
96.2M
    n = Py_SIZE(t);
147
975M
    for (i = 0; i < n; i++) {
148
973M
        PyObject *elt = PyTuple_GET_ITEM(t, i);
149
        /* Tuple with NULL elements aren't
150
           fully constructed, don't untrack
151
           them yet. */
152
973M
        if (!elt ||
153
973M
            _PyObject_GC_MAY_BE_TRACKED(elt))
154
93.6M
            return;
155
973M
    }
156
2.53M
    _PyObject_GC_UNTRACK(op);
157
2.53M
}
158
159
/* Fast, but conservative check of whether an object may be tracked.
160
   May return true for an object that is not tracked;
161
   Will always return true for an object that is tracked.
162
   This is a temporary workaround until _PyObject_GC_IS_TRACKED
163
   becomes fast and safe to call on non-GC objects.
164
*/
165
static bool
166
maybe_tracked(PyObject *ob)
167
697M
{
168
697M
    return _PyType_IS_GC(Py_TYPE(ob));
169
697M
}
170
171
PyObject *
172
PyTuple_Pack(Py_ssize_t n, ...)
173
9.54M
{
174
9.54M
    Py_ssize_t i;
175
9.54M
    PyObject *o;
176
9.54M
    PyObject **items;
177
9.54M
    va_list vargs;
178
9.54M
    bool track = false;
179
180
9.54M
    if (n == 0) {
181
0
        return tuple_get_empty();
182
0
    }
183
184
9.54M
    va_start(vargs, n);
185
9.54M
    PyTupleObject *result = tuple_alloc(n);
186
9.54M
    if (result == NULL) {
187
0
        va_end(vargs);
188
0
        return NULL;
189
0
    }
190
9.54M
    items = result->ob_item;
191
29.2M
    for (i = 0; i < n; i++) {
192
19.6M
        o = va_arg(vargs, PyObject *);
193
19.6M
        if (!track && maybe_tracked(o)) {
194
684k
            track = true;
195
684k
        }
196
19.6M
        items[i] = Py_NewRef(o);
197
19.6M
    }
198
9.54M
    va_end(vargs);
199
9.54M
    if (track) {
200
684k
        _PyObject_GC_TRACK(result);
201
684k
    }
202
9.54M
    return (PyObject *)result;
203
9.54M
}
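
Illustrative sketch (not part of the measured source): PyTuple_Pack is the C-level spelling of a tuple display such as (x, y). Unlike PyTuple_SetItem it does not steal references; the arguments keep their own references and the new tuple takes additional ones. The helper name pack_pair is hypothetical.

#include <Python.h>

static PyObject *
pack_pair(PyObject *x, PyObject *y)
{
    /* Equivalent to the Python expression (x, y). */
    return PyTuple_Pack(2, x, y);
}
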
204
205
206
/* Methods */
207
208
static void
209
tuple_dealloc(PyObject *self)
210
498M
{
211
498M
    PyTupleObject *op = _PyTuple_CAST(self);
212
498M
    if (Py_SIZE(op) == 0) {
213
        /* The empty tuple is statically allocated. */
214
0
        if (op == &_Py_SINGLETON(tuple_empty)) {
215
#ifdef Py_DEBUG
216
            _Py_FatalRefcountError("deallocating the empty tuple singleton");
217
#else
218
0
            return;
219
0
#endif
220
0
        }
221
#ifdef Py_DEBUG
222
        /* tuple subclasses have their own empty instances. */
223
        assert(!PyTuple_CheckExact(op));
224
#endif
225
0
    }
226
227
498M
    PyObject_GC_UnTrack(op);
228
229
498M
    Py_ssize_t i = Py_SIZE(op);
230
6.07G
    while (--i >= 0) {
231
5.58G
        Py_XDECREF(op->ob_item[i]);
232
5.58G
    }
233
    // This will abort on the empty singleton (if there is one).
234
498M
    if (!maybe_freelist_push(op)) {
235
119M
        Py_TYPE(op)->tp_free((PyObject *)op);
236
119M
    }
237
498M
}
238
239
static PyObject *
240
tuple_repr(PyObject *self)
241
24
{
242
24
    PyTupleObject *v = _PyTuple_CAST(self);
243
24
    Py_ssize_t n = PyTuple_GET_SIZE(v);
244
24
    if (n == 0) {
245
0
        return PyUnicode_FromString("()");
246
0
    }
247
248
    /* While not mutable, it is still possible to end up with a cycle in a
249
       tuple through an object that stores itself within a tuple (and thus
250
       infinitely asks for the repr of itself). This should only be
251
       possible within a type. */
252
24
    int res = Py_ReprEnter((PyObject *)v);
253
24
    if (res != 0) {
254
0
        return res > 0 ? PyUnicode_FromString("(...)") : NULL;
255
0
    }
256
257
24
    Py_ssize_t prealloc;
258
24
    if (n > 1) {
259
        // "(" + "1" + ", 2" * (len - 1) + ")"
260
13
        prealloc = 1 + 1 + (2 + 1) * (n - 1) + 1;
261
13
    }
262
11
    else {
263
        // "(1,)"
264
11
        prealloc = 4;
265
11
    }
266
24
    PyUnicodeWriter *writer = PyUnicodeWriter_Create(prealloc);
267
24
    if (writer == NULL) {
268
0
        goto error;
269
0
    }
270
271
24
    if (PyUnicodeWriter_WriteChar(writer, '(') < 0) {
272
0
        goto error;
273
0
    }
274
275
    /* Do repr() on each element. */
276
935
    for (Py_ssize_t i = 0; i < n; ++i) {
277
911
        if (i > 0) {
278
887
            if (PyUnicodeWriter_WriteChar(writer, ',') < 0) {
279
0
                goto error;
280
0
            }
281
887
            if (PyUnicodeWriter_WriteChar(writer, ' ') < 0) {
282
0
                goto error;
283
0
            }
284
887
        }
285
286
911
        if (PyUnicodeWriter_WriteRepr(writer, v->ob_item[i]) < 0) {
287
0
            goto error;
288
0
        }
289
911
    }
290
291
24
    if (n == 1) {
292
11
        if (PyUnicodeWriter_WriteChar(writer, ',') < 0) {
293
0
            goto error;
294
0
        }
295
11
    }
296
24
    if (PyUnicodeWriter_WriteChar(writer, ')') < 0) {
297
0
        goto error;
298
0
    }
299
300
24
    Py_ReprLeave((PyObject *)v);
301
24
    return PyUnicodeWriter_Finish(writer);
302
303
0
error:
304
0
    PyUnicodeWriter_Discard(writer);
305
0
    Py_ReprLeave((PyObject *)v);
306
0
    return NULL;
307
24
}
308
309
310
/* Hash for tuples. This is a slightly simplified version of the xxHash
311
   non-cryptographic hash:
312
   - we do not use any parallelism, there is only 1 accumulator.
313
   - we drop the final mixing since this is just a permutation of the
314
     output space: it does not help against collisions.
315
   - at the end, we mangle the length with a single constant.
316
   For the xxHash specification, see
317
   https://github.com/Cyan4973/xxHash/blob/master/doc/xxhash_spec.md
318
319
   The constants for the hash function are defined in pycore_tuple.h.
320
*/
321
322
static Py_hash_t
323
tuple_hash(PyObject *op)
324
19.7M
{
325
19.7M
    PyTupleObject *v = _PyTuple_CAST(op);
326
327
19.7M
    Py_uhash_t acc = FT_ATOMIC_LOAD_SSIZE_RELAXED(v->ob_hash);
328
19.7M
    if (acc != (Py_uhash_t)-1) {
329
316k
        return acc;
330
316k
    }
331
332
19.3M
    Py_ssize_t len = Py_SIZE(v);
333
19.3M
    PyObject **item = v->ob_item;
334
19.3M
    acc = _PyTuple_HASH_XXPRIME_5;
335
1.03G
    for (Py_ssize_t i = 0; i < len; i++) {
336
1.01G
        Py_uhash_t lane = PyObject_Hash(item[i]);
337
1.01G
        if (lane == (Py_uhash_t)-1) {
338
0
            return -1;
339
0
        }
340
1.01G
        acc += lane * _PyTuple_HASH_XXPRIME_2;
341
1.01G
        acc = _PyTuple_HASH_XXROTATE(acc);
342
1.01G
        acc *= _PyTuple_HASH_XXPRIME_1;
343
1.01G
    }
344
345
    /* Add input length, mangled to keep the historical value of hash(()). */
346
19.3M
    acc += len ^ (_PyTuple_HASH_XXPRIME_5 ^ 3527539UL);
347
348
19.3M
    if (acc == (Py_uhash_t)-1) {
349
0
        acc = 1546275796;
350
0
    }
351
352
19.3M
    FT_ATOMIC_STORE_SSIZE_RELAXED(v->ob_hash, acc);
353
354
19.3M
    return acc;
355
19.3M
}
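
Illustrative sketch (not part of the measured source): the same hashing scheme written without the CPython macros, for readability. The constants assume a 64-bit Py_uhash_t (the xxHash64 primes and a left rotation by 31); pycore_tuple.h selects different values on 32-bit builds, so treat the numbers as an assumption, not the canonical definition.

#include <stdint.h>
#include <stddef.h>

static uint64_t
tuple_hash_sketch(const uint64_t *lanes, size_t len)
{
    const uint64_t P1 = 11400714785074694791ULL;
    const uint64_t P2 = 14029467366897019727ULL;
    const uint64_t P5 = 2870177450012600261ULL;

    uint64_t acc = P5;
    for (size_t i = 0; i < len; i++) {
        acc += lanes[i] * P2;                 /* mix in one item's hash */
        acc = (acc << 31) | (acc >> 33);      /* rotate left by 31 */
        acc *= P1;
    }
    /* Mangle in the length so that hash(()) keeps its historical value. */
    acc += (uint64_t)len ^ (P5 ^ 3527539UL);
    if (acc == (uint64_t)-1) {
        acc = 1546275796;                     /* -1 is reserved for errors */
    }
    return acc;
}
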
356
357
static Py_ssize_t
358
tuple_length(PyObject *self)
359
5.34M
{
360
5.34M
    PyTupleObject *a = _PyTuple_CAST(self);
361
5.34M
    return Py_SIZE(a);
362
5.34M
}
363
364
static int
365
tuple_contains(PyObject *self, PyObject *el)
366
14.8M
{
367
14.8M
    PyTupleObject *a = _PyTuple_CAST(self);
368
14.8M
    int cmp = 0;
369
48.7M
    for (Py_ssize_t i = 0; cmp == 0 && i < Py_SIZE(a); ++i) {
370
33.9M
        cmp = PyObject_RichCompareBool(PyTuple_GET_ITEM(a, i), el, Py_EQ);
371
33.9M
    }
372
14.8M
    return cmp;
373
14.8M
}
374
375
static PyObject *
376
tuple_item(PyObject *op, Py_ssize_t i)
377
18.8M
{
378
18.8M
    PyTupleObject *a = _PyTuple_CAST(op);
379
18.8M
    if (i < 0 || i >= Py_SIZE(a)) {
380
28
        PyErr_SetString(PyExc_IndexError, "tuple index out of range");
381
28
        return NULL;
382
28
    }
383
18.8M
    return Py_NewRef(a->ob_item[i]);
384
18.8M
}
385
386
PyObject *
387
PyTuple_FromArray(PyObject *const *src, Py_ssize_t n)
388
264M
{
389
264M
    if (n == 0) {
390
77.4M
        return tuple_get_empty();
391
77.4M
    }
392
393
187M
    PyTupleObject *tuple = tuple_alloc(n);
394
187M
    if (tuple == NULL) {
395
0
        return NULL;
396
0
    }
397
187M
    PyObject **dst = tuple->ob_item;
398
187M
    bool track = false;
399
533M
    for (Py_ssize_t i = 0; i < n; i++) {
400
346M
        PyObject *item = src[i];
401
346M
        if (!track && maybe_tracked(item)) {
402
51.1M
            track = true;
403
51.1M
        }
404
346M
        dst[i] = Py_NewRef(item);
405
346M
    }
406
187M
    if (track) {
407
51.1M
        _PyObject_GC_TRACK(tuple);
408
51.1M
    }
409
187M
    return (PyObject *)tuple;
410
187M
}
411
412
PyObject *
413
_PyTuple_FromStackRefStealOnSuccess(const _PyStackRef *src, Py_ssize_t n)
414
237M
{
415
237M
    if (n == 0) {
416
0
        return tuple_get_empty();
417
0
    }
418
237M
    PyTupleObject *tuple = tuple_alloc(n);
419
237M
    if (tuple == NULL) {
420
0
        return NULL;
421
0
    }
422
237M
    PyObject **dst = tuple->ob_item;
423
237M
    bool track = false;
424
738M
    for (Py_ssize_t i = 0; i < n; i++) {
425
501M
        PyObject *item = PyStackRef_AsPyObjectSteal(src[i]);
426
501M
        if (!track && maybe_tracked(item)) {
427
147M
            track = true;
428
147M
        }
429
501M
        dst[i] = item;
430
501M
    }
431
237M
    if (track) {
432
147M
        _PyObject_GC_TRACK(tuple);
433
147M
    }
434
237M
    return (PyObject *)tuple;
435
237M
}
436
437
PyObject *
438
_PyTuple_FromArraySteal(PyObject *const *src, Py_ssize_t n)
439
540
{
440
540
    if (n == 0) {
441
0
        return tuple_get_empty();
442
0
    }
443
540
    PyTupleObject *tuple = tuple_alloc(n);
444
540
    if (tuple == NULL) {
445
0
        for (Py_ssize_t i = 0; i < n; i++) {
446
0
            Py_DECREF(src[i]);
447
0
        }
448
0
        return NULL;
449
0
    }
450
540
    PyObject **dst = tuple->ob_item;
451
2.91k
    for (Py_ssize_t i = 0; i < n; i++) {
452
2.37k
        PyObject *item = src[i];
453
2.37k
        dst[i] = item;
454
2.37k
    }
455
540
    _PyObject_GC_TRACK(tuple);
456
540
    return (PyObject *)tuple;
457
540
}
458
459
static PyObject *
460
tuple_slice(PyTupleObject *a, Py_ssize_t ilow,
461
           Py_ssize_t ihigh)
462
36.5M
{
463
36.5M
    if (ilow < 0)
464
0
        ilow = 0;
465
36.5M
    if (ihigh > Py_SIZE(a))
466
36
        ihigh = Py_SIZE(a);
467
36.5M
    if (ihigh < ilow)
468
0
        ihigh = ilow;
469
36.5M
    if (ilow == 0 && ihigh == Py_SIZE(a) && PyTuple_CheckExact(a)) {
470
0
        return Py_NewRef(a);
471
0
    }
472
36.5M
    return PyTuple_FromArray(a->ob_item + ilow, ihigh - ilow);
473
36.5M
}
474
475
PyObject *
476
PyTuple_GetSlice(PyObject *op, Py_ssize_t i, Py_ssize_t j)
477
36.5M
{
478
36.5M
    if (op == NULL || !PyTuple_Check(op)) {
479
0
        PyErr_BadInternalCall();
480
0
        return NULL;
481
0
    }
482
36.5M
    return tuple_slice((PyTupleObject *)op, i, j);
483
36.5M
}
484
485
static PyObject *
486
tuple_concat(PyObject *aa, PyObject *bb)
487
3.02M
{
488
3.02M
    PyTupleObject *a = _PyTuple_CAST(aa);
489
3.02M
    if (Py_SIZE(a) == 0 && PyTuple_CheckExact(bb)) {
490
732k
        return Py_NewRef(bb);
491
732k
    }
492
2.28M
    if (!PyTuple_Check(bb)) {
493
0
        PyErr_Format(PyExc_TypeError,
494
0
             "can only concatenate tuple (not \"%.200s\") to tuple",
495
0
                 Py_TYPE(bb)->tp_name);
496
0
        return NULL;
497
0
    }
498
2.28M
    PyTupleObject *b = (PyTupleObject *)bb;
499
500
2.28M
    if (Py_SIZE(b) == 0 && PyTuple_CheckExact(a)) {
501
35
        return Py_NewRef(a);
502
35
    }
503
2.28M
    assert((size_t)Py_SIZE(a) + (size_t)Py_SIZE(b) < PY_SSIZE_T_MAX);
504
2.28M
    Py_ssize_t size = Py_SIZE(a) + Py_SIZE(b);
505
2.28M
    if (size == 0) {
506
0
        return tuple_get_empty();
507
0
    }
508
509
2.28M
    PyTupleObject *np = tuple_alloc(size);
510
2.28M
    if (np == NULL) {
511
0
        return NULL;
512
0
    }
513
514
2.28M
    PyObject **src = a->ob_item;
515
2.28M
    PyObject **dest = np->ob_item;
516
1.09G
    for (Py_ssize_t i = 0; i < Py_SIZE(a); i++) {
517
1.09G
        PyObject *v = src[i];
518
1.09G
        dest[i] = Py_NewRef(v);
519
1.09G
    }
520
521
2.28M
    src = b->ob_item;
522
2.28M
    dest = np->ob_item + Py_SIZE(a);
523
6.52M
    for (Py_ssize_t i = 0; i < Py_SIZE(b); i++) {
524
4.23M
        PyObject *v = src[i];
525
4.23M
        dest[i] = Py_NewRef(v);
526
4.23M
    }
527
528
2.28M
    _PyObject_GC_TRACK(np);
529
2.28M
    return (PyObject *)np;
530
2.28M
}
531
532
static PyObject *
533
tuple_repeat(PyObject *self, Py_ssize_t n)
534
0
{
535
0
    PyTupleObject *a = _PyTuple_CAST(self);
536
0
    const Py_ssize_t input_size = Py_SIZE(a);
537
0
    if (input_size == 0 || n == 1) {
538
0
        if (PyTuple_CheckExact(a)) {
539
            /* Since tuples are immutable, we can return a shared
540
               copy in this case */
541
0
            return Py_NewRef(a);
542
0
        }
543
0
    }
544
0
    if (input_size == 0 || n <= 0) {
545
0
        return tuple_get_empty();
546
0
    }
547
0
    assert(n>0);
548
549
0
    if (input_size > PY_SSIZE_T_MAX / n)
550
0
        return PyErr_NoMemory();
551
0
    Py_ssize_t output_size = input_size * n;
552
553
0
    PyTupleObject *np = tuple_alloc(output_size);
554
0
    if (np == NULL)
555
0
        return NULL;
556
557
0
    PyObject **dest = np->ob_item;
558
0
    if (input_size == 1) {
559
0
        PyObject *elem = a->ob_item[0];
560
0
        _Py_RefcntAdd(elem, n);
561
0
        PyObject **dest_end = dest + output_size;
562
0
        while (dest < dest_end) {
563
0
            *dest++ = elem;
564
0
        }
565
0
    }
566
0
    else {
567
0
        PyObject **src = a->ob_item;
568
0
        PyObject **src_end = src + input_size;
569
0
        while (src < src_end) {
570
0
            _Py_RefcntAdd(*src, n);
571
0
            *dest++ = *src++;
572
0
        }
573
574
0
        _Py_memory_repeat((char *)np->ob_item, sizeof(PyObject *)*output_size,
575
0
                          sizeof(PyObject *)*input_size);
576
0
    }
577
0
    _PyObject_GC_TRACK(np);
578
0
    return (PyObject *) np;
579
0
}
580
581
/*[clinic input]
582
tuple.index
583
584
    value: object
585
    start: slice_index(accept={int}) = 0
586
    stop: slice_index(accept={int}, c_default="PY_SSIZE_T_MAX") = sys.maxsize
587
    /
588
589
Return first index of value.
590
591
Raises ValueError if the value is not present.
592
[clinic start generated code]*/
593
594
static PyObject *
595
tuple_index_impl(PyTupleObject *self, PyObject *value, Py_ssize_t start,
596
                 Py_ssize_t stop)
597
/*[clinic end generated code: output=07b6f9f3cb5c33eb input=fb39e9874a21fe3f]*/
598
4
{
599
4
    Py_ssize_t i;
600
601
4
    if (start < 0) {
602
0
        start += Py_SIZE(self);
603
0
        if (start < 0)
604
0
            start = 0;
605
0
    }
606
4
    if (stop < 0) {
607
0
        stop += Py_SIZE(self);
608
0
    }
609
4
    else if (stop > Py_SIZE(self)) {
610
4
        stop = Py_SIZE(self);
611
4
    }
612
4
    for (i = start; i < stop; i++) {
613
4
        int cmp = PyObject_RichCompareBool(self->ob_item[i], value, Py_EQ);
614
4
        if (cmp > 0)
615
4
            return PyLong_FromSsize_t(i);
616
0
        else if (cmp < 0)
617
0
            return NULL;
618
4
    }
619
0
    PyErr_SetString(PyExc_ValueError, "tuple.index(x): x not in tuple");
620
0
    return NULL;
621
4
}
622
623
/*[clinic input]
624
tuple.count
625
626
     value: object
627
     /
628
629
Return number of occurrences of value.
630
[clinic start generated code]*/
631
632
static PyObject *
633
tuple_count_impl(PyTupleObject *self, PyObject *value)
634
/*[clinic end generated code: output=cf02888d4bc15d7a input=531721aff65bd772]*/
635
0
{
636
0
    Py_ssize_t count = 0;
637
0
    Py_ssize_t i;
638
639
0
    for (i = 0; i < Py_SIZE(self); i++) {
640
0
        int cmp = PyObject_RichCompareBool(self->ob_item[i], value, Py_EQ);
641
0
        if (cmp > 0)
642
0
            count++;
643
0
        else if (cmp < 0)
644
0
            return NULL;
645
0
    }
646
0
    return PyLong_FromSsize_t(count);
647
0
}
648
649
static int
650
tuple_traverse(PyObject *self, visitproc visit, void *arg)
651
141M
{
652
141M
    PyTupleObject *o = _PyTuple_CAST(self);
653
431M
    for (Py_ssize_t i = Py_SIZE(o); --i >= 0; ) {
654
289M
        Py_VISIT(o->ob_item[i]);
655
289M
    }
656
141M
    return 0;
657
141M
}
658
659
static PyObject *
660
tuple_richcompare(PyObject *v, PyObject *w, int op)
661
19.4M
{
662
19.4M
    PyTupleObject *vt, *wt;
663
19.4M
    Py_ssize_t i;
664
19.4M
    Py_ssize_t vlen, wlen;
665
666
19.4M
    if (!PyTuple_Check(v) || !PyTuple_Check(w))
667
0
        Py_RETURN_NOTIMPLEMENTED;
668
669
19.4M
    vt = (PyTupleObject *)v;
670
19.4M
    wt = (PyTupleObject *)w;
671
672
19.4M
    vlen = Py_SIZE(vt);
673
19.4M
    wlen = Py_SIZE(wt);
674
675
    /* Note:  the corresponding code for lists has an "early out" test
676
     * here when op is EQ or NE and the lengths differ.  That pays there,
677
     * but Tim was unable to find any real code where EQ/NE tuple
678
     * compares don't have the same length, so testing for it here would
679
     * have cost without benefit.
680
     */
681
682
    /* Search for the first index where items are different.
683
     * Note that because tuples are immutable, it's safe to reuse
684
     * vlen and wlen across the comparison calls.
685
     */
686
65.1M
    for (i = 0; i < vlen && i < wlen; i++) {
687
48.9M
        int k = PyObject_RichCompareBool(vt->ob_item[i],
688
48.9M
                                         wt->ob_item[i], Py_EQ);
689
48.9M
        if (k < 0)
690
0
            return NULL;
691
48.9M
        if (!k)
692
3.24M
            break;
693
48.9M
    }
694
695
19.4M
    if (i >= vlen || i >= wlen) {
696
        /* No more items to compare -- compare sizes */
697
16.2M
        Py_RETURN_RICHCOMPARE(vlen, wlen, op);
698
16.2M
    }
699
700
    /* We have an item that differs -- shortcuts for EQ/NE */
701
3.24M
    if (op == Py_EQ) {
702
59.6k
        Py_RETURN_FALSE;
703
59.6k
    }
704
3.18M
    if (op == Py_NE) {
705
1.51k
        Py_RETURN_TRUE;
706
1.51k
    }
707
708
    /* Compare the final item again using the proper operator */
709
3.18M
    return PyObject_RichCompare(vt->ob_item[i], wt->ob_item[i], op);
710
3.18M
}
711
712
static PyObject *
713
tuple_subtype_new(PyTypeObject *type, PyObject *iterable);
714
715
/*[clinic input]
716
@classmethod
717
tuple.__new__ as tuple_new
718
    iterable: object(c_default="NULL") = ()
719
    /
720
721
Built-in immutable sequence.
722
723
If no argument is given, the constructor returns an empty tuple.
724
If iterable is specified the tuple is initialized from iterable's items.
725
726
If the argument is a tuple, the return value is the same object.
727
[clinic start generated code]*/
728
729
static PyObject *
730
tuple_new_impl(PyTypeObject *type, PyObject *iterable)
731
/*[clinic end generated code: output=4546d9f0d469bce7 input=86963bcde633b5a2]*/
732
3.77M
{
733
3.77M
    if (type != &PyTuple_Type)
734
1.88M
        return tuple_subtype_new(type, iterable);
735
736
1.88M
    if (iterable == NULL) {
737
0
        return tuple_get_empty();
738
0
    }
739
1.88M
    else {
740
1.88M
        return PySequence_Tuple(iterable);
741
1.88M
    }
742
1.88M
}
743
744
static PyObject *
745
tuple_vectorcall(PyObject *type, PyObject * const*args,
746
                 size_t nargsf, PyObject *kwnames)
747
145
{
748
145
    if (!_PyArg_NoKwnames("tuple", kwnames)) {
749
0
        return NULL;
750
0
    }
751
752
145
    Py_ssize_t nargs = PyVectorcall_NARGS(nargsf);
753
145
    if (!_PyArg_CheckPositional("tuple", nargs, 0, 1)) {
754
0
        return NULL;
755
0
    }
756
757
145
    if (nargs) {
758
145
        return tuple_new_impl(_PyType_CAST(type), args[0]);
759
145
    }
760
0
    else {
761
0
        return tuple_get_empty();
762
0
    }
763
145
}
764
765
static PyObject *
766
tuple_subtype_new(PyTypeObject *type, PyObject *iterable)
767
1.88M
{
768
1.88M
    PyObject *tmp, *newobj, *item;
769
1.88M
    Py_ssize_t i, n;
770
771
1.88M
    assert(PyType_IsSubtype(type, &PyTuple_Type));
772
    // tuple subclasses must implement the GC protocol
773
1.88M
    assert(_PyType_IS_GC(type));
774
775
1.88M
    tmp = tuple_new_impl(&PyTuple_Type, iterable);
776
1.88M
    if (tmp == NULL)
777
0
        return NULL;
778
1.88M
    assert(PyTuple_Check(tmp));
779
    /* This may allocate an empty tuple that is not the global one. */
780
1.88M
    newobj = type->tp_alloc(type, n = PyTuple_GET_SIZE(tmp));
781
1.88M
    if (newobj == NULL) {
782
0
        Py_DECREF(tmp);
783
0
        return NULL;
784
0
    }
785
7.54M
    for (i = 0; i < n; i++) {
786
5.66M
        item = PyTuple_GET_ITEM(tmp, i);
787
5.66M
        PyTuple_SET_ITEM(newobj, i, Py_NewRef(item));
788
5.66M
    }
789
1.88M
    Py_DECREF(tmp);
790
791
1.88M
    _PyTuple_RESET_HASH_CACHE(newobj);
792
793
    // Don't track if a subclass tp_alloc is PyType_GenericAlloc()
794
1.88M
    if (!_PyObject_GC_IS_TRACKED(newobj)) {
795
0
        _PyObject_GC_TRACK(newobj);
796
0
    }
797
1.88M
    return newobj;
798
1.88M
}
799
800
static PySequenceMethods tuple_as_sequence = {
801
    tuple_length,                               /* sq_length */
802
    tuple_concat,                               /* sq_concat */
803
    tuple_repeat,                               /* sq_repeat */
804
    tuple_item,                                 /* sq_item */
805
    0,                                          /* sq_slice */
806
    0,                                          /* sq_ass_item */
807
    0,                                          /* sq_ass_slice */
808
    tuple_contains,                             /* sq_contains */
809
};
810
811
static PyObject*
812
tuple_subscript(PyObject *op, PyObject* item)
813
3.78M
{
814
3.78M
    PyTupleObject *self = _PyTuple_CAST(op);
815
3.78M
    if (_PyIndex_Check(item)) {
816
1.89M
        Py_ssize_t i = PyNumber_AsSsize_t(item, PyExc_IndexError);
817
1.89M
        if (i == -1 && PyErr_Occurred())
818
0
            return NULL;
819
1.89M
        if (i < 0)
820
1.89M
            i += PyTuple_GET_SIZE(self);
821
1.89M
        return tuple_item(op, i);
822
1.89M
    }
823
1.88M
    else if (PySlice_Check(item)) {
824
1.88M
        Py_ssize_t start, stop, step, slicelength, i;
825
1.88M
        size_t cur;
826
1.88M
        PyObject* it;
827
1.88M
        PyObject **src, **dest;
828
829
1.88M
        if (PySlice_Unpack(item, &start, &stop, &step) < 0) {
830
0
            return NULL;
831
0
        }
832
1.88M
        slicelength = PySlice_AdjustIndices(PyTuple_GET_SIZE(self), &start,
833
1.88M
                                            &stop, step);
834
835
1.88M
        if (slicelength <= 0) {
836
6.81k
            return tuple_get_empty();
837
6.81k
        }
838
1.88M
        else if (start == 0 && step == 1 &&
839
1.80M
                 slicelength == PyTuple_GET_SIZE(self) &&
840
8
                 PyTuple_CheckExact(self)) {
841
8
            return Py_NewRef(self);
842
8
        }
843
1.88M
        else {
844
1.88M
            PyTupleObject* result = tuple_alloc(slicelength);
845
1.88M
            if (!result) return NULL;
846
847
1.88M
            src = self->ob_item;
848
1.88M
            dest = result->ob_item;
849
2.23G
            for (cur = start, i = 0; i < slicelength;
850
2.23G
                 cur += step, i++) {
851
2.23G
                it = Py_NewRef(src[cur]);
852
2.23G
                dest[i] = it;
853
2.23G
            }
854
855
1.88M
            _PyObject_GC_TRACK(result);
856
1.88M
            return (PyObject *)result;
857
1.88M
        }
858
1.88M
    }
859
0
    else {
860
0
        PyErr_Format(PyExc_TypeError,
861
0
                     "tuple indices must be integers or slices, not %.200s",
862
0
                     Py_TYPE(item)->tp_name);
863
0
        return NULL;
864
0
    }
865
3.78M
}
866
867
/*[clinic input]
868
tuple.__getnewargs__
869
[clinic start generated code]*/
870
871
static PyObject *
872
tuple___getnewargs___impl(PyTupleObject *self)
873
/*[clinic end generated code: output=25e06e3ee56027e2 input=1aeb4b286a21639a]*/
874
0
{
875
0
    return Py_BuildValue("(N)", tuple_slice(self, 0, Py_SIZE(self)));
876
0
}
877
878
static PyMethodDef tuple_methods[] = {
879
    TUPLE___GETNEWARGS___METHODDEF
880
    TUPLE_INDEX_METHODDEF
881
    TUPLE_COUNT_METHODDEF
882
    {"__class_getitem__", Py_GenericAlias, METH_O|METH_CLASS, PyDoc_STR("See PEP 585")},
883
    {NULL,              NULL}           /* sentinel */
884
};
885
886
static PyMappingMethods tuple_as_mapping = {
887
    tuple_length,
888
    tuple_subscript,
889
    0
890
};
891
892
static PyObject *tuple_iter(PyObject *seq);
893
894
PyTypeObject PyTuple_Type = {
895
    PyVarObject_HEAD_INIT(&PyType_Type, 0)
896
    "tuple",
897
    sizeof(PyTupleObject) - sizeof(PyObject *),
898
    sizeof(PyObject *),
899
    tuple_dealloc,                              /* tp_dealloc */
900
    0,                                          /* tp_vectorcall_offset */
901
    0,                                          /* tp_getattr */
902
    0,                                          /* tp_setattr */
903
    0,                                          /* tp_as_async */
904
    tuple_repr,                                 /* tp_repr */
905
    0,                                          /* tp_as_number */
906
    &tuple_as_sequence,                         /* tp_as_sequence */
907
    &tuple_as_mapping,                          /* tp_as_mapping */
908
    tuple_hash,                                 /* tp_hash */
909
    0,                                          /* tp_call */
910
    0,                                          /* tp_str */
911
    PyObject_GenericGetAttr,                    /* tp_getattro */
912
    0,                                          /* tp_setattro */
913
    0,                                          /* tp_as_buffer */
914
    Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC |
915
        Py_TPFLAGS_BASETYPE | Py_TPFLAGS_TUPLE_SUBCLASS |
916
        _Py_TPFLAGS_MATCH_SELF | Py_TPFLAGS_SEQUENCE,  /* tp_flags */
917
    tuple_new__doc__,                           /* tp_doc */
918
    tuple_traverse,                             /* tp_traverse */
919
    0,                                          /* tp_clear */
920
    tuple_richcompare,                          /* tp_richcompare */
921
    0,                                          /* tp_weaklistoffset */
922
    tuple_iter,                                 /* tp_iter */
923
    0,                                          /* tp_iternext */
924
    tuple_methods,                              /* tp_methods */
925
    0,                                          /* tp_members */
926
    0,                                          /* tp_getset */
927
    0,                                          /* tp_base */
928
    0,                                          /* tp_dict */
929
    0,                                          /* tp_descr_get */
930
    0,                                          /* tp_descr_set */
931
    0,                                          /* tp_dictoffset */
932
    0,                                          /* tp_init */
933
    0,                                          /* tp_alloc */
934
    tuple_new,                                  /* tp_new */
935
    PyObject_GC_Del,                            /* tp_free */
936
    .tp_vectorcall = tuple_vectorcall,
937
    .tp_version_tag = _Py_TYPE_VERSION_TUPLE,
938
};
939
940
/* The following function breaks the notion that tuples are immutable:
941
   it changes the size of a tuple.  We get away with this only if there
942
   is only one module referencing the object.  You can also think of it
943
   as creating a new tuple object and destroying the old one, only more
944
   efficiently.  In any case, don't use this if the tuple may already be
945
   known to some other part of the code. */
946
947
int
948
_PyTuple_Resize(PyObject **pv, Py_ssize_t newsize)
949
512
{
950
512
    PyTupleObject *v;
951
512
    PyTupleObject *sv;
952
512
    Py_ssize_t i;
953
512
    Py_ssize_t oldsize;
954
955
512
    v = (PyTupleObject *) *pv;
956
512
    if (v == NULL || !Py_IS_TYPE(v, &PyTuple_Type) ||
957
512
        (Py_SIZE(v) != 0 && !_PyObject_IsUniquelyReferenced(*pv))) {
958
0
        *pv = 0;
959
0
        Py_XDECREF(v);
960
0
        PyErr_BadInternalCall();
961
0
        return -1;
962
0
    }
963
964
512
    oldsize = Py_SIZE(v);
965
512
    if (oldsize == newsize) {
966
484
        return 0;
967
484
    }
968
28
    if (newsize == 0) {
969
0
        Py_DECREF(v);
970
0
        *pv = tuple_get_empty();
971
0
        return 0;
972
0
    }
973
28
    if (oldsize == 0) {
974
#ifdef Py_DEBUG
975
        assert(v == &_Py_SINGLETON(tuple_empty));
976
#endif
977
        /* The empty tuple is statically allocated so we never
978
           resize it in-place. */
979
0
        Py_DECREF(v);
980
0
        *pv = PyTuple_New(newsize);
981
0
        return *pv == NULL ? -1 : 0;
982
0
    }
983
984
28
    if (_PyObject_GC_IS_TRACKED(v)) {
985
28
        _PyObject_GC_UNTRACK(v);
986
28
    }
987
#ifdef Py_TRACE_REFS
988
    _Py_ForgetReference((PyObject *) v);
989
#endif
990
    /* DECREF items deleted by shrinkage */
991
112
    for (i = newsize; i < oldsize; i++) {
992
84
        Py_CLEAR(v->ob_item[i]);
993
84
    }
994
28
    _PyReftracerTrack((PyObject *)v, PyRefTracer_DESTROY);
995
28
    sv = PyObject_GC_Resize(PyTupleObject, v, newsize);
996
28
    if (sv == NULL) {
997
0
        *pv = NULL;
998
#ifdef Py_REF_DEBUG
999
        _Py_DecRefTotal(_PyThreadState_GET());
1000
#endif
1001
0
        PyObject_GC_Del(v);
1002
0
        return -1;
1003
0
    }
1004
28
    _Py_NewReferenceNoTotal((PyObject *) sv);
1005
    /* Zero out items added by growing */
1006
28
    if (newsize > oldsize)
1007
0
        memset(&sv->ob_item[oldsize], 0,
1008
0
               sizeof(*sv->ob_item) * (newsize - oldsize));
1009
28
    *pv = (PyObject *) sv;
1010
28
    _PyObject_GC_TRACK(sv);
1011
28
    return 0;
1012
28
}
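
Illustrative sketch (not part of the measured source): the usual pattern behind _PyTuple_Resize is "over-allocate, fill while the tuple has a single owner, then shrink to fit". The helper collect_some_items is hypothetical and is deliberately capped at eight items to keep the example short.

#include <Python.h>

static PyObject *
collect_some_items(PyObject *iterable)
{
    PyObject *result = PyTuple_New(8);            /* optimistic size guess */
    if (result == NULL) {
        return NULL;
    }
    PyObject *it = PyObject_GetIter(iterable);
    if (it == NULL) {
        Py_DECREF(result);
        return NULL;
    }
    Py_ssize_t used = 0;
    PyObject *item;
    while (used < 8 && (item = PyIter_Next(it)) != NULL) {
        PyTuple_SET_ITEM(result, used++, item);   /* steals the reference */
    }
    Py_DECREF(it);
    if (PyErr_Occurred()) {                       /* PyIter_Next error */
        Py_DECREF(result);
        return NULL;
    }
    /* Shrink to the slots actually filled; on failure *result has already
       been set to NULL and the old object freed by _PyTuple_Resize. */
    if (_PyTuple_Resize(&result, used) < 0) {
        return NULL;
    }
    return result;
}
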
1013
1014
/*********************** Tuple Iterator **************************/
1015
1016
1.25G
#define _PyTupleIterObject_CAST(op) ((_PyTupleIterObject *)(op))
1017
1018
static void
1019
tupleiter_dealloc(PyObject *self)
1020
5.27M
{
1021
5.27M
    _PyTupleIterObject *it = _PyTupleIterObject_CAST(self);
1022
5.27M
    _PyObject_GC_UNTRACK(it);
1023
5.27M
    Py_XDECREF(it->it_seq);
1024
5.27M
    assert(Py_IS_TYPE(self, &PyTupleIter_Type));
1025
5.27M
    _Py_FREELIST_FREE(tuple_iters, it, PyObject_GC_Del);
1026
5.27M
}
1027
1028
static int
1029
tupleiter_traverse(PyObject *self, visitproc visit, void *arg)
1030
8
{
1031
8
    _PyTupleIterObject *it = _PyTupleIterObject_CAST(self);
1032
8
    Py_VISIT(it->it_seq);
1033
8
    return 0;
1034
8
}
1035
1036
static PyObject *
1037
tupleiter_next(PyObject *self)
1038
1.24G
{
1039
1.24G
    _PyTupleIterObject *it = _PyTupleIterObject_CAST(self);
1040
1.24G
    PyTupleObject *seq;
1041
1.24G
    PyObject *item;
1042
1043
1.24G
    assert(it != NULL);
1044
1.24G
    seq = it->it_seq;
1045
1.24G
#ifndef Py_GIL_DISABLED
1046
1.24G
    if (seq == NULL)
1047
0
        return NULL;
1048
1.24G
#endif
1049
1.24G
    assert(PyTuple_Check(seq));
1050
1051
1.24G
    Py_ssize_t index = FT_ATOMIC_LOAD_SSIZE_RELAXED(it->it_index);
1052
1.24G
    if (index < PyTuple_GET_SIZE(seq)) {
1053
1.24G
        FT_ATOMIC_STORE_SSIZE_RELAXED(it->it_index, index + 1);
1054
1.24G
        item = PyTuple_GET_ITEM(seq, index);
1055
1.24G
        return Py_NewRef(item);
1056
1.24G
    }
1057
1058
5.27M
#ifndef Py_GIL_DISABLED
1059
5.27M
    it->it_seq = NULL;
1060
5.27M
    Py_DECREF(seq);
1061
5.27M
#endif
1062
5.27M
    return NULL;
1063
1.24G
}
1064
1065
static PyObject *
1066
tupleiter_len(PyObject *self, PyObject *Py_UNUSED(ignored))
1067
0
{
1068
0
    _PyTupleIterObject *it = _PyTupleIterObject_CAST(self);
1069
0
    Py_ssize_t len = 0;
1070
#ifdef Py_GIL_DISABLED
1071
    Py_ssize_t idx = FT_ATOMIC_LOAD_SSIZE_RELAXED(it->it_index);
1072
    Py_ssize_t seq_len = PyTuple_GET_SIZE(it->it_seq);
1073
    if (idx < seq_len)
1074
        len = seq_len - idx;
1075
#else
1076
0
    if (it->it_seq)
1077
0
        len = PyTuple_GET_SIZE(it->it_seq) - it->it_index;
1078
0
#endif
1079
0
    return PyLong_FromSsize_t(len);
1080
0
}
1081
1082
PyDoc_STRVAR(length_hint_doc, "Private method returning an estimate of len(list(it)).");
1083
1084
static PyObject *
1085
tupleiter_reduce(PyObject *self, PyObject *Py_UNUSED(ignored))
1086
0
{
1087
0
    PyObject *iter = _PyEval_GetBuiltin(&_Py_ID(iter));
1088
1089
    /* _PyEval_GetBuiltin can invoke arbitrary code,
1090
     * call must be before access of iterator pointers.
1091
     * see issue #101765 */
1092
0
    _PyTupleIterObject *it = _PyTupleIterObject_CAST(self);
1093
1094
#ifdef Py_GIL_DISABLED
1095
    Py_ssize_t idx = FT_ATOMIC_LOAD_SSIZE_RELAXED(it->it_index);
1096
    if (idx < PyTuple_GET_SIZE(it->it_seq))
1097
        return Py_BuildValue("N(O)n", iter, it->it_seq, idx);
1098
#else
1099
0
    if (it->it_seq)
1100
0
        return Py_BuildValue("N(O)n", iter, it->it_seq, it->it_index);
1101
0
#endif
1102
0
    return Py_BuildValue("N(())", iter);
1103
0
}
1104
1105
static PyObject *
1106
tupleiter_setstate(PyObject *self, PyObject *state)
1107
0
{
1108
0
    _PyTupleIterObject *it = _PyTupleIterObject_CAST(self);
1109
0
    Py_ssize_t index = PyLong_AsSsize_t(state);
1110
0
    if (index == -1 && PyErr_Occurred())
1111
0
        return NULL;
1112
0
    if (it->it_seq != NULL) {
1113
0
        if (index < 0)
1114
0
            index = 0;
1115
0
        else if (index > PyTuple_GET_SIZE(it->it_seq))
1116
0
            index = PyTuple_GET_SIZE(it->it_seq); /* exhausted iterator */
1117
0
        FT_ATOMIC_STORE_SSIZE_RELAXED(it->it_index, index);
1118
0
    }
1119
0
    Py_RETURN_NONE;
1120
0
}
1121
1122
PyDoc_STRVAR(reduce_doc, "Return state information for pickling.");
1123
PyDoc_STRVAR(setstate_doc, "Set state information for unpickling.");
1124
1125
static PyMethodDef tupleiter_methods[] = {
1126
    {"__length_hint__", tupleiter_len, METH_NOARGS, length_hint_doc},
1127
    {"__reduce__", tupleiter_reduce, METH_NOARGS, reduce_doc},
1128
    {"__setstate__", tupleiter_setstate, METH_O, setstate_doc},
1129
    {NULL, NULL, 0, NULL} /* sentinel */
1130
};
1131
1132
PyTypeObject PyTupleIter_Type = {
1133
    PyVarObject_HEAD_INIT(&PyType_Type, 0)
1134
    "tuple_iterator",                           /* tp_name */
1135
    sizeof(_PyTupleIterObject),                 /* tp_basicsize */
1136
    0,                                          /* tp_itemsize */
1137
    /* methods */
1138
    tupleiter_dealloc,                          /* tp_dealloc */
1139
    0,                                          /* tp_vectorcall_offset */
1140
    0,                                          /* tp_getattr */
1141
    0,                                          /* tp_setattr */
1142
    0,                                          /* tp_as_async */
1143
    0,                                          /* tp_repr */
1144
    0,                                          /* tp_as_number */
1145
    0,                                          /* tp_as_sequence */
1146
    0,                                          /* tp_as_mapping */
1147
    0,                                          /* tp_hash */
1148
    0,                                          /* tp_call */
1149
    0,                                          /* tp_str */
1150
    PyObject_GenericGetAttr,                    /* tp_getattro */
1151
    0,                                          /* tp_setattro */
1152
    0,                                          /* tp_as_buffer */
1153
    Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC,/* tp_flags */
1154
    0,                                          /* tp_doc */
1155
    tupleiter_traverse,                         /* tp_traverse */
1156
    0,                                          /* tp_clear */
1157
    0,                                          /* tp_richcompare */
1158
    0,                                          /* tp_weaklistoffset */
1159
    PyObject_SelfIter,                          /* tp_iter */
1160
    tupleiter_next,                             /* tp_iternext */
1161
    tupleiter_methods,                          /* tp_methods */
1162
    0,
1163
};
1164
1165
static PyObject *
1166
tuple_iter(PyObject *seq)
1167
5.27M
{
1168
5.27M
    if (!PyTuple_Check(seq)) {
1169
0
        PyErr_BadInternalCall();
1170
0
        return NULL;
1171
0
    }
1172
5.27M
    _PyTupleIterObject *it = _Py_FREELIST_POP(_PyTupleIterObject, tuple_iters);
1173
5.27M
    if (it == NULL) {
1174
60
        it = PyObject_GC_New(_PyTupleIterObject, &PyTupleIter_Type);
1175
60
        if (it == NULL)
1176
0
            return NULL;
1177
60
    }
1178
5.27M
    it->it_index = 0;
1179
5.27M
    it->it_seq = (PyTupleObject *)Py_NewRef(seq);
1180
5.27M
    _PyObject_GC_TRACK(it);
1181
5.27M
    return (PyObject *)it;
1182
5.27M
}
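
Illustrative sketch (not part of the measured source): from C, the iterator produced by tuple_iter() is normally consumed through the generic protocol, PyObject_GetIter() plus PyIter_Next(). PyIter_Next() returns NULL both at exhaustion and on error, so PyErr_Occurred() distinguishes the two cases. The helper name count_items is hypothetical.

#include <Python.h>

static Py_ssize_t
count_items(PyObject *tuple)
{
    PyObject *it = PyObject_GetIter(tuple);   /* ends up in tuple_iter() for tuples */
    if (it == NULL) {
        return -1;
    }
    Py_ssize_t count = 0;
    PyObject *item;
    while ((item = PyIter_Next(it)) != NULL) {
        count++;
        Py_DECREF(item);                      /* PyIter_Next returns a new reference */
    }
    Py_DECREF(it);
    return PyErr_Occurred() ? -1 : count;
}
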
1183
1184
1185
/*************
1186
 * freelists *
1187
 *************/
1188
1189
static inline int
1190
maybe_freelist_push(PyTupleObject *op)
1191
498M
{
1192
498M
    if (!Py_IS_TYPE(op, &PyTuple_Type)) {
1193
1.88M
        return 0;
1194
1.88M
    }
1195
496M
    Py_ssize_t index = Py_SIZE(op) - 1;
1196
496M
    if (index < PyTuple_MAXSAVESIZE) {
1197
491M
        return _Py_FREELIST_PUSH(tuples[index], op, Py_tuple_MAXFREELIST);
1198
491M
    }
1199
4.98M
    return 0;
1200
496M
}
1201
1202
/* Print summary info about the state of the optimized allocator */
1203
void
1204
_PyTuple_DebugMallocStats(FILE *out)
1205
0
{
1206
0
    for (int i = 0; i < PyTuple_MAXSAVESIZE; i++) {
1207
0
        int len = i + 1;
1208
0
        char buf[128];
1209
0
        PyOS_snprintf(buf, sizeof(buf),
1210
0
                      "free %d-sized PyTupleObject", len);
1211
0
        _PyDebugAllocatorStats(out, buf, _Py_FREELIST_SIZE(tuples[i]),
1212
0
                               _PyObject_VAR_SIZE(&PyTuple_Type, len));
1213
0
    }
1214
0
}