Coverage Report

Created: 2025-11-11 06:44

/src/cpython/Objects/tupleobject.c
Line    Count    Source
1
/* Tuple object implementation */
2
3
#include "Python.h"
4
#include "pycore_abstract.h"      // _PyIndex_Check()
5
#include "pycore_ceval.h"         // _PyEval_GetBuiltin()
6
#include "pycore_freelist.h"      // _Py_FREELIST_PUSH()
7
#include "pycore_gc.h"            // _PyObject_GC_IS_TRACKED()
8
#include "pycore_list.h"          // _Py_memory_repeat()
9
#include "pycore_modsupport.h"    // _PyArg_NoKwnames()
10
#include "pycore_object.h"        // _PyObject_GC_TRACK()
11
#include "pycore_stackref.h"      // PyStackRef_AsPyObjectSteal()
12
#include "pycore_tuple.h"         // _PyTupleIterObject
13
14
15
/*[clinic input]
16
class tuple "PyTupleObject *" "&PyTuple_Type"
17
[clinic start generated code]*/
18
/*[clinic end generated code: output=da39a3ee5e6b4b0d input=f051ba3cfdf9a189]*/
19
20
#include "clinic/tupleobject.c.h"
21
22
23
static inline int maybe_freelist_push(PyTupleObject *);
24
25
26
/* Allocate an uninitialized tuple object. Before making it public, the following
27
   steps must be done:
28
29
   - Initialize its items.
30
   - Call _PyObject_GC_TRACK() on it.
31
32
   Because the empty tuple is always reused and it's already tracked by GC,
33
   this function must not be called with size == 0 (unless from PyTuple_New()
34
   which wraps this function).
35
*/
36
static PyTupleObject *
37
tuple_alloc(Py_ssize_t size)
38
310M
{
39
310M
    if (size < 0) {
40
0
        PyErr_BadInternalCall();
41
0
        return NULL;
42
0
    }
43
310M
    assert(size != 0);    // The empty tuple is statically allocated.
44
310M
    Py_ssize_t index = size - 1;
45
310M
    if (index < PyTuple_MAXSAVESIZE) {
46
310M
        PyTupleObject *op = _Py_FREELIST_POP(PyTupleObject, tuples[index]);
47
310M
        if (op != NULL) {
48
279M
            _PyTuple_RESET_HASH_CACHE(op);
49
279M
            return op;
50
279M
        }
51
310M
    }
52
    /* Check for overflow */
53
30.8M
    if ((size_t)size > ((size_t)PY_SSIZE_T_MAX - (sizeof(PyTupleObject) -
54
30.8M
                sizeof(PyObject *))) / sizeof(PyObject *)) {
55
0
        return (PyTupleObject *)PyErr_NoMemory();
56
0
    }
57
30.8M
    PyTupleObject *result = PyObject_GC_NewVar(PyTupleObject, &PyTuple_Type, size);
58
30.8M
    if (result != NULL) {
59
30.8M
        _PyTuple_RESET_HASH_CACHE(result);
60
30.8M
    }
61
30.8M
    return result;
62
30.8M
}
63
64
// The empty tuple singleton is not tracked by the GC.
65
// It does not contain any Python object.
66
// Note that tuple subclasses have their own empty instances.
67
68
static inline PyObject *
69
tuple_get_empty(void)
70
78.4M
{
71
78.4M
    return (PyObject *)&_Py_SINGLETON(tuple_empty);
72
78.4M
}
73
74
PyObject *
75
PyTuple_New(Py_ssize_t size)
76
46.4M
{
77
46.4M
    PyTupleObject *op;
78
46.4M
    if (size == 0) {
79
26.4k
        return tuple_get_empty();
80
26.4k
    }
81
46.4M
    op = tuple_alloc(size);
82
46.4M
    if (op == NULL) {
83
0
        return NULL;
84
0
    }
85
159M
    for (Py_ssize_t i = 0; i < size; i++) {
86
113M
        op->ob_item[i] = NULL;
87
113M
    }
88
46.4M
    _PyObject_GC_TRACK(op);
89
46.4M
    return (PyObject *) op;
90
46.4M
}
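
For context, a minimal usage sketch (not taken from this file) of PyTuple_New() from C extension code: the new tuple's slots start out NULL and every slot must be filled before the object is shared. The helper name make_pair_example is hypothetical.

#include <Python.h>

/* Illustrative helper: build ("a", "b") with PyTuple_New() + PyTuple_SetItem().
   PyTuple_SetItem() steals the item reference, so no extra DECREF is needed
   for items that were successfully stored. */
static PyObject *
make_pair_example(void)
{
    PyObject *t = PyTuple_New(2);          /* slots start out NULL */
    if (t == NULL) {
        return NULL;
    }
    PyObject *a = PyUnicode_FromString("a");
    if (a == NULL || PyTuple_SetItem(t, 0, a) < 0) {
        Py_DECREF(t);
        return NULL;
    }
    PyObject *b = PyUnicode_FromString("b");
    if (b == NULL || PyTuple_SetItem(t, 1, b) < 0) {
        Py_DECREF(t);
        return NULL;
    }
    return t;
}

In new code, PyTuple_Pack() or Py_BuildValue() is usually simpler for small fixed-size tuples.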
91
92
Py_ssize_t
93
PyTuple_Size(PyObject *op)
94
5.84M
{
95
5.84M
    if (!PyTuple_Check(op)) {
96
0
        PyErr_BadInternalCall();
97
0
        return -1;
98
0
    }
99
5.84M
    else
100
5.84M
        return Py_SIZE(op);
101
5.84M
}
102
103
PyObject *
104
PyTuple_GetItem(PyObject *op, Py_ssize_t i)
105
28.0M
{
106
28.0M
    if (!PyTuple_Check(op)) {
107
0
        PyErr_BadInternalCall();
108
0
        return NULL;
109
0
    }
110
28.0M
    if (i < 0 || i >= Py_SIZE(op)) {
111
0
        PyErr_SetString(PyExc_IndexError, "tuple index out of range");
112
0
        return NULL;
113
0
    }
114
28.0M
    return ((PyTupleObject *)op) -> ob_item[i];
115
28.0M
}
116
117
int
118
PyTuple_SetItem(PyObject *op, Py_ssize_t i, PyObject *newitem)
119
71.7k
{
120
71.7k
    PyObject **p;
121
71.7k
    if (!PyTuple_Check(op) || !_PyObject_IsUniquelyReferenced(op)) {
122
0
        Py_XDECREF(newitem);
123
0
        PyErr_BadInternalCall();
124
0
        return -1;
125
0
    }
126
71.7k
    if (i < 0 || i >= Py_SIZE(op)) {
127
0
        Py_XDECREF(newitem);
128
0
        PyErr_SetString(PyExc_IndexError,
129
0
                        "tuple assignment index out of range");
130
0
        return -1;
131
0
    }
132
71.7k
    p = ((PyTupleObject *)op) -> ob_item + i;
133
71.7k
    Py_XSETREF(*p, newitem);
134
71.7k
    return 0;
135
71.7k
}
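
A short sketch of the read-side reference semantics: PyTuple_GetItem() returns a borrowed reference, while PyTuple_SetItem() (above) steals its argument and is only legal on a tuple the caller uniquely owns, matching the _PyObject_IsUniquelyReferenced() check. The helper name total_item_length is hypothetical.

#include <Python.h>

/* Sum the lengths of all items in a tuple. PyTuple_GetItem() returns a
   borrowed reference, so no DECREF is performed here. */
static Py_ssize_t
total_item_length(PyObject *tup)
{
    Py_ssize_t n = PyTuple_Size(tup);       /* -1 with an exception set if not a tuple */
    if (n < 0) {
        return -1;
    }
    Py_ssize_t total = 0;
    for (Py_ssize_t i = 0; i < n; i++) {
        PyObject *item = PyTuple_GetItem(tup, i);   /* borrowed reference */
        if (item == NULL) {
            return -1;
        }
        Py_ssize_t len = PyObject_Length(item);
        if (len < 0) {
            return -1;
        }
        total += len;
    }
    return total;
}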
136
137
void
138
_PyTuple_MaybeUntrack(PyObject *op)
139
2.42M
{
140
2.42M
    PyTupleObject *t;
141
2.42M
    Py_ssize_t i, n;
142
143
2.42M
    if (!PyTuple_CheckExact(op) || !_PyObject_GC_IS_TRACKED(op))
144
0
        return;
145
2.42M
    t = (PyTupleObject *) op;
146
2.42M
    n = Py_SIZE(t);
147
5.48M
    for (i = 0; i < n; i++) {
148
4.22M
        PyObject *elt = PyTuple_GET_ITEM(t, i);
149
        /* Tuples with NULL elements aren't
150
           fully constructed; don't untrack
151
           them yet. */
152
4.22M
        if (!elt ||
153
4.22M
            _PyObject_GC_MAY_BE_TRACKED(elt))
154
1.16M
            return;
155
4.22M
    }
156
1.25M
    _PyObject_GC_UNTRACK(op);
157
1.25M
}
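
_PyTuple_MaybeUntrack() is invoked by the garbage collector: a tuple whose elements can never participate in a reference cycle may be dropped from the GC lists. A hedged observational sketch, assuming the public PyObject_GC_IsTracked() and PyGC_Collect() APIs; whether the tuple actually becomes untracked depends on when the collector visits it.

#include <Python.h>

/* A freshly built tuple of small ints starts out GC-tracked; after a
   collection the collector may untrack it via _PyTuple_MaybeUntrack(),
   since none of its elements are trackable. */
static void
show_untracking_example(void)
{
    PyObject *t = Py_BuildValue("(ii)", 1, 2);
    if (t == NULL) {
        return;
    }
    printf("tracked before collect: %d\n", PyObject_GC_IsTracked(t));
    PyGC_Collect();
    printf("tracked after collect:  %d\n", PyObject_GC_IsTracked(t));
    Py_DECREF(t);
}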
158
159
/* Fast but conservative check whether an object may be tracked.
160
   May return true for an object that is not tracked;
161
   will always return true for an object that is tracked.
162
   This is a temporary workaround until _PyObject_GC_IS_TRACKED
163
   becomes fast and safe to call on non-GC objects.
164
*/
165
static bool
166
maybe_tracked(PyObject *ob)
167
468M
{
168
468M
    return _PyType_IS_GC(Py_TYPE(ob));
169
468M
}
170
171
PyObject *
172
PyTuple_Pack(Py_ssize_t n, ...)
173
8.71M
{
174
8.71M
    Py_ssize_t i;
175
8.71M
    PyObject *o;
176
8.71M
    PyObject **items;
177
8.71M
    va_list vargs;
178
8.71M
    bool track = false;
179
180
8.71M
    if (n == 0) {
181
0
        return tuple_get_empty();
182
0
    }
183
184
8.71M
    va_start(vargs, n);
185
8.71M
    PyTupleObject *result = tuple_alloc(n);
186
8.71M
    if (result == NULL) {
187
0
        va_end(vargs);
188
0
        return NULL;
189
0
    }
190
8.71M
    items = result->ob_item;
191
26.1M
    for (i = 0; i < n; i++) {
192
17.4M
        o = va_arg(vargs, PyObject *);
193
17.4M
        if (!track && maybe_tracked(o)) {
194
105k
            track = true;
195
105k
        }
196
17.4M
        items[i] = Py_NewRef(o);
197
17.4M
    }
198
8.71M
    va_end(vargs);
199
8.71M
    if (track) {
200
105k
        _PyObject_GC_TRACK(result);
201
105k
    }
202
8.71M
    return (PyObject *)result;
203
8.71M
}
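
PyTuple_Pack(n, ...) is the C analogue of a tuple display. Unlike PyTuple_SetItem() it does not steal references, so the caller keeps ownership of the packed objects. A minimal sketch; the helper name pack_pair_example is hypothetical.

#include <Python.h>

/* Pack two freshly created objects into a pair. PyTuple_Pack() takes new
   references to its arguments, so x and y still need their own DECREFs. */
static PyObject *
pack_pair_example(void)
{
    PyObject *x = PyLong_FromLong(1);
    PyObject *y = PyLong_FromLong(2);
    PyObject *pair = NULL;
    if (x != NULL && y != NULL) {
        pair = PyTuple_Pack(2, x, y);   /* equivalent to the Python tuple (1, 2) */
    }
    Py_XDECREF(x);
    Py_XDECREF(y);
    return pair;                        /* NULL on failure, exception set */
}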
204
205
206
/* Methods */
207
208
static void
209
tuple_dealloc(PyObject *self)
210
311M
{
211
311M
    PyTupleObject *op = _PyTuple_CAST(self);
212
311M
    if (Py_SIZE(op) == 0) {
213
        /* The empty tuple is statically allocated. */
214
0
        if (op == &_Py_SINGLETON(tuple_empty)) {
215
#ifdef Py_DEBUG
216
            _Py_FatalRefcountError("deallocating the empty tuple singleton");
217
#else
218
0
            return;
219
0
#endif
220
0
        }
221
#ifdef Py_DEBUG
222
        /* tuple subclasses have their own empty instances. */
223
        assert(!PyTuple_CheckExact(op));
224
#endif
225
0
    }
226
227
311M
    PyObject_GC_UnTrack(op);
228
229
311M
    Py_ssize_t i = Py_SIZE(op);
230
971M
    while (--i >= 0) {
231
660M
        Py_XDECREF(op->ob_item[i]);
232
660M
    }
233
    // This will abort on the empty singleton (if there is one).
234
311M
    if (!maybe_freelist_push(op)) {
235
32.2M
        Py_TYPE(op)->tp_free((PyObject *)op);
236
32.2M
    }
237
311M
}
238
239
static PyObject *
240
tuple_repr(PyObject *self)
241
0
{
242
0
    PyTupleObject *v = _PyTuple_CAST(self);
243
0
    Py_ssize_t n = PyTuple_GET_SIZE(v);
244
0
    if (n == 0) {
245
0
        return PyUnicode_FromString("()");
246
0
    }
247
248
    /* While not mutable, it is still possible to end up with a cycle in a
249
       tuple through an object that stores itself within a tuple (and thus
250
       infinitely asks for the repr of itself). This should only be
251
       possible within a type. */
252
0
    int res = Py_ReprEnter((PyObject *)v);
253
0
    if (res != 0) {
254
0
        return res > 0 ? PyUnicode_FromString("(...)") : NULL;
255
0
    }
256
257
0
    Py_ssize_t prealloc;
258
0
    if (n > 1) {
259
        // "(" + "1" + ", 2" * (len - 1) + ")"
260
0
        prealloc = 1 + 1 + (2 + 1) * (n - 1) + 1;
261
0
    }
262
0
    else {
263
        // "(1,)"
264
0
        prealloc = 4;
265
0
    }
266
0
    PyUnicodeWriter *writer = PyUnicodeWriter_Create(prealloc);
267
0
    if (writer == NULL) {
268
0
        goto error;
269
0
    }
270
271
0
    if (PyUnicodeWriter_WriteChar(writer, '(') < 0) {
272
0
        goto error;
273
0
    }
274
275
    /* Do repr() on each element. */
276
0
    for (Py_ssize_t i = 0; i < n; ++i) {
277
0
        if (i > 0) {
278
0
            if (PyUnicodeWriter_WriteChar(writer, ',') < 0) {
279
0
                goto error;
280
0
            }
281
0
            if (PyUnicodeWriter_WriteChar(writer, ' ') < 0) {
282
0
                goto error;
283
0
            }
284
0
        }
285
286
0
        if (PyUnicodeWriter_WriteRepr(writer, v->ob_item[i]) < 0) {
287
0
            goto error;
288
0
        }
289
0
    }
290
291
0
    if (n == 1) {
292
0
        if (PyUnicodeWriter_WriteChar(writer, ',') < 0) {
293
0
            goto error;
294
0
        }
295
0
    }
296
0
    if (PyUnicodeWriter_WriteChar(writer, ')') < 0) {
297
0
        goto error;
298
0
    }
299
300
0
    Py_ReprLeave((PyObject *)v);
301
0
    return PyUnicodeWriter_Finish(writer);
302
303
0
error:
304
0
    PyUnicodeWriter_Discard(writer);
305
0
    Py_ReprLeave((PyObject *)v);
306
0
    return NULL;
307
0
}
308
309
310
/* Hash for tuples. This is a slightly simplified version of the xxHash
311
   non-cryptographic hash:
312
   - we do not use any parallelism, there is only 1 accumulator.
313
   - we drop the final mixing since this is just a permutation of the
314
     output space: it does not help against collisions.
315
   - at the end, we mangle the length with a single constant.
316
   For the xxHash specification, see
317
   https://github.com/Cyan4973/xxHash/blob/master/doc/xxhash_spec.md
318
319
   The constants for the hash function are defined in pycore_tuple.h.
320
*/
321
322
static Py_hash_t
323
tuple_hash(PyObject *op)
324
1.03M
{
325
1.03M
    PyTupleObject *v = _PyTuple_CAST(op);
326
327
1.03M
    Py_uhash_t acc = FT_ATOMIC_LOAD_SSIZE_RELAXED(v->ob_hash);
328
1.03M
    if (acc != (Py_uhash_t)-1) {
329
137k
        return acc;
330
137k
    }
331
332
900k
    Py_ssize_t len = Py_SIZE(v);
333
900k
    PyObject **item = v->ob_item;
334
900k
    acc = _PyTuple_HASH_XXPRIME_5;
335
3.64M
    for (Py_ssize_t i = 0; i < len; i++) {
336
2.74M
        Py_uhash_t lane = PyObject_Hash(item[i]);
337
2.74M
        if (lane == (Py_uhash_t)-1) {
338
0
            return -1;
339
0
        }
340
2.74M
        acc += lane * _PyTuple_HASH_XXPRIME_2;
341
2.74M
        acc = _PyTuple_HASH_XXROTATE(acc);
342
2.74M
        acc *= _PyTuple_HASH_XXPRIME_1;
343
2.74M
    }
344
345
    /* Add input length, mangled to keep the historical value of hash(()). */
346
900k
    acc += len ^ (_PyTuple_HASH_XXPRIME_5 ^ 3527539UL);
347
348
900k
    if (acc == (Py_uhash_t)-1) {
349
0
        acc = 1546275796;
350
0
    }
351
352
900k
    FT_ATOMIC_STORE_SSIZE_RELAXED(v->ob_hash, acc);
353
354
900k
    return acc;
355
900k
}
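
The loop above is a single-accumulator variant of xxHash: each element's hash is folded in with a multiply, a rotate, and another multiply, and the length is mixed in at the end so prefixes hash differently. A standalone illustrative re-implementation of the same combine step over precomputed element hashes; the constants shown are the standard 64-bit xxHash primes assumed to match pycore_tuple.h (32-bit Py_uhash_t builds use different constants and a different rotation).

#include <stdint.h>
#include <stddef.h>

#define XXPRIME_1 11400714785074694791ULL
#define XXPRIME_2 14029467366897019727ULL
#define XXPRIME_5  2870177450012600261ULL
#define XXROTATE(x) ((((x)) << 31) | (((x)) >> 33))   /* rotate left by 31 bits */

/* Combine an array of per-element hashes the way tuple_hash() does. */
static uint64_t
combine_hashes(const uint64_t *lanes, size_t len)
{
    uint64_t acc = XXPRIME_5;
    for (size_t i = 0; i < len; i++) {
        acc += lanes[i] * XXPRIME_2;
        acc = XXROTATE(acc);
        acc *= XXPRIME_1;
    }
    /* Mix in the length; the constant keeps hash(()) at its historical value. */
    acc += len ^ (XXPRIME_5 ^ 3527539UL);
    return acc;
}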
356
357
static Py_ssize_t
358
tuple_length(PyObject *self)
359
715k
{
360
715k
    PyTupleObject *a = _PyTuple_CAST(self);
361
715k
    return Py_SIZE(a);
362
715k
}
363
364
static int
365
tuple_contains(PyObject *self, PyObject *el)
366
8.65M
{
367
8.65M
    PyTupleObject *a = _PyTuple_CAST(self);
368
8.65M
    int cmp = 0;
369
31.2M
    for (Py_ssize_t i = 0; cmp == 0 && i < Py_SIZE(a); ++i) {
370
22.6M
        cmp = PyObject_RichCompareBool(PyTuple_GET_ITEM(a, i), el, Py_EQ);
371
22.6M
    }
372
8.65M
    return cmp;
373
8.65M
}
374
375
static PyObject *
376
tuple_item(PyObject *op, Py_ssize_t i)
377
18.5M
{
378
18.5M
    PyTupleObject *a = _PyTuple_CAST(op);
379
18.5M
    if (i < 0 || i >= Py_SIZE(a)) {
380
22
        PyErr_SetString(PyExc_IndexError, "tuple index out of range");
381
22
        return NULL;
382
22
    }
383
18.5M
    return Py_NewRef(a->ob_item[i]);
384
18.5M
}
385
386
PyObject *
387
PyTuple_FromArray(PyObject *const *src, Py_ssize_t n)
388
217M
{
389
217M
    if (n == 0) {
390
78.4M
        return tuple_get_empty();
391
78.4M
    }
392
393
138M
    PyTupleObject *tuple = tuple_alloc(n);
394
138M
    if (tuple == NULL) {
395
0
        return NULL;
396
0
    }
397
138M
    PyObject **dst = tuple->ob_item;
398
138M
    bool track = false;
399
396M
    for (Py_ssize_t i = 0; i < n; i++) {
400
257M
        PyObject *item = src[i];
401
257M
        if (!track && maybe_tracked(item)) {
402
32.0M
            track = true;
403
32.0M
        }
404
257M
        dst[i] = Py_NewRef(item);
405
257M
    }
406
138M
    if (track) {
407
32.0M
        _PyObject_GC_TRACK(tuple);
408
32.0M
    }
409
138M
    return (PyObject *)tuple;
410
138M
}
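
PyTuple_FromArray() copies n pointers out of a caller-owned array, taking a new reference to each element, and only registers the result with the GC when some element is trackable (the maybe_tracked() test above). A hedged usage sketch against the signature shown here; note that in older releases this function existed only under the internal name _PyTuple_FromArray, so its visibility to extension code is an assumption.

#include <Python.h>

/* Build a tuple from a stack array of borrowed object pointers.
   The array itself is left untouched; each element gets a new reference. */
static PyObject *
tuple_from_three(PyObject *a, PyObject *b, PyObject *c)
{
    PyObject *items[3] = {a, b, c};
    return PyTuple_FromArray(items, 3);
}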
411
412
PyObject *
413
_PyTuple_FromStackRefStealOnSuccess(const _PyStackRef *src, Py_ssize_t n)
414
116M
{
415
116M
    if (n == 0) {
416
0
        return tuple_get_empty();
417
0
    }
418
116M
    PyTupleObject *tuple = tuple_alloc(n);
419
116M
    if (tuple == NULL) {
420
0
        return NULL;
421
0
    }
422
116M
    PyObject **dst = tuple->ob_item;
423
116M
    bool track = false;
424
383M
    for (Py_ssize_t i = 0; i < n; i++) {
425
267M
        PyObject *item = PyStackRef_AsPyObjectSteal(src[i]);
426
267M
        if (!track && maybe_tracked(item)) {
427
46.8M
            track = true;
428
46.8M
        }
429
267M
        dst[i] = item;
430
267M
    }
431
116M
    if (track) {
432
46.8M
        _PyObject_GC_TRACK(tuple);
433
46.8M
    }
434
116M
    return (PyObject *)tuple;
435
116M
}
436
437
PyObject *
438
_PyTuple_FromArraySteal(PyObject *const *src, Py_ssize_t n)
439
355
{
440
355
    if (n == 0) {
441
0
        return tuple_get_empty();
442
0
    }
443
355
    PyTupleObject *tuple = tuple_alloc(n);
444
355
    if (tuple == NULL) {
445
0
        for (Py_ssize_t i = 0; i < n; i++) {
446
0
            Py_DECREF(src[i]);
447
0
        }
448
0
        return NULL;
449
0
    }
450
355
    PyObject **dst = tuple->ob_item;
451
1.96k
    for (Py_ssize_t i = 0; i < n; i++) {
452
1.60k
        PyObject *item = src[i];
453
1.60k
        dst[i] = item;
454
1.60k
    }
455
355
    _PyObject_GC_TRACK(tuple);
456
355
    return (PyObject *)tuple;
457
355
}
458
459
static PyObject *
460
tuple_slice(PyTupleObject *a, Py_ssize_t ilow,
461
           Py_ssize_t ihigh)
462
26.5M
{
463
26.5M
    if (ilow < 0)
464
0
        ilow = 0;
465
26.5M
    if (ihigh > Py_SIZE(a))
466
0
        ihigh = Py_SIZE(a);
467
26.5M
    if (ihigh < ilow)
468
0
        ihigh = ilow;
469
26.5M
    if (ilow == 0 && ihigh == Py_SIZE(a) && PyTuple_CheckExact(a)) {
470
0
        return Py_NewRef(a);
471
0
    }
472
26.5M
    return PyTuple_FromArray(a->ob_item + ilow, ihigh - ilow);
473
26.5M
}
474
475
PyObject *
476
PyTuple_GetSlice(PyObject *op, Py_ssize_t i, Py_ssize_t j)
477
26.5M
{
478
26.5M
    if (op == NULL || !PyTuple_Check(op)) {
479
0
        PyErr_BadInternalCall();
480
0
        return NULL;
481
0
    }
482
26.5M
    return tuple_slice((PyTupleObject *)op, i, j);
483
26.5M
}
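
PyTuple_GetSlice() clamps its bounds exactly like tuple_slice() above and, when the slice covers an exact tuple end to end, simply returns a new reference to the same object. A minimal sketch; the helper name first_two is hypothetical.

#include <Python.h>

/* Take the first two items of a tuple; indices are clamped, so asking for
   more items than exist just returns a copy of (or reference to) the whole tuple. */
static PyObject *
first_two(PyObject *tup)
{
    return PyTuple_GetSlice(tup, 0, 2);   /* new reference, or NULL on error */
}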
484
485
static PyObject *
486
tuple_concat(PyObject *aa, PyObject *bb)
487
30
{
488
30
    PyTupleObject *a = _PyTuple_CAST(aa);
489
30
    if (Py_SIZE(a) == 0 && PyTuple_CheckExact(bb)) {
490
0
        return Py_NewRef(bb);
491
0
    }
492
30
    if (!PyTuple_Check(bb)) {
493
0
        PyErr_Format(PyExc_TypeError,
494
0
             "can only concatenate tuple (not \"%.200s\") to tuple",
495
0
                 Py_TYPE(bb)->tp_name);
496
0
        return NULL;
497
0
    }
498
30
    PyTupleObject *b = (PyTupleObject *)bb;
499
500
30
    if (Py_SIZE(b) == 0 && PyTuple_CheckExact(a)) {
501
0
        return Py_NewRef(a);
502
0
    }
503
30
    assert((size_t)Py_SIZE(a) + (size_t)Py_SIZE(b) < PY_SSIZE_T_MAX);
504
30
    Py_ssize_t size = Py_SIZE(a) + Py_SIZE(b);
505
30
    if (size == 0) {
506
0
        return tuple_get_empty();
507
0
    }
508
509
30
    PyTupleObject *np = tuple_alloc(size);
510
30
    if (np == NULL) {
511
0
        return NULL;
512
0
    }
513
514
30
    PyObject **src = a->ob_item;
515
30
    PyObject **dest = np->ob_item;
516
244
    for (Py_ssize_t i = 0; i < Py_SIZE(a); i++) {
517
214
        PyObject *v = src[i];
518
214
        dest[i] = Py_NewRef(v);
519
214
    }
520
521
30
    src = b->ob_item;
522
30
    dest = np->ob_item + Py_SIZE(a);
523
74
    for (Py_ssize_t i = 0; i < Py_SIZE(b); i++) {
524
44
        PyObject *v = src[i];
525
44
        dest[i] = Py_NewRef(v);
526
44
    }
527
528
30
    _PyObject_GC_TRACK(np);
529
30
    return (PyObject *)np;
530
30
}
531
532
static PyObject *
533
tuple_repeat(PyObject *self, Py_ssize_t n)
534
0
{
535
0
    PyTupleObject *a = _PyTuple_CAST(self);
536
0
    const Py_ssize_t input_size = Py_SIZE(a);
537
0
    if (input_size == 0 || n == 1) {
538
0
        if (PyTuple_CheckExact(a)) {
539
            /* Since tuples are immutable, we can return a shared
540
               copy in this case */
541
0
            return Py_NewRef(a);
542
0
        }
543
0
    }
544
0
    if (input_size == 0 || n <= 0) {
545
0
        return tuple_get_empty();
546
0
    }
547
0
    assert(n>0);
548
549
0
    if (input_size > PY_SSIZE_T_MAX / n)
550
0
        return PyErr_NoMemory();
551
0
    Py_ssize_t output_size = input_size * n;
552
553
0
    PyTupleObject *np = tuple_alloc(output_size);
554
0
    if (np == NULL)
555
0
        return NULL;
556
557
0
    PyObject **dest = np->ob_item;
558
0
    if (input_size == 1) {
559
0
        PyObject *elem = a->ob_item[0];
560
0
        _Py_RefcntAdd(elem, n);
561
0
        PyObject **dest_end = dest + output_size;
562
0
        while (dest < dest_end) {
563
0
            *dest++ = elem;
564
0
        }
565
0
    }
566
0
    else {
567
0
        PyObject **src = a->ob_item;
568
0
        PyObject **src_end = src + input_size;
569
0
        while (src < src_end) {
570
0
            _Py_RefcntAdd(*src, n);
571
0
            *dest++ = *src++;
572
0
        }
573
574
0
        _Py_memory_repeat((char *)np->ob_item, sizeof(PyObject *)*output_size,
575
0
                          sizeof(PyObject *)*input_size);
576
0
    }
577
0
    _PyObject_GC_TRACK(np);
578
0
    return (PyObject *) np;
579
0
}
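
tuple_concat() and tuple_repeat() back the sq_concat and sq_repeat slots, so both a + b and a * n on tuples end up in the code above; from C, the abstract sequence API reaches the same slots. A small sketch with a hypothetical helper name.

#include <Python.h>

/* Concatenate and repeat tuples via the abstract sequence protocol, which
   dispatches to tuple_concat() / tuple_repeat() above. */
static void
concat_repeat_example(PyObject *a, PyObject *b)
{
    PyObject *joined  = PySequence_Concat(a, b);    /* a + b  */
    PyObject *tripled = PySequence_Repeat(a, 3);    /* a * 3  */
    Py_XDECREF(joined);
    Py_XDECREF(tripled);
}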
580
581
/*[clinic input]
582
tuple.index
583
584
    value: object
585
    start: slice_index(accept={int}) = 0
586
    stop: slice_index(accept={int}, c_default="PY_SSIZE_T_MAX") = sys.maxsize
587
    /
588
589
Return first index of value.
590
591
Raises ValueError if the value is not present.
592
[clinic start generated code]*/
593
594
static PyObject *
595
tuple_index_impl(PyTupleObject *self, PyObject *value, Py_ssize_t start,
596
                 Py_ssize_t stop)
597
/*[clinic end generated code: output=07b6f9f3cb5c33eb input=fb39e9874a21fe3f]*/
598
0
{
599
0
    Py_ssize_t i;
600
601
0
    if (start < 0) {
602
0
        start += Py_SIZE(self);
603
0
        if (start < 0)
604
0
            start = 0;
605
0
    }
606
0
    if (stop < 0) {
607
0
        stop += Py_SIZE(self);
608
0
    }
609
0
    else if (stop > Py_SIZE(self)) {
610
0
        stop = Py_SIZE(self);
611
0
    }
612
0
    for (i = start; i < stop; i++) {
613
0
        int cmp = PyObject_RichCompareBool(self->ob_item[i], value, Py_EQ);
614
0
        if (cmp > 0)
615
0
            return PyLong_FromSsize_t(i);
616
0
        else if (cmp < 0)
617
0
            return NULL;
618
0
    }
619
0
    PyErr_SetString(PyExc_ValueError, "tuple.index(x): x not in tuple");
620
0
    return NULL;
621
0
}
622
623
/*[clinic input]
624
tuple.count
625
626
     value: object
627
     /
628
629
Return number of occurrences of value.
630
[clinic start generated code]*/
631
632
static PyObject *
633
tuple_count_impl(PyTupleObject *self, PyObject *value)
634
/*[clinic end generated code: output=cf02888d4bc15d7a input=531721aff65bd772]*/
635
0
{
636
0
    Py_ssize_t count = 0;
637
0
    Py_ssize_t i;
638
639
0
    for (i = 0; i < Py_SIZE(self); i++) {
640
0
        int cmp = PyObject_RichCompareBool(self->ob_item[i], value, Py_EQ);
641
0
        if (cmp > 0)
642
0
            count++;
643
0
        else if (cmp < 0)
644
0
            return NULL;
645
0
    }
646
0
    return PyLong_FromSsize_t(count);
647
0
}
648
649
static int
650
tuple_traverse(PyObject *self, visitproc visit, void *arg)
651
3.31M
{
652
3.31M
    PyTupleObject *o = _PyTuple_CAST(self);
653
14.7M
    for (Py_ssize_t i = Py_SIZE(o); --i >= 0; ) {
654
11.4M
        Py_VISIT(o->ob_item[i]);
655
11.4M
    }
656
3.31M
    return 0;
657
3.31M
}
658
659
static PyObject *
660
tuple_richcompare(PyObject *v, PyObject *w, int op)
661
3.56M
{
662
3.56M
    PyTupleObject *vt, *wt;
663
3.56M
    Py_ssize_t i;
664
3.56M
    Py_ssize_t vlen, wlen;
665
666
3.56M
    if (!PyTuple_Check(v) || !PyTuple_Check(w))
667
0
        Py_RETURN_NOTIMPLEMENTED;
668
669
3.56M
    vt = (PyTupleObject *)v;
670
3.56M
    wt = (PyTupleObject *)w;
671
672
3.56M
    vlen = Py_SIZE(vt);
673
3.56M
    wlen = Py_SIZE(wt);
674
675
    /* Note:  the corresponding code for lists has an "early out" test
676
     * here when op is EQ or NE and the lengths differ.  That pays there,
677
     * but Tim was unable to find any real code where EQ/NE tuple
678
     * compares don't have the same length, so testing for it here would
679
     * have cost without benefit.
680
     */
681
682
    /* Search for the first index where items are different.
683
     * Note that because tuples are immutable, it's safe to reuse
684
     * vlen and wlen across the comparison calls.
685
     */
686
5.93M
    for (i = 0; i < vlen && i < wlen; i++) {
687
5.13M
        int k = PyObject_RichCompareBool(vt->ob_item[i],
688
5.13M
                                         wt->ob_item[i], Py_EQ);
689
5.13M
        if (k < 0)
690
0
            return NULL;
691
5.13M
        if (!k)
692
2.75M
            break;
693
5.13M
    }
694
695
3.56M
    if (i >= vlen || i >= wlen) {
696
        /* No more items to compare -- compare sizes */
697
804k
        Py_RETURN_RICHCOMPARE(vlen, wlen, op);
698
804k
    }
699
700
    /* We have an item that differs -- shortcuts for EQ/NE */
701
2.75M
    if (op == Py_EQ) {
702
366
        Py_RETURN_FALSE;
703
366
    }
704
2.75M
    if (op == Py_NE) {
705
76
        Py_RETURN_TRUE;
706
76
    }
707
708
    /* Compare the final item again using the proper operator */
709
2.75M
    return PyObject_RichCompare(vt->ob_item[i], wt->ob_item[i], op);
710
2.75M
}
711
712
static PyObject *
713
tuple_subtype_new(PyTypeObject *type, PyObject *iterable);
714
715
/*[clinic input]
716
@classmethod
717
tuple.__new__ as tuple_new
718
    iterable: object(c_default="NULL") = ()
719
    /
720
721
Built-in immutable sequence.
722
723
If no argument is given, the constructor returns an empty tuple.
724
If iterable is specified the tuple is initialized from iterable's items.
725
726
If the argument is a tuple, the return value is the same object.
727
[clinic start generated code]*/
728
729
static PyObject *
730
tuple_new_impl(PyTypeObject *type, PyObject *iterable)
731
/*[clinic end generated code: output=4546d9f0d469bce7 input=86963bcde633b5a2]*/
732
3.19M
{
733
3.19M
    if (type != &PyTuple_Type)
734
1.59M
        return tuple_subtype_new(type, iterable);
735
736
1.59M
    if (iterable == NULL) {
737
0
        return tuple_get_empty();
738
0
    }
739
1.59M
    else {
740
1.59M
        return PySequence_Tuple(iterable);
741
1.59M
    }
742
1.59M
}
743
744
static PyObject *
745
tuple_vectorcall(PyObject *type, PyObject * const*args,
746
                 size_t nargsf, PyObject *kwnames)
747
103
{
748
103
    if (!_PyArg_NoKwnames("tuple", kwnames)) {
749
0
        return NULL;
750
0
    }
751
752
103
    Py_ssize_t nargs = PyVectorcall_NARGS(nargsf);
753
103
    if (!_PyArg_CheckPositional("tuple", nargs, 0, 1)) {
754
0
        return NULL;
755
0
    }
756
757
103
    if (nargs) {
758
103
        return tuple_new_impl(_PyType_CAST(type), args[0]);
759
103
    }
760
0
    else {
761
0
        return tuple_get_empty();
762
0
    }
763
103
}
764
765
static PyObject *
766
tuple_subtype_new(PyTypeObject *type, PyObject *iterable)
767
1.59M
{
768
1.59M
    PyObject *tmp, *newobj, *item;
769
1.59M
    Py_ssize_t i, n;
770
771
1.59M
    assert(PyType_IsSubtype(type, &PyTuple_Type));
772
    // tuple subclasses must implement the GC protocol
773
1.59M
    assert(_PyType_IS_GC(type));
774
775
1.59M
    tmp = tuple_new_impl(&PyTuple_Type, iterable);
776
1.59M
    if (tmp == NULL)
777
0
        return NULL;
778
1.59M
    assert(PyTuple_Check(tmp));
779
    /* This may allocate an empty tuple that is not the global one. */
780
1.59M
    newobj = type->tp_alloc(type, n = PyTuple_GET_SIZE(tmp));
781
1.59M
    if (newobj == NULL) {
782
0
        Py_DECREF(tmp);
783
0
        return NULL;
784
0
    }
785
6.40M
    for (i = 0; i < n; i++) {
786
4.80M
        item = PyTuple_GET_ITEM(tmp, i);
787
4.80M
        PyTuple_SET_ITEM(newobj, i, Py_NewRef(item));
788
4.80M
    }
789
1.59M
    Py_DECREF(tmp);
790
791
1.59M
    _PyTuple_RESET_HASH_CACHE(newobj);
792
793
    // Don't track if a subclass tp_alloc is PyType_GenericAlloc()
794
1.59M
    if (!_PyObject_GC_IS_TRACKED(newobj)) {
795
0
        _PyObject_GC_TRACK(newobj);
796
0
    }
797
1.59M
    return newobj;
798
1.59M
}
799
800
static PySequenceMethods tuple_as_sequence = {
801
    tuple_length,                               /* sq_length */
802
    tuple_concat,                               /* sq_concat */
803
    tuple_repeat,                               /* sq_repeat */
804
    tuple_item,                                 /* sq_item */
805
    0,                                          /* sq_slice */
806
    0,                                          /* sq_ass_item */
807
    0,                                          /* sq_ass_slice */
808
    tuple_contains,                             /* sq_contains */
809
};
810
811
static PyObject*
812
tuple_subscript(PyObject *op, PyObject* item)
813
1.29M
{
814
1.29M
    PyTupleObject *self = _PyTuple_CAST(op);
815
1.29M
    if (_PyIndex_Check(item)) {
816
1.29M
        Py_ssize_t i = PyNumber_AsSsize_t(item, PyExc_IndexError);
817
1.29M
        if (i == -1 && PyErr_Occurred())
818
0
            return NULL;
819
1.29M
        if (i < 0)
820
1.29M
            i += PyTuple_GET_SIZE(self);
821
1.29M
        return tuple_item(op, i);
822
1.29M
    }
823
347
    else if (PySlice_Check(item)) {
824
347
        Py_ssize_t start, stop, step, slicelength, i;
825
347
        size_t cur;
826
347
        PyObject* it;
827
347
        PyObject **src, **dest;
828
829
347
        if (PySlice_Unpack(item, &start, &stop, &step) < 0) {
830
0
            return NULL;
831
0
        }
832
347
        slicelength = PySlice_AdjustIndices(PyTuple_GET_SIZE(self), &start,
833
347
                                            &stop, step);
834
835
347
        if (slicelength <= 0) {
836
8
            return tuple_get_empty();
837
8
        }
838
339
        else if (start == 0 && step == 1 &&
839
18
                 slicelength == PyTuple_GET_SIZE(self) &&
840
8
                 PyTuple_CheckExact(self)) {
841
8
            return Py_NewRef(self);
842
8
        }
843
331
        else {
844
331
            PyTupleObject* result = tuple_alloc(slicelength);
845
331
            if (!result) return NULL;
846
847
331
            src = self->ob_item;
848
331
            dest = result->ob_item;
849
2.20k
            for (cur = start, i = 0; i < slicelength;
850
1.87k
                 cur += step, i++) {
851
1.87k
                it = Py_NewRef(src[cur]);
852
1.87k
                dest[i] = it;
853
1.87k
            }
854
855
331
            _PyObject_GC_TRACK(result);
856
331
            return (PyObject *)result;
857
331
        }
858
347
    }
859
0
    else {
860
0
        PyErr_Format(PyExc_TypeError,
861
0
                     "tuple indices must be integers or slices, not %.200s",
862
0
                     Py_TYPE(item)->tp_name);
863
0
        return NULL;
864
0
    }
865
1.29M
}
866
867
/*[clinic input]
868
tuple.__getnewargs__
869
[clinic start generated code]*/
870
871
static PyObject *
872
tuple___getnewargs___impl(PyTupleObject *self)
873
/*[clinic end generated code: output=25e06e3ee56027e2 input=1aeb4b286a21639a]*/
874
0
{
875
0
    return Py_BuildValue("(N)", tuple_slice(self, 0, Py_SIZE(self)));
876
0
}
877
878
static PyMethodDef tuple_methods[] = {
879
    TUPLE___GETNEWARGS___METHODDEF
880
    TUPLE_INDEX_METHODDEF
881
    TUPLE_COUNT_METHODDEF
882
    {"__class_getitem__", Py_GenericAlias, METH_O|METH_CLASS, PyDoc_STR("See PEP 585")},
883
    {NULL,              NULL}           /* sentinel */
884
};
885
886
static PyMappingMethods tuple_as_mapping = {
887
    tuple_length,
888
    tuple_subscript,
889
    0
890
};
891
892
static PyObject *tuple_iter(PyObject *seq);
893
894
PyTypeObject PyTuple_Type = {
895
    PyVarObject_HEAD_INIT(&PyType_Type, 0)
896
    "tuple",
897
    sizeof(PyTupleObject) - sizeof(PyObject *),
898
    sizeof(PyObject *),
899
    tuple_dealloc,                              /* tp_dealloc */
900
    0,                                          /* tp_vectorcall_offset */
901
    0,                                          /* tp_getattr */
902
    0,                                          /* tp_setattr */
903
    0,                                          /* tp_as_async */
904
    tuple_repr,                                 /* tp_repr */
905
    0,                                          /* tp_as_number */
906
    &tuple_as_sequence,                         /* tp_as_sequence */
907
    &tuple_as_mapping,                          /* tp_as_mapping */
908
    tuple_hash,                                 /* tp_hash */
909
    0,                                          /* tp_call */
910
    0,                                          /* tp_str */
911
    PyObject_GenericGetAttr,                    /* tp_getattro */
912
    0,                                          /* tp_setattro */
913
    0,                                          /* tp_as_buffer */
914
    Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC |
915
        Py_TPFLAGS_BASETYPE | Py_TPFLAGS_TUPLE_SUBCLASS |
916
        _Py_TPFLAGS_MATCH_SELF | Py_TPFLAGS_SEQUENCE,  /* tp_flags */
917
    tuple_new__doc__,                           /* tp_doc */
918
    tuple_traverse,                             /* tp_traverse */
919
    0,                                          /* tp_clear */
920
    tuple_richcompare,                          /* tp_richcompare */
921
    0,                                          /* tp_weaklistoffset */
922
    tuple_iter,                                 /* tp_iter */
923
    0,                                          /* tp_iternext */
924
    tuple_methods,                              /* tp_methods */
925
    0,                                          /* tp_members */
926
    0,                                          /* tp_getset */
927
    0,                                          /* tp_base */
928
    0,                                          /* tp_dict */
929
    0,                                          /* tp_descr_get */
930
    0,                                          /* tp_descr_set */
931
    0,                                          /* tp_dictoffset */
932
    0,                                          /* tp_init */
933
    0,                                          /* tp_alloc */
934
    tuple_new,                                  /* tp_new */
935
    PyObject_GC_Del,                            /* tp_free */
936
    .tp_vectorcall = tuple_vectorcall,
937
    .tp_version_tag = _Py_TYPE_VERSION_TUPLE,
938
};
939
940
/* The following function breaks the notion that tuples are immutable:
941
   it changes the size of a tuple.  We get away with this only if there
942
   is only one module referencing the object.  You can also think of it
943
   as creating a new tuple object and destroying the old one, only more
944
   efficiently.  In any case, don't use this if the tuple may already be
945
   known to some other part of the code. */
946
947
int
948
_PyTuple_Resize(PyObject **pv, Py_ssize_t newsize)
949
400
{
950
400
    PyTupleObject *v;
951
400
    PyTupleObject *sv;
952
400
    Py_ssize_t i;
953
400
    Py_ssize_t oldsize;
954
955
400
    v = (PyTupleObject *) *pv;
956
400
    if (v == NULL || !Py_IS_TYPE(v, &PyTuple_Type) ||
957
400
        (Py_SIZE(v) != 0 && !_PyObject_IsUniquelyReferenced(*pv))) {
958
0
        *pv = 0;
959
0
        Py_XDECREF(v);
960
0
        PyErr_BadInternalCall();
961
0
        return -1;
962
0
    }
963
964
400
    oldsize = Py_SIZE(v);
965
400
    if (oldsize == newsize) {
966
378
        return 0;
967
378
    }
968
22
    if (newsize == 0) {
969
0
        Py_DECREF(v);
970
0
        *pv = tuple_get_empty();
971
0
        return 0;
972
0
    }
973
22
    if (oldsize == 0) {
974
#ifdef Py_DEBUG
975
        assert(v == &_Py_SINGLETON(tuple_empty));
976
#endif
977
        /* The empty tuple is statically allocated so we never
978
           resize it in-place. */
979
0
        Py_DECREF(v);
980
0
        *pv = PyTuple_New(newsize);
981
0
        return *pv == NULL ? -1 : 0;
982
0
    }
983
984
22
    if (_PyObject_GC_IS_TRACKED(v)) {
985
22
        _PyObject_GC_UNTRACK(v);
986
22
    }
987
#ifdef Py_TRACE_REFS
988
    _Py_ForgetReference((PyObject *) v);
989
#endif
990
    /* DECREF items deleted by shrinkage */
991
88
    for (i = newsize; i < oldsize; i++) {
992
66
        Py_CLEAR(v->ob_item[i]);
993
66
    }
994
22
    _PyReftracerTrack((PyObject *)v, PyRefTracer_DESTROY);
995
22
    sv = PyObject_GC_Resize(PyTupleObject, v, newsize);
996
22
    if (sv == NULL) {
997
0
        *pv = NULL;
998
#ifdef Py_REF_DEBUG
999
        _Py_DecRefTotal(_PyThreadState_GET());
1000
#endif
1001
0
        PyObject_GC_Del(v);
1002
0
        return -1;
1003
0
    }
1004
22
    _Py_NewReferenceNoTotal((PyObject *) sv);
1005
    /* Zero out items added by growing */
1006
22
    if (newsize > oldsize)
1007
0
        memset(&sv->ob_item[oldsize], 0,
1008
0
               sizeof(*sv->ob_item) * (newsize - oldsize));
1009
22
    *pv = (PyObject *) sv;
1010
22
    _PyObject_GC_TRACK(sv);
1011
22
    return 0;
1012
22
}
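
_PyTuple_Resize() is the documented (if underscore-prefixed) escape hatch for growing or shrinking a tuple that only the caller references, for example while materializing an iterator of unknown length, which is how PySequence_Tuple() uses it. A hedged sketch under that assumption; the helper name collect_up_to is hypothetical.

#include <Python.h>

/* Collect at most `cap` items from an iterable into a tuple, then shrink the
   tuple to the number of items actually read. The resize is safe only because
   the tuple is not yet visible to any other code. */
static PyObject *
collect_up_to(PyObject *iterable, Py_ssize_t cap)
{
    PyObject *it = PyObject_GetIter(iterable);
    if (it == NULL) {
        return NULL;
    }
    PyObject *result = PyTuple_New(cap);
    if (result == NULL) {
        Py_DECREF(it);
        return NULL;
    }
    Py_ssize_t n = 0;
    while (n < cap) {
        PyObject *item = PyIter_Next(it);
        if (item == NULL) {
            break;                           /* exhausted, or error pending */
        }
        PyTuple_SET_ITEM(result, n, item);   /* steals the item reference */
        n++;
    }
    Py_DECREF(it);
    if (PyErr_Occurred()) {
        Py_DECREF(result);
        return NULL;
    }
    if (_PyTuple_Resize(&result, n) < 0) {   /* drop the unused NULL tail */
        return NULL;                         /* result has already been cleared */
    }
    return result;
}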
1013
1014
/*********************** Tuple Iterator **************************/
1015
1016
12.0M
#define _PyTupleIterObject_CAST(op) ((_PyTupleIterObject *)(op))
1017
1018
static void
1019
tupleiter_dealloc(PyObject *self)
1020
2.41M
{
1021
2.41M
    _PyTupleIterObject *it = _PyTupleIterObject_CAST(self);
1022
2.41M
    _PyObject_GC_UNTRACK(it);
1023
2.41M
    Py_XDECREF(it->it_seq);
1024
2.41M
    assert(Py_IS_TYPE(self, &PyTupleIter_Type));
1025
2.41M
    _Py_FREELIST_FREE(tuple_iters, it, PyObject_GC_Del);
1026
2.41M
}
1027
1028
static int
1029
tupleiter_traverse(PyObject *self, visitproc visit, void *arg)
1030
0
{
1031
0
    _PyTupleIterObject *it = _PyTupleIterObject_CAST(self);
1032
0
    Py_VISIT(it->it_seq);
1033
0
    return 0;
1034
0
}
1035
1036
static PyObject *
1037
tupleiter_next(PyObject *self)
1038
9.65M
{
1039
9.65M
    _PyTupleIterObject *it = _PyTupleIterObject_CAST(self);
1040
9.65M
    PyTupleObject *seq;
1041
9.65M
    PyObject *item;
1042
1043
9.65M
    assert(it != NULL);
1044
9.65M
    seq = it->it_seq;
1045
9.65M
#ifndef Py_GIL_DISABLED
1046
9.65M
    if (seq == NULL)
1047
0
        return NULL;
1048
9.65M
#endif
1049
9.65M
    assert(PyTuple_Check(seq));
1050
1051
9.65M
    Py_ssize_t index = FT_ATOMIC_LOAD_SSIZE_RELAXED(it->it_index);
1052
9.65M
    if (index < PyTuple_GET_SIZE(seq)) {
1053
7.24M
        FT_ATOMIC_STORE_SSIZE_RELAXED(it->it_index, index + 1);
1054
7.24M
        item = PyTuple_GET_ITEM(seq, index);
1055
7.24M
        return Py_NewRef(item);
1056
7.24M
    }
1057
1058
2.41M
#ifndef Py_GIL_DISABLED
1059
2.41M
    it->it_seq = NULL;
1060
2.41M
    Py_DECREF(seq);
1061
2.41M
#endif
1062
2.41M
    return NULL;
1063
9.65M
}
1064
1065
static PyObject *
1066
tupleiter_len(PyObject *self, PyObject *Py_UNUSED(ignored))
1067
0
{
1068
0
    _PyTupleIterObject *it = _PyTupleIterObject_CAST(self);
1069
0
    Py_ssize_t len = 0;
1070
#ifdef Py_GIL_DISABLED
1071
    Py_ssize_t idx = FT_ATOMIC_LOAD_SSIZE_RELAXED(it->it_index);
1072
    Py_ssize_t seq_len = PyTuple_GET_SIZE(it->it_seq);
1073
    if (idx < seq_len)
1074
        len = seq_len - idx;
1075
#else
1076
0
    if (it->it_seq)
1077
0
        len = PyTuple_GET_SIZE(it->it_seq) - it->it_index;
1078
0
#endif
1079
0
    return PyLong_FromSsize_t(len);
1080
0
}
1081
1082
PyDoc_STRVAR(length_hint_doc, "Private method returning an estimate of len(list(it)).");
1083
1084
static PyObject *
1085
tupleiter_reduce(PyObject *self, PyObject *Py_UNUSED(ignored))
1086
0
{
1087
0
    PyObject *iter = _PyEval_GetBuiltin(&_Py_ID(iter));
1088
1089
    /* _PyEval_GetBuiltin can invoke arbitrary code,
1090
     * so the call must come before the iterator's fields are read.
1091
     * See issue #101765. */
1092
0
    _PyTupleIterObject *it = _PyTupleIterObject_CAST(self);
1093
1094
#ifdef Py_GIL_DISABLED
1095
    Py_ssize_t idx = FT_ATOMIC_LOAD_SSIZE_RELAXED(it->it_index);
1096
    if (idx < PyTuple_GET_SIZE(it->it_seq))
1097
        return Py_BuildValue("N(O)n", iter, it->it_seq, idx);
1098
#else
1099
0
    if (it->it_seq)
1100
0
        return Py_BuildValue("N(O)n", iter, it->it_seq, it->it_index);
1101
0
#endif
1102
0
    return Py_BuildValue("N(())", iter);
1103
0
}
1104
1105
static PyObject *
1106
tupleiter_setstate(PyObject *self, PyObject *state)
1107
0
{
1108
0
    _PyTupleIterObject *it = _PyTupleIterObject_CAST(self);
1109
0
    Py_ssize_t index = PyLong_AsSsize_t(state);
1110
0
    if (index == -1 && PyErr_Occurred())
1111
0
        return NULL;
1112
0
    if (it->it_seq != NULL) {
1113
0
        if (index < 0)
1114
0
            index = 0;
1115
0
        else if (index > PyTuple_GET_SIZE(it->it_seq))
1116
0
            index = PyTuple_GET_SIZE(it->it_seq); /* exhausted iterator */
1117
0
        FT_ATOMIC_STORE_SSIZE_RELAXED(it->it_index, index);
1118
0
    }
1119
0
    Py_RETURN_NONE;
1120
0
}
1121
1122
PyDoc_STRVAR(reduce_doc, "Return state information for pickling.");
1123
PyDoc_STRVAR(setstate_doc, "Set state information for unpickling.");
1124
1125
static PyMethodDef tupleiter_methods[] = {
1126
    {"__length_hint__", tupleiter_len, METH_NOARGS, length_hint_doc},
1127
    {"__reduce__", tupleiter_reduce, METH_NOARGS, reduce_doc},
1128
    {"__setstate__", tupleiter_setstate, METH_O, setstate_doc},
1129
    {NULL, NULL, 0, NULL} /* sentinel */
1130
};
1131
1132
PyTypeObject PyTupleIter_Type = {
1133
    PyVarObject_HEAD_INIT(&PyType_Type, 0)
1134
    "tuple_iterator",                           /* tp_name */
1135
    sizeof(_PyTupleIterObject),                 /* tp_basicsize */
1136
    0,                                          /* tp_itemsize */
1137
    /* methods */
1138
    tupleiter_dealloc,                          /* tp_dealloc */
1139
    0,                                          /* tp_vectorcall_offset */
1140
    0,                                          /* tp_getattr */
1141
    0,                                          /* tp_setattr */
1142
    0,                                          /* tp_as_async */
1143
    0,                                          /* tp_repr */
1144
    0,                                          /* tp_as_number */
1145
    0,                                          /* tp_as_sequence */
1146
    0,                                          /* tp_as_mapping */
1147
    0,                                          /* tp_hash */
1148
    0,                                          /* tp_call */
1149
    0,                                          /* tp_str */
1150
    PyObject_GenericGetAttr,                    /* tp_getattro */
1151
    0,                                          /* tp_setattro */
1152
    0,                                          /* tp_as_buffer */
1153
    Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC,/* tp_flags */
1154
    0,                                          /* tp_doc */
1155
    tupleiter_traverse,                         /* tp_traverse */
1156
    0,                                          /* tp_clear */
1157
    0,                                          /* tp_richcompare */
1158
    0,                                          /* tp_weaklistoffset */
1159
    PyObject_SelfIter,                          /* tp_iter */
1160
    tupleiter_next,                             /* tp_iternext */
1161
    tupleiter_methods,                          /* tp_methods */
1162
    0,
1163
};
1164
1165
static PyObject *
1166
tuple_iter(PyObject *seq)
1167
2.41M
{
1168
2.41M
    if (!PyTuple_Check(seq)) {
1169
0
        PyErr_BadInternalCall();
1170
0
        return NULL;
1171
0
    }
1172
2.41M
    _PyTupleIterObject *it = _Py_FREELIST_POP(_PyTupleIterObject, tuple_iters);
1173
2.41M
    if (it == NULL) {
1174
40
        it = PyObject_GC_New(_PyTupleIterObject, &PyTupleIter_Type);
1175
40
        if (it == NULL)
1176
0
            return NULL;
1177
40
    }
1178
2.41M
    it->it_index = 0;
1179
2.41M
    it->it_seq = (PyTupleObject *)Py_NewRef(seq);
1180
2.41M
    _PyObject_GC_TRACK(it);
1181
2.41M
    return (PyObject *)it;
1182
2.41M
}
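
tuple_iter() backs tp_iter and recycles iterator objects through the tuple_iters freelist; from C, the usual PyObject_GetIter()/PyIter_Next() loop lands in the code above. A minimal sketch with a hypothetical helper name.

#include <Python.h>

/* Standard iteration over a tuple; each PyIter_Next() call goes through
   tupleiter_next() above and returns a new reference. */
static int
print_items(PyObject *tup)
{
    PyObject *it = PyObject_GetIter(tup);   /* tuple_iter() */
    if (it == NULL) {
        return -1;
    }
    PyObject *item;
    while ((item = PyIter_Next(it)) != NULL) {
        PyObject_Print(item, stdout, 0);
        printf("\n");
        Py_DECREF(item);
    }
    Py_DECREF(it);
    return PyErr_Occurred() ? -1 : 0;       /* PyIter_Next returns NULL on error too */
}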
1183
1184
1185
/*************
1186
 * freelists *
1187
 *************/
1188
1189
static inline int
1190
maybe_freelist_push(PyTupleObject *op)
1191
311M
{
1192
311M
    if (!Py_IS_TYPE(op, &PyTuple_Type)) {
1193
1.59M
        return 0;
1194
1.59M
    }
1195
309M
    Py_ssize_t index = Py_SIZE(op) - 1;
1196
309M
    if (index < PyTuple_MAXSAVESIZE) {
1197
309M
        return _Py_FREELIST_PUSH(tuples[index], op, Py_tuple_MAXFREELIST);
1198
309M
    }
1199
3.44k
    return 0;
1200
309M
}
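
The freelist machinery keeps one free list per tuple length up to PyTuple_MAXSAVESIZE: tuple_alloc() pops from it and maybe_freelist_push() returns objects to it at dealloc time. Below is a simplified, self-contained illustration of the size-indexed free-list idea, not the actual _Py_FREELIST_* macros; the size class count (20) and per-list cap (2000) are assumed placeholder values.

#include <stddef.h>

/* Toy model: one singly linked free list per size class, capped in length.
   Freed blocks are chained through their first pointer-sized field. */
#define MAXSAVESIZE   20
#define MAXFREELIST   2000

typedef struct block { struct block *next; } block;

static block *freelist[MAXSAVESIZE];
static int    numfree[MAXSAVESIZE];

/* Reuse a cached block of this size class, or return NULL so the caller
   falls back to the real allocator. */
static void *
pop_free(size_t index)
{
    if (index >= MAXSAVESIZE || freelist[index] == NULL) {
        return NULL;
    }
    block *b = freelist[index];
    freelist[index] = b->next;
    numfree[index]--;
    return b;
}

/* Cache a freed block; return 0 when the caller must really free it. */
static int
push_free(size_t index, void *p)
{
    if (index >= MAXSAVESIZE || numfree[index] >= MAXFREELIST) {
        return 0;
    }
    block *b = p;
    b->next = freelist[index];
    freelist[index] = b;
    numfree[index]++;
    return 1;
}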
1201
1202
/* Print summary info about the state of the optimized allocator */
1203
void
1204
_PyTuple_DebugMallocStats(FILE *out)
1205
0
{
1206
0
    for (int i = 0; i < PyTuple_MAXSAVESIZE; i++) {
1207
0
        int len = i + 1;
1208
0
        char buf[128];
1209
0
        PyOS_snprintf(buf, sizeof(buf),
1210
0
                      "free %d-sized PyTupleObject", len);
1211
0
        _PyDebugAllocatorStats(out, buf, _Py_FREELIST_SIZE(tuples[i]),
1212
0
                               _PyObject_VAR_SIZE(&PyTuple_Type, len));
1213
0
    }
1214
0
}