Coverage Report

Created: 2025-12-07 07:03

/src/cpython/Objects/tupleobject.c
Line
Count
Source
1
/* Tuple object implementation */
2
3
#include "Python.h"
4
#include "pycore_abstract.h"      // _PyIndex_Check()
5
#include "pycore_ceval.h"         // _PyEval_GetBuiltin()
6
#include "pycore_freelist.h"      // _Py_FREELIST_PUSH()
7
#include "pycore_gc.h"            // _PyObject_GC_IS_TRACKED()
8
#include "pycore_list.h"          // _Py_memory_repeat()
9
#include "pycore_modsupport.h"    // _PyArg_NoKwnames()
10
#include "pycore_object.h"        // _PyObject_GC_TRACK()
11
#include "pycore_stackref.h"      // PyStackRef_AsPyObjectSteal()
12
#include "pycore_tuple.h"         // _PyTupleIterObject
13
14
15
/*[clinic input]
16
class tuple "PyTupleObject *" "&PyTuple_Type"
17
[clinic start generated code]*/
18
/*[clinic end generated code: output=da39a3ee5e6b4b0d input=f051ba3cfdf9a189]*/
19
20
#include "clinic/tupleobject.c.h"
21
22
23
static inline int maybe_freelist_push(PyTupleObject *);
24
25
26
/* Allocate an uninitialized tuple object. Before making it public, the following
27
   steps must be done:
28
29
   - Initialize its items.
30
   - Call _PyObject_GC_TRACK() on it.
31
32
   Because the empty tuple is always reused and it's already tracked by GC,
33
   this function must not be called with size == 0 (unless from PyTuple_New()
34
   which wraps this function).
35
*/
36
static PyTupleObject *
37
tuple_alloc(Py_ssize_t size)
38
477M
{
39
477M
    if (size < 0) {
40
0
        PyErr_BadInternalCall();
41
0
        return NULL;
42
0
    }
43
477M
    assert(size != 0);    // The empty tuple is statically allocated.
44
477M
    Py_ssize_t index = size - 1;
45
477M
    if (index < PyTuple_MAXSAVESIZE) {
46
472M
        PyTupleObject *op = _Py_FREELIST_POP(PyTupleObject, tuples[index]);
47
472M
        if (op != NULL) {
48
366M
            _PyTuple_RESET_HASH_CACHE(op);
49
366M
            return op;
50
366M
        }
51
472M
    }
52
    /* Check for overflow */
53
110M
    if ((size_t)size > ((size_t)PY_SSIZE_T_MAX - (sizeof(PyTupleObject) -
54
110M
                sizeof(PyObject *))) / sizeof(PyObject *)) {
55
0
        return (PyTupleObject *)PyErr_NoMemory();
56
0
    }
57
110M
    PyTupleObject *result = PyObject_GC_NewVar(PyTupleObject, &PyTuple_Type, size);
58
110M
    if (result != NULL) {
59
110M
        _PyTuple_RESET_HASH_CACHE(result);
60
110M
    }
61
110M
    return result;
62
110M
}
63
64
// The empty tuple singleton is not tracked by the GC.
65
// It does not contain any Python object.
66
// Note that tuple subclasses have their own empty instances.
67
68
static inline PyObject *
69
tuple_get_empty(void)
70
80.9M
{
71
80.9M
    return (PyObject *)&_Py_SINGLETON(tuple_empty);
72
80.9M
}
73
74
PyObject *
75
PyTuple_New(Py_ssize_t size)
76
52.3M
{
77
52.3M
    PyTupleObject *op;
78
52.3M
    if (size == 0) {
79
703k
        return tuple_get_empty();
80
703k
    }
81
51.6M
    op = tuple_alloc(size);
82
51.6M
    if (op == NULL) {
83
0
        return NULL;
84
0
    }
85
1.25G
    for (Py_ssize_t i = 0; i < size; i++) {
86
1.20G
        op->ob_item[i] = NULL;
87
1.20G
    }
88
51.6M
    _PyObject_GC_TRACK(op);
89
51.6M
    return (PyObject *) op;
90
51.6M
}
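PyTuple_New() hands back a tuple whose slots are all NULL, so every slot must be filled before the object is exposed to other code. A minimal usage sketch (hypothetical helper name, assuming Python.h is included; PyTuple_SET_ITEM steals each reference):

    static PyObject *
    make_pair(PyObject *a, PyObject *b)
    {
        PyObject *t = PyTuple_New(2);          /* slots start out NULL */
        if (t == NULL) {
            return NULL;
        }
        /* PyTuple_SET_ITEM steals the reference, so pass a new one. */
        PyTuple_SET_ITEM(t, 0, Py_NewRef(a));
        PyTuple_SET_ITEM(t, 1, Py_NewRef(b));
        return t;
    }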
91
92
Py_ssize_t
93
PyTuple_Size(PyObject *op)
94
15.8M
{
95
15.8M
    if (!PyTuple_Check(op)) {
96
0
        PyErr_BadInternalCall();
97
0
        return -1;
98
0
    }
99
15.8M
    else
100
15.8M
        return Py_SIZE(op);
101
15.8M
}
102
103
PyObject *
104
PyTuple_GetItem(PyObject *op, Py_ssize_t i)
105
48.5M
{
106
48.5M
    if (!PyTuple_Check(op)) {
107
0
        PyErr_BadInternalCall();
108
0
        return NULL;
109
0
    }
110
48.5M
    if (i < 0 || i >= Py_SIZE(op)) {
111
0
        PyErr_SetString(PyExc_IndexError, "tuple index out of range");
112
0
        return NULL;
113
0
    }
114
48.5M
    return ((PyTupleObject *)op) -> ob_item[i];
115
48.5M
}
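Unlike the constructors, PyTuple_GetItem() returns a borrowed reference. A short sketch of the usual call pattern (illustrative only, assuming Python.h is included):

    static int
    use_first_item(PyObject *t)
    {
        PyObject *first = PyTuple_GetItem(t, 0);   /* borrowed reference */
        if (first == NULL) {
            return -1;                             /* IndexError or bad internal call */
        }
        Py_INCREF(first);                          /* keep it alive while we work */
        /* ... use first ... */
        Py_DECREF(first);
        return 0;
    }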
116
117
int
118
PyTuple_SetItem(PyObject *op, Py_ssize_t i, PyObject *newitem)
119
97.9k
{
120
97.9k
    PyObject **p;
121
97.9k
    if (!PyTuple_Check(op) || !_PyObject_IsUniquelyReferenced(op)) {
122
0
        Py_XDECREF(newitem);
123
0
        PyErr_BadInternalCall();
124
0
        return -1;
125
0
    }
126
97.9k
    if (i < 0 || i >= Py_SIZE(op)) {
127
0
        Py_XDECREF(newitem);
128
0
        PyErr_SetString(PyExc_IndexError,
129
0
                        "tuple assignment index out of range");
130
0
        return -1;
131
0
    }
132
97.9k
    p = ((PyTupleObject *)op) -> ob_item + i;
133
97.9k
    Py_XSETREF(*p, newitem);
134
97.9k
    return 0;
135
97.9k
}
136
137
void
138
_PyTuple_MaybeUntrack(PyObject *op)
139
99.1M
{
140
99.1M
    PyTupleObject *t;
141
99.1M
    Py_ssize_t i, n;
142
143
99.1M
    if (!PyTuple_CheckExact(op) || !_PyObject_GC_IS_TRACKED(op))
144
0
        return;
145
99.1M
    t = (PyTupleObject *) op;
146
99.1M
    n = Py_SIZE(t);
147
773M
    for (i = 0; i < n; i++) {
148
770M
        PyObject *elt = PyTuple_GET_ITEM(t, i);
149
        /* Tuples with NULL elements aren't
150
           fully constructed, don't untrack
151
           them yet. */
152
770M
        if (!elt ||
153
770M
            _PyObject_GC_MAY_BE_TRACKED(elt))
154
96.5M
            return;
155
770M
    }
156
2.61M
    _PyObject_GC_UNTRACK(op);
157
2.61M
}
158
159
/* Fast, but conservative check if an object may be tracked.
160
   It may return true for an object that is not tracked;
161
   it will always return true for an object that is tracked.
162
   This is a temporary workaround until _PyObject_GC_IS_TRACKED
163
   becomes fast and safe to call on non-GC objects.
164
*/
165
static bool
166
maybe_tracked(PyObject *ob)
167
661M
{
168
661M
    return _PyType_IS_GC(Py_TYPE(ob));
169
661M
}
170
171
PyObject *
172
PyTuple_Pack(Py_ssize_t n, ...)
173
10.0M
{
174
10.0M
    Py_ssize_t i;
175
10.0M
    PyObject *o;
176
10.0M
    PyObject **items;
177
10.0M
    va_list vargs;
178
10.0M
    bool track = false;
179
180
10.0M
    if (n == 0) {
181
0
        return tuple_get_empty();
182
0
    }
183
184
10.0M
    va_start(vargs, n);
185
10.0M
    PyTupleObject *result = tuple_alloc(n);
186
10.0M
    if (result == NULL) {
187
0
        va_end(vargs);
188
0
        return NULL;
189
0
    }
190
10.0M
    items = result->ob_item;
191
30.9M
    for (i = 0; i < n; i++) {
192
20.8M
        o = va_arg(vargs, PyObject *);
193
20.8M
        if (!track && maybe_tracked(o)) {
194
873k
            track = true;
195
873k
        }
196
20.8M
        items[i] = Py_NewRef(o);
197
20.8M
    }
198
10.0M
    va_end(vargs);
199
10.0M
    if (track) {
200
873k
        _PyObject_GC_TRACK(result);
201
873k
    }
202
10.0M
    return (PyObject *)result;
203
10.0M
}
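PyTuple_Pack() takes new references to its arguments rather than stealing them, so the caller keeps ownership of what it passes in. A hypothetical call site:

    static PyObject *
    pack_point(long px, long py)
    {
        PyObject *x = PyLong_FromLong(px);
        PyObject *y = PyLong_FromLong(py);
        /* PyTuple_Pack increfs its arguments, so our references stay ours. */
        PyObject *t = (x && y) ? PyTuple_Pack(2, x, y) : NULL;
        Py_XDECREF(x);
        Py_XDECREF(y);
        return t;              /* (px, py), or NULL with an exception set */
    }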
204
205
206
/* Methods */
207
208
static void
209
tuple_dealloc(PyObject *self)
210
478M
{
211
478M
    PyTupleObject *op = _PyTuple_CAST(self);
212
478M
    if (Py_SIZE(op) == 0) {
213
        /* The empty tuple is statically allocated. */
214
0
        if (op == &_Py_SINGLETON(tuple_empty)) {
215
#ifdef Py_DEBUG
216
            _Py_FatalRefcountError("deallocating the empty tuple singleton");
217
#else
218
0
            return;
219
0
#endif
220
0
        }
221
#ifdef Py_DEBUG
222
        /* tuple subclasses have their own empty instances. */
223
        assert(!PyTuple_CheckExact(op));
224
#endif
225
0
    }
226
227
478M
    PyObject_GC_UnTrack(op);
228
229
478M
    Py_ssize_t i = Py_SIZE(op);
230
5.59G
    while (--i >= 0) {
231
5.11G
        Py_XDECREF(op->ob_item[i]);
232
5.11G
    }
233
    // This will abort on the empty singleton (if there is one).
234
478M
    if (!maybe_freelist_push(op)) {
235
112M
        Py_TYPE(op)->tp_free((PyObject *)op);
236
112M
    }
237
478M
}
238
239
static PyObject *
240
tuple_repr(PyObject *self)
241
38
{
242
38
    PyTupleObject *v = _PyTuple_CAST(self);
243
38
    Py_ssize_t n = PyTuple_GET_SIZE(v);
244
38
    if (n == 0) {
245
0
        return PyUnicode_FromString("()");
246
0
    }
247
248
    /* While not mutable, it is still possible to end up with a cycle in a
249
       tuple through an object that stores itself within a tuple (and thus
250
       infinitely asks for the repr of itself). This should only be
251
       possible within a type. */
252
38
    int res = Py_ReprEnter((PyObject *)v);
253
38
    if (res != 0) {
254
0
        return res > 0 ? PyUnicode_FromString("(...)") : NULL;
255
0
    }
256
257
38
    Py_ssize_t prealloc;
258
38
    if (n > 1) {
259
        // "(" + "1" + ", 2" * (len - 1) + ")"
260
23
        prealloc = 1 + 1 + (2 + 1) * (n - 1) + 1;
261
23
    }
262
15
    else {
263
        // "(1,)"
264
15
        prealloc = 4;
265
15
    }
266
38
    PyUnicodeWriter *writer = PyUnicodeWriter_Create(prealloc);
267
38
    if (writer == NULL) {
268
0
        goto error;
269
0
    }
270
271
38
    if (PyUnicodeWriter_WriteChar(writer, '(') < 0) {
272
0
        goto error;
273
0
    }
274
275
    /* Do repr() on each element. */
276
1.78k
    for (Py_ssize_t i = 0; i < n; ++i) {
277
1.74k
        if (i > 0) {
278
1.70k
            if (PyUnicodeWriter_WriteChar(writer, ',') < 0) {
279
0
                goto error;
280
0
            }
281
1.70k
            if (PyUnicodeWriter_WriteChar(writer, ' ') < 0) {
282
0
                goto error;
283
0
            }
284
1.70k
        }
285
286
1.74k
        if (PyUnicodeWriter_WriteRepr(writer, v->ob_item[i]) < 0) {
287
0
            goto error;
288
0
        }
289
1.74k
    }
290
291
38
    if (n == 1) {
292
15
        if (PyUnicodeWriter_WriteChar(writer, ',') < 0) {
293
0
            goto error;
294
0
        }
295
15
    }
296
38
    if (PyUnicodeWriter_WriteChar(writer, ')') < 0) {
297
0
        goto error;
298
0
    }
299
300
38
    Py_ReprLeave((PyObject *)v);
301
38
    return PyUnicodeWriter_Finish(writer);
302
303
0
error:
304
0
    PyUnicodeWriter_Discard(writer);
305
0
    Py_ReprLeave((PyObject *)v);
306
0
    return NULL;
307
38
}
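As a quick check of the preallocation estimate above (not part of the report): for n = 3 it gives 1 + 1 + (2 + 1) * 2 + 1 = 9 characters, exactly the length of "(1, 2, 3)"; longer element reprs simply grow the writer beyond that initial guess.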
308
309
310
/* Hash for tuples. This is a slightly simplified version of the xxHash
311
   non-cryptographic hash:
312
   - we do not use any parallelism, there is only 1 accumulator.
313
   - we drop the final mixing since this is just a permutation of the
314
     output space: it does not help against collisions.
315
   - at the end, we mangle the length with a single constant.
316
   For the xxHash specification, see
317
   https://github.com/Cyan4973/xxHash/blob/master/doc/xxhash_spec.md
318
319
   The constants for the hash function are defined in pycore_tuple.h.
320
*/
321
322
static Py_hash_t
323
tuple_hash(PyObject *op)
324
17.9M
{
325
17.9M
    PyTupleObject *v = _PyTuple_CAST(op);
326
327
17.9M
    Py_uhash_t acc = FT_ATOMIC_LOAD_SSIZE_RELAXED(v->ob_hash);
328
17.9M
    if (acc != (Py_uhash_t)-1) {
329
338k
        return acc;
330
338k
    }
331
332
17.5M
    Py_ssize_t len = Py_SIZE(v);
333
17.5M
    PyObject **item = v->ob_item;
334
17.5M
    acc = _PyTuple_HASH_XXPRIME_5;
335
864M
    for (Py_ssize_t i = 0; i < len; i++) {
336
847M
        Py_uhash_t lane = PyObject_Hash(item[i]);
337
847M
        if (lane == (Py_uhash_t)-1) {
338
0
            return -1;
339
0
        }
340
847M
        acc += lane * _PyTuple_HASH_XXPRIME_2;
341
847M
        acc = _PyTuple_HASH_XXROTATE(acc);
342
847M
        acc *= _PyTuple_HASH_XXPRIME_1;
343
847M
    }
344
345
    /* Add input length, mangled to keep the historical value of hash(()). */
346
17.5M
    acc += len ^ (_PyTuple_HASH_XXPRIME_5 ^ 3527539UL);
347
348
17.5M
    if (acc == (Py_uhash_t)-1) {
349
0
        acc = 1546275796;
350
0
    }
351
352
17.5M
    FT_ATOMIC_STORE_SSIZE_RELAXED(v->ob_hash, acc);
353
354
17.5M
    return acc;
355
17.5M
}
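A standalone sketch of the same accumulation scheme, assuming a 64-bit build; the constants below are the standard xxHash64 primes, whereas CPython's actual values (and the 32-bit variants) come from pycore_tuple.h:

    #include <stdint.h>
    #include <stddef.h>

    #define XXPRIME_1 11400714785074694791ULL
    #define XXPRIME_2 14029467366897019727ULL
    #define XXPRIME_5  2870177450012600261ULL
    #define XXROTATE(x) (((x) << 31) | ((x) >> 33))   /* rotate left by 31 bits */

    /* Combine per-item hashes the way tuple_hash() does. */
    static uint64_t
    combine_lanes(const uint64_t *lanes, size_t len)
    {
        uint64_t acc = XXPRIME_5;
        for (size_t i = 0; i < len; i++) {
            acc += lanes[i] * XXPRIME_2;
            acc = XXROTATE(acc);
            acc *= XXPRIME_1;
        }
        /* Mix in the length so tuples of different sizes hash differently. */
        acc += len ^ (XXPRIME_5 ^ 3527539UL);
        return acc;
    }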
356
357
static Py_ssize_t
358
tuple_length(PyObject *self)
359
5.62M
{
360
5.62M
    PyTupleObject *a = _PyTuple_CAST(self);
361
5.62M
    return Py_SIZE(a);
362
5.62M
}
363
364
static int
365
tuple_contains(PyObject *self, PyObject *el)
366
12.4M
{
367
12.4M
    PyTupleObject *a = _PyTuple_CAST(self);
368
12.4M
    int cmp = 0;
369
41.5M
    for (Py_ssize_t i = 0; cmp == 0 && i < Py_SIZE(a); ++i) {
370
29.0M
        cmp = PyObject_RichCompareBool(PyTuple_GET_ITEM(a, i), el, Py_EQ);
371
29.0M
    }
372
12.4M
    return cmp;
373
12.4M
}
374
375
static PyObject *
376
tuple_item(PyObject *op, Py_ssize_t i)
377
19.6M
{
378
19.6M
    PyTupleObject *a = _PyTuple_CAST(op);
379
19.6M
    if (i < 0 || i >= Py_SIZE(a)) {
380
28
        PyErr_SetString(PyExc_IndexError, "tuple index out of range");
381
28
        return NULL;
382
28
    }
383
19.6M
    return Py_NewRef(a->ob_item[i]);
384
19.6M
}
385
386
PyObject *
387
PyTuple_FromArray(PyObject *const *src, Py_ssize_t n)
388
259M
{
389
259M
    if (n == 0) {
390
80.2M
        return tuple_get_empty();
391
80.2M
    }
392
393
179M
    PyTupleObject *tuple = tuple_alloc(n);
394
179M
    if (tuple == NULL) {
395
0
        return NULL;
396
0
    }
397
179M
    PyObject **dst = tuple->ob_item;
398
179M
    bool track = false;
399
504M
    for (Py_ssize_t i = 0; i < n; i++) {
400
324M
        PyObject *item = src[i];
401
324M
        if (!track && maybe_tracked(item)) {
402
54.0M
            track = true;
403
54.0M
        }
404
324M
        dst[i] = Py_NewRef(item);
405
324M
    }
406
179M
    if (track) {
407
54.0M
        _PyObject_GC_TRACK(tuple);
408
54.0M
    }
409
179M
    return (PyObject *)tuple;
410
179M
}
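A short usage sketch (hypothetical caller): PyTuple_FromArray() copies the array and takes a new reference to every element, so the caller's references are untouched:

    static PyObject *
    triple_from(PyObject *a, PyObject *b, PyObject *c)
    {
        PyObject *items[3] = {a, b, c};
        return PyTuple_FromArray(items, 3);    /* new refs to a, b and c */
    }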
411
412
PyObject *
413
_PyTuple_FromStackRefStealOnSuccess(const _PyStackRef *src, Py_ssize_t n)
414
231M
{
415
231M
    if (n == 0) {
416
0
        return tuple_get_empty();
417
0
    }
418
231M
    PyTupleObject *tuple = tuple_alloc(n);
419
231M
    if (tuple == NULL) {
420
0
        return NULL;
421
0
    }
422
231M
    PyObject **dst = tuple->ob_item;
423
231M
    bool track = false;
424
720M
    for (Py_ssize_t i = 0; i < n; i++) {
425
488M
        PyObject *item = PyStackRef_AsPyObjectSteal(src[i]);
426
488M
        if (!track && maybe_tracked(item)) {
427
149M
            track = true;
428
149M
        }
429
488M
        dst[i] = item;
430
488M
    }
431
231M
    if (track) {
432
149M
        _PyObject_GC_TRACK(tuple);
433
149M
    }
434
231M
    return (PyObject *)tuple;
435
231M
}
436
437
PyObject *
438
_PyTuple_FromArraySteal(PyObject *const *src, Py_ssize_t n)
439
578
{
440
578
    if (n == 0) {
441
0
        return tuple_get_empty();
442
0
    }
443
578
    PyTupleObject *tuple = tuple_alloc(n);
444
578
    if (tuple == NULL) {
445
0
        for (Py_ssize_t i = 0; i < n; i++) {
446
0
            Py_DECREF(src[i]);
447
0
        }
448
0
        return NULL;
449
0
    }
450
578
    PyObject **dst = tuple->ob_item;
451
3.09k
    for (Py_ssize_t i = 0; i < n; i++) {
452
2.51k
        PyObject *item = src[i];
453
2.51k
        dst[i] = item;
454
2.51k
    }
455
578
    _PyObject_GC_TRACK(tuple);
456
578
    return (PyObject *)tuple;
457
578
}
458
459
static PyObject *
460
tuple_slice(PyTupleObject *a, Py_ssize_t ilow,
461
           Py_ssize_t ihigh)
462
38.3M
{
463
38.3M
    if (ilow < 0)
464
0
        ilow = 0;
465
38.3M
    if (ihigh > Py_SIZE(a))
466
36
        ihigh = Py_SIZE(a);
467
38.3M
    if (ihigh < ilow)
468
0
        ihigh = ilow;
469
38.3M
    if (ilow == 0 && ihigh == Py_SIZE(a) && PyTuple_CheckExact(a)) {
470
0
        return Py_NewRef(a);
471
0
    }
472
38.3M
    return PyTuple_FromArray(a->ob_item + ilow, ihigh - ilow);
473
38.3M
}
474
475
PyObject *
476
PyTuple_GetSlice(PyObject *op, Py_ssize_t i, Py_ssize_t j)
477
38.3M
{
478
38.3M
    if (op == NULL || !PyTuple_Check(op)) {
479
0
        PyErr_BadInternalCall();
480
0
        return NULL;
481
0
    }
482
38.3M
    return tuple_slice((PyTupleObject *)op, i, j);
483
38.3M
}
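The slice helpers clamp out-of-range bounds rather than raising, so a sketch like the following is safe even when the stop index overshoots (illustrative helper name):

    static PyObject *
    tail_of(PyObject *t)
    {
        /* Returns a new reference; for (1, 2, 3) this yields (2, 3),
           and the over-long stop index is clamped to the tuple size. */
        return PyTuple_GetSlice(t, 1, PY_SSIZE_T_MAX);
    }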
484
485
static PyObject *
486
tuple_concat(PyObject *aa, PyObject *bb)
487
3.22M
{
488
3.22M
    PyTupleObject *a = _PyTuple_CAST(aa);
489
3.22M
    if (Py_SIZE(a) == 0 && PyTuple_CheckExact(bb)) {
490
599k
        return Py_NewRef(bb);
491
599k
    }
492
2.62M
    if (!PyTuple_Check(bb)) {
493
0
        PyErr_Format(PyExc_TypeError,
494
0
             "can only concatenate tuple (not \"%.200s\") to tuple",
495
0
                 Py_TYPE(bb)->tp_name);
496
0
        return NULL;
497
0
    }
498
2.62M
    PyTupleObject *b = (PyTupleObject *)bb;
499
500
2.62M
    if (Py_SIZE(b) == 0 && PyTuple_CheckExact(a)) {
501
85
        return Py_NewRef(a);
502
85
    }
503
2.62M
    assert((size_t)Py_SIZE(a) + (size_t)Py_SIZE(b) < PY_SSIZE_T_MAX);
504
2.62M
    Py_ssize_t size = Py_SIZE(a) + Py_SIZE(b);
505
2.62M
    if (size == 0) {
506
0
        return tuple_get_empty();
507
0
    }
508
509
2.62M
    PyTupleObject *np = tuple_alloc(size);
510
2.62M
    if (np == NULL) {
511
0
        return NULL;
512
0
    }
513
514
2.62M
    PyObject **src = a->ob_item;
515
2.62M
    PyObject **dest = np->ob_item;
516
1.12G
    for (Py_ssize_t i = 0; i < Py_SIZE(a); i++) {
517
1.12G
        PyObject *v = src[i];
518
1.12G
        dest[i] = Py_NewRef(v);
519
1.12G
    }
520
521
2.62M
    src = b->ob_item;
522
2.62M
    dest = np->ob_item + Py_SIZE(a);
523
75.2M
    for (Py_ssize_t i = 0; i < Py_SIZE(b); i++) {
524
72.6M
        PyObject *v = src[i];
525
72.6M
        dest[i] = Py_NewRef(v);
526
72.6M
    }
527
528
2.62M
    _PyObject_GC_TRACK(np);
529
2.62M
    return (PyObject *)np;
530
2.62M
}
531
532
static PyObject *
533
tuple_repeat(PyObject *self, Py_ssize_t n)
534
0
{
535
0
    PyTupleObject *a = _PyTuple_CAST(self);
536
0
    const Py_ssize_t input_size = Py_SIZE(a);
537
0
    if (input_size == 0 || n == 1) {
538
0
        if (PyTuple_CheckExact(a)) {
539
            /* Since tuples are immutable, we can return a shared
540
               copy in this case */
541
0
            return Py_NewRef(a);
542
0
        }
543
0
    }
544
0
    if (input_size == 0 || n <= 0) {
545
0
        return tuple_get_empty();
546
0
    }
547
0
    assert(n>0);
548
549
0
    if (input_size > PY_SSIZE_T_MAX / n)
550
0
        return PyErr_NoMemory();
551
0
    Py_ssize_t output_size = input_size * n;
552
553
0
    PyTupleObject *np = tuple_alloc(output_size);
554
0
    if (np == NULL)
555
0
        return NULL;
556
557
0
    PyObject **dest = np->ob_item;
558
0
    if (input_size == 1) {
559
0
        PyObject *elem = a->ob_item[0];
560
0
        _Py_RefcntAdd(elem, n);
561
0
        PyObject **dest_end = dest + output_size;
562
0
        while (dest < dest_end) {
563
0
            *dest++ = elem;
564
0
        }
565
0
    }
566
0
    else {
567
0
        PyObject **src = a->ob_item;
568
0
        PyObject **src_end = src + input_size;
569
0
        while (src < src_end) {
570
0
            _Py_RefcntAdd(*src, n);
571
0
            *dest++ = *src++;
572
0
        }
573
574
0
        _Py_memory_repeat((char *)np->ob_item, sizeof(PyObject *)*output_size,
575
0
                          sizeof(PyObject *)*input_size);
576
0
    }
577
0
    _PyObject_GC_TRACK(np);
578
0
    return (PyObject *) np;
579
0
}
580
581
/*[clinic input]
582
tuple.index
583
584
    value: object
585
    start: slice_index(accept={int}) = 0
586
    stop: slice_index(accept={int}, c_default="PY_SSIZE_T_MAX") = sys.maxsize
587
    /
588
589
Return first index of value.
590
591
Raises ValueError if the value is not present.
592
[clinic start generated code]*/
593
594
static PyObject *
595
tuple_index_impl(PyTupleObject *self, PyObject *value, Py_ssize_t start,
596
                 Py_ssize_t stop)
597
/*[clinic end generated code: output=07b6f9f3cb5c33eb input=fb39e9874a21fe3f]*/
598
4
{
599
4
    Py_ssize_t i;
600
601
4
    if (start < 0) {
602
0
        start += Py_SIZE(self);
603
0
        if (start < 0)
604
0
            start = 0;
605
0
    }
606
4
    if (stop < 0) {
607
0
        stop += Py_SIZE(self);
608
0
    }
609
4
    else if (stop > Py_SIZE(self)) {
610
4
        stop = Py_SIZE(self);
611
4
    }
612
4
    for (i = start; i < stop; i++) {
613
4
        int cmp = PyObject_RichCompareBool(self->ob_item[i], value, Py_EQ);
614
4
        if (cmp > 0)
615
4
            return PyLong_FromSsize_t(i);
616
0
        else if (cmp < 0)
617
0
            return NULL;
618
4
    }
619
0
    PyErr_SetString(PyExc_ValueError, "tuple.index(x): x not in tuple");
620
0
    return NULL;
621
4
}
622
623
/*[clinic input]
624
tuple.count
625
626
     value: object
627
     /
628
629
Return number of occurrences of value.
630
[clinic start generated code]*/
631
632
static PyObject *
633
tuple_count_impl(PyTupleObject *self, PyObject *value)
634
/*[clinic end generated code: output=cf02888d4bc15d7a input=531721aff65bd772]*/
635
0
{
636
0
    Py_ssize_t count = 0;
637
0
    Py_ssize_t i;
638
639
0
    for (i = 0; i < Py_SIZE(self); i++) {
640
0
        int cmp = PyObject_RichCompareBool(self->ob_item[i], value, Py_EQ);
641
0
        if (cmp > 0)
642
0
            count++;
643
0
        else if (cmp < 0)
644
0
            return NULL;
645
0
    }
646
0
    return PyLong_FromSsize_t(count);
647
0
}
648
649
static int
650
tuple_traverse(PyObject *self, visitproc visit, void *arg)
651
144M
{
652
144M
    PyTupleObject *o = _PyTuple_CAST(self);
653
441M
    for (Py_ssize_t i = Py_SIZE(o); --i >= 0; ) {
654
297M
        Py_VISIT(o->ob_item[i]);
655
297M
    }
656
144M
    return 0;
657
144M
}
658
659
static PyObject *
660
tuple_richcompare(PyObject *v, PyObject *w, int op)
661
17.8M
{
662
17.8M
    PyTupleObject *vt, *wt;
663
17.8M
    Py_ssize_t i;
664
17.8M
    Py_ssize_t vlen, wlen;
665
666
17.8M
    if (!PyTuple_Check(v) || !PyTuple_Check(w))
667
0
        Py_RETURN_NOTIMPLEMENTED;
668
669
17.8M
    vt = (PyTupleObject *)v;
670
17.8M
    wt = (PyTupleObject *)w;
671
672
17.8M
    vlen = Py_SIZE(vt);
673
17.8M
    wlen = Py_SIZE(wt);
674
675
    /* Note:  the corresponding code for lists has an "early out" test
676
     * here when op is EQ or NE and the lengths differ.  That pays there,
677
     * but Tim was unable to find any real code where EQ/NE tuple
678
     * compares don't have the same length, so testing for it here would
679
     * have cost without benefit.
680
     */
681
682
    /* Search for the first index where items are different.
683
     * Note that because tuples are immutable, it's safe to reuse
684
     * vlen and wlen across the comparison calls.
685
     */
686
53.5M
    for (i = 0; i < vlen && i < wlen; i++) {
687
38.9M
        int k = PyObject_RichCompareBool(vt->ob_item[i],
688
38.9M
                                         wt->ob_item[i], Py_EQ);
689
38.9M
        if (k < 0)
690
0
            return NULL;
691
38.9M
        if (!k)
692
3.31M
            break;
693
38.9M
    }
694
695
17.8M
    if (i >= vlen || i >= wlen) {
696
        /* No more items to compare -- compare sizes */
697
14.5M
        Py_RETURN_RICHCOMPARE(vlen, wlen, op);
698
14.5M
    }
699
700
    /* We have an item that differs -- shortcuts for EQ/NE */
701
3.31M
    if (op == Py_EQ) {
702
95.6k
        Py_RETURN_FALSE;
703
95.6k
    }
704
3.21M
    if (op == Py_NE) {
705
1.50k
        Py_RETURN_TRUE;
706
1.50k
    }
707
708
    /* Compare the final item again using the proper operator */
709
3.21M
    return PyObject_RichCompare(vt->ob_item[i], wt->ob_item[i], op);
710
3.21M
}
711
712
static PyObject *
713
tuple_subtype_new(PyTypeObject *type, PyObject *iterable);
714
715
/*[clinic input]
716
@classmethod
717
tuple.__new__ as tuple_new
718
    iterable: object(c_default="NULL") = ()
719
    /
720
721
Built-in immutable sequence.
722
723
If no argument is given, the constructor returns an empty tuple.
724
If iterable is specified the tuple is initialized from iterable's items.
725
726
If the argument is a tuple, the return value is the same object.
727
[clinic start generated code]*/
728
729
static PyObject *
730
tuple_new_impl(PyTypeObject *type, PyObject *iterable)
731
/*[clinic end generated code: output=4546d9f0d469bce7 input=86963bcde633b5a2]*/
732
3.62M
{
733
3.62M
    if (type != &PyTuple_Type)
734
1.81M
        return tuple_subtype_new(type, iterable);
735
736
1.81M
    if (iterable == NULL) {
737
0
        return tuple_get_empty();
738
0
    }
739
1.81M
    else {
740
1.81M
        return PySequence_Tuple(iterable);
741
1.81M
    }
742
1.81M
}
743
744
static PyObject *
745
tuple_vectorcall(PyObject *type, PyObject * const*args,
746
                 size_t nargsf, PyObject *kwnames)
747
145
{
748
145
    if (!_PyArg_NoKwnames("tuple", kwnames)) {
749
0
        return NULL;
750
0
    }
751
752
145
    Py_ssize_t nargs = PyVectorcall_NARGS(nargsf);
753
145
    if (!_PyArg_CheckPositional("tuple", nargs, 0, 1)) {
754
0
        return NULL;
755
0
    }
756
757
145
    if (nargs) {
758
145
        return tuple_new_impl(_PyType_CAST(type), args[0]);
759
145
    }
760
0
    else {
761
0
        return tuple_get_empty();
762
0
    }
763
145
}
764
765
static PyObject *
766
tuple_subtype_new(PyTypeObject *type, PyObject *iterable)
767
1.81M
{
768
1.81M
    PyObject *tmp, *newobj, *item;
769
1.81M
    Py_ssize_t i, n;
770
771
1.81M
    assert(PyType_IsSubtype(type, &PyTuple_Type));
772
    // tuple subclasses must implement the GC protocol
773
1.81M
    assert(_PyType_IS_GC(type));
774
775
1.81M
    tmp = tuple_new_impl(&PyTuple_Type, iterable);
776
1.81M
    if (tmp == NULL)
777
0
        return NULL;
778
1.81M
    assert(PyTuple_Check(tmp));
779
    /* This may allocate an empty tuple that is not the global one. */
780
1.81M
    newobj = type->tp_alloc(type, n = PyTuple_GET_SIZE(tmp));
781
1.81M
    if (newobj == NULL) {
782
0
        Py_DECREF(tmp);
783
0
        return NULL;
784
0
    }
785
7.25M
    for (i = 0; i < n; i++) {
786
5.44M
        item = PyTuple_GET_ITEM(tmp, i);
787
5.44M
        PyTuple_SET_ITEM(newobj, i, Py_NewRef(item));
788
5.44M
    }
789
1.81M
    Py_DECREF(tmp);
790
791
1.81M
    _PyTuple_RESET_HASH_CACHE(newobj);
792
793
    // Don't track if a subclass tp_alloc is PyType_GenericAlloc()
794
1.81M
    if (!_PyObject_GC_IS_TRACKED(newobj)) {
795
0
        _PyObject_GC_TRACK(newobj);
796
0
    }
797
1.81M
    return newobj;
798
1.81M
}
799
800
static PySequenceMethods tuple_as_sequence = {
801
    tuple_length,                               /* sq_length */
802
    tuple_concat,                               /* sq_concat */
803
    tuple_repeat,                               /* sq_repeat */
804
    tuple_item,                                 /* sq_item */
805
    0,                                          /* sq_slice */
806
    0,                                          /* sq_ass_item */
807
    0,                                          /* sq_ass_slice */
808
    tuple_contains,                             /* sq_contains */
809
};
810
811
static PyObject*
812
tuple_subscript(PyObject *op, PyObject* item)
813
3.46M
{
814
3.46M
    PyTupleObject *self = _PyTuple_CAST(op);
815
3.46M
    if (_PyIndex_Check(item)) {
816
1.75M
        Py_ssize_t i = PyNumber_AsSsize_t(item, PyExc_IndexError);
817
1.75M
        if (i == -1 && PyErr_Occurred())
818
0
            return NULL;
819
1.75M
        if (i < 0)
820
1.75M
            i += PyTuple_GET_SIZE(self);
821
1.75M
        return tuple_item(op, i);
822
1.75M
    }
823
1.71M
    else if (PySlice_Check(item)) {
824
1.71M
        Py_ssize_t start, stop, step, slicelength, i;
825
1.71M
        size_t cur;
826
1.71M
        PyObject* it;
827
1.71M
        PyObject **src, **dest;
828
829
1.71M
        if (PySlice_Unpack(item, &start, &stop, &step) < 0) {
830
0
            return NULL;
831
0
        }
832
1.71M
        slicelength = PySlice_AdjustIndices(PyTuple_GET_SIZE(self), &start,
833
1.71M
                                            &stop, step);
834
835
1.71M
        if (slicelength <= 0) {
836
6.26k
            return tuple_get_empty();
837
6.26k
        }
838
1.70M
        else if (start == 0 && step == 1 &&
839
1.57M
                 slicelength == PyTuple_GET_SIZE(self) &&
840
8
                 PyTuple_CheckExact(self)) {
841
8
            return Py_NewRef(self);
842
8
        }
843
1.70M
        else {
844
1.70M
            PyTupleObject* result = tuple_alloc(slicelength);
845
1.70M
            if (!result) return NULL;
846
847
1.70M
            src = self->ob_item;
848
1.70M
            dest = result->ob_item;
849
1.87G
            for (cur = start, i = 0; i < slicelength;
850
1.87G
                 cur += step, i++) {
851
1.87G
                it = Py_NewRef(src[cur]);
852
1.87G
                dest[i] = it;
853
1.87G
            }
854
855
1.70M
            _PyObject_GC_TRACK(result);
856
1.70M
            return (PyObject *)result;
857
1.70M
        }
858
1.71M
    }
859
0
    else {
860
0
        PyErr_Format(PyExc_TypeError,
861
0
                     "tuple indices must be integers or slices, not %.200s",
862
0
                     Py_TYPE(item)->tp_name);
863
0
        return NULL;
864
0
    }
865
3.46M
}
866
867
/*[clinic input]
868
tuple.__getnewargs__
869
[clinic start generated code]*/
870
871
static PyObject *
872
tuple___getnewargs___impl(PyTupleObject *self)
873
/*[clinic end generated code: output=25e06e3ee56027e2 input=1aeb4b286a21639a]*/
874
0
{
875
0
    return Py_BuildValue("(N)", tuple_slice(self, 0, Py_SIZE(self)));
876
0
}
877
878
static PyMethodDef tuple_methods[] = {
879
    TUPLE___GETNEWARGS___METHODDEF
880
    TUPLE_INDEX_METHODDEF
881
    TUPLE_COUNT_METHODDEF
882
    {"__class_getitem__", Py_GenericAlias, METH_O|METH_CLASS, PyDoc_STR("See PEP 585")},
883
    {NULL,              NULL}           /* sentinel */
884
};
885
886
static PyMappingMethods tuple_as_mapping = {
887
    tuple_length,
888
    tuple_subscript,
889
    0
890
};
891
892
static PyObject *tuple_iter(PyObject *seq);
893
894
PyTypeObject PyTuple_Type = {
895
    PyVarObject_HEAD_INIT(&PyType_Type, 0)
896
    "tuple",
897
    sizeof(PyTupleObject) - sizeof(PyObject *),
898
    sizeof(PyObject *),
899
    tuple_dealloc,                              /* tp_dealloc */
900
    0,                                          /* tp_vectorcall_offset */
901
    0,                                          /* tp_getattr */
902
    0,                                          /* tp_setattr */
903
    0,                                          /* tp_as_async */
904
    tuple_repr,                                 /* tp_repr */
905
    0,                                          /* tp_as_number */
906
    &tuple_as_sequence,                         /* tp_as_sequence */
907
    &tuple_as_mapping,                          /* tp_as_mapping */
908
    tuple_hash,                                 /* tp_hash */
909
    0,                                          /* tp_call */
910
    0,                                          /* tp_str */
911
    PyObject_GenericGetAttr,                    /* tp_getattro */
912
    0,                                          /* tp_setattro */
913
    0,                                          /* tp_as_buffer */
914
    Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC |
915
        Py_TPFLAGS_BASETYPE | Py_TPFLAGS_TUPLE_SUBCLASS |
916
        _Py_TPFLAGS_MATCH_SELF | Py_TPFLAGS_SEQUENCE,  /* tp_flags */
917
    tuple_new__doc__,                           /* tp_doc */
918
    tuple_traverse,                             /* tp_traverse */
919
    0,                                          /* tp_clear */
920
    tuple_richcompare,                          /* tp_richcompare */
921
    0,                                          /* tp_weaklistoffset */
922
    tuple_iter,                                 /* tp_iter */
923
    0,                                          /* tp_iternext */
924
    tuple_methods,                              /* tp_methods */
925
    0,                                          /* tp_members */
926
    0,                                          /* tp_getset */
927
    0,                                          /* tp_base */
928
    0,                                          /* tp_dict */
929
    0,                                          /* tp_descr_get */
930
    0,                                          /* tp_descr_set */
931
    0,                                          /* tp_dictoffset */
932
    0,                                          /* tp_init */
933
    0,                                          /* tp_alloc */
934
    tuple_new,                                  /* tp_new */
935
    PyObject_GC_Del,                            /* tp_free */
936
    .tp_vectorcall = tuple_vectorcall,
937
    .tp_version_tag = _Py_TYPE_VERSION_TUPLE,
938
};
939
940
/* The following function breaks the notion that tuples are immutable:
941
   it changes the size of a tuple.  We get away with this only if there
942
   is only one module referencing the object.  You can also think of it
943
   as creating a new tuple object and destroying the old one, only more
944
   efficiently.  In any case, don't use this if the tuple may already be
945
   known to some other part of the code. */
946
947
int
948
_PyTuple_Resize(PyObject **pv, Py_ssize_t newsize)
949
512
{
950
512
    PyTupleObject *v;
951
512
    PyTupleObject *sv;
952
512
    Py_ssize_t i;
953
512
    Py_ssize_t oldsize;
954
955
512
    v = (PyTupleObject *) *pv;
956
512
    if (v == NULL || !Py_IS_TYPE(v, &PyTuple_Type) ||
957
512
        (Py_SIZE(v) != 0 && !_PyObject_IsUniquelyReferenced(*pv))) {
958
0
        *pv = 0;
959
0
        Py_XDECREF(v);
960
0
        PyErr_BadInternalCall();
961
0
        return -1;
962
0
    }
963
964
512
    oldsize = Py_SIZE(v);
965
512
    if (oldsize == newsize) {
966
484
        return 0;
967
484
    }
968
28
    if (newsize == 0) {
969
0
        Py_DECREF(v);
970
0
        *pv = tuple_get_empty();
971
0
        return 0;
972
0
    }
973
28
    if (oldsize == 0) {
974
#ifdef Py_DEBUG
975
        assert(v == &_Py_SINGLETON(tuple_empty));
976
#endif
977
        /* The empty tuple is statically allocated so we never
978
           resize it in-place. */
979
0
        Py_DECREF(v);
980
0
        *pv = PyTuple_New(newsize);
981
0
        return *pv == NULL ? -1 : 0;
982
0
    }
983
984
28
    if (_PyObject_GC_IS_TRACKED(v)) {
985
28
        _PyObject_GC_UNTRACK(v);
986
28
    }
987
#ifdef Py_TRACE_REFS
988
    _Py_ForgetReference((PyObject *) v);
989
#endif
990
    /* DECREF items deleted by shrinkage */
991
112
    for (i = newsize; i < oldsize; i++) {
992
84
        Py_CLEAR(v->ob_item[i]);
993
84
    }
994
28
    _PyReftracerTrack((PyObject *)v, PyRefTracer_DESTROY);
995
28
    sv = PyObject_GC_Resize(PyTupleObject, v, newsize);
996
28
    if (sv == NULL) {
997
0
        *pv = NULL;
998
#ifdef Py_REF_DEBUG
999
        _Py_DecRefTotal(_PyThreadState_GET());
1000
#endif
1001
0
        PyObject_GC_Del(v);
1002
0
        return -1;
1003
0
    }
1004
28
    _Py_NewReferenceNoTotal((PyObject *) sv);
1005
    /* Zero out items added by growing */
1006
28
    if (newsize > oldsize)
1007
0
        memset(&sv->ob_item[oldsize], 0,
1008
0
               sizeof(*sv->ob_item) * (newsize - oldsize));
1009
28
    *pv = (PyObject *) sv;
1010
28
    _PyObject_GC_TRACK(sv);
1011
28
    return 0;
1012
28
}
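A sketch of the over-allocate-then-shrink pattern this private helper supports (hypothetical caller; on failure the function has already released the tuple and set the pointer to NULL):

    static PyObject *
    collect_up_to(Py_ssize_t hint)
    {
        PyObject *t = PyTuple_New(hint);
        if (t == NULL) {
            return NULL;
        }
        Py_ssize_t n = 0;
        /* ... fill slots 0..n-1 with PyTuple_SET_ITEM(t, n++, item) ... */
        if (_PyTuple_Resize(&t, n) < 0) {
            return NULL;          /* t was already freed and set to NULL */
        }
        return t;
    }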
1013
1014
/*********************** Tuple Iterator **************************/
1015
1016
1.10G
#define _PyTupleIterObject_CAST(op) ((_PyTupleIterObject *)(op))
1017
1018
static void
1019
tupleiter_dealloc(PyObject *self)
1020
4.87M
{
1021
4.87M
    _PyTupleIterObject *it = _PyTupleIterObject_CAST(self);
1022
4.87M
    _PyObject_GC_UNTRACK(it);
1023
4.87M
    Py_XDECREF(it->it_seq);
1024
4.87M
    assert(Py_IS_TYPE(self, &PyTupleIter_Type));
1025
4.87M
    _Py_FREELIST_FREE(tuple_iters, it, PyObject_GC_Del);
1026
4.87M
}
1027
1028
static int
1029
tupleiter_traverse(PyObject *self, visitproc visit, void *arg)
1030
10
{
1031
10
    _PyTupleIterObject *it = _PyTupleIterObject_CAST(self);
1032
10
    Py_VISIT(it->it_seq);
1033
10
    return 0;
1034
10
}
1035
1036
static PyObject *
1037
tupleiter_next(PyObject *self)
1038
1.10G
{
1039
1.10G
    _PyTupleIterObject *it = _PyTupleIterObject_CAST(self);
1040
1.10G
    PyTupleObject *seq;
1041
1.10G
    PyObject *item;
1042
1043
1.10G
    assert(it != NULL);
1044
1.10G
    seq = it->it_seq;
1045
1.10G
#ifndef Py_GIL_DISABLED
1046
1.10G
    if (seq == NULL)
1047
0
        return NULL;
1048
1.10G
#endif
1049
1.10G
    assert(PyTuple_Check(seq));
1050
1051
1.10G
    Py_ssize_t index = FT_ATOMIC_LOAD_SSIZE_RELAXED(it->it_index);
1052
1.10G
    if (index < PyTuple_GET_SIZE(seq)) {
1053
1.09G
        FT_ATOMIC_STORE_SSIZE_RELAXED(it->it_index, index + 1);
1054
1.09G
        item = PyTuple_GET_ITEM(seq, index);
1055
1.09G
        return Py_NewRef(item);
1056
1.09G
    }
1057
1058
4.87M
#ifndef Py_GIL_DISABLED
1059
4.87M
    it->it_seq = NULL;
1060
4.87M
    Py_DECREF(seq);
1061
4.87M
#endif
1062
4.87M
    return NULL;
1063
1.10G
}
1064
1065
static PyObject *
1066
tupleiter_len(PyObject *self, PyObject *Py_UNUSED(ignored))
1067
0
{
1068
0
    _PyTupleIterObject *it = _PyTupleIterObject_CAST(self);
1069
0
    Py_ssize_t len = 0;
1070
#ifdef Py_GIL_DISABLED
1071
    Py_ssize_t idx = FT_ATOMIC_LOAD_SSIZE_RELAXED(it->it_index);
1072
    Py_ssize_t seq_len = PyTuple_GET_SIZE(it->it_seq);
1073
    if (idx < seq_len)
1074
        len = seq_len - idx;
1075
#else
1076
0
    if (it->it_seq)
1077
0
        len = PyTuple_GET_SIZE(it->it_seq) - it->it_index;
1078
0
#endif
1079
0
    return PyLong_FromSsize_t(len);
1080
0
}
1081
1082
PyDoc_STRVAR(length_hint_doc, "Private method returning an estimate of len(list(it)).");
1083
1084
static PyObject *
1085
tupleiter_reduce(PyObject *self, PyObject *Py_UNUSED(ignored))
1086
0
{
1087
0
    PyObject *iter = _PyEval_GetBuiltin(&_Py_ID(iter));
1088
1089
    /* _PyEval_GetBuiltin can invoke arbitrary code,
1090
     * call must be before access of iterator pointers.
1091
     * see issue #101765 */
1092
0
    _PyTupleIterObject *it = _PyTupleIterObject_CAST(self);
1093
1094
#ifdef Py_GIL_DISABLED
1095
    Py_ssize_t idx = FT_ATOMIC_LOAD_SSIZE_RELAXED(it->it_index);
1096
    if (idx < PyTuple_GET_SIZE(it->it_seq))
1097
        return Py_BuildValue("N(O)n", iter, it->it_seq, idx);
1098
#else
1099
0
    if (it->it_seq)
1100
0
        return Py_BuildValue("N(O)n", iter, it->it_seq, it->it_index);
1101
0
#endif
1102
0
    return Py_BuildValue("N(())", iter);
1103
0
}
1104
1105
static PyObject *
1106
tupleiter_setstate(PyObject *self, PyObject *state)
1107
0
{
1108
0
    _PyTupleIterObject *it = _PyTupleIterObject_CAST(self);
1109
0
    Py_ssize_t index = PyLong_AsSsize_t(state);
1110
0
    if (index == -1 && PyErr_Occurred())
1111
0
        return NULL;
1112
0
    if (it->it_seq != NULL) {
1113
0
        if (index < 0)
1114
0
            index = 0;
1115
0
        else if (index > PyTuple_GET_SIZE(it->it_seq))
1116
0
            index = PyTuple_GET_SIZE(it->it_seq); /* exhausted iterator */
1117
0
        FT_ATOMIC_STORE_SSIZE_RELAXED(it->it_index, index);
1118
0
    }
1119
0
    Py_RETURN_NONE;
1120
0
}
1121
1122
PyDoc_STRVAR(reduce_doc, "Return state information for pickling.");
1123
PyDoc_STRVAR(setstate_doc, "Set state information for unpickling.");
1124
1125
static PyMethodDef tupleiter_methods[] = {
1126
    {"__length_hint__", tupleiter_len, METH_NOARGS, length_hint_doc},
1127
    {"__reduce__", tupleiter_reduce, METH_NOARGS, reduce_doc},
1128
    {"__setstate__", tupleiter_setstate, METH_O, setstate_doc},
1129
    {NULL, NULL, 0, NULL} /* sentinel */
1130
};
1131
1132
PyTypeObject PyTupleIter_Type = {
1133
    PyVarObject_HEAD_INIT(&PyType_Type, 0)
1134
    "tuple_iterator",                           /* tp_name */
1135
    sizeof(_PyTupleIterObject),                 /* tp_basicsize */
1136
    0,                                          /* tp_itemsize */
1137
    /* methods */
1138
    tupleiter_dealloc,                          /* tp_dealloc */
1139
    0,                                          /* tp_vectorcall_offset */
1140
    0,                                          /* tp_getattr */
1141
    0,                                          /* tp_setattr */
1142
    0,                                          /* tp_as_async */
1143
    0,                                          /* tp_repr */
1144
    0,                                          /* tp_as_number */
1145
    0,                                          /* tp_as_sequence */
1146
    0,                                          /* tp_as_mapping */
1147
    0,                                          /* tp_hash */
1148
    0,                                          /* tp_call */
1149
    0,                                          /* tp_str */
1150
    PyObject_GenericGetAttr,                    /* tp_getattro */
1151
    0,                                          /* tp_setattro */
1152
    0,                                          /* tp_as_buffer */
1153
    Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC,/* tp_flags */
1154
    0,                                          /* tp_doc */
1155
    tupleiter_traverse,                         /* tp_traverse */
1156
    0,                                          /* tp_clear */
1157
    0,                                          /* tp_richcompare */
1158
    0,                                          /* tp_weaklistoffset */
1159
    PyObject_SelfIter,                          /* tp_iter */
1160
    tupleiter_next,                             /* tp_iternext */
1161
    tupleiter_methods,                          /* tp_methods */
1162
    0,
1163
};
1164
1165
static PyObject *
1166
tuple_iter(PyObject *seq)
1167
4.87M
{
1168
4.87M
    if (!PyTuple_Check(seq)) {
1169
0
        PyErr_BadInternalCall();
1170
0
        return NULL;
1171
0
    }
1172
4.87M
    _PyTupleIterObject *it = _Py_FREELIST_POP(_PyTupleIterObject, tuple_iters);
1173
4.87M
    if (it == NULL) {
1174
60
        it = PyObject_GC_New(_PyTupleIterObject, &PyTupleIter_Type);
1175
60
        if (it == NULL)
1176
0
            return NULL;
1177
60
    }
1178
4.87M
    it->it_index = 0;
1179
4.87M
    it->it_seq = (PyTupleObject *)Py_NewRef(seq);
1180
4.87M
    _PyObject_GC_TRACK(it);
1181
4.87M
    return (PyObject *)it;
1182
4.87M
}
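tuple_iter() is what the generic iteration protocol ends up calling; a self-contained sketch of driving it from C (illustrative helper name):

    static int
    visit_items(PyObject *tuple)
    {
        PyObject *it = PyObject_GetIter(tuple);    /* dispatches to tuple_iter() */
        if (it == NULL) {
            return -1;
        }
        PyObject *item;
        while ((item = PyIter_Next(it)) != NULL) {
            /* ... use item ... */
            Py_DECREF(item);
        }
        Py_DECREF(it);
        /* PyIter_Next returns NULL both on exhaustion and on error. */
        return PyErr_Occurred() ? -1 : 0;
    }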
1183
1184
1185
/*************
1186
 * freelists *
1187
 *************/
1188
1189
static inline int
1190
maybe_freelist_push(PyTupleObject *op)
1191
478M
{
1192
478M
    if (!Py_IS_TYPE(op, &PyTuple_Type)) {
1193
1.80M
        return 0;
1194
1.80M
    }
1195
476M
    Py_ssize_t index = Py_SIZE(op) - 1;
1196
476M
    if (index < PyTuple_MAXSAVESIZE) {
1197
472M
        return _Py_FREELIST_PUSH(tuples[index], op, Py_tuple_MAXFREELIST);
1198
472M
    }
1199
4.47M
    return 0;
1200
476M
}
1201
1202
/* Print summary info about the state of the optimized allocator */
1203
void
1204
_PyTuple_DebugMallocStats(FILE *out)
1205
0
{
1206
0
    for (int i = 0; i < PyTuple_MAXSAVESIZE; i++) {
1207
0
        int len = i + 1;
1208
0
        char buf[128];
1209
0
        PyOS_snprintf(buf, sizeof(buf),
1210
0
                      "free %d-sized PyTupleObject", len);
1211
0
        _PyDebugAllocatorStats(out, buf, _Py_FREELIST_SIZE(tuples[i]),
1212
0
                               _PyObject_VAR_SIZE(&PyTuple_Type, len));
1213
0
    }
1214
0
}