/src/cpython/Objects/tupleobject.c
Line | Count | Source |
1 | | /* Tuple object implementation */ |
2 | | |
3 | | #include "Python.h" |
4 | | #include "pycore_abstract.h" // _PyIndex_Check() |
5 | | #include "pycore_ceval.h" // _PyEval_GetBuiltin() |
6 | | #include "pycore_freelist.h" // _Py_FREELIST_PUSH() |
7 | | #include "pycore_gc.h" // _PyObject_GC_IS_TRACKED() |
8 | | #include "pycore_list.h" // _Py_memory_repeat() |
9 | | #include "pycore_modsupport.h" // _PyArg_NoKwnames() |
10 | | #include "pycore_object.h" // _PyObject_GC_TRACK() |
11 | | #include "pycore_stackref.h" // PyStackRef_AsPyObjectSteal() |
12 | | #include "pycore_tuple.h" // _PyTupleIterObject |
13 | | |
14 | | |
15 | | /*[clinic input] |
16 | | class tuple "PyTupleObject *" "&PyTuple_Type" |
17 | | [clinic start generated code]*/ |
18 | | /*[clinic end generated code: output=da39a3ee5e6b4b0d input=f051ba3cfdf9a189]*/ |
19 | | |
20 | | #include "clinic/tupleobject.c.h" |
21 | | |
22 | | |
23 | | static inline int maybe_freelist_push(PyTupleObject *); |
24 | | |
25 | | |
26 | | /* Allocate an uninitialized tuple object. Before making it public, following |
27 | | steps must be done: |
28 | | |
29 | | - Initialize its items. |
30 | | - Call _PyObject_GC_TRACK() on it. |
31 | | |
32 | | Because the empty tuple is always reused and it's already tracked by GC, |
33 | | this function must not be called with size == 0 (unless from PyTuple_New() |
34 | | which wraps this function). |
35 | | */ |
static PyTupleObject *
tuple_alloc(Py_ssize_t size)
{
    if (size < 0) {
        PyErr_BadInternalCall();
        return NULL;
    }
    assert(size != 0); // The empty tuple is statically allocated.
    // Small tuples are recycled through per-size freelists; slot for a
    // tuple of `size` items is tuples[size - 1].
    Py_ssize_t index = size - 1;
    if (index < PyTuple_MAXSAVESIZE) {
        PyTupleObject *op = _Py_FREELIST_POP(PyTupleObject, tuples[index]);
        if (op != NULL) {
            // A recycled object may carry a stale cached hash; clear it.
            _PyTuple_RESET_HASH_CACHE(op);
            return op;
        }
    }
    /* Check for overflow */
    if ((size_t)size > ((size_t)PY_SSIZE_T_MAX - (sizeof(PyTupleObject) -
                sizeof(PyObject *))) / sizeof(PyObject *)) {
        return (PyTupleObject *)PyErr_NoMemory();
    }
    PyTupleObject *result = PyObject_GC_NewVar(PyTupleObject, &PyTuple_Type, size);
    if (result != NULL) {
        _PyTuple_RESET_HASH_CACHE(result);
    }
    return result;
}
63 | | |
64 | | // The empty tuple singleton is not tracked by the GC. |
65 | | // It does not contain any Python object. |
66 | | // Note that tuple subclasses have their own empty instances. |
67 | | |
static inline PyObject *
tuple_get_empty(void)
{
    // Returned without an INCREF; callers hand it out directly, which
    // relies on the singleton outliving all users (it is statically
    // allocated — see the note above).
    return (PyObject *)&_Py_SINGLETON(tuple_empty);
}
73 | | |
74 | | PyObject * |
75 | | PyTuple_New(Py_ssize_t size) |
76 | 61.0M | { |
77 | 61.0M | PyTupleObject *op; |
78 | 61.0M | if (size == 0) { |
79 | 2.92M | return tuple_get_empty(); |
80 | 2.92M | } |
81 | 58.1M | op = tuple_alloc(size); |
82 | 58.1M | if (op == NULL) { |
83 | 0 | return NULL; |
84 | 0 | } |
85 | 1.89G | for (Py_ssize_t i = 0; i < size; i++) { |
86 | 1.83G | op->ob_item[i] = NULL; |
87 | 1.83G | } |
88 | 58.1M | _PyObject_GC_TRACK(op); |
89 | 58.1M | return (PyObject *) op; |
90 | 58.1M | } |
91 | | |
92 | | Py_ssize_t |
93 | | PyTuple_Size(PyObject *op) |
94 | 10.3M | { |
95 | 10.3M | if (!PyTuple_Check(op)) { |
96 | 0 | PyErr_BadInternalCall(); |
97 | 0 | return -1; |
98 | 0 | } |
99 | 10.3M | else |
100 | 10.3M | return Py_SIZE(op); |
101 | 10.3M | } |
102 | | |
103 | | PyObject * |
104 | | PyTuple_GetItem(PyObject *op, Py_ssize_t i) |
105 | 22.8M | { |
106 | 22.8M | if (!PyTuple_Check(op)) { |
107 | 0 | PyErr_BadInternalCall(); |
108 | 0 | return NULL; |
109 | 0 | } |
110 | 22.8M | if (i < 0 || i >= Py_SIZE(op)) { |
111 | 0 | PyErr_SetString(PyExc_IndexError, "tuple index out of range"); |
112 | 0 | return NULL; |
113 | 0 | } |
114 | 22.8M | return ((PyTupleObject *)op) -> ob_item[i]; |
115 | 22.8M | } |
116 | | |
117 | | int |
118 | | PyTuple_SetItem(PyObject *op, Py_ssize_t i, PyObject *newitem) |
119 | 131k | { |
120 | 131k | PyObject **p; |
121 | 131k | if (!PyTuple_Check(op) || !_PyObject_IsUniquelyReferenced(op)) { |
122 | 0 | Py_XDECREF(newitem); |
123 | 0 | PyErr_BadInternalCall(); |
124 | 0 | return -1; |
125 | 0 | } |
126 | 131k | if (i < 0 || i >= Py_SIZE(op)) { |
127 | 0 | Py_XDECREF(newitem); |
128 | 0 | PyErr_SetString(PyExc_IndexError, |
129 | 0 | "tuple assignment index out of range"); |
130 | 0 | return -1; |
131 | 0 | } |
132 | 131k | p = ((PyTupleObject *)op) -> ob_item + i; |
133 | 131k | Py_XSETREF(*p, newitem); |
134 | 131k | return 0; |
135 | 131k | } |
136 | | |
137 | | void |
138 | | _PyTuple_MaybeUntrack(PyObject *op) |
139 | 79.1M | { |
140 | 79.1M | PyTupleObject *t; |
141 | 79.1M | Py_ssize_t i, n; |
142 | | |
143 | 79.1M | if (!PyTuple_CheckExact(op) || !_PyObject_GC_IS_TRACKED(op)) |
144 | 0 | return; |
145 | 79.1M | t = (PyTupleObject *) op; |
146 | 79.1M | n = Py_SIZE(t); |
147 | 169M | for (i = 0; i < n; i++) { |
148 | 165M | PyObject *elt = PyTuple_GET_ITEM(t, i); |
149 | | /* Tuple with NULL elements aren't |
150 | | fully constructed, don't untrack |
151 | | them yet. */ |
152 | 165M | if (!elt || |
153 | 165M | _PyObject_GC_MAY_BE_TRACKED(elt)) |
154 | 75.7M | return; |
155 | 165M | } |
156 | 3.35M | _PyObject_GC_UNTRACK(op); |
157 | 3.35M | } |
158 | | |
159 | | /* Fast, but conservative check if an object maybe tracked |
160 | | May return true for an object that is not tracked, |
161 | | Will always return true for an object that is tracked. |
162 | | This is a temporary workaround until _PyObject_GC_IS_TRACKED |
163 | | becomes fast and safe to call on non-GC objects. |
164 | | */ |
static bool
maybe_tracked(PyObject *ob)
{
    // Conservative approximation: any instance of a GC-capable type
    // may be tracked.
    return _PyType_IS_GC(Py_TYPE(ob));
}
170 | | |
171 | | PyObject * |
172 | | PyTuple_Pack(Py_ssize_t n, ...) |
173 | 10.6M | { |
174 | 10.6M | Py_ssize_t i; |
175 | 10.6M | PyObject *o; |
176 | 10.6M | PyObject **items; |
177 | 10.6M | va_list vargs; |
178 | 10.6M | bool track = false; |
179 | | |
180 | 10.6M | if (n == 0) { |
181 | 0 | return tuple_get_empty(); |
182 | 0 | } |
183 | | |
184 | 10.6M | va_start(vargs, n); |
185 | 10.6M | PyTupleObject *result = tuple_alloc(n); |
186 | 10.6M | if (result == NULL) { |
187 | 0 | va_end(vargs); |
188 | 0 | return NULL; |
189 | 0 | } |
190 | 10.6M | items = result->ob_item; |
191 | 32.7M | for (i = 0; i < n; i++) { |
192 | 22.0M | o = va_arg(vargs, PyObject *); |
193 | 22.0M | if (!track && maybe_tracked(o)) { |
194 | 783k | track = true; |
195 | 783k | } |
196 | 22.0M | items[i] = Py_NewRef(o); |
197 | 22.0M | } |
198 | 10.6M | va_end(vargs); |
199 | 10.6M | if (track) { |
200 | 783k | _PyObject_GC_TRACK(result); |
201 | 783k | } |
202 | 10.6M | return (PyObject *)result; |
203 | 10.6M | } |
204 | | |
PyObject *
_PyTuple_FromPair(PyObject *first, PyObject *second)
{
    assert(first != NULL);
    assert(second != NULL);

    // Take new references here; the stealing variant below owns them.
    return _PyTuple_FromPairSteal(Py_NewRef(first), Py_NewRef(second));
}
213 | | |
214 | | PyObject * |
215 | | _PyTuple_FromPairSteal(PyObject *first, PyObject *second) |
216 | 84.1k | { |
217 | 84.1k | assert(first != NULL); |
218 | 84.1k | assert(second != NULL); |
219 | | |
220 | 84.1k | PyTupleObject *op = tuple_alloc(2); |
221 | 84.1k | if (op == NULL) { |
222 | 0 | Py_DECREF(first); |
223 | 0 | Py_DECREF(second); |
224 | 0 | return NULL; |
225 | 0 | } |
226 | 84.1k | PyObject **items = op->ob_item; |
227 | 84.1k | items[0] = first; |
228 | 84.1k | items[1] = second; |
229 | 84.1k | if (maybe_tracked(first) || maybe_tracked(second)) { |
230 | 84.0k | _PyObject_GC_TRACK(op); |
231 | 84.0k | } |
232 | 84.1k | return (PyObject *)op; |
233 | 84.1k | } |
234 | | |
235 | | /* Methods */ |
236 | | |
237 | | /* |
238 | | Free of a tuple where all contents have been stolen and |
239 | | is now untracked by GC. This operation is thus non-escaping. |
240 | | */ |
void
_PyStolenTuple_Free(PyObject *obj)
{
    assert(PyTuple_CheckExact(obj));
    PyTupleObject *op = _PyTuple_CAST(obj);
    assert(Py_SIZE(op) != 0);
    assert(!_PyObject_GC_IS_TRACKED(obj));
    // The items were already stolen by the caller, so unlike
    // tuple_dealloc() no per-item DECREF happens here.
    // This will abort on the empty singleton (if there is one).
    if (!maybe_freelist_push(op)) {
        PyTuple_Type.tp_free((PyObject *)op);
    }
}
253 | | |
static void
tuple_dealloc(PyObject *self)
{
    PyTupleObject *op = _PyTuple_CAST(self);
    if (Py_SIZE(op) == 0) {
        /* The empty tuple is statically allocated. */
        if (op == &_Py_SINGLETON(tuple_empty)) {
#ifdef Py_DEBUG
            _Py_FatalRefcountError("deallocating the empty tuple singleton");
#else
            return;
#endif
        }
#ifdef Py_DEBUG
        /* tuple subclasses have their own empty instances. */
        assert(!PyTuple_CheckExact(op));
#endif
    }

    PyObject_GC_UnTrack(op);

    // Release our reference to every item.
    Py_ssize_t i = Py_SIZE(op);
    while (--i >= 0) {
        Py_XDECREF(op->ob_item[i]);
    }
    // This will abort on the empty singleton (if there is one).
    if (!maybe_freelist_push(op)) {
        Py_TYPE(op)->tp_free((PyObject *)op);
    }
}
284 | | |
static PyObject *
tuple_repr(PyObject *self)
{
    PyTupleObject *v = _PyTuple_CAST(self);
    Py_ssize_t n = PyTuple_GET_SIZE(v);
    if (n == 0) {
        return PyUnicode_FromString("()");
    }

    /* While not mutable, it is still possible to end up with a cycle in a
       tuple through an object that stores itself within a tuple (and thus
       infinitely asks for the repr of itself). This should only be
       possible within a type. */
    int res = Py_ReprEnter((PyObject *)v);
    if (res != 0) {
        return res > 0 ? PyUnicode_FromString("(...)") : NULL;
    }

    // Preallocate a lower bound for the final string length.
    Py_ssize_t prealloc;
    if (n > 1) {
        // "(" + "1" + ", 2" * (len - 1) + ")"
        prealloc = 1 + 1 + (2 + 1) * (n - 1) + 1;
    }
    else {
        // "(1,)"
        prealloc = 4;
    }
    PyUnicodeWriter *writer = PyUnicodeWriter_Create(prealloc);
    if (writer == NULL) {
        goto error;
    }

    if (PyUnicodeWriter_WriteChar(writer, '(') < 0) {
        goto error;
    }

    /* Do repr() on each element. */
    for (Py_ssize_t i = 0; i < n; ++i) {
        if (i > 0) {
            if (PyUnicodeWriter_WriteChar(writer, ',') < 0) {
                goto error;
            }
            if (PyUnicodeWriter_WriteChar(writer, ' ') < 0) {
                goto error;
            }
        }

        if (PyUnicodeWriter_WriteRepr(writer, v->ob_item[i]) < 0) {
            goto error;
        }
    }

    // A 1-tuple needs the trailing comma: "(1,)".
    if (n == 1) {
        if (PyUnicodeWriter_WriteChar(writer, ',') < 0) {
            goto error;
        }
    }
    if (PyUnicodeWriter_WriteChar(writer, ')') < 0) {
        goto error;
    }

    Py_ReprLeave((PyObject *)v);
    return PyUnicodeWriter_Finish(writer);

error:
    PyUnicodeWriter_Discard(writer);
    Py_ReprLeave((PyObject *)v);
    return NULL;
}
354 | | |
355 | | |
356 | | /* Hash for tuples. This is a slightly simplified version of the xxHash |
357 | | non-cryptographic hash: |
358 | | - we do not use any parallelism, there is only 1 accumulator. |
359 | | - we drop the final mixing since this is just a permutation of the |
360 | | output space: it does not help against collisions. |
361 | | - at the end, we mangle the length with a single constant. |
362 | | For the xxHash specification, see |
363 | | https://github.com/Cyan4973/xxHash/blob/master/doc/xxhash_spec.md |
364 | | |
365 | | The constants for the hash function are defined in pycore_tuple.h. |
366 | | */ |
367 | | |
static Py_hash_t
tuple_hash(PyObject *op)
{
    PyTupleObject *v = _PyTuple_CAST(op);

    // Fast path: ob_hash caches the result; (Py_uhash_t)-1 means
    // "not computed yet".
    Py_uhash_t acc = FT_ATOMIC_LOAD_SSIZE_RELAXED(v->ob_hash);
    if (acc != (Py_uhash_t)-1) {
        return acc;
    }

    Py_ssize_t len = Py_SIZE(v);
    PyObject **item = v->ob_item;
    acc = _PyTuple_HASH_XXPRIME_5;
    for (Py_ssize_t i = 0; i < len; i++) {
        // PyObject_Hash() returns -1 on error; the unsigned compare
        // below catches that case.
        Py_uhash_t lane = PyObject_Hash(item[i]);
        if (lane == (Py_uhash_t)-1) {
            return -1;
        }
        acc += lane * _PyTuple_HASH_XXPRIME_2;
        acc = _PyTuple_HASH_XXROTATE(acc);
        acc *= _PyTuple_HASH_XXPRIME_1;
    }

    /* Add input length, mangled to keep the historical value of hash(()). */
    acc += len ^ (_PyTuple_HASH_XXPRIME_5 ^ 3527539UL);

    // -1 is reserved for "uncached/error"; remap it to another value.
    if (acc == (Py_uhash_t)-1) {
        acc = 1546275796;
    }

    // Relaxed store is sufficient: concurrent computations of the same
    // tuple's hash produce the same value.
    FT_ATOMIC_STORE_SSIZE_RELAXED(v->ob_hash, acc);

    return acc;
}
402 | | |
403 | | static Py_ssize_t |
404 | | tuple_length(PyObject *self) |
405 | 11.6M | { |
406 | 11.6M | PyTupleObject *a = _PyTuple_CAST(self); |
407 | 11.6M | return Py_SIZE(a); |
408 | 11.6M | } |
409 | | |
410 | | static int |
411 | | tuple_contains(PyObject *self, PyObject *el) |
412 | 21.5M | { |
413 | 21.5M | PyTupleObject *a = _PyTuple_CAST(self); |
414 | 21.5M | int cmp = 0; |
415 | 69.0M | for (Py_ssize_t i = 0; cmp == 0 && i < Py_SIZE(a); ++i) { |
416 | 47.5M | cmp = PyObject_RichCompareBool(PyTuple_GET_ITEM(a, i), el, Py_EQ); |
417 | 47.5M | } |
418 | 21.5M | return cmp; |
419 | 21.5M | } |
420 | | |
421 | | static PyObject * |
422 | | tuple_item(PyObject *op, Py_ssize_t i) |
423 | 18.1M | { |
424 | 18.1M | PyTupleObject *a = _PyTuple_CAST(op); |
425 | 18.1M | if (i < 0 || i >= Py_SIZE(a)) { |
426 | 108 | PyErr_SetString(PyExc_IndexError, "tuple index out of range"); |
427 | 108 | return NULL; |
428 | 108 | } |
429 | 18.1M | return Py_NewRef(a->ob_item[i]); |
430 | 18.1M | } |
431 | | |
432 | | PyObject * |
433 | | PyTuple_FromArray(PyObject *const *src, Py_ssize_t n) |
434 | 241M | { |
435 | 241M | if (n == 0) { |
436 | 76.0M | return tuple_get_empty(); |
437 | 76.0M | } |
438 | | |
439 | 165M | PyTupleObject *tuple = tuple_alloc(n); |
440 | 165M | if (tuple == NULL) { |
441 | 0 | return NULL; |
442 | 0 | } |
443 | 165M | PyObject **dst = tuple->ob_item; |
444 | 165M | bool track = false; |
445 | 2.58G | for (Py_ssize_t i = 0; i < n; i++) { |
446 | 2.42G | PyObject *item = src[i]; |
447 | 2.42G | if (!track && maybe_tracked(item)) { |
448 | 32.3M | track = true; |
449 | 32.3M | } |
450 | 2.42G | dst[i] = Py_NewRef(item); |
451 | 2.42G | } |
452 | 165M | if (track) { |
453 | 32.3M | _PyObject_GC_TRACK(tuple); |
454 | 32.3M | } |
455 | 165M | return (PyObject *)tuple; |
456 | 165M | } |
457 | | |
PyObject *
_PyTuple_FromStackRefStealOnSuccess(const _PyStackRef *src, Py_ssize_t n)
{
    if (n == 0) {
        return tuple_get_empty();
    }
    PyTupleObject *tuple = tuple_alloc(n);
    if (tuple == NULL) {
        // Steal-on-success: on failure the caller keeps ownership of
        // the stack refs, so nothing is released here.
        return NULL;
    }
    PyObject **dst = tuple->ob_item;
    bool track = false;
    for (Py_ssize_t i = 0; i < n; i++) {
        PyObject *item = PyStackRef_AsPyObjectSteal(src[i]);
        if (!track && maybe_tracked(item)) {
            track = true;
        }
        dst[i] = item;
    }
    if (track) {
        _PyObject_GC_TRACK(tuple);
    }
    return (PyObject *)tuple;
}
482 | | |
483 | | PyObject * |
484 | | _PyTuple_FromArraySteal(PyObject *const *src, Py_ssize_t n) |
485 | 71.2k | { |
486 | 71.2k | if (n == 0) { |
487 | 71 | return tuple_get_empty(); |
488 | 71 | } |
489 | 71.1k | PyTupleObject *tuple = tuple_alloc(n); |
490 | 71.1k | if (tuple == NULL) { |
491 | 0 | for (Py_ssize_t i = 0; i < n; i++) { |
492 | 0 | Py_DECREF(src[i]); |
493 | 0 | } |
494 | 0 | return NULL; |
495 | 0 | } |
496 | 71.1k | PyObject **dst = tuple->ob_item; |
497 | 524k | for (Py_ssize_t i = 0; i < n; i++) { |
498 | 453k | PyObject *item = src[i]; |
499 | 453k | dst[i] = item; |
500 | 453k | } |
501 | 71.1k | _PyObject_GC_TRACK(tuple); |
502 | 71.1k | return (PyObject *)tuple; |
503 | 71.1k | } |
504 | | |
505 | | static PyObject * |
506 | | tuple_slice(PyTupleObject *a, Py_ssize_t ilow, |
507 | | Py_ssize_t ihigh) |
508 | 17.0M | { |
509 | 17.0M | if (ilow < 0) |
510 | 0 | ilow = 0; |
511 | 17.0M | if (ihigh > Py_SIZE(a)) |
512 | 172 | ihigh = Py_SIZE(a); |
513 | 17.0M | if (ihigh < ilow) |
514 | 0 | ihigh = ilow; |
515 | 17.0M | if (ilow == 0 && ihigh == Py_SIZE(a) && PyTuple_CheckExact(a)) { |
516 | 0 | return Py_NewRef(a); |
517 | 0 | } |
518 | 17.0M | return PyTuple_FromArray(a->ob_item + ilow, ihigh - ilow); |
519 | 17.0M | } |
520 | | |
PyObject *
_PyTuple_BinarySlice(PyObject *container, PyObject *start, PyObject *stop)
{
    assert(PyTuple_CheckExact(container));
    Py_ssize_t len = Py_SIZE(container);
    Py_ssize_t istart, istop;
    if (!_PyEval_UnpackIndices(start, stop, len, &istart, &istop)) {
        return NULL;
    }
    // Full slice of an exact tuple: reuse the object itself.
    if (istart == 0 && istop == len) {
        return Py_NewRef(container);
    }
    // Normalize an empty (inverted) range.
    if (istop < istart) {
        istop = istart;
    }
    return PyTuple_FromArray(((PyTupleObject *)container)->ob_item + istart,
                             istop - istart);
}
539 | | |
540 | | PyObject * |
541 | | PyTuple_GetSlice(PyObject *op, Py_ssize_t i, Py_ssize_t j) |
542 | 17.0M | { |
543 | 17.0M | if (op == NULL || !PyTuple_Check(op)) { |
544 | 0 | PyErr_BadInternalCall(); |
545 | 0 | return NULL; |
546 | 0 | } |
547 | 17.0M | return tuple_slice((PyTupleObject *)op, i, j); |
548 | 17.0M | } |
549 | | |
static PyObject *
tuple_concat(PyObject *aa, PyObject *bb)
{
    PyTupleObject *a = _PyTuple_CAST(aa);
    // () + b with exact b: reuse b itself.
    if (Py_SIZE(a) == 0 && PyTuple_CheckExact(bb)) {
        return Py_NewRef(bb);
    }
    if (!PyTuple_Check(bb)) {
        PyErr_Format(PyExc_TypeError,
                     "can only concatenate tuple (not \"%.200s\") to tuple",
                     Py_TYPE(bb)->tp_name);
        return NULL;
    }
    PyTupleObject *b = (PyTupleObject *)bb;

    // a + () with exact a: reuse a itself.
    if (Py_SIZE(b) == 0 && PyTuple_CheckExact(a)) {
        return Py_NewRef(a);
    }
    assert((size_t)Py_SIZE(a) + (size_t)Py_SIZE(b) < PY_SSIZE_T_MAX);
    Py_ssize_t size = Py_SIZE(a) + Py_SIZE(b);
    if (size == 0) {
        return tuple_get_empty();
    }

    PyTupleObject *np = tuple_alloc(size);
    if (np == NULL) {
        return NULL;
    }

    // Copy a's items (taking new references) ...
    PyObject **src = a->ob_item;
    PyObject **dest = np->ob_item;
    for (Py_ssize_t i = 0; i < Py_SIZE(a); i++) {
        PyObject *v = src[i];
        dest[i] = Py_NewRef(v);
    }

    // ... then b's items after them.
    src = b->ob_item;
    dest = np->ob_item + Py_SIZE(a);
    for (Py_ssize_t i = 0; i < Py_SIZE(b); i++) {
        PyObject *v = src[i];
        dest[i] = Py_NewRef(v);
    }

    _PyObject_GC_TRACK(np);
    return (PyObject *)np;
}
596 | | |
static PyObject *
tuple_repeat(PyObject *self, Py_ssize_t n)
{
    PyTupleObject *a = _PyTuple_CAST(self);
    const Py_ssize_t input_size = Py_SIZE(a);
    if (input_size == 0 || n == 1) {
        if (PyTuple_CheckExact(a)) {
            /* Since tuples are immutable, we can return a shared
               copy in this case */
            return Py_NewRef(a);
        }
    }
    if (input_size == 0 || n <= 0) {
        return tuple_get_empty();
    }
    assert(n>0);

    // Refuse repetitions whose total size would overflow Py_ssize_t.
    if (input_size > PY_SSIZE_T_MAX / n)
        return PyErr_NoMemory();
    Py_ssize_t output_size = input_size * n;

    PyTupleObject *np = tuple_alloc(output_size);
    if (np == NULL)
        return NULL;

    PyObject **dest = np->ob_item;
    if (input_size == 1) {
        // Single element: bump its refcount once by n, then fan it out.
        PyObject *elem = a->ob_item[0];
        _Py_RefcntAdd(elem, n);
        PyObject **dest_end = dest + output_size;
        while (dest < dest_end) {
            *dest++ = elem;
        }
    }
    else {
        // Copy the first repetition (adding n references per item), then
        // let _Py_memory_repeat() duplicate that block to fill the rest.
        PyObject **src = a->ob_item;
        PyObject **src_end = src + input_size;
        while (src < src_end) {
            _Py_RefcntAdd(*src, n);
            *dest++ = *src++;
        }

        _Py_memory_repeat((char *)np->ob_item, sizeof(PyObject *)*output_size,
                          sizeof(PyObject *)*input_size);
    }
    _PyObject_GC_TRACK(np);
    return (PyObject *) np;
}
645 | | |
646 | | /*[clinic input] |
647 | | tuple.index |
648 | | |
649 | | value: object |
650 | | start: slice_index(accept={int}) = 0 |
651 | | stop: slice_index(accept={int}, c_default="PY_SSIZE_T_MAX") = sys.maxsize |
652 | | / |
653 | | |
654 | | Return first index of value. |
655 | | |
656 | | Raises ValueError if the value is not present. |
657 | | [clinic start generated code]*/ |
658 | | |
659 | | static PyObject * |
660 | | tuple_index_impl(PyTupleObject *self, PyObject *value, Py_ssize_t start, |
661 | | Py_ssize_t stop) |
662 | | /*[clinic end generated code: output=07b6f9f3cb5c33eb input=fb39e9874a21fe3f]*/ |
663 | 900 | { |
664 | 900 | Py_ssize_t i; |
665 | | |
666 | 900 | if (start < 0) { |
667 | 0 | start += Py_SIZE(self); |
668 | 0 | if (start < 0) |
669 | 0 | start = 0; |
670 | 0 | } |
671 | 900 | if (stop < 0) { |
672 | 0 | stop += Py_SIZE(self); |
673 | 0 | } |
674 | 900 | else if (stop > Py_SIZE(self)) { |
675 | 900 | stop = Py_SIZE(self); |
676 | 900 | } |
677 | 1.26k | for (i = start; i < stop; i++) { |
678 | 488 | int cmp = PyObject_RichCompareBool(self->ob_item[i], value, Py_EQ); |
679 | 488 | if (cmp > 0) |
680 | 124 | return PyLong_FromSsize_t(i); |
681 | 364 | else if (cmp < 0) |
682 | 0 | return NULL; |
683 | 488 | } |
684 | 776 | PyErr_SetString(PyExc_ValueError, "tuple.index(x): x not in tuple"); |
685 | 776 | return NULL; |
686 | 900 | } |
687 | | |
688 | | /*[clinic input] |
689 | | tuple.count |
690 | | |
691 | | value: object |
692 | | / |
693 | | |
694 | | Return number of occurrences of value. |
695 | | [clinic start generated code]*/ |
696 | | |
697 | | static PyObject * |
698 | | tuple_count_impl(PyTupleObject *self, PyObject *value) |
699 | | /*[clinic end generated code: output=cf02888d4bc15d7a input=531721aff65bd772]*/ |
700 | 0 | { |
701 | 0 | Py_ssize_t count = 0; |
702 | 0 | Py_ssize_t i; |
703 | |
|
704 | 0 | for (i = 0; i < Py_SIZE(self); i++) { |
705 | 0 | int cmp = PyObject_RichCompareBool(self->ob_item[i], value, Py_EQ); |
706 | 0 | if (cmp > 0) |
707 | 0 | count++; |
708 | 0 | else if (cmp < 0) |
709 | 0 | return NULL; |
710 | 0 | } |
711 | 0 | return PyLong_FromSsize_t(count); |
712 | 0 | } |
713 | | |
714 | | static int |
715 | | tuple_traverse(PyObject *self, visitproc visit, void *arg) |
716 | 111M | { |
717 | 111M | PyTupleObject *o = _PyTuple_CAST(self); |
718 | 348M | for (Py_ssize_t i = Py_SIZE(o); --i >= 0; ) { |
719 | 237M | Py_VISIT(o->ob_item[i]); |
720 | 237M | } |
721 | 111M | return 0; |
722 | 111M | } |
723 | | |
static PyObject *
tuple_richcompare(PyObject *v, PyObject *w, int op)
{
    PyTupleObject *vt, *wt;
    Py_ssize_t i;
    Py_ssize_t vlen, wlen;

    if (!PyTuple_Check(v) || !PyTuple_Check(w))
        Py_RETURN_NOTIMPLEMENTED;

    vt = (PyTupleObject *)v;
    wt = (PyTupleObject *)w;

    vlen = Py_SIZE(vt);
    wlen = Py_SIZE(wt);

    /* Note: the corresponding code for lists has an "early out" test
     * here when op is EQ or NE and the lengths differ. That pays there,
     * but Tim was unable to find any real code where EQ/NE tuple
     * compares don't have the same length, so testing for it here would
     * have cost without benefit.
     */

    /* Search for the first index where items are different.
     * Note that because tuples are immutable, it's safe to reuse
     * vlen and wlen across the comparison calls.
     */
    for (i = 0; i < vlen && i < wlen; i++) {
        int k = PyObject_RichCompareBool(vt->ob_item[i],
                                         wt->ob_item[i], Py_EQ);
        if (k < 0)
            return NULL;
        if (!k)
            break;
    }

    if (i >= vlen || i >= wlen) {
        /* No more items to compare -- compare sizes */
        Py_RETURN_RICHCOMPARE(vlen, wlen, op);
    }

    /* We have an item that differs -- shortcuts for EQ/NE */
    if (op == Py_EQ) {
        Py_RETURN_FALSE;
    }
    if (op == Py_NE) {
        Py_RETURN_TRUE;
    }

    /* Compare the final item again using the proper operator */
    return PyObject_RichCompare(vt->ob_item[i], wt->ob_item[i], op);
}
776 | | |
777 | | static PyObject * |
778 | | tuple_subtype_new(PyTypeObject *type, PyObject *iterable); |
779 | | |
780 | | /*[clinic input] |
781 | | @classmethod |
782 | | tuple.__new__ as tuple_new |
783 | | iterable: object(c_default="NULL") = () |
784 | | / |
785 | | |
786 | | Built-in immutable sequence. |
787 | | |
788 | | If no argument is given, the constructor returns an empty tuple. |
789 | | If iterable is specified the tuple is initialized from iterable's items. |
790 | | |
791 | | If the argument is a tuple, the return value is the same object. |
792 | | [clinic start generated code]*/ |
793 | | |
794 | | static PyObject * |
795 | | tuple_new_impl(PyTypeObject *type, PyObject *iterable) |
796 | | /*[clinic end generated code: output=4546d9f0d469bce7 input=86963bcde633b5a2]*/ |
797 | 977k | { |
798 | 977k | if (type != &PyTuple_Type) |
799 | 488k | return tuple_subtype_new(type, iterable); |
800 | | |
801 | 488k | if (iterable == NULL) { |
802 | 0 | return tuple_get_empty(); |
803 | 0 | } |
804 | 488k | else { |
805 | 488k | return PySequence_Tuple(iterable); |
806 | 488k | } |
807 | 488k | } |
808 | | |
809 | | static PyObject * |
810 | | tuple_vectorcall(PyObject *type, PyObject * const*args, |
811 | | size_t nargsf, PyObject *kwnames) |
812 | 219 | { |
813 | 219 | if (!_PyArg_NoKwnames("tuple", kwnames)) { |
814 | 0 | return NULL; |
815 | 0 | } |
816 | | |
817 | 219 | Py_ssize_t nargs = PyVectorcall_NARGS(nargsf); |
818 | 219 | if (!_PyArg_CheckPositional("tuple", nargs, 0, 1)) { |
819 | 0 | return NULL; |
820 | 0 | } |
821 | | |
822 | 219 | if (nargs) { |
823 | 219 | return tuple_new_impl(_PyType_CAST(type), args[0]); |
824 | 219 | } |
825 | 0 | else { |
826 | 0 | return tuple_get_empty(); |
827 | 0 | } |
828 | 219 | } |
829 | | |
static PyObject *
tuple_subtype_new(PyTypeObject *type, PyObject *iterable)
{
    PyObject *tmp, *newobj, *item;
    Py_ssize_t i, n;

    assert(PyType_IsSubtype(type, &PyTuple_Type));
    // tuple subclasses must implement the GC protocol
    assert(_PyType_IS_GC(type));

    // Build a plain tuple from the iterable first, then copy its items
    // into an instance allocated by the subclass's tp_alloc.
    tmp = tuple_new_impl(&PyTuple_Type, iterable);
    if (tmp == NULL)
        return NULL;
    assert(PyTuple_Check(tmp));
    /* This may allocate an empty tuple that is not the global one. */
    newobj = type->tp_alloc(type, n = PyTuple_GET_SIZE(tmp));
    if (newobj == NULL) {
        Py_DECREF(tmp);
        return NULL;
    }
    for (i = 0; i < n; i++) {
        item = PyTuple_GET_ITEM(tmp, i);
        PyTuple_SET_ITEM(newobj, i, Py_NewRef(item));
    }
    Py_DECREF(tmp);

    _PyTuple_RESET_HASH_CACHE(newobj);

    // Don't track if a subclass tp_alloc is PyType_GenericAlloc()
    if (!_PyObject_GC_IS_TRACKED(newobj)) {
        _PyObject_GC_TRACK(newobj);
    }
    return newobj;
}
864 | | |
/* Sequence protocol table (tp_as_sequence) for tuple. */
static PySequenceMethods tuple_as_sequence = {
    tuple_length,                               /* sq_length */
    tuple_concat,                               /* sq_concat */
    tuple_repeat,                               /* sq_repeat */
    tuple_item,                                 /* sq_item */
    0,                                          /* sq_slice */
    0,                                          /* sq_ass_item */
    0,                                          /* sq_ass_slice */
    tuple_contains,                             /* sq_contains */
};
875 | | |
static PyObject*
tuple_subscript(PyObject *op, PyObject* item)
{
    PyTupleObject *self = _PyTuple_CAST(op);
    if (_PyIndex_Check(item)) {
        Py_ssize_t i = PyNumber_AsSsize_t(item, PyExc_IndexError);
        if (i == -1 && PyErr_Occurred())
            return NULL;
        // Negative indices count from the end; tuple_item() raises
        // IndexError if still out of range.
        if (i < 0)
            i += PyTuple_GET_SIZE(self);
        return tuple_item(op, i);
    }
    else if (PySlice_Check(item)) {
        Py_ssize_t start, stop, step, slicelength, i;
        size_t cur;
        PyObject* it;
        PyObject **src, **dest;

        if (PySlice_Unpack(item, &start, &stop, &step) < 0) {
            return NULL;
        }
        slicelength = PySlice_AdjustIndices(PyTuple_GET_SIZE(self), &start,
                                            &stop, step);

        if (slicelength <= 0) {
            return tuple_get_empty();
        }
        else if (start == 0 && step == 1 &&
                 slicelength == PyTuple_GET_SIZE(self) &&
                 PyTuple_CheckExact(self)) {
            // Identity slice of an exact tuple: share the object.
            return Py_NewRef(self);
        }
        else {
            PyTupleObject* result = tuple_alloc(slicelength);
            if (!result) return NULL;

            src = self->ob_item;
            dest = result->ob_item;
            for (cur = start, i = 0; i < slicelength;
                 cur += step, i++) {
                it = Py_NewRef(src[cur]);
                dest[i] = it;
            }

            _PyObject_GC_TRACK(result);
            return (PyObject *)result;
        }
    }
    else {
        PyErr_Format(PyExc_TypeError,
                     "tuple indices must be integers or slices, not %.200s",
                     Py_TYPE(item)->tp_name);
        return NULL;
    }
}
931 | | |
/*[clinic input]
tuple.__getnewargs__
[clinic start generated code]*/

static PyObject *
tuple___getnewargs___impl(PyTupleObject *self)
/*[clinic end generated code: output=25e06e3ee56027e2 input=1aeb4b286a21639a]*/
{
    /* Pickle/copy support: return a 1-tuple holding a shallow copy of self,
       suitable as the argument tuple for tuple.__new__.  "N" steals the
       reference produced by tuple_slice; a NULL from tuple_slice makes
       Py_BuildValue fail and propagate the error. */
    return Py_BuildValue("(N)", tuple_slice(self, 0, Py_SIZE(self)));
}
942 | | |
/* Method table for tuple; the METHODDEF macros come from the Argument
   Clinic generated header (clinic/tupleobject.c.h). */
static PyMethodDef tuple_methods[] = {
    TUPLE___GETNEWARGS___METHODDEF
    TUPLE_INDEX_METHODDEF
    TUPLE_COUNT_METHODDEF
    /* PEP 585: make tuple[int, str] work without typing.Tuple. */
    {"__class_getitem__", Py_GenericAlias, METH_O|METH_CLASS, PyDoc_STR("See PEP 585")},
    {NULL, NULL} /* sentinel */
};
950 | | |
static PyMappingMethods tuple_as_mapping = {
    tuple_length,       /* mp_length */
    tuple_subscript,    /* mp_subscript: handles both indices and slices */
    0                   /* mp_ass_subscript: tuples are immutable */
};

/* Defined below, after the iterator type. */
static PyObject *tuple_iter(PyObject *seq);
958 | | |
/* The type object for builtins.tuple.  tp_basicsize excludes one pointer
   because ob_item[] overlaps the variable part; each item adds tp_itemsize
   (one PyObject* slot). */
PyTypeObject PyTuple_Type = {
    PyVarObject_HEAD_INIT(&PyType_Type, 0)
    "tuple",
    sizeof(PyTupleObject) - sizeof(PyObject *),
    sizeof(PyObject *),
    tuple_dealloc,                              /* tp_dealloc */
    0,                                          /* tp_vectorcall_offset */
    0,                                          /* tp_getattr */
    0,                                          /* tp_setattr */
    0,                                          /* tp_as_async */
    tuple_repr,                                 /* tp_repr */
    0,                                          /* tp_as_number */
    &tuple_as_sequence,                         /* tp_as_sequence */
    &tuple_as_mapping,                          /* tp_as_mapping */
    tuple_hash,                                 /* tp_hash */
    0,                                          /* tp_call */
    0,                                          /* tp_str */
    PyObject_GenericGetAttr,                    /* tp_getattro */
    0,                                          /* tp_setattro */
    0,                                          /* tp_as_buffer */
    Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC |
        Py_TPFLAGS_BASETYPE | Py_TPFLAGS_TUPLE_SUBCLASS |
        _Py_TPFLAGS_MATCH_SELF | Py_TPFLAGS_SEQUENCE, /* tp_flags */
    tuple_new__doc__,                           /* tp_doc */
    tuple_traverse,                             /* tp_traverse */
    0,                                          /* tp_clear */
    tuple_richcompare,                          /* tp_richcompare */
    0,                                          /* tp_weaklistoffset */
    tuple_iter,                                 /* tp_iter */
    0,                                          /* tp_iternext */
    tuple_methods,                              /* tp_methods */
    0,                                          /* tp_members */
    0,                                          /* tp_getset */
    0,                                          /* tp_base */
    0,                                          /* tp_dict */
    0,                                          /* tp_descr_get */
    0,                                          /* tp_descr_set */
    0,                                          /* tp_dictoffset */
    0,                                          /* tp_init */
    0,                                          /* tp_alloc */
    tuple_new,                                  /* tp_new */
    PyObject_GC_Del,                            /* tp_free */
    .tp_vectorcall = tuple_vectorcall,
    .tp_version_tag = _Py_TYPE_VERSION_TUPLE,
};
1004 | | |
1005 | | /* The following function breaks the notion that tuples are immutable: |
1006 | | it changes the size of a tuple. We get away with this only if there |
1007 | | is only one module referencing the object. You can also think of it |
1008 | | as creating a new tuple object and destroying the old one, only more |
1009 | | efficiently. In any case, don't use this if the tuple may already be |
1010 | | known to some other part of the code. */ |
1011 | | |
int
_PyTuple_Resize(PyObject **pv, Py_ssize_t newsize)
{
    PyTupleObject *v;
    PyTupleObject *sv;
    Py_ssize_t i;
    Py_ssize_t oldsize;

    v = (PyTupleObject *) *pv;
    /* In-place resizing is only legal for a genuine tuple with a single
       reference; the (shared, immortal) empty tuple is exempt from the
       uniqueness check and handled specially below.  On violation, *pv is
       cleared and the reference is dropped. */
    if (v == NULL || !Py_IS_TYPE(v, &PyTuple_Type) ||
        (Py_SIZE(v) != 0 && !_PyObject_IsUniquelyReferenced(*pv))) {
        *pv = 0;
        Py_XDECREF(v);
        PyErr_BadInternalCall();
        return -1;
    }

    oldsize = Py_SIZE(v);
    if (oldsize == newsize) {
        return 0;
    }
    /* Shrinking to zero: swap in the statically allocated empty tuple. */
    if (newsize == 0) {
        Py_DECREF(v);
        *pv = tuple_get_empty();
        return 0;
    }
    if (oldsize == 0) {
#ifdef Py_DEBUG
        assert(v == &_Py_SINGLETON(tuple_empty));
#endif
        /* The empty tuple is statically allocated so we never
           resize it in-place. */
        Py_DECREF(v);
        *pv = PyTuple_New(newsize);
        return *pv == NULL ? -1 : 0;
    }

    /* From here on the old object identity is retired: untrack it from the
       GC, tell ref tracers it is gone, and re-register the resized block as
       a "new" reference afterwards. */
    if (_PyObject_GC_IS_TRACKED(v)) {
        _PyObject_GC_UNTRACK(v);
    }
#ifdef Py_TRACE_REFS
    _Py_ForgetReference((PyObject *) v);
#endif
    /* DECREF items deleted by shrinkage */
    for (i = newsize; i < oldsize; i++) {
        Py_CLEAR(v->ob_item[i]);
    }
    _PyReftracerTrack((PyObject *)v, PyRefTracer_DESTROY);
    sv = PyObject_GC_Resize(PyTupleObject, v, newsize);
    if (sv == NULL) {
        /* Resize failed; v was already "forgotten" above, so release its
           memory directly and report the error. */
        *pv = NULL;
#ifdef Py_REF_DEBUG
        _Py_DecRefTotal(_PyThreadState_GET());
#endif
        PyObject_GC_Del(v);
        return -1;
    }
    _Py_NewReferenceNoTotal((PyObject *) sv);
    /* Zero out items added by growing */
    if (newsize > oldsize)
        memset(&sv->ob_item[oldsize], 0,
               sizeof(*sv->ob_item) * (newsize - oldsize));
    *pv = (PyObject *) sv;
    _PyObject_GC_TRACK(sv);
    return 0;
}
1078 | | |
/*********************** Tuple Iterator **************************/

/* Downcast a PyObject* known to be a tuple iterator. */
#define _PyTupleIterObject_CAST(op) ((_PyTupleIterObject *)(op))
1082 | | |
1083 | | static void |
1084 | | tupleiter_dealloc(PyObject *self) |
1085 | 4.20M | { |
1086 | 4.20M | _PyTupleIterObject *it = _PyTupleIterObject_CAST(self); |
1087 | 4.20M | _PyObject_GC_UNTRACK(it); |
1088 | 4.20M | Py_XDECREF(it->it_seq); |
1089 | 4.20M | assert(Py_IS_TYPE(self, &PyTupleIter_Type)); |
1090 | 4.20M | _Py_FREELIST_FREE(tuple_iters, it, PyObject_GC_Del); |
1091 | 4.20M | } |
1092 | | |
1093 | | static int |
1094 | | tupleiter_traverse(PyObject *self, visitproc visit, void *arg) |
1095 | 43.0k | { |
1096 | 43.0k | _PyTupleIterObject *it = _PyTupleIterObject_CAST(self); |
1097 | 43.0k | Py_VISIT(it->it_seq); |
1098 | 43.0k | return 0; |
1099 | 43.0k | } |
1100 | | |
static PyObject *
tupleiter_next(PyObject *self)
{
    _PyTupleIterObject *it = _PyTupleIterObject_CAST(self);
    PyTupleObject *seq;
    PyObject *item;

    assert(it != NULL);
    seq = it->it_seq;
#ifndef Py_GIL_DISABLED
    /* With the GIL, it_seq is set to NULL on exhaustion (below), so NULL
       means "already done".  Free-threaded builds compile that clearing
       out, so it_seq stays valid and no NULL check is needed. */
    if (seq == NULL)
        return NULL;
#endif
    assert(PyTuple_Check(seq));

    /* Relaxed atomics keep the index load/store tear-free under
       free-threading; the tuple itself is immutable. */
    Py_ssize_t index = FT_ATOMIC_LOAD_SSIZE_RELAXED(it->it_index);
    if (index < PyTuple_GET_SIZE(seq)) {
        FT_ATOMIC_STORE_SSIZE_RELAXED(it->it_index, index + 1);
        item = PyTuple_GET_ITEM(seq, index);
        return Py_NewRef(item);
    }

#ifndef Py_GIL_DISABLED
    /* Exhausted: eagerly drop the tuple reference so it can be freed
       before the iterator object itself is. */
    it->it_seq = NULL;
    Py_DECREF(seq);
#endif
    return NULL;
}
1129 | | |
/* __length_hint__ implementation: number of items remaining, 0 once the
   iterator is exhausted. */
static PyObject *
tupleiter_len(PyObject *self, PyObject *Py_UNUSED(ignored))
{
    _PyTupleIterObject *it = _PyTupleIterObject_CAST(self);
    Py_ssize_t len = 0;
#ifdef Py_GIL_DISABLED
    /* it_seq is never cleared in free-threaded builds; exhaustion shows up
       as idx >= seq_len instead. */
    Py_ssize_t idx = FT_ATOMIC_LOAD_SSIZE_RELAXED(it->it_index);
    Py_ssize_t seq_len = PyTuple_GET_SIZE(it->it_seq);
    if (idx < seq_len)
        len = seq_len - idx;
#else
    if (it->it_seq)
        len = PyTuple_GET_SIZE(it->it_seq) - it->it_index;
#endif
    return PyLong_FromSsize_t(len);
}

PyDoc_STRVAR(length_hint_doc, "Private method returning an estimate of len(list(it)).");
1148 | | |
/* __reduce__ implementation: pickle as (iter, (seq,), index) while the
   iterator is live, or as (iter, ((),)) once exhausted. */
static PyObject *
tupleiter_reduce(PyObject *self, PyObject *Py_UNUSED(ignored))
{
    PyObject *iter = _PyEval_GetBuiltin(&_Py_ID(iter));

    /* _PyEval_GetBuiltin can invoke arbitrary code,
     * call must be before access of iterator pointers.
     * see issue #101765 */
    _PyTupleIterObject *it = _PyTupleIterObject_CAST(self);

#ifdef Py_GIL_DISABLED
    Py_ssize_t idx = FT_ATOMIC_LOAD_SSIZE_RELAXED(it->it_index);
    if (idx < PyTuple_GET_SIZE(it->it_seq))
        return Py_BuildValue("N(O)n", iter, it->it_seq, idx);
#else
    if (it->it_seq)
        return Py_BuildValue("N(O)n", iter, it->it_seq, it->it_index);
#endif
    return Py_BuildValue("N(())", iter);
}
1169 | | |
1170 | | static PyObject * |
1171 | | tupleiter_setstate(PyObject *self, PyObject *state) |
1172 | 0 | { |
1173 | 0 | _PyTupleIterObject *it = _PyTupleIterObject_CAST(self); |
1174 | 0 | Py_ssize_t index = PyLong_AsSsize_t(state); |
1175 | 0 | if (index == -1 && PyErr_Occurred()) |
1176 | 0 | return NULL; |
1177 | 0 | if (it->it_seq != NULL) { |
1178 | 0 | if (index < 0) |
1179 | 0 | index = 0; |
1180 | 0 | else if (index > PyTuple_GET_SIZE(it->it_seq)) |
1181 | 0 | index = PyTuple_GET_SIZE(it->it_seq); /* exhausted iterator */ |
1182 | 0 | FT_ATOMIC_STORE_SSIZE_RELAXED(it->it_index, index); |
1183 | 0 | } |
1184 | 0 | Py_RETURN_NONE; |
1185 | 0 | } |
1186 | | |
PyDoc_STRVAR(reduce_doc, "Return state information for pickling.");
PyDoc_STRVAR(setstate_doc, "Set state information for unpickling.");

/* Method table for the tuple iterator (pickle + length-hint protocol). */
static PyMethodDef tupleiter_methods[] = {
    {"__length_hint__", tupleiter_len, METH_NOARGS, length_hint_doc},
    {"__reduce__", tupleiter_reduce, METH_NOARGS, reduce_doc},
    {"__setstate__", tupleiter_setstate, METH_O, setstate_doc},
    {NULL, NULL, 0, NULL} /* sentinel */
};
1196 | | |
/* Type object for the tuple iterator.  Not instantiable from Python
   (no tp_new); created only via tuple.__iter__ / tuple_iter below. */
PyTypeObject PyTupleIter_Type = {
    PyVarObject_HEAD_INIT(&PyType_Type, 0)
    "tuple_iterator",                           /* tp_name */
    sizeof(_PyTupleIterObject),                 /* tp_basicsize */
    0,                                          /* tp_itemsize */
    /* methods */
    tupleiter_dealloc,                          /* tp_dealloc */
    0,                                          /* tp_vectorcall_offset */
    0,                                          /* tp_getattr */
    0,                                          /* tp_setattr */
    0,                                          /* tp_as_async */
    0,                                          /* tp_repr */
    0,                                          /* tp_as_number */
    0,                                          /* tp_as_sequence */
    0,                                          /* tp_as_mapping */
    0,                                          /* tp_hash */
    0,                                          /* tp_call */
    0,                                          /* tp_str */
    PyObject_GenericGetAttr,                    /* tp_getattro */
    0,                                          /* tp_setattro */
    0,                                          /* tp_as_buffer */
    Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC,/* tp_flags */
    0,                                          /* tp_doc */
    tupleiter_traverse,                         /* tp_traverse */
    0,                                          /* tp_clear */
    0,                                          /* tp_richcompare */
    0,                                          /* tp_weaklistoffset */
    PyObject_SelfIter,                          /* tp_iter */
    tupleiter_next,                             /* tp_iternext */
    tupleiter_methods,                          /* tp_methods */
    0,
};
1229 | | |
1230 | | static PyObject * |
1231 | | tuple_iter(PyObject *seq) |
1232 | 4.20M | { |
1233 | 4.20M | if (!PyTuple_Check(seq)) { |
1234 | 0 | PyErr_BadInternalCall(); |
1235 | 0 | return NULL; |
1236 | 0 | } |
1237 | 4.20M | _PyTupleIterObject *it = _Py_FREELIST_POP(_PyTupleIterObject, tuple_iters); |
1238 | 4.20M | if (it == NULL) { |
1239 | 163k | it = PyObject_GC_New(_PyTupleIterObject, &PyTupleIter_Type); |
1240 | 163k | if (it == NULL) |
1241 | 0 | return NULL; |
1242 | 163k | } |
1243 | 4.20M | it->it_index = 0; |
1244 | 4.20M | it->it_seq = (PyTupleObject *)Py_NewRef(seq); |
1245 | 4.20M | _PyObject_GC_TRACK(it); |
1246 | 4.20M | return (PyObject *)it; |
1247 | 4.20M | } |
1248 | | |
1249 | | |
1250 | | /************* |
1251 | | * freelists * |
1252 | | *************/ |
1253 | | |
1254 | | static inline int |
1255 | | maybe_freelist_push(PyTupleObject *op) |
1256 | 422M | { |
1257 | 422M | if (!Py_IS_TYPE(op, &PyTuple_Type)) { |
1258 | 482k | return 0; |
1259 | 482k | } |
1260 | 422M | Py_ssize_t index = Py_SIZE(op) - 1; |
1261 | 422M | if (index < PyTuple_MAXSAVESIZE) { |
1262 | 414M | return _Py_FREELIST_PUSH(tuples[index], op, Py_tuple_MAXFREELIST); |
1263 | 414M | } |
1264 | 7.78M | return 0; |
1265 | 422M | } |
1266 | | |
1267 | | /* Print summary info about the state of the optimized allocator */ |
1268 | | void |
1269 | | _PyTuple_DebugMallocStats(FILE *out) |
1270 | 0 | { |
1271 | 0 | for (int i = 0; i < PyTuple_MAXSAVESIZE; i++) { |
1272 | 0 | int len = i + 1; |
1273 | 0 | char buf[128]; |
1274 | 0 | PyOS_snprintf(buf, sizeof(buf), |
1275 | 0 | "free %d-sized PyTupleObject", len); |
1276 | 0 | _PyDebugAllocatorStats(out, buf, _Py_FREELIST_SIZE(tuples[i]), |
1277 | 0 | _PyObject_VAR_SIZE(&PyTuple_Type, len)); |
1278 | 0 | } |
1279 | 0 | } |