/src/Python-3.8.3/Objects/tupleobject.c
Line | Count | Source |
1 | | |
2 | | /* Tuple object implementation */ |
3 | | |
4 | | #include "Python.h" |
5 | | #include "pycore_object.h" |
6 | | #include "pycore_pystate.h" |
7 | | #include "pycore_accu.h" |
8 | | |
9 | | /*[clinic input] |
10 | | class tuple "PyTupleObject *" "&PyTuple_Type" |
11 | | [clinic start generated code]*/ |
12 | | /*[clinic end generated code: output=da39a3ee5e6b4b0d input=f051ba3cfdf9a189]*/ |
13 | | |
14 | | #include "clinic/tupleobject.c.h" |
15 | | |
16 | | /* Speed optimization to avoid frequent malloc/free of small tuples */ |
17 | | #ifndef PyTuple_MAXSAVESIZE |
18 | 254k | #define PyTuple_MAXSAVESIZE 20 /* Largest tuple to save on free list */ |
19 | | #endif |
20 | | #ifndef PyTuple_MAXFREELIST |
21 | 94.9k | #define PyTuple_MAXFREELIST 2000 /* Maximum number of tuples of each size to save */ |
22 | | #endif |
23 | | |
24 | | #if PyTuple_MAXSAVESIZE > 0 |
25 | | /* Entries 1 up to PyTuple_MAXSAVESIZE are free lists, entry 0 is the empty |
26 | | tuple () of which at most one instance will be allocated. |
27 | | */ |
28 | | static PyTupleObject *free_list[PyTuple_MAXSAVESIZE]; |
29 | | static int numfree[PyTuple_MAXSAVESIZE]; |
30 | | #endif |
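The declarations above implement the small-tuple cache: freed tuples of each size are chained through their first item slot and reused by PyTuple_New(). A minimal standalone sketch of that push/pop discipline follows; the sketch_* names are hypothetical, and the real code additionally maintains numfree[] bookkeeping and the shared empty-tuple singleton.

    /* Sketch only: per-size free list that reuses ob_item[0] as the "next" link.
       Assumes the PyTupleObject layout and PyTuple_MAXSAVESIZE defined above. */
    static PyTupleObject *sketch_free_list[PyTuple_MAXSAVESIZE];

    static void
    sketch_push(PyTupleObject *op, Py_ssize_t len)
    {
        op->ob_item[0] = (PyObject *)sketch_free_list[len];   /* link to old head */
        sketch_free_list[len] = op;                           /* new head */
    }

    static PyTupleObject *
    sketch_pop(Py_ssize_t len)
    {
        PyTupleObject *op = sketch_free_list[len];
        if (op != NULL)
            sketch_free_list[len] = (PyTupleObject *)op->ob_item[0];  /* unlink head */
        return op;   /* caller must reinitialise the object header, as PyTuple_New() does */
    }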
31 | | #ifdef COUNT_ALLOCS |
32 | | Py_ssize_t _Py_fast_tuple_allocs; |
33 | | Py_ssize_t _Py_tuple_zero_allocs; |
34 | | #endif |
35 | | |
36 | | /* Debug statistic to count GC tracking of tuples. |
37 | | Please note that tuples are only untracked when considered by the GC, and |
38 | | many of them will be dead before. Therefore, a tracking rate close to 100% |
39 | | does not necessarily prove that the heuristic is inefficient. |
40 | | */ |
41 | | #ifdef SHOW_TRACK_COUNT |
42 | | static Py_ssize_t count_untracked = 0; |
43 | | static Py_ssize_t count_tracked = 0; |
44 | | |
45 | | static void |
46 | | show_track(void) |
47 | | { |
48 | | PyInterpreterState *interp = _PyInterpreterState_Get(); |
49 | | if (!interp->config.show_alloc_count) { |
50 | | return; |
51 | | } |
52 | | |
53 | | fprintf(stderr, "Tuples created: %" PY_FORMAT_SIZE_T "d\n", |
54 | | count_tracked + count_untracked); |
55 | | fprintf(stderr, "Tuples tracked by the GC: %" PY_FORMAT_SIZE_T |
56 | | "d\n", count_tracked); |
57 | | fprintf(stderr, "%.2f%% tuple tracking rate\n\n", |
58 | | (100.0*count_tracked/(count_untracked+count_tracked))); |
59 | | } |
60 | | #endif |
61 | | |
62 | | /* Print summary info about the state of the optimized allocator */ |
63 | | void |
64 | | _PyTuple_DebugMallocStats(FILE *out) |
65 | 0 | { |
66 | 0 | #if PyTuple_MAXSAVESIZE > 0 |
67 | 0 | int i; |
68 | 0 | char buf[128]; |
69 | 0 | for (i = 1; i < PyTuple_MAXSAVESIZE; i++) { |
70 | 0 | PyOS_snprintf(buf, sizeof(buf), |
71 | 0 | "free %d-sized PyTupleObject", i); |
72 | 0 | _PyDebugAllocatorStats(out, |
73 | 0 | buf, |
74 | 0 | numfree[i], _PyObject_VAR_SIZE(&PyTuple_Type, i)); |
75 | 0 | } |
76 | 0 | #endif |
77 | 0 | } |
78 | | |
79 | | PyObject * |
80 | | PyTuple_New(Py_ssize_t size) |
81 | 85.8k | { |
82 | 85.8k | PyTupleObject *op; |
83 | 85.8k | Py_ssize_t i; |
84 | 85.8k | if (size < 0) { |
85 | 0 | PyErr_BadInternalCall(); |
86 | 0 | return NULL; |
87 | 0 | } |
88 | 85.8k | #if PyTuple_MAXSAVESIZE > 0 |
89 | 85.8k | if (size == 0 && free_list[0]) { |
90 | 6.46k | op = free_list[0]; |
91 | 6.46k | Py_INCREF(op); |
92 | | #ifdef COUNT_ALLOCS |
93 | | _Py_tuple_zero_allocs++; |
94 | | #endif |
95 | 6.46k | return (PyObject *) op; |
96 | 6.46k | } |
97 | 79.3k | if (size < PyTuple_MAXSAVESIZE && (op = free_list[size]) != NULL) { |
98 | 46.3k | free_list[size] = (PyTupleObject *) op->ob_item[0]; |
99 | 46.3k | numfree[size]--; |
100 | | #ifdef COUNT_ALLOCS |
101 | | _Py_fast_tuple_allocs++; |
102 | | #endif |
103 | | /* Inline PyObject_InitVar */ |
104 | | #ifdef Py_TRACE_REFS |
105 | | Py_SIZE(op) = size; |
106 | | Py_TYPE(op) = &PyTuple_Type; |
107 | | #endif |
108 | 46.3k | _Py_NewReference((PyObject *)op); |
109 | 46.3k | } |
110 | 33.0k | else |
111 | 33.0k | #endif |
112 | 33.0k | { |
113 | | /* Check for overflow */ |
114 | 33.0k | if ((size_t)size > ((size_t)PY_SSIZE_T_MAX - sizeof(PyTupleObject) - |
115 | 33.0k | sizeof(PyObject *)) / sizeof(PyObject *)) { |
116 | 0 | return PyErr_NoMemory(); |
117 | 0 | } |
118 | 33.0k | op = PyObject_GC_NewVar(PyTupleObject, &PyTuple_Type, size); |
119 | 33.0k | if (op == NULL) |
120 | 0 | return NULL; |
121 | 33.0k | } |
122 | 338k | for (i=0; i < size; i++) |
123 | 258k | op->ob_item[i] = NULL; |
124 | 79.3k | #if PyTuple_MAXSAVESIZE > 0 |
125 | 79.3k | if (size == 0) { |
126 | 14 | free_list[0] = op; |
127 | 14 | ++numfree[0]; |
128 | 14 | Py_INCREF(op); /* extra INCREF so that this is never freed */ |
129 | 14 | } |
130 | 79.3k | #endif |
131 | | #ifdef SHOW_TRACK_COUNT |
132 | | count_tracked++; |
133 | | #endif |
134 | 79.3k | _PyObject_GC_TRACK(op); |
135 | 79.3k | return (PyObject *) op; |
136 | 79.3k | } |
137 | | |
138 | | Py_ssize_t |
139 | | PyTuple_Size(PyObject *op) |
140 | 13.3k | { |
141 | 13.3k | if (!PyTuple_Check(op)) { |
142 | 0 | PyErr_BadInternalCall(); |
143 | 0 | return -1; |
144 | 0 | } |
145 | 13.3k | else |
146 | 13.3k | return Py_SIZE(op); |
147 | 13.3k | } |
148 | | |
149 | | PyObject * |
150 | | PyTuple_GetItem(PyObject *op, Py_ssize_t i) |
151 | 10.9k | { |
152 | 10.9k | if (!PyTuple_Check(op)) { |
153 | 0 | PyErr_BadInternalCall(); |
154 | 0 | return NULL; |
155 | 0 | } |
156 | 10.9k | if (i < 0 || i >= Py_SIZE(op)) { |
157 | 0 | PyErr_SetString(PyExc_IndexError, "tuple index out of range"); |
158 | 0 | return NULL; |
159 | 0 | } |
160 | 10.9k | return ((PyTupleObject *)op) -> ob_item[i]; |
161 | 10.9k | } |
162 | | |
163 | | int |
164 | | PyTuple_SetItem(PyObject *op, Py_ssize_t i, PyObject *newitem) |
165 | 0 | { |
166 | 0 | PyObject **p; |
167 | 0 | if (!PyTuple_Check(op) || op->ob_refcnt != 1) { |
168 | 0 | Py_XDECREF(newitem); |
169 | 0 | PyErr_BadInternalCall(); |
170 | 0 | return -1; |
171 | 0 | } |
172 | 0 | if (i < 0 || i >= Py_SIZE(op)) { |
173 | 0 | Py_XDECREF(newitem); |
174 | 0 | PyErr_SetString(PyExc_IndexError, |
175 | 0 | "tuple assignment index out of range"); |
176 | 0 | return -1; |
177 | 0 | } |
178 | 0 | p = ((PyTupleObject *)op) -> ob_item + i; |
179 | 0 | Py_XSETREF(*p, newitem); |
180 | 0 | return 0; |
181 | 0 | } |
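PyTuple_SetItem steals a reference to newitem and, as the refcount check above shows, requires the caller to hold the only reference to the tuple, which is why it is normally used immediately after PyTuple_New. A usage sketch (error handling abbreviated; 'pair' is a hypothetical variable):

    PyObject *pair = PyTuple_New(2);                          /* slots start out NULL */
    if (pair != NULL) {
        PyTuple_SetItem(pair, 0, PyLong_FromLong(1));         /* steals the reference */
        PyTuple_SetItem(pair, 1, PyUnicode_FromString("two")); /* ditto */
    }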
182 | | |
183 | | void |
184 | | _PyTuple_MaybeUntrack(PyObject *op) |
185 | 31.9k | { |
186 | 31.9k | PyTupleObject *t; |
187 | 31.9k | Py_ssize_t i, n; |
188 | | |
189 | 31.9k | if (!PyTuple_CheckExact(op) || !_PyObject_GC_IS_TRACKED(op)) |
190 | 0 | return; |
191 | 31.9k | t = (PyTupleObject *) op; |
192 | 31.9k | n = Py_SIZE(t); |
193 | 141k | for (i = 0; i < n; i++) { |
194 | 113k | PyObject *elt = PyTuple_GET_ITEM(t, i); |
195 | | /* Tuples with NULL elements aren't |
196 | | fully constructed, don't untrack |
197 | | them yet. */ |
198 | 113k | if (!elt || |
199 | 113k | _PyObject_GC_MAY_BE_TRACKED(elt)) |
200 | 4.10k | return; |
201 | 113k | } |
202 | | #ifdef SHOW_TRACK_COUNT |
203 | | count_tracked--; |
204 | | count_untracked++; |
205 | | #endif |
206 | 27.8k | _PyObject_GC_UNTRACK(op); |
207 | 27.8k | } |
208 | | |
209 | | PyObject * |
210 | | PyTuple_Pack(Py_ssize_t n, ...) |
211 | 4.92k | { |
212 | 4.92k | Py_ssize_t i; |
213 | 4.92k | PyObject *o; |
214 | 4.92k | PyObject *result; |
215 | 4.92k | PyObject **items; |
216 | 4.92k | va_list vargs; |
217 | | |
218 | 4.92k | va_start(vargs, n); |
219 | 4.92k | result = PyTuple_New(n); |
220 | 4.92k | if (result == NULL) { |
221 | 0 | va_end(vargs); |
222 | 0 | return NULL; |
223 | 0 | } |
224 | 4.92k | items = ((PyTupleObject *)result)->ob_item; |
225 | 11.1k | for (i = 0; i < n; i++) { |
226 | 6.23k | o = va_arg(vargs, PyObject *); |
227 | 6.23k | Py_INCREF(o); |
228 | 6.23k | items[i] = o; |
229 | 6.23k | } |
230 | 4.92k | va_end(vargs); |
231 | 4.92k | return result; |
232 | 4.92k | } |
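Unlike PyTuple_SetItem, PyTuple_Pack borrows its arguments and INCREFs each one itself, so the caller keeps its own references. A usage sketch ('first' and 'second' are hypothetical objects):

    PyObject *pair = PyTuple_Pack(2, first, second);   /* new reference, or NULL on error */
    /* behaves like Py_BuildValue("(OO)", first, second) */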
233 | | |
234 | | |
235 | | /* Methods */ |
236 | | |
237 | | static void |
238 | | tupledealloc(PyTupleObject *op) |
239 | 47.8k | { |
240 | 47.8k | Py_ssize_t i; |
241 | 47.8k | Py_ssize_t len = Py_SIZE(op); |
242 | 47.8k | PyObject_GC_UnTrack(op); |
243 | 47.8k | Py_TRASHCAN_BEGIN(op, tupledealloc) |
244 | 47.8k | if (len > 0) { |
245 | 47.8k | i = len; |
246 | 198k | while (--i >= 0) |
247 | 150k | Py_XDECREF(op->ob_item[i]); |
248 | 47.8k | #if PyTuple_MAXSAVESIZE > 0 |
249 | 47.8k | if (len < PyTuple_MAXSAVESIZE && |
250 | 47.8k | numfree[len] < PyTuple_MAXFREELIST && |
251 | 47.8k | Py_TYPE(op) == &PyTuple_Type) |
252 | 47.1k | { |
253 | 47.1k | op->ob_item[0] = (PyObject *) free_list[len]; |
254 | 47.1k | numfree[len]++; |
255 | 47.1k | free_list[len] = op; |
256 | 47.1k | goto done; /* return */ |
257 | 47.1k | } |
258 | 47.8k | #endif |
259 | 47.8k | } |
260 | 680 | Py_TYPE(op)->tp_free((PyObject *)op); |
261 | 47.8k | done: |
262 | 47.8k | Py_TRASHCAN_END |
263 | 47.8k | } |
264 | | |
265 | | static PyObject * |
266 | | tuplerepr(PyTupleObject *v) |
267 | 2 | { |
268 | 2 | Py_ssize_t i, n; |
269 | 2 | _PyUnicodeWriter writer; |
270 | | |
271 | 2 | n = Py_SIZE(v); |
272 | 2 | if (n == 0) |
273 | 0 | return PyUnicode_FromString("()"); |
274 | | |
275 | | /* While not mutable, it is still possible to end up with a cycle in a |
276 | | tuple through an object that stores itself within a tuple (and thus |
277 | | infinitely asks for the repr of itself). This should only be |
278 | | possible within a type. */ |
279 | 2 | i = Py_ReprEnter((PyObject *)v); |
280 | 2 | if (i != 0) { |
281 | 0 | return i > 0 ? PyUnicode_FromString("(...)") : NULL; |
282 | 0 | } |
283 | | |
284 | 2 | _PyUnicodeWriter_Init(&writer); |
285 | 2 | writer.overallocate = 1; |
286 | 2 | if (Py_SIZE(v) > 1) { |
287 | | /* "(" + "1" + ", 2" * (len - 1) + ")" */ |
288 | 2 | writer.min_length = 1 + 1 + (2 + 1) * (Py_SIZE(v) - 1) + 1; |
289 | 2 | } |
290 | 0 | else { |
291 | | /* "(1,)" */ |
292 | 0 | writer.min_length = 4; |
293 | 0 | } |
294 | | |
295 | 2 | if (_PyUnicodeWriter_WriteChar(&writer, '(') < 0) |
296 | 0 | goto error; |
297 | | |
298 | | /* Do repr() on each element. */ |
299 | 11 | for (i = 0; i < n; ++i) { |
300 | 9 | PyObject *s; |
301 | | |
302 | 9 | if (i > 0) { |
303 | 7 | if (_PyUnicodeWriter_WriteASCIIString(&writer, ", ", 2) < 0) |
304 | 0 | goto error; |
305 | 7 | } |
306 | | |
307 | 9 | s = PyObject_Repr(v->ob_item[i]); |
308 | 9 | if (s == NULL) |
309 | 0 | goto error; |
310 | | |
311 | 9 | if (_PyUnicodeWriter_WriteStr(&writer, s) < 0) { |
312 | 0 | Py_DECREF(s); |
313 | 0 | goto error; |
314 | 0 | } |
315 | 9 | Py_DECREF(s); |
316 | 9 | } |
317 | | |
318 | 2 | writer.overallocate = 0; |
319 | 2 | if (n > 1) { |
320 | 2 | if (_PyUnicodeWriter_WriteChar(&writer, ')') < 0) |
321 | 0 | goto error; |
322 | 2 | } |
323 | 0 | else { |
324 | 0 | if (_PyUnicodeWriter_WriteASCIIString(&writer, ",)", 2) < 0) |
325 | 0 | goto error; |
326 | 0 | } |
327 | | |
328 | 2 | Py_ReprLeave((PyObject *)v); |
329 | 2 | return _PyUnicodeWriter_Finish(&writer); |
330 | | |
331 | 0 | error: |
332 | 0 | _PyUnicodeWriter_Dealloc(&writer); |
333 | 0 | Py_ReprLeave((PyObject *)v); |
334 | 0 | return NULL; |
335 | 2 | } |
336 | | |
337 | | |
338 | | /* Hash for tuples. This is a slightly simplified version of the xxHash |
339 | | non-cryptographic hash: |
340 | | - we do not use any parallelism, there is only 1 accumulator. |
341 | | - we drop the final mixing since this is just a permutation of the |
342 | | output space: it does not help against collisions. |
343 | | - at the end, we mangle the length with a single constant. |
344 | | For the xxHash specification, see |
345 | | https://github.com/Cyan4973/xxHash/blob/master/doc/xxhash_spec.md |
346 | | |
347 | | Below are the official constants from the xxHash specification. Optimizing |
348 | | compilers should emit a single "rotate" instruction for the |
349 | | _PyHASH_XXROTATE() expansion. If that doesn't happen for some important |
350 | | platform, the macro could be changed to expand to a platform-specific rotate |
351 | | spelling instead. |
352 | | */ |
353 | | #if SIZEOF_PY_UHASH_T > 4 |
354 | 1.95k | #define _PyHASH_XXPRIME_1 ((Py_uhash_t)11400714785074694791ULL) |
355 | 1.95k | #define _PyHASH_XXPRIME_2 ((Py_uhash_t)14029467366897019727ULL) |
356 | 1.99k | #define _PyHASH_XXPRIME_5 ((Py_uhash_t)2870177450012600261ULL) |
357 | 1.95k | #define _PyHASH_XXROTATE(x) ((x << 31) | (x >> 33)) /* Rotate left 31 bits */ |
358 | | #else |
359 | | #define _PyHASH_XXPRIME_1 ((Py_uhash_t)2654435761UL) |
360 | | #define _PyHASH_XXPRIME_2 ((Py_uhash_t)2246822519UL) |
361 | | #define _PyHASH_XXPRIME_5 ((Py_uhash_t)374761393UL) |
362 | | #define _PyHASH_XXROTATE(x) ((x << 13) | (x >> 19)) /* Rotate left 13 bits */ |
363 | | #endif |
364 | | |
365 | | /* Tests have shown that it's not worth caching the hash value, see |
366 | | https://bugs.python.org/issue9685 */ |
367 | | static Py_hash_t |
368 | | tuplehash(PyTupleObject *v) |
369 | 996 | { |
370 | 996 | Py_ssize_t i, len = Py_SIZE(v); |
371 | 996 | PyObject **item = v->ob_item; |
372 | | |
373 | 996 | Py_uhash_t acc = _PyHASH_XXPRIME_5; |
374 | 2.95k | for (i = 0; i < len; i++) { |
375 | 1.95k | Py_uhash_t lane = PyObject_Hash(item[i]); |
376 | 1.95k | if (lane == (Py_uhash_t)-1) { |
377 | 0 | return -1; |
378 | 0 | } |
379 | 1.95k | acc += lane * _PyHASH_XXPRIME_2; |
380 | 1.95k | acc = _PyHASH_XXROTATE(acc); |
381 | 1.95k | acc *= _PyHASH_XXPRIME_1; |
382 | 1.95k | } |
383 | | |
384 | | /* Add input length, mangled to keep the historical value of hash(()). */ |
385 | 996 | acc += len ^ (_PyHASH_XXPRIME_5 ^ 3527539UL); |
386 | | |
387 | 996 | if (acc == (Py_uhash_t)-1) { |
388 | 0 | return 1546275796; |
389 | 0 | } |
390 | 996 | return acc; |
391 | 996 | } |
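The loop in tuplehash() can be read as a pure combining function over the per-item hashes. A hedged restatement, reusing the macros defined above (sketch_combine is a hypothetical name, not part of CPython):

    /* Sketch: fold already-computed item hashes exactly as tuplehash() does. */
    static Py_uhash_t
    sketch_combine(const Py_uhash_t *lanes, Py_ssize_t len)
    {
        Py_uhash_t acc = _PyHASH_XXPRIME_5;
        for (Py_ssize_t i = 0; i < len; i++) {
            acc += lanes[i] * _PyHASH_XXPRIME_2;
            acc = _PyHASH_XXROTATE(acc);
            acc *= _PyHASH_XXPRIME_1;
        }
        acc += len ^ (_PyHASH_XXPRIME_5 ^ 3527539UL);     /* mix in the length */
        return acc == (Py_uhash_t)-1 ? 1546275796 : acc;  /* -1 is reserved for errors */
    }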
392 | | |
393 | | static Py_ssize_t |
394 | | tuplelength(PyTupleObject *a) |
395 | 610 | { |
396 | 610 | return Py_SIZE(a); |
397 | 610 | } |
398 | | |
399 | | static int |
400 | | tuplecontains(PyTupleObject *a, PyObject *el) |
401 | 1.00k | { |
402 | 1.00k | Py_ssize_t i; |
403 | 1.00k | int cmp; |
404 | | |
405 | 9.05k | for (i = 0, cmp = 0 ; cmp == 0 && i < Py_SIZE(a); ++i) |
406 | 8.04k | cmp = PyObject_RichCompareBool(el, PyTuple_GET_ITEM(a, i), |
407 | 8.04k | Py_EQ); |
408 | 1.00k | return cmp; |
409 | 1.00k | } |
410 | | |
411 | | static PyObject * |
412 | | tupleitem(PyTupleObject *a, Py_ssize_t i) |
413 | 1.94k | { |
414 | 1.94k | if (i < 0 || i >= Py_SIZE(a)) { |
415 | 0 | PyErr_SetString(PyExc_IndexError, "tuple index out of range"); |
416 | 0 | return NULL; |
417 | 0 | } |
418 | 1.94k | Py_INCREF(a->ob_item[i]); |
419 | 1.94k | return a->ob_item[i]; |
420 | 1.94k | } |
421 | | |
422 | | PyObject * |
423 | | _PyTuple_FromArray(PyObject *const *src, Py_ssize_t n) |
424 | 36.5k | { |
425 | 36.5k | PyTupleObject *tuple = (PyTupleObject *)PyTuple_New(n); |
426 | 36.5k | if (tuple == NULL) { |
427 | 0 | return NULL; |
428 | 0 | } |
429 | 36.5k | PyObject **dst = tuple->ob_item; |
430 | 93.7k | for (Py_ssize_t i = 0; i < n; i++) { |
431 | 57.1k | PyObject *item = src[i]; |
432 | 57.1k | Py_INCREF(item); |
433 | 57.1k | dst[i] = item; |
434 | 57.1k | } |
435 | 36.5k | return (PyObject *)tuple; |
436 | 36.5k | } |
437 | | |
438 | | static PyObject * |
439 | | tupleslice(PyTupleObject *a, Py_ssize_t ilow, |
440 | | Py_ssize_t ihigh) |
441 | 2.18k | { |
442 | 2.18k | if (ilow < 0) |
443 | 0 | ilow = 0; |
444 | 2.18k | if (ihigh > Py_SIZE(a)) |
445 | 1 | ihigh = Py_SIZE(a); |
446 | 2.18k | if (ihigh < ilow) |
447 | 0 | ihigh = ilow; |
448 | 2.18k | if (ilow == 0 && ihigh == Py_SIZE(a) && PyTuple_CheckExact(a)) { |
449 | 0 | Py_INCREF(a); |
450 | 0 | return (PyObject *)a; |
451 | 0 | } |
452 | 2.18k | return _PyTuple_FromArray(a->ob_item + ilow, ihigh - ilow); |
453 | 2.18k | } |
454 | | |
455 | | PyObject * |
456 | | PyTuple_GetSlice(PyObject *op, Py_ssize_t i, Py_ssize_t j) |
457 | 2.18k | { |
458 | 2.18k | if (op == NULL || !PyTuple_Check(op)) { |
459 | 0 | PyErr_BadInternalCall(); |
460 | 0 | return NULL; |
461 | 0 | } |
462 | 2.18k | return tupleslice((PyTupleObject *)op, i, j); |
463 | 2.18k | } |
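PyTuple_GetSlice returns a new reference; when the slice covers the whole of an exact tuple, tupleslice() simply INCREFs and returns the original object. A usage sketch ('tup' is a hypothetical tuple):

    PyObject *head = PyTuple_GetSlice(tup, 0, 2);   /* new reference to tup[0:2], or NULL on error */
    if (head != NULL) {
        /* ... use head ... */
        Py_DECREF(head);
    }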
464 | | |
465 | | static PyObject * |
466 | | tupleconcat(PyTupleObject *a, PyObject *bb) |
467 | 15 | { |
468 | 15 | Py_ssize_t size; |
469 | 15 | Py_ssize_t i; |
470 | 15 | PyObject **src, **dest; |
471 | 15 | PyTupleObject *np; |
472 | 15 | if (Py_SIZE(a) == 0 && PyTuple_CheckExact(bb)) { |
473 | 0 | Py_INCREF(bb); |
474 | 0 | return bb; |
475 | 0 | } |
476 | 15 | if (!PyTuple_Check(bb)) { |
477 | 0 | PyErr_Format(PyExc_TypeError, |
478 | 0 | "can only concatenate tuple (not \"%.200s\") to tuple", |
479 | 0 | Py_TYPE(bb)->tp_name); |
480 | 0 | return NULL; |
481 | 0 | } |
482 | 15 | #define b ((PyTupleObject *)bb) |
483 | 15 | if (Py_SIZE(b) == 0 && PyTuple_CheckExact(a)) { |
484 | 0 | Py_INCREF(a); |
485 | 0 | return (PyObject *)a; |
486 | 0 | } |
487 | 15 | if (Py_SIZE(a) > PY_SSIZE_T_MAX - Py_SIZE(b)) |
488 | 0 | return PyErr_NoMemory(); |
489 | 15 | size = Py_SIZE(a) + Py_SIZE(b); |
490 | 15 | np = (PyTupleObject *) PyTuple_New(size); |
491 | 15 | if (np == NULL) { |
492 | 0 | return NULL; |
493 | 0 | } |
494 | 15 | src = a->ob_item; |
495 | 15 | dest = np->ob_item; |
496 | 57 | for (i = 0; i < Py_SIZE(a); i++) { |
497 | 42 | PyObject *v = src[i]; |
498 | 42 | Py_INCREF(v); |
499 | 42 | dest[i] = v; |
500 | 42 | } |
501 | 15 | src = b->ob_item; |
502 | 15 | dest = np->ob_item + Py_SIZE(a); |
503 | 33 | for (i = 0; i < Py_SIZE(b); i++) { |
504 | 18 | PyObject *v = src[i]; |
505 | 18 | Py_INCREF(v); |
506 | 18 | dest[i] = v; |
507 | 18 | } |
508 | 15 | return (PyObject *)np; |
509 | 15 | #undef b |
510 | 15 | } |
511 | | |
512 | | static PyObject * |
513 | | tuplerepeat(PyTupleObject *a, Py_ssize_t n) |
514 | 0 | { |
515 | 0 | Py_ssize_t i, j; |
516 | 0 | Py_ssize_t size; |
517 | 0 | PyTupleObject *np; |
518 | 0 | PyObject **p, **items; |
519 | 0 | if (n < 0) |
520 | 0 | n = 0; |
521 | 0 | if (Py_SIZE(a) == 0 || n == 1) { |
522 | 0 | if (PyTuple_CheckExact(a)) { |
523 | | /* Since tuples are immutable, we can return a shared |
524 | | copy in this case */ |
525 | 0 | Py_INCREF(a); |
526 | 0 | return (PyObject *)a; |
527 | 0 | } |
528 | 0 | if (Py_SIZE(a) == 0) |
529 | 0 | return PyTuple_New(0); |
530 | 0 | } |
531 | 0 | if (n > PY_SSIZE_T_MAX / Py_SIZE(a)) |
532 | 0 | return PyErr_NoMemory(); |
533 | 0 | size = Py_SIZE(a) * n; |
534 | 0 | np = (PyTupleObject *) PyTuple_New(size); |
535 | 0 | if (np == NULL) |
536 | 0 | return NULL; |
537 | 0 | p = np->ob_item; |
538 | 0 | items = a->ob_item; |
539 | 0 | for (i = 0; i < n; i++) { |
540 | 0 | for (j = 0; j < Py_SIZE(a); j++) { |
541 | 0 | *p = items[j]; |
542 | 0 | Py_INCREF(*p); |
543 | 0 | p++; |
544 | 0 | } |
545 | 0 | } |
546 | 0 | return (PyObject *) np; |
547 | 0 | } |
548 | | |
549 | | /*[clinic input] |
550 | | tuple.index |
551 | | |
552 | | value: object |
553 | | start: slice_index(accept={int}) = 0 |
554 | | stop: slice_index(accept={int}, c_default="PY_SSIZE_T_MAX") = sys.maxsize |
555 | | / |
556 | | |
557 | | Return first index of value. |
558 | | |
559 | | Raises ValueError if the value is not present. |
560 | | [clinic start generated code]*/ |
561 | | |
562 | | static PyObject * |
563 | | tuple_index_impl(PyTupleObject *self, PyObject *value, Py_ssize_t start, |
564 | | Py_ssize_t stop) |
565 | | /*[clinic end generated code: output=07b6f9f3cb5c33eb input=fb39e9874a21fe3f]*/ |
566 | 0 | { |
567 | 0 | Py_ssize_t i; |
568 | |
569 | 0 | if (start < 0) { |
570 | 0 | start += Py_SIZE(self); |
571 | 0 | if (start < 0) |
572 | 0 | start = 0; |
573 | 0 | } |
574 | 0 | if (stop < 0) { |
575 | 0 | stop += Py_SIZE(self); |
576 | 0 | } |
577 | 0 | else if (stop > Py_SIZE(self)) { |
578 | 0 | stop = Py_SIZE(self); |
579 | 0 | } |
580 | 0 | for (i = start; i < stop; i++) { |
581 | 0 | int cmp = PyObject_RichCompareBool(self->ob_item[i], value, Py_EQ); |
582 | 0 | if (cmp > 0) |
583 | 0 | return PyLong_FromSsize_t(i); |
584 | 0 | else if (cmp < 0) |
585 | 0 | return NULL; |
586 | 0 | } |
587 | 0 | PyErr_SetString(PyExc_ValueError, "tuple.index(x): x not in tuple"); |
588 | 0 | return NULL; |
589 | 0 | } |
590 | | |
591 | | /*[clinic input] |
592 | | tuple.count |
593 | | |
594 | | value: object |
595 | | / |
596 | | |
597 | | Return number of occurrences of value. |
598 | | [clinic start generated code]*/ |
599 | | |
600 | | static PyObject * |
601 | | tuple_count(PyTupleObject *self, PyObject *value) |
602 | | /*[clinic end generated code: output=aa927affc5a97605 input=531721aff65bd772]*/ |
603 | 0 | { |
604 | 0 | Py_ssize_t count = 0; |
605 | 0 | Py_ssize_t i; |
606 | |
607 | 0 | for (i = 0; i < Py_SIZE(self); i++) { |
608 | 0 | int cmp = PyObject_RichCompareBool(self->ob_item[i], value, Py_EQ); |
609 | 0 | if (cmp > 0) |
610 | 0 | count++; |
611 | 0 | else if (cmp < 0) |
612 | 0 | return NULL; |
613 | 0 | } |
614 | 0 | return PyLong_FromSsize_t(count); |
615 | 0 | } |
616 | | |
617 | | static int |
618 | | tupletraverse(PyTupleObject *o, visitproc visit, void *arg) |
619 | 63.9k | { |
620 | 63.9k | Py_ssize_t i; |
621 | | |
622 | 315k | for (i = Py_SIZE(o); --i >= 0; ) |
623 | 251k | Py_VISIT(o->ob_item[i]); |
624 | 63.9k | return 0; |
625 | 63.9k | } |
626 | | |
627 | | static PyObject * |
628 | | tuplerichcompare(PyObject *v, PyObject *w, int op) |
629 | 88 | { |
630 | 88 | PyTupleObject *vt, *wt; |
631 | 88 | Py_ssize_t i; |
632 | 88 | Py_ssize_t vlen, wlen; |
633 | | |
634 | 88 | if (!PyTuple_Check(v) || !PyTuple_Check(w)) |
635 | 0 | Py_RETURN_NOTIMPLEMENTED; |
636 | | |
637 | 88 | vt = (PyTupleObject *)v; |
638 | 88 | wt = (PyTupleObject *)w; |
639 | | |
640 | 88 | vlen = Py_SIZE(vt); |
641 | 88 | wlen = Py_SIZE(wt); |
642 | | |
643 | | /* Note: the corresponding code for lists has an "early out" test |
644 | | * here when op is EQ or NE and the lengths differ. That pays there, |
645 | | * but Tim was unable to find any real code where EQ/NE tuple |
646 | | * compares don't have the same length, so testing for it here would |
647 | | * have cost without benefit. |
648 | | */ |
649 | | |
650 | | /* Search for the first index where items are different. |
651 | | * Note that because tuples are immutable, it's safe to reuse |
652 | | * vlen and wlen across the comparison calls. |
653 | | */ |
654 | 249 | for (i = 0; i < vlen && i < wlen; i++) { |
655 | 171 | int k = PyObject_RichCompareBool(vt->ob_item[i], |
656 | 171 | wt->ob_item[i], Py_EQ); |
657 | 171 | if (k < 0) |
658 | 0 | return NULL; |
659 | 171 | if (!k) |
660 | 10 | break; |
661 | 171 | } |
662 | | |
663 | 88 | if (i >= vlen || i >= wlen) { |
664 | | /* No more items to compare -- compare sizes */ |
665 | 78 | Py_RETURN_RICHCOMPARE(vlen, wlen, op); |
666 | 78 | } |
667 | | |
668 | | /* We have an item that differs -- shortcuts for EQ/NE */ |
669 | 10 | if (op == Py_EQ) { |
670 | 2 | Py_RETURN_FALSE; |
671 | 2 | } |
672 | 8 | if (op == Py_NE) { |
673 | 8 | Py_RETURN_TRUE; |
674 | 8 | } |
675 | | |
676 | | /* Compare the final item again using the proper operator */ |
677 | 0 | return PyObject_RichCompare(vt->ob_item[i], wt->ob_item[i], op); |
678 | 8 | } |
679 | | |
680 | | static PyObject * |
681 | | tuple_subtype_new(PyTypeObject *type, PyObject *iterable); |
682 | | |
683 | | /*[clinic input] |
684 | | @classmethod |
685 | | tuple.__new__ as tuple_new |
686 | | iterable: object(c_default="NULL") = () |
687 | | / |
688 | | |
689 | | Built-in immutable sequence. |
690 | | |
691 | | If no argument is given, the constructor returns an empty tuple. |
692 | | If iterable is specified the tuple is initialized from iterable's items. |
693 | | |
694 | | If the argument is a tuple, the return value is the same object. |
695 | | [clinic start generated code]*/ |
696 | | |
697 | | static PyObject * |
698 | | tuple_new_impl(PyTypeObject *type, PyObject *iterable) |
699 | | /*[clinic end generated code: output=4546d9f0d469bce7 input=86963bcde633b5a2]*/ |
700 | 336 | { |
701 | 336 | if (type != &PyTuple_Type) |
702 | 29 | return tuple_subtype_new(type, iterable); |
703 | | |
704 | 307 | if (iterable == NULL) |
705 | 0 | return PyTuple_New(0); |
706 | 307 | else |
707 | 307 | return PySequence_Tuple(iterable); |
708 | 307 | } |
709 | | |
710 | | static PyObject * |
711 | | tuple_subtype_new(PyTypeObject *type, PyObject *iterable) |
712 | 29 | { |
713 | 29 | PyObject *tmp, *newobj, *item; |
714 | 29 | Py_ssize_t i, n; |
715 | | |
716 | 29 | assert(PyType_IsSubtype(type, &PyTuple_Type)); |
717 | 29 | tmp = tuple_new_impl(&PyTuple_Type, iterable); |
718 | 29 | if (tmp == NULL) |
719 | 0 | return NULL; |
720 | 29 | assert(PyTuple_Check(tmp)); |
721 | 29 | newobj = type->tp_alloc(type, n = PyTuple_GET_SIZE(tmp)); |
722 | 29 | if (newobj == NULL) { |
723 | 0 | Py_DECREF(tmp); |
724 | 0 | return NULL; |
725 | 0 | } |
726 | 145 | for (i = 0; i < n; i++) { |
727 | 116 | item = PyTuple_GET_ITEM(tmp, i); |
728 | 116 | Py_INCREF(item); |
729 | 116 | PyTuple_SET_ITEM(newobj, i, item); |
730 | 116 | } |
731 | 29 | Py_DECREF(tmp); |
732 | 29 | return newobj; |
733 | 29 | } |
734 | | |
735 | | static PySequenceMethods tuple_as_sequence = { |
736 | | (lenfunc)tuplelength, /* sq_length */ |
737 | | (binaryfunc)tupleconcat, /* sq_concat */ |
738 | | (ssizeargfunc)tuplerepeat, /* sq_repeat */ |
739 | | (ssizeargfunc)tupleitem, /* sq_item */ |
740 | | 0, /* sq_slice */ |
741 | | 0, /* sq_ass_item */ |
742 | | 0, /* sq_ass_slice */ |
743 | | (objobjproc)tuplecontains, /* sq_contains */ |
744 | | }; |
745 | | |
746 | | static PyObject* |
747 | | tuplesubscript(PyTupleObject* self, PyObject* item) |
748 | 1.78k | { |
749 | 1.78k | if (PyIndex_Check(item)) { |
750 | 1.76k | Py_ssize_t i = PyNumber_AsSsize_t(item, PyExc_IndexError); |
751 | 1.76k | if (i == -1 && PyErr_Occurred()) |
752 | 0 | return NULL; |
753 | 1.76k | if (i < 0) |
754 | 24 | i += PyTuple_GET_SIZE(self); |
755 | 1.76k | return tupleitem(self, i); |
756 | 1.76k | } |
757 | 14 | else if (PySlice_Check(item)) { |
758 | 14 | Py_ssize_t start, stop, step, slicelength, i; |
759 | 14 | size_t cur; |
760 | 14 | PyObject* result; |
761 | 14 | PyObject* it; |
762 | 14 | PyObject **src, **dest; |
763 | | |
764 | 14 | if (PySlice_Unpack(item, &start, &stop, &step) < 0) { |
765 | 0 | return NULL; |
766 | 0 | } |
767 | 14 | slicelength = PySlice_AdjustIndices(PyTuple_GET_SIZE(self), &start, |
768 | 14 | &stop, step); |
769 | | |
770 | 14 | if (slicelength <= 0) { |
771 | 0 | return PyTuple_New(0); |
772 | 0 | } |
773 | 14 | else if (start == 0 && step == 1 && |
774 | 14 | slicelength == PyTuple_GET_SIZE(self) && |
775 | 14 | PyTuple_CheckExact(self)) { |
776 | 0 | Py_INCREF(self); |
777 | 0 | return (PyObject *)self; |
778 | 0 | } |
779 | 14 | else { |
780 | 14 | result = PyTuple_New(slicelength); |
781 | 14 | if (!result) return NULL; |
782 | | |
783 | 14 | src = self->ob_item; |
784 | 14 | dest = ((PyTupleObject *)result)->ob_item; |
785 | 42 | for (cur = start, i = 0; i < slicelength; |
786 | 28 | cur += step, i++) { |
787 | 28 | it = src[cur]; |
788 | 28 | Py_INCREF(it); |
789 | 28 | dest[i] = it; |
790 | 28 | } |
791 | | |
792 | 14 | return result; |
793 | 14 | } |
794 | 14 | } |
795 | 0 | else { |
796 | 0 | PyErr_Format(PyExc_TypeError, |
797 | 0 | "tuple indices must be integers or slices, not %.200s", |
798 | 0 | Py_TYPE(item)->tp_name); |
799 | 0 | return NULL; |
800 | 0 | } |
801 | 1.78k | } |
802 | | |
803 | | /*[clinic input] |
804 | | tuple.__getnewargs__ |
805 | | [clinic start generated code]*/ |
806 | | |
807 | | static PyObject * |
808 | | tuple___getnewargs___impl(PyTupleObject *self) |
809 | | /*[clinic end generated code: output=25e06e3ee56027e2 input=1aeb4b286a21639a]*/ |
810 | 0 | { |
811 | 0 | return Py_BuildValue("(N)", tupleslice(self, 0, Py_SIZE(self))); |
812 | 0 | } |
813 | | |
814 | | static PyMethodDef tuple_methods[] = { |
815 | | TUPLE___GETNEWARGS___METHODDEF |
816 | | TUPLE_INDEX_METHODDEF |
817 | | TUPLE_COUNT_METHODDEF |
818 | | {NULL, NULL} /* sentinel */ |
819 | | }; |
820 | | |
821 | | static PyMappingMethods tuple_as_mapping = { |
822 | | (lenfunc)tuplelength, |
823 | | (binaryfunc)tuplesubscript, |
824 | | 0 |
825 | | }; |
826 | | |
827 | | static PyObject *tuple_iter(PyObject *seq); |
828 | | |
829 | | PyTypeObject PyTuple_Type = { |
830 | | PyVarObject_HEAD_INIT(&PyType_Type, 0) |
831 | | "tuple", |
832 | | sizeof(PyTupleObject) - sizeof(PyObject *), |
833 | | sizeof(PyObject *), |
834 | | (destructor)tupledealloc, /* tp_dealloc */ |
835 | | 0, /* tp_vectorcall_offset */ |
836 | | 0, /* tp_getattr */ |
837 | | 0, /* tp_setattr */ |
838 | | 0, /* tp_as_async */ |
839 | | (reprfunc)tuplerepr, /* tp_repr */ |
840 | | 0, /* tp_as_number */ |
841 | | &tuple_as_sequence, /* tp_as_sequence */ |
842 | | &tuple_as_mapping, /* tp_as_mapping */ |
843 | | (hashfunc)tuplehash, /* tp_hash */ |
844 | | 0, /* tp_call */ |
845 | | 0, /* tp_str */ |
846 | | PyObject_GenericGetAttr, /* tp_getattro */ |
847 | | 0, /* tp_setattro */ |
848 | | 0, /* tp_as_buffer */ |
849 | | Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC | |
850 | | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_TUPLE_SUBCLASS, /* tp_flags */ |
851 | | tuple_new__doc__, /* tp_doc */ |
852 | | (traverseproc)tupletraverse, /* tp_traverse */ |
853 | | 0, /* tp_clear */ |
854 | | tuplerichcompare, /* tp_richcompare */ |
855 | | 0, /* tp_weaklistoffset */ |
856 | | tuple_iter, /* tp_iter */ |
857 | | 0, /* tp_iternext */ |
858 | | tuple_methods, /* tp_methods */ |
859 | | 0, /* tp_members */ |
860 | | 0, /* tp_getset */ |
861 | | 0, /* tp_base */ |
862 | | 0, /* tp_dict */ |
863 | | 0, /* tp_descr_get */ |
864 | | 0, /* tp_descr_set */ |
865 | | 0, /* tp_dictoffset */ |
866 | | 0, /* tp_init */ |
867 | | 0, /* tp_alloc */ |
868 | | tuple_new, /* tp_new */ |
869 | | PyObject_GC_Del, /* tp_free */ |
870 | | }; |
871 | | |
872 | | /* The following function breaks the notion that tuples are immutable: |
873 | | it changes the size of a tuple. We get away with this only if there |
874 | | is only one module referencing the object. You can also think of it |
875 | | as creating a new tuple object and destroying the old one, only more |
876 | | efficiently. In any case, don't use this if the tuple may already be |
877 | | known to some other part of the code. */ |
878 | | |
879 | | int |
880 | | _PyTuple_Resize(PyObject **pv, Py_ssize_t newsize) |
881 | 39 | { |
882 | 39 | PyTupleObject *v; |
883 | 39 | PyTupleObject *sv; |
884 | 39 | Py_ssize_t i; |
885 | 39 | Py_ssize_t oldsize; |
886 | | |
887 | 39 | v = (PyTupleObject *) *pv; |
888 | 39 | if (v == NULL || Py_TYPE(v) != &PyTuple_Type || |
889 | 39 | (Py_SIZE(v) != 0 && Py_REFCNT(v) != 1)) { |
890 | 0 | *pv = 0; |
891 | 0 | Py_XDECREF(v); |
892 | 0 | PyErr_BadInternalCall(); |
893 | 0 | return -1; |
894 | 0 | } |
895 | 39 | oldsize = Py_SIZE(v); |
896 | 39 | if (oldsize == newsize) |
897 | 0 | return 0; |
898 | | |
899 | 39 | if (oldsize == 0) { |
900 | | /* Empty tuples are often shared, so we should never |
901 | | resize them in-place even if we do own the only |
902 | | (current) reference */ |
903 | 0 | Py_DECREF(v); |
904 | 0 | *pv = PyTuple_New(newsize); |
905 | 0 | return *pv == NULL ? -1 : 0; |
906 | 0 | } |
907 | | |
908 | | /* XXX UNREF/NEWREF interface should be more symmetrical */ |
909 | 39 | _Py_DEC_REFTOTAL; |
910 | 39 | if (_PyObject_GC_IS_TRACKED(v)) |
911 | 39 | _PyObject_GC_UNTRACK(v); |
912 | 39 | _Py_ForgetReference((PyObject *) v); |
913 | | /* DECREF items deleted by shrinkage */ |
914 | 363 | for (i = newsize; i < oldsize; i++) { |
915 | 324 | Py_CLEAR(v->ob_item[i]); |
916 | 324 | } |
917 | 39 | sv = PyObject_GC_Resize(PyTupleObject, v, newsize); |
918 | 39 | if (sv == NULL) { |
919 | 0 | *pv = NULL; |
920 | 0 | PyObject_GC_Del(v); |
921 | 0 | return -1; |
922 | 0 | } |
923 | 39 | _Py_NewReference((PyObject *) sv); |
924 | | /* Zero out items added by growing */ |
925 | 39 | if (newsize > oldsize) |
926 | 3 | memset(&sv->ob_item[oldsize], 0, |
927 | 3 | sizeof(*sv->ob_item) * (newsize - oldsize)); |
928 | 39 | *pv = (PyObject *) sv; |
929 | 39 | _PyObject_GC_TRACK(sv); |
930 | 39 | return 0; |
931 | 39 | } |
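The usual caller pattern is to build items into a private working tuple and then call _PyTuple_Resize to shrink (or grow) it once the final length is known, as PySequence_Tuple does. A hedged sketch ('t' and 'used' are hypothetical; 't' must be the only reference to the tuple):

    if (_PyTuple_Resize(&t, used) < 0) {   /* resize in place */
        return NULL;                       /* on failure, t has been replaced with NULL */
    }
    /* t now has exactly 'used' slots; any slots added by growing are zeroed */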
932 | | |
933 | | int |
934 | | PyTuple_ClearFreeList(void) |
935 | 0 | { |
936 | 0 | int freelist_size = 0; |
937 | 0 | #if PyTuple_MAXSAVESIZE > 0 |
938 | 0 | int i; |
939 | 0 | for (i = 1; i < PyTuple_MAXSAVESIZE; i++) { |
940 | 0 | PyTupleObject *p, *q; |
941 | 0 | p = free_list[i]; |
942 | 0 | freelist_size += numfree[i]; |
943 | 0 | free_list[i] = NULL; |
944 | 0 | numfree[i] = 0; |
945 | 0 | while (p) { |
946 | 0 | q = p; |
947 | 0 | p = (PyTupleObject *)(p->ob_item[0]); |
948 | 0 | PyObject_GC_Del(q); |
949 | 0 | } |
950 | 0 | } |
951 | 0 | #endif |
952 | 0 | return freelist_size; |
953 | 0 | } |
954 | | |
955 | | void |
956 | | PyTuple_Fini(void) |
957 | 0 | { |
958 | 0 | #if PyTuple_MAXSAVESIZE > 0 |
959 | | /* empty tuples are used all over the place and applications may |
960 | | * rely on the fact that an empty tuple is a singleton. */ |
961 | 0 | Py_CLEAR(free_list[0]); |
962 | |
963 | 0 | (void)PyTuple_ClearFreeList(); |
964 | 0 | #endif |
965 | | #ifdef SHOW_TRACK_COUNT |
966 | | show_track(); |
967 | | #endif |
968 | 0 | } |
969 | | |
970 | | /*********************** Tuple Iterator **************************/ |
971 | | |
972 | | typedef struct { |
973 | | PyObject_HEAD |
974 | | Py_ssize_t it_index; |
975 | | PyTupleObject *it_seq; /* Set to NULL when iterator is exhausted */ |
976 | | } tupleiterobject; |
977 | | |
978 | | static void |
979 | | tupleiter_dealloc(tupleiterobject *it) |
980 | 4.26k | { |
981 | 4.26k | _PyObject_GC_UNTRACK(it); |
982 | 4.26k | Py_XDECREF(it->it_seq); |
983 | 4.26k | PyObject_GC_Del(it); |
984 | 4.26k | } |
985 | | |
986 | | static int |
987 | | tupleiter_traverse(tupleiterobject *it, visitproc visit, void *arg) |
988 | 36 | { |
989 | 36 | Py_VISIT(it->it_seq); |
990 | 36 | return 0; |
991 | 36 | } |
992 | | |
993 | | static PyObject * |
994 | | tupleiter_next(tupleiterobject *it) |
995 | 15.4k | { |
996 | 15.4k | PyTupleObject *seq; |
997 | 15.4k | PyObject *item; |
998 | | |
999 | 15.4k | assert(it != NULL); |
1000 | 15.4k | seq = it->it_seq; |
1001 | 15.4k | if (seq == NULL) |
1002 | 0 | return NULL; |
1003 | 15.4k | assert(PyTuple_Check(seq)); |
1004 | | |
1005 | 15.4k | if (it->it_index < PyTuple_GET_SIZE(seq)) { |
1006 | 12.0k | item = PyTuple_GET_ITEM(seq, it->it_index); |
1007 | 12.0k | ++it->it_index; |
1008 | 12.0k | Py_INCREF(item); |
1009 | 12.0k | return item; |
1010 | 12.0k | } |
1011 | | |
1012 | 3.39k | it->it_seq = NULL; |
1013 | 3.39k | Py_DECREF(seq); |
1014 | 3.39k | return NULL; |
1015 | 15.4k | } |
1016 | | |
1017 | | static PyObject * |
1018 | | tupleiter_len(tupleiterobject *it, PyObject *Py_UNUSED(ignored)) |
1019 | 0 | { |
1020 | 0 | Py_ssize_t len = 0; |
1021 | 0 | if (it->it_seq) |
1022 | 0 | len = PyTuple_GET_SIZE(it->it_seq) - it->it_index; |
1023 | 0 | return PyLong_FromSsize_t(len); |
1024 | 0 | } |
1025 | | |
1026 | | PyDoc_STRVAR(length_hint_doc, "Private method returning an estimate of len(list(it))."); |
1027 | | |
1028 | | static PyObject * |
1029 | | tupleiter_reduce(tupleiterobject *it, PyObject *Py_UNUSED(ignored)) |
1030 | 0 | { |
1031 | 0 | _Py_IDENTIFIER(iter); |
1032 | 0 | if (it->it_seq) |
1033 | 0 | return Py_BuildValue("N(O)n", _PyEval_GetBuiltinId(&PyId_iter), |
1034 | 0 | it->it_seq, it->it_index); |
1035 | 0 | else |
1036 | 0 | return Py_BuildValue("N(())", _PyEval_GetBuiltinId(&PyId_iter)); |
1037 | 0 | } |
1038 | | |
1039 | | static PyObject * |
1040 | | tupleiter_setstate(tupleiterobject *it, PyObject *state) |
1041 | 0 | { |
1042 | 0 | Py_ssize_t index = PyLong_AsSsize_t(state); |
1043 | 0 | if (index == -1 && PyErr_Occurred()) |
1044 | 0 | return NULL; |
1045 | 0 | if (it->it_seq != NULL) { |
1046 | 0 | if (index < 0) |
1047 | 0 | index = 0; |
1048 | 0 | else if (index > PyTuple_GET_SIZE(it->it_seq)) |
1049 | 0 | index = PyTuple_GET_SIZE(it->it_seq); /* exhausted iterator */ |
1050 | 0 | it->it_index = index; |
1051 | 0 | } |
1052 | 0 | Py_RETURN_NONE; |
1053 | 0 | } |
1054 | | |
1055 | | PyDoc_STRVAR(reduce_doc, "Return state information for pickling."); |
1056 | | PyDoc_STRVAR(setstate_doc, "Set state information for unpickling."); |
1057 | | |
1058 | | static PyMethodDef tupleiter_methods[] = { |
1059 | | {"__length_hint__", (PyCFunction)tupleiter_len, METH_NOARGS, length_hint_doc}, |
1060 | | {"__reduce__", (PyCFunction)tupleiter_reduce, METH_NOARGS, reduce_doc}, |
1061 | | {"__setstate__", (PyCFunction)tupleiter_setstate, METH_O, setstate_doc}, |
1062 | | {NULL, NULL} /* sentinel */ |
1063 | | }; |
1064 | | |
1065 | | PyTypeObject PyTupleIter_Type = { |
1066 | | PyVarObject_HEAD_INIT(&PyType_Type, 0) |
1067 | | "tuple_iterator", /* tp_name */ |
1068 | | sizeof(tupleiterobject), /* tp_basicsize */ |
1069 | | 0, /* tp_itemsize */ |
1070 | | /* methods */ |
1071 | | (destructor)tupleiter_dealloc, /* tp_dealloc */ |
1072 | | 0, /* tp_vectorcall_offset */ |
1073 | | 0, /* tp_getattr */ |
1074 | | 0, /* tp_setattr */ |
1075 | | 0, /* tp_as_async */ |
1076 | | 0, /* tp_repr */ |
1077 | | 0, /* tp_as_number */ |
1078 | | 0, /* tp_as_sequence */ |
1079 | | 0, /* tp_as_mapping */ |
1080 | | 0, /* tp_hash */ |
1081 | | 0, /* tp_call */ |
1082 | | 0, /* tp_str */ |
1083 | | PyObject_GenericGetAttr, /* tp_getattro */ |
1084 | | 0, /* tp_setattro */ |
1085 | | 0, /* tp_as_buffer */ |
1086 | | Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC,/* tp_flags */ |
1087 | | 0, /* tp_doc */ |
1088 | | (traverseproc)tupleiter_traverse, /* tp_traverse */ |
1089 | | 0, /* tp_clear */ |
1090 | | 0, /* tp_richcompare */ |
1091 | | 0, /* tp_weaklistoffset */ |
1092 | | PyObject_SelfIter, /* tp_iter */ |
1093 | | (iternextfunc)tupleiter_next, /* tp_iternext */ |
1094 | | tupleiter_methods, /* tp_methods */ |
1095 | | 0, |
1096 | | }; |
1097 | | |
1098 | | static PyObject * |
1099 | | tuple_iter(PyObject *seq) |
1100 | 4.26k | { |
1101 | 4.26k | tupleiterobject *it; |
1102 | | |
1103 | 4.26k | if (!PyTuple_Check(seq)) { |
1104 | 0 | PyErr_BadInternalCall(); |
1105 | 0 | return NULL; |
1106 | 0 | } |
1107 | 4.26k | it = PyObject_GC_New(tupleiterobject, &PyTupleIter_Type); |
1108 | 4.26k | if (it == NULL) |
1109 | 0 | return NULL; |
1110 | 4.26k | it->it_index = 0; |
1111 | 4.26k | Py_INCREF(seq); |
1112 | 4.26k | it->it_seq = (PyTupleObject *)seq; |
1113 | 4.26k | _PyObject_GC_TRACK(it); |
1114 | 4.26k | return (PyObject *)it; |
1115 | 4.26k | } |
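From C, the iterator above is normally obtained through the abstract protocol rather than by calling tuple_iter() directly. A usage sketch ('tup' is a hypothetical tuple; note that PyIter_Next returns NULL both at exhaustion and on error):

    PyObject *it = PyObject_GetIter(tup);        /* calls tp_iter, i.e. tuple_iter() */
    if (it != NULL) {
        PyObject *item;
        while ((item = PyIter_Next(it)) != NULL) {
            /* ... use item ... */
            Py_DECREF(item);
        }
        Py_DECREF(it);
        if (PyErr_Occurred()) {
            /* an error was raised during iteration; propagate it */
        }
    }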