1
2 /* Tuple object implementation */
3
4 #include "Python.h"
5 #include "internal/pystate.h"
6 #include "accu.h"
7
8 /*[clinic input]
9 class tuple "PyTupleObject *" "&PyTuple_Type"
10 [clinic start generated code]*/
11 /*[clinic end generated code: output=da39a3ee5e6b4b0d input=f051ba3cfdf9a189]*/
12
13 #include "clinic/tupleobject.c.h"
14
15 /* Speed optimization to avoid frequent malloc/free of small tuples */
16 #ifndef PyTuple_MAXSAVESIZE
17 #define PyTuple_MAXSAVESIZE 20 /* Largest tuple to save on free list */
18 #endif
19 #ifndef PyTuple_MAXFREELIST
20 #define PyTuple_MAXFREELIST 2000 /* Maximum number of tuples of each size to save */
21 #endif
22
23 #if PyTuple_MAXSAVESIZE > 0
24 /* Entries 1 up to PyTuple_MAXSAVESIZE are free lists, entry 0 is the empty
25 tuple () of which at most one instance will be allocated.
26 */
27 static PyTupleObject *free_list[PyTuple_MAXSAVESIZE];
28 static int numfree[PyTuple_MAXSAVESIZE];
29 #endif
30 #ifdef COUNT_ALLOCS
31 Py_ssize_t fast_tuple_allocs;
32 Py_ssize_t tuple_zero_allocs;
33 #endif
34
35 /* Debug statistic to count GC tracking of tuples.
36 Please note that tuples are only untracked when considered by the GC, and
37 many of them will be dead before. Therefore, a tracking rate close to 100%
38 does not necessarily prove that the heuristic is inefficient.
39 */
40 #ifdef SHOW_TRACK_COUNT
41 static Py_ssize_t count_untracked = 0;
42 static Py_ssize_t count_tracked = 0;
43
/* Emit the tuple GC-tracking statistics gathered above to stderr.
   Prints nothing unless the interpreter was configured with
   core_config.show_alloc_count enabled. */
static void
show_track(void)
{
    PyInterpreterState *interp = PyThreadState_GET()->interp;
    if (!interp->core_config.show_alloc_count) {
        return;
    }

    fprintf(stderr, "Tuples created: %" PY_FORMAT_SIZE_T "d\n",
            count_tracked + count_untracked);
    fprintf(stderr, "Tuples tracked by the GC: %" PY_FORMAT_SIZE_T
            "d\n", count_tracked);
    fprintf(stderr, "%.2f%% tuple tracking rate\n\n",
            (100.0*count_tracked/(count_untracked+count_tracked)));
}
59 #endif
60
61 /* Print summary info about the state of the optimized allocator */
void
_PyTuple_DebugMallocStats(FILE *out)
{
#if PyTuple_MAXSAVESIZE > 0
    int i;
    char buf[128];
    /* One report line per free list; index 0 (the empty-tuple
       singleton) is deliberately skipped. */
    for (i = 1; i < PyTuple_MAXSAVESIZE; i++) {
        PyOS_snprintf(buf, sizeof(buf),
                      "free %d-sized PyTupleObject", i);
        _PyDebugAllocatorStats(out,
                               buf,
                               numfree[i], _PyObject_VAR_SIZE(&PyTuple_Type, i));
    }
#endif
}
77
/* Return a new tuple of the given size with all item slots set to NULL.
   Small sizes are served from per-size free lists when possible, and
   the empty tuple is a shared singleton.  Returns NULL with an error
   set on negative size or allocation failure. */
PyObject *
PyTuple_New(Py_ssize_t size)
{
    PyTupleObject *op;
    Py_ssize_t i;
    if (size < 0) {
        PyErr_BadInternalCall();
        return NULL;
    }
#if PyTuple_MAXSAVESIZE > 0
    /* Fast path: hand out the shared empty-tuple singleton. */
    if (size == 0 && free_list[0]) {
        op = free_list[0];
        Py_INCREF(op);
#ifdef COUNT_ALLOCS
        tuple_zero_allocs++;
#endif
        return (PyObject *) op;
    }
    /* Pop a recycled tuple of this exact size; the free list is
       threaded through each node's ob_item[0]. */
    if (size < PyTuple_MAXSAVESIZE && (op = free_list[size]) != NULL) {
        free_list[size] = (PyTupleObject *) op->ob_item[0];
        numfree[size]--;
#ifdef COUNT_ALLOCS
        fast_tuple_allocs++;
#endif
        /* Inline PyObject_InitVar */
#ifdef Py_TRACE_REFS
        Py_SIZE(op) = size;
        Py_TYPE(op) = &PyTuple_Type;
#endif
        _Py_NewReference((PyObject *)op);
    }
    else
#endif
    {
        /* Check for overflow of the header + items allocation size. */
        if ((size_t)size > ((size_t)PY_SSIZE_T_MAX - sizeof(PyTupleObject) -
                    sizeof(PyObject *)) / sizeof(PyObject *)) {
            return PyErr_NoMemory();
        }
        op = PyObject_GC_NewVar(PyTupleObject, &PyTuple_Type, size);
        if (op == NULL)
            return NULL;
    }
    for (i=0; i < size; i++)
        op->ob_item[i] = NULL;
#if PyTuple_MAXSAVESIZE > 0
    /* First empty tuple ever created becomes the permanent singleton. */
    if (size == 0) {
        free_list[0] = op;
        ++numfree[0];
        Py_INCREF(op);          /* extra INCREF so that this is never freed */
    }
#endif
#ifdef SHOW_TRACK_COUNT
    count_tracked++;
#endif
    _PyObject_GC_TRACK(op);
    return (PyObject *) op;
}
136
137 Py_ssize_t
PyTuple_Size(PyObject * op)138 PyTuple_Size(PyObject *op)
139 {
140 if (!PyTuple_Check(op)) {
141 PyErr_BadInternalCall();
142 return -1;
143 }
144 else
145 return Py_SIZE(op);
146 }
147
148 PyObject *
PyTuple_GetItem(PyObject * op,Py_ssize_t i)149 PyTuple_GetItem(PyObject *op, Py_ssize_t i)
150 {
151 if (!PyTuple_Check(op)) {
152 PyErr_BadInternalCall();
153 return NULL;
154 }
155 if (i < 0 || i >= Py_SIZE(op)) {
156 PyErr_SetString(PyExc_IndexError, "tuple index out of range");
157 return NULL;
158 }
159 return ((PyTupleObject *)op) -> ob_item[i];
160 }
161
/* Store newitem at index i, stealing a reference to it.  Only legal on
   a genuine tuple whose refcount is exactly 1 (i.e. a tuple still being
   built that nothing else can see).  On failure the stolen reference is
   released and -1 is returned with an error set. */
int
PyTuple_SetItem(PyObject *op, Py_ssize_t i, PyObject *newitem)
{
    PyObject **p;
    if (!PyTuple_Check(op) || op->ob_refcnt != 1) {
        Py_XDECREF(newitem);
        PyErr_BadInternalCall();
        return -1;
    }
    if (i < 0 || i >= Py_SIZE(op)) {
        Py_XDECREF(newitem);
        PyErr_SetString(PyExc_IndexError,
                        "tuple assignment index out of range");
        return -1;
    }
    p = ((PyTupleObject *)op) -> ob_item + i;
    /* Replace any previous item, dropping its reference. */
    Py_XSETREF(*p, newitem);
    return 0;
}
181
/* Untrack op from the cyclic GC if it is an exact tuple containing only
   objects that can never participate in a reference cycle.  Tuples that
   still hold NULL slots are under construction and stay tracked. */
void
_PyTuple_MaybeUntrack(PyObject *op)
{
    PyTupleObject *t;
    Py_ssize_t i, n;

    if (!PyTuple_CheckExact(op) || !_PyObject_GC_IS_TRACKED(op))
        return;
    t = (PyTupleObject *) op;
    n = Py_SIZE(t);
    for (i = 0; i < n; i++) {
        PyObject *elt = PyTuple_GET_ITEM(t, i);
        /* Tuple with NULL elements aren't
           fully constructed, don't untrack
           them yet. */
        if (!elt ||
            _PyObject_GC_MAY_BE_TRACKED(elt))
            return;
    }
#ifdef SHOW_TRACK_COUNT
    count_tracked--;
    count_untracked++;
#endif
    _PyObject_GC_UNTRACK(op);
}
207
208 PyObject *
PyTuple_Pack(Py_ssize_t n,...)209 PyTuple_Pack(Py_ssize_t n, ...)
210 {
211 Py_ssize_t i;
212 PyObject *o;
213 PyObject *result;
214 PyObject **items;
215 va_list vargs;
216
217 va_start(vargs, n);
218 result = PyTuple_New(n);
219 if (result == NULL) {
220 va_end(vargs);
221 return NULL;
222 }
223 items = ((PyTupleObject *)result)->ob_item;
224 for (i = 0; i < n; i++) {
225 o = va_arg(vargs, PyObject *);
226 Py_INCREF(o);
227 items[i] = o;
228 }
229 va_end(vargs);
230 return result;
231 }
232
233
234 /* Methods */
235
/* tp_dealloc: drop all item references, then either recycle the tuple
   onto its per-size free list or free it.  Wrapped in the trashcan
   macros to bound recursion depth when deallocating deep structures. */
static void
tupledealloc(PyTupleObject *op)
{
    Py_ssize_t i;
    Py_ssize_t len = Py_SIZE(op);
    PyObject_GC_UnTrack(op);
    Py_TRASHCAN_SAFE_BEGIN(op)
    if (len > 0) {
        i = len;
        while (--i >= 0)
            Py_XDECREF(op->ob_item[i]);
#if PyTuple_MAXSAVESIZE > 0
        /* Recycle small exact tuples: push onto the free list, which is
           threaded through ob_item[0]. */
        if (len < PyTuple_MAXSAVESIZE &&
            numfree[len] < PyTuple_MAXFREELIST &&
            Py_TYPE(op) == &PyTuple_Type)
        {
            op->ob_item[0] = (PyObject *) free_list[len];
            numfree[len]++;
            free_list[len] = op;
            goto done; /* return */
        }
#endif
    }
    Py_TYPE(op)->tp_free((PyObject *)op);
done:
    Py_TRASHCAN_SAFE_END(op)
}
263
/* tp_repr: produce "()", "(x,)" or "(x, y, ...)".  Uses the
   Py_ReprEnter/Py_ReprLeave protocol to cut self-referential cycles. */
static PyObject *
tuplerepr(PyTupleObject *v)
{
    Py_ssize_t i, n;
    _PyUnicodeWriter writer;

    n = Py_SIZE(v);
    if (n == 0)
        return PyUnicode_FromString("()");

    /* While not mutable, it is still possible to end up with a cycle in a
       tuple through an object that stores itself within a tuple (and thus
       infinitely asks for the repr of itself). This should only be
       possible within a type. */
    i = Py_ReprEnter((PyObject *)v);
    if (i != 0) {
        return i > 0 ? PyUnicode_FromString("(...)") : NULL;
    }

    _PyUnicodeWriter_Init(&writer);
    writer.overallocate = 1;
    if (Py_SIZE(v) > 1) {
        /* "(" + "1" + ", 2" * (len - 1) + ")" */
        writer.min_length = 1 + 1 + (2 + 1) * (Py_SIZE(v) - 1) + 1;
    }
    else {
        /* "(1,)" */
        writer.min_length = 4;
    }

    if (_PyUnicodeWriter_WriteChar(&writer, '(') < 0)
        goto error;

    /* Do repr() on each element. */
    for (i = 0; i < n; ++i) {
        PyObject *s;

        if (i > 0) {
            if (_PyUnicodeWriter_WriteASCIIString(&writer, ", ", 2) < 0)
                goto error;
        }

        s = PyObject_Repr(v->ob_item[i]);
        if (s == NULL)
            goto error;

        if (_PyUnicodeWriter_WriteStr(&writer, s) < 0) {
            Py_DECREF(s);
            goto error;
        }
        Py_DECREF(s);
    }

    writer.overallocate = 0;
    if (n > 1) {
        if (_PyUnicodeWriter_WriteChar(&writer, ')') < 0)
            goto error;
    }
    else {
        /* A 1-tuple needs the trailing comma to round-trip. */
        if (_PyUnicodeWriter_WriteASCIIString(&writer, ",)", 2) < 0)
            goto error;
    }

    Py_ReprLeave((PyObject *)v);
    return _PyUnicodeWriter_Finish(&writer);

error:
    _PyUnicodeWriter_Dealloc(&writer);
    Py_ReprLeave((PyObject *)v);
    return NULL;
}
335
/* The addend 82520 was selected from the range(0, 1000000) for
337 generating the greatest number of prime multipliers for tuples
338 up to length eight:
339
340 1082527, 1165049, 1082531, 1165057, 1247581, 1330103, 1082533,
341 1330111, 1412633, 1165069, 1247599, 1495177, 1577699
342
   Tests have shown that it's not worth caching the hash value, see
344 issue #9685.
345 */
346
/* tp_hash: combine item hashes with a position-dependent multiplier.
   Returns -1 (the error value) if any item is unhashable. */
static Py_hash_t
tuplehash(PyTupleObject *v)
{
    Py_uhash_t x;  /* Unsigned for defined overflow behavior. */
    Py_hash_t y;
    Py_ssize_t len = Py_SIZE(v);
    PyObject **p;
    Py_uhash_t mult = _PyHASH_MULTIPLIER;
    x = 0x345678UL;
    p = v->ob_item;
    while (--len >= 0) {
        y = PyObject_Hash(*p++);
        if (y == -1)
            return -1;          /* propagate the item's hashing error */
        x = (x ^ y) * mult;
        /* the cast might truncate len; that doesn't change hash stability */
        mult += (Py_hash_t)(82520UL + len + len);
    }
    x += 97531UL;
    if (x == (Py_uhash_t)-1)
        x = -2;                 /* -1 is reserved for "error" */
    return x;
}
370
371 static Py_ssize_t
tuplelength(PyTupleObject * a)372 tuplelength(PyTupleObject *a)
373 {
374 return Py_SIZE(a);
375 }
376
377 static int
tuplecontains(PyTupleObject * a,PyObject * el)378 tuplecontains(PyTupleObject *a, PyObject *el)
379 {
380 Py_ssize_t i;
381 int cmp;
382
383 for (i = 0, cmp = 0 ; cmp == 0 && i < Py_SIZE(a); ++i)
384 cmp = PyObject_RichCompareBool(el, PyTuple_GET_ITEM(a, i),
385 Py_EQ);
386 return cmp;
387 }
388
389 static PyObject *
tupleitem(PyTupleObject * a,Py_ssize_t i)390 tupleitem(PyTupleObject *a, Py_ssize_t i)
391 {
392 if (i < 0 || i >= Py_SIZE(a)) {
393 PyErr_SetString(PyExc_IndexError, "tuple index out of range");
394 return NULL;
395 }
396 Py_INCREF(a->ob_item[i]);
397 return a->ob_item[i];
398 }
399
400 static PyObject *
tupleslice(PyTupleObject * a,Py_ssize_t ilow,Py_ssize_t ihigh)401 tupleslice(PyTupleObject *a, Py_ssize_t ilow,
402 Py_ssize_t ihigh)
403 {
404 PyTupleObject *np;
405 PyObject **src, **dest;
406 Py_ssize_t i;
407 Py_ssize_t len;
408 if (ilow < 0)
409 ilow = 0;
410 if (ihigh > Py_SIZE(a))
411 ihigh = Py_SIZE(a);
412 if (ihigh < ilow)
413 ihigh = ilow;
414 if (ilow == 0 && ihigh == Py_SIZE(a) && PyTuple_CheckExact(a)) {
415 Py_INCREF(a);
416 return (PyObject *)a;
417 }
418 len = ihigh - ilow;
419 np = (PyTupleObject *)PyTuple_New(len);
420 if (np == NULL)
421 return NULL;
422 src = a->ob_item + ilow;
423 dest = np->ob_item;
424 for (i = 0; i < len; i++) {
425 PyObject *v = src[i];
426 Py_INCREF(v);
427 dest[i] = v;
428 }
429 return (PyObject *)np;
430 }
431
432 PyObject *
PyTuple_GetSlice(PyObject * op,Py_ssize_t i,Py_ssize_t j)433 PyTuple_GetSlice(PyObject *op, Py_ssize_t i, Py_ssize_t j)
434 {
435 if (op == NULL || !PyTuple_Check(op)) {
436 PyErr_BadInternalCall();
437 return NULL;
438 }
439 return tupleslice((PyTupleObject *)op, i, j);
440 }
441
442 static PyObject *
tupleconcat(PyTupleObject * a,PyObject * bb)443 tupleconcat(PyTupleObject *a, PyObject *bb)
444 {
445 Py_ssize_t size;
446 Py_ssize_t i;
447 PyObject **src, **dest;
448 PyTupleObject *np;
449 if (Py_SIZE(a) == 0 && PyTuple_CheckExact(bb)) {
450 Py_INCREF(bb);
451 return bb;
452 }
453 if (!PyTuple_Check(bb)) {
454 PyErr_Format(PyExc_TypeError,
455 "can only concatenate tuple (not \"%.200s\") to tuple",
456 Py_TYPE(bb)->tp_name);
457 return NULL;
458 }
459 #define b ((PyTupleObject *)bb)
460 if (Py_SIZE(b) == 0 && PyTuple_CheckExact(a)) {
461 Py_INCREF(a);
462 return (PyObject *)a;
463 }
464 if (Py_SIZE(a) > PY_SSIZE_T_MAX - Py_SIZE(b))
465 return PyErr_NoMemory();
466 size = Py_SIZE(a) + Py_SIZE(b);
467 np = (PyTupleObject *) PyTuple_New(size);
468 if (np == NULL) {
469 return NULL;
470 }
471 src = a->ob_item;
472 dest = np->ob_item;
473 for (i = 0; i < Py_SIZE(a); i++) {
474 PyObject *v = src[i];
475 Py_INCREF(v);
476 dest[i] = v;
477 }
478 src = b->ob_item;
479 dest = np->ob_item + Py_SIZE(a);
480 for (i = 0; i < Py_SIZE(b); i++) {
481 PyObject *v = src[i];
482 Py_INCREF(v);
483 dest[i] = v;
484 }
485 return (PyObject *)np;
486 #undef b
487 }
488
/* sq_repeat: a * n.  Negative n behaves like 0; repeating an exact
   tuple 1 time (or repeating an empty exact tuple) returns it shared. */
static PyObject *
tuplerepeat(PyTupleObject *a, Py_ssize_t n)
{
    Py_ssize_t i, j;
    Py_ssize_t size;
    PyTupleObject *np;
    PyObject **p, **items;
    if (n < 0)
        n = 0;
    if (Py_SIZE(a) == 0 || n == 1) {
        if (PyTuple_CheckExact(a)) {
            /* Since tuples are immutable, we can return a shared
               copy in this case */
            Py_INCREF(a);
            return (PyObject *)a;
        }
        if (Py_SIZE(a) == 0)
            return PyTuple_New(0);
    }
    /* Py_SIZE(a) != 0 here, so the division is safe. */
    if (n > PY_SSIZE_T_MAX / Py_SIZE(a))
        return PyErr_NoMemory();
    size = Py_SIZE(a) * n;
    np = (PyTupleObject *) PyTuple_New(size);
    if (np == NULL)
        return NULL;
    p = np->ob_item;
    items = a->ob_item;
    for (i = 0; i < n; i++) {
        for (j = 0; j < Py_SIZE(a); j++) {
            *p = items[j];
            Py_INCREF(*p);
            p++;
        }
    }
    return (PyObject *) np;
}
525
526 /*[clinic input]
527 tuple.index
528
529 value: object
530 start: slice_index(accept={int}) = 0
531 stop: slice_index(accept={int}, c_default="PY_SSIZE_T_MAX") = sys.maxsize
532 /
533
534 Return first index of value.
535
536 Raises ValueError if the value is not present.
537 [clinic start generated code]*/
538
/* tuple.index(value, [start, [stop]]): first index of value within
   [start, stop), or ValueError if absent.  Negative bounds count from
   the end, as in slicing. */
static PyObject *
tuple_index_impl(PyTupleObject *self, PyObject *value, Py_ssize_t start,
                 Py_ssize_t stop)
/*[clinic end generated code: output=07b6f9f3cb5c33eb input=fb39e9874a21fe3f]*/
{
    Py_ssize_t i;

    if (start < 0) {
        start += Py_SIZE(self);
        if (start < 0)
            start = 0;
    }
    if (stop < 0) {
        stop += Py_SIZE(self);
    }
    else if (stop > Py_SIZE(self)) {
        stop = Py_SIZE(self);
    }
    for (i = start; i < stop; i++) {
        int cmp = PyObject_RichCompareBool(self->ob_item[i], value, Py_EQ);
        if (cmp > 0)
            return PyLong_FromSsize_t(i);
        else if (cmp < 0)
            return NULL;        /* comparison raised */
    }
    PyErr_SetString(PyExc_ValueError, "tuple.index(x): x not in tuple");
    return NULL;
}
567
568 /*[clinic input]
569 tuple.count
570
571 value: object
572 /
573
574 Return number of occurrences of value.
575 [clinic start generated code]*/
576
577 static PyObject *
tuple_count(PyTupleObject * self,PyObject * value)578 tuple_count(PyTupleObject *self, PyObject *value)
579 /*[clinic end generated code: output=aa927affc5a97605 input=531721aff65bd772]*/
580 {
581 Py_ssize_t count = 0;
582 Py_ssize_t i;
583
584 for (i = 0; i < Py_SIZE(self); i++) {
585 int cmp = PyObject_RichCompareBool(self->ob_item[i], value, Py_EQ);
586 if (cmp > 0)
587 count++;
588 else if (cmp < 0)
589 return NULL;
590 }
591 return PyLong_FromSsize_t(count);
592 }
593
594 static int
tupletraverse(PyTupleObject * o,visitproc visit,void * arg)595 tupletraverse(PyTupleObject *o, visitproc visit, void *arg)
596 {
597 Py_ssize_t i;
598
599 for (i = Py_SIZE(o); --i >= 0; )
600 Py_VISIT(o->ob_item[i]);
601 return 0;
602 }
603
/* tp_richcompare: lexicographic comparison of two tuples; returns
   NotImplemented when either operand is not a tuple. */
static PyObject *
tuplerichcompare(PyObject *v, PyObject *w, int op)
{
    PyTupleObject *vt, *wt;
    Py_ssize_t i;
    Py_ssize_t vlen, wlen;

    if (!PyTuple_Check(v) || !PyTuple_Check(w))
        Py_RETURN_NOTIMPLEMENTED;

    vt = (PyTupleObject *)v;
    wt = (PyTupleObject *)w;

    vlen = Py_SIZE(vt);
    wlen = Py_SIZE(wt);

    /* Note: the corresponding code for lists has an "early out" test
     * here when op is EQ or NE and the lengths differ.  That pays there,
     * but Tim was unable to find any real code where EQ/NE tuple
     * compares don't have the same length, so testing for it here would
     * have cost without benefit.
     */

    /* Search for the first index where items are different.
     * Note that because tuples are immutable, it's safe to reuse
     * vlen and wlen across the comparison calls.
     */
    for (i = 0; i < vlen && i < wlen; i++) {
        int k = PyObject_RichCompareBool(vt->ob_item[i],
                                         wt->ob_item[i], Py_EQ);
        if (k < 0)
            return NULL;
        if (!k)
            break;
    }

    if (i >= vlen || i >= wlen) {
        /* No more items to compare -- compare sizes */
        Py_RETURN_RICHCOMPARE(vlen, wlen, op);
    }

    /* We have an item that differs -- shortcuts for EQ/NE */
    if (op == Py_EQ) {
        Py_RETURN_FALSE;
    }
    if (op == Py_NE) {
        Py_RETURN_TRUE;
    }

    /* Compare the final item again using the proper operator */
    return PyObject_RichCompare(vt->ob_item[i], wt->ob_item[i], op);
}
656
657 static PyObject *
658 tuple_subtype_new(PyTypeObject *type, PyObject *iterable);
659
660 /*[clinic input]
661 @classmethod
662 tuple.__new__ as tuple_new
663 iterable: object(c_default="NULL") = ()
664 /
665
666 Built-in immutable sequence.
667
668 If no argument is given, the constructor returns an empty tuple.
669 If iterable is specified the tuple is initialized from iterable's items.
670
671 If the argument is a tuple, the return value is the same object.
672 [clinic start generated code]*/
673
674 static PyObject *
tuple_new_impl(PyTypeObject * type,PyObject * iterable)675 tuple_new_impl(PyTypeObject *type, PyObject *iterable)
676 /*[clinic end generated code: output=4546d9f0d469bce7 input=86963bcde633b5a2]*/
677 {
678 if (type != &PyTuple_Type)
679 return tuple_subtype_new(type, iterable);
680
681 if (iterable == NULL)
682 return PyTuple_New(0);
683 else
684 return PySequence_Tuple(iterable);
685 }
686
687 static PyObject *
tuple_subtype_new(PyTypeObject * type,PyObject * iterable)688 tuple_subtype_new(PyTypeObject *type, PyObject *iterable)
689 {
690 PyObject *tmp, *newobj, *item;
691 Py_ssize_t i, n;
692
693 assert(PyType_IsSubtype(type, &PyTuple_Type));
694 tmp = tuple_new_impl(&PyTuple_Type, iterable);
695 if (tmp == NULL)
696 return NULL;
697 assert(PyTuple_Check(tmp));
698 newobj = type->tp_alloc(type, n = PyTuple_GET_SIZE(tmp));
699 if (newobj == NULL)
700 return NULL;
701 for (i = 0; i < n; i++) {
702 item = PyTuple_GET_ITEM(tmp, i);
703 Py_INCREF(item);
704 PyTuple_SET_ITEM(newobj, i, item);
705 }
706 Py_DECREF(tmp);
707 return newobj;
708 }
709
/* Sequence protocol table for tuple. */
static PySequenceMethods tuple_as_sequence = {
    (lenfunc)tuplelength,                       /* sq_length */
    (binaryfunc)tupleconcat,                    /* sq_concat */
    (ssizeargfunc)tuplerepeat,                  /* sq_repeat */
    (ssizeargfunc)tupleitem,                    /* sq_item */
    0,                                          /* sq_slice */
    0,                                          /* sq_ass_item */
    0,                                          /* sq_ass_slice */
    (objobjproc)tuplecontains,                  /* sq_contains */
};
720
/* mp_subscript: implements t[index] and t[slice]. */
static PyObject*
tuplesubscript(PyTupleObject* self, PyObject* item)
{
    if (PyIndex_Check(item)) {
        Py_ssize_t i = PyNumber_AsSsize_t(item, PyExc_IndexError);
        if (i == -1 && PyErr_Occurred())
            return NULL;
        if (i < 0)
            i += PyTuple_GET_SIZE(self);    /* negative index counts from the end */
        return tupleitem(self, i);
    }
    else if (PySlice_Check(item)) {
        Py_ssize_t start, stop, step, slicelength, cur, i;
        PyObject* result;
        PyObject* it;
        PyObject **src, **dest;

        if (PySlice_Unpack(item, &start, &stop, &step) < 0) {
            return NULL;
        }
        slicelength = PySlice_AdjustIndices(PyTuple_GET_SIZE(self), &start,
                                            &stop, step);

        if (slicelength <= 0) {
            return PyTuple_New(0);
        }
        else if (start == 0 && step == 1 &&
                 slicelength == PyTuple_GET_SIZE(self) &&
                 PyTuple_CheckExact(self)) {
            /* Full slice of an exact tuple: share the immutable object. */
            Py_INCREF(self);
            return (PyObject *)self;
        }
        else {
            result = PyTuple_New(slicelength);
            if (!result) return NULL;

            src = self->ob_item;
            dest = ((PyTupleObject *)result)->ob_item;
            for (cur = start, i = 0; i < slicelength;
                 cur += step, i++) {
                it = src[cur];
                Py_INCREF(it);
                dest[i] = it;
            }

            return result;
        }
    }
    else {
        PyErr_Format(PyExc_TypeError,
                     "tuple indices must be integers or slices, not %.200s",
                     Py_TYPE(item)->tp_name);
        return NULL;
    }
}
776
777 /*[clinic input]
778 tuple.__getnewargs__
779 [clinic start generated code]*/
780
static PyObject *
tuple___getnewargs___impl(PyTupleObject *self)
/*[clinic end generated code: output=25e06e3ee56027e2 input=1aeb4b286a21639a]*/
{
    /* Pickling support: a 1-tuple of constructor arguments.  "N" steals
       the reference produced by tupleslice(). */
    return Py_BuildValue("(N)", tupleslice(self, 0, Py_SIZE(self)));
}
787
/* Method table; the *_METHODDEF entries are generated by Argument
   Clinic (see clinic/tupleobject.c.h). */
static PyMethodDef tuple_methods[] = {
    TUPLE___GETNEWARGS___METHODDEF
    TUPLE_INDEX_METHODDEF
    TUPLE_COUNT_METHODDEF
    {NULL,              NULL}           /* sentinel */
};
794
/* Mapping protocol table: provides t[i] and t[i:j:k] via subscript. */
static PyMappingMethods tuple_as_mapping = {
    (lenfunc)tuplelength,
    (binaryfunc)tuplesubscript,
    0
};
800
801 static PyObject *tuple_iter(PyObject *seq);
802
PyTypeObject PyTuple_Type = {
    PyVarObject_HEAD_INIT(&PyType_Type, 0)
    "tuple",
    /* basicsize excludes the single ob_item slot declared in the
       struct; itemsize then accounts for one pointer per element. */
    sizeof(PyTupleObject) - sizeof(PyObject *),
    sizeof(PyObject *),
    (destructor)tupledealloc,                   /* tp_dealloc */
    0,                                          /* tp_print */
    0,                                          /* tp_getattr */
    0,                                          /* tp_setattr */
    0,                                          /* tp_reserved */
    (reprfunc)tuplerepr,                        /* tp_repr */
    0,                                          /* tp_as_number */
    &tuple_as_sequence,                         /* tp_as_sequence */
    &tuple_as_mapping,                          /* tp_as_mapping */
    (hashfunc)tuplehash,                        /* tp_hash */
    0,                                          /* tp_call */
    0,                                          /* tp_str */
    PyObject_GenericGetAttr,                    /* tp_getattro */
    0,                                          /* tp_setattro */
    0,                                          /* tp_as_buffer */
    Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC |
        Py_TPFLAGS_BASETYPE | Py_TPFLAGS_TUPLE_SUBCLASS, /* tp_flags */
    tuple_new__doc__,                           /* tp_doc */
    (traverseproc)tupletraverse,                /* tp_traverse */
    0,                                          /* tp_clear */
    tuplerichcompare,                           /* tp_richcompare */
    0,                                          /* tp_weaklistoffset */
    tuple_iter,                                 /* tp_iter */
    0,                                          /* tp_iternext */
    tuple_methods,                              /* tp_methods */
    0,                                          /* tp_members */
    0,                                          /* tp_getset */
    0,                                          /* tp_base */
    0,                                          /* tp_dict */
    0,                                          /* tp_descr_get */
    0,                                          /* tp_descr_set */
    0,                                          /* tp_dictoffset */
    0,                                          /* tp_init */
    0,                                          /* tp_alloc */
    tuple_new,                                  /* tp_new */
    PyObject_GC_Del,                            /* tp_free */
};
845
846 /* The following function breaks the notion that tuples are immutable:
847 it changes the size of a tuple. We get away with this only if there
848 is only one module referencing the object. You can also think of it
849 as creating a new tuple object and destroying the old one, only more
850 efficiently. In any case, don't use this if the tuple may already be
851 known to some other part of the code. */
852
/* Resize *pv in place to newsize items.  Only legal on a plain tuple
   with a single reference (or an empty tuple, which is replaced by a
   fresh allocation).  On failure *pv is set to NULL and -1 returned. */
int
_PyTuple_Resize(PyObject **pv, Py_ssize_t newsize)
{
    PyTupleObject *v;
    PyTupleObject *sv;
    Py_ssize_t i;
    Py_ssize_t oldsize;

    v = (PyTupleObject *) *pv;
    if (v == NULL || Py_TYPE(v) != &PyTuple_Type ||
        (Py_SIZE(v) != 0 && Py_REFCNT(v) != 1)) {
        *pv = 0;
        Py_XDECREF(v);
        PyErr_BadInternalCall();
        return -1;
    }
    oldsize = Py_SIZE(v);
    if (oldsize == newsize)
        return 0;

    if (oldsize == 0) {
        /* Empty tuples are often shared, so we should never
           resize them in-place even if we do own the only
           (current) reference */
        Py_DECREF(v);
        *pv = PyTuple_New(newsize);
        return *pv == NULL ? -1 : 0;
    }

    /* XXX UNREF/NEWREF interface should be more symmetrical */
    _Py_DEC_REFTOTAL;
    if (_PyObject_GC_IS_TRACKED(v))
        _PyObject_GC_UNTRACK(v);
    _Py_ForgetReference((PyObject *) v);
    /* DECREF items deleted by shrinkage */
    for (i = newsize; i < oldsize; i++) {
        Py_CLEAR(v->ob_item[i]);
    }
    sv = PyObject_GC_Resize(PyTupleObject, v, newsize);
    if (sv == NULL) {
        *pv = NULL;
        PyObject_GC_Del(v);
        return -1;
    }
    _Py_NewReference((PyObject *) sv);
    /* Zero out items added by growing */
    if (newsize > oldsize)
        memset(&sv->ob_item[oldsize], 0,
               sizeof(*sv->ob_item) * (newsize - oldsize));
    *pv = (PyObject *) sv;
    _PyObject_GC_TRACK(sv);
    return 0;
}
906
/* Release every tuple cached on the per-size free lists (the empty
   tuple singleton at index 0 is kept).  Returns the number freed. */
int
PyTuple_ClearFreeList(void)
{
    int freelist_size = 0;
#if PyTuple_MAXSAVESIZE > 0
    int i;
    for (i = 1; i < PyTuple_MAXSAVESIZE; i++) {
        PyTupleObject *p, *q;
        p = free_list[i];
        freelist_size += numfree[i];
        free_list[i] = NULL;
        numfree[i] = 0;
        /* Walk the list threaded through ob_item[0], freeing each node. */
        while (p) {
            q = p;
            p = (PyTupleObject *)(p->ob_item[0]);
            PyObject_GC_Del(q);
        }
    }
#endif
    return freelist_size;
}
928
/* Interpreter-shutdown cleanup: drop the empty-tuple singleton and
   clear the free lists; optionally report tracking statistics. */
void
PyTuple_Fini(void)
{
#if PyTuple_MAXSAVESIZE > 0
    /* empty tuples are used all over the place and applications may
     * rely on the fact that an empty tuple is a singleton. */
    Py_CLEAR(free_list[0]);

    (void)PyTuple_ClearFreeList();
#endif
#ifdef SHOW_TRACK_COUNT
    show_track();
#endif
}
943
944 /*********************** Tuple Iterator **************************/
945
/* State for a tuple iterator object. */
typedef struct {
    PyObject_HEAD
    Py_ssize_t it_index;    /* next position to yield */
    PyTupleObject *it_seq;  /* Set to NULL when iterator is exhausted */
} tupleiterobject;
951
static void
tupleiter_dealloc(tupleiterobject *it)
{
    /* Untrack first, then drop the owned reference to the tuple (may
       already be NULL if the iterator was exhausted). */
    _PyObject_GC_UNTRACK(it);
    Py_XDECREF(it->it_seq);
    PyObject_GC_Del(it);
}
959
static int
tupleiter_traverse(tupleiterobject *it, visitproc visit, void *arg)
{
    /* The underlying tuple is the iterator's only GC-visible reference. */
    Py_VISIT(it->it_seq);
    return 0;
}
966
/* tp_iternext: return a new reference to the next item, or NULL when
   exhausted.  On exhaustion the reference to the tuple is released so
   it can be reclaimed without waiting for the iterator to die. */
static PyObject *
tupleiter_next(tupleiterobject *it)
{
    PyTupleObject *seq;
    PyObject *item;

    assert(it != NULL);
    seq = it->it_seq;
    if (seq == NULL)
        return NULL;            /* already exhausted */
    assert(PyTuple_Check(seq));

    if (it->it_index < PyTuple_GET_SIZE(seq)) {
        item = PyTuple_GET_ITEM(seq, it->it_index);
        ++it->it_index;
        Py_INCREF(item);
        return item;
    }

    it->it_seq = NULL;
    Py_DECREF(seq);
    return NULL;
}
990
991 static PyObject *
tupleiter_len(tupleiterobject * it)992 tupleiter_len(tupleiterobject *it)
993 {
994 Py_ssize_t len = 0;
995 if (it->it_seq)
996 len = PyTuple_GET_SIZE(it->it_seq) - it->it_index;
997 return PyLong_FromSsize_t(len);
998 }
999
1000 PyDoc_STRVAR(length_hint_doc, "Private method returning an estimate of len(list(it)).");
1001
/* __reduce__: pickle as iter(seq) plus the current index; an exhausted
   iterator pickles as iter(()). */
static PyObject *
tupleiter_reduce(tupleiterobject *it)
{
    if (it->it_seq)
        return Py_BuildValue("N(O)n", _PyObject_GetBuiltin("iter"),
                             it->it_seq, it->it_index);
    else
        return Py_BuildValue("N(())", _PyObject_GetBuiltin("iter"));
}
1011
1012 static PyObject *
tupleiter_setstate(tupleiterobject * it,PyObject * state)1013 tupleiter_setstate(tupleiterobject *it, PyObject *state)
1014 {
1015 Py_ssize_t index = PyLong_AsSsize_t(state);
1016 if (index == -1 && PyErr_Occurred())
1017 return NULL;
1018 if (it->it_seq != NULL) {
1019 if (index < 0)
1020 index = 0;
1021 else if (index > PyTuple_GET_SIZE(it->it_seq))
1022 index = PyTuple_GET_SIZE(it->it_seq); /* exhausted iterator */
1023 it->it_index = index;
1024 }
1025 Py_RETURN_NONE;
1026 }
1027
1028 PyDoc_STRVAR(reduce_doc, "Return state information for pickling.");
1029 PyDoc_STRVAR(setstate_doc, "Set state information for unpickling.");
1030
/* Iterator method table: length hint plus pickling support. */
static PyMethodDef tupleiter_methods[] = {
    {"__length_hint__", (PyCFunction)tupleiter_len, METH_NOARGS, length_hint_doc},
    {"__reduce__", (PyCFunction)tupleiter_reduce, METH_NOARGS, reduce_doc},
    {"__setstate__",    (PyCFunction)tupleiter_setstate, METH_O, setstate_doc},
    {NULL,              NULL}           /* sentinel */
};
1037
PyTypeObject PyTupleIter_Type = {
    PyVarObject_HEAD_INIT(&PyType_Type, 0)
    "tuple_iterator",                           /* tp_name */
    sizeof(tupleiterobject),                    /* tp_basicsize */
    0,                                          /* tp_itemsize */
    /* methods */
    (destructor)tupleiter_dealloc,              /* tp_dealloc */
    0,                                          /* tp_print */
    0,                                          /* tp_getattr */
    0,                                          /* tp_setattr */
    0,                                          /* tp_reserved */
    0,                                          /* tp_repr */
    0,                                          /* tp_as_number */
    0,                                          /* tp_as_sequence */
    0,                                          /* tp_as_mapping */
    0,                                          /* tp_hash */
    0,                                          /* tp_call */
    0,                                          /* tp_str */
    PyObject_GenericGetAttr,                    /* tp_getattro */
    0,                                          /* tp_setattro */
    0,                                          /* tp_as_buffer */
    Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC,/* tp_flags */
    0,                                          /* tp_doc */
    (traverseproc)tupleiter_traverse,           /* tp_traverse */
    0,                                          /* tp_clear */
    0,                                          /* tp_richcompare */
    0,                                          /* tp_weaklistoffset */
    PyObject_SelfIter,                          /* tp_iter */
    (iternextfunc)tupleiter_next,               /* tp_iternext */
    tupleiter_methods,                          /* tp_methods */
    0,                                          /* tp_members */
};
1070
/* tp_iter for tuples: create a new tuple_iterator over seq.  The
   iterator owns a reference to the tuple. */
static PyObject *
tuple_iter(PyObject *seq)
{
    tupleiterobject *it;

    if (!PyTuple_Check(seq)) {
        PyErr_BadInternalCall();
        return NULL;
    }
    it = PyObject_GC_New(tupleiterobject, &PyTupleIter_Type);
    if (it == NULL)
        return NULL;
    it->it_index = 0;
    Py_INCREF(seq);
    it->it_seq = (PyTupleObject *)seq;
    _PyObject_GC_TRACK(it);
    return (PyObject *)it;
}
1089