Coverage Report

Created: 2022-07-08 09:39

/home/mdboom/Work/builds/cpython/Objects/tupleobject.c
Line
Count
Source
1
2
/* Tuple object implementation */
3
4
#include "Python.h"
5
#include "pycore_abstract.h"      // _PyIndex_Check()
6
#include "pycore_gc.h"            // _PyObject_GC_IS_TRACKED()
7
#include "pycore_initconfig.h"    // _PyStatus_OK()
8
#include "pycore_object.h"        // _PyObject_GC_TRACK(), _Py_FatalRefcountError()
9
10
/*[clinic input]
11
class tuple "PyTupleObject *" "&PyTuple_Type"
12
[clinic start generated code]*/
13
/*[clinic end generated code: output=da39a3ee5e6b4b0d input=f051ba3cfdf9a189]*/
14
15
#include "clinic/tupleobject.c.h"
16
17
18
static inline PyTupleObject * maybe_freelist_pop(Py_ssize_t);
19
static inline int maybe_freelist_push(PyTupleObject *);
20
21
22
/* Allocate an uninitialized tuple object. Before making it public, the following
23
   steps must be done:
24
25
   - Initialize its items.
26
   - Call _PyObject_GC_TRACK() on it.
27
28
   Because the empty tuple is always reused and it's already tracked by GC,
29
   this function must not be called with size == 0 (unless from PyTuple_New()
30
   which wraps this function).
31
*/
32
static PyTupleObject *
33
tuple_alloc(Py_ssize_t size)
34
{
35
    if (size < 0) {
  Branch (35:9): [True: 0, False: 120M]
36
        PyErr_BadInternalCall();
37
        return NULL;
38
    }
39
#ifdef Py_DEBUG
40
    assert(size != 0);    // The empty tuple is statically allocated.
41
#endif
42
43
    PyTupleObject *op = maybe_freelist_pop(size);
44
    if (op == NULL) {
  Branch (44:9): [True: 4.37M, False: 115M]
45
        /* Check for overflow */
46
        if ((size_t)size > ((size_t)PY_SSIZE_T_MAX - (sizeof(PyTupleObject) -
  Branch (46:13): [True: 0, False: 4.37M]
47
                    sizeof(PyObject *))) / sizeof(PyObject *)) {
48
            return (PyTupleObject *)PyErr_NoMemory();
49
        }
50
        op = PyObject_GC_NewVar(PyTupleObject, &PyTuple_Type, size);
51
        if (op == NULL)
  Branch (51:13): [True: 0, False: 4.37M]
52
            return NULL;
53
    }
54
    return op;
55
}
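
A minimal sketch of the contract described in the comment above tuple_alloc(): fill every slot, then start GC tracking. It assumes it were written inside this file (tuple_alloc() is static) and is not part of CPython; PyTuple_New() and PyTuple_Pack() below follow the same pattern.

/* Illustrative sketch, not CPython code: a caller honoring the
   tuple_alloc() contract documented above. */
static PyObject *
example_pair(PyObject *first, PyObject *second)
{
    PyTupleObject *op = tuple_alloc(2);      /* size > 0, so this is allowed */
    if (op == NULL) {
        return NULL;
    }
    /* Step 1: initialize every item before the tuple becomes visible. */
    Py_INCREF(first);
    op->ob_item[0] = first;
    Py_INCREF(second);
    op->ob_item[1] = second;
    /* Step 2: only now hand the object to the garbage collector. */
    _PyObject_GC_TRACK(op);
    return (PyObject *)op;
}
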
56
57
// The empty tuple singleton is not tracked by the GC.
58
// It does not contain any Python object.
59
// Note that tuple subclasses have their own empty instances.
60
61
static inline PyObject *
62
tuple_get_empty(void)
63
{
64
    Py_INCREF(&_Py_SINGLETON(tuple_empty));
65
    return (PyObject *)&_Py_SINGLETON(tuple_empty);
66
}
67
68
PyObject *
69
PyTuple_New(Py_ssize_t size)
70
{
71
    PyTupleObject *op;
72
    if (size == 0) {
  Branch (72:9): [True: 2.45M, False: 53.3M]
73
        return tuple_get_empty();
74
    }
75
    op = tuple_alloc(size);
76
    if (op == NULL) {
  Branch (76:9): [True: 0, False: 53.3M]
77
        return NULL;
78
    }
79
    for (Py_ssize_t i = 0; i < size; i++) {
  Branch (79:28): [True: 140M, False: 53.3M]
80
        op->ob_item[i] = NULL;
81
    }
82
    _PyObject_GC_TRACK(op);
83
    return (PyObject *) op;
84
}
85
86
Py_ssize_t
87
PyTuple_Size(PyObject *op)
88
{
89
    if (!PyTuple_Check(op)) {
  Branch (89:9): [True: 0, False: 7.71M]
90
        PyErr_BadInternalCall();
91
        return -1;
92
    }
93
    else
94
        return Py_SIZE(op);
95
}
96
97
PyObject *
98
PyTuple_GetItem(PyObject *op, Py_ssize_t i)
99
{
100
    if (!PyTuple_Check(op)) {
  Branch (100:9): [True: 0, False: 16.2M]
101
        PyErr_BadInternalCall();
102
        return NULL;
103
    }
104
    if (i < 0 || i >= Py_SIZE(op)) {
  Branch (104:9): [True: 1, False: 16.2M]
  Branch (104:18): [True: 1, False: 16.2M]
105
        PyErr_SetString(PyExc_IndexError, "tuple index out of range");
106
        return NULL;
107
    }
108
    return ((PyTupleObject *)op) -> ob_item[i];
109
}
110
111
int
112
PyTuple_SetItem(PyObject *op, Py_ssize_t i, PyObject *newitem)
113
{
114
    PyObject **p;
115
    if (!PyTuple_Check(op) || Py_REFCNT(op) != 1) {
  Branch (115:9): [True: 0, False: 86]
  Branch (115:31): [True: 0, False: 86]
116
        Py_XDECREF(newitem);
117
        PyErr_BadInternalCall();
118
        return -1;
119
    }
120
    if (i < 0 || i >= Py_SIZE(op)) {
  Branch (120:9): [True: 0, False: 86]
  Branch (120:18): [True: 0, False: 86]
121
        Py_XDECREF(newitem);
122
        PyErr_SetString(PyExc_IndexError,
123
                        "tuple assignment index out of range");
124
        return -1;
125
    }
126
    p = ((PyTupleObject *)op) -> ob_item + i;
127
    Py_XSETREF(*p, newitem);
128
    return 0;
129
}
130
131
void
132
_PyTuple_MaybeUntrack(PyObject *op)
133
{
134
    PyTupleObject *t;
135
    Py_ssize_t i, n;
136
137
    if (!PyTuple_CheckExact(op) || !_PyObject_GC_IS_TRACKED(op))
  Branch (137:9): [True: 0, False: 259M]
  Branch (137:36): [True: 0, False: 259M]
138
        return;
139
    t = (PyTupleObject *) op;
140
    n = Py_SIZE(t);
141
    for (i = 0; i < n; i++) {
  Branch (141:17): [True: 310M, False: 3.61M]
142
        PyObject *elt = PyTuple_GET_ITEM(t, i);
143
        /* Tuples with NULL elements aren't
144
           fully constructed, don't untrack
145
           them yet. */
146
        if (!elt ||
  Branch (146:13): [True: 2.10k, False: 310M]
147
            _PyObject_GC_MAY_BE_TRACKED(elt))
  Branch (147:13): [True: 255M, False: 55.0M]
148
            return;
149
    }
150
    _PyObject_GC_UNTRACK(op);
151
}
152
153
PyObject *
154
PyTuple_Pack(Py_ssize_t n, ...)
155
{
156
    Py_ssize_t i;
157
    PyObject *o;
158
    PyObject **items;
159
    va_list vargs;
160
161
    if (n == 0) {
  Branch (161:9): [True: 0, False: 5.70M]
162
        return tuple_get_empty();
163
    }
164
165
    va_start(vargs, n);
166
    PyTupleObject *result = tuple_alloc(n);
167
    if (result == NULL) {
  Branch (167:9): [True: 0, False: 5.70M]
168
        va_end(vargs);
169
        return NULL;
170
    }
171
    items = result->ob_item;
172
    for (i = 0; i < n; i++) {
  Branch (172:17): [True: 8.12M, False: 5.70M]
173
        o = va_arg(vargs, PyObject *);
174
        Py_INCREF(o);
175
        items[i] = o;
176
    }
177
    va_end(vargs);
178
    _PyObject_GC_TRACK(result);
179
    return (PyObject *)result;
180
}
181
182
183
/* Methods */
184
185
static void
186
tupledealloc(PyTupleObject *op)
187
{
188
    if (Py_SIZE(op) == 0) {
  Branch (188:9): [True: 34, False: 121M]
189
        /* The empty tuple is statically allocated. */
190
        if (op == &_Py_SINGLETON(tuple_empty)) {
  Branch (190:13): [True: 0, False: 34]
191
#ifdef Py_DEBUG
192
            _Py_FatalRefcountError("deallocating the empty tuple singleton");
193
#else
194
            return;
195
#endif
196
        }
197
#ifdef Py_DEBUG
198
        /* tuple subclasses have their own empty instances. */
199
        assert(!PyTuple_CheckExact(op));
200
#endif
201
    }
202
203
    PyObject_GC_UnTrack(op);
204
    Py_TRASHCAN_BEGIN(op, tupledealloc)
205
206
    Py_ssize_t i = Py_SIZE(op);
207
    while (--i >= 0) {
  Branch (207:12): [True: 274M, False: 121M]
208
        Py_XDECREF(op->ob_item[i]);
209
    }
210
    // This will abort on the empty singleton (if there is one).
211
    if (!maybe_freelist_push(op)) {
  Branch (211:9): [True: 4.41M, False: 116M]
212
        Py_TYPE(op)->tp_free((PyObject *)op);
213
    }
214
215
    Py_TRASHCAN_END
216
}
217
218
static PyObject *
219
tuplerepr(PyTupleObject *v)
220
{
221
    Py_ssize_t i, n;
222
    _PyUnicodeWriter writer;
223
224
    n = Py_SIZE(v);
225
    if (n == 0)
  Branch (225:9): [True: 452, False: 11.3k]
226
        return PyUnicode_FromString("()");
227
228
    /* While not mutable, it is still possible to end up with a cycle in a
229
       tuple through an object that stores itself within a tuple (and thus
230
       infinitely asks for the repr of itself). This should only be
231
       possible within a type. */
232
    i = Py_ReprEnter((PyObject *)v);
233
    if (i != 0) {
  Branch (233:9): [True: 0, False: 11.3k]
234
        return i > 0 ? PyUnicode_FromString("(...)") : NULL;
  Branch (234:16): [True: 0, False: 0]
235
    }
236
237
    _PyUnicodeWriter_Init(&writer);
238
    writer.overallocate = 1;
239
    if (Py_SIZE(v) > 1) {
  Branch (239:9): [True: 10.9k, False: 364]
240
        /* "(" + "1" + ", 2" * (len - 1) + ")" */
241
        writer.min_length = 1 + 1 + (2 + 1) * (Py_SIZE(v) - 1) + 1;
242
    }
243
    else {
244
        /* "(1,)" */
245
        writer.min_length = 4;
246
    }
247
248
    if (_PyUnicodeWriter_WriteChar(&writer, '(') < 0)
  Branch (248:9): [True: 0, False: 11.3k]
249
        goto error;
250
251
    /* Do repr() on each element. */
252
    for (i = 0; i < n; ++i) {
  Branch (252:17): [True: 1.08M, False: 11.3k]
253
        PyObject *s;
254
255
        if (i > 0) {
  Branch (255:13): [True: 1.06M, False: 11.3k]
256
            if (_PyUnicodeWriter_WriteASCIIString(&writer, ", ", 2) < 0)
  Branch (256:17): [True: 0, False: 1.06M]
257
                goto error;
258
        }
259
260
        s = PyObject_Repr(v->ob_item[i]);
261
        if (s == NULL)
  Branch (261:13): [True: 0, False: 1.08M]
262
            goto error;
263
264
        if (_PyUnicodeWriter_WriteStr(&writer, s) < 0) {
  Branch (264:13): [True: 0, False: 1.08M]
265
            Py_DECREF(s);
266
            goto error;
267
        }
268
        Py_DECREF(s);
269
    }
270
271
    writer.overallocate = 0;
272
    if (n > 1) {
  Branch (272:9): [True: 10.9k, False: 364]
273
        if (_PyUnicodeWriter_WriteChar(&writer, ')') < 0)
  Branch (273:13): [True: 0, False: 10.9k]
274
            goto error;
275
    }
276
    else {
277
        if (_PyUnicodeWriter_WriteASCIIString(&writer, ",)", 2) < 0)
  Branch (277:13): [True: 0, False: 364]
278
            goto error;
279
    }
280
281
    Py_ReprLeave((PyObject *)v);
282
    return _PyUnicodeWriter_Finish(&writer);
283
284
error:
285
    _PyUnicodeWriter_Dealloc(&writer);
286
    Py_ReprLeave((PyObject *)v);
287
    return NULL;
288
}
289
290
291
/* Hash for tuples. This is a slightly simplified version of the xxHash
292
   non-cryptographic hash:
293
   - we do not use any parallelism, there is only 1 accumulator.
294
   - we drop the final mixing since this is just a permutation of the
295
     output space: it does not help against collisions.
296
   - at the end, we mangle the length with a single constant.
297
   For the xxHash specification, see
298
   https://github.com/Cyan4973/xxHash/blob/master/doc/xxhash_spec.md
299
300
   Below are the official constants from the xxHash specification. Optimizing
301
   compilers should emit a single "rotate" instruction for the
302
   _PyHASH_XXROTATE() expansion. If that doesn't happen for some important
303
   platform, the macro could be changed to expand to a platform-specific rotate
304
   spelling instead.
305
*/
306
#if SIZEOF_PY_UHASH_T > 4
307
#define _PyHASH_XXPRIME_1 ((Py_uhash_t)11400714785074694791ULL)
308
#define _PyHASH_XXPRIME_2 ((Py_uhash_t)14029467366897019727ULL)
309
#define _PyHASH_XXPRIME_5 ((Py_uhash_t)2870177450012600261ULL)
310
#define _PyHASH_XXROTATE(x) ((x << 31) | (x >> 33))  /* Rotate left 31 bits */
311
#else
312
#define _PyHASH_XXPRIME_1 ((Py_uhash_t)2654435761UL)
313
#define _PyHASH_XXPRIME_2 ((Py_uhash_t)2246822519UL)
314
#define _PyHASH_XXPRIME_5 ((Py_uhash_t)374761393UL)
315
#define _PyHASH_XXROTATE(x) ((x << 13) | (x >> 19))  /* Rotate left 13 bits */
316
#endif
317
318
/* Tests have shown that it's not worth caching the hash value, see
319
   https://bugs.python.org/issue9685 */
320
static Py_hash_t
321
tuplehash(PyTupleObject *v)
322
{
323
    Py_ssize_t i, len = Py_SIZE(v);
324
    PyObject **item = v->ob_item;
325
326
    Py_uhash_t acc = _PyHASH_XXPRIME_5;
327
    for (i = 0; i < len; i++) {
  Branch (327:17): [True: 19.0M, False: 7.72M]
328
        Py_uhash_t lane = PyObject_Hash(item[i]);
329
        if (lane == (Py_uhash_t)-1) {
  Branch (329:13): [True: 60, False: 19.0M]
330
            return -1;
331
        }
332
        acc += lane * _PyHASH_XXPRIME_2;
333
        acc = _PyHASH_XXROTATE(acc);
334
        acc *= _PyHASH_XXPRIME_1;
335
    }
336
337
    /* Add input length, mangled to keep the historical value of hash(()). */
338
    acc += len ^ (_PyHASH_XXPRIME_5 ^ 3527539UL);
339
340
    if (acc == (Py_uhash_t)-1) {
  Branch (340:9): [True: 0, False: 7.72M]
341
        return 1546275796;
342
    }
343
    return acc;
344
}
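
A standalone distillation of the combining loop in tuplehash() above, written against plain uint64_t lane values so it can be compiled outside the interpreter. The XXPRIME/XXROTATE names below are local stand-ins for the 64-bit _PyHASH_XX* definitions, not CPython identifiers.

/* Illustrative sketch, not CPython code: combine precomputed item hashes
   ("lanes") the same way tuplehash() does for the 64-bit constants
   (minus the -1 special case). */
#include <stdint.h>
#include <stddef.h>

#define XXPRIME_1 11400714785074694791ULL
#define XXPRIME_2 14029467366897019727ULL
#define XXPRIME_5 2870177450012600261ULL
#define XXROTATE(x) (((x) << 31) | ((x) >> 33))   /* rotate left 31 bits */

static uint64_t
combine_lanes(const uint64_t *lanes, size_t len)
{
    uint64_t acc = XXPRIME_5;
    for (size_t i = 0; i < len; i++) {
        acc += lanes[i] * XXPRIME_2;
        acc = XXROTATE(acc);
        acc *= XXPRIME_1;
    }
    /* Mix in the length, as tuplehash() does to keep hash(()) stable. */
    acc += len ^ (XXPRIME_5 ^ 3527539UL);
    return acc;
}
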
345
346
static Py_ssize_t
347
tuplelength(PyTupleObject *a)
348
{
349
    return Py_SIZE(a);
350
}
351
352
static int
353
tuplecontains(PyTupleObject *a, PyObject *el)
354
{
355
    Py_ssize_t i;
356
    int cmp;
357
358
    for (i = 0, cmp = 0 ; cmp == 0 && i < Py_SIZE(a); ++i)
  Branch (358:27): [True: 6.79M, False: 848k]
  Branch (358:39): [True: 5.72M, False: 1.06M]
359
        cmp = PyObject_RichCompareBool(PyTuple_GET_ITEM(a, i), el, Py_EQ);
360
    return cmp;
361
}
362
363
static PyObject *
364
tupleitem(PyTupleObject *a, Py_ssize_t i)
365
{
366
    if (i < 0 || i >= Py_SIZE(a)) {
  Branch (366:9): [True: 6, False: 2.43M]
  Branch (366:18): [True: 213, False: 2.43M]
367
        PyErr_SetString(PyExc_IndexError, "tuple index out of range");
368
        return NULL;
369
    }
370
    Py_INCREF(a->ob_item[i]);
371
    return a->ob_item[i];
372
}
373
374
PyObject *
375
_PyTuple_FromArray(PyObject *const *src, Py_ssize_t n)
376
{
377
    if (n == 0) {
  Branch (377:9): [True: 14.2M, False: 50.9M]
378
        return tuple_get_empty();
379
    }
380
381
    PyTupleObject *tuple = tuple_alloc(n);
382
    if (tuple == NULL) {
  Branch (382:9): [True: 0, False: 50.9M]
383
        return NULL;
384
    }
385
    PyObject **dst = tuple->ob_item;
386
    for (Py_ssize_t i = 0; i < n; i++) {
  Branch (386:28): [True: 98.2M, False: 50.9M]
387
        PyObject *item = src[i];
388
        Py_INCREF(item);
389
        dst[i] = item;
390
    }
391
    _PyObject_GC_TRACK(tuple);
392
    return (PyObject *)tuple;
393
}
394
395
PyObject *
396
_PyTuple_FromArraySteal(PyObject *const *src, Py_ssize_t n)
397
{
398
    if (n == 0) {
  Branch (398:9): [True: 2.06M, False: 9.64M]
399
        return tuple_get_empty();
400
    }
401
    PyTupleObject *tuple = tuple_alloc(n);
402
    if (tuple == NULL) {
  Branch (402:9): [True: 0, False: 9.64M]
403
        for (Py_ssize_t i = 0; i < n; i++) {
  Branch (403:32): [True: 0, False: 0]
404
            Py_DECREF(src[i]);
405
        }
406
        return NULL;
407
    }
408
    PyObject **dst = tuple->ob_item;
409
    for (Py_ssize_t i = 0; i < n; i++) {
  Branch (409:28): [True: 21.1M, False: 9.64M]
410
        PyObject *item = src[i];
411
        dst[i] = item;
412
    }
413
    _PyObject_GC_TRACK(tuple);
414
    return (PyObject *)tuple;
415
}
416
417
static PyObject *
418
tupleslice(PyTupleObject *a, Py_ssize_t ilow,
419
           Py_ssize_t ihigh)
420
{
421
    if (ilow < 0)
  Branch (421:9): [True: 0, False: 4.53M]
422
        ilow = 0;
423
    if (ihigh > Py_SIZE(a))
  Branch (423:9): [True: 28.6k, False: 4.50M]
424
        ihigh = Py_SIZE(a);
425
    if (ihigh < ilow)
  Branch (425:9): [True: 0, False: 4.53M]
426
        ihigh = ilow;
427
    if (ilow == 0 && ihigh == Py_SIZE(a) && PyTuple_CheckExact(a)) {
  Branch (427:9): [True: 150k, False: 4.38M]
  Branch (427:22): [True: 146, False: 150k]
428
        Py_INCREF(a);
429
        return (PyObject *)a;
430
    }
431
    return _PyTuple_FromArray(a->ob_item + ilow, ihigh - ilow);
432
}
433
434
PyObject *
435
PyTuple_GetSlice(PyObject *op, Py_ssize_t i, Py_ssize_t j)
436
{
437
    if (op == NULL || !PyTuple_Check(op)) {
  Branch (437:9): [True: 0, False: 4.53M]
  Branch (437:23): [True: 0, False: 4.53M]
438
        PyErr_BadInternalCall();
439
        return NULL;
440
    }
441
    return tupleslice((PyTupleObject *)op, i, j);
442
}
443
444
static PyObject *
445
tupleconcat(PyTupleObject *a, PyObject *bb)
446
{
447
    Py_ssize_t size;
448
    Py_ssize_t i;
449
    PyObject **src, **dest;
450
    PyTupleObject *np;
451
    if (Py_SIZE(a) == 0 && PyTuple_CheckExact(bb)) {
  Branch (451:9): [True: 10.0k, False: 49.5k]
452
        Py_INCREF(bb);
453
        return bb;
454
    }
455
    if (!PyTuple_Check(bb)) {
  Branch (455:9): [True: 1, False: 49.5k]
456
        PyErr_Format(PyExc_TypeError,
457
             "can only concatenate tuple (not \"%.200s\") to tuple",
458
                 Py_TYPE(bb)->tp_name);
459
        return NULL;
460
    }
461
    PyTupleObject *b = (PyTupleObject *)bb;
462
463
    if (Py_SIZE(b) == 0 && PyTuple_CheckExact(a)) {
  Branch (463:9): [True: 10.2k, False: 39.2k]
464
        Py_INCREF(a);
465
        return (PyObject *)a;
466
    }
467
    assert((size_t)Py_SIZE(a) + (size_t)Py_SIZE(b) < PY_SSIZE_T_MAX);
468
    size = Py_SIZE(a) + Py_SIZE(b);
469
    if (size == 0) {
  Branch (469:9): [True: 1, False: 39.2k]
470
        return tuple_get_empty();
471
    }
472
473
    np = tuple_alloc(size);
474
    if (np == NULL) {
  Branch (474:9): [True: 0, False: 39.2k]
475
        return NULL;
476
    }
477
    src = a->ob_item;
478
    dest = np->ob_item;
479
    for (i = 0; i < Py_SIZE(a); i++) {
  Branch (479:17): [True: 161k, False: 39.2k]
480
        PyObject *v = src[i];
481
        Py_INCREF(v);
482
        dest[i] = v;
483
    }
484
    src = b->ob_item;
485
    dest = np->ob_item + Py_SIZE(a);
486
    for (i = 0; i < Py_SIZE(b); i++) {
  Branch (486:17): [True: 91.9k, False: 39.2k]
487
        PyObject *v = src[i];
488
        Py_INCREF(v);
489
        dest[i] = v;
490
    }
491
    _PyObject_GC_TRACK(np);
492
    return (PyObject *)np;
493
}
494
495
static PyObject *
496
tuplerepeat(PyTupleObject *a, Py_ssize_t n)
497
{
498
    Py_ssize_t size;
499
    PyTupleObject *np;
500
    if (Py_SIZE(a) == 0 || n == 1) {
  Branch (500:9): [True: 48, False: 10.7k]
  Branch (500:28): [True: 7.12k, False: 3.67k]
501
        if (PyTuple_CheckExact(a)) {
502
            /* Since tuples are immutable, we can return a shared
503
               copy in this case */
504
            Py_INCREF(a);
505
            return (PyObject *)a;
506
        }
507
    }
508
    if (Py_SIZE(a) == 0 || n <= 0) {
  Branch (508:9): [True: 3, False: 3.67k]
  Branch (508:28): [True: 2.88k, False: 793]
509
        return tuple_get_empty();
510
    }
511
    if (n > PY_SSIZE_T_MAX / Py_SIZE(a))
  Branch (511:9): [True: 0, False: 793]
512
        return PyErr_NoMemory();
513
    size = Py_SIZE(a) * n;
514
    np = tuple_alloc(size);
515
    if (np == NULL)
  Branch (515:9): [True: 0, False: 793]
516
        return NULL;
517
    PyObject **dest = np->ob_item;
518
    PyObject **dest_end = dest + size;
519
    if (Py_SIZE(a) == 1) {
  Branch (519:9): [True: 728, False: 65]
520
        PyObject *elem = a->ob_item[0];
521
        Py_SET_REFCNT(elem, Py_REFCNT(elem) + n);
522
#ifdef Py_REF_DEBUG
523
        _Py_RefTotal += n;
524
#endif
525
        while (dest < dest_end) {
  Branch (525:16): [True: 1.08M, False: 728]
526
            *dest++ = elem;
527
        }
528
    }
529
    else {
530
        PyObject **src = a->ob_item;
531
        PyObject **src_end = src + Py_SIZE(a);
532
        while (src < src_end) {
  Branch (532:16): [True: 15.6k, False: 65]
533
            Py_SET_REFCNT(*src, Py_REFCNT(*src) + n);
534
#ifdef Py_REF_DEBUG
535
            _Py_RefTotal += n;
536
#endif
537
            *dest++ = *src++;
538
        }
539
        // Now src chases after dest in the same buffer
540
        src = np->ob_item;
541
        while (dest < dest_end) {
  Branch (541:16): [True: 41.6k, False: 65]
542
            *dest++ = *src++;
543
        }
544
    }
545
    _PyObject_GC_TRACK(np);
546
    return (PyObject *) np;
547
}
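
The else branch above fills the result with a "src chases after dest" copy: once the first Py_SIZE(a) slots are written, src walks the buffer from its start while dest stays exactly one chunk ahead, so every read hits a slot that was already filled. A minimal standalone sketch of that fill, using ints instead of PyObject pointers:

/* Illustrative sketch, not CPython code. */
#include <stddef.h>

static void
repeat_fill(int *buf, size_t chunk, size_t total)  /* total is a multiple of chunk */
{
    int *dest = buf + chunk;        /* the first chunk is assumed to be filled */
    int *dest_end = buf + total;
    const int *src = buf;           /* chases dest, always 'chunk' slots behind */
    while (dest < dest_end) {
        *dest++ = *src++;
    }
}

With buf starting as {1, 2, 3, ?, ?, ?, ?, ?, ?}, chunk = 3 and total = 9, the loop leaves {1, 2, 3, 1, 2, 3, 1, 2, 3}.
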
548
549
/*[clinic input]
550
tuple.index
551
552
    value: object
553
    start: slice_index(accept={int}) = 0
554
    stop: slice_index(accept={int}, c_default="PY_SSIZE_T_MAX") = sys.maxsize
555
    /
556
557
Return first index of value.
558
559
Raises ValueError if the value is not present.
560
[clinic start generated code]*/
561
562
static PyObject *
563
tuple_index_impl(PyTupleObject *self, PyObject *value, Py_ssize_t start,
564
                 Py_ssize_t stop)
565
/*[clinic end generated code: output=07b6f9f3cb5c33eb input=fb39e9874a21fe3f]*/
566
{
567
    Py_ssize_t i;
568
569
    if (start < 0) {
  Branch (569:9): [True: 6, False: 791]
570
        start += Py_SIZE(self);
571
        if (start < 0)
  Branch (571:13): [True: 3, False: 3]
572
            start = 0;
573
    }
574
    if (stop < 0) {
  Branch (574:9): [True: 4, False: 793]
575
        stop += Py_SIZE(self);
576
    }
577
    else if (stop > Py_SIZE(self)) {
  Branch (577:14): [True: 791, False: 2]
578
        stop = Py_SIZE(self);
579
    }
580
    for (i = start; i < stop; i++) {
  Branch (580:21): [True: 1.19k, False: 6]
581
        int cmp = PyObject_RichCompareBool(self->ob_item[i], value, Py_EQ);
582
        if (cmp > 0)
  Branch (582:13): [True: 790, False: 401]
583
            return PyLong_FromSsize_t(i);
584
        else if (cmp < 0)
  Branch (584:18): [True: 1, False: 400]
585
            return NULL;
586
    }
587
    PyErr_SetString(PyExc_ValueError, "tuple.index(x): x not in tuple");
588
    return NULL;
589
}
590
591
/*[clinic input]
592
tuple.count
593
594
     value: object
595
     /
596
597
Return number of occurrences of value.
598
[clinic start generated code]*/
599
600
static PyObject *
601
tuple_count(PyTupleObject *self, PyObject *value)
602
/*[clinic end generated code: output=aa927affc5a97605 input=531721aff65bd772]*/
603
{
604
    Py_ssize_t count = 0;
605
    Py_ssize_t i;
606
607
    for (i = 0; i < Py_SIZE(self); i++) {
  Branch (607:17): [True: 603, False: 182]
608
        int cmp = PyObject_RichCompareBool(self->ob_item[i], value, Py_EQ);
609
        if (cmp > 0)
  Branch (609:13): [True: 341, False: 262]
610
            count++;
611
        else if (cmp < 0)
  Branch (611:18): [True: 1, False: 261]
612
            return NULL;
613
    }
614
    return PyLong_FromSsize_t(count);
615
}
616
617
static int
618
tupletraverse(PyTupleObject *o, visitproc visit, void *arg)
619
{
620
    Py_ssize_t i;
621
622
    for (i = Py_SIZE(o); --i >= 0; )
  Branch (622:26): [True: 1.33G, False: 530M]
623
        Py_VISIT(o->ob_item[i]);
624
    return 0;
625
}
626
627
static PyObject *
628
tuplerichcompare(PyObject *v, PyObject *w, int op)
629
{
630
    PyTupleObject *vt, *wt;
631
    Py_ssize_t i;
632
    Py_ssize_t vlen, wlen;
633
634
    if (!PyTuple_Check(v) || !PyTuple_Check(w))
  Branch (634:9): [True: 0, False: 8.09M]
  Branch (634:30): [True: 38.0k, False: 8.05M]
635
        Py_RETURN_NOTIMPLEMENTED;
636
637
    vt = (PyTupleObject *)v;
638
    wt = (PyTupleObject *)w;
639
640
    vlen = Py_SIZE(vt);
641
    wlen = Py_SIZE(wt);
642
643
    /* Note:  the corresponding code for lists has an "early out" test
644
     * here when op is EQ or NE and the lengths differ.  That pays there,
645
     * but Tim was unable to find any real code where EQ/NE tuple
646
     * compares don't have the same length, so testing for it here would
647
     * have cost without benefit.
648
     */
649
650
    /* Search for the first index where items are different.
651
     * Note that because tuples are immutable, it's safe to reuse
652
     * vlen and wlen across the comparison calls.
653
     */
654
    for (i = 0; i < vlen && i < wlen; i++) {
  Branch (654:17): [True: 14.0M, False: 4.57M]
  Branch (654:29): [True: 14.0M, False: 436]
655
        int k = PyObject_RichCompareBool(vt->ob_item[i],
656
                                         wt->ob_item[i], Py_EQ);
657
        if (k < 0)
  Branch (657:13): [True: 2.90k, False: 14.0M]
658
            return NULL;
659
        if (!k)
  Branch (659:13): [True: 3.47M, False: 10.5M]
660
            break;
661
    }
662
663
    if (i >= vlen || i >= wlen) {
  Branch (663:9): [True: 4.57M, False: 3.47M]
  Branch (663:22): [True: 436, False: 3.47M]
664
        /* No more items to compare -- compare sizes */
665
        Py_RETURN_RICHCOMPARE(vlen, wlen, op);
666
    }
667
668
    /* We have an item that differs -- shortcuts for EQ/NE */
669
    if (op == Py_EQ) {
  Branch (669:9): [True: 2.70M, False: 766k]
670
        Py_RETURN_FALSE;
671
    }
672
    if (op == Py_NE) {
  Branch (672:9): [True: 91.8k, False: 674k]
673
        Py_RETURN_TRUE;
674
    }
675
676
    /* Compare the final item again using the proper operator */
677
    return PyObject_RichCompare(vt->ob_item[i], wt->ob_item[i], op);
678
}
679
680
static PyObject *
681
tuple_subtype_new(PyTypeObject *type, PyObject *iterable);
682
683
/*[clinic input]
684
@classmethod
685
tuple.__new__ as tuple_new
686
    iterable: object(c_default="NULL") = ()
687
    /
688
689
Built-in immutable sequence.
690
691
If no argument is given, the constructor returns an empty tuple.
692
If iterable is specified the tuple is initialized from iterable's items.
693
694
If the argument is a tuple, the return value is the same object.
695
[clinic start generated code]*/
696
697
static PyObject *
698
tuple_new_impl(PyTypeObject *type, PyObject *iterable)
699
/*[clinic end generated code: output=4546d9f0d469bce7 input=86963bcde633b5a2]*/
700
{
701
    if (type != &PyTuple_Type)
  Branch (701:9): [True: 1.01M, False: 1.04M]
702
        return tuple_subtype_new(type, iterable);
703
704
    if (iterable == NULL) {
  Branch (704:9): [True: 21, False: 1.04M]
705
        return tuple_get_empty();
706
    }
707
    else {
708
        return PySequence_Tuple(iterable);
709
    }
710
}
711
712
static PyObject *
713
tuple_vectorcall(PyObject *type, PyObject * const*args,
714
                 size_t nargsf, PyObject *kwnames)
715
{
716
    if (!_PyArg_NoKwnames("tuple", kwnames)) {
717
        return NULL;
718
    }
719
720
    Py_ssize_t nargs = PyVectorcall_NARGS(nargsf);
721
    if (!_PyArg_CheckPositional("tuple", nargs, 0, 1)) {
722
        return NULL;
723
    }
724
725
    if (nargs) {
  Branch (725:9): [True: 31.4k, False: 284]
726
        return tuple_new_impl(_PyType_CAST(type), args[0]);
727
    }
728
    else {
729
        return tuple_get_empty();
730
    }
731
}
732
733
static PyObject *
734
tuple_subtype_new(PyTypeObject *type, PyObject *iterable)
735
{
736
    PyObject *tmp, *newobj, *item;
737
    Py_ssize_t i, n;
738
739
    assert(PyType_IsSubtype(type, &PyTuple_Type));
740
    // tuple subclasses must implement the GC protocol
741
    assert(_PyType_IS_GC(type));
742
743
    tmp = tuple_new_impl(&PyTuple_Type, iterable);
744
    if (tmp == NULL)
  Branch (744:9): [True: 0, False: 1.01M]
745
        return NULL;
746
    assert(PyTuple_Check(tmp));
747
    /* This may allocate an empty tuple that is not the global one. */
748
    newobj = type->tp_alloc(type, n = PyTuple_GET_SIZE(tmp));
749
    if (newobj == NULL) {
  Branch (749:9): [True: 0, False: 1.01M]
750
        Py_DECREF(tmp);
751
        return NULL;
752
    }
753
    for (i = 0; i < n; i++) {
  Branch (753:17): [True: 4.95M, False: 1.01M]
754
        item = PyTuple_GET_ITEM(tmp, i);
755
        Py_INCREF(item);
756
        PyTuple_SET_ITEM(newobj, i, item);
757
    }
758
    Py_DECREF(tmp);
759
760
    // Don't track if a subclass tp_alloc is PyType_GenericAlloc()
761
    if (!_PyObject_GC_IS_TRACKED(newobj)) {
  Branch (761:9): [True: 0, False: 1.01M]
762
        _PyObject_GC_TRACK(newobj);
763
    }
764
    return newobj;
765
}
766
767
static PySequenceMethods tuple_as_sequence = {
768
    (lenfunc)tuplelength,                       /* sq_length */
769
    (binaryfunc)tupleconcat,                    /* sq_concat */
770
    (ssizeargfunc)tuplerepeat,                  /* sq_repeat */
771
    (ssizeargfunc)tupleitem,                    /* sq_item */
772
    0,                                          /* sq_slice */
773
    0,                                          /* sq_ass_item */
774
    0,                                          /* sq_ass_slice */
775
    (objobjproc)tuplecontains,                  /* sq_contains */
776
};
777
778
static PyObject*
779
tuplesubscript(PyTupleObject* self, PyObject* item)
780
{
781
    if (_PyIndex_Check(item)) {
  Branch (781:9): [True: 1.24M, False: 935k]
782
        Py_ssize_t i = PyNumber_AsSsize_t(item, PyExc_IndexError);
783
        if (i == -1 && PyErr_Occurred())
  Branch (783:13): [True: 1.04M, False: 194k]
  Branch (783:24): [True: 2, False: 1.04M]
784
            return NULL;
785
        if (i < 0)
  Branch (785:13): [True: 1.04M, False: 193k]
786
            i += PyTuple_GET_SIZE(self);
787
        return tupleitem(self, i);
788
    }
789
    else if (PySlice_Check(item)) {
790
        Py_ssize_t start, stop, step, slicelength, i;
791
        size_t cur;
792
        PyObject* it;
793
        PyObject **src, **dest;
794
795
        if (PySlice_Unpack(item, &start, &stop, &step) < 0) {
  Branch (795:13): [True: 4, False: 935k]
796
            return NULL;
797
        }
798
        slicelength = PySlice_AdjustIndices(PyTuple_GET_SIZE(self), &start,
799
                                            &stop, step);
800
801
        if (slicelength <= 0) {
  Branch (801:13): [True: 362k, False: 573k]
802
            return tuple_get_empty();
803
        }
804
        else if (start == 0 && step == 1 &&
  Branch (804:18): [True: 366k, False: 206k]
  Branch (804:32): [True: 317k, False: 49.7k]
805
                 slicelength == PyTuple_GET_SIZE(self) &&
  Branch (805:18): [True: 77.0k, False: 239k]
806
                 PyTuple_CheckExact(self)) {
807
            Py_INCREF(self);
808
            return (PyObject *)self;
809
        }
810
        else {
811
            PyTupleObject* result = tuple_alloc(slicelength);
812
            if (!result) return NULL;
  Branch (812:17): [True: 0, False: 495k]
813
814
            src = self->ob_item;
815
            dest = result->ob_item;
816
            for (cur = start, i = 0; i < slicelength;
  Branch (816:38): [True: 1.09M, False: 495k]
817
                 cur += step, i++) {
818
                it = src[cur];
819
                Py_INCREF(it);
820
                dest[i] = it;
821
            }
822
823
            _PyObject_GC_TRACK(result);
824
            return (PyObject *)result;
825
        }
826
    }
827
    else {
828
        PyErr_Format(PyExc_TypeError,
829
                     "tuple indices must be integers or slices, not %.200s",
830
                     Py_TYPE(item)->tp_name);
831
        return NULL;
832
    }
833
}
834
835
/*[clinic input]
836
tuple.__getnewargs__
837
[clinic start generated code]*/
838
839
static PyObject *
840
tuple___getnewargs___impl(PyTupleObject *self)
841
/*[clinic end generated code: output=25e06e3ee56027e2 input=1aeb4b286a21639a]*/
842
{
843
    return Py_BuildValue("(N)", tupleslice(self, 0, Py_SIZE(self)));
844
}
845
846
static PyMethodDef tuple_methods[] = {
847
    TUPLE___GETNEWARGS___METHODDEF
848
    TUPLE_INDEX_METHODDEF
849
    TUPLE_COUNT_METHODDEF
850
    {"__class_getitem__", Py_GenericAlias, METH_O|METH_CLASS, PyDoc_STR("See PEP 585")},
851
    {NULL,              NULL}           /* sentinel */
852
};
853
854
static PyMappingMethods tuple_as_mapping = {
855
    (lenfunc)tuplelength,
856
    (binaryfunc)tuplesubscript,
857
    0
858
};
859
860
static PyObject *tuple_iter(PyObject *seq);
861
862
PyTypeObject PyTuple_Type = {
863
    PyVarObject_HEAD_INIT(&PyType_Type, 0)
864
    "tuple",
865
    sizeof(PyTupleObject) - sizeof(PyObject *),
866
    sizeof(PyObject *),
867
    (destructor)tupledealloc,                   /* tp_dealloc */
868
    0,                                          /* tp_vectorcall_offset */
869
    0,                                          /* tp_getattr */
870
    0,                                          /* tp_setattr */
871
    0,                                          /* tp_as_async */
872
    (reprfunc)tuplerepr,                        /* tp_repr */
873
    0,                                          /* tp_as_number */
874
    &tuple_as_sequence,                         /* tp_as_sequence */
875
    &tuple_as_mapping,                          /* tp_as_mapping */
876
    (hashfunc)tuplehash,                        /* tp_hash */
877
    0,                                          /* tp_call */
878
    0,                                          /* tp_str */
879
    PyObject_GenericGetAttr,                    /* tp_getattro */
880
    0,                                          /* tp_setattro */
881
    0,                                          /* tp_as_buffer */
882
    Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC |
883
        Py_TPFLAGS_BASETYPE | Py_TPFLAGS_TUPLE_SUBCLASS |
884
        _Py_TPFLAGS_MATCH_SELF | Py_TPFLAGS_SEQUENCE,  /* tp_flags */
885
    tuple_new__doc__,                           /* tp_doc */
886
    (traverseproc)tupletraverse,                /* tp_traverse */
887
    0,                                          /* tp_clear */
888
    tuplerichcompare,                           /* tp_richcompare */
889
    0,                                          /* tp_weaklistoffset */
890
    tuple_iter,                                 /* tp_iter */
891
    0,                                          /* tp_iternext */
892
    tuple_methods,                              /* tp_methods */
893
    0,                                          /* tp_members */
894
    0,                                          /* tp_getset */
895
    0,                                          /* tp_base */
896
    0,                                          /* tp_dict */
897
    0,                                          /* tp_descr_get */
898
    0,                                          /* tp_descr_set */
899
    0,                                          /* tp_dictoffset */
900
    0,                                          /* tp_init */
901
    0,                                          /* tp_alloc */
902
    tuple_new,                                  /* tp_new */
903
    PyObject_GC_Del,                            /* tp_free */
904
    .tp_vectorcall = tuple_vectorcall,
905
};
906
907
/* The following function breaks the notion that tuples are immutable:
908
   it changes the size of a tuple.  We get away with this only if there
909
   is only one module referencing the object.  You can also think of it
910
   as creating a new tuple object and destroying the old one, only more
911
   efficiently.  In any case, don't use this if the tuple may already be
912
   known to some other part of the code. */
913
914
int
915
_PyTuple_Resize(PyObject **pv, Py_ssize_t newsize)
916
{
917
    PyTupleObject *v;
918
    PyTupleObject *sv;
919
    Py_ssize_t i;
920
    Py_ssize_t oldsize;
921
922
    v = (PyTupleObject *) *pv;
923
    if (v == NULL || !Py_IS_TYPE(v, &PyTuple_Type) ||
  Branch (923:9): [True: 0, False: 141k]
  Branch (923:22): [True: 0, False: 141k]
924
        (Py_SIZE(v) != 0 && Py_REFCNT(v) != 1)) {
  Branch (924:10): [True: 141k, False: 1]
  Branch (924:29): [True: 0, False: 141k]
925
        *pv = 0;
926
        Py_XDECREF(v);
927
        PyErr_BadInternalCall();
928
        return -1;
929
    }
930
931
    oldsize = Py_SIZE(v);
932
    if (oldsize == newsize) {
  Branch (932:9): [True: 3.68k, False: 137k]
933
        return 0;
934
    }
935
    if (newsize == 0) {
  Branch (935:9): [True: 1.82k, False: 135k]
936
        Py_DECREF(v);
937
        *pv = tuple_get_empty();
938
        return 0;
939
    }
940
    if (oldsize == 0) {
  Branch (940:9): [True: 0, False: 135k]
941
#ifdef Py_DEBUG
942
        assert(v == &_Py_SINGLETON(tuple_empty));
943
#endif
944
        /* The empty tuple is statically allocated so we never
945
           resize it in-place. */
946
        Py_DECREF(v);
947
        *pv = PyTuple_New(newsize);
948
        return *pv == NULL ? -1 : 0;
  Branch (948:16): [True: 0, False: 0]
949
    }
950
951
    /* XXX UNREF/NEWREF interface should be more symmetrical */
952
#ifdef Py_REF_DEBUG
953
    _Py_RefTotal--;
954
#endif
955
    if (_PyObject_GC_IS_TRACKED(v)) {
956
        _PyObject_GC_UNTRACK(v);
957
    }
958
#ifdef Py_TRACE_REFS
959
    _Py_ForgetReference((PyObject *) v);
960
#endif
961
    /* DECREF items deleted by shrinkage */
962
    for (i = newsize; i < oldsize; i++) {
  Branch (962:23): [True: 1.03M, False: 135k]
963
        Py_CLEAR(v->ob_item[i]);
964
    }
965
    sv = PyObject_GC_Resize(PyTupleObject, v, newsize);
966
    if (sv == NULL) {
  Branch (966:9): [True: 0, False: 135k]
967
        *pv = NULL;
968
        PyObject_GC_Del(v);
969
        return -1;
970
    }
971
    _Py_NewReference((PyObject *) sv);
972
    /* Zero out items added by growing */
973
    if (newsize > oldsize)
  Branch (973:9): [True: 6.19k, False: 129k]
974
        memset(&sv->ob_item[oldsize], 0,
975
               sizeof(*sv->ob_item) * (newsize - oldsize));
976
    *pv = (PyObject *) sv;
977
    _PyObject_GC_TRACK(sv);
978
    return 0;
979
}
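
A sketch of the usage pattern the comment above _PyTuple_Resize() has in mind: over-allocate while the tuple is still private to one function, then shrink it to the final size once the element count is known. The collect_items name and the produce_item callback are hypothetical; this is roughly the pattern PySequence_Tuple() relies on.

/* Illustrative sketch, not CPython code. */
static PyObject *
collect_items(Py_ssize_t guess, PyObject *(*produce_item)(Py_ssize_t))
{
    PyObject *t = PyTuple_New(guess);
    if (t == NULL) {
        return NULL;
    }
    Py_ssize_t n = 0;
    for (; n < guess; n++) {
        PyObject *item = produce_item(n);    /* NULL here means "no more items" */
        if (item == NULL) {
            break;
        }
        PyTuple_SET_ITEM(t, n, item);        /* steals the reference */
    }
    /* Only this function holds a reference to t, so resizing in place is safe. */
    if (_PyTuple_Resize(&t, n) < 0) {
        return NULL;                         /* t has already been released */
    }
    return t;
}
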
980
981
982
PyStatus
983
_PyTuple_InitTypes(PyInterpreterState *interp)
984
{
985
    if (!_Py_IsMainInterpreter(interp)) {
  Branch (985:9): [True: 171, False: 107]
986
        return _PyStatus_OK();
987
    }
988
989
    if (PyType_Ready(&PyTuple_Type) < 0) {
  Branch (989:9): [True: 0, False: 107]
990
        return _PyStatus_ERR("Can't initialize tuple type");
991
    }
992
993
    if (PyType_Ready(&PyTupleIter_Type) < 0) {
  Branch (993:9): [True: 0, False: 107]
994
        return _PyStatus_ERR("Can't initialize tuple iterator type");
995
    }
996
997
    return _PyStatus_OK();
998
}
999
1000
static void maybe_freelist_clear(PyInterpreterState *, int);
1001
1002
void
1003
_PyTuple_Fini(PyInterpreterState *interp)
1004
{
1005
    maybe_freelist_clear(interp, 1);
1006
}
1007
1008
void
1009
_PyTuple_ClearFreeList(PyInterpreterState *interp)
1010
{
1011
    maybe_freelist_clear(interp, 0);
1012
}
1013
1014
/*********************** Tuple Iterator **************************/
1015
1016
typedef struct {
1017
    PyObject_HEAD
1018
    Py_ssize_t it_index;
1019
    PyTupleObject *it_seq; /* Set to NULL when iterator is exhausted */
1020
} tupleiterobject;
1021
1022
static void
1023
tupleiter_dealloc(tupleiterobject *it)
1024
{
1025
    _PyObject_GC_UNTRACK(it);
1026
    Py_XDECREF(it->it_seq);
1027
    PyObject_GC_Del(it);
1028
}
1029
1030
static int
1031
tupleiter_traverse(tupleiterobject *it, visitproc visit, void *arg)
1032
{
1033
    Py_VISIT(it->it_seq);
1034
    return 0;
1035
}
1036
1037
static PyObject *
1038
tupleiter_next(tupleiterobject *it)
1039
{
1040
    PyTupleObject *seq;
1041
    PyObject *item;
1042
1043
    assert(it != NULL);
1044
    seq = it->it_seq;
1045
    if (seq == NULL)
  Branch (1045:9): [True: 87, False: 50.6M]
1046
        return NULL;
1047
    assert(PyTuple_Check(seq));
1048
1049
    if (it->it_index < PyTuple_GET_SIZE(seq)) {
  Branch (1049:9): [True: 34.6M, False: 16.0M]
1050
        item = PyTuple_GET_ITEM(seq, it->it_index);
1051
        ++it->it_index;
1052
        Py_INCREF(item);
1053
        return item;
1054
    }
1055
1056
    it->it_seq = NULL;
1057
    Py_DECREF(seq);
1058
    return NULL;
1059
}
1060
1061
static PyObject *
1062
tupleiter_len(tupleiterobject *it, PyObject *Py_UNUSED(ignored))
1063
{
1064
    Py_ssize_t len = 0;
1065
    if (it->it_seq)
  Branch (1065:9): [True: 97.5k, False: 2]
1066
        len = PyTuple_GET_SIZE(it->it_seq) - it->it_index;
1067
    return PyLong_FromSsize_t(len);
1068
}
1069
1070
PyDoc_STRVAR(length_hint_doc, "Private method returning an estimate of len(list(it)).");
1071
1072
static PyObject *
1073
tupleiter_reduce(tupleiterobject *it, PyObject *Py_UNUSED(ignored))
1074
{
1075
    if (it->it_seq)
  Branch (1075:9): [True: 168, False: 0]
1076
        return Py_BuildValue("N(O)n", _PyEval_GetBuiltin(&_Py_ID(iter)),
1077
                             it->it_seq, it->it_index);
1078
    else
1079
        return Py_BuildValue("N(())", _PyEval_GetBuiltin(&_Py_ID(iter)));
1080
}
1081
1082
static PyObject *
1083
tupleiter_setstate(tupleiterobject *it, PyObject *state)
1084
{
1085
    Py_ssize_t index = PyLong_AsSsize_t(state);
1086
    if (index == -1 && PyErr_Occurred())
  Branch (1086:9): [True: 0, False: 216]
  Branch (1086:24): [True: 0, False: 0]
1087
        return NULL;
1088
    if (it->it_seq != NULL) {
  Branch (1088:9): [True: 216, False: 0]
1089
        if (index < 0)
  Branch (1089:13): [True: 0, False: 216]
1090
            index = 0;
1091
        else if (index > PyTuple_GET_SIZE(it->it_seq))
  Branch (1091:18): [True: 0, False: 216]
1092
            index = PyTuple_GET_SIZE(it->it_seq); /* exhausted iterator */
1093
        it->it_index = index;
1094
    }
1095
    Py_RETURN_NONE;
1096
}
1097
1098
PyDoc_STRVAR(reduce_doc, "Return state information for pickling.");
1099
PyDoc_STRVAR(setstate_doc, "Set state information for unpickling.");
1100
1101
static PyMethodDef tupleiter_methods[] = {
1102
    {"__length_hint__", (PyCFunction)tupleiter_len, METH_NOARGS, length_hint_doc},
1103
    {"__reduce__", (PyCFunction)tupleiter_reduce, METH_NOARGS, reduce_doc},
1104
    {"__setstate__", (PyCFunction)tupleiter_setstate, METH_O, setstate_doc},
1105
    {NULL,              NULL}           /* sentinel */
1106
};
1107
1108
PyTypeObject PyTupleIter_Type = {
1109
    PyVarObject_HEAD_INIT(&PyType_Type, 0)
1110
    "tuple_iterator",                           /* tp_name */
1111
    sizeof(tupleiterobject),                    /* tp_basicsize */
1112
    0,                                          /* tp_itemsize */
1113
    /* methods */
1114
    (destructor)tupleiter_dealloc,              /* tp_dealloc */
1115
    0,                                          /* tp_vectorcall_offset */
1116
    0,                                          /* tp_getattr */
1117
    0,                                          /* tp_setattr */
1118
    0,                                          /* tp_as_async */
1119
    0,                                          /* tp_repr */
1120
    0,                                          /* tp_as_number */
1121
    0,                                          /* tp_as_sequence */
1122
    0,                                          /* tp_as_mapping */
1123
    0,                                          /* tp_hash */
1124
    0,                                          /* tp_call */
1125
    0,                                          /* tp_str */
1126
    PyObject_GenericGetAttr,                    /* tp_getattro */
1127
    0,                                          /* tp_setattro */
1128
    0,                                          /* tp_as_buffer */
1129
    Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC,/* tp_flags */
1130
    0,                                          /* tp_doc */
1131
    (traverseproc)tupleiter_traverse,           /* tp_traverse */
1132
    0,                                          /* tp_clear */
1133
    0,                                          /* tp_richcompare */
1134
    0,                                          /* tp_weaklistoffset */
1135
    PyObject_SelfIter,                          /* tp_iter */
1136
    (iternextfunc)tupleiter_next,               /* tp_iternext */
1137
    tupleiter_methods,                          /* tp_methods */
1138
    0,
1139
};
1140
1141
static PyObject *
1142
tuple_iter(PyObject *seq)
1143
{
1144
    tupleiterobject *it;
1145
1146
    if (!PyTuple_Check(seq)) {
  Branch (1146:9): [True: 0, False: 16.2M]
1147
        PyErr_BadInternalCall();
1148
        return NULL;
1149
    }
1150
    it = PyObject_GC_New(tupleiterobject, &PyTupleIter_Type);
1151
    if (it == NULL)
  Branch (1151:9): [True: 0, False: 16.2M]
1152
        return NULL;
1153
    it->it_index = 0;
1154
    Py_INCREF(seq);
1155
    it->it_seq = (PyTupleObject *)seq;
1156
    _PyObject_GC_TRACK(it);
1157
    return (PyObject *)it;
1158
}
1159
1160
1161
/*************
1162
 * freelists *
1163
 *************/
1164
1165
#define STATE (interp->tuple)
1166
#define FREELIST_FINALIZED (STATE.numfree[0] < 0)
1167
1168
static inline PyTupleObject *
1169
maybe_freelist_pop(Py_ssize_t size)
1170
{
1171
#if PyTuple_NFREELISTS > 0
1172
    PyInterpreterState *interp = _PyInterpreterState_GET();
1173
#ifdef Py_DEBUG
1174
    /* maybe_freelist_pop() must not be called after maybe_freelist_fini(). */
1175
    assert(!FREELIST_FINALIZED);
1176
#endif
1177
    if (size == 0) {
  Branch (1177:9): [True: 0, False: 120M]
1178
        return NULL;
1179
    }
1180
    assert(size > 0);
1181
    if (size < PyTuple_MAXSAVESIZE) {
  Branch (1181:9): [True: 120M, False: 57.5k]
1182
        Py_ssize_t index = size - 1;
1183
        PyTupleObject *op = STATE.free_list[index];
1184
        if (op != NULL) {
  Branch (1184:13): [True: 115M, False: 4.32M]
1185
            /* op is the head of a linked list, with the first item
1186
               pointing to the next node.  Here we pop off the old head. */
1187
            STATE.free_list[index] = (PyTupleObject *) op->ob_item[0];
1188
            STATE.numfree[index]--;
1189
            /* Inlined _PyObject_InitVar() without _PyType_HasFeature() test */
1190
#ifdef Py_TRACE_REFS
1191
            /* maybe_freelist_push() ensures these were already set. */
1192
            // XXX Can we drop these?  See commit 68055ce6fe01 (GvR, Dec 1998).
1193
            Py_SET_SIZE(op, size);
1194
            Py_SET_TYPE(op, &PyTuple_Type);
1195
#endif
1196
            _Py_NewReference((PyObject *)op);
1197
            /* END inlined _PyObject_InitVar() */
1198
            OBJECT_STAT_INC(from_freelist);
1199
            return op;
1200
        }
1201
    }
1202
#endif
1203
    return NULL;
1204
}
1205
1206
static inline int
1207
maybe_freelist_push(PyTupleObject *op)
1208
{
1209
#if PyTuple_NFREELISTS > 0
1210
    PyInterpreterState *interp = _PyInterpreterState_GET();
1211
#ifdef Py_DEBUG
1212
    /* maybe_freelist_push() must not be called after maybe_freelist_fini(). */
1213
    assert(!FREELIST_FINALIZED);
1214
#endif
1215
    if (Py_SIZE(op) == 0) {
  Branch (1215:9): [True: 34, False: 121M]
1216
        return 0;
1217
    }
1218
    Py_ssize_t index = Py_SIZE(op) - 1;
1219
    if (index < PyTuple_NFREELISTS
  Branch (1219:9): [True: 121M, False: 59.8k]
1220
        && STATE.numfree[index] < PyTuple_MAXFREELIST
  Branch (1220:12): [True: 117M, False: 3.33M]
1221
        && Py_IS_TYPE(op, &PyTuple_Type))
1222
    {
1223
        /* op is the head of a linked list, with the first item
1224
           pointing to the next node.  Here we set op as the new head. */
1225
        op->ob_item[0] = (PyObject *) STATE.free_list[index];
1226
        STATE.free_list[index] = op;
1227
        STATE.numfree[index]++;
1228
        OBJECT_STAT_INC(to_freelist);
1229
        return 1;
1230
    }
1231
#endif
1232
    return 0;
1233
}
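
maybe_freelist_pop() and maybe_freelist_push() above keep one singly linked list per tuple size, threaded through ob_item[0] of the freed tuples themselves, so the freelist needs no extra node allocations. A generic sketch of that technique, independent of CPython:

/* Illustrative sketch, not CPython code: a freelist threaded through the
   first payload slot of the freed objects, like ob_item[0] above. */
#include <stddef.h>

typedef struct chunk {
    void *slot0;                /* reused as the "next" pointer while freed */
    /* ... rest of the payload ... */
} chunk;

static chunk *free_head = NULL;
static int numfree = 0;

static void
freelist_push(chunk *c)
{
    c->slot0 = free_head;       /* old head becomes the next node */
    free_head = c;
    numfree++;
}

static chunk *
freelist_pop(void)
{
    chunk *c = free_head;
    if (c != NULL) {
        free_head = (chunk *)c->slot0;   /* unlink the head */
        numfree--;
    }
    return c;                   /* NULL: fall back to the real allocator */
}
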
1234
1235
static void
1236
maybe_freelist_clear(PyInterpreterState *interp, int fini)
1237
{
1238
#if PyTuple_NFREELISTS > 0
1239
    for (Py_ssize_t i = 0; i < PyTuple_NFREELISTS; i++) {
  Branch (1239:28): [True: 270k, False: 13.5k]
1240
        PyTupleObject *p = STATE.free_list[i];
1241
        STATE.free_list[i] = NULL;
1242
        STATE.numfree[i] = fini ? -1 : 0;
  Branch (1242:28): [True: 5.44k, False: 264k]
1243
        while (p) {
  Branch (1243:16): [True: 971k, False: 270k]
1244
            PyTupleObject *q = p;
1245
            p = (PyTupleObject *)(p->ob_item[0]);
1246
            PyObject_GC_Del(q);
1247
        }
1248
    }
1249
#endif
1250
}
1251
1252
/* Print summary info about the state of the optimized allocator */
1253
void
1254
_PyTuple_DebugMallocStats(FILE *out)
1255
{
1256
#if PyTuple_NFREELISTS > 0
1257
    PyInterpreterState *interp = _PyInterpreterState_GET();
1258
    for (int i = 0; i < PyTuple_NFREELISTS; i++) {
  Branch (1258:21): [True: 0, False: 0]
1259
        int len = i + 1;
1260
        char buf[128];
1261
        PyOS_snprintf(buf, sizeof(buf),
1262
                      "free %d-sized PyTupleObject", len);
1263
        _PyDebugAllocatorStats(out, buf, STATE.numfree[i],
1264
                               _PyObject_VAR_SIZE(&PyTuple_Type, len));
1265
    }
1266
#endif
1267
}
1268
1269
#undef STATE
1270
#undef FREELIST_FINALIZED