Coverage Report

Created: 2022-07-08 09:39

/home/mdboom/Work/builds/cpython/Python/compile.c
Line | Count | Source
1
/*
2
 * This file compiles an abstract syntax tree (AST) into Python bytecode.
3
 *
4
 * The primary entry point is _PyAST_Compile(), which returns a
5
 * PyCodeObject.  The compiler makes several passes to build the code
6
 * object:
7
 *   1. Checks for future statements.  See future.c.
8
 *   2. Builds a symbol table.  See symtable.c.
9
 *   3. Generates code for basic blocks.  See compiler_mod() in this file.
10
 *   4. Assembles the basic blocks into final code.  See assemble() in
11
 *      this file.
12
 *   5. Optimizes the bytecode (peephole optimizations).
13
 *
14
 * Note that compiler_mod() suggests module, but the module ast type
15
 * (mod_ty) has cases for expressions and interactive statements.
16
 *
17
 * CAUTION: The VISIT_* macros abort the current function when they
18
 * encounter a problem. So don't invoke them when there is memory
19
 * which needs to be released. Code blocks are OK, as the compiler
20
 * structure takes care of releasing those.  Use the arena to manage
21
 * objects.
22
 */
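To make the CAUTION above concrete, here is a minimal illustrative sketch (not part of compile.c): EXAMPLE_VISIT stands in for the real VISIT_* macros, which return from the enclosing function on failure, and _PyArena_Malloc() is assumed to be available from the arena API.

#define EXAMPLE_VISIT(ok)  do { if (!(ok)) return 0; } while (0)

static int
example_leaky(struct compiler *c, int ok)
{
    char *buf = PyMem_Malloc(64);   /* heap memory that must be freed by hand */
    EXAMPLE_VISIT(ok);              /* an early return here would leak buf */
    PyMem_Free(buf);
    return 1;
}

static int
example_arena_safe(struct compiler *c, int ok)
{
    /* Arena memory is released when the arena is freed, so an early
       return from EXAMPLE_VISIT() cannot leak it. */
    void *buf = _PyArena_Malloc(c->c_arena, 64);
    EXAMPLE_VISIT(ok);
    return buf != NULL;
}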
23
24
#include <stdbool.h>
25
26
// Need _PyOpcode_RelativeJump from pycore_opcode.h
27
#define NEED_OPCODE_TABLES
28
29
#include "Python.h"
30
#include "pycore_ast.h"           // _PyAST_GetDocString()
31
#include "pycore_code.h"          // _PyCode_New()
32
#include "pycore_compile.h"       // _PyFuture_FromAST()
33
#include "pycore_long.h"          // _PyLong_GetZero()
34
#include "pycore_opcode.h"        // _PyOpcode_Caches
35
#include "pycore_pymem.h"         // _PyMem_IsPtrFreed()
36
#include "pycore_symtable.h"      // PySTEntryObject
37
38
39
#define DEFAULT_BLOCK_SIZE 16
40
#define DEFAULT_CODE_SIZE 128
41
#define DEFAULT_LNOTAB_SIZE 16
42
#define DEFAULT_CNOTAB_SIZE 32
43
44
#define COMP_GENEXP   0
45
#define COMP_LISTCOMP 1
46
#define COMP_SETCOMP  2
47
#define COMP_DICTCOMP 3
48
49
/* A soft limit for stack use, to avoid excessive
50
 * memory use for large constants, etc.
51
 *
52
 * The value 30 is plucked out of thin air.
53
 * Code that could use more stack than this is
54
 * rare, so the exact value is unimportant.
55
 */
56
#define STACK_USE_GUIDELINE 30
57
58
/* If we exceed this limit, it should
59
 * be considered a compiler bug.
60
 * Currently it should be impossible
61
 * to exceed STACK_USE_GUIDELINE * 100,
62
 * as 100 is the maximum parse depth.
63
 * For performance reasons we will
64
 * want to reduce this to a
65
 * few hundred in the future.
66
 *
67
 * NOTE: Whatever MAX_ALLOWED_STACK_USE is
68
 * set to, it should never restrict what Python
69
 * we can write, just how we compile it.
70
 */
71
#define MAX_ALLOWED_STACK_USE (STACK_USE_GUIDELINE * 100)
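As a quick sanity check on the arithmetic above (illustrative only, not in the original file): 30 * 100 gives a hard cap of 3000 stack slots.

#if MAX_ALLOWED_STACK_USE != 3000
#  error "STACK_USE_GUIDELINE or the parse-depth factor changed; revisit this limit"
#endif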
72
73
74
#define MAX_REAL_OPCODE 254
75
76
#define IS_WITHIN_OPCODE_RANGE(opcode) \
77
        (((opcode) >= 0 && (opcode) <= MAX_REAL_OPCODE) || \
78
         IS_PSEUDO_OPCODE(opcode))
79
80
#define IS_JUMP_OPCODE(opcode) \
81
         is_bit_set_in_table(_PyOpcode_Jump, opcode)
82
83
#define IS_BLOCK_PUSH_OPCODE(opcode) \
84
        ((opcode) == SETUP_FINALLY || \
85
         (opcode) == SETUP_WITH || \        (count: 15.1M)
86
         (opcode) == SETUP_CLEANUP)         (count: 15.1M)
87
88
/* opcodes which are not emitted in codegen stage, only by the assembler */
89
#define IS_ASSEMBLER_OPCODE(opcode) \
90
        ((opcode) == JUMP_FORWARD || \
91
         (opcode) == JUMP_BACKWARD || \
92
         (opcode) == JUMP_BACKWARD_NO_INTERRUPT || \
93
         (opcode) == POP_JUMP_FORWARD_IF_NONE || \
94
         (opcode) == POP_JUMP_BACKWARD_IF_NONE || \
95
         (opcode) == POP_JUMP_FORWARD_IF_NOT_NONE || \
96
         (opcode) == POP_JUMP_BACKWARD_IF_NOT_NONE || \
97
         (opcode) == POP_JUMP_FORWARD_IF_TRUE || \
98
         (opcode) == POP_JUMP_BACKWARD_IF_TRUE || \
99
         (opcode) == POP_JUMP_FORWARD_IF_FALSE || \
100
         (opcode) == POP_JUMP_BACKWARD_IF_FALSE)
101
102
#define IS_BACKWARDS_JUMP_OPCODE(opcode) \
103
        ((opcode) == JUMP_BACKWARD || \
104
         (opcode) == JUMP_BACKWARD_NO_INTERRUPT || \
105
         (opcode) == POP_JUMP_BACKWARD_IF_NONE || \
106
         (opcode) == POP_JUMP_BACKWARD_IF_NOT_NONE || \
107
         (opcode) == POP_JUMP_BACKWARD_IF_TRUE || \
108
         (opcode) == POP_JUMP_BACKWARD_IF_FALSE)
109
110
#define IS_UNCONDITIONAL_JUMP_OPCODE(opcode) \
111
        ((opcode) == JUMP || \                      (count: 5.43M)
112
         (opcode) == JUMP_NO_INTERRUPT || \         (count: 5.34M)
113
         (opcode) == JUMP_FORWARD || \              (count: 5.34M)
114
         (opcode) == JUMP_BACKWARD || \             (count: 5.32M)
115
         (opcode) == JUMP_BACKWARD_NO_INTERRUPT)    (count: 5.30M)
116
117
#define IS_SCOPE_EXIT_OPCODE(opcode) \
118
        ((opcode) == RETURN_VALUE || \     (count: 11.2M)
119
         (opcode) == RAISE_VARARGS || \    (count: 10.8M)
120
         (opcode) == RERAISE)              (count: 10.7M)
121
122
#define IS_TOP_LEVEL_AWAIT(c) ( \
123
        (c->c_flags->cf_flags & PyCF_ALLOW_TOP_LEVEL_AWAIT) \
124
        && (c->u->u_ste->ste_type == ModuleBlock))    (count: 105)
125
126
struct location {
127
    int lineno;
128
    int end_lineno;
129
    int col_offset;
130
    int end_col_offset;
131
};
132
133
#define LOCATION(LNO, END_LNO, COL, END_COL) \
134
    ((const struct location){(LNO), (END_LNO), (COL), (END_COL)})
135
136
static struct location NO_LOCATION = {-1, -1, -1, -1};
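A minimal sketch (not part of the original file) showing how these location helpers are used:

static void
example_locations(void)
{
    /* An expression on source line 7, columns 4..12. */
    struct location loc = LOCATION(7, 7, 4, 12);
    /* Artificial (compiler-generated) instructions carry no position. */
    struct location none = NO_LOCATION;   /* all four fields are -1 */
    (void)loc;
    (void)none;
}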
137
138
struct instr {
139
    int i_opcode;
140
    int i_oparg;
141
    /* target block (if jump instruction) */
142
    struct basicblock_ *i_target;
143
     /* target block when exception is raised, should not be set by front-end. */
144
    struct basicblock_ *i_except;
145
    struct location i_loc;
146
};
147
148
typedef struct exceptstack {
149
    struct basicblock_ *handlers[CO_MAXBLOCKS+1];
150
    int depth;
151
} ExceptStack;
152
153
#define LOG_BITS_PER_INT 5
154
#define MASK_LOW_LOG_BITS 31
155
156
static inline int
157
is_bit_set_in_table(const uint32_t *table, int bitindex) {
158
    /* Is the relevant bit set in the relevant word? */
159
    /* 512 bits fit into 16 32-bit words.
160
     * The word is indexed by (bitindex >> log2(bits per word)).
161
     * Bit within word is the low bits of bitindex.
162
     */
163
    if (bitindex >= 0 && bitindex < 512) {
  Branch (163:9): [True: 24.7M, False: 0]
  Branch (163:26): [True: 24.7M, False: 0]
164
        uint32_t word = table[bitindex >> LOG_BITS_PER_INT];
165
        return (word >> (bitindex & MASK_LOW_LOG_BITS)) & 1;
166
    }
167
    else {
168
        return 0;
169
    }
170
}
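A worked example (illustrative, not in the original file; it assumes a 16-word table, which gives exactly the 512 bits the bounds check allows): bit index 90 lives in word 90 >> 5 == 2, at bit 90 & 31 == 26.

static void
example_bit_lookup(void)
{
    uint32_t table[16] = {0};               /* 16 * 32 == 512 bits */
    table[90 >> LOG_BITS_PER_INT] = (uint32_t)1 << (90 & MASK_LOW_LOG_BITS);
    assert(is_bit_set_in_table(table, 90) == 1);
    assert(is_bit_set_in_table(table, 91) == 0);
    assert(is_bit_set_in_table(table, 600) == 0);   /* out of range -> 0 */
}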
171
172
static inline int
173
is_relative_jump(struct instr *i)
174
{
175
    return is_bit_set_in_table(_PyOpcode_RelativeJump, i->i_opcode);
176
}
177
178
static inline int
179
is_block_push(struct instr *i)
180
{
181
    return IS_BLOCK_PUSH_OPCODE(i->i_opcode);
182
}
183
184
static inline int
185
is_jump(struct instr *i)
186
{
187
    return IS_JUMP_OPCODE(i->i_opcode);
188
}
189
190
static int
191
instr_size(struct instr *instruction)
192
{
193
    int opcode = instruction->i_opcode;
194
    assert(!IS_PSEUDO_OPCODE(opcode));
195
    int oparg = HAS_ARG(opcode) ? instruction->i_oparg : 0;    (counts: 11.7M / 1.92M)
196
    int extended_args = (0xFFFFFF < oparg) + (0xFFFF < oparg) + (0xFF < oparg);
197
    int caches = _PyOpcode_Caches[opcode];
198
    return extended_args + 1 + caches;
199
}
200
201
static void
202
write_instr(_Py_CODEUNIT *codestr, struct instr *instruction, int ilen)
203
{
204
    int opcode = instruction->i_opcode;
205
    assert(!IS_PSEUDO_OPCODE(opcode));
206
    int oparg = HAS_ARG(opcode) ? instruction->i_oparg : 0;    (counts: 2.21M / 377k)
207
    int caches = _PyOpcode_Caches[opcode];
208
    switch (ilen - caches) {
209
        case 4:
  Branch (209:9): [True: 0, False: 2.58M]
210
            *codestr++ = _Py_MAKECODEUNIT(EXTENDED_ARG_QUICK, (oparg >> 24) & 0xFF);
211
            /* fall through */
212
        case 3:
  Branch (212:9): [True: 0, False: 2.58M]
213
            *codestr++ = _Py_MAKECODEUNIT(EXTENDED_ARG_QUICK, (oparg >> 16) & 0xFF);
214
            /* fall through */
215
        case 2:
  Branch (215:9): [True: 146k, False: 2.44M]
216
            *codestr++ = _Py_MAKECODEUNIT(EXTENDED_ARG_QUICK, (oparg >> 8) & 0xFF);
217
            /* fall through */
218
        case 1:
  Branch (218:9): [True: 2.44M, False: 146k]
219
            *codestr++ = _Py_MAKECODEUNIT(opcode, oparg & 0xFF);
220
            break;
221
        default:
  Branch (221:9): [True: 0, False: 2.58M]
222
            Py_UNREACHABLE();
223
    }
224
    while (caches--) {    (count: 2.58M)
  Branch (224:12): [True: 1.27M, False: 2.58M]
225
        *codestr++ = _Py_MAKECODEUNIT(CACHE, 0);
226
    }
227
}
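A worked example of the EXTENDED_ARG encoding implemented by instr_size() and write_instr() above (illustrative, not in the original file):

/*
 * For an instruction whose oparg is 0x1ABCD:
 *
 *   extended_args = (0xFFFFFF < 0x1ABCD) + (0xFFFF < 0x1ABCD) + (0xFF < 0x1ABCD)
 *                 = 0 + 1 + 1 = 2
 *   instr_size()  = 2 + 1 + _PyOpcode_Caches[opcode]
 *
 * and write_instr() emits, in order:
 *
 *   EXTENDED_ARG_QUICK 0x01    ; (oparg >> 16) & 0xFF
 *   EXTENDED_ARG_QUICK 0xAB    ; (oparg >> 8) & 0xFF
 *   <opcode>           0xCD    ; oparg & 0xFF
 *   CACHE 0, ...               ; one unit per entry in _PyOpcode_Caches[opcode]
 */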
228
229
typedef struct basicblock_ {
230
    /* Each basicblock in a compilation unit is linked via b_list in the
231
       reverse order that the blocks are allocated.  b_list points to the next
232
       block, not to be confused with b_next, which is next by control flow. */
233
    struct basicblock_ *b_list;
234
    /* Exception stack at start of block, used by assembler to create the exception handling table */
235
    ExceptStack *b_exceptstack;
236
    /* pointer to an array of instructions, initially NULL */
237
    struct instr *b_instr;
238
    /* If b_next is non-NULL, it is a pointer to the next
239
       block reached by normal control flow. */
240
    struct basicblock_ *b_next;
241
    /* number of instructions used */
242
    int b_iused;
243
    /* length of instruction array (b_instr) */
244
    int b_ialloc;
245
    /* Number of predecessors that a block has. */
246
    int b_predecessors;
247
    /* Number of predecessors that a block has as an exception handler. */
248
    int b_except_predecessors;
249
    /* depth of stack upon entry of block, computed by stackdepth() */
250
    int b_startdepth;
251
    /* instruction offset for block, computed by assemble_jump_offsets() */
252
    int b_offset;
253
    /* Basic block is an exception handler that preserves lasti */
254
    unsigned b_preserve_lasti : 1;
255
    /* Used by compiler passes to mark whether they have visited a basic block. */
256
    unsigned b_visited : 1;
257
    /* b_cold is true if this block is not perf critical (like an exception handler) */
258
    unsigned b_cold : 1;
259
    /* b_warm is used by the cold-detection algorithm to mark blocks which are definitely not cold */
260
    unsigned b_warm : 1;
261
} basicblock;
262
263
264
static struct instr *
265
basicblock_last_instr(basicblock *b) {
266
    if (b->b_iused) {
  Branch (266:9): [True: 6.33M, False: 7.53k]
267
        return &b->b_instr[b->b_iused - 1];
268
    }
269
    return NULL;
270
}
271
272
static inline int
273
basicblock_returns(basicblock *b) {
274
    struct instr *last = basicblock_last_instr(b);
275
    return last && last->i_opcode == RETURN_VALUE;    (count: 69.0k)
  Branch (275:12): [True: 69.0k, False: 3.81k]
  Branch (275:20): [True: 13.7k, False: 55.2k]
276
}
277
278
static inline int
279
basicblock_exits_scope(basicblock *b) {
280
    struct instr *last = basicblock_last_instr(b);
281
    return last && IS_SCOPE_EXIT_OPCODE(last->i_opcode);    (count: 753k)
  Branch (281:12): [True: 753k, False: 3]
282
}
283
284
static inline int
285
basicblock_nofallthrough(basicblock *b) {
286
    struct instr *last = basicblock_last_instr(b);
287
    return (last &&
  Branch (287:13): [True: 3.19M, False: 3.71k]
288
            (IS_SCOPE_EXIT_OPCODE(last->i_opcode) ||            (count: 3.19M)
289
             IS_UNCONDITIONAL_JUMP_OPCODE(last->i_opcode)));    (count: 2.83M)
290
}
291
292
#define BB_NO_FALLTHROUGH(B) (basicblock_nofallthrough(B))
293
#define BB_HAS_FALLTHROUGH(B) (!basicblock_nofallthrough(B))
294
295
/* fblockinfo tracks the current frame block.
296
297
A frame block is used to handle loops, try/except, and try/finally.
298
It's called a frame block to distinguish it from a basic block in the
299
compiler IR.
300
*/
301
302
enum fblocktype { WHILE_LOOP, FOR_LOOP, TRY_EXCEPT, FINALLY_TRY, FINALLY_END,
303
                  WITH, ASYNC_WITH, HANDLER_CLEANUP, POP_VALUE, EXCEPTION_HANDLER,
304
                  EXCEPTION_GROUP_HANDLER, ASYNC_COMPREHENSION_GENERATOR };
305
306
struct fblockinfo {
307
    enum fblocktype fb_type;
308
    basicblock *fb_block;
309
    /* (optional) type-specific exit or cleanup block */
310
    basicblock *fb_exit;
311
    /* (optional) additional information required for unwinding */
312
    void *fb_datum;
313
};
314
315
enum {
316
    COMPILER_SCOPE_MODULE,
317
    COMPILER_SCOPE_CLASS,
318
    COMPILER_SCOPE_FUNCTION,
319
    COMPILER_SCOPE_ASYNC_FUNCTION,
320
    COMPILER_SCOPE_LAMBDA,
321
    COMPILER_SCOPE_COMPREHENSION,
322
};
323
324
/* The following items change on entry and exit of code blocks.
325
   They must be saved and restored when returning to a block.
326
*/
327
struct compiler_unit {
328
    PySTEntryObject *u_ste;
329
330
    PyObject *u_name;
331
    PyObject *u_qualname;  /* dot-separated qualified name (lazy) */
332
    int u_scope_type;
333
334
    /* The following fields are dicts that map objects to
335
       the index of them in co_XXX.      The index is used as
336
       the argument for opcodes that refer to those collections.
337
    */
338
    PyObject *u_consts;    /* all constants */
339
    PyObject *u_names;     /* all names */
340
    PyObject *u_varnames;  /* local variables */
341
    PyObject *u_cellvars;  /* cell variables */
342
    PyObject *u_freevars;  /* free variables */
343
344
    PyObject *u_private;        /* for private name mangling */
345
346
    Py_ssize_t u_argcount;        /* number of arguments for block */
347
    Py_ssize_t u_posonlyargcount;        /* number of positional only arguments for block */
348
    Py_ssize_t u_kwonlyargcount; /* number of keyword only arguments for block */
349
    /* Pointer to the most recently allocated block.  By following b_list
350
       members, you can reach all early allocated blocks. */
351
    basicblock *u_blocks;
352
    basicblock *u_curblock; /* pointer to current block */
353
354
    int u_nfblocks;
355
    struct fblockinfo u_fblock[CO_MAXBLOCKS];
356
357
    int u_firstlineno; /* the first lineno of the block */
358
    struct location u_loc;  /* line/column info of the current stmt */
359
};
360
361
/* This struct captures the global state of a compilation.
362
363
The u pointer points to the current compilation unit, while units
364
for enclosing blocks are stored in c_stack.     The u and c_stack are
365
managed by compiler_enter_scope() and compiler_exit_scope().
366
367
Note that we don't track recursion levels during compilation - the
368
task of detecting and rejecting excessive levels of nesting is
369
handled by the symbol analysis pass.
370
371
*/
372
373
struct compiler {
374
    PyObject *c_filename;
375
    struct symtable *c_st;
376
    PyFutureFeatures *c_future; /* pointer to module's __future__ */
377
    PyCompilerFlags *c_flags;
378
379
    int c_optimize;              /* optimization level */
380
    int c_interactive;           /* true if in interactive mode */
381
    int c_nestlevel;
382
    PyObject *c_const_cache;     /* Python dict holding all constants,
383
                                    including names tuple */
384
    struct compiler_unit *u; /* compiler state for current block */
385
    PyObject *c_stack;           /* Python list holding compiler_unit ptrs */
386
    PyArena *c_arena;            /* pointer to memory allocation arena */
387
};
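An illustrative sketch (the real push happens in compiler_enter_scope(), which is outside this excerpt) of how the current unit is saved on c_stack; CAPSULE_NAME is the string defined further down in this file.

static int
example_push_current_unit(struct compiler *c)
{
    /* Wrap the current compiler_unit in a capsule and push it, so the
       enclosing scope can be restored when the nested scope is left. */
    PyObject *capsule = PyCapsule_New(c->u, CAPSULE_NAME, NULL);
    if (capsule == NULL) {
        return 0;
    }
    int ok = (PyList_Append(c->c_stack, capsule) == 0);
    Py_DECREF(capsule);
    return ok;
}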
388
389
typedef struct {
390
    // A list of strings corresponding to name captures. It is used to track:
391
    // - Repeated name assignments in the same pattern.
392
    // - Different name assignments in alternatives.
393
    // - The order of name assignments in alternatives.
394
    PyObject *stores;
395
    // If 0, any name captures against our subject will raise.
396
    int allow_irrefutable;
397
    // An array of blocks to jump to on failure. Jumping to fail_pop[i] will pop
398
    // i items off of the stack. The end result looks like this (with each block
399
    // falling through to the next):
400
    // fail_pop[4]: POP_TOP
401
    // fail_pop[3]: POP_TOP
402
    // fail_pop[2]: POP_TOP
403
    // fail_pop[1]: POP_TOP
404
    // fail_pop[0]: NOP
405
    basicblock **fail_pop;
406
    // The current length of fail_pop.
407
    Py_ssize_t fail_pop_size;
408
    // The number of items on top of the stack that need to *stay* on top of the
409
    // stack. Variable captures go beneath these. All of them will be popped on
410
    // failure.
411
    Py_ssize_t on_top;
412
} pattern_context;
413
414
static int basicblock_next_instr(basicblock *);
415
416
static int compiler_enter_scope(struct compiler *, identifier, int, void *, int);
417
static void compiler_free(struct compiler *);
418
static basicblock *compiler_new_block(struct compiler *);
419
static int compiler_addop(struct compiler *, int, bool);
420
static int compiler_addop_i(struct compiler *, int, Py_ssize_t, bool);
421
static int compiler_addop_j(struct compiler *, int, basicblock *, bool);
422
static int compiler_error(struct compiler *, const char *, ...);
423
static int compiler_warn(struct compiler *, const char *, ...);
424
static int compiler_nameop(struct compiler *, identifier, expr_context_ty);
425
426
static PyCodeObject *compiler_mod(struct compiler *, mod_ty);
427
static int compiler_visit_stmt(struct compiler *, stmt_ty);
428
static int compiler_visit_keyword(struct compiler *, keyword_ty);
429
static int compiler_visit_expr(struct compiler *, expr_ty);
430
static int compiler_augassign(struct compiler *, stmt_ty);
431
static int compiler_annassign(struct compiler *, stmt_ty);
432
static int compiler_subscript(struct compiler *, expr_ty);
433
static int compiler_slice(struct compiler *, expr_ty);
434
435
static int are_all_items_const(asdl_expr_seq *, Py_ssize_t, Py_ssize_t);
436
437
438
static int compiler_with(struct compiler *, stmt_ty, int);
439
static int compiler_async_with(struct compiler *, stmt_ty, int);
440
static int compiler_async_for(struct compiler *, stmt_ty);
441
static int validate_keywords(struct compiler *c, asdl_keyword_seq *keywords);
442
static int compiler_call_simple_kw_helper(struct compiler *c,
443
                                          asdl_keyword_seq *keywords,
444
                                          Py_ssize_t nkwelts);
445
static int compiler_call_helper(struct compiler *c, int n,
446
                                asdl_expr_seq *args,
447
                                asdl_keyword_seq *keywords);
448
static int compiler_try_except(struct compiler *, stmt_ty);
449
static int compiler_try_star_except(struct compiler *, stmt_ty);
450
static int compiler_set_qualname(struct compiler *);
451
452
static int compiler_sync_comprehension_generator(
453
                                      struct compiler *c,
454
                                      asdl_comprehension_seq *generators, int gen_index,
455
                                      int depth,
456
                                      expr_ty elt, expr_ty val, int type);
457
458
static int compiler_async_comprehension_generator(
459
                                      struct compiler *c,
460
                                      asdl_comprehension_seq *generators, int gen_index,
461
                                      int depth,
462
                                      expr_ty elt, expr_ty val, int type);
463
464
static int compiler_pattern(struct compiler *, pattern_ty, pattern_context *);
465
static int compiler_match(struct compiler *, stmt_ty);
466
static int compiler_pattern_subpattern(struct compiler *, pattern_ty,
467
                                       pattern_context *);
468
469
static void clean_basic_block(basicblock *bb);
470
471
static PyCodeObject *assemble(struct compiler *, int addNone);
472
473
#define CAPSULE_NAME "compile.c compiler unit"
474
475
PyObject *
476
_Py_Mangle(PyObject *privateobj, PyObject *ident)
477
{
478
    /* Name mangling: __private becomes _classname__private.
479
       This is independent from how the name is used. */
480
    PyObject *result;
481
    size_t nlen, plen, ipriv;
482
    Py_UCS4 maxchar;
483
    if (privateobj == NULL || !PyUnicode_Check(privateobj) ||    (count: 341k)
  Branch (483:9): [True: 1.52M, False: 341k]
  Branch (483:31): [True: 0, False: 341k]
484
        PyUnicode_READ_CHAR(ident, 0) != '_' ||                  (count: 341k)
  Branch (484:9): [True: 302k, False: 39.2k]
485
        PyUnicode_READ_CHAR(ident, 1) != '_') {                  (count: 39.2k)
  Branch (485:9): [True: 20.3k, False: 18.9k]
486
        Py_INCREF(ident);
487
        return ident;
488
    }
489
    nlen = PyUnicode_GET_LENGTH(ident);
490
    plen = PyUnicode_GET_LENGTH(privateobj);
491
    /* Don't mangle __id__ or names with dots.
492
493
       The only time a name with a dot can occur is when
494
       we are compiling an import statement that has a
495
       package name.
496
497
       TODO(jhylton): Decide whether we want to support
498
       mangling of the module name, e.g. __M.X.
499
    */
500
    if ((PyUnicode_READ_CHAR(ident, nlen-1) == '_' &&
  Branch (500:10): [True: 18.2k, False: 650]
501
         PyUnicode_READ_CHAR(ident, nlen-2) == '_') ||           (count: 18.2k)
  Branch (501:10): [True: 18.2k, False: 0]
502
        PyUnicode_FindChar(ident, '.', 0, nlen, 1) != -1) {      (count: 650)
  Branch (502:9): [True: 2, False: 648]
503
        Py_INCREF(ident);
504
        return ident; /* Don't mangle __whatever__ */
505
    }
506
    /* Strip leading underscores from class name */
507
    ipriv = 0;
508
    while (PyUnicode_READ_CHAR(privateobj, ipriv) == '_')
  Branch (508:12): [True: 107, False: 648]
509
        ipriv++;
510
    if (ipriv == plen) {
  Branch (510:9): [True: 1, False: 647]
511
        Py_INCREF(ident);
512
        return ident; /* Don't mangle if class is just underscores */
513
    }
514
    plen -= ipriv;
515
516
    if (plen + nlen >= PY_SSIZE_T_MAX - 1) {
  Branch (516:9): [True: 0, False: 647]
517
        PyErr_SetString(PyExc_OverflowError,
518
                        "private identifier too large to be mangled");
519
        return NULL;
520
    }
521
522
    maxchar = PyUnicode_MAX_CHAR_VALUE(ident);
523
    if (PyUnicode_MAX_CHAR_VALUE(privateobj) > maxchar)
  Branch (523:9): [True: 0, False: 647]
524
        maxchar = PyUnicode_MAX_CHAR_VALUE(privateobj);
525
526
    result = PyUnicode_New(1 + nlen + plen, maxchar);
527
    if (!result)
  Branch (527:9): [True: 0, False: 647]
528
        return 0;
529
    /* ident = "_" + priv[ipriv:] + ident # i.e. 1+plen+nlen bytes */
530
    PyUnicode_WRITE(PyUnicode_KIND(result), PyUnicode_DATA(result), 0, '_');
531
    if (PyUnicode_CopyCharacters(result, 1, privateobj, ipriv, plen) < 0) {
  Branch (531:9): [True: 0, False: 647]
532
        Py_DECREF(result);
533
        return NULL;
534
    }
535
    if (PyUnicode_CopyCharacters(result, plen+1, ident, 0, nlen) < 0) {
  Branch (535:9): [True: 0, False: 647]
536
        Py_DECREF(result);
537
        return NULL;
538
    }
539
    assert(_PyUnicode_CheckConsistency(result, 1));
540
    return result;
541
}
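An illustrative usage sketch (not part of the original file): inside class Spam, the private name __eggs mangles to _Spam__eggs, while dunder names and dotted names are returned unchanged.

static PyObject *
example_mangle(void)
{
    PyObject *cls = PyUnicode_FromString("Spam");
    PyObject *priv = PyUnicode_FromString("__eggs");
    if (cls == NULL || priv == NULL) {
        Py_XDECREF(cls);
        Py_XDECREF(priv);
        return NULL;
    }
    PyObject *mangled = _Py_Mangle(cls, priv);   /* "_Spam__eggs" */
    Py_DECREF(cls);
    Py_DECREF(priv);
    return mangled;   /* new reference, or NULL on error */
}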
542
543
static int
544
compiler_init(struct compiler *c)
545
{
546
    memset(c, 0, sizeof(struct compiler));
547
548
    c->c_const_cache = PyDict_New();
549
    if (!c->c_const_cache) {
  Branch (549:9): [True: 0, False: 48.7k]
550
        return 0;
551
    }
552
553
    c->c_stack = PyList_New(0);
554
    if (!c->c_stack) {
  Branch (554:9): [True: 0, False: 48.7k]
555
        Py_CLEAR(c->c_const_cache);
556
        return 0;
557
    }
558
559
    return 1;
560
}
561
562
PyCodeObject *
563
_PyAST_Compile(mod_ty mod, PyObject *filename, PyCompilerFlags *flags,
564
               int optimize, PyArena *arena)
565
{
566
    struct compiler c;
567
    PyCodeObject *co = NULL;
568
    PyCompilerFlags local_flags = _PyCompilerFlags_INIT;
569
    int merged;
570
    if (!compiler_init(&c))
  Branch (570:9): [True: 0, False: 48.7k]
571
        return NULL;
572
    Py_INCREF(filename);
573
    c.c_filename = filename;
574
    c.c_arena = arena;
575
    c.c_future = _PyFuture_FromAST(mod, filename);
576
    if (c.c_future == NULL)
  Branch (576:9): [True: 10, False: 48.7k]
577
        goto finally;
578
    if (!flags) {
  Branch (578:9): [True: 174, False: 48.6k]
579
        flags = &local_flags;
580
    }
581
    merged = c.c_future->ff_features | flags->cf_flags;
582
    c.c_future->ff_features = merged;
583
    flags->cf_flags = merged;
584
    c.c_flags = flags;
585
    c.c_optimize = (optimize == -1) ? _Py_GetConfig()->optimization_level : optimize;    (counts: 48.6k / 112)
  Branch (585:20): [True: 48.6k, False: 112]
586
    c.c_nestlevel = 0;
587
588
    _PyASTOptimizeState state;
589
    state.optimize = c.c_optimize;
590
    state.ff_features = merged;
591
592
    if (!_PyAST_Optimize(mod, arena, &state)) {
  Branch (592:9): [True: 8, False: 48.7k]
593
        goto finally;
594
    }
595
596
    c.c_st = _PySymtable_Build(mod, filename, c.c_future);
597
    if (c.c_st == NULL) {
  Branch (597:9): [True: 152, False: 48.6k]
598
        if (!PyErr_Occurred())
  Branch (598:13): [True: 0, False: 152]
599
            PyErr_SetString(PyExc_SystemError, "no symtable");
600
        goto finally;
601
    }
602
603
    co = compiler_mod(&c, mod);
604
605
 finally:
606
    compiler_free(&c);
607
    assert(co || PyErr_Occurred());
608
    return co;
609
}
610
611
static void
612
compiler_free(struct compiler *c)
613
{
614
    if (c->c_st)
  Branch (614:9): [True: 48.6k, False: 170]
615
        _PySymtable_Free(c->c_st);
616
    if (c->c_future)
  Branch (616:9): [True: 48.7k, False: 10]
617
        PyObject_Free(c->c_future);
618
    Py_XDECREF(c->c_filename);
619
    Py_DECREF(c->c_const_cache);
620
    Py_DECREF(c->c_stack);
621
}
622
623
static PyObject *
624
list2dict(PyObject *list)
625
{
626
    Py_ssize_t i, n;
627
    PyObject *v, *k;
628
    PyObject *dict = PyDict_New();
629
    if (!dict) return NULL;    (count: 0)
  Branch (629:9): [True: 0, False: 73.2k]
630
631
    n = PyList_Size(list);
632
    for (i = 0; i < n; i++) {    (count: 45.8k)
  Branch (632:17): [True: 45.8k, False: 73.2k]
633
        v = PyLong_FromSsize_t(i);
634
        if (!v) {
  Branch (634:13): [True: 0, False: 45.8k]
635
            Py_DECREF(dict);
636
            return NULL;
637
        }
638
        k = PyList_GET_ITEM(list, i);
639
        if (PyDict_SetItem(dict, k, v) < 0) {
  Branch (639:13): [True: 0, False: 45.8k]
640
            Py_DECREF(v);
641
            Py_DECREF(dict);
642
            return NULL;
643
        }
644
        Py_DECREF(v);
645
    }
646
    return dict;
647
}
648
649
/* Return new dict containing names from src that match scope(s).
650
651
src is a symbol table dictionary.  If the scope of a name matches
652
scope_type, or its flag bit is set, insert it into the new dict.  The
653
values are integers, starting at offset and increasing by one for
654
each key.
655
*/
656
657
static PyObject *
658
dictbytype(PyObject *src, int scope_type, int flag, Py_ssize_t offset)
659
{
660
    Py_ssize_t i = offset, scope, num_keys, key_i;
661
    PyObject *k, *v, *dest = PyDict_New();
662
    PyObject *sorted_keys;
663
664
    assert(offset >= 0);
665
    if (dest == NULL)
  Branch (665:9): [True: 0, False: 146k]
666
        return NULL;
667
668
    /* Sort the keys so that we have a deterministic order on the indexes
669
       saved in the returned dictionary.  These indexes are used as indexes
670
       into the free and cell var storage.  Therefore if they aren't
671
       deterministic, then the generated bytecode is not deterministic.
672
    */
673
    sorted_keys = PyDict_Keys(src);
674
    if (sorted_keys == NULL)
  Branch (674:9): [True: 0, False: 146k]
675
        return NULL;
676
    if (PyList_Sort(sorted_keys) != 0) {
  Branch (676:9): [True: 0, False: 146k]
677
        Py_DECREF(sorted_keys);
678
        return NULL;
679
    }
680
    num_keys = PyList_GET_SIZE(sorted_keys);
681
682
    for (key_i = 0; key_i < num_keys; key_i++) {    (count: 367k)
  Branch (682:21): [True: 367k, False: 146k]
683
        /* XXX this should probably be a macro in symtable.h */
684
        long vi;
685
        k = PyList_GET_ITEM(sorted_keys, key_i);
686
        v = PyDict_GetItemWithError(src, k);
687
        assert(v && PyLong_Check(v));
688
        vi = PyLong_AS_LONG(v);
689
        scope = (vi >> SCOPE_OFFSET) & SCOPE_MASK;
690
691
        if (scope == scope_type || vi & flag) {    (count: 363k)
  Branch (691:13): [True: 4.14k, False: 363k]
  Branch (691:36): [True: 1, False: 363k]
692
            PyObject *item = PyLong_FromSsize_t(i);
693
            if (item == NULL) {
  Branch (693:17): [True: 0, False: 4.14k]
694
                Py_DECREF(sorted_keys);
695
                Py_DECREF(dest);
696
                return NULL;
697
            }
698
            i++;
699
            if (PyDict_SetItem(dest, k, item) < 0) {
  Branch (699:17): [True: 0, False: 4.14k]
700
                Py_DECREF(sorted_keys);
701
                Py_DECREF(item);
702
                Py_DECREF(dest);
703
                return NULL;
704
            }
705
            Py_DECREF(item);
706
        }
707
    }
708
    Py_DECREF(sorted_keys);
709
    return dest;
710
}
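An illustrative call pattern (the actual call sites are in compiler_enter_scope(), outside this excerpt; CELL, FREE and DEF_FREE_CLASS come from pycore_symtable.h): cell variables are collected first, then free variables are numbered after them via the offset argument.

static int
example_collect_closure_vars(struct compiler_unit *u)
{
    u->u_cellvars = dictbytype(u->u_ste->ste_symbols, CELL, 0, 0);
    if (u->u_cellvars == NULL) {
        return 0;
    }
    u->u_freevars = dictbytype(u->u_ste->ste_symbols, FREE, DEF_FREE_CLASS,
                               PyDict_GET_SIZE(u->u_cellvars));
    return u->u_freevars != NULL;
}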
711
712
static void
713
compiler_unit_check(struct compiler_unit *u)
714
{
715
    basicblock *block;
716
    for (block = u->u_blocks; block != NULL; block = block->b_list) {    (count: 666k)
  Branch (716:31): [True: 666k, False: 97.9k]
717
        assert(!_PyMem_IsPtrFreed(block));
718
        if (block->b_instr != NULL) {
  Branch (718:13): [True: 650k, False: 15.8k]
719
            assert(block->b_ialloc > 0);
720
            assert(block->b_iused >= 0);
721
            assert(block->b_ialloc >= block->b_iused);
722
        }
723
        else {
724
            assert (block->b_iused == 0);
725
            assert (block->b_ialloc == 0);
726
        }
727
    }
728
}
729
730
static void
731
compiler_unit_free(struct compiler_unit *u)
732
{
733
    basicblock *b, *next;
734
735
    compiler_unit_check(u);
736
    b = u->u_blocks;
737
    while (b != NULL) {
  Branch (737:12): [True: 586k, False: 73.2k]
738
        if (b->b_instr)
  Branch (738:13): [True: 575k, False: 10.2k]
739
            PyObject_Free((void *)b->b_instr);
740
        next = b->b_list;
741
        PyObject_Free((void *)b);
742
        b = next;
743
    }
744
    Py_CLEAR(u->u_ste);
745
    Py_CLEAR(u->u_name);
746
    Py_CLEAR(u->u_qualname);
747
    Py_CLEAR(u->u_consts);
748
    Py_CLEAR(u->u_names);
749
    Py_CLEAR(u->u_varnames);
750
    Py_CLEAR(u->u_freevars);
751
    Py_CLEAR(u->u_cellvars);
752
    Py_CLEAR(u->u_private);
753
    PyObject_Free(u);
754
}
755
756
static int
757
compiler_set_qualname(struct compiler *c)
758
{
759
    Py_ssize_t stack_size;
760
    struct compiler_unit *u = c->u;
761
    PyObject *name, *base;
762
763
    base = NULL;
764
    stack_size = PyList_GET_SIZE(c->c_stack);
765
    assert(stack_size >= 1);
766
    if (stack_size > 1) {
  Branch (766:9): [True: 12.5k, False: 12.1k]
767
        int scope, force_global = 0;
768
        struct compiler_unit *parent;
769
        PyObject *mangled, *capsule;
770
771
        capsule = PyList_GET_ITEM(c->c_stack, stack_size - 1);
772
        parent = (struct compiler_unit *)PyCapsule_GetPointer(capsule, CAPSULE_NAME);
773
        assert(parent);
774
775
        if (u->u_scope_type == COMPILER_SCOPE_FUNCTION
  Branch (775:13): [True: 11.1k, False: 1.39k]
776
            || u->u_scope_type == COMPILER_SCOPE_ASYNC_FUNCTION    (count: 1.39k)
  Branch (776:16): [True: 50, False: 1.34k]
777
            || u->u_scope_type == COMPILER_SCOPE_CLASS) {          (count: 1.34k)
  Branch (777:16): [True: 147, False: 1.19k]
778
            assert(u->u_name);
779
            mangled = _Py_Mangle(parent->u_private, u->u_name);
780
            if (!mangled)
  Branch (780:17): [True: 0, False: 11.3k]
781
                return 0;
782
            scope = _PyST_GetScope(parent->u_ste, mangled);
783
            Py_DECREF(mangled);
784
            assert(scope != GLOBAL_IMPLICIT);
785
            if (scope == GLOBAL_EXPLICIT)
  Branch (785:17): [True: 0, False: 11.3k]
786
                force_global = 1;
787
        }
788
789
        if (!force_global) {
  Branch (789:13): [True: 12.5k, False: 0]
790
            if (parent->u_scope_type == COMPILER_SCOPE_FUNCTION
  Branch (790:17): [True: 3.54k, False: 8.98k]
791
                || parent->u_scope_type == COMPILER_SCOPE_ASYNC_FUNCTION    (count: 8.98k)
  Branch (791:20): [True: 15, False: 8.96k]
792
                || parent->u_scope_type == COMPILER_SCOPE_LAMBDA)           (count: 8.96k)
  Branch (792:20): [True: 9, False: 8.95k]
793
            {
794
                _Py_DECLARE_STR(dot_locals, ".<locals>");
795
                base = PyUnicode_Concat(parent->u_qualname,
796
                                        &_Py_STR(dot_locals));
797
                if (base == NULL)
  Branch (797:21): [True: 0, False: 3.56k]
798
                    return 0;
799
            }
800
            else {
801
                Py_INCREF(parent->u_qualname);
802
                base = parent->u_qualname;
803
            }
804
        }
805
    }
806
807
    if (base != NULL) {
  Branch (807:9): [True: 12.5k, False: 12.1k]
808
        _Py_DECLARE_STR(dot, ".");
809
        name = PyUnicode_Concat(base, &_Py_STR(dot));
810
        Py_DECREF(base);
811
        if (name == NULL)
  Branch (811:13): [True: 0, False: 12.5k]
812
            return 0;
813
        PyUnicode_Append(&name, u->u_name);
814
        if (name == NULL)
  Branch (814:13): [True: 0, False: 12.5k]
815
            return 0;
816
    }
817
    else {
818
        Py_INCREF(u->u_name);
819
        name = u->u_name;
820
    }
821
    u->u_qualname = name;
822
823
    return 1;
824
}
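A worked example of the qualified names produced above (illustrative, not in the original file):

/*
 * For the source
 *
 *     class Spam:
 *         def eggs(self):
 *             def ham(): pass
 *
 * the units get u_qualname "Spam", "Spam.eggs" and "Spam.eggs.<locals>.ham":
 * a parent that is a function, async function or lambda contributes the
 * ".<locals>" suffix, any other parent contributes just its own qualname.
 */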
825
826
827
/* Allocate a new block and return a pointer to it.
828
   Returns NULL on error.
829
*/
830
static basicblock *
831
new_basicblock()
832
{
833
    basicblock *b = (basicblock *)PyObject_Calloc(1, sizeof(basicblock));
834
    if (b == NULL) {
  Branch (834:9): [True: 0, False: 586k]
835
        PyErr_NoMemory();
836
        return NULL;
837
    }
838
    return b;
839
}
840
841
static basicblock *
842
compiler_new_block(struct compiler *c)
843
{
844
    basicblock *b = new_basicblock();
845
    if (b == NULL) {
  Branch (845:9): [True: 0, False: 585k]
846
        return NULL;
847
    }
848
    /* Extend the singly linked list of blocks with new block. */
849
    struct compiler_unit *u = c->u;
850
    b->b_list = u->u_blocks;
851
    u->u_blocks = b;
852
    return b;
853
}
854
855
static basicblock *
856
compiler_use_next_block(struct compiler *c, basicblock *block)
857
{
858
    assert(block != NULL);
859
    c->u->u_curblock->b_next = block;
860
    c->u->u_curblock = block;
861
    return block;
862
}
863
864
static basicblock *
865
basicblock_new_b_list_successor(basicblock *prev)
866
{
867
    basicblock *result = new_basicblock();
868
    if (result == NULL) {
  Branch (868:9): [True: 0, False: 837]
869
        return NULL;
870
    }
871
    result->b_list = prev->b_list;
872
    prev->b_list = result;
873
    return result;
874
}
875
876
static basicblock *
877
copy_basicblock(basicblock *block)
878
{
879
    /* Cannot copy a block if it has a fallthrough, since
880
     * a block can only have one fallthrough predecessor.
881
     */
882
    assert(BB_NO_FALLTHROUGH(block));
883
    basicblock *result = basicblock_new_b_list_successor(block);
884
    if (result == NULL) {
  Branch (884:9): [True: 0, False: 831]
885
        return NULL;
886
    }
887
    for (int i = 0; i < block->b_iused; i++) {    (counts: 831 / 1.65k)
  Branch (887:21): [True: 1.65k, False: 831]
888
        int n = basicblock_next_instr(result);
889
        if (n < 0) {
  Branch (889:13): [True: 0, False: 1.65k]
890
            return NULL;
891
        }
892
        result->b_instr[n] = block->b_instr[i];
893
    }
894
    return result;
895
}
896
897
/* Returns the offset of the next instruction in the current block's
898
   b_instr array.  Resizes the b_instr as necessary.
899
   Returns -1 on failure.
900
*/
901
902
static int
903
basicblock_next_instr(basicblock *b)
904
{
905
    assert(b != NULL);
906
    if (b->b_instr == NULL) {
  Branch (906:9): [True: 575k, False: 2.09M]
907
        b->b_instr = (struct instr *)PyObject_Calloc(
908
                         DEFAULT_BLOCK_SIZE, sizeof(struct instr));
909
        if (b->b_instr == NULL) {
  Branch (909:13): [True: 0, False: 575k]
910
            PyErr_NoMemory();
911
            return -1;
912
        }
913
        b->b_ialloc = DEFAULT_BLOCK_SIZE;
914
    }
915
    else if (b->b_iused == b->b_ialloc) {
  Branch (915:14): [True: 12.1k, False: 2.08M]
916
        struct instr *tmp;
917
        size_t oldsize, newsize;
918
        oldsize = b->b_ialloc * sizeof(struct instr);
919
        newsize = oldsize << 1;
920
921
        if (oldsize > (SIZE_MAX >> 1)) {
  Branch (921:13): [True: 0, False: 12.1k]
922
            PyErr_NoMemory();
923
            return -1;
924
        }
925
926
        if (newsize == 0) {
  Branch (926:13): [True: 0, False: 12.1k]
927
            PyErr_NoMemory();
928
            return -1;
929
        }
930
        b->b_ialloc <<= 1;
931
        tmp = (struct instr *)PyObject_Realloc(
932
                                        (void *)b->b_instr, newsize);
933
        if (tmp == NULL) {
  Branch (933:13): [True: 0, False: 12.1k]
934
            PyErr_NoMemory();
935
            return -1;
936
        }
937
        b->b_instr = tmp;
938
        memset((char *)b->b_instr + oldsize, 0, newsize - oldsize);
939
    }
940
    return b->b_iused++;
941
}
942
943
/* Set the line number and column offset for the following instructions.
944
945
   The line number is reset in the following cases:
946
   - when entering a new scope
947
   - on each statement
948
   - on each expression and sub-expression
949
   - before the "except" and "finally" clauses
950
*/
951
952
#define SET_LOC(c, x)                                   \
953
    (c)->u->u_loc.lineno = (x)->lineno;                 \
954
    (c)->u->u_loc.end_lineno = (x)->end_lineno;         \
955
    (c)->u->u_loc.col_offset = (x)->col_offset;         \
956
    (c)->u->u_loc.end_col_offset = (x)->end_col_offset;
957
958
// Artificial instructions
959
#define UNSET_LOC(c) \
960
    (c)->u->u_loc.lineno = -1;             \
961
    (c)->u->u_loc.end_lineno = -1;         \
962
    (c)->u->u_loc.col_offset = -1;         \
963
    (c)->u->u_loc.end_col_offset = -1;
964
965
966
/* Return the stack effect of opcode with argument oparg.
967
968
   Some opcodes have different stack effect when jump to the target and
969
   when not jump. The 'jump' parameter specifies the case:
970
971
   * 0 -- when not jump
972
   * 1 -- when jump
973
   * -1 -- maximal
974
 */
975
static int
976
stack_effect(int opcode, int oparg, int jump)
977
{
978
    switch (opcode) {
979
        case NOP:
  Branch (979:9): [True: 1.90k, False: 2.86M]
980
        case EXTENDED_ARG:
  Branch (980:9): [True: 4, False: 2.87M]
981
        case RESUME:
  Branch (981:9): [True: 74.4k, False: 2.79M]
982
        case CACHE:
  Branch (982:9): [True: 3.61k, False: 2.86M]
983
            return 0;
984
985
        /* Stack manipulation */
986
        case POP_TOP:
  Branch (986:9): [True: 232k, False: 2.63M]
987
            return -1;
988
        case SWAP:
  Branch (988:9): [True: 1.35k, False: 2.86M]
989
            return 0;
990
991
        /* Unary operators */
992
        case UNARY_POSITIVE:
  Branch (992:9): [True: 22, False: 2.87M]
993
        case UNARY_NEGATIVE:
  Branch (993:9): [True: 314, False: 2.87M]
994
        case UNARY_NOT:
  Branch (994:9): [True: 170, False: 2.87M]
995
        case UNARY_INVERT:
  Branch (995:9): [True: 77, False: 2.87M]
996
            return 0;
997
998
        case SET_ADD:
  Branch (998:9): [True: 174, False: 2.87M]
999
        case LIST_APPEND:
  Branch (999:9): [True: 146k, False: 2.72M]
1000
            return -1;
1001
        case MAP_ADD:
  Branch (1001:9): [True: 69.9k, False: 2.80M]
1002
            return -2;
1003
1004
        case BINARY_SUBSCR:
  Branch (1004:9): [True: 10.4k, False: 2.86M]
1005
            return -1;
1006
        case BINARY_SLICE:
  Branch (1006:9): [True: 1.78k, False: 2.86M]
1007
            return -2;
1008
        case STORE_SUBSCR:
  Branch (1008:9): [True: 3.00k, False: 2.86M]
1009
            return -3;
1010
        case STORE_SLICE:
  Branch (1010:9): [True: 109, False: 2.87M]
1011
            return -4;
1012
        case DELETE_SUBSCR:
  Branch (1012:9): [True: 296, False: 2.87M]
1013
            return -2;
1014
1015
        case GET_ITER:
  Branch (1015:9): [True: 4.13k, False: 2.86M]
1016
            return 0;
1017
1018
        case PRINT_EXPR:
  Branch (1018:9): [True: 2.58k, False: 2.86M]
1019
            return -1;
1020
        case LOAD_BUILD_CLASS:
  Branch (1020:9): [True: 1.80k, False: 2.86M]
1021
            return 1;
1022
1023
        case RETURN_VALUE:
  Branch (1023:9): [True: 79.1k, False: 2.79M]
1024
            return -1;
1025
        case IMPORT_STAR:
  Branch (1025:9): [True: 373, False: 2.87M]
1026
            return -1;
1027
        case SETUP_ANNOTATIONS:
  Branch (1027:9): [True: 385, False: 2.87M]
1028
            return 0;
1029
        case ASYNC_GEN_WRAP:
  Branch (1029:9): [True: 17, False: 2.87M]
1030
        case YIELD_VALUE:
  Branch (1030:9): [True: 1.52k, False: 2.86M]
1031
            return 0;
1032
        case POP_BLOCK:
  Branch (1032:9): [True: 7.32k, False: 2.86M]
1033
            return 0;
1034
        case POP_EXCEPT:
  Branch (1034:9): [True: 6.69k, False: 2.86M]
1035
            return -1;
1036
1037
        case STORE_NAME:
  Branch (1037:9): [True: 52.8k, False: 2.81M]
1038
            return -1;
1039
        case DELETE_NAME:
  Branch (1039:9): [True: 328, False: 2.87M]
1040
            return 0;
1041
        case UNPACK_SEQUENCE:
  Branch (1041:9): [True: 2.74k, False: 2.86M]
1042
            return oparg-1;
1043
        case UNPACK_EX:
  Branch (1043:9): [True: 66, False: 2.87M]
1044
            return (oparg&0xFF) + (oparg>>8);
1045
        case FOR_ITER:
  Branch (1045:9): [True: 8.28k, False: 2.86M]
1046
            /* -1 at end of iterator, 1 if continue iterating. */
1047
            return jump > 0 ? -1 : 1;    (counts: 4.14k / 4.14k)
  Branch (1047:20): [True: 4.14k, False: 4.14k]
1048
        case SEND:
  Branch (1048:9): [True: 742, False: 2.87M]
1049
            return jump > 0 ? 
-1370
:
0372
;
  Branch (1049:20): [True: 370, False: 372]
1050
        case STORE_ATTR:
  Branch (1050:9): [True: 6.84k, False: 2.86M]
1051
            return -2;
1052
        case DELETE_ATTR:
  Branch (1052:9): [True: 53, False: 2.87M]
1053
            return -1;
1054
        case STORE_GLOBAL:
  Branch (1054:9): [True: 231, False: 2.87M]
1055
            return -1;
1056
        case DELETE_GLOBAL:
  Branch (1056:9): [True: 4, False: 2.87M]
1057
            return 0;
1058
        case LOAD_CONST:
  Branch (1058:9): [True: 392k, False: 2.47M]
1059
            return 1;
1060
        case LOAD_NAME:
  Branch (1060:9): [True: 550k, False: 2.32M]
1061
            return 1;
1062
        case BUILD_TUPLE:
  Branch (1062:9): [True: 26.9k, False: 2.84M]
1063
        case BUILD_LIST:
  Branch (1063:9): [True: 5.48k, False: 2.86M]
1064
        case BUILD_SET:
  Branch (1064:9): [True: 231, False: 2.87M]
1065
        case BUILD_STRING:
  Branch (1065:9): [True: 2.38k, False: 2.86M]
1066
            return 1-oparg;
1067
        case BUILD_MAP:
  Branch (1067:9): [True: 6.81k, False: 2.86M]
1068
            return 1 - 2*oparg;
1069
        case BUILD_CONST_KEY_MAP:
  Branch (1069:9): [True: 755, False: 2.87M]
1070
            return -oparg;
1071
        case LOAD_ATTR:
  Branch (1071:9): [True: 66.6k, False: 2.80M]
1072
            return (oparg & 1);
1073
        case COMPARE_OP:
  Branch (1073:9): [True: 11.3k, False: 2.85M]
1074
        case IS_OP:
  Branch (1074:9): [True: 2.06k, False: 2.86M]
1075
        case CONTAINS_OP:
  Branch (1075:9): [True: 2.51k, False: 2.86M]
1076
            return -1;
1077
        case CHECK_EXC_MATCH:
  Branch (1077:9): [True: 2.34k, False: 2.86M]
1078
            return 0;
1079
        case CHECK_EG_MATCH:
  Branch (1079:9): [True: 38, False: 2.87M]
1080
            return 0;
1081
        case IMPORT_NAME:
  Branch (1081:9): [True: 4.73k, False: 2.86M]
1082
            return -1;
1083
        case IMPORT_FROM:
  Branch (1083:9): [True: 1.55k, False: 2.86M]
1084
            return 1;
1085
1086
        /* Jumps */
1087
        case JUMP_FORWARD:
  Branch (1087:9): [True: 7, False: 2.87M]
1088
        case JUMP_BACKWARD:
  Branch (1088:9): [True: 4, False: 2.87M]
1089
        case JUMP:
  Branch (1089:9): [True: 29.2k, False: 2.84M]
1090
        case JUMP_BACKWARD_NO_INTERRUPT:
  Branch (1090:9): [True: 4, False: 2.87M]
1091
        case JUMP_NO_INTERRUPT:
  Branch (1091:9): [True: 742, False: 2.87M]
1092
            return 0;
1093
1094
        case JUMP_IF_TRUE_OR_POP:
  Branch (1094:9): [True: 1.98k, False: 2.86M]
1095
        case JUMP_IF_FALSE_OR_POP:
  Branch (1095:9): [True: 1.47k, False: 2.86M]
1096
            return jump ? 0 : -1;    (counts: 1.73k / 1.73k)
  Branch (1096:20): [True: 1.73k, False: 1.73k]
1097
1098
        case POP_JUMP_BACKWARD_IF_NONE:
  Branch (1098:9): [True: 4, False: 2.87M]
1099
        case POP_JUMP_FORWARD_IF_NONE:
  Branch (1099:9): [True: 4, False: 2.87M]
1100
        case POP_JUMP_IF_NONE:
  Branch (1100:9): [True: 3.28k, False: 2.86M]
1101
        case POP_JUMP_BACKWARD_IF_NOT_NONE:
  Branch (1101:9): [True: 4, False: 2.87M]
1102
        case POP_JUMP_FORWARD_IF_NOT_NONE:
  Branch (1102:9): [True: 4, False: 2.87M]
1103
        case POP_JUMP_IF_NOT_NONE:
  Branch (1103:9): [True: 3.45k, False: 2.86M]
1104
        case POP_JUMP_FORWARD_IF_FALSE:
  Branch (1104:9): [True: 4, False: 2.87M]
1105
        case POP_JUMP_BACKWARD_IF_FALSE:
  Branch (1105:9): [True: 4, False: 2.87M]
1106
        case POP_JUMP_IF_FALSE:
  Branch (1106:9): [True: 445k, False: 2.42M]
1107
        case POP_JUMP_FORWARD_IF_TRUE:
  Branch (1107:9): [True: 4, False: 2.87M]
1108
        case POP_JUMP_BACKWARD_IF_TRUE:
  Branch (1108:9): [True: 4, False: 2.87M]
1109
        case POP_JUMP_IF_TRUE:
  Branch (1109:9): [True: 12.1k, False: 2.85M]
1110
            return -1;
1111
1112
        case LOAD_GLOBAL:
  Branch (1112:9): [True: 45.8k, False: 2.82M]
1113
            return (oparg & 1) + 1;
1114
1115
        /* Exception handling pseudo-instructions */
1116
        case SETUP_FINALLY:
  Branch (1116:9): [True: 5.62k, False: 2.86M]
1117
            /* 0 in the normal flow.
1118
             * Restore the stack position and push 1 value before jumping to
1119
             * the handler if an exception is raised. */
1120
            return jump ? 1 : 0;    (counts: 2.81k / 2.80k)
  Branch (1120:20): [True: 2.81k, False: 2.80k]
1121
        case SETUP_CLEANUP:
  Branch (1121:9): [True: 8.09k, False: 2.86M]
1122
            /* As SETUP_FINALLY, but pushes lasti as well */
1123
            return jump ? 2 : 0;    (counts: 4.04k / 4.04k)
  Branch (1123:20): [True: 4.04k, False: 4.04k]
1124
        case SETUP_WITH:
  Branch (1124:9): [True: 1.74k, False: 2.86M]
1125
            /* 0 in the normal flow.
1126
             * Restore the stack position to the position before the result
1127
             * of __(a)enter__ and push 2 values before jumping to the handler
1128
             * if an exception is raised. */
1129
            return jump ? 1 : 0;    (counts: 873 / 871)
  Branch (1129:20): [True: 873, False: 871]
1130
1131
        case PREP_RERAISE_STAR:
  Branch (1131:9): [True: 37, False: 2.87M]
1132
             return -1;
1133
        case RERAISE:
  Branch (1133:9): [True: 7.50k, False: 2.86M]
1134
            return -1;
1135
        case PUSH_EXC_INFO:
  Branch (1135:9): [True: 3.63k, False: 2.86M]
1136
            return 1;
1137
1138
        case WITH_EXCEPT_START:
  Branch (1138:9): [True: 874, False: 2.87M]
1139
            return 1;
1140
1141
        case LOAD_FAST:
  Branch (1141:9): [True: 165k, False: 2.70M]
1142
        case LOAD_FAST_CHECK:
  Branch (1142:9): [True: 280, False: 2.87M]
1143
            return 1;
1144
        case STORE_FAST:
  Branch (1144:9): [True: 39.5k, False: 2.83M]
1145
            return -1;
1146
        case DELETE_FAST:
  Branch (1146:9): [True: 658, False: 2.87M]
1147
            return 0;
1148
1149
        case RETURN_GENERATOR:
  Branch (1149:9): [True: 1.36k, False: 2.86M]
1150
            return 0;
1151
1152
        case RAISE_VARARGS:
  Branch (1152:9): [True: 4.37k, False: 2.86M]
1153
            return -oparg;
1154
1155
        /* Functions and calls */
1156
        case KW_NAMES:
  Branch (1156:9): [True: 3.11k, False: 2.86M]
1157
            return 0;
1158
        case CALL:
  Branch (1158:9): [True: 71.2k, False: 2.79M]
1159
            return -1-oparg;
1160
1161
        case CALL_FUNCTION_EX:
  Branch (1161:9): [True: 1.13k, False: 2.87M]
1162
            return -2 - ((oparg & 0x01) != 0);
1163
        case MAKE_FUNCTION:
  Branch (1163:9): [True: 24.5k, False: 2.84M]
1164
            return 0 - ((oparg & 0x01) != 0) - ((oparg & 0x02) != 0) -
1165
                ((oparg & 0x04) != 0) - ((oparg & 0x08) != 0);
1166
        case BUILD_SLICE:
  Branch (1166:9): [True: 143, False: 2.87M]
1167
            if (oparg == 3)
  Branch (1167:17): [True: 56, False: 87]
1168
                return -2;
1169
            else
1170
                return -1;
1171
1172
        /* Closures */
1173
        case MAKE_CELL:
  Branch (1173:9): [True: 1.82k, False: 2.86M]
1174
        case COPY_FREE_VARS:
  Branch (1174:9): [True: 1.38k, False: 2.86M]
1175
            return 0;
1176
        case LOAD_CLOSURE:
  Branch (1176:9): [True: 2.62k, False: 2.86M]
1177
            return 1;
1178
        case LOAD_DEREF:
  Branch (1178:9): [True: 5.45k, False: 2.86M]
1179
        case LOAD_CLASSDEREF:
  Branch (1179:9): [True: 9, False: 2.87M]
1180
            return 1;
1181
        case STORE_DEREF:
  Branch (1181:9): [True: 1.03k, False: 2.87M]
1182
            return -1;
1183
        case DELETE_DEREF:
  Branch (1183:9): [True: 4, False: 2.87M]
1184
            return 0;
1185
1186
        /* Iterators and generators */
1187
        case GET_AWAITABLE:
  Branch (1187:9): [True: 244, False: 2.87M]
1188
            return 0;
1189
1190
        case BEFORE_ASYNC_WITH:
  Branch (1190:9): [True: 58, False: 2.87M]
1191
        case BEFORE_WITH:
  Branch (1191:9): [True: 846, False: 2.87M]
1192
            return 1;
1193
        case GET_AITER:
  Branch (1193:9): [True: 52, False: 2.87M]
1194
            return 0;
1195
        case GET_ANEXT:
  Branch (1195:9): [True: 58, False: 2.87M]
1196
            return 1;
1197
        case GET_YIELD_FROM_ITER:
  Branch (1197:9): [True: 112, False: 2.87M]
1198
            return 0;
1199
        case END_ASYNC_FOR:
  Branch (1199:9): [True: 52, False: 2.87M]
1200
            return -2;
1201
        case FORMAT_VALUE:
  Branch (1201:9): [True: 77.5k, False: 2.79M]
1202
            /* If there's a fmt_spec on the stack, we go from 2->1,
1203
               else 1->1. */
1204
            return (oparg & FVS_MASK) == FVS_HAVE_SPEC ? -1 : 0;    (counts: 3.79k / 73.7k)
  Branch (1204:20): [True: 3.79k, False: 73.7k]
1205
        case LOAD_METHOD:
  Branch (1205:9): [True: 4, False: 2.87M]
1206
            return 1;
1207
        case LOAD_ASSERTION_ERROR:
  Branch (1207:9): [True: 338, False: 2.87M]
1208
            return 1;
1209
        case LIST_TO_TUPLE:
  Branch (1209:9): [True: 1.07k, False: 2.87M]
1210
            return 0;
1211
        case LIST_EXTEND:
  Branch (1211:9): [True: 1.71k, False: 2.86M]
1212
        case SET_UPDATE:
  Branch (1212:9): [True: 22, False: 2.87M]
1213
        case DICT_MERGE:
  Branch (1213:9): [True: 634, False: 2.87M]
1214
        case DICT_UPDATE:
  Branch (1214:9): [True: 5.14k, False: 2.86M]
1215
            return -1;
1216
        case MATCH_CLASS:
  Branch (1216:9): [True: 90, False: 2.87M]
1217
            return -2;
1218
        case GET_LEN:
  Branch (1218:9): [True: 450, False: 2.87M]
1219
        case MATCH_MAPPING:
  Branch (1219:9): [True: 190, False: 2.87M]
1220
        case MATCH_SEQUENCE:
  Branch (1220:9): [True: 288, False: 2.87M]
1221
        case MATCH_KEYS:
  Branch (1221:9): [True: 166, False: 2.87M]
1222
            return 1;
1223
        case COPY:
  Branch (1223:9): [True: 5.79k, False: 2.86M]
1224
        case PUSH_NULL:
  Branch (1224:9): [True: 17.0k, False: 2.85M]
1225
            return 1;
1226
        case BINARY_OP:
  Branch (1226:9): [True: 65.3k, False: 2.80M]
1227
            return -1;
1228
        default:
  Branch (1228:9): [True: 145, False: 2.87M]
1229
            return PY_INVALID_STACK_EFFECT;
1230
    }
1231
    return PY_INVALID_STACK_EFFECT; /* not reachable */
1232
}
1233
1234
int
1235
PyCompile_OpcodeStackEffectWithJump(int opcode, int oparg, int jump)
1236
{
1237
    return stack_effect(opcode, oparg, jump);
1238
}
1239
1240
int
1241
PyCompile_OpcodeStackEffect(int opcode, int oparg)
1242
{
1243
    return stack_effect(opcode, oparg, -1);
1244
}
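An illustrative check of the public wrappers above, using effects taken directly from the switch in stack_effect():

static void
example_stack_effects(void)
{
    /* CALL pops the callable, self/NULL and oparg arguments and pushes one
       result: net effect -1 - oparg. */
    assert(PyCompile_OpcodeStackEffect(CALL, 2) == -3);
    /* FOR_ITER depends on whether the jump (end of iteration) is taken. */
    assert(PyCompile_OpcodeStackEffectWithJump(FOR_ITER, 0, 1) == -1);
    assert(PyCompile_OpcodeStackEffectWithJump(FOR_ITER, 0, 0) == 1);
}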
1245
1246
static int
1247
is_end_of_basic_block(struct instr *instr)
1248
{
1249
    int opcode = instr->i_opcode;
1250
    return IS_JUMP_OPCODE(opcode) || IS_SCOPE_EXIT_OPCODE(opcode);    (count: 2.08M)
1251
}
1252
1253
static int
1254
compiler_use_new_implicit_block_if_needed(struct compiler *c)
1255
{
1256
    basicblock *b = c->u->u_curblock;
1257
    if (b->b_iused && is_end_of_basic_block(basicblock_last_instr(b))) {    (count: 2.31M)
  Branch (1257:9): [True: 2.31M, False: 340k]
  Branch (1257:23): [True: 234k, False: 2.08M]
1258
        basicblock *b = compiler_new_block(c);
1259
        if (b == NULL) {
  Branch (1259:13): [True: 0, False: 234k]
1260
            return -1;
1261
        }
1262
        compiler_use_next_block(c, b);
1263
    }
1264
    return 0;
1265
}
1266
1267
/* Add an opcode with no argument.
1268
   Returns 0 on failure, 1 on success.
1269
*/
1270
1271
static int
1272
basicblock_addop(basicblock *b, int opcode, int oparg,
1273
                 basicblock *target, const struct location *loc)
1274
{
1275
    assert(IS_WITHIN_OPCODE_RANGE(opcode));
1276
    assert(!IS_ASSEMBLER_OPCODE(opcode));
1277
    assert(HAS_ARG(opcode) || oparg == 0);
1278
    assert(0 <= oparg && oparg < (1 << 30));
1279
    assert((target == NULL) ||
1280
           IS_JUMP_OPCODE(opcode) ||
1281
           IS_BLOCK_PUSH_OPCODE(opcode));
1282
    assert(oparg == 0 || target == NULL);
1283
1284
    int off = basicblock_next_instr(b);
1285
    if (off < 0) {
  Branch (1285:9): [True: 0, False: 2.65M]
1286
        return 0;
1287
    }
1288
    struct instr *i = &b->b_instr[off];
1289
    i->i_opcode = opcode;
1290
    i->i_oparg = oparg;
1291
    i->i_target = target;
1292
    i->i_loc = loc ? *loc : NO_LOCATION;    (counts: 2.64M / 12.8k)
  Branch (1292:16): [True: 2.64M, False: 12.8k]
1293
1294
    return 1;
1295
}
1296
1297
static int
1298
compiler_addop(struct compiler *c, int opcode, bool line)
1299
{
1300
    assert(!HAS_ARG(opcode));
1301
    if (compiler_use_new_implicit_block_if_needed(c) < 0) {
  Branch (1301:9): [True: 0, False: 412k]
1302
        return -1;
1303
    }
1304
1305
    const struct location *loc = line ? &c->u->u_loc : NULL;    (count: 409k)
  Branch (1305:34): [True: 409k, False: 2.78k]
1306
    return basicblock_addop(c->u->u_curblock, opcode, 0, NULL, loc);
1307
}
1308
1309
static Py_ssize_t
1310
compiler_add_o(PyObject *dict, PyObject *o)
1311
{
1312
    PyObject *v;
1313
    Py_ssize_t arg;
1314
1315
    v = PyDict_GetItemWithError(dict, o);
1316
    if (!v) {
  Branch (1316:9): [True: 422k, False: 941k]
1317
        if (PyErr_Occurred()) {
  Branch (1317:13): [True: 0, False: 422k]
1318
            return -1;
1319
        }
1320
        arg = PyDict_GET_SIZE(dict);
1321
        v = PyLong_FromSsize_t(arg);
1322
        if (!v) {
  Branch (1322:13): [True: 0, False: 422k]
1323
            return -1;
1324
        }
1325
        if (PyDict_SetItem(dict, o, v) < 0) {
  Branch (1325:13): [True: 0, False: 422k]
1326
            Py_DECREF(v);
1327
            return -1;
1328
        }
1329
        Py_DECREF(v);
1330
    }
1331
    else
1332
        arg = PyLong_AsLong(v);
1333
    return arg;
1334
}
1335
1336
// Merge const *o* recursively and return constant key object.
1337
static PyObject*
1338
merge_consts_recursive(PyObject *const_cache, PyObject *o)
1339
{
1340
    assert(PyDict_CheckExact(const_cache));
1341
    // None and Ellipsis are singletons, and the key is the singleton itself.
1342
    // No need to merge object and key.
1343
    if (o == Py_None || o == Py_Ellipsis) {    (count: 586k)
  Branch (1343:9): [True: 61.0k, False: 586k]
  Branch (1343:25): [True: 454, False: 585k]
1344
        Py_INCREF(o);
1345
        return o;
1346
    }
1347
1348
    PyObject *key = _PyCode_ConstantKey(o);
1349
    if (key == NULL) {
  Branch (1349:9): [True: 0, False: 585k]
1350
        return NULL;
1351
    }
1352
1353
    // t is borrowed reference
1354
    PyObject *t = PyDict_SetDefault(const_cache, key, key);
1355
    if (t != key) {
  Branch (1355:9): [True: 86.6k, False: 498k]
1356
        // o is registered in const_cache.  Just use it.
1357
        Py_XINCREF(t);
1358
        Py_DECREF(key);
1359
        return t;
1360
    }
1361
1362
    // We registered o in const_cache.
1363
    // When o is a tuple or frozenset, we want to merge its
1364
    // items too.
1365
    if (PyTuple_CheckExact(o)) {
1366
        Py_ssize_t len = PyTuple_GET_SIZE(o);
1367
        for (Py_ssize_t i = 0; i < len; i++) {
  Branch (1367:32): [True: 223k, False: 6.24k]
1368
            PyObject *item = PyTuple_GET_ITEM(o, i);
1369
            PyObject *u = merge_consts_recursive(const_cache, item);
1370
            if (u == NULL) {
  Branch (1370:17): [True: 0, False: 223k]
1371
                Py_DECREF(key);
1372
                return NULL;
1373
            }
1374
1375
            // See _PyCode_ConstantKey()
1376
            PyObject *v;  // borrowed
1377
            if (PyTuple_CheckExact(u)) {
1378
                v = PyTuple_GET_ITEM(u, 1);
1379
            }
1380
            else {
1381
                v = u;
1382
            }
1383
            if (v != item) {
  Branch (1383:17): [True: 1.82k, False: 221k]
1384
                Py_INCREF(v);
1385
                PyTuple_SET_ITEM(o, i, v);
1386
                Py_DECREF(item);
1387
            }
1388
1389
            Py_DECREF(u);
1390
        }
1391
    }
1392
    else if (PyFrozenSet_CheckExact(o)) {
1393
        // *key* is a tuple, and its first item is a frozenset of
1394
        // constant keys.
1395
        // See _PyCode_ConstantKey() for detail.
1396
        assert(PyTuple_CheckExact(key));
1397
        assert(PyTuple_GET_SIZE(key) == 2);
1398
1399
        Py_ssize_t len = PySet_GET_SIZE(o);
1400
        if (len == 0) {  // empty frozenset should not be re-created.
  Branch (1400:13): [True: 1, False: 74]
1401
            return key;
1402
        }
1403
        PyObject *tuple = PyTuple_New(len);
1404
        if (tuple == NULL) {
  Branch (1404:13): [True: 0, False: 74]
1405
            Py_DECREF(key);
1406
            return NULL;
1407
        }
1408
        Py_ssize_t i = 0, pos = 0;
1409
        PyObject *item;
1410
        Py_hash_t hash;
1411
        while (_PySet_NextEntry(o, &pos, &item, &hash)) {
  Branch (1411:16): [True: 233, False: 74]
1412
            PyObject *k = merge_consts_recursive(const_cache, item);
1413
            if (k == NULL) {
  Branch (1413:17): [True: 0, False: 233]
1414
                Py_DECREF(tuple);
1415
                Py_DECREF(key);
1416
                return NULL;
1417
            }
1418
            PyObject *u;
1419
            if (PyTuple_CheckExact(k)) {
1420
                u = PyTuple_GET_ITEM(k, 1);
1421
                Py_INCREF(u);
1422
                Py_DECREF(k);
1423
            }
1424
            else {
1425
                u = k;
1426
            }
1427
            PyTuple_SET_ITEM(tuple, i, u);  // Steals reference of u.
1428
            i++;
1429
        }
1430
1431
        // Instead of rewriting o, we create a new frozenset and embed it in the
1432
        // key tuple.  The caller should get the merged frozenset from the key tuple.
1433
        PyObject *new = PyFrozenSet_New(tuple);
1434
        Py_DECREF(tuple);
1435
        if (new == NULL) {
  Branch (1435:13): [True: 0, False: 74]
1436
            Py_DECREF(key);
1437
            return NULL;
1438
        }
1439
        assert(PyTuple_GET_ITEM(key, 1) == o);
1440
        Py_DECREF(o);
1441
        PyTuple_SET_ITEM(key, 1, new);
1442
    }
1443
1444
    return key;
1445
}
1446
1447
static Py_ssize_t
1448
compiler_add_const(struct compiler *c, PyObject *o)
1449
{
1450
    PyObject *key = merge_consts_recursive(c->c_const_cache, o);
1451
    if (key == NULL) {
  Branch (1451:9): [True: 0, False: 423k]
1452
        return -1;
1453
    }
1454
1455
    Py_ssize_t arg = compiler_add_o(c->u->u_consts, key);
1456
    Py_DECREF(key);
1457
    return arg;
1458
}
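The practical effect of merge_consts_recursive() plus compiler_add_const() is that equal constants compiled in the same translation unit end up as one shared object. The embedding sketch below should make this visible from the outside using only public C API calls; the source string, the "<demo>"-style driver, and the co_consts index are illustrative assumptions (slot 0 is the docstring/None constant the compiler adds first, so the tuple should land in slot 1).

#include <Python.h>

int
main(void)
{
    Py_Initialize();
    const char *src =
        "def f(): return ('x', 'y')\n"
        "def g(): return ('x', 'y')\n";
    PyObject *globals = PyDict_New();
    PyObject *res = PyRun_String(src, Py_file_input, globals, globals);
    if (res == NULL) {
        PyErr_Print();
        return 1;
    }
    Py_DECREF(res);

    PyObject *f = PyDict_GetItemString(globals, "f");        /* borrowed */
    PyObject *g = PyDict_GetItemString(globals, "g");        /* borrowed */
    PyObject *fcode = PyObject_GetAttrString(f, "__code__");
    PyObject *gcode = PyObject_GetAttrString(g, "__code__");
    PyObject *fconsts = PyObject_GetAttrString(fcode, "co_consts");
    PyObject *gconsts = PyObject_GetAttrString(gcode, "co_consts");

    /* Thanks to the const cache, both functions should reference the very
     * same ('x', 'y') tuple object. */
    PyObject *t1 = PyTuple_GetItem(fconsts, 1);              /* borrowed */
    PyObject *t2 = PyTuple_GetItem(gconsts, 1);              /* borrowed */
    printf("same tuple object: %s\n", t1 == t2 ? "yes" : "no");

    Py_DECREF(fconsts);
    Py_DECREF(gconsts);
    Py_DECREF(fcode);
    Py_DECREF(gcode);
    Py_DECREF(globals);
    return Py_FinalizeEx();
}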
1459
1460
static int
1461
compiler_addop_load_const(struct compiler *c, PyObject *o)
1462
{
1463
    Py_ssize_t arg = compiler_add_const(c, o);
1464
    if (arg < 0)
  Branch (1464:9): [True: 0, False: 398k]
1465
        return 0;
1466
    return compiler_addop_i(c, LOAD_CONST, arg, true);
1467
}
1468
1469
static int
1470
compiler_addop_o(struct compiler *c, int opcode, PyObject *dict,
1471
                     PyObject *o)
1472
{
1473
    Py_ssize_t arg = compiler_add_o(dict, o);
1474
    if (arg < 0)
  Branch (1474:9): [True: 0, False: 204k]
1475
        return 0;
1476
    return compiler_addop_i(c, opcode, arg, true);
1477
}
1478
1479
static int
1480
compiler_addop_name(struct compiler *c, int opcode, PyObject *dict,
1481
                    PyObject *o)
1482
{
1483
    Py_ssize_t arg;
1484
1485
    PyObject *mangled = _Py_Mangle(c->u->u_private, o);
1486
    if (!mangled)
  Branch (1486:9): [True: 0, False: 80.4k]
1487
        return 0;
1488
    arg = compiler_add_o(dict, mangled);
1489
    Py_DECREF(mangled);
1490
    if (arg < 0)
  Branch (1490:9): [True: 0, False: 80.4k]
1491
        return 0;
1492
    if (opcode == LOAD_ATTR) {
  Branch (1492:9): [True: 39.8k, False: 40.6k]
1493
        arg <<= 1;
1494
    }
1495
    if (opcode == LOAD_METHOD) {
  Branch (1495:9): [True: 26.8k, False: 53.5k]
1496
        opcode = LOAD_ATTR;
1497
        arg <<= 1;
1498
        arg |= 1;
1499
    }
1500
    return compiler_addop_i(c, opcode, arg, true);
1501
}
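compiler_addop_name() packs two things into one LOAD_ATTR oparg: the name index shifted left by one bit, with bit 0 set when the load feeds a method call (the LOAD_METHOD case handled above). A tiny self-contained C illustration of just that encoding follows; the index values are made up for the example.

#include <stdio.h>

/* The oparg layout used above: name index in the upper bits,
 * bit 0 flags the method-call form of LOAD_ATTR. */
static int
pack_attr_oparg(int name_index, int is_method)
{
    return (name_index << 1) | (is_method ? 1 : 0);
}

int
main(void)
{
    int plain  = pack_attr_oparg(5, 0);   /* ordinary attribute load  */
    int method = pack_attr_oparg(5, 1);   /* lowered LOAD_METHOD form */
    printf("plain=%d method=%d index=%d flag=%d\n",
           plain, method, method >> 1, method & 1);
    return 0;
}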
1502
1503
/* Add an opcode with an integer argument.
1504
   Returns 0 on failure, 1 on success.
1505
*/
1506
static int
1507
compiler_addop_i(struct compiler *c, int opcode, Py_ssize_t oparg, bool line)
1508
{
1509
    if (compiler_use_new_implicit_block_if_needed(c) < 0) {
  Branch (1509:9): [True: 0, False: 1.97M]
1510
        return -1;
1511
    }
1512
    /* oparg value is unsigned, but a signed C int is usually used to store
1513
       it in the C code (like Python/ceval.c).
1514
1515
       Limit to 32-bit signed C int (rather than INT_MAX) for portability.
1516
1517
       The argument of a concrete bytecode instruction is limited to 8-bit.
1518
       EXTENDED_ARG is used for 16, 24, and 32-bit arguments. */
1519
1520
    int oparg_ = Py_SAFE_DOWNCAST(oparg, Py_ssize_t, int);
1521
1522
    const struct location *loc = line ? &c->u->u_loc : NULL;
  Branch (1522:34): [True: 1.97M, False: 524]
1523
    return basicblock_addop(c->u->u_curblock, opcode, oparg_, NULL, loc);
1524
}
1525
1526
static int
1527
compiler_addop_j(struct compiler *c, int opcode, basicblock *target, bool line)
1528
{
1529
    if (compiler_use_new_implicit_block_if_needed(c) < 0) {
  Branch (1529:9): [True: 0, False: 267k]
1530
        return -1;
1531
    }
1532
    const struct location *loc = line ? &c->u->u_loc : NULL;
  Branch (1532:34): [True: 258k, False: 9.58k]
1533
    assert(target != NULL);
1534
    assert(IS_JUMP_OPCODE(opcode) || IS_BLOCK_PUSH_OPCODE(opcode));
1535
    return basicblock_addop(c->u->u_curblock, opcode, 0, target, loc);
1536
}
1537
1538
#define ADDOP(C, OP) { \
1539
    if (!compiler_addop((C), (OP), true)) \
1540
        
return 00
; \
1541
}
1542
1543
#define ADDOP_NOLINE(C, OP) { \
1544
    if (!compiler_addop((C), (OP), false)) \
1545
        
return 00
; \
1546
}
1547
1548
#define ADDOP_IN_SCOPE(C, OP) { \
1549
    if (!compiler_addop((C), (OP), true)) { \
1550
        compiler_exit_scope(c); \
1551
        return 0; \
1552
    } \
1553
}
1554
1555
#define ADDOP_LOAD_CONST(C, O) { \
1556
    if (!compiler_addop_load_const((C), (O))) \
1557
        
return 00
; \
1558
}
1559
1560
/* Same as ADDOP_LOAD_CONST, but steals a reference. */
1561
#define ADDOP_LOAD_CONST_NEW(C, O) { \
1562
    PyObject *__new_const = (O); \
1563
    if (__new_const == NULL) { \
1564
        return 0; \
1565
    } \
1566
    if (!compiler_addop_load_const((C), __new_const)) { \
1567
        Py_DECREF(__new_const); \
1568
        return 0; \
1569
    } \
1570
    Py_DECREF(__new_const); \
1571
}
1572
1573
#define ADDOP_N(C, OP, O, TYPE) { \
1574
    assert(!HAS_CONST(OP)); /* use ADDOP_LOAD_CONST_NEW */ \
1575
    if (!compiler_addop_o((C), (OP), (C)->u->u_ ## TYPE, (O))) { \
1576
        Py_DECREF((O)); \
1577
        return 0; \
1578
    } \
1579
    Py_DECREF((O)); \
1580
}
1581
1582
#define ADDOP_NAME(C, OP, O, TYPE) { \
1583
    if (!compiler_addop_name((C), (OP), (C)->u->u_ ## TYPE, (O))) \
1584
        
return 00
; \
1585
}
1586
1587
#define ADDOP_I(C, OP, O) { \
1588
    if (!compiler_addop_i((C), (OP), (O), true)) \
1589
        
return 00
; \
1590
}
1591
1592
#define ADDOP_I_NOLINE(C, OP, O) { \
1593
    if (!compiler_addop_i((C), (OP), (O), false)) \
1594
        
return 00
; \
1595
}
1596
1597
#define ADDOP_JUMP(C, OP, O) { \
1598
    if (!compiler_addop_j((C), (OP), (O), true)) \
1599
        
return 00
; \
1600
}
1601
1602
/* Add a jump with no line number.
1603
 * Used for artificial jumps that have no corresponding
1604
 * token in the source code. */
1605
#define ADDOP_JUMP_NOLINE(C, OP, O) { \
1606
    if (!compiler_addop_j((C), (OP), (O), false)) \
1607
        
return 00
; \
1608
}
1609
1610
#define ADDOP_COMPARE(C, CMP) { \
1611
    if (!compiler_addcompare((C), (cmpop_ty)(CMP))) \
1612
        
return 00
; \
1613
}
1614
1615
#define ADDOP_BINARY(C, BINOP) \
1616
    RETURN_IF_FALSE(addop_binary((C), (BINOP), false))
1617
1618
#define ADDOP_INPLACE(C, BINOP) \
1619
    RETURN_IF_FALSE(addop_binary((C), (BINOP), true))
1620
1621
/* VISIT and VISIT_SEQ take an ASDL type as their second argument.  They use
1622
   the ASDL name to synthesize the name of the C type and the visit function.
1623
*/
1624
1625
#define ADD_YIELD_FROM(C, await) \
1626
    RETURN_IF_FALSE(compiler_add_yield_from((C), (await)))
1627
1628
#define POP_EXCEPT_AND_RERAISE(C) \
1629
    RETURN_IF_FALSE(compiler_pop_except_and_reraise((C)))
1630
1631
#define ADDOP_YIELD(C) \
1632
    RETURN_IF_FALSE(addop_yield(C))
1633
1634
#define VISIT(C, TYPE, V) {\
1635
    if (!compiler_visit_ ## TYPE((C), (V))) \
1636
        
return 0496
; \
1637
}
1638
1639
#define VISIT_IN_SCOPE(C, TYPE, V) {\
1640
    if (!compiler_visit_ ## TYPE((C), (V))) { \
1641
        compiler_exit_scope(c); \
1642
        return 0; \
1643
    } \
1644
}
1645
1646
#define VISIT_SEQ(C, TYPE, SEQ) { \
1647
    int _i; \
1648
    asdl_ ## TYPE ## _seq *seq = (SEQ); /* avoid variable capture */ \
1649
    for (_i = 0; _i < asdl_seq_LEN(seq); _i++311k) { \
1650
        TYPE ## _ty elt = (TYPE ## _ty)asdl_seq_GET(seq, _i); \
1651
        if (!compiler_visit_ ## TYPE((C), elt)) \
1652
            
return 084
; \
1653
    } \
1654
}
1655
1656
#define VISIT_SEQ_IN_SCOPE(C, TYPE, SEQ) { \
1657
    int _i; \
1658
    asdl_ ## TYPE ## _seq *seq = (SEQ); /* avoid variable capture */ \
1659
    for (_i = 0; _i < asdl_seq_LEN(seq); _i++4.09k) { \
1660
        TYPE ## _ty elt = (TYPE ## _ty)asdl_seq_GET(seq, _i); \
1661
        if (!compiler_visit_ ## TYPE((C), elt)) { \
1662
            compiler_exit_scope(c); \
1663
            return 0; \
1664
        } \
1665
    } \
1666
}
1667
1668
#define RETURN_IF_FALSE(X)  \
1669
    if (!(X)) {             \
1670
        return 0;           \
1671
    }
1672
1673
static int
1674
compiler_enter_scope(struct compiler *c, identifier name,
1675
                     int scope_type, void *key, int lineno)
1676
{
1677
    struct compiler_unit *u;
1678
    basicblock *block;
1679
1680
    u = (struct compiler_unit *)PyObject_Calloc(1, sizeof(
1681
                                            struct compiler_unit));
1682
    if (!u) {
  Branch (1682:9): [True: 0, False: 73.2k]
1683
        PyErr_NoMemory();
1684
        return 0;
1685
    }
1686
    u->u_scope_type = scope_type;
1687
    u->u_argcount = 0;
1688
    u->u_posonlyargcount = 0;
1689
    u->u_kwonlyargcount = 0;
1690
    u->u_ste = PySymtable_Lookup(c->c_st, key);
1691
    if (!u->u_ste) {
  Branch (1691:9): [True: 0, False: 73.2k]
1692
        compiler_unit_free(u);
1693
        return 0;
1694
    }
1695
    Py_INCREF(name);
1696
    u->u_name = name;
1697
    u->u_varnames = list2dict(u->u_ste->ste_varnames);
1698
    u->u_cellvars = dictbytype(u->u_ste->ste_symbols, CELL, 0, 0);
1699
    if (!u->u_varnames || !u->u_cellvars) {
  Branch (1699:9): [True: 0, False: 73.2k]
  Branch (1699:27): [True: 0, False: 73.2k]
1700
        compiler_unit_free(u);
1701
        return 0;
1702
    }
1703
    if (u->u_ste->ste_needs_class_closure) {
  Branch (1703:9): [True: 147, False: 73.1k]
1704
        /* Cook up an implicit __class__ cell. */
1705
        int res;
1706
        assert(u->u_scope_type == COMPILER_SCOPE_CLASS);
1707
        assert(PyDict_GET_SIZE(u->u_cellvars) == 0);
1708
        res = PyDict_SetItem(u->u_cellvars, &_Py_ID(__class__),
1709
                             _PyLong_GetZero());
1710
        if (res < 0) {
  Branch (1710:13): [True: 0, False: 147]
1711
            compiler_unit_free(u);
1712
            return 0;
1713
        }
1714
    }
1715
1716
    u->u_freevars = dictbytype(u->u_ste->ste_symbols, FREE, DEF_FREE_CLASS,
1717
                               PyDict_GET_SIZE(u->u_cellvars));
1718
    if (!u->u_freevars) {
  Branch (1718:9): [True: 0, False: 73.2k]
1719
        compiler_unit_free(u);
1720
        return 0;
1721
    }
1722
1723
    u->u_blocks = NULL;
1724
    u->u_nfblocks = 0;
1725
    u->u_firstlineno = lineno;
1726
    u->u_loc = LOCATION(lineno, lineno, 0, 0);
1727
    u->u_consts = PyDict_New();
1728
    if (!u->u_consts) {
  Branch (1728:9): [True: 0, False: 73.2k]
1729
        compiler_unit_free(u);
1730
        return 0;
1731
    }
1732
    u->u_names = PyDict_New();
1733
    if (!u->u_names) {
  Branch (1733:9): [True: 0, False: 73.2k]
1734
        compiler_unit_free(u);
1735
        return 0;
1736
    }
1737
1738
    u->u_private = NULL;
1739
1740
    /* Push the old compiler_unit on the stack. */
1741
    if (c->u) {
  Branch (1741:9): [True: 24.6k, False: 48.6k]
1742
        PyObject *capsule = PyCapsule_New(c->u, CAPSULE_NAME, NULL);
1743
        if (!capsule || PyList_Append(c->c_stack, capsule) < 0) {
  Branch (1743:13): [True: 0, False: 24.6k]
  Branch (1743:25): [True: 0, False: 24.6k]
1744
            Py_XDECREF(capsule);
1745
            compiler_unit_free(u);
1746
            return 0;
1747
        }
1748
        Py_DECREF(capsule);
1749
        u->u_private = c->u->u_private;
1750
        Py_XINCREF(u->u_private);
1751
    }
1752
    c->u = u;
1753
1754
    c->c_nestlevel++;
1755
1756
    block = compiler_new_block(c);
1757
    if (block == NULL)
  Branch (1757:9): [True: 0, False: 73.2k]
1758
        return 0;
1759
    c->u->u_curblock = block;
1760
1761
    if (u->u_scope_type == COMPILER_SCOPE_MODULE) {
  Branch (1761:9): [True: 48.6k, False: 24.6k]
1762
        c->u->u_loc.lineno = 0;
1763
    }
1764
    else {
1765
        if (!compiler_set_qualname(c))
  Branch (1765:13): [True: 0, False: 24.6k]
1766
            return 0;
1767
    }
1768
    ADDOP_I(c, RESUME, 0);
1769
1770
    if (u->u_scope_type == COMPILER_SCOPE_MODULE) {
  Branch (1770:9): [True: 48.6k, False: 24.6k]
1771
        c->u->u_loc.lineno = -1;
1772
    }
1773
    return 1;
1774
}
1775
1776
static void
1777
compiler_exit_scope(struct compiler *c)
1778
{
1779
    // Don't call PySequence_DelItem() with an exception raised
1780
    PyObject *exc_type, *exc_val, *exc_tb;
1781
    PyErr_Fetch(&exc_type, &exc_val, &exc_tb);
1782
1783
    c->c_nestlevel--;
1784
    compiler_unit_free(c->u);
1785
    /* Restore c->u to the parent unit. */
1786
    Py_ssize_t n = PyList_GET_SIZE(c->c_stack) - 1;
1787
    if (n >= 0) {
  Branch (1787:9): [True: 24.6k, False: 48.6k]
1788
        PyObject *capsule = PyList_GET_ITEM(c->c_stack, n);
1789
        c->u = (struct compiler_unit *)PyCapsule_GetPointer(capsule, CAPSULE_NAME);
1790
        assert(c->u);
1791
        /* we are deleting from a list so this really shouldn't fail */
1792
        if (PySequence_DelItem(c->c_stack, n) < 0) {
  Branch (1792:13): [True: 0, False: 24.6k]
1793
            _PyErr_WriteUnraisableMsg("on removing the last compiler "
1794
                                      "stack item", NULL);
1795
        }
1796
        compiler_unit_check(c->u);
1797
    }
1798
    else {
1799
        c->u = NULL;
1800
    }
1801
1802
    PyErr_Restore(exc_type, exc_val, exc_tb);
1803
}
1804
1805
/* Check statically whether variable annotations are present in a block. */
1806
1807
static int
1808
find_ann(asdl_stmt_seq *stmts)
1809
{
1810
    int i, j, res = 0;
1811
    stmt_ty st;
1812
1813
    for (i = 0; i < asdl_seq_LEN(stmts); i++) {
  Branch (1813:17): [True: 456k, False: 421k]
1814
        st = (stmt_ty)asdl_seq_GET(stmts, i);
1815
        switch (st->kind) {
1816
        case AnnAssign_kind:
  Branch (1816:9): [True: 383, False: 456k]
1817
            return 1;
1818
        case For_kind:
  Branch (1818:9): [True: 287, False: 456k]
1819
            res = find_ann(st->v.For.body) ||
  Branch (1819:19): [True: 0, False: 287]
1820
                  find_ann(st->v.For.orelse);
  Branch (1820:19): [True: 0, False: 287]
1821
            break;
1822
        case AsyncFor_kind:
  Branch (1822:9): [True: 5, False: 456k]
1823
            res = find_ann(st->v.AsyncFor.body) ||
  Branch (1823:19): [True: 0, False: 5]
1824
                  find_ann(st->v.AsyncFor.orelse);
  Branch (1824:19): [True: 0, False: 5]
1825
            break;
1826
        case While_kind:
  Branch (1826:9): [True: 70, False: 456k]
1827
            res = find_ann(st->v.While.body) ||
  Branch (1827:19): [True: 0, False: 70]
1828
                  find_ann(st->v.While.orelse);
  Branch (1828:19): [True: 0, False: 70]
1829
            break;
1830
        case If_kind:
  Branch (1830:9): [True: 202k, False: 254k]
1831
            res = find_ann(st->v.If.body) ||
  Branch (1831:19): [True: 0, False: 202k]
1832
                  find_ann(st->v.If.orelse);
  Branch (1832:19): [True: 0, False: 202k]
1833
            break;
1834
        case With_kind:
  Branch (1834:9): [True: 140, False: 456k]
1835
            res = find_ann(st->v.With.body);
1836
            break;
1837
        case AsyncWith_kind:
  Branch (1837:9): [True: 5, False: 456k]
1838
            res = find_ann(st->v.AsyncWith.body);
1839
            break;
1840
        case Try_kind:
  Branch (1840:9): [True: 453, False: 456k]
1841
            for (j = 0; j < asdl_seq_LEN(st->v.Try.handlers); j++) {
  Branch (1841:25): [True: 445, False: 453]
1842
                excepthandler_ty handler = (excepthandler_ty)asdl_seq_GET(
1843
                    st->v.Try.handlers, j);
1844
                if (find_ann(handler->v.ExceptHandler.body)) {
  Branch (1844:21): [True: 0, False: 445]
1845
                    return 1;
1846
                }
1847
            }
1848
            res = find_ann(st->v.Try.body) ||
  Branch (1848:19): [True: 0, False: 453]
1849
                  find_ann(st->v.Try.finalbody) ||
  Branch (1849:19): [True: 0, False: 453]
1850
                  find_ann(st->v.Try.orelse);
  Branch (1850:19): [True: 0, False: 453]
1851
            break;
1852
        case TryStar_kind:
  Branch (1852:9): [True: 8, False: 456k]
1853
            for (j = 0; j < asdl_seq_LEN(st->v.TryStar.handlers); j++) {
  Branch (1853:25): [True: 8, False: 8]
1854
                excepthandler_ty handler = (excepthandler_ty)asdl_seq_GET(
1855
                    st->v.TryStar.handlers, j);
1856
                if (find_ann(handler->v.ExceptHandler.body)) {
  Branch (1856:21): [True: 0, False: 8]
1857
                    return 1;
1858
                }
1859
            }
1860
            res = find_ann(st->v.TryStar.body) ||
  Branch (1860:19): [True: 0, False: 8]
1861
                  find_ann(st->v.TryStar.finalbody) ||
  Branch (1861:19): [True: 0, False: 8]
1862
                  find_ann(st->v.TryStar.orelse);
  Branch (1862:19): [True: 0, False: 8]
1863
            break;
1864
        default:
  Branch (1864:9): [True: 252k, False: 203k]
1865
            res = 0;
1866
        }
1867
        if (res) {
  Branch (1867:13): [True: 0, False: 456k]
1868
            break;
1869
        }
1870
    }
1871
    return res;
1872
}
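find_ann() is what decides whether SETUP_ANNOTATIONS (and therefore an __annotations__ mapping) is emitted for a block at all. The embedding sketch below should show the difference at module level; the has_annotations_dict() helper name and the source strings are illustrative assumptions, not part of compile.c.

#include <Python.h>

/* has_annotations_dict(): execute a module body in a fresh namespace and
 * report whether __annotations__ was created. */
static int
has_annotations_dict(const char *src)
{
    PyObject *g = PyDict_New();
    PyObject *r = PyRun_String(src, Py_file_input, g, g);
    if (r == NULL) {
        PyErr_Print();
        Py_DECREF(g);
        return -1;
    }
    Py_DECREF(r);
    int present = PyDict_GetItemString(g, "__annotations__") != NULL;
    Py_DECREF(g);
    return present;
}

int
main(void)
{
    Py_Initialize();
    /* Expected: 0 for the plain assignment, 1 for the annotated one. */
    printf("x = 1      -> %d\n", has_annotations_dict("x = 1\n"));
    printf("x: int = 1 -> %d\n", has_annotations_dict("x: int = 1\n"));
    return Py_FinalizeEx();
}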
1873
1874
/*
1875
 * Frame block handling functions
1876
 */
1877
1878
static int
1879
compiler_push_fblock(struct compiler *c, enum fblocktype t, basicblock *b,
1880
                     basicblock *exit, void *datum)
1881
{
1882
    struct fblockinfo *f;
1883
    if (c->u->u_nfblocks >= CO_MAXBLOCKS) {
  Branch (1883:9): [True: 2, False: 12.8k]
1884
        return compiler_error(c, "too many statically nested blocks");
1885
    }
1886
    f = &c->u->u_fblock[c->u->u_nfblocks++];
1887
    f->fb_type = t;
1888
    f->fb_block = b;
1889
    f->fb_exit = exit;
1890
    f->fb_datum = datum;
1891
    return 1;
1892
}
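The CO_MAXBLOCKS check above (20 in this version of the headers) is what produces the "too many statically nested blocks" SyntaxError. The hedged sketch below should trigger it by generating more nested while loops than the limit allows; the generated source and the "<nested>" file name are assumptions for illustration.

#include <Python.h>
#include <string.h>

int
main(void)
{
    Py_Initialize();
    /* Build source with more nested loops than CO_MAXBLOCKS allows. */
    char src[4096] = "";
    int depth = 25;
    for (int i = 0; i < depth; i++) {
        for (int j = 0; j < i; j++) {
            strcat(src, " ");
        }
        strcat(src, "while True:\n");
    }
    for (int j = 0; j < depth; j++) {
        strcat(src, " ");
    }
    strcat(src, "pass\n");

    PyObject *code = Py_CompileString(src, "<nested>", Py_file_input);
    if (code == NULL) {
        PyErr_Print();   /* SyntaxError: too many statically nested blocks */
    }
    else {
        Py_DECREF(code);
    }
    return Py_FinalizeEx();
}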
1893
1894
static void
1895
compiler_pop_fblock(struct compiler *c, enum fblocktype t, basicblock *b)
1896
{
1897
    struct compiler_unit *u = c->u;
1898
    assert(u->u_nfblocks > 0);
1899
    u->u_nfblocks--;
1900
    assert(u->u_fblock[u->u_nfblocks].fb_type == t);
1901
    assert(u->u_fblock[u->u_nfblocks].fb_block == b);
1902
}
1903
1904
static int
1905
compiler_call_exit_with_nones(struct compiler *c) {
1906
    ADDOP_LOAD_CONST(c, Py_None);
1907
    ADDOP_LOAD_CONST(c, Py_None);
1908
    ADDOP_LOAD_CONST(c, Py_None);
1909
    ADDOP_I(c, CALL, 2);
1910
    return 1;
1911
}
1912
1913
static int
1914
compiler_add_yield_from(struct compiler *c, int await)
1915
{
1916
    basicblock *start, *resume, *exit;
1917
    start = compiler_new_block(c);
1918
    resume = compiler_new_block(c);
1919
    exit = compiler_new_block(c);
1920
    if (start == NULL || resume == NULL || exit == NULL) {
  Branch (1920:9): [True: 0, False: 373]
  Branch (1920:26): [True: 0, False: 373]
  Branch (1920:44): [True: 0, False: 373]
1921
        return 0;
1922
    }
1923
    compiler_use_next_block(c, start);
1924
    ADDOP_JUMP(c, SEND, exit);
1925
    compiler_use_next_block(c, resume);
1926
    ADDOP_I(c, YIELD_VALUE, 0);
1927
    ADDOP_I(c, RESUME, await ? 3 : 2);
1928
    ADDOP_JUMP(c, JUMP_NO_INTERRUPT, start);
1929
    compiler_use_next_block(c, exit);
1930
    return 1;
1931
}
1932
1933
static int
1934
compiler_pop_except_and_reraise(struct compiler *c)
1935
{
1936
    /* Stack contents
1937
     * [exc_info, lasti, exc]            COPY        3
1938
     * [exc_info, lasti, exc, exc_info]  POP_EXCEPT
1939
     * [exc_info, lasti, exc]            RERAISE      1
1940
     * (exception_unwind clears the stack)
1941
     */
1942
1943
    ADDOP_I(c, COPY, 3);
1944
    ADDOP(c, POP_EXCEPT);
1945
    ADDOP_I(c, RERAISE, 1);
1946
    return 1;
1947
}
1948
1949
/* Unwind a frame block.  If preserve_tos is true, the TOS before
1950
 * popping the blocks will be restored afterwards, unless another
1951
 * return, break or continue is found, in which case the TOS will
1952
 * be popped.
1953
 */
1954
static int
1955
compiler_unwind_fblock(struct compiler *c, struct fblockinfo *info,
1956
                       int preserve_tos)
1957
{
1958
    switch (info->fb_type) {
  Branch (1958:13): [True: 0, False: 3.20k]
1959
        case WHILE_LOOP:
  Branch (1959:9): [True: 581, False: 2.62k]
1960
        case EXCEPTION_HANDLER:
  Branch (1960:9): [True: 591, False: 2.61k]
1961
        case EXCEPTION_GROUP_HANDLER:
  Branch (1961:9): [True: 0, False: 3.20k]
1962
        case ASYNC_COMPREHENSION_GENERATOR:
  Branch (1962:9): [True: 0, False: 3.20k]
1963
            return 1;
1964
1965
        case FOR_LOOP:
  Branch (1965:9): [True: 606, False: 2.60k]
1966
            /* Pop the iterator */
1967
            if (preserve_tos) {
  Branch (1967:17): [True: 147, False: 459]
1968
                ADDOP_I(c, SWAP, 2);
1969
            }
1970
            ADDOP(c, POP_TOP);
1971
            return 1;
1972
1973
        case TRY_EXCEPT:
  Branch (1973:9): [True: 520, False: 2.68k]
1974
            ADDOP(c, POP_BLOCK);
1975
            return 1;
1976
1977
        case FINALLY_TRY:
  Branch (1977:9): [True: 99, False: 3.11k]
1978
            /* This POP_BLOCK gets the line number of the unwinding statement */
1979
            ADDOP(c, POP_BLOCK);
1980
            if (preserve_tos) {
  Branch (1980:17): [True: 58, False: 41]
1981
                if (!compiler_push_fblock(c, POP_VALUE, NULL, NULL, NULL)) {
  Branch (1981:21): [True: 0, False: 58]
1982
                    return 0;
1983
                }
1984
            }
1985
            /* Emit the finally block */
1986
            VISIT_SEQ(c, stmt, info->fb_datum);
1987
            if (preserve_tos) {
  Branch (1987:17): [True: 58, False: 41]
1988
                compiler_pop_fblock(c, POP_VALUE, NULL);
1989
            }
1990
            /* The finally block should appear to execute after the
1991
             * statement causing the unwinding, so make the unwinding
1992
             * instruction artificial */
1993
            UNSET_LOC(c);
1994
            return 1;
1995
1996
        case FINALLY_END:
  Branch (1996:9): [True: 50, False: 3.15k]
1997
            if (preserve_tos) {
  Branch (1997:17): [True: 0, False: 50]
1998
                ADDOP_I(c, SWAP, 2);
1999
            }
2000
            ADDOP(c, POP_TOP); /* exc_value */
2001
            if (preserve_tos) {
  Branch (2001:17): [True: 0, False: 50]
2002
                ADDOP_I(c, SWAP, 2);
2003
            }
2004
            ADDOP(c, POP_BLOCK);
2005
            ADDOP(c, POP_EXCEPT);
2006
            return 1;
2007
2008
        case WITH:
  Branch (2008:9): [True: 130, False: 3.07k]
2009
        case ASYNC_WITH:
  Branch (2009:9): [True: 29, False: 3.18k]
2010
            SET_LOC(c, (stmt_ty)info->fb_datum);
2011
            ADDOP(c, POP_BLOCK);
2012
            if (preserve_tos) {
  Branch (2012:17): [True: 90, False: 69]
2013
                ADDOP_I(c, SWAP, 2);
2014
            }
2015
            if(!compiler_call_exit_with_nones(c)) {
  Branch (2015:16): [True: 0, False: 159]
2016
                return 0;
2017
            }
2018
            if (info->fb_type == ASYNC_WITH) {
  Branch (2018:17): [True: 29, False: 130]
2019
                ADDOP_I(c, GET_AWAITABLE, 2);
2020
                ADDOP_LOAD_CONST(c, Py_None);
2021
                ADD_YIELD_FROM(c, 1);
2022
            }
2023
            ADDOP(c, POP_TOP);
2024
            /* The exit block should appear to execute after the
2025
             * statement causing the unwinding, so make the unwinding
2026
             * instruction artificial */
2027
            UNSET_LOC(c);
2028
            return 1;
2029
2030
        case HANDLER_CLEANUP:
  Branch (2030:9): [True: 599, False: 2.61k]
2031
            if (info->fb_datum) {
  Branch (2031:17): [True: 101, False: 498]
2032
                ADDOP(c, POP_BLOCK);
2033
            }
2034
            if (preserve_tos) {
  Branch (2034:17): [True: 179, False: 420]
2035
                ADDOP_I(c, SWAP, 2);
2036
            }
2037
            ADDOP(c, POP_BLOCK);
2038
            ADDOP(c, POP_EXCEPT);
2039
            if (info->fb_datum) {
  Branch (2039:17): [True: 101, False: 498]
2040
                ADDOP_LOAD_CONST(c, Py_None);
2041
                compiler_nameop(c, info->fb_datum, Store);
2042
                compiler_nameop(c, info->fb_datum, Del);
2043
            }
2044
            return 1;
2045
2046
        case POP_VALUE:
  Branch (2046:9): [True: 4, False: 3.20k]
2047
            if (preserve_tos) {
  Branch (2047:17): [True: 0, False: 4]
2048
                ADDOP_I(c, SWAP, 2);
2049
            }
2050
            ADDOP(c, POP_TOP);
2051
            return 1;
2052
    }
2053
    
    Py_UNREACHABLE();
2054
}
2055
2056
/** Unwind block stack. If loop is not NULL, then stop when the first loop is encountered. */
2057
static int
2058
compiler_unwind_fblock_stack(struct compiler *c, int preserve_tos, struct fblockinfo **loop) {
2059
    if (c->u->u_nfblocks == 0) {
  Branch (2059:9): [True: 15.9k, False: 3.79k]
2060
        return 1;
2061
    }
2062
    struct fblockinfo *top = &c->u->u_fblock[c->u->u_nfblocks-1];
2063
    if (top->fb_type == EXCEPTION_GROUP_HANDLER) {
  Branch (2063:9): [True: 8, False: 3.78k]
2064
        return compiler_error(
2065
            c, "'break', 'continue' and 'return' cannot appear in an except* block");
2066
    }
2067
    if (loop != NULL && (top->fb_type == WHILE_LOOP || top->fb_type == FOR_LOOP)) {
  Branch (2067:9): [True: 1.85k, False: 1.93k]
  Branch (2067:26): [True: 579, False: 1.27k]
  Branch (2067:56): [True: 804, False: 474]
2068
        *loop = top;
2069
        return 1;
2070
    }
2071
    struct fblockinfo copy = *top;
2072
    c->u->u_nfblocks--;
2073
    if (!compiler_unwind_fblock(c, &copy, preserve_tos)) {
  Branch (2073:9): [True: 0, False: 2.40k]
2074
        return 0;
2075
    }
2076
    if (!compiler_unwind_fblock_stack(c, preserve_tos, loop)) {
  Branch (2076:9): [True: 8, False: 2.39k]
2077
        return 0;
2078
    }
2079
    c->u->u_fblock[c->u->u_nfblocks] = copy;
2080
    c->u->u_nfblocks++;
2081
    return 1;
2082
}
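The EXCEPTION_GROUP_HANDLER check above rejects break, continue and return inside an except* block at compile time. A minimal embedding sketch that should reproduce the error; the source string and "<demo>" file name are illustrative.

#include <Python.h>

int
main(void)
{
    Py_Initialize();
    const char *src =
        "def f():\n"
        "    try:\n"
        "        pass\n"
        "    except* TypeError:\n"
        "        return\n";
    PyObject *code = Py_CompileString(src, "<demo>", Py_file_input);
    if (code == NULL) {
        /* SyntaxError: 'break', 'continue' and 'return' cannot appear
         * in an except* block */
        PyErr_Print();
    }
    else {
        Py_DECREF(code);
    }
    return Py_FinalizeEx();
}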
2083
2084
/* Compile a sequence of statements, checking for a docstring
2085
   and for annotations. */
2086
2087
static int
2088
compiler_body(struct compiler *c, asdl_stmt_seq *stmts)
2089
{
2090
    int i = 0;
2091
    stmt_ty st;
2092
    PyObject *docstring;
2093
2094
    /* Set current line number to the line number of first statement.
2095
       This way line number for SETUP_ANNOTATIONS will always
2096
       coincide with the line number of first "real" statement in module.
2097
       If body is empty, then lineno will be set later in assemble. */
2098
    if (c->u->u_scope_type == COMPILER_SCOPE_MODULE && asdl_seq_LEN(stmts)) {
  Branch (2098:9): [True: 8.62k, False: 1.81k]
2099
        st = (stmt_ty)asdl_seq_GET(stmts, 0);
2100
        SET_LOC(c, st);
2101
    }
2102
    /* Every annotated class and module should have __annotations__. */
2103
    if (find_ann(stmts)) {
  Branch (2103:9): [True: 382, False: 10.0k]
2104
        ADDOP(c, SETUP_ANNOTATIONS);
2105
    }
2106
    if (!asdl_seq_LEN(stmts))
  Branch (2106:9): [True: 299, False: 10.1k]
2107
        return 1;
2108
    /* if not -OO mode, set docstring */
2109
    if (c->c_optimize < 2) {
  Branch (2109:9): [True: 9.84k, False: 305]
2110
        docstring = _PyAST_GetDocString(stmts);
2111
        if (docstring) {
  Branch (2111:13): [True: 854, False: 8.98k]
2112
            i = 1;
2113
            st = (stmt_ty)asdl_seq_GET(stmts, 0);
2114
            assert(st->kind == Expr_kind);
2115
            VISIT(c, expr, st->v.Expr.value);
2116
            UNSET_LOC(c);
2117
            if (!compiler_nameop(c, &_Py_ID(__doc__), Store))
  Branch (2117:17): [True: 0, False: 854]
2118
                return 0;
2119
        }
2120
    }
2121
    
    for (; i < asdl_seq_LEN(stmts); i++)
  Branch (2121:12): [True: 244k, False: 9.89k]
2122
        VISIT(c, stmt, (stmt_ty)asdl_seq_GET(stmts, i));
2123
    return 1;
2124
}
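compiler_body() peels off a leading string constant and stores it under __doc__ (unless compiling at optimization level 2). The small sketch below should show the stored docstring after executing a module body; the source text is an illustrative assumption.

#include <Python.h>

int
main(void)
{
    Py_Initialize();
    const char *src =
        "\"\"\"Module docstring.\"\"\"\n"
        "x = 1\n";
    PyObject *g = PyDict_New();
    PyObject *r = PyRun_String(src, Py_file_input, g, g);
    if (r == NULL) {
        PyErr_Print();
        return 1;
    }
    Py_DECREF(r);
    /* The leading string constant was stored under __doc__. */
    PyObject *doc = PyDict_GetItemString(g, "__doc__");      /* borrowed */
    PyObject_Print(doc, stdout, 0);
    printf("\n");
    Py_DECREF(g);
    return Py_FinalizeEx();
}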
2125
2126
static PyCodeObject *
2127
compiler_mod(struct compiler *c, mod_ty mod)
2128
{
2129
    PyCodeObject *co;
2130
    int addNone = 1;
2131
    _Py_DECLARE_STR(anon_module, "<module>");
2132
    if (!compiler_enter_scope(c, &_Py_STR(anon_module), COMPILER_SCOPE_MODULE,
  Branch (2132:9): [True: 0, False: 48.6k]
2133
                              mod, 1)) {
2134
        return NULL;
2135
    }
2136
    c->u->u_loc.lineno = 1;
2137
    switch (mod->kind) {
2138
    case Module_kind:
  Branch (2138:5): [True: 8.62k, False: 39.9k]
2139
        if (!compiler_body(c, mod->v.Module.body)) {
  Branch (2139:13): [True: 238, False: 8.39k]
2140
            compiler_exit_scope(c);
2141
            return 0;
2142
        }
2143
        break;
2144
    case Interactive_kind:
  Branch (2144:5): [True: 4.08k, False: 44.5k]
2145
        if (find_ann(mod->v.Interactive.body)) {
  Branch (2145:13): [True: 1, False: 4.07k]
2146
            ADDOP(c, SETUP_ANNOTATIONS);
2147
        }
2148
        c->c_interactive = 1;
2149
        VISIT_SEQ_IN_SCOPE(c, stmt, mod->v.Interactive.body);
2150
        break;
2151
    case Expression_kind:
  Branch (2151:5): [True: 35.9k, False: 12.7k]
2152
        VISIT_IN_SCOPE(c, expr, mod->v.Expression.body);
2153
        addNone = 0;
2154
        break;
2155
    default:
  Branch (2155:5): [True: 0, False: 48.6k]
2156
        PyErr_Format(PyExc_SystemError,
2157
                     "module kind %d should not be possible",
2158
                     mod->kind);
2159
        return 0;
2160
    }
2161
    co = assemble(c, addNone);
2162
    compiler_exit_scope(c);
2163
    return co;
2164
}
2165
2166
/* The test for LOCAL must come before the test for FREE in order to
2167
   handle classes where name is both local and free.  The local var is
2168
   a method and the free var is a free var referenced within a method.
2169
*/
2170
2171
static int
2172
get_ref_type(struct compiler *c, PyObject *name)
2173
{
2174
    int scope;
2175
    if (c->u->u_scope_type == COMPILER_SCOPE_CLASS &&
  Branch (2175:9): [True: 311, False: 2.16k]
2176
        
        _PyUnicode_EqualToASCIIString(name, "__class__"))
  Branch (2176:9): [True: 252, False: 59]
2177
        return CELL;
2178
    scope = _PyST_GetScope(c->u->u_ste, name);
2179
    if (scope == 0) {
  Branch (2179:9): [True: 0, False: 2.22k]
2180
        PyErr_Format(PyExc_SystemError,
2181
                     "_PyST_GetScope(name=%R) failed: "
2182
                     "unknown scope in unit %S (%R); "
2183
                     "symbols: %R; locals: %R; globals: %R",
2184
                     name,
2185
                     c->u->u_name, c->u->u_ste->ste_id,
2186
                     c->u->u_ste->ste_symbols, c->u->u_varnames, c->u->u_names);
2187
        return -1;
2188
    }
2189
    return scope;
2190
}
2191
2192
static int
2193
compiler_lookup_arg(PyObject *dict, PyObject *name)
2194
{
2195
    PyObject *v;
2196
    v = PyDict_GetItemWithError(dict, name);
2197
    if (v == NULL)
  Branch (2197:9): [True: 0, False: 2.61k]
2198
        return -1;
2199
    return PyLong_AS_LONG(v);
2200
}
2201
2202
static int
2203
compiler_make_closure(struct compiler *c, PyCodeObject *co, Py_ssize_t flags,
2204
                      PyObject *qualname)
2205
{
2206
    if (qualname == NULL)
  Branch (2206:9): [True: 1.80k, False: 22.7k]
2207
        qualname = co->co_name;
2208
2209
    if (co->co_nfreevars) {
  Branch (2209:9): [True: 1.37k, False: 23.1k]
2210
        int i = co->co_nlocals + co->co_nplaincellvars;
2211
        for (; i < co->co_nlocalsplus; ++i) {
  Branch (2211:16): [True: 2.47k, False: 1.37k]
2212
            /* Bypass com_addop_varname because it will generate
2213
               LOAD_DEREF but LOAD_CLOSURE is needed.
2214
            */
2215
            PyObject *name = PyTuple_GET_ITEM(co->co_localsplusnames, i);
2216
2217
            /* Special case: If a class contains a method with a
2218
               free variable that has the same name as a method,
2219
               the name will be considered free *and* local in the
2220
               class.  It should be handled by the closure, as
2221
               well as by the normal name lookup logic.
2222
            */
2223
            int reftype = get_ref_type(c, name);
2224
            if (reftype == -1) {
  Branch (2224:17): [True: 0, False: 2.47k]
2225
                return 0;
2226
            }
2227
            int arg;
2228
            if (reftype == CELL) {
  Branch (2228:17): [True: 2.35k, False: 115]
2229
                arg = compiler_lookup_arg(c->u->u_cellvars, name);
2230
            }
2231
            else {
2232
                arg = compiler_lookup_arg(c->u->u_freevars, name);
2233
            }
2234
            if (arg == -1) {
  Branch (2234:17): [True: 0, False: 2.47k]
2235
                PyObject *freevars = _PyCode_GetFreevars(co);
2236
                if (freevars == NULL) {
  Branch (2236:21): [True: 0, False: 0]
2237
                    PyErr_Clear();
2238
                }
2239
                PyErr_Format(PyExc_SystemError,
2240
                    "compiler_lookup_arg(name=%R) with reftype=%d failed in %S; "
2241
                    "freevars of code %S: %R",
2242
                    name,
2243
                    reftype,
2244
                    c->u->u_name,
2245
                    co->co_name,
2246
                    freevars);
2247
                Py_DECREF(freevars);
2248
                return 0;
2249
            }
2250
            ADDOP_I(c, LOAD_CLOSURE, arg);
2251
        }
2252
        flags |= 0x08;
2253
        ADDOP_I(c, BUILD_TUPLE, co->co_nfreevars);
2254
    }
2255
    ADDOP_LOAD_CONST(c, (PyObject*)co);
2256
    ADDOP_I(c, MAKE_FUNCTION, flags);
2257
    return 1;
2258
}
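When co_nfreevars is non-zero, compiler_make_closure() emits LOAD_CLOSURE/BUILD_TUPLE and sets the 0x08 flag on MAKE_FUNCTION, so the resulting function object carries cells for its free variables. The sketch below should make the resulting __closure__ visible from the outside; the nested-function source and names are illustrative assumptions.

#include <Python.h>

int
main(void)
{
    Py_Initialize();
    const char *src =
        "def outer():\n"
        "    x = 1\n"
        "    def inner():\n"
        "        return x\n"
        "    return inner\n";
    PyObject *g = PyDict_New();
    PyObject *r = PyRun_String(src, Py_file_input, g, g);
    if (r == NULL) {
        PyErr_Print();
        return 1;
    }
    Py_DECREF(r);
    PyObject *outer = PyDict_GetItemString(g, "outer");      /* borrowed */
    PyObject *inner = PyObject_CallNoArgs(outer);
    /* inner was created with the closure flag, so it should carry a cell
     * for the free variable x. */
    PyObject *closure = PyObject_GetAttrString(inner, "__closure__");
    PyObject_Print(closure, stdout, 0);
    printf("\n");
    Py_DECREF(closure);
    Py_DECREF(inner);
    Py_DECREF(g);
    return Py_FinalizeEx();
}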
2259
2260
static int
2261
compiler_decorators(struct compiler *c, asdl_expr_seq* decos)
2262
{
2263
    int i;
2264
2265
    if (!decos)
  Branch (2265:9): [True: 13.9k, False: 8.69k]
2266
        return 1;
2267
2268
    
    for (i = 0; i < asdl_seq_LEN(decos); i++) {
  Branch (2268:17): [True: 1.12k, False: 8.69k]
2269
        VISIT(c, expr, (expr_ty)asdl_seq_GET(decos, i));
2270
    }
2271
    return 1;
2272
}
2273
2274
static int
2275
compiler_apply_decorators(struct compiler *c, asdl_expr_seq* decos)
2276
{
2277
    if (!decos)
  Branch (2277:9): [True: 13.8k, False: 8.69k]
2278
        return 1;
2279
2280
    struct location old_loc = c->u->u_loc;
2281
    for (Py_ssize_t i = asdl_seq_LEN(decos) - 1; i > -1; i--) {
  Branch (2281:50): [True: 1.12k, False: 8.69k]
2282
        SET_LOC(c, (expr_ty)asdl_seq_GET(decos, i));
2283
        ADDOP_I(c, CALL, 0);
2284
    }
2285
    c->u->u_loc = old_loc;
2286
    return 1;
2287
}
2288
2289
static int
2290
compiler_visit_kwonlydefaults(struct compiler *c, asdl_arg_seq *kwonlyargs,
2291
                              asdl_expr_seq *kw_defaults)
2292
{
2293
    /* Push a dict of keyword-only default values.
2294
2295
       Return 0 on error, -1 if no dict pushed, 1 if a dict is pushed.
2296
       */
2297
    int i;
2298
    PyObject *keys = NULL;
2299
2300
    for (i = 0; i < asdl_seq_LEN(kwonlyargs); i++) {
  Branch (2300:17): [True: 3.34k, False: 21.6k]
2301
        arg_ty arg = asdl_seq_GET(kwonlyargs, i);
2302
        expr_ty default_ = asdl_seq_GET(kw_defaults, i);
2303
        if (default_) {
  Branch (2303:13): [True: 886, False: 2.45k]
2304
            PyObject *mangled = _Py_Mangle(c->u->u_private, arg->arg);
2305
            if (!mangled) {
  Branch (2305:17): [True: 0, False: 886]
2306
                goto error;
2307
            }
2308
            if (keys == NULL) {
  Branch (2308:17): [True: 348, False: 538]
2309
                keys = PyList_New(1);
2310
                if (keys == NULL) {
  Branch (2310:21): [True: 0, False: 348]
2311
                    Py_DECREF(mangled);
2312
                    return 0;
2313
                }
2314
                PyList_SET_ITEM(keys, 0, mangled);
2315
            }
2316
            else {
2317
                int res = PyList_Append(keys, mangled);
2318
                Py_DECREF(mangled);
2319
                if (res == -1) {
  Branch (2319:21): [True: 0, False: 538]
2320
                    goto error;
2321
                }
2322
            }
2323
            if (!compiler_visit_expr(c, default_)) {
  Branch (2323:17): [True: 1, False: 885]
2324
                goto error;
2325
            }
2326
        }
2327
    }
2328
    if (keys != NULL) {
  Branch (2328:9): [True: 347, False: 21.2k]
2329
        Py_ssize_t default_count = PyList_GET_SIZE(keys);
2330
        PyObject *keys_tuple = PyList_AsTuple(keys);
2331
        Py_DECREF(keys);
2332
        
        ADDOP_LOAD_CONST_NEW(c, keys_tuple);
2333
        
        ADDOP_I(c, BUILD_CONST_KEY_MAP, default_count);
2334
        assert(default_count > 0);
2335
        return 1;
2336
    }
2337
    else {
2338
        return -1;
2339
    }
2340
2341
error:
2342
    Py_XDECREF(keys);
2343
    return 0;
2344
}
2345
2346
static int
2347
compiler_visit_annexpr(struct compiler *c, expr_ty annotation)
2348
{
2349
    ADDOP_LOAD_CONST_NEW(c, _PyAST_ExprAsUnicode(annotation));
2350
    return 1;
2351
}
2352
2353
static int
2354
compiler_visit_argannotation(struct compiler *c, identifier id,
2355
    expr_ty annotation, Py_ssize_t *annotations_len)
2356
{
2357
    if (!annotation) {
  Branch (2357:9): [True: 53.6k, False: 2.90k]
2358
        return 1;
2359
    }
2360
2361
    PyObject *mangled = _Py_Mangle(c->u->u_private, id);
2362
    if (!mangled) {
  Branch (2362:9): [True: 0, False: 2.90k]
2363
        return 0;
2364
    }
2365
    ADDOP_LOAD_CONST(c, mangled);
2366
    Py_DECREF(mangled);
2367
2368
    if (c->c_future->ff_features & CO_FUTURE_ANNOTATIONS) {
  Branch (2368:9): [True: 1.03k, False: 1.87k]
2369
        VISIT(c, annexpr, annotation);
2370
    }
2371
    else {
2372
        if (annotation->kind == Starred_kind) {
  Branch (2372:13): [True: 10, False: 1.86k]
2373
            // *args: *Ts (where Ts is a TypeVarTuple).
2374
            // Do [annotation_value] = [*Ts].
2375
            // (Note that in theory we could end up here even for an argument
2376
            // other than *args, but in practice the grammar doesn't allow it.)
2377
            VISIT(c, expr, annotation->v.Starred.value);
2378
            ADDOP_I(c, UNPACK_SEQUENCE, (Py_ssize_t) 1);
2379
        }
2380
        else {
2381
            VISIT(c, expr, annotation);
2382
        }
2383
    }
2384
    *annotations_len += 2;
2385
    return 1;
2386
}
2387
2388
static int
2389
compiler_visit_argannotations(struct compiler *c, asdl_arg_seq* args,
2390
                              Py_ssize_t *annotations_len)
2391
{
2392
    int i;
2393
    for (i = 0; i < asdl_seq_LEN(args); i++) {
  Branch (2393:17): [True: 35.7k, False: 62.3k]
2394
        arg_ty arg = (arg_ty)asdl_seq_GET(args, i);
2395
        if (!compiler_visit_argannotation(
  Branch (2395:13): [True: 3, False: 35.7k]
2396
                        c,
2397
                        arg->arg,
2398
                        arg->annotation,
2399
                        annotations_len))
2400
            return 0;
2401
    }
2402
    return 1;
2403
}
2404
2405
static int
2406
compiler_visit_annotations(struct compiler *c, arguments_ty args,
2407
                           expr_ty returns)
2408
{
2409
    /* Push arg annotation names and values.
2410
       The expressions are evaluated out-of-order wrt the source code.
2411
2412
       Return 0 on error, -1 if no annotations are pushed, 1 if annotations are pushed.
2413
       */
2414
    Py_ssize_t annotations_len = 0;
2415
2416
    if (!compiler_visit_argannotations(c, args->args, &annotations_len))
  Branch (2416:9): [True: 3, False: 20.7k]
2417
        return 0;
2418
    if (!compiler_visit_argannotations(c, args->posonlyargs, &annotations_len))
  Branch (2418:9): [True: 0, False: 20.7k]
2419
        return 0;
2420
    if (args->vararg && args->vararg->annotation &&
  Branch (2420:9): [True: 742, False: 20.0k]
  Branch (2420:25): [True: 17, False: 725]
2421
        !compiler_visit_argannotation(c, args->vararg->arg,
  Branch (2421:9): [True: 1, False: 16]
2422
                                     args->vararg->annotation, &annotations_len))
2423
        return 0;
2424
    if (!compiler_visit_argannotations(c, args->kwonlyargs, &annotations_len))
  Branch (2424:9): [True: 0, False: 20.7k]
2425
        return 0;
2426
    if (args->kwarg && args->kwarg->annotation &&
  Branch (2426:9): [True: 522, False: 20.2k]
  Branch (2426:24): [True: 5, False: 517]
2427
        !compiler_visit_argannotation(c, args->kwarg->arg,
  Branch (2427:9): [True: 1, False: 4]
2428
                                     args->kwarg->annotation, &annotations_len))
2429
        return 0;
2430
2431
    if (!compiler_visit_argannotation(c, &_Py_ID(return), returns,
  Branch (2431:9): [True: 0, False: 20.7k]
2432
                                      &annotations_len)) {
2433
        return 0;
2434
    }
2435
2436
    if (annotations_len) {
  Branch (2436:9): [True: 1.30k, False: 19.4k]
2437
        ADDOP_I(c, BUILD_TUPLE, annotations_len);
2438
        return 1;
2439
    }
2440
2441
    return -1;
2442
}
2443
2444
static int
2445
compiler_visit_defaults(struct compiler *c, arguments_ty args)
2446
{
2447
    VISIT_SEQ(c, expr, args->defaults);
2448
    ADDOP_I(c, BUILD_TUPLE, asdl_seq_LEN(args->defaults));
2449
    return 1;
2450
}
2451
2452
static Py_ssize_t
2453
compiler_default_arguments(struct compiler *c, arguments_ty args)
2454
{
2455
    Py_ssize_t funcflags = 0;
2456
    if (args->defaults && asdl_seq_LEN(args->defaults) > 0) {
  Branch (2456:9): [True: 21.6k, False: 0]
  Branch (2456:27): [True: 2.13k, False: 19.5k]
2457
        if (!compiler_visit_defaults(c, args))
  Branch (2457:13): [True: 3, False: 2.13k]
2458
            return -1;
2459
        funcflags |= 0x01;
2460
    }
2461
    if (args->kwonlyargs) {
  Branch (2461:9): [True: 21.6k, False: 0]
2462
        int res = compiler_visit_kwonlydefaults(c, args->kwonlyargs,
2463
                                                args->kw_defaults);
2464
        if (res == 0) {
  Branch (2464:13): [True: 1, False: 21.6k]
2465
            return -1;
2466
        }
2467
        else if (res > 0) {
  Branch (2467:18): [True: 347, False: 21.2k]
2468
            funcflags |= 0x02;
2469
        }
2470
    }
2471
    return funcflags;
2472
}
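The 0x01 and 0x02 funcflags bits set above correspond to a tuple of positional defaults and a dict of keyword-only defaults pushed before MAKE_FUNCTION; they surface as __defaults__ and __kwdefaults__ on the function object. The sketch below (illustrative source and names) should print both.

#include <Python.h>

int
main(void)
{
    Py_Initialize();
    PyObject *g = PyDict_New();
    PyObject *r = PyRun_String("def f(a, b=2, *, c=3): pass\n",
                               Py_file_input, g, g);
    if (r == NULL) {
        PyErr_Print();
        return 1;
    }
    Py_DECREF(r);
    PyObject *f = PyDict_GetItemString(g, "f");              /* borrowed */
    PyObject *defaults = PyObject_GetAttrString(f, "__defaults__");
    PyObject *kwdefaults = PyObject_GetAttrString(f, "__kwdefaults__");
    PyObject_Print(defaults, stdout, 0);     /* (2,)      <- the 0x01 tuple */
    printf("\n");
    PyObject_Print(kwdefaults, stdout, 0);   /* {'c': 3}  <- the 0x02 dict  */
    printf("\n");
    Py_DECREF(defaults);
    Py_DECREF(kwdefaults);
    Py_DECREF(g);
    return Py_FinalizeEx();
}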
2473
2474
static int
2475
forbidden_name(struct compiler *c, identifier name, expr_context_ty ctx)
2476
{
2477
2478
    if (ctx == Store && _PyUnicode_EqualToASCIIString(name, "__debug__")) {
  Branch (2478:9): [True: 157k, False: 766k]
  Branch (2478:25): [True: 15, False: 157k]
2479
        compiler_error(c, "cannot assign to __debug__");
2480
        return 1;
2481
    }
2482
    if (ctx == Del && _PyUnicode_EqualToASCIIString(name, "__debug__")) {
  Branch (2482:9): [True: 1.16k, False: 922k]
  Branch (2482:23): [True: 1, False: 1.15k]
2483
        compiler_error(c, "cannot delete __debug__");
2484
        return 1;
2485
    }
2486
    return 0;
2487
}
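forbidden_name() is where assignments to and deletions of __debug__ are rejected. A minimal sketch that should show the compile-time SyntaxError; the source string and "<demo>" file name are illustrative.

#include <Python.h>

int
main(void)
{
    Py_Initialize();
    PyObject *code = Py_CompileString("__debug__ = 0\n", "<demo>", Py_file_input);
    if (code == NULL) {
        PyErr_Print();   /* SyntaxError: cannot assign to __debug__ */
    }
    else {
        Py_DECREF(code);
    }
    return Py_FinalizeEx();
}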
2488
2489
static int
2490
compiler_check_debug_one_arg(struct compiler *c, arg_ty arg)
2491
{
2492
    if (arg != NULL) {
  Branch (2492:9): [True: 44.6k, False: 41.9k]
2493
        if (forbidden_name(c, arg->arg, Store))
  Branch (2493:13): [True: 5, False: 44.6k]
2494
            return 0;
2495
    }
2496
    return 1;
2497
}
2498
2499
static int
2500
compiler_check_debug_args_seq(struct compiler *c, asdl_arg_seq *args)
2501
{
2502
    if (args != NULL) {
  Branch (2502:9): [True: 64.9k, False: 0]
2503
        for (Py_ssize_t i = 0, n = asdl_seq_LEN(args); i < n; i++) {
  Branch (2503:56): [True: 43.3k, False: 64.9k]
2504
            if (!compiler_check_debug_one_arg(c, asdl_seq_GET(args, i)))
  Branch (2504:17): [True: 4, False: 43.3k]
2505
                return 0;
2506
        }
2507
    }
2508
    return 1;
2509
}
2510
2511
static int
2512
compiler_check_debug_args(struct compiler *c, arguments_ty args)
2513
{
2514
    if (!compiler_check_debug_args_seq(c, args->posonlyargs))
  Branch (2514:9): [True: 0, False: 21.6k]
2515
        return 0;
2516
    if (!compiler_check_debug_args_seq(c, args->args))
  Branch (2516:9): [True: 3, False: 21.6k]
2517
        return 0;
2518
    if (!compiler_check_debug_one_arg(c, args->vararg))
  Branch (2518:9): [True: 0, False: 21.6k]
2519
        return 0;
2520
    if (!compiler_check_debug_args_seq(c, args->kwonlyargs))
  Branch (2520:9): [True: 1, False: 21.6k]
2521
        return 0;
2522
    if (!compiler_check_debug_one_arg(c, args->kwarg))
  Branch (2522:9): [True: 1, False: 21.6k]
2523
        return 0;
2524
    return 1;
2525
}
2526
2527
static int
2528
compiler_function(struct compiler *c, stmt_ty s, int is_async)
2529
{
2530
    PyCodeObject *co;
2531
    PyObject *qualname, *docstring = NULL;
2532
    arguments_ty args;
2533
    expr_ty returns;
2534
    identifier name;
2535
    asdl_expr_seq* decos;
2536
    asdl_stmt_seq *body;
2537
    Py_ssize_t i, funcflags;
2538
    int annotations;
2539
    int scope_type;
2540
    int firstlineno;
2541
2542
    if (is_async) {
  Branch (2542:9): [True: 468, False: 20.3k]
2543
        assert(s->kind == AsyncFunctionDef_kind);
2544
2545
        args = s->v.AsyncFunctionDef.args;
2546
        returns = s->v.AsyncFunctionDef.returns;
2547
        decos = s->v.AsyncFunctionDef.decorator_list;
2548
        name = s->v.AsyncFunctionDef.name;
2549
        body = s->v.AsyncFunctionDef.body;
2550
2551
        scope_type = COMPILER_SCOPE_ASYNC_FUNCTION;
2552
    } else {
2553
        assert(s->kind == FunctionDef_kind);
2554
2555
        args = s->v.FunctionDef.args;
2556
        returns = s->v.FunctionDef.returns;
2557
        decos = s->v.FunctionDef.decorator_list;
2558
        name = s->v.FunctionDef.name;
2559
        body = s->v.FunctionDef.body;
2560
2561
        scope_type = COMPILER_SCOPE_FUNCTION;
2562
    }
2563
2564
    if (!compiler_check_debug_args(c, args))
  Branch (2564:9): [True: 2, False: 20.8k]
2565
        return 0;
2566
2567
    if (!compiler_decorators(c, decos))
  Branch (2567:9): [True: 0, False: 20.8k]
2568
        return 0;
2569
2570
    firstlineno = s->lineno;
2571
    if (asdl_seq_LEN(decos)) {
2572
        firstlineno = ((expr_ty)asdl_seq_GET(decos, 0))->lineno;
2573
    }
2574
2575
    funcflags = compiler_default_arguments(c, args);
2576
    if (funcflags == -1) {
  Branch (2576:9): [True: 4, False: 20.7k]
2577
        return 0;
2578
    }
2579
2580
    annotations = compiler_visit_annotations(c, args, returns);
2581
    if (annotations == 0) {
  Branch (2581:9): [True: 5, False: 20.7k]
2582
        return 0;
2583
    }
2584
    else if (annotations > 0) {
  Branch (2584:14): [True: 1.30k, False: 19.4k]
2585
        funcflags |= 0x04;
2586
    }
2587
2588
    if (!compiler_enter_scope(c, name, scope_type, (void *)s, firstlineno)) {
  Branch (2588:9): [True: 0, False: 20.7k]
2589
        return 0;
2590
    }
2591
2592
    /* if not -OO mode, add docstring */
2593
    if (c->c_optimize < 2) {
  Branch (2593:9): [True: 18.8k, False: 1.91k]
2594
        docstring = _PyAST_GetDocString(body);
2595
    }
2596
    if (compiler_add_const(c, docstring ? docstring : Py_None) < 0) {
  Branch (2596:9): [True: 0, False: 20.7k]
  Branch (2596:31): [True: 4.06k, False: 16.7k]
2597
        compiler_exit_scope(c);
2598
        return 0;
2599
    }
2600
2601
    c->u->u_argcount = asdl_seq_LEN(args->args);
2602
    c->u->u_posonlyargcount = asdl_seq_LEN(args->posonlyargs);
2603
    c->u->u_kwonlyargcount = asdl_seq_LEN(args->kwonlyargs);
2604
    for (i = docstring ? 1 : 0; i < asdl_seq_LEN(body); i++) {
  Branch (2604:14): [True: 4.06k, False: 16.7k]
  Branch (2604:33): [True: 56.2k, False: 20.7k]
2605
        VISIT_IN_SCOPE(c, stmt, (stmt_ty)asdl_seq_GET(body, i));
2606
    }
2607
    co = assemble(c, 1);
2608
    qualname = c->u->u_qualname;
2609
    Py_INCREF(qualname);
2610
    compiler_exit_scope(c);
2611
    if (co == NULL) {
  Branch (2611:9): [True: 0, False: 20.7k]
2612
        Py_XDECREF(qualname);
2613
        Py_XDECREF(co);
2614
        return 0;
2615
    }
2616
2617
    if (!compiler_make_closure(c, co, funcflags, qualname)) {
  Branch (2617:9): [True: 0, False: 20.7k]
2618
        Py_DECREF(qualname);
2619
        Py_DECREF(co);
2620
        return 0;
2621
    }
2622
    Py_DECREF(qualname);
2623
    Py_DECREF(co);
2624
2625
    if (!compiler_apply_decorators(c, decos))
  Branch (2625:9): [True: 0, False: 20.7k]
2626
        return 0;
2627
    return compiler_nameop(c, name, Store);
2628
}
2629
2630
static int
2631
compiler_class(struct compiler *c, stmt_ty s)
2632
{
2633
    PyCodeObject *co;
2634
    int i, firstlineno;
2635
    asdl_expr_seq *decos = s->v.ClassDef.decorator_list;
2636
2637
    if (!compiler_decorators(c, decos))
  Branch (2637:9): [True: 0, False: 1.81k]
2638
        return 0;
2639
2640
    firstlineno = s->lineno;
2641
    if (asdl_seq_LEN(decos)) {
2642
        firstlineno = ((expr_ty)asdl_seq_GET(decos, 0))->lineno;
2643
    }
2644
2645
    /* ultimately generate code for:
2646
         <name> = __build_class__(<func>, <name>, *<bases>, **<keywords>)
2647
       where:
2648
         <func> is a zero arg function/closure created from the class body.
2649
            It mutates its locals to build the class namespace.
2650
         <name> is the class name
2651
         <bases> is the positional arguments and *varargs argument
2652
         <keywords> is the keyword arguments and **kwds argument
2653
       This borrows from compiler_call.
2654
    */
2655
2656
    /* 1. compile the class body into a code object */
2657
    if (!compiler_enter_scope(c, s->v.ClassDef.name,
  Branch (2657:9): [True: 0, False: 1.81k]
2658
                              COMPILER_SCOPE_CLASS, (void *)s, firstlineno)) {
2659
        return 0;
2660
    }
2661
    /* this block represents what we do in the new scope */
2662
    {
2663
        /* use the class name for name mangling */
2664
        Py_INCREF(s->v.ClassDef.name);
2665
        Py_XSETREF(c->u->u_private, s->v.ClassDef.name);
2666
        /* load (global) __name__ ... */
2667
        if (!compiler_nameop(c, &_Py_ID(__name__), Load)) {
  Branch (2667:13): [True: 0, False: 1.81k]
2668
            compiler_exit_scope(c);
2669
            return 0;
2670
        }
2671
        /* ... and store it as __module__ */
2672
        if (!compiler_nameop(c, &_Py_ID(__module__), Store)) {
  Branch (2672:13): [True: 0, False: 1.81k]
2673
            compiler_exit_scope(c);
2674
            return 0;
2675
        }
2676
        assert(c->u->u_qualname);
2677
        ADDOP_LOAD_CONST(c, c->u->u_qualname);
2678
        if (!compiler_nameop(c, &_Py_ID(__qualname__), Store)) {
  Branch (2678:13): [True: 0, False: 1.81k]
2679
            compiler_exit_scope(c);
2680
            return 0;
2681
        }
2682
        /* compile the body proper */
2683
        if (!compiler_body(c, s->v.ClassDef.body)) {
  Branch (2683:13): [True: 17, False: 1.80k]
2684
            compiler_exit_scope(c);
2685
            return 0;
2686
        }
2687
        /* The following code is artificial */
2688
        UNSET_LOC(c);
2689
        /* Return __classcell__ if it is referenced, otherwise return None */
2690
        if (c->u->u_ste->ste_needs_class_closure) {
  Branch (2690:13): [True: 147, False: 1.65k]
2691
            /* Store __classcell__ into class namespace & return it */
2692
            i = compiler_lookup_arg(c->u->u_cellvars, &_Py_ID(__class__));
2693
            if (i < 0) {
  Branch (2693:17): [True: 0, False: 147]
2694
                compiler_exit_scope(c);
2695
                return 0;
2696
            }
2697
            assert(i == 0);
2698
2699
            ADDOP_I(c, LOAD_CLOSURE, i);
2700
            ADDOP_I(c, COPY, 1);
2701
            if (!compiler_nameop(c, &_Py_ID(__classcell__), Store)) {
  Branch (2701:17): [True: 0, False: 147]
2702
                compiler_exit_scope(c);
2703
                return 0;
2704
            }
2705
        }
2706
        else {
2707
            /* No methods referenced __class__, so just return None */
2708
            assert(PyDict_GET_SIZE(c->u->u_cellvars) == 0);
2709
            ADDOP_LOAD_CONST(c, Py_None);
2710
        }
2711
        ADDOP_IN_SCOPE(c, RETURN_VALUE);
2712
        /* create the code object */
2713
        co = assemble(c, 1);
2714
    }
2715
    /* leave the new scope */
2716
    compiler_exit_scope(c);
2717
    if (co == NULL)
  Branch (2717:9): [True: 0, False: 1.80k]
2718
        return 0;
2719
2720
    /* 2. load the 'build_class' function */
2721
    ADDOP(c, PUSH_NULL);
2722
    ADDOP(c, LOAD_BUILD_CLASS);
2723
2724
    /* 3. load a function (or closure) made from the code object */
2725
    if (!compiler_make_closure(c, co, 0, NULL)) {
  Branch (2725:9): [True: 0, False: 1.80k]
2726
        Py_DECREF(co);
2727
        return 0;
2728
    }
2729
    Py_DECREF(co);
2730
2731
    /* 4. load class name */
2732
    ADDOP_LOAD_CONST(c, s->v.ClassDef.name);
2733
2734
    /* 5. generate the rest of the code for the call */
2735
    if (!compiler_call_helper(c, 2, s->v.ClassDef.bases, s->v.ClassDef.keywords))
  Branch (2735:9): [True: 1, False: 1.79k]
2736
        return 0;
2737
    /* 6. apply decorators */
2738
    if (!compiler_apply_decorators(c, decos))
  Branch (2738:9): [True: 0, False: 1.79k]
2739
        return 0;
2740
2741
    /* 7. store into <name> */
2742
    if (!compiler_nameop(c, s->v.ClassDef.name, Store))
  Branch (2742:9): [True: 0, False: 1.79k]
2743
        return 0;
2744
    return 1;
2745
}
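As the comment block above describes, a class statement compiles the body into a zero-argument function and calls __build_class__ with it; since LOAD_BUILD_CLASS looks the callable up in builtins at run time, wrapping builtins.__build_class__ should expose the hidden class-body code object. A hedged sketch follows; the spy wrapper and Demo class are purely illustrative.

#include <Python.h>

int
main(void)
{
    Py_Initialize();
    /* The wrapper should print: building Demo from code object Demo */
    PyRun_SimpleString(
        "import builtins\n"
        "orig = builtins.__build_class__\n"
        "def spy(func, name, *bases, **kwds):\n"
        "    print('building', name, 'from code object', func.__code__.co_name)\n"
        "    return orig(func, name, *bases, **kwds)\n"
        "builtins.__build_class__ = spy\n"
        "try:\n"
        "    class Demo:\n"
        "        pass\n"
        "finally:\n"
        "    builtins.__build_class__ = orig\n");
    return Py_FinalizeEx();
}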
2746
2747
/* Return 0 if the expression is a constant value other than a named singleton.
2748
   Return 1 otherwise. */
2749
static int
2750
check_is_arg(expr_ty e)
2751
{
2752
    if (e->kind != Constant_kind) {
  Branch (2752:9): [True: 26.8k, False: 9.17k]
2753
        return 1;
2754
    }
2755
    PyObject *value = e->v.Constant.value;
2756
    return (value == Py_None
  Branch (2756:13): [True: 3.12k, False: 6.04k]
2757
         || value == Py_False  6.04k
  Branch (2757:13): [True: 33, False: 6.01k]
2758
         || value == Py_True  6.01k
  Branch (2758:13): [True: 30, False: 5.98k]
2759
         || value == Py_Ellipsis);  5.98k 5.98k
  Branch (2759:13): [True: 9, False: 5.97k]
2760
}
2761
2762
/* Check operands of identity checks ("is" and "is not").
2763
   Emit a warning if any operand is a constant except named singletons.
2764
   Return 0 on error.
2765
 */
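/* A minimal sketch for illustration: the warning below is observable from
   Python.  Assuming a CPython build where SyntaxWarning is displayed for
   freshly compiled code (the default), a module containing

       x = 5
       if x is 5:        # constant operand
           pass
       if x is None:     # named singleton
           pass

   should emit '"is" with a literal. Did you mean "=="?' for the first test
   only; None, True, False and Ellipsis are exempted by check_is_arg() above.
*/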
2766
static int
2767
check_compare(struct compiler *c, expr_ty e)
2768
{
2769
    Py_ssize_t i, n;
2770
    int left = check_is_arg(e->v.Compare.left);
2771
    n = asdl_seq_LEN(e->v.Compare.ops);
2772
    for (i = 0; i < n; i++) {  18.1k
  Branch (2772:17): [True: 18.1k, False: 17.8k]
2773
        cmpop_ty op = (cmpop_ty)asdl_seq_GET(e->v.Compare.ops, i);
2774
        int right = check_is_arg((expr_ty)asdl_seq_GET(e->v.Compare.comparators, i));
2775
        if (op == Is || op == IsNot) {  14.7k
  Branch (2775:13): [True: 3.41k, False: 14.7k]
  Branch (2775:25): [True: 1.69k, False: 13.0k]
2776
            if (!right || !left) {  5.09k
  Branch (2776:17): [True: 11, False: 5.09k]
  Branch (2776:27): [True: 10, False: 5.08k]
2777
                const char *msg = (op == Is)
  Branch (2777:35): [True: 19, False: 2]
2778
                        ? "\"is\" with a literal. Did you mean \"==\"?"  19
2779
                        : "\"is not\" with a literal. Did you mean \"!=\"?";  2
2780
                return compiler_warn(c, msg);
2781
            }
2782
        }
2783
        left = right;
2784
    }
2785
    return 1;
2786
}
2787
2788
static int compiler_addcompare(struct compiler *c, cmpop_ty op)
2789
{
2790
    int cmp;
2791
    switch (op) {
2792
    case Eq:
  Branch (2792:5): [True: 5.38k, False: 13.6k]
2793
        cmp = Py_EQ;
2794
        break;
2795
    case NotEq:
  Branch (2795:5): [True: 1.45k, False: 17.5k]
2796
        cmp = Py_NE;
2797
        break;
2798
    case Lt:
  Branch (2798:5): [True: 1.43k, False: 17.5k]
2799
        cmp = Py_LT;
2800
        break;
2801
    case LtE:
  Branch (2801:5): [True: 855, False: 18.1k]
2802
        cmp = Py_LE;
2803
        break;
2804
    case Gt:
  Branch (2804:5): [True: 1.20k, False: 17.8k]
2805
        cmp = Py_GT;
2806
        break;
2807
    case GtE:
  Branch (2807:5): [True: 1.04k, False: 17.9k]
2808
        cmp = Py_GE;
2809
        break;
2810
    case Is:
  Branch (2810:5): [True: 3.41k, False: 15.5k]
2811
        ADDOP_I(c, IS_OP, 0);
2812
        return 1;
2813
    case IsNot:
  Branch (2813:5): [True: 1.69k, False: 17.3k]
2814
        ADDOP_I(c, IS_OP, 1);
2815
        return 1;
2816
    case In:
  Branch (2816:5): [True: 1.93k, False: 17.0k]
2817
        ADDOP_I(c, CONTAINS_OP, 0);
2818
        return 1;
2819
    case NotIn:
  Branch (2819:5): [True: 582, False: 18.4k]
2820
        ADDOP_I(c, CONTAINS_OP, 1);
2821
        return 1;
2822
    default:
  Branch (2822:5): [True: 0, False: 19.0k]
2823
        Py_UNREACHABLE();
2824
    }
2825
    ADDOP_I(c, COMPARE_OP, cmp);
2826
    return 1;
2827
}
2828
2829
2830
2831
static int
2832
compiler_jump_if(struct compiler *c, expr_ty e, basicblock *next, int cond)
2833
{
2834
    switch (e->kind) {
2835
    case UnaryOp_kind:
  Branch (2835:5): [True: 4.06k, False: 231k]
2836
        if (e->v.UnaryOp.op == Not)
  Branch (2836:13): [True: 4.06k, False: 0]
2837
            return compiler_jump_if(c, e->v.UnaryOp.operand, next, !cond);
2838
        /* fallback to general implementation */
2839
        break;
2840
    case BoolOp_kind: {
  Branch (2840:5): [True: 3.29k, False: 231k]
2841
        asdl_expr_seq *s = e->v.BoolOp.values;
2842
        Py_ssize_t i, n = asdl_seq_LEN(s) - 1;
2843
        assert(n >= 0);
2844
        int cond2 = e->v.BoolOp.op == Or;
2845
        basicblock *next2 = next;
2846
        if (!cond2 != !cond) {
  Branch (2846:13): [True: 1.12k, False: 2.17k]
2847
            next2 = compiler_new_block(c);
2848
            if (next2 == NULL)
  Branch (2848:17): [True: 0, False: 1.12k]
2849
                return 0;
2850
        }
2851
        
        for (i = 0; i < n; ++i) {  3.29k 3.98k
  Branch (2851:21): [True: 3.98k, False: 3.29k]
2852
            if (!compiler_jump_if(c, (expr_ty)asdl_seq_GET(s, i), next2, cond2))
  Branch (2852:17): [True: 0, False: 3.98k]
2853
                return 0;
2854
        }
2855
        if (!compiler_jump_if(c, (expr_ty)asdl_seq_GET(s, n), next, cond))
  Branch (2855:13): [True: 0, False: 3.29k]
2856
            return 0;
2857
        if (next2 != next)
  Branch (2857:13): [True: 1.12k, False: 2.17k]
2858
            compiler_use_next_block(c, next2);
2859
        return 1;
2860
    }
2861
    case IfExp_kind: {
  Branch (2861:5): [True: 3, False: 235k]
2862
        basicblock *end, *next2;
2863
        end = compiler_new_block(c);
2864
        if (end == NULL)
  Branch (2864:13): [True: 0, False: 3]
2865
            return 0;
2866
        next2 = compiler_new_block(c);
2867
        if (next2 == NULL)
  Branch (2867:13): [True: 0, False: 3]
2868
            return 0;
2869
        if (!compiler_jump_if(c, e->v.IfExp.test, next2, 0))
  Branch (2869:13): [True: 0, False: 3]
2870
            return 0;
2871
        if (!compiler_jump_if(c, e->v.IfExp.body, next, cond))
  Branch (2871:13): [True: 0, False: 3]
2872
            return 0;
2873
        ADDOP_JUMP_NOLINE(c, JUMP, end);
2874
        compiler_use_next_block(c, next2);
2875
        if (!compiler_jump_if(c, e->v.IfExp.orelse, next, cond))
  Branch (2875:13): [True: 0, False: 3]
2876
            return 0;
2877
        compiler_use_next_block(c, end);
2878
        return 1;
2879
    }
2880
    case Compare_kind: {
  Branch (2880:5): [True: 13.5k, False: 221k]
2881
        Py_ssize_t i, n = asdl_seq_LEN(e->v.Compare.ops) - 1;
2882
        if (n > 0) {
  Branch (2882:13): [True: 138, False: 13.3k]
2883
            if (!check_compare(c, e)) {
  Branch (2883:17): [True: 0, False: 138]
2884
                return 0;
2885
            }
2886
            basicblock *cleanup = compiler_new_block(c);
2887
            if (cleanup == NULL)
  Branch (2887:17): [True: 0, False: 138]
2888
                return 0;
2889
            VISIT(c, expr, e->v.Compare.left);
2890
            for (i = 0; i < n; i++) {  152
  Branch (2890:25): [True: 152, False: 138]
2891
                VISIT(c, expr,
2892
                    (expr_ty)asdl_seq_GET(e->v.Compare.comparators, i));
2893
                ADDOP_I(c, SWAP, 2);
2894
                ADDOP_I(c, COPY, 2);
2895
                ADDOP_COMPARE(c, asdl_seq_GET(e->v.Compare.ops, i));
2896
                ADDOP_JUMP(c, POP_JUMP_IF_FALSE, cleanup);
2897
            }
2898
            VISIT(c, expr, (expr_ty)asdl_seq_GET(e->v.Compare.comparators, n));
2899
            ADDOP_COMPARE(c, asdl_seq_GET(e->v.Compare.ops, n));
2900
            ADDOP_JUMP(c, cond ? POP_JUMP_IF_TRUE : POP_JUMP_IF_FALSE, next);
2901
            basicblock *end = compiler_new_block(c);
2902
            if (end == NULL)
  Branch (2902:17): [True: 0, False: 138]
2903
                return 0;
2904
            ADDOP_JUMP_NOLINE(c, JUMP, end);
2905
            compiler_use_next_block(c, cleanup);
2906
            ADDOP(c, POP_TOP);
2907
            if (!cond) {
  Branch (2907:17): [True: 70, False: 68]
2908
                ADDOP_JUMP_NOLINE(c, JUMP, next);
2909
            }
2910
            compiler_use_next_block(c, end);
2911
            return 1;
2912
        }
2913
        /* fallback to general implementation */
2914
        break;
2915
    }
2916
    default:
  Branch (2916:5): [True: 214k, False: 20.9k]
2917
        /* fallback to general implementation */
2918
        break;
2919
    }
2920
2921
    /* general implementation */
2922
    VISIT(c, expr, e);
2923
    ADDOP_JUMP(c, cond ? POP_JUMP_IF_TRUE : POP_JUMP_IF_FALSE, next);
2924
    return 1;
2925
}
2926
2927
static int
2928
compiler_ifexp(struct compiler *c, expr_ty e)
2929
{
2930
    basicblock *end, *next;
2931
2932
    assert(e->kind == IfExp_kind);
2933
    end = compiler_new_block(c);
2934
    if (end == NULL)
  Branch (2934:9): [True: 0, False: 565]
2935
        return 0;
2936
    next = compiler_new_block(c);
2937
    if (next == NULL)
  Branch (2937:9): [True: 0, False: 565]
2938
        return 0;
2939
    if (!compiler_jump_if(c, e->v.IfExp.test, next, 0))
  Branch (2939:9): [True: 0, False: 565]
2940
        return 0;
2941
    VISIT(c, expr, e->v.IfExp.body);
2942
    ADDOP_JUMP_NOLINE(c, JUMP, end);
2943
    compiler_use_next_block(c, next);
2944
    VISIT(c, expr, e->v.IfExp.orelse);
2945
    compiler_use_next_block(c, end);
2946
    return 1;
2947
}
2948
2949
static int
2950
compiler_lambda(struct compiler *c, expr_ty e)
2951
{
2952
    PyCodeObject *co;
2953
    PyObject *qualname;
2954
    Py_ssize_t funcflags;
2955
    arguments_ty args = e->v.Lambda.args;
2956
    assert(e->kind == Lambda_kind);
2957
2958
    if (!compiler_check_debug_args(c, args))
  Branch (2958:9): [True: 3, False: 842]
2959
        return 0;
2960
2961
    funcflags = compiler_default_arguments(c, args);
2962
    if (funcflags == -1) {
  Branch (2962:9): [True: 0, False: 842]
2963
        return 0;
2964
    }
2965
2966
    _Py_DECLARE_STR(anon_lambda, "<lambda>");
2967
    if (!compiler_enter_scope(c, &_Py_STR(anon_lambda), COMPILER_SCOPE_LAMBDA,
  Branch (2967:9): [True: 0, False: 842]
2968
                              (void *)e, e->lineno)) {
2969
        return 0;
2970
    }
2971
    /* Make None the first constant, so the lambda can't have a
2972
       docstring. */
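    /* A small sketch of the observable effect (illustration only): because
       co_consts[0] is forced to None here, a lambda whose body is a string
       does not acquire that string as its docstring:

           f = lambda: "just a return value"
           assert f.__doc__ is None
     */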
2973
    if (compiler_add_const(c, Py_None) < 0)
  Branch (2973:9): [True: 0, False: 842]
2974
        return 0;
2975
2976
    c->u->u_argcount = asdl_seq_LEN(args->args);
2977
    c->u->u_posonlyargcount = asdl_seq_LEN(args->posonlyargs);
2978
    c->u->u_kwonlyargcount = asdl_seq_LEN(args->kwonlyargs);
2979
    VISIT_IN_SCOPE(c, expr, e->v.Lambda.body);
2980
    if (c->u->u_ste->ste_generator) {
  Branch (2980:9): [True: 4, False: 838]
2981
        co = assemble(c, 0);
2982
    }
2983
    else {
2984
        ADDOP_IN_SCOPE(c, RETURN_VALUE);
2985
        co = assemble(c, 1);
2986
    }
2987
    qualname = c->u->u_qualname;
2988
    Py_INCREF(qualname);
2989
    compiler_exit_scope(c);
2990
    if (co == NULL) {
  Branch (2990:9): [True: 0, False: 842]
2991
        Py_DECREF(qualname);
2992
        return 0;
2993
    }
2994
2995
    if (!compiler_make_closure(c, co, funcflags, qualname)) {
  Branch (2995:9): [True: 0, False: 842]
2996
        Py_DECREF(qualname);
2997
        Py_DECREF(co);
2998
        return 0;
2999
    }
3000
    Py_DECREF(qualname);
3001
    Py_DECREF(co);
3002
3003
    return 1;
3004
}
3005
3006
static int
3007
compiler_if(struct compiler *c, stmt_ty s)
3008
{
3009
    basicblock *end, *next;
3010
    assert(s->kind == If_kind);
3011
    end = compiler_new_block(c);
3012
    if (end == NULL) {
  Branch (3012:9): [True: 0, False: 221k]
3013
        return 0;
3014
    }
3015
    if (asdl_seq_LEN(s->v.If.orelse)) {
3016
        next = compiler_new_block(c);
3017
        if (next == NULL) {
  Branch (3017:13): [True: 0, False: 6.00k]
3018
            return 0;
3019
        }
3020
    }
3021
    else {
3022
        next = end;
3023
    }
3024
    if (!compiler_jump_if(c, s->v.If.test, next, 0)) {
  Branch (3024:9): [True: 0, False: 221k]
3025
        return 0;
3026
    }
3027
    VISIT_SEQ(c, stmt, s->v.If.body);
3028
    if (asdl_seq_LEN(s->v.If.orelse)) {
3029
        ADDOP_JUMP_NOLINE(c, JUMP, end);
3030
        compiler_use_next_block(c, next);
3031
        VISIT_SEQ(c, stmt, s->v.If.orelse);
3032
    }
3033
    compiler_use_next_block(c, end);
3034
    return 1;
3035
}
3036
3037
static int
3038
compiler_for(struct compiler *c, stmt_ty s)
3039
{
3040
    basicblock *start, *body, *cleanup, *end;
3041
3042
    start = compiler_new_block(c);
3043
    body = compiler_new_block(c);
3044
    cleanup = compiler_new_block(c);
3045
    end = compiler_new_block(c);
3046
    if (start == NULL || body == NULL || end == NULL || cleanup == NULL) {
  Branch (3046:9): [True: 0, False: 2.93k]
  Branch (3046:26): [True: 0, False: 2.93k]
  Branch (3046:42): [True: 0, False: 2.93k]
  Branch (3046:57): [True: 0, False: 2.93k]
3047
        return 0;
3048
    }
3049
    if (!compiler_push_fblock(c, FOR_LOOP, start, end, NULL)) {
  Branch (3049:9): [True: 0, False: 2.93k]
3050
        return 0;
3051
    }
3052
    VISIT(c, expr, s->v.For.iter);
3053
    ADDOP(c, GET_ITER);
3054
    compiler_use_next_block(c, start);
3055
    ADDOP_JUMP(c, FOR_ITER, cleanup);
3056
    compiler_use_next_block(c, body);
3057
    VISIT(c, expr, s->v.For.target);
3058
    VISIT_SEQ(c, stmt, s->v.For.body);
3059
    /* Mark jump as artificial */
3060
    UNSET_LOC(c);
3061
    ADDOP_JUMP(c, JUMP, start);
3062
    compiler_use_next_block(c, cleanup);
3063
3064
    compiler_pop_fblock(c, FOR_LOOP, start);
3065
3066
    VISIT_SEQ(c, stmt, s->v.For.orelse);
3067
    compiler_use_next_block(c, end);
3068
    return 1;
3069
}
3070
3071
3072
static int
3073
compiler_async_for(struct compiler *c, stmt_ty s)
3074
{
3075
    basicblock *start, *except, *end;
3076
    if (IS_TOP_LEVEL_AWAIT(c)){
3077
        c->u->u_ste->ste_coroutine = 1;
3078
    } else if (c->u->u_scope_type != COMPILER_SCOPE_ASYNC_FUNCTION) {
  Branch (3078:16): [True: 8, False: 26]
3079
        return compiler_error(c, "'async for' outside async function");
3080
    }
3081
3082
    start = compiler_new_block(c);
3083
    except = compiler_new_block(c);
3084
    end = compiler_new_block(c);
3085
3086
    if (start == NULL || except == NULL || end == NULL) {
  Branch (3086:9): [True: 0, False: 28]
  Branch (3086:26): [True: 0, False: 28]
  Branch (3086:44): [True: 0, False: 28]
3087
        return 0;
3088
    }
3089
    VISIT(c, expr, s->v.AsyncFor.iter);
3090
    ADDOP(c, GET_AITER);
3091
3092
    compiler_use_next_block(c, start);
3093
    if (!compiler_push_fblock(c, FOR_LOOP, start, end, NULL)) {
  Branch (3093:9): [True: 0, False: 28]
3094
        return 0;
3095
    }
3096
    /* SETUP_FINALLY to guard the __anext__ call */
3097
    ADDOP_JUMP(c, SETUP_FINALLY, except);
3098
    ADDOP(c, GET_ANEXT);
3099
    ADDOP_LOAD_CONST(c, Py_None);
3100
    ADD_YIELD_FROM(c, 1);
3101
    ADDOP(c, POP_BLOCK);  /* for SETUP_FINALLY */
3102
3103
    /* Success block for __anext__ */
3104
    VISIT(c, expr, s->v.AsyncFor.target);
3105
    VISIT_SEQ(c, stmt, s->v.AsyncFor.body);
3106
    /* Mark jump as artificial */
3107
    UNSET_LOC(c);
3108
    ADDOP_JUMP(c, JUMP, start);
3109
3110
    compiler_pop_fblock(c, FOR_LOOP, start);
3111
3112
    /* Except block for __anext__ */
3113
    compiler_use_next_block(c, except);
3114
3115
    /* Use same line number as the iterator,
3116
     * as the END_ASYNC_FOR succeeds the `for`, not the body. */
3117
    SET_LOC(c, s->v.AsyncFor.iter);
3118
    ADDOP(c, END_ASYNC_FOR);
3119
3120
    /* `else` block */
3121
    VISIT_SEQ(c, stmt, s->v.For.orelse);
3122
3123
    compiler_use_next_block(c, end);
3124
3125
    return 1;
3126
}
3127
3128
static int
3129
compiler_while(struct compiler *c, stmt_ty s)
3130
{
3131
    basicblock *loop, *body, *end, *anchor = NULL;
3132
    loop = compiler_new_block(c);
3133
    body = compiler_new_block(c);
3134
    anchor = compiler_new_block(c);
3135
    end = compiler_new_block(c);
3136
    if (loop == NULL || body == NULL || anchor == NULL || end == NULL) {
  Branch (3136:9): [True: 0, False: 808]
  Branch (3136:25): [True: 0, False: 808]
  Branch (3136:41): [True: 0, False: 808]
  Branch (3136:59): [True: 0, False: 808]
3137
        return 0;
3138
    }
3139
    compiler_use_next_block(c, loop);
3140
    if (!compiler_push_fblock(c, WHILE_LOOP, loop, end, NULL)) {
  Branch (3140:9): [True: 1, False: 807]
3141
        return 0;
3142
    }
3143
    if (!compiler_jump_if(c, s->v.While.test, anchor, 0)) {
  Branch (3143:9): [True: 0, False: 807]
3144
        return 0;
3145
    }
3146
3147
    compiler_use_next_block(c, body);
3148
    VISIT_SEQ(c, stmt, s->v.While.body);
3149
    SET_LOC(c, s);
3150
    if (!compiler_jump_if(c, s->v.While.test, body, 1)) {
  Branch (3150:9): [True: 0, False: 780]
3151
        return 0;
3152
    }
3153
3154
    compiler_pop_fblock(c, WHILE_LOOP, loop);
3155
3156
    compiler_use_next_block(c, anchor);
3157
    if (s->v.While.orelse) {
  Branch (3157:9): [True: 455, False: 325]
3158
        VISIT_SEQ(c, stmt, s->v.While.orelse);
3159
    }
3160
    compiler_use_next_block(c, end);
3161
3162
    return 1;
3163
}
3164
3165
static int
3166
compiler_return(struct compiler *c, stmt_ty s)
3167
{
3168
    int preserve_tos = ((s->v.Return.value != NULL) &&
  Branch (3168:25): [True: 15.4k, False: 551]
3169
                        
                        (s->v.Return.value->kind != Constant_kind));  15.4k
  Branch (3169:25): [True: 13.0k, False: 2.43k]
3170
    if (c->u->u_ste->ste_type != FunctionBlock)
  Branch (3170:9): [True: 15, False: 15.9k]
3171
        return compiler_error(c, "'return' outside function");
3172
    if (s->v.Return.value != NULL &&
  Branch (3172:9): [True: 15.4k, False: 540]
3173
        
        c->u->u_ste->ste_coroutine && c->u->u_ste->ste_generator)  15.4k 37
  Branch (3173:9): [True: 37, False: 15.4k]
  Branch (3173:39): [True: 3, False: 34]
3174
    {
3175
            return compiler_error(
3176
                c, "'return' with value in async generator");
3177
    }
3178
    if (preserve_tos) {
  Branch (3178:9): [True: 13.0k, False: 2.97k]
3179
        VISIT(c, expr, s->v.Return.value);
3180
    } else {
3181
        /* Emit instruction with line number for return value */
3182
        if (s->v.Return.value != NULL) {
  Branch (3182:13): [True: 2.43k, False: 540]
3183
            SET_LOC(c, s->v.Return.value);
3184
            ADDOP(c, NOP);
3185
        }
3186
    }
3187
    if (s->v.Return.value == NULL || s->v.Return.value->lineno != s->lineno) {  15.4k
  Branch (3187:9): [True: 540, False: 15.4k]
  Branch (3187:38): [True: 27, False: 15.4k]
3188
        SET_LOC(c, s);
3189
        ADDOP(c, NOP);
3190
    }
3191
3192
    if (!compiler_unwind_fblock_stack(c, preserve_tos, NULL))
  Branch (3192:9): [True: 2, False: 15.9k]
3193
        return 0;
3194
    if (s->v.Return.value == NULL) {
  Branch (3194:9): [True: 540, False: 15.4k]
3195
        ADDOP_LOAD_CONST(c, Py_None);
3196
    }
3197
    else if (!preserve_tos) {
  Branch (3197:14): [True: 2.42k, False: 13.0k]
3198
        ADDOP_LOAD_CONST(c, s->v.Return.value->v.Constant.value);
3199
    }
3200
    ADDOP(c, RETURN_VALUE);
3201
3202
    return 1;
3203
}
3204
3205
static int
3206
compiler_break(struct compiler *c)
3207
{
3208
    struct fblockinfo *loop = NULL;
3209
    /* Emit instruction with line number */
3210
    ADDOP(c, NOP);
3211
    if (!compiler_unwind_fblock_stack(c, 0, &loop)) {
  Branch (3211:9): [True: 3, False: 813]
3212
        return 0;
3213
    }
3214
    if (loop == NULL) {
  Branch (3214:9): [True: 9, False: 804]
3215
        return compiler_error(c, "'break' outside loop");
3216
    }
3217
    if (!compiler_unwind_fblock(c, loop, 0)) {
  Branch (3217:9): [True: 0, False: 804]
3218
        return 0;
3219
    }
3220
    ADDOP_JUMP(c, JUMP, loop->fb_exit);
3221
    return 1;
3222
}
3223
3224
static int
3225
compiler_continue(struct compiler *c)
3226
{
3227
    struct fblockinfo *loop = NULL;
3228
    /* Emit instruction with line number */
3229
    ADDOP(c, NOP);
3230
    if (!compiler_unwind_fblock_stack(c, 0, &loop)) {
  Branch (3230:9): [True: 3, False: 590]
3231
        return 0;
3232
    }
3233
    if (loop == NULL) {
  Branch (3233:9): [True: 11, False: 579]
3234
        return compiler_error(c, "'continue' not properly in loop");
3235
    }
3236
    ADDOP_JUMP(c, JUMP, loop->fb_block);
3237
    return 1;
3238
}
3239
3240
3241
/* Code generated for "try: <body> finally: <finalbody>" is as follows:
3242
3243
        SETUP_FINALLY           L
3244
        <code for body>
3245
        POP_BLOCK
3246
        <code for finalbody>
3247
        JUMP E
3248
    L:
3249
        <code for finalbody>
3250
    E:
3251
3252
   The special instructions use the block stack.  Each block
3253
   stack entry contains the instruction that created it (here
3254
   SETUP_FINALLY), the level of the value stack at the time the
3255
   block stack entry was created, and a label (here L).
3256
3257
   SETUP_FINALLY:
3258
    Pushes the current value stack level and the label
3259
    onto the block stack.
3260
   POP_BLOCK:
3261
    Pops an entry from the block stack.
3262
3263
   The block stack is unwound when an exception is raised:
3264
   when a SETUP_FINALLY entry is found, the raised and the caught
3265
   exceptions are pushed onto the value stack (and the exception
3266
   condition is cleared), and the interpreter jumps to the label
3267
   gotten from the block stack.
3268
*/
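/* A minimal sketch (illustration only): the layout above can be inspected
   from Python with the dis module.  Exact opcode names vary between CPython
   versions, so treat the disassembly as indicative rather than normative.

       import dis

       def f():
           try:
               body()          # body() and final() are placeholder names
           finally:
               final()

       dis.dis(f)   # shows the duplicated finalbody and the reraise path
*/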
3269
3270
static int
3271
compiler_try_finally(struct compiler *c, stmt_ty s)
3272
{
3273
    basicblock *body, *end, *exit, *cleanup;
3274
3275
    body = compiler_new_block(c);
3276
    end = compiler_new_block(c);
3277
    exit = compiler_new_block(c);
3278
    cleanup = compiler_new_block(c);
3279
    if (body == NULL || end == NULL || exit == NULL || cleanup == NULL) {
  Branch (3279:9): [True: 0, False: 348]
  Branch (3279:25): [True: 0, False: 348]
  Branch (3279:40): [True: 0, False: 348]
  Branch (3279:56): [True: 0, False: 348]
3280
        return 0;
3281
    }
3282
    /* `try` block */
3283
    ADDOP_JUMP(c, SETUP_FINALLY, end);
3284
    compiler_use_next_block(c, body);
3285
    if (!compiler_push_fblock(c, FINALLY_TRY, body, end, s->v.Try.finalbody))
  Branch (3285:9): [True: 0, False: 348]
3286
        return 0;
3287
    if (s->v.Try.handlers && asdl_seq_LEN(s->v.Try.handlers)) {  179
  Branch (3287:9): [True: 179, False: 169]
3288
        if (!compiler_try_except(c, s))
  Branch (3288:13): [True: 0, False: 24]
3289
            return 0;
3290
    }
3291
    else {
3292
        VISIT_SEQ(c, stmt, s->v.Try.body);
3293
    }
3294
    ADDOP_NOLINE(c, POP_BLOCK);
3295
    compiler_pop_fblock(c, FINALLY_TRY, body);
3296
    VISIT_SEQ(c, stmt, s->v.Try.finalbody);
3297
    ADDOP_JUMP_NOLINE(c, JUMP, exit);
3298
    /* `finally` block */
3299
    compiler_use_next_block(c, end);
3300
3301
    UNSET_LOC(c);
3302
    ADDOP_JUMP(c, SETUP_CLEANUP, cleanup);
3303
    ADDOP(c, PUSH_EXC_INFO);
3304
    if (!compiler_push_fblock(c, FINALLY_END, end, NULL, NULL))
  Branch (3304:9): [True: 0, False: 346]
3305
        return 0;
3306
    VISIT_SEQ(c, stmt, s->v.Try.finalbody);
3307
    compiler_pop_fblock(c, FINALLY_END, end);
3308
    ADDOP_I(c, RERAISE, 0);
3309
    compiler_use_next_block(c, cleanup);
3310
    POP_EXCEPT_AND_RERAISE(c);
3311
    compiler_use_next_block(c, exit);
3312
    return 1;
3313
}
3314
3315
static int
3316
compiler_try_star_finally(struct compiler *c, stmt_ty s)
3317
{
3318
    basicblock *body = compiler_new_block(c);
3319
    if (body == NULL) {
  Branch (3319:9): [True: 0, False: 12]
3320
        return 0;
3321
    }
3322
    basicblock *end = compiler_new_block(c);
3323
    if (!end) {
  Branch (3323:9): [True: 0, False: 12]
3324
        return 0;
3325
    }
3326
    basicblock *exit = compiler_new_block(c);
3327
    if (!exit) {
  Branch (3327:9): [True: 0, False: 12]
3328
        return 0;
3329
    }
3330
    basicblock *cleanup = compiler_new_block(c);
3331
    if (!cleanup) {
  Branch (3331:9): [True: 0, False: 12]
3332
        return 0;
3333
    }
3334
    /* `try` block */
3335
    ADDOP_JUMP(c, SETUP_FINALLY, end);
3336
    compiler_use_next_block(c, body);
3337
    if (!compiler_push_fblock(c, FINALLY_TRY, body, end, s->v.TryStar.finalbody)) {
  Branch (3337:9): [True: 0, False: 12]
3338
        return 0;
3339
    }
3340
    if (s->v.TryStar.handlers && asdl_seq_LEN(s->v.TryStar.handlers)) {
  Branch (3340:9): [True: 12, False: 0]
3341
        if (!compiler_try_star_except(c, s)) {
  Branch (3341:13): [True: 3, False: 9]
3342
            return 0;
3343
        }
3344
    }
3345
    else {
3346
        VISIT_SEQ(c, stmt, s->v.TryStar.body);
3347
    }
3348
    ADDOP_NOLINE(c, POP_BLOCK);
3349
    compiler_pop_fblock(c, FINALLY_TRY, body);
3350
    VISIT_SEQ(c, stmt, s->v.TryStar.finalbody);
3351
    ADDOP_JUMP_NOLINE(c, JUMP, exit);
3352
    /* `finally` block */
3353
    compiler_use_next_block(c, end);
3354
3355
    UNSET_LOC(c);
3356
    ADDOP_JUMP(c, SETUP_CLEANUP, cleanup);
3357
    ADDOP(c, PUSH_EXC_INFO);
3358
    if (!compiler_push_fblock(c, FINALLY_END, end, NULL, NULL)) {
  Branch (3358:9): [True: 0, False: 9]
3359
        return 0;
3360
    }
3361
    VISIT_SEQ(c, stmt, s->v.TryStar.finalbody);
3362
    compiler_pop_fblock(c, FINALLY_END, end);
3363
    ADDOP_I(c, RERAISE, 0);
3364
    compiler_use_next_block(c, cleanup);
3365
    POP_EXCEPT_AND_RERAISE(c);
3366
    compiler_use_next_block(c, exit);
3367
    return 1;
3368
}
3369
3370
3371
/*
3372
   Code generated for "try: S except E1 as V1: S1 except E2 as V2: S2 ...":
3373
   (The contents of the value stack is shown in [], with the top
3374
   at the right; 'tb' is trace-back info, 'val' the exception's
3375
   associated value, and 'exc' the exception.)
3376
3377
   Value stack          Label   Instruction     Argument
3378
   []                           SETUP_FINALLY   L1
3379
   []                           <code for S>
3380
   []                           POP_BLOCK
3381
   []                           JUMP            L0
3382
3383
   [exc]                L1:     <evaluate E1>           )
3384
   [exc, E1]                    CHECK_EXC_MATCH         )
3385
   [exc, bool]                  POP_JUMP_IF_FALSE L2    ) only if E1
3386
   [exc]                        <assign to V1>  (or POP if no V1)
3387
   []                           <code for S1>
3388
                                JUMP            L0
3389
3390
   [exc]                L2:     <evaluate E2>
3391
   .............................etc.......................
3392
3393
   [exc]                Ln+1:   RERAISE     # re-raise exception
3394
3395
   []                   L0:     <next statement>
3396
3397
   Of course, parts are not generated if Vi or Ei is not present.
3398
*/
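/* A minimal sketch (illustration only): the table above corresponds to
   compiling code of the following shape, which can likewise be inspected
   with dis.dis().  work(), handle() and recover() are placeholder names.

       import dis

       def g():
           try:
               work()
           except ValueError as e:
               handle(e)
           except KeyError:
               recover()

       dis.dis(g)
*/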
3399
static int
3400
compiler_try_except(struct compiler *c, stmt_ty s)
3401
{
3402
    basicblock *body, *except, *end, *cleanup;
3403
    Py_ssize_t i, n;
3404
3405
    body = compiler_new_block(c);
3406
    except = compiler_new_block(c);
3407
    end = compiler_new_block(c);
3408
    cleanup = compiler_new_block(c);
3409
    if (body == NULL || except == NULL || end == NULL || cleanup == NULL)
  Branch (3409:9): [True: 0, False: 2.38k]
  Branch (3409:25): [True: 0, False: 2.38k]
  Branch (3409:43): [True: 0, False: 2.38k]
  Branch (3409:58): [True: 0, False: 2.38k]
3410
        return 0;
3411
    ADDOP_JUMP(c, SETUP_FINALLY, except);
3412
    compiler_use_next_block(c, body);
3413
    if (!compiler_push_fblock(c, TRY_EXCEPT, body, NULL, NULL))
  Branch (3413:9): [True: 1, False: 2.38k]
3414
        return 0;
3415
    VISIT_SEQ(c, stmt, s->v.Try.body);
3416
    compiler_pop_fblock(c, TRY_EXCEPT, body);
3417
    ADDOP_NOLINE(c, POP_BLOCK);
3418
    if (s->v.Try.orelse && asdl_seq_LEN(s->v.Try.orelse)) {  1.44k
  Branch (3418:9): [True: 1.44k, False: 943]
3419
        VISIT_SEQ(c, stmt, s->v.Try.orelse);
3420
    }
3421
    ADDOP_JUMP_NOLINE(c, JUMP, end);
3422
    n = asdl_seq_LEN(s->v.Try.handlers);
3423
    compiler_use_next_block(c, except);
3424
3425
    UNSET_LOC(c);
3426
    ADDOP_JUMP(c, SETUP_CLEANUP, cleanup);
3427
    ADDOP(c, PUSH_EXC_INFO);
3428
    /* Runtime will push a block here, so we need to account for that */
3429
    if (!compiler_push_fblock(c, EXCEPTION_HANDLER, NULL, NULL, NULL))
  Branch (3429:9): [True: 0, False: 2.38k]
3430
        return 0;
3431
    
    for (i = 0; i < n; i++) {  2.38k 2.52k
  Branch (3431:17): [True: 2.53k, False: 2.37k]
3432
        excepthandler_ty handler = (excepthandler_ty)asdl_seq_GET(
3433
            s->v.Try.handlers, i);
3434
        SET_LOC(c, handler);
3435
        if (!handler->v.ExceptHandler.type && i < n-1) {  179
  Branch (3435:13): [True: 179, False: 2.35k]
  Branch (3435:47): [True: 1, False: 178]
3436
            return compiler_error(c, "default 'except:' must be last");
3437
        }
3438
        except = compiler_new_block(c);
3439
        if (except == NULL)
  Branch (3439:13): [True: 0, False: 2.53k]
3440
            return 0;
3441
        if (handler->v.ExceptHandler.type) {
  Branch (3441:13): [True: 2.35k, False: 178]
3442
            VISIT(c, expr, handler->v.ExceptHandler.type);
3443
            ADDOP(c, CHECK_EXC_MATCH);
3444
            ADDOP_JUMP(c, POP_JUMP_IF_FALSE, except);
3445
        }
3446
        if (handler->v.ExceptHandler.name) {
  Branch (3446:13): [True: 391, False: 2.14k]
3447
            basicblock *cleanup_end, *cleanup_body;
3448
3449
            cleanup_end = compiler_new_block(c);
3450
            cleanup_body = compiler_new_block(c);
3451
            if (cleanup_end == NULL || cleanup_body == NULL) {
  Branch (3451:17): [True: 0, False: 391]
  Branch (3451:40): [True: 0, False: 391]
3452
                return 0;
3453
            }
3454
3455
            compiler_nameop(c, handler->v.ExceptHandler.name, Store);
3456
3457
            /*
3458
              try:
3459
                  # body
3460
              except type as name:
3461
                  try:
3462
                      # body
3463
                  finally:
3464
                      name = None # in case body contains "del name"
3465
                      del name
3466
            */
3467
3468
            /* second try: */
3469
            ADDOP_JUMP(c, SETUP_CLEANUP, cleanup_end);
3470
            compiler_use_next_block(c, cleanup_body);
3471
            if (!compiler_push_fblock(c, HANDLER_CLEANUP, cleanup_body, NULL, handler->v.ExceptHandler.name))
  Branch (3471:17): [True: 0, False: 391]
3472
                return 0;
3473
3474
            /* second # body */
3475
            VISIT_SEQ(c, stmt, handler->v.ExceptHandler.body);
3476
            compiler_pop_fblock(c, HANDLER_CLEANUP, cleanup_body);
3477
            /* name = None; del name; # Mark as artificial */
3478
            UNSET_LOC(c);
3479
            ADDOP(c, POP_BLOCK);
3480
            ADDOP(c, POP_BLOCK);
3481
            ADDOP(c, POP_EXCEPT);
3482
            ADDOP_LOAD_CONST(c, Py_None);
3483
            compiler_nameop(c, handler->v.ExceptHandler.name, Store);
3484
            compiler_nameop(c, handler->v.ExceptHandler.name, Del);
3485
            ADDOP_JUMP(c, JUMP, end);
3486
3487
            /* except: */
3488
            compiler_use_next_block(c, cleanup_end);
3489
3490
            /* name = None; del name; # Mark as artificial */
3491
            UNSET_LOC(c);
3492
3493
            ADDOP_LOAD_CONST(c, Py_None);
3494
            compiler_nameop(c, handler->v.ExceptHandler.name, Store);
3495
            compiler_nameop(c, handler->v.ExceptHandler.name, Del);
3496
3497
            ADDOP_I(c, RERAISE, 1);
3498
        }
3499
        else {
3500
            basicblock *cleanup_body;
3501
3502
            cleanup_body = compiler_new_block(c);
3503
            if (!cleanup_body)
  Branch (3503:17): [True: 0, False: 2.14k]
3504
                return 0;
3505
3506
            ADDOP(c, POP_TOP); /* exc_value */
3507
            compiler_use_next_block(c, cleanup_body);
3508
            if (!compiler_push_fblock(c, HANDLER_CLEANUP, cleanup_body, NULL, NULL))
  Branch (3508:17): [True: 0, False: 2.14k]
3509
                return 0;
3510
            VISIT_SEQ(c, stmt, handler->v.ExceptHandler.body);
3511
            compiler_pop_fblock(c, HANDLER_CLEANUP, cleanup_body);
3512
            UNSET_LOC(c);
3513
            ADDOP(c, POP_BLOCK);
3514
            ADDOP(c, POP_EXCEPT);
3515
            ADDOP_JUMP(c, JUMP, end);
3516
        }
3517
        compiler_use_next_block(c, except);
3518
    }
3519
    /* Mark as artificial */
3520
    UNSET_LOC(c);
3521
    compiler_pop_fblock(c, EXCEPTION_HANDLER, NULL);
3522
    ADDOP_I(c, RERAISE, 0);
3523
    compiler_use_next_block(c, cleanup);
3524
    POP_EXCEPT_AND_RERAISE(c);
3525
    compiler_use_next_block(c, end);
3526
    return 1;
3527
}
3528
3529
/*
3530
   Code generated for "try: S except* E1 as V1: S1 except* E2 as V2: S2 ...":
3531
   (The contents of the value stack is shown in [], with the top
3532
   at the right; 'tb' is trace-back info, 'val' the exception instance,
3533
   and 'typ' the exception's type.)
3534
3535
   Value stack                   Label         Instruction     Argument
3536
   []                                         SETUP_FINALLY         L1
3537
   []                                         <code for S>
3538
   []                                         POP_BLOCK
3539
   []                                         JUMP                  L0
3540
3541
   [exc]                            L1:       COPY 1       )  save copy of the original exception
3542
   [orig, exc]                                BUILD_LIST   )  list for raised/reraised excs ("result")
3543
   [orig, exc, res]                           SWAP 2
3544
3545
   [orig, res, exc]                           <evaluate E1>
3546
   [orig, res, exc, E1]                       CHECK_EG_MATCH
3547
   [orig, res, rest/exc, match?]              COPY 1
3548
   [orig, res, rest/exc, match?, match?]      POP_JUMP_IF_NOT_NONE  H1
3549
   [orig, res, exc, None]                     POP_TOP
3550
   [orig, res, exc]                           JUMP L2
3551
3552
   [orig, res, rest, match]         H1:       <assign to V1>  (or POP if no V1)
3553
3554
   [orig, res, rest]                          SETUP_FINALLY         R1
3555
   [orig, res, rest]                          <code for S1>
3556
   [orig, res, rest]                          JUMP                  L2
3557
3558
   [orig, res, rest, i, v]          R1:       LIST_APPEND   3 ) exc raised in except* body - add to res
3559
   [orig, res, rest, i]                       POP
3560
3561
   [orig, res, rest]                L2:       <evaluate E2>
3562
   .............................etc.......................
3563
3564
   [orig, res, rest]                Ln+1:     LIST_APPEND 1  ) add unhandled exc to res (could be None)
3565
3566
   [orig, res]                                PREP_RERAISE_STAR
3567
   [exc]                                      COPY 1
3568
   [exc, exc]                                 POP_JUMP_IF_NOT_NONE  RER
3569
   [exc]                                      POP_TOP
3570
   []                                         JUMP                  L0
3571
3572
   [exc]                            RER:      SWAP 2
3573
   [exc, prev_exc_info]                       POP_EXCEPT
3574
   [exc]                                      RERAISE               0
3575
3576
   []                               L0:       <next statement>
3577
*/
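/* A minimal sketch (illustration only): the layout above is produced for
   except* handlers, e.g.

       def h():
           try:
               work()                     # placeholder names
           except* ValueError as eg:
               handle(eg)
           except* KeyError:
               recover()

   Each except* clause splits off the matching part of the exception group
   (CHECK_EG_MATCH) and runs its handler; anything raised inside a handler or
   left unmatched is collected into the list that PREP_RERAISE_STAR later
   turns back into a group to re-raise.
*/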
3578
static int
3579
compiler_try_star_except(struct compiler *c, stmt_ty s)
3580
{
3581
    basicblock *body = compiler_new_block(c);
3582
    if (body == NULL) {
  Branch (3582:9): [True: 0, False: 41]
3583
        return 0;
3584
    }
3585
    basicblock *except = compiler_new_block(c);
3586
    if (except == NULL) {
  Branch (3586:9): [True: 0, False: 41]
3587
        return 0;
3588
    }
3589
    basicblock *orelse = compiler_new_block(c);
3590
     if (orelse == NULL) {
  Branch (3590:10): [True: 0, False: 41]
3591
        return 0;
3592
    }
3593
    basicblock *end = compiler_new_block(c);
3594
    if (end == NULL) {
  Branch (3594:9): [True: 0, False: 41]
3595
        return 0;
3596
    }
3597
    basicblock *cleanup = compiler_new_block(c);
3598
    if (cleanup == NULL) {
  Branch (3598:9): [True: 0, False: 41]
3599
        return 0;
3600
    }
3601
    basicblock *reraise_star = compiler_new_block(c);
3602
    if (reraise_star == NULL) {
  Branch (3602:9): [True: 0, False: 41]
3603
        return 0;
3604
    }
3605
3606
    ADDOP_JUMP(c, SETUP_FINALLY, except);
3607
    compiler_use_next_block(c, body);
3608
    if (!compiler_push_fblock(c, TRY_EXCEPT, body, NULL, NULL)) {
  Branch (3608:9): [True: 0, False: 41]
3609
        return 0;
3610
    }
3611
    VISIT_SEQ(c, stmt, s->v.TryStar.body);
3612
    compiler_pop_fblock(c, TRY_EXCEPT, body);
3613
    ADDOP_NOLINE(c, POP_BLOCK);
3614
    ADDOP_JUMP_NOLINE(c, JUMP, orelse);
3615
    Py_ssize_t n = asdl_seq_LEN(s->v.TryStar.handlers);
3616
    compiler_use_next_block(c, except);
3617
3618
    UNSET_LOC(c);
3619
    ADDOP_JUMP(c, SETUP_CLEANUP, cleanup);
3620
    ADDOP(c, PUSH_EXC_INFO);
3621
    /* Runtime will push a block here, so we need to account for that */
3622
    if (!compiler_push_fblock(c, EXCEPTION_GROUP_HANDLER,
  Branch (3622:9): [True: 0, False: 41]
3623
                                 NULL, NULL, "except handler")) {
3624
        return 0;
3625
    }
3626
    
    for (Py_ssize_t i = 0; i < n; i++) {  41 34
  Branch (3626:28): [True: 42, False: 33]
3627
        excepthandler_ty handler = (excepthandler_ty)asdl_seq_GET(
3628
            s->v.TryStar.handlers, i);
3629
        SET_LOC(c, handler);
3630
        except = compiler_new_block(c);
3631
        if (except == NULL) {
  Branch (3631:13): [True: 0, False: 42]
3632
            return 0;
3633
        }
3634
        basicblock *handle_match = compiler_new_block(c);
3635
        if (handle_match == NULL) {
  Branch (3635:13): [True: 0, False: 42]
3636
            return 0;
3637
        }
3638
        if (i == 0) {
  Branch (3638:13): [True: 41, False: 1]
3639
            /* Push the original EG into the stack */
3640
            /*
3641
               [exc]            COPY 1
3642
               [orig, exc]
3643
            */
3644
            ADDOP_I(c, COPY, 1);
3645
3646
            /* create empty list for exceptions raised/reraise in the except* blocks */
3647
            /*
3648
               [orig, exc]       BUILD_LIST
3649
               [orig, exc, []]   SWAP 2
3650
               [orig, [], exc]
3651
            */
3652
            ADDOP_I(c, BUILD_LIST, 0);
3653
            ADDOP_I(c, SWAP, 2);
3654
        }
3655
        if (handler->v.ExceptHandler.type) {
  Branch (3655:13): [True: 42, False: 0]
3656
            VISIT(c, expr, handler->v.ExceptHandler.type);
3657
            ADDOP(c, CHECK_EG_MATCH);
3658
            ADDOP_I(c, COPY, 1);
3659
            ADDOP_JUMP(c, POP_JUMP_IF_NOT_NONE, handle_match);
3660
            ADDOP(c, POP_TOP);  // match
3661
            ADDOP_JUMP(c, JUMP, except);
3662
        }
3663
3664
        compiler_use_next_block(c, handle_match);
3665
3666
        basicblock *cleanup_end = compiler_new_block(c);
3667
        if (cleanup_end == NULL) {
  Branch (3667:13): [True: 0, False: 42]
3668
            return 0;
3669
        }
3670
        basicblock *cleanup_body = compiler_new_block(c);
3671
        if (cleanup_body == NULL) {
  Branch (3671:13): [True: 0, False: 42]
3672
            return 0;
3673
        }
3674
3675
        if (handler->v.ExceptHandler.name) {
  Branch (3675:13): [True: 19, False: 23]
3676
            compiler_nameop(c, handler->v.ExceptHandler.name, Store);
3677
        }
3678
        else {
3679
            ADDOP(c, POP_TOP);  // match
3680
        }
3681
3682
        /*
3683
          try:
3684
              # body
3685
          except type as name:
3686
              try:
3687
                  # body
3688
              finally:
3689
                  name = None # in case body contains "del name"
3690
                  del name
3691
        */
3692
        /* second try: */
3693
        ADDOP_JUMP(c, SETUP_CLEANUP, cleanup_end);
3694
        compiler_use_next_block(c, cleanup_body);
3695
        if (!compiler_push_fblock(c, HANDLER_CLEANUP, cleanup_body, NULL, handler->v.ExceptHandler.name))
  Branch (3695:13): [True: 0, False: 42]
3696
            return 0;
3697
3698
        /* second # body */
3699
        VISIT_SEQ(c, stmt, handler->v.ExceptHandler.body);
3700
        compiler_pop_fblock(c, HANDLER_CLEANUP, cleanup_body);
3701
        /* name = None; del name; # Mark as artificial */
3702
        UNSET_LOC(c);
3703
        ADDOP(c, POP_BLOCK);
3704
        if (handler->v.ExceptHandler.name) {
  Branch (3704:13): [True: 11, False: 23]
3705
            ADDOP_LOAD_CONST(c, Py_None);
3706
            compiler_nameop(c, handler->v.ExceptHandler.name, Store);
3707
            compiler_nameop(c, handler->v.ExceptHandler.name, Del);
3708
        }
3709
        ADDOP_JUMP(c, JUMP, except);
3710
3711
        /* except: */
3712
        compiler_use_next_block(c, cleanup_end);
3713
3714
        /* name = None; del name; # Mark as artificial */
3715
        UNSET_LOC(c);
3716
3717
        if (handler->v.ExceptHandler.name) {
  Branch (3717:13): [True: 11, False: 23]
3718
            ADDOP_LOAD_CONST(c, Py_None);
3719
            compiler_nameop(c, handler->v.ExceptHandler.name, Store);
3720
            compiler_nameop(c, handler->v.ExceptHandler.name, Del);
3721
        }
3722
3723
        /* add exception raised to the res list */
3724
        ADDOP_I(c, LIST_APPEND, 3); // exc
3725
        ADDOP(c, POP_TOP); // lasti
3726
3727
        ADDOP_JUMP(c, JUMP, except);
3728
        compiler_use_next_block(c, except);
3729
3730
        if (i == n - 1) {
  Branch (3730:13): [True: 33, False: 1]
3731
            /* Add exc to the list (if not None it's the unhandled part of the EG) */
3732
            ADDOP_I(c, LIST_APPEND, 1);
3733
            ADDOP_JUMP(c, JUMP, reraise_star);
3734
        }
3735
    }
3736
    /* Mark as artificial */
3737
    UNSET_LOC(c);
3738
    compiler_pop_fblock(c, EXCEPTION_GROUP_HANDLER, NULL);
3739
    basicblock *reraise = compiler_new_block(c);
3740
    if (!reraise) {
  Branch (3740:9): [True: 0, False: 33]
3741
        return 0;
3742
    }
3743
3744
    compiler_use_next_block(c, reraise_star);
3745
    ADDOP(c, PREP_RERAISE_STAR);
3746
    ADDOP_I(c, COPY, 1);
3747
    ADDOP_JUMP(c, POP_JUMP_IF_NOT_NONE, reraise);
3748
3749
    /* Nothing to reraise */
3750
    ADDOP(c, POP_TOP);
3751
    ADDOP(c, POP_BLOCK);
3752
    ADDOP(c, POP_EXCEPT);
3753
    ADDOP_JUMP(c, JUMP, end);
3754
    compiler_use_next_block(c, reraise);
3755
    ADDOP(c, POP_BLOCK);
3756
    ADDOP_I(c, SWAP, 2);
3757
    ADDOP(c, POP_EXCEPT);
3758
    ADDOP_I(c, RERAISE, 0);
3759
    compiler_use_next_block(c, cleanup);
3760
    POP_EXCEPT_AND_RERAISE(c);
3761
    compiler_use_next_block(c, orelse);
3762
    VISIT_SEQ(c, stmt, s->v.TryStar.orelse);
3763
    compiler_use_next_block(c, end);
3764
    return 1;
3765
}
3766
3767
static int
3768
compiler_try(struct compiler *c, stmt_ty s) {
3769
    if (s->v.Try.finalbody && asdl_seq_LEN(s->v.Try.finalbody))  1.65k
  Branch (3769:9): [True: 1.65k, False: 1.05k]
3770
        return compiler_try_finally(c, s);
3771
    else
3772
        return compiler_try_except(c, s);
3773
}
3774
3775
static int
3776
compiler_try_star(struct compiler *c, stmt_ty s)
3777
{
3778
    if (s->v.TryStar.finalbody && asdl_seq_LEN(s->v.TryStar.finalbody)) {  18
  Branch (3778:9): [True: 18, False: 23]
3779
        return compiler_try_star_finally(c, s);
3780
    }
3781
    else {
3782
        return compiler_try_star_except(c, s);
3783
    }
3784
}
3785
3786
static int
3787
compiler_import_as(struct compiler *c, identifier name, identifier asname)
3788
{
3789
    /* The IMPORT_NAME opcode was already generated.  This function
3790
       merely needs to bind the result to a name.
3791
3792
       If there is a dot in name, we need to split it and emit a
3793
       IMPORT_FROM for each name.
3794
    */
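    /* A small sketch of the dotted case (illustration only):

           import a.b.c as d

       binds the object a.b.c, not the top-level package a, to the name d;
       after the IMPORT_NAME of "a.b.c" the loop below emits IMPORT_FROM for
       "b" and then "c" before storing the result under d.
    */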
3795
    Py_ssize_t len = PyUnicode_GET_LENGTH(name);
3796
    Py_ssize_t dot = PyUnicode_FindChar(name, '.', 0, len, 1);
3797
    if (dot == -2)
  Branch (3797:9): [True: 0, False: 105]
3798
        return 0;
3799
    if (dot != -1) {
  Branch (3799:9): [True: 4, False: 101]
3800
        /* Consume the base module name to get the first attribute */
3801
        while (1) {
  Branch (3801:16): [Folded - Ignored]
3802
            Py_ssize_t pos = dot + 1;
3803
            PyObject *attr;
3804
            dot = PyUnicode_FindChar(name, '.', pos, len, 1);
3805
            if (dot == -2)
  Branch (3805:17): [True: 0, False: 6]
3806
                return 0;
3807
            attr = PyUnicode_Substring(name, pos, (dot != -1) ? dot : len);  2 4
  Branch (3807:51): [True: 2, False: 4]
3808
            if (!attr)
  Branch (3808:17): [True: 0, False: 6]
3809
                return 0;
3810
            
            ADDOP_N(c, IMPORT_FROM, attr, names);  6
3811
            if (dot == -1) {  6
  Branch (3811:17): [True: 4, False: 2]
3812
                break;
3813
            }
3814
            ADDOP_I(c, SWAP, 2);
3815
            ADDOP(c, POP_TOP);
3816
        }
3817
        if (!compiler_nameop(c, asname, Store)) {
  Branch (3817:13): [True: 0, False: 4]
3818
            return 0;
3819
        }
3820
        ADDOP(c, POP_TOP);
3821
        return 1;
3822
    }
3823
    return compiler_nameop(c, asname, Store);
3824
}
3825
3826
static int
3827
compiler_import(struct compiler *c, stmt_ty s)
3828
{
3829
    /* The Import node stores a module name like a.b.c as a single
3830
       string.  This is convenient for all cases except
3831
         import a.b.c as d
3832
       where we need to parse that string to extract the individual
3833
       module names.
3834
       XXX Perhaps change the representation to make this case simpler?
3835
     */
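    /* A small sketch of the two forms (illustration only):

           import a.b.c        # binds the name "a" (the top-level package)
           import a.b.c as d   # binds "d" to the submodule a.b.c

       The loop below handles both, delegating the "as" form to
       compiler_import_as().
    */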
3836
    Py_ssize_t i, n = asdl_seq_LEN(s->v.Import.names);
3837
3838
    PyObject *zero = _PyLong_GetZero();  // borrowed reference
3839
    for (i = 0; i < n; i++) {  3.28k
  Branch (3839:17): [True: 3.28k, False: 3.17k]
3840
        alias_ty alias = (alias_ty)asdl_seq_GET(s->v.Import.names, i);
3841
        int r;
3842
3843
        ADDOP_LOAD_CONST(c, zero);
3844
        ADDOP_LOAD_CONST(c, Py_None);
3845
        ADDOP_NAME(c, IMPORT_NAME, alias->name, names);
3846
3847
        if (alias->asname) {
  Branch (3847:13): [True: 105, False: 3.17k]
3848
            r = compiler_import_as(c, alias->name, alias->asname);
3849
            if (!r)
  Branch (3849:17): [True: 0, False: 105]
3850
                return r;
3851
        }
3852
        else {
3853
            identifier tmp = alias->name;
3854
            Py_ssize_t dot = PyUnicode_FindChar(
3855
                alias->name, '.', 0, PyUnicode_GET_LENGTH(alias->name), 1);
3856
            if (dot != -1) {
  Branch (3856:17): [True: 295, False: 2.88k]
3857
                tmp = PyUnicode_Substring(alias->name, 0, dot);
3858
                if (tmp == NULL)
  Branch (3858:21): [True: 0, False: 295]
3859
                    return 0;
3860
            }
3861
            r = compiler_nameop(c, tmp, Store);
3862
            if (dot != -1) {
  Branch (3862:17): [True: 295, False: 2.88k]
3863
                Py_DECREF(tmp);
3864
            }
3865
            if (!r)
  Branch (3865:17): [True: 0, False: 3.17k]
3866
                return r;
3867
        }
3868
    }
3869
    return 1;
3870
}
3871
3872
static int
3873
compiler_from_import(struct compiler *c, stmt_ty s)
3874
{
3875
    Py_ssize_t i, n = asdl_seq_LEN(s->v.ImportFrom.names);
3876
    PyObject *names;
3877
3878
    
    ADDOP_LOAD_CONST_NEW(c, PyLong_FromLong(s->v.ImportFrom.level));  1.46k
3879
3880
    names = PyTuple_New(n);
3881
    if (!names)  1.46k
  Branch (3881:9): [True: 0, False: 1.46k]
3882
        return 0;
3883
3884
    /* build up the names */
3885
    
    for (i = 0; i < n; i++) {  1.46k 1.92k
  Branch (3885:17): [True: 1.92k, False: 1.46k]
3886
        alias_ty alias = (alias_ty)asdl_seq_GET(s->v.ImportFrom.names, i);
3887
        Py_INCREF(alias->name);
3888
        PyTuple_SET_ITEM(names, i, alias->name);
3889
    }
3890
3891
    if (s->lineno > c->c_future->ff_lineno && s->v.ImportFrom.module &&  1.27k
  Branch (3891:9): [True: 1.27k, False: 198]
  Branch (3891:47): [True: 1.22k, False: 43]
3892
        _PyUnicode_EqualToASCIIString(s->v.ImportFrom.module, "__future__")) {  1.22k
  Branch (3892:9): [True: 9, False: 1.21k]
3893
        Py_DECREF(names);
3894
        return compiler_error(c, "from __future__ imports must occur "
3895
                              "at the beginning of the file");
3896
    }
3897
    
    ADDOP_LOAD_CONST_NEW(c, names);  1.45k
3898
3899
    if (s->v.ImportFrom.module) {  1.45k
  Branch (3899:9): [True: 1.41k, False: 43]
3900
        ADDOP_NAME(c, IMPORT_NAME, s->v.ImportFrom.module, names);
3901
    }
3902
    else {
3903
        _Py_DECLARE_STR(empty, "");
3904
        ADDOP_NAME(c, IMPORT_NAME, &_Py_STR(empty), names);
3905
    }
3906
    
    for (i = 0; i < n; i++) {  1.45k 1.55k
  Branch (3906:17): [True: 1.91k, False: 1.09k]
3907
        alias_ty alias = (alias_ty)asdl_seq_GET(s->v.ImportFrom.names, i);
3908
        identifier store_name;
3909
3910
        if (i == 0 && PyUnicode_READ_CHAR(alias->name, 0) == '*') {  1.45k 1.45k
  Branch (3910:13): [True: 1.45k, False: 460]
  Branch (3910:23): [True: 369, False: 1.09k]
3911
            assert(n == 1);
3912
            ADDOP(c, IMPORT_STAR);
3913
            return 1;
3914
        }
3915
3916
        ADDOP_NAME(c, IMPORT_FROM, alias->name, names);
3917
        store_name = alias->name;
3918
        if (alias->asname)
  Branch (3918:13): [True: 199, False: 1.35k]
3919
            store_name = alias->asname;
3920
3921
        if (!compiler_nameop(c, store_name, Store)) {
  Branch (3921:13): [True: 0, False: 1.55k]
3922
            return 0;
3923
        }
3924
    }
3925
    /* remove imported module */
3926
    ADDOP(c, POP_TOP);
3927
    return 1;
3928
}
3929
3930
static int
3931
compiler_assert(struct compiler *c, stmt_ty s)
3932
{
3933
    basicblock *end;
3934
3935
    /* Always emit a warning if the test is a non-zero length tuple */
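    /* A small sketch of what triggers the warning (illustration only):

           assert (x > 0, "x must be positive")   # parenthesised: a 2-tuple,
                                                  # always true, so warned
           assert x > 0, "x must be positive"     # intended form, no warning

       A non-empty tuple is always truthy, so the first form can never fail.
    */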
3936
    if ((s->v.Assert.test->kind == Tuple_kind &&
  Branch (3936:10): [True: 3, False: 420]
3937
        
        asdl_seq_LEN(s->v.Assert.test->v.Tuple.elts) > 0) ||  3 3
  Branch (3937:9): [True: 3, False: 0]
3938
        (s->v.Assert.test->kind == Constant_kind &&  420 420
  Branch (3938:10): [True: 29, False: 391]
3939
         PyTuple_Check(s->v.Assert.test->v.Constant.value) &&
3940
         PyTuple_Size(s->v.Assert.test->v.Constant.value) > 0))  6
  Branch (3940:10): [True: 6, False: 0]
3941
    {
3942
        if (!compiler_warn(c, "assertion is always true, "
  Branch (3942:13): [True: 6, False: 3]
3943
                              "perhaps remove parentheses?"))
3944
        {
3945
            return 0;
3946
        }
3947
    }
3948
    if (c->c_optimize)
  Branch (3948:9): [True: 74, False: 343]
3949
        return 1;
3950
    end = compiler_new_block(c);
3951
    if (end == NULL)
  Branch (3951:9): [True: 0, False: 343]
3952
        return 0;
3953
    if (!compiler_jump_if(c, s->v.Assert.test, end, 1))
  Branch (3953:9): [True: 0, False: 343]
3954
        return 0;
3955
    ADDOP(c, LOAD_ASSERTION_ERROR);
3956
    if (s->v.Assert.msg) {
  Branch (3956:9): [True: 64, False: 279]
3957
        VISIT(c, expr, s->v.Assert.msg);
3958
        ADDOP_I(c, CALL, 0);
3959
    }
3960
    ADDOP_I(c, RAISE_VARARGS, 1);
3961
    compiler_use_next_block(c, end);
3962
    return 1;
3963
}
3964
3965
static int
3966
compiler_visit_stmt_expr(struct compiler *c, expr_ty value)
3967
{
3968
    if (c->c_interactive && c->c_nestlevel <= 1) {  3.02k
  Branch (3968:9): [True: 3.02k, False: 221k]
  Branch (3968:29): [True: 2.58k, False: 438]
3969
        VISIT(c, expr, value);
3970
        ADDOP(c, PRINT_EXPR);
3971
        return 1;
3972
    }
3973
3974
    if (value->kind == Constant_kind) {
  Branch (3974:9): [True: 1.62k, False: 220k]
3975
        /* ignore constant statement */
3976
        ADDOP(c, NOP);
3977
        return 1;
3978
    }
3979
3980
    VISIT(c, expr, value);
3981
    /* Mark POP_TOP as artificial */
3982
    UNSET_LOC(c);
3983
    ADDOP(c, POP_TOP);
3984
    return 1;
3985
}
3986
3987
static int
3988
compiler_visit_stmt(struct compiler *c, stmt_ty s)
3989
{
3990
    Py_ssize_t i, n;
3991
3992
    /* Always assign a lineno to the next instruction for a stmt. */
3993
    SET_LOC(c, s);
3994
3995
    switch (s->kind) {
  Branch (3995:13): [True: 0, False: 563k]
3996
    case FunctionDef_kind:
  Branch (3996:5): [True: 20.3k, False: 543k]
3997
        return compiler_function(c, s, 0);
3998
    case ClassDef_kind:
  Branch (3998:5): [True: 1.81k, False: 561k]
3999
        return compiler_class(c, s);
4000
    case Return_kind:
  Branch (4000:5): [True: 15.9k, False: 547k]
4001
        return compiler_return(c, s);
4002
    case Delete_kind:
  Branch (4002:5): [True: 477, False: 563k]
4003
        VISIT_SEQ(c, expr, s->v.Delete.targets)
4004
        break;
4005
    case Assign_kind:
  Branch (4005:5): [True: 55.2k, False: 508k]
4006
        n = asdl_seq_LEN(s->v.Assign.targets);
4007
        VISIT(c, expr, s->v.Assign.value);
4008
        for (i = 0; i < n; i++) {  55.6k
  Branch (4008:21): [True: 55.6k, False: 55.1k]
4009
            if (i < n - 1) {
  Branch (4009:17): [True: 492, False: 55.1k]
4010
                ADDOP_I(c, COPY, 1);
4011
            }
4012
            VISIT(c, expr,
4013
                  (expr_ty)asdl_seq_GET(s->v.Assign.targets, i));
4014
        }
4015
        break;
4016
    case AugAssign_kind:
  Branch (4016:5): [True: 1.73k, False: 561k]
4017
        return compiler_augassign(c, s);
4018
    case AnnAssign_kind:
  Branch (4018:5): [True: 945, False: 562k]
4019
        return compiler_annassign(c, s);
4020
    case For_kind:
  Branch (4020:5): [True: 2.93k, False: 560k]
4021
        return compiler_for(c, s);
4022
    case While_kind:
  Branch (4022:5): [True: 808, False: 562k]
4023
        return compiler_while(c, s);
4024
    case If_kind:
  Branch (4024:5): [True: 221k, False: 342k]
4025
        return compiler_if(c, s);
4026
    case Match_kind:
  Branch (4026:5): [True: 376, False: 563k]
4027
        return compiler_match(c, s);
4028
    case Raise_kind:
  Branch (4028:5): [True: 4.04k, False: 559k]
4029
        n = 0;
4030
        if (s->v.Raise.exc) {
  Branch (4030:13): [True: 3.74k, False: 302]
4031
            VISIT(c, expr, s->v.Raise.exc);
4032
            n++;
4033
            if (s->v.Raise.cause) {
  Branch (4033:17): [True: 239, False: 3.50k]
4034
                VISIT(c, expr, s->v.Raise.cause);
4035
                n++;
4036
            }
4037
        }
4038
        ADDOP_I(c, RAISE_VARARGS, (int)n);
4039
        break;
4040
    case Try_kind:
  Branch (4040:5): [True: 2.71k, False: 560k]
4041
        return compiler_try(c, s);
4042
    case TryStar_kind:
  Branch (4042:5): [True: 41, False: 563k]
4043
        return compiler_try_star(c, s);
4044
    case Assert_kind:
  Branch (4044:5): [True: 423, False: 563k]
4045
        return compiler_assert(c, s);
4046
    case Import_kind:
  Branch (4046:5): [True: 3.17k, False: 560k]
4047
        return compiler_import(c, s);
4048
    case ImportFrom_kind:
  Branch (4048:5): [True: 1.46k, False: 562k]
4049
        return compiler_from_import(c, s);
4050
    case Global_kind:
  Branch (4050:5): [True: 102, False: 563k]
4051
    case Nonlocal_kind:
  Branch (4051:5): [True: 60, False: 563k]
4052
        break;
4053
    case Expr_kind:
  Branch (4053:5): [True: 224k, False: 338k]
4054
        return compiler_visit_stmt_expr(c, s->v.Expr.value);
4055
    case Pass_kind:
  Branch (4055:5): [True: 2.29k, False: 561k]
4056
        ADDOP(c, NOP);
4057
        break;
4058
    case Break_kind:
  Branch (4058:5): [True: 816, False: 562k]
4059
        return compiler_break(c);
4060
    case Continue_kind:
  Branch (4060:5): [True: 593, False: 562k]
4061
        return compiler_continue(c);
4062
    case With_kind:
  Branch (4062:5): [True: 806, False: 562k]
4063
        return compiler_with(c, s, 0);
4064
    case AsyncFunctionDef_kind:
  Branch (4064:5): [True: 468, False: 563k]
4065
        return compiler_function(c, s, 1);
4066
    case AsyncWith_kind:
  Branch (4066:5): [True: 47, False: 563k]
4067
        return compiler_async_with(c, s, 0);
4068
    case AsyncFor_kind:
  Branch (4068:5): [True: 36, False: 563k]
4069
        return compiler_async_for(c, s);
4070
    }
4071
4072
    return 1;
4073
}
4074
4075
static int
4076
unaryop(unaryop_ty op)
4077
{
4078
    switch (op) {
4079
    case Invert:
  Branch (4079:5): [True: 73, False: 494]
4080
        return UNARY_INVERT;
4081
    case Not:
  Branch (4081:5): [True: 166, False: 401]
4082
        return UNARY_NOT;
4083
    case UAdd:
  Branch (4083:5): [True: 18, False: 549]
4084
        return UNARY_POSITIVE;
4085
    case USub:
  Branch (4085:5): [True: 310, False: 257]
4086
        return UNARY_NEGATIVE;
4087
    default:
  Branch (4087:5): [True: 0, False: 567]
4088
        PyErr_Format(PyExc_SystemError,
4089
            "unary op %d should not be possible", op);
4090
        return 0;
4091
    }
4092
}
4093
4094
static int
4095
addop_binary(struct compiler *c, operator_ty binop, bool inplace)
4096
{
4097
    int oparg;
4098
    switch (binop) {
4099
        case Add:
  Branch (4099:9): [True: 12.5k, False: 52.7k]
4100
            oparg = inplace ? NB_INPLACE_ADD : NB_ADD;
  Branch (4100:21): [True: 1.18k, False: 11.3k]
4101
            break;
4102
        case Sub:
  Branch (4102:9): [True: 26.7k, False: 38.5k]
4103
            oparg = inplace ? NB_INPLACE_SUBTRACT : NB_SUBTRACT;
  Branch (4103:21): [True: 191, False: 26.5k]
4104
            break;
4105
        case Mult:
  Branch (4105:9): [True: 3.49k, False: 61.8k]
4106
            oparg = inplace ? NB_INPLACE_MULTIPLY : NB_MULTIPLY;
  Branch (4106:21): [True: 44, False: 3.45k]
4107
            break;
4108
        case MatMult:
  Branch (4108:9): [True: 22, False: 65.2k]
4109
            oparg = inplace ? NB_INPLACE_MATRIX_MULTIPLY : NB_MATRIX_MULTIPLY;
  Branch (4109:21): [True: 11, False: 11]
4110
            break;
4111
        case Div:
  Branch (4111:9): [True: 294, False: 65.0k]
4112
            oparg = inplace ? NB_INPLACE_TRUE_DIVIDE : NB_TRUE_DIVIDE;
  Branch (4112:21): [True: 18, False: 276]
4113
            break;
4114
        case Mod:
  Branch (4114:9): [True: 20.3k, False: 44.9k]
4115
            oparg = inplace ? NB_INPLACE_REMAINDER : NB_REMAINDER;
  Branch (4115:21): [True: 13, False: 20.3k]
4116
            break;
4117
        case Pow:
  Branch (4117:9): [True: 225, False: 65.0k]
4118
            oparg = inplace ? NB_INPLACE_POWER : NB_POWER;
  Branch (4118:21): [True: 10, False: 215]
4119
            break;
4120
        case LShift:
  Branch (4120:9): [True: 105, False: 65.1k]
4121
            oparg = inplace ? NB_INPLACE_LSHIFT : NB_LSHIFT;
  Branch (4121:21): [True: 17, False: 88]
4122
            break;
4123
        case RShift:
  Branch (4123:9): [True: 103, False: 65.1k]
4124
            oparg = inplace ? NB_INPLACE_RSHIFT : NB_RSHIFT;
  Branch (4124:21): [True: 18, False: 85]
4125
            break;
4126
        case BitOr:
  Branch (4126:9): [True: 432, False: 64.8k]
4127
            oparg = inplace ? NB_INPLACE_OR : NB_OR;
  Branch (4127:21): [True: 129, False: 303]
4128
            break;
4129
        case BitXor:
  Branch (4129:9): [True: 107, False: 65.1k]
4130
            oparg = inplace ? NB_INPLACE_XOR : NB_XOR;
  Branch (4130:21): [True: 30, False: 77]
4131
            break;
4132
        case BitAnd:
  Branch (4132:9): [True: 617, False: 64.6k]
4133
            oparg = inplace ? NB_INPLACE_AND : NB_AND;
  Branch (4133:21): [True: 37, False: 580]
4134
            break;
4135
        case FloorDiv:
  Branch (4135:9): [True: 267, False: 65.0k]
4136
            oparg = inplace ? NB_INPLACE_FLOOR_DIVIDE : NB_FLOOR_DIVIDE;
  Branch (4136:21): [True: 33, False: 234]
4137
            break;
4138
        default:
  Branch (4138:9): [True: 0, False: 65.2k]
4139
            PyErr_Format(PyExc_SystemError, "%s op %d should not be possible",
4140
                         inplace ? "inplace" : "binary", binop);
  Branch (4140:26): [True: 0, False: 0]
4141
            return 0;
4142
    }
4143
    ADDOP_I(c, BINARY_OP, oparg);
4144
    return 1;
4145
}
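Every operator above lowers to the single BINARY_OP instruction; only the oparg (the NB_* constant, in-place vs. plain) differs. A minimal Python-level sketch of source that exercises both paths (variable names are illustrative):

    x = 2
    x = x + 3   # plain form: BINARY_OP with NB_ADD
    x += 3      # augmented form: BINARY_OP with NB_INPLACE_ADD
    print(x)    # 8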
4146
4147
4148
static int
4149
addop_yield(struct compiler *c) {
4150
    if (c->u->u_ste->ste_generator && c->u->u_ste->ste_coroutine) {
  Branch (4150:9): [True: 1.13k, False: 0]
  Branch (4150:39): [True: 16, False: 1.11k]
4151
        ADDOP(c, ASYNC_GEN_WRAP);
4152
    }
4153
    ADDOP_I(c, YIELD_VALUE, 0);
4154
    ADDOP_I(c, RESUME, 1);
4155
    return 1;
4156
}
4157
4158
static int
4159
compiler_nameop(struct compiler *c, identifier name, expr_context_ty ctx)
4160
{
4161
    int op, scope;
4162
    Py_ssize_t arg;
4163
    enum { OP_FAST, OP_GLOBAL, OP_DEREF, OP_NAME } optype;
4164
4165
    PyObject *dict = c->u->u_names;
4166
    PyObject *mangled;
4167
4168
    assert(!_PyUnicode_EqualToASCIIString(name, "None") &&
4169
           !_PyUnicode_EqualToASCIIString(name, "True") &&
4170
           !_PyUnicode_EqualToASCIIString(name, "False"));
4171
4172
    if (forbidden_name(c, name, ctx))
  Branch (4172:9): [True: 7, False: 860k]
4173
        return 0;
4174
4175
    mangled = _Py_Mangle(c->u->u_private, name);
4176
    if (!mangled)
  Branch (4176:9): [True: 0, False: 860k]
4177
        return 0;
4178
4179
    op = 0;
4180
    optype = OP_NAME;
4181
    scope = _PyST_GetScope(c->u->u_ste, mangled);
4182
    switch (scope) {
4183
    case FREE:
  Branch (4183:5): [True: 3.40k, False: 856k]
4184
        dict = c->u->u_freevars;
4185
        optype = OP_DEREF;
4186
        break;
4187
    case CELL:
  Branch (4187:5): [True: 3.08k, False: 857k]
4188
        dict = c->u->u_cellvars;
4189
        optype = OP_DEREF;
4190
        break;
4191
    case LOCAL:
  Branch (4191:5): [True: 276k, False: 583k]
4192
        if (c->u->u_ste->ste_type == FunctionBlock)
  Branch (4192:13): [True: 204k, False: 72.2k]
4193
            optype = OP_FAST;
4194
        break;
4195
    case GLOBAL_IMPLICIT:
  Branch (4195:5): [True: 570k, False: 290k]
4196
        if (c->u->u_ste->ste_type == FunctionBlock)
  Branch (4196:13): [True: 45.6k, False: 524k]
4197
            optype = OP_GLOBAL;
4198
        break;
4199
    case GLOBAL_EXPLICIT:
  Branch (4199:5): [True: 412, False: 859k]
4200
        optype = OP_GLOBAL;
4201
        break;
4202
    default:
  Branch (4202:5): [True: 6.44k, False: 853k]
4203
        /* scope can be 0 */
4204
        break;
4205
    }
4206
4207
    /* XXX Leave assert here, but handle __doc__ and the like better */
4208
    assert(scope || PyUnicode_READ_CHAR(name, 0) == '_');
4209
4210
    switch (optype) {
  Branch (4210:13): [True: 0, False: 860k]
4211
    case OP_DEREF:
  Branch (4211:5): [True: 6.48k, False: 853k]
4212
        switch (ctx) {
  Branch (4212:17): [True: 0, False: 6.48k]
4213
        case Load:
  Branch (4213:9): [True: 5.45k, False: 1.02k]
4214
            op = (c->u->u_ste->ste_type == ClassBlock) ? LOAD_CLASSDEREF : LOAD_DEREF;
  Branch (4214:18): [True: 5, False: 5.45k]
4215
            break;
4216
        case Store: op = STORE_DEREF; break;
  Branch (4216:9): [True: 1.02k, False: 5.45k]
4217
        case Del: op = DELETE_DEREF; break;
  Branch (4217:9): [True: 0, False: 6.48k]
4218
        }
4219
        break;
4220
    case OP_FAST:
  Branch (4220:5): [True: 204k, False: 655k]
4221
        switch (ctx) {
  Branch (4221:17): [True: 0, False: 204k]
4222
        case Load: op = LOAD_FAST; break;
  Branch (4222:9): [True: 163k, False: 40.5k]
4223
        case Store: op = STORE_FAST; break;
  Branch (4223:9): [True: 39.7k, False: 164k]
4224
        case Del: op = DELETE_FAST; break;
  Branch (4224:9): [True: 829, False: 203k]
4225
        }
4226
        ADDOP_N(c, op, mangled, varnames);
4227
        return 1;
4228
    case OP_GLOBAL:
  Branch (4228:5): [True: 46.0k, False: 814k]
4229
        switch (ctx) {
  Branch (4229:17): [True: 0, False: 46.0k]
4230
        case Load: op = LOAD_GLOBAL; break;
  Branch (4230:9): [True: 45.8k, False: 227]
4231
        case Store: op = STORE_GLOBAL; break;
  Branch (4231:9): [True: 227, False: 45.8k]
4232
        case Del: op = DELETE_GLOBAL; break;
  Branch (4232:9): [True: 0, False: 46.0k]
4233
        }
4234
        break;
4235
    case OP_NAME:
  Branch (4235:5): [True: 603k, False: 257k]
4236
        switch (ctx) {
  Branch (4236:17): [True: 0, False: 603k]
4237
        case Load: op = LOAD_NAME; break;
  Branch (4237:9): [True: 549k, False: 53.3k]
4238
        case Store: op = STORE_NAME; break;
  Branch (4238:9): [True: 52.9k, False: 550k]
4239
        case Del: op = DELETE_NAME; break;
  Branch (4239:9): [True: 330, False: 602k]
4240
        }
4241
        break;
4242
    }
4243
4244
    assert(op);
4245
    arg = compiler_add_o(dict, mangled);
4246
    Py_DECREF(mangled);
4247
    if (arg < 0) {
  Branch (4247:9): [True: 0, False: 655k]
4248
        return 0;
4249
    }
4250
    if (op == LOAD_GLOBAL) {
  Branch (4250:9): [True: 45.8k, False: 609k]
4251
        arg <<= 1;
4252
    }
4253
    return compiler_addop_i(c, op, arg, true);
4254
}
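The scope computed by the symbol table decides which family of name opcodes compiler_nameop emits (OP_FAST, OP_GLOBAL, OP_DEREF or OP_NAME). A small Python sketch of the cases handled above (names are illustrative):

    g = 1                 # module level -> OP_NAME (STORE_NAME / LOAD_NAME)

    def f(a):
        b = a + g         # a, b are locals -> LOAD_FAST/STORE_FAST; g -> LOAD_GLOBAL
        def inner():
            return b      # b is a cell/free variable here -> LOAD_DEREF
        return inner()

    print(f(2))           # 3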
4255
4256
static int
4257
compiler_boolop(struct compiler *c, expr_ty e)
4258
{
4259
    basicblock *end;
4260
    int jumpi;
4261
    Py_ssize_t i, n;
4262
    asdl_expr_seq *s;
4263
4264
    assert(e->kind == BoolOp_kind);
4265
    if (e->v.BoolOp.op == And)
  Branch (4265:9): [True: 575, False: 750]
4266
        jumpi = JUMP_IF_FALSE_OR_POP;
4267
    else
4268
        jumpi = JUMP_IF_TRUE_OR_POP;
4269
    end = compiler_new_block(c);
4270
    if (end == NULL)
  Branch (4270:9): [True: 0, False: 1.32k]
4271
        return 0;
4272
    s = e->v.BoolOp.values;
4273
    n = asdl_seq_LEN(s) - 1;
4274
    assert(n >= 0);
4275
    for (i = 0; i < n; ++i) {
  Branch (4275:17): [True: 1.79k, False: 1.32k]
4276
        VISIT(c, expr, (expr_ty)asdl_seq_GET(s, i));
4277
        ADDOP_JUMP(c, jumpi, end);
4278
        basicblock *next = compiler_new_block(c);
4279
        if (next == NULL) {
  Branch (4279:13): [True: 0, False: 1.79k]
4280
            return 0;
4281
        }
4282
        compiler_use_next_block(c, next);
4283
    }
4284
    VISIT(c, expr, (expr_ty)asdl_seq_GET(s, n));
4285
    compiler_use_next_block(c, end);
4286
    return 1;
4287
}
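compiler_boolop short-circuits a chained and/or: every operand except the last conditionally jumps to the shared end block. A tiny Python example of source that takes this path:

    a, b, c = True, 5, "yes"
    ok = a and b and c        # two JUMP_IF_FALSE_OR_POP jumps to "end"
    x, y = 0, "default"
    pick = x or y             # JUMP_IF_TRUE_OR_POP
    print(ok, pick)           # yes default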
4288
4289
static int
4290
starunpack_helper(struct compiler *c, asdl_expr_seq *elts, int pushed,
4291
                  int build, int add, int extend, int tuple)
4292
{
4293
    Py_ssize_t n = asdl_seq_LEN(elts);
4294
    if (n > 2 && are_all_items_const(elts, 0, n)) {
  Branch (4294:9): [True: 2.56k, False: 27.4k]
  Branch (4294:18): [True: 563, False: 2.00k]
4295
        PyObject *folded = PyTuple_New(n);
4296
        if (folded == NULL) {
  Branch (4296:13): [True: 0, False: 563]
4297
            return 0;
4298
        }
4299
        PyObject *val;
4300
        for (Py_ssize_t i = 0; i < n; i++) {
  Branch (4300:32): [True: 201k, False: 563]
4301
            val = ((expr_ty)asdl_seq_GET(elts, i))->v.Constant.value;
4302
            Py_INCREF(val);
4303
            PyTuple_SET_ITEM(folded, i, val);
4304
        }
4305
        if (tuple && !pushed) {
  Branch (4305:13): [True: 8, False: 555]
  Branch (4305:22): [True: 8, False: 0]
4306
            ADDOP_LOAD_CONST_NEW(c, folded);
4307
        } else {
4308
            if (add == SET_ADD) {
  Branch (4308:17): [True: 14, False: 541]
4309
                Py_SETREF(folded, PyFrozenSet_New(folded));
4310
                if (folded == NULL) {
  Branch (4310:21): [True: 0, False: 14]
4311
                    return 0;
4312
                }
4313
            }
4314
            ADDOP_I(c, build, pushed);
4315
            ADDOP_LOAD_CONST_NEW(c, folded);
4316
            ADDOP_I(c, extend, 1);
4317
            if (tuple) {
  Branch (4317:17): [True: 0, False: 555]
4318
                ADDOP(c, LIST_TO_TUPLE);
4319
            }
4320
        }
4321
        return 1;
4322
    }
4323
4324
    int big = n+pushed > STACK_USE_GUIDELINE;
4325
    int seen_star = 0;
4326
    for (Py_ssize_t i = 0; i < n; i++) {
  Branch (4326:28): [True: 45.4k, False: 28.3k]
4327
        expr_ty elt = asdl_seq_GET(elts, i);
4328
        if (elt->kind == Starred_kind) {
  Branch (4328:13): [True: 1.07k, False: 44.4k]
4329
            seen_star = 1;
4330
            break;
4331
        }
4332
    }
4333
    if (!seen_star && !big) {
  Branch (4333:9): [True: 28.3k, False: 1.07k]
  Branch (4333:23): [True: 28.3k, False: 15]
4334
        for (Py_ssize_t i = 0; i < n; i++) {
  Branch (4334:32): [True: 36.7k, False: 28.3k]
4335
            expr_ty elt = asdl_seq_GET(elts, i);
4336
            VISIT(c, expr, elt);
4337
        }
4338
        if (tuple) {
  Branch (4338:13): [True: 24.9k, False: 3.38k]
4339
            ADDOP_I(c, BUILD_TUPLE, n+pushed);
4340
        } else {
4341
            ADDOP_I(c, build, n+pushed);
4342
        }
4343
        return 1;
4344
    }
4345
    int sequence_built = 0;
4346
    if (big) {
  Branch (4346:9): [True: 15, False: 1.07k]
4347
        ADDOP_I(c, build, pushed);
4348
        sequence_built = 1;
4349
    }
4350
    for (Py_ssize_t i = 0; i < n; i++) {
  Branch (4350:28): [True: 9.02k, False: 1.09k]
4351
        expr_ty elt = asdl_seq_GET(elts, i);
4352
        if (elt->kind == Starred_kind) {
  Branch (4352:13): [True: 1.17k, False: 7.84k]
4353
            if (sequence_built == 0) {
  Branch (4353:17): [True: 1.07k, False: 100]
4354
                ADDOP_I(c, build, i+pushed);
4355
                sequence_built = 1;
4356
            }
4357
            VISIT(c, expr, elt->v.Starred.value);
4358
            ADDOP_I(c, extend, 1);
4359
        }
4360
        else {
4361
            VISIT(c, expr, elt);
4362
            if (sequence_built) {
  Branch (4362:17): [True: 7.06k, False: 783]
4363
                ADDOP_I(c, add, 1);
4364
            }
4365
        }
4366
    }
4367
    assert(sequence_built);
4368
    if (tuple) {
  Branch (4368:9): [True: 1.07k, False: 18]
4369
        ADDOP(c, LIST_TO_TUPLE);
4370
    }
4371
    return 1;
4372
}
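starunpack_helper covers three shapes of a display: an all-constant display (folded into one constant and extended in a single step), a plain display (one BUILD_* with every element on the stack), and a display with starred items or too many elements (built incrementally with the add/extend opcodes, plus LIST_TO_TUPLE for tuples). Illustrative Python source for each shape:

    t = (1, 2, 3, 4)           # all-constant, n > 2: folded into one tuple constant
    small = [10, 20]           # no star, small: a single BUILD_LIST
    merged = [0, *small, 99]   # starred element: BUILD_LIST + LIST_APPEND/LIST_EXTEND
    as_tuple = (*small, *t)    # same, then LIST_TO_TUPLE at the end
    print(merged, as_tuple)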
4373
4374
static int
4375
unpack_helper(struct compiler *c, asdl_expr_seq *elts)
4376
{
4377
    Py_ssize_t n = asdl_seq_LEN(elts);
4378
    int seen_star = 0;
4379
    for (Py_ssize_t i = 0; i < n; i++) {
  Branch (4379:28): [True: 10.6k, False: 2.56k]
4380
        expr_ty elt = asdl_seq_GET(elts, i);
4381
        if (elt->kind == Starred_kind && !seen_star) {
  Branch (4381:13): [True: 34, False: 10.6k]
  Branch (4381:42): [True: 31, False: 3]
4382
            if ((i >= (1 << 8)) ||
  Branch (4382:17): [True: 2, False: 29]
4383
                (n-i-1 >= (INT_MAX >> 8)))
  Branch (4383:17): [True: 0, False: 29]
4384
                return compiler_error(c,
4385
                    "too many expressions in "
4386
                    "star-unpacking assignment");
4387
            ADDOP_I(c, UNPACK_EX, (i + ((n-i-1) << 8)));
4388
            seen_star = 1;
4389
        }
4390
        else if (elt->kind == Starred_kind) {
  Branch (4390:18): [True: 3, False: 10.6k]
4391
            return compiler_error(c,
4392
                "multiple starred expressions in assignment");
4393
        }
4394
    }
4395
    if (!seen_star) {
  Branch (4395:9): [True: 2.54k, False: 26]
4396
        ADDOP_I(c, UNPACK_SEQUENCE, n);
4397
    }
4398
    return 1;
4399
}
4400
4401
static int
4402
assignment_helper(struct compiler *c, asdl_expr_seq *elts)
4403
{
4404
    Py_ssize_t n = asdl_seq_LEN(elts);
4405
    RETURN_IF_FALSE(unpack_helper(c, elts));
4406
    for (Py_ssize_t i = 0; i < n; i++) {
  Branch (4406:28): [True: 9.85k, False: 2.56k]
4407
        expr_ty elt = asdl_seq_GET(elts, i);
4408
        VISIT(c, expr, elt->kind != Starred_kind ? elt : elt->v.Starred.value);
4409
    }
4410
    return 1;
4411
}
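unpack_helper/assignment_helper handle the left-hand side of unpacking assignments: UNPACK_SEQUENCE when there is no starred target, UNPACK_EX (with the before/after counts packed into one oparg) when there is exactly one. For example:

    x, y = 1, 2                      # UNPACK_SEQUENCE 2
    first, *rest, last = range(5)    # UNPACK_EX: one target before and one after the star
    print(x, y, first, rest, last)   # 1 2 0 [1, 2, 3] 4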
4412
4413
static int
4414
compiler_list(struct compiler *c, expr_ty e)
4415
{
4416
    asdl_expr_seq *elts = e->v.List.elts;
4417
    if (e->v.List.ctx == Store) {
  Branch (4417:9): [True: 24, False: 3.85k]
4418
        return assignment_helper(c, elts);
4419
    }
4420
    else if (e->v.List.ctx == Load) {
  Branch (4420:14): [True: 3.84k, False: 3]
4421
        return starunpack_helper(c, elts, 0, BUILD_LIST,
4422
                                 LIST_APPEND, LIST_EXTEND, 0);
4423
    }
4424
    else
4425
        VISIT_SEQ(c, expr, elts);
4426
    return 1;
4427
}
4428
4429
static int
4430
compiler_tuple(struct compiler *c, expr_ty e)
4431
{
4432
    asdl_expr_seq *elts = e->v.Tuple.elts;
4433
    if (e->v.Tuple.ctx == Store) {
  Branch (4433:9): [True: 2.54k, False: 25.3k]
4434
        return assignment_helper(c, elts);
4435
    }
4436
    else if (e->v.Tuple.ctx == Load) {
  Branch (4436:14): [True: 25.3k, False: 10]
4437
        return starunpack_helper(c, elts, 0, BUILD_LIST,
4438
                                 LIST_APPEND, LIST_EXTEND, 1);
4439
    }
4440
    else
4441
        VISIT_SEQ(c, expr, elts);
4442
    return 1;
4443
}
4444
4445
static int
4446
compiler_set(struct compiler *c, expr_ty e)
4447
{
4448
    return starunpack_helper(c, e->v.Set.elts, 0, BUILD_SET,
4449
                             SET_ADD, SET_UPDATE, 0);
4450
}
4451
4452
static int
4453
are_all_items_const(asdl_expr_seq *seq, Py_ssize_t begin, Py_ssize_t end)
4454
{
4455
    Py_ssize_t i;
4456
    for (i = begin; i < end; i++) {
  Branch (4456:21): [True: 205k, False: 900]
4457
        expr_ty key = (expr_ty)asdl_seq_GET(seq, i);
4458
        if (key == NULL || key->kind != Constant_kind)
  Branch (4458:13): [True: 0, False: 205k]
  Branch (4458:28): [True: 2.05k, False: 203k]
4459
            return 0;
4460
    }
4461
    return 1;
4462
}
4463
4464
static int
4465
compiler_subdict(struct compiler *c, expr_ty e, Py_ssize_t begin, Py_ssize_t end)
4466
{
4467
    Py_ssize_t i, n = end - begin;
4468
    PyObject *keys, *key;
4469
    int big = n*2 > STACK_USE_GUIDELINE;
4470
    if (n > 1 && !big && are_all_items_const(e->v.Dict.keys, begin, end)) {
  Branch (4470:9): [True: 4.46k, False: 1.23k]
  Branch (4470:18): [True: 386, False: 4.08k]
  Branch (4470:26): [True: 337, False: 49]
4471
        for (i = begin; i < end; i++) {
  Branch (4471:25): [True: 1.40k, False: 337]
4472
            VISIT(c, expr, (expr_ty)asdl_seq_GET(e->v.Dict.values, i));
4473
        }
4474
        keys = PyTuple_New(n);
4475
        if (keys == NULL) {
  Branch (4475:13): [True: 0, False: 337]
4476
            return 0;
4477
        }
4478
        for (i = begin; i < end; i++) {
  Branch (4478:25): [True: 1.40k, False: 337]
4479
            key = ((expr_ty)asdl_seq_GET(e->v.Dict.keys, i))->v.Constant.value;
4480
            Py_INCREF(key);
4481
            PyTuple_SET_ITEM(keys, i - begin, key);
4482
        }
4483
        ADDOP_LOAD_CONST_NEW(c, keys);
4484
        ADDOP_I(c, BUILD_CONST_KEY_MAP, n);
4485
        return 1;
4486
    }
4487
    if (big) {
  Branch (4487:9): [True: 4.08k, False: 1.28k]
4488
        ADDOP_I(c, BUILD_MAP, 0);
4489
    }
4490
    for (i = begin; i < end; i++) {
  Branch (4490:21): [True: 70.7k, False: 5.36k]
4491
        VISIT(c, expr, (expr_ty)asdl_seq_GET(e->v.Dict.keys, i));
4492
        VISIT(c, expr, (expr_ty)asdl_seq_GET(e->v.Dict.values, i));
4493
        if (big) {
  Branch (4493:13): [True: 69.3k, False: 1.40k]
4494
            ADDOP_I(c, MAP_ADD, 1);
4495
        }
4496
    }
4497
    if (!big) {
  Branch (4497:9): [True: 1.28k, False: 4.08k]
4498
        ADDOP_I(c, BUILD_MAP, n);
4499
    }
4500
    return 1;
4501
}
4502
4503
static int
4504
compiler_dict(struct compiler *c, expr_ty e)
4505
{
4506
    Py_ssize_t i, n, elements;
4507
    int have_dict;
4508
    int is_unpacking = 0;
4509
    n = asdl_seq_LEN(e->v.Dict.values);
4510
    have_dict = 0;
4511
    elements = 0;
4512
    for (i = 0; i < n; i++) {
  Branch (4512:17): [True: 73.2k, False: 2.38k]
4513
        is_unpacking = (expr_ty)asdl_seq_GET(e->v.Dict.keys, i) == NULL;
4514
        if (is_unpacking) {
  Branch (4514:13): [True: 1.05k, False: 72.1k]
4515
            if (elements) {
  Branch (4515:17): [True: 4, False: 1.05k]
4516
                if (!compiler_subdict(c, e, i - elements, i)) {
  Branch (4516:21): [True: 0, False: 4]
4517
                    return 0;
4518
                }
4519
                if (have_dict) {
  Branch (4519:21): [True: 2, False: 2]
4520
                    ADDOP_I(c, DICT_UPDATE, 1);
4521
                }
4522
                have_dict = 1;
4523
                elements = 0;
4524
            }
4525
            if (have_dict == 0) {
  Branch (4525:17): [True: 33, False: 1.02k]
4526
                ADDOP_I(c, BUILD_MAP, 0);
4527
                have_dict = 1;
4528
            }
4529
            VISIT(c, expr, (expr_ty)asdl_seq_GET(e->v.Dict.values, i));
4530
            ADDOP_I(c, DICT_UPDATE, 1);
4531
        }
4532
        else {
4533
            if (elements*2 > STACK_USE_GUIDELINE) {
  Branch (4533:17): [True: 4.07k, False: 68.0k]
4534
                if (!compiler_subdict(c, e, i - elements, i + 1)) {
  Branch (4534:21): [True: 0, False: 4.07k]
4535
                    return 0;
4536
                }
4537
                if (have_dict) {
  Branch (4537:21): [True: 4.04k, False: 25]
4538
                    ADDOP_I(c, DICT_UPDATE, 1);
4539
                }
4540
                have_dict = 1;
4541
                elements = 0;
4542
            }
4543
            else {
4544
                elements++;
4545
            }
4546
        }
4547
    }
4548
    if (elements) {
  Branch (4548:9): [True: 1.62k, False: 758]
4549
        if (!compiler_subdict(c, e, n - elements, n)) {
  Branch (4549:13): [True: 0, False: 1.62k]
4550
            return 0;
4551
        }
4552
        if (have_dict) {
  Branch (4552:13): [True: 28, False: 1.59k]
4553
            ADDOP_I(c, DICT_UPDATE, 1);
4554
        }
4555
        have_dict = 1;
4556
    }
4557
    if (!have_dict) {
  Branch (4557:9): [True: 726, False: 1.65k]
4558
        ADDOP_I(c, BUILD_MAP, 0);
4559
    }
4560
    return 1;
4561
}
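compiler_dict splits a dict display into runs of ordinary key/value pairs (each run emitted by compiler_subdict, as BUILD_CONST_KEY_MAP when all keys are constants) joined with DICT_UPDATE around any ** unpackings. Illustrative Python source:

    base = {"a": 1, "b": 2}               # constant keys -> BUILD_CONST_KEY_MAP
    extra = {"c": 3}
    merged = {**base, "d": 4, **extra}    # BUILD_MAP/DICT_UPDATE for the ** parts
    print(merged)                         # {'a': 1, 'b': 2, 'd': 4, 'c': 3}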
4562
4563
static int
4564
compiler_compare(struct compiler *c, expr_ty e)
4565
{
4566
    Py_ssize_t i, n;
4567
4568
    if (!check_compare(c, e)) {
  Branch (4568:9): [True: 7, False: 17.7k]
4569
        return 0;
4570
    }
4571
    VISIT(c, expr, e->v.Compare.left);
4572
    assert(asdl_seq_LEN(e->v.Compare.ops) > 0);
4573
    n = asdl_seq_LEN(e->v.Compare.ops) - 1;
4574
    if (n == 0) {
  Branch (4574:9): [True: 17.7k, False: 25]
4575
        VISIT(c, expr, (expr_ty)asdl_seq_GET(e->v.Compare.comparators, 0));
4576
        ADDOP_COMPARE(c, asdl_seq_GET(e->v.Compare.ops, 0));
4577
    }
4578
    else {
4579
        basicblock *cleanup = compiler_new_block(c);
4580
        if (cleanup == NULL)
  Branch (4580:13): [True: 0, False: 25]
4581
            return 0;
4582
        for (i = 0; i < n; i++) {
  Branch (4582:21): [True: 123, False: 25]
4583
            VISIT(c, expr,
4584
                (expr_ty)asdl_seq_GET(e->v.Compare.comparators, i));
4585
            ADDOP_I(c, SWAP, 2);
4586
            ADDOP_I(c, COPY, 2);
4587
            ADDOP_COMPARE(c, asdl_seq_GET(e->v.Compare.ops, i));
4588
            ADDOP_JUMP(c, JUMP_IF_FALSE_OR_POP, cleanup);
4589
        }
4590
        VISIT(c, expr, (expr_ty)asdl_seq_GET(e->v.Compare.comparators, n));
4591
        ADDOP_COMPARE(c, asdl_seq_GET(e->v.Compare.ops, n));
4592
        basicblock *end = compiler_new_block(c);
4593
        if (end == NULL)
  Branch (4593:13): [True: 0, False: 25]
4594
            return 0;
4595
        ADDOP_JUMP_NOLINE(c, JUMP, end);
4596
        compiler_use_next_block(c, cleanup);
4597
        ADDOP_I(c, SWAP, 2);
4598
        ADDOP(c, POP_TOP);
4599
        compiler_use_next_block(c, end);
4600
    }
4601
    return 1;
4602
}
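For a chained comparison the middle operands are duplicated with COPY/SWAP so each is evaluated only once, and any failing link jumps to the cleanup block. For example:

    a, b, c = 1, 2, 3
    print(a < b < c)   # chained: b evaluated once, short-circuits via the cleanup block
    print(a < b)       # n == 0 case: a single comparison, no cleanup block needed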
4603
4604
static PyTypeObject *
4605
infer_type(expr_ty e)
4606
{
4607
    switch (e->kind) {
4608
    case Tuple_kind:
  Branch (4608:5): [True: 1.46k, False: 10.8k]
4609
        return &PyTuple_Type;
4610
    case List_kind:
  Branch (4610:5): [True: 40, False: 12.2k]
4611
    case ListComp_kind:
  Branch (4611:5): [True: 6, False: 12.3k]
4612
        return &PyList_Type;
4613
    case Dict_kind:
  Branch (4613:5): [True: 4, False: 12.3k]
4614
    case DictComp_kind:
  Branch (4614:5): [True: 4, False: 12.3k]
4615
        return &PyDict_Type;
4616
    case Set_kind:
  Branch (4616:5): [True: 6, False: 12.3k]
4617
    case SetComp_kind:
  Branch (4617:5): [True: 6, False: 12.3k]
4618
        return &PySet_Type;
4619
    case GeneratorExp_kind:
  Branch (4619:5): [True: 6, False: 12.3k]
4620
        return &PyGen_Type;
4621
    case Lambda_kind:
  Branch (4621:5): [True: 4, False: 12.3k]
4622
        return &PyFunction_Type;
4623
    case JoinedStr_kind:
  Branch (4623:5): [True: 12, False: 12.2k]
4624
    case FormattedValue_kind:
  Branch (4624:5): [True: 0, False: 12.3k]
4625
        return &PyUnicode_Type;
4626
    case Constant_kind:
  Branch (4626:5): [True: 5.34k, False: 6.96k]
4627
        return Py_TYPE(e->v.Constant.value);
4628
    default:
  Branch (4628:5): [True: 5.40k, False: 6.90k]
4629
        return NULL;
4630
    }
4631
}
4632
4633
static int
4634
check_caller(struct compiler *c, expr_ty e)
4635
{
4636
    switch (e->kind) {
4637
    case Constant_kind:
  Branch (4637:5): [True: 18, False: 40.2k]
4638
    case Tuple_kind:
  Branch (4638:5): [True: 2, False: 40.2k]
4639
    case List_kind:
  Branch (4639:5): [True: 2, False: 40.2k]
4640
    case ListComp_kind:
  Branch (4640:5): [True: 2, False: 40.2k]
4641
    case Dict_kind:
  Branch (4641:5): [True: 2, False: 40.2k]
4642
    case DictComp_kind:
  Branch (4642:5): [True: 2, False: 40.2k]
4643
    case Set_kind:
  Branch (4643:5): [True: 2, False: 40.2k]
4644
    case SetComp_kind:
  Branch (4644:5): [True: 2, False: 40.2k]
4645
    case GeneratorExp_kind:
  Branch (4645:5): [True: 2, False: 40.2k]
4646
    case JoinedStr_kind:
  Branch (4646:5): [True: 4, False: 40.2k]
4647
    case FormattedValue_kind:
  Branch (4647:5): [True: 0, False: 40.2k]
4648
        return compiler_warn(c, "'%.200s' object is not callable; "
4649
                                "perhaps you missed a comma?",
4650
                                infer_type(e)->tp_name);
4651
    default:
  Branch (4651:5): [True: 40.1k, False: 38]
4652
        return 1;
4653
    }
4654
}
4655
4656
static int
4657
check_subscripter(struct compiler *c, expr_ty e)
4658
{
4659
    PyObject *v;
4660
4661
    switch (e->kind) {
4662
    case Constant_kind:
  Branch (4662:5): [True: 21, False: 12.1k]
4663
        v = e->v.Constant.value;
4664
        if (!(v == Py_None || v == Py_Ellipsis ||
  Branch (4664:15): [True: 2, False: 19]
  Branch (4664:31): [True: 2, False: 17]
4665
              PyLong_Check(v) || PyFloat_Check(v) || PyComplex_Check(v) ||
4666
              PyAnySet_Check(v)))
4667
        {
4668
            return 1;
4669
        }
4670
        /* fall through */
4671
    case Set_kind:
  Branch (4671:5): [True: 2, False: 12.2k]
4672
    case SetComp_kind:
  Branch (4672:5): [True: 2, False: 12.2k]
4673
    case GeneratorExp_kind:
  Branch (4673:5): [True: 2, False: 12.2k]
4674
    case Lambda_kind:
  Branch (4674:5): [True: 2, False: 12.2k]
4675
        return compiler_warn(c, "'%.200s' object is not subscriptable; "
4676
                                "perhaps you missed a comma?",
4677
                                infer_type(e)->tp_name);
4678
    default:
  Branch (4678:5): [True: 12.1k, False: 29]
4679
        return 1;
4680
    }
4681
}
4682
4683
static int
4684
check_index(struct compiler *c, expr_ty e, expr_ty s)
4685
{
4686
    PyObject *v;
4687
4688
    PyTypeObject *index_type = infer_type(s);
4689
    if (index_type == NULL
  Branch (4689:9): [True: 5.40k, False: 6.79k]
4690
        || PyType_FastSubclass(index_type, Py_TPFLAGS_LONG_SUBCLASS)
4691
        || index_type == &PySlice_Type) {
  Branch (4691:12): [True: 0, False: 1.94k]
4692
        return 1;
4693
    }
4694
4695
    switch (e->kind) {
4696
    case Constant_kind:
  Branch (4696:5): [True: 12, False: 1.93k]
4697
        v = e->v.Constant.value;
4698
        if (!(PyUnicode_Check(v) || PyBytes_Check(v) || PyTuple_Check(v))) {
4699
            return 1;
4700
        }
4701
        /* fall through */
4702
    case Tuple_kind:
  Branch (4702:5): [True: 2, False: 1.94k]
4703
    case List_kind:
  Branch (4703:5): [True: 36, False: 1.91k]
4704
    case ListComp_kind:
  Branch (4704:5): [True: 2, False: 1.94k]
4705
    case JoinedStr_kind:
  Branch (4705:5): [True: 4, False: 1.94k]
4706
    case FormattedValue_kind:
  Branch (4706:5): [True: 0, False: 1.94k]
4707
        return compiler_warn(c, "%.200s indices must be integers or slices, "
4708
                                "not %.200s; "
4709
                                "perhaps you missed a comma?",
4710
                                infer_type(e)->tp_name,
4711
                                index_type->tp_name);
4712
    default:
  Branch (4712:5): [True: 1.89k, False: 56]
4713
        return 1;
4714
    }
4715
}
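check_caller, check_subscripter and check_index emit "perhaps you missed a comma?" SyntaxWarnings when a display is called or subscripted in a way that cannot work. A sketch of how such a warning can be observed from Python (assuming warnings have not been turned into errors):

    import warnings

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        compile("(1, 2) (3)", "<example>", "eval")   # calling a tuple display
    for w in caught:
        print(w.message)   # e.g. 'tuple' object is not callable; perhaps you missed a comma?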
4716
4717
static int
4718
is_import_originated(struct compiler *c, expr_ty e)
4719
{
4720
    /* Check whether the global scope has an import named
4721
     e, if it is a Name object. To avoid traversing the whole
4722
     scope stack every time this function is called, only the
4723
     global scope is checked to determine whether something
4724
     is imported or not. */
4725
4726
    if (e->kind != Name_kind) {
  Branch (4726:9): [True: 6.71k, False: 24.9k]
4727
        return 0;
4728
    }
4729
4730
    long flags = _PyST_GetSymbol(c->c_st->st_top, e->v.Name.id);
4731
    return flags & DEF_IMPORT;
4732
}
4733
4734
static void
4735
update_location_to_match_attr(struct compiler *c, expr_ty meth)
4736
{
4737
    if (meth->lineno != meth->end_lineno) {
  Branch (4737:9): [True: 208, False: 53.4k]
4738
        // Make start location match attribute
4739
        c->u->u_loc.lineno = meth->end_lineno;
4740
        c->u->u_loc.col_offset = meth->end_col_offset - (int)PyUnicode_GetLength(meth->v.Attribute.attr)-1;
4741
    }
4742
}
4743
4744
// Return 1 if the method call was optimized, -1 if not, and 0 on error.
4745
static int
4746
maybe_optimize_method_call(struct compiler *c, expr_ty e)
4747
{
4748
    Py_ssize_t argsl, i, kwdsl;
4749
    expr_ty meth = e->v.Call.func;
4750
    asdl_expr_seq *args = e->v.Call.args;
4751
    asdl_keyword_seq *kwds = e->v.Call.keywords;
4752
4753
    /* Check that the call node is an attribute access */
4754
    if (meth->kind != Attribute_kind || meth->v.Attribute.ctx != Load) {
  Branch (4754:9): [True: 35.3k, False: 31.6k]
  Branch (4754:41): [True: 0, False: 31.6k]
4755
        return -1;
4756
    }
4757
4758
    /* Check that the base object is not something that is imported */
4759
    if (is_import_originated(c, meth->v.Attribute.value)) {
  Branch (4759:9): [True: 4.37k, False: 27.2k]
4760
        return -1;
4761
    }
4762
4763
    /* Check that there aren't too many arguments */
4764
    argsl = asdl_seq_LEN(args);
4765
    kwdsl = asdl_seq_LEN(kwds);
4766
    if (argsl + kwdsl + (kwdsl != 0) >= STACK_USE_GUIDELINE) {
  Branch (4766:9): [True: 3, False: 27.2k]
4767
        return -1;
4768
    }
4769
    /* Check that there are no *varargs types of arguments. */
4770
    for (i = 0; i < argsl; i++) {
  Branch (4770:17): [True: 30.9k, False: 26.9k]
4771
        expr_ty elt = asdl_seq_GET(args, i);
4772
        if (elt->kind == Starred_kind) {
  Branch (4772:13): [True: 341, False: 30.6k]
4773
            return -1;
4774
        }
4775
    }
4776
4777
    for (i = 0; i < kwdsl; i++) {
  Branch (4777:17): [True: 2.13k, False: 26.8k]
4778
        keyword_ty kw = asdl_seq_GET(kwds, i);
4779
        if (kw->arg == NULL) {
  Branch (4779:13): [True: 113, False: 2.02k]
4780
            return -1;
4781
        }
4782
    }
4783
    /* Alright, we can optimize the code. */
4784
    VISIT(c, expr, meth->v.Attribute.value);
4785
    SET_LOC(c, meth);
4786
    update_location_to_match_attr(c, meth);
4787
    ADDOP_NAME(c, LOAD_METHOD, meth->v.Attribute.attr, names);
4788
    VISIT_SEQ(c, expr, e->v.Call.args);
4789
4790
    if (kwdsl) {
  Branch (4790:9): [True: 1.22k, False: 25.6k]
4791
        VISIT_SEQ(c, keyword, kwds);
4792
        if (!compiler_call_simple_kw_helper(c, kwds, kwdsl)) {
  Branch (4792:13): [True: 0, False: 1.22k]
4793
            return 0;
4794
        };
4795
    }
4796
    SET_LOC(c, e);
4797
    update_location_to_match_attr(c, meth);
4798
    ADDOP_I(c, CALL, argsl + kwdsl);
4799
    return 1;
4800
}
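The LOAD_METHOD fast path applies only to plain obj.method(...) calls: no starred or double-starred arguments, not too many of them, and a base object that does not come from a top-level import. Illustrative Python source:

    import os

    s = "a,b"
    parts = s.split(",")    # plain attribute call -> LOAD_METHOD + CALL fast path
    cwd = os.getcwd()       # base is an imported module -> falls back to the generic call
    args = [","]
    more = s.split(*args)   # starred argument -> also not optimized here
    print(parts, more, bool(cwd))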
4801
4802
static int
4803
validate_keywords(struct compiler *c, asdl_keyword_seq *keywords)
4804
{
4805
    Py_ssize_t nkeywords = asdl_seq_LEN(keywords);
4806
    for (Py_ssize_t i = 0; i < nkeywords; i++) {
  Branch (4806:28): [True: 11.7k, False: 109k]
4807
        keyword_ty key = ((keyword_ty)asdl_seq_GET(keywords, i));
4808
        if (key->arg == NULL) {
  Branch (4808:13): [True: 1.23k, False: 10.4k]
4809
            continue;
4810
        }
4811
        if (forbidden_name(c, key->arg, Store)) {
  Branch (4811:13): [True: 1, False: 10.4k]
4812
            return -1;
4813
        }
4814
        for (Py_ssize_t j = i + 1; j < nkeywords; j++) {
  Branch (4814:36): [True: 120k, False: 10.4k]
4815
            keyword_ty other = ((keyword_ty)asdl_seq_GET(keywords, j));
4816
            if (other->arg && !PyUnicode_Compare(key->arg, other->arg)) {
  Branch (4816:17): [True: 120k, False: 468]
  Branch (4816:31): [True: 5, False: 120k]
4817
                SET_LOC(c, other);
4818
                compiler_error(c, "keyword argument repeated: %U", key->arg);
4819
                return -1;
4820
            }
4821
        }
4822
    }
4823
    return 0;
4824
}
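validate_keywords rejects a keyword that is spelled twice in the same call; the check runs at compile time. For example:

    try:
        compile("f(x=1, x=2)", "<example>", "eval")
    except SyntaxError as err:
        print(err)    # keyword argument repeated: x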
4825
4826
static int
4827
compiler_call(struct compiler *c, expr_ty e)
4828
{
4829
    if (validate_keywords(c, e->v.Call.keywords) == -1) {
  Branch (4829:9): [True: 5, False: 67.0k]
4830
        return 0;
4831
    }
4832
    int ret = maybe_optimize_method_call(c, e);
4833
    if (ret >= 0) {
  Branch (4833:9): [True: 26.8k, False: 40.2k]
4834
        return ret;
4835
    }
4836
    if (!check_caller(c, e->v.Call.func)) {
  Branch (4836:9): [True: 19, False: 40.2k]
4837
        return 0;
4838
    }
4839
    SET_LOC(c, e->v.Call.func);
4840
    ADDOP(c, PUSH_NULL);
4841
    SET_LOC(c, e);
4842
    VISIT(c, expr, e->v.Call.func);
4843
    return compiler_call_helper(c, 0,
4844
                                e->v.Call.args,
4845
                                e->v.Call.keywords);
4846
}
4847
4848
static int
4849
compiler_joined_str(struct compiler *c, expr_ty e)
4850
{
4851
4852
    Py_ssize_t value_count = asdl_seq_LEN(e->v.JoinedStr.values);
4853
    if (value_count > STACK_USE_GUIDELINE) {
  Branch (4853:9): [True: 15, False: 6.61k]
4854
        _Py_DECLARE_STR(empty, "");
4855
        ADDOP_LOAD_CONST_NEW(c, &_Py_STR(empty));
4856
        ADDOP_NAME(c, LOAD_METHOD, &_Py_ID(join), names);
4857
        ADDOP_I(c, BUILD_LIST, 0);
4858
        for (Py_ssize_t i = 0; i < asdl_seq_LEN(e->v.JoinedStr.values); i++) {
  Branch (4858:32): [True: 138k, False: 15]
4859
            VISIT(c, expr, asdl_seq_GET(e->v.JoinedStr.values, i));
4860
            ADDOP_I(c, LIST_APPEND, 1);
4861
        }
4862
        ADDOP_I(c, CALL, 1);
4863
    }
4864
    else {
4865
        VISIT_SEQ(c, expr, e->v.JoinedStr.values);
4866
        if (asdl_seq_LEN(e->v.JoinedStr.values) != 1) {
  Branch (4866:13): [True: 2.37k, False: 4.24k]
4867
            ADDOP_I(c, BUILD_STRING, asdl_seq_LEN(e->v.JoinedStr.values));
4868
        }
4869
    }
4870
    return 1;
4871
}
4872
4873
/* Used to implement f-strings. Format a single value. */
4874
static int
4875
compiler_formatted_value(struct compiler *c, expr_ty e)
4876
{
4877
    /* Our oparg encodes 2 pieces of information: the conversion
4878
       character, and whether or not a format_spec was provided.
4879
4880
       Convert the conversion char to 3 bits:
4881
           : 000  0x0  FVC_NONE   The default if nothing specified.
4882
       !s  : 001  0x1  FVC_STR
4883
       !r  : 010  0x2  FVC_REPR
4884
       !a  : 011  0x3  FVC_ASCII
4885
4886
       next bit is whether or not we have a format spec:
4887
       yes : 100  0x4
4888
       no  : 000  0x0
4889
    */
4890
4891
    int conversion = e->v.FormattedValue.conversion;
4892
    int oparg;
4893
4894
    /* The expression to be formatted. */
4895
    VISIT(c, expr, e->v.FormattedValue.value);
4896
4897
    switch (conversion) {
4898
    case 's': oparg = FVC_STR;   break;
  Branch (4898:5): [True: 2.52k, False: 75.0k]
4899
    case 'r': oparg = FVC_REPR;  break;
  Branch (4899:5): [True: 3.27k, False: 74.2k]
4900
    case 'a': oparg = FVC_ASCII; break;
  Branch (4900:5): [True: 1.34k, False: 76.2k]
4901
    case -1:  oparg = FVC_NONE;  break;
  Branch (4901:5): [True: 70.4k, False: 7.14k]
4902
    default:
  Branch (4902:5): [True: 0, False: 77.5k]
4903
        PyErr_Format(PyExc_SystemError,
4904
                     "Unrecognized conversion character %d", conversion);
4905
        return 0;
4906
    }
4907
    if (e->v.FormattedValue.format_spec) {
  Branch (4907:9): [True: 3.79k, False: 73.7k]
4908
        /* Evaluate the format spec, and update our opcode arg. */
4909
        VISIT(c, expr, e->v.FormattedValue.format_spec);
4910
        oparg |= FVS_HAVE_SPEC;
4911
    }
4912
4913
    /* And push our opcode and oparg */
4914
    ADDOP_I(c, FORMAT_VALUE, oparg);
4915
4916
    return 1;
4917
}
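Each combination of conversion character and format spec maps to one FORMAT_VALUE oparg, as described in the comment above. A small f-string example covering the cases:

    value = 3.14159
    print(f"{value}")        # FVC_NONE
    print(f"{value!r}")      # FVC_REPR
    print(f"{value:.2f}")    # FVC_NONE | FVS_HAVE_SPEC
    print(f"{value!s:>12}")  # FVC_STR  | FVS_HAVE_SPEC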
4918
4919
static int
4920
compiler_subkwargs(struct compiler *c, asdl_keyword_seq *keywords, Py_ssize_t begin, Py_ssize_t end)
4921
{
4922
    Py_ssize_t i, n = end - begin;
4923
    keyword_ty kw;
4924
    PyObject *keys, *key;
4925
    assert(n > 0);
4926
    int big = n*2 > STACK_USE_GUIDELINE;
4927
    if (n > 1 && !big) {
  Branch (4927:9): [True: 71, False: 140]
  Branch (4927:18): [True: 67, False: 4]
4928
        for (i = begin; i < end; i++) {
  Branch (4928:25): [True: 217, False: 67]
4929
            kw = asdl_seq_GET(keywords, i);
4930
            VISIT(c, expr, kw->value);
4931
        }
4932
        keys = PyTuple_New(n);
4933
        if (keys == NULL) {
  Branch (4933:13): [True: 0, False: 67]
4934
            return 0;
4935
        }
4936
        for (i = begin; i < end; i++) {
  Branch (4936:25): [True: 217, False: 67]
4937
            key = ((keyword_ty) asdl_seq_GET(keywords, i))->arg;
4938
            Py_INCREF(key);
4939
            PyTuple_SET_ITEM(keys, i - begin, key);
4940
        }
4941
        ADDOP_LOAD_CONST_NEW(c, keys);
4942
        ADDOP_I(c, BUILD_CONST_KEY_MAP, n);
4943
        return 1;
4944
    }
4945
    if (big) {
  Branch (4945:9): [True: 4, False: 140]
4946
        ADDOP_I_NOLINE(c, BUILD_MAP, 0);
4947
    }
4948
    for (i = begin; i < end; i++) {
  Branch (4948:21): [True: 660, False: 144]
4949
        kw = asdl_seq_GET(keywords, i);
4950
        ADDOP_LOAD_CONST(c, kw->arg);
4951
        VISIT(c, expr, kw->value);
4952
        if (big) {
  Branch (4952:13): [True: 520, False: 140]
4953
            ADDOP_I_NOLINE(c, MAP_ADD, 1);
4954
        }
4955
    }
4956
    if (!big) {
  Branch (4956:9): [True: 140, False: 4]
4957
        ADDOP_I(c, BUILD_MAP, n);
4958
    }
4959
    return 1;
4960
}
4961
4962
/* Used by compiler_call_helper and maybe_optimize_method_call to emit
4963
 * KW_NAMES before CALL.
4964
 * Returns 1 on success, 0 on error.
4965
 */
4966
static int
4967
compiler_call_simple_kw_helper(struct compiler *c,
4968
                               asdl_keyword_seq *keywords,
4969
                               Py_ssize_t nkwelts)
4970
{
4971
    PyObject *names;
4972
    names = PyTuple_New(nkwelts);
4973
    if (names == NULL) {
  Branch (4973:9): [True: 0, False: 3.11k]
4974
        return 0;
4975
    }
4976
    for (int i = 0; i < nkwelts; i++) {
  Branch (4976:21): [True: 5.38k, False: 3.11k]
4977
        keyword_ty kw = asdl_seq_GET(keywords, i);
4978
        Py_INCREF(kw->arg);
4979
        PyTuple_SET_ITEM(names, i, kw->arg);
4980
    }
4981
    Py_ssize_t arg = compiler_add_const(c, names);
4982
    if (arg < 0) {
  Branch (4982:9): [True: 0, False: 3.11k]
4983
        return 0;
4984
    }
4985
    Py_DECREF(names);
4986
    ADDOP_I(c, KW_NAMES, arg);
4987
    return 1;
4988
}
4989
4990
4991
/* shared code between compiler_call and compiler_class */
4992
static int
4993
compiler_call_helper(struct compiler *c,
4994
                     int n, /* Args already pushed */
4995
                     asdl_expr_seq *args,
4996
                     asdl_keyword_seq *keywords)
4997
{
4998
    Py_ssize_t i, nseen, nelts, nkwelts;
4999
5000
    if (validate_keywords(c, keywords) == -1) {
  Branch (5000:9): [True: 1, False: 42.0k]
5001
        return 0;
5002
    }
5003
5004
    nelts = asdl_seq_LEN(args);
5005
    nkwelts = asdl_seq_LEN(keywords);
5006
5007
    if (nelts + nkwelts*2 > STACK_USE_GUIDELINE) {
  Branch (5007:9): [True: 11, False: 41.9k]
5008
         goto ex_call;
5009
    }
5010
    for (i = 0; i < nelts; i++) {
  Branch (5010:17): [True: 51.1k, False: 41.1k]
5011
        expr_ty elt = asdl_seq_GET(args, i);
5012
        if (elt->kind == Starred_kind) {
  Branch (5012:13): [True: 827, False: 50.3k]
5013
            goto ex_call;
5014
        }
5015
    }
5016
    for (i = 0; i < nkwelts; i++) {
  Branch (5016:17): [True: 3.87k, False: 40.8k]
5017
        keyword_ty kw = asdl_seq_GET(keywords, i);
5018
        if (kw->arg == NULL) {
  Branch (5018:13): [True: 288, False: 3.58k]
5019
            goto ex_call;
5020
        }
5021
    }
5022
5023
    /* No * or ** args, so can use faster calling sequence */
5024
    for (i = 0; i < nelts; i++) {
  Branch (5024:17): [True: 49.5k, False: 40.8k]
5025
        expr_ty elt = asdl_seq_GET(args, i);
5026
        assert(elt->kind != Starred_kind);
5027
        VISIT(c, expr, elt);
5028
    }
5029
    if (nkwelts) {
  Branch (5029:9): [True: 1.88k, False: 38.9k]
5030
        VISIT_SEQ(c, keyword, keywords);
5031
        if (!compiler_call_simple_kw_helper(c, keywords, nkwelts)) {
  Branch (5031:13): [True: 0, False: 1.88k]
5032
            return 0;
5033
        };
5034
    }
5035
    ADDOP_I(c, CALL, n + nelts + nkwelts);
5036
    return 1;
5037
5038
ex_call:
5039
5040
    /* Do positional arguments. */
5041
    if (n ==0 && nelts == 1 && ((expr_ty)asdl_seq_GET(args, 0))->kind == Starred_kind) {
  Branch (5041:9): [True: 1.12k, False: 6]
  Branch (5041:18): [True: 562, False: 558]
  Branch (5041:32): [True: 470, False: 92]
5042
        VISIT(c, expr, ((expr_ty)asdl_seq_GET(args, 0))->v.Starred.value);
5043
    }
5044
    else if (starunpack_helper(c, args, n, BUILD_LIST,
  Branch (5044:14): [True: 0, False: 656]
5045
                                 LIST_APPEND, LIST_EXTEND, 1) == 0) {
5046
        return 0;
5047
    }
5048
    /* Then keyword arguments */
5049
    if (nkwelts) {
  Branch (5049:9): [True: 685, False: 441]
5050
        /* Has a new dict been pushed */
5051
        int have_dict = 0;
5052
5053
        nseen = 0;  /* the number of keyword arguments on the stack following */
5054
        for (i = 0; i < nkwelts; i++) {
  Branch (5054:21): [True: 1.49k, False: 685]
5055
            keyword_ty kw = asdl_seq_GET(keywords, i);
5056
            if (kw->arg == NULL) {
  Branch (5056:17): [True: 621, False: 877]
5057
                /* A keyword argument unpacking. */
5058
                if (nseen) {
  Branch (5058:21): [True: 127, False: 494]
5059
                    if (!compiler_subkwargs(c, keywords, i - nseen, i)) {
  Branch (5059:25): [True: 0, False: 127]
5060
                        return 0;
5061
                    }
5062
                    if (have_dict) {
  Branch (5062:25): [True: 2, False: 125]
5063
                        ADDOP_I(c, DICT_MERGE, 1);
5064
                    }
5065
                    have_dict = 1;
5066
                    nseen = 0;
5067
                }
5068
                if (!have_dict) {
  Branch (5068:21): [True: 483, False: 138]
5069
                    ADDOP_I(c, BUILD_MAP, 0);
5070
                    have_dict = 1;
5071
                }
5072
                VISIT(c, expr, kw->value);
5073
                ADDOP_I(c, DICT_MERGE, 1);
5074
            }
5075
            else {
5076
                nseen++;
5077
            }
5078
        }
5079
        if (nseen) {
  Branch (5079:13): [True: 84, False: 601]
5080
            /* Pack up any trailing keyword arguments. */
5081
            if (!compiler_subkwargs(c, keywords, nkwelts - nseen, nkwelts)) {
  Branch (5081:17): [True: 0, False: 84]
5082
                return 0;
5083
            }
5084
            if (have_dict) {
  Branch (5084:17): [True: 7, False: 77]
5085
                ADDOP_I(c, DICT_MERGE, 1);
5086
            }
5087
            have_dict = 1;
5088
        }
5089
        assert(have_dict);
5090
    }
5091
    ADDOP_I(c, CALL_FUNCTION_EX, nkwelts > 0);
5092
    return 1;
5093
}
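compiler_call_helper uses the fast CALL/KW_NAMES sequence when there are no * or ** arguments and the argument count stays under STACK_USE_GUIDELINE; otherwise it falls back to building the argument tuple and keyword dict for CALL_FUNCTION_EX. For example:

    def f(*args, **kwargs):
        return args, kwargs

    print(f(1, 2, key=3))            # fast path: CALL (with KW_NAMES for "key")
    print(f(*[1, 2], **{"key": 3}))  # unpacking: CALL_FUNCTION_EX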
5094
5095
5096
/* List and set comprehensions and generator expressions work by creating a
5097
  nested function to perform the actual iteration. This means that the
5098
  iteration variables don't leak into the current scope.
5099
  The defined function is called immediately following its definition, with the
5100
  result of that call being the result of the expression.
5101
  The LC/SC version returns the populated container, while the GE version is
5102
  flagged in symtable.c as a generator, so it returns the generator object
5103
  when the function is called.
5104
5105
  Possible cleanups:
5106
    - iterate over the generator sequence instead of using recursion
5107
*/
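A rough Python-level picture of that desugaring for a list comprehension (the helper name and its parameter are illustrative, not the names the compiler actually generates):

    # squares = [x * x for x in data]  is compiled roughly like:
    def _listcomp(outermost_iter):
        result = []
        for x in outermost_iter:
            result.append(x * x)
        return result

    data = [1, 2, 3]
    squares = _listcomp(iter(data))   # outermost iterable is passed in as an iterator
    print(squares)                    # [1, 4, 9]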
5108
5109
5110
static int
5111
compiler_comprehension_generator(struct compiler *c,
5112
                                 asdl_comprehension_seq *generators, int gen_index,
5113
                                 int depth,
5114
                                 expr_ty elt, expr_ty val, int type)
5115
{
5116
    comprehension_ty gen;
5117
    gen = (comprehension_ty)asdl_seq_GET(generators, gen_index);
5118
    if (gen->is_async) {
  Branch (5118:9): [True: 20, False: 1.22k]
5119
        return compiler_async_comprehension_generator(
5120
            c, generators, gen_index, depth, elt, val, type);
5121
    } else {
5122
        return compiler_sync_comprehension_generator(
5123
            c, generators, gen_index, depth, elt, val, type);
5124
    }
5125
}
5126
5127
static int
5128
compiler_sync_comprehension_generator(struct compiler *c,
5129
                                      asdl_comprehension_seq *generators, int gen_index,
5130
                                      int depth,
5131
                                      expr_ty elt, expr_ty val, int type)
5132
{
5133
    /* generate code for the iterator, then each of the ifs,
5134
       and then write to the element */
5135
5136
    comprehension_ty gen;
5137
    basicblock *start, *anchor, *if_cleanup;
5138
    Py_ssize_t i, n;
5139
5140
    start = compiler_new_block(c);
5141
    if_cleanup = compiler_new_block(c);
5142
    anchor = compiler_new_block(c);
5143
5144
    if (start == NULL || if_cleanup == NULL || anchor == NULL) {
  Branch (5144:9): [True: 0, False: 1.22k]
  Branch (5144:26): [True: 0, False: 1.22k]
  Branch (5144:48): [True: 0, False: 1.22k]
5145
        return 0;
5146
    }
5147
5148
    gen = (comprehension_ty)asdl_seq_GET(generators, gen_index);
5149
5150
    if (gen_index == 0) {
  Branch (5150:9): [True: 1.16k, False: 65]
5151
        /* Receive outermost iter as an implicit argument */
5152
        c->u->u_argcount = 1;
5153
        ADDOP_I(c, LOAD_FAST, 0);
5154
    }
5155
    else {
5156
        /* Sub-iter - calculate on the fly */
5157
        /* Fast path for the temporary variable assignment idiom:
5158
             for y in [f(x)]
5159
         */
5160
        asdl_expr_seq *elts;
5161
        switch (gen->iter->kind) {
5162
            case List_kind:
  Branch (5162:13): [True: 0, False: 65]
5163
                elts = gen->iter->v.List.elts;
5164
                break;
5165
            case Tuple_kind:
  Branch (5165:13): [True: 12, False: 53]
5166
                elts = gen->iter->v.Tuple.elts;
5167
                break;
5168
            default:
  Branch (5168:13): [True: 53, False: 12]
5169
                elts = NULL;
5170
        }
5171
        if (asdl_seq_LEN(elts) == 1) {
  Branch (5171:13): [True: 12, False: 53]
5172
            expr_ty elt = asdl_seq_GET(elts, 0);
5173
            if (elt->kind != Starred_kind) {
  Branch (5173:17): [True: 12, False: 0]
5174
                VISIT(c, expr, elt);
5175
                start = NULL;
5176
            }
5177
        }
5178
        if (start) {
  Branch (5178:13): [True: 53, False: 12]
5179
            VISIT(c, expr, gen->iter);
5180
            ADDOP(c, GET_ITER);
5181
        }
5182
    }
5183
    if (start) {
  Branch (5183:9): [True: 1.21k, False: 12]
5184
        depth++;
5185
        compiler_use_next_block(c, start);
5186
        ADDOP_JUMP(c, FOR_ITER, anchor);
5187
    }
5188
    VISIT(c, expr, gen->target);
5189
5190
    /* XXX this needs to be cleaned up...a lot! */
5191
    n = asdl_seq_LEN(gen->ifs);
5192
    for (i = 0; i < n; i++) {
  Branch (5192:17): [True: 244, False: 1.22k]
5193
        expr_ty e = (expr_ty)asdl_seq_GET(gen->ifs, i);
5194
        if (!compiler_jump_if(c, e, if_cleanup, 0))
  Branch (5194:13): [True: 0, False: 244]
5195
            return 0;
5196
    }
5197
5198
    if (++gen_index < asdl_seq_LEN(generators))
  Branch (5198:9): [True: 65, False: 1.16k]
5199
        if (!compiler_comprehension_generator(c,
  Branch (5199:13): [True: 0, False: 65]
5200
                                              generators, gen_index, depth,
5201
                                              elt, val, type))
5202
        return 0;
5203
5204
    /* only append after the last for generator */
5205
    if (gen_index >= asdl_seq_LEN(generators)) {
  Branch (5205:9): [True: 1.16k, False: 65]
5206
        /* comprehension specific code */
5207
        switch (type) {
5208
        case COMP_GENEXP:
  Branch (5208:9): [True: 501, False: 659]
5209
            VISIT(c, expr, elt);
5210
            ADDOP_YIELD(c);
5211
            ADDOP(c, POP_TOP);
5212
            break;
5213
        case COMP_LISTCOMP:
  Branch (5213:9): [True: 546, False: 614]
5214
            VISIT(c, expr, elt);
5215
            ADDOP_I(c, LIST_APPEND, depth + 1);
5216
            break;
5217
        case COMP_SETCOMP:
  Branch (5217:9): [True: 59, False: 1.10k]
5218
            VISIT(c, expr, elt);
5219
            ADDOP_I(c, SET_ADD, depth + 1);
5220
            break;
5221
        case COMP_DICTCOMP:
  Branch (5221:9): [True: 54, False: 1.10k]
5222
            /* With '{k: v}', k is evaluated before v, so we do
5223
               the same. */
5224
            VISIT(c, expr, elt);
5225
            VISIT(c, expr, val);
5226
            ADDOP_I(c, MAP_ADD, depth + 1);
5227
            break;
5228
        default:
  Branch (5228:9): [True: 0, False: 1.16k]
5229
            return 0;
5230
        }
5231
    }
5232
    compiler_use_next_block(c, if_cleanup);
5233
    if (start) {
  Branch (5233:9): [True: 1.21k, False: 12]
5234
        ADDOP_JUMP(c, JUMP, start);
5235
        compiler_use_next_block(c, anchor);
5236
    }
5237
5238
    return 1;
5239
}
5240
5241
static int
5242
compiler_async_comprehension_generator(struct compiler *c,
5243
                                      asdl_comprehension_seq *generators, int gen_index,
5244
                                      int depth,
5245
                                      expr_ty elt, expr_ty val, int type)
5246
{
5247
    comprehension_ty gen;
5248
    basicblock *start, *if_cleanup, *except;
5249
    Py_ssize_t i, n;
5250
    start = compiler_new_block(c);
5251
    except = compiler_new_block(c);
5252
    if_cleanup = compiler_new_block(c);
5253
5254
    if (start == NULL || if_cleanup == NULL || except == NULL) {
  Branch (5254:9): [True: 0, False: 20]
  Branch (5254:26): [True: 0, False: 20]
  Branch (5254:48): [True: 0, False: 20]
5255
        return 0;
5256
    }
5257
5258
    gen = (comprehension_ty)asdl_seq_GET(generators, gen_index);
5259
5260
    if (gen_index == 0) {
  Branch (5260:9): [True: 18, False: 2]
5261
        /* Receive outermost iter as an implicit argument */
5262
        c->u->u_argcount = 1;
5263
        ADDOP_I(c, LOAD_FAST, 0);
5264
    }
5265
    else {
5266
        /* Sub-iter - calculate on the fly */
5267
        VISIT(c, expr, gen->iter);
5268
        ADDOP(c, GET_AITER);
5269
    }
5270
5271
    compiler_use_next_block(c, start);
5272
    /* Runtime will push a block here, so we need to account for that */
5273
    if (!compiler_push_fblock(c, ASYNC_COMPREHENSION_GENERATOR, start,
  Branch (5273:9): [True: 0, False: 20]
5274
                              NULL, NULL)) {
5275
        return 0;
5276
    }
5277
5278
    ADDOP_JUMP(c, SETUP_FINALLY, except);
5279
    ADDOP(c, GET_ANEXT);
5280
    ADDOP_LOAD_CONST(c, Py_None);
5281
    ADD_YIELD_FROM(c, 1);
5282
    ADDOP(c, POP_BLOCK);
5283
    VISIT(c, expr, gen->target);
5284
5285
    n = asdl_seq_LEN(gen->ifs);
5286
    for (i = 0; i < n; i++) {
  Branch (5286:17): [True: 0, False: 20]
5287
        expr_ty e = (expr_ty)asdl_seq_GET(gen->ifs, i);
5288
        if (!compiler_jump_if(c, e, if_cleanup, 0))
  Branch (5288:13): [True: 0, False: 0]
5289
            return 0;
5290
    }
5291
5292
    depth++;
5293
    if (++gen_index < asdl_seq_LEN(generators))
  Branch (5293:9): [True: 2, False: 18]
5294
        if (!compiler_comprehension_generator(c,
  Branch (5294:13): [True: 0, False: 2]
5295
                                              generators, gen_index, depth,
5296
                                              elt, val, type))
5297
        return 0;
5298
5299
    /* only append after the last for generator */
5300
    if (gen_index >= asdl_seq_LEN(generators)) {
  Branch (5300:9): [True: 18, False: 2]
5301
        /* comprehension specific code */
5302
        switch (type) {
5303
        case COMP_GENEXP:
  Branch (5303:9): [True: 2, False: 16]
5304
            VISIT(c, expr, elt);
5305
            ADDOP_YIELD(c);
5306
            ADDOP(c, POP_TOP);
5307
            break;
5308
        case COMP_LISTCOMP:
  Branch (5308:9): [True: 10, False: 8]
5309
            VISIT(c, expr, elt);
5310
            ADDOP_I(c, LIST_APPEND, depth + 1);
5311
            break;
5312
        case COMP_SETCOMP:
  Branch (5312:9): [True: 4, False: 14]
5313
            VISIT(c, expr, elt);
5314
            ADDOP_I(c, SET_ADD, depth + 1);
5315
            break;
5316
        case COMP_DICTCOMP:
  Branch (5316:9): [True: 2, False: 16]
5317
            /* With '{k: v}', k is evaluated before v, so we do
5318
               the same. */
5319
            VISIT(c, expr, elt);
5320
            VISIT(c, expr, val);
5321
            ADDOP_I(c, MAP_ADD, depth + 1);
5322
            break;
5323
        default:
  Branch (5323:9): [True: 0, False: 18]
5324
            return 0;
5325
        }
5326
    }
5327
    compiler_use_next_block(c, if_cleanup);
5328
    ADDOP_JUMP(c, JUMP, start);
5329
5330
    compiler_pop_fblock(c, ASYNC_COMPREHENSION_GENERATOR, start);
5331
5332
    compiler_use_next_block(c, except);
5333
    //UNSET_LOC(c);
5334
5335
    ADDOP(c, END_ASYNC_FOR);
5336
5337
    return 1;
5338
}
5339
5340
static int
5341
compiler_comprehension(struct compiler *c, expr_ty e, int type,
5342
                       identifier name, asdl_comprehension_seq *generators, expr_ty elt,
5343
                       expr_ty val)
5344
{
5345
    PyCodeObject *co = NULL;
5346
    comprehension_ty outermost;
5347
    PyObject *qualname = NULL;
5348
    int scope_type = c->u->u_scope_type;
5349
    int is_async_generator = 0;
5350
    int is_top_level_await = IS_TOP_LEVEL_AWAIT(c);
5351
5352
    outermost = (comprehension_ty) asdl_seq_GET(generators, 0);
5353
    if (!compiler_enter_scope(c, name, COMPILER_SCOPE_COMPREHENSION,
  Branch (5353:9): [True: 0, False: 1.22k]
5354
                              (void *)e, e->lineno))
5355
    {
5356
        goto error;
5357
    }
5358
    SET_LOC(c, e);
5359
5360
    is_async_generator = c->u->u_ste->ste_coroutine;
5361
5362
    if (is_async_generator && type != COMP_GENEXP &&
  Branch (5362:9): [True: 60, False: 1.16k]
  Branch (5362:31): [True: 58, False: 2]
5363
        scope_type != COMPILER_SCOPE_ASYNC_FUNCTION &&
  Branch (5363:9): [True: 56, False: 2]
5364
        scope_type != COMPILER_SCOPE_COMPREHENSION &&
  Branch (5364:9): [True: 56, False: 0]
5365
        !is_top_level_await)
  Branch (5365:9): [True: 42, False: 14]
5366
    {
5367
        compiler_error(c, "asynchronous comprehension outside of "
5368
                          "an asynchronous function");
5369
        goto error_in_scope;
5370
    }
5371
5372
    if (type != COMP_GENEXP) {
  Branch (5372:9): [True: 675, False: 503]
5373
        int op;
5374
        switch (type) {
5375
        case COMP_LISTCOMP:
  Branch (5375:9): [True: 556, False: 119]
5376
            op = BUILD_LIST;
5377
            break;
5378
        case COMP_SETCOMP:
  Branch (5378:9): [True: 63, False: 612]
5379
            op = BUILD_SET;
5380
            break;
5381
        case COMP_DICTCOMP:
  Branch (5381:9): [True: 56, False: 619]
5382
            op = BUILD_MAP;
5383
            break;
5384
        default:
  Branch (5384:9): [True: 0, False: 675]
5385
            PyErr_Format(PyExc_SystemError,
5386
                         "unknown comprehension type %d", type);
5387
            goto error_in_scope;
5388
        }
5389
5390
        ADDOP_I(c, op, 0);
5391
    }
5392
5393
    if (!compiler_comprehension_generator(c, generators, 0, 0, elt,
  Branch (5393:9): [True: 0, False: 1.17k]
5394
                                          val, type))
5395
        goto error_in_scope;
5396
5397
    if (type != COMP_GENEXP) {
  Branch (5397:9): [True: 675, False: 503]
5398
        ADDOP(c, RETURN_VALUE);
5399
    }
5400
5401
    co = assemble(c, 1);
5402
    qualname = c->u->u_qualname;
5403
    Py_INCREF(qualname);
5404
    compiler_exit_scope(c);
5405
    if (is_top_level_await && is_async_generator){
  Branch (5405:9): [True: 26, False: 1.15k]
  Branch (5405:31): [True: 16, False: 10]
5406
        c->u->u_ste->ste_coroutine = 1;
5407
    }
5408
    if (co == NULL)
  Branch (5408:9): [True: 0, False: 1.17k]
5409
        goto error;
5410
5411
    if (!compiler_make_closure(c, co, 0, qualname)) {
  Branch (5411:9): [True: 0, False: 1.17k]
5412
        goto error;
5413
    }
5414
    Py_DECREF(qualname);
5415
    Py_DECREF(co);
5416
5417
    VISIT(c, expr, outermost->iter);
5418
5419
    if (outermost->is_async) {
  Branch (5419:9): [True: 18, False: 1.15k]
5420
        ADDOP(c, GET_AITER);
5421
    } else {
5422
        ADDOP(c, GET_ITER);
5423
    }
5424
5425
    ADDOP_I(c, CALL, 0);
5426
5427
    if (is_async_generator && type != COMP_GENEXP) {
  Branch (5427:9): [True: 18, False: 1.15k]
  Branch (5427:31): [True: 16, False: 2]
5428
        ADDOP_I(c, GET_AWAITABLE, 0);
5429
        ADDOP_LOAD_CONST(c, Py_None);
5430
        ADD_YIELD_FROM(c, 1);
5431
    }
5432
5433
    return 1;
5434
error_in_scope:
5435
    compiler_exit_scope(c);
5436
error:
5437
    Py_XDECREF(qualname);
5438
    Py_XDECREF(co);
5439
    return 0;
5440
}
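A quick way to see the shape of what compiler_comprehension() emits is to disassemble a comprehension from Python. Hypothetical, minimal sketch using the standard dis module; opcode names vary between CPython versions, but the structure (a nested code object made into a function and then called with the already-evaluated outermost iterable) matches the code above.

import dis

# The nested <genexpr> code object is created, turned into a function,
# and called with iter(data) (GET_ITER then a zero-argument call, per the
# code above); dis recursively disassembles the nested code object.
dis.dis(compile("(x * 2 for x in data)", "<example>", "eval"))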
5441
5442
static int
5443
compiler_genexp(struct compiler *c, expr_ty e)
5444
{
5445
    assert(e->kind == GeneratorExp_kind);
5446
    _Py_DECLARE_STR(anon_genexpr, "<genexpr>");
5447
    return compiler_comprehension(c, e, COMP_GENEXP, &_Py_STR(anon_genexpr),
5448
                                  e->v.GeneratorExp.generators,
5449
                                  e->v.GeneratorExp.elt, NULL);
5450
}
5451
5452
static int
5453
compiler_listcomp(struct compiler *c, expr_ty e)
5454
{
5455
    assert(e->kind == ListComp_kind);
5456
    _Py_DECLARE_STR(anon_listcomp, "<listcomp>");
5457
    return compiler_comprehension(c, e, COMP_LISTCOMP, &_Py_STR(anon_listcomp),
5458
                                  e->v.ListComp.generators,
5459
                                  e->v.ListComp.elt, NULL);
5460
}
5461
5462
static int
5463
compiler_setcomp(struct compiler *c, expr_ty e)
5464
{
5465
    assert(e->kind == SetComp_kind);
5466
    _Py_DECLARE_STR(anon_setcomp, "<setcomp>");
5467
    return compiler_comprehension(c, e, COMP_SETCOMP, &_Py_STR(anon_setcomp),
5468
                                  e->v.SetComp.generators,
5469
                                  e->v.SetComp.elt, NULL);
5470
}
5471
5472
5473
static int
5474
compiler_dictcomp(struct compiler *c, expr_ty e)
5475
{
5476
    assert(e->kind == DictComp_kind);
5477
    _Py_DECLARE_STR(anon_dictcomp, "<dictcomp>");
5478
    return compiler_comprehension(c, e, COMP_DICTCOMP, &_Py_STR(anon_dictcomp),
5479
                                  e->v.DictComp.generators,
5480
                                  e->v.DictComp.key, e->v.DictComp.value);
5481
}
5482
5483
5484
static int
5485
compiler_visit_keyword(struct compiler *c, keyword_ty k)
5486
{
5487
    VISIT(c, expr, k->value);
5488
    return 1;
5489
}
5490
5491
5492
static int
5493
compiler_with_except_finish(struct compiler *c, basicblock * cleanup) {
5494
    UNSET_LOC(c);
5495
    basicblock *suppress = compiler_new_block(c);
5496
    if (suppress == NULL) {
  Branch (5496:9): [True: 0, False: 870]
5497
        return 0;
5498
    }
5499
    ADDOP_JUMP(c, POP_JUMP_IF_TRUE, suppress);
5500
    ADDOP_I(c, RERAISE, 2);
5501
    compiler_use_next_block(c, suppress);
5502
    ADDOP(c, POP_TOP); /* exc_value */
5503
    ADDOP(c, POP_BLOCK);
5504
    ADDOP(c, POP_EXCEPT);
5505
    ADDOP(c, POP_TOP);
5506
    ADDOP(c, POP_TOP);
5507
    basicblock *exit = compiler_new_block(c);
5508
    if (exit == NULL) {
  Branch (5508:9): [True: 0, False: 870]
5509
        return 0;
5510
    }
5511
    ADDOP_JUMP(c, JUMP, exit);
5512
    compiler_use_next_block(c, cleanup);
5513
    POP_EXCEPT_AND_RERAISE(c);
5514
    compiler_use_next_block(c, exit);
5515
    return 1;
5516
}
5517
5518
/*
5519
   Implements the async with statement.
5520
5521
   The semantics outlined in PEP 492 are as follows:
5522
5523
   async with EXPR as VAR:
5524
       BLOCK
5525
5526
   It is implemented roughly as:
5527
5528
   context = EXPR
5529
   exit = context.__aexit__  # not calling it
5530
   value = await context.__aenter__()
5531
   try:
5532
       VAR = value  # if VAR present in the syntax
5533
       BLOCK
5534
   finally:
5535
       if an exception was raised:
5536
           exc = copy of (exception, instance, traceback)
5537
       else:
5538
           exc = (None, None, None)
5539
       if not (await exit(*exc)):
5540
           raise
5541
 */
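As a rough check of the expansion above, the awaits around __aenter__/__aexit__ can be seen in a disassembly. Hypothetical, minimal sketch; the exact instruction sequence depends on the CPython version.

import dis

src = """
async def use(cm):
    async with cm as value:
        return value
"""
# Look for BEFORE_ASYNC_WITH and the GET_AWAITABLE sequences emitted by
# compiler_async_with() below.
dis.dis(compile(src, "<example>", "exec"))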
5542
static int
5543
compiler_async_with(struct compiler *c, stmt_ty s, int pos)
5544
{
5545
    basicblock *block, *final, *exit, *cleanup;
5546
    withitem_ty item = asdl_seq_GET(s->v.AsyncWith.items, pos);
5547
5548
    assert(s->kind == AsyncWith_kind);
5549
    if (IS_TOP_LEVEL_AWAIT(c)){
5550
        c->u->u_ste->ste_coroutine = 1;
5551
    } else if (c->u->u_scope_type != COMPILER_SCOPE_ASYNC_FUNCTION){
  Branch (5551:16): [True: 8, False: 40]
5552
        return compiler_error(c, "'async with' outside async function");
5553
    }
5554
5555
    block = compiler_new_block(c);
5556
    final = compiler_new_block(c);
5557
    exit = compiler_new_block(c);
5558
    cleanup = compiler_new_block(c);
5559
    if (!block || !final || !exit || !cleanup)
  Branch (5559:9): [True: 0, False: 42]
  Branch (5559:19): [True: 0, False: 42]
  Branch (5559:29): [True: 0, False: 42]
  Branch (5559:38): [True: 0, False: 42]
5560
        return 0;
5561
5562
    /* Evaluate EXPR */
5563
    VISIT(c, expr, item->context_expr);
5564
5565
    ADDOP(c, BEFORE_ASYNC_WITH);
5566
    ADDOP_I(c, GET_AWAITABLE, 1);
5567
    ADDOP_LOAD_CONST(c, Py_None);
5568
    ADD_YIELD_FROM(c, 1);
5569
5570
    ADDOP_JUMP(c, SETUP_WITH, final);
5571
5572
    /* SETUP_WITH pushes a finally block. */
5573
    compiler_use_next_block(c, block);
5574
    if (!compiler_push_fblock(c, ASYNC_WITH, block, final, s)) {
  Branch (5574:9): [True: 0, False: 42]
5575
        return 0;
5576
    }
5577
5578
    if (item->optional_vars) {
  Branch (5578:9): [True: 18, False: 24]
5579
        VISIT(c, expr, item->optional_vars);
5580
    }
5581
    else {
5582
    /* Discard result from context.__aenter__() */
5583
        ADDOP(c, POP_TOP);
5584
    }
5585
5586
    pos++;
5587
    if (pos == asdl_seq_LEN(s->v.AsyncWith.items))
  Branch (5587:9): [True: 39, False: 3]
5588
        /* BLOCK code */
5589
        VISIT_SEQ(c, stmt, s->v.AsyncWith.body)
5590
    else if (!compiler_async_with(c, s, pos))
  Branch (5590:14): [True: 0, False: 3]
5591
            return 0;
5592
5593
    compiler_pop_fblock(c, ASYNC_WITH, block);
5594
    ADDOP(c, POP_BLOCK);
5595
    /* End of body; start the cleanup */
5596
5597
    /* For successful outcome:
5598
     * call __exit__(None, None, None)
5599
     */
5600
    SET_LOC(c, s);
5601
    if(!compiler_call_exit_with_nones(c))
  Branch (5601:8): [True: 0, False: 42]
5602
        return 0;
5603
    ADDOP_I(c, GET_AWAITABLE, 2);
5604
    ADDOP_LOAD_CONST(c, Py_None);
5605
    ADD_YIELD_FROM(c, 1);
5606
5607
    ADDOP(c, POP_TOP);
5608
5609
    ADDOP_JUMP(c, JUMP, exit);
5610
5611
    /* For exceptional outcome: */
5612
    compiler_use_next_block(c, final);
5613
5614
    ADDOP_JUMP(c, SETUP_CLEANUP, cleanup);
5615
    ADDOP(c, PUSH_EXC_INFO);
5616
    ADDOP(c, WITH_EXCEPT_START);
5617
    ADDOP_I(c, GET_AWAITABLE, 2);
5618
    ADDOP_LOAD_CONST(c, Py_None);
5619
    ADD_YIELD_FROM(c, 1);
5620
    compiler_with_except_finish(c, cleanup);
5621
5622
    compiler_use_next_block(c, exit);
5623
    return 1;
5624
}
5625
5626
5627
/*
5628
   Implements the with statement from PEP 343.
5629
   with EXPR as VAR:
5630
       BLOCK
5631
   is implemented as:
5632
        <code for EXPR>
5633
        SETUP_WITH  E
5634
        <code to store to VAR> or POP_TOP
5635
        <code for BLOCK>
5636
        LOAD_CONST (None, None, None)
5637
        CALL_FUNCTION_EX 0
5638
        JUMP  EXIT
5639
    E:  WITH_EXCEPT_START (calls EXPR.__exit__)
5640
        POP_JUMP_IF_TRUE T:
5641
        RERAISE
5642
    T:  POP_TOP (remove exception from stack)
5643
        POP_EXCEPT
5644
        POP_TOP
5645
    EXIT:
5646
 */
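A minimal sketch for comparison with the scheme above: disassembling a small with block shows the enter/exit handling (BEFORE_WITH and WITH_EXCEPT_START, as emitted by compiler_with() below); the surrounding instructions differ between CPython releases.

import dis

def use(cm):
    with cm as f:
        return f.read()

dis.dis(use)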
5647
5648
static int
5649
compiler_with(struct compiler *c, stmt_ty s, int pos)
5650
{
5651
    basicblock *block, *final, *exit, *cleanup;
5652
    withitem_ty item = asdl_seq_GET(s->v.With.items, pos);
5653
5654
    assert(s->kind == With_kind);
5655
5656
    block = compiler_new_block(c);
5657
    final = compiler_new_block(c);
5658
    exit = compiler_new_block(c);
5659
    cleanup = compiler_new_block(c);
5660
    if (!block || !final || !exit || !cleanup)
  Branch (5660:9): [True: 0, False: 828]
  Branch (5660:19): [True: 0, False: 828]
  Branch (5660:29): [True: 0, False: 828]
  Branch (5660:38): [True: 0, False: 828]
5661
        return 0;
5662
5663
    /* Evaluate EXPR */
5664
    VISIT(c, expr, item->context_expr);
5665
    /* Will push bound __exit__ */
5666
    ADDOP(c, BEFORE_WITH);
5667
    ADDOP_JUMP(c, SETUP_WITH, final);
5668
5669
    /* SETUP_WITH pushes a finally block. */
5670
    compiler_use_next_block(c, block);
5671
    if (!compiler_push_fblock(c, WITH, block, final, s)) {
  Branch (5671:9): [True: 0, False: 828]
5672
        return 0;
5673
    }
5674
5675
    if (item->optional_vars) {
  Branch (5675:9): [True: 379, False: 449]
5676
        VISIT(c, expr, item->optional_vars);
5677
    }
5678
    else {
5679
    /* Discard result from context.__enter__() */
5680
        ADDOP(c, POP_TOP);
5681
    }
5682
5683
    pos++;
5684
    if (pos == asdl_seq_LEN(s->v.With.items))
  Branch (5684:9): [True: 806, False: 22]
5685
        /* BLOCK code */
5686
        VISIT_SEQ(c, stmt, s->v.With.body)
5687
    else if (!compiler_with(c, s, pos))
  Branch (5687:14): [True: 0, False: 22]
5688
            return 0;
5689
5690
5691
    /* Mark all following code as artificial */
5692
    UNSET_LOC(c);
5693
    ADDOP(c, POP_BLOCK);
5694
    compiler_pop_fblock(c, WITH, block);
5695
5696
    /* End of body; start the cleanup. */
5697
5698
    /* For successful outcome:
5699
     * call __exit__(None, None, None)
5700
     */
5701
    SET_LOC(c, s);
5702
    if (!compiler_call_exit_with_nones(c))
  Branch (5702:9): [True: 0, False: 828]
5703
        return 0;
5704
    ADDOP(c, POP_TOP);
5705
    ADDOP_JUMP(c, JUMP, exit);
5706
5707
    /* For exceptional outcome: */
5708
    compiler_use_next_block(c, final);
5709
5710
    ADDOP_JUMP(c, SETUP_CLEANUP, cleanup);
5711
    ADDOP(c, PUSH_EXC_INFO);
5712
    ADDOP(c, WITH_EXCEPT_START);
5713
    compiler_with_except_finish(c, cleanup);
5714
5715
    compiler_use_next_block(c, exit);
5716
    return 1;
5717
}
5718
5719
static int
5720
compiler_visit_expr1(struct compiler *c, expr_ty e)
5721
{
5722
    switch (e->kind) {
  Branch (5722:13): [True: 0, False: 1.47M]
5723
    case NamedExpr_kind:
  Branch (5723:5): [True: 240, False: 1.47M]
5724
        VISIT(c, expr, e->v.NamedExpr.value);
5725
        ADDOP_I(c, COPY, 1);
5726
        VISIT(c, expr, e->v.NamedExpr.target);
5727
        break;
5728
    case BoolOp_kind:
  Branch (5728:5): [True: 1.32k, False: 1.47M]
5729
        return compiler_boolop(c, e);
5730
    case BinOp_kind:
  Branch (5730:5): [True: 63.5k, False: 1.41M]
5731
        VISIT(c, expr, e->v.BinOp.left);
5732
        VISIT(c, expr, e->v.BinOp.right);
5733
        ADDOP_BINARY(c, e->v.BinOp.op);
5734
        break;
5735
    case UnaryOp_kind:
  Branch (5735:5): [True: 567, False: 1.47M]
5736
        VISIT(c, expr, e->v.UnaryOp.operand);
5737
        ADDOP(c, unaryop(e->v.UnaryOp.op));
5738
        break;
5739
    case Lambda_kind:
  Branch (5739:5): [True: 845, False: 1.47M]
5740
        return compiler_lambda(c, e);
5741
    case IfExp_kind:
  Branch (5741:5): [True: 565, False: 1.47M]
5742
        return compiler_ifexp(c, e);
5743
    case Dict_kind:
  Branch (5743:5): [True: 2.38k, False: 1.47M]
5744
        return compiler_dict(c, e);
5745
    case Set_kind:
  Branch (5745:5): [True: 164, False: 1.47M]
5746
        return compiler_set(c, e);
5747
    case GeneratorExp_kind:
  Branch (5747:5): [True: 503, False: 1.47M]
5748
        return compiler_genexp(c, e);
5749
    case ListComp_kind:
  Branch (5749:5): [True: 590, False: 1.47M]
5750
        return compiler_listcomp(c, e);
5751
    case SetComp_kind:
  Branch (5751:5): [True: 68, False: 1.47M]
5752
        return compiler_setcomp(c, e);
5753
    case DictComp_kind:
  Branch (5753:5): [True: 59, False: 1.47M]
5754
        return compiler_dictcomp(c, e);
5755
    case Yield_kind:
  Branch (5755:5): [True: 645, False: 1.47M]
5756
        if (c->u->u_ste->ste_type != FunctionBlock)
  Branch (5756:13): [True: 15, False: 630]
5757
            return compiler_error(c, "'yield' outside function");
5758
        if (e->v.Yield.value) {
  Branch (5758:13): [True: 585, False: 45]
5759
            VISIT(c, expr, e->v.Yield.value);
5760
        }
5761
        else {
5762
            ADDOP_LOAD_CONST(c, Py_None);
5763
        }
5764
        ADDOP_YIELD(c);
5765
        break;
5766
    case YieldFrom_kind:
  Branch (5766:5): [True: 114, False: 1.47M]
5767
        if (c->u->u_ste->ste_type != FunctionBlock)
  Branch (5767:13): [True: 3, False: 111]
5768
            return compiler_error(c, "'yield' outside function");
5769
5770
        if (c->u->u_scope_type == COMPILER_SCOPE_ASYNC_FUNCTION)
  Branch (5770:13): [True: 3, False: 108]
5771
            return compiler_error(c, "'yield from' inside async function");
5772
5773
        VISIT(c, expr, e->v.YieldFrom.value);
5774
        ADDOP(c, GET_YIELD_FROM_ITER);
5775
        ADDOP_LOAD_CONST(c, Py_None);
5776
        ADD_YIELD_FROM(c, 0);
5777
        break;
5778
    case Await_kind:
  Branch (5778:5): [True: 71, False: 1.47M]
5779
        if (!IS_TOP_LEVEL_AWAIT(c)){
5780
            if (c->u->u_ste->ste_type != FunctionBlock){
  Branch (5780:17): [True: 11, False: 58]
5781
                return compiler_error(c, "'await' outside function");
5782
            }
5783
5784
            if (c->u->u_scope_type != COMPILER_SCOPE_ASYNC_FUNCTION &&
  Branch (5784:17): [True: 16, False: 42]
5785
                    c->u->u_scope_type != COMPILER_SCOPE_COMPREHENSION){
  Branch (5785:21): [True: 14, False: 2]
5786
                return compiler_error(c, "'await' outside async function");
5787
            }
5788
        }
5789
5790
        VISIT(c, expr, e->v.Await.value);
5791
        ADDOP_I(c, GET_AWAITABLE, 0);
5792
        ADDOP_LOAD_CONST(c, Py_None);
5793
        ADD_YIELD_FROM(c, 1);
5794
        break;
5795
    case Compare_kind:
  Branch (5795:5): [True: 17.7k, False: 1.45M]
5796
        return compiler_compare(c, e);
5797
    case Call_kind:
  Branch (5797:5): [True: 67.0k, False: 1.40M]
5798
        return compiler_call(c, e);
5799
    case Constant_kind:
  Branch (5799:5): [True: 319k, False: 1.15M]
5800
        ADDOP_LOAD_CONST(c, e->v.Constant.value);
5801
        break;
5802
    case JoinedStr_kind:
  Branch (5802:5): [True: 6.63k, False: 1.46M]
5803
        return compiler_joined_str(c, e);
5804
    case FormattedValue_kind:
  Branch (5804:5): [True: 77.5k, False: 1.39M]
5805
        return compiler_formatted_value(c, e);
5806
    /* The following exprs can be assignment targets. */
5807
    case Attribute_kind:
  Branch (5807:5): [True: 46.2k, False: 1.42M]
5808
        VISIT(c, expr, e->v.Attribute.value);
5809
        switch (e->v.Attribute.ctx) {
  Branch (5809:17): [True: 0, False: 46.2k]
5810
        case Load:
  Branch (5810:9): [True: 39.5k, False: 6.67k]
5811
        {
5812
            int old_lineno = c->u->u_loc.lineno;
5813
            c->u->u_loc.lineno = e->end_lineno;
5814
            ADDOP_NAME(c, LOAD_ATTR, e->v.Attribute.attr, names);
5815
            c->u->u_loc.lineno = old_lineno;
5816
            break;
5817
        }
5818
        case Store:
  Branch (5818:9): [True: 6.62k, False: 39.6k]
5819
            if (forbidden_name(c, e->v.Attribute.attr, e->v.Attribute.ctx)) {
  Branch (5819:17): [True: 1, False: 6.62k]
5820
                return 0;
5821
            }
5822
            int old_lineno = c->u->u_loc.lineno;
5823
            c->u->u_loc.lineno = e->end_lineno;
5824
            ADDOP_NAME(c, STORE_ATTR, e->v.Attribute.attr, names);
5825
            c->u->u_loc.lineno = old_lineno;
5826
            break;
5827
        case Del:
  Branch (5827:9): [True: 49, False: 46.2k]
5828
            ADDOP_NAME(c, DELETE_ATTR, e->v.Attribute.attr, names);
5829
            break;
5830
        }
5831
        break;
5832
    case Subscript_kind:
  Branch (5832:5): [True: 14.9k, False: 1.45M]
5833
        return compiler_subscript(c, e);
5834
    case Starred_kind:
  Branch (5834:5): [True: 4, False: 1.47M]
5835
        switch (e->v.Starred.ctx) {
5836
        case Store:
  Branch (5836:9): [True: 1, False: 3]
5837
            /* In all legitimate cases, the Starred node was already replaced
5838
             * by compiler_list/compiler_tuple. XXX: is that okay? */
5839
            return compiler_error(c,
5840
                "starred assignment target must be in a list or tuple");
5841
        default:
  Branch (5841:9): [True: 3, False: 1]
5842
            return compiler_error(c,
5843
                "can't use starred expression here");
5844
        }
5845
        break;
5846
    case Slice_kind:
  Branch (5846:5): [True: 136, False: 1.47M]
5847
    {
5848
        int n = compiler_slice(c, e);
5849
        if (n == 0) {
  Branch (5849:13): [True: 0, False: 136]
5850
            return 0;
5851
        }
5852
        ADDOP_I(c, BUILD_SLICE, n);
5853
        break;
5854
    }
5855
    case Name_kind:
  Branch (5855:5): [True: 820k, False: 653k]
5856
        return compiler_nameop(c, e->v.Name.id, e->v.Name.ctx);
5857
    /* child nodes of List and Tuple will have expr_context set */
5858
    case List_kind:
  Branch (5858:5): [True: 3.87k, False: 1.47M]
5859
        return compiler_list(c, e);
5860
    case Tuple_kind:
  Branch (5860:5): [True: 27.9k, False: 1.44M]
5861
        return compiler_tuple(c, e);
5862
    }
5863
    return 1;
5864
}
5865
5866
static int
5867
compiler_visit_expr(struct compiler *c, expr_ty e)
5868
{
5869
    struct location old_loc = c->u->u_loc;
5870
    SET_LOC(c, e);
5871
    int res = compiler_visit_expr1(c, e);
5872
    c->u->u_loc = old_loc;
5873
    return res;
5874
}
5875
5876
static bool
5877
is_two_element_slice(expr_ty s)
5878
{
5879
    return s->kind == Slice_kind &&
  Branch (5879:12): [True: 1.95k, False: 13.0k]
5880
           s->v.Slice.step == NULL;
  Branch (5880:12): [True: 1.91k, False: 37]
5881
}
5882
5883
static int
5884
compiler_augassign(struct compiler *c, stmt_ty s)
5885
{
5886
    assert(s->kind == AugAssign_kind);
5887
    expr_ty e = s->v.AugAssign.target;
5888
5889
    struct location old_loc = c->u->u_loc;
5890
    SET_LOC(c, e);
5891
5892
    switch (e->kind) {
5893
    case Attribute_kind:
  Branch (5893:5): [True: 224, False: 1.50k]
5894
        VISIT(c, expr, e->v.Attribute.value);
5895
        ADDOP_I(c, COPY, 1);
5896
        int old_lineno = c->u->u_loc.lineno;
5897
        c->u->u_loc.lineno = e->end_lineno;
5898
        ADDOP_NAME(c, LOAD_ATTR, e->v.Attribute.attr, names);
5899
        c->u->u_loc.lineno = old_lineno;
5900
        break;
5901
    case Subscript_kind:
  Branch (5901:5): [True: 56, False: 1.67k]
5902
        VISIT(c, expr, e->v.Subscript.value);
5903
        if (is_two_element_slice(e->v.Subscript.slice)) {
  Branch (5903:13): [True: 4, False: 52]
5904
            if (!compiler_slice(c, e->v.Subscript.slice)) {
  Branch (5904:17): [True: 0, False: 4]
5905
                return 0;
5906
            }
5907
            ADDOP_I(c, COPY, 3);
5908
            ADDOP_I(c, COPY, 3);
5909
            ADDOP_I(c, COPY, 3);
5910
            ADDOP(c, BINARY_SLICE);
5911
        }
5912
        else {
5913
            VISIT(c, expr, e->v.Subscript.slice);
5914
            ADDOP_I(c, COPY, 2);
5915
            ADDOP_I(c, COPY, 2);
5916
            ADDOP(c, BINARY_SUBSCR);
5917
        }
5918
        break;
5919
    case Name_kind:
  Branch (5919:5): [True: 1.45k, False: 280]
5920
        if (!compiler_nameop(c, e->v.Name.id, Load))
  Branch (5920:13): [True: 0, False: 1.45k]
5921
            return 0;
5922
        break;
5923
    default:
  Branch (5923:5): [True: 0, False: 1.73k]
5924
        PyErr_Format(PyExc_SystemError,
5925
            "invalid node type (%d) for augmented assignment",
5926
            e->kind);
5927
        return 0;
5928
    }
5929
5930
    c->u->u_loc = old_loc;
5931
5932
    VISIT(c, expr, s->v.AugAssign.value);
5933
    ADDOP_INPLACE(c, s->v.AugAssign.op);
5934
5935
    SET_LOC(c, e);
5936
5937
    switch (e->kind) {
5938
    case Attribute_kind:
  Branch (5938:5): [True: 224, False: 1.50k]
5939
        c->u->u_loc.lineno = e->end_lineno;
5940
        ADDOP_I(c, SWAP, 2);
5941
        ADDOP_NAME(c, STORE_ATTR, e->v.Attribute.attr, names);
5942
        break;
5943
    case Subscript_kind:
  Branch (5943:5): [True: 56, False: 1.67k]
5944
        if (is_two_element_slice(e->v.Subscript.slice)) {
  Branch (5944:13): [True: 4, False: 52]
5945
            ADDOP_I(c, SWAP, 4);
5946
            ADDOP_I(c, SWAP, 3);
5947
            ADDOP_I(c, SWAP, 2);
5948
            ADDOP(c, STORE_SLICE);
5949
        }
5950
        else {
5951
            ADDOP_I(c, SWAP, 3);
5952
            ADDOP_I(c, SWAP, 2);
5953
            ADDOP(c, STORE_SUBSCR);
5954
        }
5955
        break;
5956
    case Name_kind:
  Branch (5956:5): [True: 1.45k, False: 280]
5957
        return compiler_nameop(c, e->v.Name.id, Store);
5958
    default:
  Branch (5958:5): [True: 0, False: 1.73k]
5959
        Py_UNREACHABLE();
5960
    }
5961
    return 1;
5962
}
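The COPY/SWAP bookkeeping above can be observed directly. Hypothetical, minimal sketch (opcode details are version-dependent):

import dis

def bump(obj, seq, i):
    obj.count += 1   # attribute target: COPY 1 / LOAD_ATTR ... SWAP 2 / STORE_ATTR
    seq[i] += 1      # subscript target: COPY 2, COPY 2 / BINARY_SUBSCR ... STORE_SUBSCR
    return obj, seq

dis.dis(bump)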
5963
5964
static int
5965
check_ann_expr(struct compiler *c, expr_ty e)
5966
{
5967
    VISIT(c, expr, e);
5968
    ADDOP(c, POP_TOP);
5969
    return 1;
5970
}
5971
5972
static int
5973
check_annotation(struct compiler *c, stmt_ty s)
5974
{
5975
    /* Annotations of complex targets do not produce anything
5976
       under the annotations future import. */
5977
    if (c->c_future->ff_features & CO_FUTURE_ANNOTATIONS) {
  Branch (5977:9): [True: 344, False: 14]
5978
        return 1;
5979
    }
5980
5981
    /* Annotations are only evaluated in a module or class. */
5982
    if (c->u->u_scope_type == COMPILER_SCOPE_MODULE ||
  Branch (5982:9): [True: 4, False: 10]
5983
        c->u->u_scope_type == COMPILER_SCOPE_CLASS) {
  Branch (5983:9): [True: 2, False: 8]
5984
        return check_ann_expr(c, s->v.AnnAssign.annotation);
5985
    }
5986
    return 1;
5987
}
5988
5989
static int
5990
check_ann_subscr(struct compiler *c, expr_ty e)
5991
{
5992
    /* We check that everything in a subscript is defined at runtime. */
5993
    switch (e->kind) {
5994
    case Slice_kind:
  Branch (5994:5): [True: 0, False: 1]
5995
        if (e->v.Slice.lower && !check_ann_expr(c, e->v.Slice.lower)) {
  Branch (5995:13): [True: 0, False: 0]
  Branch (5995:33): [True: 0, False: 0]
5996
            return 0;
5997
        }
5998
        if (e->v.Slice.upper && !check_ann_expr(c, e->v.Slice.upper)) {
  Branch (5998:13): [True: 0, False: 0]
  Branch (5998:33): [True: 0, False: 0]
5999
            return 0;
6000
        }
6001
        if (e->v.Slice.step && !check_ann_expr(c, e->v.Slice.step)) {
  Branch (6001:13): [True: 0, False: 0]
  Branch (6001:32): [True: 0, False: 0]
6002
            return 0;
6003
        }
6004
        return 1;
6005
    case Tuple_kind: {
  Branch (6005:5): [True: 0, False: 1]
6006
        /* extended slice */
6007
        asdl_expr_seq *elts = e->v.Tuple.elts;
6008
        Py_ssize_t i, n = asdl_seq_LEN(elts);
6009
        for (i = 0; i < n; i++) {
  Branch (6009:21): [True: 0, False: 0]
6010
            if (!check_ann_subscr(c, asdl_seq_GET(elts, i))) {
  Branch (6010:17): [True: 0, False: 0]
6011
                return 0;
6012
            }
6013
        }
6014
        return 1;
6015
    }
6016
    default:
  Branch (6016:5): [True: 1, False: 0]
6017
        return check_ann_expr(c, e);
6018
    }
6019
}
6020
6021
static int
6022
compiler_annassign(struct compiler *c, stmt_ty s)
6023
{
6024
    expr_ty targ = s->v.AnnAssign.target;
6025
    PyObject* mangled;
6026
6027
    assert(s->kind == AnnAssign_kind);
6028
6029
    /* We perform the actual assignment first. */
6030
    if (s->v.AnnAssign.value) {
  Branch (6030:9): [True: 213, False: 732]
6031
        VISIT(c, expr, s->v.AnnAssign.value);
6032
        VISIT(c, expr, targ);
6033
    }
6034
    switch (targ->kind) {
6035
    case Name_kind:
  Branch (6035:5): [True: 589, False: 356]
6036
        if (forbidden_name(c, targ->v.Name.id, Store))
  Branch (6036:13): [True: 1, False: 588]
6037
            return 0;
6038
        /* If we have a simple name in a module or class, store annotation. */
6039
        if (s->v.AnnAssign.simple &&
  Branch (6039:13): [True: 585, False: 3]
6040
            (c->u->u_scope_type == COMPILER_SCOPE_MODULE ||
  Branch (6040:14): [True: 368, False: 217]
6041
             c->u->u_scope_type == COMPILER_SCOPE_CLASS)) {
  Branch (6041:14): [True: 207, False: 10]
6042
            if (c->c_future->ff_features & CO_FUTURE_ANNOTATIONS) {
  Branch (6042:17): [True: 517, False: 58]
6043
                VISIT(c, annexpr, s->v.AnnAssign.annotation)
6044
            }
6045
            else {
6046
                VISIT(c, expr, s->v.AnnAssign.annotation);
6047
            }
6048
            ADDOP_NAME(c, LOAD_NAME, &_Py_ID(__annotations__), names);
6049
            mangled = _Py_Mangle(c->u->u_private, targ->v.Name.id);
6050
            ADDOP_LOAD_CONST_NEW(c, mangled);
6051
            ADDOP(c, STORE_SUBSCR);
6052
        }
6053
        break;
6054
    case Attribute_kind:
  Branch (6054:5): [True: 350, False: 595]
6055
        if (forbidden_name(c, targ->v.Attribute.attr, Store))
  Branch (6055:13): [True: 1, False: 349]
6056
            return 0;
6057
        if (!s->v.AnnAssign.value &&
  Branch (6057:13): [True: 345, False: 4]
6058
            !check_ann_expr(c, targ->v.Attribute.value)) {
  Branch (6058:13): [True: 0, False: 345]
6059
            return 0;
6060
        }
6061
        break;
6062
    case Subscript_kind:
  Branch (6062:5): [True: 6, False: 939]
6063
        if (!s->v.AnnAssign.value &&
  Branch (6063:13): [True: 1, False: 5]
6064
            (!check_ann_expr(c, targ->v.Subscript.value) ||
  Branch (6064:14): [True: 0, False: 1]
6065
             !check_ann_subscr(c, targ->v.Subscript.slice))) {
  Branch (6065:14): [True: 0, False: 1]
6066
                return 0;
6067
        }
6068
        break;
6069
    default:
  Branch (6069:5): [True: 0, False: 945]
6070
        PyErr_Format(PyExc_SystemError,
6071
                     "invalid node type (%d) for annotated assignment",
6072
                     targ->kind);
6073
            return 0;
6074
    }
6075
    /* Annotation is evaluated last. */
6076
    if (!s->v.AnnAssign.simple && !check_annotation(c, s)) {
  Branch (6076:9): [True: 358, False: 585]
  Branch (6076:35): [True: 0, False: 358]
6077
        return 0;
6078
    }
6079
    return 1;
6080
}
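A small Python illustration of the simple-target case handled above: at module (or class) scope a simple annotated name is recorded in __annotations__ via the LOAD_NAME __annotations__ / STORE_SUBSCR sequence, and under "from __future__ import annotations" the stored value is the annotation's source text instead (the annexpr branch). Minimal sketch; the printed form is illustrative.

count: int = 0   # the assignment is compiled first, then the annotation is stored
name: str        # no value: only the annotation is stored

print(__annotations__)   # e.g. {'count': <class 'int'>, 'name': <class 'str'>}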
6081
6082
/* Raises a SyntaxError and returns 0.
6083
   If something goes wrong, a different exception may be raised.
6084
*/
6085
6086
static int
6087
compiler_error(struct compiler *c, const char *format, ...)
6088
{
6089
    va_list vargs;
6090
    va_start(vargs, format);
6091
    PyObject *msg = PyUnicode_FromFormatV(format, vargs);
6092
    va_end(vargs);
6093
    if (msg == NULL) {
  Branch (6093:9): [True: 0, False: 289]
6094
        return 0;
6095
    }
6096
    PyObject *loc = PyErr_ProgramTextObject(c->c_filename, c->u->u_loc.lineno);
6097
    if (loc == NULL) {
  Branch (6097:9): [True: 281, False: 8]
6098
        Py_INCREF(Py_None);
6099
        loc = Py_None;
6100
    }
6101
    struct location u_loc = c->u->u_loc;
6102
    PyObject *args = Py_BuildValue("O(OiiOii)", msg, c->c_filename,
6103
                                   u_loc.lineno, u_loc.col_offset + 1, loc,
6104
                                   u_loc.end_lineno, u_loc.end_col_offset + 1);
6105
    Py_DECREF(msg);
6106
    if (args == NULL) {
  Branch (6106:9): [True: 0, False: 289]
6107
        goto exit;
6108
    }
6109
    PyErr_SetObject(PyExc_SyntaxError, args);
6110
 exit:
6111
    Py_DECREF(loc);
6112
    Py_XDECREF(args);
6113
    return 0;
6114
}
6115
6116
/* Emits a SyntaxWarning and returns 1 on success.
6117
   If the SyntaxWarning is raised as an error, replaces it with a SyntaxError
6118
   and returns 0.
6119
*/
6120
static int
6121
compiler_warn(struct compiler *c, const char *format, ...)
6122
{
6123
    va_list vargs;
6124
    va_start(vargs, format);
6125
    PyObject *msg = PyUnicode_FromFormatV(format, vargs);
6126
    va_end(vargs);
6127
    if (msg == NULL) {
  Branch (6127:9): [True: 0, False: 138]
6128
        return 0;
6129
    }
6130
    if (PyErr_WarnExplicitObject(PyExc_SyntaxWarning, msg, c->c_filename,
  Branch (6130:9): [True: 67, False: 71]
6131
                                 c->u->u_loc.lineno, NULL, NULL) < 0)
6132
    {
6133
        if (PyErr_ExceptionMatches(PyExc_SyntaxWarning)) {
  Branch (6133:13): [True: 67, False: 0]
6134
            /* Replace the SyntaxWarning exception with a SyntaxError
6135
               to get a more accurate error report */
6136
            PyErr_Clear();
6137
            assert(PyUnicode_AsUTF8(msg) != NULL);
6138
            compiler_error(c, PyUnicode_AsUTF8(msg));
6139
        }
6140
        Py_DECREF(msg);
6141
        return 0;
6142
    }
6143
    Py_DECREF(msg);
6144
    return 1;
6145
}
6146
6147
static int
6148
compiler_subscript(struct compiler *c, expr_ty e)
6149
{
6150
    expr_context_ty ctx = e->v.Subscript.ctx;
6151
    int op = 0;
6152
6153
    if (ctx == Load) {
  Branch (6153:9): [True: 12.2k, False: 2.76k]
6154
        if (!check_subscripter(c, e->v.Subscript.value)) {
  Branch (6154:13): [True: 10, False: 12.1k]
6155
            return 0;
6156
        }
6157
        if (!check_index(c, e->v.Subscript.value, e->v.Subscript.slice)) {
  Branch (6157:13): [True: 25, False: 12.1k]
6158
            return 0;
6159
        }
6160
    }
6161
6162
    VISIT(c, expr, e->v.Subscript.value);
6163
    if (is_two_element_slice(e->v.Subscript.slice) && ctx != Del) {
  Branch (6163:9): [True: 1.90k, False: 13.0k]
  Branch (6163:55): [True: 1.87k, False: 31]
6164
        if (!compiler_slice(c, e->v.Subscript.slice)) {
  Branch (6164:13): [True: 0, False: 1.87k]
6165
            return 0;
6166
        }
6167
        if (ctx == Load) {
  Branch (6167:13): [True: 1.77k, False: 101]
6168
            ADDOP(c, BINARY_SLICE);
6169
        }
6170
        else {
6171
            assert(ctx == Store);
6172
            ADDOP(c, STORE_SLICE);
6173
        }
6174
    }
6175
    else {
6176
        VISIT(c, expr, e->v.Subscript.slice);
6177
        switch (ctx) {
  Branch (6177:17): [True: 0, False: 13.0k]
6178
            case Load:    op = BINARY_SUBSCR; break;
  Branch (6178:13): [True: 10.3k, False: 2.66k]
6179
            case Store:   op = STORE_SUBSCR; break;
  Branch (6179:13): [True: 2.37k, False: 10.6k]
6180
            case Del:     op = DELETE_SUBSCR; break;
  Branch (6180:13): [True: 286, False: 12.7k]
6181
        }
6182
        assert(op);
6183
        ADDOP(c, op);
6184
    }
6185
    return 1;
6186
}
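Hypothetical, minimal sketch of the two paths above, on an interpreter that has the BINARY_SLICE/STORE_SLICE instructions used by this compiler (older releases emit BUILD_SLICE plus BINARY_SUBSCR for both forms):

import dis

def slices(x, a, b, c):
    two = x[a:b]       # two-element slice in Load context: BINARY_SLICE
    three = x[a:b:c]   # stepped slice: BUILD_SLICE then BINARY_SUBSCR
    return two, three

dis.dis(slices)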
6187
6188
/* Returns the number of values emitted, i.e. how many are needed
6189
 * to build the slice, or 0 if there is an error. */
6190
static int
6191
compiler_slice(struct compiler *c, expr_ty s)
6192
{
6193
    int n = 2;
6194
    assert(s->kind == Slice_kind);
6195
6196
    /* only handles the cases where BUILD_SLICE is emitted */
6197
    if (s->v.Slice.lower) {
  Branch (6197:9): [True: 1.23k, False: 781]
6198
        VISIT(c, expr, s->v.Slice.lower);
6199
    }
6200
    else {
6201
        ADDOP_LOAD_CONST(c, Py_None);
6202
    }
6203
6204
    if (s->v.Slice.upper) {
  Branch (6204:9): [True: 1.20k, False: 811]
6205
        VISIT(c, expr, s->v.Slice.upper);
6206
    }
6207
    else {
6208
        ADDOP_LOAD_CONST(c, Py_None);
6209
    }
6210
6211
    if (s->v.Slice.step) {
  Branch (6211:9): [True: 55, False: 1.96k]
6212
        n++;
6213
        VISIT(c, expr, s->v.Slice.step);
6214
    }
6215
    return n;
6216
}
6217
6218
6219
// PEP 634: Structural Pattern Matching
6220
6221
// To keep things simple, all compiler_pattern_* and pattern_helper_* routines
6222
// follow the convention of consuming TOS (the subject for the given pattern)
6223
// and calling jump_to_fail_pop on failure (no match).
6224
6225
// When calling into these routines, it's important that pc->on_top be kept
6226
// updated to reflect the current number of items that we are using on the top
6227
// of the stack: they will be popped on failure, and any name captures will be
6228
// stored *underneath* them on success. This lets us defer all name stores
6229
// until the *entire* pattern matches.
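For orientation, a hypothetical, minimal sketch of the kind of source these routines compile; disassembling it shows the MATCH_MAPPING/MATCH_KEYS and MATCH_CLASS checks plus the COPY/SWAP bookkeeping described above (exact output depends on the CPython version).

import dis

def classify(obj):
    match obj:
        case {"kind": kind, **rest}:
            return ("mapping", kind, rest)
        case str() as text:
            return ("string", text)
        case _:
            return None

dis.dis(classify)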
6230
6231
#define WILDCARD_CHECK(N) \
6232
    ((N)->kind == MatchAs_kind && !(N)->v.MatchAs.name)
6233
6234
#define WILDCARD_STAR_CHECK(N) \
6235
    ((N)->kind == MatchStar_kind && !(N)->v.MatchStar.name)
6236
6237
// Limit permitted subexpressions, even if the parser & AST validator let them through
6238
#define MATCH_VALUE_EXPR(N) \
6239
    ((N)->kind == Constant_kind || (N)->kind == Attribute_kind)
6240
6241
// Allocate or resize pc->fail_pop to allow for n items to be popped on failure.
6242
static int
6243
ensure_fail_pop(struct compiler *c, pattern_context *pc, Py_ssize_t n)
6244
{
6245
    Py_ssize_t size = n + 1;
6246
    if (size <= pc->fail_pop_size) {
  Branch (6246:9): [True: 859, False: 814]
6247
        return 1;
6248
    }
6249
    Py_ssize_t needed = sizeof(basicblock*) * size;
6250
    basicblock **resized = PyObject_Realloc(pc->fail_pop, needed);
6251
    if (resized == NULL) {
  Branch (6251:9): [True: 0, False: 814]
6252
        PyErr_NoMemory();
6253
        return 0;
6254
    }
6255
    pc->fail_pop = resized;
6256
    while (pc->fail_pop_size < size) {
  Branch (6256:12): [True: 1.39k, False: 814]
6257
        basicblock *new_block;
6258
        RETURN_IF_FALSE(new_block = compiler_new_block(c));
6259
        pc->fail_pop[pc->fail_pop_size++] = new_block;
6260
    }
6261
    return 1;
6262
}
6263
6264
// Use op to jump to the correct fail_pop block.
6265
static int
6266
jump_to_fail_pop(struct compiler *c, pattern_context *pc, int op)
6267
{
6268
    // Pop any items on the top of the stack, plus any objects we were going to
6269
    // capture on success:
6270
    Py_ssize_t pops = pc->on_top + PyList_GET_SIZE(pc->stores);
6271
    RETURN_IF_FALSE(ensure_fail_pop(c, pc, pops));
6272
    ADDOP_JUMP(c, op, pc->fail_pop[pops]);
6273
    return 1;
6274
}
6275
6276
// Build all of the fail_pop blocks and reset fail_pop.
6277
static int
6278
emit_and_reset_fail_pop(struct compiler *c, pattern_context *pc)
6279
{
6280
    if (!pc->fail_pop_size) {
  Branch (6280:9): [True: 15, False: 586]
6281
        assert(pc->fail_pop == NULL);
6282
        return 1;
6283
    }
6284
    while (--pc->fail_pop_size) {
  Branch (6284:12): [True: 776, False: 586]
6285
        compiler_use_next_block(c, pc->fail_pop[pc->fail_pop_size]);
6286
        if (!compiler_addop(c, POP_TOP, true)) {
  Branch (6286:13): [True: 0, False: 776]
6287
            pc->fail_pop_size = 0;
6288
            PyObject_Free(pc->fail_pop);
6289
            pc->fail_pop = NULL;
6290
            return 0;
6291
        }
6292
    }
6293
    compiler_use_next_block(c, pc->fail_pop[0]);
6294
    PyObject_Free(pc->fail_pop);
6295
    pc->fail_pop = NULL;
6296
    return 1;
6297
}
6298
6299
static int
6300
compiler_error_duplicate_store(struct compiler *c, identifier n)
6301
{
6302
    return compiler_error(c, "multiple assignments to name %R in pattern", n);
6303
}
6304
6305
// Duplicate the effect of 3.10's ROT_* instructions using SWAPs.
6306
static int
6307
pattern_helper_rotate(struct compiler *c, Py_ssize_t count)
6308
{
6309
    while (1 < count) {
  Branch (6309:12): [True: 1.90k, False: 528]
6310
        ADDOP_I(c, SWAP, count--);
6311
    }
6312
    return 1;
6313
}
6314
6315
static int
6316
pattern_helper_store_name(struct compiler *c, identifier n, pattern_context *pc)
6317
{
6318
    if (n == NULL) {
  Branch (6318:9): [True: 48, False: 408]
6319
        ADDOP(c, POP_TOP);
6320
        return 1;
6321
    }
6322
    if (forbidden_name(c, n, Store)) {
  Branch (6322:9): [True: 0, False: 408]
6323
        return 0;
6324
    }
6325
    // Can't assign to the same name twice:
6326
    int duplicate = PySequence_Contains(pc->stores, n);
6327
    if (duplicate < 0) {
  Branch (6327:9): [True: 0, False: 408]
6328
        return 0;
6329
    }
6330
    if (duplicate) {
  Branch (6330:9): [True: 6, False: 402]
6331
        return compiler_error_duplicate_store(c, n);
6332
    }
6333
    // Rotate this object underneath any items we need to preserve:
6334
    Py_ssize_t rotations = pc->on_top + PyList_GET_SIZE(pc->stores) + 1;
6335
    RETURN_IF_FALSE(pattern_helper_rotate(c, rotations));
6336
    return !PyList_Append(pc->stores, n);
6337
}
6338
6339
6340
static int
6341
pattern_unpack_helper(struct compiler *c, asdl_pattern_seq *elts)
6342
{
6343
    Py_ssize_t n = asdl_seq_LEN(elts);
6344
    int seen_star = 0;
6345
    for (Py_ssize_t i = 0; i < n; i++) {
  Branch (6345:28): [True: 461, False: 234]
6346
        pattern_ty elt = asdl_seq_GET(elts, i);
6347
        if (elt->kind == MatchStar_kind && !seen_star) {
  Branch (6347:13): [True: 37, False: 424]
  Branch (6347:44): [True: 37, False: 0]
6348
            if ((i >= (1 << 8)) ||
  Branch (6348:17): [True: 0, False: 37]
6349
                (n-i-1 >= (INT_MAX >> 8)))
  Branch (6349:17): [True: 0, False: 37]
6350
                return compiler_error(c,
6351
                    "too many expressions in "
6352
                    "star-unpacking sequence pattern");
6353
            ADDOP_I(c, UNPACK_EX, (i + ((n-i-1) << 8)));
6354
            seen_star = 1;
6355
        }
6356
        else if (elt->kind == MatchStar_kind) {
  Branch (6356:18): [True: 0, False: 424]
6357
            return compiler_error(c,
6358
                "multiple starred expressions in sequence pattern");
6359
        }
6360
    }
6361
    if (!seen_star) {
  Branch (6361:9): [True: 197, False: 37]
6362
        ADDOP_I(c, UNPACK_SEQUENCE, n);
6363
    }
6364
    return 1;
6365
}
6366
6367
static int
6368
pattern_helper_sequence_unpack(struct compiler *c, asdl_pattern_seq *patterns,
6369
                               Py_ssize_t star, pattern_context *pc)
6370
{
6371
    RETURN_IF_FALSE(pattern_unpack_helper(c, patterns));
6372
    Py_ssize_t size = asdl_seq_LEN(patterns);
6373
    // We've now got a bunch of new subjects on the stack. They need to remain
6374
    // there after each subpattern match:
6375
    pc->on_top += size;
6376
    for (Py_ssize_t i = 0; i < size; i++) {
  Branch (6376:28): [True: 461, False: 232]
6377
        // One less item to keep track of each time we loop through:
6378
        pc->on_top--;
6379
        pattern_ty pattern = asdl_seq_GET(patterns, i);
6380
        RETURN_IF_FALSE(compiler_pattern_subpattern(c, pattern, pc));
6381
    }
6382
    return 1;
6383
}
6384
6385
// Like pattern_helper_sequence_unpack, but uses BINARY_SUBSCR instead of
6386
// UNPACK_SEQUENCE / UNPACK_EX. This is more efficient for patterns with a
6387
// starred wildcard like [first, *_] / [first, *_, last] / [*_, last] / etc.
6388
static int
6389
pattern_helper_sequence_subscr(struct compiler *c, asdl_pattern_seq *patterns,
6390
                               Py_ssize_t star, pattern_context *pc)
6391
{
6392
    // We need to keep the subject around for extracting elements:
6393
    pc->on_top++;
6394
    Py_ssize_t size = asdl_seq_LEN(patterns);
6395
    for (Py_ssize_t i = 0; i < size; i++) {
  Branch (6395:28): [True: 47, False: 16]
6396
        pattern_ty pattern = asdl_seq_GET(patterns, i);
6397
        if (WILDCARD_CHECK(pattern)) {
6398
            continue;
6399
        }
6400
        if (i == star) {
  Branch (6400:13): [True: 16, False: 24]
6401
            assert(WILDCARD_STAR_CHECK(pattern));
6402
            continue;
6403
        }
6404
        ADDOP_I(c, COPY, 1);
6405
        if (i < star) {
  Branch (6405:13): [True: 14, False: 10]
6406
            ADDOP_LOAD_CONST_NEW(c, PyLong_FromSsize_t(i));
6407
        }
6408
        else {
6409
            // The subject may not support negative indexing! Compute a
6410
            // nonnegative index:
6411
            ADDOP(c, GET_LEN);
6412
            ADDOP_LOAD_CONST_NEW(c, PyLong_FromSsize_t(size - i));
6413
            ADDOP_BINARY(c, Sub);
6414
        }
6415
        ADDOP(c, BINARY_SUBSCR);
6416
        RETURN_IF_FALSE(compiler_pattern_subpattern(c, pattern, pc));
6417
    }
6418
    // Pop the subject, we're done with it:
6419
    pc->on_top--;
6420
    ADDOP(c, POP_TOP);
6421
    return 1;
6422
}
6423
6424
// Like compiler_pattern, but turn off checks for irrefutability.
6425
static int
6426
compiler_pattern_subpattern(struct compiler *c, pattern_ty p, pattern_context *pc)
6427
{
6428
    int allow_irrefutable = pc->allow_irrefutable;
6429
    pc->allow_irrefutable = 1;
6430
    RETURN_IF_FALSE(compiler_pattern(c, p, pc));
6431
    pc->allow_irrefutable = allow_irrefutable;
6432
    return 1;
6433
}
6434
6435
static int
6436
compiler_pattern_as(struct compiler *c, pattern_ty p, pattern_context *pc)
6437
{
6438
    assert(p->kind == MatchAs_kind);
6439
    if (p->v.MatchAs.pattern == NULL) {
  Branch (6439:9): [True: 376, False: 47]
6440
        // An irrefutable match:
6441
        if (!pc->allow_irrefutable) {
  Branch (6441:13): [True: 11, False: 365]
6442
            if (p->v.MatchAs.name) {
  Branch (6442:17): [True: 5, False: 6]
6443
                const char *e = "name capture %R makes remaining patterns unreachable";
6444
                return compiler_error(c, e, p->v.MatchAs.name);
6445
            }
6446
            const char *e = "wildcard makes remaining patterns unreachable";
6447
            return compiler_error(c, e);
6448
        }
6449
        return pattern_helper_store_name(c, p->v.MatchAs.name, pc);
6450
    }
6451
    // Need to make a copy for (possibly) storing later:
6452
    pc->on_top++;
6453
    ADDOP_I(c, COPY, 1);
6454
    RETURN_IF_FALSE(compiler_pattern(c, p->v.MatchAs.pattern, pc));
6455
    // Success! Store it:
6456
    pc->on_top--;
6457
    RETURN_IF_FALSE(pattern_helper_store_name(c, p->v.MatchAs.name, pc));
6458
    return 1;
6459
}
6460
6461
static int
6462
compiler_pattern_star(struct compiler *c, pattern_ty p, pattern_context *pc)
6463
{
6464
    assert(p->kind == MatchStar_kind);
6465
    RETURN_IF_FALSE(pattern_helper_store_name(c, p->v.MatchStar.name, pc));
6466
    return 1;
6467
}
6468
6469
static int
6470
validate_kwd_attrs(struct compiler *c, asdl_identifier_seq *attrs, asdl_pattern_seq* patterns)
6471
{
6472
    // Any errors will point to the pattern rather than the arg name as the
6473
    // parser is only supplying identifiers rather than Name or keyword nodes
6474
    Py_ssize_t nattrs = asdl_seq_LEN(attrs);
6475
    for (Py_ssize_t i = 0; i < nattrs; i++) {
  Branch (6475:28): [True: 64, False: 35]
6476
        identifier attr = ((identifier)asdl_seq_GET(attrs, i));
6477
        SET_LOC(c, ((pattern_ty) asdl_seq_GET(patterns, i)));
6478
        if (forbidden_name(c, attr, Store)) {
  Branch (6478:13): [True: 0, False: 64]
6479
            return -1;
6480
        }
6481
        for (Py_ssize_t j = i + 1; j < nattrs; j++) {
  Branch (6481:36): [True: 37, False: 63]
6482
            identifier other = ((identifier)asdl_seq_GET(attrs, j));
6483
            if (!PyUnicode_Compare(attr, other)) {
  Branch (6483:17): [True: 1, False: 36]
6484
                SET_LOC(c, ((pattern_ty) asdl_seq_GET(patterns, j)));
6485
                compiler_error(c, "attribute name repeated in class pattern: %U", attr);
6486
                return -1;
6487
            }
6488
        }
6489
    }
6490
    return 0;
6491
}
6492
6493
static int
6494
compiler_pattern_class(struct compiler *c, pattern_ty p, pattern_context *pc)
6495
{
6496
    assert(p->kind == MatchClass_kind);
6497
    asdl_pattern_seq *patterns = p->v.MatchClass.patterns;
6498
    asdl_identifier_seq *kwd_attrs = p->v.MatchClass.kwd_attrs;
6499
    asdl_pattern_seq *kwd_patterns = p->v.MatchClass.kwd_patterns;
6500
    Py_ssize_t nargs = asdl_seq_LEN(patterns);
6501
    Py_ssize_t nattrs = asdl_seq_LEN(kwd_attrs);
6502
    Py_ssize_t nkwd_patterns = asdl_seq_LEN(kwd_patterns);
6503
    if (nattrs != nkwd_patterns) {
  Branch (6503:9): [True: 0, False: 90]
6504
        // AST validator shouldn't let this happen, but if it does,
6505
        // just fail, don't crash out of the interpreter
6506
        const char * e = "kwd_attrs (%d) / kwd_patterns (%d) length mismatch in class pattern";
6507
        return compiler_error(c, e, nattrs, nkwd_patterns);
6508
    }
6509
    if (INT_MAX < nargs || INT_MAX < nargs + nattrs - 1) {
  Branch (6509:9): [True: 0, False: 90]
  Branch (6509:28): [True: 0, False: 90]
6510
        const char *e = "too many sub-patterns in class pattern %R";
6511
        return compiler_error(c, e, p->v.MatchClass.cls);
6512
    }
6513
    if (nattrs) {
  Branch (6513:9): [True: 36, False: 54]
6514
        RETURN_IF_FALSE(!validate_kwd_attrs(c, kwd_attrs, kwd_patterns));
6515
        SET_LOC(c, p);
6516
    }
6517
    VISIT(c, expr, p->v.MatchClass.cls);
6518
    PyObject *attr_names;
6519
    RETURN_IF_FALSE(attr_names = PyTuple_New(nattrs));
6520
    Py_ssize_t i;
6521
    for (i = 0; i < nattrs; i++) {
  Branch (6521:17): [True: 63, False: 89]
6522
        PyObject *name = asdl_seq_GET(kwd_attrs, i);
6523
        Py_INCREF(name);
6524
        PyTuple_SET_ITEM(attr_names, i, name);
6525
    }
6526
    ADDOP_LOAD_CONST_NEW(c, attr_names);
6527
    ADDOP_I(c, MATCH_CLASS, nargs);
6528
    ADDOP_I(c, COPY, 1);
6529
    ADDOP_LOAD_CONST(c, Py_None);
6530
    ADDOP_I(c, IS_OP, 1);
6531
    // TOS is now a tuple of (nargs + nattrs) attributes (or None):
6532
    pc->on_top++;
6533
    RETURN_IF_FALSE(jump_to_fail_pop(c, pc, POP_JUMP_IF_FALSE));
6534
    ADDOP_I(c, UNPACK_SEQUENCE, nargs + nattrs);
6535
    pc->on_top += nargs + nattrs - 1;
6536
    for (i = 0; i < nargs + nattrs; i++) {
  Branch (6536:17): [True: 176, False: 86]
6537
        pc->on_top--;
6538
        pattern_ty pattern;
6539
        if (i < nargs) {
  Branch (6539:13): [True: 113, False: 63]
6540
            // Positional:
6541
            pattern = asdl_seq_GET(patterns, i);
6542
        }
6543
        else {
6544
            // Keyword:
6545
            pattern = asdl_seq_GET(kwd_patterns, i - nargs);
6546
        }
6547
        if (WILDCARD_CHECK(pattern)) {
6548
            ADDOP(c, POP_TOP);
6549
            continue;
6550
        }
6551
        RETURN_IF_FALSE(compiler_pattern_subpattern(c, pattern, pc));
6552
    }
6553
    // Success! Pop the tuple of attributes:
6554
    return 1;
6555
}
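A small Python example of the class-pattern path above: keyword sub-patterns supply the attribute-name tuple passed to MATCH_CLASS, and validate_kwd_attrs() is what rejects a repeated attribute name. Hypothetical sketch; Point is only an illustrative class.

from dataclasses import dataclass

@dataclass
class Point:
    x: int
    y: int

def on_axis(p):
    match p:
        case Point(x=0, y=y):
            return ("y-axis", y)
        case Point(x=x, y=0):
            return ("x-axis", x)
    return None

print(on_axis(Point(0, 3)))   # ('y-axis', 3)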
6556
6557
static int
6558
compiler_pattern_mapping(struct compiler *c, pattern_ty p, pattern_context *pc)
6559
{
6560
    assert(p->kind == MatchMapping_kind);
6561
    asdl_expr_seq *keys = p->v.MatchMapping.keys;
6562
    asdl_pattern_seq *patterns = p->v.MatchMapping.patterns;
6563
    Py_ssize_t size = asdl_seq_LEN(keys);
6564
    Py_ssize_t npatterns = asdl_seq_LEN(patterns);
6565
    if (size != npatterns) {
  Branch (6565:9): [True: 0, False: 193]
6566
        // AST validator shouldn't let this happen, but if it does,
6567
        // just fail, don't crash out of the interpreter
6568
        const char * e = "keys (%d) / patterns (%d) length mismatch in mapping pattern";
6569
        return compiler_error(c, e, size, npatterns);
6570
    }
6571
    // We have a double-star target if "rest" is set
6572
    PyObject *star_target = p->v.MatchMapping.rest;
6573
    // We need to keep the subject on top during the mapping and length checks:
6574
    pc->on_top++;
6575
    ADDOP(c, MATCH_MAPPING);
6576
    RETURN_IF_FALSE(jump_to_fail_pop(c, pc, POP_JUMP_IF_FALSE));
6577
    if (!size && !star_target) {
  Branch (6577:9): [True: 27, False: 166]
  Branch (6577:18): [True: 24, False: 3]
6578
        // If the pattern is just "{}", we're done! Pop the subject:
6579
        pc->on_top--;
6580
        ADDOP(c, POP_TOP);
6581
        return 1;
6582
    }
6583
    if (size) {
  Branch (6583:9): [True: 166, False: 3]
6584
        // If the pattern has any keys in it, perform a length check:
6585
        ADDOP(c, GET_LEN);
6586
        ADDOP_LOAD_CONST_NEW(c, PyLong_FromSsize_t(size));
6587
        ADDOP_COMPARE(c, GtE);
6588
        RETURN_IF_FALSE(jump_to_fail_pop(c, pc, POP_JUMP_IF_FALSE));
6589
    }
6590
    if (INT_MAX < size - 1) {
  Branch (6590:9): [True: 0, False: 169]
6591
        return compiler_error(c, "too many sub-patterns in mapping pattern");
6592
    }
6593
    // Collect all of the keys into a tuple for MATCH_KEYS and
6594
    // **rest. They can either be dotted names or literals:
6595
6596
    // Maintaining a set of Constant_kind kind keys allows us to raise a
6597
    // SyntaxError in the case of duplicates.
6598
    PyObject *seen = PySet_New(NULL);
6599
    if (seen == NULL) {
  Branch (6599:9): [True: 0, False: 169]
6600
        return 0;
6601
    }
6602
6603
    // NOTE: goto error on failure in the loop below to avoid leaking `seen`
6604
    for (Py_ssize_t i = 0; i < size; i++) {
  Branch (6604:28): [True: 201, False: 163]
6605
        expr_ty key = asdl_seq_GET(keys, i);
6606
        if (key == NULL) {
  Branch (6606:13): [True: 0, False: 201]
6607
            const char *e = "can't use NULL keys in MatchMapping "
6608
                            "(set 'rest' parameter instead)";
6609
            SET_LOC(c, ((pattern_ty) asdl_seq_GET(patterns, i)));
6610
            compiler_error(c, e);
6611
            goto error;
6612
        }
6613
6614
        if (key->kind == Constant_kind) {
  Branch (6614:13): [True: 199, False: 2]
6615
            int in_seen = PySet_Contains(seen, key->v.Constant.value);
6616
            if (in_seen < 0) {
  Branch (6616:17): [True: 0, False: 199]
6617
                goto error;
6618
            }
6619
            if (in_seen) {
  Branch (6619:17): [True: 5, False: 194]
6620
                const char *e = "mapping pattern checks duplicate key (%R)";
6621
                compiler_error(c, e, key->v.Constant.value);
6622
                goto error;
6623
            }
6624
            if (PySet_Add(seen, key->v.Constant.value)) {
  Branch (6624:17): [True: 0, False: 194]
6625
                goto error;
6626
            }
6627
        }
6628
6629
        else if (key->kind != Attribute_kind) {
  Branch (6629:18): [True: 1, False: 1]
6630
            const char *e = "mapping pattern keys may only match literals and attribute lookups";
6631
            compiler_error(c, e);
6632
            goto error;
6633
        }
6634
        if (!compiler_visit_expr(c, key)) {
  Branch (6634:13): [True: 0, False: 195]
6635
            goto error;
6636
        }
6637
    }
6638
6639
    // all keys have been checked; there are no duplicates
6640
    Py_DECREF(seen);
6641
6642
    ADDOP_I(c, BUILD_TUPLE, size);
6643
    ADDOP(c, MATCH_KEYS);
6644
    // There's now a tuple of keys and a tuple of values on top of the subject:
6645
    pc->on_top += 2;
6646
    ADDOP_I(c, COPY, 1);
6647
    ADDOP_LOAD_CONST(c, Py_None);
6648
    ADDOP_I(c, IS_OP, 1);
6649
    RETURN_IF_FALSE(jump_to_fail_pop(c, pc, POP_JUMP_IF_FALSE));
6650
    // So far so good. Use that tuple of values on the stack to match
6651
    // sub-patterns against:
6652
    ADDOP_I(c, UNPACK_SEQUENCE, size);
6653
    pc->on_top += size - 1;
6654
    for (Py_ssize_t i = 0; i < size; i++) {
  Branch (6654:28): [True: 190, False: 162]
6655
        pc->on_top--;
6656
        pattern_ty pattern = asdl_seq_GET(patterns, i);
6657
        RETURN_IF_FALSE(compiler_pattern_subpattern(c, pattern, pc));
6658
    }
6659
    // If we get this far, it's a match! Whatever happens next should consume
6660
    // the tuple of keys and the subject:
6661
    pc->on_top -= 2;
6662
    if (star_target) {
  Branch (6662:9): [True: 8, False: 154]
6663
        // If we have a starred name, bind a dict of remaining items to it (this may
6664
        // seem a bit inefficient, but keys is rarely big enough to actually impact
6665
        // runtime):
6666
        // rest = dict(TOS1)
6667
        // for key in TOS:
6668
        //     del rest[key]
6669
        ADDOP_I(c, BUILD_MAP, 0);           // [subject, keys, empty]
6670
        ADDOP_I(c, SWAP, 3);                // [empty, keys, subject]
6671
        ADDOP_I(c, DICT_UPDATE, 2);         // [copy, keys]
6672
        ADDOP_I(c, UNPACK_SEQUENCE, size);  // [copy, keys...]
6673
        while (size) {
  Branch (6673:16): [True: 7, False: 8]
6674
            ADDOP_I(c, COPY, 1 + size--);   // [copy, keys..., copy]
6675
            ADDOP_I(c, SWAP, 2);            // [copy, keys..., copy, key]
6676
            ADDOP(c, DELETE_SUBSCR);        // [copy, keys...]
6677
        }
6678
        RETURN_IF_FALSE(pattern_helper_store_name(c, star_target, pc));
6679
    }
6680
    else {
6681
        ADDOP(c, POP_TOP);  // Tuple of keys.
6682
        ADDOP(c, POP_TOP);  // Subject.
6683
    }
6684
    return 1;
6685
6686
error:
6687
    Py_DECREF(seen);
6688
    return 0;
6689
}
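The **rest handling above (copy the mapping, delete the matched keys, bind what is left) corresponds to the following observable behaviour. Minimal sketch:

def split_config(cfg):
    match cfg:
        case {"host": host, **rest}:
            return host, rest
    return None, cfg

print(split_config({"host": "db1", "port": 5432}))   # ('db1', {'port': 5432})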
6690
6691
static int
6692
compiler_pattern_or(struct compiler *c, pattern_ty p, pattern_context *pc)
6693
{
6694
    assert(p->kind == MatchOr_kind);
6695
    basicblock *end;
6696
    RETURN_IF_FALSE(end = compiler_new_block(c));
6697
    Py_ssize_t size = asdl_seq_LEN(p->v.MatchOr.patterns);
6698
    assert(size > 1);
6699
    // We're going to be messing with pc. Keep the original info handy:
6700
    pattern_context old_pc = *pc;
6701
    Py_INCREF(pc->stores);
6702
    // control is the list of names bound by the first alternative. It is used
6703
    // for checking different name bindings in alternatives, and for correcting
6704
    // the order in which extracted elements are placed on the stack.
6705
    PyObject *control = NULL;
6706
    // NOTE: We can't use returning macros anymore! goto error on error.
6707
    for (Py_ssize_t i = 0; i < size; i++) {
  Branch (6707:28): [True: 151, False: 50]
6708
        pattern_ty alt = asdl_seq_GET(p->v.MatchOr.patterns, i);
6709
        SET_LOC(c, alt);
6710
        PyObject *pc_stores = PyList_New(0);
6711
        if (pc_stores == NULL) {
  Branch (6711:13): [True: 0, False: 151]
6712
            goto error;
6713
        }
6714
        Py_SETREF(pc->stores, pc_stores);
6715
        // An irrefutable sub-pattern must be last, if it is allowed at all:
6716
        pc->allow_irrefutable = (i == size - 1) && old_pc.allow_irrefutable;
  Branch (6716:33): [True: 52, False: 99]
  Branch (6716:52): [True: 44, False: 8]
6717
        pc->fail_pop = NULL;
6718
        pc->fail_pop_size = 0;
6719
        pc->on_top = 0;
6720
        if (!compiler_addop_i(c, COPY, 1, true) || !compiler_pattern(c, alt, pc)) {
  Branch (6720:13): [True: 0, False: 151]
  Branch (6720:52): [True: 8, False: 143]
6721
            goto error;
6722
        }
6723
        // Success!
6724
        Py_ssize_t nstores = PyList_GET_SIZE(pc->stores);
6725
        if (!i) {
  Branch (6725:13): [True: 53, False: 90]
6726
            // This is the first alternative, so save its stores as a "control"
6727
            // for the others (they can't bind a different set of names, and
6728
            // might need to be reordered):
6729
            assert(control == NULL);
6730
            control = pc->stores;
6731
            Py_INCREF(control);
6732
        }
6733
        else if (nstores != PyList_GET_SIZE(control)) {
  Branch (6733:18): [True: 1, False: 89]
6734
            goto diff;
6735
        }
6736
        else if (nstores) {
  Branch (6736:18): [True: 18, False: 71]
6737
            // There were captures. Check to see if we differ from control:
6738
            Py_ssize_t icontrol = nstores;
6739
            while (icontrol--) {
  Branch (6739:20): [True: 66, False: 17]
6740
                PyObject *name = PyList_GET_ITEM(control, icontrol);
6741
                Py_ssize_t istores = PySequence_Index(pc->stores, name);
6742
                if (istores < 0) {
  Branch (6742:21): [True: 1, False: 65]
6743
                    PyErr_Clear();
6744
                    goto diff;
6745
                }
6746
                if (icontrol != istores) {
  Branch (6746:21): [True: 30, False: 35]
6747
                    // Reorder the names on the stack to match the order of the
6748
                    // names in control. There's probably a better way of doing
6749
                    // this; the current solution is potentially very
6750
                    // inefficient when each alternative subpattern binds lots
6751
                    // of names in different orders. It's fine for reasonable
6752
                    // cases, though, and the peephole optimizer will ensure
6753
                    // that the final code is as efficient as possible.
6754
                    assert(istores < icontrol);
6755
                    Py_ssize_t rotations = istores + 1;
6756
                    // Perform the same rotation on pc->stores:
6757
                    PyObject *rotated = PyList_GetSlice(pc->stores, 0,
6758
                                                        rotations);
6759
                    if (rotated == NULL ||
  Branch (6759:25): [True: 0, False: 30]
6760
                        PyList_SetSlice(pc->stores, 0, rotations, NULL) ||
  Branch (6760:25): [True: 0, False: 30]
6761
                        PyList_SetSlice(pc->stores, icontrol - istores,
  Branch (6761:25): [True: 0, False: 30]
6762
                                        icontrol - istores, rotated))
6763
                    {
6764
                        Py_XDECREF(rotated);
6765
                        goto error;
6766
                    }
6767
                    Py_DECREF(rotated);
6768
                    // That just did:
6769
                    // rotated = pc_stores[:rotations]
6770
                    // del pc_stores[:rotations]
6771
                    // pc_stores[icontrol-istores:icontrol-istores] = rotated
6772
                    // Do the same thing to the stack, using several
6773
                    // rotations:
6774
                    while (rotations--) {
  Branch (6774:28): [True: 102, False: 30]
6775
                        if (!pattern_helper_rotate(c, icontrol + 1)){
  Branch (6775:29): [True: 0, False: 102]
6776
                            goto error;
6777
                        }
6778
                    }
6779
                }
6780
            }
6781
        }
6782
        assert(control);
6783
        if (!compiler_addop_j(c, JUMP, end, true) ||
  Branch (6783:13): [True: 0, False: 141]
6784
            !emit_and_reset_fail_pop(c, pc))
  Branch (6784:13): [True: 0, False: 141]
6785
        {
6786
            goto error;
6787
        }
6788
    }
6789
    Py_DECREF(pc->stores);
6790
    *pc = old_pc;
6791
    Py_INCREF(pc->stores);
6792
    // Need to NULL this for the PyObject_Free call in the error block.
6793
    old_pc.fail_pop = NULL;
6794
    // No match. Pop the remaining copy of the subject and fail:
6795
    if (!compiler_addop(c, POP_TOP, true) || !jump_to_fail_pop(c, pc, JUMP)) {
  Branch (6795:9): [True: 0, False: 50]
  Branch (6795:46): [True: 0, False: 50]
6796
        goto error;
6797
    }
6798
    compiler_use_next_block(c, end);
6799
    Py_ssize_t nstores = PyList_GET_SIZE(control);
6800
    // There's a bunch of stuff on the stack between where the new stores
6801
    // are and where they need to be:
6802
    // - The other stores.
6803
    // - A copy of the subject.
6804
    // - Anything else that may be on top of the stack.
6805
    // - Any previous stores we've already stashed away on the stack.
6806
    Py_ssize_t nrots = nstores + 1 + pc->on_top + PyList_GET_SIZE(pc->stores);
6807
    for (Py_ssize_t i = 0; i < nstores; i++) {
  Branch (6807:28): [True: 24, False: 50]
6808
        // Rotate this capture to its proper place on the stack:
6809
        if (!pattern_helper_rotate(c, nrots)) {
  Branch (6809:13): [True: 0, False: 24]
6810
            goto error;
6811
        }
6812
        // Update the list of previous stores with this new name, checking for
6813
        // duplicates:
6814
        PyObject *name = PyList_GET_ITEM(control, i);
6815
        int dupe = PySequence_Contains(pc->stores, name);
6816
        if (dupe < 0) {
  Branch (6816:13): [True: 0, False: 24]
6817
            goto error;
6818
        }
6819
        if (dupe) {
  Branch (6819:13): [True: 0, False: 24]
6820
            compiler_error_duplicate_store(c, name);
6821
            goto error;
6822
        }
6823
        if (PyList_Append(pc->stores, name)) {
  Branch (6823:13): [True: 0, False: 24]
6824
            goto error;
6825
        }
6826
    }
6827
    Py_DECREF(old_pc.stores);
6828
    Py_DECREF(control);
6829
    // NOTE: Returning macros are safe again.
6830
    // Pop the copy of the subject:
6831
    ADDOP(c, POP_TOP);
6832
    return 1;
6833
diff:
6834
    compiler_error(c, "alternative patterns bind different names");
6835
error:
6836
    PyObject_Free(old_pc.fail_pop);
6837
    Py_DECREF(old_pc.stores);
6838
    Py_XDECREF(control);
6839
    return 0;
6840
}
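/*
 * Standalone toy sketch (not part of compile.c): the slice-rotation
 * comments inside compiler_pattern_or() above describe the reordering in
 * Python terms.  The same net effect on a small array of captured names,
 * with hypothetical helper and variable names:
 */
#include <stdio.h>
#include <string.h>

/* Move the first `rot` names so they start at index `dst`, mirroring
 *     rotated = lst[:rot]; del lst[:rot]; lst[dst:dst] = rotated
 */
static void
rotate_prefix(const char *names[], int rot, int dst)
{
    const char *saved[8];
    memcpy(saved, names, rot * sizeof(*names));         /* save the prefix            */
    memmove(names, names + rot, dst * sizeof(*names));  /* shift next dst names left  */
    memcpy(names + dst, saved, rot * sizeof(*names));   /* reinsert the prefix at dst */
}

int
main(void)
{
    const char *names[] = {"a", "b", "c", "d"};
    rotate_prefix(names, 1, 2);
    for (int i = 0; i < 4; i++) {
        printf("%s ", names[i]);    /* prints: b c a d */
    }
    printf("\n");
    return 0;
}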
6841
6842
6843
static int
6844
compiler_pattern_sequence(struct compiler *c, pattern_ty p, pattern_context *pc)
6845
{
6846
    assert(p->kind == MatchSequence_kind);
6847
    asdl_pattern_seq *patterns = p->v.MatchSequence.patterns;
6848
    Py_ssize_t size = asdl_seq_LEN(patterns);
6849
    Py_ssize_t star = -1;
6850
    int only_wildcard = 1;
6851
    int star_wildcard = 0;
6852
    // Find a starred name, if it exists. There may be at most one:
6853
    for (Py_ssize_t i = 0; i < size; i++) {
  Branch (6853:28): [True: 532, False: 288]
6854
        pattern_ty pattern = asdl_seq_GET(patterns, i);
6855
        if (pattern->kind == MatchStar_kind) {
  Branch (6855:13): [True: 64, False: 468]
6856
            if (star >= 0) {
  Branch (6856:17): [True: 2, False: 62]
6857
                const char *e = "multiple starred names in sequence pattern";
6858
                return compiler_error(c, e);
6859
            }
6860
            star_wildcard = WILDCARD_STAR_CHECK(pattern);
6861
            only_wildcard &= star_wildcard;
6862
            star = i;
6863
            continue;
6864
        }
6865
        only_wildcard &= WILDCARD_CHECK(pattern);
6866
    }
6867
    // We need to keep the subject on top during the sequence and length checks:
6868
    pc->on_top++;
6869
    ADDOP(c, MATCH_SEQUENCE);
6870
    RETURN_IF_FALSE(jump_to_fail_pop(c, pc, POP_JUMP_IF_FALSE));
6871
    if (star < 0) {
  Branch (6871:9): [True: 228, False: 60]
6872
        // No star: len(subject) == size
6873
        ADDOP(c, GET_LEN);
6874
        ADDOP_LOAD_CONST_NEW(c, PyLong_FromSsize_t(size));
6875
        ADDOP_COMPARE(c, Eq);
6876
        RETURN_IF_FALSE(jump_to_fail_pop(c, pc, POP_JUMP_IF_FALSE));
6877
    }
6878
    else if (size > 1) {
  Branch (6878:14): [True: 53, False: 7]
6879
        // Star: len(subject) >= size - 1
6880
        ADDOP(c, GET_LEN);
6881
        ADDOP_LOAD_CONST_NEW(c, PyLong_FromSsize_t(size - 1));
6882
        ADDOP_COMPARE(c, GtE);
6883
        RETURN_IF_FALSE(jump_to_fail_pop(c, pc, POP_JUMP_IF_FALSE));
6884
    }
6885
    // Whatever comes next should consume the subject:
6886
    pc->on_top--;
6887
    if (only_wildcard) {
  Branch (6887:9): [True: 38, False: 250]
6888
        // Patterns like: [] / [_] / [_, _] / [*_] / [_, *_] / [_, _, *_] / etc.
6889
        ADDOP(c, POP_TOP);
6890
    }
6891
    else if (star_wildcard) {
  Branch (6891:14): [True: 16, False: 234]
6892
        RETURN_IF_FALSE(pattern_helper_sequence_subscr(c, patterns, star, pc));
6893
    }
6894
    else {
6895
        RETURN_IF_FALSE(pattern_helper_sequence_unpack(c, patterns, star, pc));
6896
    }
6897
    return 1;
6898
}
6899
6900
static int
6901
compiler_pattern_value(struct compiler *c, pattern_ty p, pattern_context *pc)
6902
{
6903
    assert(p->kind == MatchValue_kind);
6904
    expr_ty value = p->v.MatchValue.value;
6905
    if (!MATCH_VALUE_EXPR(value)) {
6906
        const char *e = "patterns may only match literals and attribute lookups";
6907
        return compiler_error(c, e);
6908
    }
6909
    VISIT(c, expr, value);
6910
    ADDOP_COMPARE(c, Eq);
6911
    RETURN_IF_FALSE(jump_to_fail_pop(c, pc, POP_JUMP_IF_FALSE));
6912
    return 1;
6913
}
6914
6915
static int
6916
compiler_pattern_singleton(struct compiler *c, pattern_ty p, pattern_context *pc)
6917
{
6918
    assert(p->kind == MatchSingleton_kind);
6919
    ADDOP_LOAD_CONST(c, p->v.MatchSingleton.value);
6920
    ADDOP_COMPARE(c, Is);
6921
    RETURN_IF_FALSE(jump_to_fail_pop(c, pc, POP_JUMP_IF_FALSE));
6922
    return 1;
6923
}
6924
6925
static int
6926
compiler_pattern(struct compiler *c, pattern_ty p, pattern_context *pc)
6927
{
6928
    SET_LOC(c, p);
6929
    switch (p->kind) {
  Branch (6929:13): [True: 0, False: 1.49k]
6930
        case MatchValue_kind:
  Branch (6930:9): [True: 385, False: 1.10k]
6931
            return compiler_pattern_value(c, p, pc);
6932
        case MatchSingleton_kind:
  Branch (6932:9): [True: 13, False: 1.47k]
6933
            return compiler_pattern_singleton(c, p, pc);
6934
        case MatchSequence_kind:
  Branch (6934:9): [True: 290, False: 1.20k]
6935
            return compiler_pattern_sequence(c, p, pc);
6936
        case MatchMapping_kind:
  Branch (6936:9): [True: 193, False: 1.29k]
6937
            return compiler_pattern_mapping(c, p, pc);
6938
        case MatchClass_kind:
  Branch (6938:9): [True: 90, False: 1.40k]
6939
            return compiler_pattern_class(c, p, pc);
6940
        case MatchStar_kind:
  Branch (6940:9): [True: 37, False: 1.45k]
6941
            return compiler_pattern_star(c, p, pc);
6942
        case MatchAs_kind:
  Branch (6942:9): [True: 423, False: 1.06k]
6943
            return compiler_pattern_as(c, p, pc);
6944
        case MatchOr_kind:
  Branch (6944:9): [True: 60, False: 1.43k]
6945
            return compiler_pattern_or(c, p, pc);
6946
    }
6947
    // AST validator shouldn't let this happen, but if it does,
6948
    // just fail, don't crash out of the interpreter
6949
    const char *e = "invalid match pattern node in AST (kind=%d)";
6950
    return compiler_error(c, e, p->kind);
6951
}
6952
6953
static int
6954
compiler_match_inner(struct compiler *c, stmt_ty s, pattern_context *pc)
6955
{
6956
    VISIT(c, expr, s->v.Match.subject);
6957
    basicblock *end;
6958
    RETURN_IF_FALSE(end = compiler_new_block(c));
6959
    Py_ssize_t cases = asdl_seq_LEN(s->v.Match.cases);
6960
    assert(cases > 0);
6961
    match_case_ty m = asdl_seq_GET(s->v.Match.cases, cases - 1);
6962
    int has_default = WILDCARD_CHECK(m->pattern) && 1 < cases;
  Branch (6962:53): [True: 14, False: 5]
6963
    for (Py_ssize_t i = 0; i < cases - has_default; i++) {
  Branch (6963:28): [True: 490, False: 346]
6964
        m = asdl_seq_GET(s->v.Match.cases, i);
6965
        SET_LOC(c, m->pattern);
6966
        // Only copy the subject if we're *not* on the last case:
6967
        if (i != cases - has_default - 1) {
  Branch (6967:13): [True: 117, False: 373]
6968
            ADDOP_I(c, COPY, 1);
6969
        }
6970
        RETURN_IF_FALSE(pc->stores = PyList_New(0));
6971
        // Irrefutable cases must be either guarded, last, or both:
6972
        pc->allow_irrefutable = m->guard != NULL || i == cases - 1;
  Branch (6972:33): [True: 49, False: 441]
  Branch (6972:53): [True: 340, False: 101]
6973
        pc->fail_pop = NULL;
6974
        pc->fail_pop_size = 0;
6975
        pc->on_top = 0;
6976
        // NOTE: Can't use returning macros here (they'll leak pc->stores)!
6977
        if (!compiler_pattern(c, m->pattern, pc)) {
  Branch (6977:13): [True: 30, False: 460]
6978
            Py_DECREF(pc->stores);
6979
            return 0;
6980
        }
6981
        assert(!pc->on_top);
6982
        // It's a match! Store all of the captured names (they're on the stack).
6983
        Py_ssize_t nstores = PyList_GET_SIZE(pc->stores);
6984
        for (Py_ssize_t n = 0; n < nstores; n++) {
  Branch (6984:32): [True: 327, False: 460]
6985
            PyObject *name = PyList_GET_ITEM(pc->stores, n);
6986
            if (!compiler_nameop(c, name, Store)) {
  Branch (6986:17): [True: 0, False: 327]
6987
                Py_DECREF(pc->stores);
6988
                return 0;
6989
            }
6990
        }
6991
        Py_DECREF(pc->stores);
6992
        // NOTE: Returning macros are safe again.
6993
        if (m->guard) {
  Branch (6993:13): [True: 47, False: 413]
6994
            RETURN_IF_FALSE(ensure_fail_pop(c, pc, 0));
6995
            RETURN_IF_FALSE(compiler_jump_if(c, m->guard, pc->fail_pop[0], 0));
6996
        }
6997
        // Success! Pop the subject off, we're done with it:
6998
        if (i != cases - has_default - 1) {
  Branch (6998:13): [True: 114, False: 346]
6999
            ADDOP(c, POP_TOP);
7000
        }
7001
        VISIT_SEQ(c, stmt, m->body);
7002
        ADDOP_JUMP(c, JUMP, end);
7003
        // If the pattern fails to match, we want the line number of the
7004
        // cleanup to be associated with the failed pattern, not the last line
7005
        // of the body
7006
        SET_LOC(c, m->pattern);
7007
        RETURN_IF_FALSE(emit_and_reset_fail_pop(c, pc));
7008
    }
7009
    if (has_default) {
  Branch (7009:9): [True: 13, False: 333]
7010
        // A trailing "case _" is common, and lets us save a bit of redundant
7011
        // pushing and popping in the loop above:
7012
        m = asdl_seq_GET(s->v.Match.cases, cases - 1);
7013
        SET_LOC(c, m->pattern);
7014
        if (cases == 1) {
  Branch (7014:13): [True: 0, False: 13]
7015
            // No matches. Done with the subject:
7016
            ADDOP(c, POP_TOP);
7017
        }
7018
        else {
7019
            // Show line coverage for default case (it doesn't create bytecode)
7020
            ADDOP(c, NOP);
7021
        }
7022
        if (m->guard) {
  Branch (7022:13): [True: 1, False: 12]
7023
            RETURN_IF_FALSE(compiler_jump_if(c, m->guard, end, 0));
7024
        }
7025
        VISIT_SEQ(c, stmt, m->body);
7026
    }
7027
    compiler_use_next_block(c, end);
7028
    return 1;
7029
}
7030
7031
static int
7032
compiler_match(struct compiler *c, stmt_ty s)
7033
{
7034
    pattern_context pc;
7035
    pc.fail_pop = NULL;
7036
    int result = compiler_match_inner(c, s, &pc);
7037
    PyObject_Free(pc.fail_pop);
7038
    return result;
7039
}
7040
7041
#undef WILDCARD_CHECK
7042
#undef WILDCARD_STAR_CHECK
7043
7044
7045
/* End of the compiler section, beginning of the assembler section */
7046
7047
7048
struct assembler {
7049
    PyObject *a_bytecode;  /* bytes containing bytecode */
7050
    int a_offset;              /* offset into bytecode */
7051
    PyObject *a_except_table;  /* bytes containing exception table */
7052
    int a_except_table_off;    /* offset into exception table */
7053
    /* Location Info */
7054
    int a_lineno;          /* lineno of last emitted instruction */
7055
    PyObject* a_linetable; /* bytes containing location info */
7056
    int a_location_off;    /* offset of last written location info frame */
7057
};
7058
7059
static basicblock**
7060
make_cfg_traversal_stack(basicblock *entryblock) {
7061
    int nblocks = 0;
7062
    for (basicblock *b = entryblock; b != NULL; b = b->b_next) {
  Branch (7062:38): [True: 3.30M, False: 311k]
7063
        b->b_visited = 0;
7064
        nblocks++;
7065
    }
7066
    basicblock **stack = (basicblock **)PyMem_Malloc(sizeof(basicblock *) * nblocks);
7067
    if (!stack) {
  Branch (7067:9): [True: 0, False: 311k]
7068
        PyErr_NoMemory();
7069
    }
7070
    return stack;
7071
}
7072
7073
Py_LOCAL_INLINE(void)
7074
stackdepth_push(basicblock ***sp, basicblock *b, int depth)
7075
{
7076
    assert(b->b_startdepth < 0 || b->b_startdepth == depth);
7077
    if (b->b_startdepth < depth && b->b_startdepth < 100) {
  Branch (7077:9): [True: 568k, False: 227k]
  Branch (7077:36): [True: 568k, False: 0]
7078
        assert(b->b_startdepth < 0);
7079
        b->b_startdepth = depth;
7080
        *(*sp)++ = b;
7081
    }
7082
}
7083
7084
/* Find the flow path that needs the largest stack.  We assume that
7085
 * cycles in the flow graph have no net effect on the stack depth.
7086
 */
7087
static int
7088
stackdepth(basicblock *entryblock, int code_flags)
7089
{
7090
    for (basicblock *b = entryblock; b != NULL; b = b->b_next) {
  Branch (7090:38): [True: 568k, False: 72.8k]
7091
        b->b_startdepth = INT_MIN;
7092
    }
7093
    basicblock **stack = make_cfg_traversal_stack(entryblock);
7094
    if (!stack) {
  Branch (7094:9): [True: 0, False: 72.8k]
7095
        return -1;
7096
    }
7097
7098
    int maxdepth = 0;
7099
    basicblock **sp = stack;
7100
    if (code_flags & (CO_GENERATOR | CO_COROUTINE | CO_ASYNC_GENERATOR)) {
  Branch (7100:9): [True: 1.33k, False: 71.5k]
7101
        stackdepth_push(&sp, entryblock, 1);
7102
    } else {
7103
        stackdepth_push(&sp, entryblock, 0);
7104
    }
7105
7106
    while (sp != stack) {
  Branch (7106:12): [True: 568k, False: 72.8k]
7107
        basicblock *b = *--sp;
7108
        int depth = b->b_startdepth;
7109
        assert(depth >= 0);
7110
        basicblock *next = b->b_next;
7111
        for (int i = 0; i < b->b_iused; i++) {
  Branch (7111:25): [True: 2.60M, False: 462k]
7112
            struct instr *instr = &b->b_instr[i];
7113
            int effect = stack_effect(instr->i_opcode, instr->i_oparg, 0);
7114
            if (effect == PY_INVALID_STACK_EFFECT) {
  Branch (7114:17): [True: 0, False: 2.60M]
7115
                PyErr_Format(PyExc_SystemError,
7116
                             "compiler stack_effect(opcode=%d, arg=%i) failed",
7117
                             instr->i_opcode, instr->i_oparg);
7118
                return -1;
7119
            }
7120
            int new_depth = depth + effect;
7121
            if (new_depth > maxdepth) {
  Branch (7121:17): [True: 179k, False: 2.42M]
7122
                maxdepth = new_depth;
7123
            }
7124
            assert(depth >= 0); /* invalid code or bug in stackdepth() */
7125
            if (is_jump(instr) || is_block_push(instr)) {
  Branch (7125:17): [True: 253k, False: 2.35M]
  Branch (7125:35): [True: 7.72k, False: 2.34M]
7126
                effect = stack_effect(instr->i_opcode, instr->i_oparg, 1);
7127
                assert(effect != PY_INVALID_STACK_EFFECT);
7128
                int target_depth = depth + effect;
7129
                if (target_depth > maxdepth) {
  Branch (7129:21): [True: 977, False: 259k]
7130
                    maxdepth = target_depth;
7131
                }
7132
                assert(target_depth >= 0); /* invalid code or bug in stackdepth() */
7133
                stackdepth_push(&sp, instr->i_target, target_depth);
7134
            }
7135
            depth = new_depth;
7136
            assert(!IS_ASSEMBLER_OPCODE(instr->i_opcode));
7137
            if (IS_UNCONDITIONAL_JUMP_OPCODE(instr->i_opcode) ||
7138
                IS_SCOPE_EXIT_OPCODE(instr->i_opcode))
7139
            {
7140
                /* remaining code is dead */
7141
                next = NULL;
7142
                break;
7143
            }
7144
            if (instr->i_opcode == YIELD_VALUE) {
  Branch (7144:17): [True: 1.48k, False: 2.49M]
7145
                instr->i_oparg = depth;
7146
            }
7147
        }
7148
        if (next != NULL) {
  Branch (7148:13): [True: 462k, False: 106k]
7149
            assert(BB_HAS_FALLTHROUGH(b));
7150
            stackdepth_push(&sp, next, depth);
7151
        }
7152
    }
7153
    PyMem_Free(stack);
7154
    return maxdepth;
7155
}
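/*
 * Minimal self-contained sketch of the worklist pass above (toy types and
 * made-up numbers, not CPython's basicblock/instr structs): each block
 * records the stack depth at entry, successors are pushed with their
 * computed entry depth, and the largest depth reached anywhere is the
 * resulting stack size.
 */
#include <stdio.h>

typedef struct {
    int effects[4];     /* per-instruction stack effect */
    int ninstr;
    int next;           /* fallthrough successor index, or -1 */
    int startdepth;     /* entry depth; -1 means "not visited yet" */
} toyblock;

int
main(void)
{
    /* block 0 pushes two values, blocks 1 and 2 each pop one */
    toyblock blocks[3] = {
        { {1, 1},  2,  1, -1 },
        { {-1},    1,  2, -1 },
        { {-1},    1, -1, -1 },
    };
    int worklist[3], sp = 0, maxdepth = 0;

    blocks[0].startdepth = 0;
    worklist[sp++] = 0;
    while (sp > 0) {
        toyblock *b = &blocks[worklist[--sp]];
        int depth = b->startdepth;
        for (int i = 0; i < b->ninstr; i++) {
            depth += b->effects[i];
            if (depth > maxdepth) {
                maxdepth = depth;
            }
        }
        if (b->next >= 0 && blocks[b->next].startdepth < 0) {
            blocks[b->next].startdepth = depth;     /* like stackdepth_push() */
            worklist[sp++] = b->next;
        }
    }
    printf("stacksize = %d\n", maxdepth);           /* prints 2 */
    return 0;
}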
7156
7157
static int
7158
assemble_init(struct assembler *a, int firstlineno)
7159
{
7160
    memset(a, 0, sizeof(struct assembler));
7161
    a->a_lineno = firstlineno;
7162
    a->a_linetable = NULL;
7163
    a->a_location_off = 0;
7164
    a->a_except_table = NULL;
7165
    a->a_bytecode = PyBytes_FromStringAndSize(NULL, DEFAULT_CODE_SIZE);
7166
    if (a->a_bytecode == NULL) {
  Branch (7166:9): [True: 0, False: 72.8k]
7167
        goto error;
7168
    }
7169
    a->a_linetable = PyBytes_FromStringAndSize(NULL, DEFAULT_CNOTAB_SIZE);
7170
    if (a->a_linetable == NULL) {
  Branch (7170:9): [True: 0, False: 72.8k]
7171
        goto error;
7172
    }
7173
    a->a_except_table = PyBytes_FromStringAndSize(NULL, DEFAULT_LNOTAB_SIZE);
7174
    if (a->a_except_table == NULL) {
  Branch (7174:9): [True: 0, False: 72.8k]
7175
        goto error;
7176
    }
7177
    return 1;
7178
error:
7179
    Py_XDECREF(a->a_bytecode);
7180
    Py_XDECREF(a->a_linetable);
7181
    Py_XDECREF(a->a_except_table);
7182
    return 0;
7183
}
7184
7185
static void
7186
assemble_free(struct assembler *a)
7187
{
7188
    Py_XDECREF(a->a_bytecode);
7189
    Py_XDECREF(a->a_linetable);
7190
    Py_XDECREF(a->a_except_table);
7191
}
7192
7193
static int
7194
blocksize(basicblock *b)
7195
{
7196
    int i;
7197
    int size = 0;
7198
7199
    for (i = 0; i < b->b_iused; i++) {
  Branch (7199:17): [True: 2.80M, False: 595k]
7200
        size += instr_size(&b->b_instr[i]);
7201
    }
7202
    return size;
7203
}
7204
7205
static basicblock *
7206
push_except_block(ExceptStack *stack, struct instr *setup) {
7207
    assert(is_block_push(setup));
7208
    int opcode = setup->i_opcode;
7209
    basicblock * target = setup->i_target;
7210
    if (opcode == SETUP_WITH || opcode == SETUP_CLEANUP) {
  Branch (7210:9): [True: 870, False: 6.85k]
  Branch (7210:33): [True: 4.04k, False: 2.80k]
7211
        target->b_preserve_lasti = 1;
7212
    }
7213
    stack->handlers[++stack->depth] = target;
7214
    return target;
7215
}
7216
7217
static basicblock *
7218
pop_except_block(ExceptStack *stack) {
7219
    assert(stack->depth > 0);
7220
    return stack->handlers[--stack->depth];
7221
}
7222
7223
static basicblock *
7224
except_stack_top(ExceptStack *stack) {
7225
    return stack->handlers[stack->depth];
7226
}
7227
7228
static ExceptStack *
7229
make_except_stack(void) {
7230
    ExceptStack *new = PyMem_Malloc(sizeof(ExceptStack));
7231
    if (new == NULL) {
  Branch (7231:9): [True: 0, False: 72.8k]
7232
        PyErr_NoMemory();
7233
        return NULL;
7234
    }
7235
    new->depth = 0;
7236
    new->handlers[0] = NULL;
7237
    return new;
7238
}
7239
7240
static ExceptStack *
7241
copy_except_stack(ExceptStack *stack) {
7242
    ExceptStack *copy = PyMem_Malloc(sizeof(ExceptStack));
7243
    if (copy == NULL) {
  Branch (7243:9): [True: 0, False: 238k]
7244
        PyErr_NoMemory();
7245
        return NULL;
7246
    }
7247
    memcpy(copy, stack, sizeof(ExceptStack));
7248
    return copy;
7249
}
7250
7251
static int
7252
label_exception_targets(basicblock *entryblock) {
7253
    basicblock **todo_stack = make_cfg_traversal_stack(entryblock);
7254
    if (todo_stack == NULL) {
  Branch (7254:9): [True: 0, False: 72.8k]
7255
        return -1;
7256
    }
7257
    ExceptStack *except_stack = make_except_stack();
7258
    if (except_stack == NULL) {
  Branch (7258:9): [True: 0, False: 72.8k]
7259
        PyMem_Free(todo_stack);
7260
        PyErr_NoMemory();
7261
        return -1;
7262
    }
7263
    except_stack->depth = 0;
7264
    todo_stack[0] = entryblock;
7265
    entryblock->b_visited = 1;
7266
    entryblock->b_exceptstack = except_stack;
7267
    basicblock **todo = &todo_stack[1];
7268
    basicblock *handler = NULL;
7269
    while (todo > todo_stack) {
  Branch (7269:12): [True: 568k, False: 72.8k]
7270
        todo--;
7271
        basicblock *b = todo[0];
7272
        assert(b->b_visited == 1);
7273
        except_stack = b->b_exceptstack;
7274
        assert(except_stack != NULL);
7275
        b->b_exceptstack = NULL;
7276
        handler = except_stack_top(except_stack);
7277
        for (int i = 0; i < b->b_iused; i++) {
  Branch (7277:25): [True: 2.60M, False: 568k]
7278
            struct instr *instr = &b->b_instr[i];
7279
            if (is_block_push(instr)) {
  Branch (7279:17): [True: 7.72k, False: 2.59M]
7280
                if (!instr->i_target->b_visited) {
  Branch (7280:21): [True: 7.72k, False: 0]
7281
                    ExceptStack *copy = copy_except_stack(except_stack);
7282
                    if (copy == NULL) {
  Branch (7282:25): [True: 0, False: 7.72k]
7283
                        goto error;
7284
                    }
7285
                    instr->i_target->b_exceptstack = copy;
7286
                    todo[0] = instr->i_target;
7287
                    instr->i_target->b_visited = 1;
7288
                    todo++;
7289
                }
7290
                handler = push_except_block(except_stack, instr);
7291
            }
7292
            else if (instr->i_opcode == POP_BLOCK) {
  Branch (7292:22): [True: 7.32k, False: 2.58M]
7293
                handler = pop_except_block(except_stack);
7294
            }
7295
            else if (is_jump(instr)) {
  Branch (7295:22): [True: 253k, False: 2.33M]
7296
                instr->i_except = handler;
7297
                assert(i == b->b_iused -1);
7298
                if (!instr->i_target->b_visited) {
  Branch (7298:21): [True: 235k, False: 17.6k]
7299
                    if (BB_HAS_FALLTHROUGH(b)) {
7300
                        ExceptStack *copy = copy_except_stack(except_stack);
7301
                        if (copy == NULL) {
  Branch (7301:29): [True: 0, False: 230k]
7302
                            goto error;
7303
                        }
7304
                        instr->i_target->b_exceptstack = copy;
7305
                    }
7306
                    else {
7307
                        instr->i_target->b_exceptstack = except_stack;
7308
                        except_stack = NULL;
7309
                    }
7310
                    todo[0] = instr->i_target;
7311
                    instr->i_target->b_visited = 1;
7312
                    todo++;
7313
                }
7314
            }
7315
            else {
7316
                instr->i_except = handler;
7317
            }
7318
        }
7319
        if (BB_HAS_FALLTHROUGH(b) && !b->b_next->b_visited) {
  Branch (7319:38): [True: 252k, False: 210k]
7320
            assert(except_stack != NULL);
7321
            b->b_next->b_exceptstack = except_stack;
7322
            todo[0] = b->b_next;
7323
            b->b_next->b_visited = 1;
7324
            todo++;
7325
        }
7326
        else if (except_stack != NULL) {
  Branch (7326:18): [True: 311k, False: 5.02k]
7327
           PyMem_Free(except_stack);
7328
        }
7329
    }
7330
#ifdef Py_DEBUG
7331
    for (basicblock *b = entryblock; b != NULL; b = b->b_next) {
7332
        assert(b->b_exceptstack == NULL);
7333
    }
7334
#endif
7335
    PyMem_Free(todo_stack);
7336
    return 0;
7337
error:
7338
    PyMem_Free(todo_stack);
7339
    PyMem_Free(except_stack);
7340
    return -1;
7341
}
7342
7343
static int
7344
mark_warm(basicblock *entryblock) {
7345
    basicblock **stack = make_cfg_traversal_stack(entryblock);
7346
    if (stack == NULL) {
  Branch (7346:9): [True: 0, False: 9.77k]
7347
        return -1;
7348
    }
7349
    basicblock **sp = stack;
7350
7351
    *sp++ = entryblock;
7352
    entryblock->b_visited = 1;
7353
    while (sp > stack) {
  Branch (7353:12): [True: 486k, False: 9.77k]
7354
        basicblock *b = *(--sp);
7355
        assert(!b->b_except_predecessors);
7356
        b->b_warm = 1;
7357
        basicblock *next = b->b_next;
7358
        if (next && BB_HAS_FALLTHROUGH(b) && !next->b_visited) {
  Branch (7358:13): [True: 477k, False: 8.73k]
  Branch (7358:46): [True: 247k, False: 207k]
7359
            *sp++ = next;
7360
            next->b_visited = 1;
7361
        }
7362
        for (int i=0; i < b->b_iused; i++) {
  Branch (7362:23): [True: 1.42M, False: 486k]
7363
            struct instr *instr = &b->b_instr[i];
7364
            if (is_jump(instr) && !instr->i_target->b_visited) {
  Branch (7364:17): [True: 246k, False: 1.18M]
  Branch (7364:35): [True: 228k, False: 17.6k]
7365
                *sp++ = instr->i_target;
7366
                instr->i_target->b_visited = 1;
7367
            }
7368
        }
7369
    }
7370
    PyMem_Free(stack);
7371
    return 0;
7372
}
7373
7374
static int
7375
mark_cold(basicblock *entryblock) {
7376
    for (basicblock *b = entryblock; b != NULL; b = b->b_next) {
  Branch (7376:38): [True: 505k, False: 9.77k]
7377
        assert(!b->b_cold && !b->b_warm);
7378
    }
7379
    if (mark_warm(entryblock) < 0) {
  Branch (7379:9): [True: 0, False: 9.77k]
7380
        return -1;
7381
    }
7382
7383
    basicblock **stack = make_cfg_traversal_stack(entryblock);
7384
    if (stack == NULL) {
  Branch (7384:9): [True: 0, False: 9.77k]
7385
        return -1;
7386
    }
7387
7388
    basicblock **sp = stack;
7389
    for (basicblock *b = entryblock; b != NULL; b = b->b_next) {
  Branch (7389:38): [True: 505k, False: 9.77k]
7390
        if (b->b_except_predecessors) {
  Branch (7390:13): [True: 7.72k, False: 497k]
7391
            assert(b->b_except_predecessors == b->b_predecessors);
7392
            assert(!b->b_warm);
7393
            *sp++ = b;
7394
            b->b_visited = 1;
7395
        }
7396
    }
7397
7398
    while (sp > stack) {
  Branch (7398:12): [True: 19.1k, False: 9.77k]
7399
        basicblock *b = *(--sp);
7400
        b->b_cold = 1;
7401
        basicblock *next = b->b_next;
7402
        if (next && BB_HAS_FALLTHROUGH(b)) {
  Branch (7402:13): [True: 18.1k, False: 1.04k]
7403
            if (!next->b_warm && !next->b_visited) {
  Branch (7403:17): [True: 7.26k, False: 6]
  Branch (7403:34): [True: 7.09k, False: 173]
7404
                *sp++ = next;
7405
                next->b_visited = 1;
7406
            }
7407
        }
7408
        for (int i = 0; i < b->b_iused; i++) {
  Branch (7408:25): [True: 70.4k, False: 19.1k]
7409
            struct instr *instr = &b->b_instr[i];
7410
            if (is_jump(instr)) {
  Branch (7410:17): [True: 6.58k, False: 63.8k]
7411
                assert(i == b->b_iused-1);
7412
                basicblock *target = b->b_instr[i].i_target;
7413
                if (!target->b_warm && !target->b_visited) {
  Branch (7413:21): [True: 4.72k, False: 1.86k]
  Branch (7413:40): [True: 4.33k, False: 389]
7414
                    *sp++ = target;
7415
                    target->b_visited = 1;
7416
                }
7417
            }
7418
        }
7419
    }
7420
    PyMem_Free(stack);
7421
    return 0;
7422
}
7423
7424
static int
7425
push_cold_blocks_to_end(basicblock *entryblock, int code_flags) {
7426
    if (entryblock->b_next == NULL) {
  Branch (7426:9): [True: 63.0k, False: 9.77k]
7427
        /* single basicblock, no need to reorder */
7428
        return 0;
7429
    }
7430
    if (mark_cold(entryblock) < 0) {
  Branch (7430:9): [True: 0, False: 9.77k]
7431
        return -1;
7432
    }
7433
7434
    /* If we have a cold block with fallthrough to a warm block, add */
7435
    /* an explicit jump instead of fallthrough */
7436
    for (basicblock *b = entryblock; b != NULL; b = b->b_next) {
  Branch (7436:38): [True: 505k, False: 9.77k]
7437
        if (b->b_cold && BB_HAS_FALLTHROUGH(b) && b->b_next && b->b_next->b_warm) {
  Branch (7437:13): [True: 19.1k, False: 486k]
  Branch (7437:51): [True: 7.27k, False: 0]
  Branch (7437:64): [True: 6, False: 7.26k]
7438
            basicblock *explicit_jump = basicblock_new_b_list_successor(b);
7439
            if (explicit_jump == NULL) {
  Branch (7439:17): [True: 0, False: 6]
7440
                return -1;
7441
            }
7442
            basicblock_addop(explicit_jump, JUMP, 0, b->b_next, &NO_LOCATION);
7443
7444
            explicit_jump->b_cold = 1;
7445
            explicit_jump->b_next = b->b_next;
7446
            b->b_next = explicit_jump;
7447
        }
7448
    }
7449
7450
    assert(!entryblock->b_cold);  /* First block can't be cold */
7451
    basicblock *cold_blocks = NULL;
7452
    basicblock *cold_blocks_tail = NULL;
7453
7454
    basicblock *b = entryblock;
7455
    while(b->b_next) {
  Branch (7455:11): [True: 12.2k, False: 1.04k]
7456
        assert(!b->b_cold);
7457
        while (b->b_next && !b->b_next->b_cold) {
  Branch (7457:16): [True: 480k, False: 8.73k]
  Branch (7457:29): [True: 476k, False: 3.48k]
7458
            b = b->b_next;
7459
        }
7460
        if (b->b_next == NULL) {
  Branch (7460:13): [True: 8.73k, False: 3.48k]
7461
            /* no more cold blocks */
7462
            break;
7463
        }
7464
7465
        /* b->b_next is the beginning of a cold streak */
7466
        assert(!b->b_cold && b->b_next->b_cold);
7467
7468
        basicblock *b_end = b->b_next;
7469
        while (b_end->b_next && b_end->b_next->b_cold) {
  Branch (7469:16): [True: 18.1k, False: 1.04k]
  Branch (7469:33): [True: 15.6k, False: 2.43k]
7470
            b_end = b_end->b_next;
7471
        }
7472
7473
        /* b_end is the end of the cold streak */
7474
        assert(b_end && b_end->b_cold);
7475
        assert(b_end->b_next == NULL || !b_end->b_next->b_cold);
7476
7477
        if (cold_blocks == NULL) {
  Branch (7477:13): [True: 2.30k, False: 1.17k]
7478
            cold_blocks = b->b_next;
7479
        }
7480
        else {
7481
            cold_blocks_tail->b_next = b->b_next;
7482
        }
7483
        cold_blocks_tail = b_end;
7484
        b->b_next = b_end->b_next;
7485
        b_end->b_next = NULL;
7486
    }
7487
    assert(b != NULL && b->b_next == NULL);
7488
    b->b_next = cold_blocks;
7489
    return 0;
7490
}
7491
7492
static void
7493
convert_exception_handlers_to_nops(basicblock *entryblock) {
7494
    for (basicblock *b = entryblock; b != NULL; b = b->b_next) {
  Branch (7494:38): [True: 568k, False: 72.8k]
7495
        for (int i = 0; i < b->b_iused; i++) {
  Branch (7495:25): [True: 2.60M, False: 568k]
7496
            struct instr *instr = &b->b_instr[i];
7497
            if (is_block_push(instr) || instr->i_opcode == POP_BLOCK) {
  Branch (7497:17): [True: 7.72k, False: 2.59M]
  Branch (7497:41): [True: 7.32k, False: 2.58M]
7498
                instr->i_opcode = NOP;
7499
            }
7500
        }
7501
    }
7502
}
7503
7504
static inline void
7505
write_except_byte(struct assembler *a, int byte) {
7506
    unsigned char *p = (unsigned char *) PyBytes_AS_STRING(a->a_except_table);
7507
    p[a->a_except_table_off++] = byte;
7508
}
7509
7510
#define CONTINUATION_BIT 64
7511
7512
static void
7513
assemble_emit_exception_table_item(struct assembler *a, int value, int msb)
7514
{
7515
    assert ((msb | 128) == 128);
7516
    assert(value >= 0 && value < (1 << 30));
7517
    if (value >= 1 << 24) {
  Branch (7517:9): [True: 0, False: 43.8k]
7518
        write_except_byte(a, (value >> 24) | CONTINUATION_BIT | msb);
7519
        msb = 0;
7520
    }
7521
    if (value >= 1 << 18) {
  Branch (7521:9): [True: 0, False: 43.8k]
7522
        write_except_byte(a, ((value >> 18)&0x3f) | CONTINUATION_BIT | msb);
7523
        msb = 0;
7524
    }
7525
    if (value >= 1 << 12) {
  Branch (7525:9): [True: 21, False: 43.8k]
7526
        write_except_byte(a, ((value >> 12)&0x3f) | CONTINUATION_BIT | msb);
7527
        msb = 0;
7528
    }
7529
    if (value >= 1 << 6) {
  Branch (7529:9): [True: 18.6k, False: 25.2k]
7530
        write_except_byte(a, ((value >> 6)&0x3f) | CONTINUATION_BIT | msb);
7531
        msb = 0;
7532
    }
7533
    write_except_byte(a, (value&0x3f) | msb);
7534
}
7535
7536
/* See Objects/exception_handling_notes.txt for details of layout */
7537
#define MAX_SIZE_OF_ENTRY 20
7538
7539
static int
7540
assemble_emit_exception_table_entry(struct assembler *a, int start, int end, basicblock *handler)
7541
{
7542
    Py_ssize_t len = PyBytes_GET_SIZE(a->a_except_table);
7543
    if (a->a_except_table_off + MAX_SIZE_OF_ENTRY >= len) {
  Branch (7543:9): [True: 3.69k, False: 7.27k]
7544
        if (_PyBytes_Resize(&a->a_except_table, len * 2) < 0)
  Branch (7544:13): [True: 0, False: 3.69k]
7545
            return 0;
7546
    }
7547
    int size = end-start;
7548
    assert(end > start);
7549
    int target = handler->b_offset;
7550
    int depth = handler->b_startdepth - 1;
7551
    if (handler->b_preserve_lasti) {
  Branch (7551:9): [True: 7.54k, False: 3.42k]
7552
        depth -= 1;
7553
    }
7554
    assert(depth >= 0);
7555
    int depth_lasti = (depth<<1) | handler->b_preserve_lasti;
7556
    assemble_emit_exception_table_item(a, start, (1<<7));
7557
    assemble_emit_exception_table_item(a, size, 0);
7558
    assemble_emit_exception_table_item(a, target, 0);
7559
    assemble_emit_exception_table_item(a, depth_lasti, 0);
7560
    return 1;
7561
}
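/*
 * Rough standalone sketch of the variable-length encoding used by
 * assemble_emit_exception_table_item() above (hypothetical helper names,
 * writing to a plain buffer instead of a->a_except_table): a value is
 * emitted as 6-bit chunks, most significant first, bit 6 (64) marks
 * "another byte follows", and bit 7 (128) is set only on the first byte
 * of an entry.
 */
#include <assert.h>
#include <stdio.h>

static int
encode_item(unsigned char *p, unsigned int value, int msb)
{
    unsigned char chunks[5];
    int nchunks = 0, n = 0;
    do {
        chunks[nchunks++] = value & 0x3f;
        value >>= 6;
    } while (value);
    while (nchunks > 1) {
        p[n++] = chunks[--nchunks] | 64 | msb;   /* continuation bit set */
        msb = 0;
    }
    p[n++] = chunks[0] | msb;                    /* last byte: no continuation */
    return n;
}

static unsigned int
decode_item(const unsigned char *p, int *advance)
{
    unsigned int value = 0;
    int i = 0;
    for (;;) {
        value = (value << 6) | (p[i] & 0x3f);
        if ((p[i++] & 64) == 0) {
            break;
        }
    }
    *advance = i;
    return value;
}

int
main(void)
{
    unsigned char buf[16];
    int used, n = encode_item(buf, 1234, 128);   /* 128: start of an entry */
    assert(decode_item(buf, &used) == 1234 && used == n);
    printf("1234 encoded in %d bytes\n", n);     /* prints 2 */
    return 0;
}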
7562
7563
static int
7564
assemble_exception_table(struct assembler *a, basicblock *entryblock)
7565
{
7566
    basicblock *b;
7567
    int ioffset = 0;
7568
    basicblock *handler = NULL;
7569
    int start = -1;
7570
    for (b = entryblock; b != NULL; b = b->b_next) {
  Branch (7570:26): [True: 568k, False: 72.8k]
7571
        ioffset = b->b_offset;
7572
        for (int i = 0; i < b->b_iused; i++) {
  Branch (7572:25): [True: 2.58M, False: 568k]
7573
            struct instr *instr = &b->b_instr[i];
7574
            if (instr->i_except != handler) {
  Branch (7574:17): [True: 19.5k, False: 2.57M]
7575
                if (handler != NULL) {
  Branch (7575:21): [True: 10.9k, False: 8.62k]
7576
                    RETURN_IF_FALSE(assemble_emit_exception_table_entry(a, start, ioffset, handler));
7577
                }
7578
                start = ioffset;
7579
                handler = instr->i_except;
7580
            }
7581
            ioffset += instr_size(instr);
7582
        }
7583
    }
7584
    if (handler != NULL) {
  Branch (7584:9): [True: 0, False: 72.8k]
7585
        RETURN_IF_FALSE(assemble_emit_exception_table_entry(a, start, ioffset, handler));
7586
    }
7587
    return 1;
7588
}
7589
7590
/* Code location emitting code. See locations.md for a description of the format. */
7591
7592
#define MSB 0x80
7593
7594
static void
7595
write_location_byte(struct assembler* a, int val)
7596
{
7597
    PyBytes_AS_STRING(a->a_linetable)[a->a_location_off] = val&255;
7598
    a->a_location_off++;
7599
}
7600
7601
7602
static uint8_t *
7603
location_pointer(struct assembler* a)
7604
{
7605
    return (uint8_t *)PyBytes_AS_STRING(a->a_linetable) +
7606
        a->a_location_off;
7607
}
7608
7609
static void
7610
write_location_first_byte(struct assembler* a, int code, int length)
7611
{
7612
    a->a_location_off += write_location_entry_start(
7613
        location_pointer(a), code, length);
7614
}
7615
7616
static void
7617
write_location_varint(struct assembler* a, unsigned int val)
7618
{
7619
    uint8_t *ptr = location_pointer(a);
7620
    a->a_location_off += write_varint(ptr, val);
7621
}
7622
7623
7624
static void
7625
write_location_signed_varint(struct assembler* a, int val)
7626
{
7627
    uint8_t *ptr = location_pointer(a);
7628
    a->a_location_off += write_signed_varint(ptr, val);
7629
}
7630
7631
static void
7632
write_location_info_short_form(struct assembler* a, int length, int column, int end_column)
7633
{
7634
    assert(length > 0 &&  length <= 8);
7635
    int column_low_bits = column & 7;
7636
    int column_group = column >> 3;
7637
    assert(column < 80);
7638
    assert(end_column >= column);
7639
    assert(end_column - column < 16);
7640
    write_location_first_byte(a, PY_CODE_LOCATION_INFO_SHORT0 + column_group, length);
7641
    write_location_byte(a, (column_low_bits << 4) | (end_column - column));
7642
}
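/*
 * Sketch of what the short form above packs into its second byte (the
 * column group, column >> 3, travels in the first byte written by
 * write_location_entry_start(), which is defined elsewhere).  Standalone
 * round-trip check with arbitrary example values:
 */
#include <assert.h>

int
main(void)
{
    int column = 37, end_column = 42;                        /* column < 80 */
    int column_group = column >> 3;                          /* for the first byte */
    unsigned char second = ((column & 7) << 4) | (end_column - column);

    int dec_column = column_group * 8 + (second >> 4);
    int dec_end_column = dec_column + (second & 15);
    assert(dec_column == column && dec_end_column == end_column);
    return 0;
}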
7643
7644
static void
7645
write_location_info_oneline_form(struct assembler* a, int length, int line_delta, int column, int end_column)
7646
{
7647
    assert(length > 0 &&  length <= 8);
7648
    assert(line_delta >= 0 && line_delta < 3);
7649
    assert(column < 128);
7650
    assert(end_column < 128);
7651
    write_location_first_byte(a, PY_CODE_LOCATION_INFO_ONE_LINE0 + line_delta, length);
7652
    write_location_byte(a, column);
7653
    write_location_byte(a, end_column);
7654
}
7655
7656
static void
7657
write_location_info_long_form(struct assembler* a, struct instr* i, int length)
7658
{
7659
    assert(length > 0 &&  length <= 8);
7660
    write_location_first_byte(a, PY_CODE_LOCATION_INFO_LONG, length);
7661
    write_location_signed_varint(a, i->i_loc.lineno - a->a_lineno);
7662
    assert(i->i_loc.end_lineno >= i->i_loc.lineno);
7663
    write_location_varint(a, i->i_loc.end_lineno - i->i_loc.lineno);
7664
    write_location_varint(a, i->i_loc.col_offset + 1);
7665
    write_location_varint(a, i->i_loc.end_col_offset + 1);
7666
}
7667
7668
static void
7669
write_location_info_none(struct assembler* a, int length)
7670
{
7671
    write_location_first_byte(a, PY_CODE_LOCATION_INFO_NONE, length);
7672
}
7673
7674
static void
7675
write_location_info_no_column(struct assembler* a, int length, int line_delta)
7676
{
7677
    write_location_first_byte(a, PY_CODE_LOCATION_INFO_NO_COLUMNS, length);
7678
    write_location_signed_varint(a, line_delta);
7679
}
7680
7681
#define THEORETICAL_MAX_ENTRY_SIZE 25 /* 1 + 6 + 6 + 6 + 6 */
7682
7683
static int
7684
write_location_info_entry(struct assembler* a, struct instr* i, int isize)
7685
{
7686
    Py_ssize_t len = PyBytes_GET_SIZE(a->a_linetable);
7687
    if (a->a_location_off + THEORETICAL_MAX_ENTRY_SIZE >= len) {
  Branch (7687:9): [True: 92.7k, False: 2.56M]
7688
        assert(len > THEORETICAL_MAX_ENTRY_SIZE);
7689
        if (_PyBytes_Resize(&a->a_linetable, len*2) < 0) {
  Branch (7689:13): [True: 0, False: 92.7k]
7690
            return 0;
7691
        }
7692
    }
7693
    if (i->i_loc.lineno < 0) {
  Branch (7693:9): [True: 18.9k, False: 2.63M]
7694
        write_location_info_none(a, isize);
7695
        return 1;
7696
    }
7697
    int line_delta = i->i_loc.lineno - a->a_lineno;
7698
    int column = i->i_loc.col_offset;
7699
    int end_column = i->i_loc.end_col_offset;
7700
    assert(column >= -1);
7701
    assert(end_column >= -1);
7702
    if (column < 0 || end_column < 0) {
  Branch (7702:9): [True: 2.70k, False: 2.63M]
  Branch (7702:23): [True: 0, False: 2.63M]
7703
        if (i->i_loc.end_lineno == i->i_loc.lineno || i->i_loc.end_lineno == -1) {
  Branch (7703:13): [True: 2.67k, False: 32]
  Branch (7703:55): [True: 32, False: 0]
7704
            write_location_info_no_column(a, isize, line_delta);
7705
            a->a_lineno = i->i_loc.lineno;
7706
            return 1;
7707
        }
7708
    }
7709
    else if (i->i_loc.end_lineno == i->i_loc.lineno) {
  Branch (7709:14): [True: 2.41M, False: 222k]
7710
        if (line_delta == 0 && column < 80 && end_column - column < 16 && end_column >= column) {
  Branch (7710:13): [True: 2.02M, False: 389k]
  Branch (7710:32): [True: 1.74M, False: 274k]
  Branch (7710:47): [True: 1.10M, False: 640k]
  Branch (7710:75): [True: 1.10M, False: 0]
7711
            write_location_info_short_form(a, isize, column, end_column);
7712
            return 1;
7713
        }
7714
        if (line_delta >= 0 && line_delta < 3 && column < 128 && end_column < 128) {
  Branch (7714:13): [True: 1.29M, False: 3.93k]
  Branch (7714:32): [True: 1.27M, False: 24.9k]
  Branch (7714:50): [True: 1.00M, False: 270k]
  Branch (7714:66): [True: 608k, False: 395k]
7715
            write_location_info_oneline_form(a, isize, line_delta, column, end_column);
7716
            a->a_lineno = i->i_loc.lineno;
7717
            return 1;
7718
        }
7719
    }
7720
    write_location_info_long_form(a, i, isize);
7721
    a->a_lineno = i->i_loc.lineno;
7722
    return 1;
7723
}
7724
7725
static int
7726
assemble_emit_location(struct assembler* a, struct instr* i)
7727
{
7728
    int isize = instr_size(i);
7729
    while (isize > 8) {
  Branch (7729:12): [True: 66.6k, False: 2.58M]
7730
        if (!write_location_info_entry(a, i, 8)) {
  Branch (7730:13): [True: 0, False: 66.6k]
7731
            return 0;
7732
        }
7733
        isize -= 8;
7734
    }
7735
    return write_location_info_entry(a, i, isize);
7736
}
7737
7738
/* assemble_emit()
7739
   Extend the bytecode with a new instruction.
7740
   Update lnotab if necessary.
7741
*/
7742
7743
static int
7744
assemble_emit(struct assembler *a, struct instr *i)
7745
{
7746
    Py_ssize_t len = PyBytes_GET_SIZE(a->a_bytecode);
7747
    _Py_CODEUNIT *code;
7748
7749
    int size = instr_size(i);
7750
    if (a->a_offset + size >= len / (int)sizeof(_Py_CODEUNIT)) {
  Branch (7750:9): [True: 13.3k, False: 2.57M]
7751
        if (len > PY_SSIZE_T_MAX / 2)
  Branch (7751:13): [True: 0, False: 13.3k]
7752
            return 0;
7753
        if (_PyBytes_Resize(&a->a_bytecode, len * 2) < 0)
  Branch (7753:13): [True: 0, False: 13.3k]
7754
            return 0;
7755
    }
7756
    code = (_Py_CODEUNIT *)PyBytes_AS_STRING(a->a_bytecode) + a->a_offset;
7757
    a->a_offset += size;
7758
    write_instr(code, i, size);
7759
    return 1;
7760
}
7761
7762
static void
7763
normalize_jumps(basicblock *entryblock)
7764
{
7765
    for (basicblock *b = entryblock; b != NULL; b = b->b_next) {
  Branch (7765:38): [True: 568k, False: 72.8k]
7766
        b->b_visited = 0;
7767
    }
7768
    for (basicblock *b = entryblock; b != NULL; b = b->b_next) {
  Branch (7768:38): [True: 568k, False: 72.8k]
7769
        b->b_visited = 1;
7770
        if (b->b_iused == 0) {
  Branch (7770:13): [True: 244, False: 568k]
7771
            continue;
7772
        }
7773
        struct instr *last = &b->b_instr[b->b_iused-1];
7774
        assert(!IS_ASSEMBLER_OPCODE(last->i_opcode));
7775
        if (is_jump(last)) {
  Branch (7775:13): [True: 250k, False: 317k]
7776
            bool is_forward = last->i_target->b_visited == 0;
7777
            switch(last->i_opcode) {
  Branch (7777:20): [True: 4.50k, False: 246k]
7778
                case JUMP:
  Branch (7778:17): [True: 12.3k, False: 238k]
7779
                    last->i_opcode = is_forward ? JUMP_FORWARD : JUMP_BACKWARD;
  Branch (7779:38): [True: 5.25k, False: 7.06k]
7780
                    break;
7781
                case JUMP_NO_INTERRUPT:
  Branch (7781:17): [True: 369, False: 250k]
7782
                    last->i_opcode = is_forward ?
  Branch (7782:38): [True: 0, False: 369]
7783
                        JUMP_FORWARD : JUMP_BACKWARD_NO_INTERRUPT;
7784
                    break;
7785
                case POP_JUMP_IF_NOT_NONE:
  Branch (7785:17): [True: 1.72k, False: 249k]
7786
                    last->i_opcode = is_forward ?
  Branch (7786:38): [True: 1.69k, False: 30]
7787
                        POP_JUMP_FORWARD_IF_NOT_NONE : POP_JUMP_BACKWARD_IF_NOT_NONE;
7788
                    break;
7789
                case POP_JUMP_IF_NONE:
  Branch (7789:17): [True: 1.64k, False: 249k]
7790
                    last->i_opcode = is_forward ?
  Branch (7790:38): [True: 1.57k, False: 63]
7791
                        POP_JUMP_FORWARD_IF_NONE : POP_JUMP_BACKWARD_IF_NONE;
7792
                    break;
7793
                case POP_JUMP_IF_FALSE:
  Branch (7793:17): [True: 222k, False: 28.3k]
7794
                    last->i_opcode = is_forward ?
  Branch (7794:38): [True: 221k, False: 1.34k]
7795
                        POP_JUMP_FORWARD_IF_FALSE : POP_JUMP_BACKWARD_IF_FALSE;
7796
                    break;
7797
                case POP_JUMP_IF_TRUE:
  Branch (7797:17): [True: 6.06k, False: 244k]
7798
                    last->i_opcode = is_forward ?
  Branch (7798:38): [True: 5.55k, False: 516]
7799
                        POP_JUMP_FORWARD_IF_TRUE : POP_JUMP_BACKWARD_IF_TRUE;
7800
                    break;
7801
                case JUMP_IF_TRUE_OR_POP:
  Branch (7801:17): [True: 991, False: 249k]
7802
                case JUMP_IF_FALSE_OR_POP:
  Branch (7802:17): [True: 737, False: 250k]
7803
                    if (!is_forward) {
  Branch (7803:25): [True: 0, False: 1.72k]
7804
                        /* As far as we can tell, the compiler never emits
7805
                         * these jumps with a backwards target. If/when this
7806
                         * exception is raised, we have found a use case for
7807
                         * a backwards version of this jump (or to replace
7808
                         * it with the sequence (COPY 1, POP_JUMP_IF_T/F, POP)
7809
                         */
7810
                        PyErr_Format(PyExc_SystemError,
7811
                            "unexpected %s jumping backwards",
7812
                            last->i_opcode == JUMP_IF_TRUE_OR_POP ?
  Branch (7812:29): [True: 0, False: 0]
7813
                                "JUMP_IF_TRUE_OR_POP" : "JUMP_IF_FALSE_OR_POP");
7814
                    }
7815
                    break;
7816
            }
7817
        }
7818
    }
7819
}
7820
7821
static void
7822
assemble_jump_offsets(basicblock *entryblock)
7823
{
7824
    int bsize, totsize, extended_arg_recompile;
7825
7826
    /* Compute the size of each block and fixup jump args.
7827
       Replace block pointer with position in bytecode. */
7828
    do {
7829
        totsize = 0;
7830
        for (basicblock *b = entryblock; b != NULL; b = b->b_next) {
  Branch (7830:42): [True: 595k, False: 73.3k]
7831
            bsize = blocksize(b);
7832
            b->b_offset = totsize;
7833
            totsize += bsize;
7834
        }
7835
        extended_arg_recompile = 0;
7836
        for (basicblock *b = entryblock; b != NULL; b = b->b_next) {
  Branch (7836:42): [True: 595k, False: 73.3k]
7837
            bsize = b->b_offset;
7838
            for (int i = 0; i < b->b_iused; i++) {
  Branch (7838:29): [True: 2.80M, False: 595k]
7839
                struct instr *instr = &b->b_instr[i];
7840
                int isize = instr_size(instr);
7841
                /* Relative jumps are computed relative to
7842
                   the instruction pointer after fetching
7843
                   the jump instruction.
7844
                */
7845
                bsize += isize;
7846
                if (is_jump(instr)) {
  Branch (7846:21): [True: 266k, False: 2.54M]
7847
                    instr->i_oparg = instr->i_target->b_offset;
7848
                    if (is_relative_jump(instr)) {
  Branch (7848:25): [True: 266k, False: 0]
7849
                        if (instr->i_oparg < bsize) {
  Branch (7849:29): [True: 12.1k, False: 254k]
7850
                            assert(IS_BACKWARDS_JUMP_OPCODE(instr->i_opcode));
7851
                            instr->i_oparg = bsize - instr->i_oparg;
7852
                        }
7853
                        else {
7854
                            assert(!IS_BACKWARDS_JUMP_OPCODE(instr->i_opcode));
7855
                            instr->i_oparg -= bsize;
7856
                        }
7857
                    }
7858
                    else {
7859
                        assert(!IS_BACKWARDS_JUMP_OPCODE(instr->i_opcode));
7860
                    }
7861
                    if (instr_size(instr) != isize) {
  Branch (7861:25): [True: 2.36k, False: 264k]
7862
                        extended_arg_recompile = 1;
7863
                    }
7864
                }
7865
            }
7866
        }
7867
7868
    /* XXX: This is an awful hack that could hurt performance, but
7869
        on the bright side it should work until we come up
7870
        with a better solution.
7871
7872
        The issue is that in the first loop blocksize() is called
7873
        which calls instr_size() which requires i_oparg be set
7874
        appropriately. There is a bootstrap problem because
7875
        i_oparg is calculated in the second loop above.
7876
7877
        So we loop until we stop seeing new EXTENDED_ARGs.
7878
        The only EXTENDED_ARGs that could be popping up are
7879
        ones in jump instructions.  So this should converge
7880
        fairly quickly.
7881
    */
7882
    } while (extended_arg_recompile);
  Branch (7882:14): [True: 458, False: 72.8k]
7883
}
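/*
 * Toy sketch of the size/oparg feedback that the comment above loops to
 * resolve, under the assumption (as elsewhere in this file) that an
 * instruction needs one extra code unit per EXTENDED_ARG prefix, i.e.
 * units = 1 + (oparg > 0xFF) + (oparg > 0xFFFF) + (oparg > 0xFFFFFF).
 * A backward jump's oparg is a distance that includes the jump itself,
 * so growing the jump can grow its own oparg.
 */
#include <stdio.h>

static int
units_for_oparg(int oparg)
{
    return 1 + (oparg > 0xFF) + (oparg > 0xFFFF) + (oparg > 0xFFFFFF);
}

int
main(void)
{
    int gap = 255;      /* code units between the target and the jump */
    int units = 1;      /* first guess: no EXTENDED_ARG needed */
    for (;;) {
        int oparg = gap + units;             /* distance includes the jump */
        int needed = units_for_oparg(oparg);
        if (needed == units) {
            break;                           /* sizes stopped changing */
        }
        units = needed;                      /* recompute, like the do/while above */
    }
    printf("backward jump needs %d code units\n", units);   /* prints 2 */
    return 0;
}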
7884
7885
7886
// Ensure each basicblock is only put onto the stack once.
7887
#define MAYBE_PUSH(B) do {                          \
7888
        if ((B)->b_visited == 0) {                  \
7889
            *(*stack_top)++ = (B);                  \
7890
            (B)->b_visited = 1;                     \
7891
        }                                           \
7892
    } while (0)
7893
7894
static void
7895
scan_block_for_local(int target, basicblock *b, bool unsafe_to_start,
7896
                     basicblock ***stack_top)
7897
{
7898
    bool unsafe = unsafe_to_start;
7899
    for (int i = 0; i < b->b_iused; i++) {
  Branch (7899:21): [True: 57.0M, False: 1.00M]
7900
        struct instr *instr = &b->b_instr[i];
7901
        assert(instr->i_opcode != EXTENDED_ARG);
7902
        assert(instr->i_opcode != EXTENDED_ARG_QUICK);
7903
        assert(instr->i_opcode != LOAD_FAST__LOAD_FAST);
7904
        assert(instr->i_opcode != STORE_FAST__LOAD_FAST);
7905
        assert(instr->i_opcode != LOAD_CONST__LOAD_FAST);
7906
        assert(instr->i_opcode != STORE_FAST__STORE_FAST);
7907
        assert(instr->i_opcode != LOAD_FAST__LOAD_CONST);
7908
        if (unsafe && instr->i_except != NULL) {
  Branch (7908:13): [True: 1.65M, False: 55.3M]
  Branch (7908:23): [True: 193k, False: 1.45M]
7909
            MAYBE_PUSH(instr->i_except);
7910
        }
7911
        if (instr->i_oparg != target) {
  Branch (7911:13): [True: 56.3M, False: 644k]
7912
            continue;
7913
        }
7914
        switch (instr->i_opcode) {
  Branch (7914:17): [True: 391k, False: 252k]
7915
            case LOAD_FAST_CHECK:
  Branch (7915:13): [True: 0, False: 644k]
7916
                // if this doesn't raise, then var is defined
7917
                unsafe = false;
7918
                break;
7919
            case LOAD_FAST:
  Branch (7919:13): [True: 184k, False: 459k]
7920
                if (unsafe) {
  Branch (7920:21): [True: 679, False: 184k]
7921
                    instr->i_opcode = LOAD_FAST_CHECK;
7922
                }
7923
                unsafe = false;
7924
                break;
7925
            case STORE_FAST:
  Branch (7925:13): [True: 67.1k, False: 577k]
7926
                unsafe = false;
7927
                break;
7928
            case DELETE_FAST:
  Branch (7928:13): [True: 680, False: 643k]
7929
                unsafe = true;
7930
                break;
7931
        }
7932
    }
7933
    if (unsafe) {
  Branch (7933:9): [True: 250k, False: 757k]
7934
        // unsafe at end of this block,
7935
        // so unsafe at start of next blocks
7936
        if (b->b_next && BB_HAS_FALLTHROUGH(b)) {
  Branch (7936:13): [True: 239k, False: 10.1k]
7937
            MAYBE_PUSH(b->b_next);
7938
        }
7939
        if (b->b_iused > 0) {
  Branch (7939:13): [True: 249k, False: 392]
7940
            struct instr *last = &b->b_instr[b->b_iused-1];
7941
            if (is_jump(last)) {
  Branch (7941:17): [True: 139k, False: 109k]
7942
                assert(last->i_target != NULL);
7943
                MAYBE_PUSH(last->i_target);
7944
            }
7945
        }
7946
    }
7947
}
7948
#undef MAYBE_PUSH
7949
7950
static int
7951
add_checks_for_loads_of_unknown_variables(basicblock *entryblock,
7952
                                          struct compiler *c)
7953
{
7954
    basicblock **stack = make_cfg_traversal_stack(entryblock);
7955
    if (stack == NULL) {
  Branch (7955:9): [True: 0, False: 72.8k]
7956
        return -1;
7957
    }
7958
    Py_ssize_t nparams = PyList_GET_SIZE(c->u->u_ste->ste_varnames);
7959
    int nlocals = (int)PyDict_GET_SIZE(c->u->u_varnames);
7960
    for (int target = 0; target < nlocals; target++) {
  Branch (7960:26): [True: 68.9k, False: 72.8k]
7961
        for (basicblock *b = entryblock; b != NULL; b = b->b_next) {
  Branch (7961:42): [True: 730k, False: 68.9k]
7962
            b->b_visited = 0;
7963
        }
7964
        basicblock **stack_top = stack;
7965
7966
        // First pass: find the relevant DFS starting points:
7967
        // the places where "being uninitialized" originates,
7968
        // which are the entry block and any DELETE_FAST statements.
7969
        if (target >= nparams) {
  Branch (7969:13): [True: 23.1k, False: 45.8k]
7970
            // only non-parameter locals start out uninitialized.
7971
            *(stack_top++) = entryblock;
7972
            entryblock->b_visited = 1;
7973
        }
7974
        for (basicblock *b = entryblock; b != NULL; b = b->b_next) {  [730k]
  Branch (7974:42): [True: 730k, False: 68.9k]
7975
            scan_block_for_local(target, b, false, &stack_top);
7976
        }
7977
7978
        // Second pass: Depth-first search to propagate uncertainty
7979
        while (stack_top > stack) {
  Branch (7979:16): [True: 276k, False: 68.9k]
7980
            basicblock *b = *--stack_top;
7981
            scan_block_for_local(target, b, true, &stack_top);
7982
        }
7983
    }
7984
    PyMem_Free(stack);
7985
    return 0;
7986
}
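The per-variable pass above is a small worklist propagation over the CFG: seed the stack with every place the local can be unbound (the entry block for non-parameters, plus blocks after a DELETE_FAST), then push successors until a store kills the uncertainty. A minimal standalone sketch of that idea, using a hypothetical block_t layout rather than compile.c's basicblock:

#include <stdbool.h>
#include <stdio.h>

#define NBLOCKS 4

typedef struct {
    int succ[2];       /* successor block indices, -1 if absent */
    bool defines;      /* block stores the local before any use */
    bool maybe_unset;  /* computed: local may be unbound on entry */
    bool visited;
} block_t;

static void
propagate_unbound(block_t *blocks, int entry)
{
    int stack[NBLOCKS], top = 0;
    /* Non-parameter locals start out unbound at the entry block. */
    blocks[entry].maybe_unset = true;
    blocks[entry].visited = true;
    stack[top++] = entry;
    while (top > 0) {
        block_t *b = &blocks[stack[--top]];
        if (b->defines) {
            continue;   /* a store kills the uncertainty for successors */
        }
        for (int i = 0; i < 2; i++) {
            int s = b->succ[i];
            if (s >= 0 && !blocks[s].visited) {
                blocks[s].visited = true;
                blocks[s].maybe_unset = true;
                stack[top++] = s;
            }
        }
    }
}

int
main(void)
{
    /* CFG: 0 -> {1, 2}, 1 -> 3, 2 -> 3; only block 1 stores the local,
     * so block 3 remains "maybe unbound" via the path through block 2. */
    block_t blocks[NBLOCKS] = {
        {{ 1,  2}, false, false, false},
        {{ 3, -1}, true,  false, false},
        {{ 3, -1}, false, false, false},
        {{-1, -1}, false, false, false},
    };
    propagate_unbound(blocks, 0);
    for (int i = 0; i < NBLOCKS; i++) {
        printf("block %d: maybe_unset=%d\n", i, blocks[i].maybe_unset);
    }
    return 0;
}

In the real pass, a LOAD_FAST reached while the local may still be unbound is what gets rewritten to LOAD_FAST_CHECK.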
7987
7988
static PyObject *
7989
dict_keys_inorder(PyObject *dict, Py_ssize_t offset)
7990
{
7991
    PyObject *tuple, *k, *v;
7992
    Py_ssize_t i, pos = 0, size = PyDict_GET_SIZE(dict);
7993
7994
    tuple = PyTuple_New(size);
7995
    if (tuple == NULL)
  Branch (7995:9): [True: 0, False: 72.8k]
7996
        return NULL;
7997
    
    while (PyDict_Next(dict, &pos, &k, &v)) {  [72.8k]
  Branch (7997:12): [True: 168k, False: 72.8k]
7998
        i = PyLong_AS_LONG(v);
7999
        Py_INCREF(k);
8000
        assert((i - offset) < size);
8001
        assert((i - offset) >= 0);
8002
        PyTuple_SET_ITEM(tuple, i - offset, k);
8003
    }
8004
    return tuple;
8005
}
8006
8007
static PyObject *
8008
consts_dict_keys_inorder(PyObject *dict)
8009
{
8010
    PyObject *consts, *k, *v;
8011
    Py_ssize_t i, pos = 0, size = PyDict_GET_SIZE(dict);
8012
8013
    consts = PyList_New(size);   /* PyCode_Optimize() requires a list */
8014
    if (consts == NULL)
  Branch (8014:9): [True: 0, False: 72.8k]
8015
        return NULL;
8016
    
    while (PyDict_Next(dict, &pos, &k, &v)) {  [72.8k]
  Branch (8016:12): [True: 230k, False: 72.8k]
8017
        i = PyLong_AS_LONG(v);
8018
        /* The keys of the dictionary can be tuples wrapping a constant.
8019
         * (see compiler_add_o and _PyCode_ConstantKey). In that case
8020
         * the object we want is always second. */
8021
        if (PyTuple_CheckExact(k)) {
8022
            k = PyTuple_GET_ITEM(k, 1);
8023
        }
8024
        Py_INCREF(k);
8025
        assert(i < size);
8026
        assert(i >= 0);
8027
        PyList_SET_ITEM(consts, i, k);
8028
    }
8029
    return consts;
8030
}
8031
8032
static int
8033
compute_code_flags(struct compiler *c)
8034
{
8035
    PySTEntryObject *ste = c->u->u_ste;
8036
    int flags = 0;
8037
    if (ste->ste_type == FunctionBlock) {
  Branch (8037:9): [True: 22.7k, False: 50.1k]
8038
        flags |= CO_NEWLOCALS | CO_OPTIMIZED;
8039
        if (ste->ste_nested)
  Branch (8039:13): [True: 3.55k, False: 19.1k]
8040
            flags |= CO_NESTED;
8041
        if (ste->ste_generator && !ste->ste_coroutine)  [863]
  Branch (8041:13): [True: 863, False: 21.8k]
  Branch (8041:35): [True: 851, False: 12]
8042
            flags |= CO_GENERATOR;
8043
        if (!ste->ste_generator && ste->ste_coroutine)  [21.8k]
  Branch (8043:13): [True: 21.8k, False: 863]
  Branch (8043:36): [True: 454, False: 21.4k]
8044
            flags |= CO_COROUTINE;
8045
        if (ste->ste_generator && ste->ste_coroutine)  [863]
  Branch (8045:13): [True: 863, False: 21.8k]
  Branch (8045:35): [True: 12, False: 851]
8046
            flags |= CO_ASYNC_GENERATOR;
8047
        if (ste->ste_varargs)
  Branch (8047:13): [True: 763, False: 21.9k]
8048
            flags |= CO_VARARGS;
8049
        if (ste->ste_varkeywords)
  Branch (8049:13): [True: 544, False: 22.1k]
8050
            flags |= CO_VARKEYWORDS;
8051
    }
8052
8053
    /* (Only) inherit compilerflags in PyCF_MASK */
8054
    flags |= (c->c_flags->cf_flags & PyCF_MASK);
8055
8056
    if ((IS_TOP_LEVEL_AWAIT(c)) &&
8057
         ste->ste_coroutine &&  [31]
  Branch (8057:10): [True: 20, False: 11]
8058
         !ste->ste_generator) {  [20]
  Branch (8058:10): [True: 20, False: 0]
8059
        flags |= CO_COROUTINE;
8060
    }
8061
8062
    return flags;
8063
}
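For FunctionBlock scopes, the three mutually exclusive checks above amount to this mapping of the symbol-table bits:

    ste_generator  ste_coroutine  flag added
    set            clear          CO_GENERATOR
    clear          set            CO_COROUTINE
    set            set            CO_ASYNC_GENERATOR
    clear          clear          (none)

The IS_TOP_LEVEL_AWAIT() case additionally marks a module body that awaits, but is not a generator, as CO_COROUTINE.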
8064
8065
// Merge *obj* with constant cache.
8066
// Unlike merge_consts_recursive(), this function doesn't work recursively.
8067
static int
8068
merge_const_one(PyObject *const_cache, PyObject **obj)
8069
{
8070
    PyDict_CheckExact(const_cache);
8071
    PyObject *key = _PyCode_ConstantKey(*obj);
8072
    if (key == NULL) {
  Branch (8072:9): [True: 0, False: 439k]
8073
        return 0;
8074
    }
8075
8076
    // t is borrowed reference
8077
    PyObject *t = PyDict_SetDefault(const_cache, key, key);
8078
    Py_DECREF(key);
8079
    if (t == NULL) {
  Branch (8079:9): [True: 0, False: 439k]
8080
        return 0;
8081
    }
8082
    if (t == key) {  // obj is new constant.
  Branch (8082:9): [True: 371k, False: 68.0k]
8083
        return 1;
8084
    }
8085
8086
    if (PyTuple_CheckExact(t)) {
8087
        // t is still borrowed reference
8088
        t = PyTuple_GET_ITEM(t, 1);
8089
    }
8090
8091
    Py_INCREF(t);
8092
    Py_DECREF(*obj);
8093
    *obj = t;
8094
    return 1;
8095
}
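merge_const_one() boils down to the PyDict_SetDefault() interning pattern: the first object registered under a given key becomes canonical, and any later equal object is swapped out for the cached one (the real keys come from _PyCode_ConstantKey() so that e.g. 0, 0.0 and False stay distinct). A minimal standalone sketch of the same pattern with a hypothetical intern_in_cache() helper; keying on the object itself and omitting most error handling for brevity:

#include <Python.h>
#include <stdio.h>

static int
intern_in_cache(PyObject *cache, PyObject **obj)
{
    /* Borrowed reference to whichever object is already cached;
     * inserts *obj itself if the key is new. */
    PyObject *canonical = PyDict_SetDefault(cache, *obj, *obj);
    if (canonical == NULL) {
        return -1;
    }
    if (canonical != *obj) {
        /* Replace the caller's object with its cached twin. */
        Py_INCREF(canonical);
        Py_DECREF(*obj);
        *obj = canonical;
    }
    return 0;
}

int
main(void)
{
    Py_Initialize();
    PyObject *cache = PyDict_New();
    PyObject *a = PyUnicode_FromString("spam");
    PyObject *b = PyUnicode_FromString("spam");  /* equal but distinct */
    intern_in_cache(cache, &a);                  /* error checks omitted */
    intern_in_cache(cache, &b);                  /* b now points at a   */
    printf("same object: %d\n", a == b);
    Py_DECREF(a);
    Py_DECREF(b);
    Py_DECREF(cache);
    return Py_FinalizeEx() < 0 ? 1 : 0;
}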
8096
8097
// This is in codeobject.c.
8098
extern void _Py_set_localsplus_info(int, PyObject *, unsigned char,
8099
                                   PyObject *, PyObject *);
8100
8101
static void
8102
compute_localsplus_info(struct compiler *c, int nlocalsplus,
8103
                        PyObject *names, PyObject *kinds)
8104
{
8105
    PyObject *k, *v;
8106
    Py_ssize_t pos = 0;
8107
    while (PyDict_Next(c->u->u_varnames, &pos, &k, &v)) {
  Branch (8107:12): [True: 68.9k, False: 72.8k]
8108
        int offset = (int)PyLong_AS_LONG(v);
8109
        assert(offset >= 0);
8110
        assert(offset < nlocalsplus);
8111
        // For now we do not distinguish arg kinds.
8112
        _PyLocals_Kind kind = CO_FAST_LOCAL;
8113
        if (PyDict_GetItem(c->u->u_cellvars, k) != NULL) {
  Branch (8113:13): [True: 997, False: 67.9k]
8114
            kind |= CO_FAST_CELL;
8115
        }
8116
        _Py_set_localsplus_info(offset, k, kind, names, kinds);
8117
    }
8118
    int nlocals = (int)PyDict_GET_SIZE(c->u->u_varnames);
8119
8120
    // This counter mirrors the fix done in fix_cell_offsets().
8121
    int numdropped = 0;
8122
    pos = 0;
8123
    while (PyDict_Next(c->u->u_cellvars, &pos, &k, &v)) {
  Branch (8123:12): [True: 1.82k, False: 72.8k]
8124
        if (PyDict_GetItem(c->u->u_varnames, k) != NULL) {
  Branch (8124:13): [True: 997, False: 827]
8125
            // Skip cells that are already covered by locals.
8126
            numdropped += 1;
8127
            continue;
8128
        }
8129
        int offset = (int)PyLong_AS_LONG(v);
8130
        assert(offset >= 0);
8131
        offset += nlocals - numdropped;
8132
        assert(offset < nlocalsplus);
8133
        _Py_set_localsplus_info(offset, k, CO_FAST_CELL, names, kinds);
8134
    }
8135
8136
    pos = 0;
8137
    while (PyDict_Next(c->u->u_freevars, &pos, &k, &v)) {
  Branch (8137:12): [True: 2.47k, False: 72.8k]
8138
        int offset = (int)PyLong_AS_LONG(v);
8139
        assert(offset >= 0);
8140
        offset += nlocals - numdropped;
8141
        assert(offset < nlocalsplus);
8142
        _Py_set_localsplus_info(offset, k, CO_FAST_FREE, names, kinds);
8143
    }
8144
}
8145
8146
static PyCodeObject *
8147
makecode(struct compiler *c, struct assembler *a, PyObject *constslist,
8148
         int maxdepth, int nlocalsplus, int code_flags)
8149
{
8150
    PyCodeObject *co = NULL;
8151
    PyObject *names = NULL;
8152
    PyObject *consts = NULL;
8153
    PyObject *localsplusnames = NULL;
8154
    PyObject *localspluskinds = NULL;
8155
8156
    names = dict_keys_inorder(c->u->u_names, 0);
8157
    if (!names) {
  Branch (8157:9): [True: 0, False: 72.8k]
8158
        goto error;
8159
    }
8160
    if (!merge_const_one(c->c_const_cache, &names)) {
  Branch (8160:9): [True: 0, False: 72.8k]
8161
        goto error;
8162
    }
8163
8164
    consts = PyList_AsTuple(constslist); /* PyCode_New requires a tuple */
8165
    if (consts == NULL) {
  Branch (8165:9): [True: 0, False: 72.8k]
8166
        goto error;
8167
    }
8168
    if (!merge_const_one(c->c_const_cache, &consts)) {
  Branch (8168:9): [True: 0, False: 72.8k]
8169
        goto error;
8170
    }
8171
8172
    assert(c->u->u_posonlyargcount < INT_MAX);
8173
    assert(c->u->u_argcount < INT_MAX);
8174
    assert(c->u->u_kwonlyargcount < INT_MAX);
8175
    int posonlyargcount = (int)c->u->u_posonlyargcount;
8176
    int posorkwargcount = (int)c->u->u_argcount;
8177
    assert(INT_MAX - posonlyargcount - posorkwargcount > 0);
8178
    int kwonlyargcount = (int)c->u->u_kwonlyargcount;
8179
8180
    localsplusnames = PyTuple_New(nlocalsplus);
8181
    if (localsplusnames == NULL) {
  Branch (8181:9): [True: 0, False: 72.8k]
8182
        goto error;
8183
    }
8184
    localspluskinds = PyBytes_FromStringAndSize(NULL, nlocalsplus);
8185
    if (localspluskinds == NULL) {
  Branch (8185:9): [True: 0, False: 72.8k]
8186
        goto error;
8187
    }
8188
    compute_localsplus_info(c, nlocalsplus, localsplusnames, localspluskinds);
8189
8190
    struct _PyCodeConstructor con = {
8191
        .filename = c->c_filename,
8192
        .name = c->u->u_name,
8193
        .qualname = c->u->u_qualname ? c->u->u_qualname : c->u->u_name,  [24.5k, 48.3k]
  Branch (8193:21): [True: 24.5k, False: 48.3k]
8194
        .flags = code_flags,
8195
8196
        .code = a->a_bytecode,
8197
        .firstlineno = c->u->u_firstlineno,
8198
        .linetable = a->a_linetable,
8199
8200
        .consts = consts,
8201
        .names = names,
8202
8203
        .localsplusnames = localsplusnames,
8204
        .localspluskinds = localspluskinds,
8205
8206
        .argcount = posonlyargcount + posorkwargcount,
8207
        .posonlyargcount = posonlyargcount,
8208
        .kwonlyargcount = kwonlyargcount,
8209
8210
        .stacksize = maxdepth,
8211
8212
        .exceptiontable = a->a_except_table,
8213
    };
8214
8215
    if (_PyCode_Validate(&con) < 0) {
  Branch (8215:9): [True: 0, False: 72.8k]
8216
        goto error;
8217
    }
8218
8219
    if (!merge_const_one(c->c_const_cache, &localsplusnames)) {
  Branch (8219:9): [True: 0, False: 72.8k]
8220
        goto error;
8221
    }
8222
    con.localsplusnames = localsplusnames;
8223
8224
    co = _PyCode_New(&con);
8225
    if (co == NULL) {
  Branch (8225:9): [True: 0, False: 72.8k]
8226
        goto error;
8227
    }
8228
8229
 error:
8230
    Py_XDECREF(names);
8231
    Py_XDECREF(consts);
8232
    Py_XDECREF(localsplusnames);
8233
    Py_XDECREF(localspluskinds);
8234
    return co;
8235
}
8236
8237
8238
/* For debugging purposes only */
8239
#if 0
8240
static void
8241
dump_instr(struct instr *i)
8242
{
8243
    const char *jrel = (is_relative_jump(i)) ? "jrel " : "";
8244
    const char *jabs = (is_jump(i) && !is_relative_jump(i))? "jabs " : "";
8245
8246
    char arg[128];
8247
8248
    *arg = '\0';
8249
    if (HAS_ARG(i->i_opcode)) {
8250
        sprintf(arg, "arg: %d ", i->i_oparg);
8251
    }
8252
    if (is_jump(i)) {
8253
        sprintf(arg, "target: %p ", i->i_target);
8254
    }
8255
    if (is_block_push(i)) {
8256
        sprintf(arg, "except_target: %p ", i->i_target);
8257
    }
8258
    fprintf(stderr, "line: %d, opcode: %d %s%s%s\n",
8259
                    i->i_loc.lineno, i->i_opcode, arg, jabs, jrel);
8260
}
8261
8262
static void
8263
dump_basicblock(const basicblock *b)
8264
{
8265
    const char *b_return = basicblock_returns(b) ? "return " : "";
8266
    fprintf(stderr, "[%d %d %d %p] used: %d, depth: %d, offset: %d %s\n",
8267
        b->b_cold, b->b_warm, BB_NO_FALLTHROUGH(b), b, b->b_iused,
8268
        b->b_startdepth, b->b_offset, b_return);
8269
    if (b->b_instr) {
8270
        int i;
8271
        for (i = 0; i < b->b_iused; i++) {
8272
            fprintf(stderr, "  [%02d] ", i);
8273
            dump_instr(b->b_instr + i);
8274
        }
8275
    }
8276
}
8277
#endif
8278
8279
8280
static int
8281
normalize_basic_block(basicblock *bb);
8282
8283
static int
8284
optimize_cfg(basicblock *entryblock, PyObject *consts, PyObject *const_cache);
8285
8286
static int
8287
trim_unused_consts(basicblock *entryblock, PyObject *consts);
8288
8289
/* Duplicates exit BBs, so that line numbers can be propagated to them */
8290
static int
8291
duplicate_exits_without_lineno(basicblock *entryblock);
8292
8293
static int
8294
extend_block(basicblock *bb);
8295
8296
static int *
8297
build_cellfixedoffsets(struct compiler *c)
8298
{
8299
    int nlocals = (int)PyDict_GET_SIZE(c->u->u_varnames);
8300
    int ncellvars = (int)PyDict_GET_SIZE(c->u->u_cellvars);
8301
    int nfreevars = (int)PyDict_GET_SIZE(c->u->u_freevars);
8302
8303
    int noffsets = ncellvars + nfreevars;
8304
    int *fixed = PyMem_New(int, noffsets);
8305
    if (fixed == NULL) {
  Branch (8305:9): [True: 0, False: 72.8k]
8306
        PyErr_NoMemory();
8307
        return NULL;
8308
    }
8309
    
    for (int i = 0; i < noffsets; i++) {  [72.8k, 4.29k]
  Branch (8309:21): [True: 4.29k, False: 72.8k]
8310
        fixed[i] = nlocals + i;
8311
    }
8312
8313
    PyObject *varname, *cellindex;
8314
    Py_ssize_t pos = 0;
8315
    while (PyDict_Next(c->u->u_cellvars, &pos, &varname, &cellindex)) {
  Branch (8315:12): [True: 1.82k, False: 72.8k]
8316
        PyObject *varindex = PyDict_GetItem(c->u->u_varnames, varname);
8317
        if (varindex != NULL) {
  Branch (8317:13): [True: 997, False: 827]
8318
            assert(PyLong_AS_LONG(cellindex) < INT_MAX);
8319
            assert(PyLong_AS_LONG(varindex) < INT_MAX);
8320
            int oldindex = (int)PyLong_AS_LONG(cellindex);
8321
            int argoffset = (int)PyLong_AS_LONG(varindex);
8322
            fixed[oldindex] = argoffset;
8323
        }
8324
    }
8325
8326
    return fixed;
8327
}
8328
8329
static inline int
8330
insert_instruction(basicblock *block, int pos, struct instr *instr) {
8331
    if (basicblock_next_instr(block) < 0) {
  Branch (8331:9): [True: 0, False: 5.87k]
8332
        return -1;
8333
    }
8334
    
    for (int i = block->b_iused-1; i > pos; i--) {  [5.87k, 71.8k]
  Branch (8334:36): [True: 71.8k, False: 5.87k]
8335
        block->b_instr[i] = block->b_instr[i-1];
8336
    }
8337
    block->b_instr[pos] = *instr;
8338
    return 0;
8339
}
8340
8341
static int
8342
insert_prefix_instructions(struct compiler *c, basicblock *entryblock,
8343
                           int *fixed, int nfreevars, int code_flags)
8344
{
8345
    assert(c->u->u_firstlineno > 0);
8346
8347
    /* Add the generator prefix instructions. */
8348
    if (code_flags & (CO_GENERATOR | CO_COROUTINE | CO_ASYNC_GENERATOR)) {
  Branch (8348:9): [True: 1.33k, False: 71.5k]
8349
        struct instr make_gen = {
8350
            .i_opcode = RETURN_GENERATOR,
8351
            .i_oparg = 0,
8352
            .i_loc = LOCATION(c->u->u_firstlineno, c->u->u_firstlineno, -1, -1),
8353
            .i_target = NULL,
8354
        };
8355
        if (insert_instruction(entryblock, 0, &make_gen) < 0) {
  Branch (8355:13): [True: 0, False: 1.33k]
8356
            return -1;
8357
        }
8358
        struct instr pop_top = {
8359
            .i_opcode = POP_TOP,
8360
            .i_oparg = 0,
8361
            .i_loc = NO_LOCATION,
8362
            .i_target = NULL,
8363
        };
8364
        if (insert_instruction(entryblock, 1, &pop_top) < 0) {
  Branch (8364:13): [True: 0, False: 1.33k]
8365
            return -1;
8366
        }
8367
    }
8368
8369
    /* Set up cells for any variable that escapes, to be put in a closure. */
8370
    const int ncellvars = (int)PyDict_GET_SIZE(c->u->u_cellvars);
8371
    if (ncellvars) {
  Branch (8371:9): [True: 929, False: 71.9k]
8372
        // c->u->u_cellvars has the cells out of order so we sort them
8373
        // before adding the MAKE_CELL instructions.  Note that we
8374
        // adjust for arg cells, which come first.
8375
        const int nvars = ncellvars + (int)PyDict_GET_SIZE(c->u->u_varnames);
8376
        int *sorted = PyMem_RawCalloc(nvars, sizeof(int));
8377
        if (sorted == NULL) {
  Branch (8377:13): [True: 0, False: 929]
8378
            PyErr_NoMemory();
8379
            return -1;
8380
        }
8381
        
        for (int i = 0; i < ncellvars; i++) {  [929, 1.82k]
  Branch (8381:25): [True: 1.82k, False: 929]
8382
            sorted[fixed[i]] = i + 1;
8383
        }
8384
        for (int i = 0, ncellsused = 0; ncellsused < ncellvars; i++) {  [4.33k]
  Branch (8384:41): [True: 4.33k, False: 929]
8385
            int oldindex = sorted[i] - 1;
8386
            if (oldindex == -1) {
  Branch (8386:17): [True: 2.50k, False: 1.82k]
8387
                continue;
8388
            }
8389
            struct instr make_cell = {
8390
                .i_opcode = MAKE_CELL,
8391
                // This will get fixed in offset_derefs().
8392
                .i_oparg = oldindex,
8393
                .i_loc = NO_LOCATION,
8394
                .i_target = NULL,
8395
            };
8396
            if (insert_instruction(entryblock, ncellsused, &make_cell) < 0) {
  Branch (8396:17): [True: 0, False: 1.82k]
8397
                return -1;
8398
            }
8399
            ncellsused += 1;
8400
        }
8401
        PyMem_RawFree(sorted);
8402
    }
8403
8404
    if (nfreevars) {
  Branch (8404:9): [True: 1.37k, False: 71.4k]
8405
        struct instr copy_frees = {
8406
            .i_opcode = COPY_FREE_VARS,
8407
            .i_oparg = nfreevars,
8408
            .i_loc = NO_LOCATION,
8409
            .i_target = NULL,
8410
        };
8411
        if (insert_instruction(entryblock, 0, &copy_frees) < 0) {
  Branch (8411:13): [True: 0, False: 1.37k]
8412
            return -1;
8413
        }
8414
8415
    }
8416
8417
    return 0;
8418
}
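Because every later insert_instruction(..., 0, ...) call lands in front of the earlier inserts, a generator (or coroutine) closure that also owns cell variables ends up with an entry-block prefix in this order, illustratively:

    COPY_FREE_VARS nfreevars
    MAKE_CELL ...              (one per cell, in cell-offset order)
    RETURN_GENERATOR
    POP_TOP
    <first original instruction>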
8419
8420
/* Make sure that all returns have a line number, even if early passes
8421
 * have failed to propagate a correct line number.
8422
 * The resulting line number may not be correct according to PEP 626,
8423
 * but should be "good enough", and no worse than in older versions. */
8424
static void
8425
guarantee_lineno_for_exits(basicblock *entryblock, int firstlineno) {
8426
    int lineno = firstlineno;
8427
    assert(lineno > 0);
8428
    for (basicblock *b = entryblock; b != NULL; b = b->b_next) {  [568k]
  Branch (8428:38): [True: 568k, False: 72.8k]
8429
        if (b->b_iused == 0) {
  Branch (8429:13): [True: 0, False: 568k]
8430
            continue;
8431
        }
8432
        struct instr *last = &b->b_instr[b->b_iused-1];
8433
        if (last->i_loc.lineno < 0) {
  Branch (8433:13): [True: 4.31k, False: 564k]
8434
            if (last->i_opcode == RETURN_VALUE) {
  Branch (8434:17): [True: 8, False: 4.30k]
8435
                for (int i = 0; i < b->b_iused; i++) {  [40]
  Branch (8435:33): [True: 40, False: 8]
8436
                    assert(b->b_instr[i].i_loc.lineno < 0);
8437
8438
                    b->b_instr[i].i_loc.lineno = lineno;
8439
                }
8440
            }
8441
        }
8442
        else {
8443
            lineno = last->i_loc.lineno;
8444
        }
8445
    }
8446
}
8447
8448
static int
8449
fix_cell_offsets(struct compiler *c, basicblock *entryblock, int *fixedmap)
8450
{
8451
    int nlocals = (int)PyDict_GET_SIZE(c->u->u_varnames);
8452
    int ncellvars = (int)PyDict_GET_SIZE(c->u->u_cellvars);
8453
    int nfreevars = (int)PyDict_GET_SIZE(c->u->u_freevars);
8454
    int noffsets = ncellvars + nfreevars;
8455
8456
    // First deal with duplicates (arg cells).
8457
    int numdropped = 0;
8458
    for (int i = 0; i < noffsets ; i++) {  [4.29k]
  Branch (8458:21): [True: 4.29k, False: 72.8k]
8459
        if (fixedmap[i] == i + nlocals) {
  Branch (8459:13): [True: 3.29k, False: 997]
8460
            fixedmap[i] -= numdropped;
8461
        }
8462
        else {
8463
            // It was a duplicate (cell/arg).
8464
            numdropped += 1;
8465
        }
8466
    }
8467
8468
    // Then update offsets, either relative to locals or by cell2arg.
8469
    for (basicblock *b = entryblock; b != NULL; b = b->b_next) {  [584k]
  Branch (8469:38): [True: 584k, False: 72.8k]
8470
        for (int i = 0; i < b->b_iused; i++) {  [2.66M]
  Branch (8470:25): [True: 2.66M, False: 584k]
8471
            struct instr *inst = &b->b_instr[i];
8472
            // This is called before extended args are generated.
8473
            assert(inst->i_opcode != EXTENDED_ARG);
8474
            assert(inst->i_opcode != EXTENDED_ARG_QUICK);
8475
            int oldoffset = inst->i_oparg;
8476
            switch(inst->i_opcode) {
  Branch (8476:20): [True: 2.65M, False: 10.9k]
8477
                case MAKE_CELL:
  Branch (8477:17): [True: 1.82k, False: 2.66M]
8478
                case LOAD_CLOSURE:
  Branch (8478:17): [True: 2.61k, False: 2.66M]
8479
                case LOAD_DEREF:
  Branch (8479:17): [True: 5.45k, False: 2.66M]
8480
                case STORE_DEREF:
  Branch (8480:17): [True: 1.02k, False: 2.66M]
8481
                case DELETE_DEREF:
  Branch (8481:17): [True: 0, False: 2.66M]
8482
                case LOAD_CLASSDEREF:
  Branch (8482:17): [True: 5, False: 2.66M]
8483
                    assert(oldoffset >= 0);
8484
                    assert(oldoffset < noffsets);
8485
                    assert(fixedmap[oldoffset] >= 0);
8486
                    inst->i_oparg = fixedmap[oldoffset];
8487
            }
8488
        }
8489
    }
8490
8491
    return numdropped;
8492
}
8493
8494
static void
8495
propagate_line_numbers(basicblock *entryblock);
8496
8497
static void
8498
eliminate_empty_basic_blocks(basicblock *entryblock);
8499
8500
8501
static void
8502
remove_redundant_jumps(basicblock *entryblock) {
8503
    /* If a non-empty block ends with a jump instruction, check if the next
8504
     * non-empty block reached through normal flow control is the target
8505
     * of that jump. If it is, then the jump instruction is redundant and
8506
     * can be deleted.
8507
     */
8508
    int removed = 0;
8509
    for (basicblock *b = entryblock; b != NULL; b = b->b_next) {  [568k]
  Branch (8509:38): [True: 568k, False: 72.8k]
8510
        if (b->b_iused > 0) {
  Branch (8510:13): [True: 568k, False: 0]
8511
            struct instr *b_last_instr = &b->b_instr[b->b_iused - 1];
8512
            assert(!IS_ASSEMBLER_OPCODE(b_last_instr->i_opcode));
8513
            if (b_last_instr->i_opcode == JUMP ||
  Branch (8513:17): [True: 14.6k, False: 554k]
8514
                
                b_last_instr->i_opcode == JUMP_NO_INTERRUPT) {  [554k]
  Branch (8514:17): [True: 369, False: 553k]
8515
                if (b_last_instr->i_target == b->b_next) {
  Branch (8515:21): [True: 2.31k, False: 12.6k]
8516
                    assert(b->b_next->b_iused);
8517
                    b_last_instr->i_opcode = NOP;
8518
                    removed++;
8519
                }
8520
            }
8521
        }
8522
    }
8523
    if (removed) {
  Branch (8523:9): [True: 1.41k, False: 71.4k]
8524
        eliminate_empty_basic_blocks(entryblock);
8525
    }
8526
}
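Illustratively (hypothetical block labels): if block B1 ends with JUMP L2 and the next block in emission order is the one labelled L2, the jump is rewritten to a NOP here, and the subsequent clean_basic_block() pass in assemble() strips it:

    B1: ...; JUMP L2              B1: ...; NOP
    B2 (L2): ...            -->   B2 (L2): ...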
8527
8528
static PyCodeObject *
8529
assemble(struct compiler *c, int addNone)
8530
{
8531
    PyCodeObject *co = NULL;
8532
    PyObject *consts = NULL;
8533
    struct assembler a;
8534
    memset(&a, 0, sizeof(struct assembler));
8535
8536
    int code_flags = compute_code_flags(c);
8537
    if (code_flags < 0) {
  Branch (8537:9): [True: 0, False: 72.8k]
8538
        return NULL;
8539
    }
8540
8541
    /* Make sure every block that falls off the end returns None. */
8542
    if (!basicblock_returns(c->u->u_curblock)) {
  Branch (8542:9): [True: 59.0k, False: 13.7k]
8543
        UNSET_LOC(c);
8544
        if (addNone)
  Branch (8544:13): [True: 23.1k, False: 35.9k]
8545
            
            ADDOP_LOAD_CONST(c, Py_None);  [23.1k]
8546
        ADDOP(c, RETURN_VALUE);
8547
    }
8548
8549
    
    for (basicblock *b = c->u->u_blocks; b != NULL; b = b->b_list) {  [72.8k, 584k]
  Branch (8549:42): [True: 584k, False: 72.8k]
8550
        if (normalize_basic_block(b)) {
  Branch (8550:13): [True: 0, False: 584k]
8551
            return NULL;
8552
        }
8553
    }
8554
8555
    
    for (basicblock *b = c->u->u_blocks; b != NULL; b = b->b_list) {  [72.8k, 584k]
  Branch (8555:42): [True: 584k, False: 72.8k]
8556
        if (extend_block(b)) {
  Branch (8556:13): [True: 0, False: 584k]
8557
            return NULL;
8558
        }
8559
    }
8560
8561
    assert(PyDict_GET_SIZE(c->u->u_varnames) < INT_MAX);
8562
    assert(PyDict_GET_SIZE(c->u->u_cellvars) < INT_MAX);
8563
    assert(PyDict_GET_SIZE(c->u->u_freevars) < INT_MAX);
8564
    int nlocals = (int)PyDict_GET_SIZE(c->u->u_varnames);
8565
    int ncellvars = (int)PyDict_GET_SIZE(c->u->u_cellvars);
8566
    int nfreevars = (int)PyDict_GET_SIZE(c->u->u_freevars);
8567
    assert(INT_MAX - nlocals - ncellvars > 0);
8568
    assert(INT_MAX - nlocals - ncellvars - nfreevars > 0);
8569
    int nlocalsplus = nlocals + ncellvars + nfreevars;
8570
    int *cellfixedoffsets = build_cellfixedoffsets(c);
8571
    if (cellfixedoffsets == NULL) {
  Branch (8571:9): [True: 0, False: 72.8k]
8572
        goto error;
8573
    }
8574
8575
    int nblocks = 0;
8576
    basicblock *entryblock = NULL;
8577
    for (basicblock *b = c->u->u_blocks; b != NULL; b = b->b_list) {  [584k]
  Branch (8577:42): [True: 584k, False: 72.8k]
8578
        nblocks++;
8579
        entryblock = b;
8580
    }
8581
    assert(entryblock != NULL);
8582
    if ((size_t)nblocks > SIZE_MAX / sizeof(basicblock *)) {
  Branch (8582:9): [True: 0, False: 72.8k]
8583
        PyErr_NoMemory();
8584
        goto error;
8585
    }
8586
8587
    /* Set firstlineno if it wasn't explicitly set. */
8588
    if (!c->u->u_firstlineno) {
  Branch (8588:9): [True: 0, False: 72.8k]
8589
        if (entryblock->b_instr && entryblock->b_instr->i_loc.lineno) {
  Branch (8589:13): [True: 0, False: 0]
  Branch (8589:36): [True: 0, False: 0]
8590
            c->u->u_firstlineno = entryblock->b_instr->i_loc.lineno;
8591
        }
8592
        else {
8593
            c->u->u_firstlineno = 1;
8594
        }
8595
    }
8596
8597
    // This must be called before fix_cell_offsets().
8598
    if (insert_prefix_instructions(c, entryblock, cellfixedoffsets, nfreevars, code_flags)) {
  Branch (8598:9): [True: 0, False: 72.8k]
8599
        goto error;
8600
    }
8601
8602
    int numdropped = fix_cell_offsets(c, entryblock, cellfixedoffsets);
8603
    PyMem_Free(cellfixedoffsets);  // At this point we're done with it.
8604
    cellfixedoffsets = NULL;
8605
    if (numdropped < 0) {
  Branch (8605:9): [True: 0, False: 72.8k]
8606
        goto error;
8607
    }
8608
    nlocalsplus -= numdropped;
8609
8610
    consts = consts_dict_keys_inorder(c->u->u_consts);
8611
    if (consts == NULL) {
  Branch (8611:9): [True: 0, False: 72.8k]
8612
        goto error;
8613
    }
8614
8615
    if (optimize_cfg(entryblock, consts, c->c_const_cache)) {
  Branch (8615:9): [True: 0, False: 72.8k]
8616
        goto error;
8617
    }
8618
    if (duplicate_exits_without_lineno(entryblock)) {
  Branch (8618:9): [True: 0, False: 72.8k]
8619
        return NULL;
8620
    }
8621
    if (trim_unused_consts(entryblock, consts)) {
  Branch (8621:9): [True: 0, False: 72.8k]
8622
        goto error;
8623
    }
8624
    propagate_line_numbers(entryblock);
8625
    guarantee_lineno_for_exits(entryblock, c->u->u_firstlineno);
8626
8627
    int maxdepth = stackdepth(entryblock, code_flags);
8628
    if (maxdepth < 0) {
  Branch (8628:9): [True: 0, False: 72.8k]
8629
        goto error;
8630
    }
8631
    /* TO DO -- For 3.12, make sure that `maxdepth <= MAX_ALLOWED_STACK_USE` */
8632
8633
    if (label_exception_targets(entryblock)) {
  Branch (8633:9): [True: 0, False: 72.8k]
8634
        goto error;
8635
    }
8636
    convert_exception_handlers_to_nops(entryblock);
8637
8638
    if (push_cold_blocks_to_end(entryblock, code_flags) < 0) {
  Branch (8638:9): [True: 0, False: 72.8k]
8639
        goto error;
8640
    }
8641
8642
    remove_redundant_jumps(entryblock);
8643
    for (basicblock *b = entryblock; b != NULL; b = b->b_next) {  [568k]
  Branch (8643:38): [True: 568k, False: 72.8k]
8644
        clean_basic_block(b);
8645
    }
8646
8647
    /* Order of basic blocks must have been determined by now */
8648
    normalize_jumps(entryblock);
8649
8650
    if (add_checks_for_loads_of_unknown_variables(entryblock, c) < 0) {
  Branch (8650:9): [True: 0, False: 72.8k]
8651
        goto error;
8652
    }
8653
8654
    /* Can't modify the bytecode after computing jump offsets. */
8655
    assemble_jump_offsets(entryblock);
8656
8657
8658
    /* Create assembler */
8659
    if (!assemble_init(&a, c->u->u_firstlineno))
  Branch (8659:9): [True: 0, False: 72.8k]
8660
        goto error;
8661
8662
    /* Emit code. */
8663
    
    for (basicblock *b = entryblock; b != NULL; b = b->b_next) {  [72.8k, 568k]
  Branch (8663:38): [True: 568k, False: 72.8k]
8664
        for (int j = 0; j < b->b_iused; j++)  [2.58M]
  Branch (8664:25): [True: 2.58M, False: 568k]
8665
            if (!assemble_emit(&a, &b->b_instr[j]))
  Branch (8665:17): [True: 0, False: 2.58M]
8666
                goto error;
8667
    }
8668
8669
    /* Emit location info */
8670
    a.a_lineno = c->u->u_firstlineno;
8671
    for (basicblock *b = entryblock; b != NULL; b = b->b_next) {  [568k]
  Branch (8671:38): [True: 568k, False: 72.8k]
8672
        for (int j = 0; j < b->b_iused; j++)  [2.58M]
  Branch (8672:25): [True: 2.58M, False: 568k]
8673
            if (!assemble_emit_location(&a, &b->b_instr[j]))
  Branch (8673:17): [True: 0, False: 2.58M]
8674
                goto error;
8675
    }
8676
8677
    if (!assemble_exception_table(&a, entryblock)) {
  Branch (8677:9): [True: 0, False: 72.8k]
8678
        goto error;
8679
    }
8680
    if (_PyBytes_Resize(&a.a_except_table, a.a_except_table_off) < 0) {
  Branch (8680:9): [True: 0, False: 72.8k]
8681
        goto error;
8682
    }
8683
    if (!merge_const_one(c->c_const_cache, &a.a_except_table)) {
  Branch (8683:9): [True: 0, False: 72.8k]
8684
        goto error;
8685
    }
8686
8687
    if (_PyBytes_Resize(&a.a_linetable, a.a_location_off) < 0) {
  Branch (8687:9): [True: 0, False: 72.8k]
8688
        goto error;
8689
    }
8690
    if (!merge_const_one(c->c_const_cache, &a.a_linetable)) {
  Branch (8690:9): [True: 0, False: 72.8k]
8691
        goto error;
8692
    }
8693
8694
    if (_PyBytes_Resize(&a.a_bytecode, a.a_offset * sizeof(_Py_CODEUNIT)) < 0) {
  Branch (8694:9): [True: 0, False: 72.8k]
8695
        goto error;
8696
    }
8697
    if (!merge_const_one(c->c_const_cache, &a.a_bytecode)) {
  Branch (8697:9): [True: 0, False: 72.8k]
8698
        goto error;
8699
    }
8700
8701
    co = makecode(c, &a, consts, maxdepth, nlocalsplus, code_flags);
8702
 error:
8703
    Py_XDECREF(consts);
8704
    assemble_free(&a);
8705
    if (cellfixedoffsets != NULL) {
  Branch (8705:9): [True: 0, False: 72.8k]
8706
        PyMem_Free(cellfixedoffsets);
8707
    }
8708
    return co;
8709
}
8710
8711
static PyObject*
8712
get_const_value(int opcode, int oparg, PyObject *co_consts)
8713
{
8714
    PyObject *constant = NULL;
8715
    assert(HAS_CONST(opcode));
8716
    if (opcode == LOAD_CONST) {
  Branch (8716:9): [True: 10.0k, False: 0]
8717
        constant = PyList_GET_ITEM(co_consts, oparg);
8718
    }
8719
8720
    if (constant == NULL) {
  Branch (8720:9): [True: 0, False: 10.0k]
8721
        PyErr_SetString(PyExc_SystemError,
8722
                        "Internal error: failed to get value of a constant");
8723
        return NULL;
8724
    }
8725
    Py_INCREF(constant);
8726
    return constant;
8727
}
8728
8729
/* Replace LOAD_CONST c1, LOAD_CONST c2 ... LOAD_CONST cn, BUILD_TUPLE n
8730
   with    LOAD_CONST (c1, c2, ... cn).
8731
   The consts table must still be in list form so that the
8732
   new constant (c1, c2, ... cn) can be appended.
8733
   Called with codestr pointing to the first LOAD_CONST.
8734
*/
8735
static int
8736
fold_tuple_on_constants(PyObject *const_cache,
8737
                        struct instr *inst,
8738
                        int n, PyObject *consts)
8739
{
8740
    /* Pre-conditions */
8741
    assert(PyDict_CheckExact(const_cache));
8742
    assert(PyList_CheckExact(consts));
8743
    assert(inst[n].i_opcode == BUILD_TUPLE);
8744
    assert(inst[n].i_oparg == n);
8745
8746
    for (int i = 0; i < n; i++) {  [6.94k]
  Branch (8746:21): [True: 33.8k, False: 2.70k]
8747
        if (!HAS_CONST(inst[i].i_opcode)) {
8748
            return 0;
8749
        }
8750
    }
8751
8752
    /* Buildup new tuple of constants */
8753
    PyObject *newconst = PyTuple_New(n);
8754
    if (newconst == NULL) {
  Branch (8754:9): [True: 0, False: 2.70k]
8755
        return -1;
8756
    }
8757
    
    for (int i = 0; i < n; i++) {  [2.70k, 5.68k]
  Branch (8757:21): [True: 5.68k, False: 2.70k]
8758
        int op = inst[i].i_opcode;
8759
        int arg = inst[i].i_oparg;
8760
        PyObject *constant = get_const_value(op, arg, consts);
8761
        if (constant == NULL) {
  Branch (8761:13): [True: 0, False: 5.68k]
8762
            return -1;
8763
        }
8764
        PyTuple_SET_ITEM(newconst, i, constant);
8765
    }
8766
    if (merge_const_one(const_cache, &newconst) == 0) {
  Branch (8766:9): [True: 0, False: 2.70k]
8767
        Py_DECREF(newconst);
8768
        return -1;
8769
    }
8770
8771
    Py_ssize_t index;
8772
    for (index = 0; index < PyList_GET_SIZE(consts); index++) {  [105k]
  Branch (8772:21): [True: 106k, False: 1.69k]
8773
        if (PyList_GET_ITEM(consts, index) == newconst) {
  Branch (8773:13): [True: 1.00k, False: 105k]
8774
            break;
8775
        }
8776
    }
8777
    if (index == PyList_GET_SIZE(consts)) {
  Branch (8777:9): [True: 1.69k, False: 1.00k]
8778
        if ((size_t)index >= (size_t)INT_MAX - 1) {
  Branch (8778:13): [True: 0, False: 1.69k]
8779
            Py_DECREF(newconst);
8780
            PyErr_SetString(PyExc_OverflowError, "too many constants");
8781
            return -1;
8782
        }
8783
        if (PyList_Append(consts, newconst)) {
  Branch (8783:13): [True: 0, False: 1.69k]
8784
            Py_DECREF(newconst);
8785
            return -1;
8786
        }
8787
    }
8788
    Py_DECREF(newconst);
8789
    for (int i = 0; i < n; i++) {  [5.68k]
  Branch (8789:21): [True: 5.68k, False: 2.70k]
8790
        inst[i].i_opcode = NOP;
8791
    }
8792
    inst[n].i_opcode = LOAD_CONST;
8793
    inst[n].i_oparg = (int)index;
8794
    return 0;
8795
}
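Illustrative effect on an instruction run (constants shown symbolically, counts omitted):

    LOAD_CONST 1            NOP
    LOAD_CONST 2     -->    NOP
    LOAD_CONST 3            NOP
    BUILD_TUPLE 3           LOAD_CONST (1, 2, 3)

The new tuple is interned through merge_const_one() and appended to the consts list only if an equal tuple is not already there; the leftover NOPs are removed later by clean_basic_block().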
8796
8797
#define VISITED (-1)
8798
8799
// Replace an arbitrary run of SWAPs and NOPs with an optimal one that has the
8800
// same effect.
8801
static int
8802
swaptimize(basicblock *block, int *ix)
8803
{
8804
    // NOTE: "./python -m test test_patma" serves as a good, quick stress test
8805
    // for this function. Make sure to blow away cached *.pyc files first!
8806
    assert(*ix < block->b_iused);
8807
    struct instr *instructions = &block->b_instr[*ix];
8808
    // Find the length of the current sequence of SWAPs and NOPs, and record the
8809
    // maximum depth of the stack manipulations:
8810
    assert(instructions[0].i_opcode == SWAP);
8811
    int depth = instructions[0].i_oparg;
8812
    int len = 0;
8813
    int more = false;
8814
    int limit = block->b_iused - *ix;
8815
    while (++len < limit) {
  Branch (8815:12): [True: 3.29k, False: 0]
8816
        int opcode = instructions[len].i_opcode;
8817
        if (opcode == SWAP) {
  Branch (8817:13): [True: 1.79k, False: 1.50k]
8818
            depth = Py_MAX(depth, instructions[len].i_oparg);
8819
            more = true;
8820
        }
8821
        else if (opcode != NOP) {
  Branch (8821:18): [True: 1.50k, False: 0]
8822
            break;
8823
        }
8824
    }
8825
    // It's already optimal if there's only one SWAP:
8826
    if (!more) {
  Branch (8826:9): [True: 1.33k, False: 175]
8827
        return 0;
8828
    }
8829
    // Create an array with elements {0, 1, 2, ..., depth - 1}:
8830
    int *stack = PyMem_Malloc(depth * sizeof(int));
8831
    if (stack == NULL) {
  Branch (8831:9): [True: 0, False: 175]
8832
        PyErr_NoMemory();
8833
        return -1;
8834
    }
8835
    
    for (int i = 0; i < depth; i++) {  [175, 645]
  Branch (8835:21): [True: 645, False: 175]
8836
        stack[i] = i;
8837
    }
8838
    // Simulate the combined effect of these instructions by "running" them on
8839
    // our "stack":
8840
    for (int i = 0; i < len; i++) {  [1.96k]
  Branch (8840:21): [True: 1.96k, False: 175]
8841
        if (instructions[i].i_opcode == SWAP) {
  Branch (8841:13): [True: 1.96k, False: 0]
8842
            int oparg = instructions[i].i_oparg;
8843
            int top = stack[0];
8844
            // SWAPs are 1-indexed:
8845
            stack[0] = stack[oparg - 1];
8846
            stack[oparg - 1] = top;
8847
        }
8848
    }
8849
    // Now we can begin! Our approach here is based on a solution to a closely
8850
    // related problem (https://cs.stackexchange.com/a/13938). It's easiest to
8851
    // think of this algorithm as determining the steps needed to efficiently
8852
    // "un-shuffle" our stack. By performing the moves in *reverse* order,
8853
    // though, we can efficiently *shuffle* it! For this reason, we will be
8854
    // replacing instructions starting from the *end* of the run. Since the
8855
    // solution is optimal, we don't need to worry about running out of space:
8856
    int current = len - 1;
8857
    for (int i = 0; i < depth; i++) {  [645]
  Branch (8857:21): [True: 645, False: 175]
8858
        // Skip items that have already been visited, or just happen to be in
8859
        // the correct location:
8860
        if (stack[i] == VISITED || stack[i] == i) {  [255]
  Branch (8860:13): [True: 390, False: 255]
  Branch (8860:36): [True: 95, False: 160]
8861
            continue;
8862
        }
8863
        // Okay, we've found an item that hasn't been visited. It forms a cycle
8864
        // with other items; traversing the cycle and swapping each item with
8865
        // the next will put them all in the correct place. The weird
8866
        // loop-and-a-half is necessary to insert 0 into every cycle, since we
8867
        // can only swap from that position:
8868
        int j = i;
8869
        while (true) {
8870
            // Skip the actual swap if our item is zero, since swapping the top
8871
            // item with itself is pointless:
8872
            if (j) {
  Branch (8872:17): [True: 438, False: 272]
8873
                assert(0 <= current);
8874
                // SWAPs are 1-indexed:
8875
                instructions[current].i_opcode = SWAP;
8876
                instructions[current--].i_oparg = j + 1;
8877
            }
8878
            if (stack[j] == VISITED) {
  Branch (8878:17): [True: 160, False: 550]
8879
                // Completed the cycle:
8880
                assert(j == i);
8881
                break;
8882
            }
8883
            int next_j = stack[j];
8884
            stack[j] = VISITED;
8885
            j = next_j;
8886
        }
8887
    }
8888
    // NOP out any unused instructions:
8889
    while (0 <= current) {
  Branch (8889:12): [True: 1.52k, False: 175]
8890
        instructions[current--].i_opcode = NOP;
8891
    }
8892
    PyMem_Free(stack);
8893
    *ix += len - 1;
8894
    return 0;
8895
}
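The cycle-walking core of swaptimize() can be exercised on its own. A standalone sketch (hypothetical minimal_swaps() helper, not compile.c code) that takes the permutation computed by the simulation above and prints an equivalent, shorter run of SWAPs, where SWAP(k) exchanges the top of the stack with the k-th item:

#include <stdio.h>

#define VISITED (-1)

/* stack[i] holds the original index of the item that should end up at depth i.
 * The array is consumed; out must have room for roughly 2 * depth entries. */
static int
minimal_swaps(int *stack, int depth, int *out)
{
    /* Record the swaps that would "un-shuffle" the stack, exactly as
     * swaptimize() does when it rewrites the run from the end. */
    int n = 0;
    for (int i = 0; i < depth; i++) {
        if (stack[i] == VISITED || stack[i] == i) {
            continue;
        }
        int j = i;
        while (1) {
            if (j) {
                out[n++] = j + 1;     /* SWAPs are 1-indexed */
            }
            if (stack[j] == VISITED) {
                break;                /* cycle closed back at i */
            }
            int next_j = stack[j];
            stack[j] = VISITED;
            j = next_j;
        }
    }
    /* Reverse: the un-shuffling steps applied backwards perform the shuffle. */
    for (int lo = 0, hi = n - 1; lo < hi; lo++, hi--) {
        int tmp = out[lo]; out[lo] = out[hi]; out[hi] = tmp;
    }
    return n;
}

int
main(void)
{
    /* Simulating SWAP(3), SWAP(2) on an identity stack gives {1, 2, 0}:
     * the old second item ends up on top, the third second, the top third. */
    int stack[3] = {1, 2, 0};
    int swaps[8];
    int n = minimal_swaps(stack, 3, swaps);
    for (int i = 0; i < n; i++) {
        printf("SWAP(%d)\n", swaps[i]);
    }
    return 0;
}

For this input it prints SWAP(3), SWAP(2) back, confirming the original run was already optimal; longer runs of redundant SWAPs and NOPs collapse to a shorter equivalent sequence.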
8896
8897
// This list is pretty small, since it's only okay to reorder opcodes that:
8898
// - can't affect control flow (like jumping or raising exceptions)
8899
// - can't invoke arbitrary code (besides finalizers)
8900
// - only touch the TOS (and pop it when finished)
8901
#define SWAPPABLE(opcode) \
8902
    ((opcode) == STORE_FAST || (opcode) == POP_TOP)  [1.87k]
8903
8904
static int
8905
next_swappable_instruction(basicblock *block, int i, int lineno)
8906
{
8907
    while (++i < block->b_iused) {
  Branch (8907:12): [True: 2.94k, False: 25]
8908
        struct instr *instruction = &block->b_instr[i];
8909
        if (0 <= lineno && instruction->i_loc.lineno != lineno) {  [1.03k]
  Branch (8909:13): [True: 1.03k, False: 1.90k]
  Branch (8909:28): [True: 3, False: 1.03k]
8910
            // Optimizing across this instruction could cause user-visible
8911
            // changes in the names bound between line tracing events!
8912
            return -1;
8913
        }
8914
        if (instruction->i_opcode == NOP) {
  Branch (8914:13): [True: 469, False: 2.47k]
8915
            continue;
8916
        }
8917
        if (SWAPPABLE(instruction->i_opcode)) {
8918
            return i;
8919
        }
8920
        return -1;
8921
    }
8922
    return -1;
8923
}
8924
8925
// Attempt to apply SWAPs statically by swapping *instructions* rather than
8926
// stack items. For example, we can replace SWAP(2), POP_TOP, STORE_FAST(42)
8927
// with the more efficient NOP, STORE_FAST(42), POP_TOP.
8928
static void
8929
apply_static_swaps(basicblock *block, int i)
8930
{
8931
    // SWAPs are to our left, and potential swaperands are to our right:
8932
    for (; 0 <= i; i--) {  [1.04k]
  Branch (8932:12): [True: 2.53k, False: 18]
8933
        assert(i < block->b_iused);
8934
        struct instr *swap = &block->b_instr[i];
8935
        if (swap->i_opcode != SWAP) {
  Branch (8935:13): [True: 943, False: 1.59k]
8936
            if (swap->i_opcode == NOP || SWAPPABLE(swap->i_opcode)) {  [348]
  Branch (8936:17): [True: 595, False: 348]
8937
                // Nope, but we know how to handle these. Keep looking:
8938
                continue;
8939
            }
8940
            // We can't reason about what this instruction does. Bail:
8941
            return;
8942
        }
8943
        int j = next_swappable_instruction(block, i, -1);
8944
        if (j < 0) {
  Branch (8944:13): [True: 982, False: 608]
8945
            return;
8946
        }
8947
        int k = j;
8948
        int lineno = block->b_instr[j].i_loc.lineno;
8949
        for (int count = swap->i_oparg - 1; 0 < count; count--) {  [720]
  Branch (8949:45): [True: 910, False: 418]
8950
            k = next_swappable_instruction(block, k, lineno);
8951
            if (k < 0) {
  Branch (8951:17): [True: 190, False: 720]
8952
                return;
8953
            }
8954
        }
8955
        // Success!
8956
        swap->i_opcode = NOP;
8957
        struct instr temp = block->b_instr[j];
8958
        block->b_instr[j] = block->b_instr[k];
8959
        block->b_instr[k] = temp;
8960
    }
8961
}
8962
8963
// Attempt to eliminate jumps to jumps by updating inst to jump to
8964
// target->i_target using the provided opcode. Return whether or not the
8965
// optimization was successful.
8966
static bool
8967
jump_thread(struct instr *inst, struct instr *target, int opcode)
8968
{
8969
    assert(is_jump(inst));
8970
    assert(is_jump(target));
8971
    // bpo-45773: If inst->i_target == target->i_target, then nothing actually
8972
    // changes (and we fall into an infinite loop):
8973
    if ((inst->i_loc.lineno == target->i_loc.lineno || target->i_loc.lineno == -1) &&  [1.92k]
  Branch (8973:10): [True: 2.12k, False: 1.92k]
  Branch (8973:56): [True: 1.92k, False: 6]
8974
        inst->i_target != target->i_target)  [4.04k]
  Branch (8974:9): [True: 4.04k, False: 2]
8975
    {
8976
        inst->i_target = target->i_target;
8977
        inst->i_opcode = opcode;
8978
        return true;
8979
    }
8980
    return false;
8981
}
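Illustrative effect (hypothetical labels): a jump whose target block begins with an unconditional JUMP is retargeted past it,

    POP_JUMP_IF_FALSE L1            POP_JUMP_IF_FALSE L2
    ...                      -->    ...
    L1: JUMP L2                     L1: JUMP L2
    L2: ...                         L2: ...

The lineno test only permits this when it cannot change which lines appear to execute, the i_target comparison avoids the bpo-45773 infinite loop, and the callers below write `i -= jump_thread(...)` so that a successfully threaded instruction is examined again on the next loop iteration.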
8982
8983
/* Maximum size of basic block that should be copied in optimizer */
8984
#define MAX_COPY_SIZE 4
8985
8986
/* Optimization */
8987
static int
8988
optimize_basic_block(PyObject *const_cache, basicblock *bb, PyObject *consts)
8989
{
8990
    assert(PyDict_CheckExact(const_cache));
8991
    assert(PyList_CheckExact(consts));
8992
    struct instr nop;
8993
    nop.i_opcode = NOP;
8994
    struct instr *target;
8995
    for (int i = 0; i < bb->b_iused; i++) {  [2.66M]
  Branch (8995:21): [True: 2.66M, False: 584k]
8996
        struct instr *inst = &bb->b_instr[i];
8997
        int oparg = inst->i_oparg;
8998
        int nextop = i+1 < bb->b_iused ? bb->b_instr[i+1].i_opcode : 0;  [2.09M, 578k]
  Branch (8998:22): [True: 2.09M, False: 578k]
8999
        if (is_jump(inst) || is_block_push(inst)) {  [2.40M]
  Branch (8999:13): [True: 260k, False: 2.40M]
  Branch (8999:30): [True: 7.72k, False: 2.40M]
9000
            /* Skip over empty basic blocks. */
9001
            while (inst->i_target->b_iused == 0) {
  Branch (9001:20): [True: 0, False: 267k]
9002
                inst->i_target = inst->i_target->b_next;
9003
            }
9004
            target = &inst->i_target->b_instr[0];
9005
            assert(!IS_ASSEMBLER_OPCODE(target->i_opcode));
9006
        }
9007
        else {
9008
            target = &nop;
9009
        }
9010
        assert(!IS_ASSEMBLER_OPCODE(inst->i_opcode));
9011
        switch (inst->i_opcode) {
9012
            /* Remove LOAD_CONST const; conditional jump */
9013
            case LOAD_CONST:
  Branch (9013:13): [True: 401k, False: 2.26M]
9014
            {
9015
                PyObject* cnt;
9016
                int is_true;
9017
                int jump_if_true;
9018
                switch(nextop) {
  Branch (9018:24): [True: 397k, False: 4.41k]
9019
                    case POP_JUMP_IF_FALSE:
  Branch (9019:21): [True: 563, False: 401k]
9020
                    case POP_JUMP_IF_TRUE:
  Branch (9020:21): [True: 344, False: 401k]
9021
                        cnt = get_const_value(inst->i_opcode, oparg, consts);
9022
                        if (cnt == NULL) {
  Branch (9022:29): [True: 0, False: 907]
9023
                            goto error;
9024
                        }
9025
                        is_true = PyObject_IsTrue(cnt);
9026
                        Py_DECREF(cnt);
9027
                        if (is_true == -1) {
  Branch (9027:29): [True: 0, False: 907]
9028
                            goto error;
9029
                        }
9030
                        inst->i_opcode = NOP;
9031
                        jump_if_true = nextop == POP_JUMP_IF_TRUE;
9032
                        if (is_true == jump_if_true) {
  Branch (9032:29): [True: 371, False: 536]
9033
                            bb->b_instr[i+1].i_opcode = JUMP;
9034
                        }
9035
                        else {
9036
                            bb->b_instr[i+1].i_opcode = NOP;
9037
                        }
9038
                        break;
9039
                    case JUMP_IF_FALSE_OR_POP:
  Branch (9039:21): [True: 14, False: 401k]
9040
                    case JUMP_IF_TRUE_OR_POP:
  Branch (9040:21): [True: 35, False: 401k]
9041
                        cnt = get_const_value(inst->i_opcode, oparg, consts);
9042
                        if (cnt == NULL) {
  Branch (9042:29): [True: 0, False: 49]
9043
                            goto error;
9044
                        }
9045
                        is_true = PyObject_IsTrue(cnt);
9046
                        Py_DECREF(cnt);
9047
                        if (is_true == -1) {
  Branch (9047:29): [True: 0, False: 49]
9048
                            goto error;
9049
                        }
9050
                        jump_if_true = nextop == JUMP_IF_TRUE_OR_POP;
9051
                        if (is_true == jump_if_true) {
  Branch (9051:29): [True: 29, False: 20]
9052
                            bb->b_instr[i+1].i_opcode = JUMP;
9053
                        }
9054
                        else {
9055
                            inst->i_opcode = NOP;
9056
                            bb->b_instr[i+1].i_opcode = NOP;
9057
                        }
9058
                        break;
9059
                    case IS_OP:
  Branch (9059:21): [True: 3.46k, False: 398k]
9060
                        cnt = get_const_value(inst->i_opcode, oparg, consts);
9061
                        if (cnt == NULL) {
  Branch (9061:29): [True: 0, False: 3.46k]
9062
                            goto error;
9063
                        }
9064
                        int jump_op = i+2 < bb->b_iused ? bb->b_instr[i+2].i_opcode : 0;  [3.44k, 15]
  Branch (9064:39): [True: 3.44k, False: 15]
9065
                        if (Py_IsNone(cnt) && (jump_op == POP_JUMP_IF_FALSE || jump_op == POP_JUMP_IF_TRUE)) {  [3.37k, 220]
  Branch (9065:48): [True: 3.15k, False: 220]
  Branch (9065:80): [True: 143, False: 77]
9066
                            unsigned char nextarg = bb->b_instr[i+1].i_oparg;
9067
                            inst->i_opcode = NOP;
9068
                            bb->b_instr[i+1].i_opcode = NOP;
9069
                            bb->b_instr[i+2].i_opcode = nextarg ^ (jump_op == POP_JUMP_IF_FALSE) ?
  Branch (9069:57): [True: 1.65k, False: 1.64k]
9070
                                    POP_JUMP_IF_NOT_NONE : POP_JUMP_IF_NONE;  [1.64k]
9071
                        }
9072
                        Py_DECREF(cnt);
9073
                        break;
9074
                }
9075
                break;
9076
            }
9077
9078
                /* Try to fold tuples of constants.
9079
                   Skip over BUILD_TUPLE(1) UNPACK_SEQUENCE(1).
9080
                   Replace BUILD_TUPLE(2) UNPACK_SEQUENCE(2) with SWAP(2).
9081
                   Replace BUILD_TUPLE(3) UNPACK_SEQUENCE(3) with SWAP(3). */
9082
            
            case BUILD_TUPLE:  [29.8k]
  Branch (9082:13): [True: 29.8k, False: 2.63M]
9083
                if (nextop == UNPACK_SEQUENCE && oparg == bb->b_instr[i+1].i_oparg) {  [279]
  Branch (9083:21): [True: 279, False: 29.6k]
  Branch (9083:50): [True: 279, False: 0]
9084
                    switch(oparg) {
  Branch (9084:28): [True: 15, False: 264]
9085
                        case 1:
  Branch (9085:25): [True: 1, False: 278]
9086
                            inst->i_opcode = NOP;
9087
                            bb->b_instr[i+1].i_opcode = NOP;
9088
                            continue;
9089
                        case 2:
  Branch (9089:25): [True: 228, False: 51]
9090
                        case 3:
  Branch (9090:25): [True: 35, False: 244]
9091
                            inst->i_opcode = NOP;
9092
                            bb->b_instr[i+1].i_opcode = SWAP;
9093
                            continue;
9094
                    }
9095
                }
9096
                if (i >= oparg) {
  Branch (9096:21): [True: 29.5k, False: 36]
9097
                    if (fold_tuple_on_constants(const_cache, inst-oparg, oparg, consts)) {
  Branch (9097:25): [True: 0, False: 29.5k]
9098
                        goto error;
9099
                    }
9100
                }
9101
                break;
9102
9103
                /* Simplify conditional jump to conditional jump where the
9104
                   result of the first test implies the success of a similar
9105
                   test or the failure of the opposite test.
9106
                   Arises in code like:
9107
                   "a and b or c"
9108
                   "(a and b) and c"
9109
                   "(a or b) or c"
9110
                   "(a or b) and c"
9111
                   x:JUMP_IF_FALSE_OR_POP y   y:JUMP_IF_FALSE_OR_POP z
9112
                      -->  x:JUMP_IF_FALSE_OR_POP z
9113
                   x:JUMP_IF_FALSE_OR_POP y   y:JUMP_IF_TRUE_OR_POP z
9114
                      -->  x:POP_JUMP_IF_FALSE y+1
9115
                   where y+1 is the instruction following the second test.
9116
                */
9117
            
            case JUMP_IF_FALSE_OR_POP:  [873]
  Branch (9117:13): [True: 873, False: 2.66M]
9118
                switch (target->i_opcode) {
  Branch (9118:25): [True: 733, False: 140]
9119
                    case POP_JUMP_IF_FALSE:
  Branch (9119:21): [True: 0, False: 873]
9120
                        i -= jump_thread(inst, target, POP_JUMP_IF_FALSE);
9121
                        break;
9122
                    case JUMP:
  Branch (9122:21): [True: 0, False: 873]
9123
                    case JUMP_IF_FALSE_OR_POP:
  Branch (9123:21): [True: 0, False: 873]
9124
                        i -= jump_thread(inst, target, JUMP_IF_FALSE_OR_POP);
9125
                        break;
9126
                    case JUMP_IF_TRUE_OR_POP:
  Branch (9126:21): [True: 140, False: 733]
9127
                    case POP_JUMP_IF_TRUE:
  Branch (9127:21): [True: 0, False: 873]
9128
                        if (inst->i_loc.lineno == target->i_loc.lineno) {
  Branch (9128:29): [True: 136, False: 4]
9129
                            // We don't need to bother checking for loops here,
9130
                            // since a block's b_next cannot point to itself:
9131
                            assert(inst->i_target != inst->i_target->b_next);
9132
                            inst->i_opcode = POP_JUMP_IF_FALSE;
9133
                            inst->i_target = inst->i_target->b_next;
9134
                            --i;
9135
                        }
9136
                        break;
9137
                }
9138
                break;
9139
            case JUMP_IF_TRUE_OR_POP:
  Branch (9139:13): [True: 996, False: 2.66M]
9140
                switch (target->i_opcode) {
  Branch (9140:25): [True: 987, False: 9]
9141
                    case POP_JUMP_IF_TRUE:
  Branch (9141:21): [True: 0, False: 996]
9142
                        i -= jump_thread(inst, target, POP_JUMP_IF_TRUE);
9143
                        break;
9144
                    case JUMP:
  Branch (9144:21): [True: 0, False: 996]
9145
                    case JUMP_IF_TRUE_OR_POP:
  Branch (9145:21): [True: 0, False: 996]
9146
                        i -= jump_thread(inst, target, JUMP_IF_TRUE_OR_POP);
9147
                        break;
9148
                    case JUMP_IF_FALSE_OR_POP:
  Branch (9148:21): [True: 9, False: 987]
9149
                    case POP_JUMP_IF_FALSE:
  Branch (9149:21): [True: 0, False: 996]
9150
                        if (inst->i_loc.lineno == target->i_loc.lineno) {
  Branch (9150:29): [True: 5, False: 4]
9151
                            // We don't need to bother checking for loops here,
9152
                            // since a block's b_next cannot point to itself:
9153
                            assert(inst->i_target != inst->i_target->b_next);
9154
                            inst->i_opcode = POP_JUMP_IF_TRUE;
9155
                            inst->i_target = inst->i_target->b_next;
9156
                            --i;
9157
                        }
9158
                        break;
9159
                }
9160
                break;
9161
            case POP_JUMP_IF_NOT_NONE:
  Branch (9161:13): [True: 1.77k, False: 2.66M]
9162
            case POP_JUMP_IF_NONE:
  Branch (9162:13): [True: 1.72k, False: 2.66M]
9163
                switch (target->i_opcode) {
  Branch (9163:25): [True: 3.36k, False: 131]
9164
                    case JUMP:
  Branch (9164:21): [True: 131, False: 3.36k]
9165
                        i -= jump_thread(inst, target, inst->i_opcode);
9166
                }
9167
                break;
9168
            case POP_JUMP_IF_FALSE:
  Branch (9168:13): [True: 224k, False: 2.44M]
9169
                switch (target->i_opcode) {
  Branch (9169:25): [True: 222k, False: 1.72k]
9170
                    case JUMP:
  Branch (9170:21): [True: 1.72k, False: 222k]
9171
                        i -= jump_thread(inst, target, POP_JUMP_IF_FALSE);
9172
                }
9173
                break;
9174
            
            case POP_JUMP_IF_TRUE:  [6.29k]
  Branch (9174:13): [True: 6.29k, False: 2.66M]
9175
                switch (target->i_opcode) {
  Branch (9175:25): [True: 6.08k, False: 207]
9176
                    case JUMP:
  Branch (9176:21): [True: 207, False: 6.08k]
9177
                        i -= jump_thread(inst, target, POP_JUMP_IF_TRUE);
9178
                }
9179
                break;
9180
            case JUMP:
  Branch (9180:13): [True: 19.3k, False: 2.64M]
9181
                switch (target->i_opcode) {
  Branch (9181:25): [True: 17.3k, False: 1.99k]
9182
                    case JUMP:
  Branch (9182:21): [True: 1.99k, False: 17.3k]
9183
                        i -= jump_thread(inst, target, JUMP);
9184
                }
9185
                break;
9186
            
            case FOR_ITER:  [4.14k]
  Branch (9186:13): [True: 4.14k, False: 2.66M]
9187
                if (target->i_opcode == JUMP) {
  Branch (9187:21): [True: 303, False: 3.83k]
9188
                    /* This will not work now because the jump (at target) could
9189
                     * be forward or backward and FOR_ITER only jumps forward. We
9190
                     * can re-enable this if ever we implement a backward version
9191
                     * of FOR_ITER.
9192
                     */
9193
                    /*
9194
                    i -= jump_thread(inst, target, FOR_ITER);
9195
                    */
9196
                }
9197
                break;
9198
            case SWAP:
  Branch (9198:13): [True: 1.50k, False: 2.66M]
9199
                if (oparg == 1) {
  Branch (9199:21): [True: 0, False: 1.50k]
9200
                    inst->i_opcode = NOP;
9201
                    break;
9202
                }
9203
                if (swaptimize(bb, &i)) {
  Branch (9203:21): [True: 0, False: 1.50k]
9204
                    goto error;
9205
                }
9206
                apply_static_swaps(bb, i);
9207
                break;
9208
            case KW_NAMES:
  Branch (9208:13): [True: 3.11k, False: 2.66M]
9209
                break;
9210
            case PUSH_NULL:
  Branch (9210:13): [True: 41.9k, False: 2.62M]
9211
                if (nextop == LOAD_GLOBAL && (inst[1].i_opcode & 1) == 0) {
  Branch (9211:21): [True: 24.9k, False: 17.0k]
  Branch (9211:46): [True: 24.9k, False: 0]
9212
                    inst->i_opcode = NOP;
9213
                    inst->i_oparg = 0;
9214
                    inst[1].i_oparg |= 1;
9215
                }
9216
                break;
9217
            default:
  Branch (9217:13): [True: 1.93M, False: 737k]
9218
                /* All HAS_CONST opcodes should be handled with LOAD_CONST */
9219
                assert (!HAS_CONST(inst->i_opcode));
9220
        }
9221
    }
9222
    return 0;
9223
error:
9224
    return -1;
9225
}
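
The jump_thread() calls in the switch above implement jump threading: when a jump lands on a block that immediately performs another unconditional jump, the first jump is retargeted so execution skips the intermediate hop. A minimal sketch of that idea, using a made-up instruction record rather than CPython's basicblock/instr types (thread_jump, struct insn and the OP_* names are illustrative only):

    #include <stdio.h>

    /* Hypothetical, simplified instruction: an opcode plus a jump-target
     * index (-1 means "no jump"). */
    enum { OP_NOP, OP_JUMP, OP_LOAD, OP_RETURN };
    struct insn { int opcode; int target; };

    /* Jump threading: while instruction i targets a bare unconditional jump,
     * retarget it past that jump.  Assumes there is no cycle of bare jumps. */
    static void thread_jump(struct insn *code, int i)
    {
        while (code[i].target >= 0 && code[code[i].target].opcode == OP_JUMP) {
            code[i].target = code[code[i].target].target;
        }
    }

    int main(void)
    {
        struct insn code[] = {
            { OP_JUMP, 2 },     /* 0: jumps to 2 ...       */
            { OP_LOAD, -1 },    /* 1:                      */
            { OP_JUMP, 3 },     /* 2: ... which jumps to 3 */
            { OP_RETURN, -1 },  /* 3:                      */
        };
        thread_jump(code, 0);
        printf("instruction 0 now jumps to %d\n", code[0].target);  /* 3 */
        return 0;
    }

The real pass works on basic blocks and also folds conditional-onto-conditional combinations (for example POP_JUMP_IF_TRUE whose target is another conditional jump), as the cases above show.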
9226
9227
static bool
9228
basicblock_has_lineno(const basicblock *bb) {
9229
    for (int i = 0; i < bb->b_iused; i++) {
  Branch (9229:21): [True: 8.81k, False: 3.47k]
9230
        if (bb->b_instr[i].i_loc.lineno > 0) {
  Branch (9230:13): [True: 1.76k, False: 7.05k]
9231
            return true;
9232
        }
9233
    }
9234
    return false;
9235
}
9236
9237
/* If this block ends with an unconditional jump to an exit block,
9238
 * then remove the jump and extend this block with the target.
9239
 */
9240
static int
9241
extend_block(basicblock *bb) {
9242
    if (bb->b_iused == 0) {
  Branch (9242:9): [True: 19.8k, False: 1.14M]
9243
        return 0;
9244
    }
9245
    struct instr *last = &bb->b_instr[bb->b_iused-1];
9246
    if (last->i_opcode != JUMP &&
  Branch (9246:9): [True: 1.11M, False: 37.5k]
9247
        last->i_opcode != JUMP_FORWARD &&
  Branch (9247:9): [True: 1.11M, False: 0]
9248
        last->i_opcode != JUMP_BACKWARD) {
  Branch (9248:9): [True: 1.11M, False: 0]
9249
        return 0;
9250
    }
9251
    if (basicblock_exits_scope(last->i_target) && last->i_target->b_iused <= MAX_COPY_SIZE) {
  Branch (9251:9): [True: 9.23k, False: 28.3k]
  Branch (9251:51): [True: 5.23k, False: 3.99k]
9252
        basicblock *to_copy = last->i_target;
9253
        if (basicblock_has_lineno(to_copy)) {
  Branch (9253:13): [True: 1.76k, False: 3.47k]
9254
            /* copy only blocks without line number (like implicit 'return None's) */
9255
            return 0;
9256
        }
9257
        last->i_opcode = NOP;
9258
        for (int i = 0; i < to_copy->b_iused; i++) {
  Branch (9258:25): [True: 7.04k, False: 3.47k]
9259
            int index = basicblock_next_instr(bb);
9260
            if (index < 0) {
  Branch (9260:17): [True: 0, False: 7.04k]
9261
                return -1;
9262
            }
9263
            bb->b_instr[index] = to_copy->b_instr[i];
9264
        }
9265
    }
9266
    return 0;
9267
}
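
As the comment before extend_block() describes, a block that ends in an unconditional jump to a small exit block gets that jump replaced by a NOP and the exit block's instructions copied in, so the jump disappears entirely once NOPs are cleaned up. A hypothetical sketch of the copy step on simplified fixed-capacity blocks (struct block, inline_exit_block and the OP_* names are illustrative, not CPython's):

    #include <stdio.h>

    enum { OP_NOP, OP_JUMP, OP_RETURN_NONE };

    #define BLOCK_CAP 8

    struct block {
        int code[BLOCK_CAP];
        int used;
    };

    /* Replace the trailing unconditional jump of 'bb' with a NOP and append
     * the instructions of the tiny exit block it jumped to.
     * Returns 0, or -1 if the block would overflow. */
    static int inline_exit_block(struct block *bb, const struct block *exit_bb)
    {
        bb->code[bb->used - 1] = OP_NOP;     /* the jump is no longer needed */
        for (int i = 0; i < exit_bb->used; i++) {
            if (bb->used >= BLOCK_CAP) {
                return -1;
            }
            bb->code[bb->used++] = exit_bb->code[i];
        }
        return 0;
    }

    int main(void)
    {
        struct block bb = { { OP_NOP, OP_JUMP }, 2 };
        struct block exit_bb = { { OP_RETURN_NONE }, 1 };
        if (inline_exit_block(&bb, &exit_bb) == 0) {
            printf("block now holds %d instructions\n", bb.used);  /* 3 */
        }
        return 0;
    }

The real function additionally refuses to copy exit blocks that carry line numbers, which is what the basicblock_has_lineno() check above is for.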
9268
9269
static void
9270
clean_basic_block(basicblock *bb) {
9271
    /* Remove NOPs when legal to do so. */
9272
    int dest = 0;
9273
    int prev_lineno = -1;
9274
    for (int src = 0; src < bb->b_iused; src++) {
  Branch (9274:23): [True: 7.87M, False: 1.72M]
9275
        int lineno = bb->b_instr[src].i_loc.lineno;
9276
        if (bb->b_instr[src].i_opcode == NOP) {
  Branch (9276:13): [True: 73.7k, False: 7.80M]
9277
            /* Eliminate no-op if it doesn't have a line number */
9278
            if (lineno < 0) {
  Branch (9278:17): [True: 6.31k, False: 67.3k]
9279
                continue;
9280
            }
9281
            /* or, if the previous instruction had the same line number. */
9282
            if (prev_lineno == lineno) {
  Branch (9282:17): [True: 23.5k, False: 43.8k]
9283
                continue;
9284
            }
9285
            /* or, if the next instruction has same line number or no line number */
9286
            if (src < bb->b_iused - 1) {
  Branch (9286:17): [True: 39.2k, False: 4.60k]
9287
                int next_lineno = bb->b_instr[src+1].i_loc.lineno;
9288
                if (next_lineno < 0 || next_lineno == lineno) {
  Branch (9288:21): [True: 2.26k, False: 36.9k]
  Branch (9288:40): [True: 32.7k, False: 4.24k]
9289
                    bb->b_instr[src+1].i_loc = bb->b_instr[src].i_loc;
9290
                    continue;
9291
                }
9292
            }
9293
            else {
9294
                basicblock* next = bb->b_next;
9295
                while (next && next->b_iused == 0) {
  Branch (9295:24): [True: 4.65k, False: 0]
  Branch (9295:32): [True: 42, False: 4.60k]
9296
                    next = next->b_next;
9297
                }
9298
                /* or if last instruction in BB and next BB has same line number */
9299
                if (next) {
  Branch (9299:21): [True: 4.60k, False: 0]
9300
                    if (lineno == next->b_instr[0].i_loc.lineno) {
  Branch (9300:25): [True: 219, False: 4.38k]
9301
                        continue;
9302
                    }
9303
                }
9304
            }
9305
9306
        }
9307
        if (dest != src) {
  Branch (9307:13): [True: 300k, False: 7.50M]
9308
            bb->b_instr[dest] = bb->b_instr[src];
9309
        }
9310
        dest++;
9311
        prev_lineno = lineno;
9312
    }
9313
    assert(dest <= bb->b_iused);
9314
    bb->b_iused = dest;
9315
}
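
clean_basic_block() uses a two-index, in-place compaction: src scans every instruction, dest receives only the ones being kept, and b_iused is truncated to dest afterwards. A stripped-down sketch of that pattern, ignoring the line-number rules (compact_nops is an illustrative name):

    #include <stdio.h>

    #define OP_NOP 0

    /* Remove every OP_NOP from code[0..*used) in place, preserving order. */
    static void compact_nops(int *code, int *used)
    {
        int dest = 0;
        for (int src = 0; src < *used; src++) {
            if (code[src] == OP_NOP) {
                continue;               /* drop it */
            }
            if (dest != src) {
                code[dest] = code[src]; /* shift a kept instruction down */
            }
            dest++;
        }
        *used = dest;
    }

    int main(void)
    {
        int code[] = { 7, OP_NOP, 9, OP_NOP, OP_NOP, 4 };
        int used = 6;
        compact_nops(code, &used);
        printf("%d instructions left\n", used);   /* 3 */
        return 0;
    }

The real pass is more conservative: a NOP that carries a line number is only dropped when a neighbouring instruction can take that number over, which is what the lineno checks above implement.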
9316
9317
static int
9318
normalize_basic_block(basicblock *bb) {
9319
    /* Mark blocks as exit and/or nofallthrough.
9320
     Raise SystemError if CFG is malformed. */
9321
    for (int i = 0; i < bb->b_iused; i++) {
  Branch (9321:21): [True: 2.65M, False: 584k]
9322
        int opcode = bb->b_instr[i].i_opcode;
9323
        assert(!IS_ASSEMBLER_OPCODE(opcode));
9324
        int is_jump = IS_JUMP_OPCODE(opcode);
9325
        int is_exit = IS_SCOPE_EXIT_OPCODE(opcode);
9326
        if (is_exit || is_jump) {
  Branch (9326:13): [True: 90.3k, False: 2.56M]
  Branch (9326:24): [True: 259k, False: 2.30M]
9327
            if (i != bb->b_iused-1) {
  Branch (9327:17): [True: 0, False: 350k]
9328
                PyErr_SetString(PyExc_SystemError, "malformed control flow graph.");
9329
                return -1;
9330
            }
9331
        }
9332
        if (is_jump) {
  Branch (9332:13): [True: 259k, False: 2.39M]
9333
            /* Skip over empty basic blocks. */
9334
            while (bb->b_instr[i].i_target->b_iused == 0) {
  Branch (9334:20): [True: 19.3k, False: 259k]
9335
                bb->b_instr[i].i_target = bb->b_instr[i].i_target->b_next;
9336
            }
9337
        }
9338
    }
9339
    return 0;
9340
}
9341
9342
static int
9343
mark_reachable(basicblock *entryblock) {
9344
    basicblock **stack = make_cfg_traversal_stack(entryblock);
9345
    if (stack == NULL) {
  Branch (9345:9): [True: 0, False: 72.8k]
9346
        return -1;
9347
    }
9348
    basicblock **sp = stack;
9349
    entryblock->b_predecessors = 1;
9350
    *sp++ = entryblock;
9351
    while (sp > stack) {
  Branch (9351:12): [True: 571k, False: 72.8k]
9352
        basicblock *b = *(--sp);
9353
        b->b_visited = 1;
9354
        if (b->b_next && BB_HAS_FALLTHROUGH(b)) {
  Branch (9354:13): [True: 500k, False: 71.0k]
9355
            if (!b->b_next->b_visited) {
  Branch (9355:17): [True: 257k, False: 208k]
9356
                assert(b->b_next->b_predecessors == 0);
9357
                *sp++ = b->b_next;
9358
            }
9359
            b->b_next->b_predecessors++;
9360
        }
9361
        for (int i = 0; i < b->b_iused; i++) {
  Branch (9361:25): [True: 2.60M, False: 571k]
9362
            basicblock *target;
9363
            struct instr *instr = &b->b_instr[i];
9364
            if (is_jump(instr) || is_block_push(instr)) {
  Branch (9364:17): [True: 253k, False: 2.34M]
  Branch (9364:35): [True: 7.72k, False: 2.34M]
9365
                target = instr->i_target;
9366
                if (!target->b_visited) {
  Branch (9366:21): [True: 240k, False: 20.1k]
9367
                    assert(target->b_predecessors == 0 || target == b->b_next);
9368
                    *sp++ = target;
9369
                }
9370
                target->b_predecessors++;
9371
                if (is_block_push(instr)) {
  Branch (9371:21): [True: 7.72k, False: 253k]
9372
                    target->b_except_predecessors++;
9373
                }
9374
                assert(target->b_except_predecessors == 0 ||
9375
                       target->b_except_predecessors == target->b_predecessors);
9376
            }
9377
        }
9378
    }
9379
    PyMem_Free(stack);
9380
    return 0;
9381
}
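
mark_reachable() is an iterative depth-first traversal of the control-flow graph with an explicit stack (allocated up front by make_cfg_traversal_stack), counting predecessors as it follows edges. The same traversal over a plain adjacency list, as a hypothetical sketch (succ, preds and NBLOCKS are illustrative names):

    #include <stdio.h>

    #define NBLOCKS 4

    /* succ[b] lists the successors of block b; -1 terminates each list. */
    static const int succ[NBLOCKS][NBLOCKS] = {
        { 1, 2, -1 },   /* 0 -> 1, 2 */
        { 3, -1 },      /* 1 -> 3    */
        { 3, -1 },      /* 2 -> 3    */
        { -1 },         /* 3 (exit)  */
    };

    int main(void)
    {
        int visited[NBLOCKS] = { 0 };
        int preds[NBLOCKS] = { 0 };
        int stack[NBLOCKS];
        int sp = 0;

        visited[0] = 1;
        preds[0] = 1;      /* the entry block counts as having one predecessor */
        stack[sp++] = 0;
        while (sp > 0) {
            int b = stack[--sp];
            for (int k = 0; succ[b][k] >= 0; k++) {
                int t = succ[b][k];
                preds[t]++;
                if (!visited[t]) {
                    visited[t] = 1;
                    stack[sp++] = t;   /* each block is pushed at most once */
                }
            }
        }
        for (int b = 0; b < NBLOCKS; b++) {
            printf("block %d: reachable=%d predecessors=%d\n",
                   b, visited[b], preds[b]);
        }
        return 0;
    }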
9382
9383
static void
9384
eliminate_empty_basic_blocks(basicblock *entryblock) {
9385
    /* Eliminate empty blocks */
9386
    for (basicblock *b = entryblock; b != NULL; b = b->b_next) {
  Branch (9386:38): [True: 608k, False: 74.2k]
9387
        basicblock *next = b->b_next;
9388
        if (next) {
  Branch (9388:13): [True: 533k, False: 74.2k]
9389
            while (next->b_iused == 0 && next->b_next) {
  Branch (9389:20): [True: 16.2k, False: 531k]
  Branch (9389:42): [True: 14.4k, False: 1.86k]
9390
                next = next->b_next;
9391
            }
9392
            b->b_next = next;
9393
        }
9394
    }
9395
    for (basicblock *b = entryblock; b != NULL; b = b->b_next) {
  Branch (9395:38): [True: 608k, False: 74.2k]
9396
        if (b->b_iused == 0) {
  Branch (9396:13): [True: 1.86k, False: 606k]
9397
            continue;
9398
        }
9399
        for (int i = 0; i < b->b_iused; i++) {
  Branch (9399:25): [True: 2.82M, False: 606k]
9400
            struct instr *instr = &b->b_instr[i];
9401
            if (is_jump(instr) || is_block_push(instr)) {
  Branch (9401:17): [True: 270k, False: 2.55M]
  Branch (9401:35): [True: 7.72k, False: 2.54M]
9402
                basicblock *target = instr->i_target;
9403
                while (target->b_iused == 0) {
  Branch (9403:24): [True: 11, False: 278k]
9404
                    target = target->b_next;
9405
                }
9406
                instr->i_target = target;
9407
            }
9408
        }
9409
    }
9410
}
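
eliminate_empty_basic_blocks() works in two passes: first it splices empty blocks out of the b_next chain, then it retargets any jump that still points at an empty block. The splicing step is ordinary linked-list node skipping; a hypothetical sketch (struct node and skip_empty are illustrative names):

    #include <stdio.h>

    struct node {
        int used;            /* number of instructions; 0 means "empty block" */
        struct node *next;
    };

    /* Make every node's next pointer skip over empty nodes. */
    static void skip_empty(struct node *head)
    {
        for (struct node *n = head; n != NULL; n = n->next) {
            struct node *next = n->next;
            while (next != NULL && next->used == 0) {
                next = next->next;
            }
            n->next = next;
        }
    }

    int main(void)
    {
        struct node c = { 2, NULL };
        struct node b = { 0, &c };   /* empty block */
        struct node a = { 3, &b };
        skip_empty(&a);
        printf("a now links to a block with %d instructions\n",
               a.next->used);        /* 2 */
        return 0;
    }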
9411
9412
9413
/* If an instruction has no line number, but its predecessor in the BB does,
9414
 * then copy the line number. If a successor block has no line number, and only
9415
 * one predecessor, then inherit the line number.
9416
 * This ensures that all exit blocks (with one predecessor) receive a line number.
9417
 * Also reduces the size of the line number table,
9418
 * but has no impact on the generated line number events.
9419
 */
9420
static void
9421
propagate_line_numbers(basicblock *entryblock) {
9422
    for (basicblock *b = entryblock; b != NULL; b = b->b_next) {
  Branch (9422:38): [True: 568k, False: 72.8k]
9423
        if (b->b_iused == 0) {
  Branch (9423:13): [True: 0, False: 568k]
9424
            continue;
9425
        }
9426
9427
        struct location prev_location = NO_LOCATION;
9428
        for (int i = 0; i < b->b_iused; i++) {
  Branch (9428:25): [True: 2.60M, False: 568k]
9429
            if (b->b_instr[i].i_loc.lineno < 0) {
  Branch (9429:17): [True: 352k, False: 2.25M]
9430
                b->b_instr[i].i_loc = prev_location;
9431
            }
9432
            else {
9433
                prev_location = b->b_instr[i].i_loc;
9434
            }
9435
        }
9436
        if (BB_HAS_FALLTHROUGH(b) && b->b_next->b_predecessors == 1) {
  Branch (9436:38): [True: 247k, False: 215k]
9437
            assert(b->b_next->b_iused);
9438
            if (b->b_next->b_instr[0].i_loc.lineno < 0) {
  Branch (9438:17): [True: 3.02k, False: 244k]
9439
                b->b_next->b_instr[0].i_loc = prev_location;
9440
            }
9441
        }
9442
        if (is_jump(&b->b_instr[b->b_iused-1])) {
  Branch (9442:13): [True: 253k, False: 315k]
9443
            basicblock *target = b->b_instr[b->b_iused-1].i_target;
9444
            if (target->b_predecessors == 1) {
  Branch (9444:17): [True: 21.6k, False: 231k]
9445
                if (target->b_instr[0].i_loc.lineno < 0) {
  Branch (9445:21): [True: 5.22k, False: 16.4k]
9446
                    target->b_instr[0].i_loc = prev_location;
9447
                }
9448
            }
9449
        }
9450
    }
9451
}
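
Within one block, propagate_line_numbers() simply carries the most recent location forward onto instructions that have none, then pushes it into a sole successor's first instruction. The intra-block part reduces to a running "previous location" value; a hypothetical sketch on bare line numbers (propagate_lines and NO_LINE are illustrative names):

    #include <stdio.h>

    #define NO_LINE (-1)

    /* Give every instruction without a line number (NO_LINE) the line number
     * of the closest preceding instruction that has one. */
    static void propagate_lines(int *lineno, int n)
    {
        int prev = NO_LINE;
        for (int i = 0; i < n; i++) {
            if (lineno[i] == NO_LINE) {
                lineno[i] = prev;
            }
            else {
                prev = lineno[i];
            }
        }
    }

    int main(void)
    {
        int lineno[] = { 10, NO_LINE, NO_LINE, 12, NO_LINE };
        propagate_lines(lineno, 5);
        for (int i = 0; i < 5; i++) {
            printf("%d ", lineno[i]);   /* 10 10 10 12 12 */
        }
        printf("\n");
        return 0;
    }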
9452
9453
/* Perform optimizations on a control flow graph.
9454
   The consts object should still be in list form to allow new constants
9455
   to be appended.
9456
9457
   All transformations keep the code size the same or smaller.
9458
   For those that reduce size, the gaps are initially filled with
9459
   NOPs.  Later those NOPs are removed.
9460
*/
9461
9462
static int
9463
optimize_cfg(basicblock *entryblock, PyObject *consts, PyObject *const_cache)
9464
{
9465
    assert(PyDict_CheckExact(const_cache));
9466
    for (basicblock *b = entryblock; b != NULL; b = b->b_next) {
  Branch (9466:38): [True: 584k, False: 72.8k]
9467
        if (optimize_basic_block(const_cache, b, consts)) {
  Branch (9467:13): [True: 0, False: 584k]
9468
            return -1;
9469
        }
9470
        clean_basic_block(b);
9471
        assert(b->b_predecessors == 0);
9472
    }
9473
    for (basicblock *b = entryblock; b != NULL; b = b->b_next) {
  Branch (9473:38): [True: 584k, False: 72.8k]
9474
        if (extend_block(b)) {
  Branch (9474:13): [True: 0, False: 584k]
9475
            return -1;
9476
        }
9477
    }
9478
    if (mark_reachable(entryblock)) {
  Branch (9478:9): [True: 0, False: 72.8k]
9479
        return -1;
9480
    }
9481
    /* Delete unreachable instructions */
9482
    for (basicblock *b = entryblock; b != NULL; b = b->b_next) {
  Branch (9482:38): [True: 584k, False: 72.8k]
9483
       if (b->b_predecessors == 0) {
  Branch (9483:12): [True: 12.9k, False: 571k]
9484
            b->b_iused = 0;
9485
       }
9486
    }
9487
    eliminate_empty_basic_blocks(entryblock);
9488
    for (basicblock *b = entryblock; b != NULL; b = b->b_next) {
  Branch (9488:38): [True: 569k, False: 72.8k]
9489
        clean_basic_block(b);
9490
    }
9491
    return 0;
9492
}
9493
9494
// Remove trailing unused constants.
9495
static int
9496
trim_unused_consts(basicblock *entryblock, PyObject *consts)
9497
{
9498
    assert(PyList_CheckExact(consts));
9499
9500
    // The first constant may be the docstring; keep it always.
9501
    int max_const_index = 0;
9502
    for (basicblock *b = entryblock; b != NULL; b = b->b_next) {
  Branch (9502:38): [True: 568k, False: 72.8k]
9503
        for (int i = 0; i < b->b_iused; i++) {
  Branch (9503:25): [True: 2.60M, False: 568k]
9504
            if ((b->b_instr[i].i_opcode == LOAD_CONST ||
  Branch (9504:18): [True: 391k, False: 2.21M]
9505
                b->b_instr[i].i_opcode == KW_NAMES) &&
  Branch (9505:17): [True: 3.11k, False: 2.20M]
9506
                    b->b_instr[i].i_oparg > max_const_index) {
  Branch (9506:21): [True: 146k, False: 248k]
9507
                max_const_index = b->b_instr[i].i_oparg;
9508
            }
9509
        }
9510
    }
9511
    if (max_const_index+1 < PyList_GET_SIZE(consts)) {
  Branch (9511:9): [True: 391, False: 72.4k]
9512
        //fprintf(stderr, "removing trailing consts: max=%d, size=%d\n",
9513
        //        max_const_index, (int)PyList_GET_SIZE(consts));
9514
        if (PyList_SetSlice(consts, max_const_index+1,
  Branch (9514:13): [True: 0, False: 391]
9515
                            PyList_GET_SIZE(consts), NULL) < 0) {
9516
            return 1;
9517
        }
9518
    }
9519
    return 0;
9520
}
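
trim_unused_consts() finds the largest constant index still referenced by a LOAD_CONST or KW_NAMES and truncates the consts list just past it, always keeping index 0 for a possible docstring. The same high-water-mark idea on a plain array, as a hypothetical sketch (consts_to_keep is an illustrative name):

    #include <stdio.h>

    /* refs[] holds the constant indices actually referenced by the code.
     * Returns how many constants to keep: everything up to and including
     * the highest referenced index; index 0 is always kept. */
    static int consts_to_keep(const int *refs, int nrefs)
    {
        int max_index = 0;
        for (int i = 0; i < nrefs; i++) {
            if (refs[i] > max_index) {
                max_index = refs[i];
            }
        }
        return max_index + 1;
    }

    int main(void)
    {
        int refs[] = { 0, 2, 1 };      /* constants 3 and 4 are never loaded */
        int nconsts = 5;
        int keep = consts_to_keep(refs, 3);
        if (keep < nconsts) {
            printf("trim consts from %d down to %d entries\n", nconsts, keep);
        }
        return 0;
    }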
9521
9522
static inline int
9523
is_exit_without_lineno(basicblock *b) {
9524
    if (!basicblock_exits_scope(b)) {
  Branch (9524:9): [True: 687k, False: 28.9k]
9525
        return 0;
9526
    }
9527
    for (int i = 0; i < b->b_iused; i++) {
  Branch (9527:21): [True: 35.0k, False: 6.92k]
9528
        if (b->b_instr[i].i_loc.lineno >= 0) {
  Branch (9528:13): [True: 22.0k, False: 13.0k]
9529
            return 0;
9530
        }
9531
    }
9532
    return 1;
9533
}
9534
9535
/* PEP 626 mandates that the f_lineno of a frame is correct
9536
 * after a frame terminates. It would be prohibitively expensive
9537
 * to continuously update the f_lineno field at runtime,
9538
 * so we make sure that all exiting instructions (raises and returns)
9539
 * have a valid line number, allowing us to compute f_lineno lazily.
9540
 * We can do this by duplicating the exit blocks without line number
9541
 * so that none have more than one predecessor. We can then safely
9542
 * copy the line number from the sole predecessor block.
9543
 */
9544
static int
9545
duplicate_exits_without_lineno(basicblock *entryblock)
9546
{
9547
    /* Copy all exit blocks without line number that are targets of a jump.
9548
     */
9549
    for (basicblock *b = entryblock; b != NULL; b = b->b_next) {
  Branch (9549:38): [True: 570k, False: 72.8k]
9550
        if (b->b_iused > 0 && is_jump(&b->b_instr[b->b_iused-1])) {
  Branch (9550:13): [True: 568k, False: 1.86k]
  Branch (9550:31): [True: 253k, False: 315k]
9551
            basicblock *target = b->b_instr[b->b_iused-1].i_target;
9552
            if (is_exit_without_lineno(target) && target->b_predecessors > 1) {
  Branch (9552:17): [True: 4.86k, False: 248k]
  Branch (9552:51): [True: 831, False: 4.02k]
9553
                basicblock *new_target = copy_basicblock(target);
9554
                if (new_target == NULL) {
  Branch (9554:21): [True: 0, False: 831]
9555
                    return -1;
9556
                }
9557
                new_target->b_instr[0].i_loc = b->b_instr[b->b_iused-1].i_loc;
9558
                b->b_instr[b->b_iused-1].i_target = new_target;
9559
                target->b_predecessors--;
9560
                new_target->b_predecessors = 1;
9561
                new_target->b_next = target->b_next;
9562
                target->b_next = new_target;
9563
            }
9564
        }
9565
    }
9566
    /* Eliminate empty blocks */
9567
    for (basicblock *b = entryblock; b != NULL; b = b->b_next) {
  Branch (9567:38): [True: 568k, False: 72.8k]
9568
        while (b->b_next && b->b_next->b_iused == 0) {
  Branch (9568:16): [True: 497k, False: 72.8k]
  Branch (9568:29): [True: 1.86k, False: 495k]
9569
            b->b_next = b->b_next->b_next;
9570
        }
9571
    }
9572
    /* Any remaining reachable exit blocks without line number can only be reached by
9573
     * fall through, and thus can only have a single predecessor */
9574
    for (basicblock *b = entryblock; b != NULL; b = b->b_next) {
  Branch (9574:38): [True: 568k, False: 72.8k]
9575
        if (BB_HAS_FALLTHROUGH(b) && b->b_next && b->b_iused > 0) {
  Branch (9575:38): [True: 462k, False: 0]
  Branch (9575:51): [True: 462k, False: 0]
9576
            if (is_exit_without_lineno(b->b_next)) {
  Branch (9576:17): [True: 2.06k, False: 460k]
9577
                assert(b->b_next->b_iused > 0);
9578
                b->b_next->b_instr[0].i_loc = b->b_instr[b->b_iused-1].i_loc;
9579
            }
9580
        }
9581
    }
9582
    return 0;
9583
}
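
The PEP 626 comment above states the invariant that duplicate_exits_without_lineno() restores: every raising or returning instruction must end up with a real line number so f_lineno can be computed lazily. Giving each jump its own copy of a shared, line-less exit block lets the copy safely inherit the jumping instruction's location. A hypothetical sketch of that duplication on simplified blocks (struct block and split_exit are illustrative names, not CPython's):

    #include <stdio.h>
    #include <string.h>

    struct block {
        int lineno;            /* -1 when the block has no line number */
        int predecessors;
        struct block *next;
    };

    /* If 'target' has no line number and more than one predecessor, clone it
     * into 'storage', point this jump at the clone, and give the clone the
     * jumping instruction's line number. */
    static struct block *split_exit(struct block *target, int jump_lineno,
                                    struct block *storage)
    {
        if (target->lineno >= 0 || target->predecessors <= 1) {
            return target;                      /* nothing to do */
        }
        memcpy(storage, target, sizeof(*storage));
        storage->lineno = jump_lineno;
        storage->predecessors = 1;
        target->predecessors--;
        storage->next = target->next;           /* keep the chain intact */
        target->next = storage;
        return storage;
    }

    int main(void)
    {
        struct block shared_exit = { -1, 2, NULL };
        struct block clone;
        struct block *mine = split_exit(&shared_exit, 42, &clone);
        printf("private exit has lineno %d, shared exit keeps %d predecessor(s)\n",
               mine->lineno, shared_exit.predecessors);
        return 0;
    }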
9584
9585
9586
/* Retained for API compatibility.
9587
 * Optimization is now done in optimize_cfg */
9588
9589
PyObject *
9590
PyCode_Optimize(PyObject *code, PyObject* Py_UNUSED(consts),
9591
                PyObject *Py_UNUSED(names), PyObject *Py_UNUSED(lnotab_obj))
9592
{
9593
    Py_INCREF(code);
9594
    return code;
9595
}