/home/mdboom/Work/builds/cpython/Objects/frameobject.c
Line | Count | Source (jump to first uncovered line) |
1 | /* Frame object implementation */ |
2 | |
3 | #include "Python.h" |
4 | #include "pycore_ceval.h" // _PyEval_BuiltinsFromGlobals() |
5 | #include "pycore_code.h" // CO_FAST_LOCAL, etc. |
6 | #include "pycore_function.h" // _PyFunction_FromConstructor() |
7 | #include "pycore_moduleobject.h" // _PyModule_GetDict() |
8 | #include "pycore_object.h" // _PyObject_GC_UNTRACK() |
9 | #include "pycore_opcode.h" // _PyOpcode_Caches |
10 | |
11 | #include "frameobject.h" // PyFrameObject |
12 | #include "pycore_frame.h" |
13 | #include "opcode.h" // EXTENDED_ARG |
14 | #include "structmember.h" // PyMemberDef |
15 | |
16 | #define OFF(x) offsetof(PyFrameObject, x) |
17 | |
18 | static PyMemberDef frame_memberlist[] = { |
19 | {"f_trace_lines", T_BOOL, OFF(f_trace_lines), 0}, |
20 | {"f_trace_opcodes", T_BOOL, OFF(f_trace_opcodes), 0}, |
21 | {NULL} /* Sentinel */ |
22 | }; |
23 | |
24 | |
25 | static PyObject * |
26 | frame_getlocals(PyFrameObject *f, void *closure) |
27 | { |
28 | if (PyFrame_FastToLocalsWithError(f) < 0) Branch (28:9): [True: 0, False: 448]
|
29 | return NULL; |
30 | PyObject *locals = f->f_frame->f_locals; |
31 | Py_INCREF(locals); |
32 | return locals; |
33 | } |
34 | |
35 | int |
36 | PyFrame_GetLineNumber(PyFrameObject *f) |
37 | { |
38 | assert(f != NULL); |
39 | if (f->f_lineno != 0) { Branch (39:9): [True: 574k, False: 4.03M]
|
40 | return f->f_lineno; |
41 | } |
42 | else { |
43 | return _PyInterpreterFrame_GetLine(f->f_frame); |
44 | } |
45 | } |
46 | |
47 | static PyObject * |
48 | frame_getlineno(PyFrameObject *f, void *closure) |
49 | { |
50 | int lineno = PyFrame_GetLineNumber(f); |
51 | if (lineno < 0) { Branch (51:9): [True: 103, False: 703k]
|
52 | Py_RETURN_NONE; |
53 | } |
54 | else { |
55 | return PyLong_FromLong(lineno); |
56 | } |
57 | } |
58 | |
59 | static PyObject * |
60 | frame_getlasti(PyFrameObject *f, void *closure) |
61 | { |
62 | int lasti = _PyInterpreterFrame_LASTI(f->f_frame); |
63 | if (lasti < 0) { Branch (63:9): [True: 0, False: 582]
|
64 | return PyLong_FromLong(-1); |
65 | } |
66 | return PyLong_FromLong(lasti * sizeof(_Py_CODEUNIT)); |
67 | } |
68 | |
69 | static PyObject * |
70 | frame_getglobals(PyFrameObject *f, void *closure) |
71 | { |
72 | PyObject *globals = f->f_frame->f_globals; |
73 | if (globals == NULL) { Branch (73:9): [True: 0, False: 241k]
|
74 | globals = Py_None; |
75 | } |
76 | Py_INCREF(globals); |
77 | return globals; |
78 | } |
79 | |
80 | static PyObject * |
81 | frame_getbuiltins(PyFrameObject *f, void *closure) |
82 | { |
83 | PyObject *builtins = f->f_frame->f_builtins; |
84 | if (builtins == NULL) { Branch (84:9): [True: 0, False: 3]
|
85 | builtins = Py_None; |
86 | } |
87 | Py_INCREF(builtins); |
88 | return builtins; |
89 | } |
90 | |
91 | static PyObject * |
92 | frame_getcode(PyFrameObject *f, void *closure) |
93 | { |
94 | if (PySys_Audit("object.__getattr__", "Os", f, "f_code") < 0) { Branch (94:9): [True: 0, False: 846k]
|
95 | return NULL; |
96 | } |
97 | return (PyObject *)PyFrame_GetCode(f); |
98 | } |
99 | |
100 | static PyObject * |
101 | frame_getback(PyFrameObject *f, void *closure) |
102 | { |
103 | PyObject *res = (PyObject *)PyFrame_GetBack(f); |
104 | if (res == NULL) { Branch (104:9): [True: 2.45k, False: 132k]
|
105 | Py_RETURN_NONE; |
106 | } |
107 | return res; |
108 | } |
109 | |
110 | // Given the index of the effective opcode, scan back to construct the oparg |
111 | // with EXTENDED_ARG. This only works correctly with *unquickened* code, |
112 | // obtained via a call to _PyCode_GetCode! |
113 | static unsigned int |
114 | get_arg(const _Py_CODEUNIT *codestr, Py_ssize_t i) |
115 | { |
116 | _Py_CODEUNIT word; |
117 | unsigned int oparg = _Py_OPARG(codestr[i]); |
118 | if (i >= 1 && _Py_OPCODE(word = codestr[i-1]) == EXTENDED_ARG) { Branch (118:9): [True: 402, False: 0]
Branch (118:19): [True: 0, False: 402]
|
119 | oparg |= _Py_OPARG(word) << 8; |
120 | if (i >= 2 && _Py_OPCODE(word = codestr[i-2]) == EXTENDED_ARG) { Branch (120:13): [True: 0, False: 0]
Branch (120:23): [True: 0, False: 0]
|
121 | oparg |= _Py_OPARG(word) << 16; |
122 | if (i >= 3 && _Py_OPCODE(word = codestr[i-3]) == EXTENDED_ARG) { Branch (122:17): [True: 0, False: 0]
Branch (122:27): [True: 0, False: 0]
|
123 | oparg |= _Py_OPARG(word) << 24; |
124 | } |
125 | } |
126 | } |
127 | return oparg; |
128 | } |
129 | |
/* Model the evaluation stack, to determine which jumps
 * are safe and how many values needs to be popped.
 * The stack is modelled by a 64 integer, treating any
 * stack that can't fit into 64 bits as "overflowed".
 */

typedef enum kind {
    Iterator = 1,
    Except = 2,
    Object = 3,
    Null = 4,
} Kind;

/* Can a value of kind `from` legally occupy a slot of kind `to`? */
static int
compatible_kind(Kind from, Kind to) {
    switch (to) {
    case Object:
        /* Any genuine value may stand in for an Object, but not a NULL. */
        return from != Null;
    case Null:
        /* A Null slot accepts anything. */
        return 1;
    default:
        /* Kind 0 (unknown) is never compatible; otherwise exact match. */
        return to != 0 && from == to;
    }
}
156 | |
157 | #define BITS_PER_BLOCK 3 |
158 | |
159 | #define UNINITIALIZED -2 |
160 | #define OVERFLOWED -1 |
161 | |
162 | #define MAX_STACK_ENTRIES (63/BITS_PER_BLOCK) |
163 | #define WILL_OVERFLOW (1ULL<<((MAX_STACK_ENTRIES-1)*BITS_PER_BLOCK)) |
164 | |
165 | static inline int64_t |
166 | push_value(int64_t stack, Kind kind) |
167 | { |
168 | if (((uint64_t)stack) >= WILL_OVERFLOW) { Branch (168:9): [True: 0, False: 1.49k]
|
169 | return OVERFLOWED; |
170 | } |
171 | else { |
172 | return (stack << BITS_PER_BLOCK) | kind; |
173 | } |
174 | } |
175 | |
176 | static inline int64_t |
177 | pop_value(int64_t stack) |
178 | { |
179 | return Py_ARITHMETIC_RIGHT_SHIFT(int64_t, stack, BITS_PER_BLOCK); |
180 | } |
181 | |
182 | static inline Kind |
183 | top_of_stack(int64_t stack) |
184 | { |
185 | return stack & ((1<<BITS_PER_BLOCK)-1); |
186 | } |
187 | |
188 | static int64_t * |
189 | mark_stacks(PyCodeObject *code_obj, int len) |
190 | { |
191 | PyObject *co_code = _PyCode_GetCode(code_obj); |
192 | if (co_code == NULL) { Branch (192:9): [True: 0, False: 88]
|
193 | return NULL; |
194 | } |
195 | _Py_CODEUNIT *code = (_Py_CODEUNIT *)PyBytes_AS_STRING(co_code); |
196 | int64_t *stacks = PyMem_New(int64_t, len+1); |
197 | int i, j, opcode; |
198 | |
199 | if (stacks == NULL) { Branch (199:9): [True: 0, False: 88]
|
200 | PyErr_NoMemory(); |
201 | Py_DECREF(co_code); |
202 | return NULL; |
203 | } |
204 | for (int i = 1; 88 i <= len; i++7.96k ) { Branch (204:21): [True: 7.96k, False: 88]
|
205 | stacks[i] = UNINITIALIZED; |
206 | } |
207 | stacks[0] = 0; |
208 | if (code_obj->co_flags & (CO_GENERATOR | CO_COROUTINE | CO_ASYNC_GENERATOR)) Branch (208:9): [True: 20, False: 68]
|
209 | { |
210 | // Generators get sent None while starting: |
211 | stacks[0] = push_value(stacks[0], Object); |
212 | } |
213 | int todo = 1; |
214 | while (todo) { Branch (214:12): [True: 88, False: 88]
|
215 | todo = 0; |
216 | for (i = 0; i < len; i++7.96k ) { Branch (216:21): [True: 7.96k, False: 88]
|
217 | int64_t next_stack = stacks[i]; |
218 | if (next_stack == UNINITIALIZED) { Branch (218:17): [True: 1.97k, False: 5.99k]
|
219 | continue; |
220 | } |
221 | opcode = _Py_OPCODE(code[i]); |
222 | switch (opcode) { |
223 | case JUMP_IF_FALSE_OR_POP: Branch (223:17): [True: 0, False: 5.99k]
|
224 | case JUMP_IF_TRUE_OR_POP: Branch (224:17): [True: 0, False: 5.99k]
|
225 | case POP_JUMP_FORWARD_IF_FALSE: Branch (225:17): [True: 4, False: 5.99k]
|
226 | case POP_JUMP_BACKWARD_IF_FALSE: Branch (226:17): [True: 0, False: 5.99k]
|
227 | case POP_JUMP_FORWARD_IF_TRUE: Branch (227:17): [True: 6, False: 5.98k]
|
228 | case POP_JUMP_BACKWARD_IF_TRUE: Branch (228:17): [True: 2, False: 5.99k]
|
229 | { |
230 | int64_t target_stack; |
231 | int j = get_arg(code, i); |
232 | if (opcode == POP_JUMP_FORWARD_IF_FALSE || Branch (232:25): [True: 4, False: 8]
|
233 | opcode == 8 POP_JUMP_FORWARD_IF_TRUE8 ) { Branch (233:25): [True: 6, False: 2]
|
234 | j += i + 1; |
235 | } |
236 | else if (opcode == POP_JUMP_BACKWARD_IF_FALSE || Branch (236:30): [True: 0, False: 2]
|
237 | opcode == POP_JUMP_BACKWARD_IF_TRUE) { Branch (237:30): [True: 2, False: 0]
|
238 | j = i + 1 - j; |
239 | } |
240 | assert(j < len); |
241 | if (stacks[j] == UNINITIALIZED && j < i10 ) { Branch (241:25): [True: 10, False: 2]
Branch (241:55): [True: 0, False: 10]
|
242 | todo = 1; |
243 | } |
244 | if (opcode == JUMP_IF_FALSE_OR_POP || Branch (244:25): [True: 0, False: 12]
|
245 | opcode == JUMP_IF_TRUE_OR_POP) Branch (245:25): [True: 0, False: 12]
|
246 | { |
247 | target_stack = next_stack; |
248 | next_stack = pop_value(next_stack); |
249 | } |
250 | else { |
251 | next_stack = pop_value(next_stack); |
252 | target_stack = next_stack; |
253 | } |
254 | assert(stacks[j] == UNINITIALIZED || stacks[j] == target_stack); |
255 | stacks[j] = target_stack; |
256 | stacks[i+1] = next_stack; |
257 | break; |
258 | } |
259 | case SEND: Branch (259:17): [True: 33, False: 5.96k]
|
260 | j = get_arg(code, i) + i + 1; |
261 | assert(j < len); |
262 | assert(stacks[j] == UNINITIALIZED || stacks[j] == pop_value(next_stack)); |
263 | stacks[j] = pop_value(next_stack); |
264 | stacks[i+1] = next_stack; |
265 | break; |
266 | case JUMP_FORWARD: Branch (266:17): [True: 3, False: 5.99k]
|
267 | j = get_arg(code, i) + i + 1; |
268 | assert(j < len); |
269 | assert(stacks[j] == UNINITIALIZED || stacks[j] == next_stack); |
270 | stacks[j] = next_stack; |
271 | break; |
272 | case JUMP_BACKWARD: Branch (272:17): [True: 22, False: 5.97k]
|
273 | case JUMP_BACKWARD_NO_INTERRUPT: Branch (273:17): [True: 33, False: 5.96k]
|
274 | j = i + 1 - get_arg(code, i); |
275 | assert(j >= 0); |
276 | assert(j < len); |
277 | if (stacks[j] == UNINITIALIZED && j < i0 ) { Branch (277:25): [True: 0, False: 55]
Branch (277:55): [True: 0, False: 0]
|
278 | todo = 1; |
279 | } |
280 | assert(stacks[j] == UNINITIALIZED || stacks[j] == next_stack); |
281 | stacks[j] = next_stack; |
282 | break; |
283 | case GET_ITER: Branch (283:17): [True: 21, False: 5.97k]
|
284 | case GET_AITER: Branch (284:17): [True: 9, False: 5.98k]
|
285 | next_stack = push_value(pop_value(next_stack), Iterator); |
286 | stacks[i+1] = next_stack; |
287 | break; |
288 | case FOR_ITER: Branch (288:17): [True: 18, False: 5.97k]
|
289 | { |
290 | int64_t target_stack = pop_value(next_stack); |
291 | stacks[i+1] = push_value(next_stack, Object); |
292 | j = get_arg(code, i) + 1 + INLINE_CACHE_ENTRIES_FOR_ITER + i; |
293 | assert(j < len); |
294 | assert(stacks[j] == UNINITIALIZED || stacks[j] == target_stack); |
295 | stacks[j] = target_stack; |
296 | break; |
297 | } |
298 | case END_ASYNC_FOR: Branch (298:17): [True: 0, False: 5.99k]
|
299 | next_stack = pop_value(pop_value(pop_value(next_stack))); |
300 | stacks[i+1] = next_stack; |
301 | break; |
302 | case PUSH_EXC_INFO: Branch (302:17): [True: 0, False: 5.99k]
|
303 | case POP_EXCEPT: Branch (303:17): [True: 0, False: 5.99k]
|
304 | /* These instructions only appear in exception handlers, which |
305 | * skip this switch ever since the move to zero-cost exceptions |
306 | * (their stack remains UNINITIALIZED because nothing sets it). |
307 | * |
308 | * Note that explain_incompatible_stack interprets an |
309 | * UNINITIALIZED stack as belonging to an exception handler. |
310 | */ |
311 | Py_UNREACHABLE(); |
312 | break; |
313 | case RETURN_VALUE: Branch (313:17): [True: 90, False: 5.90k]
|
314 | case RAISE_VARARGS: Branch (314:17): [True: 0, False: 5.99k]
|
315 | case RERAISE: Branch (315:17): [True: 0, False: 5.99k]
|
316 | /* End of block */ |
317 | break; |
318 | case PUSH_NULL: Branch (318:17): [True: 4, False: 5.99k]
|
319 | next_stack = push_value(next_stack, Null); |
320 | stacks[i+1] = next_stack; |
321 | break; |
322 | case LOAD_GLOBAL: Branch (322:17): [True: 54, False: 5.94k]
|
323 | { |
324 | int j = get_arg(code, i); |
325 | if (j & 1) { Branch (325:25): [True: 51, False: 3]
|
326 | next_stack = push_value(next_stack, Null); |
327 | } |
328 | next_stack = push_value(next_stack, Object); |
329 | stacks[i+1] = next_stack; |
330 | break; |
331 | } |
332 | case LOAD_ATTR: Branch (332:17): [True: 227, False: 5.76k]
|
333 | { |
334 | int j = get_arg(code, i); |
335 | if (j & 1) { Branch (335:25): [True: 227, False: 0]
|
336 | next_stack = pop_value(next_stack); |
337 | next_stack = push_value(next_stack, Null); |
338 | next_stack = push_value(next_stack, Object); |
339 | } |
340 | stacks[i+1] = next_stack; |
341 | break; |
342 | } |
343 | default: Branch (343:17): [True: 5.46k, False: 526]
|
344 | { |
345 | int delta = PyCompile_OpcodeStackEffect(opcode, _Py_OPARG(code[i])); |
346 | while (delta < 0) { Branch (346:28): [True: 1.08k, False: 5.46k]
|
347 | next_stack = pop_value(next_stack); |
348 | delta++; |
349 | } |
350 | while (delta > 0) { Branch (350:28): [True: 868, False: 5.46k]
|
351 | next_stack = push_value(next_stack, Object); |
352 | delta--; |
353 | } |
354 | stacks[i+1] = next_stack; |
355 | } |
356 | } |
357 | } |
358 | } |
359 | Py_DECREF(co_code); |
360 | return stacks; |
361 | } |
362 | |
363 | static int |
364 | compatible_stack(int64_t from_stack, int64_t to_stack) |
365 | { |
366 | if (from_stack < 0 || to_stack < 091 ) { Branch (366:9): [True: 10, False: 91]
Branch (366:27): [True: 19, False: 72]
|
367 | return 0; |
368 | } |
369 | while(72 from_stack > to_stack) { Branch (369:11): [True: 30, False: 72]
|
370 | from_stack = pop_value(from_stack); |
371 | } |
372 | while(from_stack) { Branch (372:11): [True: 11, False: 69]
|
373 | Kind from_top = top_of_stack(from_stack); |
374 | Kind to_top = top_of_stack(to_stack); |
375 | if (!compatible_kind(from_top, to_top)) { Branch (375:13): [True: 3, False: 8]
|
376 | return 0; |
377 | } |
378 | from_stack = pop_value(from_stack); |
379 | to_stack = pop_value(to_stack); |
380 | } |
381 | return to_stack == 0; |
382 | } |
383 | |
384 | static const char * |
385 | explain_incompatible_stack(int64_t to_stack) |
386 | { |
387 | assert(to_stack != 0); |
388 | if (to_stack == OVERFLOWED) { Branch (388:9): [True: 0, False: 22]
|
389 | return "stack is too deep to analyze"; |
390 | } |
391 | if (to_stack == UNINITIALIZED) { Branch (391:9): [True: 10, False: 12]
|
392 | return "can't jump into an exception handler, or code may be unreachable"; |
393 | } |
394 | Kind target_kind = top_of_stack(to_stack); |
395 | switch(target_kind) { |
396 | case Except: Branch (396:9): [True: 0, False: 12]
|
397 | return "can't jump into an 'except' block as there's no exception"; |
398 | case Object: Branch (398:9): [True: 3, False: 9]
|
399 | case Null: Branch (399:9): [True: 4, False: 8]
|
400 | return "incompatible stacks"; |
401 | case Iterator: Branch (401:9): [True: 5, False: 7]
|
402 | return "can't jump into the body of a for loop"; |
403 | default: Branch (403:9): [True: 0, False: 12]
|
404 | Py_UNREACHABLE(); |
405 | } |
406 | } |
407 | |
408 | static int * |
409 | marklines(PyCodeObject *code, int len) |
410 | { |
411 | PyCodeAddressRange bounds; |
412 | _PyCode_InitAddressRange(code, &bounds); |
413 | assert (bounds.ar_end == 0); |
414 | int last_line = -1; |
415 | |
416 | int *linestarts = PyMem_New(int, len); |
417 | if (linestarts == NULL) { Branch (417:9): [True: 0, False: 91]
|
418 | return NULL; |
419 | } |
420 | for (int i = 0; 91 i < len; i++8.14k ) { Branch (420:21): [True: 8.14k, False: 91]
|
421 | linestarts[i] = -1; |
422 | } |
423 | |
424 | while (_PyLineTable_NextAddressRange(&bounds)) { Branch (424:12): [True: 3.82k, False: 91]
|
425 | assert(bounds.ar_start / (int)sizeof(_Py_CODEUNIT) < len); |
426 | if (bounds.ar_line != last_line && bounds.ar_line != -1942 ) { Branch (426:13): [True: 942, False: 2.88k]
Branch (426:44): [True: 692, False: 250]
|
427 | linestarts[bounds.ar_start / sizeof(_Py_CODEUNIT)] = bounds.ar_line; |
428 | last_line = bounds.ar_line; |
429 | } |
430 | } |
431 | return linestarts; |
432 | } |
433 | |
/* Smallest entry of `lines` that is >= `line`, or -1 if there is none.
 * Used to snap a requested jump target onto a line that has code. */
static int
first_line_not_before(int *lines, int len, int line)
{
    int best = INT_MAX;
    for (int i = 0; i < len; i++) {
        int candidate = lines[i];
        if (candidate >= line && candidate < best) {
            best = candidate;
        }
    }
    return (best == INT_MAX) ? -1 : best;
}
448 | |
449 | static void |
450 | frame_stack_pop(PyFrameObject *f) |
451 | { |
452 | PyObject *v = _PyFrame_StackPop(f->f_frame); |
453 | Py_XDECREF(v); |
454 | } |
455 | |
456 | static PyFrameState |
457 | _PyFrame_GetState(PyFrameObject *frame) |
458 | { |
459 | if (frame->f_frame->stacktop == 0) { Branch (459:9): [True: 50, False: 220]
|
460 | return FRAME_CLEARED; |
461 | } |
462 | switch(frame->f_frame->owner) { Branch (462:12): [True: 0, False: 220]
|
463 | case FRAME_OWNED_BY_GENERATOR: Branch (463:9): [True: 45, False: 175]
|
464 | { |
465 | PyGenObject *gen = _PyFrame_GetGenerator(frame->f_frame); |
466 | return gen->gi_frame_state; |
467 | } |
468 | case FRAME_OWNED_BY_THREAD: Branch (468:9): [True: 175, False: 45]
|
469 | { |
470 | if (_PyInterpreterFrame_LASTI(frame->f_frame) < 0) { Branch (470:17): [True: 0, False: 175]
|
471 | return FRAME_CREATED; |
472 | } |
473 | switch (_PyOpcode_Deopt[_Py_OPCODE(*frame->f_frame->prev_instr)]) |
474 | { |
475 | case COPY_FREE_VARS: Branch (475:17): [True: 0, False: 175]
|
476 | case MAKE_CELL: Branch (476:17): [True: 0, False: 175]
|
477 | case RETURN_GENERATOR: Branch (477:17): [True: 0, False: 175]
|
478 | /* Frame not fully initialized */ |
479 | return FRAME_CREATED; |
480 | default: Branch (480:17): [True: 175, False: 0]
|
481 | return FRAME_EXECUTING; |
482 | } |
483 | } |
484 | case FRAME_OWNED_BY_FRAME_OBJECT: Branch (484:9): [True: 0, False: 220]
|
485 | return FRAME_COMPLETED; |
486 | } |
487 | Py_UNREACHABLE0 (); |
488 | } |
489 | |
490 | static void |
491 | add_load_fast_null_checks(PyCodeObject *co) |
492 | { |
493 | int changed = 0; |
494 | _Py_CODEUNIT *instructions = _PyCode_CODE(co); |
495 | for (Py_ssize_t i = 0; i < Py_SIZE(co); i++9.37k ) { Branch (495:28): [True: 9.37k, False: 94]
|
496 | switch (_Py_OPCODE(instructions[i])) { |
497 | case LOAD_FAST: Branch (497:13): [True: 376, False: 9.00k]
|
498 | case LOAD_FAST__LOAD_FAST: Branch (498:13): [True: 3, False: 9.37k]
|
499 | case LOAD_FAST__LOAD_CONST: Branch (499:13): [True: 1, False: 9.37k]
|
500 | changed = 1; |
501 | _Py_SET_OPCODE(instructions[i], LOAD_FAST_CHECK); |
502 | break; |
503 | case LOAD_CONST__LOAD_FAST: Branch (503:13): [True: 4, False: 9.37k]
|
504 | changed = 1; |
505 | _Py_SET_OPCODE(instructions[i], LOAD_CONST); |
506 | break; |
507 | case STORE_FAST__LOAD_FAST: Branch (507:13): [True: 2, False: 9.37k]
|
508 | changed = 1; |
509 | _Py_SET_OPCODE(instructions[i], STORE_FAST); |
510 | break; |
511 | } |
512 | } |
513 | if (changed) { Branch (513:9): [True: 86, False: 8]
|
514 | // invalidate cached co_code object |
515 | Py_CLEAR(co->_co_code); |
516 | } |
517 | } |
518 | |
519 | /* Setter for f_lineno - you can set f_lineno from within a trace function in |
520 | * order to jump to a given line of code, subject to some restrictions. Most |
521 | * lines are OK to jump to because they don't make any assumptions about the |
522 | * state of the stack (obvious because you could remove the line and the code |
523 | * would still work without any stack errors), but there are some constructs |
524 | * that limit jumping: |
525 | * |
526 | * o Any exception handlers. |
527 | * o 'for' and 'async for' loops can't be jumped into because the |
528 | * iterator needs to be on the stack. |
529 | * o Jumps cannot be made from within a trace function invoked with a |
530 | * 'return' or 'exception' event since the eval loop has been exited at |
531 | * that time. |
532 | */ |
533 | static int |
534 | frame_setlineno(PyFrameObject *f, PyObject* p_new_lineno, void *Py_UNUSED(ignored)) |
535 | { |
536 | if (p_new_lineno == NULL) { Branch (536:9): [True: 1, False: 97]
|
537 | PyErr_SetString(PyExc_AttributeError, "cannot delete attribute"); |
538 | return -1; |
539 | } |
540 | /* f_lineno must be an integer. */ |
541 | if (!PyLong_CheckExact(p_new_lineno)) { Branch (541:9): [True: 1, False: 96]
|
542 | PyErr_SetString(PyExc_ValueError, |
543 | "lineno must be an integer"); |
544 | return -1; |
545 | } |
546 | |
547 | PyFrameState state = _PyFrame_GetState(f); |
548 | /* |
549 | * This code preserves the historical restrictions on |
550 | * setting the line number of a frame. |
551 | * Jumps are forbidden on a 'return' trace event (except after a yield). |
552 | * Jumps from 'call' trace events are also forbidden. |
553 | * In addition, jumps are forbidden when not tracing, |
554 | * as this is a debugging feature. |
555 | */ |
556 | switch(PyThreadState_GET()->tracing_what) { |
557 | case PyTrace_EXCEPTION: Branch (557:9): [True: 1, False: 95]
|
558 | PyErr_SetString(PyExc_ValueError, |
559 | "can only jump from a 'line' trace event"); |
560 | return -1; |
561 | case PyTrace_CALL: Branch (561:9): [True: 2, False: 94]
|
562 | PyErr_Format(PyExc_ValueError, |
563 | "can't jump from the 'call' trace event of a new frame"); |
564 | return -1; |
565 | case PyTrace_LINE: Branch (565:9): [True: 91, False: 5]
|
566 | break; |
567 | case PyTrace_RETURN: Branch (567:9): [True: 2, False: 94]
|
568 | if (state == FRAME_SUSPENDED) { Branch (568:17): [True: 1, False: 1]
|
569 | break; |
570 | } |
571 | /* fall through */ |
572 | default: Branch (572:9): [True: 0, False: 96]
|
573 | PyErr_SetString(PyExc_ValueError, |
574 | "can only jump from a 'line' trace event"); |
575 | return -1; |
576 | } |
577 | if (!f->f_trace) { Branch (577:9): [True: 0, False: 92]
|
578 | PyErr_Format(PyExc_ValueError, |
579 | "f_lineno can only be set by a trace function"); |
580 | return -1; |
581 | } |
582 | |
583 | int new_lineno; |
584 | |
585 | /* Fail if the line falls outside the code block and |
586 | select first line with actual code. */ |
587 | int overflow; |
588 | long l_new_lineno = PyLong_AsLongAndOverflow(p_new_lineno, &overflow); |
589 | if (overflow Branch (589:9): [True: 0, False: 92]
|
590 | #if SIZEOF_LONG > SIZEOF_INT |
591 | || l_new_lineno > INT_MAX Branch (591:12): [True: 0, False: 92]
|
592 | || l_new_lineno < INT_MIN Branch (592:12): [True: 0, False: 92]
|
593 | #endif |
594 | ) { |
595 | PyErr_SetString(PyExc_ValueError, |
596 | "lineno out of range"); |
597 | return -1; |
598 | } |
599 | new_lineno = (int)l_new_lineno; |
600 | |
601 | if (new_lineno < f->f_frame->f_code->co_firstlineno) { Branch (601:9): [True: 1, False: 91]
|
602 | PyErr_Format(PyExc_ValueError, |
603 | "line %d comes before the current code block", |
604 | new_lineno); |
605 | return -1; |
606 | } |
607 | |
608 | add_load_fast_null_checks(f->f_frame->f_code); |
609 | |
610 | /* PyCode_NewWithPosOnlyArgs limits co_code to be under INT_MAX so this |
611 | * should never overflow. */ |
612 | int len = (int)Py_SIZE(f->f_frame->f_code); |
613 | int *lines = marklines(f->f_frame->f_code, len); |
614 | if (lines == NULL) { Branch (614:9): [True: 0, False: 91]
|
615 | return -1; |
616 | } |
617 | |
618 | new_lineno = first_line_not_before(lines, len, new_lineno); |
619 | if (new_lineno < 0) { Branch (619:9): [True: 3, False: 88]
|
620 | PyErr_Format(PyExc_ValueError, |
621 | "line %d comes after the current code block", |
622 | (int)l_new_lineno); |
623 | PyMem_Free(lines); |
624 | return -1; |
625 | } |
626 | |
627 | int64_t *stacks = mark_stacks(f->f_frame->f_code, len); |
628 | if (stacks == NULL) { Branch (628:9): [True: 0, False: 88]
|
629 | PyMem_Free(lines); |
630 | return -1; |
631 | } |
632 | |
633 | int64_t best_stack = OVERFLOWED; |
634 | int best_addr = -1; |
635 | int64_t start_stack = stacks[_PyInterpreterFrame_LASTI(f->f_frame)]; |
636 | int err = -1; |
637 | const char *msg = "cannot find bytecode for specified line"; |
638 | for (int i = 0; i < len; i++7.96k ) { Branch (638:21): [True: 7.96k, False: 88]
|
639 | if (lines[i] == new_lineno) { Branch (639:13): [True: 101, False: 7.86k]
|
640 | int64_t target_stack = stacks[i]; |
641 | if (compatible_stack(start_stack, target_stack)) { Branch (641:17): [True: 58, False: 43]
|
642 | err = 0; |
643 | if (target_stack > best_stack) { Branch (643:21): [True: 58, False: 0]
|
644 | best_stack = target_stack; |
645 | best_addr = i; |
646 | } |
647 | } |
648 | else if (err < 0) { Branch (648:22): [True: 32, False: 11]
|
649 | if (start_stack == OVERFLOWED) { Branch (649:21): [True: 0, False: 32]
|
650 | msg = "stack to deep to analyze"; |
651 | } |
652 | else if (start_stack == UNINITIALIZED) { Branch (652:26): [True: 10, False: 22]
|
653 | msg = "can't jump from within an exception handler"; |
654 | } |
655 | else { |
656 | msg = explain_incompatible_stack(target_stack); |
657 | err = 1; |
658 | } |
659 | } |
660 | } |
661 | } |
662 | PyMem_Free(stacks); |
663 | PyMem_Free(lines); |
664 | if (err) { Branch (664:9): [True: 30, False: 58]
|
665 | PyErr_SetString(PyExc_ValueError, msg); |
666 | return -1; |
667 | } |
668 | if (state == FRAME_SUSPENDED) { Branch (668:9): [True: 1, False: 57]
|
669 | /* Account for value popped by yield */ |
670 | start_stack = pop_value(start_stack); |
671 | } |
672 | while (start_stack > best_stack) { Branch (672:12): [True: 26, False: 58]
|
673 | frame_stack_pop(f); |
674 | start_stack = pop_value(start_stack); |
675 | } |
676 | /* Finally set the new lasti and return OK. */ |
677 | f->f_lineno = 0; |
678 | f->f_frame->prev_instr = _PyCode_CODE(f->f_frame->f_code) + best_addr; |
679 | return 0; |
680 | } |
681 | |
682 | static PyObject * |
683 | frame_gettrace(PyFrameObject *f, void *closure) |
684 | { |
685 | PyObject* trace = f->f_trace; |
686 | |
687 | if (trace == NULL) Branch (687:9): [True: 2, False: 5]
|
688 | trace = Py_None; |
689 | |
690 | Py_INCREF(trace); |
691 | |
692 | return trace; |
693 | } |
694 | |
695 | static int |
696 | frame_settrace(PyFrameObject *f, PyObject* v, void *closure) |
697 | { |
698 | if (v == Py_None) { Branch (698:9): [True: 3, False: 1.62k]
|
699 | v = NULL; |
700 | } |
701 | Py_XINCREF(v); |
702 | Py_XSETREF(f->f_trace, v); |
703 | |
704 | return 0; |
705 | } |
706 | |
707 | |
708 | static PyGetSetDef frame_getsetlist[] = { |
709 | {"f_back", (getter)frame_getback, NULL, NULL}, |
710 | {"f_locals", (getter)frame_getlocals, NULL, NULL}, |
711 | {"f_lineno", (getter)frame_getlineno, |
712 | (setter)frame_setlineno, NULL}, |
713 | {"f_trace", (getter)frame_gettrace, (setter)frame_settrace, NULL}, |
714 | {"f_lasti", (getter)frame_getlasti, NULL, NULL}, |
715 | {"f_globals", (getter)frame_getglobals, NULL, NULL}, |
716 | {"f_builtins", (getter)frame_getbuiltins, NULL, NULL}, |
717 | {"f_code", (getter)frame_getcode, NULL, NULL}, |
718 | {0} |
719 | }; |
720 | |
/* Stack frames are allocated and deallocated at a considerable rate.
   NOTE(review): the historical text below describes a per-type free list
   ("f_back is the next item on the free list"), but no free-list code is
   visible in this file -- frame_dealloc releases frames directly with
   PyObject_GC_Del. This comment looks stale; confirm against the current
   allocator before relying on it:
     ob_type == &Frametype
     f_back next item on free list, or NULL
*/
729 | |
730 | static void |
731 | frame_dealloc(PyFrameObject *f) |
732 | { |
733 | /* It is the responsibility of the owning generator/coroutine |
734 | * to have cleared the generator pointer */ |
735 | |
736 | assert(f->f_frame->owner != FRAME_OWNED_BY_GENERATOR || |
737 | _PyFrame_GetGenerator(f->f_frame)->gi_frame_state == FRAME_CLEARED); |
738 | |
739 | if (_PyObject_GC_IS_TRACKED(f)) { |
740 | _PyObject_GC_UNTRACK(f); |
741 | } |
742 | |
743 | Py_TRASHCAN_BEGIN(f, frame_dealloc); |
744 | PyCodeObject *co = NULL; |
745 | |
746 | /* Kill all local variables including specials, if we own them */ |
747 | if (f->f_frame->owner == FRAME_OWNED_BY_FRAME_OBJECT) { Branch (747:9): [True: 1.12M, False: 2.33M]
|
748 | assert(f->f_frame == (_PyInterpreterFrame *)f->_f_frame_data); |
749 | _PyInterpreterFrame *frame = (_PyInterpreterFrame *)f->_f_frame_data; |
750 | /* Don't clear code object until the end */ |
751 | co = frame->f_code; |
752 | frame->f_code = NULL; |
753 | Py_CLEAR(frame->f_func); |
754 | Py_CLEAR(frame->f_locals); |
755 | PyObject **locals = _PyFrame_GetLocalsArray(frame); |
756 | for (int i = 0; i < frame->stacktop; i++6.19M ) { Branch (756:25): [True: 6.19M, False: 1.12M]
|
757 | Py_CLEAR(locals[i]); |
758 | } |
759 | } |
760 | Py_CLEAR(f->f_back); |
761 | Py_CLEAR(f->f_trace); |
762 | PyObject_GC_Del(f); |
763 | Py_XDECREF(co); |
764 | Py_TRASHCAN_END; |
765 | } |
766 | |
767 | static int |
768 | frame_traverse(PyFrameObject *f, visitproc visit, void *arg) |
769 | { |
770 | Py_VISIT(f->f_back); |
771 | Py_VISIT(f->f_trace); |
772 | if (f->f_frame->owner != FRAME_OWNED_BY_FRAME_OBJECT) { Branch (772:9): [True: 0, False: 1.52M]
|
773 | return 0; |
774 | } |
775 | assert(f->f_frame->frame_obj == NULL); |
776 | return _PyFrame_Traverse(f->f_frame, visit, arg); |
777 | } |
778 | |
779 | static int |
780 | frame_tp_clear(PyFrameObject *f) |
781 | { |
782 | Py_CLEAR(f->f_trace); |
783 | |
784 | /* locals and stack */ |
785 | PyObject **locals = _PyFrame_GetLocalsArray(f->f_frame); |
786 | assert(f->f_frame->stacktop >= 0); |
787 | for (int i = 0; i < f->f_frame->stacktop; i++536k ) { Branch (787:21): [True: 536k, False: 70.2k]
|
788 | Py_CLEAR(locals[i]); |
789 | } |
790 | f->f_frame->stacktop = 0; |
791 | return 0; |
792 | } |
793 | |
794 | static PyObject * |
795 | frame_clear(PyFrameObject *f, PyObject *Py_UNUSED(ignored)) |
796 | { |
797 | if (f->f_frame->owner == FRAME_OWNED_BY_GENERATOR) { Branch (797:9): [True: 723, False: 255k]
|
798 | PyGenObject *gen = _PyFrame_GetGenerator(f->f_frame); |
799 | if (gen->gi_frame_state == FRAME_EXECUTING) { Branch (799:13): [True: 720, False: 3]
|
800 | goto running; |
801 | } |
802 | _PyGen_Finalize((PyObject *)gen); |
803 | } |
804 | else if (f->f_frame->owner == FRAME_OWNED_BY_THREAD) { Branch (804:14): [True: 185k, False: 69.6k]
|
805 | goto running; |
806 | } |
807 | else { |
808 | assert(f->f_frame->owner == FRAME_OWNED_BY_FRAME_OBJECT); |
809 | (void)frame_tp_clear(f); |
810 | } |
811 | Py_RETURN_NONE69.6k ; |
812 | running: |
813 | PyErr_SetString(PyExc_RuntimeError, |
814 | "cannot clear an executing frame"); |
815 | return NULL; |
816 | } |
817 | |
818 | PyDoc_STRVAR(clear__doc__, |
819 | "F.clear(): clear most references held by the frame"); |
820 | |
821 | static PyObject * |
822 | frame_sizeof(PyFrameObject *f, PyObject *Py_UNUSED(ignored)) |
823 | { |
824 | Py_ssize_t res; |
825 | res = offsetof(PyFrameObject, _f_frame_data) + offsetof(_PyInterpreterFrame, localsplus); |
826 | PyCodeObject *code = f->f_frame->f_code; |
827 | res += (code->co_nlocalsplus+code->co_stacksize) * sizeof(PyObject *); |
828 | return PyLong_FromSsize_t(res); |
829 | } |
830 | |
831 | PyDoc_STRVAR(sizeof__doc__, |
832 | "F.__sizeof__() -> size of F in memory, in bytes"); |
833 | |
834 | static PyObject * |
835 | frame_repr(PyFrameObject *f) |
836 | { |
837 | int lineno = PyFrame_GetLineNumber(f); |
838 | PyCodeObject *code = f->f_frame->f_code; |
839 | return PyUnicode_FromFormat( |
840 | "<frame at %p, file %R, line %d, code %S>", |
841 | f, code->co_filename, lineno, code->co_name); |
842 | } |
843 | |
844 | static PyMethodDef frame_methods[] = { |
845 | {"clear", (PyCFunction)frame_clear, METH_NOARGS, |
846 | clear__doc__}, |
847 | {"__sizeof__", (PyCFunction)frame_sizeof, METH_NOARGS, |
848 | sizeof__doc__}, |
849 | {NULL, NULL} /* sentinel */ |
850 | }; |
851 | |
/* Type object for frame objects.  tp_basicsize covers the PyFrameObject
   header plus the fixed portion of the embedded _PyInterpreterFrame;
   tp_itemsize adds one PyObject* per local/cell/free/stack slot (the
   frame is a GC var-object sized per code object). */
PyTypeObject PyFrame_Type = {
    PyVarObject_HEAD_INIT(&PyType_Type, 0)
    "frame",
    offsetof(PyFrameObject, _f_frame_data) +
    offsetof(_PyInterpreterFrame, localsplus),
    sizeof(PyObject *),
    (destructor)frame_dealloc,                  /* tp_dealloc */
    0,                                          /* tp_vectorcall_offset */
    0,                                          /* tp_getattr */
    0,                                          /* tp_setattr */
    0,                                          /* tp_as_async */
    (reprfunc)frame_repr,                       /* tp_repr */
    0,                                          /* tp_as_number */
    0,                                          /* tp_as_sequence */
    0,                                          /* tp_as_mapping */
    0,                                          /* tp_hash */
    0,                                          /* tp_call */
    0,                                          /* tp_str */
    PyObject_GenericGetAttr,                    /* tp_getattro */
    PyObject_GenericSetAttr,                    /* tp_setattro */
    0,                                          /* tp_as_buffer */
    Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC,    /* tp_flags */
    0,                                          /* tp_doc */
    (traverseproc)frame_traverse,               /* tp_traverse */
    (inquiry)frame_tp_clear,                    /* tp_clear */
    0,                                          /* tp_richcompare */
    0,                                          /* tp_weaklistoffset */
    0,                                          /* tp_iter */
    0,                                          /* tp_iternext */
    frame_methods,                              /* tp_methods */
    frame_memberlist,                           /* tp_members */
    frame_getsetlist,                           /* tp_getset */
    0,                                          /* tp_base */
    0,                                          /* tp_dict */
};
887 | |
888 | static void |
889 | init_frame(_PyInterpreterFrame *frame, PyFunctionObject *func, PyObject *locals) |
890 | { |
891 | /* _PyFrame_InitializeSpecials consumes reference to func */ |
892 | Py_INCREF(func); |
893 | Py_XINCREF(locals); |
894 | PyCodeObject *code = (PyCodeObject *)func->func_code; |
895 | _PyFrame_InitializeSpecials(frame, func, locals, code); |
896 | for (Py_ssize_t i = 0; i < code->co_nlocalsplus; i++0 ) { Branch (896:28): [True: 0, False: 30]
|
897 | frame->localsplus[i] = NULL; |
898 | } |
899 | } |
900 | |
901 | PyFrameObject* |
902 | _PyFrame_New_NoTrack(PyCodeObject *code) |
903 | { |
904 | CALL_STAT_INC(frame_objects_created); |
905 | int slots = code->co_nlocalsplus + code->co_stacksize; |
906 | PyFrameObject *f = PyObject_GC_NewVar(PyFrameObject, &PyFrame_Type, slots); |
907 | if (f == NULL) { Branch (907:9): [True: 0, False: 3.46M]
|
908 | return NULL; |
909 | } |
910 | f->f_back = NULL; |
911 | f->f_trace = NULL; |
912 | f->f_trace_lines = 1; |
913 | f->f_trace_opcodes = 0; |
914 | f->f_fast_as_locals = 0; |
915 | f->f_lineno = 0; |
916 | return f; |
917 | } |
918 | |
919 | /* Legacy API */ |
920 | PyFrameObject* |
921 | PyFrame_New(PyThreadState *tstate, PyCodeObject *code, |
922 | PyObject *globals, PyObject *locals) |
923 | { |
924 | PyObject *builtins = _PyEval_BuiltinsFromGlobals(tstate, globals); // borrowed ref |
925 | if (builtins == NULL) { Branch (925:9): [True: 0, False: 30]
|
926 | return NULL; |
927 | } |
928 | PyFrameConstructor desc = { |
929 | .fc_globals = globals, |
930 | .fc_builtins = builtins, |
931 | .fc_name = code->co_name, |
932 | .fc_qualname = code->co_name, |
933 | .fc_code = (PyObject *)code, |
934 | .fc_defaults = NULL, |
935 | .fc_kwdefaults = NULL, |
936 | .fc_closure = NULL |
937 | }; |
938 | PyFunctionObject *func = _PyFunction_FromConstructor(&desc); |
939 | if (func == NULL) { Branch (939:9): [True: 0, False: 30]
|
940 | return NULL; |
941 | } |
942 | PyFrameObject *f = _PyFrame_New_NoTrack(code); |
943 | if (f == NULL) { Branch (943:9): [True: 0, False: 30]
|
944 | Py_DECREF(func); |
945 | return NULL; |
946 | } |
947 | init_frame((_PyInterpreterFrame *)f->_f_frame_data, func, locals); |
948 | f->f_frame = (_PyInterpreterFrame *)f->_f_frame_data; |
949 | f->f_frame->owner = FRAME_OWNED_BY_FRAME_OBJECT; |
950 | Py_DECREF(func); |
951 | _PyObject_GC_TRACK(f); |
952 | return f; |
953 | } |
954 | |
/* Return 1 if an instruction (opcode, oparg) has already executed in
   `frame`, 0 otherwise.  Scans the code from the start up to (not
   including) prev_instr, reassembling EXTENDED_ARG prefixes and skipping
   inline caches as it goes. */
static int
_PyFrame_OpAlreadyRan(_PyInterpreterFrame *frame, int opcode, int oparg)
{
    // This only works when opcode is a non-quickened form:
    assert(_PyOpcode_Deopt[opcode] == opcode);
    int check_oparg = 0;
    for (_Py_CODEUNIT *instruction = _PyCode_CODE(frame->f_code);
         instruction < frame->prev_instr; instruction++)
    {
        // De-quicken before comparing, since the bytecode may be specialized.
        int check_opcode = _PyOpcode_Deopt[_Py_OPCODE(*instruction)];
        check_oparg |= _Py_OPARG(*instruction);
        if (check_opcode == opcode && check_oparg == oparg) {
            return 1;
        }
        if (check_opcode == EXTENDED_ARG) {
            // Accumulate the prefix into the effective oparg.
            check_oparg <<= 8;
        }
        else {
            check_oparg = 0;
        }
        // Skip this instruction's inline cache entries, if any.
        instruction += _PyOpcode_Caches[check_opcode];
    }
    return 0;
}
979 | |
/* Copy the frame's fast locals (locals array, cells, free vars) into its
   f_locals dict, creating the dict if needed.  Cells are dereferenced so
   the dict sees the contained value.  Returns 0 on success, -1 (with an
   exception set) on error. */
int
_PyFrame_FastToLocalsWithError(_PyInterpreterFrame *frame) {
    /* Merge fast locals into f->f_locals */
    PyObject *locals;
    PyObject **fast;
    PyCodeObject *co;
    locals = frame->f_locals;
    if (locals == NULL) {
        /* Lazily create the locals mapping on first request. */
        locals = frame->f_locals = PyDict_New();
        if (locals == NULL)
            return -1;
    }
    co = frame->f_code;
    fast = _PyFrame_GetLocalsArray(frame);
    // COPY_FREE_VARS has no quickened forms, so no need to use _PyOpcode_Deopt
    // here:
    int lasti = _PyInterpreterFrame_LASTI(frame);
    if (lasti < 0 && _Py_OPCODE(_PyCode_CODE(co)[0]) == COPY_FREE_VARS) {
        /* Free vars have not been initialized -- Do that */
        PyCodeObject *co = frame->f_code;
        PyObject *closure = frame->f_func->func_closure;
        int offset = co->co_nlocals + co->co_nplaincellvars;
        for (int i = 0; i < co->co_nfreevars; ++i) {
            PyObject *o = PyTuple_GET_ITEM(closure, i);
            Py_INCREF(o);
            frame->localsplus[offset + i] = o;
        }
        // COPY_FREE_VARS doesn't have inline CACHEs, either:
        frame->prev_instr = _PyCode_CODE(frame->f_code);
    }
    for (int i = 0; i < co->co_nlocalsplus; i++) {
        _PyLocals_Kind kind = _PyLocals_GetKind(co->co_localspluskinds, i);

        /* If the namespace is unoptimized, then one of the
           following cases applies:
           1. It does not contain free variables, because it
              uses import * or is a top-level namespace.
           2. It is a class namespace.
           We don't want to accidentally copy free variables
           into the locals dict used by the class.
        */
        if (kind & CO_FAST_FREE && !(co->co_flags & CO_OPTIMIZED)) {
            continue;
        }

        PyObject *name = PyTuple_GET_ITEM(co->co_localsplusnames, i);
        PyObject *value = fast[i];
        if (frame->stacktop) {
            if (kind & CO_FAST_FREE) {
                // The cell was set by COPY_FREE_VARS.
                assert(value != NULL && PyCell_Check(value));
                value = PyCell_GET(value);
            }
            else if (kind & CO_FAST_CELL) {
                // Note that no *_DEREF ops can happen before MAKE_CELL
                // executes.  So there's no need to duplicate the work
                // that MAKE_CELL would otherwise do later, if it hasn't
                // run yet.
                if (value != NULL) {
                    if (PyCell_Check(value) &&
                            _PyFrame_OpAlreadyRan(frame, MAKE_CELL, i)) {
                        // (likely) MAKE_CELL must have executed already.
                        value = PyCell_GET(value);
                    }
                    // (likely) Otherwise it is an arg (kind & CO_FAST_LOCAL),
                    // with the initial value set when the frame was created...
                    // (unlikely) ...or it was set to some initial value by
                    // an earlier call to PyFrame_LocalsToFast().
                }
            }
        }
        else {
            /* Frame was cleared (stacktop == 0): every slot must be NULL. */
            assert(value == NULL);
        }
        if (value == NULL) {
            /* Unbound slot: remove any stale entry from the dict; a
               missing key is expected and not an error. */
            if (PyObject_DelItem(locals, name) != 0) {
                if (PyErr_ExceptionMatches(PyExc_KeyError)) {
                    PyErr_Clear();
                }
                else {
                    return -1;
                }
            }
        }
        else {
            if (PyObject_SetItem(locals, name, value) != 0) {
                return -1;
            }
        }
    }
    return 0;
}
1072 | |
1073 | int |
1074 | PyFrame_FastToLocalsWithError(PyFrameObject *f) |
1075 | { |
1076 | if (f == NULL) { Branch (1076:9): [True: 0, False: 489]
|
1077 | PyErr_BadInternalCall(); |
1078 | return -1; |
1079 | } |
1080 | int err = _PyFrame_FastToLocalsWithError(f->f_frame); |
1081 | if (err == 0) { Branch (1081:9): [True: 489, False: 0]
|
1082 | f->f_fast_as_locals = 1; |
1083 | } |
1084 | return err; |
1085 | } |
1086 | |
1087 | void |
1088 | PyFrame_FastToLocals(PyFrameObject *f) |
1089 | { |
1090 | int res; |
1091 |
|
1092 | assert(!PyErr_Occurred()); |
1093 |
|
1094 | res = PyFrame_FastToLocalsWithError(f); |
1095 | if (res < 0) Branch (1095:9): [True: 0, False: 0]
|
1096 | PyErr_Clear(); |
1097 | } |
1098 | |
/* Write the contents of the frame's f_locals dict back into the fast
   locals array (and into cells/free vars).  If `clear` is true, names
   missing from the dict also clear the corresponding fast slot.  Any
   pending exception is preserved across the dict lookups. */
void
_PyFrame_LocalsToFast(_PyInterpreterFrame *frame, int clear)
{
    /* Merge locals into fast locals */
    PyObject *locals;
    PyObject **fast;
    PyObject *error_type, *error_value, *error_traceback;
    PyCodeObject *co;
    locals = frame->f_locals;
    if (locals == NULL) {
        /* No dict snapshot exists; nothing to write back. */
        return;
    }
    fast = _PyFrame_GetLocalsArray(frame);
    co = frame->f_code;
    bool added_null_checks = false;

    /* Stash any in-flight exception; PyObject_GetItem below may raise. */
    PyErr_Fetch(&error_type, &error_value, &error_traceback);
    for (int i = 0; i < co->co_nlocalsplus; i++) {
        _PyLocals_Kind kind = _PyLocals_GetKind(co->co_localspluskinds, i);

        /* Same test as in PyFrame_FastToLocals() above. */
        if (kind & CO_FAST_FREE && !(co->co_flags & CO_OPTIMIZED)) {
            continue;
        }
        PyObject *name = PyTuple_GET_ITEM(co->co_localsplusnames, i);
        PyObject *value = PyObject_GetItem(locals, name);
        /* We only care about NULLs if clear is true. */
        if (value == NULL) {
            PyErr_Clear();
            if (!clear) {
                continue;
            }
        }
        PyObject *oldvalue = fast[i];
        /* About to NULL out a live slot: de-specialize LOAD_FAST so the
           interpreter re-checks for unbound locals (done once per code). */
        if (!added_null_checks && oldvalue != NULL && value == NULL) {
            add_load_fast_null_checks(co);
            added_null_checks = true;
        }
        PyObject *cell = NULL;
        if (kind == CO_FAST_FREE) {
            // The cell was set when the frame was created from
            // the function's closure.
            assert(oldvalue != NULL && PyCell_Check(oldvalue));
            cell = oldvalue;
        }
        else if (kind & CO_FAST_CELL && oldvalue != NULL) {
            /* Same test as in PyFrame_FastToLocals() above. */
            if (PyCell_Check(oldvalue) &&
                    _PyFrame_OpAlreadyRan(frame, MAKE_CELL, i)) {
                // (likely) MAKE_CELL must have executed already.
                cell = oldvalue;
            }
            // (unlikely) Otherwise, it must have been set to some
            // initial value by an earlier call to PyFrame_LocalsToFast().
        }
        if (cell != NULL) {
            /* Store into the cell, not the slot itself. */
            oldvalue = PyCell_GET(cell);
            if (value != oldvalue) {
                Py_XINCREF(value);
                PyCell_SET(cell, value);
                Py_XDECREF(oldvalue);
            }
        }
        else if (value != oldvalue) {
            Py_XINCREF(value);
            Py_XSETREF(fast[i], value);
        }
        Py_XDECREF(value);
    }
    PyErr_Restore(error_type, error_value, error_traceback);
}
1170 | |
1171 | void |
1172 | PyFrame_LocalsToFast(PyFrameObject *f, int clear) |
1173 | { |
1174 | if (f && f->f_fast_as_locals && _PyFrame_GetState(f) != FRAME_CLEARED174 ) { Branch (1174:9): [True: 889k, False: 0]
Branch (1174:14): [True: 174, False: 889k]
Branch (1174:37): [True: 125, False: 49]
|
1175 | _PyFrame_LocalsToFast(f->f_frame, clear); |
1176 | f->f_fast_as_locals = 0; |
1177 | } |
1178 | } |
1179 | |
1180 | |
1181 | int _PyFrame_IsEntryFrame(PyFrameObject *frame) |
1182 | { |
1183 | assert(frame != NULL); |
1184 | return frame->f_frame->is_entry; |
1185 | } |
1186 | |
1187 | |
1188 | PyCodeObject * |
1189 | PyFrame_GetCode(PyFrameObject *frame) |
1190 | { |
1191 | assert(frame != NULL); |
1192 | PyCodeObject *code = frame->f_frame->f_code; |
1193 | assert(code != NULL); |
1194 | Py_INCREF(code); |
1195 | return code; |
1196 | } |
1197 | |
1198 | |
1199 | PyFrameObject* |
1200 | PyFrame_GetBack(PyFrameObject *frame) |
1201 | { |
1202 | assert(frame != NULL); |
1203 | PyFrameObject *back = frame->f_back; |
1204 | if (back == NULL) { Branch (1204:9): [True: 137k, False: 36]
|
1205 | _PyInterpreterFrame *prev = frame->f_frame->previous; |
1206 | while (prev && _PyFrame_IsIncomplete(prev)135k ) { Branch (1206:16): [True: 135k, False: 2.45k]
Branch (1206:24): [True: 4, False: 135k]
|
1207 | prev = prev->previous; |
1208 | } |
1209 | if (prev) { Branch (1209:13): [True: 135k, False: 2.45k]
|
1210 | back = _PyFrame_GetFrameObject(prev); |
1211 | } |
1212 | } |
1213 | Py_XINCREF(back); |
1214 | return back; |
1215 | } |
1216 | |
/* C-API accessor: return a new reference to the frame's locals dict
   (after merging fast locals into it), or NULL with an exception set. */
PyObject*
PyFrame_GetLocals(PyFrameObject *frame)
{
    return frame_getlocals(frame, NULL);
}
1222 | |
/* C-API accessor: return the frame's globals via the f_globals getter.
   Reference semantics follow frame_getglobals() (defined earlier in
   this file, outside this view). */
PyObject*
PyFrame_GetGlobals(PyFrameObject *frame)
{
    return frame_getglobals(frame, NULL);
}
1228 | |
/* C-API accessor: return the frame's builtins via the f_builtins getter.
   Reference semantics follow frame_getbuiltins() (defined earlier in
   this file, outside this view). */
PyObject*
PyFrame_GetBuiltins(PyFrameObject *frame)
{
    return frame_getbuiltins(frame, NULL);
}
1234 | |
1235 | int |
1236 | PyFrame_GetLasti(PyFrameObject *frame) |
1237 | { |
1238 | int lasti = _PyInterpreterFrame_LASTI(frame->f_frame); |
1239 | if (lasti < 0) { Branch (1239:9): [True: 0, False: 1]
|
1240 | return -1; |
1241 | } |
1242 | return lasti * sizeof(_Py_CODEUNIT); |
1243 | } |
1244 | |
1245 | PyObject * |
1246 | PyFrame_GetGenerator(PyFrameObject *frame) |
1247 | { |
1248 | if (frame->f_frame->owner != FRAME_OWNED_BY_GENERATOR) { Branch (1248:9): [True: 0, False: 1]
|
1249 | return NULL; |
1250 | } |
1251 | PyGenObject *gen = _PyFrame_GetGenerator(frame->f_frame); |
1252 | return Py_NewRef(gen); |
1253 | } |
1254 | |
1255 | PyObject* |
1256 | _PyEval_BuiltinsFromGlobals(PyThreadState *tstate, PyObject *globals) |
1257 | { |
1258 | PyObject *builtins = PyDict_GetItemWithError(globals, &_Py_ID(__builtins__)); |
1259 | if (builtins) { Branch (1259:9): [True: 2.57M, False: 71]
|
1260 | if (PyModule_Check(builtins)) { |
1261 | builtins = _PyModule_GetDict(builtins); |
1262 | assert(builtins != NULL); |
1263 | } |
1264 | return builtins; |
1265 | } |
1266 | if (PyErr_Occurred()) { Branch (1266:9): [True: 0, False: 71]
|
1267 | return NULL; |
1268 | } |
1269 | |
1270 | return _PyEval_GetBuiltins(tstate); |
1271 | } |
1272 | |
1273 | |