Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
allendowney
GitHub Repository: allendowney/cpython
Path: blob/main/Python/compile.c
12 views
1
/*
2
* This file compiles an abstract syntax tree (AST) into Python bytecode.
3
*
4
* The primary entry point is _PyAST_Compile(), which returns a
5
* PyCodeObject. The compiler makes several passes to build the code
6
* object:
7
* 1. Checks for future statements. See future.c
8
* 2. Builds a symbol table. See symtable.c.
9
* 3. Generate an instruction sequence. See compiler_mod() in this file.
10
* 4. Generate a control flow graph and run optimizations on it. See flowgraph.c.
11
* 5. Assemble the basic blocks into final code. See optimize_and_assemble() in
12
* this file, and assembler.c.
13
*
14
* Note that compiler_mod() suggests module, but the module ast type
15
* (mod_ty) has cases for expressions and interactive statements.
16
*
17
* CAUTION: The VISIT_* macros abort the current function when they
18
* encounter a problem. So don't invoke them when there is memory
19
* which needs to be released. Code blocks are OK, as the compiler
20
* structure takes care of releasing those. Use the arena to manage
21
* objects.
22
*/
23
24
#include <stdbool.h>
25
26
#include "Python.h"
27
#include "pycore_ast.h" // _PyAST_GetDocString()
28
#define NEED_OPCODE_TABLES
29
#include "pycore_opcode_utils.h"
30
#undef NEED_OPCODE_TABLES
31
#include "pycore_flowgraph.h"
32
#include "pycore_code.h" // _PyCode_New()
33
#include "pycore_compile.h"
34
#include "pycore_intrinsics.h"
35
#include "pycore_long.h" // _PyLong_GetZero()
36
#include "pycore_pystate.h" // _Py_GetConfig()
37
#include "pycore_symtable.h" // PySTEntryObject, _PyFuture_FromAST()
38
39
#include "opcode_metadata.h" // _PyOpcode_opcode_metadata, _PyOpcode_num_popped/pushed
40
41
#define COMP_GENEXP 0
42
#define COMP_LISTCOMP 1
43
#define COMP_SETCOMP 2
44
#define COMP_DICTCOMP 3
45
46
/* A soft limit for stack use, to avoid excessive
47
* memory use for large constants, etc.
48
*
49
* The value 30 is plucked out of thin air.
50
* Code that could use more stack than this is
51
* rare, so the exact value is unimportant.
52
*/
53
#define STACK_USE_GUIDELINE 30
54
55
#undef SUCCESS
56
#undef ERROR
57
#define SUCCESS 0
58
#define ERROR -1
59
60
/* Propagate an error return: evaluate X once and return ERROR from the
 * enclosing function if it yielded -1.  Wrapped in do { } while (0) so the
 * macro expands to exactly one statement and composes safely with
 * if/else (the unwrapped form mis-binds a following `else`). */
#define RETURN_IF_ERROR(X)  \
    do {                    \
        if ((X) == -1) {    \
            return ERROR;   \
        }                   \
    } while (0)
64
65
#define IS_TOP_LEVEL_AWAIT(C) ( \
66
((C)->c_flags.cf_flags & PyCF_ALLOW_TOP_LEVEL_AWAIT) \
67
&& ((C)->u->u_ste->ste_type == ModuleBlock))
68
69
typedef _PyCompilerSrcLocation location;
70
typedef _PyCfgInstruction cfg_instr;
71
typedef _PyCfgBasicblock basicblock;
72
typedef _PyCfgBuilder cfg_builder;
73
74
#define LOCATION(LNO, END_LNO, COL, END_COL) \
75
((const _PyCompilerSrcLocation){(LNO), (END_LNO), (COL), (END_COL)})
76
77
/* Return true if loc1 starts after loc2 ends. */
78
static inline bool
79
location_is_after(location loc1, location loc2) {
80
return (loc1.lineno > loc2.end_lineno) ||
81
((loc1.lineno == loc2.end_lineno) &&
82
(loc1.col_offset > loc2.end_col_offset));
83
}
84
85
#define LOC(x) SRC_LOCATION_FROM_AST(x)
86
87
typedef _PyCfgJumpTargetLabel jump_target_label;
88
89
static jump_target_label NO_LABEL = {-1};
90
91
#define SAME_LABEL(L1, L2) ((L1).id == (L2).id)
92
#define IS_LABEL(L) (!SAME_LABEL((L), (NO_LABEL)))
93
94
#define NEW_JUMP_TARGET_LABEL(C, NAME) \
95
jump_target_label NAME = instr_sequence_new_label(INSTR_SEQUENCE(C)); \
96
if (!IS_LABEL(NAME)) { \
97
return ERROR; \
98
}
99
100
#define USE_LABEL(C, LBL) \
101
RETURN_IF_ERROR(instr_sequence_use_label(INSTR_SEQUENCE(C), (LBL).id))
102
103
104
/* fblockinfo tracks the current frame block.
105
106
A frame block is used to handle loops, try/except, and try/finally.
107
It's called a frame block to distinguish it from a basic block in the
108
compiler IR.
109
*/
110
111
enum fblocktype { WHILE_LOOP, FOR_LOOP, TRY_EXCEPT, FINALLY_TRY, FINALLY_END,
112
WITH, ASYNC_WITH, HANDLER_CLEANUP, POP_VALUE, EXCEPTION_HANDLER,
113
EXCEPTION_GROUP_HANDLER, ASYNC_COMPREHENSION_GENERATOR };
114
115
struct fblockinfo {
116
enum fblocktype fb_type;
117
jump_target_label fb_block;
118
/* (optional) type-specific exit or cleanup block */
119
jump_target_label fb_exit;
120
/* (optional) additional information required for unwinding */
121
void *fb_datum;
122
};
123
124
enum {
125
COMPILER_SCOPE_MODULE,
126
COMPILER_SCOPE_CLASS,
127
COMPILER_SCOPE_FUNCTION,
128
COMPILER_SCOPE_ASYNC_FUNCTION,
129
COMPILER_SCOPE_LAMBDA,
130
COMPILER_SCOPE_COMPREHENSION,
131
COMPILER_SCOPE_TYPEPARAMS,
132
};
133
134
135
typedef _PyCompile_Instruction instruction;
136
typedef _PyCompile_InstructionSequence instr_sequence;
137
138
#define INITIAL_INSTR_SEQUENCE_SIZE 100
139
#define INITIAL_INSTR_SEQUENCE_LABELS_MAP_SIZE 10
140
141
/*
 * Ensure that *array (holding *alloc items of item_size bytes each) is
 * large enough to access index idx, (re)allocating it if necessary.
 *
 * idx: the index we want to access
 * array: pointer to the array variable (updated in place when it grows)
 * alloc: pointer to the capacity of the array (updated when it grows)
 * default_alloc: initial number of items
 * item_size: size of each item
 *
 * Newly allocated/grown storage is zero-filled.  Returns SUCCESS, or
 * ERROR with a MemoryError set.
 */
int
_PyCompile_EnsureArrayLargeEnough(int idx, void **array, int *alloc,
                                  int default_alloc, size_t item_size)
{
    void *arr = *array;
    if (arr == NULL) {
        /* First allocation: start at default_alloc, or at least enough
           for idx plus some headroom. */
        int new_alloc = default_alloc;
        if (idx >= new_alloc) {
            new_alloc = idx + default_alloc;
        }
        arr = PyObject_Calloc(new_alloc, item_size);
        if (arr == NULL) {
            PyErr_NoMemory();
            return ERROR;
        }
        *alloc = new_alloc;
    }
    else if (idx >= *alloc) {
        /* Grow geometrically (double), or jump straight past idx. */
        size_t oldsize = *alloc * item_size;
        int new_alloc = *alloc << 1;
        if (idx >= new_alloc) {
            new_alloc = idx + default_alloc;
        }
        size_t newsize = new_alloc * item_size;

        if (oldsize > (SIZE_MAX >> 1)) {
            PyErr_NoMemory();
            return ERROR;
        }

        /* NOTE(review): new_alloc * item_size can in principle overflow
           before the oldsize guard above triggers for very large
           item_size — confirm callers only pass small item sizes (the
           visible callers use sizeof(int)/sizeof(instruction)). */
        assert(newsize > 0);
        void *tmp = PyObject_Realloc(arr, newsize);
        if (tmp == NULL) {
            PyErr_NoMemory();
            return ERROR;
        }
        *alloc = new_alloc;
        arr = tmp;
        /* Realloc does not zero; clear the newly added tail so callers
           always see initialized items. */
        memset((char *)arr + oldsize, 0, newsize - oldsize);
    }

    *array = arr;
    return SUCCESS;
}
195
196
static int
197
instr_sequence_next_inst(instr_sequence *seq) {
198
assert(seq->s_instrs != NULL || seq->s_used == 0);
199
200
RETURN_IF_ERROR(
201
_PyCompile_EnsureArrayLargeEnough(seq->s_used + 1,
202
(void**)&seq->s_instrs,
203
&seq->s_allocated,
204
INITIAL_INSTR_SEQUENCE_SIZE,
205
sizeof(instruction)));
206
assert(seq->s_allocated >= 0);
207
assert(seq->s_used < seq->s_allocated);
208
return seq->s_used++;
209
}
210
211
static jump_target_label
212
instr_sequence_new_label(instr_sequence *seq)
213
{
214
jump_target_label lbl = {++seq->s_next_free_label};
215
return lbl;
216
}
217
218
static int
219
instr_sequence_use_label(instr_sequence *seq, int lbl) {
220
int old_size = seq->s_labelmap_size;
221
RETURN_IF_ERROR(
222
_PyCompile_EnsureArrayLargeEnough(lbl,
223
(void**)&seq->s_labelmap,
224
&seq->s_labelmap_size,
225
INITIAL_INSTR_SEQUENCE_LABELS_MAP_SIZE,
226
sizeof(int)));
227
228
for(int i = old_size; i < seq->s_labelmap_size; i++) {
229
seq->s_labelmap[i] = -111; /* something weird, for debugging */
230
}
231
seq->s_labelmap[lbl] = seq->s_used; /* label refers to the next instruction */
232
return SUCCESS;
233
}
234
235
236
#define MAX_OPCODE 511
237
238
static int
239
instr_sequence_addop(instr_sequence *seq, int opcode, int oparg, location loc)
240
{
241
assert(0 <= opcode && opcode <= MAX_OPCODE);
242
assert(IS_WITHIN_OPCODE_RANGE(opcode));
243
assert(OPCODE_HAS_ARG(opcode) || HAS_TARGET(opcode) || oparg == 0);
244
assert(0 <= oparg && oparg < (1 << 30));
245
246
int idx = instr_sequence_next_inst(seq);
247
RETURN_IF_ERROR(idx);
248
instruction *ci = &seq->s_instrs[idx];
249
ci->i_opcode = opcode;
250
ci->i_oparg = oparg;
251
ci->i_loc = loc;
252
return SUCCESS;
253
}
254
255
static int
256
instr_sequence_insert_instruction(instr_sequence *seq, int pos,
257
int opcode, int oparg, location loc)
258
{
259
assert(pos >= 0 && pos <= seq->s_used);
260
int last_idx = instr_sequence_next_inst(seq);
261
RETURN_IF_ERROR(last_idx);
262
for (int i=last_idx-1; i >= pos; i--) {
263
seq->s_instrs[i+1] = seq->s_instrs[i];
264
}
265
instruction *ci = &seq->s_instrs[pos];
266
ci->i_opcode = opcode;
267
ci->i_oparg = oparg;
268
ci->i_loc = loc;
269
270
/* fix the labels map */
271
for(int lbl=0; lbl < seq->s_labelmap_size; lbl++) {
272
if (seq->s_labelmap[lbl] >= pos) {
273
seq->s_labelmap[lbl]++;
274
}
275
}
276
return SUCCESS;
277
}
278
279
static void
280
instr_sequence_fini(instr_sequence *seq) {
281
PyObject_Free(seq->s_labelmap);
282
seq->s_labelmap = NULL;
283
284
PyObject_Free(seq->s_instrs);
285
seq->s_instrs = NULL;
286
}
287
288
/* Convert a linear instruction sequence into a control flow graph in *g.
   Returns SUCCESS or ERROR (with an exception set). */
static int
instr_sequence_to_cfg(instr_sequence *seq, cfg_builder *g) {
    memset(g, 0, sizeof(cfg_builder));
    RETURN_IF_ERROR(_PyCfgBuilder_Init(g));

    /* There can be more than one label for the same offset. The
     * offset2lbl mapping selects one of them which we use consistently.
     */

    int *offset2lbl = PyMem_Malloc(seq->s_used * sizeof(int));
    if (offset2lbl == NULL) {
        PyErr_NoMemory();
        return ERROR;
    }
    /* -1 marks offsets that carry no label. */
    for (int i = 0; i < seq->s_used; i++) {
        offset2lbl[i] = -1;
    }
    /* Invert the label map: for each bound label, remember one label
       per instruction offset (later bindings overwrite earlier ones,
       giving the consistent choice mentioned above). */
    for (int lbl=0; lbl < seq->s_labelmap_size; lbl++) {
        int offset = seq->s_labelmap[lbl];
        if (offset >= 0) {
            assert(offset < seq->s_used);
            offset2lbl[offset] = lbl;
        }
    }

    for (int i = 0; i < seq->s_used; i++) {
        int lbl = offset2lbl[i];
        if (lbl >= 0) {
            /* This offset starts a labelled block in the CFG. */
            assert (lbl < seq->s_labelmap_size);
            jump_target_label lbl_ = {lbl};
            if (_PyCfgBuilder_UseLabel(g, lbl_) < 0) {
                goto error;
            }
        }
        instruction *instr = &seq->s_instrs[i];
        int opcode = instr->i_opcode;
        int oparg = instr->i_oparg;
        if (HAS_TARGET(opcode)) {
            /* Rewrite the jump's oparg (a label id) to the canonical
               label chosen for the target offset. */
            int offset = seq->s_labelmap[oparg];
            assert(offset >= 0 && offset < seq->s_used);
            int lbl = offset2lbl[offset];
            assert(lbl >= 0 && lbl < seq->s_labelmap_size);
            oparg = lbl;
        }
        if (_PyCfgBuilder_Addop(g, opcode, oparg, instr->i_loc) < 0) {
            goto error;
        }
    }
    PyMem_Free(offset2lbl);

    int nblocks = 0;
    for (basicblock *b = g->g_block_list; b != NULL; b = b->b_list) {
        nblocks++;
    }
    /* Guard against a block count so large that a basicblock* array
       sized from it would overflow (presumably for a later per-block
       allocation — confirm against the assembler). */
    if ((size_t)nblocks > SIZE_MAX / sizeof(basicblock *)) {
        PyErr_NoMemory();
        return ERROR;
    }
    return SUCCESS;
error:
    PyMem_Free(offset2lbl);
    return ERROR;
}
351
352
353
/* The following items change on entry and exit of code blocks.
   They must be saved and restored when returning to a block.
*/
struct compiler_unit {
    PySTEntryObject *u_ste;          /* symbol table entry for this block */

    int u_scope_type;                /* one of the COMPILER_SCOPE_* values */

    PyObject *u_private;             /* for private name mangling */

    instr_sequence u_instr_sequence; /* codegen output */

    int u_nfblocks;                  /* number of u_fblock entries in use */
    int u_in_inlined_comp;           /* presumably set while compiling an
                                        inlined comprehension — confirm at
                                        the sites that write it */

    struct fblockinfo u_fblock[CO_MAXBLOCKS];

    _PyCompile_CodeUnitMetadata u_metadata;
};
372
373
/* This struct captures the global state of a compilation.

   The u pointer points to the current compilation unit, while units
   for enclosing blocks are stored in c_stack. The u and c_stack are
   managed by compiler_enter_scope() and compiler_exit_scope().

   Note that we don't track recursion levels during compilation - the
   task of detecting and rejecting excessive levels of nesting is
   handled by the symbol analysis pass.

*/

struct compiler {
    PyObject *c_filename;        /* unicode filename (owned reference) */
    struct symtable *c_st;       /* symbol table, built in compiler_setup() */
    PyFutureFeatures c_future;   /* module's __future__ */
    PyCompilerFlags c_flags;

    int c_optimize;              /* optimization level */
    int c_interactive;           /* true if in interactive mode */
    int c_nestlevel;
    PyObject *c_const_cache;     /* Python dict holding all constants,
                                    including names tuple */
    struct compiler_unit *u;     /* compiler state for current block */
    PyObject *c_stack;           /* Python list holding compiler_unit ptrs */
    PyArena *c_arena;            /* pointer to memory allocation arena */
};
400
401
#define INSTR_SEQUENCE(C) (&((C)->u->u_instr_sequence))
402
403
404
// State threaded through pattern compilation (see compiler_pattern()).
typedef struct {
    // A list of strings corresponding to name captures. It is used to track:
    // - Repeated name assignments in the same pattern.
    // - Different name assignments in alternatives.
    // - The order of name assignments in alternatives.
    PyObject *stores;
    // If 0, any name captures against our subject will raise.
    int allow_irrefutable;
    // An array of blocks to jump to on failure. Jumping to fail_pop[i] will pop
    // i items off of the stack. The end result looks like this (with each block
    // falling through to the next):
    // fail_pop[4]: POP_TOP
    // fail_pop[3]: POP_TOP
    // fail_pop[2]: POP_TOP
    // fail_pop[1]: POP_TOP
    // fail_pop[0]: NOP
    jump_target_label *fail_pop;
    // The current length of fail_pop.
    Py_ssize_t fail_pop_size;
    // The number of items on top of the stack that need to *stay* on top of the
    // stack. Variable captures go beneath these. All of them will be popped on
    // failure.
    Py_ssize_t on_top;
} pattern_context;
428
429
static int codegen_addop_i(instr_sequence *seq, int opcode, Py_ssize_t oparg, location loc);
430
431
static void compiler_free(struct compiler *);
432
static int compiler_error(struct compiler *, location loc, const char *, ...);
433
static int compiler_warn(struct compiler *, location loc, const char *, ...);
434
static int compiler_nameop(struct compiler *, location, identifier, expr_context_ty);
435
436
static PyCodeObject *compiler_mod(struct compiler *, mod_ty);
437
static int compiler_visit_stmt(struct compiler *, stmt_ty);
438
static int compiler_visit_keyword(struct compiler *, keyword_ty);
439
static int compiler_visit_expr(struct compiler *, expr_ty);
440
static int compiler_augassign(struct compiler *, stmt_ty);
441
static int compiler_annassign(struct compiler *, stmt_ty);
442
static int compiler_subscript(struct compiler *, expr_ty);
443
static int compiler_slice(struct compiler *, expr_ty);
444
445
static bool are_all_items_const(asdl_expr_seq *, Py_ssize_t, Py_ssize_t);
446
447
448
static int compiler_with(struct compiler *, stmt_ty, int);
449
static int compiler_async_with(struct compiler *, stmt_ty, int);
450
static int compiler_async_for(struct compiler *, stmt_ty);
451
static int compiler_call_simple_kw_helper(struct compiler *c,
452
location loc,
453
asdl_keyword_seq *keywords,
454
Py_ssize_t nkwelts);
455
static int compiler_call_helper(struct compiler *c, location loc,
456
int n, asdl_expr_seq *args,
457
asdl_keyword_seq *keywords);
458
static int compiler_try_except(struct compiler *, stmt_ty);
459
static int compiler_try_star_except(struct compiler *, stmt_ty);
460
static int compiler_set_qualname(struct compiler *);
461
462
static int compiler_sync_comprehension_generator(
463
struct compiler *c, location loc,
464
asdl_comprehension_seq *generators, int gen_index,
465
int depth,
466
expr_ty elt, expr_ty val, int type,
467
int iter_on_stack);
468
469
static int compiler_async_comprehension_generator(
470
struct compiler *c, location loc,
471
asdl_comprehension_seq *generators, int gen_index,
472
int depth,
473
expr_ty elt, expr_ty val, int type,
474
int iter_on_stack);
475
476
static int compiler_pattern(struct compiler *, pattern_ty, pattern_context *);
477
static int compiler_match(struct compiler *, stmt_ty);
478
static int compiler_pattern_subpattern(struct compiler *,
479
pattern_ty, pattern_context *);
480
481
static PyCodeObject *optimize_and_assemble(struct compiler *, int addNone);
482
483
#define CAPSULE_NAME "compile.c compiler unit"
484
485
486
/* Initialize *c for compiling *mod*: create the constant cache and the
   unit stack, record filename/arena, merge __future__ feature flags into
   the compiler flags, run the AST optimizer, and build the symbol table.
   Returns SUCCESS, or ERROR with an exception set; on ERROR the caller
   (new_compiler) releases partially initialized fields via compiler_free. */
static int
compiler_setup(struct compiler *c, mod_ty mod, PyObject *filename,
               PyCompilerFlags *flags, int optimize, PyArena *arena)
{
    PyCompilerFlags local_flags = _PyCompilerFlags_INIT;

    c->c_const_cache = PyDict_New();
    if (!c->c_const_cache) {
        return ERROR;
    }

    c->c_stack = PyList_New(0);
    if (!c->c_stack) {
        return ERROR;
    }

    c->c_filename = Py_NewRef(filename);
    c->c_arena = arena;
    if (!_PyFuture_FromAST(mod, filename, &c->c_future)) {
        return ERROR;
    }
    if (!flags) {
        flags = &local_flags;
    }
    /* Merge future features into the caller's flags; the merged value is
       written back through *flags so the caller observes it too. */
    int merged = c->c_future.ff_features | flags->cf_flags;
    c->c_future.ff_features = merged;
    flags->cf_flags = merged;
    c->c_flags = *flags;
    /* optimize == -1 means "use the interpreter's configured level". */
    c->c_optimize = (optimize == -1) ? _Py_GetConfig()->optimization_level : optimize;
    c->c_nestlevel = 0;

    /* AST optimization must run before symbol table construction. */
    if (!_PyAST_Optimize(mod, arena, c->c_optimize, merged)) {
        return ERROR;
    }
    c->c_st = _PySymtable_Build(mod, filename, &c->c_future);
    if (c->c_st == NULL) {
        if (!PyErr_Occurred()) {
            PyErr_SetString(PyExc_SystemError, "no symtable");
        }
        return ERROR;
    }
    return SUCCESS;
}
529
530
static struct compiler*
531
new_compiler(mod_ty mod, PyObject *filename, PyCompilerFlags *pflags,
532
int optimize, PyArena *arena)
533
{
534
struct compiler *c = PyMem_Calloc(1, sizeof(struct compiler));
535
if (c == NULL) {
536
return NULL;
537
}
538
if (compiler_setup(c, mod, filename, pflags, optimize, arena) < 0) {
539
compiler_free(c);
540
return NULL;
541
}
542
return c;
543
}
544
545
/* Primary entry point: compile an AST module to a code object.
   Returns a new PyCodeObject, or NULL with an exception set. */
PyCodeObject *
_PyAST_Compile(mod_ty mod, PyObject *filename, PyCompilerFlags *pflags,
               int optimize, PyArena *arena)
{
    assert(!PyErr_Occurred());
    struct compiler *c = new_compiler(mod, filename, pflags, optimize, arena);
    if (c == NULL) {
        return NULL;
    }

    PyCodeObject *co = compiler_mod(c, mod);
    compiler_free(c);
    assert(co || PyErr_Occurred());
    return co;
}
560
561
static void
562
compiler_free(struct compiler *c)
563
{
564
if (c->c_st)
565
_PySymtable_Free(c->c_st);
566
Py_XDECREF(c->c_filename);
567
Py_XDECREF(c->c_const_cache);
568
Py_XDECREF(c->c_stack);
569
PyMem_Free(c);
570
}
571
572
/* Return a new dict mapping each item of list to its index.
   Returns NULL with an exception set on failure. */
static PyObject *
list2dict(PyObject *list)
{
    PyObject *dict = PyDict_New();
    if (dict == NULL) {
        return NULL;
    }

    Py_ssize_t n = PyList_Size(list);
    for (Py_ssize_t i = 0; i < n; i++) {
        PyObject *index = PyLong_FromSsize_t(i);
        if (index == NULL) {
            goto error;
        }
        PyObject *name = PyList_GET_ITEM(list, i);
        int rc = PyDict_SetItem(dict, name, index);
        Py_DECREF(index);
        if (rc < 0) {
            goto error;
        }
    }
    return dict;

error:
    Py_DECREF(dict);
    return NULL;
}
597
598
/* Return new dict containing names from src that match scope(s).
599
600
src is a symbol table dictionary. If the scope of a name matches
601
either scope_type or flag is set, insert it into the new dict. The
602
values are integers, starting at offset and increasing by one for
603
each key.
604
*/
605
606
static PyObject *
607
dictbytype(PyObject *src, int scope_type, int flag, Py_ssize_t offset)
608
{
609
Py_ssize_t i = offset, scope, num_keys, key_i;
610
PyObject *k, *v, *dest = PyDict_New();
611
PyObject *sorted_keys;
612
613
assert(offset >= 0);
614
if (dest == NULL)
615
return NULL;
616
617
/* Sort the keys so that we have a deterministic order on the indexes
618
saved in the returned dictionary. These indexes are used as indexes
619
into the free and cell var storage. Therefore if they aren't
620
deterministic, then the generated bytecode is not deterministic.
621
*/
622
sorted_keys = PyDict_Keys(src);
623
if (sorted_keys == NULL)
624
return NULL;
625
if (PyList_Sort(sorted_keys) != 0) {
626
Py_DECREF(sorted_keys);
627
return NULL;
628
}
629
num_keys = PyList_GET_SIZE(sorted_keys);
630
631
for (key_i = 0; key_i < num_keys; key_i++) {
632
/* XXX this should probably be a macro in symtable.h */
633
long vi;
634
k = PyList_GET_ITEM(sorted_keys, key_i);
635
v = PyDict_GetItemWithError(src, k);
636
assert(v && PyLong_Check(v));
637
vi = PyLong_AS_LONG(v);
638
scope = (vi >> SCOPE_OFFSET) & SCOPE_MASK;
639
640
if (scope == scope_type || vi & flag) {
641
PyObject *item = PyLong_FromSsize_t(i);
642
if (item == NULL) {
643
Py_DECREF(sorted_keys);
644
Py_DECREF(dest);
645
return NULL;
646
}
647
i++;
648
if (PyDict_SetItem(dest, k, item) < 0) {
649
Py_DECREF(sorted_keys);
650
Py_DECREF(item);
651
Py_DECREF(dest);
652
return NULL;
653
}
654
Py_DECREF(item);
655
}
656
}
657
Py_DECREF(sorted_keys);
658
return dest;
659
}
660
661
/* Release all resources owned by a compiler unit, then the unit itself. */
static void
compiler_unit_free(struct compiler_unit *u)
{
    instr_sequence_fini(&u->u_instr_sequence);
    Py_CLEAR(u->u_ste);
    Py_CLEAR(u->u_metadata.u_name);
    Py_CLEAR(u->u_metadata.u_qualname);
    Py_CLEAR(u->u_metadata.u_consts);
    Py_CLEAR(u->u_metadata.u_names);
    Py_CLEAR(u->u_metadata.u_varnames);
    Py_CLEAR(u->u_metadata.u_freevars);
    Py_CLEAR(u->u_metadata.u_cellvars);
    Py_CLEAR(u->u_metadata.u_fasthidden);
    Py_CLEAR(u->u_private);
    PyObject_Free(u);
}
677
678
/* Compute and store u_qualname for the current unit: the unit's name
   prefixed by the qualified name of its enclosing scope, with ".<locals>"
   inserted below function-like scopes, except when the name is declared
   global (then the bare name is used).  Type-parameter scopes are
   transparent: their grandparent is consulted instead.
   Returns SUCCESS or ERROR with an exception set. */
static int
compiler_set_qualname(struct compiler *c)
{
    Py_ssize_t stack_size;
    struct compiler_unit *u = c->u;
    PyObject *name, *base;

    base = NULL;
    stack_size = PyList_GET_SIZE(c->c_stack);
    assert(stack_size >= 1);
    if (stack_size > 1) {
        int scope, force_global = 0;
        struct compiler_unit *parent;
        PyObject *mangled, *capsule;

        /* Enclosing units are stored on c_stack as capsules. */
        capsule = PyList_GET_ITEM(c->c_stack, stack_size - 1);
        parent = (struct compiler_unit *)PyCapsule_GetPointer(capsule, CAPSULE_NAME);
        assert(parent);
        if (parent->u_scope_type == COMPILER_SCOPE_TYPEPARAMS) {
            /* The parent is a type parameter scope, so we need to
               look at the grandparent. */
            if (stack_size == 2) {
                // If we're immediately within the module, we can skip
                // the rest and just set the qualname to be the same as name.
                u->u_metadata.u_qualname = Py_NewRef(u->u_metadata.u_name);
                return SUCCESS;
            }
            capsule = PyList_GET_ITEM(c->c_stack, stack_size - 2);
            parent = (struct compiler_unit *)PyCapsule_GetPointer(capsule, CAPSULE_NAME);
            assert(parent);
        }

        if (u->u_scope_type == COMPILER_SCOPE_FUNCTION
            || u->u_scope_type == COMPILER_SCOPE_ASYNC_FUNCTION
            || u->u_scope_type == COMPILER_SCOPE_CLASS) {
            assert(u->u_metadata.u_name);
            /* Look up the (possibly mangled) name in the parent's scope
               to detect an explicit `global` declaration. */
            mangled = _Py_Mangle(parent->u_private, u->u_metadata.u_name);
            if (!mangled) {
                return ERROR;
            }

            scope = _PyST_GetScope(parent->u_ste, mangled);
            Py_DECREF(mangled);
            assert(scope != GLOBAL_IMPLICIT);
            if (scope == GLOBAL_EXPLICIT)
                force_global = 1;
        }

        if (!force_global) {
            if (parent->u_scope_type == COMPILER_SCOPE_FUNCTION
                || parent->u_scope_type == COMPILER_SCOPE_ASYNC_FUNCTION
                || parent->u_scope_type == COMPILER_SCOPE_LAMBDA)
            {
                /* Names inside a function get a ".<locals>" separator. */
                _Py_DECLARE_STR(dot_locals, ".<locals>");
                base = PyUnicode_Concat(parent->u_metadata.u_qualname,
                                        &_Py_STR(dot_locals));
                if (base == NULL) {
                    return ERROR;
                }
            }
            else {
                base = Py_NewRef(parent->u_metadata.u_qualname);
            }
        }
    }

    if (base != NULL) {
        /* qualname = base + "." + name */
        _Py_DECLARE_STR(dot, ".");
        name = PyUnicode_Concat(base, &_Py_STR(dot));
        Py_DECREF(base);
        if (name == NULL) {
            return ERROR;
        }
        PyUnicode_Append(&name, u->u_metadata.u_name);
        if (name == NULL) {
            return ERROR;
        }
    }
    else {
        name = Py_NewRef(u->u_metadata.u_name);
    }
    u->u_metadata.u_qualname = name;

    return SUCCESS;
}
763
764
/* Return the stack effect of opcode with argument oparg.

   Some opcodes have different stack effect when jump to the target and
   when not jump. The 'jump' parameter specifies the case:

   * 0 -- when not jump
   * 1 -- when jump
   * -1 -- maximal

   Returns PY_INVALID_STACK_EFFECT for unknown or specialized opcodes.
*/
static int
stack_effect(int opcode, int oparg, int jump)
{
    if (0 <= opcode && opcode <= MAX_REAL_OPCODE) {
        if (_PyOpcode_Deopt[opcode] != opcode) {
            // Specialized instructions are not supported.
            return PY_INVALID_STACK_EFFECT;
        }
        int popped, pushed;
        if (jump > 0) {
            popped = _PyOpcode_num_popped(opcode, oparg, true);
            pushed = _PyOpcode_num_pushed(opcode, oparg, true);
        }
        else {
            popped = _PyOpcode_num_popped(opcode, oparg, false);
            pushed = _PyOpcode_num_pushed(opcode, oparg, false);
        }
        if (popped < 0 || pushed < 0) {
            return PY_INVALID_STACK_EFFECT;
        }
        if (jump >= 0) {
            return pushed - popped;
        }
        if (jump < 0) {
            // Compute max(pushed - popped, alt_pushed - alt_popped)
            // where the alt_* values are for the jump-taken case.
            int alt_popped = _PyOpcode_num_popped(opcode, oparg, true);
            int alt_pushed = _PyOpcode_num_pushed(opcode, oparg, true);
            if (alt_popped < 0 || alt_pushed < 0) {
                return PY_INVALID_STACK_EFFECT;
            }
            int diff = pushed - popped;
            int alt_diff = alt_pushed - alt_popped;
            if (alt_diff > diff) {
                return alt_diff;
            }
            return diff;
        }
    }

    // Pseudo ops (removed before assembly, so not in the metadata tables).
    switch (opcode) {
        case POP_BLOCK:
        case JUMP:
        case JUMP_NO_INTERRUPT:
            return 0;

        case EXIT_INIT_CHECK:
            return -1;

        /* Exception handling pseudo-instructions */
        case SETUP_FINALLY:
            /* 0 in the normal flow.
             * Restore the stack position and push 1 value before jumping to
             * the handler if an exception is raised. */
            return jump ? 1 : 0;
        case SETUP_CLEANUP:
            /* As SETUP_FINALLY, but pushes lasti as well */
            return jump ? 2 : 0;
        case SETUP_WITH:
            /* 0 in the normal flow.
             * Restore the stack position to the position before the result
             * of __(a)enter__ and push 2 values before jumping to the handler
             * if an exception is raised. */
            return jump ? 1 : 0;

        case STORE_FAST_MAYBE_NULL:
            return -1;
        case LOAD_CLOSURE:
            return 1;
        case LOAD_METHOD:
            return 1;
        case LOAD_SUPER_METHOD:
        case LOAD_ZERO_SUPER_METHOD:
        case LOAD_ZERO_SUPER_ATTR:
            return -1;
        default:
            return PY_INVALID_STACK_EFFECT;
    }

    return PY_INVALID_STACK_EFFECT; /* not reachable */
}
854
855
/* Public API: stack effect of (opcode, oparg) for the given jump case. */
int
PyCompile_OpcodeStackEffectWithJump(int opcode, int oparg, int jump)
{
    return stack_effect(opcode, oparg, jump);
}
860
861
/* Public API: maximal stack effect of (opcode, oparg) over both
   jump/no-jump cases (jump == -1). */
int
PyCompile_OpcodeStackEffect(int opcode, int oparg)
{
    return stack_effect(opcode, oparg, -1);
}
866
867
/* Append opcode, which must take no argument, to seq. */
static int
codegen_addop_noarg(instr_sequence *seq, int opcode, location loc)
{
    assert(!OPCODE_HAS_ARG(opcode));
    assert(!IS_ASSEMBLER_OPCODE(opcode));
    return instr_sequence_addop(seq, opcode, 0, loc);
}
874
875
static Py_ssize_t
876
dict_add_o(PyObject *dict, PyObject *o)
877
{
878
PyObject *v;
879
Py_ssize_t arg;
880
881
v = PyDict_GetItemWithError(dict, o);
882
if (!v) {
883
if (PyErr_Occurred()) {
884
return ERROR;
885
}
886
arg = PyDict_GET_SIZE(dict);
887
v = PyLong_FromSsize_t(arg);
888
if (!v) {
889
return ERROR;
890
}
891
if (PyDict_SetItem(dict, o, v) < 0) {
892
Py_DECREF(v);
893
return ERROR;
894
}
895
Py_DECREF(v);
896
}
897
else
898
arg = PyLong_AsLong(v);
899
return arg;
900
}
901
902
// Merge const *o* recursively and return constant key object.
//
// Equal constants are interned through *const_cache* so duplicates share
// one object; tuples and frozensets are merged item by item.  Returns a
// reference to the constant key, or NULL with an exception set.
static PyObject*
merge_consts_recursive(PyObject *const_cache, PyObject *o)
{
    assert(PyDict_CheckExact(const_cache));
    // None and Ellipsis are immortal objects, and key is the singleton.
    // No need to merge object and key.
    if (o == Py_None || o == Py_Ellipsis) {
        return o;
    }

    PyObject *key = _PyCode_ConstantKey(o);
    if (key == NULL) {
        return NULL;
    }

    // t is borrowed reference
    PyObject *t = PyDict_SetDefault(const_cache, key, key);
    if (t != key) {
        // o is registered in const_cache. Just use it.
        Py_XINCREF(t);
        Py_DECREF(key);
        return t;
    }

    // We registered o in const_cache.
    // When o is a tuple or frozenset, we want to merge its
    // items too.
    if (PyTuple_CheckExact(o)) {
        Py_ssize_t len = PyTuple_GET_SIZE(o);
        for (Py_ssize_t i = 0; i < len; i++) {
            PyObject *item = PyTuple_GET_ITEM(o, i);
            PyObject *u = merge_consts_recursive(const_cache, item);
            if (u == NULL) {
                Py_DECREF(key);
                return NULL;
            }

            // See _PyCode_ConstantKey()
            PyObject *v; // borrowed
            if (PyTuple_CheckExact(u)) {
                v = PyTuple_GET_ITEM(u, 1);
            }
            else {
                v = u;
            }
            // Replace the item with its merged (shared) equivalent.
            if (v != item) {
                PyTuple_SET_ITEM(o, i, Py_NewRef(v));
                Py_DECREF(item);
            }

            Py_DECREF(u);
        }
    }
    else if (PyFrozenSet_CheckExact(o)) {
        // *key* is tuple. And its first item is frozenset of
        // constant keys.
        // See _PyCode_ConstantKey() for detail.
        assert(PyTuple_CheckExact(key));
        assert(PyTuple_GET_SIZE(key) == 2);

        Py_ssize_t len = PySet_GET_SIZE(o);
        if (len == 0) { // empty frozenset should not be re-created.
            return key;
        }
        PyObject *tuple = PyTuple_New(len);
        if (tuple == NULL) {
            Py_DECREF(key);
            return NULL;
        }
        Py_ssize_t i = 0, pos = 0;
        PyObject *item;
        Py_hash_t hash;
        while (_PySet_NextEntry(o, &pos, &item, &hash)) {
            PyObject *k = merge_consts_recursive(const_cache, item);
            if (k == NULL) {
                Py_DECREF(tuple);
                Py_DECREF(key);
                return NULL;
            }
            PyObject *u;
            if (PyTuple_CheckExact(k)) {
                u = Py_NewRef(PyTuple_GET_ITEM(k, 1));
                Py_DECREF(k);
            }
            else {
                u = k;
            }
            PyTuple_SET_ITEM(tuple, i, u); // Steals reference of u.
            i++;
        }

        // Instead of rewriting o, we create new frozenset and embed in the
        // key tuple. Caller should get merged frozenset from the key tuple.
        PyObject *new = PyFrozenSet_New(tuple);
        Py_DECREF(tuple);
        if (new == NULL) {
            Py_DECREF(key);
            return NULL;
        }
        assert(PyTuple_GET_ITEM(key, 1) == o);
        Py_DECREF(o);
        PyTuple_SET_ITEM(key, 1, new);
    }

    return key;
}
1009
1010
/* Merge constant *o* through the cache and register it in the unit's
   u_consts dict.  Returns the constant's index, or ERROR. */
static Py_ssize_t
compiler_add_const(PyObject *const_cache, struct compiler_unit *u, PyObject *o)
{
    assert(PyDict_CheckExact(const_cache));
    PyObject *key = merge_consts_recursive(const_cache, o);
    if (key == NULL) {
        return ERROR;
    }

    Py_ssize_t arg = dict_add_o(u->u_metadata.u_consts, key);
    Py_DECREF(key);
    return arg;
}
1023
1024
/* Emit LOAD_CONST for *o*, registering it in the unit's constants. */
static int
compiler_addop_load_const(PyObject *const_cache, struct compiler_unit *u, location loc, PyObject *o)
{
    Py_ssize_t arg = compiler_add_const(const_cache, u, o);
    if (arg < 0) {
        return ERROR;
    }
    return codegen_addop_i(&u->u_instr_sequence, LOAD_CONST, arg, loc);
}
1033
1034
/* Emit *opcode* whose oparg is the index of *o* in *dict*
   (inserting *o* if it is not yet present). */
static int
compiler_addop_o(struct compiler_unit *u, location loc,
                 int opcode, PyObject *dict, PyObject *o)
{
    Py_ssize_t arg = dict_add_o(dict, o);
    if (arg < 0) {
        return ERROR;
    }
    return codegen_addop_i(&u->u_instr_sequence, opcode, arg, loc);
}
1044
1045
static int
1046
compiler_addop_name(struct compiler_unit *u, location loc,
1047
int opcode, PyObject *dict, PyObject *o)
1048
{
1049
PyObject *mangled = _Py_Mangle(u->u_private, o);
1050
if (!mangled) {
1051
return ERROR;
1052
}
1053
Py_ssize_t arg = dict_add_o(dict, mangled);
1054
Py_DECREF(mangled);
1055
if (arg < 0) {
1056
return ERROR;
1057
}
1058
if (opcode == LOAD_ATTR) {
1059
arg <<= 1;
1060
}
1061
if (opcode == LOAD_METHOD) {
1062
assert(SAME_OPCODE_METADATA(LOAD_METHOD, LOAD_ATTR));
1063
opcode = LOAD_ATTR;
1064
arg <<= 1;
1065
arg |= 1;
1066
}
1067
if (opcode == LOAD_SUPER_ATTR) {
1068
arg <<= 2;
1069
arg |= 2;
1070
}
1071
if (opcode == LOAD_SUPER_METHOD) {
1072
assert(SAME_OPCODE_METADATA(LOAD_SUPER_METHOD, LOAD_SUPER_ATTR));
1073
opcode = LOAD_SUPER_ATTR;
1074
arg <<= 2;
1075
arg |= 3;
1076
}
1077
if (opcode == LOAD_ZERO_SUPER_ATTR) {
1078
assert(SAME_OPCODE_METADATA(LOAD_ZERO_SUPER_ATTR, LOAD_SUPER_ATTR));
1079
opcode = LOAD_SUPER_ATTR;
1080
arg <<= 2;
1081
}
1082
if (opcode == LOAD_ZERO_SUPER_METHOD) {
1083
assert(SAME_OPCODE_METADATA(LOAD_ZERO_SUPER_METHOD, LOAD_SUPER_ATTR));
1084
opcode = LOAD_SUPER_ATTR;
1085
arg <<= 2;
1086
arg |= 1;
1087
}
1088
return codegen_addop_i(&u->u_instr_sequence, opcode, arg, loc);
1089
}
1090
1091
/* Add an opcode with an integer argument */
static int
codegen_addop_i(instr_sequence *seq, int opcode, Py_ssize_t oparg, location loc)
{
    /* oparg value is unsigned, but a signed C int is usually used to store
       it in the C code (like Python/ceval.c).

       Limit to 32-bit signed C int (rather than INT_MAX) for portability.

       The argument of a concrete bytecode instruction is limited to 8-bit.
       EXTENDED_ARG is used for 16, 24, and 32-bit arguments. */

    int oparg_ = Py_SAFE_DOWNCAST(oparg, Py_ssize_t, int);
    assert(!IS_ASSEMBLER_OPCODE(opcode));
    return instr_sequence_addop(seq, opcode, oparg_, loc);
}
1107
1108
/* Add a jump (or block-push) opcode whose argument is a label id;
   the assembler resolves the label to a concrete offset later. */
static int
codegen_addop_j(instr_sequence *seq, location loc,
                int opcode, jump_target_label target)
{
    assert(IS_LABEL(target));
    /* Only jump opcodes and SETUP_* block pushes may take a label. */
    assert(OPCODE_HAS_JUMP(opcode) || IS_BLOCK_PUSH_OPCODE(opcode));
    assert(!IS_ASSEMBLER_OPCODE(opcode));
    return instr_sequence_addop(seq, opcode, target.id, loc);
}
/* Like RETURN_IF_ERROR, but leaves the current compiler scope first so
   the unit stack stays balanced on the error path. */
#define RETURN_IF_ERROR_IN_SCOPE(C, CALL) { \
    if ((CALL) < 0) { \
        compiler_exit_scope((C)); \
        return ERROR; \
    } \
}

/* Emit a no-argument opcode; returns ERROR from the caller on failure. */
#define ADDOP(C, LOC, OP) \
    RETURN_IF_ERROR(codegen_addop_noarg(INSTR_SEQUENCE(C), (OP), (LOC)))

#define ADDOP_IN_SCOPE(C, LOC, OP) RETURN_IF_ERROR_IN_SCOPE((C), codegen_addop_noarg(INSTR_SEQUENCE(C), (OP), (LOC)))

/* Emit LOAD_CONST for O (borrowed reference). */
#define ADDOP_LOAD_CONST(C, LOC, O) \
    RETURN_IF_ERROR(compiler_addop_load_const((C)->c_const_cache, (C)->u, (LOC), (O)))

/* Same as ADDOP_LOAD_CONST, but steals a reference. */
#define ADDOP_LOAD_CONST_NEW(C, LOC, O) { \
    PyObject *__new_const = (O); \
    if (__new_const == NULL) { \
        return ERROR; \
    } \
    if (compiler_addop_load_const((C)->c_const_cache, (C)->u, (LOC), __new_const) < 0) { \
        Py_DECREF(__new_const); \
        return ERROR; \
    } \
    Py_DECREF(__new_const); \
}

/* Emit OP with O interned into the unit's u_<TYPE> dict; steals O. */
#define ADDOP_N(C, LOC, OP, O, TYPE) { \
    assert(!OPCODE_HAS_CONST(OP)); /* use ADDOP_LOAD_CONST_NEW */ \
    if (compiler_addop_o((C)->u, (LOC), (OP), (C)->u->u_metadata.u_ ## TYPE, (O)) < 0) { \
        Py_DECREF((O)); \
        return ERROR; \
    } \
    Py_DECREF((O)); \
}

/* Emit a name opcode (mangles O, may lower pseudo-opcodes). */
#define ADDOP_NAME(C, LOC, OP, O, TYPE) \
    RETURN_IF_ERROR(compiler_addop_name((C)->u, (LOC), (OP), (C)->u->u_metadata.u_ ## TYPE, (O)))

#define ADDOP_I(C, LOC, OP, O) \
    RETURN_IF_ERROR(codegen_addop_i(INSTR_SEQUENCE(C), (OP), (O), (LOC)))

#define ADDOP_JUMP(C, LOC, OP, O) \
    RETURN_IF_ERROR(codegen_addop_j(INSTR_SEQUENCE(C), (LOC), (OP), (O)))

#define ADDOP_COMPARE(C, LOC, CMP) \
    RETURN_IF_ERROR(compiler_addcompare((C), (LOC), (cmpop_ty)(CMP)))

#define ADDOP_BINARY(C, LOC, BINOP) \
    RETURN_IF_ERROR(addop_binary((C), (LOC), (BINOP), false))

#define ADDOP_INPLACE(C, LOC, BINOP) \
    RETURN_IF_ERROR(addop_binary((C), (LOC), (BINOP), true))

#define ADD_YIELD_FROM(C, LOC, await) \
    RETURN_IF_ERROR(compiler_add_yield_from((C), (LOC), (await)))

#define POP_EXCEPT_AND_RERAISE(C, LOC) \
    RETURN_IF_ERROR(compiler_pop_except_and_reraise((C), (LOC)))

#define ADDOP_YIELD(C, LOC) \
    RETURN_IF_ERROR(addop_yield((C), (LOC)))

/* VISIT and VISIT_SEQ takes an ASDL type as their second argument. They use
   the ASDL name to synthesize the name of the C type and the visit function.
*/

#define VISIT(C, TYPE, V) \
    RETURN_IF_ERROR(compiler_visit_ ## TYPE((C), (V)));

#define VISIT_IN_SCOPE(C, TYPE, V) \
    RETURN_IF_ERROR_IN_SCOPE((C), compiler_visit_ ## TYPE((C), (V)))

#define VISIT_SEQ(C, TYPE, SEQ) { \
    int _i; \
    asdl_ ## TYPE ## _seq *seq = (SEQ); /* avoid variable capture */ \
    for (_i = 0; _i < asdl_seq_LEN(seq); _i++) { \
        TYPE ## _ty elt = (TYPE ## _ty)asdl_seq_GET(seq, _i); \
        RETURN_IF_ERROR(compiler_visit_ ## TYPE((C), elt)); \
    } \
}

/* Like VISIT_SEQ, but exits the current scope on error. */
#define VISIT_SEQ_IN_SCOPE(C, TYPE, SEQ) { \
    int _i; \
    asdl_ ## TYPE ## _seq *seq = (SEQ); /* avoid variable capture */ \
    for (_i = 0; _i < asdl_seq_LEN(seq); _i++) { \
        TYPE ## _ty elt = (TYPE ## _ty)asdl_seq_GET(seq, _i); \
        if (compiler_visit_ ## TYPE((C), elt) < 0) { \
            compiler_exit_scope(C); \
            return ERROR; \
        } \
    } \
}

/* Allocate a fresh compiler_unit for the scope identified by `key`
 * (looked up in the symbol table), push the current unit onto
 * c->c_stack, make the new unit current, and emit the initial RESUME.
 * Returns SUCCESS or ERROR (with an exception set).
 */
static int
compiler_enter_scope(struct compiler *c, identifier name,
                     int scope_type, void *key, int lineno)
{
    location loc = LOCATION(lineno, lineno, 0, 0);

    struct compiler_unit *u;

    u = (struct compiler_unit *)PyObject_Calloc(1, sizeof(
                                            struct compiler_unit));
    if (!u) {
        PyErr_NoMemory();
        return ERROR;
    }
    u->u_scope_type = scope_type;
    u->u_metadata.u_argcount = 0;
    u->u_metadata.u_posonlyargcount = 0;
    u->u_metadata.u_kwonlyargcount = 0;
    /* Symbol-table entry for this scope; drives name resolution below. */
    u->u_ste = PySymtable_Lookup(c->c_st, key);
    if (!u->u_ste) {
        compiler_unit_free(u);
        return ERROR;
    }
    u->u_metadata.u_name = Py_NewRef(name);
    u->u_metadata.u_varnames = list2dict(u->u_ste->ste_varnames);
    if (!u->u_metadata.u_varnames) {
        compiler_unit_free(u);
        return ERROR;
    }
    u->u_metadata.u_cellvars = dictbytype(u->u_ste->ste_symbols, CELL, DEF_COMP_CELL, 0);
    if (!u->u_metadata.u_cellvars) {
        compiler_unit_free(u);
        return ERROR;
    }
    if (u->u_ste->ste_needs_class_closure) {
        /* Cook up an implicit __class__ cell. */
        Py_ssize_t res;
        assert(u->u_scope_type == COMPILER_SCOPE_CLASS);
        res = dict_add_o(u->u_metadata.u_cellvars, &_Py_ID(__class__));
        if (res < 0) {
            compiler_unit_free(u);
            return ERROR;
        }
    }
    if (u->u_ste->ste_needs_classdict) {
        /* Cook up an implicit __classdict__ cell. */
        Py_ssize_t res;
        assert(u->u_scope_type == COMPILER_SCOPE_CLASS);
        res = dict_add_o(u->u_metadata.u_cellvars, &_Py_ID(__classdict__));
        if (res < 0) {
            compiler_unit_free(u);
            return ERROR;
        }
    }

    /* Free-variable indices start right after the cell variables. */
    u->u_metadata.u_freevars = dictbytype(u->u_ste->ste_symbols, FREE, DEF_FREE_CLASS,
                                          PyDict_GET_SIZE(u->u_metadata.u_cellvars));
    if (!u->u_metadata.u_freevars) {
        compiler_unit_free(u);
        return ERROR;
    }

    u->u_metadata.u_fasthidden = PyDict_New();
    if (!u->u_metadata.u_fasthidden) {
        compiler_unit_free(u);
        return ERROR;
    }

    u->u_nfblocks = 0;
    u->u_in_inlined_comp = 0;
    u->u_metadata.u_firstlineno = lineno;
    u->u_metadata.u_consts = PyDict_New();
    if (!u->u_metadata.u_consts) {
        compiler_unit_free(u);
        return ERROR;
    }
    u->u_metadata.u_names = PyDict_New();
    if (!u->u_metadata.u_names) {
        compiler_unit_free(u);
        return ERROR;
    }

    u->u_private = NULL;

    /* Push the old compiler_unit on the stack. */
    if (c->u) {
        PyObject *capsule = PyCapsule_New(c->u, CAPSULE_NAME, NULL);
        if (!capsule || PyList_Append(c->c_stack, capsule) < 0) {
            Py_XDECREF(capsule);
            compiler_unit_free(u);
            return ERROR;
        }
        Py_DECREF(capsule);
        /* Inherit the private-name mangling context from the parent. */
        u->u_private = Py_XNewRef(c->u->u_private);
    }
    c->u = u;

    c->c_nestlevel++;

    /* Module-level RESUME gets line 0; other scopes get a qualname. */
    if (u->u_scope_type == COMPILER_SCOPE_MODULE) {
        loc.lineno = 0;
    }
    else {
        RETURN_IF_ERROR(compiler_set_qualname(c));
    }
    ADDOP_I(c, loc, RESUME, 0);

    if (u->u_scope_type == COMPILER_SCOPE_MODULE) {
        loc.lineno = -1;
    }
    return SUCCESS;
}
/* Free the current compiler_unit and restore the parent unit from
 * c->c_stack (or set c->u to NULL at the outermost level).
 * Preserves any exception already in flight.
 */
static void
compiler_exit_scope(struct compiler *c)
{
    // Don't call PySequence_DelItem() with an exception raised
    PyObject *exc = PyErr_GetRaisedException();

    c->c_nestlevel--;
    compiler_unit_free(c->u);
    /* Restore c->u to the parent unit. */
    Py_ssize_t n = PyList_GET_SIZE(c->c_stack) - 1;
    if (n >= 0) {
        PyObject *capsule = PyList_GET_ITEM(c->c_stack, n);
        c->u = (struct compiler_unit *)PyCapsule_GetPointer(capsule, CAPSULE_NAME);
        assert(c->u);
        /* we are deleting from a list so this really shouldn't fail */
        if (PySequence_DelItem(c->c_stack, n) < 0) {
            _PyErr_WriteUnraisableMsg("on removing the last compiler "
                                      "stack item", NULL);
        }
    }
    else {
        c->u = NULL;
    }

    PyErr_SetRaisedException(exc);
}
/* Search if variable annotations are present statically in a block. */
1355
1356
static bool
1357
find_ann(asdl_stmt_seq *stmts)
1358
{
1359
int i, j, res = 0;
1360
stmt_ty st;
1361
1362
for (i = 0; i < asdl_seq_LEN(stmts); i++) {
1363
st = (stmt_ty)asdl_seq_GET(stmts, i);
1364
switch (st->kind) {
1365
case AnnAssign_kind:
1366
return true;
1367
case For_kind:
1368
res = find_ann(st->v.For.body) ||
1369
find_ann(st->v.For.orelse);
1370
break;
1371
case AsyncFor_kind:
1372
res = find_ann(st->v.AsyncFor.body) ||
1373
find_ann(st->v.AsyncFor.orelse);
1374
break;
1375
case While_kind:
1376
res = find_ann(st->v.While.body) ||
1377
find_ann(st->v.While.orelse);
1378
break;
1379
case If_kind:
1380
res = find_ann(st->v.If.body) ||
1381
find_ann(st->v.If.orelse);
1382
break;
1383
case With_kind:
1384
res = find_ann(st->v.With.body);
1385
break;
1386
case AsyncWith_kind:
1387
res = find_ann(st->v.AsyncWith.body);
1388
break;
1389
case Try_kind:
1390
for (j = 0; j < asdl_seq_LEN(st->v.Try.handlers); j++) {
1391
excepthandler_ty handler = (excepthandler_ty)asdl_seq_GET(
1392
st->v.Try.handlers, j);
1393
if (find_ann(handler->v.ExceptHandler.body)) {
1394
return true;
1395
}
1396
}
1397
res = find_ann(st->v.Try.body) ||
1398
find_ann(st->v.Try.finalbody) ||
1399
find_ann(st->v.Try.orelse);
1400
break;
1401
case TryStar_kind:
1402
for (j = 0; j < asdl_seq_LEN(st->v.TryStar.handlers); j++) {
1403
excepthandler_ty handler = (excepthandler_ty)asdl_seq_GET(
1404
st->v.TryStar.handlers, j);
1405
if (find_ann(handler->v.ExceptHandler.body)) {
1406
return true;
1407
}
1408
}
1409
res = find_ann(st->v.TryStar.body) ||
1410
find_ann(st->v.TryStar.finalbody) ||
1411
find_ann(st->v.TryStar.orelse);
1412
break;
1413
case Match_kind:
1414
for (j = 0; j < asdl_seq_LEN(st->v.Match.cases); j++) {
1415
match_case_ty match_case = (match_case_ty)asdl_seq_GET(
1416
st->v.Match.cases, j);
1417
if (find_ann(match_case->body)) {
1418
return true;
1419
}
1420
}
1421
break;
1422
default:
1423
res = false;
1424
break;
1425
}
1426
if (res) {
1427
break;
1428
}
1429
}
1430
return res;
1431
}
1432
1433
/*
1434
* Frame block handling functions
1435
*/
1436
1437
static int
1438
compiler_push_fblock(struct compiler *c, location loc,
1439
enum fblocktype t, jump_target_label block_label,
1440
jump_target_label exit, void *datum)
1441
{
1442
struct fblockinfo *f;
1443
if (c->u->u_nfblocks >= CO_MAXBLOCKS) {
1444
return compiler_error(c, loc, "too many statically nested blocks");
1445
}
1446
f = &c->u->u_fblock[c->u->u_nfblocks++];
1447
f->fb_type = t;
1448
f->fb_block = block_label;
1449
f->fb_exit = exit;
1450
f->fb_datum = datum;
1451
return SUCCESS;
1452
}
1453
1454
/* Pop the innermost frame block; asserts (debug builds only) that it has
   the expected type and label. */
static void
compiler_pop_fblock(struct compiler *c, enum fblocktype t, jump_target_label block_label)
{
    struct compiler_unit *u = c->u;
    assert(u->u_nfblocks > 0);
    u->u_nfblocks--;
    assert(u->u_fblock[u->u_nfblocks].fb_type == t);
    assert(SAME_LABEL(u->u_fblock[u->u_nfblocks].fb_block, block_label));
}
/* Emit code calling the __exit__/__aexit__ callable already on the stack
   with (None, None, None), i.e. the "no exception" case.
   NOTE(review): the three LOAD_CONSTs with CALL 2 follow this version's
   calling convention (callable + self slot + 2 args) — confirm against
   the with-statement codegen before changing. */
static int
compiler_call_exit_with_nones(struct compiler *c, location loc)
{
    ADDOP_LOAD_CONST(c, loc, Py_None);
    ADDOP_LOAD_CONST(c, loc, Py_None);
    ADDOP_LOAD_CONST(c, loc, Py_None);
    ADDOP_I(c, loc, CALL, 2);
    return SUCCESS;
}
/* Emit the SEND/YIELD_VALUE loop used by `yield from` and `await`
   (`await` == 1 selects the RESUME oparg for an await point). */
static int
compiler_add_yield_from(struct compiler *c, location loc, int await)
{
    NEW_JUMP_TARGET_LABEL(c, send);
    NEW_JUMP_TARGET_LABEL(c, fail);
    NEW_JUMP_TARGET_LABEL(c, exit);

    USE_LABEL(c, send);
    ADDOP_JUMP(c, loc, SEND, exit);
    // Set up a virtual try/except to handle when StopIteration is raised during
    // a close or throw call. That is the only way YIELD_VALUE can raise here.
    ADDOP_JUMP(c, loc, SETUP_FINALLY, fail);
    ADDOP_I(c, loc, YIELD_VALUE, 0);
    ADDOP(c, NO_LOCATION, POP_BLOCK);
    ADDOP_I(c, loc, RESUME, await ? 3 : 2);
    ADDOP_JUMP(c, loc, JUMP_NO_INTERRUPT, send);

    USE_LABEL(c, fail);
    ADDOP(c, loc, CLEANUP_THROW);

    USE_LABEL(c, exit);
    ADDOP(c, loc, END_SEND);
    return SUCCESS;
}
/* Emit code that restores the previous exception state and re-raises the
   current exception; used when an except block itself raises. */
static int
compiler_pop_except_and_reraise(struct compiler *c, location loc)
{
    /* Stack contents
     * [exc_info, lasti, exc]       COPY 3
     * [exc_info, lasti, exc, exc_info] POP_EXCEPT
     * [exc_info, lasti, exc]       RERAISE 1
     * (exception_unwind clears the stack)
     */

    ADDOP_I(c, loc, COPY, 3);
    ADDOP(c, loc, POP_EXCEPT);
    ADDOP_I(c, loc, RERAISE, 1);
    return SUCCESS;
}
/* Unwind a frame block. If preserve_tos is true, the TOS before
 * popping the blocks will be restored afterwards, unless another
 * return, break or continue is found. In which case, the TOS will
 * be popped.
 */
static int
compiler_unwind_fblock(struct compiler *c, location *ploc,
                       struct fblockinfo *info, int preserve_tos)
{
    switch (info->fb_type) {
        /* These block types need no unwinding code. */
        case WHILE_LOOP:
        case EXCEPTION_HANDLER:
        case EXCEPTION_GROUP_HANDLER:
        case ASYNC_COMPREHENSION_GENERATOR:
            return SUCCESS;

        case FOR_LOOP:
            /* Pop the iterator */
            if (preserve_tos) {
                ADDOP_I(c, *ploc, SWAP, 2);
            }
            ADDOP(c, *ploc, POP_TOP);
            return SUCCESS;

        case TRY_EXCEPT:
            ADDOP(c, *ploc, POP_BLOCK);
            return SUCCESS;

        case FINALLY_TRY:
            /* This POP_BLOCK gets the line number of the unwinding statement */
            ADDOP(c, *ploc, POP_BLOCK);
            if (preserve_tos) {
                /* Guard the preserved value while the finally body runs. */
                RETURN_IF_ERROR(
                    compiler_push_fblock(c, *ploc, POP_VALUE, NO_LABEL, NO_LABEL, NULL));
            }
            /* Emit the finally block */
            VISIT_SEQ(c, stmt, info->fb_datum);
            if (preserve_tos) {
                compiler_pop_fblock(c, POP_VALUE, NO_LABEL);
            }
            /* The finally block should appear to execute after the
             * statement causing the unwinding, so make the unwinding
             * instruction artificial */
            *ploc = NO_LOCATION;
            return SUCCESS;

        case FINALLY_END:
            if (preserve_tos) {
                ADDOP_I(c, *ploc, SWAP, 2);
            }
            ADDOP(c, *ploc, POP_TOP); /* exc_value */
            if (preserve_tos) {
                ADDOP_I(c, *ploc, SWAP, 2);
            }
            ADDOP(c, *ploc, POP_BLOCK);
            ADDOP(c, *ploc, POP_EXCEPT);
            return SUCCESS;

        case WITH:
        case ASYNC_WITH:
            /* Report the unwind at the with statement's location. */
            *ploc = LOC((stmt_ty)info->fb_datum);
            ADDOP(c, *ploc, POP_BLOCK);
            if (preserve_tos) {
                ADDOP_I(c, *ploc, SWAP, 2);
            }
            RETURN_IF_ERROR(compiler_call_exit_with_nones(c, *ploc));
            if (info->fb_type == ASYNC_WITH) {
                ADDOP_I(c, *ploc, GET_AWAITABLE, 2);
                ADDOP_LOAD_CONST(c, *ploc, Py_None);
                ADD_YIELD_FROM(c, *ploc, 1);
            }
            ADDOP(c, *ploc, POP_TOP);
            /* The exit block should appear to execute after the
             * statement causing the unwinding, so make the unwinding
             * instruction artificial */
            *ploc = NO_LOCATION;
            return SUCCESS;

        case HANDLER_CLEANUP: {
            if (info->fb_datum) {
                ADDOP(c, *ploc, POP_BLOCK);
            }
            if (preserve_tos) {
                ADDOP_I(c, *ploc, SWAP, 2);
            }
            ADDOP(c, *ploc, POP_BLOCK);
            ADDOP(c, *ploc, POP_EXCEPT);
            if (info->fb_datum) {
                /* Clear and unbind the `except ... as name` variable. */
                ADDOP_LOAD_CONST(c, *ploc, Py_None);
                RETURN_IF_ERROR(compiler_nameop(c, *ploc, info->fb_datum, Store));
                RETURN_IF_ERROR(compiler_nameop(c, *ploc, info->fb_datum, Del));
            }
            return SUCCESS;
        }
        case POP_VALUE: {
            if (preserve_tos) {
                ADDOP_I(c, *ploc, SWAP, 2);
            }
            ADDOP(c, *ploc, POP_TOP);
            return SUCCESS;
        }
    }
    Py_UNREACHABLE();
}
/** Unwind block stack. If loop is not NULL, then stop when the first loop is encountered. */
static int
compiler_unwind_fblock_stack(struct compiler *c, location *ploc,
                             int preserve_tos, struct fblockinfo **loop)
{
    if (c->u->u_nfblocks == 0) {
        return SUCCESS;
    }
    struct fblockinfo *top = &c->u->u_fblock[c->u->u_nfblocks-1];
    if (top->fb_type == EXCEPTION_GROUP_HANDLER) {
        return compiler_error(
            c, *ploc, "'break', 'continue' and 'return' cannot appear in an except* block");
    }
    if (loop != NULL && (top->fb_type == WHILE_LOOP || top->fb_type == FOR_LOOP)) {
        *loop = top;
        return SUCCESS;
    }
    /* Pop the top block, unwind it, recurse on the rest, then restore the
       block so the stack is unchanged when the caller resumes emission. */
    struct fblockinfo copy = *top;
    c->u->u_nfblocks--;
    RETURN_IF_ERROR(compiler_unwind_fblock(c, ploc, &copy, preserve_tos));
    RETURN_IF_ERROR(compiler_unwind_fblock_stack(c, ploc, preserve_tos, loop));
    c->u->u_fblock[c->u->u_nfblocks] = copy;
    c->u->u_nfblocks++;
    return SUCCESS;
}
/* Compile a sequence of statements, checking for a docstring
   and for annotations. */
static int
compiler_body(struct compiler *c, location loc, asdl_stmt_seq *stmts)
{
    int i = 0;
    stmt_ty st;
    PyObject *docstring;

    /* Set current line number to the line number of first statement.
       This way line number for SETUP_ANNOTATIONS will always
       coincide with the line number of first "real" statement in module.
       If body is empty, then lineno will be set later in optimize_and_assemble. */
    if (c->u->u_scope_type == COMPILER_SCOPE_MODULE && asdl_seq_LEN(stmts)) {
        st = (stmt_ty)asdl_seq_GET(stmts, 0);
        loc = LOC(st);
    }
    /* Every annotated class and module should have __annotations__. */
    if (find_ann(stmts)) {
        ADDOP(c, loc, SETUP_ANNOTATIONS);
    }
    if (!asdl_seq_LEN(stmts)) {
        return SUCCESS;
    }
    /* if not -OO mode, set docstring */
    if (c->c_optimize < 2) {
        docstring = _PyAST_GetDocString(stmts);
        if (docstring) {
            /* Skip the docstring statement in the loop below. */
            i = 1;
            st = (stmt_ty)asdl_seq_GET(stmts, 0);
            assert(st->kind == Expr_kind);
            VISIT(c, expr, st->v.Expr.value);
            RETURN_IF_ERROR(compiler_nameop(c, NO_LOCATION, &_Py_ID(__doc__), Store));
        }
    }
    for (; i < asdl_seq_LEN(stmts); i++) {
        VISIT(c, stmt, (stmt_ty)asdl_seq_GET(stmts, i));
    }
    return SUCCESS;
}
static int
1689
compiler_codegen(struct compiler *c, mod_ty mod)
1690
{
1691
_Py_DECLARE_STR(anon_module, "<module>");
1692
RETURN_IF_ERROR(
1693
compiler_enter_scope(c, &_Py_STR(anon_module), COMPILER_SCOPE_MODULE,
1694
mod, 1));
1695
1696
location loc = LOCATION(1, 1, 0, 0);
1697
switch (mod->kind) {
1698
case Module_kind:
1699
if (compiler_body(c, loc, mod->v.Module.body) < 0) {
1700
compiler_exit_scope(c);
1701
return ERROR;
1702
}
1703
break;
1704
case Interactive_kind:
1705
if (find_ann(mod->v.Interactive.body)) {
1706
ADDOP(c, loc, SETUP_ANNOTATIONS);
1707
}
1708
c->c_interactive = 1;
1709
VISIT_SEQ_IN_SCOPE(c, stmt, mod->v.Interactive.body);
1710
break;
1711
case Expression_kind:
1712
VISIT_IN_SCOPE(c, expr, mod->v.Expression.body);
1713
break;
1714
default:
1715
PyErr_Format(PyExc_SystemError,
1716
"module kind %d should not be possible",
1717
mod->kind);
1718
return ERROR;
1719
}
1720
return SUCCESS;
1721
}
1722
1723
static PyCodeObject *
1724
compiler_mod(struct compiler *c, mod_ty mod)
1725
{
1726
int addNone = mod->kind != Expression_kind;
1727
if (compiler_codegen(c, mod) < 0) {
1728
return NULL;
1729
}
1730
PyCodeObject *co = optimize_and_assemble(c, addNone);
1731
compiler_exit_scope(c);
1732
return co;
1733
}
1734
1735
/* The test for LOCAL must come before the test for FREE in order to
   handle classes where name is both local and free.  The local var is
   a method and the free var is a free var referenced within a method.
*/

/* Return the symbol-table scope (CELL, FREE, ...) of `name` in the
   current unit, or ERROR with SystemError set if the name is unknown.
   __class__/__classdict__ in a class body are always cells (they are
   synthesized in compiler_enter_scope). */
static int
get_ref_type(struct compiler *c, PyObject *name)
{
    int scope;
    if (c->u->u_scope_type == COMPILER_SCOPE_CLASS &&
        (_PyUnicode_EqualToASCIIString(name, "__class__") ||
         _PyUnicode_EqualToASCIIString(name, "__classdict__"))) {
        return CELL;
    }
    scope = _PyST_GetScope(c->u->u_ste, name);
    if (scope == 0) {
        PyErr_Format(PyExc_SystemError,
                     "_PyST_GetScope(name=%R) failed: "
                     "unknown scope in unit %S (%R); "
                     "symbols: %R; locals: %R; globals: %R",
                     name,
                     c->u->u_metadata.u_name, c->u->u_ste->ste_id,
                     c->u->u_ste->ste_symbols, c->u->u_metadata.u_varnames, c->u->u_metadata.u_names);
        return ERROR;
    }
    return scope;
}
static int
1764
compiler_lookup_arg(PyObject *dict, PyObject *name)
1765
{
1766
PyObject *v = PyDict_GetItemWithError(dict, name);
1767
if (v == NULL) {
1768
return ERROR;
1769
}
1770
return PyLong_AS_LONG(v);
1771
}
1772
1773
static int
1774
compiler_make_closure(struct compiler *c, location loc,
1775
PyCodeObject *co, Py_ssize_t flags)
1776
{
1777
if (co->co_nfreevars) {
1778
int i = PyCode_GetFirstFree(co);
1779
for (; i < co->co_nlocalsplus; ++i) {
1780
/* Bypass com_addop_varname because it will generate
1781
LOAD_DEREF but LOAD_CLOSURE is needed.
1782
*/
1783
PyObject *name = PyTuple_GET_ITEM(co->co_localsplusnames, i);
1784
1785
/* Special case: If a class contains a method with a
1786
free variable that has the same name as a method,
1787
the name will be considered free *and* local in the
1788
class. It should be handled by the closure, as
1789
well as by the normal name lookup logic.
1790
*/
1791
int reftype = get_ref_type(c, name);
1792
if (reftype == -1) {
1793
return ERROR;
1794
}
1795
int arg;
1796
if (reftype == CELL) {
1797
arg = compiler_lookup_arg(c->u->u_metadata.u_cellvars, name);
1798
}
1799
else {
1800
arg = compiler_lookup_arg(c->u->u_metadata.u_freevars, name);
1801
}
1802
if (arg == -1) {
1803
PyObject *freevars = _PyCode_GetFreevars(co);
1804
if (freevars == NULL) {
1805
PyErr_Clear();
1806
}
1807
PyErr_Format(PyExc_SystemError,
1808
"compiler_lookup_arg(name=%R) with reftype=%d failed in %S; "
1809
"freevars of code %S: %R",
1810
name,
1811
reftype,
1812
c->u->u_metadata.u_name,
1813
co->co_name,
1814
freevars);
1815
Py_DECREF(freevars);
1816
return ERROR;
1817
}
1818
ADDOP_I(c, loc, LOAD_CLOSURE, arg);
1819
}
1820
flags |= MAKE_FUNCTION_CLOSURE;
1821
ADDOP_I(c, loc, BUILD_TUPLE, co->co_nfreevars);
1822
}
1823
ADDOP_LOAD_CONST(c, loc, (PyObject*)co);
1824
1825
ADDOP(c, loc, MAKE_FUNCTION);
1826
1827
if (flags & MAKE_FUNCTION_CLOSURE) {
1828
ADDOP_I(c, loc, SET_FUNCTION_ATTRIBUTE, MAKE_FUNCTION_CLOSURE);
1829
}
1830
if (flags & MAKE_FUNCTION_ANNOTATIONS) {
1831
ADDOP_I(c, loc, SET_FUNCTION_ATTRIBUTE, MAKE_FUNCTION_ANNOTATIONS);
1832
}
1833
if (flags & MAKE_FUNCTION_KWDEFAULTS) {
1834
ADDOP_I(c, loc, SET_FUNCTION_ATTRIBUTE, MAKE_FUNCTION_KWDEFAULTS);
1835
}
1836
if (flags & MAKE_FUNCTION_DEFAULTS) {
1837
ADDOP_I(c, loc, SET_FUNCTION_ATTRIBUTE, MAKE_FUNCTION_DEFAULTS);
1838
}
1839
return SUCCESS;
1840
}
1841
1842
static int
1843
compiler_decorators(struct compiler *c, asdl_expr_seq* decos)
1844
{
1845
if (!decos) {
1846
return SUCCESS;
1847
}
1848
1849
for (Py_ssize_t i = 0; i < asdl_seq_LEN(decos); i++) {
1850
VISIT(c, expr, (expr_ty)asdl_seq_GET(decos, i));
1851
}
1852
return SUCCESS;
1853
}
1854
1855
static int
1856
compiler_apply_decorators(struct compiler *c, asdl_expr_seq* decos)
1857
{
1858
if (!decos) {
1859
return SUCCESS;
1860
}
1861
1862
for (Py_ssize_t i = asdl_seq_LEN(decos) - 1; i > -1; i--) {
1863
location loc = LOC((expr_ty)asdl_seq_GET(decos, i));
1864
ADDOP_I(c, loc, CALL, 0);
1865
}
1866
return SUCCESS;
1867
}
1868
1869
static int
compiler_visit_kwonlydefaults(struct compiler *c, location loc,
                              asdl_arg_seq *kwonlyargs, asdl_expr_seq *kw_defaults)
{
    /* Push a dict of keyword-only default values.

       Return -1 on error, 0 if no dict pushed, 1 if a dict is pushed.
    */
    int i;
    PyObject *keys = NULL;  /* list of mangled names; owned here */

    for (i = 0; i < asdl_seq_LEN(kwonlyargs); i++) {
        arg_ty arg = asdl_seq_GET(kwonlyargs, i);
        expr_ty default_ = asdl_seq_GET(kw_defaults, i);
        /* kw_defaults entries may be NULL for args without a default. */
        if (default_) {
            PyObject *mangled = _Py_Mangle(c->u->u_private, arg->arg);
            if (!mangled) {
                goto error;
            }
            if (keys == NULL) {
                keys = PyList_New(1);
                if (keys == NULL) {
                    Py_DECREF(mangled);
                    return ERROR;
                }
                /* PyList_SET_ITEM steals the `mangled` reference. */
                PyList_SET_ITEM(keys, 0, mangled);
            }
            else {
                int res = PyList_Append(keys, mangled);
                Py_DECREF(mangled);
                if (res == -1) {
                    goto error;
                }
            }
            /* Push the default value; values end up in source order. */
            if (compiler_visit_expr(c, default_) < 0) {
                goto error;
            }
        }
    }
    if (keys != NULL) {
        Py_ssize_t default_count = PyList_GET_SIZE(keys);
        PyObject *keys_tuple = PyList_AsTuple(keys);
        Py_DECREF(keys);
        /* ADDOP_LOAD_CONST_NEW steals keys_tuple (and handles NULL). */
        ADDOP_LOAD_CONST_NEW(c, loc, keys_tuple);
        ADDOP_I(c, loc, BUILD_CONST_KEY_MAP, default_count);
        assert(default_count > 0);
        return 1;
    }
    else {
        return 0;
    }

error:
    Py_XDECREF(keys);
    return ERROR;
}
/* Under `from __future__ import annotations`: load the annotation as its
   source-text string instead of evaluating it. */
static int
compiler_visit_annexpr(struct compiler *c, expr_ty annotation)
{
    location loc = LOC(annotation);
    ADDOP_LOAD_CONST_NEW(c, loc, _PyAST_ExprAsUnicode(annotation));
    return SUCCESS;
}
/* Push one (name, value) annotation pair for parameter `id`; no-op when
   the parameter has no annotation. Increments *annotations_len by 2
   (one slot for the mangled name, one for the value). */
static int
compiler_visit_argannotation(struct compiler *c, identifier id,
                             expr_ty annotation, Py_ssize_t *annotations_len, location loc)
{
    if (!annotation) {
        return SUCCESS;
    }
    PyObject *mangled = _Py_Mangle(c->u->u_private, id);
    if (!mangled) {
        return ERROR;
    }
    ADDOP_LOAD_CONST(c, loc, mangled);
    Py_DECREF(mangled);

    if (c->c_future.ff_features & CO_FUTURE_ANNOTATIONS) {
        /* PEP 563: store the annotation's source text, not its value. */
        VISIT(c, annexpr, annotation);
    }
    else {
        if (annotation->kind == Starred_kind) {
            // *args: *Ts (where Ts is a TypeVarTuple).
            // Do [annotation_value] = [*Ts].
            // (Note that in theory we could end up here even for an argument
            // other than *args, but in practice the grammar doesn't allow it.)
            VISIT(c, expr, annotation->v.Starred.value);
            ADDOP_I(c, loc, UNPACK_SEQUENCE, (Py_ssize_t) 1);
        }
        else {
            VISIT(c, expr, annotation);
        }
    }
    *annotations_len += 2;
    return SUCCESS;
}
static int
1969
compiler_visit_argannotations(struct compiler *c, asdl_arg_seq* args,
1970
Py_ssize_t *annotations_len, location loc)
1971
{
1972
int i;
1973
for (i = 0; i < asdl_seq_LEN(args); i++) {
1974
arg_ty arg = (arg_ty)asdl_seq_GET(args, i);
1975
RETURN_IF_ERROR(
1976
compiler_visit_argannotation(
1977
c,
1978
arg->arg,
1979
arg->annotation,
1980
annotations_len,
1981
loc));
1982
}
1983
return SUCCESS;
1984
}
1985
1986
static int
compiler_visit_annotations(struct compiler *c, location loc,
                           arguments_ty args, expr_ty returns)
{
    /* Push arg annotation names and values.
       The expressions are evaluated out-of-order wrt the source code.

       Return -1 on error, 0 if no annotations pushed, 1 if a annotations is pushed.
    */
    Py_ssize_t annotations_len = 0;  /* counts names + values pushed */

    RETURN_IF_ERROR(
        compiler_visit_argannotations(c, args->args, &annotations_len, loc));

    RETURN_IF_ERROR(
        compiler_visit_argannotations(c, args->posonlyargs, &annotations_len, loc));

    if (args->vararg && args->vararg->annotation) {
        RETURN_IF_ERROR(
            compiler_visit_argannotation(c, args->vararg->arg,
                                         args->vararg->annotation, &annotations_len, loc));
    }

    RETURN_IF_ERROR(
        compiler_visit_argannotations(c, args->kwonlyargs, &annotations_len, loc));

    if (args->kwarg && args->kwarg->annotation) {
        RETURN_IF_ERROR(
            compiler_visit_argannotation(c, args->kwarg->arg,
                                         args->kwarg->annotation, &annotations_len, loc));
    }

    /* The return annotation is stored under the key "return". */
    RETURN_IF_ERROR(
        compiler_visit_argannotation(c, &_Py_ID(return), returns, &annotations_len, loc));

    if (annotations_len) {
        ADDOP_I(c, loc, BUILD_TUPLE, annotations_len);
        return 1;
    }

    return 0;
}
/* Evaluate the positional default expressions and pack them into a tuple
   on the stack. */
static int
compiler_visit_defaults(struct compiler *c, arguments_ty args,
                        location loc)
{
    VISIT_SEQ(c, expr, args->defaults);
    ADDOP_I(c, loc, BUILD_TUPLE, asdl_seq_LEN(args->defaults));
    return SUCCESS;
}
/* Push positional defaults (as a tuple) and keyword-only defaults (as a
   dict) if any; return the MAKE_FUNCTION_* flag bits that were pushed,
   or a negative value on error. */
static Py_ssize_t
compiler_default_arguments(struct compiler *c, location loc,
                           arguments_ty args)
{
    Py_ssize_t funcflags = 0;
    if (args->defaults && asdl_seq_LEN(args->defaults) > 0) {
        RETURN_IF_ERROR(compiler_visit_defaults(c, args, loc));
        funcflags |= MAKE_FUNCTION_DEFAULTS;
    }
    if (args->kwonlyargs) {
        /* res is 1 when a kw-defaults dict was actually pushed, 0 if
           none of the kw-only args had a default. */
        int res = compiler_visit_kwonlydefaults(c, loc,
                                                args->kwonlyargs,
                                                args->kw_defaults);
        RETURN_IF_ERROR(res);
        if (res > 0) {
            funcflags |= MAKE_FUNCTION_KWDEFAULTS;
        }
    }
    return funcflags;
}
static bool
2060
forbidden_name(struct compiler *c, location loc, identifier name,
2061
expr_context_ty ctx)
2062
{
2063
if (ctx == Store && _PyUnicode_EqualToASCIIString(name, "__debug__")) {
2064
compiler_error(c, loc, "cannot assign to __debug__");
2065
return true;
2066
}
2067
if (ctx == Del && _PyUnicode_EqualToASCIIString(name, "__debug__")) {
2068
compiler_error(c, loc, "cannot delete __debug__");
2069
return true;
2070
}
2071
return false;
2072
}
2073
2074
static int
2075
compiler_check_debug_one_arg(struct compiler *c, arg_ty arg)
2076
{
2077
if (arg != NULL) {
2078
if (forbidden_name(c, LOC(arg), arg->arg, Store)) {
2079
return ERROR;
2080
}
2081
}
2082
return SUCCESS;
2083
}
2084
2085
static int
2086
compiler_check_debug_args_seq(struct compiler *c, asdl_arg_seq *args)
2087
{
2088
if (args != NULL) {
2089
for (Py_ssize_t i = 0, n = asdl_seq_LEN(args); i < n; i++) {
2090
RETURN_IF_ERROR(
2091
compiler_check_debug_one_arg(c, asdl_seq_GET(args, i)));
2092
}
2093
}
2094
return SUCCESS;
2095
}
2096
2097
/* Reject __debug__ in any parameter position of a function signature. */
static int
compiler_check_debug_args(struct compiler *c, arguments_ty args)
{
    RETURN_IF_ERROR(compiler_check_debug_args_seq(c, args->posonlyargs));
    RETURN_IF_ERROR(compiler_check_debug_args_seq(c, args->args));
    RETURN_IF_ERROR(compiler_check_debug_one_arg(c, args->vararg));
    RETURN_IF_ERROR(compiler_check_debug_args_seq(c, args->kwonlyargs));
    RETURN_IF_ERROR(compiler_check_debug_one_arg(c, args->kwarg));
    return SUCCESS;
}
/* Wrap the instruction sequence emitted so far in a handler that turns a
   StopIteration escaping a generator/coroutine body into a RuntimeError
   (PEP 479), and append the implicit `return None`. */
static int
wrap_in_stopiteration_handler(struct compiler *c)
{
    NEW_JUMP_TARGET_LABEL(c, handler);

    /* Insert SETUP_CLEANUP at start */
    RETURN_IF_ERROR(
        instr_sequence_insert_instruction(
            INSTR_SEQUENCE(c), 0,
            SETUP_CLEANUP, handler.id, NO_LOCATION));

    ADDOP_LOAD_CONST(c, NO_LOCATION, Py_None);
    ADDOP(c, NO_LOCATION, RETURN_VALUE);
    USE_LABEL(c, handler);
    ADDOP_I(c, NO_LOCATION, CALL_INTRINSIC_1, INTRINSIC_STOPITERATION_ERROR);
    ADDOP_I(c, NO_LOCATION, RERAISE, 1);
    return SUCCESS;
}
/* Emit code creating the runtime objects for a PEP 695 type-parameter
 * list (TypeVar/TypeVarTuple/ParamSpec). Each parameter is built via a
 * CALL_INTRINSIC, bound to its name in the enclosing scope, and finally
 * all of them are collected into a tuple left on the stack.
 */
static int
compiler_type_params(struct compiler *c, asdl_type_param_seq *type_params)
{
    if (!type_params) {
        return SUCCESS;
    }
    Py_ssize_t n = asdl_seq_LEN(type_params);

    for (Py_ssize_t i = 0; i < n; i++) {
        type_param_ty typeparam = asdl_seq_GET(type_params, i);
        location loc = LOC(typeparam);
        switch(typeparam->kind) {
        case TypeVar_kind:
            ADDOP_LOAD_CONST(c, loc, typeparam->v.TypeVar.name);
            if (typeparam->v.TypeVar.bound) {
                /* The bound expression is lazily evaluated: compile it in
                   its own scope and pass the resulting function to the
                   intrinsic. */
                expr_ty bound = typeparam->v.TypeVar.bound;
                if (compiler_enter_scope(c, typeparam->v.TypeVar.name, COMPILER_SCOPE_TYPEPARAMS,
                                         (void *)typeparam, bound->lineno) == -1) {
                    return ERROR;
                }
                VISIT_IN_SCOPE(c, expr, bound);
                ADDOP_IN_SCOPE(c, loc, RETURN_VALUE);
                PyCodeObject *co = optimize_and_assemble(c, 1);
                compiler_exit_scope(c);
                if (co == NULL) {
                    return ERROR;
                }
                if (compiler_make_closure(c, loc, co, 0) < 0) {
                    Py_DECREF(co);
                    return ERROR;
                }
                Py_DECREF(co);

                /* A tuple bound means constraints: TypeVar('T', int, str). */
                int intrinsic = bound->kind == Tuple_kind
                    ? INTRINSIC_TYPEVAR_WITH_CONSTRAINTS
                    : INTRINSIC_TYPEVAR_WITH_BOUND;
                ADDOP_I(c, loc, CALL_INTRINSIC_2, intrinsic);
            }
            else {
                ADDOP_I(c, loc, CALL_INTRINSIC_1, INTRINSIC_TYPEVAR);
            }
            /* Keep a copy on the stack for the BUILD_TUPLE below. */
            ADDOP_I(c, loc, COPY, 1);
            RETURN_IF_ERROR(compiler_nameop(c, loc, typeparam->v.TypeVar.name, Store));
            break;
        case TypeVarTuple_kind:
            ADDOP_LOAD_CONST(c, loc, typeparam->v.TypeVarTuple.name);
            ADDOP_I(c, loc, CALL_INTRINSIC_1, INTRINSIC_TYPEVARTUPLE);
            ADDOP_I(c, loc, COPY, 1);
            RETURN_IF_ERROR(compiler_nameop(c, loc, typeparam->v.TypeVarTuple.name, Store));
            break;
        case ParamSpec_kind:
            ADDOP_LOAD_CONST(c, loc, typeparam->v.ParamSpec.name);
            ADDOP_I(c, loc, CALL_INTRINSIC_1, INTRINSIC_PARAMSPEC);
            ADDOP_I(c, loc, COPY, 1);
            RETURN_IF_ERROR(compiler_nameop(c, loc, typeparam->v.ParamSpec.name, Store));
            break;
        }
    }
    ADDOP_I(c, LOC(asdl_seq_GET(type_params, 0)), BUILD_TUPLE, n);
    return SUCCESS;
}
static int
2190
compiler_function_body(struct compiler *c, stmt_ty s, int is_async, Py_ssize_t funcflags,
2191
int firstlineno)
2192
{
2193
PyObject *docstring = NULL;
2194
arguments_ty args;
2195
identifier name;
2196
asdl_stmt_seq *body;
2197
int scope_type;
2198
2199
if (is_async) {
2200
assert(s->kind == AsyncFunctionDef_kind);
2201
2202
args = s->v.AsyncFunctionDef.args;
2203
name = s->v.AsyncFunctionDef.name;
2204
body = s->v.AsyncFunctionDef.body;
2205
2206
scope_type = COMPILER_SCOPE_ASYNC_FUNCTION;
2207
} else {
2208
assert(s->kind == FunctionDef_kind);
2209
2210
args = s->v.FunctionDef.args;
2211
name = s->v.FunctionDef.name;
2212
body = s->v.FunctionDef.body;
2213
2214
scope_type = COMPILER_SCOPE_FUNCTION;
2215
}
2216
2217
RETURN_IF_ERROR(
2218
compiler_enter_scope(c, name, scope_type, (void *)s, firstlineno));
2219
2220
/* if not -OO mode, add docstring */
2221
if (c->c_optimize < 2) {
2222
docstring = _PyAST_GetDocString(body);
2223
}
2224
if (compiler_add_const(c->c_const_cache, c->u, docstring ? docstring : Py_None) < 0) {
2225
compiler_exit_scope(c);
2226
return ERROR;
2227
}
2228
2229
c->u->u_metadata.u_argcount = asdl_seq_LEN(args->args);
2230
c->u->u_metadata.u_posonlyargcount = asdl_seq_LEN(args->posonlyargs);
2231
c->u->u_metadata.u_kwonlyargcount = asdl_seq_LEN(args->kwonlyargs);
2232
for (Py_ssize_t i = docstring ? 1 : 0; i < asdl_seq_LEN(body); i++) {
2233
VISIT_IN_SCOPE(c, stmt, (stmt_ty)asdl_seq_GET(body, i));
2234
}
2235
if (c->u->u_ste->ste_coroutine || c->u->u_ste->ste_generator) {
2236
if (wrap_in_stopiteration_handler(c) < 0) {
2237
compiler_exit_scope(c);
2238
return ERROR;
2239
}
2240
}
2241
PyCodeObject *co = optimize_and_assemble(c, 1);
2242
compiler_exit_scope(c);
2243
if (co == NULL) {
2244
Py_XDECREF(co);
2245
return ERROR;
2246
}
2247
location loc = LOC(s);
2248
if (compiler_make_closure(c, loc, co, funcflags) < 0) {
2249
Py_DECREF(co);
2250
return ERROR;
2251
}
2252
Py_DECREF(co);
2253
return SUCCESS;
2254
}
2255
2256
/* Compile a FunctionDef / AsyncFunctionDef statement.
 *
 * Emits, in order: decorators, default/kwdefault values, (for generic
 * functions) a nested TYPEPARAMS scope that builds the type parameters and
 * re-exposes the defaults via LOAD_FAST, annotations, the function body
 * closure, decorator application, and finally a Store of the function name.
 * Returns SUCCESS or ERROR. */
static int
compiler_function(struct compiler *c, stmt_ty s, int is_async)
{
    arguments_ty args;
    expr_ty returns;
    identifier name;
    asdl_expr_seq *decos;
    asdl_type_param_seq *type_params;
    Py_ssize_t funcflags;
    int annotations;
    int firstlineno;

    if (is_async) {
        assert(s->kind == AsyncFunctionDef_kind);

        args = s->v.AsyncFunctionDef.args;
        returns = s->v.AsyncFunctionDef.returns;
        decos = s->v.AsyncFunctionDef.decorator_list;
        name = s->v.AsyncFunctionDef.name;
        type_params = s->v.AsyncFunctionDef.type_params;
    } else {
        assert(s->kind == FunctionDef_kind);

        args = s->v.FunctionDef.args;
        returns = s->v.FunctionDef.returns;
        decos = s->v.FunctionDef.decorator_list;
        name = s->v.FunctionDef.name;
        type_params = s->v.FunctionDef.type_params;
    }

    RETURN_IF_ERROR(compiler_check_debug_args(c, args));
    RETURN_IF_ERROR(compiler_decorators(c, decos));

    /* Attribute the function's first line to the first decorator, if any. */
    firstlineno = s->lineno;
    if (asdl_seq_LEN(decos)) {
        firstlineno = ((expr_ty)asdl_seq_GET(decos, 0))->lineno;
    }

    location loc = LOC(s);

    int is_generic = asdl_seq_LEN(type_params) > 0;

    if (is_generic) {
        // Used by the CALL to the type parameters function.
        ADDOP(c, loc, PUSH_NULL);
    }

    funcflags = compiler_default_arguments(c, loc, args);
    if (funcflags == -1) {
        return ERROR;
    }

    int num_typeparam_args = 0;

    if (is_generic) {
        /* Defaults/kwdefaults were pushed in the *enclosing* scope; count
         * them so they can be passed into the type-params function as
         * positional arguments and re-loaded there via LOAD_FAST. */
        if (funcflags & MAKE_FUNCTION_DEFAULTS) {
            num_typeparam_args += 1;
        }
        if (funcflags & MAKE_FUNCTION_KWDEFAULTS) {
            num_typeparam_args += 1;
        }
        if (num_typeparam_args == 2) {
            ADDOP_I(c, loc, SWAP, 2);
        }
        PyObject *type_params_name = PyUnicode_FromFormat("<generic parameters of %U>", name);
        if (!type_params_name) {
            return ERROR;
        }
        if (compiler_enter_scope(c, type_params_name, COMPILER_SCOPE_TYPEPARAMS,
                                 (void *)type_params, firstlineno) == -1) {
            Py_DECREF(type_params_name);
            return ERROR;
        }
        Py_DECREF(type_params_name);
        RETURN_IF_ERROR_IN_SCOPE(c, compiler_type_params(c, type_params));
        for (int i = 0; i < num_typeparam_args; i++) {
            RETURN_IF_ERROR_IN_SCOPE(c, codegen_addop_i(INSTR_SEQUENCE(c), LOAD_FAST, i, loc));
        }
    }

    annotations = compiler_visit_annotations(c, loc, args, returns);
    if (annotations < 0) {
        if (is_generic) {
            compiler_exit_scope(c);
        }
        return ERROR;
    }
    if (annotations > 0) {
        funcflags |= MAKE_FUNCTION_ANNOTATIONS;
    }

    if (compiler_function_body(c, s, is_async, funcflags, firstlineno) < 0) {
        if (is_generic) {
            compiler_exit_scope(c);
        }
        return ERROR;
    }

    if (is_generic) {
        /* Attach the type-params tuple to the function object, then
         * assemble the TYPEPARAMS scope into its own code object and
         * immediately CALL it with the saved defaults. */
        RETURN_IF_ERROR_IN_SCOPE(c, codegen_addop_i(
            INSTR_SEQUENCE(c), SWAP, 2, loc));
        RETURN_IF_ERROR_IN_SCOPE(c, codegen_addop_i(
            INSTR_SEQUENCE(c), CALL_INTRINSIC_2, INTRINSIC_SET_FUNCTION_TYPE_PARAMS, loc));

        c->u->u_metadata.u_argcount = num_typeparam_args;
        PyCodeObject *co = optimize_and_assemble(c, 0);
        compiler_exit_scope(c);
        if (co == NULL) {
            return ERROR;
        }
        if (compiler_make_closure(c, loc, co, 0) < 0) {
            Py_DECREF(co);
            return ERROR;
        }
        Py_DECREF(co);
        if (num_typeparam_args > 0) {
            ADDOP_I(c, loc, SWAP, num_typeparam_args + 1);
        }
        ADDOP_I(c, loc, CALL, num_typeparam_args);
    }

    RETURN_IF_ERROR(compiler_apply_decorators(c, decos));
    return compiler_nameop(c, loc, name, Store);
}
2380
2381
/* Inside a class body, load the hidden ".type_params" tuple (stored by
 * compiler_class) and bind it as __type_params__.
 *
 * Returns 1 on success (the caller tests the result with `!`, so truthy
 * means OK); the RETURN_IF_ERROR macros produce the failure return. */
static int
compiler_set_type_params_in_class(struct compiler *c, location loc)
{
    _Py_DECLARE_STR(type_params, ".type_params");
    RETURN_IF_ERROR(compiler_nameop(c, loc, &_Py_STR(type_params), Load));
    RETURN_IF_ERROR(compiler_nameop(c, loc, &_Py_ID(__type_params__), Store));
    return 1;
}
2389
2390
/* Compile a class body into a code object and emit the first part of the
 * __build_class__ call sequence (PUSH_NULL, LOAD_BUILD_CLASS, the body
 * closure, and the class name). The caller supplies bases/keywords and the
 * CALL itself. On success the stack holds the call so far; returns
 * SUCCESS or ERROR. */
static int
compiler_class_body(struct compiler *c, stmt_ty s, int firstlineno)
{
    /* ultimately generate code for:
         <name> = __build_class__(<func>, <name>, *<bases>, **<keywords>)
       where:
         <func> is a zero arg function/closure created from the class body.
            It mutates its locals to build the class namespace.
         <name> is the class name
         <bases> is the positional arguments and *varargs argument
         <keywords> is the keyword arguments and **kwds argument
       This borrows from compiler_call.
    */

    /* 1. compile the class body into a code object */
    RETURN_IF_ERROR(
        compiler_enter_scope(c, s->v.ClassDef.name,
                             COMPILER_SCOPE_CLASS, (void *)s, firstlineno));

    location loc = LOCATION(firstlineno, firstlineno, 0, 0);
    /* use the class name for name mangling */
    Py_XSETREF(c->u->u_private, Py_NewRef(s->v.ClassDef.name));
    /* load (global) __name__ ... */
    if (compiler_nameop(c, loc, &_Py_ID(__name__), Load) < 0) {
        compiler_exit_scope(c);
        return ERROR;
    }
    /* ... and store it as __module__ */
    if (compiler_nameop(c, loc, &_Py_ID(__module__), Store) < 0) {
        compiler_exit_scope(c);
        return ERROR;
    }
    assert(c->u->u_metadata.u_qualname);
    ADDOP_LOAD_CONST(c, loc, c->u->u_metadata.u_qualname);
    if (compiler_nameop(c, loc, &_Py_ID(__qualname__), Store) < 0) {
        compiler_exit_scope(c);
        return ERROR;
    }
    /* For a generic class, expose the type parameters as __type_params__. */
    asdl_type_param_seq *type_params = s->v.ClassDef.type_params;
    if (asdl_seq_LEN(type_params) > 0) {
        if (!compiler_set_type_params_in_class(c, loc)) {
            compiler_exit_scope(c);
            return ERROR;
        }
    }
    if (c->u->u_ste->ste_needs_classdict) {
        ADDOP(c, loc, LOAD_LOCALS);

        // We can't use compiler_nameop here because we need to generate a
        // STORE_DEREF in a class namespace, and compiler_nameop() won't do
        // that by default.
        PyObject *cellvars = c->u->u_metadata.u_cellvars;
        if (compiler_addop_o(c->u, loc, STORE_DEREF, cellvars,
                             &_Py_ID(__classdict__)) < 0) {
            compiler_exit_scope(c);
            return ERROR;
        }
    }
    /* compile the body proper */
    if (compiler_body(c, loc, s->v.ClassDef.body) < 0) {
        compiler_exit_scope(c);
        return ERROR;
    }
    /* The following code is artificial */
    /* Set __classdictcell__ if necessary */
    if (c->u->u_ste->ste_needs_classdict) {
        /* Store __classdictcell__ into class namespace */
        int i = compiler_lookup_arg(c->u->u_metadata.u_cellvars, &_Py_ID(__classdict__));
        if (i < 0) {
            compiler_exit_scope(c);
            return ERROR;
        }
        ADDOP_I(c, NO_LOCATION, LOAD_CLOSURE, i);
        if (compiler_nameop(c, NO_LOCATION, &_Py_ID(__classdictcell__), Store) < 0) {
            compiler_exit_scope(c);
            return ERROR;
        }
    }
    /* Return __classcell__ if it is referenced, otherwise return None */
    if (c->u->u_ste->ste_needs_class_closure) {
        /* Store __classcell__ into class namespace & return it */
        int i = compiler_lookup_arg(c->u->u_metadata.u_cellvars, &_Py_ID(__class__));
        if (i < 0) {
            compiler_exit_scope(c);
            return ERROR;
        }
        ADDOP_I(c, NO_LOCATION, LOAD_CLOSURE, i);
        ADDOP_I(c, NO_LOCATION, COPY, 1);
        if (compiler_nameop(c, NO_LOCATION, &_Py_ID(__classcell__), Store) < 0) {
            compiler_exit_scope(c);
            return ERROR;
        }
    }
    else {
        /* No methods referenced __class__, so just return None */
        ADDOP_LOAD_CONST(c, NO_LOCATION, Py_None);
    }
    ADDOP_IN_SCOPE(c, NO_LOCATION, RETURN_VALUE);
    /* create the code object */
    PyCodeObject *co = optimize_and_assemble(c, 1);

    /* leave the new scope */
    compiler_exit_scope(c);
    if (co == NULL) {
        return ERROR;
    }

    /* 2. load the 'build_class' function */

    // these instructions should be attributed to the class line,
    // not a decorator line
    loc = LOC(s);
    ADDOP(c, loc, PUSH_NULL);
    ADDOP(c, loc, LOAD_BUILD_CLASS);

    /* 3. load a function (or closure) made from the code object */
    if (compiler_make_closure(c, loc, co, 0) < 0) {
        Py_DECREF(co);
        return ERROR;
    }
    Py_DECREF(co);

    /* 4. load class name */
    ADDOP_LOAD_CONST(c, loc, s->v.ClassDef.name);

    return SUCCESS;
}
2517
2518
/* Compile a ClassDef statement.
 *
 * Non-generic classes: body closure + __build_class__ call with the
 * declared bases/keywords.  Generic classes (PEP 695): the whole build is
 * wrapped in a nested TYPEPARAMS scope which creates the type parameters,
 * stashes them as ".type_params", appends Generic[*params] (via the
 * SUBSCRIPT_GENERIC intrinsic, bound to ".generic_base") as an extra base,
 * and is then called with zero arguments.  Finally decorators are applied
 * and the class is stored under its name.
 * Returns SUCCESS or ERROR. */
static int
compiler_class(struct compiler *c, stmt_ty s)
{
    asdl_expr_seq *decos = s->v.ClassDef.decorator_list;

    RETURN_IF_ERROR(compiler_decorators(c, decos));

    /* Attribute the class's first line to the first decorator, if any. */
    int firstlineno = s->lineno;
    if (asdl_seq_LEN(decos)) {
        firstlineno = ((expr_ty)asdl_seq_GET(decos, 0))->lineno;
    }
    location loc = LOC(s);

    asdl_type_param_seq *type_params = s->v.ClassDef.type_params;
    int is_generic = asdl_seq_LEN(type_params) > 0;
    if (is_generic) {
        /* Use the class name for mangling inside the type-params scope. */
        Py_XSETREF(c->u->u_private, Py_NewRef(s->v.ClassDef.name));
        ADDOP(c, loc, PUSH_NULL);
        PyObject *type_params_name = PyUnicode_FromFormat("<generic parameters of %U>",
                                                          s->v.ClassDef.name);
        if (!type_params_name) {
            return ERROR;
        }
        if (compiler_enter_scope(c, type_params_name, COMPILER_SCOPE_TYPEPARAMS,
                                 (void *)type_params, firstlineno) == -1) {
            Py_DECREF(type_params_name);
            return ERROR;
        }
        Py_DECREF(type_params_name);
        RETURN_IF_ERROR_IN_SCOPE(c, compiler_type_params(c, type_params));
        /* Save the type-params tuple under a hidden name for later use
         * (both as __type_params__ and for the Generic base). */
        _Py_DECLARE_STR(type_params, ".type_params");
        RETURN_IF_ERROR_IN_SCOPE(c, compiler_nameop(c, loc, &_Py_STR(type_params), Store));
    }

    if (compiler_class_body(c, s, firstlineno) < 0) {
        if (is_generic) {
            compiler_exit_scope(c);
        }
        return ERROR;
    }

    /* generate the rest of the code for the call */

    if (is_generic) {
        _Py_DECLARE_STR(type_params, ".type_params");
        _Py_DECLARE_STR(generic_base, ".generic_base");
        RETURN_IF_ERROR_IN_SCOPE(c, compiler_nameop(c, loc, &_Py_STR(type_params), Load));
        RETURN_IF_ERROR_IN_SCOPE(
            c, codegen_addop_i(INSTR_SEQUENCE(c), CALL_INTRINSIC_1, INTRINSIC_SUBSCRIPT_GENERIC, loc)
        )
        RETURN_IF_ERROR_IN_SCOPE(c, compiler_nameop(c, loc, &_Py_STR(generic_base), Store));

        /* Build a bases list with Generic[...] appended at the end. */
        Py_ssize_t original_len = asdl_seq_LEN(s->v.ClassDef.bases);
        asdl_expr_seq *bases = _Py_asdl_expr_seq_new(
            original_len + 1, c->c_arena);
        if (bases == NULL) {
            compiler_exit_scope(c);
            return ERROR;
        }
        for (Py_ssize_t i = 0; i < original_len; i++) {
            asdl_seq_SET(bases, i, asdl_seq_GET(s->v.ClassDef.bases, i));
        }
        expr_ty name_node = _PyAST_Name(
            &_Py_STR(generic_base), Load,
            loc.lineno, loc.col_offset, loc.end_lineno, loc.end_col_offset, c->c_arena
        );
        if (name_node == NULL) {
            compiler_exit_scope(c);
            return ERROR;
        }
        asdl_seq_SET(bases, original_len, name_node);
        RETURN_IF_ERROR_IN_SCOPE(c, compiler_call_helper(c, loc, 2,
                                                         bases,
                                                         s->v.ClassDef.keywords));

        /* Assemble the TYPEPARAMS scope and call it with no arguments. */
        PyCodeObject *co = optimize_and_assemble(c, 0);
        compiler_exit_scope(c);
        if (co == NULL) {
            return ERROR;
        }
        if (compiler_make_closure(c, loc, co, 0) < 0) {
            Py_DECREF(co);
            return ERROR;
        }
        Py_DECREF(co);
        ADDOP_I(c, loc, CALL, 0);
    } else {
        RETURN_IF_ERROR(compiler_call_helper(c, loc, 2,
                                             s->v.ClassDef.bases,
                                             s->v.ClassDef.keywords));
    }

    /* 6. apply decorators */
    RETURN_IF_ERROR(compiler_apply_decorators(c, decos));

    /* 7. store into <name> */
    RETURN_IF_ERROR(compiler_nameop(c, loc, s->v.ClassDef.name, Store));
    return SUCCESS;
}
2617
2618
/* Compile the value of a `type X = ...` statement (PEP 695) into a lazily
 * evaluated closure, then pack (name, type_params-or-None, closure) into a
 * 3-tuple and call the TYPEALIAS intrinsic. The caller has already pushed
 * the first two tuple items. Returns SUCCESS or ERROR. */
static int
compiler_typealias_body(struct compiler *c, stmt_ty s)
{
    location loc = LOC(s);
    PyObject *name = s->v.TypeAlias.name->v.Name.id;
    RETURN_IF_ERROR(
        compiler_enter_scope(c, name, COMPILER_SCOPE_FUNCTION, s, loc.lineno));
    /* Make None the first constant, so the evaluate function can't have a
       docstring. */
    RETURN_IF_ERROR(compiler_add_const(c->c_const_cache, c->u, Py_None));
    VISIT_IN_SCOPE(c, expr, s->v.TypeAlias.value);
    ADDOP_IN_SCOPE(c, loc, RETURN_VALUE);
    PyCodeObject *co = optimize_and_assemble(c, 0);
    compiler_exit_scope(c);
    if (co == NULL) {
        return ERROR;
    }
    if (compiler_make_closure(c, loc, co, 0) < 0) {
        Py_DECREF(co);
        return ERROR;
    }
    Py_DECREF(co);
    /* Stack: name, type_params (or None), evaluate-closure. */
    ADDOP_I(c, loc, BUILD_TUPLE, 3);
    ADDOP_I(c, loc, CALL_INTRINSIC_1, INTRINSIC_TYPEALIAS);
    return SUCCESS;
}
2644
2645
/* Compile a `type X[...] = ...` statement.
 *
 * A generic alias wraps the construction in a nested TYPEPARAMS scope that
 * first builds the type parameters; a non-generic alias pushes the name and
 * None directly. In both cases compiler_typealias_body() finishes the
 * TYPEALIAS intrinsic call, and the result is stored under the alias name.
 * Returns SUCCESS or ERROR. */
static int
compiler_typealias(struct compiler *c, stmt_ty s)
{
    location loc = LOC(s);
    asdl_type_param_seq *type_params = s->v.TypeAlias.type_params;
    int is_generic = asdl_seq_LEN(type_params) > 0;
    PyObject *name = s->v.TypeAlias.name->v.Name.id;
    if (is_generic) {
        /* NULL for the zero-argument CALL of the type-params function. */
        ADDOP(c, loc, PUSH_NULL);
        PyObject *type_params_name = PyUnicode_FromFormat("<generic parameters of %U>",
                                                          name);
        if (!type_params_name) {
            return ERROR;
        }
        if (compiler_enter_scope(c, type_params_name, COMPILER_SCOPE_TYPEPARAMS,
                                 (void *)type_params, loc.lineno) == -1) {
            Py_DECREF(type_params_name);
            return ERROR;
        }
        Py_DECREF(type_params_name);
        RETURN_IF_ERROR_IN_SCOPE(
            c, compiler_addop_load_const(c->c_const_cache, c->u, loc, name)
        );
        RETURN_IF_ERROR_IN_SCOPE(c, compiler_type_params(c, type_params));
    }
    else {
        /* Non-generic: name plus None in place of a type-params tuple. */
        ADDOP_LOAD_CONST(c, loc, name);
        ADDOP_LOAD_CONST(c, loc, Py_None);
    }

    if (compiler_typealias_body(c, s) < 0) {
        if (is_generic) {
            compiler_exit_scope(c);
        }
        return ERROR;
    }

    if (is_generic) {
        /* Assemble the TYPEPARAMS scope and call it immediately. */
        PyCodeObject *co = optimize_and_assemble(c, 0);
        compiler_exit_scope(c);
        if (co == NULL) {
            return ERROR;
        }
        if (compiler_make_closure(c, loc, co, 0) < 0) {
            Py_DECREF(co);
            return ERROR;
        }
        Py_DECREF(co);
        ADDOP_I(c, loc, CALL, 0);
    }
    RETURN_IF_ERROR(compiler_nameop(c, loc, name, Store));
    return SUCCESS;
}
2698
2699
/* Return false if the expression is a constant value except named singletons.
2700
Return true otherwise. */
2701
static bool
2702
check_is_arg(expr_ty e)
2703
{
2704
if (e->kind != Constant_kind) {
2705
return true;
2706
}
2707
PyObject *value = e->v.Constant.value;
2708
return (value == Py_None
2709
|| value == Py_False
2710
|| value == Py_True
2711
|| value == Py_Ellipsis);
2712
}
2713
2714
static PyTypeObject * infer_type(expr_ty e);
2715
2716
/* Check operands of identity checks ("is" and "is not").
   Emit a warning if any operand is a constant except named singletons.
 */
static int
check_compare(struct compiler *c, expr_ty e)
{
    Py_ssize_t i, n;
    bool left = check_is_arg(e->v.Compare.left);
    expr_ty left_expr = e->v.Compare.left;
    n = asdl_seq_LEN(e->v.Compare.ops);
    /* Walk the comparison chain pairwise: (left op0 c0 op1 c1 ...). */
    for (i = 0; i < n; i++) {
        cmpop_ty op = (cmpop_ty)asdl_seq_GET(e->v.Compare.ops, i);
        expr_ty right_expr = (expr_ty)asdl_seq_GET(e->v.Compare.comparators, i);
        bool right = check_is_arg(right_expr);
        if (op == Is || op == IsNot) {
            if (!right || !left) {
                const char *msg = (op == Is)
                        ? "\"is\" with '%.200s' literal. Did you mean \"==\"?"
                        : "\"is not\" with '%.200s' literal. Did you mean \"!=\"?";
                /* Report whichever side is the offending literal. */
                expr_ty literal = !left ? left_expr : right_expr;
                return compiler_warn(
                    c, LOC(e), msg, infer_type(literal)->tp_name
                );
            }
        }
        /* The right operand of this link is the left of the next one. */
        left = right;
        left_expr = right_expr;
    }
    return SUCCESS;
}
2746
2747
/* Per-comparison oparg masks for COMPARE_OP, indexed by the Py_LT..Py_GE
 * rich-comparison codes; combined into the oparg in compiler_addcompare(). */
static const int compare_masks[] = {
    [Py_LT] = COMPARISON_LESS_THAN,
    [Py_LE] = COMPARISON_LESS_THAN | COMPARISON_EQUALS,
    [Py_EQ] = COMPARISON_EQUALS,
    [Py_NE] = COMPARISON_NOT_EQUALS,
    [Py_GT] = COMPARISON_GREATER_THAN,
    [Py_GE] = COMPARISON_GREATER_THAN | COMPARISON_EQUALS,
};
2755
2756
/* Emit the opcode for a single comparison operator: IS_OP / CONTAINS_OP for
 * identity and membership tests, COMPARE_OP (with the rich-comparison code
 * and mask packed into the oparg) for the ordering/equality operators.
 * Returns SUCCESS or ERROR. */
static int compiler_addcompare(struct compiler *c, location loc,
                               cmpop_ty op)
{
    int cmp;
    switch (op) {
    case Eq:
        cmp = Py_EQ;
        break;
    case NotEq:
        cmp = Py_NE;
        break;
    case Lt:
        cmp = Py_LT;
        break;
    case LtE:
        cmp = Py_LE;
        break;
    case Gt:
        cmp = Py_GT;
        break;
    case GtE:
        cmp = Py_GE;
        break;
    case Is:
        ADDOP_I(c, loc, IS_OP, 0);
        return SUCCESS;
    case IsNot:
        ADDOP_I(c, loc, IS_OP, 1);
        return SUCCESS;
    case In:
        ADDOP_I(c, loc, CONTAINS_OP, 0);
        return SUCCESS;
    case NotIn:
        ADDOP_I(c, loc, CONTAINS_OP, 1);
        return SUCCESS;
    default:
        Py_UNREACHABLE();
    }
    // cmp goes in top three bits of the oparg, while the low four bits are used
    // by quickened versions of this opcode to store the comparison mask. The
    // fifth-lowest bit indicates whether the result should be converted to bool
    // and is set later):
    ADDOP_I(c, loc, COMPARE_OP, (cmp << 5) | compare_masks[cmp]);
    return SUCCESS;
}
2801
2802
2803
2804
/* Emit code that evaluates expression `e` and jumps to `next` when its
 * truthiness equals `cond` (falls through otherwise).
 *
 * Special-cases `not`, and/or chains, conditional expressions, and chained
 * comparisons so no intermediate bool object is materialized; everything
 * else evaluates the expression, coerces with TO_BOOL and emits a
 * conditional POP_JUMP. Returns SUCCESS or ERROR. */
static int
compiler_jump_if(struct compiler *c, location loc,
                 expr_ty e, jump_target_label next, int cond)
{
    switch (e->kind) {
    case UnaryOp_kind:
        /* `not x` just inverts the sense of the jump. */
        if (e->v.UnaryOp.op == Not) {
            return compiler_jump_if(c, loc, e->v.UnaryOp.operand, next, !cond);
        }
        /* fallback to general implementation */
        break;
    case BoolOp_kind: {
        asdl_expr_seq *s = e->v.BoolOp.values;
        Py_ssize_t i, n = asdl_seq_LEN(s) - 1;
        assert(n >= 0);
        /* For `or`, short-circuit on truth; for `and`, on falsehood. */
        int cond2 = e->v.BoolOp.op == Or;
        jump_target_label next2 = next;
        if (!cond2 != !cond) {
            /* The short-circuit target differs from `next`: use a fresh
             * label that falls through after the chain. */
            NEW_JUMP_TARGET_LABEL(c, new_next2);
            next2 = new_next2;
        }
        for (i = 0; i < n; ++i) {
            RETURN_IF_ERROR(
                compiler_jump_if(c, loc, (expr_ty)asdl_seq_GET(s, i), next2, cond2));
        }
        /* The last value decides with the caller's own (next, cond). */
        RETURN_IF_ERROR(
            compiler_jump_if(c, loc, (expr_ty)asdl_seq_GET(s, n), next, cond));
        if (!SAME_LABEL(next2, next)) {
            USE_LABEL(c, next2);
        }
        return SUCCESS;
    }
    case IfExp_kind: {
        NEW_JUMP_TARGET_LABEL(c, end);
        NEW_JUMP_TARGET_LABEL(c, next2);
        RETURN_IF_ERROR(
            compiler_jump_if(c, loc, e->v.IfExp.test, next2, 0));
        RETURN_IF_ERROR(
            compiler_jump_if(c, loc, e->v.IfExp.body, next, cond));
        ADDOP_JUMP(c, NO_LOCATION, JUMP, end);

        USE_LABEL(c, next2);
        RETURN_IF_ERROR(
            compiler_jump_if(c, loc, e->v.IfExp.orelse, next, cond));

        USE_LABEL(c, end);
        return SUCCESS;
    }
    case Compare_kind: {
        Py_ssize_t n = asdl_seq_LEN(e->v.Compare.ops) - 1;
        if (n > 0) {
            /* Chained comparison (a < b < c): each link duplicates the
             * shared operand; a failed link jumps to `cleanup` to pop it. */
            RETURN_IF_ERROR(check_compare(c, e));
            NEW_JUMP_TARGET_LABEL(c, cleanup);
            VISIT(c, expr, e->v.Compare.left);
            for (Py_ssize_t i = 0; i < n; i++) {
                VISIT(c, expr,
                    (expr_ty)asdl_seq_GET(e->v.Compare.comparators, i));
                ADDOP_I(c, LOC(e), SWAP, 2);
                ADDOP_I(c, LOC(e), COPY, 2);
                ADDOP_COMPARE(c, LOC(e), asdl_seq_GET(e->v.Compare.ops, i));
                ADDOP(c, LOC(e), TO_BOOL);
                ADDOP_JUMP(c, LOC(e), POP_JUMP_IF_FALSE, cleanup);
            }
            VISIT(c, expr, (expr_ty)asdl_seq_GET(e->v.Compare.comparators, n));
            ADDOP_COMPARE(c, LOC(e), asdl_seq_GET(e->v.Compare.ops, n));
            ADDOP(c, LOC(e), TO_BOOL);
            ADDOP_JUMP(c, LOC(e), cond ? POP_JUMP_IF_TRUE : POP_JUMP_IF_FALSE, next);
            NEW_JUMP_TARGET_LABEL(c, end);
            ADDOP_JUMP(c, NO_LOCATION, JUMP, end);

            USE_LABEL(c, cleanup);
            ADDOP(c, LOC(e), POP_TOP);
            if (!cond) {
                ADDOP_JUMP(c, NO_LOCATION, JUMP, next);
            }

            USE_LABEL(c, end);
            return SUCCESS;
        }
        /* fallback to general implementation */
        break;
    }
    default:
        /* fallback to general implementation */
        break;
    }

    /* general implementation */
    VISIT(c, expr, e);
    ADDOP(c, LOC(e), TO_BOOL);
    ADDOP_JUMP(c, LOC(e), cond ? POP_JUMP_IF_TRUE : POP_JUMP_IF_FALSE, next);
    return SUCCESS;
}
2897
2898
/* Compile a conditional expression `body if test else orelse`.
 * Exactly one of the two branch values is left on the stack.
 * Returns SUCCESS or ERROR. */
static int
compiler_ifexp(struct compiler *c, expr_ty e)
{
    assert(e->kind == IfExp_kind);
    NEW_JUMP_TARGET_LABEL(c, end);
    NEW_JUMP_TARGET_LABEL(c, next);

    /* Jump to `next` (the else branch) when the test is false. */
    RETURN_IF_ERROR(
        compiler_jump_if(c, LOC(e), e->v.IfExp.test, next, 0));

    VISIT(c, expr, e->v.IfExp.body);
    ADDOP_JUMP(c, NO_LOCATION, JUMP, end);

    USE_LABEL(c, next);
    VISIT(c, expr, e->v.IfExp.orelse);

    USE_LABEL(c, end);
    return SUCCESS;
}
2917
2918
/* Compile a lambda expression into a code object named "<lambda>" and emit
 * the closure that leaves the function object on the stack.
 * Returns SUCCESS or ERROR. */
static int
compiler_lambda(struct compiler *c, expr_ty e)
{
    PyCodeObject *co;
    Py_ssize_t funcflags;
    arguments_ty args = e->v.Lambda.args;
    assert(e->kind == Lambda_kind);

    RETURN_IF_ERROR(compiler_check_debug_args(c, args));

    location loc = LOC(e);
    /* Defaults/kwdefaults are evaluated in the enclosing scope. */
    funcflags = compiler_default_arguments(c, loc, args);
    if (funcflags == -1) {
        return ERROR;
    }

    _Py_DECLARE_STR(anon_lambda, "<lambda>");
    RETURN_IF_ERROR(
        compiler_enter_scope(c, &_Py_STR(anon_lambda), COMPILER_SCOPE_LAMBDA,
                             (void *)e, e->lineno));

    /* Make None the first constant, so the lambda can't have a
       docstring. */
    RETURN_IF_ERROR(compiler_add_const(c->c_const_cache, c->u, Py_None));

    c->u->u_metadata.u_argcount = asdl_seq_LEN(args->args);
    c->u->u_metadata.u_posonlyargcount = asdl_seq_LEN(args->posonlyargs);
    c->u->u_metadata.u_kwonlyargcount = asdl_seq_LEN(args->kwonlyargs);
    VISIT_IN_SCOPE(c, expr, e->v.Lambda.body);
    if (c->u->u_ste->ste_generator) {
        /* Generator lambda: no RETURN_VALUE of the body value. */
        co = optimize_and_assemble(c, 0);
    }
    else {
        /* NB: this inner `loc` intentionally shadows the outer one and is
         * pinned to the lambda's first line. */
        location loc = LOCATION(e->lineno, e->lineno, 0, 0);
        ADDOP_IN_SCOPE(c, loc, RETURN_VALUE);
        co = optimize_and_assemble(c, 1);
    }
    compiler_exit_scope(c);
    if (co == NULL) {
        return ERROR;
    }

    if (compiler_make_closure(c, loc, co, funcflags) < 0) {
        Py_DECREF(co);
        return ERROR;
    }
    Py_DECREF(co);

    return SUCCESS;
}
2968
2969
/* Compile an if/elif/else statement. With no else clause the false-branch
 * target is simply the end label. Returns SUCCESS or ERROR. */
static int
compiler_if(struct compiler *c, stmt_ty s)
{
    jump_target_label next;
    assert(s->kind == If_kind);
    NEW_JUMP_TARGET_LABEL(c, end);
    if (asdl_seq_LEN(s->v.If.orelse)) {
        NEW_JUMP_TARGET_LABEL(c, orelse);
        next = orelse;
    }
    else {
        next = end;
    }
    /* Jump past the body when the test is false. */
    RETURN_IF_ERROR(
        compiler_jump_if(c, LOC(s), s->v.If.test, next, 0));

    VISIT_SEQ(c, stmt, s->v.If.body);
    if (asdl_seq_LEN(s->v.If.orelse)) {
        ADDOP_JUMP(c, NO_LOCATION, JUMP, end);

        USE_LABEL(c, next);
        VISIT_SEQ(c, stmt, s->v.If.orelse);
    }

    USE_LABEL(c, end);
    return SUCCESS;
}
2996
2997
/* Compile a for statement: GET_ITER / FOR_ITER loop with an fblock pushed
 * so break/continue can find their targets; the orelse runs after normal
 * exhaustion (END_FOR), not after a break. Returns SUCCESS or ERROR. */
static int
compiler_for(struct compiler *c, stmt_ty s)
{
    location loc = LOC(s);
    NEW_JUMP_TARGET_LABEL(c, start);
    NEW_JUMP_TARGET_LABEL(c, body);
    NEW_JUMP_TARGET_LABEL(c, cleanup);
    NEW_JUMP_TARGET_LABEL(c, end);

    /* Register the loop so break jumps to `end`, continue to `start`. */
    RETURN_IF_ERROR(compiler_push_fblock(c, loc, FOR_LOOP, start, end, NULL));

    VISIT(c, expr, s->v.For.iter);
    ADDOP(c, loc, GET_ITER);

    USE_LABEL(c, start);
    ADDOP_JUMP(c, loc, FOR_ITER, cleanup);

    USE_LABEL(c, body);
    VISIT(c, expr, s->v.For.target);
    VISIT_SEQ(c, stmt, s->v.For.body);
    /* Mark jump as artificial */
    ADDOP_JUMP(c, NO_LOCATION, JUMP, start);

    USE_LABEL(c, cleanup);
    ADDOP(c, NO_LOCATION, END_FOR);

    compiler_pop_fblock(c, FOR_LOOP, start);

    VISIT_SEQ(c, stmt, s->v.For.orelse);

    USE_LABEL(c, end);
    return SUCCESS;
}
3030
3031
3032
static int
3033
compiler_async_for(struct compiler *c, stmt_ty s)
3034
{
3035
location loc = LOC(s);
3036
if (IS_TOP_LEVEL_AWAIT(c)){
3037
c->u->u_ste->ste_coroutine = 1;
3038
} else if (c->u->u_scope_type != COMPILER_SCOPE_ASYNC_FUNCTION) {
3039
return compiler_error(c, loc, "'async for' outside async function");
3040
}
3041
3042
NEW_JUMP_TARGET_LABEL(c, start);
3043
NEW_JUMP_TARGET_LABEL(c, except);
3044
NEW_JUMP_TARGET_LABEL(c, end);
3045
3046
VISIT(c, expr, s->v.AsyncFor.iter);
3047
ADDOP(c, loc, GET_AITER);
3048
3049
USE_LABEL(c, start);
3050
RETURN_IF_ERROR(compiler_push_fblock(c, loc, FOR_LOOP, start, end, NULL));
3051
3052
/* SETUP_FINALLY to guard the __anext__ call */
3053
ADDOP_JUMP(c, loc, SETUP_FINALLY, except);
3054
ADDOP(c, loc, GET_ANEXT);
3055
ADDOP_LOAD_CONST(c, loc, Py_None);
3056
ADD_YIELD_FROM(c, loc, 1);
3057
ADDOP(c, loc, POP_BLOCK); /* for SETUP_FINALLY */
3058
3059
/* Success block for __anext__ */
3060
VISIT(c, expr, s->v.AsyncFor.target);
3061
VISIT_SEQ(c, stmt, s->v.AsyncFor.body);
3062
/* Mark jump as artificial */
3063
ADDOP_JUMP(c, NO_LOCATION, JUMP, start);
3064
3065
compiler_pop_fblock(c, FOR_LOOP, start);
3066
3067
/* Except block for __anext__ */
3068
USE_LABEL(c, except);
3069
3070
/* Use same line number as the iterator,
3071
* as the END_ASYNC_FOR succeeds the `for`, not the body. */
3072
loc = LOC(s->v.AsyncFor.iter);
3073
ADDOP(c, loc, END_ASYNC_FOR);
3074
3075
/* `else` block */
3076
VISIT_SEQ(c, stmt, s->v.For.orelse);
3077
3078
USE_LABEL(c, end);
3079
return SUCCESS;
3080
}
3081
3082
/* Compile a while statement: the test is emitted twice (loop entry and a
 * bottom-of-loop re-test jumping back to `body`), so each iteration needs
 * only one conditional jump. Returns SUCCESS or ERROR. */
static int
compiler_while(struct compiler *c, stmt_ty s)
{
    NEW_JUMP_TARGET_LABEL(c, loop);
    NEW_JUMP_TARGET_LABEL(c, body);
    NEW_JUMP_TARGET_LABEL(c, end);
    NEW_JUMP_TARGET_LABEL(c, anchor);

    USE_LABEL(c, loop);

    /* Register the loop so break jumps to `end`, continue to `loop`. */
    RETURN_IF_ERROR(compiler_push_fblock(c, LOC(s), WHILE_LOOP, loop, end, NULL));
    RETURN_IF_ERROR(compiler_jump_if(c, LOC(s), s->v.While.test, anchor, 0));

    USE_LABEL(c, body);
    VISIT_SEQ(c, stmt, s->v.While.body);
    RETURN_IF_ERROR(compiler_jump_if(c, LOC(s), s->v.While.test, body, 1));

    compiler_pop_fblock(c, WHILE_LOOP, loop);

    USE_LABEL(c, anchor);
    /* The orelse runs only when the test fails normally (not on break). */
    if (s->v.While.orelse) {
        VISIT_SEQ(c, stmt, s->v.While.orelse);
    }

    USE_LABEL(c, end);
    return SUCCESS;
}
3109
3110
/* Compile a return statement: validate context (function-like scope, no
 * valued return in an async generator), evaluate or load the value, unwind
 * any enclosing finally/with blocks, then emit RETURN_VALUE.
 * Returns SUCCESS or ERROR. */
static int
compiler_return(struct compiler *c, stmt_ty s)
{
    location loc = LOC(s);
    /* A non-constant value must be computed *before* unwinding the fblock
     * stack, and kept on the stack across the unwind. */
    int preserve_tos = ((s->v.Return.value != NULL) &&
                        (s->v.Return.value->kind != Constant_kind));
    if (!_PyST_IsFunctionLike(c->u->u_ste)) {
        return compiler_error(c, loc, "'return' outside function");
    }
    if (s->v.Return.value != NULL &&
        c->u->u_ste->ste_coroutine && c->u->u_ste->ste_generator)
    {
        return compiler_error(c, loc, "'return' with value in async generator");
    }

    if (preserve_tos) {
        VISIT(c, expr, s->v.Return.value);
    } else {
        /* Emit instruction with line number for return value */
        if (s->v.Return.value != NULL) {
            loc = LOC(s->v.Return.value);
            ADDOP(c, loc, NOP);
        }
    }
    if (s->v.Return.value == NULL || s->v.Return.value->lineno != s->lineno) {
        loc = LOC(s);
        ADDOP(c, loc, NOP);
    }

    RETURN_IF_ERROR(compiler_unwind_fblock_stack(c, &loc, preserve_tos, NULL));
    if (s->v.Return.value == NULL) {
        ADDOP_LOAD_CONST(c, loc, Py_None);
    }
    else if (!preserve_tos) {
        /* Constant return values are loaded after the unwind. */
        ADDOP_LOAD_CONST(c, loc, s->v.Return.value->v.Constant.value);
    }
    ADDOP(c, loc, RETURN_VALUE);

    return SUCCESS;
}
3150
3151
/* Compile a break statement: unwind the fblock stack to the innermost
 * loop (error if none), run that loop's own cleanup, then jump to its
 * exit label. Returns SUCCESS or ERROR. */
static int
compiler_break(struct compiler *c, location loc)
{
    struct fblockinfo *loop = NULL;
    location origin_loc = loc;
    /* Emit instruction with line number */
    ADDOP(c, loc, NOP);
    RETURN_IF_ERROR(compiler_unwind_fblock_stack(c, &loc, 0, &loop));
    if (loop == NULL) {
        return compiler_error(c, origin_loc, "'break' outside loop");
    }
    RETURN_IF_ERROR(compiler_unwind_fblock(c, &loc, loop, 0));
    ADDOP_JUMP(c, loc, JUMP, loop->fb_exit);
    return SUCCESS;
}
3166
3167
/* Compile a continue statement: unwind the fblock stack to the innermost
 * loop (error if none) and jump back to the loop's start label. Unlike
 * break, the loop's own fblock is not unwound. Returns SUCCESS or ERROR. */
static int
compiler_continue(struct compiler *c, location loc)
{
    struct fblockinfo *loop = NULL;
    location origin_loc = loc;
    /* Emit instruction with line number */
    ADDOP(c, loc, NOP);
    RETURN_IF_ERROR(compiler_unwind_fblock_stack(c, &loc, 0, &loop));
    if (loop == NULL) {
        return compiler_error(c, origin_loc, "'continue' not properly in loop");
    }
    ADDOP_JUMP(c, loc, JUMP, loop->fb_block);
    return SUCCESS;
}
3181
3182
3183
static location
3184
location_of_last_executing_statement(asdl_stmt_seq *stmts)
3185
{
3186
for (Py_ssize_t i = asdl_seq_LEN(stmts) - 1; i >= 0; i++) {
3187
location loc = LOC((stmt_ty)asdl_seq_GET(stmts, i));
3188
if (loc.lineno > 0) {
3189
return loc;
3190
}
3191
}
3192
return NO_LOCATION;
3193
}
3194
3195
/* Code generated for "try: <body> finally: <finalbody>" is as follows:
3196
3197
SETUP_FINALLY L
3198
<code for body>
3199
POP_BLOCK
3200
<code for finalbody>
3201
JUMP E
3202
L:
3203
<code for finalbody>
3204
E:
3205
3206
The special instructions use the block stack. Each block
3207
stack entry contains the instruction that created it (here
3208
SETUP_FINALLY), the level of the value stack at the time the
3209
block stack entry was created, and a label (here L).
3210
3211
SETUP_FINALLY:
3212
Pushes the current value stack level and the label
3213
onto the block stack.
3214
POP_BLOCK:
3215
Pops an entry from the block stack.
3216
3217
The block stack is unwound when an exception is raised:
3218
when a SETUP_FINALLY entry is found, the raised and the caught
3219
exceptions are pushed onto the value stack (and the exception
3220
condition is cleared), and the interpreter jumps to the label
3221
gotten from the block stack.
3222
*/
3223
3224
/* Compile "try: <body> [except ...] finally: <finalbody>".
   The finally body is emitted twice: once inline for the normal
   (no-exception) path, and once under the SETUP_FINALLY handler for
   the exceptional path, followed by RERAISE. */
static int
compiler_try_finally(struct compiler *c, stmt_ty s)
{
    location loc = LOC(s);

    NEW_JUMP_TARGET_LABEL(c, body);
    NEW_JUMP_TARGET_LABEL(c, end);
    NEW_JUMP_TARGET_LABEL(c, exit);
    NEW_JUMP_TARGET_LABEL(c, cleanup);

    /* `try` block */
    ADDOP_JUMP(c, loc, SETUP_FINALLY, end);

    USE_LABEL(c, body);
    RETURN_IF_ERROR(
        compiler_push_fblock(c, loc, FINALLY_TRY, body, end,
                             s->v.Try.finalbody));

    /* If there are except handlers, compile the try/except part and let
       it nest inside this finally; otherwise just compile the body. */
    if (s->v.Try.handlers && asdl_seq_LEN(s->v.Try.handlers)) {
        RETURN_IF_ERROR(compiler_try_except(c, s));
    }
    else {
        VISIT_SEQ(c, stmt, s->v.Try.body);
    }
    ADDOP(c, NO_LOCATION, POP_BLOCK);
    compiler_pop_fblock(c, FINALLY_TRY, body);
    /* First (inline) copy of the finally body: normal completion path. */
    VISIT_SEQ(c, stmt, s->v.Try.finalbody);

    ADDOP_JUMP(c, NO_LOCATION, JUMP, exit);
    /* `finally` block */

    USE_LABEL(c, end);

    loc = NO_LOCATION;
    ADDOP_JUMP(c, loc, SETUP_CLEANUP, cleanup);
    ADDOP(c, loc, PUSH_EXC_INFO);
    RETURN_IF_ERROR(
        compiler_push_fblock(c, loc, FINALLY_END, end, NO_LABEL, NULL));
    /* Second copy of the finally body: exceptional path. */
    VISIT_SEQ(c, stmt, s->v.Try.finalbody);
    /* Attribute the RERAISE to the last real line of the finally body. */
    loc = location_of_last_executing_statement(s->v.Try.finalbody);
    compiler_pop_fblock(c, FINALLY_END, end);

    ADDOP_I(c, loc, RERAISE, 0);

    USE_LABEL(c, cleanup);
    POP_EXCEPT_AND_RERAISE(c, loc);

    USE_LABEL(c, exit);
    return SUCCESS;
}
3274
3275
static int
3276
compiler_try_star_finally(struct compiler *c, stmt_ty s)
3277
{
3278
location loc = LOC(s);
3279
3280
NEW_JUMP_TARGET_LABEL(c, body);
3281
NEW_JUMP_TARGET_LABEL(c, end);
3282
NEW_JUMP_TARGET_LABEL(c, exit);
3283
NEW_JUMP_TARGET_LABEL(c, cleanup);
3284
/* `try` block */
3285
ADDOP_JUMP(c, loc, SETUP_FINALLY, end);
3286
3287
USE_LABEL(c, body);
3288
RETURN_IF_ERROR(
3289
compiler_push_fblock(c, loc, FINALLY_TRY, body, end,
3290
s->v.TryStar.finalbody));
3291
3292
if (s->v.TryStar.handlers && asdl_seq_LEN(s->v.TryStar.handlers)) {
3293
RETURN_IF_ERROR(compiler_try_star_except(c, s));
3294
}
3295
else {
3296
VISIT_SEQ(c, stmt, s->v.TryStar.body);
3297
}
3298
ADDOP(c, NO_LOCATION, POP_BLOCK);
3299
compiler_pop_fblock(c, FINALLY_TRY, body);
3300
VISIT_SEQ(c, stmt, s->v.TryStar.finalbody);
3301
3302
ADDOP_JUMP(c, NO_LOCATION, JUMP, exit);
3303
3304
/* `finally` block */
3305
USE_LABEL(c, end);
3306
3307
loc = NO_LOCATION;
3308
ADDOP_JUMP(c, loc, SETUP_CLEANUP, cleanup);
3309
ADDOP(c, loc, PUSH_EXC_INFO);
3310
RETURN_IF_ERROR(
3311
compiler_push_fblock(c, loc, FINALLY_END, end, NO_LABEL, NULL));
3312
3313
VISIT_SEQ(c, stmt, s->v.TryStar.finalbody);
3314
loc = location_of_last_executing_statement(s->v.Try.finalbody);
3315
3316
compiler_pop_fblock(c, FINALLY_END, end);
3317
ADDOP_I(c, loc, RERAISE, 0);
3318
3319
USE_LABEL(c, cleanup);
3320
POP_EXCEPT_AND_RERAISE(c, loc);
3321
3322
USE_LABEL(c, exit);
3323
return SUCCESS;
3324
}
3325
3326
3327
/*
3328
Code generated for "try: S except E1 as V1: S1 except E2 as V2: S2 ...":
3329
(The contents of the value stack is shown in [], with the top
3330
at the right; 'tb' is trace-back info, 'val' the exception's
3331
associated value, and 'exc' the exception.)
3332
3333
Value stack Label Instruction Argument
3334
[] SETUP_FINALLY L1
3335
[] <code for S>
3336
[] POP_BLOCK
3337
[] JUMP L0
3338
3339
[exc] L1: <evaluate E1> )
3340
[exc, E1] CHECK_EXC_MATCH )
3341
[exc, bool] POP_JUMP_IF_FALSE L2 ) only if E1
3342
[exc] <assign to V1> (or POP if no V1)
3343
[] <code for S1>
3344
JUMP L0
3345
3346
[exc] L2: <evaluate E2>
3347
.............................etc.......................
3348
3349
[exc] Ln+1: RERAISE # re-raise exception
3350
3351
[] L0: <next statement>
3352
3353
Of course, parts are not generated if Vi or Ei is not present.
3354
*/
3355
/* Compile "try: S except E1 as V1: S1 except E2 as V2: S2 ..." (no
   finally clause at this level). See the block comment above for the
   generated instruction layout. Each handler that binds a name compiles
   as a nested try/finally that sets the name to None and deletes it on
   the way out, so the exception does not keep frames alive. */
static int
compiler_try_except(struct compiler *c, stmt_ty s)
{
    location loc = LOC(s);
    Py_ssize_t i, n;

    NEW_JUMP_TARGET_LABEL(c, body);
    NEW_JUMP_TARGET_LABEL(c, except);
    NEW_JUMP_TARGET_LABEL(c, end);
    NEW_JUMP_TARGET_LABEL(c, cleanup);

    ADDOP_JUMP(c, loc, SETUP_FINALLY, except);

    USE_LABEL(c, body);
    RETURN_IF_ERROR(
        compiler_push_fblock(c, loc, TRY_EXCEPT, body, NO_LABEL, NULL));
    VISIT_SEQ(c, stmt, s->v.Try.body);
    compiler_pop_fblock(c, TRY_EXCEPT, body);
    ADDOP(c, NO_LOCATION, POP_BLOCK);
    /* The else clause runs only when the body completed without raising. */
    if (s->v.Try.orelse && asdl_seq_LEN(s->v.Try.orelse)) {
        VISIT_SEQ(c, stmt, s->v.Try.orelse);
    }
    ADDOP_JUMP(c, NO_LOCATION, JUMP, end);
    n = asdl_seq_LEN(s->v.Try.handlers);

    USE_LABEL(c, except);

    ADDOP_JUMP(c, NO_LOCATION, SETUP_CLEANUP, cleanup);
    ADDOP(c, NO_LOCATION, PUSH_EXC_INFO);

    /* Runtime will push a block here, so we need to account for that */
    RETURN_IF_ERROR(
        compiler_push_fblock(c, loc, EXCEPTION_HANDLER, NO_LABEL, NO_LABEL, NULL));

    for (i = 0; i < n; i++) {
        excepthandler_ty handler = (excepthandler_ty)asdl_seq_GET(
            s->v.Try.handlers, i);
        location loc = LOC(handler);
        /* A bare 'except:' matches everything, so anything after it
           would be unreachable. */
        if (!handler->v.ExceptHandler.type && i < n-1) {
            return compiler_error(c, loc, "default 'except:' must be last");
        }
        NEW_JUMP_TARGET_LABEL(c, next_except);
        except = next_except;
        if (handler->v.ExceptHandler.type) {
            VISIT(c, expr, handler->v.ExceptHandler.type);
            ADDOP(c, loc, CHECK_EXC_MATCH);
            /* No match: fall through to the next handler's test. */
            ADDOP_JUMP(c, loc, POP_JUMP_IF_FALSE, except);
        }
        if (handler->v.ExceptHandler.name) {
            NEW_JUMP_TARGET_LABEL(c, cleanup_end);
            NEW_JUMP_TARGET_LABEL(c, cleanup_body);

            RETURN_IF_ERROR(
                compiler_nameop(c, loc, handler->v.ExceptHandler.name, Store));

            /*
              try:
                  # body
              except type as name:
                  try:
                      # body
                  finally:
                      name = None # in case body contains "del name"
                      del name
            */

            /* second try: */
            ADDOP_JUMP(c, loc, SETUP_CLEANUP, cleanup_end);

            USE_LABEL(c, cleanup_body);
            RETURN_IF_ERROR(
                compiler_push_fblock(c, loc, HANDLER_CLEANUP, cleanup_body,
                                     NO_LABEL, handler->v.ExceptHandler.name));

            /* second # body */
            VISIT_SEQ(c, stmt, handler->v.ExceptHandler.body);
            compiler_pop_fblock(c, HANDLER_CLEANUP, cleanup_body);
            /* name = None; del name; # Mark as artificial */
            ADDOP(c, NO_LOCATION, POP_BLOCK);
            ADDOP(c, NO_LOCATION, POP_BLOCK);
            ADDOP(c, NO_LOCATION, POP_EXCEPT);
            ADDOP_LOAD_CONST(c, NO_LOCATION, Py_None);
            RETURN_IF_ERROR(
                compiler_nameop(c, NO_LOCATION, handler->v.ExceptHandler.name, Store));
            RETURN_IF_ERROR(
                compiler_nameop(c, NO_LOCATION, handler->v.ExceptHandler.name, Del));
            ADDOP_JUMP(c, NO_LOCATION, JUMP, end);

            /* except: */
            USE_LABEL(c, cleanup_end);

            /* name = None; del name; # artificial */
            ADDOP_LOAD_CONST(c, NO_LOCATION, Py_None);
            RETURN_IF_ERROR(
                compiler_nameop(c, NO_LOCATION, handler->v.ExceptHandler.name, Store));
            RETURN_IF_ERROR(
                compiler_nameop(c, NO_LOCATION, handler->v.ExceptHandler.name, Del));

            ADDOP_I(c, NO_LOCATION, RERAISE, 1);
        }
        else {
            NEW_JUMP_TARGET_LABEL(c, cleanup_body);

            ADDOP(c, loc, POP_TOP); /* exc_value */

            USE_LABEL(c, cleanup_body);
            RETURN_IF_ERROR(
                compiler_push_fblock(c, loc, HANDLER_CLEANUP, cleanup_body,
                                     NO_LABEL, NULL));

            VISIT_SEQ(c, stmt, handler->v.ExceptHandler.body);
            compiler_pop_fblock(c, HANDLER_CLEANUP, cleanup_body);
            ADDOP(c, NO_LOCATION, POP_BLOCK);
            ADDOP(c, NO_LOCATION, POP_EXCEPT);
            ADDOP_JUMP(c, NO_LOCATION, JUMP, end);
        }

        USE_LABEL(c, except);
    }
    /* artificial */
    /* No handler matched: re-raise the original exception. */
    compiler_pop_fblock(c, EXCEPTION_HANDLER, NO_LABEL);
    ADDOP_I(c, NO_LOCATION, RERAISE, 0);

    USE_LABEL(c, cleanup);
    POP_EXCEPT_AND_RERAISE(c, NO_LOCATION);

    USE_LABEL(c, end);
    return SUCCESS;
}
3484
3485
/*
3486
Code generated for "try: S except* E1 as V1: S1 except* E2 as V2: S2 ...":
3487
(The contents of the value stack is shown in [], with the top
3488
at the right; 'tb' is trace-back info, 'val' the exception instance,
3489
and 'typ' the exception's type.)
3490
3491
Value stack Label Instruction Argument
3492
[] SETUP_FINALLY L1
3493
[] <code for S>
3494
[] POP_BLOCK
3495
[] JUMP L0
3496
3497
[exc] L1: BUILD_LIST ) list for raised/reraised excs ("result")
3498
[orig, res] COPY 2 ) make a copy of the original EG
3499
3500
[orig, res, exc] <evaluate E1>
3501
[orig, res, exc, E1] CHECK_EG_MATCH
3502
[orig, res, rest/exc, match?] COPY 1
3503
[orig, res, rest/exc, match?, match?] POP_JUMP_IF_NONE C1
3504
3505
[orig, res, rest, match] <assign to V1> (or POP if no V1)
3506
3507
[orig, res, rest] SETUP_FINALLY R1
3508
[orig, res, rest] <code for S1>
3509
[orig, res, rest] JUMP L2
3510
3511
[orig, res, rest, i, v] R1: LIST_APPEND 3 ) exc raised in except* body - add to res
3512
[orig, res, rest, i] POP
3513
[orig, res, rest] JUMP LE2
3514
3515
[orig, res, rest] L2: NOP ) for lineno
3516
[orig, res, rest] JUMP LE2
3517
3518
[orig, res, rest/exc, None] C1: POP
3519
3520
[orig, res, rest] LE2: <evaluate E2>
3521
.............................etc.......................
3522
3523
[orig, res, rest] Ln+1: LIST_APPEND 1 ) add unhandled exc to res (could be None)
3524
3525
[orig, res] CALL_INTRINSIC_2 PREP_RERAISE_STAR
3526
[exc] COPY 1
3527
[exc, exc] POP_JUMP_IF_NOT_NONE RER
3528
[exc] POP_TOP
3529
[] JUMP L0
3530
3531
[exc] RER: SWAP 2
3532
[exc, prev_exc_info] POP_EXCEPT
3533
[exc] RERAISE 0
3534
3535
[] L0: <next statement>
3536
*/
3537
/* Compile "try: S except* E1 as V1: S1 ...". See the block comment above
   for the generated instruction layout. Unlike plain except, every
   handler is tried against the exception group (CHECK_EG_MATCH splits
   off the matching part); exceptions raised inside handler bodies and
   the unhandled remainder are collected in a list and merged by
   CALL_INTRINSIC_2 PREP_RERAISE_STAR at the end. */
static int
compiler_try_star_except(struct compiler *c, stmt_ty s)
{
    location loc = LOC(s);

    NEW_JUMP_TARGET_LABEL(c, body);
    NEW_JUMP_TARGET_LABEL(c, except);
    NEW_JUMP_TARGET_LABEL(c, orelse);
    NEW_JUMP_TARGET_LABEL(c, end);
    NEW_JUMP_TARGET_LABEL(c, cleanup);
    NEW_JUMP_TARGET_LABEL(c, reraise_star);

    ADDOP_JUMP(c, loc, SETUP_FINALLY, except);

    USE_LABEL(c, body);
    RETURN_IF_ERROR(
        compiler_push_fblock(c, loc, TRY_EXCEPT, body, NO_LABEL, NULL));
    VISIT_SEQ(c, stmt, s->v.TryStar.body);
    compiler_pop_fblock(c, TRY_EXCEPT, body);
    ADDOP(c, NO_LOCATION, POP_BLOCK);
    ADDOP_JUMP(c, NO_LOCATION, JUMP, orelse);
    Py_ssize_t n = asdl_seq_LEN(s->v.TryStar.handlers);

    USE_LABEL(c, except);

    ADDOP_JUMP(c, NO_LOCATION, SETUP_CLEANUP, cleanup);
    ADDOP(c, NO_LOCATION, PUSH_EXC_INFO);

    /* Runtime will push a block here, so we need to account for that */
    RETURN_IF_ERROR(
        compiler_push_fblock(c, loc, EXCEPTION_GROUP_HANDLER,
                             NO_LABEL, NO_LABEL, "except handler"));

    for (Py_ssize_t i = 0; i < n; i++) {
        excepthandler_ty handler = (excepthandler_ty)asdl_seq_GET(
            s->v.TryStar.handlers, i);
        location loc = LOC(handler);
        NEW_JUMP_TARGET_LABEL(c, next_except);
        except = next_except;
        NEW_JUMP_TARGET_LABEL(c, except_with_error);
        NEW_JUMP_TARGET_LABEL(c, no_match);
        if (i == 0) {
            /* create empty list for exceptions raised/reraise in the except* blocks */
            /*
               [orig]       BUILD_LIST
            */
            /* Create a copy of the original EG */
            /*
               [orig, []]   COPY 2
               [orig, [], exc]
            */
            ADDOP_I(c, loc, BUILD_LIST, 0);
            ADDOP_I(c, loc, COPY, 2);
        }
        if (handler->v.ExceptHandler.type) {
            VISIT(c, expr, handler->v.ExceptHandler.type);
            /* Splits the group: leaves [rest, match-or-None] on the stack. */
            ADDOP(c, loc, CHECK_EG_MATCH);
            ADDOP_I(c, loc, COPY, 1);
            ADDOP_JUMP(c, loc, POP_JUMP_IF_NONE, no_match);
        }

        NEW_JUMP_TARGET_LABEL(c, cleanup_end);
        NEW_JUMP_TARGET_LABEL(c, cleanup_body);

        if (handler->v.ExceptHandler.name) {
            RETURN_IF_ERROR(
                compiler_nameop(c, loc, handler->v.ExceptHandler.name, Store));
        }
        else {
            ADDOP(c, loc, POP_TOP); // match
        }

        /*
          try:
              # body
          except type as name:
              try:
                  # body
              finally:
                  name = None # in case body contains "del name"
                  del name
        */
        /* second try: */
        ADDOP_JUMP(c, loc, SETUP_CLEANUP, cleanup_end);

        USE_LABEL(c, cleanup_body);
        RETURN_IF_ERROR(
            compiler_push_fblock(c, loc, HANDLER_CLEANUP, cleanup_body,
                                 NO_LABEL, handler->v.ExceptHandler.name));

        /* second # body */
        VISIT_SEQ(c, stmt, handler->v.ExceptHandler.body);
        compiler_pop_fblock(c, HANDLER_CLEANUP, cleanup_body);
        /* name = None; del name; # artificial */
        ADDOP(c, NO_LOCATION, POP_BLOCK);
        if (handler->v.ExceptHandler.name) {
            ADDOP_LOAD_CONST(c, NO_LOCATION, Py_None);
            RETURN_IF_ERROR(
                compiler_nameop(c, NO_LOCATION, handler->v.ExceptHandler.name, Store));
            RETURN_IF_ERROR(
                compiler_nameop(c, NO_LOCATION, handler->v.ExceptHandler.name, Del));
        }
        ADDOP_JUMP(c, NO_LOCATION, JUMP, except);

        /* except: */
        USE_LABEL(c, cleanup_end);

        /* name = None; del name; # artificial */
        if (handler->v.ExceptHandler.name) {
            ADDOP_LOAD_CONST(c, NO_LOCATION, Py_None);
            RETURN_IF_ERROR(
                compiler_nameop(c, NO_LOCATION, handler->v.ExceptHandler.name, Store));
            RETURN_IF_ERROR(
                compiler_nameop(c, NO_LOCATION, handler->v.ExceptHandler.name, Del));
        }

        /* add exception raised to the res list */
        ADDOP_I(c, NO_LOCATION, LIST_APPEND, 3); // exc
        ADDOP(c, NO_LOCATION, POP_TOP); // lasti
        ADDOP_JUMP(c, NO_LOCATION, JUMP, except_with_error);

        USE_LABEL(c, except);
        ADDOP(c, NO_LOCATION, NOP); // to hold a propagated location info
        ADDOP_JUMP(c, NO_LOCATION, JUMP, except_with_error);

        USE_LABEL(c, no_match);
        ADDOP(c, loc, POP_TOP); // match (None)

        USE_LABEL(c, except_with_error);

        if (i == n - 1) {
            /* Add exc to the list (if not None it's the unhandled part of the EG) */
            ADDOP_I(c, NO_LOCATION, LIST_APPEND, 1);
            ADDOP_JUMP(c, NO_LOCATION, JUMP, reraise_star);
        }
    }
    /* artificial */
    compiler_pop_fblock(c, EXCEPTION_GROUP_HANDLER, NO_LABEL);
    NEW_JUMP_TARGET_LABEL(c, reraise);

    USE_LABEL(c, reraise_star);
    /* Merge collected exceptions with the original group; result is the
       exception to re-raise, or None if everything was handled. */
    ADDOP_I(c, NO_LOCATION, CALL_INTRINSIC_2, INTRINSIC_PREP_RERAISE_STAR);
    ADDOP_I(c, NO_LOCATION, COPY, 1);
    ADDOP_JUMP(c, NO_LOCATION, POP_JUMP_IF_NOT_NONE, reraise);

    /* Nothing to reraise */
    ADDOP(c, NO_LOCATION, POP_TOP);
    ADDOP(c, NO_LOCATION, POP_BLOCK);
    ADDOP(c, NO_LOCATION, POP_EXCEPT);
    ADDOP_JUMP(c, NO_LOCATION, JUMP, end);

    USE_LABEL(c, reraise);
    ADDOP(c, NO_LOCATION, POP_BLOCK);
    ADDOP_I(c, NO_LOCATION, SWAP, 2);
    ADDOP(c, NO_LOCATION, POP_EXCEPT);
    ADDOP_I(c, NO_LOCATION, RERAISE, 0);

    USE_LABEL(c, cleanup);
    POP_EXCEPT_AND_RERAISE(c, NO_LOCATION);

    USE_LABEL(c, orelse);
    VISIT_SEQ(c, stmt, s->v.TryStar.orelse);

    USE_LABEL(c, end);
    return SUCCESS;
}
3703
3704
static int
3705
compiler_try(struct compiler *c, stmt_ty s) {
3706
if (s->v.Try.finalbody && asdl_seq_LEN(s->v.Try.finalbody))
3707
return compiler_try_finally(c, s);
3708
else
3709
return compiler_try_except(c, s);
3710
}
3711
3712
static int
3713
compiler_try_star(struct compiler *c, stmt_ty s)
3714
{
3715
if (s->v.TryStar.finalbody && asdl_seq_LEN(s->v.TryStar.finalbody)) {
3716
return compiler_try_star_finally(c, s);
3717
}
3718
else {
3719
return compiler_try_star_except(c, s);
3720
}
3721
}
3722
3723
/* Bind the result of an already-emitted IMPORT_NAME to `asname`.
   For a dotted module path ("import a.b.c as d") we walk the attribute
   chain with IMPORT_FROM so `d` ends up bound to the leaf module. */
static int
compiler_import_as(struct compiler *c, location loc,
                   identifier name, identifier asname)
{
    /* The IMPORT_NAME opcode was already generated. This function
       merely needs to bind the result to a name.

       If there is a dot in name, we need to split it and emit a
       IMPORT_FROM for each name.
    */
    Py_ssize_t len = PyUnicode_GET_LENGTH(name);
    /* -2 from PyUnicode_FindChar signals an error (exception set). */
    Py_ssize_t dot = PyUnicode_FindChar(name, '.', 0, len, 1);
    if (dot == -2) {
        return ERROR;
    }
    if (dot != -1) {
        /* Consume the base module name to get the first attribute */
        while (1) {
            Py_ssize_t pos = dot + 1;
            PyObject *attr;
            dot = PyUnicode_FindChar(name, '.', pos, len, 1);
            if (dot == -2) {
                return ERROR;
            }
            /* The component between this dot and the next (or the end). */
            attr = PyUnicode_Substring(name, pos, (dot != -1) ? dot : len);
            if (!attr) {
                return ERROR;
            }
            /* NOTE(review): ADDOP_N appears to consume the reference to
               attr — confirm against the macro's definition. */
            ADDOP_N(c, loc, IMPORT_FROM, attr, names);
            if (dot == -1) {
                break;
            }
            /* Drop the parent module, keep the attribute just loaded. */
            ADDOP_I(c, loc, SWAP, 2);
            ADDOP(c, loc, POP_TOP);
        }
        RETURN_IF_ERROR(compiler_nameop(c, loc, asname, Store));
        ADDOP(c, loc, POP_TOP);
        return SUCCESS;
    }
    /* No dot: just store the module object under asname. */
    return compiler_nameop(c, loc, asname, Store);
}
3764
3765
/* Compile an `import a[, b.c as d, ...]` statement: one IMPORT_NAME per
   alias, then bind either the asname (via compiler_import_as) or the
   top-level package name. */
static int
compiler_import(struct compiler *c, stmt_ty s)
{
    location loc = LOC(s);
    /* The Import node stores a module name like a.b.c as a single
       string. This is convenient for all cases except
           import a.b.c as d
       where we need to parse that string to extract the individual
       module names.
       XXX Perhaps change the representation to make this case simpler?
    */
    Py_ssize_t i, n = asdl_seq_LEN(s->v.Import.names);

    PyObject *zero = _PyLong_GetZero(); // borrowed reference
    for (i = 0; i < n; i++) {
        alias_ty alias = (alias_ty)asdl_seq_GET(s->v.Import.names, i);
        int r;

        /* Push the two IMPORT_NAME operands: zero and None — presumably
           the import level (absolute) and the fromlist; confirm against
           the IMPORT_NAME implementation. */
        ADDOP_LOAD_CONST(c, loc, zero);
        ADDOP_LOAD_CONST(c, loc, Py_None);
        ADDOP_NAME(c, loc, IMPORT_NAME, alias->name, names);

        if (alias->asname) {
            r = compiler_import_as(c, loc, alias->name, alias->asname);
            RETURN_IF_ERROR(r);
        }
        else {
            identifier tmp = alias->name;
            /* Without an asname, only the first component is bound:
               "import a.b.c" binds the name "a". */
            Py_ssize_t dot = PyUnicode_FindChar(
                alias->name, '.', 0, PyUnicode_GET_LENGTH(alias->name), 1);
            if (dot != -1) {
                tmp = PyUnicode_Substring(alias->name, 0, dot);
                if (tmp == NULL) {
                    return ERROR;
                }
            }
            r = compiler_nameop(c, loc, tmp, Store);
            if (dot != -1) {
                Py_DECREF(tmp);
            }
            RETURN_IF_ERROR(r);
        }
    }
    return SUCCESS;
}
3810
3811
/* Compile a `from <module> import ...` statement: push level and the
   tuple of imported names, emit IMPORT_NAME, then IMPORT_FROM + store
   for each alias (or CALL_INTRINSIC_1 IMPORT_STAR for `import *`). */
static int
compiler_from_import(struct compiler *c, stmt_ty s)
{
    Py_ssize_t n = asdl_seq_LEN(s->v.ImportFrom.names);

    /* Relative-import level (number of leading dots). */
    ADDOP_LOAD_CONST_NEW(c, LOC(s), PyLong_FromLong(s->v.ImportFrom.level));

    PyObject *names = PyTuple_New(n);
    if (!names) {
        return ERROR;
    }

    /* build up the names */
    for (Py_ssize_t i = 0; i < n; i++) {
        alias_ty alias = (alias_ty)asdl_seq_GET(s->v.ImportFrom.names, i);
        PyTuple_SET_ITEM(names, i, Py_NewRef(alias->name));
    }

    /* __future__ imports are only legal before the position recorded by
       the future-statement pass (c_future.ff_location). */
    if (location_is_after(LOC(s), c->c_future.ff_location) &&
        s->v.ImportFrom.module &&
        _PyUnicode_EqualToASCIIString(s->v.ImportFrom.module, "__future__"))
    {
        Py_DECREF(names);
        return compiler_error(c, LOC(s), "from __future__ imports must occur "
                                         "at the beginning of the file");
    }
    ADDOP_LOAD_CONST_NEW(c, LOC(s), names);

    if (s->v.ImportFrom.module) {
        ADDOP_NAME(c, LOC(s), IMPORT_NAME, s->v.ImportFrom.module, names);
    }
    else {
        /* "from . import x": module name is the empty string. */
        _Py_DECLARE_STR(empty, "");
        ADDOP_NAME(c, LOC(s), IMPORT_NAME, &_Py_STR(empty), names);
    }
    for (Py_ssize_t i = 0; i < n; i++) {
        alias_ty alias = (alias_ty)asdl_seq_GET(s->v.ImportFrom.names, i);
        identifier store_name;

        /* `from m import *` — the parser guarantees '*' is the only name. */
        if (i == 0 && PyUnicode_READ_CHAR(alias->name, 0) == '*') {
            assert(n == 1);
            ADDOP_I(c, LOC(s), CALL_INTRINSIC_1, INTRINSIC_IMPORT_STAR);
            ADDOP(c, NO_LOCATION, POP_TOP);
            return SUCCESS;
        }

        ADDOP_NAME(c, LOC(s), IMPORT_FROM, alias->name, names);
        store_name = alias->name;
        if (alias->asname) {
            store_name = alias->asname;
        }

        RETURN_IF_ERROR(compiler_nameop(c, LOC(s), store_name, Store));
    }
    /* remove imported module */
    ADDOP(c, LOC(s), POP_TOP);
    return SUCCESS;
}
3869
3870
/* Compile an `assert test[, msg]` statement. Warns when the test is a
   non-empty tuple (always true — likely missing-comma bug), emits
   nothing beyond the warning when compiled with -O (c_optimize). */
static int
compiler_assert(struct compiler *c, stmt_ty s)
{
    /* Always emit a warning if the test is a non-zero length tuple */
    if ((s->v.Assert.test->kind == Tuple_kind &&
         asdl_seq_LEN(s->v.Assert.test->v.Tuple.elts) > 0) ||
        (s->v.Assert.test->kind == Constant_kind &&
         PyTuple_Check(s->v.Assert.test->v.Constant.value) &&
         PyTuple_Size(s->v.Assert.test->v.Constant.value) > 0))
    {
        RETURN_IF_ERROR(
            compiler_warn(c, LOC(s), "assertion is always true, "
                                     "perhaps remove parentheses?"));
    }
    if (c->c_optimize) {
        return SUCCESS;
    }
    NEW_JUMP_TARGET_LABEL(c, end);
    /* Jump over the raise when the test is true. */
    RETURN_IF_ERROR(compiler_jump_if(c, LOC(s), s->v.Assert.test, end, 1));
    ADDOP(c, LOC(s), LOAD_ASSERTION_ERROR);
    if (s->v.Assert.msg) {
        /* AssertionError(msg) */
        VISIT(c, expr, s->v.Assert.msg);
        ADDOP_I(c, LOC(s), CALL, 0);
    }
    ADDOP_I(c, LOC(s), RAISE_VARARGS, 1);

    USE_LABEL(c, end);
    return SUCCESS;
}
3899
3900
/* Compile an expression statement: in the interactive top level the
   value is displayed (PRINT intrinsic); bare constants compile to a
   NOP; otherwise evaluate and discard the value. */
static int
compiler_stmt_expr(struct compiler *c, location loc, expr_ty value)
{
    if (c->c_interactive && c->c_nestlevel <= 1) {
        VISIT(c, expr, value);
        ADDOP_I(c, loc, CALL_INTRINSIC_1, INTRINSIC_PRINT);
        ADDOP(c, NO_LOCATION, POP_TOP);
        return SUCCESS;
    }

    if (value->kind == Constant_kind) {
        /* ignore constant statement */
        ADDOP(c, loc, NOP);
        return SUCCESS;
    }

    VISIT(c, expr, value);
    ADDOP(c, NO_LOCATION, POP_TOP); /* artificial */
    return SUCCESS;
}
3920
3921
/* Compile a single statement node: dispatch on the AST statement kind.
   Most kinds delegate to a dedicated compiler_* helper; small ones
   (Delete, Assign, Raise, Pass, Global/Nonlocal) are handled inline. */
static int
compiler_visit_stmt(struct compiler *c, stmt_ty s)
{

    switch (s->kind) {
    case FunctionDef_kind:
        return compiler_function(c, s, 0);
    case ClassDef_kind:
        return compiler_class(c, s);
    case TypeAlias_kind:
        return compiler_typealias(c, s);
    case Return_kind:
        return compiler_return(c, s);
    case Delete_kind:
        VISIT_SEQ(c, expr, s->v.Delete.targets)
        break;
    case Assign_kind:
    {
        /* a = b = c = value: duplicate the value for all but the last
           target, then store into each target in order. */
        Py_ssize_t n = asdl_seq_LEN(s->v.Assign.targets);
        VISIT(c, expr, s->v.Assign.value);
        for (Py_ssize_t i = 0; i < n; i++) {
            if (i < n - 1) {
                ADDOP_I(c, LOC(s), COPY, 1);
            }
            VISIT(c, expr,
                  (expr_ty)asdl_seq_GET(s->v.Assign.targets, i));
        }
        break;
    }
    case AugAssign_kind:
        return compiler_augassign(c, s);
    case AnnAssign_kind:
        return compiler_annassign(c, s);
    case For_kind:
        return compiler_for(c, s);
    case While_kind:
        return compiler_while(c, s);
    case If_kind:
        return compiler_if(c, s);
    case Match_kind:
        return compiler_match(c, s);
    case Raise_kind:
    {
        /* n is the RAISE_VARARGS argc: 0 = re-raise, 1 = exc,
           2 = exc from cause. */
        Py_ssize_t n = 0;
        if (s->v.Raise.exc) {
            VISIT(c, expr, s->v.Raise.exc);
            n++;
            if (s->v.Raise.cause) {
                VISIT(c, expr, s->v.Raise.cause);
                n++;
            }
        }
        ADDOP_I(c, LOC(s), RAISE_VARARGS, (int)n);
        break;
    }
    case Try_kind:
        return compiler_try(c, s);
    case TryStar_kind:
        return compiler_try_star(c, s);
    case Assert_kind:
        return compiler_assert(c, s);
    case Import_kind:
        return compiler_import(c, s);
    case ImportFrom_kind:
        return compiler_from_import(c, s);
    case Global_kind:
    case Nonlocal_kind:
        /* Handled entirely by the symbol table pass; no code emitted. */
        break;
    case Expr_kind:
    {
        return compiler_stmt_expr(c, LOC(s), s->v.Expr.value);
    }
    case Pass_kind:
    {
        ADDOP(c, LOC(s), NOP);
        break;
    }
    case Break_kind:
    {
        return compiler_break(c, LOC(s));
    }
    case Continue_kind:
    {
        return compiler_continue(c, LOC(s));
    }
    case With_kind:
        return compiler_with(c, s, 0);
    case AsyncFunctionDef_kind:
        return compiler_function(c, s, 1);
    case AsyncWith_kind:
        return compiler_async_with(c, s, 0);
    case AsyncFor_kind:
        return compiler_async_for(c, s);
    }

    return SUCCESS;
}
4018
4019
static int
4020
unaryop(unaryop_ty op)
4021
{
4022
switch (op) {
4023
case Invert:
4024
return UNARY_INVERT;
4025
case USub:
4026
return UNARY_NEGATIVE;
4027
default:
4028
PyErr_Format(PyExc_SystemError,
4029
"unary op %d should not be possible", op);
4030
return 0;
4031
}
4032
}
4033
4034
static int
4035
addop_binary(struct compiler *c, location loc, operator_ty binop,
4036
bool inplace)
4037
{
4038
int oparg;
4039
switch (binop) {
4040
case Add:
4041
oparg = inplace ? NB_INPLACE_ADD : NB_ADD;
4042
break;
4043
case Sub:
4044
oparg = inplace ? NB_INPLACE_SUBTRACT : NB_SUBTRACT;
4045
break;
4046
case Mult:
4047
oparg = inplace ? NB_INPLACE_MULTIPLY : NB_MULTIPLY;
4048
break;
4049
case MatMult:
4050
oparg = inplace ? NB_INPLACE_MATRIX_MULTIPLY : NB_MATRIX_MULTIPLY;
4051
break;
4052
case Div:
4053
oparg = inplace ? NB_INPLACE_TRUE_DIVIDE : NB_TRUE_DIVIDE;
4054
break;
4055
case Mod:
4056
oparg = inplace ? NB_INPLACE_REMAINDER : NB_REMAINDER;
4057
break;
4058
case Pow:
4059
oparg = inplace ? NB_INPLACE_POWER : NB_POWER;
4060
break;
4061
case LShift:
4062
oparg = inplace ? NB_INPLACE_LSHIFT : NB_LSHIFT;
4063
break;
4064
case RShift:
4065
oparg = inplace ? NB_INPLACE_RSHIFT : NB_RSHIFT;
4066
break;
4067
case BitOr:
4068
oparg = inplace ? NB_INPLACE_OR : NB_OR;
4069
break;
4070
case BitXor:
4071
oparg = inplace ? NB_INPLACE_XOR : NB_XOR;
4072
break;
4073
case BitAnd:
4074
oparg = inplace ? NB_INPLACE_AND : NB_AND;
4075
break;
4076
case FloorDiv:
4077
oparg = inplace ? NB_INPLACE_FLOOR_DIVIDE : NB_FLOOR_DIVIDE;
4078
break;
4079
default:
4080
PyErr_Format(PyExc_SystemError, "%s op %d should not be possible",
4081
inplace ? "inplace" : "binary", binop);
4082
return ERROR;
4083
}
4084
ADDOP_I(c, loc, BINARY_OP, oparg);
4085
return SUCCESS;
4086
}
4087
4088
4089
static int
4090
addop_yield(struct compiler *c, location loc) {
4091
if (c->u->u_ste->ste_generator && c->u->u_ste->ste_coroutine) {
4092
ADDOP_I(c, loc, CALL_INTRINSIC_1, INTRINSIC_ASYNC_GEN_WRAP);
4093
}
4094
ADDOP_I(c, loc, YIELD_VALUE, 0);
4095
ADDOP_I(c, loc, RESUME, 1);
4096
return SUCCESS;
4097
}
4098
4099
/* Emit the load/store/delete instruction for a (possibly mangled) name,
   choosing among fast-local, cell/free, global and generic-name opcodes
   based on the symbol table scope. Class bodies and inlined
   comprehensions get the special LOAD_FROM_DICT_OR_* forms. */
static int
compiler_nameop(struct compiler *c, location loc,
                identifier name, expr_context_ty ctx)
{
    int op, scope;
    Py_ssize_t arg;
    enum { OP_FAST, OP_GLOBAL, OP_DEREF, OP_NAME } optype;

    PyObject *dict = c->u->u_metadata.u_names;
    PyObject *mangled;

    /* The parser rewrites None/True/False into constants; they must
       never reach name resolution. */
    assert(!_PyUnicode_EqualToASCIIString(name, "None") &&
           !_PyUnicode_EqualToASCIIString(name, "True") &&
           !_PyUnicode_EqualToASCIIString(name, "False"));

    if (forbidden_name(c, loc, name, ctx)) {
        return ERROR;
    }

    /* Apply private-name mangling (_ClassName__attr) where applicable. */
    mangled = _Py_Mangle(c->u->u_private, name);
    if (!mangled) {
        return ERROR;
    }

    /* NOTE(review): the early `return ERROR;` paths below (after
       LOAD_LOCALS / classdict loads fail) do not release `mangled`;
       presumably acceptable because compilation aborts — confirm. */
    op = 0;
    optype = OP_NAME;
    scope = _PyST_GetScope(c->u->u_ste, mangled);
    switch (scope) {
    case FREE:
        dict = c->u->u_metadata.u_freevars;
        optype = OP_DEREF;
        break;
    case CELL:
        dict = c->u->u_metadata.u_cellvars;
        optype = OP_DEREF;
        break;
    case LOCAL:
        /* Only function-like scopes (or names hidden from fast locals)
           use LOAD_FAST et al.; module/class locals go through OP_NAME. */
        if (_PyST_IsFunctionLike(c->u->u_ste) ||
            (PyDict_GetItem(c->u->u_metadata.u_fasthidden, mangled) == Py_True))
            optype = OP_FAST;
        break;
    case GLOBAL_IMPLICIT:
        if (_PyST_IsFunctionLike(c->u->u_ste))
            optype = OP_GLOBAL;
        break;
    case GLOBAL_EXPLICIT:
        optype = OP_GLOBAL;
        break;
    default:
        /* scope can be 0 */
        break;
    }

    /* XXX Leave assert here, but handle __doc__ and the like better */
    assert(scope || PyUnicode_READ_CHAR(name, 0) == '_');

    switch (optype) {
    case OP_DEREF:
        switch (ctx) {
        case Load:
            if (c->u->u_ste->ste_type == ClassBlock && !c->u->u_in_inlined_comp) {
                op = LOAD_FROM_DICT_OR_DEREF;
                // First load the locals
                if (codegen_addop_noarg(INSTR_SEQUENCE(c), LOAD_LOCALS, loc) < 0) {
                    return ERROR;
                }
            }
            else if (c->u->u_ste->ste_can_see_class_scope) {
                op = LOAD_FROM_DICT_OR_DEREF;
                // First load the classdict
                if (compiler_addop_o(c->u, loc, LOAD_DEREF,
                                     c->u->u_metadata.u_freevars, &_Py_ID(__classdict__)) < 0) {
                    return ERROR;
                }
            }
            else {
                op = LOAD_DEREF;
            }
            break;
        case Store: op = STORE_DEREF; break;
        case Del: op = DELETE_DEREF; break;
        }
        break;
    case OP_FAST:
        switch (ctx) {
        case Load: op = LOAD_FAST; break;
        case Store: op = STORE_FAST; break;
        case Del: op = DELETE_FAST; break;
        }
        /* Fast locals are indexed through u_varnames, not `dict`. */
        ADDOP_N(c, loc, op, mangled, varnames);
        return SUCCESS;
    case OP_GLOBAL:
        switch (ctx) {
        case Load:
            if (c->u->u_ste->ste_can_see_class_scope && scope == GLOBAL_IMPLICIT) {
                op = LOAD_FROM_DICT_OR_GLOBALS;
                // First load the classdict
                if (compiler_addop_o(c->u, loc, LOAD_DEREF,
                                     c->u->u_metadata.u_freevars, &_Py_ID(__classdict__)) < 0) {
                    return ERROR;
                }
            } else {
                op = LOAD_GLOBAL;
            }
            break;
        case Store: op = STORE_GLOBAL; break;
        case Del: op = DELETE_GLOBAL; break;
        }
        break;
    case OP_NAME:
        switch (ctx) {
        case Load:
            op = (c->u->u_ste->ste_type == ClassBlock
                   && c->u->u_in_inlined_comp)
                ? LOAD_GLOBAL
                : LOAD_NAME;
            break;
        case Store: op = STORE_NAME; break;
        case Del: op = DELETE_NAME; break;
        }
        break;
    }

    assert(op);
    arg = dict_add_o(dict, mangled);
    Py_DECREF(mangled);
    if (arg < 0) {
        return ERROR;
    }
    /* LOAD_GLOBAL's oparg packs the name index in the high bits; the
       low bit is a flag (set during specialization). */
    if (op == LOAD_GLOBAL) {
        arg <<= 1;
    }
    return codegen_addop_i(INSTR_SEQUENCE(c), op, arg, loc);
}
4233
4234
static int
4235
compiler_boolop(struct compiler *c, expr_ty e)
4236
{
4237
int jumpi;
4238
Py_ssize_t i, n;
4239
asdl_expr_seq *s;
4240
4241
location loc = LOC(e);
4242
assert(e->kind == BoolOp_kind);
4243
if (e->v.BoolOp.op == And)
4244
jumpi = POP_JUMP_IF_FALSE;
4245
else
4246
jumpi = POP_JUMP_IF_TRUE;
4247
NEW_JUMP_TARGET_LABEL(c, end);
4248
s = e->v.BoolOp.values;
4249
n = asdl_seq_LEN(s) - 1;
4250
assert(n >= 0);
4251
for (i = 0; i < n; ++i) {
4252
VISIT(c, expr, (expr_ty)asdl_seq_GET(s, i));
4253
ADDOP_I(c, loc, COPY, 1);
4254
ADDOP(c, loc, TO_BOOL);
4255
ADDOP_JUMP(c, loc, jumpi, end);
4256
ADDOP(c, loc, POP_TOP);
4257
}
4258
VISIT(c, expr, (expr_ty)asdl_seq_GET(s, n));
4259
4260
USE_LABEL(c, end);
4261
return SUCCESS;
4262
}
4263
4264
/* Build a list/set/tuple/dict-keys collection from `elts`, handling
   starred elements. Parameters (all opcodes unless noted):
     pushed  - number of items already on the stack to include
     build   - BUILD_LIST / BUILD_SET / ... used to start the collection
     add     - per-item append opcode (LIST_APPEND / SET_ADD / ...)
     extend  - bulk opcode for unpacking a starred iterable
     tuple   - flag: convert the final list to a tuple. */
static int
starunpack_helper(struct compiler *c, location loc,
                  asdl_expr_seq *elts, int pushed,
                  int build, int add, int extend, int tuple)
{
    Py_ssize_t n = asdl_seq_LEN(elts);
    /* Constant folding: >2 all-constant items become a single tuple
       (frozenset for set displays) constant plus one extend. */
    if (n > 2 && are_all_items_const(elts, 0, n)) {
        PyObject *folded = PyTuple_New(n);
        if (folded == NULL) {
            return ERROR;
        }
        PyObject *val;
        for (Py_ssize_t i = 0; i < n; i++) {
            val = ((expr_ty)asdl_seq_GET(elts, i))->v.Constant.value;
            PyTuple_SET_ITEM(folded, i, Py_NewRef(val));
        }
        if (tuple && !pushed) {
            /* The whole display is one constant tuple. */
            ADDOP_LOAD_CONST_NEW(c, loc, folded);
        } else {
            if (add == SET_ADD) {
                Py_SETREF(folded, PyFrozenSet_New(folded));
                if (folded == NULL) {
                    return ERROR;
                }
            }
            ADDOP_I(c, loc, build, pushed);
            ADDOP_LOAD_CONST_NEW(c, loc, folded);
            ADDOP_I(c, loc, extend, 1);
            if (tuple) {
                ADDOP_I(c, loc, CALL_INTRINSIC_1, INTRINSIC_LIST_TO_TUPLE);
            }
        }
        return SUCCESS;
    }

    /* "big" displays are built incrementally to bound stack depth. */
    int big = n+pushed > STACK_USE_GUIDELINE;
    int seen_star = 0;
    for (Py_ssize_t i = 0; i < n; i++) {
        expr_ty elt = asdl_seq_GET(elts, i);
        if (elt->kind == Starred_kind) {
            seen_star = 1;
            break;
        }
    }
    if (!seen_star && !big) {
        /* Simple case: push every item, then build in one opcode. */
        for (Py_ssize_t i = 0; i < n; i++) {
            expr_ty elt = asdl_seq_GET(elts, i);
            VISIT(c, expr, elt);
        }
        if (tuple) {
            ADDOP_I(c, loc, BUILD_TUPLE, n+pushed);
        } else {
            ADDOP_I(c, loc, build, n+pushed);
        }
        return SUCCESS;
    }
    /* Incremental build: start the collection either up front (big) or
       lazily at the first starred element. */
    int sequence_built = 0;
    if (big) {
        ADDOP_I(c, loc, build, pushed);
        sequence_built = 1;
    }
    for (Py_ssize_t i = 0; i < n; i++) {
        expr_ty elt = asdl_seq_GET(elts, i);
        if (elt->kind == Starred_kind) {
            if (sequence_built == 0) {
                /* Collect the i plain items pushed so far. */
                ADDOP_I(c, loc, build, i+pushed);
                sequence_built = 1;
            }
            VISIT(c, expr, elt->v.Starred.value);
            ADDOP_I(c, loc, extend, 1);
        }
        else {
            VISIT(c, expr, elt);
            if (sequence_built) {
                ADDOP_I(c, loc, add, 1);
            }
        }
    }
    assert(sequence_built);
    if (tuple) {
        ADDOP_I(c, loc, CALL_INTRINSIC_1, INTRINSIC_LIST_TO_TUPLE);
    }
    return SUCCESS;
}
4348
4349
static int
4350
unpack_helper(struct compiler *c, location loc, asdl_expr_seq *elts)
4351
{
4352
Py_ssize_t n = asdl_seq_LEN(elts);
4353
int seen_star = 0;
4354
for (Py_ssize_t i = 0; i < n; i++) {
4355
expr_ty elt = asdl_seq_GET(elts, i);
4356
if (elt->kind == Starred_kind && !seen_star) {
4357
if ((i >= (1 << 8)) ||
4358
(n-i-1 >= (INT_MAX >> 8))) {
4359
return compiler_error(c, loc,
4360
"too many expressions in "
4361
"star-unpacking assignment");
4362
}
4363
ADDOP_I(c, loc, UNPACK_EX, (i + ((n-i-1) << 8)));
4364
seen_star = 1;
4365
}
4366
else if (elt->kind == Starred_kind) {
4367
return compiler_error(c, loc,
4368
"multiple starred expressions in assignment");
4369
}
4370
}
4371
if (!seen_star) {
4372
ADDOP_I(c, loc, UNPACK_SEQUENCE, n);
4373
}
4374
return SUCCESS;
4375
}
4376
4377
static int
4378
assignment_helper(struct compiler *c, location loc, asdl_expr_seq *elts)
4379
{
4380
Py_ssize_t n = asdl_seq_LEN(elts);
4381
RETURN_IF_ERROR(unpack_helper(c, loc, elts));
4382
for (Py_ssize_t i = 0; i < n; i++) {
4383
expr_ty elt = asdl_seq_GET(elts, i);
4384
VISIT(c, expr, elt->kind != Starred_kind ? elt : elt->v.Starred.value);
4385
}
4386
return SUCCESS;
4387
}
4388
4389
static int
4390
compiler_list(struct compiler *c, expr_ty e)
4391
{
4392
location loc = LOC(e);
4393
asdl_expr_seq *elts = e->v.List.elts;
4394
if (e->v.List.ctx == Store) {
4395
return assignment_helper(c, loc, elts);
4396
}
4397
else if (e->v.List.ctx == Load) {
4398
return starunpack_helper(c, loc, elts, 0,
4399
BUILD_LIST, LIST_APPEND, LIST_EXTEND, 0);
4400
}
4401
else {
4402
VISIT_SEQ(c, expr, elts);
4403
}
4404
return SUCCESS;
4405
}
4406
4407
static int
4408
compiler_tuple(struct compiler *c, expr_ty e)
4409
{
4410
location loc = LOC(e);
4411
asdl_expr_seq *elts = e->v.Tuple.elts;
4412
if (e->v.Tuple.ctx == Store) {
4413
return assignment_helper(c, loc, elts);
4414
}
4415
else if (e->v.Tuple.ctx == Load) {
4416
return starunpack_helper(c, loc, elts, 0,
4417
BUILD_LIST, LIST_APPEND, LIST_EXTEND, 1);
4418
}
4419
else {
4420
VISIT_SEQ(c, expr, elts);
4421
}
4422
return SUCCESS;
4423
}
4424
4425
static int
4426
compiler_set(struct compiler *c, expr_ty e)
4427
{
4428
location loc = LOC(e);
4429
return starunpack_helper(c, loc, e->v.Set.elts, 0,
4430
BUILD_SET, SET_ADD, SET_UPDATE, 0);
4431
}
4432
4433
static bool
4434
are_all_items_const(asdl_expr_seq *seq, Py_ssize_t begin, Py_ssize_t end)
4435
{
4436
for (Py_ssize_t i = begin; i < end; i++) {
4437
expr_ty key = (expr_ty)asdl_seq_GET(seq, i);
4438
if (key == NULL || key->kind != Constant_kind) {
4439
return false;
4440
}
4441
}
4442
return true;
4443
}
4444
4445
static int
4446
compiler_subdict(struct compiler *c, expr_ty e, Py_ssize_t begin, Py_ssize_t end)
4447
{
4448
Py_ssize_t i, n = end - begin;
4449
PyObject *keys, *key;
4450
int big = n*2 > STACK_USE_GUIDELINE;
4451
location loc = LOC(e);
4452
if (n > 1 && !big && are_all_items_const(e->v.Dict.keys, begin, end)) {
4453
for (i = begin; i < end; i++) {
4454
VISIT(c, expr, (expr_ty)asdl_seq_GET(e->v.Dict.values, i));
4455
}
4456
keys = PyTuple_New(n);
4457
if (keys == NULL) {
4458
return SUCCESS;
4459
}
4460
for (i = begin; i < end; i++) {
4461
key = ((expr_ty)asdl_seq_GET(e->v.Dict.keys, i))->v.Constant.value;
4462
PyTuple_SET_ITEM(keys, i - begin, Py_NewRef(key));
4463
}
4464
ADDOP_LOAD_CONST_NEW(c, loc, keys);
4465
ADDOP_I(c, loc, BUILD_CONST_KEY_MAP, n);
4466
return SUCCESS;
4467
}
4468
if (big) {
4469
ADDOP_I(c, loc, BUILD_MAP, 0);
4470
}
4471
for (i = begin; i < end; i++) {
4472
VISIT(c, expr, (expr_ty)asdl_seq_GET(e->v.Dict.keys, i));
4473
VISIT(c, expr, (expr_ty)asdl_seq_GET(e->v.Dict.values, i));
4474
if (big) {
4475
ADDOP_I(c, loc, MAP_ADD, 1);
4476
}
4477
}
4478
if (!big) {
4479
ADDOP_I(c, loc, BUILD_MAP, n);
4480
}
4481
return SUCCESS;
4482
}
4483
4484
static int
4485
compiler_dict(struct compiler *c, expr_ty e)
4486
{
4487
location loc = LOC(e);
4488
Py_ssize_t i, n, elements;
4489
int have_dict;
4490
int is_unpacking = 0;
4491
n = asdl_seq_LEN(e->v.Dict.values);
4492
have_dict = 0;
4493
elements = 0;
4494
for (i = 0; i < n; i++) {
4495
is_unpacking = (expr_ty)asdl_seq_GET(e->v.Dict.keys, i) == NULL;
4496
if (is_unpacking) {
4497
if (elements) {
4498
RETURN_IF_ERROR(compiler_subdict(c, e, i - elements, i));
4499
if (have_dict) {
4500
ADDOP_I(c, loc, DICT_UPDATE, 1);
4501
}
4502
have_dict = 1;
4503
elements = 0;
4504
}
4505
if (have_dict == 0) {
4506
ADDOP_I(c, loc, BUILD_MAP, 0);
4507
have_dict = 1;
4508
}
4509
VISIT(c, expr, (expr_ty)asdl_seq_GET(e->v.Dict.values, i));
4510
ADDOP_I(c, loc, DICT_UPDATE, 1);
4511
}
4512
else {
4513
if (elements*2 > STACK_USE_GUIDELINE) {
4514
RETURN_IF_ERROR(compiler_subdict(c, e, i - elements, i + 1));
4515
if (have_dict) {
4516
ADDOP_I(c, loc, DICT_UPDATE, 1);
4517
}
4518
have_dict = 1;
4519
elements = 0;
4520
}
4521
else {
4522
elements++;
4523
}
4524
}
4525
}
4526
if (elements) {
4527
RETURN_IF_ERROR(compiler_subdict(c, e, n - elements, n));
4528
if (have_dict) {
4529
ADDOP_I(c, loc, DICT_UPDATE, 1);
4530
}
4531
have_dict = 1;
4532
}
4533
if (!have_dict) {
4534
ADDOP_I(c, loc, BUILD_MAP, 0);
4535
}
4536
return SUCCESS;
4537
}
4538
4539
static int
4540
compiler_compare(struct compiler *c, expr_ty e)
4541
{
4542
location loc = LOC(e);
4543
Py_ssize_t i, n;
4544
4545
RETURN_IF_ERROR(check_compare(c, e));
4546
VISIT(c, expr, e->v.Compare.left);
4547
assert(asdl_seq_LEN(e->v.Compare.ops) > 0);
4548
n = asdl_seq_LEN(e->v.Compare.ops) - 1;
4549
if (n == 0) {
4550
VISIT(c, expr, (expr_ty)asdl_seq_GET(e->v.Compare.comparators, 0));
4551
ADDOP_COMPARE(c, loc, asdl_seq_GET(e->v.Compare.ops, 0));
4552
}
4553
else {
4554
NEW_JUMP_TARGET_LABEL(c, cleanup);
4555
for (i = 0; i < n; i++) {
4556
VISIT(c, expr,
4557
(expr_ty)asdl_seq_GET(e->v.Compare.comparators, i));
4558
ADDOP_I(c, loc, SWAP, 2);
4559
ADDOP_I(c, loc, COPY, 2);
4560
ADDOP_COMPARE(c, loc, asdl_seq_GET(e->v.Compare.ops, i));
4561
ADDOP_I(c, loc, COPY, 1);
4562
ADDOP(c, loc, TO_BOOL);
4563
ADDOP_JUMP(c, loc, POP_JUMP_IF_FALSE, cleanup);
4564
ADDOP(c, loc, POP_TOP);
4565
}
4566
VISIT(c, expr, (expr_ty)asdl_seq_GET(e->v.Compare.comparators, n));
4567
ADDOP_COMPARE(c, loc, asdl_seq_GET(e->v.Compare.ops, n));
4568
NEW_JUMP_TARGET_LABEL(c, end);
4569
ADDOP_JUMP(c, NO_LOCATION, JUMP, end);
4570
4571
USE_LABEL(c, cleanup);
4572
ADDOP_I(c, loc, SWAP, 2);
4573
ADDOP(c, loc, POP_TOP);
4574
4575
USE_LABEL(c, end);
4576
}
4577
return SUCCESS;
4578
}
4579
4580
static PyTypeObject *
4581
infer_type(expr_ty e)
4582
{
4583
switch (e->kind) {
4584
case Tuple_kind:
4585
return &PyTuple_Type;
4586
case List_kind:
4587
case ListComp_kind:
4588
return &PyList_Type;
4589
case Dict_kind:
4590
case DictComp_kind:
4591
return &PyDict_Type;
4592
case Set_kind:
4593
case SetComp_kind:
4594
return &PySet_Type;
4595
case GeneratorExp_kind:
4596
return &PyGen_Type;
4597
case Lambda_kind:
4598
return &PyFunction_Type;
4599
case JoinedStr_kind:
4600
case FormattedValue_kind:
4601
return &PyUnicode_Type;
4602
case Constant_kind:
4603
return Py_TYPE(e->v.Constant.value);
4604
default:
4605
return NULL;
4606
}
4607
}
4608
4609
static int
4610
check_caller(struct compiler *c, expr_ty e)
4611
{
4612
switch (e->kind) {
4613
case Constant_kind:
4614
case Tuple_kind:
4615
case List_kind:
4616
case ListComp_kind:
4617
case Dict_kind:
4618
case DictComp_kind:
4619
case Set_kind:
4620
case SetComp_kind:
4621
case GeneratorExp_kind:
4622
case JoinedStr_kind:
4623
case FormattedValue_kind: {
4624
location loc = LOC(e);
4625
return compiler_warn(c, loc, "'%.200s' object is not callable; "
4626
"perhaps you missed a comma?",
4627
infer_type(e)->tp_name);
4628
}
4629
default:
4630
return SUCCESS;
4631
}
4632
}
4633
4634
static int
4635
check_subscripter(struct compiler *c, expr_ty e)
4636
{
4637
PyObject *v;
4638
4639
switch (e->kind) {
4640
case Constant_kind:
4641
v = e->v.Constant.value;
4642
if (!(v == Py_None || v == Py_Ellipsis ||
4643
PyLong_Check(v) || PyFloat_Check(v) || PyComplex_Check(v) ||
4644
PyAnySet_Check(v)))
4645
{
4646
return SUCCESS;
4647
}
4648
/* fall through */
4649
case Set_kind:
4650
case SetComp_kind:
4651
case GeneratorExp_kind:
4652
case Lambda_kind: {
4653
location loc = LOC(e);
4654
return compiler_warn(c, loc, "'%.200s' object is not subscriptable; "
4655
"perhaps you missed a comma?",
4656
infer_type(e)->tp_name);
4657
}
4658
default:
4659
return SUCCESS;
4660
}
4661
}
4662
4663
static int
4664
check_index(struct compiler *c, expr_ty e, expr_ty s)
4665
{
4666
PyObject *v;
4667
4668
PyTypeObject *index_type = infer_type(s);
4669
if (index_type == NULL
4670
|| PyType_FastSubclass(index_type, Py_TPFLAGS_LONG_SUBCLASS)
4671
|| index_type == &PySlice_Type) {
4672
return SUCCESS;
4673
}
4674
4675
switch (e->kind) {
4676
case Constant_kind:
4677
v = e->v.Constant.value;
4678
if (!(PyUnicode_Check(v) || PyBytes_Check(v) || PyTuple_Check(v))) {
4679
return SUCCESS;
4680
}
4681
/* fall through */
4682
case Tuple_kind:
4683
case List_kind:
4684
case ListComp_kind:
4685
case JoinedStr_kind:
4686
case FormattedValue_kind: {
4687
location loc = LOC(e);
4688
return compiler_warn(c, loc, "%.200s indices must be integers "
4689
"or slices, not %.200s; "
4690
"perhaps you missed a comma?",
4691
infer_type(e)->tp_name,
4692
index_type->tp_name);
4693
}
4694
default:
4695
return SUCCESS;
4696
}
4697
}
4698
4699
static int
4700
is_import_originated(struct compiler *c, expr_ty e)
4701
{
4702
/* Check whether the global scope has an import named
4703
e, if it is a Name object. For not traversing all the
4704
scope stack every time this function is called, it will
4705
only check the global scope to determine whether something
4706
is imported or not. */
4707
4708
if (e->kind != Name_kind) {
4709
return 0;
4710
}
4711
4712
long flags = _PyST_GetSymbol(c->c_st->st_top, e->v.Name.id);
4713
return flags & DEF_IMPORT;
4714
}
4715
4716
static int
4717
can_optimize_super_call(struct compiler *c, expr_ty attr)
4718
{
4719
expr_ty e = attr->v.Attribute.value;
4720
if (e->kind != Call_kind ||
4721
e->v.Call.func->kind != Name_kind ||
4722
!_PyUnicode_EqualToASCIIString(e->v.Call.func->v.Name.id, "super") ||
4723
_PyUnicode_EqualToASCIIString(attr->v.Attribute.attr, "__class__") ||
4724
asdl_seq_LEN(e->v.Call.keywords) != 0) {
4725
return 0;
4726
}
4727
Py_ssize_t num_args = asdl_seq_LEN(e->v.Call.args);
4728
4729
PyObject *super_name = e->v.Call.func->v.Name.id;
4730
// detect statically-visible shadowing of 'super' name
4731
int scope = _PyST_GetScope(c->u->u_ste, super_name);
4732
if (scope != GLOBAL_IMPLICIT) {
4733
return 0;
4734
}
4735
scope = _PyST_GetScope(c->c_st->st_top, super_name);
4736
if (scope != 0) {
4737
return 0;
4738
}
4739
4740
if (num_args == 2) {
4741
for (Py_ssize_t i = 0; i < num_args; i++) {
4742
expr_ty elt = asdl_seq_GET(e->v.Call.args, i);
4743
if (elt->kind == Starred_kind) {
4744
return 0;
4745
}
4746
}
4747
// exactly two non-starred args; we can just load
4748
// the provided args
4749
return 1;
4750
}
4751
4752
if (num_args != 0) {
4753
return 0;
4754
}
4755
// we need the following for zero-arg super():
4756
4757
// enclosing function should have at least one argument
4758
if (c->u->u_metadata.u_argcount == 0 &&
4759
c->u->u_metadata.u_posonlyargcount == 0) {
4760
return 0;
4761
}
4762
// __class__ cell should be available
4763
if (get_ref_type(c, &_Py_ID(__class__)) == FREE) {
4764
return 1;
4765
}
4766
return 0;
4767
}
4768
4769
static int
4770
load_args_for_super(struct compiler *c, expr_ty e) {
4771
location loc = LOC(e);
4772
4773
// load super() global
4774
PyObject *super_name = e->v.Call.func->v.Name.id;
4775
RETURN_IF_ERROR(compiler_nameop(c, loc, super_name, Load));
4776
4777
if (asdl_seq_LEN(e->v.Call.args) == 2) {
4778
VISIT(c, expr, asdl_seq_GET(e->v.Call.args, 0));
4779
VISIT(c, expr, asdl_seq_GET(e->v.Call.args, 1));
4780
return SUCCESS;
4781
}
4782
4783
// load __class__ cell
4784
PyObject *name = &_Py_ID(__class__);
4785
assert(get_ref_type(c, name) == FREE);
4786
RETURN_IF_ERROR(compiler_nameop(c, loc, name, Load));
4787
4788
// load self (first argument)
4789
Py_ssize_t i = 0;
4790
PyObject *key, *value;
4791
if (!PyDict_Next(c->u->u_metadata.u_varnames, &i, &key, &value)) {
4792
return ERROR;
4793
}
4794
RETURN_IF_ERROR(compiler_nameop(c, loc, key, Load));
4795
4796
return SUCCESS;
4797
}
4798
4799
// If an attribute access spans multiple lines, update the current start
4800
// location to point to the attribute name.
4801
static location
4802
update_start_location_to_match_attr(struct compiler *c, location loc,
4803
expr_ty attr)
4804
{
4805
assert(attr->kind == Attribute_kind);
4806
if (loc.lineno != attr->end_lineno) {
4807
loc.lineno = attr->end_lineno;
4808
int len = (int)PyUnicode_GET_LENGTH(attr->v.Attribute.attr);
4809
if (len <= attr->end_col_offset) {
4810
loc.col_offset = attr->end_col_offset - len;
4811
}
4812
else {
4813
// GH-94694: Somebody's compiling weird ASTs. Just drop the columns:
4814
loc.col_offset = -1;
4815
loc.end_col_offset = -1;
4816
}
4817
// Make sure the end position still follows the start position, even for
4818
// weird ASTs:
4819
loc.end_lineno = Py_MAX(loc.lineno, loc.end_lineno);
4820
if (loc.lineno == loc.end_lineno) {
4821
loc.end_col_offset = Py_MAX(loc.col_offset, loc.end_col_offset);
4822
}
4823
}
4824
return loc;
4825
}
4826
4827
// Return 1 if the method call was optimized, 0 if not, and -1 on error.
4828
static int
4829
maybe_optimize_method_call(struct compiler *c, expr_ty e)
4830
{
4831
Py_ssize_t argsl, i, kwdsl;
4832
expr_ty meth = e->v.Call.func;
4833
asdl_expr_seq *args = e->v.Call.args;
4834
asdl_keyword_seq *kwds = e->v.Call.keywords;
4835
4836
/* Check that the call node is an attribute access */
4837
if (meth->kind != Attribute_kind || meth->v.Attribute.ctx != Load) {
4838
return 0;
4839
}
4840
4841
/* Check that the base object is not something that is imported */
4842
if (is_import_originated(c, meth->v.Attribute.value)) {
4843
return 0;
4844
}
4845
4846
/* Check that there aren't too many arguments */
4847
argsl = asdl_seq_LEN(args);
4848
kwdsl = asdl_seq_LEN(kwds);
4849
if (argsl + kwdsl + (kwdsl != 0) >= STACK_USE_GUIDELINE) {
4850
return 0;
4851
}
4852
/* Check that there are no *varargs types of arguments. */
4853
for (i = 0; i < argsl; i++) {
4854
expr_ty elt = asdl_seq_GET(args, i);
4855
if (elt->kind == Starred_kind) {
4856
return 0;
4857
}
4858
}
4859
4860
for (i = 0; i < kwdsl; i++) {
4861
keyword_ty kw = asdl_seq_GET(kwds, i);
4862
if (kw->arg == NULL) {
4863
return 0;
4864
}
4865
}
4866
4867
/* Alright, we can optimize the code. */
4868
location loc = LOC(meth);
4869
4870
if (can_optimize_super_call(c, meth)) {
4871
RETURN_IF_ERROR(load_args_for_super(c, meth->v.Attribute.value));
4872
int opcode = asdl_seq_LEN(meth->v.Attribute.value->v.Call.args) ?
4873
LOAD_SUPER_METHOD : LOAD_ZERO_SUPER_METHOD;
4874
ADDOP_NAME(c, loc, opcode, meth->v.Attribute.attr, names);
4875
loc = update_start_location_to_match_attr(c, loc, meth);
4876
ADDOP(c, loc, NOP);
4877
} else {
4878
VISIT(c, expr, meth->v.Attribute.value);
4879
loc = update_start_location_to_match_attr(c, loc, meth);
4880
ADDOP_NAME(c, loc, LOAD_METHOD, meth->v.Attribute.attr, names);
4881
}
4882
4883
VISIT_SEQ(c, expr, e->v.Call.args);
4884
4885
if (kwdsl) {
4886
VISIT_SEQ(c, keyword, kwds);
4887
RETURN_IF_ERROR(
4888
compiler_call_simple_kw_helper(c, loc, kwds, kwdsl));
4889
}
4890
loc = update_start_location_to_match_attr(c, LOC(e), meth);
4891
ADDOP_I(c, loc, CALL, argsl + kwdsl);
4892
return 1;
4893
}
4894
4895
static int
4896
validate_keywords(struct compiler *c, asdl_keyword_seq *keywords)
4897
{
4898
Py_ssize_t nkeywords = asdl_seq_LEN(keywords);
4899
for (Py_ssize_t i = 0; i < nkeywords; i++) {
4900
keyword_ty key = ((keyword_ty)asdl_seq_GET(keywords, i));
4901
if (key->arg == NULL) {
4902
continue;
4903
}
4904
location loc = LOC(key);
4905
if (forbidden_name(c, loc, key->arg, Store)) {
4906
return ERROR;
4907
}
4908
for (Py_ssize_t j = i + 1; j < nkeywords; j++) {
4909
keyword_ty other = ((keyword_ty)asdl_seq_GET(keywords, j));
4910
if (other->arg && !PyUnicode_Compare(key->arg, other->arg)) {
4911
compiler_error(c, LOC(other), "keyword argument repeated: %U", key->arg);
4912
return ERROR;
4913
}
4914
}
4915
}
4916
return SUCCESS;
4917
}
4918
4919
static int
4920
compiler_call(struct compiler *c, expr_ty e)
4921
{
4922
RETURN_IF_ERROR(validate_keywords(c, e->v.Call.keywords));
4923
int ret = maybe_optimize_method_call(c, e);
4924
if (ret < 0) {
4925
return ERROR;
4926
}
4927
if (ret == 1) {
4928
return SUCCESS;
4929
}
4930
RETURN_IF_ERROR(check_caller(c, e->v.Call.func));
4931
location loc = LOC(e->v.Call.func);
4932
ADDOP(c, loc, PUSH_NULL);
4933
VISIT(c, expr, e->v.Call.func);
4934
loc = LOC(e);
4935
return compiler_call_helper(c, loc, 0,
4936
e->v.Call.args,
4937
e->v.Call.keywords);
4938
}
4939
4940
static int
4941
compiler_joined_str(struct compiler *c, expr_ty e)
4942
{
4943
location loc = LOC(e);
4944
Py_ssize_t value_count = asdl_seq_LEN(e->v.JoinedStr.values);
4945
if (value_count > STACK_USE_GUIDELINE) {
4946
_Py_DECLARE_STR(empty, "");
4947
ADDOP_LOAD_CONST_NEW(c, loc, Py_NewRef(&_Py_STR(empty)));
4948
ADDOP_NAME(c, loc, LOAD_METHOD, &_Py_ID(join), names);
4949
ADDOP_I(c, loc, BUILD_LIST, 0);
4950
for (Py_ssize_t i = 0; i < asdl_seq_LEN(e->v.JoinedStr.values); i++) {
4951
VISIT(c, expr, asdl_seq_GET(e->v.JoinedStr.values, i));
4952
ADDOP_I(c, loc, LIST_APPEND, 1);
4953
}
4954
ADDOP_I(c, loc, CALL, 1);
4955
}
4956
else {
4957
VISIT_SEQ(c, expr, e->v.JoinedStr.values);
4958
if (asdl_seq_LEN(e->v.JoinedStr.values) != 1) {
4959
ADDOP_I(c, loc, BUILD_STRING, asdl_seq_LEN(e->v.JoinedStr.values));
4960
}
4961
}
4962
return SUCCESS;
4963
}
4964
4965
/* Used to implement f-strings. Format a single value. */
4966
static int
4967
compiler_formatted_value(struct compiler *c, expr_ty e)
4968
{
4969
/* Our oparg encodes 2 pieces of information: the conversion
4970
character, and whether or not a format_spec was provided.
4971
4972
Convert the conversion char to 3 bits:
4973
: 000 0x0 FVC_NONE The default if nothing specified.
4974
!s : 001 0x1 FVC_STR
4975
!r : 010 0x2 FVC_REPR
4976
!a : 011 0x3 FVC_ASCII
4977
4978
next bit is whether or not we have a format spec:
4979
yes : 100 0x4
4980
no : 000 0x0
4981
*/
4982
4983
int conversion = e->v.FormattedValue.conversion;
4984
int oparg;
4985
4986
/* The expression to be formatted. */
4987
VISIT(c, expr, e->v.FormattedValue.value);
4988
4989
location loc = LOC(e);
4990
if (conversion != -1) {
4991
switch (conversion) {
4992
case 's': oparg = FVC_STR; break;
4993
case 'r': oparg = FVC_REPR; break;
4994
case 'a': oparg = FVC_ASCII; break;
4995
default:
4996
PyErr_Format(PyExc_SystemError,
4997
"Unrecognized conversion character %d", conversion);
4998
return ERROR;
4999
}
5000
ADDOP_I(c, loc, CONVERT_VALUE, oparg);
5001
}
5002
if (e->v.FormattedValue.format_spec) {
5003
/* Evaluate the format spec, and update our opcode arg. */
5004
VISIT(c, expr, e->v.FormattedValue.format_spec);
5005
ADDOP(c, loc, FORMAT_WITH_SPEC);
5006
} else {
5007
ADDOP(c, loc, FORMAT_SIMPLE);
5008
}
5009
return SUCCESS;
5010
}
5011
5012
static int
5013
compiler_subkwargs(struct compiler *c, location loc,
5014
asdl_keyword_seq *keywords,
5015
Py_ssize_t begin, Py_ssize_t end)
5016
{
5017
Py_ssize_t i, n = end - begin;
5018
keyword_ty kw;
5019
PyObject *keys, *key;
5020
assert(n > 0);
5021
int big = n*2 > STACK_USE_GUIDELINE;
5022
if (n > 1 && !big) {
5023
for (i = begin; i < end; i++) {
5024
kw = asdl_seq_GET(keywords, i);
5025
VISIT(c, expr, kw->value);
5026
}
5027
keys = PyTuple_New(n);
5028
if (keys == NULL) {
5029
return ERROR;
5030
}
5031
for (i = begin; i < end; i++) {
5032
key = ((keyword_ty) asdl_seq_GET(keywords, i))->arg;
5033
PyTuple_SET_ITEM(keys, i - begin, Py_NewRef(key));
5034
}
5035
ADDOP_LOAD_CONST_NEW(c, loc, keys);
5036
ADDOP_I(c, loc, BUILD_CONST_KEY_MAP, n);
5037
return SUCCESS;
5038
}
5039
if (big) {
5040
ADDOP_I(c, NO_LOCATION, BUILD_MAP, 0);
5041
}
5042
for (i = begin; i < end; i++) {
5043
kw = asdl_seq_GET(keywords, i);
5044
ADDOP_LOAD_CONST(c, loc, kw->arg);
5045
VISIT(c, expr, kw->value);
5046
if (big) {
5047
ADDOP_I(c, NO_LOCATION, MAP_ADD, 1);
5048
}
5049
}
5050
if (!big) {
5051
ADDOP_I(c, loc, BUILD_MAP, n);
5052
}
5053
return SUCCESS;
5054
}
5055
5056
/* Used by compiler_call_helper and maybe_optimize_method_call to emit
5057
* KW_NAMES before CALL.
5058
*/
5059
static int
5060
compiler_call_simple_kw_helper(struct compiler *c, location loc,
5061
asdl_keyword_seq *keywords, Py_ssize_t nkwelts)
5062
{
5063
PyObject *names;
5064
names = PyTuple_New(nkwelts);
5065
if (names == NULL) {
5066
return ERROR;
5067
}
5068
for (int i = 0; i < nkwelts; i++) {
5069
keyword_ty kw = asdl_seq_GET(keywords, i);
5070
PyTuple_SET_ITEM(names, i, Py_NewRef(kw->arg));
5071
}
5072
Py_ssize_t arg = compiler_add_const(c->c_const_cache, c->u, names);
5073
if (arg < 0) {
5074
return ERROR;
5075
}
5076
Py_DECREF(names);
5077
ADDOP_I(c, loc, KW_NAMES, arg);
5078
return SUCCESS;
5079
}
5080
5081
5082
/* shared code between compiler_call and compiler_class */
5083
static int
5084
compiler_call_helper(struct compiler *c, location loc,
5085
int n, /* Args already pushed */
5086
asdl_expr_seq *args,
5087
asdl_keyword_seq *keywords)
5088
{
5089
Py_ssize_t i, nseen, nelts, nkwelts;
5090
5091
RETURN_IF_ERROR(validate_keywords(c, keywords));
5092
5093
nelts = asdl_seq_LEN(args);
5094
nkwelts = asdl_seq_LEN(keywords);
5095
5096
if (nelts + nkwelts*2 > STACK_USE_GUIDELINE) {
5097
goto ex_call;
5098
}
5099
for (i = 0; i < nelts; i++) {
5100
expr_ty elt = asdl_seq_GET(args, i);
5101
if (elt->kind == Starred_kind) {
5102
goto ex_call;
5103
}
5104
}
5105
for (i = 0; i < nkwelts; i++) {
5106
keyword_ty kw = asdl_seq_GET(keywords, i);
5107
if (kw->arg == NULL) {
5108
goto ex_call;
5109
}
5110
}
5111
5112
/* No * or ** args, so can use faster calling sequence */
5113
for (i = 0; i < nelts; i++) {
5114
expr_ty elt = asdl_seq_GET(args, i);
5115
assert(elt->kind != Starred_kind);
5116
VISIT(c, expr, elt);
5117
}
5118
if (nkwelts) {
5119
VISIT_SEQ(c, keyword, keywords);
5120
RETURN_IF_ERROR(
5121
compiler_call_simple_kw_helper(c, loc, keywords, nkwelts));
5122
}
5123
ADDOP_I(c, loc, CALL, n + nelts + nkwelts);
5124
return SUCCESS;
5125
5126
ex_call:
5127
5128
/* Do positional arguments. */
5129
if (n ==0 && nelts == 1 && ((expr_ty)asdl_seq_GET(args, 0))->kind == Starred_kind) {
5130
VISIT(c, expr, ((expr_ty)asdl_seq_GET(args, 0))->v.Starred.value);
5131
}
5132
else {
5133
RETURN_IF_ERROR(starunpack_helper(c, loc, args, n, BUILD_LIST,
5134
LIST_APPEND, LIST_EXTEND, 1));
5135
}
5136
/* Then keyword arguments */
5137
if (nkwelts) {
5138
/* Has a new dict been pushed */
5139
int have_dict = 0;
5140
5141
nseen = 0; /* the number of keyword arguments on the stack following */
5142
for (i = 0; i < nkwelts; i++) {
5143
keyword_ty kw = asdl_seq_GET(keywords, i);
5144
if (kw->arg == NULL) {
5145
/* A keyword argument unpacking. */
5146
if (nseen) {
5147
RETURN_IF_ERROR(compiler_subkwargs(c, loc, keywords, i - nseen, i));
5148
if (have_dict) {
5149
ADDOP_I(c, loc, DICT_MERGE, 1);
5150
}
5151
have_dict = 1;
5152
nseen = 0;
5153
}
5154
if (!have_dict) {
5155
ADDOP_I(c, loc, BUILD_MAP, 0);
5156
have_dict = 1;
5157
}
5158
VISIT(c, expr, kw->value);
5159
ADDOP_I(c, loc, DICT_MERGE, 1);
5160
}
5161
else {
5162
nseen++;
5163
}
5164
}
5165
if (nseen) {
5166
/* Pack up any trailing keyword arguments. */
5167
RETURN_IF_ERROR(compiler_subkwargs(c, loc, keywords, nkwelts - nseen, nkwelts));
5168
if (have_dict) {
5169
ADDOP_I(c, loc, DICT_MERGE, 1);
5170
}
5171
have_dict = 1;
5172
}
5173
assert(have_dict);
5174
}
5175
ADDOP_I(c, loc, CALL_FUNCTION_EX, nkwelts > 0);
5176
return SUCCESS;
5177
}
5178
5179
5180
/* List and set comprehensions and generator expressions work by creating a
  nested function to perform the actual iteration. This means that the
  iteration variables don't leak into the current scope.
  The defined function is called immediately following its definition, with the
  result of that call being the result of the expression.
  The LC/SC version returns the populated container, while the GE version is
  flagged in symtable.c as a generator, so it returns the generator object
  when the function is called.

  Possible cleanups:
    - iterate over the generator sequence instead of using recursion
*/
5192
5193
5194
static int
5195
compiler_comprehension_generator(struct compiler *c, location loc,
5196
asdl_comprehension_seq *generators, int gen_index,
5197
int depth,
5198
expr_ty elt, expr_ty val, int type,
5199
int iter_on_stack)
5200
{
5201
comprehension_ty gen;
5202
gen = (comprehension_ty)asdl_seq_GET(generators, gen_index);
5203
if (gen->is_async) {
5204
return compiler_async_comprehension_generator(
5205
c, loc, generators, gen_index, depth, elt, val, type,
5206
iter_on_stack);
5207
} else {
5208
return compiler_sync_comprehension_generator(
5209
c, loc, generators, gen_index, depth, elt, val, type,
5210
iter_on_stack);
5211
}
5212
}
5213
5214
static int
5215
compiler_sync_comprehension_generator(struct compiler *c, location loc,
5216
asdl_comprehension_seq *generators,
5217
int gen_index, int depth,
5218
expr_ty elt, expr_ty val, int type,
5219
int iter_on_stack)
5220
{
5221
/* generate code for the iterator, then each of the ifs,
5222
and then write to the element */
5223
5224
NEW_JUMP_TARGET_LABEL(c, start);
5225
NEW_JUMP_TARGET_LABEL(c, if_cleanup);
5226
NEW_JUMP_TARGET_LABEL(c, anchor);
5227
5228
comprehension_ty gen = (comprehension_ty)asdl_seq_GET(generators,
5229
gen_index);
5230
5231
if (!iter_on_stack) {
5232
if (gen_index == 0) {
5233
/* Receive outermost iter as an implicit argument */
5234
c->u->u_metadata.u_argcount = 1;
5235
ADDOP_I(c, loc, LOAD_FAST, 0);
5236
}
5237
else {
5238
/* Sub-iter - calculate on the fly */
5239
/* Fast path for the temporary variable assignment idiom:
5240
for y in [f(x)]
5241
*/
5242
asdl_expr_seq *elts;
5243
switch (gen->iter->kind) {
5244
case List_kind:
5245
elts = gen->iter->v.List.elts;
5246
break;
5247
case Tuple_kind:
5248
elts = gen->iter->v.Tuple.elts;
5249
break;
5250
default:
5251
elts = NULL;
5252
}
5253
if (asdl_seq_LEN(elts) == 1) {
5254
expr_ty elt = asdl_seq_GET(elts, 0);
5255
if (elt->kind != Starred_kind) {
5256
VISIT(c, expr, elt);
5257
start = NO_LABEL;
5258
}
5259
}
5260
if (IS_LABEL(start)) {
5261
VISIT(c, expr, gen->iter);
5262
ADDOP(c, loc, GET_ITER);
5263
}
5264
}
5265
}
5266
if (IS_LABEL(start)) {
5267
depth++;
5268
USE_LABEL(c, start);
5269
ADDOP_JUMP(c, loc, FOR_ITER, anchor);
5270
}
5271
VISIT(c, expr, gen->target);
5272
5273
/* XXX this needs to be cleaned up...a lot! */
5274
Py_ssize_t n = asdl_seq_LEN(gen->ifs);
5275
for (Py_ssize_t i = 0; i < n; i++) {
5276
expr_ty e = (expr_ty)asdl_seq_GET(gen->ifs, i);
5277
RETURN_IF_ERROR(compiler_jump_if(c, loc, e, if_cleanup, 0));
5278
}
5279
5280
if (++gen_index < asdl_seq_LEN(generators)) {
5281
RETURN_IF_ERROR(
5282
compiler_comprehension_generator(c, loc,
5283
generators, gen_index, depth,
5284
elt, val, type, 0));
5285
}
5286
5287
location elt_loc = LOC(elt);
5288
5289
/* only append after the last for generator */
5290
if (gen_index >= asdl_seq_LEN(generators)) {
5291
/* comprehension specific code */
5292
switch (type) {
5293
case COMP_GENEXP:
5294
VISIT(c, expr, elt);
5295
ADDOP_YIELD(c, elt_loc);
5296
ADDOP(c, elt_loc, POP_TOP);
5297
break;
5298
case COMP_LISTCOMP:
5299
VISIT(c, expr, elt);
5300
ADDOP_I(c, elt_loc, LIST_APPEND, depth + 1);
5301
break;
5302
case COMP_SETCOMP:
5303
VISIT(c, expr, elt);
5304
ADDOP_I(c, elt_loc, SET_ADD, depth + 1);
5305
break;
5306
case COMP_DICTCOMP:
5307
/* With '{k: v}', k is evaluated before v, so we do
5308
the same. */
5309
VISIT(c, expr, elt);
5310
VISIT(c, expr, val);
5311
elt_loc = LOCATION(elt->lineno,
5312
val->end_lineno,
5313
elt->col_offset,
5314
val->end_col_offset);
5315
ADDOP_I(c, elt_loc, MAP_ADD, depth + 1);
5316
break;
5317
default:
5318
return ERROR;
5319
}
5320
}
5321
5322
USE_LABEL(c, if_cleanup);
5323
if (IS_LABEL(start)) {
5324
ADDOP_JUMP(c, elt_loc, JUMP, start);
5325
5326
USE_LABEL(c, anchor);
5327
ADDOP(c, NO_LOCATION, END_FOR);
5328
}
5329
5330
return SUCCESS;
5331
}
5332
5333
static int
5334
compiler_async_comprehension_generator(struct compiler *c, location loc,
5335
asdl_comprehension_seq *generators,
5336
int gen_index, int depth,
5337
expr_ty elt, expr_ty val, int type,
5338
int iter_on_stack)
5339
{
5340
NEW_JUMP_TARGET_LABEL(c, start);
5341
NEW_JUMP_TARGET_LABEL(c, except);
5342
NEW_JUMP_TARGET_LABEL(c, if_cleanup);
5343
5344
comprehension_ty gen = (comprehension_ty)asdl_seq_GET(generators,
5345
gen_index);
5346
5347
if (!iter_on_stack) {
5348
if (gen_index == 0) {
5349
/* Receive outermost iter as an implicit argument */
5350
c->u->u_metadata.u_argcount = 1;
5351
ADDOP_I(c, loc, LOAD_FAST, 0);
5352
}
5353
else {
5354
/* Sub-iter - calculate on the fly */
5355
VISIT(c, expr, gen->iter);
5356
ADDOP(c, loc, GET_AITER);
5357
}
5358
}
5359
5360
USE_LABEL(c, start);
5361
/* Runtime will push a block here, so we need to account for that */
5362
RETURN_IF_ERROR(
5363
compiler_push_fblock(c, loc, ASYNC_COMPREHENSION_GENERATOR,
5364
start, NO_LABEL, NULL));
5365
5366
ADDOP_JUMP(c, loc, SETUP_FINALLY, except);
5367
ADDOP(c, loc, GET_ANEXT);
5368
ADDOP_LOAD_CONST(c, loc, Py_None);
5369
ADD_YIELD_FROM(c, loc, 1);
5370
ADDOP(c, loc, POP_BLOCK);
5371
VISIT(c, expr, gen->target);
5372
5373
Py_ssize_t n = asdl_seq_LEN(gen->ifs);
5374
for (Py_ssize_t i = 0; i < n; i++) {
5375
expr_ty e = (expr_ty)asdl_seq_GET(gen->ifs, i);
5376
RETURN_IF_ERROR(compiler_jump_if(c, loc, e, if_cleanup, 0));
5377
}
5378
5379
depth++;
5380
if (++gen_index < asdl_seq_LEN(generators)) {
5381
RETURN_IF_ERROR(
5382
compiler_comprehension_generator(c, loc,
5383
generators, gen_index, depth,
5384
elt, val, type, 0));
5385
}
5386
5387
location elt_loc = LOC(elt);
5388
/* only append after the last for generator */
5389
if (gen_index >= asdl_seq_LEN(generators)) {
5390
/* comprehension specific code */
5391
switch (type) {
5392
case COMP_GENEXP:
5393
VISIT(c, expr, elt);
5394
ADDOP_YIELD(c, elt_loc);
5395
ADDOP(c, elt_loc, POP_TOP);
5396
break;
5397
case COMP_LISTCOMP:
5398
VISIT(c, expr, elt);
5399
ADDOP_I(c, elt_loc, LIST_APPEND, depth + 1);
5400
break;
5401
case COMP_SETCOMP:
5402
VISIT(c, expr, elt);
5403
ADDOP_I(c, elt_loc, SET_ADD, depth + 1);
5404
break;
5405
case COMP_DICTCOMP:
5406
/* With '{k: v}', k is evaluated before v, so we do
5407
the same. */
5408
VISIT(c, expr, elt);
5409
VISIT(c, expr, val);
5410
elt_loc = LOCATION(elt->lineno,
5411
val->end_lineno,
5412
elt->col_offset,
5413
val->end_col_offset);
5414
ADDOP_I(c, elt_loc, MAP_ADD, depth + 1);
5415
break;
5416
default:
5417
return ERROR;
5418
}
5419
}
5420
5421
USE_LABEL(c, if_cleanup);
5422
ADDOP_JUMP(c, elt_loc, JUMP, start);
5423
5424
compiler_pop_fblock(c, ASYNC_COMPREHENSION_GENERATOR, start);
5425
5426
USE_LABEL(c, except);
5427
5428
ADDOP(c, loc, END_ASYNC_FOR);
5429
5430
return SUCCESS;
5431
}
5432
5433
typedef struct {
5434
PyObject *pushed_locals;
5435
PyObject *temp_symbols;
5436
PyObject *fast_hidden;
5437
} inlined_comprehension_state;
5438
5439
/* Prepare the current compiler unit for compiling an inlined comprehension.
 *
 * For every name bound inside the comprehension, isolate it from the outer
 * scope: save the outer symbol-table entry (restored later), emit
 * LOAD_FAST_AND_CLEAR to stash any existing value on the stack, and create
 * fresh cells where needed.  'state' records everything that
 * pop_inlined_comprehension_state() must undo.
 *
 * Returns SUCCESS or ERROR (with an exception set).  On ERROR, the caller
 * is responsible for releasing whatever was already stored in *state. */
static int
push_inlined_comprehension_state(struct compiler *c, location loc,
                                 PySTEntryObject *entry,
                                 inlined_comprehension_state *state)
{
    // In a class body, every comprehension-bound name must be isolated, not
    // just the DEF_LOCAL ones (class scopes don't use fast locals normally).
    int in_class_block = (c->u->u_ste->ste_type == ClassBlock) && !c->u->u_in_inlined_comp;
    c->u->u_in_inlined_comp++;
    // iterate over names bound in the comprehension and ensure we isolate
    // them from the outer scope as needed
    PyObject *k, *v;
    Py_ssize_t pos = 0;
    while (PyDict_Next(entry->ste_symbols, &pos, &k, &v)) {
        assert(PyLong_Check(v));
        long symbol = PyLong_AS_LONG(v);
        // only values bound in the comprehension (DEF_LOCAL) need to be handled
        // at all; DEF_LOCAL | DEF_NONLOCAL can occur in the case of an
        // assignment expression to a nonlocal in the comprehension, these don't
        // need handling here since they shouldn't be isolated
        if ((symbol & DEF_LOCAL && !(symbol & DEF_NONLOCAL)) || in_class_block) {
            if (!_PyST_IsFunctionLike(c->u->u_ste)) {
                // non-function scope: override this name to use fast locals
                PyObject *orig = PyDict_GetItem(c->u->u_metadata.u_fasthidden, k);
                if (orig != Py_True) {
                    if (PyDict_SetItem(c->u->u_metadata.u_fasthidden, k, Py_True) < 0) {
                        return ERROR;
                    }
                    // remember the name so we can flip it back to False later
                    if (state->fast_hidden == NULL) {
                        state->fast_hidden = PySet_New(NULL);
                        if (state->fast_hidden == NULL) {
                            return ERROR;
                        }
                    }
                    if (PySet_Add(state->fast_hidden, k) < 0) {
                        return ERROR;
                    }
                }
            }
            long scope = (symbol >> SCOPE_OFFSET) & SCOPE_MASK;
            // name may be absent from the outer scope entirely; treat that as
            // flags 0 (no scope) via the shared zero singleton
            PyObject *outv = PyDict_GetItemWithError(c->u->u_ste->ste_symbols, k);
            if (outv == NULL) {
                outv = _PyLong_GetZero();
            }
            assert(PyLong_Check(outv));
            long outsc = (PyLong_AS_LONG(outv) >> SCOPE_OFFSET) & SCOPE_MASK;
            if (scope != outsc && !(scope == CELL && outsc == FREE)) {
                // If a name has different scope inside than outside the
                // comprehension, we need to temporarily handle it with the
                // right scope while compiling the comprehension. (If it's free
                // in outer scope and cell in inner scope, we can't treat it as
                // both cell and free in the same function, but treating it as
                // free throughout is fine; it's *_DEREF either way.)

                if (state->temp_symbols == NULL) {
                    state->temp_symbols = PyDict_New();
                    if (state->temp_symbols == NULL) {
                        return ERROR;
                    }
                }
                // update the symbol to the in-comprehension version and save
                // the outer version; we'll restore it after running the
                // comprehension
                Py_INCREF(outv);
                if (PyDict_SetItem(c->u->u_ste->ste_symbols, k, v) < 0) {
                    Py_DECREF(outv);
                    return ERROR;
                }
                if (PyDict_SetItem(state->temp_symbols, k, outv) < 0) {
                    Py_DECREF(outv);
                    return ERROR;
                }
                Py_DECREF(outv);
            }
            // local names bound in comprehension must be isolated from
            // outer scope; push existing value (which may be NULL if
            // not defined) on stack
            if (state->pushed_locals == NULL) {
                state->pushed_locals = PyList_New(0);
                if (state->pushed_locals == NULL) {
                    return ERROR;
                }
            }
            // in the case of a cell, this will actually push the cell
            // itself to the stack, then we'll create a new one for the
            // comprehension and restore the original one after
            ADDOP_NAME(c, loc, LOAD_FAST_AND_CLEAR, k, varnames);
            if (scope == CELL) {
                if (outsc == FREE) {
                    ADDOP_NAME(c, loc, MAKE_CELL, k, freevars);
                } else {
                    ADDOP_NAME(c, loc, MAKE_CELL, k, cellvars);
                }
            }
            if (PyList_Append(state->pushed_locals, k) < 0) {
                return ERROR;
            }
        }
    }
    if (state->pushed_locals) {
        // Outermost iterable expression was already evaluated and is on the
        // stack, we need to swap it back to TOS. This also rotates the order of
        // `pushed_locals` on the stack, but this will be reversed when we swap
        // out the comprehension result in pop_inlined_comprehension_state
        ADDOP_I(c, loc, SWAP, PyList_GET_SIZE(state->pushed_locals) + 1);
    }

    return SUCCESS;
}
5546
5547
/* Undo the effects of push_inlined_comprehension_state() after the body of
 * an inlined comprehension has been compiled: restore the saved outer-scope
 * symbol entries, emit code to restore any shadowed locals from the stack,
 * and reset the fast-hidden flags.
 *
 * NOTE(review): 'state' is received by value; Py_CLEAR here releases the
 * references it holds, so on SUCCESS the caller must not release them again
 * (the caller's error path only runs when this returns ERROR). */
static int
pop_inlined_comprehension_state(struct compiler *c, location loc,
                                inlined_comprehension_state state)
{
    c->u->u_in_inlined_comp--;
    PyObject *k, *v;
    Py_ssize_t pos = 0;
    if (state.temp_symbols) {
        // put the saved outer-scope symbol flags back into the symbol table
        while (PyDict_Next(state.temp_symbols, &pos, &k, &v)) {
            if (PyDict_SetItem(c->u->u_ste->ste_symbols, k, v)) {
                return ERROR;
            }
        }
        Py_CLEAR(state.temp_symbols);
    }
    if (state.pushed_locals) {
        // pop names we pushed to stack earlier
        Py_ssize_t npops = PyList_GET_SIZE(state.pushed_locals);
        // Preserve the list/dict/set result of the comprehension as TOS. This
        // reverses the SWAP we did in push_inlined_comprehension_state to get
        // the outermost iterable to TOS, so we can still just iterate
        // pushed_locals in simple reverse order
        ADDOP_I(c, loc, SWAP, npops + 1);
        for (Py_ssize_t i = npops - 1; i >= 0; --i) {
            k = PyList_GetItem(state.pushed_locals, i);
            if (k == NULL) {
                return ERROR;
            }
            // STORE_FAST_MAYBE_NULL: the saved value may be NULL if the name
            // was unbound before the comprehension ran
            ADDOP_NAME(c, loc, STORE_FAST_MAYBE_NULL, k, varnames);
        }
        Py_CLEAR(state.pushed_locals);
    }
    if (state.fast_hidden) {
        while (PySet_Size(state.fast_hidden) > 0) {
            PyObject *k = PySet_Pop(state.fast_hidden);
            if (k == NULL) {
                return ERROR;
            }
            // we set to False instead of clearing, so we can track which names
            // were temporarily fast-locals and should use CO_FAST_HIDDEN
            if (PyDict_SetItem(c->u->u_metadata.u_fasthidden, k, Py_False)) {
                Py_DECREF(k);
                return ERROR;
            }
            Py_DECREF(k);
        }
        Py_CLEAR(state.fast_hidden);
    }
    return SUCCESS;
}
5597
5598
static inline int
5599
compiler_comprehension_iter(struct compiler *c, location loc,
5600
comprehension_ty comp)
5601
{
5602
VISIT(c, expr, comp->iter);
5603
if (comp->is_async) {
5604
ADDOP(c, loc, GET_AITER);
5605
}
5606
else {
5607
ADDOP(c, loc, GET_ITER);
5608
}
5609
return SUCCESS;
5610
}
5611
5612
/* Compile a comprehension or generator expression 'e' of the given 'type'
 * (COMP_GENEXP/COMP_LISTCOMP/COMP_SETCOMP/COMP_DICTCOMP).
 *
 * Two strategies, chosen by the symbol table (ste_comp_inlined):
 *  - inlined: the comprehension body is emitted directly into the current
 *    code object, with locals isolated via push/pop_inlined_comprehension_state;
 *  - otherwise: a separate code object named 'name' is compiled in its own
 *    scope, wrapped in a closure, and called with the outermost iterator.
 *
 * 'elt' is the element expression; 'val' is only used for dict
 * comprehensions (the value, with 'elt' as the key).
 * Returns SUCCESS or ERROR (with an exception set). */
static int
compiler_comprehension(struct compiler *c, expr_ty e, int type,
                       identifier name, asdl_comprehension_seq *generators, expr_ty elt,
                       expr_ty val)
{
    PyCodeObject *co = NULL;
    // remaining field(s) are implicitly zero-initialized
    inlined_comprehension_state inline_state = {NULL, NULL};
    comprehension_ty outermost;
    int scope_type = c->u->u_scope_type;
    int is_top_level_await = IS_TOP_LEVEL_AWAIT(c);
    PySTEntryObject *entry = PySymtable_Lookup(c->c_st, (void *)e);
    if (entry == NULL) {
        goto error;
    }
    int is_inlined = entry->ste_comp_inlined;
    int is_async_generator = entry->ste_coroutine;

    location loc = LOC(e);

    outermost = (comprehension_ty) asdl_seq_GET(generators, 0);
    if (is_inlined) {
        // evaluate the outermost iterable in the enclosing scope first
        if (compiler_comprehension_iter(c, loc, outermost)) {
            goto error;
        }
        if (push_inlined_comprehension_state(c, loc, entry, &inline_state)) {
            goto error;
        }
    }
    else {
        if (compiler_enter_scope(c, name, COMPILER_SCOPE_COMPREHENSION,
                                 (void *)e, e->lineno) < 0)
        {
            goto error;
        }
    }
    Py_CLEAR(entry);

    if (is_async_generator && type != COMP_GENEXP &&
        scope_type != COMPILER_SCOPE_ASYNC_FUNCTION &&
        scope_type != COMPILER_SCOPE_COMPREHENSION &&
        !is_top_level_await)
    {
        compiler_error(c, loc, "asynchronous comprehension outside of "
                               "an asynchronous function");
        goto error_in_scope;
    }

    if (type != COMP_GENEXP) {
        // build the empty result container before iterating
        int op;
        switch (type) {
        case COMP_LISTCOMP:
            op = BUILD_LIST;
            break;
        case COMP_SETCOMP:
            op = BUILD_SET;
            break;
        case COMP_DICTCOMP:
            op = BUILD_MAP;
            break;
        default:
            PyErr_Format(PyExc_SystemError,
                         "unknown comprehension type %d", type);
            goto error_in_scope;
        }

        ADDOP_I(c, loc, op, 0);
        if (is_inlined) {
            // keep the already-pushed outermost iterator on top
            ADDOP_I(c, loc, SWAP, 2);
        }
    }

    if (compiler_comprehension_generator(c, loc, generators, 0, 0,
                                         elt, val, type, is_inlined) < 0) {
        goto error_in_scope;
    }

    if (is_inlined) {
        if (pop_inlined_comprehension_state(c, loc, inline_state)) {
            goto error;
        }
        return SUCCESS;
    }

    if (type != COMP_GENEXP) {
        ADDOP(c, LOC(e), RETURN_VALUE);
    }
    if (type == COMP_GENEXP) {
        if (wrap_in_stopiteration_handler(c) < 0) {
            goto error_in_scope;
        }
    }

    co = optimize_and_assemble(c, 1);
    compiler_exit_scope(c);
    if (is_top_level_await && is_async_generator){
        // propagate coroutine-ness to the enclosing top-level scope
        c->u->u_ste->ste_coroutine = 1;
    }
    if (co == NULL) {
        goto error;
    }

    loc = LOC(e);
    if (compiler_make_closure(c, loc, co, 0) < 0) {
        goto error;
    }
    Py_CLEAR(co);

    // evaluate the outermost iterable and pass it as the sole argument
    if (compiler_comprehension_iter(c, loc, outermost)) {
        goto error;
    }

    ADDOP_I(c, loc, CALL, 0);

    if (is_async_generator && type != COMP_GENEXP) {
        // an async list/set/dict comprehension returns an awaitable; await it
        ADDOP_I(c, loc, GET_AWAITABLE, 0);
        ADDOP_LOAD_CONST(c, loc, Py_None);
        ADD_YIELD_FROM(c, loc, 1);
    }

    return SUCCESS;
error_in_scope:
    if (!is_inlined) {
        compiler_exit_scope(c);
    }
error:
    Py_XDECREF(co);
    Py_XDECREF(entry);
    Py_XDECREF(inline_state.pushed_locals);
    Py_XDECREF(inline_state.temp_symbols);
    Py_XDECREF(inline_state.fast_hidden);
    return ERROR;
}
5744
5745
static int
5746
compiler_genexp(struct compiler *c, expr_ty e)
5747
{
5748
assert(e->kind == GeneratorExp_kind);
5749
_Py_DECLARE_STR(anon_genexpr, "<genexpr>");
5750
return compiler_comprehension(c, e, COMP_GENEXP, &_Py_STR(anon_genexpr),
5751
e->v.GeneratorExp.generators,
5752
e->v.GeneratorExp.elt, NULL);
5753
}
5754
5755
static int
5756
compiler_listcomp(struct compiler *c, expr_ty e)
5757
{
5758
assert(e->kind == ListComp_kind);
5759
_Py_DECLARE_STR(anon_listcomp, "<listcomp>");
5760
return compiler_comprehension(c, e, COMP_LISTCOMP, &_Py_STR(anon_listcomp),
5761
e->v.ListComp.generators,
5762
e->v.ListComp.elt, NULL);
5763
}
5764
5765
static int
5766
compiler_setcomp(struct compiler *c, expr_ty e)
5767
{
5768
assert(e->kind == SetComp_kind);
5769
_Py_DECLARE_STR(anon_setcomp, "<setcomp>");
5770
return compiler_comprehension(c, e, COMP_SETCOMP, &_Py_STR(anon_setcomp),
5771
e->v.SetComp.generators,
5772
e->v.SetComp.elt, NULL);
5773
}
5774
5775
5776
static int
5777
compiler_dictcomp(struct compiler *c, expr_ty e)
5778
{
5779
assert(e->kind == DictComp_kind);
5780
_Py_DECLARE_STR(anon_dictcomp, "<dictcomp>");
5781
return compiler_comprehension(c, e, COMP_DICTCOMP, &_Py_STR(anon_dictcomp),
5782
e->v.DictComp.generators,
5783
e->v.DictComp.key, e->v.DictComp.value);
5784
}
5785
5786
5787
/* Emit code that pushes the value of a keyword argument onto the stack. */
static int
compiler_visit_keyword(struct compiler *c, keyword_ty k)
{
    VISIT(c, expr, k->value);
    return SUCCESS;
}
5793
5794
5795
/* Emit the tail of a with/async-with exception handler.
 *
 * On entry the (awaited, for async) result of __exit__/__aexit__ is on top
 * of the stack.  If it is truthy, the exception is suppressed: pop the
 * exception state and continue after the with block.  Otherwise re-raise.
 * 'cleanup' handles an exception raised while inside this handler itself. */
static int
compiler_with_except_finish(struct compiler *c, jump_target_label cleanup) {
    NEW_JUMP_TARGET_LABEL(c, suppress);
    ADDOP(c, NO_LOCATION, TO_BOOL);
    ADDOP_JUMP(c, NO_LOCATION, POP_JUMP_IF_TRUE, suppress);
    // __exit__ returned false: re-raise the original exception
    ADDOP_I(c, NO_LOCATION, RERAISE, 2);

    USE_LABEL(c, suppress);
    ADDOP(c, NO_LOCATION, POP_TOP); /* exc_value */
    ADDOP(c, NO_LOCATION, POP_BLOCK);
    ADDOP(c, NO_LOCATION, POP_EXCEPT);
    ADDOP(c, NO_LOCATION, POP_TOP);
    ADDOP(c, NO_LOCATION, POP_TOP);
    NEW_JUMP_TARGET_LABEL(c, exit);
    ADDOP_JUMP(c, NO_LOCATION, JUMP, exit);

    USE_LABEL(c, cleanup);
    POP_EXCEPT_AND_RERAISE(c, NO_LOCATION);

    USE_LABEL(c, exit);
    return SUCCESS;
}
5817
5818
/*
5819
Implements the async with statement.
5820
5821
The semantics outlined in that PEP are as follows:
5822
5823
async with EXPR as VAR:
5824
BLOCK
5825
5826
It is implemented roughly as:
5827
5828
context = EXPR
5829
exit = context.__aexit__ # not calling it
5830
value = await context.__aenter__()
5831
try:
5832
VAR = value # if VAR present in the syntax
5833
BLOCK
5834
finally:
5835
if an exception was raised:
5836
exc = copy of (exception, instance, traceback)
5837
else:
5838
exc = (None, None, None)
5839
if not (await exit(*exc)):
5840
raise
5841
*/
5842
/* Compile the withitem at index 'pos' of an 'async with' statement
 * (see the expansion documented above); recurses for the remaining items
 * so that nested SETUP_WITH regions unwind in the right order. */
static int
compiler_async_with(struct compiler *c, stmt_ty s, int pos)
{
    location loc = LOC(s);
    withitem_ty item = asdl_seq_GET(s->v.AsyncWith.items, pos);

    assert(s->kind == AsyncWith_kind);
    if (IS_TOP_LEVEL_AWAIT(c)){
        // top-level await is allowed (e.g. in the asyncio REPL); mark the
        // module's symtable entry as a coroutine
        c->u->u_ste->ste_coroutine = 1;
    } else if (c->u->u_scope_type != COMPILER_SCOPE_ASYNC_FUNCTION){
        return compiler_error(c, loc, "'async with' outside async function");
    }

    NEW_JUMP_TARGET_LABEL(c, block);
    NEW_JUMP_TARGET_LABEL(c, final);
    NEW_JUMP_TARGET_LABEL(c, exit);
    NEW_JUMP_TARGET_LABEL(c, cleanup);

    /* Evaluate EXPR */
    VISIT(c, expr, item->context_expr);

    // push __aexit__, then await __aenter__()
    ADDOP(c, loc, BEFORE_ASYNC_WITH);
    ADDOP_I(c, loc, GET_AWAITABLE, 1);
    ADDOP_LOAD_CONST(c, loc, Py_None);
    ADD_YIELD_FROM(c, loc, 1);

    ADDOP_JUMP(c, loc, SETUP_WITH, final);

    /* SETUP_WITH pushes a finally block. */
    USE_LABEL(c, block);
    RETURN_IF_ERROR(compiler_push_fblock(c, loc, ASYNC_WITH, block, final, s));

    if (item->optional_vars) {
        VISIT(c, expr, item->optional_vars);
    }
    else {
        /* Discard result from context.__aenter__() */
        ADDOP(c, loc, POP_TOP);
    }

    pos++;
    if (pos == asdl_seq_LEN(s->v.AsyncWith.items)) {
        /* BLOCK code */
        VISIT_SEQ(c, stmt, s->v.AsyncWith.body)
    }
    else {
        // compile the remaining items nested inside this one
        RETURN_IF_ERROR(compiler_async_with(c, s, pos));
    }

    compiler_pop_fblock(c, ASYNC_WITH, block);

    ADDOP(c, loc, POP_BLOCK);
    /* End of body; start the cleanup */

    /* For successful outcome:
     * call __exit__(None, None, None)
     */
    RETURN_IF_ERROR(compiler_call_exit_with_nones(c, loc));
    ADDOP_I(c, loc, GET_AWAITABLE, 2);
    ADDOP_LOAD_CONST(c, loc, Py_None);
    ADD_YIELD_FROM(c, loc, 1);

    ADDOP(c, loc, POP_TOP);

    ADDOP_JUMP(c, loc, JUMP, exit);

    /* For exceptional outcome: */
    USE_LABEL(c, final);

    ADDOP_JUMP(c, loc, SETUP_CLEANUP, cleanup);
    ADDOP(c, loc, PUSH_EXC_INFO);
    ADDOP(c, loc, WITH_EXCEPT_START);
    // await the result of __aexit__ before testing it
    ADDOP_I(c, loc, GET_AWAITABLE, 2);
    ADDOP_LOAD_CONST(c, loc, Py_None);
    ADD_YIELD_FROM(c, loc, 1);
    RETURN_IF_ERROR(compiler_with_except_finish(c, cleanup));

    USE_LABEL(c, exit);
    return SUCCESS;
}
5922
5923
5924
/*
5925
Implements the with statement from PEP 343.
5926
with EXPR as VAR:
5927
BLOCK
5928
is implemented as:
5929
<code for EXPR>
5930
SETUP_WITH E
5931
<code to store to VAR> or POP_TOP
5932
<code for BLOCK>
5933
LOAD_CONST (None, None, None)
5934
CALL_FUNCTION_EX 0
5935
JUMP EXIT
5936
E: WITH_EXCEPT_START (calls EXPR.__exit__)
5937
POP_JUMP_IF_TRUE T:
5938
RERAISE
5939
T: POP_TOP (remove exception from stack)
5940
POP_EXCEPT
5941
POP_TOP
5942
EXIT:
5943
*/
5944
5945
/* Compile the withitem at index 'pos' of a 'with' statement (see the
 * expansion documented above); recurses for the remaining items so that
 * nested SETUP_WITH regions unwind in the right order. */
static int
compiler_with(struct compiler *c, stmt_ty s, int pos)
{
    withitem_ty item = asdl_seq_GET(s->v.With.items, pos);

    assert(s->kind == With_kind);

    NEW_JUMP_TARGET_LABEL(c, block);
    NEW_JUMP_TARGET_LABEL(c, final);
    NEW_JUMP_TARGET_LABEL(c, exit);
    NEW_JUMP_TARGET_LABEL(c, cleanup);

    /* Evaluate EXPR */
    VISIT(c, expr, item->context_expr);
    /* Will push bound __exit__ */
    location loc = LOC(s);
    ADDOP(c, loc, BEFORE_WITH);
    ADDOP_JUMP(c, loc, SETUP_WITH, final);

    /* SETUP_WITH pushes a finally block. */
    USE_LABEL(c, block);
    RETURN_IF_ERROR(compiler_push_fblock(c, loc, WITH, block, final, s));

    if (item->optional_vars) {
        VISIT(c, expr, item->optional_vars);
    }
    else {
    /* Discard result from context.__enter__() */
        ADDOP(c, loc, POP_TOP);
    }

    pos++;
    if (pos == asdl_seq_LEN(s->v.With.items)) {
        /* BLOCK code */
        VISIT_SEQ(c, stmt, s->v.With.body)
    }
    else {
        // compile the remaining items nested inside this one
        RETURN_IF_ERROR(compiler_with(c, s, pos));
    }

    ADDOP(c, NO_LOCATION, POP_BLOCK);
    compiler_pop_fblock(c, WITH, block);

    /* End of body; start the cleanup. */

    /* For successful outcome:
     * call __exit__(None, None, None)
     */
    loc = LOC(s);
    RETURN_IF_ERROR(compiler_call_exit_with_nones(c, loc));
    ADDOP(c, loc, POP_TOP);
    ADDOP_JUMP(c, loc, JUMP, exit);

    /* For exceptional outcome: */
    USE_LABEL(c, final);

    ADDOP_JUMP(c, loc, SETUP_CLEANUP, cleanup);
    ADDOP(c, loc, PUSH_EXC_INFO);
    ADDOP(c, loc, WITH_EXCEPT_START);
    RETURN_IF_ERROR(compiler_with_except_finish(c, cleanup));

    USE_LABEL(c, exit);
    return SUCCESS;
}
6009
6010
/* Main expression dispatcher: emit code that evaluates expression 'e',
 * leaving its value on the stack (or performing the store/delete for
 * expressions used as assignment targets).  Returns SUCCESS or ERROR. */
static int
compiler_visit_expr1(struct compiler *c, expr_ty e)
{
    location loc = LOC(e);
    switch (e->kind) {
    case NamedExpr_kind:
        // walrus: evaluate value, duplicate it, store the copy into target
        VISIT(c, expr, e->v.NamedExpr.value);
        ADDOP_I(c, loc, COPY, 1);
        VISIT(c, expr, e->v.NamedExpr.target);
        break;
    case BoolOp_kind:
        return compiler_boolop(c, e);
    case BinOp_kind:
        VISIT(c, expr, e->v.BinOp.left);
        VISIT(c, expr, e->v.BinOp.right);
        ADDOP_BINARY(c, loc, e->v.BinOp.op);
        break;
    case UnaryOp_kind:
        VISIT(c, expr, e->v.UnaryOp.operand);
        if (e->v.UnaryOp.op == UAdd) {
            ADDOP_I(c, loc, CALL_INTRINSIC_1, INTRINSIC_UNARY_POSITIVE);
        }
        else if (e->v.UnaryOp.op == Not) {
            // 'not x': coerce to bool first, then invert
            ADDOP(c, loc, TO_BOOL);
            ADDOP(c, loc, UNARY_NOT);
        }
        else {
            ADDOP(c, loc, unaryop(e->v.UnaryOp.op));
        }
        break;
    case Lambda_kind:
        return compiler_lambda(c, e);
    case IfExp_kind:
        return compiler_ifexp(c, e);
    case Dict_kind:
        return compiler_dict(c, e);
    case Set_kind:
        return compiler_set(c, e);
    case GeneratorExp_kind:
        return compiler_genexp(c, e);
    case ListComp_kind:
        return compiler_listcomp(c, e);
    case SetComp_kind:
        return compiler_setcomp(c, e);
    case DictComp_kind:
        return compiler_dictcomp(c, e);
    case Yield_kind:
        if (!_PyST_IsFunctionLike(c->u->u_ste)) {
            return compiler_error(c, loc, "'yield' outside function");
        }
        if (e->v.Yield.value) {
            VISIT(c, expr, e->v.Yield.value);
        }
        else {
            // bare 'yield' yields None
            ADDOP_LOAD_CONST(c, loc, Py_None);
        }
        ADDOP_YIELD(c, loc);
        break;
    case YieldFrom_kind:
        if (!_PyST_IsFunctionLike(c->u->u_ste)) {
            return compiler_error(c, loc, "'yield' outside function");
        }
        if (c->u->u_scope_type == COMPILER_SCOPE_ASYNC_FUNCTION) {
            return compiler_error(c, loc, "'yield from' inside async function");
        }
        VISIT(c, expr, e->v.YieldFrom.value);
        ADDOP(c, loc, GET_YIELD_FROM_ITER);
        ADDOP_LOAD_CONST(c, loc, Py_None);
        ADD_YIELD_FROM(c, loc, 0);
        break;
    case Await_kind:
        if (!IS_TOP_LEVEL_AWAIT(c)){
            if (!_PyST_IsFunctionLike(c->u->u_ste)) {
                return compiler_error(c, loc, "'await' outside function");
            }

            // awaits are also legal inside a comprehension scope (the
            // enclosing async function provides the coroutine context)
            if (c->u->u_scope_type != COMPILER_SCOPE_ASYNC_FUNCTION &&
                    c->u->u_scope_type != COMPILER_SCOPE_COMPREHENSION) {
                return compiler_error(c, loc, "'await' outside async function");
            }
        }

        VISIT(c, expr, e->v.Await.value);
        ADDOP_I(c, loc, GET_AWAITABLE, 0);
        ADDOP_LOAD_CONST(c, loc, Py_None);
        ADD_YIELD_FROM(c, loc, 1);
        break;
    case Compare_kind:
        return compiler_compare(c, e);
    case Call_kind:
        return compiler_call(c, e);
    case Constant_kind:
        ADDOP_LOAD_CONST(c, loc, e->v.Constant.value);
        break;
    case JoinedStr_kind:
        return compiler_joined_str(c, e);
    case FormattedValue_kind:
        return compiler_formatted_value(c, e);
    /* The following exprs can be assignment targets. */
    case Attribute_kind:
        // fast path for 'super().attr' / 'super(...).attr' lookups
        if (e->v.Attribute.ctx == Load && can_optimize_super_call(c, e)) {
            RETURN_IF_ERROR(load_args_for_super(c, e->v.Attribute.value));
            int opcode = asdl_seq_LEN(e->v.Attribute.value->v.Call.args) ?
                LOAD_SUPER_ATTR : LOAD_ZERO_SUPER_ATTR;
            ADDOP_NAME(c, loc, opcode, e->v.Attribute.attr, names);
            loc = update_start_location_to_match_attr(c, loc, e);
            ADDOP(c, loc, NOP);
            return SUCCESS;
        }
        VISIT(c, expr, e->v.Attribute.value);
        loc = LOC(e);
        loc = update_start_location_to_match_attr(c, loc, e);
        switch (e->v.Attribute.ctx) {
        case Load:
            ADDOP_NAME(c, loc, LOAD_ATTR, e->v.Attribute.attr, names);
            break;
        case Store:
            if (forbidden_name(c, loc, e->v.Attribute.attr, e->v.Attribute.ctx)) {
                return ERROR;
            }
            ADDOP_NAME(c, loc, STORE_ATTR, e->v.Attribute.attr, names);
            break;
        case Del:
            ADDOP_NAME(c, loc, DELETE_ATTR, e->v.Attribute.attr, names);
            break;
        }
        break;
    case Subscript_kind:
        return compiler_subscript(c, e);
    case Starred_kind:
        switch (e->v.Starred.ctx) {
        case Store:
            /* In all legitimate cases, the Starred node was already replaced
             * by compiler_list/compiler_tuple. XXX: is that okay? */
            return compiler_error(c, loc,
                "starred assignment target must be in a list or tuple");
        default:
            return compiler_error(c, loc,
                "can't use starred expression here");
        }
        break;
    case Slice_kind:
    {
        // n is the number of slice components pushed (2 or 3)
        int n = compiler_slice(c, e);
        RETURN_IF_ERROR(n);
        ADDOP_I(c, loc, BUILD_SLICE, n);
        break;
    }
    case Name_kind:
        return compiler_nameop(c, loc, e->v.Name.id, e->v.Name.ctx);
    /* child nodes of List and Tuple will have expr_context set */
    case List_kind:
        return compiler_list(c, e);
    case Tuple_kind:
        return compiler_tuple(c, e);
    }
    return SUCCESS;
}
6168
6169
static int
6170
compiler_visit_expr(struct compiler *c, expr_ty e)
6171
{
6172
int res = compiler_visit_expr1(c, e);
6173
return res;
6174
}
6175
6176
static bool
6177
is_two_element_slice(expr_ty s)
6178
{
6179
return s->kind == Slice_kind &&
6180
s->v.Slice.step == NULL;
6181
}
6182
6183
/* Compile an augmented assignment (x += v, a.b -= v, a[i] *= v, ...).
 *
 * Strategy: load the target's current value (duplicating the object/index
 * operands with COPY so they can be reused), evaluate the RHS, apply the
 * in-place operator, then store back using SWAPs to put the operands into
 * the order the store opcode expects. */
static int
compiler_augassign(struct compiler *c, stmt_ty s)
{
    assert(s->kind == AugAssign_kind);
    expr_ty e = s->v.AugAssign.target;

    location loc = LOC(e);

    switch (e->kind) {
    case Attribute_kind:
        VISIT(c, expr, e->v.Attribute.value);
        // keep a copy of the object for the STORE_ATTR below
        ADDOP_I(c, loc, COPY, 1);
        loc = update_start_location_to_match_attr(c, loc, e);
        ADDOP_NAME(c, loc, LOAD_ATTR, e->v.Attribute.attr, names);
        break;
    case Subscript_kind:
        VISIT(c, expr, e->v.Subscript.value);
        if (is_two_element_slice(e->v.Subscript.slice)) {
            // obj, start, stop on the stack; copy all three for the store
            RETURN_IF_ERROR(compiler_slice(c, e->v.Subscript.slice));
            ADDOP_I(c, loc, COPY, 3);
            ADDOP_I(c, loc, COPY, 3);
            ADDOP_I(c, loc, COPY, 3);
            ADDOP(c, loc, BINARY_SLICE);
        }
        else {
            // obj, key on the stack; copy both for the store
            VISIT(c, expr, e->v.Subscript.slice);
            ADDOP_I(c, loc, COPY, 2);
            ADDOP_I(c, loc, COPY, 2);
            ADDOP(c, loc, BINARY_SUBSCR);
        }
        break;
    case Name_kind:
        RETURN_IF_ERROR(compiler_nameop(c, loc, e->v.Name.id, Load));
        break;
    default:
        PyErr_Format(PyExc_SystemError,
            "invalid node type (%d) for augmented assignment",
            e->kind);
        return ERROR;
    }

    loc = LOC(s);

    VISIT(c, expr, s->v.AugAssign.value);
    ADDOP_INPLACE(c, loc, s->v.AugAssign.op);

    loc = LOC(e);

    switch (e->kind) {
    case Attribute_kind:
        loc = update_start_location_to_match_attr(c, loc, e);
        // bring the saved object above the result: STORE_ATTR wants
        // value, obj
        ADDOP_I(c, loc, SWAP, 2);
        ADDOP_NAME(c, loc, STORE_ATTR, e->v.Attribute.attr, names);
        break;
    case Subscript_kind:
        if (is_two_element_slice(e->v.Subscript.slice)) {
            // rotate result under obj, start, stop for STORE_SLICE
            ADDOP_I(c, loc, SWAP, 4);
            ADDOP_I(c, loc, SWAP, 3);
            ADDOP_I(c, loc, SWAP, 2);
            ADDOP(c, loc, STORE_SLICE);
        }
        else {
            // rotate result under obj, key for STORE_SUBSCR
            ADDOP_I(c, loc, SWAP, 3);
            ADDOP_I(c, loc, SWAP, 2);
            ADDOP(c, loc, STORE_SUBSCR);
        }
        break;
    case Name_kind:
        return compiler_nameop(c, loc, e->v.Name.id, Store);
    default:
        Py_UNREACHABLE();
    }
    return SUCCESS;
}
6257
6258
/* Emit code that evaluates annotation expression 'e' for its side effects
 * (so NameErrors etc. surface at runtime) and discards the result. */
static int
check_ann_expr(struct compiler *c, expr_ty e)
{
    VISIT(c, expr, e);
    ADDOP(c, LOC(e), POP_TOP);
    return SUCCESS;
}
6265
6266
static int
6267
check_annotation(struct compiler *c, stmt_ty s)
6268
{
6269
/* Annotations of complex targets does not produce anything
6270
under annotations future */
6271
if (c->c_future.ff_features & CO_FUTURE_ANNOTATIONS) {
6272
return SUCCESS;
6273
}
6274
6275
/* Annotations are only evaluated in a module or class. */
6276
if (c->u->u_scope_type == COMPILER_SCOPE_MODULE ||
6277
c->u->u_scope_type == COMPILER_SCOPE_CLASS) {
6278
return check_ann_expr(c, s->v.AnnAssign.annotation);
6279
}
6280
return SUCCESS;
6281
}
6282
6283
static int
6284
check_ann_subscr(struct compiler *c, expr_ty e)
6285
{
6286
/* We check that everything in a subscript is defined at runtime. */
6287
switch (e->kind) {
6288
case Slice_kind:
6289
if (e->v.Slice.lower && check_ann_expr(c, e->v.Slice.lower) < 0) {
6290
return ERROR;
6291
}
6292
if (e->v.Slice.upper && check_ann_expr(c, e->v.Slice.upper) < 0) {
6293
return ERROR;
6294
}
6295
if (e->v.Slice.step && check_ann_expr(c, e->v.Slice.step) < 0) {
6296
return ERROR;
6297
}
6298
return SUCCESS;
6299
case Tuple_kind: {
6300
/* extended slice */
6301
asdl_expr_seq *elts = e->v.Tuple.elts;
6302
Py_ssize_t i, n = asdl_seq_LEN(elts);
6303
for (i = 0; i < n; i++) {
6304
RETURN_IF_ERROR(check_ann_subscr(c, asdl_seq_GET(elts, i)));
6305
}
6306
return SUCCESS;
6307
}
6308
default:
6309
return check_ann_expr(c, e);
6310
}
6311
}
6312
6313
/* Compile an annotated assignment (AnnAssign): perform the assignment if a
 * value is present, record the annotation in __annotations__ for simple
 * names in module/class scope, and otherwise evaluate the annotation's
 * components for their runtime effects (unless the annotations future
 * turns annotations into strings). */
static int
compiler_annassign(struct compiler *c, stmt_ty s)
{
    location loc = LOC(s);
    expr_ty targ = s->v.AnnAssign.target;
    PyObject* mangled;

    assert(s->kind == AnnAssign_kind);

    /* We perform the actual assignment first. */
    if (s->v.AnnAssign.value) {
        VISIT(c, expr, s->v.AnnAssign.value);
        VISIT(c, expr, targ);
    }
    switch (targ->kind) {
    case Name_kind:
        if (forbidden_name(c, loc, targ->v.Name.id, Store)) {
            return ERROR;
        }
        /* If we have a simple name in a module or class, store annotation. */
        if (s->v.AnnAssign.simple &&
            (c->u->u_scope_type == COMPILER_SCOPE_MODULE ||
             c->u->u_scope_type == COMPILER_SCOPE_CLASS)) {
            if (c->c_future.ff_features & CO_FUTURE_ANNOTATIONS) {
                // annotations future: store the annotation's source text
                VISIT(c, annexpr, s->v.AnnAssign.annotation)
            }
            else {
                VISIT(c, expr, s->v.AnnAssign.annotation);
            }
            // __annotations__[mangled_name] = annotation
            ADDOP_NAME(c, loc, LOAD_NAME, &_Py_ID(__annotations__), names);
            mangled = _Py_Mangle(c->u->u_private, targ->v.Name.id);
            ADDOP_LOAD_CONST_NEW(c, loc, mangled);
            ADDOP(c, loc, STORE_SUBSCR);
        }
        break;
    case Attribute_kind:
        if (forbidden_name(c, loc, targ->v.Attribute.attr, Store)) {
            return ERROR;
        }
        if (!s->v.AnnAssign.value &&
            check_ann_expr(c, targ->v.Attribute.value) < 0) {
            return ERROR;
        }
        break;
    case Subscript_kind:
        if (!s->v.AnnAssign.value &&
            (check_ann_expr(c, targ->v.Subscript.value) < 0 ||
             check_ann_subscr(c, targ->v.Subscript.slice) < 0)) {
                return ERROR;
        }
        break;
    default:
        PyErr_Format(PyExc_SystemError,
                     "invalid node type (%d) for annotated assignment",
                     targ->kind);
        return ERROR;
    }
    /* Annotation is evaluated last. */
    if (!s->v.AnnAssign.simple && check_annotation(c, s) < 0) {
        return ERROR;
    }
    return SUCCESS;
}
6376
6377
/* Raises a SyntaxError and returns ERROR.
   If something goes wrong, a different exception may be raised.
*/
6380
6381
static int
compiler_error(struct compiler *c, location loc,
               const char *format, ...)
{
    va_list vargs;
    va_start(vargs, format);
    PyObject *msg = PyUnicode_FromFormatV(format, vargs);
    va_end(vargs);
    if (msg == NULL) {
        return ERROR;
    }
    PyObject *loc_obj = PyErr_ProgramTextObject(c->c_filename, loc.lineno);
    if (loc_obj == NULL) {
        // no source text available; Py_None is immortal, so the
        // unconditional DECREF at exit is harmless
        loc_obj = Py_None;
    }
    // SyntaxError detail tuple: (filename, lineno, col, text, end_lineno,
    // end_col); columns are converted from 0- to 1-based here
    PyObject *args = Py_BuildValue("O(OiiOii)", msg, c->c_filename,
                                   loc.lineno, loc.col_offset + 1, loc_obj,
                                   loc.end_lineno, loc.end_col_offset + 1);
    Py_DECREF(msg);
    if (args == NULL) {
        goto exit;
    }
    PyErr_SetObject(PyExc_SyntaxError, args);
 exit:
    Py_DECREF(loc_obj);
    Py_XDECREF(args);
    return ERROR;
}
6409
6410
/* Emits a SyntaxWarning and returns SUCCESS.
   If the SyntaxWarning is raised as an error, replaces it with a SyntaxError
   and returns ERROR.
*/
6414
static int
6415
compiler_warn(struct compiler *c, location loc,
6416
const char *format, ...)
6417
{
6418
va_list vargs;
6419
va_start(vargs, format);
6420
PyObject *msg = PyUnicode_FromFormatV(format, vargs);
6421
va_end(vargs);
6422
if (msg == NULL) {
6423
return ERROR;
6424
}
6425
if (PyErr_WarnExplicitObject(PyExc_SyntaxWarning, msg, c->c_filename,
6426
loc.lineno, NULL, NULL) < 0)
6427
{
6428
if (PyErr_ExceptionMatches(PyExc_SyntaxWarning)) {
6429
/* Replace the SyntaxWarning exception with a SyntaxError
6430
to get a more accurate error report */
6431
PyErr_Clear();
6432
assert(PyUnicode_AsUTF8(msg) != NULL);
6433
compiler_error(c, loc, PyUnicode_AsUTF8(msg));
6434
}
6435
Py_DECREF(msg);
6436
return ERROR;
6437
}
6438
Py_DECREF(msg);
6439
return SUCCESS;
6440
}
6441
6442
/* Compile a subscript expression a[b] in Load, Store or Del context,
 * using the specialized slice opcodes for two-element slices. */
static int
compiler_subscript(struct compiler *c, expr_ty e)
{
    location loc = LOC(e);
    expr_context_ty ctx = e->v.Subscript.ctx;
    int op = 0;

    if (ctx == Load) {
        // emit compile-time warnings for obviously-bad subscripts
        RETURN_IF_ERROR(check_subscripter(c, e->v.Subscript.value));
        RETURN_IF_ERROR(check_index(c, e->v.Subscript.value, e->v.Subscript.slice));
    }

    VISIT(c, expr, e->v.Subscript.value);
    if (is_two_element_slice(e->v.Subscript.slice) && ctx != Del) {
        // a[i:j] fast path: push start/stop and use the slice opcodes
        RETURN_IF_ERROR(compiler_slice(c, e->v.Subscript.slice));
        if (ctx == Load) {
            ADDOP(c, loc, BINARY_SLICE);
        }
        else {
            assert(ctx == Store);
            ADDOP(c, loc, STORE_SLICE);
        }
    }
    else {
        VISIT(c, expr, e->v.Subscript.slice);
        switch (ctx) {
        case Load:    op = BINARY_SUBSCR; break;
        case Store:   op = STORE_SUBSCR; break;
        case Del:     op = DELETE_SUBSCR; break;
        }
        assert(op);
        ADDOP(c, loc, op);
    }
    return SUCCESS;
}
6477
6478
/* Returns the number of values emitted, which are needed to build
 * the slice, or -1 if there is an error. */
6480
static int
6481
compiler_slice(struct compiler *c, expr_ty s)
6482
{
6483
int n = 2;
6484
assert(s->kind == Slice_kind);
6485
6486
/* only handles the cases where BUILD_SLICE is emitted */
6487
if (s->v.Slice.lower) {
6488
VISIT(c, expr, s->v.Slice.lower);
6489
}
6490
else {
6491
ADDOP_LOAD_CONST(c, LOC(s), Py_None);
6492
}
6493
6494
if (s->v.Slice.upper) {
6495
VISIT(c, expr, s->v.Slice.upper);
6496
}
6497
else {
6498
ADDOP_LOAD_CONST(c, LOC(s), Py_None);
6499
}
6500
6501
if (s->v.Slice.step) {
6502
n++;
6503
VISIT(c, expr, s->v.Slice.step);
6504
}
6505
return n;
6506
}
6507
6508
6509
// PEP 634: Structural Pattern Matching
6510
6511
// To keep things simple, all compiler_pattern_* and pattern_helper_* routines
6512
// follow the convention of consuming TOS (the subject for the given pattern)
6513
// and calling jump_to_fail_pop on failure (no match).
6514
6515
// When calling into these routines, it's important that pc->on_top be kept
6516
// updated to reflect the current number of items that we are using on the top
6517
// of the stack: they will be popped on failure, and any name captures will be
6518
// stored *underneath* them on success. This lets us defer all names stores
6519
// until the *entire* pattern matches.
6520
6521
// True for the wildcard pattern "_" (a MatchAs capture with no name).
#define WILDCARD_CHECK(N) \
    ((N)->kind == MatchAs_kind && !(N)->v.MatchAs.name)

// True for the anonymous star pattern "*_" inside a sequence pattern.
#define WILDCARD_STAR_CHECK(N) \
    ((N)->kind == MatchStar_kind && !(N)->v.MatchStar.name)

// Limit permitted subexpressions, even if the parser & AST validator let them through
#define MATCH_VALUE_EXPR(N) \
    ((N)->kind == Constant_kind || (N)->kind == Attribute_kind)
6530
6531
// Allocate or resize pc->fail_pop to allow for n items to be popped on failure.
static int
ensure_fail_pop(struct compiler *c, pattern_context *pc, Py_ssize_t n)
{
    // Slot i is the jump target for a failure with i items left to pop, so
    // we need n + 1 labels (indices 0..n inclusive):
    Py_ssize_t size = n + 1;
    if (size <= pc->fail_pop_size) {
        return SUCCESS;
    }
    Py_ssize_t needed = sizeof(jump_target_label) * size;
    // Realloc into a temporary so pc->fail_pop stays valid on failure:
    jump_target_label *resized = PyObject_Realloc(pc->fail_pop, needed);
    if (resized == NULL) {
        PyErr_NoMemory();
        return ERROR;
    }
    pc->fail_pop = resized;
    // Create fresh jump targets for the newly added slots:
    while (pc->fail_pop_size < size) {
        NEW_JUMP_TARGET_LABEL(c, new_block);
        pc->fail_pop[pc->fail_pop_size++] = new_block;
    }
    return SUCCESS;
}
6552
6553
// Use op (a jump opcode such as POP_JUMP_IF_FALSE or JUMP) to jump to the
// fail_pop block that pops the correct number of items for the current
// pattern-matching state.
static int
jump_to_fail_pop(struct compiler *c, location loc,
                 pattern_context *pc, int op)
{
    // Pop any items on the top of the stack, plus any objects we were going to
    // capture on success:
    Py_ssize_t pops = pc->on_top + PyList_GET_SIZE(pc->stores);
    RETURN_IF_ERROR(ensure_fail_pop(c, pc, pops));
    ADDOP_JUMP(c, loc, op, pc->fail_pop[pops]);
    return SUCCESS;
}
6565
6566
// Build all of the fail_pop blocks and reset fail_pop.
static int
emit_and_reset_fail_pop(struct compiler *c, location loc,
                        pattern_context *pc)
{
    if (!pc->fail_pop_size) {
        assert(pc->fail_pop == NULL);
        return SUCCESS;
    }
    // Emit the labels from highest index down to 1: falling through from
    // label i into label i-1 pops one item, so reaching label i pops i items
    // in total before landing on fail_pop[0].
    while (--pc->fail_pop_size) {
        USE_LABEL(c, pc->fail_pop[pc->fail_pop_size]);
        if (codegen_addop_noarg(INSTR_SEQUENCE(c), POP_TOP, loc) < 0) {
            // Free the array here; the caller only sees ERROR.
            pc->fail_pop_size = 0;
            PyObject_Free(pc->fail_pop);
            pc->fail_pop = NULL;
            return ERROR;
        }
    }
    USE_LABEL(c, pc->fail_pop[0]);
    PyObject_Free(pc->fail_pop);
    pc->fail_pop = NULL;
    return SUCCESS;
}
6589
6590
static int
6591
compiler_error_duplicate_store(struct compiler *c, location loc, identifier n)
6592
{
6593
return compiler_error(c, loc,
6594
"multiple assignments to name %R in pattern", n);
6595
}
6596
6597
// Duplicate the effect of 3.10's ROT_* instructions using SWAPs.
6598
static int
6599
pattern_helper_rotate(struct compiler *c, location loc, Py_ssize_t count)
6600
{
6601
while (1 < count) {
6602
ADDOP_I(c, loc, SWAP, count--);
6603
}
6604
return SUCCESS;
6605
}
6606
6607
// Record the object on TOS for storage under name n once the whole pattern
// matches. A NULL name is a wildcard: the value is simply popped.
static int
pattern_helper_store_name(struct compiler *c, location loc,
                          identifier n, pattern_context *pc)
{
    if (n == NULL) {
        ADDOP(c, loc, POP_TOP);
        return SUCCESS;
    }
    if (forbidden_name(c, loc, n, Store)) {
        return ERROR;
    }
    // Can't assign to the same name twice:
    int duplicate = PySequence_Contains(pc->stores, n);
    RETURN_IF_ERROR(duplicate);
    if (duplicate) {
        return compiler_error_duplicate_store(c, loc, n);
    }
    // Rotate this object underneath any items we need to preserve:
    Py_ssize_t rotations = pc->on_top + PyList_GET_SIZE(pc->stores) + 1;
    RETURN_IF_ERROR(pattern_helper_rotate(c, loc, rotations));
    // Remember the name; the actual STORE instructions are deferred until
    // the entire pattern has matched (see compiler_match_inner):
    RETURN_IF_ERROR(PyList_Append(pc->stores, n));
    return SUCCESS;
}
6630
6631
6632
// Emit the unpack instruction for a sequence pattern: UNPACK_SEQUENCE when
// there is no starred subpattern, or UNPACK_EX when there is exactly one.
// Rejects multiple stars and oversized star-unpacks.
static int
pattern_unpack_helper(struct compiler *c, location loc,
                      asdl_pattern_seq *elts)
{
    Py_ssize_t n = asdl_seq_LEN(elts);
    int seen_star = 0;
    for (Py_ssize_t i = 0; i < n; i++) {
        pattern_ty elt = asdl_seq_GET(elts, i);
        if (elt->kind == MatchStar_kind && !seen_star) {
            // UNPACK_EX packs the before-star count in the low byte of the
            // oparg and the after-star count in the remaining bits, so both
            // counts are bounded:
            if ((i >= (1 << 8)) ||
                (n-i-1 >= (INT_MAX >> 8))) {
                return compiler_error(c, loc,
                    "too many expressions in "
                    "star-unpacking sequence pattern");
            }
            ADDOP_I(c, loc, UNPACK_EX, (i + ((n-i-1) << 8)));
            seen_star = 1;
        }
        else if (elt->kind == MatchStar_kind) {
            return compiler_error(c, loc,
                "multiple starred expressions in sequence pattern");
        }
    }
    if (!seen_star) {
        ADDOP_I(c, loc, UNPACK_SEQUENCE, n);
    }
    return SUCCESS;
}
6660
6661
// Match a sequence pattern by unpacking every item onto the stack and then
// matching each subpattern against its item. (The star parameter is unused
// here; pattern_unpack_helper locates the starred subpattern itself.)
static int
pattern_helper_sequence_unpack(struct compiler *c, location loc,
                               asdl_pattern_seq *patterns, Py_ssize_t star,
                               pattern_context *pc)
{
    RETURN_IF_ERROR(pattern_unpack_helper(c, loc, patterns));
    Py_ssize_t size = asdl_seq_LEN(patterns);
    // We've now got a bunch of new subjects on the stack. They need to remain
    // there after each subpattern match:
    pc->on_top += size;
    for (Py_ssize_t i = 0; i < size; i++) {
        // One less item to keep track of each time we loop through:
        pc->on_top--;
        pattern_ty pattern = asdl_seq_GET(patterns, i);
        RETURN_IF_ERROR(compiler_pattern_subpattern(c, pattern, pc));
    }
    return SUCCESS;
}
6679
6680
// Like pattern_helper_sequence_unpack, but uses BINARY_SUBSCR instead of
// UNPACK_SEQUENCE / UNPACK_EX. This is more efficient for patterns with a
// starred wildcard like [first, *_] / [first, *_, last] / [*_, last] / etc.
static int
pattern_helper_sequence_subscr(struct compiler *c, location loc,
                               asdl_pattern_seq *patterns, Py_ssize_t star,
                               pattern_context *pc)
{
    // We need to keep the subject around for extracting elements:
    pc->on_top++;
    Py_ssize_t size = asdl_seq_LEN(patterns);
    for (Py_ssize_t i = 0; i < size; i++) {
        pattern_ty pattern = asdl_seq_GET(patterns, i);
        if (WILDCARD_CHECK(pattern)) {
            // Wildcards don't need their element extracted at all:
            continue;
        }
        if (i == star) {
            // This helper is only used when the star is a wildcard:
            assert(WILDCARD_STAR_CHECK(pattern));
            continue;
        }
        ADDOP_I(c, loc, COPY, 1);
        if (i < star) {
            // Before the star: plain nonnegative index i.
            ADDOP_LOAD_CONST_NEW(c, loc, PyLong_FromSsize_t(i));
        }
        else {
            // The subject may not support negative indexing! Compute a
            // nonnegative index: len(subject) - (size - i).
            ADDOP(c, loc, GET_LEN);
            ADDOP_LOAD_CONST_NEW(c, loc, PyLong_FromSsize_t(size - i));
            ADDOP_BINARY(c, loc, Sub);
        }
        ADDOP(c, loc, BINARY_SUBSCR);
        RETURN_IF_ERROR(compiler_pattern_subpattern(c, pattern, pc));
    }
    // Pop the subject, we're done with it:
    pc->on_top--;
    ADDOP(c, loc, POP_TOP);
    return SUCCESS;
}
6719
6720
// Like compiler_pattern, but turn off checks for irrefutability.
static int
compiler_pattern_subpattern(struct compiler *c,
                            pattern_ty p, pattern_context *pc)
{
    int allow_irrefutable = pc->allow_irrefutable;
    pc->allow_irrefutable = 1;
    // NOTE: if compiler_pattern fails, the flag is left set; compilation is
    // abandoned at that point, so the stale value is never observed.
    RETURN_IF_ERROR(compiler_pattern(c, p, pc));
    pc->allow_irrefutable = allow_irrefutable;
    return SUCCESS;
}
6731
6732
// Compile a MatchAs node: either a bare capture / wildcard (when
// p->v.MatchAs.pattern is NULL), or "<pattern> as <name>".
static int
compiler_pattern_as(struct compiler *c, pattern_ty p, pattern_context *pc)
{
    assert(p->kind == MatchAs_kind);
    if (p->v.MatchAs.pattern == NULL) {
        // An irrefutable match:
        if (!pc->allow_irrefutable) {
            if (p->v.MatchAs.name) {
                const char *e = "name capture %R makes remaining patterns unreachable";
                return compiler_error(c, LOC(p), e, p->v.MatchAs.name);
            }
            const char *e = "wildcard makes remaining patterns unreachable";
            return compiler_error(c, LOC(p), e);
        }
        return pattern_helper_store_name(c, LOC(p), p->v.MatchAs.name, pc);
    }
    // Need to make a copy for (possibly) storing later:
    pc->on_top++;
    ADDOP_I(c, LOC(p), COPY, 1);
    RETURN_IF_ERROR(compiler_pattern(c, p->v.MatchAs.pattern, pc));
    // Success! Store it:
    pc->on_top--;
    RETURN_IF_ERROR(pattern_helper_store_name(c, LOC(p), p->v.MatchAs.name, pc));
    return SUCCESS;
}
6757
6758
static int
6759
compiler_pattern_star(struct compiler *c, pattern_ty p, pattern_context *pc)
6760
{
6761
assert(p->kind == MatchStar_kind);
6762
RETURN_IF_ERROR(
6763
pattern_helper_store_name(c, LOC(p), p->v.MatchStar.name, pc));
6764
return SUCCESS;
6765
}
6766
6767
// Validate the keyword attribute names of a class pattern: reject forbidden
// assignment targets and repeated attribute names.
static int
validate_kwd_attrs(struct compiler *c, asdl_identifier_seq *attrs, asdl_pattern_seq* patterns)
{
    // Any errors will point to the pattern rather than the arg name as the
    // parser is only supplying identifiers rather than Name or keyword nodes
    Py_ssize_t nattrs = asdl_seq_LEN(attrs);
    for (Py_ssize_t i = 0; i < nattrs; i++) {
        identifier attr = ((identifier)asdl_seq_GET(attrs, i));
        location loc = LOC((pattern_ty) asdl_seq_GET(patterns, i));
        if (forbidden_name(c, loc, attr, Store)) {
            return ERROR;
        }
        // O(n**2) duplicate scan; nattrs is small in practice.
        // NOTE(review): PyUnicode_Compare returns -1 both for "less than" and
        // on error; these are parser-produced identifier strings, so a real
        // comparison failure is presumably not possible here -- confirm.
        for (Py_ssize_t j = i + 1; j < nattrs; j++) {
            identifier other = ((identifier)asdl_seq_GET(attrs, j));
            if (!PyUnicode_Compare(attr, other)) {
                location loc = LOC((pattern_ty) asdl_seq_GET(patterns, j));
                compiler_error(c, loc, "attribute name repeated in class pattern: %U", attr);
                return ERROR;
            }
        }
    }
    return SUCCESS;
}
6790
6791
// Compile a MatchClass node: Cls(positional..., kw=...). Emits MATCH_CLASS,
// which replaces the subject with a tuple of extracted attributes (or None
// on failure), then matches each subpattern against its extracted value.
static int
compiler_pattern_class(struct compiler *c, pattern_ty p, pattern_context *pc)
{
    assert(p->kind == MatchClass_kind);
    asdl_pattern_seq *patterns = p->v.MatchClass.patterns;
    asdl_identifier_seq *kwd_attrs = p->v.MatchClass.kwd_attrs;
    asdl_pattern_seq *kwd_patterns = p->v.MatchClass.kwd_patterns;
    Py_ssize_t nargs = asdl_seq_LEN(patterns);
    Py_ssize_t nattrs = asdl_seq_LEN(kwd_attrs);
    Py_ssize_t nkwd_patterns = asdl_seq_LEN(kwd_patterns);
    if (nattrs != nkwd_patterns) {
        // AST validator shouldn't let this happen, but if it does,
        // just fail, don't crash out of the interpreter
        const char * e = "kwd_attrs (%d) / kwd_patterns (%d) length mismatch in class pattern";
        return compiler_error(c, LOC(p), e, nattrs, nkwd_patterns);
    }
    if (INT_MAX < nargs || INT_MAX < nargs + nattrs - 1) {
        const char *e = "too many sub-patterns in class pattern %R";
        return compiler_error(c, LOC(p), e, p->v.MatchClass.cls);
    }
    if (nattrs) {
        RETURN_IF_ERROR(validate_kwd_attrs(c, kwd_attrs, kwd_patterns));
    }
    VISIT(c, expr, p->v.MatchClass.cls);
    // Build the tuple of keyword attribute names for MATCH_CLASS:
    PyObject *attr_names = PyTuple_New(nattrs);
    if (attr_names == NULL) {
        return ERROR;
    }
    Py_ssize_t i;
    for (i = 0; i < nattrs; i++) {
        PyObject *name = asdl_seq_GET(kwd_attrs, i);
        PyTuple_SET_ITEM(attr_names, i, Py_NewRef(name));
    }
    ADDOP_LOAD_CONST_NEW(c, LOC(p), attr_names);
    ADDOP_I(c, LOC(p), MATCH_CLASS, nargs);
    ADDOP_I(c, LOC(p), COPY, 1);
    ADDOP_LOAD_CONST(c, LOC(p), Py_None);
    ADDOP_I(c, LOC(p), IS_OP, 1);
    // TOS is now a tuple of (nargs + nattrs) attributes (or None):
    pc->on_top++;
    RETURN_IF_ERROR(jump_to_fail_pop(c, LOC(p), pc, POP_JUMP_IF_FALSE));
    ADDOP_I(c, LOC(p), UNPACK_SEQUENCE, nargs + nattrs);
    // The unpacked items replace the tuple on the stack (net +nargs+nattrs-1):
    pc->on_top += nargs + nattrs - 1;
    for (i = 0; i < nargs + nattrs; i++) {
        // Each subpattern consumes one unpacked item:
        pc->on_top--;
        pattern_ty pattern;
        if (i < nargs) {
            // Positional:
            pattern = asdl_seq_GET(patterns, i);
        }
        else {
            // Keyword:
            pattern = asdl_seq_GET(kwd_patterns, i - nargs);
        }
        if (WILDCARD_CHECK(pattern)) {
            ADDOP(c, LOC(p), POP_TOP);
            continue;
        }
        RETURN_IF_ERROR(compiler_pattern_subpattern(c, pattern, pc));
    }
    // Success! Every unpacked attribute has been consumed above.
    return SUCCESS;
}
6854
6855
// Compile a MatchMapping node: {key: pattern, ..., **rest}. Checks that the
// subject is a mapping and contains all keys, matches each value subpattern,
// and optionally binds the remaining items to a **rest name.
static int
compiler_pattern_mapping(struct compiler *c, pattern_ty p,
                         pattern_context *pc)
{
    assert(p->kind == MatchMapping_kind);
    asdl_expr_seq *keys = p->v.MatchMapping.keys;
    asdl_pattern_seq *patterns = p->v.MatchMapping.patterns;
    Py_ssize_t size = asdl_seq_LEN(keys);
    Py_ssize_t npatterns = asdl_seq_LEN(patterns);
    if (size != npatterns) {
        // AST validator shouldn't let this happen, but if it does,
        // just fail, don't crash out of the interpreter
        const char * e = "keys (%d) / patterns (%d) length mismatch in mapping pattern";
        return compiler_error(c, LOC(p), e, size, npatterns);
    }
    // We have a double-star target if "rest" is set
    PyObject *star_target = p->v.MatchMapping.rest;
    // We need to keep the subject on top during the mapping and length checks:
    pc->on_top++;
    ADDOP(c, LOC(p), MATCH_MAPPING);
    RETURN_IF_ERROR(jump_to_fail_pop(c, LOC(p), pc, POP_JUMP_IF_FALSE));
    if (!size && !star_target) {
        // If the pattern is just "{}", we're done! Pop the subject:
        pc->on_top--;
        ADDOP(c, LOC(p), POP_TOP);
        return SUCCESS;
    }
    if (size) {
        // If the pattern has any keys in it, perform a length check:
        ADDOP(c, LOC(p), GET_LEN);
        ADDOP_LOAD_CONST_NEW(c, LOC(p), PyLong_FromSsize_t(size));
        ADDOP_COMPARE(c, LOC(p), GtE);
        RETURN_IF_ERROR(jump_to_fail_pop(c, LOC(p), pc, POP_JUMP_IF_FALSE));
    }
    if (INT_MAX < size - 1) {
        return compiler_error(c, LOC(p), "too many sub-patterns in mapping pattern");
    }
    // Collect all of the keys into a tuple for MATCH_KEYS and
    // **rest. They can either be dotted names or literals:

    // Maintaining a set of Constant_kind kind keys allows us to raise a
    // SyntaxError in the case of duplicates.
    PyObject *seen = PySet_New(NULL);
    if (seen == NULL) {
        return ERROR;
    }

    // NOTE: goto error on failure in the loop below to avoid leaking `seen`
    for (Py_ssize_t i = 0; i < size; i++) {
        expr_ty key = asdl_seq_GET(keys, i);
        if (key == NULL) {
            const char *e = "can't use NULL keys in MatchMapping "
                            "(set 'rest' parameter instead)";
            location loc = LOC((pattern_ty) asdl_seq_GET(patterns, i));
            compiler_error(c, loc, e);
            goto error;
        }

        if (key->kind == Constant_kind) {
            // Duplicate literal keys are a SyntaxError:
            int in_seen = PySet_Contains(seen, key->v.Constant.value);
            if (in_seen < 0) {
                goto error;
            }
            if (in_seen) {
                const char *e = "mapping pattern checks duplicate key (%R)";
                compiler_error(c, LOC(p), e, key->v.Constant.value);
                goto error;
            }
            if (PySet_Add(seen, key->v.Constant.value)) {
                goto error;
            }
        }

        else if (key->kind != Attribute_kind) {
            const char *e = "mapping pattern keys may only match literals and attribute lookups";
            compiler_error(c, LOC(p), e);
            goto error;
        }
        if (compiler_visit_expr(c, key) < 0) {
            goto error;
        }
    }

    // all keys have been checked; there are no duplicates
    Py_DECREF(seen);

    ADDOP_I(c, LOC(p), BUILD_TUPLE, size);
    ADDOP(c, LOC(p), MATCH_KEYS);
    // There's now a tuple of keys and a tuple of values on top of the subject:
    pc->on_top += 2;
    ADDOP_I(c, LOC(p), COPY, 1);
    ADDOP_LOAD_CONST(c, LOC(p), Py_None);
    ADDOP_I(c, LOC(p), IS_OP, 1);
    RETURN_IF_ERROR(jump_to_fail_pop(c, LOC(p), pc, POP_JUMP_IF_FALSE));
    // So far so good. Use that tuple of values on the stack to match
    // sub-patterns against:
    ADDOP_I(c, LOC(p), UNPACK_SEQUENCE, size);
    pc->on_top += size - 1;
    for (Py_ssize_t i = 0; i < size; i++) {
        pc->on_top--;
        pattern_ty pattern = asdl_seq_GET(patterns, i);
        RETURN_IF_ERROR(compiler_pattern_subpattern(c, pattern, pc));
    }
    // If we get this far, it's a match! Whatever happens next should consume
    // the tuple of keys and the subject:
    pc->on_top -= 2;
    if (star_target) {
        // If we have a starred name, bind a dict of remaining items to it (this may
        // seem a bit inefficient, but keys is rarely big enough to actually impact
        // runtime):
        // rest = dict(TOS1)
        // for key in TOS:
        //     del rest[key]
        ADDOP_I(c, LOC(p), BUILD_MAP, 0);           // [subject, keys, empty]
        ADDOP_I(c, LOC(p), SWAP, 3);                // [empty, keys, subject]
        ADDOP_I(c, LOC(p), DICT_UPDATE, 2);         // [copy, keys]
        ADDOP_I(c, LOC(p), UNPACK_SEQUENCE, size);  // [copy, keys...]
        while (size) {
            ADDOP_I(c, LOC(p), COPY, 1 + size--);   // [copy, keys..., copy]
            ADDOP_I(c, LOC(p), SWAP, 2);            // [copy, keys..., copy, key]
            ADDOP(c, LOC(p), DELETE_SUBSCR);        // [copy, keys...]
        }
        RETURN_IF_ERROR(pattern_helper_store_name(c, LOC(p), star_target, pc));
    }
    else {
        ADDOP(c, LOC(p), POP_TOP);  // Tuple of keys.
        ADDOP(c, LOC(p), POP_TOP);  // Subject.
    }
    return SUCCESS;

error:
    Py_DECREF(seen);
    return ERROR;
}
6989
6990
// Compile a MatchOr node: alt1 | alt2 | ... Each alternative is tried against
// a fresh copy of the subject. All alternatives must bind exactly the same
// set of names; captures from later alternatives are reordered on the stack
// to match the order established by the first ("control") alternative.
static int
compiler_pattern_or(struct compiler *c, pattern_ty p, pattern_context *pc)
{
    assert(p->kind == MatchOr_kind);
    NEW_JUMP_TARGET_LABEL(c, end);
    Py_ssize_t size = asdl_seq_LEN(p->v.MatchOr.patterns);
    assert(size > 1);
    // We're going to be messing with pc. Keep the original info handy:
    pattern_context old_pc = *pc;
    // Extra reference, matched by the Py_DECREF(old_pc.stores) on both the
    // success and error paths:
    Py_INCREF(pc->stores);
    // control is the list of names bound by the first alternative. It is used
    // for checking different name bindings in alternatives, and for correcting
    // the order in which extracted elements are placed on the stack.
    PyObject *control = NULL;
    // NOTE: We can't use returning macros anymore! goto error on error.
    for (Py_ssize_t i = 0; i < size; i++) {
        pattern_ty alt = asdl_seq_GET(p->v.MatchOr.patterns, i);
        PyObject *pc_stores = PyList_New(0);
        if (pc_stores == NULL) {
            goto error;
        }
        Py_SETREF(pc->stores, pc_stores);
        // An irrefutable sub-pattern must be last, if it is allowed at all:
        pc->allow_irrefutable = (i == size - 1) && old_pc.allow_irrefutable;
        pc->fail_pop = NULL;
        pc->fail_pop_size = 0;
        pc->on_top = 0;
        // Match this alternative against a fresh copy of the subject:
        if (codegen_addop_i(INSTR_SEQUENCE(c), COPY, 1, LOC(alt)) < 0 ||
            compiler_pattern(c, alt, pc) < 0) {
            goto error;
        }
        // Success!
        Py_ssize_t nstores = PyList_GET_SIZE(pc->stores);
        if (!i) {
            // This is the first alternative, so save its stores as a "control"
            // for the others (they can't bind a different set of names, and
            // might need to be reordered):
            assert(control == NULL);
            control = Py_NewRef(pc->stores);
        }
        else if (nstores != PyList_GET_SIZE(control)) {
            goto diff;
        }
        else if (nstores) {
            // There were captures. Check to see if we differ from control:
            Py_ssize_t icontrol = nstores;
            while (icontrol--) {
                PyObject *name = PyList_GET_ITEM(control, icontrol);
                Py_ssize_t istores = PySequence_Index(pc->stores, name);
                if (istores < 0) {
                    // Name missing from this alternative's stores:
                    PyErr_Clear();
                    goto diff;
                }
                if (icontrol != istores) {
                    // Reorder the names on the stack to match the order of the
                    // names in control. There's probably a better way of doing
                    // this; the current solution is potentially very
                    // inefficient when each alternative subpattern binds lots
                    // of names in different orders. It's fine for reasonable
                    // cases, though, and the peephole optimizer will ensure
                    // that the final code is as efficient as possible.
                    assert(istores < icontrol);
                    Py_ssize_t rotations = istores + 1;
                    // Perform the same rotation on pc->stores:
                    PyObject *rotated = PyList_GetSlice(pc->stores, 0,
                                                        rotations);
                    if (rotated == NULL ||
                        PyList_SetSlice(pc->stores, 0, rotations, NULL) ||
                        PyList_SetSlice(pc->stores, icontrol - istores,
                                        icontrol - istores, rotated))
                    {
                        Py_XDECREF(rotated);
                        goto error;
                    }
                    Py_DECREF(rotated);
                    // That just did:
                    // rotated = pc_stores[:rotations]
                    // del pc_stores[:rotations]
                    // pc_stores[icontrol-istores:icontrol-istores] = rotated
                    // Do the same thing to the stack, using several
                    // rotations:
                    while (rotations--) {
                        if (pattern_helper_rotate(c, LOC(alt), icontrol + 1) < 0) {
                            goto error;
                        }
                    }
                }
            }
        }
        assert(control);
        // On match, jump past the remaining alternatives; then emit this
        // alternative's fail_pop blocks for the no-match path:
        if (codegen_addop_j(INSTR_SEQUENCE(c), LOC(alt), JUMP, end) < 0 ||
            emit_and_reset_fail_pop(c, LOC(alt), pc) < 0)
        {
            goto error;
        }
    }
    Py_DECREF(pc->stores);
    *pc = old_pc;
    Py_INCREF(pc->stores);
    // Need to NULL this for the PyObject_Free call in the error block.
    old_pc.fail_pop = NULL;
    // No match. Pop the remaining copy of the subject and fail:
    if (codegen_addop_noarg(INSTR_SEQUENCE(c), POP_TOP, LOC(p)) < 0 ||
        jump_to_fail_pop(c, LOC(p), pc, JUMP) < 0) {
        goto error;
    }

    USE_LABEL(c, end);
    Py_ssize_t nstores = PyList_GET_SIZE(control);
    // There's a bunch of stuff on the stack between where the new stores
    // are and where they need to be:
    // - The other stores.
    // - A copy of the subject.
    // - Anything else that may be on top of the stack.
    // - Any previous stores we've already stashed away on the stack.
    Py_ssize_t nrots = nstores + 1 + pc->on_top + PyList_GET_SIZE(pc->stores);
    for (Py_ssize_t i = 0; i < nstores; i++) {
        // Rotate this capture to its proper place on the stack:
        if (pattern_helper_rotate(c, LOC(p), nrots) < 0) {
            goto error;
        }
        // Update the list of previous stores with this new name, checking for
        // duplicates:
        PyObject *name = PyList_GET_ITEM(control, i);
        int dupe = PySequence_Contains(pc->stores, name);
        if (dupe < 0) {
            goto error;
        }
        if (dupe) {
            compiler_error_duplicate_store(c, LOC(p), name);
            goto error;
        }
        if (PyList_Append(pc->stores, name)) {
            goto error;
        }
    }
    Py_DECREF(old_pc.stores);
    Py_DECREF(control);
    // NOTE: Returning macros are safe again.
    // Pop the copy of the subject:
    ADDOP(c, LOC(p), POP_TOP);
    return SUCCESS;
diff:
    compiler_error(c, LOC(p), "alternative patterns bind different names");
error:
    PyObject_Free(old_pc.fail_pop);
    Py_DECREF(old_pc.stores);
    Py_XDECREF(control);
    return ERROR;
}
7140
7141
7142
// Compile a MatchSequence node: verify the subject is a sequence of the
// right length, then dispatch to one of three strategies (pop-only for
// all-wildcard patterns, subscripting for star-wildcard patterns, or a full
// unpack otherwise).
static int
compiler_pattern_sequence(struct compiler *c, pattern_ty p,
                          pattern_context *pc)
{
    assert(p->kind == MatchSequence_kind);
    asdl_pattern_seq *patterns = p->v.MatchSequence.patterns;
    Py_ssize_t size = asdl_seq_LEN(patterns);
    Py_ssize_t star = -1;
    int only_wildcard = 1;
    int star_wildcard = 0;
    // Find a starred name, if it exists. There may be at most one:
    for (Py_ssize_t i = 0; i < size; i++) {
        pattern_ty pattern = asdl_seq_GET(patterns, i);
        if (pattern->kind == MatchStar_kind) {
            if (star >= 0) {
                const char *e = "multiple starred names in sequence pattern";
                return compiler_error(c, LOC(p), e);
            }
            star_wildcard = WILDCARD_STAR_CHECK(pattern);
            only_wildcard &= star_wildcard;
            star = i;
            continue;
        }
        only_wildcard &= WILDCARD_CHECK(pattern);
    }
    // We need to keep the subject on top during the sequence and length checks:
    pc->on_top++;
    ADDOP(c, LOC(p), MATCH_SEQUENCE);
    RETURN_IF_ERROR(jump_to_fail_pop(c, LOC(p), pc, POP_JUMP_IF_FALSE));
    if (star < 0) {
        // No star: len(subject) == size
        ADDOP(c, LOC(p), GET_LEN);
        ADDOP_LOAD_CONST_NEW(c, LOC(p), PyLong_FromSsize_t(size));
        ADDOP_COMPARE(c, LOC(p), Eq);
        RETURN_IF_ERROR(jump_to_fail_pop(c, LOC(p), pc, POP_JUMP_IF_FALSE));
    }
    else if (size > 1) {
        // Star: len(subject) >= size - 1
        ADDOP(c, LOC(p), GET_LEN);
        ADDOP_LOAD_CONST_NEW(c, LOC(p), PyLong_FromSsize_t(size - 1));
        ADDOP_COMPARE(c, LOC(p), GtE);
        RETURN_IF_ERROR(jump_to_fail_pop(c, LOC(p), pc, POP_JUMP_IF_FALSE));
    }
    // Whatever comes next should consume the subject:
    pc->on_top--;
    if (only_wildcard) {
        // Patterns like: [] / [_] / [_, _] / [*_] / [_, *_] / [_, _, *_] / etc.
        ADDOP(c, LOC(p), POP_TOP);
    }
    else if (star_wildcard) {
        RETURN_IF_ERROR(pattern_helper_sequence_subscr(c, LOC(p), patterns, star, pc));
    }
    else {
        RETURN_IF_ERROR(pattern_helper_sequence_unpack(c, LOC(p), patterns, star, pc));
    }
    return SUCCESS;
}
7199
7200
// Compile a MatchValue node: compare the subject against a literal or a
// dotted-name constant using ==.
static int
compiler_pattern_value(struct compiler *c, pattern_ty p, pattern_context *pc)
{
    assert(p->kind == MatchValue_kind);
    expr_ty value = p->v.MatchValue.value;
    if (!MATCH_VALUE_EXPR(value)) {
        const char *e = "patterns may only match literals and attribute lookups";
        return compiler_error(c, LOC(p), e);
    }
    VISIT(c, expr, value);
    ADDOP_COMPARE(c, LOC(p), Eq);
    ADDOP(c, LOC(p), TO_BOOL);
    RETURN_IF_ERROR(jump_to_fail_pop(c, LOC(p), pc, POP_JUMP_IF_FALSE));
    return SUCCESS;
}
7215
7216
// Compile a MatchSingleton node: compare the subject against None / True /
// False using identity ("is"), not equality.
static int
compiler_pattern_singleton(struct compiler *c, pattern_ty p, pattern_context *pc)
{
    assert(p->kind == MatchSingleton_kind);
    ADDOP_LOAD_CONST(c, LOC(p), p->v.MatchSingleton.value);
    ADDOP_COMPARE(c, LOC(p), Is);
    RETURN_IF_ERROR(jump_to_fail_pop(c, LOC(p), pc, POP_JUMP_IF_FALSE));
    return SUCCESS;
}
7225
7226
// Dispatch a pattern node to its kind-specific compiler. All routines consume
// TOS (the subject) and jump to a fail_pop block on no-match.
static int
compiler_pattern(struct compiler *c, pattern_ty p, pattern_context *pc)
{
    switch (p->kind) {
        case MatchValue_kind:
            return compiler_pattern_value(c, p, pc);
        case MatchSingleton_kind:
            return compiler_pattern_singleton(c, p, pc);
        case MatchSequence_kind:
            return compiler_pattern_sequence(c, p, pc);
        case MatchMapping_kind:
            return compiler_pattern_mapping(c, p, pc);
        case MatchClass_kind:
            return compiler_pattern_class(c, p, pc);
        case MatchStar_kind:
            return compiler_pattern_star(c, p, pc);
        case MatchAs_kind:
            return compiler_pattern_as(c, p, pc);
        case MatchOr_kind:
            return compiler_pattern_or(c, p, pc);
    }
    // AST validator shouldn't let this happen, but if it does,
    // just fail, don't crash out of the interpreter
    const char *e = "invalid match pattern node in AST (kind=%d)";
    return compiler_error(c, LOC(p), e, p->kind);
}
7252
7253
// Compile a whole "match" statement: evaluate the subject, then compile each
// case as (copy subject, match pattern, store captures, check guard, run
// body). A trailing unguarded "case _" is special-cased as a default.
static int
compiler_match_inner(struct compiler *c, stmt_ty s, pattern_context *pc)
{
    VISIT(c, expr, s->v.Match.subject);
    NEW_JUMP_TARGET_LABEL(c, end);
    Py_ssize_t cases = asdl_seq_LEN(s->v.Match.cases);
    assert(cases > 0);
    match_case_ty m = asdl_seq_GET(s->v.Match.cases, cases - 1);
    int has_default = WILDCARD_CHECK(m->pattern) && 1 < cases;
    for (Py_ssize_t i = 0; i < cases - has_default; i++) {
        m = asdl_seq_GET(s->v.Match.cases, i);
        // Only copy the subject if we're *not* on the last case:
        if (i != cases - has_default - 1) {
            ADDOP_I(c, LOC(m->pattern), COPY, 1);
        }
        // Reset per-case pattern state:
        pc->stores = PyList_New(0);
        if (pc->stores == NULL) {
            return ERROR;
        }
        // Irrefutable cases must be either guarded, last, or both:
        pc->allow_irrefutable = m->guard != NULL || i == cases - 1;
        pc->fail_pop = NULL;
        pc->fail_pop_size = 0;
        pc->on_top = 0;
        // NOTE: Can't use returning macros here (they'll leak pc->stores)!
        if (compiler_pattern(c, m->pattern, pc) < 0) {
            Py_DECREF(pc->stores);
            return ERROR;
        }
        assert(!pc->on_top);
        // It's a match! Store all of the captured names (they're on the stack).
        Py_ssize_t nstores = PyList_GET_SIZE(pc->stores);
        for (Py_ssize_t n = 0; n < nstores; n++) {
            PyObject *name = PyList_GET_ITEM(pc->stores, n);
            if (compiler_nameop(c, LOC(m->pattern), name, Store) < 0) {
                Py_DECREF(pc->stores);
                return ERROR;
            }
        }
        Py_DECREF(pc->stores);
        // NOTE: Returning macros are safe again.
        if (m->guard) {
            // A failing guard jumps to fail_pop[0] (nothing left to pop):
            RETURN_IF_ERROR(ensure_fail_pop(c, pc, 0));
            RETURN_IF_ERROR(compiler_jump_if(c, LOC(m->pattern), m->guard, pc->fail_pop[0], 0));
        }
        // Success! Pop the subject off, we're done with it:
        if (i != cases - has_default - 1) {
            ADDOP(c, LOC(m->pattern), POP_TOP);
        }
        VISIT_SEQ(c, stmt, m->body);
        ADDOP_JUMP(c, NO_LOCATION, JUMP, end);
        // If the pattern fails to match, we want the line number of the
        // cleanup to be associated with the failed pattern, not the last line
        // of the body
        RETURN_IF_ERROR(emit_and_reset_fail_pop(c, LOC(m->pattern), pc));
    }
    if (has_default) {
        // A trailing "case _" is common, and lets us save a bit of redundant
        // pushing and popping in the loop above:
        m = asdl_seq_GET(s->v.Match.cases, cases - 1);
        if (cases == 1) {
            // No matches. Done with the subject:
            ADDOP(c, LOC(m->pattern), POP_TOP);
        }
        else {
            // Show line coverage for default case (it doesn't create bytecode)
            ADDOP(c, LOC(m->pattern), NOP);
        }
        if (m->guard) {
            RETURN_IF_ERROR(compiler_jump_if(c, LOC(m->pattern), m->guard, end, 0));
        }
        VISIT_SEQ(c, stmt, m->body);
    }
    USE_LABEL(c, end);
    return SUCCESS;
}
7329
7330
static int
7331
compiler_match(struct compiler *c, stmt_ty s)
7332
{
7333
pattern_context pc;
7334
pc.fail_pop = NULL;
7335
int result = compiler_match_inner(c, s, &pc);
7336
PyObject_Free(pc.fail_pop);
7337
return result;
7338
}
7339
7340
#undef WILDCARD_CHECK
7341
#undef WILDCARD_STAR_CHECK
7342
7343
/* Convert the {constant_key: index} dict into a list of the constants,
 * ordered by their index. Returns a new list, or NULL on allocation failure. */
static PyObject *
consts_dict_keys_inorder(PyObject *dict)
{
    Py_ssize_t size = PyDict_GET_SIZE(dict);
    PyObject *consts = PyList_New(size); /* PyCode_Optimize() requires a list */
    if (consts == NULL) {
        return NULL;
    }
    PyObject *key, *value;
    Py_ssize_t pos = 0;
    while (PyDict_Next(dict, &pos, &key, &value)) {
        Py_ssize_t index = PyLong_AS_LONG(value);
        /* The keys of the dictionary can be tuples wrapping a constant.
         * (see dict_add_o and _PyCode_ConstantKey). In that case
         * the object we want is always second. */
        if (PyTuple_CheckExact(key)) {
            key = PyTuple_GET_ITEM(key, 1);
        }
        assert(0 <= index && index < size);
        PyList_SET_ITEM(consts, index, Py_NewRef(key));
    }
    return consts;
}
7366
7367
static int
7368
compute_code_flags(struct compiler *c)
7369
{
7370
PySTEntryObject *ste = c->u->u_ste;
7371
int flags = 0;
7372
if (_PyST_IsFunctionLike(c->u->u_ste)) {
7373
flags |= CO_NEWLOCALS | CO_OPTIMIZED;
7374
if (ste->ste_nested)
7375
flags |= CO_NESTED;
7376
if (ste->ste_generator && !ste->ste_coroutine)
7377
flags |= CO_GENERATOR;
7378
if (!ste->ste_generator && ste->ste_coroutine)
7379
flags |= CO_COROUTINE;
7380
if (ste->ste_generator && ste->ste_coroutine)
7381
flags |= CO_ASYNC_GENERATOR;
7382
if (ste->ste_varargs)
7383
flags |= CO_VARARGS;
7384
if (ste->ste_varkeywords)
7385
flags |= CO_VARKEYWORDS;
7386
}
7387
7388
/* (Only) inherit compilerflags in PyCF_MASK */
7389
flags |= (c->c_flags.cf_flags & PyCF_MASK);
7390
7391
if ((IS_TOP_LEVEL_AWAIT(c)) &&
7392
ste->ste_coroutine &&
7393
!ste->ste_generator) {
7394
flags |= CO_COROUTINE;
7395
}
7396
7397
return flags;
7398
}
7399
7400
// Merge *obj* with constant cache, so that equal constants share one object.
// Unlike merge_consts_recursive(), this function doesn't work recursively.
// On success, *obj may be replaced (in place) by the cached equivalent.
int
_PyCompile_ConstCacheMergeOne(PyObject *const_cache, PyObject **obj)
{
    assert(PyDict_CheckExact(const_cache));
    // The constant key distinguishes e.g. 0 from 0.0 and False:
    PyObject *key = _PyCode_ConstantKey(*obj);
    if (key == NULL) {
        return ERROR;
    }

    // t is borrowed reference
    PyObject *t = PyDict_SetDefault(const_cache, key, key);
    Py_DECREF(key);
    if (t == NULL) {
        return ERROR;
    }
    if (t == key) {  // obj is new constant.
        return SUCCESS;
    }

    if (PyTuple_CheckExact(t)) {
        // t is still borrowed reference; the wrapped constant is item 1
        // (see _PyCode_ConstantKey).
        t = PyTuple_GET_ITEM(t, 1);
    }

    // Replace *obj with the previously cached object:
    Py_SETREF(*obj, Py_NewRef(t));
    return SUCCESS;
}
7429
7430
7431
// Build the table mapping each cell/free variable's old index to its final
// frame offset: by default cells and frees follow the plain locals, but a
// cell that is also an argument keeps the argument's offset. Returns a
// PyMem-allocated array of (ncellvars + nfreevars) ints, or NULL on OOM.
static int *
build_cellfixedoffsets(_PyCompile_CodeUnitMetadata *umd)
{
    int nlocals = (int)PyDict_GET_SIZE(umd->u_varnames);
    int ncellvars = (int)PyDict_GET_SIZE(umd->u_cellvars);
    int nfreevars = (int)PyDict_GET_SIZE(umd->u_freevars);

    int noffsets = ncellvars + nfreevars;
    int *fixed = PyMem_New(int, noffsets);
    if (fixed == NULL) {
        PyErr_NoMemory();
        return NULL;
    }
    // Default: cells and frees are placed right after the locals.
    for (int i = 0; i < noffsets; i++) {
        fixed[i] = nlocals + i;
    }

    // Cells that are also arguments reuse the argument's offset.
    // NOTE(review): PyDict_GetItem suppresses lookup errors; keys here are
    // presumably interned identifier strings, so a lookup failure other
    // than "not present" is not expected -- confirm.
    PyObject *varname, *cellindex;
    Py_ssize_t pos = 0;
    while (PyDict_Next(umd->u_cellvars, &pos, &varname, &cellindex)) {
        PyObject *varindex = PyDict_GetItem(umd->u_varnames, varname);
        if (varindex != NULL) {
            assert(PyLong_AS_LONG(cellindex) < INT_MAX);
            assert(PyLong_AS_LONG(varindex) < INT_MAX);
            int oldindex = (int)PyLong_AS_LONG(cellindex);
            int argoffset = (int)PyLong_AS_LONG(varindex);
            fixed[oldindex] = argoffset;
        }
    }

    return fixed;
}
7463
7464
/* Insert the implicit prefix instructions at the start of the entry block:
 *  - RETURN_GENERATOR / POP_TOP for generator-like code objects,
 *  - one MAKE_CELL per cell variable (in localsplus-slot order),
 *  - COPY_FREE_VARS when the code object has free variables.
 * `fixed` is the offset table from build_cellfixedoffsets().
 * Returns SUCCESS or ERROR.  NOTE: insertion order matters — the final
 * COPY_FREE_VARS is inserted at index 0, so at runtime it executes first.
 */
static int
insert_prefix_instructions(_PyCompile_CodeUnitMetadata *umd, basicblock *entryblock,
                           int *fixed, int nfreevars, int code_flags)
{
    assert(umd->u_firstlineno > 0);

    /* Add the generator prefix instructions. */
    if (code_flags & (CO_GENERATOR | CO_COROUTINE | CO_ASYNC_GENERATOR)) {
        cfg_instr make_gen = {
            .i_opcode = RETURN_GENERATOR,
            .i_oparg = 0,
            .i_loc = LOCATION(umd->u_firstlineno, umd->u_firstlineno, -1, -1),
            .i_target = NULL,
        };
        RETURN_IF_ERROR(_PyBasicblock_InsertInstruction(entryblock, 0, &make_gen));
        cfg_instr pop_top = {
            .i_opcode = POP_TOP,
            .i_oparg = 0,
            .i_loc = NO_LOCATION,
            .i_target = NULL,
        };
        RETURN_IF_ERROR(_PyBasicblock_InsertInstruction(entryblock, 1, &pop_top));
    }

    /* Set up cells for any variable that escapes, to be put in a closure. */
    const int ncellvars = (int)PyDict_GET_SIZE(umd->u_cellvars);
    if (ncellvars) {
        // umd->u_cellvars has the cells out of order so we sort them
        // before adding the MAKE_CELL instructions. Note that we
        // adjust for arg cells, which come first.
        const int nvars = ncellvars + (int)PyDict_GET_SIZE(umd->u_varnames);
        int *sorted = PyMem_RawCalloc(nvars, sizeof(int));
        if (sorted == NULL) {
            PyErr_NoMemory();
            return ERROR;
        }
        // sorted[slot] holds oldindex+1 so that 0 (from calloc) means
        // "no cell occupies this slot".
        for (int i = 0; i < ncellvars; i++) {
            sorted[fixed[i]] = i + 1;
        }
        for (int i = 0, ncellsused = 0; ncellsused < ncellvars; i++) {
            int oldindex = sorted[i] - 1;
            if (oldindex == -1) {
                continue;  // slot not used by a cell
            }
            cfg_instr make_cell = {
                .i_opcode = MAKE_CELL,
                // This will get fixed in offset_derefs().
                .i_oparg = oldindex,
                .i_loc = NO_LOCATION,
                .i_target = NULL,
            };
            if (_PyBasicblock_InsertInstruction(entryblock, ncellsused, &make_cell) < 0) {
                PyMem_RawFree(sorted);
                return ERROR;
            }
            ncellsused += 1;
        }
        PyMem_RawFree(sorted);
    }

    if (nfreevars) {
        cfg_instr copy_frees = {
            .i_opcode = COPY_FREE_VARS,
            .i_oparg = nfreevars,
            .i_loc = NO_LOCATION,
            .i_target = NULL,
        };
        RETURN_IF_ERROR(_PyBasicblock_InsertInstruction(entryblock, 0, &copy_frees));
    }

    return SUCCESS;
}
7536
7537
/* Rewrite every cell/free-variable oparg in the CFG from its "old" index
 * into the final localsplus slot computed by build_cellfixedoffsets(),
 * compacting out cells that alias arguments.
 * Returns the number of dropped (arg-aliased) cells.  The current
 * implementation never returns a negative value, but callers still treat
 * a negative result as an error.
 */
static int
fix_cell_offsets(_PyCompile_CodeUnitMetadata *umd, basicblock *entryblock, int *fixedmap)
{
    int nlocals = (int)PyDict_GET_SIZE(umd->u_varnames);
    int ncellvars = (int)PyDict_GET_SIZE(umd->u_cellvars);
    int nfreevars = (int)PyDict_GET_SIZE(umd->u_freevars);
    int noffsets = ncellvars + nfreevars;

    // First deal with duplicates (arg cells).
    int numdropped = 0;
    for (int i = 0; i < noffsets ; i++) {
        if (fixedmap[i] == i + nlocals) {
            // Still at its default slot; shift down past dropped cells.
            fixedmap[i] -= numdropped;
        }
        else {
            // It was a duplicate (cell/arg).
            numdropped += 1;
        }
    }

    // Then update offsets, either relative to locals or by cell2arg.
    for (basicblock *b = entryblock; b != NULL; b = b->b_next) {
        for (int i = 0; i < b->b_iused; i++) {
            cfg_instr *inst = &b->b_instr[i];
            // This is called before extended args are generated.
            assert(inst->i_opcode != EXTENDED_ARG);
            int oldoffset = inst->i_oparg;
            // All deref-style opcodes below share the same fixup.
            switch(inst->i_opcode) {
                case MAKE_CELL:
                case LOAD_CLOSURE:
                case LOAD_DEREF:
                case STORE_DEREF:
                case DELETE_DEREF:
                case LOAD_FROM_DICT_OR_DEREF:
                    assert(oldoffset >= 0);
                    assert(oldoffset < noffsets);
                    assert(fixedmap[oldoffset] >= 0);
                    inst->i_oparg = fixedmap[oldoffset];
            }
        }
    }

    return numdropped;
}
7581
7582
7583
static int
7584
prepare_localsplus(_PyCompile_CodeUnitMetadata *umd, cfg_builder *g, int code_flags)
7585
{
7586
assert(PyDict_GET_SIZE(umd->u_varnames) < INT_MAX);
7587
assert(PyDict_GET_SIZE(umd->u_cellvars) < INT_MAX);
7588
assert(PyDict_GET_SIZE(umd->u_freevars) < INT_MAX);
7589
int nlocals = (int)PyDict_GET_SIZE(umd->u_varnames);
7590
int ncellvars = (int)PyDict_GET_SIZE(umd->u_cellvars);
7591
int nfreevars = (int)PyDict_GET_SIZE(umd->u_freevars);
7592
assert(INT_MAX - nlocals - ncellvars > 0);
7593
assert(INT_MAX - nlocals - ncellvars - nfreevars > 0);
7594
int nlocalsplus = nlocals + ncellvars + nfreevars;
7595
int* cellfixedoffsets = build_cellfixedoffsets(umd);
7596
if (cellfixedoffsets == NULL) {
7597
return ERROR;
7598
}
7599
7600
7601
// This must be called before fix_cell_offsets().
7602
if (insert_prefix_instructions(umd, g->g_entryblock, cellfixedoffsets, nfreevars, code_flags)) {
7603
PyMem_Free(cellfixedoffsets);
7604
return ERROR;
7605
}
7606
7607
int numdropped = fix_cell_offsets(umd, g->g_entryblock, cellfixedoffsets);
7608
PyMem_Free(cellfixedoffsets); // At this point we're done with it.
7609
cellfixedoffsets = NULL;
7610
if (numdropped < 0) {
7611
return ERROR;
7612
}
7613
7614
nlocalsplus -= numdropped;
7615
return nlocalsplus;
7616
}
7617
7618
/* Append an implicit `return` to the current instruction stream:
 * LOAD_CONST None (when addNone is true) followed by RETURN_VALUE.
 * Returns SUCCESS or ERROR (the ADDOP* macros return ERROR from this
 * function on failure — see the CAUTION note at the top of this file).
 */
static int
add_return_at_end(struct compiler *c, int addNone)
{
    /* Make sure every instruction stream that falls off the end returns None.
     * This also ensures that no jump target offsets are out of bounds.
     */
    if (addNone) {
        ADDOP_LOAD_CONST(c, NO_LOCATION, Py_None);
    }
    ADDOP(c, NO_LOCATION, RETURN_VALUE);
    return SUCCESS;
}
7630
7631
static int cfg_to_instr_sequence(cfg_builder *g, instr_sequence *seq);
7632
7633
static PyCodeObject *
7634
optimize_and_assemble_code_unit(struct compiler_unit *u, PyObject *const_cache,
7635
int code_flags, PyObject *filename)
7636
{
7637
instr_sequence optimized_instrs;
7638
memset(&optimized_instrs, 0, sizeof(instr_sequence));
7639
7640
PyCodeObject *co = NULL;
7641
PyObject *consts = consts_dict_keys_inorder(u->u_metadata.u_consts);
7642
if (consts == NULL) {
7643
goto error;
7644
}
7645
cfg_builder g;
7646
if (instr_sequence_to_cfg(&u->u_instr_sequence, &g) < 0) {
7647
goto error;
7648
}
7649
int nparams = (int)PyList_GET_SIZE(u->u_ste->ste_varnames);
7650
int nlocals = (int)PyDict_GET_SIZE(u->u_metadata.u_varnames);
7651
assert(u->u_metadata.u_firstlineno);
7652
if (_PyCfg_OptimizeCodeUnit(&g, consts, const_cache, code_flags, nlocals,
7653
nparams, u->u_metadata.u_firstlineno) < 0) {
7654
goto error;
7655
}
7656
7657
/** Assembly **/
7658
int nlocalsplus = prepare_localsplus(&u->u_metadata, &g, code_flags);
7659
if (nlocalsplus < 0) {
7660
goto error;
7661
}
7662
7663
int maxdepth = _PyCfg_Stackdepth(g.g_entryblock, code_flags);
7664
if (maxdepth < 0) {
7665
goto error;
7666
}
7667
7668
_PyCfg_ConvertPseudoOps(g.g_entryblock);
7669
7670
/* Order of basic blocks must have been determined by now */
7671
7672
if (_PyCfg_ResolveJumps(&g) < 0) {
7673
goto error;
7674
}
7675
7676
/* Can't modify the bytecode after computing jump offsets. */
7677
7678
if (cfg_to_instr_sequence(&g, &optimized_instrs) < 0) {
7679
goto error;
7680
}
7681
7682
co = _PyAssemble_MakeCodeObject(&u->u_metadata, const_cache, consts,
7683
maxdepth, &optimized_instrs, nlocalsplus,
7684
code_flags, filename);
7685
7686
error:
7687
Py_XDECREF(consts);
7688
instr_sequence_fini(&optimized_instrs);
7689
_PyCfgBuilder_Fini(&g);
7690
return co;
7691
}
7692
7693
static PyCodeObject *
7694
optimize_and_assemble(struct compiler *c, int addNone)
7695
{
7696
struct compiler_unit *u = c->u;
7697
PyObject *const_cache = c->c_const_cache;
7698
PyObject *filename = c->c_filename;
7699
7700
int code_flags = compute_code_flags(c);
7701
if (code_flags < 0) {
7702
return NULL;
7703
}
7704
7705
if (add_return_at_end(c, addNone) < 0) {
7706
return NULL;
7707
}
7708
7709
return optimize_and_assemble_code_unit(u, const_cache, code_flags, filename);
7710
}
7711
7712
/* Flatten the CFG back into a linear instr_sequence.
 * Every instruction index becomes a label (so any instruction can be a
 * jump target), jump opargs are rewritten to target labels, and
 * per-instruction exception-handler info is copied onto each emitted
 * instruction (h_label == -1 means "no handler").
 * Returns SUCCESS or ERROR.
 */
static int
cfg_to_instr_sequence(cfg_builder *g, instr_sequence *seq)
{
    // First pass: label each block with the index of its first instruction.
    int lbl = 0;
    for (basicblock *b = g->g_entryblock; b != NULL; b = b->b_next) {
        b->b_label = (jump_target_label){lbl};
        lbl += b->b_iused;
    }
    for (basicblock *b = g->g_entryblock; b != NULL; b = b->b_next) {
        RETURN_IF_ERROR(instr_sequence_use_label(seq, b->b_label.id));
        for (int i = 0; i < b->b_iused; i++) {
            cfg_instr *instr = &b->b_instr[i];
            if (OPCODE_HAS_JUMP(instr->i_opcode)) {
                // Replace the block-pointer target with its label id.
                instr->i_oparg = instr->i_target->b_label.id;
            }
            RETURN_IF_ERROR(
                instr_sequence_addop(seq, instr->i_opcode, instr->i_oparg, instr->i_loc));

            // Attach exception-handler metadata to the instruction just added.
            _PyCompile_ExceptHandlerInfo *hi = &seq->s_instrs[seq->s_used-1].i_except_handler_info;
            if (instr->i_except != NULL) {
                hi->h_label = instr->i_except->b_label.id;
                hi->h_startdepth = instr->i_except->b_startdepth;
                hi->h_preserve_lasti = instr->i_except->b_preserve_lasti;
            }
            else {
                hi->h_label = -1;
            }
        }
    }
    return SUCCESS;
}
7743
7744
7745
/* Access to compiler optimizations for unit tests.
7746
*
7747
* _PyCompile_CodeGen takes an AST, applies code-gen and
7748
* returns the unoptimized CFG as an instruction list.
7749
*
7750
* _PyCompile_OptimizeCfg takes an instruction list, constructs
7751
* a CFG, optimizes it and converts back to an instruction list.
7752
*
7753
* An instruction list is a PyList where each item is either
7754
* a tuple describing a single instruction:
7755
* (opcode, oparg, lineno, end_lineno, col, end_col), or
7756
* a jump target label marking the beginning of a basic block.
7757
*/
7758
7759
static int
7760
instructions_to_instr_sequence(PyObject *instructions, instr_sequence *seq)
7761
{
7762
assert(PyList_Check(instructions));
7763
7764
Py_ssize_t num_insts = PyList_GET_SIZE(instructions);
7765
bool *is_target = PyMem_Calloc(num_insts, sizeof(bool));
7766
if (is_target == NULL) {
7767
return ERROR;
7768
}
7769
for (Py_ssize_t i = 0; i < num_insts; i++) {
7770
PyObject *item = PyList_GET_ITEM(instructions, i);
7771
if (!PyTuple_Check(item) || PyTuple_GET_SIZE(item) != 6) {
7772
PyErr_SetString(PyExc_ValueError, "expected a 6-tuple");
7773
goto error;
7774
}
7775
int opcode = PyLong_AsLong(PyTuple_GET_ITEM(item, 0));
7776
if (PyErr_Occurred()) {
7777
goto error;
7778
}
7779
if (HAS_TARGET(opcode)) {
7780
int oparg = PyLong_AsLong(PyTuple_GET_ITEM(item, 1));
7781
if (PyErr_Occurred()) {
7782
goto error;
7783
}
7784
if (oparg < 0 || oparg >= num_insts) {
7785
PyErr_SetString(PyExc_ValueError, "label out of range");
7786
goto error;
7787
}
7788
is_target[oparg] = true;
7789
}
7790
}
7791
7792
for (int i = 0; i < num_insts; i++) {
7793
if (is_target[i]) {
7794
if (instr_sequence_use_label(seq, i) < 0) {
7795
goto error;
7796
}
7797
}
7798
PyObject *item = PyList_GET_ITEM(instructions, i);
7799
if (!PyTuple_Check(item) || PyTuple_GET_SIZE(item) != 6) {
7800
PyErr_SetString(PyExc_ValueError, "expected a 6-tuple");
7801
goto error;
7802
}
7803
int opcode = PyLong_AsLong(PyTuple_GET_ITEM(item, 0));
7804
if (PyErr_Occurred()) {
7805
goto error;
7806
}
7807
int oparg;
7808
if (OPCODE_HAS_ARG(opcode)) {
7809
oparg = PyLong_AsLong(PyTuple_GET_ITEM(item, 1));
7810
if (PyErr_Occurred()) {
7811
goto error;
7812
}
7813
}
7814
else {
7815
oparg = 0;
7816
}
7817
location loc;
7818
loc.lineno = PyLong_AsLong(PyTuple_GET_ITEM(item, 2));
7819
if (PyErr_Occurred()) {
7820
goto error;
7821
}
7822
loc.end_lineno = PyLong_AsLong(PyTuple_GET_ITEM(item, 3));
7823
if (PyErr_Occurred()) {
7824
goto error;
7825
}
7826
loc.col_offset = PyLong_AsLong(PyTuple_GET_ITEM(item, 4));
7827
if (PyErr_Occurred()) {
7828
goto error;
7829
}
7830
loc.end_col_offset = PyLong_AsLong(PyTuple_GET_ITEM(item, 5));
7831
if (PyErr_Occurred()) {
7832
goto error;
7833
}
7834
if (instr_sequence_addop(seq, opcode, oparg, loc) < 0) {
7835
goto error;
7836
}
7837
}
7838
PyMem_Free(is_target);
7839
return SUCCESS;
7840
error:
7841
PyMem_Free(is_target);
7842
return ERROR;
7843
}
7844
7845
static int
7846
instructions_to_cfg(PyObject *instructions, cfg_builder *g)
7847
{
7848
instr_sequence seq;
7849
memset(&seq, 0, sizeof(instr_sequence));
7850
7851
if (instructions_to_instr_sequence(instructions, &seq) < 0) {
7852
goto error;
7853
}
7854
if (instr_sequence_to_cfg(&seq, g) < 0) {
7855
goto error;
7856
}
7857
instr_sequence_fini(&seq);
7858
return SUCCESS;
7859
error:
7860
instr_sequence_fini(&seq);
7861
return ERROR;
7862
}
7863
7864
static PyObject *
7865
instr_sequence_to_instructions(instr_sequence *seq)
7866
{
7867
PyObject *instructions = PyList_New(0);
7868
if (instructions == NULL) {
7869
return NULL;
7870
}
7871
for (int i = 0; i < seq->s_used; i++) {
7872
instruction *instr = &seq->s_instrs[i];
7873
location loc = instr->i_loc;
7874
int arg = HAS_TARGET(instr->i_opcode) ?
7875
seq->s_labelmap[instr->i_oparg] : instr->i_oparg;
7876
7877
PyObject *inst_tuple = Py_BuildValue(
7878
"(iiiiii)", instr->i_opcode, arg,
7879
loc.lineno, loc.end_lineno,
7880
loc.col_offset, loc.end_col_offset);
7881
if (inst_tuple == NULL) {
7882
goto error;
7883
}
7884
7885
int res = PyList_Append(instructions, inst_tuple);
7886
Py_DECREF(inst_tuple);
7887
if (res != 0) {
7888
goto error;
7889
}
7890
}
7891
return instructions;
7892
error:
7893
Py_XDECREF(instructions);
7894
return NULL;
7895
}
7896
7897
static PyObject *
7898
cfg_to_instructions(cfg_builder *g)
7899
{
7900
PyObject *instructions = PyList_New(0);
7901
if (instructions == NULL) {
7902
return NULL;
7903
}
7904
int lbl = 0;
7905
for (basicblock *b = g->g_entryblock; b != NULL; b = b->b_next) {
7906
b->b_label = (jump_target_label){lbl};
7907
lbl += b->b_iused;
7908
}
7909
for (basicblock *b = g->g_entryblock; b != NULL; b = b->b_next) {
7910
for (int i = 0; i < b->b_iused; i++) {
7911
cfg_instr *instr = &b->b_instr[i];
7912
location loc = instr->i_loc;
7913
int arg = HAS_TARGET(instr->i_opcode) ?
7914
instr->i_target->b_label.id : instr->i_oparg;
7915
7916
PyObject *inst_tuple = Py_BuildValue(
7917
"(iiiiii)", instr->i_opcode, arg,
7918
loc.lineno, loc.end_lineno,
7919
loc.col_offset, loc.end_col_offset);
7920
if (inst_tuple == NULL) {
7921
goto error;
7922
}
7923
7924
if (PyList_Append(instructions, inst_tuple) != 0) {
7925
Py_DECREF(inst_tuple);
7926
goto error;
7927
}
7928
Py_DECREF(inst_tuple);
7929
}
7930
}
7931
7932
return instructions;
7933
error:
7934
Py_DECREF(instructions);
7935
return NULL;
7936
}
7937
7938
PyObject *
7939
_PyCompile_CodeGen(PyObject *ast, PyObject *filename, PyCompilerFlags *pflags,
7940
int optimize, int compile_mode)
7941
{
7942
PyObject *res = NULL;
7943
PyObject *metadata = NULL;
7944
7945
if (!PyAST_Check(ast)) {
7946
PyErr_SetString(PyExc_TypeError, "expected an AST");
7947
return NULL;
7948
}
7949
7950
PyArena *arena = _PyArena_New();
7951
if (arena == NULL) {
7952
return NULL;
7953
}
7954
7955
mod_ty mod = PyAST_obj2mod(ast, arena, compile_mode);
7956
if (mod == NULL || !_PyAST_Validate(mod)) {
7957
_PyArena_Free(arena);
7958
return NULL;
7959
}
7960
7961
struct compiler *c = new_compiler(mod, filename, pflags, optimize, arena);
7962
if (c == NULL) {
7963
_PyArena_Free(arena);
7964
return NULL;
7965
}
7966
7967
if (compiler_codegen(c, mod) < 0) {
7968
goto finally;
7969
}
7970
7971
_PyCompile_CodeUnitMetadata *umd = &c->u->u_metadata;
7972
metadata = PyDict_New();
7973
if (metadata == NULL) {
7974
goto finally;
7975
}
7976
#define SET_MATADATA_ITEM(key, value) \
7977
if (value != NULL) { \
7978
if (PyDict_SetItemString(metadata, key, value) < 0) goto finally; \
7979
}
7980
7981
SET_MATADATA_ITEM("name", umd->u_name);
7982
SET_MATADATA_ITEM("qualname", umd->u_qualname);
7983
SET_MATADATA_ITEM("consts", umd->u_consts);
7984
SET_MATADATA_ITEM("names", umd->u_names);
7985
SET_MATADATA_ITEM("varnames", umd->u_varnames);
7986
SET_MATADATA_ITEM("cellvars", umd->u_cellvars);
7987
SET_MATADATA_ITEM("freevars", umd->u_freevars);
7988
#undef SET_MATADATA_ITEM
7989
7990
#define SET_MATADATA_INT(key, value) do { \
7991
PyObject *v = PyLong_FromLong((long)value); \
7992
if (v == NULL) goto finally; \
7993
int res = PyDict_SetItemString(metadata, key, v); \
7994
Py_XDECREF(v); \
7995
if (res < 0) goto finally; \
7996
} while (0);
7997
7998
SET_MATADATA_INT("argcount", umd->u_argcount);
7999
SET_MATADATA_INT("posonlyargcount", umd->u_posonlyargcount);
8000
SET_MATADATA_INT("kwonlyargcount", umd->u_kwonlyargcount);
8001
#undef SET_MATADATA_INT
8002
8003
int addNone = mod->kind != Expression_kind;
8004
if (add_return_at_end(c, addNone) < 0) {
8005
goto finally;
8006
}
8007
8008
PyObject *insts = instr_sequence_to_instructions(INSTR_SEQUENCE(c));
8009
if (insts == NULL) {
8010
goto finally;
8011
}
8012
res = PyTuple_Pack(2, insts, metadata);
8013
Py_DECREF(insts);
8014
8015
finally:
8016
Py_XDECREF(metadata);
8017
compiler_exit_scope(c);
8018
compiler_free(c);
8019
_PyArena_Free(arena);
8020
return res;
8021
}
8022
8023
/* Test-support entry point: build a CFG from *instructions*, run the CFG
 * optimizations on it, and convert it back into an instruction list.
 * Returns a new list, or NULL with an exception set.
 */
PyObject *
_PyCompile_OptimizeCfg(PyObject *instructions, PyObject *consts, int nlocals)
{
    PyObject *res = NULL;
    PyObject *const_cache = PyDict_New();
    if (const_cache == NULL) {
        return NULL;
    }

    cfg_builder g;
    if (instructions_to_cfg(instructions, &g) < 0) {
        /* `g` may be entirely uninitialized when the conversion fails, so
         * do not run _PyCfgBuilder_Fini() on it (the previous error path
         * did, reading indeterminate memory). */
        Py_DECREF(const_cache);
        return NULL;
    }
    int code_flags = 0, nparams = 0, firstlineno = 1;
    if (_PyCfg_OptimizeCodeUnit(&g, consts, const_cache, code_flags, nlocals,
                                nparams, firstlineno) < 0) {
        goto error;
    }
    res = cfg_to_instructions(&g);
error:
    Py_DECREF(const_cache);
    _PyCfgBuilder_Fini(&g);
    return res;
}
8047
8048
int _PyCfg_JumpLabelsToTargets(basicblock *entryblock);
8049
8050
PyCodeObject *
8051
_PyCompile_Assemble(_PyCompile_CodeUnitMetadata *umd, PyObject *filename,
8052
PyObject *instructions)
8053
{
8054
PyCodeObject *co = NULL;
8055
instr_sequence optimized_instrs;
8056
memset(&optimized_instrs, 0, sizeof(instr_sequence));
8057
8058
PyObject *const_cache = PyDict_New();
8059
if (const_cache == NULL) {
8060
return NULL;
8061
}
8062
8063
cfg_builder g;
8064
if (instructions_to_cfg(instructions, &g) < 0) {
8065
goto error;
8066
}
8067
8068
if (_PyCfg_JumpLabelsToTargets(g.g_entryblock) < 0) {
8069
goto error;
8070
}
8071
8072
int code_flags = 0;
8073
int nlocalsplus = prepare_localsplus(umd, &g, code_flags);
8074
if (nlocalsplus < 0) {
8075
goto error;
8076
}
8077
8078
int maxdepth = _PyCfg_Stackdepth(g.g_entryblock, code_flags);
8079
if (maxdepth < 0) {
8080
goto error;
8081
}
8082
8083
_PyCfg_ConvertPseudoOps(g.g_entryblock);
8084
8085
/* Order of basic blocks must have been determined by now */
8086
8087
if (_PyCfg_ResolveJumps(&g) < 0) {
8088
goto error;
8089
}
8090
8091
/* Can't modify the bytecode after computing jump offsets. */
8092
8093
if (cfg_to_instr_sequence(&g, &optimized_instrs) < 0) {
8094
goto error;
8095
}
8096
8097
PyObject *consts = consts_dict_keys_inorder(umd->u_consts);
8098
if (consts == NULL) {
8099
goto error;
8100
}
8101
co = _PyAssemble_MakeCodeObject(umd, const_cache,
8102
consts, maxdepth, &optimized_instrs,
8103
nlocalsplus, code_flags, filename);
8104
Py_DECREF(consts);
8105
8106
error:
8107
Py_DECREF(const_cache);
8108
_PyCfgBuilder_Fini(&g);
8109
instr_sequence_fini(&optimized_instrs);
8110
return co;
8111
}
8112
8113
8114
/* Retained for API compatibility.
 * Optimization is now done in _PyCfg_OptimizeCodeUnit */

PyObject *
PyCode_Optimize(PyObject *code, PyObject* Py_UNUSED(consts),
                PyObject *Py_UNUSED(names), PyObject *Py_UNUSED(lnotab_obj))
{
    /* Legacy no-op: just return a new reference to the bytecode unchanged. */
    return Py_NewRef(code);
}
8123
8124