Changeset 391 for python/trunk/Python/compile.c
- Timestamp: Mar 19, 2014, 11:31:01 PM
- Location: python/trunk
- Files: 2 edited
python/trunk
- Property svn:mergeinfo set to:
    /python/vendor/Python-2.7.6   merged eligible
    /python/vendor/current        merged eligible
python/trunk/Python/compile.c
Diff r2 → r391:

Lines 6-13 (file header comment): step 2 of the compilation overview now points at its source file, and the " *    this file." continuation line changes in whitespace only:
  -  * 2. Builds a symbol table.
  +  * 2. Builds a symbol table.  See symtable.c.

After #define DEFAULT_LNOTAB_SIZE 16, comprehension-kind constants are added (new lines 42-45):
  +#define COMP_GENEXP   0
  +#define COMP_SETCOMP  1
  +#define COMP_DICTCOMP 2

struct instr (i_jabs, i_jrel, i_hasarg, i_opcode, i_oparg, i_target, i_lineno), the basicblock_ struct, and struct fblockinfo are re-indented from tabs to four spaces; no functional change.

struct compiler_unit is likewise re-indented, and one field is dropped:
  -    int u_tmpname;          /* temporary variables for list comps */
(the anonymous "_[N]" temporaries it counted are no longer generated; see the removal of compiler_new_tmpname() further down).
struct compiler is re-indented and loses one member:
  -    char *c_encoding;            /* source encoding (a borrowed reference) */

The forward declarations (compiler_augassign, compiler_visit_slice, compiler_push_fblock / compiler_pop_fblock, compiler_in_loop) and the static __doc__ change in whitespace only.

A capsule name for stashed compiler units is introduced (new line 184):
  +#define COMPILER_CAPSULE_NAME_COMPILER_UNIT "compile.c compiler unit"

_Py_Mangle() ("__private becomes _classname__private") is re-indented; its early-return cases (no class context, names not starting with "__", __dunder__ names, dotted import names, all-underscore class names) are unchanged.
The one substantive change in _Py_Mangle() is that the old length-overflow asserts become a real runtime check:
  -    assert(1 <= PY_SSIZE_T_MAX - nlen);
  -    assert(1 + nlen <= PY_SSIZE_T_MAX - plen);
  +    if (plen + nlen >= PY_SSIZE_T_MAX - 1) {
  +        PyErr_SetString(PyExc_OverflowError,
  +                        "private identifier too large to be mangled");
  +        return NULL;
  +    }
The construction of the result ("_" + stripped class name + name via PyString_FromStringAndSize, strncpy and strcpy) is unchanged apart from re-indentation.
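For readers without the surrounding code handy, the rule being guarded is small. A minimal sketch in plain C (the helper name mangle_sketch and the caller-supplied buffer are illustrative, not part of the changeset; the __dunder__ and dotted-name early returns are skipped):

    #include <string.h>

    /* Illustration only: the rule _Py_Mangle implements.
       "__private" inside class "Spam" becomes "_Spam__private". */
    static int
    mangle_sketch(const char *klass, const char *name, char *buf, size_t bufsize)
    {
        size_t nlen = strlen(name), plen;
        while (*klass == '_')                /* strip leading underscores */
            klass++;
        plen = strlen(klass);
        if (plen + nlen >= bufsize - 1)      /* runtime guard, like the new
                                                check (the old code asserted) */
            return -1;
        buf[0] = '_';
        memcpy(buf + 1, klass, plen);
        memcpy(buf + 1 + plen, name, nlen + 1);  /* copies the trailing NUL */
        return 0;
    }

e.g. mangle_sketch("Spam", "__private", buf, sizeof buf) leaves "_Spam__private" in buf, mirroring the "_" + p[:plen] + name comment in the real code.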
compiler_init() is re-indented only.

In PyAST_Compile(), besides re-indentation, the now-pointless encoding initialisation is dropped:
  -    /* XXX initialize to NULL for now, need to handle */
  -    c.c_encoding = NULL;

PyNode_Compile(), compiler_free() and list2dict() are re-indented with no functional change.
dictbytype() is rewritten so that the indexes it hands out no longer depend on dict iteration order. The old loop walked the symbol table dict directly:
  -    Py_ssize_t pos = 0, i = offset, scope;
  -    ...
  -    while (PyDict_Next(src, &pos, &k, &v)) {
The new version sorts the keys first and explains why:
  +    /* Sort the keys so that we have a deterministic order on the indexes
  +       saved in the returned dictionary.  These indexes are used as indexes
  +       into the free and cell var storage.  Therefore if they aren't
  +       deterministic, then the generated bytecode is not deterministic. */
  +    sorted_keys = PyDict_Keys(src);
  +    if (sorted_keys == NULL)
  +        return NULL;
  +    if (PyList_Sort(sorted_keys) != 0) {
  +        Py_DECREF(sorted_keys);
  +        return NULL;
  +    }
  +    num_keys = PyList_GET_SIZE(sorted_keys);
  +    for (key_i = 0; key_i < num_keys; key_i++) {
  +        k = PyList_GET_ITEM(sorted_keys, key_i);
  +        v = PyDict_GetItem(src, k);
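Isolated from the rest of dictbytype(), the sorted-iteration pattern looks like the sketch below (the helper assign_sorted_indexes is hypothetical; it keeps only the calls the real code makes). The point of the change is that the index assigned to each name ends up as a position in the cell/free variable storage baked into the bytecode, so hash-order iteration made the generated bytecode vary from run to run.

    #include <Python.h>

    /* Hypothetical helper (not the real dictbytype): walk a dict's keys in
       sorted order so the index handed to each key is the same every run. */
    static int
    assign_sorted_indexes(PyObject *src)
    {
        PyObject *sorted_keys = PyDict_Keys(src);      /* new reference */
        Py_ssize_t i, n;
        if (sorted_keys == NULL)
            return -1;
        if (PyList_Sort(sorted_keys) != 0) {           /* sorts in place */
            Py_DECREF(sorted_keys);
            return -1;
        }
        n = PyList_GET_SIZE(sorted_keys);
        for (i = 0; i < n; i++) {
            PyObject *k = PyList_GET_ITEM(sorted_keys, i);   /* borrowed */
            PyObject *v = PyDict_GetItem(src, k);            /* borrowed */
            /* ... record that key k gets index i, as dictbytype does ... */
            (void)v;
        }
        Py_DECREF(sorted_keys);
        return 0;
    }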
The body of the dictbytype() loop (scope test, PyInt_FromLong index, PyTuple_Pack key, PyDict_SetItem) is otherwise unchanged, except that every error path and the normal exit now also Py_DECREF(sorted_keys).

compiler_unit_check() and compiler_unit_free() are re-indented only.

compiler_enter_scope() drops the initialisation of the removed field
  -    u->u_tmpname = 0;
and stashes the enclosing unit in a capsule instead of a CObject:
  -        PyObject *wrapper = PyCObject_FromVoidPtr(c->u, NULL);
  -        if (!wrapper || PyList_Append(c->c_stack, wrapper) < 0) {
  +        PyObject *capsule = PyCapsule_New(c->u, COMPILER_CAPSULE_NAME_COMPILER_UNIT, NULL);
  +        if (!capsule || PyList_Append(c->c_stack, capsule) < 0) {
compiler_exit_scope() makes the matching change when it pops the stashed unit:
  -        wrapper = PyList_GET_ITEM(c->c_stack, n);
  -        c->u = (struct compiler_unit *)PyCObject_AsVoidPtr(wrapper);
  +        capsule = PyList_GET_ITEM(c->c_stack, n);
  +        c->u = (struct compiler_unit *)PyCapsule_GetPointer(capsule, COMPILER_CAPSULE_NAME_COMPILER_UNIT);

compiler_new_tmpname() is removed outright, together with its comment:
  -/* Allocate a new "anonymous" local variable.
  -   Used by list comprehensions and with statements.
  -*/
  -static PyObject *
  -compiler_new_tmpname(struct compiler *c)
  -{
  -    char tmpname[256];
  -    PyOS_snprintf(tmpname, sizeof(tmpname), "_[%d]", ++c->u->u_tmpname);
  -    return PyString_FromString(tmpname);
  -}
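The capsule round-trip used here is the stock PyCapsule API; a minimal sketch under illustrative names (struct unit and UNIT_CAPSULE_NAME are not from the changeset):

    #include <Python.h>

    #define UNIT_CAPSULE_NAME "example.compiler_unit"   /* illustrative */

    struct unit { int dummy; };                          /* stand-in payload */

    /* Wrap a raw pointer so it can sit in a Python list, then get it back;
       the pattern compiler_enter_scope / compiler_exit_scope now use. */
    static PyObject *
    wrap_unit(struct unit *u)
    {
        /* NULL destructor: ownership of the unit stays with the compiler */
        return PyCapsule_New(u, UNIT_CAPSULE_NAME, NULL);
    }

    static struct unit *
    unwrap_unit(PyObject *capsule)
    {
        /* Returns NULL and sets an exception if the name doesn't match */
        return (struct unit *)PyCapsule_GetPointer(capsule, UNIT_CAPSULE_NAME);
    }

Requiring the name on both sides is what the new COMPILER_CAPSULE_NAME_COMPILER_UNIT constant is for: a mismatched capsule is detected instead of silently dereferenced.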
compiler_new_block(), compiler_use_new_block(), compiler_next_block(), compiler_use_next_block(), compiler_next_instr() and compiler_set_lineno() are re-indented with no functional change.

opcode_stack_effect() is updated for the 2.7 opcode set. The leading cases (POP_TOP, ROT_TWO/ROT_THREE/ROT_FOUR, DUP_TOP, the unary opcodes) keep their old values; the changes start with the comprehension opcodes below.
  -        case LIST_APPEND:
  -            return -2;
  +        case SET_ADD:
  +        case LIST_APPEND:
  +            return -1;
  +
  +        case MAP_ADD:
  +            return -2;

  -        case SLICE+0:
  -            return 1;
  -        case SLICE+1:
  -            return 0;
  -        case SLICE+2:
  -            return 0;
  -        case SLICE+3:
  -            return -1;
  +        case SLICE+0:
  +            return 0;
  +        case SLICE+1:
  +            return -1;
  +        case SLICE+2:
  +            return -1;
  +        case SLICE+3:
  +            return -2;

The binary and in-place arithmetic cases, STORE_SLICE/DELETE_SLICE, STORE_SUBSCR/STORE_MAP/DELETE_SUBSCR, the shift/logical opcodes and the PRINT_* cases keep their old values.
  +        case SETUP_WITH:
  +            return 4;

  -        case END_FINALLY:
  -            return -1; /* or -2 or -3 if exception occurred */
  +        case END_FINALLY:
  +            return -3; /* or -1 or -2 if no exception occurred or
  +                          return/break/continue */

  -        case FOR_ITER:
  -            return 1;
  +        case FOR_ITER:
  +            return 1; /* or -1, at end of iterator */

  -        case BUILD_TUPLE:
  -        case BUILD_LIST:
  -            return 1-oparg;
  +        case BUILD_TUPLE:
  +        case BUILD_LIST:
  +        case BUILD_SET:
  +            return 1-oparg;

  -        case IMPORT_NAME:
  -            return 0;
  +        case IMPORT_NAME:
  +            return -1;

  -        case JUMP_FORWARD:
  -        case JUMP_IF_FALSE:
  -        case JUMP_IF_TRUE:
  -        case JUMP_ABSOLUTE:
  -            return 0;
  +        case JUMP_FORWARD:
  +        case JUMP_IF_TRUE_OR_POP:  /* -1 if jump not taken */
  +        case JUMP_IF_FALSE_OR_POP: /*  "" */
  +        case JUMP_ABSOLUTE:
  +            return 0;
  +
  +        case POP_JUMP_IF_FALSE:
  +        case POP_JUMP_IF_TRUE:
  +            return -1;

  -        case SETUP_LOOP:
  -            return 0;
  -        case SETUP_EXCEPT:
  -        case SETUP_FINALLY:
  -            return 3; /* actually pushed by an exception */
  +        case SETUP_LOOP:
  +        case SETUP_EXCEPT:
  +        case SETUP_FINALLY:
  +            return 0;

WITH_CLEANUP, LOAD_LOCALS, RETURN_VALUE, IMPORT_STAR, EXEC_STMT, YIELD_VALUE, POP_BLOCK, BUILD_CLASS, the name/attr/global/fast store-and-delete cases, UNPACK_SEQUENCE, DUP_TOPX, LOAD_CONST/LOAD_NAME/LOAD_ATTR, COMPARE_OP, IMPORT_FROM, LOAD_GLOBAL and CONTINUE_LOOP are unchanged.
The CALL_FUNCTION family (via the NARGS macro), RAISE_VARARGS, MAKE_FUNCTION, BUILD_SLICE, LOAD_CLOSURE, LOAD_DEREF, STORE_DEREF and the Py_FatalError default case keep their old values; the one remaining change is
  -        case MAKE_CLOSURE:
  -            return -oparg;
  +        case MAKE_CLOSURE:
  +            return -oparg-1;
(the extra pop accounts for the tuple of closure cells it consumes).

compiler_addop() and compiler_add_o() are re-indented with no functional change.
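These per-opcode deltas feed stackdepth(), which walks the basic blocks and records the running maximum as co_stacksize. As a flat illustration that ignores jumps: the code a body like return a + b produces (LOAD_FAST, LOAD_FAST, BINARY_ADD, RETURN_VALUE) nets to zero with a peak of two. A hypothetical helper showing the accumulation, not taken from compile.c:

    /* Illustration only: how stackdepth-style accumulation consumes the
       table above.  The deltas are opcode_stack_effect() values for
       LOAD_FAST, LOAD_FAST, BINARY_ADD, RETURN_VALUE. */
    static int
    example_max_depth(void)
    {
        int effect[] = { +1, +1, -1, -1 };
        int i, depth = 0, maxdepth = 0;
        for (i = 0; i < 4; i++) {
            depth += effect[i];
            if (depth > maxdepth)
                maxdepth = depth;
        }
        return maxdepth;    /* 2: the peak that co_stacksize must cover */
    }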
compiler_add_o()'s constant-key logic is unchanged: floats and complex numbers are keyed on (value, type) plus Py_None padding so that 0.0 / -0.0 and the four complex zeros stay distinct, and the dict maps that key to the co_consts index.

compiler_addop_o(), compiler_addop_name(), compiler_addop_i() and compiler_addop_j() are re-indented only, as is the comment explaining the difference between NEW_BLOCK and NEXT_BLOCK.
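The reason the float key needs more than the value is that 0.0 == -0.0 under ordinary comparison, so keying constants on value alone would merge the two and silently change the sign of a stored constant. The test the code relies on is the copysign trick, sketched here as a stand-alone helper (is_negative_zero is illustrative, not a compile.c function):

    #include <math.h>

    /* 0.0 and -0.0 compare equal, so the sign bit has to be checked
       separately via copysign(). */
    static int
    is_negative_zero(double d)
    {
        return d == 0.0 && copysign(1.0, d) < 0.0;
    }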
The NEW_BLOCK, NEXT_BLOCK, ADDOP, ADDOP_IN_SCOPE, ADDOP_O, ADDOP_NAME, ADDOP_I, ADDOP_JABS, ADDOP_JREL, VISIT, VISIT_IN_SCOPE, VISIT_SLICE, VISIT_SEQ and VISIT_SEQ_IN_SCOPE macros are re-indented with no functional change, and so are compiler_isdocstring() and compiler_body().
compiler_mod() and compiler_lookup_arg() are re-indented only. In get_ref_type() the "unknown scope" Py_FatalError message loses its trailing newline; the rest of the function, and the start of compiler_make_closure() (the CELL/FREE lookup for each free variable, including the special case for a class method sharing a name with a free variable), are unchanged apart from indentation.
compiler_make_closure()'s error path (the printf of the failed lookup followed by Py_FatalError) and its emission of LOAD_CLOSURE / BUILD_TUPLE / LOAD_CONST / MAKE_CLOSURE are unchanged. compiler_decorators(), compiler_arguments() and compiler_function() are re-indented only (docstring handling, nested-argument unpacking and the decorator CALL_FUNCTION loop are the same, apart from a whitespace tweak on the compiler_add_o(..., first_const) check).
compiler_class() is re-indented only.

compiler_ifexp() switches to the new conditional-jump opcode. The old sequence
  -    ADDOP_JREL(c, JUMP_IF_FALSE, next);
  -    ADDOP(c, POP_TOP);
  -    VISIT(c, expr, e->v.IfExp.body);
  -    ADDOP_JREL(c, JUMP_FORWARD, end);
  -    compiler_use_next_block(c, next);
  -    ADDOP(c, POP_TOP);
becomes
  +    ADDOP_JABS(c, POP_JUMP_IF_FALSE, next);
  +    VISIT(c, expr, e->v.IfExp.body);
  +    ADDOP_JREL(c, JUMP_FORWARD, end);
  +    compiler_use_next_block(c, next);
Because POP_JUMP_IF_FALSE pops the test value itself, the explicit POP_TOPs go away.
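Spelled out in the same style as the try/finally comment near the end of this file, the code now emitted for "body if test else orelse" is (a sketch derived from the new compiler_ifexp, not a literal disassembly):

            <code for test>
            POP_JUMP_IF_FALSE  next    ; pops the test value as part of the jump
            <code for body>            ; result if the test was true
            JUMP_FORWARD       end
        next:
            <code for orelse>          ; result if the test was false
        end:

The old JUMP_IF_FALSE left the test value on the stack, which is why both branches previously needed an explicit POP_TOP.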
compiler_lambda() gains two pieces of logic besides re-indentation:
  +    /* Make None the first constant, so the lambda can't have a
  +       docstring. */
  +    if (compiler_add_o(c, c->u->u_consts, Py_None) < 0)
  +        return 0;
and, after visiting the body,
  -    ADDOP_IN_SCOPE(c, RETURN_VALUE);
  +    if (c->u->u_ste->ste_generator) {
  +        ADDOP_IN_SCOPE(c, POP_TOP);
  +    }
  +    else {
  +        ADDOP_IN_SCOPE(c, RETURN_VALUE);
  +    }
so a lambda whose body is a generator discards the value instead of returning it.

compiler_print() is re-indented only.
compiler_if() gets the same treatment as compiler_ifexp(): the unconditionally allocated "next" block, JUMP_IF_FALSE and the two POP_TOPs are gone. The new code allocates "next" only when there is an else branch (otherwise next = end), emits
  +    ADDOP_JABS(c, POP_JUMP_IF_FALSE, next);
after visiting the test, and visits the else suite under compiler_use_next_block(c, next) only when it exists. The constant folding of "if 0" / "if 1" tests is unchanged.

compiler_for() loses the per-iteration line-number reset:
  -    /* for expressions must be traced on each iteration,
  -       so we need to set an extra line number. */
  -    c->u->u_lineno_set = false;
*/ 1633 c->u->u_lineno_set = false; 1634 ADDOP_JREL(c, FOR_ITER, cleanup); 1635 VISIT(c, expr, s->v.For.target); 1636 VISIT_SEQ(c, stmt, s->v.For.body); 1637 ADDOP_JABS(c, JUMP_ABSOLUTE, start); 1638 compiler_use_next_block(c, cleanup); 1639 ADDOP(c, POP_BLOCK); 1640 compiler_pop_fblock(c, LOOP, start); 1641 VISIT_SEQ(c, stmt, s->v.For.orelse); 1642 compiler_use_next_block(c, end); 1643 return 1; 1651 basicblock *start, *cleanup, *end; 1652 1653 start = compiler_new_block(c); 1654 cleanup = compiler_new_block(c); 1655 end = compiler_new_block(c); 1656 if (start == NULL || end == NULL || cleanup == NULL) 1657 return 0; 1658 ADDOP_JREL(c, SETUP_LOOP, end); 1659 if (!compiler_push_fblock(c, LOOP, start)) 1660 return 0; 1661 VISIT(c, expr, s->v.For.iter); 1662 ADDOP(c, GET_ITER); 1663 compiler_use_next_block(c, start); 1664 ADDOP_JREL(c, FOR_ITER, cleanup); 1665 VISIT(c, expr, s->v.For.target); 1666 VISIT_SEQ(c, stmt, s->v.For.body); 1667 ADDOP_JABS(c, JUMP_ABSOLUTE, start); 1668 compiler_use_next_block(c, cleanup); 1669 ADDOP(c, POP_BLOCK); 1670 compiler_pop_fblock(c, LOOP, start); 1671 VISIT_SEQ(c, stmt, s->v.For.orelse); 1672 compiler_use_next_block(c, end); 1673 return 1; 1644 1674 } 1645 1675 … … 1647 1677 compiler_while(struct compiler *c, stmt_ty s) 1648 1678 { 1649 basicblock *loop, *orelse, *end, *anchor = NULL; 1650 int constant = expr_constant(s->v.While.test); 1651 1652 if (constant == 0) { 1653 if (s->v.While.orelse) 1654 VISIT_SEQ(c, stmt, s->v.While.orelse); 1655 return 1; 1656 } 1657 loop = compiler_new_block(c); 1658 end = compiler_new_block(c); 1659 if (constant == -1) { 1660 anchor = compiler_new_block(c); 1661 if (anchor == NULL) 1662 return 0; 1663 } 1664 if (loop == NULL || end == NULL) 1665 return 0; 1666 if (s->v.While.orelse) { 1667 orelse = compiler_new_block(c); 1668 if (orelse == NULL) 1669 return 0; 1670 } 1671 else 1672 orelse = NULL; 1673 1674 ADDOP_JREL(c, SETUP_LOOP, end); 1675 compiler_use_next_block(c, loop); 1676 if (!compiler_push_fblock(c, LOOP, loop)) 1677 return 0; 1678 if (constant == -1) { 1679 /* while expressions must be traced on each iteration, 1680 so we need to set an extra line number. */ 1681 c->u->u_lineno_set = false; 1682 VISIT(c, expr, s->v.While.test); 1683 ADDOP_JREL(c, JUMP_IF_FALSE, anchor); 1684 ADDOP(c, POP_TOP); 1685 } 1686 VISIT_SEQ(c, stmt, s->v.While.body); 1687 ADDOP_JABS(c, JUMP_ABSOLUTE, loop); 1688 1689 /* XXX should the two POP instructions be in a separate block 1690 if there is no else clause ? 1691 */ 1692 1693 if (constant == -1) { 1694 compiler_use_next_block(c, anchor); 1695 ADDOP(c, POP_TOP); 1696 ADDOP(c, POP_BLOCK); 1697 } 1698 compiler_pop_fblock(c, LOOP, loop); 1699 if (orelse != NULL) /* what if orelse is just pass? 
*/ 1700 VISIT_SEQ(c, stmt, s->v.While.orelse); 1701 compiler_use_next_block(c, end); 1702 1703 return 1; 1679 basicblock *loop, *orelse, *end, *anchor = NULL; 1680 int constant = expr_constant(s->v.While.test); 1681 1682 if (constant == 0) { 1683 if (s->v.While.orelse) 1684 VISIT_SEQ(c, stmt, s->v.While.orelse); 1685 return 1; 1686 } 1687 loop = compiler_new_block(c); 1688 end = compiler_new_block(c); 1689 if (constant == -1) { 1690 anchor = compiler_new_block(c); 1691 if (anchor == NULL) 1692 return 0; 1693 } 1694 if (loop == NULL || end == NULL) 1695 return 0; 1696 if (s->v.While.orelse) { 1697 orelse = compiler_new_block(c); 1698 if (orelse == NULL) 1699 return 0; 1700 } 1701 else 1702 orelse = NULL; 1703 1704 ADDOP_JREL(c, SETUP_LOOP, end); 1705 compiler_use_next_block(c, loop); 1706 if (!compiler_push_fblock(c, LOOP, loop)) 1707 return 0; 1708 if (constant == -1) { 1709 VISIT(c, expr, s->v.While.test); 1710 ADDOP_JABS(c, POP_JUMP_IF_FALSE, anchor); 1711 } 1712 VISIT_SEQ(c, stmt, s->v.While.body); 1713 ADDOP_JABS(c, JUMP_ABSOLUTE, loop); 1714 1715 /* XXX should the two POP instructions be in a separate block 1716 if there is no else clause ? 1717 */ 1718 1719 if (constant == -1) { 1720 compiler_use_next_block(c, anchor); 1721 ADDOP(c, POP_BLOCK); 1722 } 1723 compiler_pop_fblock(c, LOOP, loop); 1724 if (orelse != NULL) /* what if orelse is just pass? */ 1725 VISIT_SEQ(c, stmt, s->v.While.orelse); 1726 compiler_use_next_block(c, end); 1727 1728 return 1; 1704 1729 } 1705 1730 … … 1707 1732 compiler_continue(struct compiler *c) 1708 1733 { 1709 1710 static const char IN_FINALLY_ERROR_MSG[] = 1711 1712 1713 1714 1715 1716 1717 1718 1719 1720 1721 1722 1723 1724 1725 1726 1727 1728 1729 1730 1731 1732 1733 1734 1735 1736 1737 1734 static const char LOOP_ERROR_MSG[] = "'continue' not properly in loop"; 1735 static const char IN_FINALLY_ERROR_MSG[] = 1736 "'continue' not supported inside 'finally' clause"; 1737 int i; 1738 1739 if (!c->u->u_nfblocks) 1740 return compiler_error(c, LOOP_ERROR_MSG); 1741 i = c->u->u_nfblocks - 1; 1742 switch (c->u->u_fblock[i].fb_type) { 1743 case LOOP: 1744 ADDOP_JABS(c, JUMP_ABSOLUTE, c->u->u_fblock[i].fb_block); 1745 break; 1746 case EXCEPT: 1747 case FINALLY_TRY: 1748 while (--i >= 0 && c->u->u_fblock[i].fb_type != LOOP) { 1749 /* Prevent continue anywhere under a finally 1750 even if hidden in a sub-try or except. */ 1751 if (c->u->u_fblock[i].fb_type == FINALLY_END) 1752 return compiler_error(c, IN_FINALLY_ERROR_MSG); 1753 } 1754 if (i == -1) 1755 return compiler_error(c, LOOP_ERROR_MSG); 1756 ADDOP_JABS(c, CONTINUE_LOOP, c->u->u_fblock[i].fb_block); 1757 break; 1758 case FINALLY_END: 1759 return compiler_error(c, IN_FINALLY_ERROR_MSG); 1760 } 1761 1762 return 1; 1738 1763 } 1739 1764 1740 1765 /* Code generated for "try: <body> finally: <finalbody>" is as follows: 1741 1742 SETUP_FINALLYL1743 1744 1745 LOAD_CONST<None>1746 L:<code for finalbody>1747 1748 1766 1767 SETUP_FINALLY L 1768 <code for body> 1769 POP_BLOCK 1770 LOAD_CONST <None> 1771 L: <code for finalbody> 1772 END_FINALLY 1773 1749 1774 The special instructions use the block stack. Each block 1750 1775 stack entry contains the instruction that created it (here 1751 1776 SETUP_FINALLY), the level of the value stack at the time the 1752 1777 block stack entry was created, and a label (here L). 1753 1778 1754 1779 SETUP_FINALLY: 1755 1756 1780 Pushes the current value stack level and the label 1781 onto the block stack. 
1757 1782 POP_BLOCK: 1758 1759 1760 1783 Pops en entry from the block stack, and pops the value 1784 stack until its level is the same as indicated on the 1785 block stack. (The label is ignored.) 1761 1786 END_FINALLY: 1762 1763 1764 1765 1787 Pops a variable number of entries from the *value* stack 1788 and re-raises the exception they specify. The number of 1789 entries popped depends on the (pseudo) exception type. 1790 1766 1791 The block stack is unwound when an exception is raised: 1767 1792 when a SETUP_FINALLY entry is found, the exception is pushed … … 1774 1799 compiler_try_finally(struct compiler *c, stmt_ty s) 1775 1800 { 1776 1777 1778 1779 1780 1781 1782 1783 1784 1785 1786 1787 1788 1789 1790 1791 1792 1793 1794 1795 1796 1797 1798 1801 basicblock *body, *end; 1802 body = compiler_new_block(c); 1803 end = compiler_new_block(c); 1804 if (body == NULL || end == NULL) 1805 return 0; 1806 1807 ADDOP_JREL(c, SETUP_FINALLY, end); 1808 compiler_use_next_block(c, body); 1809 if (!compiler_push_fblock(c, FINALLY_TRY, body)) 1810 return 0; 1811 VISIT_SEQ(c, stmt, s->v.TryFinally.body); 1812 ADDOP(c, POP_BLOCK); 1813 compiler_pop_fblock(c, FINALLY_TRY, body); 1814 1815 ADDOP_O(c, LOAD_CONST, Py_None, consts); 1816 compiler_use_next_block(c, end); 1817 if (!compiler_push_fblock(c, FINALLY_END, end)) 1818 return 0; 1819 VISIT_SEQ(c, stmt, s->v.TryFinally.finalbody); 1820 ADDOP(c, END_FINALLY); 1821 compiler_pop_fblock(c, FINALLY_END, end); 1822 1823 return 1; 1799 1824 } 1800 1825 … … 1804 1829 at the right; 'tb' is trace-back info, 'val' the exception's 1805 1830 associated value, and 'exc' the exception.) 1806 1807 Value stack Label Instruction Argument 1808 [] SETUP_EXCEPT L1 1809 [] <code for S> 1810 [] POP_BLOCK 1811 [] JUMP_FORWARD L0 1812 1813 [tb, val, exc] L1: DUP ) 1814 [tb, val, exc, exc] <evaluate E1> ) 1815 [tb, val, exc, exc, E1] COMPARE_OP EXC_MATCH ) only if E1 1816 [tb, val, exc, 1-or-0] JUMP_IF_FALSE L2 ) 1817 [tb, val, exc, 1] POP ) 1818 [tb, val, exc] POP 1819 [tb, val] <assign to V1> (or POP if no V1) 1820 [tb] POP 1821 [] <code for S1> 1822 JUMP_FORWARD L0 1823 1824 [tb, val, exc, 0] L2: POP 1825 [tb, val, exc] DUP 1831 1832 Value stack Label Instruction Argument 1833 [] SETUP_EXCEPT L1 1834 [] <code for S> 1835 [] POP_BLOCK 1836 [] JUMP_FORWARD L0 1837 1838 [tb, val, exc] L1: DUP ) 1839 [tb, val, exc, exc] <evaluate E1> ) 1840 [tb, val, exc, exc, E1] COMPARE_OP EXC_MATCH ) only if E1 1841 [tb, val, exc, 1-or-0] POP_JUMP_IF_FALSE L2 ) 1842 [tb, val, exc] POP 1843 [tb, val] <assign to V1> (or POP if no V1) 1844 [tb] POP 1845 [] <code for S1> 1846 JUMP_FORWARD L0 1847 1848 [tb, val, exc] L2: DUP 1826 1849 .............................etc....................... 1827 1850 1828 [tb, val, exc, 0] Ln+1: POP 1829 [tb, val, exc] END_FINALLY # re-raise exception 1830 1831 [] L0: <next statement> 1832 1851 [tb, val, exc] Ln+1: END_FINALLY # re-raise exception 1852 1853 [] L0: <next statement> 1854 1833 1855 Of course, parts are not generated if Vi or Ei is not present. 
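Note: the handler-dispatch layout sketched above can be checked against what the compiler actually emits by disassembling a small function. This is only an illustration; the callables inside are placeholder names (dis never executes them) and exact offsets vary by version.

    import dis

    def f():
        try:
            do_work()              # placeholder name, never called by dis
        except ValueError as err:
            handle(err)            # placeholder name
        except Exception:
            pass

    # Each typed handler starts with DUP_TOP, the evaluated exception type,
    # COMPARE_OP (exception match) and a conditional jump to the next handler,
    # mirroring the diagram; with this change the jump is POP_JUMP_IF_FALSE.
    dis.dis(f)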
1834 1856 */ … … 1836 1858 compiler_try_except(struct compiler *c, stmt_ty s) 1837 1859 { 1838 basicblock *body, *orelse, *except, *end; 1839 int i, n; 1840 1841 body = compiler_new_block(c); 1842 except = compiler_new_block(c); 1843 orelse = compiler_new_block(c); 1844 end = compiler_new_block(c); 1845 if (body == NULL || except == NULL || orelse == NULL || end == NULL) 1846 return 0; 1847 ADDOP_JREL(c, SETUP_EXCEPT, except); 1848 compiler_use_next_block(c, body); 1849 if (!compiler_push_fblock(c, EXCEPT, body)) 1850 return 0; 1851 VISIT_SEQ(c, stmt, s->v.TryExcept.body); 1852 ADDOP(c, POP_BLOCK); 1853 compiler_pop_fblock(c, EXCEPT, body); 1854 ADDOP_JREL(c, JUMP_FORWARD, orelse); 1855 n = asdl_seq_LEN(s->v.TryExcept.handlers); 1856 compiler_use_next_block(c, except); 1857 for (i = 0; i < n; i++) { 1858 excepthandler_ty handler = (excepthandler_ty)asdl_seq_GET( 1859 s->v.TryExcept.handlers, i); 1860 if (!handler->v.ExceptHandler.type && i < n-1) 1861 return compiler_error(c, "default 'except:' must be last"); 1862 c->u->u_lineno_set = false; 1863 c->u->u_lineno = handler->lineno; 1864 except = compiler_new_block(c); 1865 if (except == NULL) 1866 return 0; 1867 if (handler->v.ExceptHandler.type) { 1868 ADDOP(c, DUP_TOP); 1869 VISIT(c, expr, handler->v.ExceptHandler.type); 1870 ADDOP_I(c, COMPARE_OP, PyCmp_EXC_MATCH); 1871 ADDOP_JREL(c, JUMP_IF_FALSE, except); 1872 ADDOP(c, POP_TOP); 1873 } 1874 ADDOP(c, POP_TOP); 1875 if (handler->v.ExceptHandler.name) { 1876 VISIT(c, expr, handler->v.ExceptHandler.name); 1877 } 1878 else { 1879 ADDOP(c, POP_TOP); 1880 } 1881 ADDOP(c, POP_TOP); 1882 VISIT_SEQ(c, stmt, handler->v.ExceptHandler.body); 1883 ADDOP_JREL(c, JUMP_FORWARD, end); 1884 compiler_use_next_block(c, except); 1885 if (handler->v.ExceptHandler.type) 1886 ADDOP(c, POP_TOP); 1887 } 1888 ADDOP(c, END_FINALLY); 1889 compiler_use_next_block(c, orelse); 1890 VISIT_SEQ(c, stmt, s->v.TryExcept.orelse); 1891 compiler_use_next_block(c, end); 1892 return 1; 1860 basicblock *body, *orelse, *except, *end; 1861 int i, n; 1862 1863 body = compiler_new_block(c); 1864 except = compiler_new_block(c); 1865 orelse = compiler_new_block(c); 1866 end = compiler_new_block(c); 1867 if (body == NULL || except == NULL || orelse == NULL || end == NULL) 1868 return 0; 1869 ADDOP_JREL(c, SETUP_EXCEPT, except); 1870 compiler_use_next_block(c, body); 1871 if (!compiler_push_fblock(c, EXCEPT, body)) 1872 return 0; 1873 VISIT_SEQ(c, stmt, s->v.TryExcept.body); 1874 ADDOP(c, POP_BLOCK); 1875 compiler_pop_fblock(c, EXCEPT, body); 1876 ADDOP_JREL(c, JUMP_FORWARD, orelse); 1877 n = asdl_seq_LEN(s->v.TryExcept.handlers); 1878 compiler_use_next_block(c, except); 1879 for (i = 0; i < n; i++) { 1880 excepthandler_ty handler = (excepthandler_ty)asdl_seq_GET( 1881 s->v.TryExcept.handlers, i); 1882 if (!handler->v.ExceptHandler.type && i < n-1) 1883 return compiler_error(c, "default 'except:' must be last"); 1884 c->u->u_lineno_set = false; 1885 c->u->u_lineno = handler->lineno; 1886 except = compiler_new_block(c); 1887 if (except == NULL) 1888 return 0; 1889 if (handler->v.ExceptHandler.type) { 1890 ADDOP(c, DUP_TOP); 1891 VISIT(c, expr, handler->v.ExceptHandler.type); 1892 ADDOP_I(c, COMPARE_OP, PyCmp_EXC_MATCH); 1893 ADDOP_JABS(c, POP_JUMP_IF_FALSE, except); 1894 } 1895 ADDOP(c, POP_TOP); 1896 if (handler->v.ExceptHandler.name) { 1897 VISIT(c, expr, handler->v.ExceptHandler.name); 1898 } 1899 else { 1900 ADDOP(c, POP_TOP); 1901 } 1902 ADDOP(c, POP_TOP); 1903 VISIT_SEQ(c, stmt, handler->v.ExceptHandler.body); 1904 ADDOP_JREL(c, 
JUMP_FORWARD, end); 1905 compiler_use_next_block(c, except); 1906 } 1907 ADDOP(c, END_FINALLY); 1908 compiler_use_next_block(c, orelse); 1909 VISIT_SEQ(c, stmt, s->v.TryExcept.orelse); 1910 compiler_use_next_block(c, end); 1911 return 1; 1893 1912 } 1894 1913 … … 1896 1915 compiler_import_as(struct compiler *c, identifier name, identifier asname) 1897 1916 { 1898 1899 1900 1901 If there is a dot in name, we need to split it and emit a 1902 1903 1904 1905 1906 1907 1908 1909 1910 1911 1912 1913 attr = PyString_FromStringAndSize(src, 1914 1915 1916 1917 1918 1919 1920 1921 1922 1917 /* The IMPORT_NAME opcode was already generated. This function 1918 merely needs to bind the result to a name. 1919 1920 If there is a dot in name, we need to split it and emit a 1921 LOAD_ATTR for each name. 1922 */ 1923 const char *src = PyString_AS_STRING(name); 1924 const char *dot = strchr(src, '.'); 1925 if (dot) { 1926 /* Consume the base module name to get the first attribute */ 1927 src = dot + 1; 1928 while (dot) { 1929 /* NB src is only defined when dot != NULL */ 1930 PyObject *attr; 1931 dot = strchr(src, '.'); 1932 attr = PyString_FromStringAndSize(src, 1933 dot ? dot - src : strlen(src)); 1934 if (!attr) 1935 return -1; 1936 ADDOP_O(c, LOAD_ATTR, attr, names); 1937 Py_DECREF(attr); 1938 src = dot + 1; 1939 } 1940 } 1941 return compiler_nameop(c, asname, Store); 1923 1942 } 1924 1943 … … 1926 1945 compiler_import(struct compiler *c, stmt_ty s) 1927 1946 { 1928 1929 1930 1931 1932 module names. 1933 1934 1935 1936 1937 1938 1939 1940 1941 1942 1943 1944 1945 1946 1947 1948 1949 1950 1951 1952 1953 1954 1955 1956 1957 1958 1959 1960 1961 1962 1963 1964 1965 tmp = PyString_FromStringAndSize(base, 1966 1967 1968 1969 1970 1971 1972 1973 1974 1975 1947 /* The Import node stores a module name like a.b.c as a single 1948 string. This is convenient for all cases except 1949 import a.b.c as d 1950 where we need to parse that string to extract the individual 1951 module names. 1952 XXX Perhaps change the representation to make this case simpler? 
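Note: the awkward case the comment points at is easiest to see at the Python level — a plain dotted import binds the top-level package, while the "as" form has to walk the dotted path, which is what the LOAD_ATTR chain built by compiler_import_as produces. The standard-library names below are only an example.

    import os.path             # binds the name "os"; "path" is reached as an attribute
    import os.path as p        # binds "p" directly to the os.path submodule

    print(os.path is p)        # True: same module object, different bound name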
1953 */ 1954 int i, n = asdl_seq_LEN(s->v.Import.names); 1955 1956 for (i = 0; i < n; i++) { 1957 alias_ty alias = (alias_ty)asdl_seq_GET(s->v.Import.names, i); 1958 int r; 1959 PyObject *level; 1960 1961 if (c->c_flags && (c->c_flags->cf_flags & CO_FUTURE_ABSOLUTE_IMPORT)) 1962 level = PyInt_FromLong(0); 1963 else 1964 level = PyInt_FromLong(-1); 1965 1966 if (level == NULL) 1967 return 0; 1968 1969 ADDOP_O(c, LOAD_CONST, level, consts); 1970 Py_DECREF(level); 1971 ADDOP_O(c, LOAD_CONST, Py_None, consts); 1972 ADDOP_NAME(c, IMPORT_NAME, alias->name, names); 1973 1974 if (alias->asname) { 1975 r = compiler_import_as(c, alias->name, alias->asname); 1976 if (!r) 1977 return r; 1978 } 1979 else { 1980 identifier tmp = alias->name; 1981 const char *base = PyString_AS_STRING(alias->name); 1982 char *dot = strchr(base, '.'); 1983 if (dot) 1984 tmp = PyString_FromStringAndSize(base, 1985 dot - base); 1986 r = compiler_nameop(c, tmp, Store); 1987 if (dot) { 1988 Py_DECREF(tmp); 1989 } 1990 if (!r) 1991 return r; 1992 } 1993 } 1994 return 1; 1976 1995 } 1977 1996 … … 1979 1998 compiler_from_import(struct compiler *c, stmt_ty s) 1980 1999 { 1981 int i, n = asdl_seq_LEN(s->v.ImportFrom.names); 1982 1983 PyObject *names = PyTuple_New(n); 1984 PyObject *level; 1985 1986 if (!names) 1987 return 0; 1988 1989 if (s->v.ImportFrom.level == 0 && c->c_flags && 1990 !(c->c_flags->cf_flags & CO_FUTURE_ABSOLUTE_IMPORT)) 1991 level = PyInt_FromLong(-1); 1992 else 1993 level = PyInt_FromLong(s->v.ImportFrom.level); 1994 1995 if (!level) { 1996 Py_DECREF(names); 1997 return 0; 1998 } 1999 2000 /* build up the names */ 2001 for (i = 0; i < n; i++) { 2002 alias_ty alias = (alias_ty)asdl_seq_GET(s->v.ImportFrom.names, i); 2003 Py_INCREF(alias->name); 2004 PyTuple_SET_ITEM(names, i, alias->name); 2005 } 2006 2007 if (s->lineno > c->c_future->ff_lineno) { 2008 if (!strcmp(PyString_AS_STRING(s->v.ImportFrom.module), 2009 "__future__")) { 2010 Py_DECREF(level); 2011 Py_DECREF(names); 2012 return compiler_error(c, 2013 "from __future__ imports must occur " 2014 "at the beginning of the file"); 2015 2016 } 2017 } 2018 2019 ADDOP_O(c, LOAD_CONST, level, consts); 2020 Py_DECREF(level); 2021 ADDOP_O(c, LOAD_CONST, names, consts); 2022 Py_DECREF(names); 2023 ADDOP_NAME(c, IMPORT_NAME, s->v.ImportFrom.module, names); 2024 for (i = 0; i < n; i++) { 2025 alias_ty alias = (alias_ty)asdl_seq_GET(s->v.ImportFrom.names, i); 2026 identifier store_name; 2027 2028 if (i == 0 && *PyString_AS_STRING(alias->name) == '*') { 2029 assert(n == 1); 2030 ADDOP(c, IMPORT_STAR); 2031 return 1; 2032 } 2033 2034 ADDOP_NAME(c, IMPORT_FROM, alias->name, names); 2035 store_name = alias->name; 2036 if (alias->asname) 2037 store_name = alias->asname; 2038 2039 if (!compiler_nameop(c, store_name, Store)) { 2040 Py_DECREF(names); 2041 return 0; 2042 } 2043 } 2044 /* remove imported module */ 2045 ADDOP(c, POP_TOP); 2046 return 1; 2000 int i, n = asdl_seq_LEN(s->v.ImportFrom.names); 2001 2002 PyObject *names = PyTuple_New(n); 2003 PyObject *level; 2004 static PyObject *empty_string; 2005 2006 if (!empty_string) { 2007 empty_string = PyString_FromString(""); 2008 if (!empty_string) 2009 return 0; 2010 } 2011 2012 if (!names) 2013 return 0; 2014 2015 if (s->v.ImportFrom.level == 0 && c->c_flags && 2016 !(c->c_flags->cf_flags & CO_FUTURE_ABSOLUTE_IMPORT)) 2017 level = PyInt_FromLong(-1); 2018 else 2019 level = PyInt_FromLong(s->v.ImportFrom.level); 2020 2021 if (!level) { 2022 Py_DECREF(names); 2023 return 0; 2024 } 2025 2026 /* build up the names */ 2027 for 
(i = 0; i < n; i++) { 2028 alias_ty alias = (alias_ty)asdl_seq_GET(s->v.ImportFrom.names, i); 2029 Py_INCREF(alias->name); 2030 PyTuple_SET_ITEM(names, i, alias->name); 2031 } 2032 2033 if (s->lineno > c->c_future->ff_lineno && s->v.ImportFrom.module && 2034 !strcmp(PyString_AS_STRING(s->v.ImportFrom.module), "__future__")) { 2035 Py_DECREF(level); 2036 Py_DECREF(names); 2037 return compiler_error(c, "from __future__ imports must occur " 2038 "at the beginning of the file"); 2039 } 2040 2041 ADDOP_O(c, LOAD_CONST, level, consts); 2042 Py_DECREF(level); 2043 ADDOP_O(c, LOAD_CONST, names, consts); 2044 Py_DECREF(names); 2045 if (s->v.ImportFrom.module) { 2046 ADDOP_NAME(c, IMPORT_NAME, s->v.ImportFrom.module, names); 2047 } 2048 else { 2049 ADDOP_NAME(c, IMPORT_NAME, empty_string, names); 2050 } 2051 for (i = 0; i < n; i++) { 2052 alias_ty alias = (alias_ty)asdl_seq_GET(s->v.ImportFrom.names, i); 2053 identifier store_name; 2054 2055 if (i == 0 && *PyString_AS_STRING(alias->name) == '*') { 2056 assert(n == 1); 2057 ADDOP(c, IMPORT_STAR); 2058 return 1; 2059 } 2060 2061 ADDOP_NAME(c, IMPORT_FROM, alias->name, names); 2062 store_name = alias->name; 2063 if (alias->asname) 2064 store_name = alias->asname; 2065 2066 if (!compiler_nameop(c, store_name, Store)) { 2067 Py_DECREF(names); 2068 return 0; 2069 } 2070 } 2071 /* remove imported module */ 2072 ADDOP(c, POP_TOP); 2073 return 1; 2047 2074 } 2048 2075 … … 2050 2077 compiler_assert(struct compiler *c, stmt_ty s) 2051 2078 { 2052 static PyObject *assertion_error = NULL; 2053 basicblock *end; 2054 2055 if (Py_OptimizeFlag) 2056 return 1; 2057 if (assertion_error == NULL) { 2058 assertion_error = PyString_InternFromString("AssertionError"); 2059 if (assertion_error == NULL) 2060 return 0; 2061 } 2062 if (s->v.Assert.test->kind == Tuple_kind && 2063 asdl_seq_LEN(s->v.Assert.test->v.Tuple.elts) > 0) { 2064 const char* msg = 2065 "assertion is always true, perhaps remove parentheses?"; 2066 if (PyErr_WarnExplicit(PyExc_SyntaxWarning, msg, c->c_filename, 2067 c->u->u_lineno, NULL, NULL) == -1) 2068 return 0; 2069 } 2070 VISIT(c, expr, s->v.Assert.test); 2071 end = compiler_new_block(c); 2072 if (end == NULL) 2073 return 0; 2074 ADDOP_JREL(c, JUMP_IF_TRUE, end); 2075 ADDOP(c, POP_TOP); 2076 ADDOP_O(c, LOAD_GLOBAL, assertion_error, names); 2077 if (s->v.Assert.msg) { 2078 VISIT(c, expr, s->v.Assert.msg); 2079 ADDOP_I(c, RAISE_VARARGS, 2); 2080 } 2081 else { 2082 ADDOP_I(c, RAISE_VARARGS, 1); 2083 } 2084 compiler_use_next_block(c, end); 2085 ADDOP(c, POP_TOP); 2086 return 1; 2079 static PyObject *assertion_error = NULL; 2080 basicblock *end; 2081 2082 if (Py_OptimizeFlag) 2083 return 1; 2084 if (assertion_error == NULL) { 2085 assertion_error = PyString_InternFromString("AssertionError"); 2086 if (assertion_error == NULL) 2087 return 0; 2088 } 2089 if (s->v.Assert.test->kind == Tuple_kind && 2090 asdl_seq_LEN(s->v.Assert.test->v.Tuple.elts) > 0) { 2091 const char* msg = 2092 "assertion is always true, perhaps remove parentheses?"; 2093 if (PyErr_WarnExplicit(PyExc_SyntaxWarning, msg, c->c_filename, 2094 c->u->u_lineno, NULL, NULL) == -1) 2095 return 0; 2096 } 2097 VISIT(c, expr, s->v.Assert.test); 2098 end = compiler_new_block(c); 2099 if (end == NULL) 2100 return 0; 2101 ADDOP_JABS(c, POP_JUMP_IF_TRUE, end); 2102 ADDOP_O(c, LOAD_GLOBAL, assertion_error, names); 2103 if (s->v.Assert.msg) { 2104 VISIT(c, expr, s->v.Assert.msg); 2105 ADDOP_I(c, CALL_FUNCTION, 1); 2106 } 2107 ADDOP_I(c, RAISE_VARARGS, 1); 2108 compiler_use_next_block(c, end); 2109 return 
1; 2087 2110 } 2088 2111 … … 2090 2113 compiler_visit_stmt(struct compiler *c, stmt_ty s) 2091 2114 { 2092 2093 2094 2095 2096 2097 2098 2099 2100 2101 2102 2103 2104 2105 2106 2107 2108 2109 2110 2111 2112 2113 2114 2115 2116 2117 2118 2119 2120 2121 2122 2123 2124 2125 2126 2127 2128 2129 2130 2131 2132 2133 2134 2135 2136 2137 2138 2139 2140 2141 2142 2143 2144 2145 2146 2147 2148 2149 2150 2151 2152 2153 2154 2155 2156 2157 2158 2159 2160 2161 2162 2163 2164 2165 2166 2167 2168 2169 2170 2171 2172 2173 2174 2175 2176 2177 2178 2179 2180 2181 2182 2183 2184 2185 2186 2187 2188 2189 2190 2191 2192 2193 2194 2195 2196 2197 2198 2199 2200 2201 2202 2115 int i, n; 2116 2117 /* Always assign a lineno to the next instruction for a stmt. */ 2118 c->u->u_lineno = s->lineno; 2119 c->u->u_lineno_set = false; 2120 2121 switch (s->kind) { 2122 case FunctionDef_kind: 2123 return compiler_function(c, s); 2124 case ClassDef_kind: 2125 return compiler_class(c, s); 2126 case Return_kind: 2127 if (c->u->u_ste->ste_type != FunctionBlock) 2128 return compiler_error(c, "'return' outside function"); 2129 if (s->v.Return.value) { 2130 VISIT(c, expr, s->v.Return.value); 2131 } 2132 else 2133 ADDOP_O(c, LOAD_CONST, Py_None, consts); 2134 ADDOP(c, RETURN_VALUE); 2135 break; 2136 case Delete_kind: 2137 VISIT_SEQ(c, expr, s->v.Delete.targets) 2138 break; 2139 case Assign_kind: 2140 n = asdl_seq_LEN(s->v.Assign.targets); 2141 VISIT(c, expr, s->v.Assign.value); 2142 for (i = 0; i < n; i++) { 2143 if (i < n - 1) 2144 ADDOP(c, DUP_TOP); 2145 VISIT(c, expr, 2146 (expr_ty)asdl_seq_GET(s->v.Assign.targets, i)); 2147 } 2148 break; 2149 case AugAssign_kind: 2150 return compiler_augassign(c, s); 2151 case Print_kind: 2152 return compiler_print(c, s); 2153 case For_kind: 2154 return compiler_for(c, s); 2155 case While_kind: 2156 return compiler_while(c, s); 2157 case If_kind: 2158 return compiler_if(c, s); 2159 case Raise_kind: 2160 n = 0; 2161 if (s->v.Raise.type) { 2162 VISIT(c, expr, s->v.Raise.type); 2163 n++; 2164 if (s->v.Raise.inst) { 2165 VISIT(c, expr, s->v.Raise.inst); 2166 n++; 2167 if (s->v.Raise.tback) { 2168 VISIT(c, expr, s->v.Raise.tback); 2169 n++; 2170 } 2171 } 2172 } 2173 ADDOP_I(c, RAISE_VARARGS, n); 2174 break; 2175 case TryExcept_kind: 2176 return compiler_try_except(c, s); 2177 case TryFinally_kind: 2178 return compiler_try_finally(c, s); 2179 case Assert_kind: 2180 return compiler_assert(c, s); 2181 case Import_kind: 2182 return compiler_import(c, s); 2183 case ImportFrom_kind: 2184 return compiler_from_import(c, s); 2185 case Exec_kind: 2186 VISIT(c, expr, s->v.Exec.body); 2187 if (s->v.Exec.globals) { 2188 VISIT(c, expr, s->v.Exec.globals); 2189 if (s->v.Exec.locals) { 2190 VISIT(c, expr, s->v.Exec.locals); 2191 } else { 2192 ADDOP(c, DUP_TOP); 2193 } 2194 } else { 2195 ADDOP_O(c, LOAD_CONST, Py_None, consts); 2196 ADDOP(c, DUP_TOP); 2197 } 2198 ADDOP(c, EXEC_STMT); 2199 break; 2200 case Global_kind: 2201 break; 2202 case Expr_kind: 2203 if (c->c_interactive && c->c_nestlevel <= 1) { 2204 VISIT(c, expr, s->v.Expr.value); 2205 ADDOP(c, PRINT_EXPR); 2206 } 2207 else if (s->v.Expr.value->kind != Str_kind && 2208 s->v.Expr.value->kind != Num_kind) { 2209 VISIT(c, expr, s->v.Expr.value); 2210 ADDOP(c, POP_TOP); 2211 } 2212 break; 2213 case Pass_kind: 2214 break; 2215 case Break_kind: 2216 if (!compiler_in_loop(c)) 2217 return compiler_error(c, "'break' outside loop"); 2218 ADDOP(c, BREAK_LOOP); 2219 break; 2220 case Continue_kind: 2221 return compiler_continue(c); 2222 case With_kind: 2223 return 
compiler_with(c, s); 2224 } 2225 return 1; 2203 2226 } 2204 2227 … … 2206 2229 unaryop(unaryop_ty op) 2207 2230 { 2208 2209 2210 2211 2212 2213 2214 2215 2216 2217 2218 2219 2220 2221 2231 switch (op) { 2232 case Invert: 2233 return UNARY_INVERT; 2234 case Not: 2235 return UNARY_NOT; 2236 case UAdd: 2237 return UNARY_POSITIVE; 2238 case USub: 2239 return UNARY_NEGATIVE; 2240 default: 2241 PyErr_Format(PyExc_SystemError, 2242 "unary op %d should not be possible", op); 2243 return 0; 2244 } 2222 2245 } 2223 2246 … … 2225 2248 binop(struct compiler *c, operator_ty op) 2226 2249 { 2227 2228 2229 2230 2231 2232 2233 2234 2235 2236 2237 2238 2239 2240 2241 2242 2243 2244 2245 2246 2247 2248 2249 2250 2251 2252 2253 2254 2255 2256 2257 2258 2259 2250 switch (op) { 2251 case Add: 2252 return BINARY_ADD; 2253 case Sub: 2254 return BINARY_SUBTRACT; 2255 case Mult: 2256 return BINARY_MULTIPLY; 2257 case Div: 2258 if (c->c_flags && c->c_flags->cf_flags & CO_FUTURE_DIVISION) 2259 return BINARY_TRUE_DIVIDE; 2260 else 2261 return BINARY_DIVIDE; 2262 case Mod: 2263 return BINARY_MODULO; 2264 case Pow: 2265 return BINARY_POWER; 2266 case LShift: 2267 return BINARY_LSHIFT; 2268 case RShift: 2269 return BINARY_RSHIFT; 2270 case BitOr: 2271 return BINARY_OR; 2272 case BitXor: 2273 return BINARY_XOR; 2274 case BitAnd: 2275 return BINARY_AND; 2276 case FloorDiv: 2277 return BINARY_FLOOR_DIVIDE; 2278 default: 2279 PyErr_Format(PyExc_SystemError, 2280 "binary op %d should not be possible", op); 2281 return 0; 2282 } 2260 2283 } 2261 2284 … … 2263 2286 cmpop(cmpop_ty op) 2264 2287 { 2265 2266 2267 2268 2269 2270 2271 2272 2273 2274 2275 2276 2277 2278 2279 2280 2281 2282 2283 2284 2285 2286 2287 2288 2288 switch (op) { 2289 case Eq: 2290 return PyCmp_EQ; 2291 case NotEq: 2292 return PyCmp_NE; 2293 case Lt: 2294 return PyCmp_LT; 2295 case LtE: 2296 return PyCmp_LE; 2297 case Gt: 2298 return PyCmp_GT; 2299 case GtE: 2300 return PyCmp_GE; 2301 case Is: 2302 return PyCmp_IS; 2303 case IsNot: 2304 return PyCmp_IS_NOT; 2305 case In: 2306 return PyCmp_IN; 2307 case NotIn: 2308 return PyCmp_NOT_IN; 2309 default: 2310 return PyCmp_BAD; 2311 } 2289 2312 } 2290 2313 … … 2292 2315 inplace_binop(struct compiler *c, operator_ty op) 2293 2316 { 2294 2295 2296 2297 2298 2299 2300 2301 2302 2303 2304 2305 2306 2307 2308 2309 2310 2311 2312 2313 2314 2315 2316 2317 2318 2319 2320 2321 2322 2323 2324 2325 2326 2317 switch (op) { 2318 case Add: 2319 return INPLACE_ADD; 2320 case Sub: 2321 return INPLACE_SUBTRACT; 2322 case Mult: 2323 return INPLACE_MULTIPLY; 2324 case Div: 2325 if (c->c_flags && c->c_flags->cf_flags & CO_FUTURE_DIVISION) 2326 return INPLACE_TRUE_DIVIDE; 2327 else 2328 return INPLACE_DIVIDE; 2329 case Mod: 2330 return INPLACE_MODULO; 2331 case Pow: 2332 return INPLACE_POWER; 2333 case LShift: 2334 return INPLACE_LSHIFT; 2335 case RShift: 2336 return INPLACE_RSHIFT; 2337 case BitOr: 2338 return INPLACE_OR; 2339 case BitXor: 2340 return INPLACE_XOR; 2341 case BitAnd: 2342 return INPLACE_AND; 2343 case FloorDiv: 2344 return INPLACE_FLOOR_DIVIDE; 2345 default: 2346 PyErr_Format(PyExc_SystemError, 2347 "inplace binary op %d should not be possible", op); 2348 return 0; 2349 } 2327 2350 } 2328 2351 … … 2330 2353 compiler_nameop(struct compiler *c, identifier name, expr_context_ty ctx) 2331 2354 { 2332 int op, scope, arg; 2333 enum { OP_FAST, OP_GLOBAL, OP_DEREF, OP_NAME } optype; 2334 2335 PyObject *dict = c->u->u_names; 2336 PyObject *mangled; 2337 /* XXX AugStore isn't used anywhere! 
*/ 2338 2339 /* First check for assignment to __debug__. Param? */ 2340 if ((ctx == Store || ctx == AugStore || ctx == Del) 2341 && !strcmp(PyString_AS_STRING(name), "__debug__")) { 2342 return compiler_error(c, "can not assign to __debug__"); 2343 } 2344 2345 mangled = _Py_Mangle(c->u->u_private, name); 2346 if (!mangled) 2347 return 0; 2348 2349 op = 0; 2350 optype = OP_NAME; 2351 scope = PyST_GetScope(c->u->u_ste, mangled); 2352 switch (scope) { 2353 case FREE: 2354 dict = c->u->u_freevars; 2355 optype = OP_DEREF; 2356 break; 2357 case CELL: 2358 dict = c->u->u_cellvars; 2359 optype = OP_DEREF; 2360 break; 2361 case LOCAL: 2362 if (c->u->u_ste->ste_type == FunctionBlock) 2363 optype = OP_FAST; 2364 break; 2365 case GLOBAL_IMPLICIT: 2366 if (c->u->u_ste->ste_type == FunctionBlock && 2367 !c->u->u_ste->ste_unoptimized) 2368 optype = OP_GLOBAL; 2369 break; 2370 case GLOBAL_EXPLICIT: 2371 optype = OP_GLOBAL; 2372 break; 2373 default: 2374 /* scope can be 0 */ 2375 break; 2376 } 2377 2378 /* XXX Leave assert here, but handle __doc__ and the like better */ 2379 assert(scope || PyString_AS_STRING(name)[0] == '_'); 2380 2381 switch (optype) { 2382 case OP_DEREF: 2383 switch (ctx) { 2384 case Load: op = LOAD_DEREF; break; 2385 case Store: op = STORE_DEREF; break; 2386 case AugLoad: 2387 case AugStore: 2388 break; 2389 case Del: 2390 PyErr_Format(PyExc_SyntaxError, 2391 "can not delete variable '%s' referenced " 2392 "in nested scope", 2393 PyString_AS_STRING(name)); 2394 Py_DECREF(mangled); 2395 return 0; 2396 case Param: 2397 default: 2398 PyErr_SetString(PyExc_SystemError, 2399 "param invalid for deref variable"); 2400 return 0; 2401 } 2402 break; 2403 case OP_FAST: 2404 switch (ctx) { 2405 case Load: op = LOAD_FAST; break; 2406 case Store: op = STORE_FAST; break; 2407 case Del: op = DELETE_FAST; break; 2408 case AugLoad: 2409 case AugStore: 2410 break; 2411 case Param: 2412 default: 2413 PyErr_SetString(PyExc_SystemError, 2414 "param invalid for local variable"); 2415 return 0; 2416 } 2417 ADDOP_O(c, op, mangled, varnames); 2418 Py_DECREF(mangled); 2419 return 1; 2420 case OP_GLOBAL: 2421 switch (ctx) { 2422 case Load: op = LOAD_GLOBAL; break; 2423 case Store: op = STORE_GLOBAL; break; 2424 case Del: op = DELETE_GLOBAL; break; 2425 case AugLoad: 2426 case AugStore: 2427 break; 2428 case Param: 2429 default: 2430 PyErr_SetString(PyExc_SystemError, 2431 "param invalid for global variable"); 2432 return 0; 2433 } 2434 break; 2435 case OP_NAME: 2436 switch (ctx) { 2437 case Load: op = LOAD_NAME; break; 2438 case Store: op = STORE_NAME; break; 2439 case Del: op = DELETE_NAME; break; 2440 case AugLoad: 2441 case AugStore: 2442 break; 2443 case Param: 2444 default: 2445 PyErr_SetString(PyExc_SystemError, 2446 "param invalid for name variable"); 2447 return 0; 2448 } 2449 break; 2450 } 2451 2452 assert(op); 2453 arg = compiler_add_o(c, dict, mangled); 2454 Py_DECREF(mangled); 2455 if (arg < 0) 2456 return 0; 2457 return compiler_addop_i(c, op, arg); 2355 int op, scope, arg; 2356 enum { OP_FAST, OP_GLOBAL, OP_DEREF, OP_NAME } optype; 2357 2358 PyObject *dict = c->u->u_names; 2359 PyObject *mangled; 2360 /* XXX AugStore isn't used anywhere! 
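Note: the scope-to-opcode mapping that compiler_nameop implements (fast locals, globals, cell/free dereferences, plain names) is easy to confirm with dis. A small sketch, unrelated to this file's internals; the function names are arbitrary.

    import dis

    g = 1                              # global -> LOAD_GLOBAL inside functions

    def outer():
        cell = 2                       # cell in outer, free in inner -> *_DEREF
        def inner(arg):                # arg, local -> fast locals (*_FAST)
            local = arg + 1
            return g + cell + local
        return inner

    dis.dis(outer())                   # shows LOAD_GLOBAL, LOAD_DEREF, LOAD_FAST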
*/ 2361 2362 mangled = _Py_Mangle(c->u->u_private, name); 2363 if (!mangled) 2364 return 0; 2365 2366 op = 0; 2367 optype = OP_NAME; 2368 scope = PyST_GetScope(c->u->u_ste, mangled); 2369 switch (scope) { 2370 case FREE: 2371 dict = c->u->u_freevars; 2372 optype = OP_DEREF; 2373 break; 2374 case CELL: 2375 dict = c->u->u_cellvars; 2376 optype = OP_DEREF; 2377 break; 2378 case LOCAL: 2379 if (c->u->u_ste->ste_type == FunctionBlock) 2380 optype = OP_FAST; 2381 break; 2382 case GLOBAL_IMPLICIT: 2383 if (c->u->u_ste->ste_type == FunctionBlock && 2384 !c->u->u_ste->ste_unoptimized) 2385 optype = OP_GLOBAL; 2386 break; 2387 case GLOBAL_EXPLICIT: 2388 optype = OP_GLOBAL; 2389 break; 2390 default: 2391 /* scope can be 0 */ 2392 break; 2393 } 2394 2395 /* XXX Leave assert here, but handle __doc__ and the like better */ 2396 assert(scope || PyString_AS_STRING(name)[0] == '_'); 2397 2398 switch (optype) { 2399 case OP_DEREF: 2400 switch (ctx) { 2401 case Load: op = LOAD_DEREF; break; 2402 case Store: op = STORE_DEREF; break; 2403 case AugLoad: 2404 case AugStore: 2405 break; 2406 case Del: 2407 PyErr_Format(PyExc_SyntaxError, 2408 "can not delete variable '%s' referenced " 2409 "in nested scope", 2410 PyString_AS_STRING(name)); 2411 Py_DECREF(mangled); 2412 return 0; 2413 case Param: 2414 default: 2415 PyErr_SetString(PyExc_SystemError, 2416 "param invalid for deref variable"); 2417 return 0; 2418 } 2419 break; 2420 case OP_FAST: 2421 switch (ctx) { 2422 case Load: op = LOAD_FAST; break; 2423 case Store: op = STORE_FAST; break; 2424 case Del: op = DELETE_FAST; break; 2425 case AugLoad: 2426 case AugStore: 2427 break; 2428 case Param: 2429 default: 2430 PyErr_SetString(PyExc_SystemError, 2431 "param invalid for local variable"); 2432 return 0; 2433 } 2434 ADDOP_O(c, op, mangled, varnames); 2435 Py_DECREF(mangled); 2436 return 1; 2437 case OP_GLOBAL: 2438 switch (ctx) { 2439 case Load: op = LOAD_GLOBAL; break; 2440 case Store: op = STORE_GLOBAL; break; 2441 case Del: op = DELETE_GLOBAL; break; 2442 case AugLoad: 2443 case AugStore: 2444 break; 2445 case Param: 2446 default: 2447 PyErr_SetString(PyExc_SystemError, 2448 "param invalid for global variable"); 2449 return 0; 2450 } 2451 break; 2452 case OP_NAME: 2453 switch (ctx) { 2454 case Load: op = LOAD_NAME; break; 2455 case Store: op = STORE_NAME; break; 2456 case Del: op = DELETE_NAME; break; 2457 case AugLoad: 2458 case AugStore: 2459 break; 2460 case Param: 2461 default: 2462 PyErr_SetString(PyExc_SystemError, 2463 "param invalid for name variable"); 2464 return 0; 2465 } 2466 break; 2467 } 2468 2469 assert(op); 2470 arg = compiler_add_o(c, dict, mangled); 2471 Py_DECREF(mangled); 2472 if (arg < 0) 2473 return 0; 2474 return compiler_addop_i(c, op, arg); 2458 2475 } 2459 2476 … … 2461 2478 compiler_boolop(struct compiler *c, expr_ty e) 2462 2479 { 2463 basicblock *end; 2464 int jumpi, i, n; 2465 asdl_seq *s; 2466 2467 assert(e->kind == BoolOp_kind); 2468 if (e->v.BoolOp.op == And) 2469 jumpi = JUMP_IF_FALSE; 2470 else 2471 jumpi = JUMP_IF_TRUE; 2472 end = compiler_new_block(c); 2473 if (end == NULL) 2474 return 0; 2475 s = e->v.BoolOp.values; 2476 n = asdl_seq_LEN(s) - 1; 2477 assert(n >= 0); 2478 for (i = 0; i < n; ++i) { 2479 VISIT(c, expr, (expr_ty)asdl_seq_GET(s, i)); 2480 ADDOP_JREL(c, jumpi, end); 2481 ADDOP(c, POP_TOP) 2482 } 2483 VISIT(c, expr, (expr_ty)asdl_seq_GET(s, n)); 2484 compiler_use_next_block(c, end); 2485 return 1; 2480 basicblock *end; 2481 int jumpi, i, n; 2482 asdl_seq *s; 2483 2484 assert(e->kind == BoolOp_kind); 2485 if 
(e->v.BoolOp.op == And) 2486 jumpi = JUMP_IF_FALSE_OR_POP; 2487 else 2488 jumpi = JUMP_IF_TRUE_OR_POP; 2489 end = compiler_new_block(c); 2490 if (end == NULL) 2491 return 0; 2492 s = e->v.BoolOp.values; 2493 n = asdl_seq_LEN(s) - 1; 2494 assert(n >= 0); 2495 for (i = 0; i < n; ++i) { 2496 VISIT(c, expr, (expr_ty)asdl_seq_GET(s, i)); 2497 ADDOP_JABS(c, jumpi, end); 2498 } 2499 VISIT(c, expr, (expr_ty)asdl_seq_GET(s, n)); 2500 compiler_use_next_block(c, end); 2501 return 1; 2486 2502 } 2487 2503 … … 2489 2505 compiler_list(struct compiler *c, expr_ty e) 2490 2506 { 2491 2492 2493 2494 2495 2496 2497 2498 2499 2507 int n = asdl_seq_LEN(e->v.List.elts); 2508 if (e->v.List.ctx == Store) { 2509 ADDOP_I(c, UNPACK_SEQUENCE, n); 2510 } 2511 VISIT_SEQ(c, expr, e->v.List.elts); 2512 if (e->v.List.ctx == Load) { 2513 ADDOP_I(c, BUILD_LIST, n); 2514 } 2515 return 1; 2500 2516 } 2501 2517 … … 2503 2519 compiler_tuple(struct compiler *c, expr_ty e) 2504 2520 { 2505 2506 2507 2508 2509 2510 2511 2512 2513 2521 int n = asdl_seq_LEN(e->v.Tuple.elts); 2522 if (e->v.Tuple.ctx == Store) { 2523 ADDOP_I(c, UNPACK_SEQUENCE, n); 2524 } 2525 VISIT_SEQ(c, expr, e->v.Tuple.elts); 2526 if (e->v.Tuple.ctx == Load) { 2527 ADDOP_I(c, BUILD_TUPLE, n); 2528 } 2529 return 1; 2514 2530 } 2515 2531 … … 2517 2533 compiler_compare(struct compiler *c, expr_ty e) 2518 2534 { 2519 int i, n; 2520 basicblock *cleanup = NULL; 2521 2522 /* XXX the logic can be cleaned up for 1 or multiple comparisons */ 2523 VISIT(c, expr, e->v.Compare.left); 2524 n = asdl_seq_LEN(e->v.Compare.ops); 2525 assert(n > 0); 2526 if (n > 1) { 2527 cleanup = compiler_new_block(c); 2528 if (cleanup == NULL) 2529 return 0; 2530 VISIT(c, expr, 2531 (expr_ty)asdl_seq_GET(e->v.Compare.comparators, 0)); 2532 } 2533 for (i = 1; i < n; i++) { 2534 ADDOP(c, DUP_TOP); 2535 ADDOP(c, ROT_THREE); 2536 ADDOP_I(c, COMPARE_OP, 2537 cmpop((cmpop_ty)(asdl_seq_GET( 2538 e->v.Compare.ops, i - 1)))); 2539 ADDOP_JREL(c, JUMP_IF_FALSE, cleanup); 2540 NEXT_BLOCK(c); 2541 ADDOP(c, POP_TOP); 2542 if (i < (n - 1)) 2543 VISIT(c, expr, 2544 (expr_ty)asdl_seq_GET(e->v.Compare.comparators, i)); 2545 } 2546 VISIT(c, expr, (expr_ty)asdl_seq_GET(e->v.Compare.comparators, n - 1)); 2547 ADDOP_I(c, COMPARE_OP, 2548 cmpop((cmpop_ty)(asdl_seq_GET(e->v.Compare.ops, n - 1)))); 2549 if (n > 1) { 2550 basicblock *end = compiler_new_block(c); 2551 if (end == NULL) 2552 return 0; 2553 ADDOP_JREL(c, JUMP_FORWARD, end); 2554 compiler_use_next_block(c, cleanup); 2555 ADDOP(c, ROT_TWO); 2556 ADDOP(c, POP_TOP); 2557 compiler_use_next_block(c, end); 2558 } 2559 return 1; 2535 int i, n; 2536 basicblock *cleanup = NULL; 2537 2538 /* XXX the logic can be cleaned up for 1 or multiple comparisons */ 2539 VISIT(c, expr, e->v.Compare.left); 2540 n = asdl_seq_LEN(e->v.Compare.ops); 2541 assert(n > 0); 2542 if (n > 1) { 2543 cleanup = compiler_new_block(c); 2544 if (cleanup == NULL) 2545 return 0; 2546 VISIT(c, expr, 2547 (expr_ty)asdl_seq_GET(e->v.Compare.comparators, 0)); 2548 } 2549 for (i = 1; i < n; i++) { 2550 ADDOP(c, DUP_TOP); 2551 ADDOP(c, ROT_THREE); 2552 ADDOP_I(c, COMPARE_OP, 2553 cmpop((cmpop_ty)(asdl_seq_GET( 2554 e->v.Compare.ops, i - 1)))); 2555 ADDOP_JABS(c, JUMP_IF_FALSE_OR_POP, cleanup); 2556 NEXT_BLOCK(c); 2557 if (i < (n - 1)) 2558 VISIT(c, expr, 2559 (expr_ty)asdl_seq_GET(e->v.Compare.comparators, i)); 2560 } 2561 VISIT(c, expr, (expr_ty)asdl_seq_GET(e->v.Compare.comparators, n - 1)); 2562 ADDOP_I(c, COMPARE_OP, 2563 cmpop((cmpop_ty)(asdl_seq_GET(e->v.Compare.ops, n - 1)))); 2564 if (n > 1) { 
2565 basicblock *end = compiler_new_block(c); 2566 if (end == NULL) 2567 return 0; 2568 ADDOP_JREL(c, JUMP_FORWARD, end); 2569 compiler_use_next_block(c, cleanup); 2570 ADDOP(c, ROT_TWO); 2571 ADDOP(c, POP_TOP); 2572 compiler_use_next_block(c, end); 2573 } 2574 return 1; 2560 2575 } 2561 2576 … … 2563 2578 compiler_call(struct compiler *c, expr_ty e) 2564 2579 { 2565 int n, code = 0; 2566 2567 VISIT(c, expr, e->v.Call.func); 2568 n = asdl_seq_LEN(e->v.Call.args); 2569 VISIT_SEQ(c, expr, e->v.Call.args); 2570 if (e->v.Call.keywords) { 2571 VISIT_SEQ(c, keyword, e->v.Call.keywords); 2572 n |= asdl_seq_LEN(e->v.Call.keywords) << 8; 2573 } 2574 if (e->v.Call.starargs) { 2575 VISIT(c, expr, e->v.Call.starargs); 2576 code |= 1; 2577 } 2578 if (e->v.Call.kwargs) { 2579 VISIT(c, expr, e->v.Call.kwargs); 2580 code |= 2; 2581 } 2582 switch (code) { 2583 case 0: 2584 ADDOP_I(c, CALL_FUNCTION, n); 2585 break; 2586 case 1: 2587 ADDOP_I(c, CALL_FUNCTION_VAR, n); 2588 break; 2589 case 2: 2590 ADDOP_I(c, CALL_FUNCTION_KW, n); 2591 break; 2592 case 3: 2593 ADDOP_I(c, CALL_FUNCTION_VAR_KW, n); 2594 break; 2595 } 2596 return 1; 2597 } 2598 2599 static int 2600 compiler_listcomp_generator(struct compiler *c, PyObject *tmpname, 2601 asdl_seq *generators, int gen_index, 2602 expr_ty elt) 2603 { 2604 /* generate code for the iterator, then each of the ifs, 2605 and then write to the element */ 2606 2607 comprehension_ty l; 2608 basicblock *start, *anchor, *skip, *if_cleanup; 2609 int i, n; 2610 2611 start = compiler_new_block(c); 2612 skip = compiler_new_block(c); 2613 if_cleanup = compiler_new_block(c); 2614 anchor = compiler_new_block(c); 2615 2616 if (start == NULL || skip == NULL || if_cleanup == NULL || 2617 anchor == NULL) 2618 return 0; 2619 2620 l = (comprehension_ty)asdl_seq_GET(generators, gen_index); 2621 VISIT(c, expr, l->iter); 2622 ADDOP(c, GET_ITER); 2623 compiler_use_next_block(c, start); 2624 ADDOP_JREL(c, FOR_ITER, anchor); 2625 NEXT_BLOCK(c); 2626 VISIT(c, expr, l->target); 2627 2628 /* XXX this needs to be cleaned up...a lot! 
*/ 2629 n = asdl_seq_LEN(l->ifs); 2630 for (i = 0; i < n; i++) { 2631 expr_ty e = (expr_ty)asdl_seq_GET(l->ifs, i); 2632 VISIT(c, expr, e); 2633 ADDOP_JREL(c, JUMP_IF_FALSE, if_cleanup); 2634 NEXT_BLOCK(c); 2635 ADDOP(c, POP_TOP); 2636 } 2637 2638 if (++gen_index < asdl_seq_LEN(generators)) 2639 if (!compiler_listcomp_generator(c, tmpname, 2640 generators, gen_index, elt)) 2641 return 0; 2642 2643 /* only append after the last for generator */ 2644 if (gen_index >= asdl_seq_LEN(generators)) { 2645 if (!compiler_nameop(c, tmpname, Load)) 2646 return 0; 2647 VISIT(c, expr, elt); 2648 ADDOP(c, LIST_APPEND); 2649 2650 compiler_use_next_block(c, skip); 2651 } 2652 for (i = 0; i < n; i++) { 2653 ADDOP_I(c, JUMP_FORWARD, 1); 2654 if (i == 0) 2655 compiler_use_next_block(c, if_cleanup); 2656 ADDOP(c, POP_TOP); 2657 } 2658 ADDOP_JABS(c, JUMP_ABSOLUTE, start); 2659 compiler_use_next_block(c, anchor); 2660 /* delete the temporary list name added to locals */ 2661 if (gen_index == 1) 2662 if (!compiler_nameop(c, tmpname, Del)) 2663 return 0; 2664 2665 return 1; 2580 int n, code = 0; 2581 2582 VISIT(c, expr, e->v.Call.func); 2583 n = asdl_seq_LEN(e->v.Call.args); 2584 VISIT_SEQ(c, expr, e->v.Call.args); 2585 if (e->v.Call.keywords) { 2586 VISIT_SEQ(c, keyword, e->v.Call.keywords); 2587 n |= asdl_seq_LEN(e->v.Call.keywords) << 8; 2588 } 2589 if (e->v.Call.starargs) { 2590 VISIT(c, expr, e->v.Call.starargs); 2591 code |= 1; 2592 } 2593 if (e->v.Call.kwargs) { 2594 VISIT(c, expr, e->v.Call.kwargs); 2595 code |= 2; 2596 } 2597 switch (code) { 2598 case 0: 2599 ADDOP_I(c, CALL_FUNCTION, n); 2600 break; 2601 case 1: 2602 ADDOP_I(c, CALL_FUNCTION_VAR, n); 2603 break; 2604 case 2: 2605 ADDOP_I(c, CALL_FUNCTION_KW, n); 2606 break; 2607 case 3: 2608 ADDOP_I(c, CALL_FUNCTION_VAR_KW, n); 2609 break; 2610 } 2611 return 1; 2612 } 2613 2614 static int 2615 compiler_listcomp_generator(struct compiler *c, asdl_seq *generators, 2616 int gen_index, expr_ty elt) 2617 { 2618 /* generate code for the iterator, then each of the ifs, 2619 and then write to the element */ 2620 2621 comprehension_ty l; 2622 basicblock *start, *anchor, *skip, *if_cleanup; 2623 int i, n; 2624 2625 start = compiler_new_block(c); 2626 skip = compiler_new_block(c); 2627 if_cleanup = compiler_new_block(c); 2628 anchor = compiler_new_block(c); 2629 2630 if (start == NULL || skip == NULL || if_cleanup == NULL || 2631 anchor == NULL) 2632 return 0; 2633 2634 l = (comprehension_ty)asdl_seq_GET(generators, gen_index); 2635 VISIT(c, expr, l->iter); 2636 ADDOP(c, GET_ITER); 2637 compiler_use_next_block(c, start); 2638 ADDOP_JREL(c, FOR_ITER, anchor); 2639 NEXT_BLOCK(c); 2640 VISIT(c, expr, l->target); 2641 2642 /* XXX this needs to be cleaned up...a lot! 
*/ 2643 n = asdl_seq_LEN(l->ifs); 2644 for (i = 0; i < n; i++) { 2645 expr_ty e = (expr_ty)asdl_seq_GET(l->ifs, i); 2646 VISIT(c, expr, e); 2647 ADDOP_JABS(c, POP_JUMP_IF_FALSE, if_cleanup); 2648 NEXT_BLOCK(c); 2649 } 2650 2651 if (++gen_index < asdl_seq_LEN(generators)) 2652 if (!compiler_listcomp_generator(c, generators, gen_index, elt)) 2653 return 0; 2654 2655 /* only append after the last for generator */ 2656 if (gen_index >= asdl_seq_LEN(generators)) { 2657 VISIT(c, expr, elt); 2658 ADDOP_I(c, LIST_APPEND, gen_index+1); 2659 2660 compiler_use_next_block(c, skip); 2661 } 2662 compiler_use_next_block(c, if_cleanup); 2663 ADDOP_JABS(c, JUMP_ABSOLUTE, start); 2664 compiler_use_next_block(c, anchor); 2665 2666 return 1; 2666 2667 } 2667 2668 … … 2669 2670 compiler_listcomp(struct compiler *c, expr_ty e) 2670 2671 { 2671 identifier tmp; 2672 int rc = 0; 2673 asdl_seq *generators = e->v.ListComp.generators; 2674 2675 assert(e->kind == ListComp_kind); 2676 tmp = compiler_new_tmpname(c); 2677 if (!tmp) 2678 return 0; 2679 ADDOP_I(c, BUILD_LIST, 0); 2680 ADDOP(c, DUP_TOP); 2681 if (compiler_nameop(c, tmp, Store)) 2682 rc = compiler_listcomp_generator(c, tmp, generators, 0, 2683 e->v.ListComp.elt); 2684 Py_DECREF(tmp); 2685 return rc; 2686 } 2687 2688 static int 2689 compiler_genexp_generator(struct compiler *c, 2690 asdl_seq *generators, int gen_index, 2691 expr_ty elt) 2692 { 2693 /* generate code for the iterator, then each of the ifs, 2694 and then write to the element */ 2695 2696 comprehension_ty ge; 2697 basicblock *start, *anchor, *skip, *if_cleanup, *end; 2698 int i, n; 2699 2700 start = compiler_new_block(c); 2701 skip = compiler_new_block(c); 2702 if_cleanup = compiler_new_block(c); 2703 anchor = compiler_new_block(c); 2704 end = compiler_new_block(c); 2705 2706 if (start == NULL || skip == NULL || if_cleanup == NULL || 2707 anchor == NULL || end == NULL) 2708 return 0; 2709 2710 ge = (comprehension_ty)asdl_seq_GET(generators, gen_index); 2711 ADDOP_JREL(c, SETUP_LOOP, end); 2712 if (!compiler_push_fblock(c, LOOP, start)) 2713 return 0; 2714 2715 if (gen_index == 0) { 2716 /* Receive outermost iter as an implicit argument */ 2717 c->u->u_argcount = 1; 2718 ADDOP_I(c, LOAD_FAST, 0); 2719 } 2720 else { 2721 /* Sub-iter - calculate on the fly */ 2722 VISIT(c, expr, ge->iter); 2723 ADDOP(c, GET_ITER); 2724 } 2725 compiler_use_next_block(c, start); 2726 ADDOP_JREL(c, FOR_ITER, anchor); 2727 NEXT_BLOCK(c); 2728 VISIT(c, expr, ge->target); 2729 2730 /* XXX this needs to be cleaned up...a lot! 
*/ 2731 n = asdl_seq_LEN(ge->ifs); 2732 for (i = 0; i < n; i++) { 2733 expr_ty e = (expr_ty)asdl_seq_GET(ge->ifs, i); 2734 VISIT(c, expr, e); 2735 ADDOP_JREL(c, JUMP_IF_FALSE, if_cleanup); 2736 NEXT_BLOCK(c); 2737 ADDOP(c, POP_TOP); 2738 } 2739 2740 if (++gen_index < asdl_seq_LEN(generators)) 2741 if (!compiler_genexp_generator(c, generators, gen_index, elt)) 2742 return 0; 2743 2744 /* only append after the last 'for' generator */ 2745 if (gen_index >= asdl_seq_LEN(generators)) { 2746 VISIT(c, expr, elt); 2747 ADDOP(c, YIELD_VALUE); 2748 ADDOP(c, POP_TOP); 2749 2750 compiler_use_next_block(c, skip); 2751 } 2752 for (i = 0; i < n; i++) { 2753 ADDOP_I(c, JUMP_FORWARD, 1); 2754 if (i == 0) 2755 compiler_use_next_block(c, if_cleanup); 2756 2757 ADDOP(c, POP_TOP); 2758 } 2759 ADDOP_JABS(c, JUMP_ABSOLUTE, start); 2760 compiler_use_next_block(c, anchor); 2761 ADDOP(c, POP_BLOCK); 2762 compiler_pop_fblock(c, LOOP, start); 2763 compiler_use_next_block(c, end); 2764 2765 return 1; 2672 assert(e->kind == ListComp_kind); 2673 ADDOP_I(c, BUILD_LIST, 0); 2674 return compiler_listcomp_generator(c, e->v.ListComp.generators, 0, 2675 e->v.ListComp.elt); 2676 } 2677 2678 /* Dict and set comprehensions and generator expressions work by creating a 2679 nested function to perform the actual iteration. This means that the 2680 iteration variables don't leak into the current scope. 2681 The defined function is called immediately following its definition, with the 2682 result of that call being the result of the expression. 2683 The LC/SC version returns the populated container, while the GE version is 2684 flagged in symtable.c as a generator, so it returns the generator object 2685 when the function is called. 2686 This code *knows* that the loop cannot contain break, continue, or return, 2687 so it cheats and skips the SETUP_LOOP/POP_BLOCK steps used in normal loops. 2688 2689 Possible cleanups: 2690 - iterate over the generator sequence instead of using recursion 2691 */ 2692 2693 static int 2694 compiler_comprehension_generator(struct compiler *c, 2695 asdl_seq *generators, int gen_index, 2696 expr_ty elt, expr_ty val, int type) 2697 { 2698 /* generate code for the iterator, then each of the ifs, 2699 and then write to the element */ 2700 2701 comprehension_ty gen; 2702 basicblock *start, *anchor, *skip, *if_cleanup; 2703 int i, n; 2704 2705 start = compiler_new_block(c); 2706 skip = compiler_new_block(c); 2707 if_cleanup = compiler_new_block(c); 2708 anchor = compiler_new_block(c); 2709 2710 if (start == NULL || skip == NULL || if_cleanup == NULL || 2711 anchor == NULL) 2712 return 0; 2713 2714 gen = (comprehension_ty)asdl_seq_GET(generators, gen_index); 2715 2716 if (gen_index == 0) { 2717 /* Receive outermost iter as an implicit argument */ 2718 c->u->u_argcount = 1; 2719 ADDOP_I(c, LOAD_FAST, 0); 2720 } 2721 else { 2722 /* Sub-iter - calculate on the fly */ 2723 VISIT(c, expr, gen->iter); 2724 ADDOP(c, GET_ITER); 2725 } 2726 compiler_use_next_block(c, start); 2727 ADDOP_JREL(c, FOR_ITER, anchor); 2728 NEXT_BLOCK(c); 2729 VISIT(c, expr, gen->target); 2730 2731 /* XXX this needs to be cleaned up...a lot! 
*/ 2732 n = asdl_seq_LEN(gen->ifs); 2733 for (i = 0; i < n; i++) { 2734 expr_ty e = (expr_ty)asdl_seq_GET(gen->ifs, i); 2735 VISIT(c, expr, e); 2736 ADDOP_JABS(c, POP_JUMP_IF_FALSE, if_cleanup); 2737 NEXT_BLOCK(c); 2738 } 2739 2740 if (++gen_index < asdl_seq_LEN(generators)) 2741 if (!compiler_comprehension_generator(c, 2742 generators, gen_index, 2743 elt, val, type)) 2744 return 0; 2745 2746 /* only append after the last for generator */ 2747 if (gen_index >= asdl_seq_LEN(generators)) { 2748 /* comprehension specific code */ 2749 switch (type) { 2750 case COMP_GENEXP: 2751 VISIT(c, expr, elt); 2752 ADDOP(c, YIELD_VALUE); 2753 ADDOP(c, POP_TOP); 2754 break; 2755 case COMP_SETCOMP: 2756 VISIT(c, expr, elt); 2757 ADDOP_I(c, SET_ADD, gen_index + 1); 2758 break; 2759 case COMP_DICTCOMP: 2760 /* With 'd[k] = v', v is evaluated before k, so we do 2761 the same. */ 2762 VISIT(c, expr, val); 2763 VISIT(c, expr, elt); 2764 ADDOP_I(c, MAP_ADD, gen_index + 1); 2765 break; 2766 default: 2767 return 0; 2768 } 2769 2770 compiler_use_next_block(c, skip); 2771 } 2772 compiler_use_next_block(c, if_cleanup); 2773 ADDOP_JABS(c, JUMP_ABSOLUTE, start); 2774 compiler_use_next_block(c, anchor); 2775 2776 return 1; 2777 } 2778 2779 static int 2780 compiler_comprehension(struct compiler *c, expr_ty e, int type, identifier name, 2781 asdl_seq *generators, expr_ty elt, expr_ty val) 2782 { 2783 PyCodeObject *co = NULL; 2784 expr_ty outermost_iter; 2785 2786 outermost_iter = ((comprehension_ty) 2787 asdl_seq_GET(generators, 0))->iter; 2788 2789 if (!compiler_enter_scope(c, name, (void *)e, e->lineno)) 2790 goto error; 2791 2792 if (type != COMP_GENEXP) { 2793 int op; 2794 switch (type) { 2795 case COMP_SETCOMP: 2796 op = BUILD_SET; 2797 break; 2798 case COMP_DICTCOMP: 2799 op = BUILD_MAP; 2800 break; 2801 default: 2802 PyErr_Format(PyExc_SystemError, 2803 "unknown comprehension type %d", type); 2804 goto error_in_scope; 2805 } 2806 2807 ADDOP_I(c, op, 0); 2808 } 2809 2810 if (!compiler_comprehension_generator(c, generators, 0, elt, 2811 val, type)) 2812 goto error_in_scope; 2813 2814 if (type != COMP_GENEXP) { 2815 ADDOP(c, RETURN_VALUE); 2816 } 2817 2818 co = assemble(c, 1); 2819 compiler_exit_scope(c); 2820 if (co == NULL) 2821 goto error; 2822 2823 if (!compiler_make_closure(c, co, 0)) 2824 goto error; 2825 Py_DECREF(co); 2826 2827 VISIT(c, expr, outermost_iter); 2828 ADDOP(c, GET_ITER); 2829 ADDOP_I(c, CALL_FUNCTION, 1); 2830 return 1; 2831 error_in_scope: 2832 compiler_exit_scope(c); 2833 error: 2834 Py_XDECREF(co); 2835 return 0; 2766 2836 } 2767 2837 … … 2769 2839 compiler_genexp(struct compiler *c, expr_ty e) 2770 2840 { 2771 static identifier name; 2772 PyCodeObject *co; 2773 expr_ty outermost_iter = ((comprehension_ty) 2774 (asdl_seq_GET(e->v.GeneratorExp.generators, 2775 0)))->iter; 2776 2777 if (!name) { 2778 name = PyString_FromString("<genexpr>"); 2779 if (!name) 2780 return 0; 2781 } 2782 2783 if (!compiler_enter_scope(c, name, (void *)e, e->lineno)) 2784 return 0; 2785 compiler_genexp_generator(c, e->v.GeneratorExp.generators, 0, 2786 e->v.GeneratorExp.elt); 2787 co = assemble(c, 1); 2788 compiler_exit_scope(c); 2789 if (co == NULL) 2790 return 0; 2791 2792 compiler_make_closure(c, co, 0); 2793 Py_DECREF(co); 2794 2795 VISIT(c, expr, outermost_iter); 2796 ADDOP(c, GET_ITER); 2797 ADDOP_I(c, CALL_FUNCTION, 1); 2798 2799 return 1; 2841 static identifier name; 2842 if (!name) { 2843 name = PyString_FromString("<genexpr>"); 2844 if (!name) 2845 return 0; 2846 } 2847 assert(e->kind == GeneratorExp_kind); 
2848 return compiler_comprehension(c, e, COMP_GENEXP, name, 2849 e->v.GeneratorExp.generators, 2850 e->v.GeneratorExp.elt, NULL); 2851 } 2852 2853 static int 2854 compiler_setcomp(struct compiler *c, expr_ty e) 2855 { 2856 static identifier name; 2857 if (!name) { 2858 name = PyString_FromString("<setcomp>"); 2859 if (!name) 2860 return 0; 2861 } 2862 assert(e->kind == SetComp_kind); 2863 return compiler_comprehension(c, e, COMP_SETCOMP, name, 2864 e->v.SetComp.generators, 2865 e->v.SetComp.elt, NULL); 2866 } 2867 2868 static int 2869 compiler_dictcomp(struct compiler *c, expr_ty e) 2870 { 2871 static identifier name; 2872 if (!name) { 2873 name = PyString_FromString("<dictcomp>"); 2874 if (!name) 2875 return 0; 2876 } 2877 assert(e->kind == DictComp_kind); 2878 return compiler_comprehension(c, e, COMP_DICTCOMP, name, 2879 e->v.DictComp.generators, 2880 e->v.DictComp.key, e->v.DictComp.value); 2800 2881 } 2801 2882 … … 2803 2884 compiler_visit_keyword(struct compiler *c, keyword_ty k) 2804 2885 { 2805 2806 2807 2808 } 2809 2810 /* Test whether expression is constant. 2886 ADDOP_O(c, LOAD_CONST, k->arg, consts); 2887 VISIT(c, expr, k->value); 2888 return 1; 2889 } 2890 2891 /* Test whether expression is constant. For constants, report 2811 2892 whether they are true or false. 2812 2893 … … 2817 2898 expr_constant(expr_ty e) 2818 2899 { 2819 2820 2821 2822 2823 2824 2825 2826 2827 2828 2829 2830 2831 2832 2833 2900 switch (e->kind) { 2901 case Num_kind: 2902 return PyObject_IsTrue(e->v.Num.n); 2903 case Str_kind: 2904 return PyObject_IsTrue(e->v.Str.s); 2905 case Name_kind: 2906 /* __debug__ is not assignable, so we can optimize 2907 * it away in if and while statements */ 2908 if (strcmp(PyString_AS_STRING(e->v.Name.id), 2909 "__debug__") == 0) 2910 return ! Py_OptimizeFlag; 2911 /* fall through */ 2912 default: 2913 return -1; 2914 } 2834 2915 } 2835 2916 … … 2837 2918 Implements the with statement from PEP 343. 2838 2919 2839 The semantics outlined in that PEP are as follows: 2920 The semantics outlined in that PEP are as follows: 2840 2921 2841 2922 with EXPR as VAR: 2842 2923 BLOCK 2843 2924 2844 2925 It is implemented roughly as: 2845 2926 2846 2927 context = EXPR 2847 2928 exit = context.__exit__ # not calling it … … 2852 2933 finally: 2853 2934 if an exception was raised: 2854 2935 exc = copy of (exception, instance, traceback) 2855 2936 else: 2856 2937 exc = (None, None, None) 2857 2938 exit(*exc) 2858 2939 */ … … 2860 2941 compiler_with(struct compiler *c, stmt_ty s) 2861 2942 { 2862 static identifier enter_attr, exit_attr;2863 2943 basicblock *block, *finally; 2864 identifier tmpvalue = NULL;2865 2944 2866 2945 assert(s->kind == With_kind); 2867 2868 if (!enter_attr) {2869 enter_attr = PyString_InternFromString("__enter__");2870 if (!enter_attr)2871 return 0;2872 }2873 if (!exit_attr) {2874 exit_attr = PyString_InternFromString("__exit__");2875 if (!exit_attr)2876 return 0;2877 }2878 2946 2879 2947 block = compiler_new_block(c); 2880 2948 finally = compiler_new_block(c); 2881 2949 if (!block || !finally) 2882 return 0; 2883 2884 if (s->v.With.optional_vars) { 2885 /* Create a temporary variable to hold context.__enter__(). 2886 We need to do this rather than preserving it on the stack 2887 because SETUP_FINALLY remembers the stack level. 2888 We need to do the assignment *inside* the try/finally 2889 so that context.__exit__() is called when the assignment 2890 fails. 
But we need to call context.__enter__() *before* 2891 the try/finally so that if it fails we won't call 2892 context.__exit__(). 2893 */ 2894 tmpvalue = compiler_new_tmpname(c); 2895 if (tmpvalue == NULL) 2896 return 0; 2897 PyArena_AddPyObject(c->c_arena, tmpvalue); 2898 } 2950 return 0; 2899 2951 2900 2952 /* Evaluate EXPR */ 2901 2953 VISIT(c, expr, s->v.With.context_expr); 2902 2903 /* Squirrel away context.__exit__ by stuffing it under context */ 2904 ADDOP(c, DUP_TOP); 2905 ADDOP_O(c, LOAD_ATTR, exit_attr, names); 2906 ADDOP(c, ROT_TWO); 2907 2908 /* Call context.__enter__() */ 2909 ADDOP_O(c, LOAD_ATTR, enter_attr, names); 2910 ADDOP_I(c, CALL_FUNCTION, 0); 2954 ADDOP_JREL(c, SETUP_WITH, finally); 2955 2956 /* SETUP_WITH pushes a finally block. */ 2957 compiler_use_next_block(c, block); 2958 /* Note that the block is actually called SETUP_WITH in ceval.c, but 2959 functions the same as SETUP_FINALLY except that exceptions are 2960 normalized. */ 2961 if (!compiler_push_fblock(c, FINALLY_TRY, block)) { 2962 return 0; 2963 } 2911 2964 2912 2965 if (s->v.With.optional_vars) { 2913 /* Store it in tmpvalue */ 2914 if (!compiler_nameop(c, tmpvalue, Store)) 2915 return 0; 2966 VISIT(c, expr, s->v.With.optional_vars); 2916 2967 } 2917 2968 else { 2918 /* Discard result from context.__enter__() */ 2919 ADDOP(c, POP_TOP); 2920 } 2921 2922 /* Start the try block */ 2923 ADDOP_JREL(c, SETUP_FINALLY, finally); 2924 2925 compiler_use_next_block(c, block); 2926 if (!compiler_push_fblock(c, FINALLY_TRY, block)) { 2927 return 0; 2928 } 2929 2930 if (s->v.With.optional_vars) { 2931 /* Bind saved result of context.__enter__() to VAR */ 2932 if (!compiler_nameop(c, tmpvalue, Load) || 2933 !compiler_nameop(c, tmpvalue, Del)) 2934 return 0; 2935 VISIT(c, expr, s->v.With.optional_vars); 2969 /* Discard result from context.__enter__() */ 2970 ADDOP(c, POP_TOP); 2936 2971 } 2937 2972 … … 2946 2981 compiler_use_next_block(c, finally); 2947 2982 if (!compiler_push_fblock(c, FINALLY_END, finally)) 2948 2983 return 0; 2949 2984 2950 2985 /* Finally block starts; context.__exit__ is on the stack under … … 2962 2997 compiler_visit_expr(struct compiler *c, expr_ty e) 2963 2998 { 2964 int i, n; 2965 2966 /* If expr e has a different line number than the last expr/stmt, 2967 set a new line number for the next instruction. 2968 */ 2969 if (e->lineno > c->u->u_lineno) { 2970 c->u->u_lineno = e->lineno; 2971 c->u->u_lineno_set = false; 2972 } 2973 switch (e->kind) { 2974 case BoolOp_kind: 2975 return compiler_boolop(c, e); 2976 case BinOp_kind: 2977 VISIT(c, expr, e->v.BinOp.left); 2978 VISIT(c, expr, e->v.BinOp.right); 2979 ADDOP(c, binop(c, e->v.BinOp.op)); 2980 break; 2981 case UnaryOp_kind: 2982 VISIT(c, expr, e->v.UnaryOp.operand); 2983 ADDOP(c, unaryop(e->v.UnaryOp.op)); 2984 break; 2985 case Lambda_kind: 2986 return compiler_lambda(c, e); 2987 case IfExp_kind: 2988 return compiler_ifexp(c, e); 2989 case Dict_kind: 2990 n = asdl_seq_LEN(e->v.Dict.values); 2991 ADDOP_I(c, BUILD_MAP, (n>0xFFFF ? 
0xFFFF : n)); 2992 for (i = 0; i < n; i++) { 2993 VISIT(c, expr, 2994 (expr_ty)asdl_seq_GET(e->v.Dict.values, i)); 2995 VISIT(c, expr, 2996 (expr_ty)asdl_seq_GET(e->v.Dict.keys, i)); 2997 ADDOP(c, STORE_MAP); 2998 } 2999 break; 3000 case ListComp_kind: 3001 return compiler_listcomp(c, e); 3002 case GeneratorExp_kind: 3003 return compiler_genexp(c, e); 3004 case Yield_kind: 3005 if (c->u->u_ste->ste_type != FunctionBlock) 3006 return compiler_error(c, "'yield' outside function"); 3007 if (e->v.Yield.value) { 3008 VISIT(c, expr, e->v.Yield.value); 3009 } 3010 else { 3011 ADDOP_O(c, LOAD_CONST, Py_None, consts); 3012 } 3013 ADDOP(c, YIELD_VALUE); 3014 break; 3015 case Compare_kind: 3016 return compiler_compare(c, e); 3017 case Call_kind: 3018 return compiler_call(c, e); 3019 case Repr_kind: 3020 VISIT(c, expr, e->v.Repr.value); 3021 ADDOP(c, UNARY_CONVERT); 3022 break; 3023 case Num_kind: 3024 ADDOP_O(c, LOAD_CONST, e->v.Num.n, consts); 3025 break; 3026 case Str_kind: 3027 ADDOP_O(c, LOAD_CONST, e->v.Str.s, consts); 3028 break; 3029 /* The following exprs can be assignment targets. */ 3030 case Attribute_kind: 3031 if (e->v.Attribute.ctx != AugStore) 3032 VISIT(c, expr, e->v.Attribute.value); 3033 switch (e->v.Attribute.ctx) { 3034 case AugLoad: 3035 ADDOP(c, DUP_TOP); 3036 /* Fall through to load */ 3037 case Load: 3038 ADDOP_NAME(c, LOAD_ATTR, e->v.Attribute.attr, names); 3039 break; 3040 case AugStore: 3041 ADDOP(c, ROT_TWO); 3042 /* Fall through to save */ 3043 case Store: 3044 ADDOP_NAME(c, STORE_ATTR, e->v.Attribute.attr, names); 3045 break; 3046 case Del: 3047 ADDOP_NAME(c, DELETE_ATTR, e->v.Attribute.attr, names); 3048 break; 3049 case Param: 3050 default: 3051 PyErr_SetString(PyExc_SystemError, 3052 "param invalid in attribute expression"); 3053 return 0; 3054 } 3055 break; 3056 case Subscript_kind: 3057 switch (e->v.Subscript.ctx) { 3058 case AugLoad: 3059 VISIT(c, expr, e->v.Subscript.value); 3060 VISIT_SLICE(c, e->v.Subscript.slice, AugLoad); 3061 break; 3062 case Load: 3063 VISIT(c, expr, e->v.Subscript.value); 3064 VISIT_SLICE(c, e->v.Subscript.slice, Load); 3065 break; 3066 case AugStore: 3067 VISIT_SLICE(c, e->v.Subscript.slice, AugStore); 3068 break; 3069 case Store: 3070 VISIT(c, expr, e->v.Subscript.value); 3071 VISIT_SLICE(c, e->v.Subscript.slice, Store); 3072 break; 3073 case Del: 3074 VISIT(c, expr, e->v.Subscript.value); 3075 VISIT_SLICE(c, e->v.Subscript.slice, Del); 3076 break; 3077 case Param: 3078 default: 3079 PyErr_SetString(PyExc_SystemError, 3080 "param invalid in subscript expression"); 3081 return 0; 3082 } 3083 break; 3084 case Name_kind: 3085 return compiler_nameop(c, e->v.Name.id, e->v.Name.ctx); 3086 /* child nodes of List and Tuple will have expr_context set */ 3087 case List_kind: 3088 return compiler_list(c, e); 3089 case Tuple_kind: 3090 return compiler_tuple(c, e); 3091 } 3092 return 1; 2999 int i, n; 3000 3001 /* If expr e has a different line number than the last expr/stmt, 3002 set a new line number for the next instruction. 
3003 */ 3004 if (e->lineno > c->u->u_lineno) { 3005 c->u->u_lineno = e->lineno; 3006 c->u->u_lineno_set = false; 3007 } 3008 switch (e->kind) { 3009 case BoolOp_kind: 3010 return compiler_boolop(c, e); 3011 case BinOp_kind: 3012 VISIT(c, expr, e->v.BinOp.left); 3013 VISIT(c, expr, e->v.BinOp.right); 3014 ADDOP(c, binop(c, e->v.BinOp.op)); 3015 break; 3016 case UnaryOp_kind: 3017 VISIT(c, expr, e->v.UnaryOp.operand); 3018 ADDOP(c, unaryop(e->v.UnaryOp.op)); 3019 break; 3020 case Lambda_kind: 3021 return compiler_lambda(c, e); 3022 case IfExp_kind: 3023 return compiler_ifexp(c, e); 3024 case Dict_kind: 3025 n = asdl_seq_LEN(e->v.Dict.values); 3026 ADDOP_I(c, BUILD_MAP, (n>0xFFFF ? 0xFFFF : n)); 3027 for (i = 0; i < n; i++) { 3028 VISIT(c, expr, 3029 (expr_ty)asdl_seq_GET(e->v.Dict.values, i)); 3030 VISIT(c, expr, 3031 (expr_ty)asdl_seq_GET(e->v.Dict.keys, i)); 3032 ADDOP(c, STORE_MAP); 3033 } 3034 break; 3035 case Set_kind: 3036 n = asdl_seq_LEN(e->v.Set.elts); 3037 VISIT_SEQ(c, expr, e->v.Set.elts); 3038 ADDOP_I(c, BUILD_SET, n); 3039 break; 3040 case ListComp_kind: 3041 return compiler_listcomp(c, e); 3042 case SetComp_kind: 3043 return compiler_setcomp(c, e); 3044 case DictComp_kind: 3045 return compiler_dictcomp(c, e); 3046 case GeneratorExp_kind: 3047 return compiler_genexp(c, e); 3048 case Yield_kind: 3049 if (c->u->u_ste->ste_type != FunctionBlock) 3050 return compiler_error(c, "'yield' outside function"); 3051 if (e->v.Yield.value) { 3052 VISIT(c, expr, e->v.Yield.value); 3053 } 3054 else { 3055 ADDOP_O(c, LOAD_CONST, Py_None, consts); 3056 } 3057 ADDOP(c, YIELD_VALUE); 3058 break; 3059 case Compare_kind: 3060 return compiler_compare(c, e); 3061 case Call_kind: 3062 return compiler_call(c, e); 3063 case Repr_kind: 3064 VISIT(c, expr, e->v.Repr.value); 3065 ADDOP(c, UNARY_CONVERT); 3066 break; 3067 case Num_kind: 3068 ADDOP_O(c, LOAD_CONST, e->v.Num.n, consts); 3069 break; 3070 case Str_kind: 3071 ADDOP_O(c, LOAD_CONST, e->v.Str.s, consts); 3072 break; 3073 /* The following exprs can be assignment targets. 
*/ 3074 case Attribute_kind: 3075 if (e->v.Attribute.ctx != AugStore) 3076 VISIT(c, expr, e->v.Attribute.value); 3077 switch (e->v.Attribute.ctx) { 3078 case AugLoad: 3079 ADDOP(c, DUP_TOP); 3080 /* Fall through to load */ 3081 case Load: 3082 ADDOP_NAME(c, LOAD_ATTR, e->v.Attribute.attr, names); 3083 break; 3084 case AugStore: 3085 ADDOP(c, ROT_TWO); 3086 /* Fall through to save */ 3087 case Store: 3088 ADDOP_NAME(c, STORE_ATTR, e->v.Attribute.attr, names); 3089 break; 3090 case Del: 3091 ADDOP_NAME(c, DELETE_ATTR, e->v.Attribute.attr, names); 3092 break; 3093 case Param: 3094 default: 3095 PyErr_SetString(PyExc_SystemError, 3096 "param invalid in attribute expression"); 3097 return 0; 3098 } 3099 break; 3100 case Subscript_kind: 3101 switch (e->v.Subscript.ctx) { 3102 case AugLoad: 3103 VISIT(c, expr, e->v.Subscript.value); 3104 VISIT_SLICE(c, e->v.Subscript.slice, AugLoad); 3105 break; 3106 case Load: 3107 VISIT(c, expr, e->v.Subscript.value); 3108 VISIT_SLICE(c, e->v.Subscript.slice, Load); 3109 break; 3110 case AugStore: 3111 VISIT_SLICE(c, e->v.Subscript.slice, AugStore); 3112 break; 3113 case Store: 3114 VISIT(c, expr, e->v.Subscript.value); 3115 VISIT_SLICE(c, e->v.Subscript.slice, Store); 3116 break; 3117 case Del: 3118 VISIT(c, expr, e->v.Subscript.value); 3119 VISIT_SLICE(c, e->v.Subscript.slice, Del); 3120 break; 3121 case Param: 3122 default: 3123 PyErr_SetString(PyExc_SystemError, 3124 "param invalid in subscript expression"); 3125 return 0; 3126 } 3127 break; 3128 case Name_kind: 3129 return compiler_nameop(c, e->v.Name.id, e->v.Name.ctx); 3130 /* child nodes of List and Tuple will have expr_context set */ 3131 case List_kind: 3132 return compiler_list(c, e); 3133 case Tuple_kind: 3134 return compiler_tuple(c, e); 3135 } 3136 return 1; 3093 3137 } 3094 3138 … … 3096 3140 compiler_augassign(struct compiler *c, stmt_ty s) 3097 3141 { 3098 3099 3100 3101 3102 3103 3104 3105 3106 3107 3108 3109 3110 3111 3112 3113 3114 3115 3116 3117 3118 3119 3120 3121 3122 3123 3124 3125 3126 3127 3128 3129 3130 3131 3132 3133 PyErr_Format(PyExc_SystemError, 3134 3135 3136 3137 3138 3142 expr_ty e = s->v.AugAssign.target; 3143 expr_ty auge; 3144 3145 assert(s->kind == AugAssign_kind); 3146 3147 switch (e->kind) { 3148 case Attribute_kind: 3149 auge = Attribute(e->v.Attribute.value, e->v.Attribute.attr, 3150 AugLoad, e->lineno, e->col_offset, c->c_arena); 3151 if (auge == NULL) 3152 return 0; 3153 VISIT(c, expr, auge); 3154 VISIT(c, expr, s->v.AugAssign.value); 3155 ADDOP(c, inplace_binop(c, s->v.AugAssign.op)); 3156 auge->v.Attribute.ctx = AugStore; 3157 VISIT(c, expr, auge); 3158 break; 3159 case Subscript_kind: 3160 auge = Subscript(e->v.Subscript.value, e->v.Subscript.slice, 3161 AugLoad, e->lineno, e->col_offset, c->c_arena); 3162 if (auge == NULL) 3163 return 0; 3164 VISIT(c, expr, auge); 3165 VISIT(c, expr, s->v.AugAssign.value); 3166 ADDOP(c, inplace_binop(c, s->v.AugAssign.op)); 3167 auge->v.Subscript.ctx = AugStore; 3168 VISIT(c, expr, auge); 3169 break; 3170 case Name_kind: 3171 if (!compiler_nameop(c, e->v.Name.id, Load)) 3172 return 0; 3173 VISIT(c, expr, s->v.AugAssign.value); 3174 ADDOP(c, inplace_binop(c, s->v.AugAssign.op)); 3175 return compiler_nameop(c, e->v.Name.id, Store); 3176 default: 3177 PyErr_Format(PyExc_SystemError, 3178 "invalid node type (%d) for augmented assignment", 3179 e->kind); 3180 return 0; 3181 } 3182 return 1; 3139 3183 } 3140 3184 … … 3142 3186 compiler_push_fblock(struct compiler *c, enum fblocktype t, basicblock *b) 3143 3187 { 3144 3145 3146 3147 3148 
3149 3150 3151 3152 3153 3188 struct fblockinfo *f; 3189 if (c->u->u_nfblocks >= CO_MAXBLOCKS) { 3190 PyErr_SetString(PyExc_SystemError, 3191 "too many statically nested blocks"); 3192 return 0; 3193 } 3194 f = &c->u->u_fblock[c->u->u_nfblocks++]; 3195 f->fb_type = t; 3196 f->fb_block = b; 3197 return 1; 3154 3198 } 3155 3199 … … 3157 3201 compiler_pop_fblock(struct compiler *c, enum fblocktype t, basicblock *b) 3158 3202 { 3159 3160 3161 3162 3163 3203 struct compiler_unit *u = c->u; 3204 assert(u->u_nfblocks > 0); 3205 u->u_nfblocks--; 3206 assert(u->u_fblock[u->u_nfblocks].fb_type == t); 3207 assert(u->u_fblock[u->u_nfblocks].fb_block == b); 3164 3208 } 3165 3209 3166 3210 static int 3167 3211 compiler_in_loop(struct compiler *c) { 3168 3169 3170 3171 3172 3173 3174 3212 int i; 3213 struct compiler_unit *u = c->u; 3214 for (i = 0; i < u->u_nfblocks; ++i) { 3215 if (u->u_fblock[i].fb_type == LOOP) 3216 return 1; 3217 } 3218 return 0; 3175 3219 } 3176 3220 /* Raises a SyntaxError and returns 0. … … 3181 3225 compiler_error(struct compiler *c, const char *errstr) 3182 3226 { 3183 3184 3185 3186 3187 3188 3189 3190 3191 3192 3193 3194 3195 3196 3197 3198 3227 PyObject *loc; 3228 PyObject *u = NULL, *v = NULL; 3229 3230 loc = PyErr_ProgramText(c->c_filename, c->u->u_lineno); 3231 if (!loc) { 3232 Py_INCREF(Py_None); 3233 loc = Py_None; 3234 } 3235 u = Py_BuildValue("(ziOO)", c->c_filename, c->u->u_lineno, 3236 Py_None, loc); 3237 if (!u) 3238 goto exit; 3239 v = Py_BuildValue("(zO)", errstr, u); 3240 if (!v) 3241 goto exit; 3242 PyErr_SetObject(PyExc_SyntaxError, v); 3199 3243 exit: 3200 3201 3202 3203 3204 } 3205 3206 static int 3207 compiler_handle_subscr(struct compiler *c, const char *kind, 3208 expr_context_ty ctx) 3209 { 3210 3211 3212 3213 3214 3215 3216 3217 3218 3219 3220 PyErr_Format(PyExc_SystemError, 3221 "invalid %s kind %d in subscript\n", 3222 3223 3224 3225 3226 3227 3228 3229 3230 3231 3232 3244 Py_DECREF(loc); 3245 Py_XDECREF(u); 3246 Py_XDECREF(v); 3247 return 0; 3248 } 3249 3250 static int 3251 compiler_handle_subscr(struct compiler *c, const char *kind, 3252 expr_context_ty ctx) 3253 { 3254 int op = 0; 3255 3256 /* XXX this code is duplicated */ 3257 switch (ctx) { 3258 case AugLoad: /* fall through to Load */ 3259 case Load: op = BINARY_SUBSCR; break; 3260 case AugStore:/* fall through to Store */ 3261 case Store: op = STORE_SUBSCR; break; 3262 case Del: op = DELETE_SUBSCR; break; 3263 case Param: 3264 PyErr_Format(PyExc_SystemError, 3265 "invalid %s kind %d in subscript\n", 3266 kind, ctx); 3267 return 0; 3268 } 3269 if (ctx == AugLoad) { 3270 ADDOP_I(c, DUP_TOPX, 2); 3271 } 3272 else if (ctx == AugStore) { 3273 ADDOP(c, ROT_THREE); 3274 } 3275 ADDOP(c, op); 3276 return 1; 3233 3277 } 3234 3278 … … 3236 3280 compiler_slice(struct compiler *c, slice_ty s, expr_context_ty ctx) 3237 3281 { 3238 3239 3240 3241 3242 3243 3244 3245 3246 3247 3248 3249 3250 3251 3252 3253 3254 3255 3256 3257 3258 3259 3260 3261 3282 int n = 2; 3283 assert(s->kind == Slice_kind); 3284 3285 /* only handles the cases where BUILD_SLICE is emitted */ 3286 if (s->v.Slice.lower) { 3287 VISIT(c, expr, s->v.Slice.lower); 3288 } 3289 else { 3290 ADDOP_O(c, LOAD_CONST, Py_None, consts); 3291 } 3292 3293 if (s->v.Slice.upper) { 3294 VISIT(c, expr, s->v.Slice.upper); 3295 } 3296 else { 3297 ADDOP_O(c, LOAD_CONST, Py_None, consts); 3298 } 3299 3300 if (s->v.Slice.step) { 3301 n++; 3302 VISIT(c, expr, s->v.Slice.step); 3303 } 3304 ADDOP_I(c, BUILD_SLICE, n); 3305 return 1; 3262 3306 } 3263 3307 … … 3265 
3309 compiler_simple_slice(struct compiler *c, slice_ty s, expr_context_ty ctx) 3266 3310 { 3267 3268 3269 3270 3271 3272 3273 if (ctx != AugStore) 3274 3275 3276 3277 3278 3279 if (ctx != AugStore) 3280 3281 3282 3283 3284 3285 3286 3287 3288 3289 3290 3291 3292 3293 3294 3295 3296 3297 3298 3299 3300 3301 3302 3303 3304 3305 3306 3307 3308 3309 3310 3311 3312 3313 } 3314 3315 static int 3316 compiler_visit_nested_slice(struct compiler *c, slice_ty s, 3317 3318 { 3319 3320 3321 3322 3323 3324 3325 3326 3327 3328 3329 3330 3331 3332 3333 3334 3311 int op = 0, slice_offset = 0, stack_count = 0; 3312 3313 assert(s->v.Slice.step == NULL); 3314 if (s->v.Slice.lower) { 3315 slice_offset++; 3316 stack_count++; 3317 if (ctx != AugStore) 3318 VISIT(c, expr, s->v.Slice.lower); 3319 } 3320 if (s->v.Slice.upper) { 3321 slice_offset += 2; 3322 stack_count++; 3323 if (ctx != AugStore) 3324 VISIT(c, expr, s->v.Slice.upper); 3325 } 3326 3327 if (ctx == AugLoad) { 3328 switch (stack_count) { 3329 case 0: ADDOP(c, DUP_TOP); break; 3330 case 1: ADDOP_I(c, DUP_TOPX, 2); break; 3331 case 2: ADDOP_I(c, DUP_TOPX, 3); break; 3332 } 3333 } 3334 else if (ctx == AugStore) { 3335 switch (stack_count) { 3336 case 0: ADDOP(c, ROT_TWO); break; 3337 case 1: ADDOP(c, ROT_THREE); break; 3338 case 2: ADDOP(c, ROT_FOUR); break; 3339 } 3340 } 3341 3342 switch (ctx) { 3343 case AugLoad: /* fall through to Load */ 3344 case Load: op = SLICE; break; 3345 case AugStore:/* fall through to Store */ 3346 case Store: op = STORE_SLICE; break; 3347 case Del: op = DELETE_SLICE; break; 3348 case Param: 3349 default: 3350 PyErr_SetString(PyExc_SystemError, 3351 "param invalid in simple slice"); 3352 return 0; 3353 } 3354 3355 ADDOP(c, op + slice_offset); 3356 return 1; 3357 } 3358 3359 static int 3360 compiler_visit_nested_slice(struct compiler *c, slice_ty s, 3361 expr_context_ty ctx) 3362 { 3363 switch (s->kind) { 3364 case Ellipsis_kind: 3365 ADDOP_O(c, LOAD_CONST, Py_Ellipsis, consts); 3366 break; 3367 case Slice_kind: 3368 return compiler_slice(c, s, ctx); 3369 case Index_kind: 3370 VISIT(c, expr, s->v.Index.value); 3371 break; 3372 case ExtSlice_kind: 3373 default: 3374 PyErr_SetString(PyExc_SystemError, 3375 "extended slice invalid in nested slice"); 3376 return 0; 3377 } 3378 return 1; 3335 3379 } 3336 3380 … … 3338 3382 compiler_visit_slice(struct compiler *c, slice_ty s, expr_context_ty ctx) 3339 3383 { 3340 3341 3342 3343 3344 3345 3346 3347 3348 3349 3350 3351 3352 3353 3354 3355 3356 if (!s->v.Slice.step) 3357 3358 3359 3360 3361 3362 3363 3364 3365 3366 3367 3368 3369 3370 3371 3372 3373 3374 3375 3376 3377 3378 3379 3380 3381 3384 char * kindname = NULL; 3385 switch (s->kind) { 3386 case Index_kind: 3387 kindname = "index"; 3388 if (ctx != AugStore) { 3389 VISIT(c, expr, s->v.Index.value); 3390 } 3391 break; 3392 case Ellipsis_kind: 3393 kindname = "ellipsis"; 3394 if (ctx != AugStore) { 3395 ADDOP_O(c, LOAD_CONST, Py_Ellipsis, consts); 3396 } 3397 break; 3398 case Slice_kind: 3399 kindname = "slice"; 3400 if (!s->v.Slice.step) 3401 return compiler_simple_slice(c, s, ctx); 3402 if (ctx != AugStore) { 3403 if (!compiler_slice(c, s, ctx)) 3404 return 0; 3405 } 3406 break; 3407 case ExtSlice_kind: 3408 kindname = "extended slice"; 3409 if (ctx != AugStore) { 3410 int i, n = asdl_seq_LEN(s->v.ExtSlice.dims); 3411 for (i = 0; i < n; i++) { 3412 slice_ty sub = (slice_ty)asdl_seq_GET( 3413 s->v.ExtSlice.dims, i); 3414 if (!compiler_visit_nested_slice(c, sub, ctx)) 3415 return 0; 3416 } 3417 ADDOP_I(c, BUILD_TUPLE, n); 3418 } 3419 
break; 3420 default: 3421 PyErr_Format(PyExc_SystemError, 3422 "invalid subscript kind %d", s->kind); 3423 return 0; 3424 } 3425 return compiler_handle_subscr(c, kindname, ctx); 3382 3426 } 3383 3427 … … 3392 3436 3393 3437 struct assembler { 3394 3395 int a_offset;/* offset into bytecode */3396 int a_nblocks;/* number of reachable blocks */3397 3398 3399 3400 int a_lineno;/* last lineno of emitted instruction */3401 3438 PyObject *a_bytecode; /* string containing bytecode */ 3439 int a_offset; /* offset into bytecode */ 3440 int a_nblocks; /* number of reachable blocks */ 3441 basicblock **a_postorder; /* list of blocks in dfs postorder */ 3442 PyObject *a_lnotab; /* string containing lnotab */ 3443 int a_lnotab_off; /* offset into lnotab */ 3444 int a_lineno; /* last lineno of emitted instruction */ 3445 int a_lineno_off; /* bytecode offset of last lineno */ 3402 3446 }; 3403 3447 … … 3405 3449 dfs(struct compiler *c, basicblock *b, struct assembler *a) 3406 3450 { 3407 3408 3409 3410 3411 3412 3413 3414 3415 3416 3417 3418 3419 3420 3451 int i; 3452 struct instr *instr = NULL; 3453 3454 if (b->b_seen) 3455 return; 3456 b->b_seen = 1; 3457 if (b->b_next != NULL) 3458 dfs(c, b->b_next, a); 3459 for (i = 0; i < b->b_iused; i++) { 3460 instr = &b->b_instr[i]; 3461 if (instr->i_jrel || instr->i_jabs) 3462 dfs(c, instr->i_target, a); 3463 } 3464 a->a_postorder[a->a_nblocks++] = b; 3421 3465 } 3422 3466 … … 3424 3468 stackdepth_walk(struct compiler *c, basicblock *b, int depth, int maxdepth) 3425 3469 { 3426 int i; 3427 struct instr *instr; 3428 if (b->b_seen || b->b_startdepth >= depth) 3429 return maxdepth; 3430 b->b_seen = 1; 3431 b->b_startdepth = depth; 3432 for (i = 0; i < b->b_iused; i++) { 3433 instr = &b->b_instr[i]; 3434 depth += opcode_stack_effect(instr->i_opcode, instr->i_oparg); 3435 if (depth > maxdepth) 3436 maxdepth = depth; 3437 assert(depth >= 0); /* invalid code or bug in stackdepth() */ 3438 if (instr->i_jrel || instr->i_jabs) { 3439 maxdepth = stackdepth_walk(c, instr->i_target, 3440 depth, maxdepth); 3441 if (instr->i_opcode == JUMP_ABSOLUTE || 3442 instr->i_opcode == JUMP_FORWARD) { 3443 goto out; /* remaining code is dead */ 3444 } 3445 } 3446 } 3447 if (b->b_next) 3448 maxdepth = stackdepth_walk(c, b->b_next, depth, maxdepth); 3470 int i, target_depth; 3471 struct instr *instr; 3472 if (b->b_seen || b->b_startdepth >= depth) 3473 return maxdepth; 3474 b->b_seen = 1; 3475 b->b_startdepth = depth; 3476 for (i = 0; i < b->b_iused; i++) { 3477 instr = &b->b_instr[i]; 3478 depth += opcode_stack_effect(instr->i_opcode, instr->i_oparg); 3479 if (depth > maxdepth) 3480 maxdepth = depth; 3481 assert(depth >= 0); /* invalid code or bug in stackdepth() */ 3482 if (instr->i_jrel || instr->i_jabs) { 3483 target_depth = depth; 3484 if (instr->i_opcode == FOR_ITER) { 3485 target_depth = depth-2; 3486 } else if (instr->i_opcode == SETUP_FINALLY || 3487 instr->i_opcode == SETUP_EXCEPT) { 3488 target_depth = depth+3; 3489 if (target_depth > maxdepth) 3490 maxdepth = target_depth; 3491 } 3492 maxdepth = stackdepth_walk(c, instr->i_target, 3493 target_depth, maxdepth); 3494 if (instr->i_opcode == JUMP_ABSOLUTE || 3495 instr->i_opcode == JUMP_FORWARD) { 3496 goto out; /* remaining code is dead */ 3497 } 3498 } 3499 } 3500 if (b->b_next) 3501 maxdepth = stackdepth_walk(c, b->b_next, depth, maxdepth); 3449 3502 out: 3450 3451 3503 b->b_seen = 0; 3504 return maxdepth; 3452 3505 } 3453 3506 … … 3458 3511 stackdepth(struct compiler *c) 3459 3512 { 3460 3461 3462 3463 3464 3465 3466 3467 3468 
3469 3513 basicblock *b, *entryblock; 3514 entryblock = NULL; 3515 for (b = c->u->u_blocks; b != NULL; b = b->b_list) { 3516 b->b_seen = 0; 3517 b->b_startdepth = INT_MIN; 3518 entryblock = b; 3519 } 3520 if (!entryblock) 3521 return 0; 3522 return stackdepth_walk(c, entryblock, 0, 0); 3470 3523 } 3471 3524 … … 3473 3526 assemble_init(struct assembler *a, int nblocks, int firstlineno) 3474 3527 { 3475 3476 3477 3478 3479 3480 3481 3482 3483 3484 3485 3486 3487 3488 3489 3490 3491 3492 3493 3528 memset(a, 0, sizeof(struct assembler)); 3529 a->a_lineno = firstlineno; 3530 a->a_bytecode = PyString_FromStringAndSize(NULL, DEFAULT_CODE_SIZE); 3531 if (!a->a_bytecode) 3532 return 0; 3533 a->a_lnotab = PyString_FromStringAndSize(NULL, DEFAULT_LNOTAB_SIZE); 3534 if (!a->a_lnotab) 3535 return 0; 3536 if (nblocks > PY_SIZE_MAX / sizeof(basicblock *)) { 3537 PyErr_NoMemory(); 3538 return 0; 3539 } 3540 a->a_postorder = (basicblock **)PyObject_Malloc( 3541 sizeof(basicblock *) * nblocks); 3542 if (!a->a_postorder) { 3543 PyErr_NoMemory(); 3544 return 0; 3545 } 3546 return 1; 3494 3547 } 3495 3548 … … 3497 3550 assemble_free(struct assembler *a) 3498 3551 { 3499 3500 3501 3502 3552 Py_XDECREF(a->a_bytecode); 3553 Py_XDECREF(a->a_lnotab); 3554 if (a->a_postorder) 3555 PyObject_Free(a->a_postorder); 3503 3556 } 3504 3557 … … 3508 3561 instrsize(struct instr *instr) 3509 3562 { 3510 3511 return 1;/* 1 byte for the opcode*/3512 3513 return 6;/* 1 (opcode) + 1 (EXTENDED_ARG opcode) + 2 (oparg) + 2(oparg extended) */3514 return 3;/* 1 (opcode) + 2 (oparg) */3563 if (!instr->i_hasarg) 3564 return 1; /* 1 byte for the opcode*/ 3565 if (instr->i_oparg > 0xffff) 3566 return 6; /* 1 (opcode) + 1 (EXTENDED_ARG opcode) + 2 (oparg) + 2(oparg extended) */ 3567 return 3; /* 1 (opcode) + 2 (oparg) */ 3515 3568 } 3516 3569 … … 3518 3571 blocksize(basicblock *b) 3519 3572 { 3520 int i; 3521 int size = 0; 3522 3523 for (i = 0; i < b->b_iused; i++) 3524 size += instrsize(&b->b_instr[i]); 3525 return size; 3526 } 3527 3528 /* All about a_lnotab. 3529 3530 c_lnotab is an array of unsigned bytes disguised as a Python string. 3531 It is used to map bytecode offsets to source code line #s (when needed 3532 for tracebacks). 3533 3534 The array is conceptually a list of 3535 (bytecode offset increment, line number increment) 3536 pairs. The details are important and delicate, best illustrated by example: 3537 3538 byte code offset source code line number 3539 0 1 3540 6 2 3541 50 7 3542 350 307 3543 361 308 3544 3545 The first trick is that these numbers aren't stored, only the increments 3546 from one row to the next (this doesn't really work, but it's a start): 3547 3548 0, 1, 6, 1, 44, 5, 300, 300, 11, 1 3549 3550 The second trick is that an unsigned byte can't hold negative values, or 3551 values larger than 255, so (a) there's a deep assumption that byte code 3552 offsets and their corresponding line #s both increase monotonically, and (b) 3553 if at least one column jumps by more than 255 from one row to the next, more 3554 than one pair is written to the table. In case #b, there's no way to know 3555 from looking at the table later how many were written. That's the delicate 3556 part. 
A user of c_lnotab desiring to find the source line number 3557 corresponding to a bytecode address A should do something like this 3558 3559 lineno = addr = 0 3560 for addr_incr, line_incr in c_lnotab: 3561 addr += addr_incr 3562 if addr > A: 3563 return lineno 3564 lineno += line_incr 3565 3566 In order for this to work, when the addr field increments by more than 255, 3567 the line # increment in each pair generated must be 0 until the remaining addr 3568 increment is < 256. So, in the example above, assemble_lnotab (it used 3569 to be called com_set_lineno) should not (as was actually done until 2.2) 3570 expand 300, 300 to 255, 255, 45, 45, 3571 but to 255, 0, 45, 255, 0, 45. 3572 */ 3573 int i; 3574 int size = 0; 3575 3576 for (i = 0; i < b->b_iused; i++) 3577 size += instrsize(&b->b_instr[i]); 3578 return size; 3579 } 3580 3581 /* Appends a pair to the end of the line number table, a_lnotab, representing 3582 the instruction's bytecode offset and line number. See 3583 Objects/lnotab_notes.txt for the description of the line number table. */ 3573 3584 3574 3585 static int 3575 3586 assemble_lnotab(struct assembler *a, struct instr *i) 3576 3587 { 3577 3578 3579 3580 3581 3582 3583 3584 3585 3586 3587 3588 3589 3590 3591 3592 3593 3594 3595 3596 3597 3598 3599 3600 3601 3602 3603 3604 3605 3606 3607 3608 3609 3610 3611 3612 3613 3614 3615 3616 3617 3618 3619 3620 3621 3622 3623 3624 3625 3626 3627 3628 3629 3630 3631 3632 3633 3634 3635 3636 3637 3638 3639 3640 3641 3642 3643 3644 3645 3646 3647 3648 3649 3650 3651 3652 3653 3654 3655 3656 3657 3658 else {/* First line of a block; def stmt, etc. */3659 3660 3661 3662 3663 3664 3588 int d_bytecode, d_lineno; 3589 int len; 3590 unsigned char *lnotab; 3591 3592 d_bytecode = a->a_offset - a->a_lineno_off; 3593 d_lineno = i->i_lineno - a->a_lineno; 3594 3595 assert(d_bytecode >= 0); 3596 assert(d_lineno >= 0); 3597 3598 if(d_bytecode == 0 && d_lineno == 0) 3599 return 1; 3600 3601 if (d_bytecode > 255) { 3602 int j, nbytes, ncodes = d_bytecode / 255; 3603 nbytes = a->a_lnotab_off + 2 * ncodes; 3604 len = PyString_GET_SIZE(a->a_lnotab); 3605 if (nbytes >= len) { 3606 if ((len <= INT_MAX / 2) && (len * 2 < nbytes)) 3607 len = nbytes; 3608 else if (len <= INT_MAX / 2) 3609 len *= 2; 3610 else { 3611 PyErr_NoMemory(); 3612 return 0; 3613 } 3614 if (_PyString_Resize(&a->a_lnotab, len) < 0) 3615 return 0; 3616 } 3617 lnotab = (unsigned char *) 3618 PyString_AS_STRING(a->a_lnotab) + a->a_lnotab_off; 3619 for (j = 0; j < ncodes; j++) { 3620 *lnotab++ = 255; 3621 *lnotab++ = 0; 3622 } 3623 d_bytecode -= ncodes * 255; 3624 a->a_lnotab_off += ncodes * 2; 3625 } 3626 assert(d_bytecode <= 255); 3627 if (d_lineno > 255) { 3628 int j, nbytes, ncodes = d_lineno / 255; 3629 nbytes = a->a_lnotab_off + 2 * ncodes; 3630 len = PyString_GET_SIZE(a->a_lnotab); 3631 if (nbytes >= len) { 3632 if ((len <= INT_MAX / 2) && len * 2 < nbytes) 3633 len = nbytes; 3634 else if (len <= INT_MAX / 2) 3635 len *= 2; 3636 else { 3637 PyErr_NoMemory(); 3638 return 0; 3639 } 3640 if (_PyString_Resize(&a->a_lnotab, len) < 0) 3641 return 0; 3642 } 3643 lnotab = (unsigned char *) 3644 PyString_AS_STRING(a->a_lnotab) + a->a_lnotab_off; 3645 *lnotab++ = d_bytecode; 3646 *lnotab++ = 255; 3647 d_bytecode = 0; 3648 for (j = 1; j < ncodes; j++) { 3649 *lnotab++ = 0; 3650 *lnotab++ = 255; 3651 } 3652 d_lineno -= ncodes * 255; 3653 a->a_lnotab_off += ncodes * 2; 3654 } 3655 3656 len = PyString_GET_SIZE(a->a_lnotab); 3657 if (a->a_lnotab_off + 2 >= len) { 3658 if 
(_PyString_Resize(&a->a_lnotab, len * 2) < 0) 3659 return 0; 3660 } 3661 lnotab = (unsigned char *) 3662 PyString_AS_STRING(a->a_lnotab) + a->a_lnotab_off; 3663 3664 a->a_lnotab_off += 2; 3665 if (d_bytecode) { 3666 *lnotab++ = d_bytecode; 3667 *lnotab++ = d_lineno; 3668 } 3669 else { /* First line of a block; def stmt, etc. */ 3670 *lnotab++ = 0; 3671 *lnotab++ = d_lineno; 3672 } 3673 a->a_lineno = i->i_lineno; 3674 a->a_lineno_off = a->a_offset; 3675 return 1; 3665 3676 } 3666 3677 … … 3673 3684 assemble_emit(struct assembler *a, struct instr *i) 3674 3685 { 3675 3676 3677 3678 3679 3680 3681 3682 3683 3684 3685 3686 3687 3688 3689 3690 3691 3692 3693 3694 3695 3696 3697 3698 3699 3700 3701 3702 3703 3704 3705 3706 3707 3686 int size, arg = 0, ext = 0; 3687 Py_ssize_t len = PyString_GET_SIZE(a->a_bytecode); 3688 char *code; 3689 3690 size = instrsize(i); 3691 if (i->i_hasarg) { 3692 arg = i->i_oparg; 3693 ext = arg >> 16; 3694 } 3695 if (i->i_lineno && !assemble_lnotab(a, i)) 3696 return 0; 3697 if (a->a_offset + size >= len) { 3698 if (len > PY_SSIZE_T_MAX / 2) 3699 return 0; 3700 if (_PyString_Resize(&a->a_bytecode, len * 2) < 0) 3701 return 0; 3702 } 3703 code = PyString_AS_STRING(a->a_bytecode) + a->a_offset; 3704 a->a_offset += size; 3705 if (size == 6) { 3706 assert(i->i_hasarg); 3707 *code++ = (char)EXTENDED_ARG; 3708 *code++ = ext & 0xff; 3709 *code++ = ext >> 8; 3710 arg &= 0xffff; 3711 } 3712 *code++ = i->i_opcode; 3713 if (i->i_hasarg) { 3714 assert(size == 3 || size == 6); 3715 *code++ = arg & 0xff; 3716 *code++ = arg >> 8; 3717 } 3718 return 1; 3708 3719 } 3709 3720 … … 3711 3722 assemble_jump_offsets(struct assembler *a, struct compiler *c) 3712 3723 { 3713 basicblock *b; 3714 int bsize, totsize, extended_arg_count, last_extended_arg_count = 0; 3715 int i; 3716 3717 /* Compute the size of each block and fixup jump args. 3718 Replace block pointer with position in bytecode. */ 3719 start: 3720 totsize = 0; 3721 for (i = a->a_nblocks - 1; i >= 0; i--) { 3722 b = a->a_postorder[i]; 3723 bsize = blocksize(b); 3724 b->b_offset = totsize; 3725 totsize += bsize; 3726 } 3727 extended_arg_count = 0; 3728 for (b = c->u->u_blocks; b != NULL; b = b->b_list) { 3729 bsize = b->b_offset; 3730 for (i = 0; i < b->b_iused; i++) { 3731 struct instr *instr = &b->b_instr[i]; 3732 /* Relative jumps are computed relative to 3733 the instruction pointer after fetching 3734 the jump instruction. 3735 */ 3736 bsize += instrsize(instr); 3737 if (instr->i_jabs) 3738 instr->i_oparg = instr->i_target->b_offset; 3739 else if (instr->i_jrel) { 3740 int delta = instr->i_target->b_offset - bsize; 3741 instr->i_oparg = delta; 3742 } 3743 else 3744 continue; 3745 if (instr->i_oparg > 0xffff) 3746 extended_arg_count++; 3747 } 3748 } 3749 3750 /* XXX: This is an awful hack that could hurt performance, but 3751 on the bright side it should work until we come up 3752 with a better solution. 3753 3754 In the meantime, should the goto be dropped in favor 3755 of a loop? 3756 3757 The issue is that in the first loop blocksize() is called 3758 which calls instrsize() which requires i_oparg be set 3759 appropriately. There is a bootstrap problem because 3760 i_oparg is calculated in the second loop above. 3761 3762 So we loop until we stop seeing new EXTENDED_ARGs. 3763 The only EXTENDED_ARGs that could be popping up are 3764 ones in jump instructions. So this should converge 3765 fairly quickly. 
3766 */ 3767 if (last_extended_arg_count != extended_arg_count) { 3768 last_extended_arg_count = extended_arg_count; 3769 goto start; 3770 } 3724 basicblock *b; 3725 int bsize, totsize, extended_arg_count = 0, last_extended_arg_count; 3726 int i; 3727 3728 /* Compute the size of each block and fixup jump args. 3729 Replace block pointer with position in bytecode. */ 3730 do { 3731 totsize = 0; 3732 for (i = a->a_nblocks - 1; i >= 0; i--) { 3733 b = a->a_postorder[i]; 3734 bsize = blocksize(b); 3735 b->b_offset = totsize; 3736 totsize += bsize; 3737 } 3738 last_extended_arg_count = extended_arg_count; 3739 extended_arg_count = 0; 3740 for (b = c->u->u_blocks; b != NULL; b = b->b_list) { 3741 bsize = b->b_offset; 3742 for (i = 0; i < b->b_iused; i++) { 3743 struct instr *instr = &b->b_instr[i]; 3744 /* Relative jumps are computed relative to 3745 the instruction pointer after fetching 3746 the jump instruction. 3747 */ 3748 bsize += instrsize(instr); 3749 if (instr->i_jabs) 3750 instr->i_oparg = instr->i_target->b_offset; 3751 else if (instr->i_jrel) { 3752 int delta = instr->i_target->b_offset - bsize; 3753 instr->i_oparg = delta; 3754 } 3755 else 3756 continue; 3757 if (instr->i_oparg > 0xffff) 3758 extended_arg_count++; 3759 } 3760 } 3761 3762 /* XXX: This is an awful hack that could hurt performance, but 3763 on the bright side it should work until we come up 3764 with a better solution. 3765 3766 The issue is that in the first loop blocksize() is called 3767 which calls instrsize() which requires i_oparg be set 3768 appropriately. There is a bootstrap problem because 3769 i_oparg is calculated in the second loop above. 3770 3771 So we loop until we stop seeing new EXTENDED_ARGs. 3772 The only EXTENDED_ARGs that could be popping up are 3773 ones in jump instructions. So this should converge 3774 fairly quickly. 3775 */ 3776 } while (last_extended_arg_count != extended_arg_count); 3771 3777 } 3772 3778 … … 3774 3780 dict_keys_inorder(PyObject *dict, int offset) 3775 3781 { 3776 PyObject *tuple, *k, *v; 3777 Py_ssize_t i, pos = 0, size = PyDict_Size(dict); 3778 3779 tuple = PyTuple_New(size); 3780 if (tuple == NULL) 3781 return NULL; 3782 while (PyDict_Next(dict, &pos, &k, &v)) { 3783 i = PyInt_AS_LONG(v); 3784 k = PyTuple_GET_ITEM(k, 0); 3785 Py_INCREF(k); 3786 assert((i - offset) < size); 3787 assert((i - offset) >= 0); 3788 PyTuple_SET_ITEM(tuple, i - offset, k); 3789 } 3790 return tuple; 3782 PyObject *tuple, *k, *v; 3783 Py_ssize_t i, pos = 0, size = PyDict_Size(dict); 3784 3785 tuple = PyTuple_New(size); 3786 if (tuple == NULL) 3787 return NULL; 3788 while (PyDict_Next(dict, &pos, &k, &v)) { 3789 i = PyInt_AS_LONG(v); 3790 /* The keys of the dictionary are tuples. (see compiler_add_o) 3791 The object we want is always first, though. 
*/ 3792 k = PyTuple_GET_ITEM(k, 0); 3793 Py_INCREF(k); 3794 assert((i - offset) < size); 3795 assert((i - offset) >= 0); 3796 PyTuple_SET_ITEM(tuple, i - offset, k); 3797 } 3798 return tuple; 3791 3799 } 3792 3800 … … 3794 3802 compute_code_flags(struct compiler *c) 3795 3803 { 3796 PySTEntryObject *ste = c->u->u_ste; 3797 int flags = 0, n; 3798 if (ste->ste_type != ModuleBlock) 3799 flags |= CO_NEWLOCALS; 3800 if (ste->ste_type == FunctionBlock) { 3801 if (!ste->ste_unoptimized) 3802 flags |= CO_OPTIMIZED; 3803 if (ste->ste_nested) 3804 flags |= CO_NESTED; 3805 if (ste->ste_generator) 3806 flags |= CO_GENERATOR; 3807 } 3808 if (ste->ste_varargs) 3809 flags |= CO_VARARGS; 3810 if (ste->ste_varkeywords) 3811 flags |= CO_VARKEYWORDS; 3812 if (ste->ste_generator) 3813 flags |= CO_GENERATOR; 3814 3815 /* (Only) inherit compilerflags in PyCF_MASK */ 3816 flags |= (c->c_flags->cf_flags & PyCF_MASK); 3817 3818 n = PyDict_Size(c->u->u_freevars); 3819 if (n < 0) 3820 return -1; 3821 if (n == 0) { 3822 n = PyDict_Size(c->u->u_cellvars); 3823 if (n < 0) 3824 return -1; 3825 if (n == 0) { 3826 flags |= CO_NOFREE; 3827 } 3828 } 3829 3830 return flags; 3804 PySTEntryObject *ste = c->u->u_ste; 3805 int flags = 0, n; 3806 if (ste->ste_type != ModuleBlock) 3807 flags |= CO_NEWLOCALS; 3808 if (ste->ste_type == FunctionBlock) { 3809 if (!ste->ste_unoptimized) 3810 flags |= CO_OPTIMIZED; 3811 if (ste->ste_nested) 3812 flags |= CO_NESTED; 3813 if (ste->ste_generator) 3814 flags |= CO_GENERATOR; 3815 if (ste->ste_varargs) 3816 flags |= CO_VARARGS; 3817 if (ste->ste_varkeywords) 3818 flags |= CO_VARKEYWORDS; 3819 } 3820 3821 /* (Only) inherit compilerflags in PyCF_MASK */ 3822 flags |= (c->c_flags->cf_flags & PyCF_MASK); 3823 3824 n = PyDict_Size(c->u->u_freevars); 3825 if (n < 0) 3826 return -1; 3827 if (n == 0) { 3828 n = PyDict_Size(c->u->u_cellvars); 3829 if (n < 0) 3830 return -1; 3831 if (n == 0) { 3832 flags |= CO_NOFREE; 3833 } 3834 } 3835 3836 return flags; 3831 3837 } 3832 3838 … … 3834 3840 makecode(struct compiler *c, struct assembler *a) 3835 3841 { 3836 3837 3838 3839 3840 3841 3842 3843 3844 3845 3846 3847 3848 3849 3850 3851 3852 3853 3854 3855 3856 3857 3858 3859 3860 3861 3862 3863 3864 3865 3866 3867 3868 3869 3870 3871 3872 3873 3874 3875 3876 3877 3878 3879 3880 3881 3882 3883 3884 3885 3886 3887 3888 3889 3842 PyObject *tmp; 3843 PyCodeObject *co = NULL; 3844 PyObject *consts = NULL; 3845 PyObject *names = NULL; 3846 PyObject *varnames = NULL; 3847 PyObject *filename = NULL; 3848 PyObject *name = NULL; 3849 PyObject *freevars = NULL; 3850 PyObject *cellvars = NULL; 3851 PyObject *bytecode = NULL; 3852 int nlocals, flags; 3853 3854 tmp = dict_keys_inorder(c->u->u_consts, 0); 3855 if (!tmp) 3856 goto error; 3857 consts = PySequence_List(tmp); /* optimize_code requires a list */ 3858 Py_DECREF(tmp); 3859 3860 names = dict_keys_inorder(c->u->u_names, 0); 3861 varnames = dict_keys_inorder(c->u->u_varnames, 0); 3862 if (!consts || !names || !varnames) 3863 goto error; 3864 3865 cellvars = dict_keys_inorder(c->u->u_cellvars, 0); 3866 if (!cellvars) 3867 goto error; 3868 freevars = dict_keys_inorder(c->u->u_freevars, PyTuple_Size(cellvars)); 3869 if (!freevars) 3870 goto error; 3871 filename = PyString_FromString(c->c_filename); 3872 if (!filename) 3873 goto error; 3874 3875 nlocals = PyDict_Size(c->u->u_varnames); 3876 flags = compute_code_flags(c); 3877 if (flags < 0) 3878 goto error; 3879 3880 bytecode = PyCode_Optimize(a->a_bytecode, consts, names, a->a_lnotab); 3881 if (!bytecode) 3882 goto 
error; 3883 3884 tmp = PyList_AsTuple(consts); /* PyCode_New requires a tuple */ 3885 if (!tmp) 3886 goto error; 3887 Py_DECREF(consts); 3888 consts = tmp; 3889 3890 co = PyCode_New(c->u->u_argcount, nlocals, stackdepth(c), flags, 3891 bytecode, consts, names, varnames, 3892 freevars, cellvars, 3893 filename, c->u->u_name, 3894 c->u->u_firstlineno, 3895 a->a_lnotab); 3890 3896 error: 3891 3892 3893 3894 3895 3896 3897 3898 3899 3897 Py_XDECREF(consts); 3898 Py_XDECREF(names); 3899 Py_XDECREF(varnames); 3900 Py_XDECREF(filename); 3901 Py_XDECREF(name); 3902 Py_XDECREF(freevars); 3903 Py_XDECREF(cellvars); 3904 Py_XDECREF(bytecode); 3905 return co; 3900 3906 } 3901 3907 … … 3906 3912 dump_instr(const struct instr *i) 3907 3913 { 3908 3909 3910 3911 3912 3913 3914 3915 3916 fprintf(stderr, "line: %d, opcode: %d %s%s%s\n", 3917 3914 const char *jrel = i->i_jrel ? "jrel " : ""; 3915 const char *jabs = i->i_jabs ? "jabs " : ""; 3916 char arg[128]; 3917 3918 *arg = '\0'; 3919 if (i->i_hasarg) 3920 sprintf(arg, "arg: %d ", i->i_oparg); 3921 3922 fprintf(stderr, "line: %d, opcode: %d %s%s%s\n", 3923 i->i_lineno, i->i_opcode, arg, jabs, jrel); 3918 3924 } 3919 3925 … … 3921 3927 dump_basicblock(const basicblock *b) 3922 3928 { 3923 3924 3925 3926 3927 3928 3929 3930 3931 3932 3933 3929 const char *seen = b->b_seen ? "seen " : ""; 3930 const char *b_return = b->b_return ? "return " : ""; 3931 fprintf(stderr, "used: %d, depth: %d, offset: %d %s%s\n", 3932 b->b_iused, b->b_startdepth, b->b_offset, seen, b_return); 3933 if (b->b_instr) { 3934 int i; 3935 for (i = 0; i < b->b_iused; i++) { 3936 fprintf(stderr, " [%02d] ", i); 3937 dump_instr(b->b_instr + i); 3938 } 3939 } 3934 3940 } 3935 3941 #endif … … 3938 3944 assemble(struct compiler *c, int addNone) 3939 3945 { 3940 3941 3942 3943 3944 3945 3946 3947 3948 3949 3950 3951 3952 3953 3954 3955 3956 3957 3958 3959 3960 entryblock = b; 3961 3962 3963 3964 3965 3966 3967 3968 3969 3970 3971 3972 3973 3974 3975 3976 3977 3978 3979 3980 3981 3982 3983 3984 3985 3986 3987 3988 3989 3990 3946 basicblock *b, *entryblock; 3947 struct assembler a; 3948 int i, j, nblocks; 3949 PyCodeObject *co = NULL; 3950 3951 /* Make sure every block that falls off the end returns None. 3952 XXX NEXT_BLOCK() isn't quite right, because if the last 3953 block ends with a jump or return b_next shouldn't set. 3954 */ 3955 if (!c->u->u_curblock->b_return) { 3956 NEXT_BLOCK(c); 3957 if (addNone) 3958 ADDOP_O(c, LOAD_CONST, Py_None, consts); 3959 ADDOP(c, RETURN_VALUE); 3960 } 3961 3962 nblocks = 0; 3963 entryblock = NULL; 3964 for (b = c->u->u_blocks; b != NULL; b = b->b_list) { 3965 nblocks++; 3966 entryblock = b; 3967 } 3968 3969 /* Set firstlineno if it wasn't explicitly set. */ 3970 if (!c->u->u_firstlineno) { 3971 if (entryblock && entryblock->b_instr) 3972 c->u->u_firstlineno = entryblock->b_instr->i_lineno; 3973 else 3974 c->u->u_firstlineno = 1; 3975 } 3976 if (!assemble_init(&a, nblocks, c->u->u_firstlineno)) 3977 goto error; 3978 dfs(c, entryblock, &a); 3979 3980 /* Can't modify the bytecode after computing jump offsets. */ 3981 assemble_jump_offsets(&a, c); 3982 3983 /* Emit code in reverse postorder from dfs. 
*/ 3984 for (i = a.a_nblocks - 1; i >= 0; i--) { 3985 b = a.a_postorder[i]; 3986 for (j = 0; j < b->b_iused; j++) 3987 if (!assemble_emit(&a, &b->b_instr[j])) 3988 goto error; 3989 } 3990 3991 if (_PyString_Resize(&a.a_lnotab, a.a_lnotab_off) < 0) 3992 goto error; 3993 if (_PyString_Resize(&a.a_bytecode, a.a_offset) < 0) 3994 goto error; 3995 3996 co = makecode(c, &a); 3991 3997 error: 3992 3993 3994 } 3998 assemble_free(&a); 3999 return co; 4000 }
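The long c_lnotab comment removed in this revision described how a consumer of the line number table recovers a source line from a bytecode address; per the new comment, that description now lives in Objects/lnotab_notes.txt. Below is a minimal, self-contained C sketch of that lookup walk, using a hypothetical helper that is not part of this changeset (CPython's own lookup of this kind is PyCode_Addr2Line):

#include <stddef.h>

/* Hypothetical standalone helper, not part of this changeset: walk an
   lnotab byte string and return the source line for a given bytecode
   address, following the (addr_incr, line_incr) pair scheme from the
   removed comment.  firstlineno plays the role of co_firstlineno. */
static int
lnotab_lineno(const unsigned char *lnotab, size_t size,
              int firstlineno, int addr_wanted)
{
    int lineno = firstlineno;
    int addr = 0;
    size_t i;

    for (i = 0; i + 1 < size; i += 2) {
        addr += lnotab[i];          /* bytecode offset increment */
        if (addr > addr_wanted)
            break;                  /* the previous pair already covered it */
        lineno += lnotab[i + 1];    /* line number increment */
    }
    return lineno;
}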
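assemble_lnotab() above still implements the delicate splitting rule that removed comment spelled out: a delta larger than 255 in either column is spread over several pairs, with zero increments in the other column, so that 300, 300 is written as 255, 0, 45, 255, 0, 45 rather than 255, 255, 45, 45. A condensed sketch of just that splitting, with emit_pair as a hypothetical stand-in for the resize-and-append writes to a->a_lnotab:

/* Sketch only; assumes non-negative deltas, as assemble_lnotab() asserts.
   emit_pair is a hypothetical callback replacing the buffer management. */
static void
emit_lnotab_delta(int d_bytecode, int d_lineno,
                  void (*emit_pair)(unsigned char addr_incr,
                                    unsigned char line_incr))
{
    /* Large bytecode jumps: filler pairs with zero line increment. */
    while (d_bytecode > 255) {
        emit_pair(255, 0);
        d_bytecode -= 255;
    }
    /* Large line jumps: the first pair carries the remaining bytecode
       increment, the rest advance only the line number. */
    while (d_lineno > 255) {
        emit_pair((unsigned char)d_bytecode, 255);
        d_bytecode = 0;
        d_lineno -= 255;
    }
    if (d_bytecode || d_lineno)
        emit_pair((unsigned char)d_bytecode, (unsigned char)d_lineno);
}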