comparison cos/python/Python/ceval.c @ 27:7f74363f4c82

Added some files for the python port
author windel
date Tue, 27 Dec 2011 18:59:02 +0100
1
2 /* Execute compiled code */
3
4 /* XXX TO DO:
5 XXX speed up searching for keywords by using a dictionary
6 XXX document it!
7 */
8
9 /* enable more aggressive intra-module optimizations, where available */
10 #define PY_LOCAL_AGGRESSIVE
11
12 #include "Python.h"
13
14 #include "code.h"
15 #include "frameobject.h"
16 #include "opcode.h"
17 #include "structmember.h"
18
19 #include <ctype.h>
20
21 typedef PyObject *(*callproc)(PyObject *, PyObject *, PyObject *);
22
23 /* Forward declarations */
24 static PyObject * call_function(PyObject ***, int);
25 static PyObject * fast_function(PyObject *, PyObject ***, int, int, int);
26 static PyObject * do_call(PyObject *, PyObject ***, int, int);
27 static PyObject * ext_do_call(PyObject *, PyObject ***, int, int, int);
28 static PyObject * update_keyword_args(PyObject *, int, PyObject ***,
29 PyObject *);
30 static PyObject * update_star_args(int, int, PyObject *, PyObject ***);
31 static PyObject * load_args(PyObject ***, int);
32 #define CALL_FLAG_VAR 1
33 #define CALL_FLAG_KW 2
34
35 static int call_trace(Py_tracefunc, PyObject *, PyFrameObject *,
36 int, PyObject *);
37 static int call_trace_protected(Py_tracefunc, PyObject *,
38 PyFrameObject *, int, PyObject *);
39 static void call_exc_trace(Py_tracefunc, PyObject *, PyFrameObject *);
40 static int maybe_call_line_trace(Py_tracefunc, PyObject *,
41 PyFrameObject *, int *, int *, int *);
42
43 static PyObject * cmp_outcome(int, PyObject *, PyObject *);
44 static PyObject * import_from(PyObject *, PyObject *);
45 static int import_all_from(PyObject *, PyObject *);
46 static void format_exc_check_arg(PyObject *, const char *, PyObject *);
47 static void format_exc_unbound(PyCodeObject *co, int oparg);
48 static PyObject * unicode_concatenate(PyObject *, PyObject *,
49 PyFrameObject *, unsigned char *);
50 static PyObject * special_lookup(PyObject *, char *, PyObject **);
51
52 #define NAME_ERROR_MSG \
53 "name '%.200s' is not defined"
54 #define GLOBAL_NAME_ERROR_MSG \
55 "global name '%.200s' is not defined"
56 #define UNBOUNDLOCAL_ERROR_MSG \
57 "local variable '%.200s' referenced before assignment"
58 #define UNBOUNDFREE_ERROR_MSG \
59 "free variable '%.200s' referenced before assignment" \
60 " in enclosing scope"
61
62 #define PCALL(O)
63
64 PyObject *
65 PyEval_GetCallStats(PyObject *self)
66 {
67 Py_INCREF(Py_None);
68 return Py_None;
69 }
70
71
72 #ifdef WITH_THREAD
73 #define GIL_REQUEST _Py_atomic_load_relaxed(&gil_drop_request)
74 #else
75 #define GIL_REQUEST 0
76 #endif
77
78 /* This can set eval_breaker to 0 even though gil_drop_request became
79 1. We believe this is all right because the eval loop will release
80 the GIL eventually anyway. */
81 #define COMPUTE_EVAL_BREAKER() \
82 _Py_atomic_store_relaxed( \
83 &eval_breaker, \
84 GIL_REQUEST | \
85 _Py_atomic_load_relaxed(&pendingcalls_to_do) | \
86 pending_async_exc)
87
88 #ifdef WITH_THREAD
89
90 #define SET_GIL_DROP_REQUEST() \
91 do { \
92 _Py_atomic_store_relaxed(&gil_drop_request, 1); \
93 _Py_atomic_store_relaxed(&eval_breaker, 1); \
94 } while (0)
95
96 #define RESET_GIL_DROP_REQUEST() \
97 do { \
98 _Py_atomic_store_relaxed(&gil_drop_request, 0); \
99 COMPUTE_EVAL_BREAKER(); \
100 } while (0)
101
102 #endif
103
104 /* Pending calls are only modified under pending_lock */
105 #define SIGNAL_PENDING_CALLS() \
106 do { \
107 _Py_atomic_store_relaxed(&pendingcalls_to_do, 1); \
108 _Py_atomic_store_relaxed(&eval_breaker, 1); \
109 } while (0)
110
111 #define UNSIGNAL_PENDING_CALLS() \
112 do { \
113 _Py_atomic_store_relaxed(&pendingcalls_to_do, 0); \
114 COMPUTE_EVAL_BREAKER(); \
115 } while (0)
116
117 #define SIGNAL_ASYNC_EXC() \
118 do { \
119 pending_async_exc = 1; \
120 _Py_atomic_store_relaxed(&eval_breaker, 1); \
121 } while (0)
122
123 #define UNSIGNAL_ASYNC_EXC() \
124 do { pending_async_exc = 0; COMPUTE_EVAL_BREAKER(); } while (0)
125
126
127 #ifdef WITH_THREAD
128
129 #include "pythread.h"
130
131 static PyThread_type_lock pending_lock = 0; /* for pending calls */
132 static long main_thread = 0;
133 /* This single variable consolidates all requests to break out of the fast path
134 in the eval loop. */
135 static _Py_atomic_int eval_breaker = {0};
136 /* Request for dropping the GIL */
137 static _Py_atomic_int gil_drop_request = {0};
138 /* Request for running pending calls. */
139 static _Py_atomic_int pendingcalls_to_do = {0};
140 /* Request for looking at the `async_exc` field of the current thread state.
141 Guarded by the GIL. */
142 static int pending_async_exc = 0;
143
144 #include "ceval_gil.h"
145
146 int
147 PyEval_ThreadsInitialized(void)
148 {
149 return gil_created();
150 }
151
152 void
153 PyEval_InitThreads(void)
154 {
155 if (gil_created())
156 return;
157 create_gil();
158 take_gil(PyThreadState_GET());
159 main_thread = PyThread_get_thread_ident();
160 if (!pending_lock)
161 pending_lock = PyThread_allocate_lock();
162 }
163
164 void
165 _PyEval_FiniThreads(void)
166 {
167 if (!gil_created())
168 return;
169 destroy_gil();
170 assert(!gil_created());
171 }
172
173 void
174 PyEval_AcquireLock(void)
175 {
176 PyThreadState *tstate = PyThreadState_GET();
177 if (tstate == NULL)
178 Py_FatalError("PyEval_AcquireLock: current thread state is NULL");
179 take_gil(tstate);
180 }
181
182 void
183 PyEval_ReleaseLock(void)
184 {
185 /* This function must succeed when the current thread state is NULL.
186 We therefore avoid PyThreadState_GET() which dumps a fatal error
187 in debug mode.
188 */
189 drop_gil((PyThreadState*)_Py_atomic_load_relaxed(
190 &_PyThreadState_Current));
191 }
192
193 void
194 PyEval_AcquireThread(PyThreadState *tstate)
195 {
196 if (tstate == NULL)
197 Py_FatalError("PyEval_AcquireThread: NULL new thread state");
198 /* Check someone has called PyEval_InitThreads() to create the lock */
199 assert(gil_created());
200 take_gil(tstate);
201 if (PyThreadState_Swap(tstate) != NULL)
202 Py_FatalError(
203 "PyEval_AcquireThread: non-NULL old thread state");
204 }
205
206 void
207 PyEval_ReleaseThread(PyThreadState *tstate)
208 {
209 if (tstate == NULL)
210 Py_FatalError("PyEval_ReleaseThread: NULL thread state");
211 if (PyThreadState_Swap(NULL) != tstate)
212 Py_FatalError("PyEval_ReleaseThread: wrong thread state");
213 drop_gil(tstate);
214 }
215
216 /* This function is called from PyOS_AfterFork to ensure that newly
217 created child processes don't hold locks referring to threads which
218 are not running in the child process. (This could also be done using the
219 pthread_atfork mechanism, at least for the pthreads implementation.) */
220
221 void
222 PyEval_ReInitThreads(void)
223 {
224 _Py_IDENTIFIER(_after_fork);
225 PyObject *threading, *result;
226 PyThreadState *tstate = PyThreadState_GET();
227
228 if (!gil_created())
229 return;
230 recreate_gil();
231 pending_lock = PyThread_allocate_lock();
232 take_gil(tstate);
233 main_thread = PyThread_get_thread_ident();
234
235 /* Update the threading module with the new state.
236 */
237 tstate = PyThreadState_GET();
238 threading = PyMapping_GetItemString(tstate->interp->modules,
239 "threading");
240 if (threading == NULL) {
241 /* threading not imported */
242 PyErr_Clear();
243 return;
244 }
245 result = _PyObject_CallMethodId(threading, &PyId__after_fork, NULL);
246 if (result == NULL)
247 PyErr_WriteUnraisable(threading);
248 else
249 Py_DECREF(result);
250 Py_DECREF(threading);
251 }
252
253 #else
254 static _Py_atomic_int eval_breaker = {0};
255 static int pending_async_exc = 0;
256 #endif /* WITH_THREAD */
257
258 /* This function is used to signal that async exceptions are waiting to be
259 raised, therefore it is also useful in non-threaded builds. */
260
261 void
262 _PyEval_SignalAsyncExc(void)
263 {
264 SIGNAL_ASYNC_EXC();
265 }
266
267 /* Functions save_thread and restore_thread are always defined so
268 dynamically loaded modules needn't be compiled separately for use
269 with and without threads: */
270
271 PyThreadState *
272 PyEval_SaveThread(void)
273 {
274 PyThreadState *tstate = PyThreadState_Swap(NULL);
275 if (tstate == NULL)
276 Py_FatalError("PyEval_SaveThread: NULL tstate");
277 #ifdef WITH_THREAD
278 if (gil_created())
279 drop_gil(tstate);
280 #endif
281 return tstate;
282 }
283
284 void
285 PyEval_RestoreThread(PyThreadState *tstate)
286 {
287 if (tstate == NULL)
288 Py_FatalError("PyEval_RestoreThread: NULL tstate");
289 #ifdef WITH_THREAD
290 if (gil_created()) {
291 int err = errno;
292 take_gil(tstate);
293 /* _Py_Finalizing is protected by the GIL */
294 if (_Py_Finalizing && tstate != _Py_Finalizing) {
295 drop_gil(tstate);
296 PyThread_exit_thread();
297 assert(0); /* unreachable */
298 }
299 errno = err;
300 }
301 #endif
302 PyThreadState_Swap(tstate);
303 }
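
/* Illustrative sketch (not part of this file): extension code normally does
   not call PyEval_SaveThread()/PyEval_RestoreThread() directly but uses the
   Py_BEGIN_ALLOW_THREADS / Py_END_ALLOW_THREADS macros, which expand to a
   matched save/restore pair around a blocking call.  some_blocking_io() is a
   hypothetical C function that must not touch any Python API. */
#if 0
static PyObject *
example_blocking_call(PyObject *self, PyObject *args)
{
    int rc;
    Py_BEGIN_ALLOW_THREADS          /* _save = PyEval_SaveThread(); GIL released */
    rc = some_blocking_io();        /* other Python threads may run here */
    Py_END_ALLOW_THREADS            /* PyEval_RestoreThread(_save); GIL re-acquired */
    if (rc < 0)
        return PyErr_SetFromErrno(PyExc_OSError);
    Py_RETURN_NONE;
}
#endif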
304
305
306 /* Mechanism whereby asynchronously executing callbacks (e.g. UNIX
307 signal handlers or Mac I/O completion routines) can schedule calls
308 to a function to be called synchronously.
309 The synchronous function is called with one void* argument.
310 It should return 0 for success or -1 for failure -- failure should
311 be accompanied by an exception.
312
313 If registration succeeds, Py_AddPendingCall() returns 0; if it fails
314 (e.g. due to too many pending calls) it returns -1 (without setting
315 an exception condition).
316
317 Note that because registration may occur from within signal handlers,
318 or other asynchronous events, calling malloc() is unsafe!
319
320 #ifdef WITH_THREAD
321 Any thread can schedule pending calls, but only the main thread
322 will execute them.
323 There is no facility to schedule calls to a particular thread, but
324 that should be easy to change, should that ever be required. In
325 that case, the static variables here should go into the python
326 threadstate.
327 #endif
328 */
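
/* Minimal usage sketch (hypothetical caller, not part of this file): a signal
   handler or worker thread schedules work to run later in the main thread by
   registering a callback with Py_AddPendingCall(); the callback itself runs
   with the GIL held and follows the 0/-1 convention described above. */
#if 0
static int
example_pending(void *arg)
{
    /* Executed later from the eval loop, in the main thread, GIL held. */
    PySys_WriteStderr("pending call: %s\n", (const char *)arg);
    return 0;                       /* 0 = success; -1 = failure + exception set */
}

/* ...from an asynchronous context: */
if (Py_AddPendingCall(example_pending, "tick") < 0) {
    /* queue full (or lock unavailable); caller may retry later */
}
#endif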
329
330 #ifdef WITH_THREAD
331
332 /* The WITH_THREAD implementation is thread-safe. It allows
333 scheduling to be made from any thread, and even from an executing
334 callback.
335 */
336
337 #define NPENDINGCALLS 32
338 static struct {
339 int (*func)(void *);
340 void *arg;
341 } pendingcalls[NPENDINGCALLS];
342 static int pendingfirst = 0;
343 static int pendinglast = 0;
344
345 int
346 Py_AddPendingCall(int (*func)(void *), void *arg)
347 {
348 int i, j, result=0;
349 PyThread_type_lock lock = pending_lock;
350
351 /* try a few times for the lock. Since this mechanism is used
352 * for signal handling (on the main thread), there is a (slim)
353 * chance that a signal is delivered on the same thread while we
354 * hold the lock during the Py_MakePendingCalls() function.
355 * This avoids a deadlock in that case.
356 * Note that signals can be delivered on any thread. In particular,
357 * on Windows, a SIGINT is delivered on a system-created worker
358 * thread.
359 * We also check for lock being NULL, in the unlikely case that
360 * this function is called before any bytecode evaluation takes place.
361 */
362 if (lock != NULL) {
363 for (i = 0; i<100; i++) {
364 if (PyThread_acquire_lock(lock, NOWAIT_LOCK))
365 break;
366 }
367 if (i == 100)
368 return -1;
369 }
370
371 i = pendinglast;
372 j = (i + 1) % NPENDINGCALLS;
373 if (j == pendingfirst) {
374 result = -1; /* Queue full */
375 } else {
376 pendingcalls[i].func = func;
377 pendingcalls[i].arg = arg;
378 pendinglast = j;
379 }
380 /* signal main loop */
381 SIGNAL_PENDING_CALLS();
382 if (lock != NULL)
383 PyThread_release_lock(lock);
384 return result;
385 }
386
387 int
388 Py_MakePendingCalls(void)
389 {
390 static int busy = 0;
391 int i;
392 int r = 0;
393
394 if (!pending_lock) {
395 /* initial allocation of the lock */
396 pending_lock = PyThread_allocate_lock();
397 if (pending_lock == NULL)
398 return -1;
399 }
400
401 /* only service pending calls on main thread */
402 if (main_thread && PyThread_get_thread_ident() != main_thread)
403 return 0;
404 /* don't perform recursive pending calls */
405 if (busy)
406 return 0;
407 busy = 1;
408 /* perform a bounded number of calls, in case of recursion */
409 for (i=0; i<NPENDINGCALLS; i++) {
410 int j;
411 int (*func)(void *);
412 void *arg = NULL;
413
414 /* pop one item off the queue while holding the lock */
415 PyThread_acquire_lock(pending_lock, WAIT_LOCK);
416 j = pendingfirst;
417 if (j == pendinglast) {
418 func = NULL; /* Queue empty */
419 } else {
420 func = pendingcalls[j].func;
421 arg = pendingcalls[j].arg;
422 pendingfirst = (j + 1) % NPENDINGCALLS;
423 }
424 if (pendingfirst != pendinglast)
425 SIGNAL_PENDING_CALLS();
426 else
427 UNSIGNAL_PENDING_CALLS();
428 PyThread_release_lock(pending_lock);
429 /* having released the lock, perform the callback */
430 if (func == NULL)
431 break;
432 r = func(arg);
433 if (r)
434 break;
435 }
436 busy = 0;
437 return r;
438 }
439
440 #else /* if ! defined WITH_THREAD */
441
442 /*
443 WARNING! ASYNCHRONOUSLY EXECUTING CODE!
444 This code is used for signal handling in Python builds that are not
445 compiled with WITH_THREAD.
446 Don't use this implementation when Py_AddPendingCall() can happen
447 on a different thread!
448
449 There are two possible race conditions:
450 (1) nested asynchronous calls to Py_AddPendingCall()
451 (2) AddPendingCall() calls made while pending calls are being processed.
452
453 (1) is very unlikely because typically signal delivery
454 is blocked during signal handling. So it should be impossible.
455 (2) is a real possibility.
456 The current code is safe against (2), but not against (1).
457 The safety against (2) is derived from the fact that only one
458 thread is present, interrupted by signals, and that the critical
459 section is protected with the "busy" variable. On Windows, which
460 delivers SIGINT on a system thread, this does not hold and therefore
461 Windows really shouldn't use this version.
462 The two threads could theoretically wiggle around the "busy" variable.
463 */
464
465 #define NPENDINGCALLS 32
466 static struct {
467 int (*func)(void *);
468 void *arg;
469 } pendingcalls[NPENDINGCALLS];
470 static volatile int pendingfirst = 0;
471 static volatile int pendinglast = 0;
472 static _Py_atomic_int pendingcalls_to_do = {0};
473
474 int
475 Py_AddPendingCall(int (*func)(void *), void *arg)
476 {
477 static volatile int busy = 0;
478 int i, j;
479 /* XXX Begin critical section */
480 if (busy)
481 return -1;
482 busy = 1;
483 i = pendinglast;
484 j = (i + 1) % NPENDINGCALLS;
485 if (j == pendingfirst) {
486 busy = 0;
487 return -1; /* Queue full */
488 }
489 pendingcalls[i].func = func;
490 pendingcalls[i].arg = arg;
491 pendinglast = j;
492
493 SIGNAL_PENDING_CALLS();
494 busy = 0;
495 /* XXX End critical section */
496 return 0;
497 }
498
499 int
500 Py_MakePendingCalls(void)
501 {
502 static int busy = 0;
503 if (busy)
504 return 0;
505 busy = 1;
506 UNSIGNAL_PENDING_CALLS();
507 for (;;) {
508 int i;
509 int (*func)(void *);
510 void *arg;
511 i = pendingfirst;
512 if (i == pendinglast)
513 break; /* Queue empty */
514 func = pendingcalls[i].func;
515 arg = pendingcalls[i].arg;
516 pendingfirst = (i + 1) % NPENDINGCALLS;
517 if (func(arg) < 0) {
518 busy = 0;
519 SIGNAL_PENDING_CALLS(); /* We're not done yet */
520 return -1;
521 }
522 }
523 busy = 0;
524 return 0;
525 }
526
527 #endif /* WITH_THREAD */
528
529
530 /* The interpreter's recursion limit */
531
532 #ifndef Py_DEFAULT_RECURSION_LIMIT
533 #define Py_DEFAULT_RECURSION_LIMIT 1000
534 #endif
535 static int recursion_limit = Py_DEFAULT_RECURSION_LIMIT;
536 int _Py_CheckRecursionLimit = Py_DEFAULT_RECURSION_LIMIT;
537
538 /* the macro Py_EnterRecursiveCall() only calls _Py_CheckRecursiveCall()
539 if the recursion_depth reaches _Py_CheckRecursionLimit.
540 If USE_STACKCHECK, the macro decrements _Py_CheckRecursionLimit
541 to guarantee that _Py_CheckRecursiveCall() is regularly called.
542 Without USE_STACKCHECK, there is no need for this. */
543 int
544 _Py_CheckRecursiveCall(char *where)
545 {
546 PyThreadState *tstate = PyThreadState_GET();
547
548 #ifdef USE_STACKCHECK
549 if (PyOS_CheckStack()) {
550 --tstate->recursion_depth;
551 PyErr_SetString(PyExc_MemoryError, "Stack overflow");
552 return -1;
553 }
554 #endif
555 _Py_CheckRecursionLimit = recursion_limit;
556 if (tstate->recursion_critical)
557 /* Somebody asked that we don't check for recursion. */
558 return 0;
559 if (tstate->overflowed) {
560 if (tstate->recursion_depth > recursion_limit + 50) {
561 /* Overflowing while handling an overflow. Give up. */
562 Py_FatalError("Cannot recover from stack overflow.");
563 }
564 return 0;
565 }
566 if (tstate->recursion_depth > recursion_limit) {
567 --tstate->recursion_depth;
568 tstate->overflowed = 1;
569 PyErr_Format(PyExc_RuntimeError,
570 "maximum recursion depth exceeded%s",
571 where);
572 return -1;
573 }
574 return 0;
575 }
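
/* Usage sketch (hypothetical helper, not part of this file): C code that can
   recurse guards itself with the Py_EnterRecursiveCall()/Py_LeaveRecursiveCall()
   pair, which funnels into _Py_CheckRecursiveCall() above once the depth nears
   the limit.  build_repr() stands in for any call that may recurse. */
#if 0
static PyObject *
example_recursive(PyObject *node)
{
    PyObject *result;
    if (Py_EnterRecursiveCall(" while formatting a node"))
        return NULL;                /* RuntimeError already set */
    result = build_repr(node);      /* may re-enter example_recursive() */
    Py_LeaveRecursiveCall();
    return result;
}
#endif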
576
577 /* Status code for main loop (reason for stack unwind) */
578 enum why_code {
579 WHY_NOT = 0x0001, /* No error */
580 WHY_EXCEPTION = 0x0002, /* Exception occurred */
581 WHY_RERAISE = 0x0004, /* Exception re-raised by 'finally' */
582 WHY_RETURN = 0x0008, /* 'return' statement */
583 WHY_BREAK = 0x0010, /* 'break' statement */
584 WHY_CONTINUE = 0x0020, /* 'continue' statement */
585 WHY_YIELD = 0x0040, /* 'yield' operator */
586 WHY_SILENCED = 0x0080 /* Exception silenced by 'with' */
587 };
588
589 static void save_exc_state(PyThreadState *, PyFrameObject *);
590 static void swap_exc_state(PyThreadState *, PyFrameObject *);
591 static void restore_and_clear_exc_state(PyThreadState *, PyFrameObject *);
592 static enum why_code do_raise(PyObject *, PyObject *);
593 static int unpack_iterable(PyObject *, int, int, PyObject **);
594
595 /* Records whether tracing is on for any thread. Counts the number of
596 threads for which tstate->c_tracefunc is non-NULL, so if the value
597 is 0, we know we don't have to check this thread's c_tracefunc.
598 This speeds up the if statement in PyEval_EvalFrameEx() after
599 fast_next_opcode. */
600 static int _Py_TracingPossible = 0;
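
/* Shape of a C-level trace function (hypothetical example, not part of this
   file): a Py_tracefunc installed with PyEval_SetTrace() is what makes
   tstate->c_tracefunc non-NULL and therefore _Py_TracingPossible non-zero. */
#if 0
static int
example_trace(PyObject *obj, PyFrameObject *frame, int what, PyObject *arg)
{
    if (what == PyTrace_CALL) {
        /* inspect frame->f_code / frame->f_lineno here */
    }
    return 0;                       /* 0 = continue; -1 (+ exception) = abort */
}
/* PyEval_SetTrace(example_trace, NULL); */
#endif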
601
602
603
604 PyObject *
605 PyEval_EvalCode(PyObject *co, PyObject *globals, PyObject *locals)
606 {
607 return PyEval_EvalCodeEx(co,
608 globals, locals,
609 (PyObject **)NULL, 0,
610 (PyObject **)NULL, 0,
611 (PyObject **)NULL, 0,
612 NULL, NULL);
613 }
614
615
616 /* Interpreter main loop */
617
618 PyObject *
619 PyEval_EvalFrameEx(PyFrameObject *f, int throwflag)
620 {
621 register PyObject **stack_pointer; /* Next free slot in value stack */
622 register unsigned char *next_instr;
623 register int opcode; /* Current opcode */
624 register int oparg; /* Current opcode argument, if any */
625 register enum why_code why; /* Reason for block stack unwind */
626 register int err; /* Error status -- nonzero if error */
627 register PyObject *x; /* Result object -- NULL if error */
628 register PyObject *v; /* Temporary objects popped off stack */
629 register PyObject *w;
630 register PyObject *u;
631 register PyObject *t;
632 register PyObject **fastlocals, **freevars;
633 PyObject *retval = NULL; /* Return value */
634 PyThreadState *tstate = PyThreadState_GET();
635 PyCodeObject *co;
636
637 /* when tracing we set things up so that
638
639 not (instr_lb <= current_bytecode_offset < instr_ub)
640
641 is true when the line being executed has changed. The
642 initial values are such as to make this false the first
643 time it is tested. */
644 int instr_ub = -1, instr_lb = 0, instr_prev = -1;
645
646 unsigned char *first_instr;
647 PyObject *names;
648 PyObject *consts;
649
650 /* Computed GOTOs, or
651 the-optimization-commonly-but-improperly-known-as-"threaded code"
652 using gcc's labels-as-values extension
653 (http://gcc.gnu.org/onlinedocs/gcc/Labels-as-Values.html).
654
655 The traditional bytecode evaluation loop uses a "switch" statement, which
656 decent compilers will optimize as a single indirect branch instruction
657 combined with a lookup table of jump addresses. However, since the
658 indirect jump instruction is shared by all opcodes, the CPU will have a
659 hard time making the right prediction for where to jump next (actually,
660 it will always be wrong except in the uncommon case of a sequence of
661 several identical opcodes).
662
663 "Threaded code" in contrast, uses an explicit jump table and an explicit
664 indirect jump instruction at the end of each opcode. Since the jump
665 instruction is at a different address for each opcode, the CPU will make a
666 separate prediction for each of these instructions, which is equivalent to
667 predicting the second opcode of each opcode pair. These predictions have
668 a much better chance to turn out valid, especially in small bytecode loops.
669
670 A mispredicted branch on a modern CPU flushes the whole pipeline and
671 can cost several CPU cycles (depending on the pipeline depth),
672 and potentially many more instructions (depending on the pipeline width).
673 A correctly predicted branch, however, is nearly free.
674
675 At the time of this writing, the "threaded code" version is up to 15-20%
676 faster than the normal "switch" version, depending on the compiler and the
677 CPU architecture.
678
679 We disable the optimization if DYNAMIC_EXECUTION_PROFILE is defined,
680 because it would render the measurements invalid.
681
682
683 NOTE: care must be taken that the compiler doesn't try to "optimize" the
684 indirect jumps by sharing them between all opcodes. Such optimizations
685 can be disabled on gcc by using the -fno-gcse flag (or possibly
686 -fno-crossjumping).
687 */
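
/* Standalone toy (assumption: GCC-style labels-as-values are available; this
   only illustrates the dispatch technique described above, not the macros
   used below): each opcode body ends with its own indirect jump, so the CPU
   can predict each opcode-to-opcode transition separately. */
#if 0
static int
threaded_demo(const unsigned char *code)    /* 0 = INC, 1 = DEC, 2 = HALT */
{
    static void *targets[] = { &&op_inc, &&op_dec, &&op_halt };
    const unsigned char *pc = code;
    int acc = 0;
#define DEMO_DISPATCH() goto *targets[*pc++]
    DEMO_DISPATCH();
op_inc:  acc++; DEMO_DISPATCH();
op_dec:  acc--; DEMO_DISPATCH();
op_halt: return acc;
#undef DEMO_DISPATCH
}
#endif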
688
689 #ifdef DYNAMIC_EXECUTION_PROFILE
690 #undef USE_COMPUTED_GOTOS
691 #define USE_COMPUTED_GOTOS 0
692 #endif
693
694 #ifdef HAVE_COMPUTED_GOTOS
695 #ifndef USE_COMPUTED_GOTOS
696 #define USE_COMPUTED_GOTOS 1
697 #endif
698 #else
699 #if defined(USE_COMPUTED_GOTOS) && USE_COMPUTED_GOTOS
700 #error "Computed gotos are not supported on this compiler."
701 #endif
702 #undef USE_COMPUTED_GOTOS
703 #define USE_COMPUTED_GOTOS 0
704 #endif
705
706 #if USE_COMPUTED_GOTOS
707 /* Import the static jump table */
708 #include "opcode_targets.h"
709
710 /* This macro is used when several opcodes defer to the same implementation
711 (e.g. SETUP_LOOP, SETUP_FINALLY) */
712 #define TARGET_WITH_IMPL(op, impl) \
713 TARGET_##op: \
714 opcode = op; \
715 if (HAS_ARG(op)) \
716 oparg = NEXTARG(); \
717 case op: \
718 goto impl; \
719
720 #define TARGET(op) \
721 TARGET_##op: \
722 opcode = op; \
723 if (HAS_ARG(op)) \
724 oparg = NEXTARG(); \
725 case op:
726
727
728 #define DISPATCH() \
729 { \
730 if (!_Py_atomic_load_relaxed(&eval_breaker)) { \
731 FAST_DISPATCH(); \
732 } \
733 continue; \
734 }
735
736 #ifdef LLTRACE
737 #define FAST_DISPATCH() \
738 { \
739 if (!lltrace && !_Py_TracingPossible) { \
740 f->f_lasti = INSTR_OFFSET(); \
741 goto *opcode_targets[*next_instr++]; \
742 } \
743 goto fast_next_opcode; \
744 }
745 #else
746 #define FAST_DISPATCH() \
747 { \
748 if (!_Py_TracingPossible) { \
749 f->f_lasti = INSTR_OFFSET(); \
750 goto *opcode_targets[*next_instr++]; \
751 } \
752 goto fast_next_opcode; \
753 }
754 #endif
755
756 #else
757 #define TARGET(op) \
758 case op:
759 #define TARGET_WITH_IMPL(op, impl) \
760 /* silence compiler warnings about `impl` unused */ \
761 if (0) goto impl; \
762 case op:
763 #define DISPATCH() continue
764 #define FAST_DISPATCH() goto fast_next_opcode
765 #endif
766
767
768 /* Tuple access macros */
769
770 #ifndef Py_DEBUG
771 #define GETITEM(v, i) PyTuple_GET_ITEM((PyTupleObject *)(v), (i))
772 #else
773 #define GETITEM(v, i) PyTuple_GetItem((v), (i))
774 #endif
775
776 #ifdef WITH_TSC
777 /* Use Pentium timestamp counter to mark certain events:
778 inst0 -- beginning of switch statement for opcode dispatch
779 inst1 -- end of switch statement (may be skipped)
780 loop0 -- the top of the mainloop
781 loop1 -- place where control returns again to top of mainloop
782 (may be skipped)
783 intr1 -- beginning of long interruption
784 intr2 -- end of long interruption
785
786 Many opcodes call out to helper C functions. In some cases, the
787 time in those functions should be counted towards the time for the
788 opcode, but not in all cases. For example, a CALL_FUNCTION opcode
789 calls another Python function; there's no point in charging all the
790 bytecode executed by the called function to the caller.
791
792 It's hard to make a useful judgement statically. In the presence
793 of operator overloading, it's impossible to tell if a call will
794 execute new Python code or not.
795
796 It's a case-by-case judgement. I'll use intr1 for the following
797 cases:
798
799 IMPORT_STAR
800 IMPORT_FROM
801 CALL_FUNCTION (and friends)
802
803 */
804 uint64 inst0, inst1, loop0, loop1, intr0 = 0, intr1 = 0;
805 int ticked = 0;
806
807 READ_TIMESTAMP(inst0);
808 READ_TIMESTAMP(inst1);
809 READ_TIMESTAMP(loop0);
810 READ_TIMESTAMP(loop1);
811
812 /* shut up the compiler */
813 opcode = 0;
814 #endif
815
816 /* Code access macros */
817
818 #define INSTR_OFFSET() ((int)(next_instr - first_instr))
819 #define NEXTOP() (*next_instr++)
820 #define NEXTARG() (next_instr += 2, (next_instr[-1]<<8) + next_instr[-2])
821 #define PEEKARG() ((next_instr[2]<<8) + next_instr[1])
822 #define JUMPTO(x) (next_instr = first_instr + (x))
823 #define JUMPBY(x) (next_instr += (x))
824
825 /* OpCode prediction macros
826 Some opcodes tend to come in pairs thus making it possible to
827 predict the second code when the first is run. For example,
828 COMPARE_OP is often followed by JUMP_IF_FALSE or JUMP_IF_TRUE. And,
829 those opcodes are often followed by a POP_TOP.
830
831 Verifying the prediction costs a single high-speed test of a register
832 variable against a constant. If the pairing was good, then the
833 processor's own internal branch prediction has a high likelihood of
834 success, resulting in a nearly zero-overhead transition to the
835 next opcode. A successful prediction saves a trip through the eval-loop
836 including its two unpredictable branches, the HAS_ARG test and the
837 switch-case. Combined with the processor's internal branch prediction,
838 a successful PREDICT has the effect of making the two opcodes run as if
839 they were a single new opcode with the bodies combined.
840
841 If collecting opcode statistics, your choices are to either keep the
842 predictions turned-on and interpret the results as if some opcodes
843 had been combined or turn-off predictions so that the opcode frequency
844 counter updates for both opcodes.
845
846 Opcode prediction is disabled with threaded code, since the latter allows
847 the CPU to record separate branch prediction information for each
848 opcode.
849
850 */
851
852 #if defined(DYNAMIC_EXECUTION_PROFILE) || USE_COMPUTED_GOTOS
853 #define PREDICT(op) if (0) goto PRED_##op
854 #define PREDICTED(op) PRED_##op:
855 #define PREDICTED_WITH_ARG(op) PRED_##op:
856 #else
857 #define PREDICT(op) if (*next_instr == op) goto PRED_##op
858 #define PREDICTED(op) PRED_##op: next_instr++
859 #define PREDICTED_WITH_ARG(op) PRED_##op: oparg = PEEKARG(); next_instr += 3
860 #endif
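
/* Shape of a predicted pair as used later in this file: the producing opcode
   ends with PREDICT(op), and the predicted opcode's case is prefixed with
   PREDICTED(op) / PREDICTED_WITH_ARG(op), so a correct guess jumps straight
   to PRED_##op and skips one trip through the dispatch machinery:

       TARGET(COMPARE_OP)
           ...
           PREDICT(POP_JUMP_IF_FALSE);
           DISPATCH();

       PREDICTED_WITH_ARG(POP_JUMP_IF_FALSE);
       TARGET(POP_JUMP_IF_FALSE)
           ...
*/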
861
862
863 /* Stack manipulation macros */
864
865 /* The stack can grow at most MAXINT deep, as co_nlocals and
866 co_stacksize are ints. */
867 #define STACK_LEVEL() ((int)(stack_pointer - f->f_valuestack))
868 #define EMPTY() (STACK_LEVEL() == 0)
869 #define TOP() (stack_pointer[-1])
870 #define SECOND() (stack_pointer[-2])
871 #define THIRD() (stack_pointer[-3])
872 #define FOURTH() (stack_pointer[-4])
873 #define PEEK(n) (stack_pointer[-(n)])
874 #define SET_TOP(v) (stack_pointer[-1] = (v))
875 #define SET_SECOND(v) (stack_pointer[-2] = (v))
876 #define SET_THIRD(v) (stack_pointer[-3] = (v))
877 #define SET_FOURTH(v) (stack_pointer[-4] = (v))
878 #define SET_VALUE(n, v) (stack_pointer[-(n)] = (v))
879 #define BASIC_STACKADJ(n) (stack_pointer += n)
880 #define BASIC_PUSH(v) (*stack_pointer++ = (v))
881 #define BASIC_POP() (*--stack_pointer)
882
883 #define PUSH(v) BASIC_PUSH(v)
884 #define POP() BASIC_POP()
885 #define STACKADJ(n) BASIC_STACKADJ(n)
886 #define EXT_POP(STACK_POINTER) (*--(STACK_POINTER))
887
888 /* Local variable macros */
889
890 #define GETLOCAL(i) (fastlocals[i])
891
892 /* The SETLOCAL() macro must not DECREF the local variable in-place and
893 then store the new value; it must copy the old value to a temporary
894 value, then store the new value, and then DECREF the temporary value.
895 This is because it is possible that during the DECREF the frame is
896 accessed by other code (e.g. a __del__ method or gc.collect()) and the
897 variable would be pointing to already-freed memory. */
898 #define SETLOCAL(i, value) do { PyObject *tmp = GETLOCAL(i); \
899 GETLOCAL(i) = value; \
900 Py_XDECREF(tmp); } while (0)
901
902
903 #define UNWIND_BLOCK(b) \
904 while (STACK_LEVEL() > (b)->b_level) { \
905 PyObject *v = POP(); \
906 Py_XDECREF(v); \
907 }
908
909 #define UNWIND_EXCEPT_HANDLER(b) \
910 { \
911 PyObject *type, *value, *traceback; \
912 assert(STACK_LEVEL() >= (b)->b_level + 3); \
913 while (STACK_LEVEL() > (b)->b_level + 3) { \
914 value = POP(); \
915 Py_XDECREF(value); \
916 } \
917 type = tstate->exc_type; \
918 value = tstate->exc_value; \
919 traceback = tstate->exc_traceback; \
920 tstate->exc_type = POP(); \
921 tstate->exc_value = POP(); \
922 tstate->exc_traceback = POP(); \
923 Py_XDECREF(type); \
924 Py_XDECREF(value); \
925 Py_XDECREF(traceback); \
926 }
927
928 /* Start of code */
929
930 /* push frame */
931 if (Py_EnterRecursiveCall(""))
932 return NULL;
933
934 tstate->frame = f;
935
936 if (tstate->use_tracing) {
937 if (tstate->c_tracefunc != NULL) {
938 /* tstate->c_tracefunc, if defined, is a
939 function that will be called on *every* entry
940 to a code block. Its return value, if not
941 None, is a function that will be called at
942 the start of each executed line of code.
943 (Actually, the function must return itself
944 in order to continue tracing.) The trace
945 functions are called with three arguments:
946 a pointer to the current frame, a string
947 indicating why the function is called, and
948 an argument which depends on the situation.
949 The global trace function is also called
950 whenever an exception is detected. */
951 if (call_trace_protected(tstate->c_tracefunc,
952 tstate->c_traceobj,
953 f, PyTrace_CALL, Py_None)) {
954 /* Trace function raised an error */
955 goto exit_eval_frame;
956 }
957 }
958 if (tstate->c_profilefunc != NULL) {
959 /* Similar for c_profilefunc, except it needn't
960 return itself and isn't called for "line" events */
961 if (call_trace_protected(tstate->c_profilefunc,
962 tstate->c_profileobj,
963 f, PyTrace_CALL, Py_None)) {
964 /* Profile function raised an error */
965 goto exit_eval_frame;
966 }
967 }
968 }
969
970 co = f->f_code;
971 names = co->co_names;
972 consts = co->co_consts;
973 fastlocals = f->f_localsplus;
974 freevars = f->f_localsplus + co->co_nlocals;
975 first_instr = (unsigned char*) PyBytes_AS_STRING(co->co_code);
976 /* An explanation is in order for the next line.
977
978 f->f_lasti now refers to the index of the last instruction
979 executed. You might think this was obvious from the name, but
980 this wasn't always true before 2.3! PyFrame_New now sets
981 f->f_lasti to -1 (i.e. the index *before* the first instruction)
982 and YIELD_VALUE doesn't fiddle with f_lasti any more. So this
983 does work. Promise.
984
985 When the PREDICT() macros are enabled, some opcode pairs follow in
986 direct succession without updating f->f_lasti. A successful
987 prediction effectively links the two codes together as if they
988 were a single new opcode; accordingly, f->f_lasti will point to
989 the first code in the pair (for instance, GET_ITER followed by
990 FOR_ITER is effectively a single opcode and f->f_lasti will point
991 to the beginning of the combined pair.)
992 */
993 next_instr = first_instr + f->f_lasti + 1;
994 stack_pointer = f->f_stacktop;
995 assert(stack_pointer != NULL);
996 f->f_stacktop = NULL; /* remains NULL unless yield suspends frame */
997
998 if (co->co_flags & CO_GENERATOR && !throwflag) {
999 if (f->f_exc_type != NULL && f->f_exc_type != Py_None) {
1000 /* We were in an except handler when we left,
1001 restore the exception state which was put aside
1002 (see YIELD_VALUE). */
1003 swap_exc_state(tstate, f);
1004 }
1005 else
1006 save_exc_state(tstate, f);
1007 }
1008
1009 why = WHY_NOT;
1010 err = 0;
1011 x = Py_None; /* Not a reference, just anything non-NULL */
1012 w = NULL;
1013
1014 if (throwflag) { /* support for generator.throw() */
1015 why = WHY_EXCEPTION;
1016 goto on_error;
1017 }
1018
1019 for (;;) {
1020 assert(stack_pointer >= f->f_valuestack); /* else underflow */
1021 assert(STACK_LEVEL() <= co->co_stacksize); /* else overflow */
1022
1023 /* Do periodic things. Doing this every time through
1024 the loop would add too much overhead, so we do it
1025 only every Nth instruction. We also do it if
1026 ``pendingcalls_to_do'' is set, i.e. when an asynchronous
1027 event needs attention (e.g. a signal handler or
1028 async I/O handler); see Py_AddPendingCall() and
1029 Py_MakePendingCalls() above. */
1030
1031 if (_Py_atomic_load_relaxed(&eval_breaker)) {
1032 if (*next_instr == SETUP_FINALLY) {
1033 /* Make the last opcode before
1034 a try: finally: block uninterruptible. */
1035 goto fast_next_opcode;
1036 }
1037 tstate->tick_counter++;
1038 if (_Py_atomic_load_relaxed(&pendingcalls_to_do)) {
1039 if (Py_MakePendingCalls() < 0) {
1040 why = WHY_EXCEPTION;
1041 goto on_error;
1042 }
1043 }
1044 #ifdef WITH_THREAD
1045 if (_Py_atomic_load_relaxed(&gil_drop_request)) {
1046 /* Give another thread a chance */
1047 if (PyThreadState_Swap(NULL) != tstate)
1048 Py_FatalError("ceval: tstate mix-up");
1049 drop_gil(tstate);
1050
1051 /* Other threads may run now */
1052
1053 take_gil(tstate);
1054 if (PyThreadState_Swap(tstate) != NULL)
1055 Py_FatalError("ceval: orphan tstate");
1056 }
1057 #endif
1058 /* Check for asynchronous exceptions. */
1059 if (tstate->async_exc != NULL) {
1060 x = tstate->async_exc;
1061 tstate->async_exc = NULL;
1062 UNSIGNAL_ASYNC_EXC();
1063 PyErr_SetNone(x);
1064 Py_DECREF(x);
1065 why = WHY_EXCEPTION;
1066 goto on_error;
1067 }
1068 }
1069
1070 fast_next_opcode:
1071 f->f_lasti = INSTR_OFFSET();
1072
1073 /* line-by-line tracing support */
1074
1075 if (_Py_TracingPossible &&
1076 tstate->c_tracefunc != NULL && !tstate->tracing) {
1077 /* see maybe_call_line_trace
1078 for expository comments */
1079 f->f_stacktop = stack_pointer;
1080
1081 err = maybe_call_line_trace(tstate->c_tracefunc,
1082 tstate->c_traceobj,
1083 f, &instr_lb, &instr_ub,
1084 &instr_prev);
1085 /* Reload possibly changed frame fields */
1086 JUMPTO(f->f_lasti);
1087 if (f->f_stacktop != NULL) {
1088 stack_pointer = f->f_stacktop;
1089 f->f_stacktop = NULL;
1090 }
1091 if (err) {
1092 /* trace function raised an exception */
1093 goto on_error;
1094 }
1095 }
1096
1097 /* Extract opcode and argument */
1098
1099 opcode = NEXTOP();
1100 oparg = 0; /* allows oparg to be stored in a register because
1101 it doesn't have to be remembered across a full loop */
1102 if (HAS_ARG(opcode))
1103 oparg = NEXTARG();
1104 dispatch_opcode:
1105
1106 /* Main switch on opcode */
1107
1108 switch (opcode) {
1109
1110 /* BEWARE!
1111 It is essential that any operation that fails sets either
1112 x to NULL, err to nonzero, or why to anything but WHY_NOT,
1113 and that no operation that succeeds does this! */
1114
1115 TARGET(NOP)
1116 FAST_DISPATCH();
1117
1118 TARGET(LOAD_FAST)
1119 x = GETLOCAL(oparg);
1120 if (x != NULL) {
1121 Py_INCREF(x);
1122 PUSH(x);
1123 FAST_DISPATCH();
1124 }
1125 format_exc_check_arg(PyExc_UnboundLocalError,
1126 UNBOUNDLOCAL_ERROR_MSG,
1127 PyTuple_GetItem(co->co_varnames, oparg));
1128 break;
1129
1130 TARGET(LOAD_CONST)
1131 x = GETITEM(consts, oparg);
1132 Py_INCREF(x);
1133 PUSH(x);
1134 FAST_DISPATCH();
1135
1136 PREDICTED_WITH_ARG(STORE_FAST);
1137 TARGET(STORE_FAST)
1138 v = POP();
1139 SETLOCAL(oparg, v);
1140 FAST_DISPATCH();
1141
1142 TARGET(POP_TOP)
1143 v = POP();
1144 Py_DECREF(v);
1145 FAST_DISPATCH();
1146
1147 TARGET(ROT_TWO)
1148 v = TOP();
1149 w = SECOND();
1150 SET_TOP(w);
1151 SET_SECOND(v);
1152 FAST_DISPATCH();
1153
1154 TARGET(ROT_THREE)
1155 v = TOP();
1156 w = SECOND();
1157 x = THIRD();
1158 SET_TOP(w);
1159 SET_SECOND(x);
1160 SET_THIRD(v);
1161 FAST_DISPATCH();
1162
1163 TARGET(DUP_TOP)
1164 v = TOP();
1165 Py_INCREF(v);
1166 PUSH(v);
1167 FAST_DISPATCH();
1168
1169 TARGET(DUP_TOP_TWO)
1170 x = TOP();
1171 Py_INCREF(x);
1172 w = SECOND();
1173 Py_INCREF(w);
1174 STACKADJ(2);
1175 SET_TOP(x);
1176 SET_SECOND(w);
1177 FAST_DISPATCH();
1178
1179 TARGET(UNARY_POSITIVE)
1180 v = TOP();
1181 x = PyNumber_Positive(v);
1182 Py_DECREF(v);
1183 SET_TOP(x);
1184 if (x != NULL) DISPATCH();
1185 break;
1186
1187 TARGET(UNARY_NEGATIVE)
1188 v = TOP();
1189 x = PyNumber_Negative(v);
1190 Py_DECREF(v);
1191 SET_TOP(x);
1192 if (x != NULL) DISPATCH();
1193 break;
1194
1195 TARGET(UNARY_NOT)
1196 v = TOP();
1197 err = PyObject_IsTrue(v);
1198 Py_DECREF(v);
1199 if (err == 0) {
1200 Py_INCREF(Py_True);
1201 SET_TOP(Py_True);
1202 DISPATCH();
1203 }
1204 else if (err > 0) {
1205 Py_INCREF(Py_False);
1206 SET_TOP(Py_False);
1207 err = 0;
1208 DISPATCH();
1209 }
1210 STACKADJ(-1);
1211 break;
1212
1213 TARGET(UNARY_INVERT)
1214 v = TOP();
1215 x = PyNumber_Invert(v);
1216 Py_DECREF(v);
1217 SET_TOP(x);
1218 if (x != NULL) DISPATCH();
1219 break;
1220
1221 TARGET(BINARY_POWER)
1222 w = POP();
1223 v = TOP();
1224 x = PyNumber_Power(v, w, Py_None);
1225 Py_DECREF(v);
1226 Py_DECREF(w);
1227 SET_TOP(x);
1228 if (x != NULL) DISPATCH();
1229 break;
1230
1231 TARGET(BINARY_MULTIPLY)
1232 w = POP();
1233 v = TOP();
1234 x = PyNumber_Multiply(v, w);
1235 Py_DECREF(v);
1236 Py_DECREF(w);
1237 SET_TOP(x);
1238 if (x != NULL) DISPATCH();
1239 break;
1240
1241 TARGET(BINARY_TRUE_DIVIDE)
1242 w = POP();
1243 v = TOP();
1244 x = PyNumber_TrueDivide(v, w);
1245 Py_DECREF(v);
1246 Py_DECREF(w);
1247 SET_TOP(x);
1248 if (x != NULL) DISPATCH();
1249 break;
1250
1251 TARGET(BINARY_FLOOR_DIVIDE)
1252 w = POP();
1253 v = TOP();
1254 x = PyNumber_FloorDivide(v, w);
1255 Py_DECREF(v);
1256 Py_DECREF(w);
1257 SET_TOP(x);
1258 if (x != NULL) DISPATCH();
1259 break;
1260
1261 TARGET(BINARY_MODULO)
1262 w = POP();
1263 v = TOP();
1264 if (PyUnicode_CheckExact(v))
1265 x = PyUnicode_Format(v, w);
1266 else
1267 x = PyNumber_Remainder(v, w);
1268 Py_DECREF(v);
1269 Py_DECREF(w);
1270 SET_TOP(x);
1271 if (x != NULL) DISPATCH();
1272 break;
1273
1274 TARGET(BINARY_ADD)
1275 w = POP();
1276 v = TOP();
1277 if (PyUnicode_CheckExact(v) &&
1278 PyUnicode_CheckExact(w)) {
1279 x = unicode_concatenate(v, w, f, next_instr);
1280 /* unicode_concatenate consumed the ref to v */
1281 goto skip_decref_vx;
1282 }
1283 else {
1284 x = PyNumber_Add(v, w);
1285 }
1286 Py_DECREF(v);
1287 skip_decref_vx:
1288 Py_DECREF(w);
1289 SET_TOP(x);
1290 if (x != NULL) DISPATCH();
1291 break;
1292
1293 TARGET(BINARY_SUBTRACT)
1294 w = POP();
1295 v = TOP();
1296 x = PyNumber_Subtract(v, w);
1297 Py_DECREF(v);
1298 Py_DECREF(w);
1299 SET_TOP(x);
1300 if (x != NULL) DISPATCH();
1301 break;
1302
1303 TARGET(BINARY_SUBSCR)
1304 w = POP();
1305 v = TOP();
1306 x = PyObject_GetItem(v, w);
1307 Py_DECREF(v);
1308 Py_DECREF(w);
1309 SET_TOP(x);
1310 if (x != NULL) DISPATCH();
1311 break;
1312
1313 TARGET(BINARY_LSHIFT)
1314 w = POP();
1315 v = TOP();
1316 x = PyNumber_Lshift(v, w);
1317 Py_DECREF(v);
1318 Py_DECREF(w);
1319 SET_TOP(x);
1320 if (x != NULL) DISPATCH();
1321 break;
1322
1323 TARGET(BINARY_RSHIFT)
1324 w = POP();
1325 v = TOP();
1326 x = PyNumber_Rshift(v, w);
1327 Py_DECREF(v);
1328 Py_DECREF(w);
1329 SET_TOP(x);
1330 if (x != NULL) DISPATCH();
1331 break;
1332
1333 TARGET(BINARY_AND)
1334 w = POP();
1335 v = TOP();
1336 x = PyNumber_And(v, w);
1337 Py_DECREF(v);
1338 Py_DECREF(w);
1339 SET_TOP(x);
1340 if (x != NULL) DISPATCH();
1341 break;
1342
1343 TARGET(BINARY_XOR)
1344 w = POP();
1345 v = TOP();
1346 x = PyNumber_Xor(v, w);
1347 Py_DECREF(v);
1348 Py_DECREF(w);
1349 SET_TOP(x);
1350 if (x != NULL) DISPATCH();
1351 break;
1352
1353 TARGET(BINARY_OR)
1354 w = POP();
1355 v = TOP();
1356 x = PyNumber_Or(v, w);
1357 Py_DECREF(v);
1358 Py_DECREF(w);
1359 SET_TOP(x);
1360 if (x != NULL) DISPATCH();
1361 break;
1362
1363 TARGET(LIST_APPEND)
1364 w = POP();
1365 v = PEEK(oparg);
1366 err = PyList_Append(v, w);
1367 Py_DECREF(w);
1368 if (err == 0) {
1369 PREDICT(JUMP_ABSOLUTE);
1370 DISPATCH();
1371 }
1372 break;
1373
1374 TARGET(SET_ADD)
1375 w = POP();
1376 v = stack_pointer[-oparg];
1377 err = PySet_Add(v, w);
1378 Py_DECREF(w);
1379 if (err == 0) {
1380 PREDICT(JUMP_ABSOLUTE);
1381 DISPATCH();
1382 }
1383 break;
1384
1385 TARGET(INPLACE_POWER)
1386 w = POP();
1387 v = TOP();
1388 x = PyNumber_InPlacePower(v, w, Py_None);
1389 Py_DECREF(v);
1390 Py_DECREF(w);
1391 SET_TOP(x);
1392 if (x != NULL) DISPATCH();
1393 break;
1394
1395 TARGET(INPLACE_MULTIPLY)
1396 w = POP();
1397 v = TOP();
1398 x = PyNumber_InPlaceMultiply(v, w);
1399 Py_DECREF(v);
1400 Py_DECREF(w);
1401 SET_TOP(x);
1402 if (x != NULL) DISPATCH();
1403 break;
1404
1405 TARGET(INPLACE_TRUE_DIVIDE)
1406 w = POP();
1407 v = TOP();
1408 x = PyNumber_InPlaceTrueDivide(v, w);
1409 Py_DECREF(v);
1410 Py_DECREF(w);
1411 SET_TOP(x);
1412 if (x != NULL) DISPATCH();
1413 break;
1414
1415 TARGET(INPLACE_FLOOR_DIVIDE)
1416 w = POP();
1417 v = TOP();
1418 x = PyNumber_InPlaceFloorDivide(v, w);
1419 Py_DECREF(v);
1420 Py_DECREF(w);
1421 SET_TOP(x);
1422 if (x != NULL) DISPATCH();
1423 break;
1424
1425 TARGET(INPLACE_MODULO)
1426 w = POP();
1427 v = TOP();
1428 x = PyNumber_InPlaceRemainder(v, w);
1429 Py_DECREF(v);
1430 Py_DECREF(w);
1431 SET_TOP(x);
1432 if (x != NULL) DISPATCH();
1433 break;
1434
1435 TARGET(INPLACE_ADD)
1436 w = POP();
1437 v = TOP();
1438 if (PyUnicode_CheckExact(v) &&
1439 PyUnicode_CheckExact(w)) {
1440 x = unicode_concatenate(v, w, f, next_instr);
1441 /* unicode_concatenate consumed the ref to v */
1442 goto skip_decref_v;
1443 }
1444 else {
1445 x = PyNumber_InPlaceAdd(v, w);
1446 }
1447 Py_DECREF(v);
1448 skip_decref_v:
1449 Py_DECREF(w);
1450 SET_TOP(x);
1451 if (x != NULL) DISPATCH();
1452 break;
1453
1454 TARGET(INPLACE_SUBTRACT)
1455 w = POP();
1456 v = TOP();
1457 x = PyNumber_InPlaceSubtract(v, w);
1458 Py_DECREF(v);
1459 Py_DECREF(w);
1460 SET_TOP(x);
1461 if (x != NULL) DISPATCH();
1462 break;
1463
1464 TARGET(INPLACE_LSHIFT)
1465 w = POP();
1466 v = TOP();
1467 x = PyNumber_InPlaceLshift(v, w);
1468 Py_DECREF(v);
1469 Py_DECREF(w);
1470 SET_TOP(x);
1471 if (x != NULL) DISPATCH();
1472 break;
1473
1474 TARGET(INPLACE_RSHIFT)
1475 w = POP();
1476 v = TOP();
1477 x = PyNumber_InPlaceRshift(v, w);
1478 Py_DECREF(v);
1479 Py_DECREF(w);
1480 SET_TOP(x);
1481 if (x != NULL) DISPATCH();
1482 break;
1483
1484 TARGET(INPLACE_AND)
1485 w = POP();
1486 v = TOP();
1487 x = PyNumber_InPlaceAnd(v, w);
1488 Py_DECREF(v);
1489 Py_DECREF(w);
1490 SET_TOP(x);
1491 if (x != NULL) DISPATCH();
1492 break;
1493
1494 TARGET(INPLACE_XOR)
1495 w = POP();
1496 v = TOP();
1497 x = PyNumber_InPlaceXor(v, w);
1498 Py_DECREF(v);
1499 Py_DECREF(w);
1500 SET_TOP(x);
1501 if (x != NULL) DISPATCH();
1502 break;
1503
1504 TARGET(INPLACE_OR)
1505 w = POP();
1506 v = TOP();
1507 x = PyNumber_InPlaceOr(v, w);
1508 Py_DECREF(v);
1509 Py_DECREF(w);
1510 SET_TOP(x);
1511 if (x != NULL) DISPATCH();
1512 break;
1513
1514 TARGET(STORE_SUBSCR)
1515 w = TOP();
1516 v = SECOND();
1517 u = THIRD();
1518 STACKADJ(-3);
1519 /* v[w] = u */
1520 err = PyObject_SetItem(v, w, u);
1521 Py_DECREF(u);
1522 Py_DECREF(v);
1523 Py_DECREF(w);
1524 if (err == 0) DISPATCH();
1525 break;
1526
1527 TARGET(DELETE_SUBSCR)
1528 w = TOP();
1529 v = SECOND();
1530 STACKADJ(-2);
1531 /* del v[w] */
1532 err = PyObject_DelItem(v, w);
1533 Py_DECREF(v);
1534 Py_DECREF(w);
1535 if (err == 0) DISPATCH();
1536 break;
1537
1538 TARGET(PRINT_EXPR)
1539 v = POP();
1540 w = PySys_GetObject("displayhook");
1541 if (w == NULL) {
1542 PyErr_SetString(PyExc_RuntimeError,
1543 "lost sys.displayhook");
1544 err = -1;
1545 x = NULL;
1546 }
1547 if (err == 0) {
1548 x = PyTuple_Pack(1, v);
1549 if (x == NULL)
1550 err = -1;
1551 }
1552 if (err == 0) {
1553 w = PyEval_CallObject(w, x);
1554 Py_XDECREF(w);
1555 if (w == NULL)
1556 err = -1;
1557 }
1558 Py_DECREF(v);
1559 Py_XDECREF(x);
1560 break;
1561
1562 TARGET(RAISE_VARARGS)
1563 v = w = NULL;
1564 switch (oparg) {
1565 case 2:
1566 v = POP(); /* cause */
1567 case 1:
1568 w = POP(); /* exc */
1569 case 0: /* Fallthrough */
1570 why = do_raise(w, v);
1571 break;
1572 default:
1573 PyErr_SetString(PyExc_SystemError,
1574 "bad RAISE_VARARGS oparg");
1575 why = WHY_EXCEPTION;
1576 break;
1577 }
1578 break;
1579
1580 TARGET(STORE_LOCALS)
1581 x = POP();
1582 v = f->f_locals;
1583 Py_XDECREF(v);
1584 f->f_locals = x;
1585 DISPATCH();
1586
1587 TARGET(RETURN_VALUE)
1588 retval = POP();
1589 why = WHY_RETURN;
1590 goto fast_block_end;
1591
1592 TARGET(YIELD_VALUE)
1593 retval = POP();
1594 f->f_stacktop = stack_pointer;
1595 why = WHY_YIELD;
1596 goto fast_yield;
1597
1598 TARGET(POP_EXCEPT)
1599 {
1600 PyTryBlock *b = PyFrame_BlockPop(f);
1601 if (b->b_type != EXCEPT_HANDLER) {
1602 PyErr_SetString(PyExc_SystemError,
1603 "popped block is not an except handler");
1604 why = WHY_EXCEPTION;
1605 break;
1606 }
1607 UNWIND_EXCEPT_HANDLER(b);
1608 }
1609 DISPATCH();
1610
1611 TARGET(POP_BLOCK)
1612 {
1613 PyTryBlock *b = PyFrame_BlockPop(f);
1614 UNWIND_BLOCK(b);
1615 }
1616 DISPATCH();
1617
1618 PREDICTED(END_FINALLY);
1619 TARGET(END_FINALLY)
1620 v = POP();
1621 if (PyLong_Check(v)) {
1622 why = (enum why_code) PyLong_AS_LONG(v);
1623 assert(why != WHY_YIELD);
1624 if (why == WHY_RETURN ||
1625 why == WHY_CONTINUE)
1626 retval = POP();
1627 if (why == WHY_SILENCED) {
1628 /* An exception was silenced by 'with', we must
1629 manually unwind the EXCEPT_HANDLER block which was
1630 created when the exception was caught, otherwise
1631 the stack will be in an inconsistent state. */
1632 PyTryBlock *b = PyFrame_BlockPop(f);
1633 assert(b->b_type == EXCEPT_HANDLER);
1634 UNWIND_EXCEPT_HANDLER(b);
1635 why = WHY_NOT;
1636 }
1637 }
1638 else if (PyExceptionClass_Check(v)) {
1639 w = POP();
1640 u = POP();
1641 PyErr_Restore(v, w, u);
1642 why = WHY_RERAISE;
1643 break;
1644 }
1645 else if (v != Py_None) {
1646 PyErr_SetString(PyExc_SystemError,
1647 "'finally' pops bad exception");
1648 why = WHY_EXCEPTION;
1649 }
1650 Py_DECREF(v);
1651 break;
1652
1653 TARGET(LOAD_BUILD_CLASS)
1654 x = PyDict_GetItemString(f->f_builtins,
1655 "__build_class__");
1656 if (x == NULL) {
1657 PyErr_SetString(PyExc_ImportError,
1658 "__build_class__ not found");
1659 break;
1660 }
1661 Py_INCREF(x);
1662 PUSH(x);
1663 break;
1664
1665 TARGET(STORE_NAME)
1666 w = GETITEM(names, oparg);
1667 v = POP();
1668 if ((x = f->f_locals) != NULL) {
1669 if (PyDict_CheckExact(x))
1670 err = PyDict_SetItem(x, w, v);
1671 else
1672 err = PyObject_SetItem(x, w, v);
1673 Py_DECREF(v);
1674 if (err == 0) DISPATCH();
1675 break;
1676 }
1677 PyErr_Format(PyExc_SystemError,
1678 "no locals found when storing %R", w);
1679 break;
1680
1681 TARGET(DELETE_NAME)
1682 w = GETITEM(names, oparg);
1683 if ((x = f->f_locals) != NULL) {
1684 if ((err = PyObject_DelItem(x, w)) != 0)
1685 format_exc_check_arg(PyExc_NameError,
1686 NAME_ERROR_MSG,
1687 w);
1688 break;
1689 }
1690 PyErr_Format(PyExc_SystemError,
1691 "no locals when deleting %R", w);
1692 break;
1693
1694 PREDICTED_WITH_ARG(UNPACK_SEQUENCE);
1695 TARGET(UNPACK_SEQUENCE)
1696 v = POP();
1697 if (PyTuple_CheckExact(v) &&
1698 PyTuple_GET_SIZE(v) == oparg) {
1699 PyObject **items = \
1700 ((PyTupleObject *)v)->ob_item;
1701 while (oparg--) {
1702 w = items[oparg];
1703 Py_INCREF(w);
1704 PUSH(w);
1705 }
1706 Py_DECREF(v);
1707 DISPATCH();
1708 } else if (PyList_CheckExact(v) &&
1709 PyList_GET_SIZE(v) == oparg) {
1710 PyObject **items = \
1711 ((PyListObject *)v)->ob_item;
1712 while (oparg--) {
1713 w = items[oparg];
1714 Py_INCREF(w);
1715 PUSH(w);
1716 }
1717 } else if (unpack_iterable(v, oparg, -1,
1718 stack_pointer + oparg)) {
1719 STACKADJ(oparg);
1720 } else {
1721 /* unpack_iterable() raised an exception */
1722 why = WHY_EXCEPTION;
1723 }
1724 Py_DECREF(v);
1725 break;
1726
1727 TARGET(UNPACK_EX)
1728 {
1729 int totalargs = 1 + (oparg & 0xFF) + (oparg >> 8);
1730 v = POP();
1731
1732 if (unpack_iterable(v, oparg & 0xFF, oparg >> 8,
1733 stack_pointer + totalargs)) {
1734 stack_pointer += totalargs;
1735 } else {
1736 why = WHY_EXCEPTION;
1737 }
1738 Py_DECREF(v);
1739 break;
1740 }
1741
1742 TARGET(STORE_ATTR)
1743 w = GETITEM(names, oparg);
1744 v = TOP();
1745 u = SECOND();
1746 STACKADJ(-2);
1747 err = PyObject_SetAttr(v, w, u); /* v.w = u */
1748 Py_DECREF(v);
1749 Py_DECREF(u);
1750 if (err == 0) DISPATCH();
1751 break;
1752
1753 TARGET(DELETE_ATTR)
1754 w = GETITEM(names, oparg);
1755 v = POP();
1756 err = PyObject_SetAttr(v, w, (PyObject *)NULL);
1757 /* del v.w */
1758 Py_DECREF(v);
1759 break;
1760
1761 TARGET(STORE_GLOBAL)
1762 w = GETITEM(names, oparg);
1763 v = POP();
1764 err = PyDict_SetItem(f->f_globals, w, v);
1765 Py_DECREF(v);
1766 if (err == 0) DISPATCH();
1767 break;
1768
1769 TARGET(DELETE_GLOBAL)
1770 w = GETITEM(names, oparg);
1771 if ((err = PyDict_DelItem(f->f_globals, w)) != 0)
1772 format_exc_check_arg(
1773 PyExc_NameError, GLOBAL_NAME_ERROR_MSG, w);
1774 break;
1775
1776 TARGET(LOAD_NAME)
1777 w = GETITEM(names, oparg);
1778 if ((v = f->f_locals) == NULL) {
1779 PyErr_Format(PyExc_SystemError,
1780 "no locals when loading %R", w);
1781 why = WHY_EXCEPTION;
1782 break;
1783 }
1784 if (PyDict_CheckExact(v)) {
1785 x = PyDict_GetItem(v, w);
1786 Py_XINCREF(x);
1787 }
1788 else {
1789 x = PyObject_GetItem(v, w);
1790 if (x == NULL && PyErr_Occurred()) {
1791 if (!PyErr_ExceptionMatches(
1792 PyExc_KeyError))
1793 break;
1794 PyErr_Clear();
1795 }
1796 }
1797 if (x == NULL) {
1798 x = PyDict_GetItem(f->f_globals, w);
1799 if (x == NULL) {
1800 x = PyDict_GetItem(f->f_builtins, w);
1801 if (x == NULL) {
1802 format_exc_check_arg(
1803 PyExc_NameError,
1804 NAME_ERROR_MSG, w);
1805 break;
1806 }
1807 }
1808 Py_INCREF(x);
1809 }
1810 PUSH(x);
1811 DISPATCH();
1812
1813 TARGET(LOAD_GLOBAL)
1814 w = GETITEM(names, oparg);
1815 if (PyUnicode_CheckExact(w)) {
1816 /* Inline the PyDict_GetItem() calls.
1817 WARNING: this is an extreme speed hack.
1818 Do not try this at home. */
1819 Py_hash_t hash = ((PyASCIIObject *)w)->hash;
1820 if (hash != -1) {
1821 PyDictObject *d;
1822 PyDictEntry *e;
1823 d = (PyDictObject *)(f->f_globals);
1824 e = d->ma_lookup(d, w, hash);
1825 if (e == NULL) {
1826 x = NULL;
1827 break;
1828 }
1829 x = e->me_value;
1830 if (x != NULL) {
1831 Py_INCREF(x);
1832 PUSH(x);
1833 DISPATCH();
1834 }
1835 d = (PyDictObject *)(f->f_builtins);
1836 e = d->ma_lookup(d, w, hash);
1837 if (e == NULL) {
1838 x = NULL;
1839 break;
1840 }
1841 x = e->me_value;
1842 if (x != NULL) {
1843 Py_INCREF(x);
1844 PUSH(x);
1845 DISPATCH();
1846 }
1847 goto load_global_error;
1848 }
1849 }
1850 /* This is the un-inlined version of the code above */
1851 x = PyDict_GetItem(f->f_globals, w);
1852 if (x == NULL) {
1853 x = PyDict_GetItem(f->f_builtins, w);
1854 if (x == NULL) {
1855 load_global_error:
1856 format_exc_check_arg(
1857 PyExc_NameError,
1858 GLOBAL_NAME_ERROR_MSG, w);
1859 break;
1860 }
1861 }
1862 Py_INCREF(x);
1863 PUSH(x);
1864 DISPATCH();
1865
1866 TARGET(DELETE_FAST)
1867 x = GETLOCAL(oparg);
1868 if (x != NULL) {
1869 SETLOCAL(oparg, NULL);
1870 DISPATCH();
1871 }
1872 format_exc_check_arg(
1873 PyExc_UnboundLocalError,
1874 UNBOUNDLOCAL_ERROR_MSG,
1875 PyTuple_GetItem(co->co_varnames, oparg)
1876 );
1877 break;
1878
1879 TARGET(DELETE_DEREF)
1880 x = freevars[oparg];
1881 if (PyCell_GET(x) != NULL) {
1882 PyCell_Set(x, NULL);
1883 DISPATCH();
1884 }
1885 err = -1;
1886 format_exc_unbound(co, oparg);
1887 break;
1888
1889 TARGET(LOAD_CLOSURE)
1890 x = freevars[oparg];
1891 Py_INCREF(x);
1892 PUSH(x);
1893 if (x != NULL) DISPATCH();
1894 break;
1895
1896 TARGET(LOAD_DEREF)
1897 x = freevars[oparg];
1898 w = PyCell_Get(x);
1899 if (w != NULL) {
1900 PUSH(w);
1901 DISPATCH();
1902 }
1903 err = -1;
1904 format_exc_unbound(co, oparg);
1905 break;
1906
1907 TARGET(STORE_DEREF)
1908 w = POP();
1909 x = freevars[oparg];
1910 PyCell_Set(x, w);
1911 Py_DECREF(w);
1912 DISPATCH();
1913
1914 TARGET(BUILD_TUPLE)
1915 x = PyTuple_New(oparg);
1916 if (x != NULL) {
1917 for (; --oparg >= 0;) {
1918 w = POP();
1919 PyTuple_SET_ITEM(x, oparg, w);
1920 }
1921 PUSH(x);
1922 DISPATCH();
1923 }
1924 break;
1925
1926 TARGET(BUILD_LIST)
1927 x = PyList_New(oparg);
1928 if (x != NULL) {
1929 for (; --oparg >= 0;) {
1930 w = POP();
1931 PyList_SET_ITEM(x, oparg, w);
1932 }
1933 PUSH(x);
1934 DISPATCH();
1935 }
1936 break;
1937
1938 TARGET(BUILD_SET)
1939 x = PySet_New(NULL);
1940 if (x != NULL) {
1941 for (; --oparg >= 0;) {
1942 w = POP();
1943 if (err == 0)
1944 err = PySet_Add(x, w);
1945 Py_DECREF(w);
1946 }
1947 if (err != 0) {
1948 Py_DECREF(x);
1949 break;
1950 }
1951 PUSH(x);
1952 DISPATCH();
1953 }
1954 break;
1955
1956 TARGET(BUILD_MAP)
1957 x = _PyDict_NewPresized((Py_ssize_t)oparg);
1958 PUSH(x);
1959 if (x != NULL) DISPATCH();
1960 break;
1961
1962 TARGET(STORE_MAP)
1963 w = TOP(); /* key */
1964 u = SECOND(); /* value */
1965 v = THIRD(); /* dict */
1966 STACKADJ(-2);
1967 assert (PyDict_CheckExact(v));
1968 err = PyDict_SetItem(v, w, u); /* v[w] = u */
1969 Py_DECREF(u);
1970 Py_DECREF(w);
1971 if (err == 0) DISPATCH();
1972 break;
1973
1974 TARGET(MAP_ADD)
1975 w = TOP(); /* key */
1976 u = SECOND(); /* value */
1977 STACKADJ(-2);
1978 v = stack_pointer[-oparg]; /* dict */
1979 assert (PyDict_CheckExact(v));
1980 err = PyDict_SetItem(v, w, u); /* v[w] = u */
1981 Py_DECREF(u);
1982 Py_DECREF(w);
1983 if (err == 0) {
1984 PREDICT(JUMP_ABSOLUTE);
1985 DISPATCH();
1986 }
1987 break;
1988
1989 TARGET(LOAD_ATTR)
1990 w = GETITEM(names, oparg);
1991 v = TOP();
1992 x = PyObject_GetAttr(v, w);
1993 Py_DECREF(v);
1994 SET_TOP(x);
1995 if (x != NULL) DISPATCH();
1996 break;
1997
1998 TARGET(COMPARE_OP)
1999 w = POP();
2000 v = TOP();
2001 x = cmp_outcome(oparg, v, w);
2002 Py_DECREF(v);
2003 Py_DECREF(w);
2004 SET_TOP(x);
2005 if (x == NULL) break;
2006 PREDICT(POP_JUMP_IF_FALSE);
2007 PREDICT(POP_JUMP_IF_TRUE);
2008 DISPATCH();
2009
2010 TARGET(IMPORT_NAME)
2011 w = GETITEM(names, oparg);
2012 x = PyDict_GetItemString(f->f_builtins, "__import__");
2013 if (x == NULL) {
2014 PyErr_SetString(PyExc_ImportError,
2015 "__import__ not found");
2016 break;
2017 }
2018 Py_INCREF(x);
2019 v = POP();
2020 u = TOP();
2021 if (PyLong_AsLong(u) != -1 || PyErr_Occurred())
2022 w = PyTuple_Pack(5,
2023 w,
2024 f->f_globals,
2025 f->f_locals == NULL ?
2026 Py_None : f->f_locals,
2027 v,
2028 u);
2029 else
2030 w = PyTuple_Pack(4,
2031 w,
2032 f->f_globals,
2033 f->f_locals == NULL ?
2034 Py_None : f->f_locals,
2035 v);
2036 Py_DECREF(v);
2037 Py_DECREF(u);
2038 if (w == NULL) {
2039 u = POP();
2040 Py_DECREF(x);
2041 x = NULL;
2042 break;
2043 }
2044 READ_TIMESTAMP(intr0);
2045 v = x;
2046 x = PyEval_CallObject(v, w);
2047 Py_DECREF(v);
2048 READ_TIMESTAMP(intr1);
2049 Py_DECREF(w);
2050 SET_TOP(x);
2051 if (x != NULL) DISPATCH();
2052 break;
2053
2054 TARGET(IMPORT_STAR)
2055 v = POP();
2056 PyFrame_FastToLocals(f);
2057 if ((x = f->f_locals) == NULL) {
2058 PyErr_SetString(PyExc_SystemError,
2059 "no locals found during 'import *'");
2060 break;
2061 }
2062 READ_TIMESTAMP(intr0);
2063 err = import_all_from(x, v);
2064 READ_TIMESTAMP(intr1);
2065 PyFrame_LocalsToFast(f, 0);
2066 Py_DECREF(v);
2067 if (err == 0) DISPATCH();
2068 break;
2069
2070 TARGET(IMPORT_FROM)
2071 w = GETITEM(names, oparg);
2072 v = TOP();
2073 READ_TIMESTAMP(intr0);
2074 x = import_from(v, w);
2075 READ_TIMESTAMP(intr1);
2076 PUSH(x);
2077 if (x != NULL) DISPATCH();
2078 break;
2079
2080 TARGET(JUMP_FORWARD)
2081 JUMPBY(oparg);
2082 FAST_DISPATCH();
2083
2084 PREDICTED_WITH_ARG(POP_JUMP_IF_FALSE);
2085 TARGET(POP_JUMP_IF_FALSE)
2086 w = POP();
2087 if (w == Py_True) {
2088 Py_DECREF(w);
2089 FAST_DISPATCH();
2090 }
2091 if (w == Py_False) {
2092 Py_DECREF(w);
2093 JUMPTO(oparg);
2094 FAST_DISPATCH();
2095 }
2096 err = PyObject_IsTrue(w);
2097 Py_DECREF(w);
2098 if (err > 0)
2099 err = 0;
2100 else if (err == 0)
2101 JUMPTO(oparg);
2102 else
2103 break;
2104 DISPATCH();
2105
2106 PREDICTED_WITH_ARG(POP_JUMP_IF_TRUE);
2107 TARGET(POP_JUMP_IF_TRUE)
2108 w = POP();
2109 if (w == Py_False) {
2110 Py_DECREF(w);
2111 FAST_DISPATCH();
2112 }
2113 if (w == Py_True) {
2114 Py_DECREF(w);
2115 JUMPTO(oparg);
2116 FAST_DISPATCH();
2117 }
2118 err = PyObject_IsTrue(w);
2119 Py_DECREF(w);
2120 if (err > 0) {
2121 err = 0;
2122 JUMPTO(oparg);
2123 }
2124 else if (err == 0)
2125 ;
2126 else
2127 break;
2128 DISPATCH();
2129
2130 TARGET(JUMP_IF_FALSE_OR_POP)
2131 w = TOP();
2132 if (w == Py_True) {
2133 STACKADJ(-1);
2134 Py_DECREF(w);
2135 FAST_DISPATCH();
2136 }
2137 if (w == Py_False) {
2138 JUMPTO(oparg);
2139 FAST_DISPATCH();
2140 }
2141 err = PyObject_IsTrue(w);
2142 if (err > 0) {
2143 STACKADJ(-1);
2144 Py_DECREF(w);
2145 err = 0;
2146 }
2147 else if (err == 0)
2148 JUMPTO(oparg);
2149 else
2150 break;
2151 DISPATCH();
2152
2153 TARGET(JUMP_IF_TRUE_OR_POP)
2154 w = TOP();
2155 if (w == Py_False) {
2156 STACKADJ(-1);
2157 Py_DECREF(w);
2158 FAST_DISPATCH();
2159 }
2160 if (w == Py_True) {
2161 JUMPTO(oparg);
2162 FAST_DISPATCH();
2163 }
2164 err = PyObject_IsTrue(w);
2165 if (err > 0) {
2166 err = 0;
2167 JUMPTO(oparg);
2168 }
2169 else if (err == 0) {
2170 STACKADJ(-1);
2171 Py_DECREF(w);
2172 }
2173 else
2174 break;
2175 DISPATCH();
2176
2177 PREDICTED_WITH_ARG(JUMP_ABSOLUTE);
2178 TARGET(JUMP_ABSOLUTE)
2179 JUMPTO(oparg);
2180 #if FAST_LOOPS
2181 /* Enabling this path speeds up all while- and for-loops by bypassing
2182 the per-loop checks for signals. By default, this should be turned off
2183 because it prevents detection of a control-break in tight loops like
2184 "while 1: pass". Compile with this option turned on when you need
2185 the speed-up and do not need break checking inside tight loops (ones
2186 that contain only instructions ending with FAST_DISPATCH).
2187 */
2188 FAST_DISPATCH();
2189 #else
2190 DISPATCH();
2191 #endif
2192
2193 TARGET(GET_ITER)
2194 /* before: [obj]; after [getiter(obj)] */
2195 v = TOP();
2196 x = PyObject_GetIter(v);
2197 Py_DECREF(v);
2198 if (x != NULL) {
2199 SET_TOP(x);
2200 PREDICT(FOR_ITER);
2201 DISPATCH();
2202 }
2203 STACKADJ(-1);
2204 break;
2205
2206 PREDICTED_WITH_ARG(FOR_ITER);
2207 TARGET(FOR_ITER)
2208 /* before: [iter]; after: [iter, iter()] *or* [] */
2209 v = TOP();
2210 x = (*v->ob_type->tp_iternext)(v);
2211 if (x != NULL) {
2212 PUSH(x);
2213 PREDICT(STORE_FAST);
2214 PREDICT(UNPACK_SEQUENCE);
2215 DISPATCH();
2216 }
2217 if (PyErr_Occurred()) {
2218 if (!PyErr_ExceptionMatches(
2219 PyExc_StopIteration))
2220 break;
2221 PyErr_Clear();
2222 }
2223 /* iterator ended normally */
2224 x = v = POP();
2225 Py_DECREF(v);
2226 JUMPBY(oparg);
2227 DISPATCH();
2228
2229 TARGET(BREAK_LOOP)
2230 why = WHY_BREAK;
2231 goto fast_block_end;
2232
2233 TARGET(CONTINUE_LOOP)
2234 retval = PyLong_FromLong(oparg);
2235 if (!retval) {
2236 x = NULL;
2237 break;
2238 }
2239 why = WHY_CONTINUE;
2240 goto fast_block_end;
2241
2242 TARGET_WITH_IMPL(SETUP_LOOP, _setup_finally)
2243 TARGET_WITH_IMPL(SETUP_EXCEPT, _setup_finally)
2244 TARGET(SETUP_FINALLY)
2245 _setup_finally:
2246 /* NOTE: If you add any new block-setup opcodes that
2247 are not try/except/finally handlers, you may need
2248 to update the PyGen_NeedsFinalizing() function.
2249 */
2250
2251 PyFrame_BlockSetup(f, opcode, INSTR_OFFSET() + oparg,
2252 STACK_LEVEL());
2253 DISPATCH();
2254
2255 TARGET(SETUP_WITH)
2256 {
2257 static PyObject *exit, *enter;
2258 w = TOP();
2259 x = special_lookup(w, "__exit__", &exit);
2260 if (!x)
2261 break;
2262 SET_TOP(x);
2263 u = special_lookup(w, "__enter__", &enter);
2264 Py_DECREF(w);
2265 if (!u) {
2266 x = NULL;
2267 break;
2268 }
2269 x = PyObject_CallFunctionObjArgs(u, NULL);
2270 Py_DECREF(u);
2271 if (!x)
2272 break;
2273 /* Set up the finally block before pushing the result
2274 of __enter__ on the stack. */
2275 PyFrame_BlockSetup(f, SETUP_FINALLY, INSTR_OFFSET() + oparg,
2276 STACK_LEVEL());
2277
2278 PUSH(x);
2279 DISPATCH();
2280 }
2281
2282 TARGET(WITH_CLEANUP)
2283 {
2284 /* At the top of the stack are 1-3 values indicating
2285 how/why we entered the finally clause:
2286 - TOP = None
2287 - (TOP, SECOND) = (WHY_{RETURN,CONTINUE}), retval
2288 - TOP = WHY_*; no retval below it
2289 - (TOP, SECOND, THIRD) = exc_info()
2290 (FOURTH, FIFTH, SIXTH) = previous exception for EXCEPT_HANDLER
2291 Below them is EXIT, the context.__exit__ bound method.
2292 In the last case, we must call
2293 EXIT(TOP, SECOND, THIRD)
2294 otherwise we must call
2295 EXIT(None, None, None)
2296
2297 In the None and WHY_* cases, we remove EXIT from the
2298 stack, leaving the rest in the same order. In the
2299 exception case, we shift the bottom 3 values of the
2300 stack down, and replace the empty spot with NULL.
2301
2302 In addition, if the stack represents an exception,
2303 *and* the function call returns a 'true' value, we
2304 push WHY_SILENCED onto the stack. END_FINALLY will
2305 then not re-raise the exception. (But non-local
2306 gotos should still be resumed.)
2307 */
2308
2309 PyObject *exit_func;
2310 u = TOP();
2311 if (u == Py_None) {
2312 (void)POP();
2313 exit_func = TOP();
2314 SET_TOP(u);
2315 v = w = Py_None;
2316 }
2317 else if (PyLong_Check(u)) {
2318 (void)POP();
2319 switch(PyLong_AsLong(u)) {
2320 case WHY_RETURN:
2321 case WHY_CONTINUE:
2322 /* Retval in TOP. */
2323 exit_func = SECOND();
2324 SET_SECOND(TOP());
2325 SET_TOP(u);
2326 break;
2327 default:
2328 exit_func = TOP();
2329 SET_TOP(u);
2330 break;
2331 }
2332 u = v = w = Py_None;
2333 }
2334 else {
2335 PyObject *tp, *exc, *tb;
2336 PyTryBlock *block;
2337 v = SECOND();
2338 w = THIRD();
2339 tp = FOURTH();
2340 exc = PEEK(5);
2341 tb = PEEK(6);
2342 exit_func = PEEK(7);
2343 SET_VALUE(7, tb);
2344 SET_VALUE(6, exc);
2345 SET_VALUE(5, tp);
2346 /* UNWIND_EXCEPT_HANDLER will pop this off. */
2347 SET_FOURTH(NULL);
2348 /* We just shifted the stack down, so we have
2349 to tell the except handler block that the
2350 values are lower than it expects. */
2351 block = &f->f_blockstack[f->f_iblock - 1];
2352 assert(block->b_type == EXCEPT_HANDLER);
2353 block->b_level--;
2354 }
2355 /* XXX Not the fastest way to call it... */
2356 x = PyObject_CallFunctionObjArgs(exit_func, u, v, w,
2357 NULL);
2358 Py_DECREF(exit_func);
2359 if (x == NULL)
2360 break; /* Go to error exit */
2361
2362 if (u != Py_None)
2363 err = PyObject_IsTrue(x);
2364 else
2365 err = 0;
2366 Py_DECREF(x);
2367
2368 if (err < 0)
2369 break; /* Go to error exit */
2370 else if (err > 0) {
2371 err = 0;
2372 /* There was an exception and a True return */
2373 PUSH(PyLong_FromLong((long) WHY_SILENCED));
2374 }
2375 PREDICT(END_FINALLY);
2376 break;
2377 }
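                        /* For illustration only: the SETUP_WITH / WITH_CLEANUP pair is the
                           bytecode form of the context-manager protocol.  For a body that
                           raises no exception, equivalent hand-written C API code would look
                           roughly like the hedged sketch below; "run_with_sketch" and "mgr"
                           are invented names and error handling is trimmed to the essentials.

                               static int
                               run_with_sketch(PyObject *mgr)
                               {
                                   PyObject *enter_ret, *exit_ret;
                                   enter_ret = PyObject_CallMethod(mgr, "__enter__", NULL);
                                   if (enter_ret == NULL)
                                       return -1;
                                   (the body of the with-block runs here, usually via enter_ret)
                                   Py_DECREF(enter_ret);
                                   exit_ret = PyObject_CallMethod(mgr, "__exit__", "OOO",
                                                                  Py_None, Py_None, Py_None);
                                   if (exit_ret == NULL)
                                       return -1;
                                   Py_DECREF(exit_ret);
                                   return 0;
                               }

                           When the body does raise, WITH_CLEANUP instead passes the live
                           (type, value, traceback) triple to __exit__ and, if that call
                           returns a true value, pushes WHY_SILENCED so that END_FINALLY
                           swallows the exception. */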
2378
2379 TARGET(CALL_FUNCTION)
2380 {
2381 PyObject **sp;
2382 PCALL(PCALL_ALL);
2383 sp = stack_pointer;
2384 x = call_function(&sp, oparg);
2385 stack_pointer = sp;
2386 PUSH(x);
2387 if (x != NULL)
2388 DISPATCH();
2389 break;
2390 }
2391
2392 TARGET_WITH_IMPL(CALL_FUNCTION_VAR, _call_function_var_kw)
2393 TARGET_WITH_IMPL(CALL_FUNCTION_KW, _call_function_var_kw)
2394 TARGET(CALL_FUNCTION_VAR_KW)
2395 _call_function_var_kw:
2396 {
2397 int na = oparg & 0xff;
2398 int nk = (oparg>>8) & 0xff;
2399 int flags = (opcode - CALL_FUNCTION) & 3;
2400 int n = na + 2 * nk;
2401 PyObject **pfunc, *func, **sp;
2402 PCALL(PCALL_ALL);
2403 if (flags & CALL_FLAG_VAR)
2404 n++;
2405 if (flags & CALL_FLAG_KW)
2406 n++;
2407 pfunc = stack_pointer - n - 1;
2408 func = *pfunc;
2409
2410 if (PyMethod_Check(func)
2411 && PyMethod_GET_SELF(func) != NULL) {
2412 PyObject *self = PyMethod_GET_SELF(func);
2413 Py_INCREF(self);
2414 func = PyMethod_GET_FUNCTION(func);
2415 Py_INCREF(func);
2416 Py_DECREF(*pfunc);
2417 *pfunc = self;
2418 na++;
2419 /* n++; */
2420 } else
2421 Py_INCREF(func);
2422 sp = stack_pointer;
2423 READ_TIMESTAMP(intr0);
2424 x = ext_do_call(func, &sp, flags, na, nk);
2425 READ_TIMESTAMP(intr1);
2426 stack_pointer = sp;
2427 Py_DECREF(func);
2428
2429 while (stack_pointer > pfunc) {
2430 w = POP();
2431 Py_DECREF(w);
2432 }
2433 PUSH(x);
2434 if (x != NULL)
2435 DISPATCH();
2436 break;
2437 }
2438
2439 TARGET_WITH_IMPL(MAKE_CLOSURE, _make_function)
2440 TARGET(MAKE_FUNCTION)
2441 _make_function:
2442 {
2443 int posdefaults = oparg & 0xff;
2444 int kwdefaults = (oparg>>8) & 0xff;
2445 int num_annotations = (oparg >> 16) & 0x7fff;
2446
2447 w = POP(); /* qualname */
2448 v = POP(); /* code object */
2449 x = PyFunction_NewWithQualName(v, f->f_globals, w);
2450 Py_DECREF(v);
2451 Py_DECREF(w);
2452
2453 if (x != NULL && opcode == MAKE_CLOSURE) {
2454 v = POP();
2455 if (PyFunction_SetClosure(x, v) != 0) {
2456 /* Can't happen unless bytecode is corrupt. */
2457 why = WHY_EXCEPTION;
2458 }
2459 Py_DECREF(v);
2460 }
2461
2462 if (x != NULL && num_annotations > 0) {
2463 Py_ssize_t name_ix;
2464 u = POP(); /* names of args with annotations */
2465 v = PyDict_New();
2466 if (v == NULL) {
2467 Py_DECREF(x);
2468 x = NULL;
2469 break;
2470 }
2471 name_ix = PyTuple_Size(u);
2472 assert(num_annotations == name_ix+1);
2473 while (name_ix > 0) {
2474 --name_ix;
2475 t = PyTuple_GET_ITEM(u, name_ix);
2476 w = POP();
2477 /* XXX(nnorwitz): check for errors */
2478 PyDict_SetItem(v, t, w);
2479 Py_DECREF(w);
2480 }
2481
2482 if (PyFunction_SetAnnotations(x, v) != 0) {
2483 /* Can't happen unless
2484 PyFunction_SetAnnotations changes. */
2485 why = WHY_EXCEPTION;
2486 }
2487 Py_DECREF(v);
2488 Py_DECREF(u);
2489 }
2490
2491 /* XXX Maybe this should be a separate opcode? */
2492 if (x != NULL && posdefaults > 0) {
2493 v = PyTuple_New(posdefaults);
2494 if (v == NULL) {
2495 Py_DECREF(x);
2496 x = NULL;
2497 break;
2498 }
2499 while (--posdefaults >= 0) {
2500 w = POP();
2501 PyTuple_SET_ITEM(v, posdefaults, w);
2502 }
2503 if (PyFunction_SetDefaults(x, v) != 0) {
2504 /* Can't happen unless
2505 PyFunction_SetDefaults changes. */
2506 why = WHY_EXCEPTION;
2507 }
2508 Py_DECREF(v);
2509 }
2510 if (x != NULL && kwdefaults > 0) {
2511 v = PyDict_New();
2512 if (v == NULL) {
2513 Py_DECREF(x);
2514 x = NULL;
2515 break;
2516 }
2517 while (--kwdefaults >= 0) {
2518 w = POP(); /* default value */
2519 u = POP(); /* kw only arg name */
2520 /* XXX(nnorwitz): check for errors */
2521 PyDict_SetItem(v, u, w);
2522 Py_DECREF(w);
2523 Py_DECREF(u);
2524 }
2525 if (PyFunction_SetKwDefaults(x, v) != 0) {
2526 /* Can't happen unless
2527 PyFunction_SetKwDefaults changes. */
2528 why = WHY_EXCEPTION;
2529 }
2530 Py_DECREF(v);
2531 }
2532 PUSH(x);
2533 break;
2534 }
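                        /* For illustration only: the MAKE_FUNCTION / MAKE_CLOSURE oparg packs
                           three counts, as the decode at the top of the handler shows:

                               posdefaults     = oparg & 0xff;            (low byte)
                               kwdefaults      = (oparg >> 8) & 0xff;     (next byte)
                               num_annotations = (oparg >> 16) & 0x7fff;

                           As a hypothetical worked example, a definition such as

                               def f(a, b=1, *, c=2) -> int: ...

                           carries one positional default, one keyword-only default pair and
                           two annotation items (the int object plus the ('return',) name
                           tuple), so under this encoding its MAKE_FUNCTION would be emitted
                           with oparg == 0x020101. */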
2535
2536 TARGET(BUILD_SLICE)
2537 if (oparg == 3)
2538 w = POP();
2539 else
2540 w = NULL;
2541 v = POP();
2542 u = TOP();
2543 x = PySlice_New(u, v, w);
2544 Py_DECREF(u);
2545 Py_DECREF(v);
2546 Py_XDECREF(w);
2547 SET_TOP(x);
2548 if (x != NULL) DISPATCH();
2549 break;
2550
2551 TARGET(EXTENDED_ARG)
2552 opcode = NEXTOP();
2553 oparg = oparg<<16 | NEXTARG();
2554 goto dispatch_opcode;
2555
2556 #if USE_COMPUTED_GOTOS
2557 _unknown_opcode:
2558 #endif
2559 default:
2560 fprintf(stderr,
2561 "XXX lineno: %d, opcode: %d\n",
2562 PyFrame_GetLineNumber(f),
2563 opcode);
2564 PyErr_SetString(PyExc_SystemError, "unknown opcode");
2565 why = WHY_EXCEPTION;
2566 break;
2567
2568
2569 } /* switch */
2570
2571 on_error:
2572
2573 READ_TIMESTAMP(inst1);
2574
2575 /* Quickly continue if no error occurred */
2576
2577 if (why == WHY_NOT) {
2578 if (err == 0 && x != NULL) {
2579 #ifdef CHECKEXC
2580 /* This check is expensive! */
2581 if (PyErr_Occurred())
2582 fprintf(stderr,
2583 "XXX undetected error\n");
2584 else {
2585 #endif
2586 READ_TIMESTAMP(loop1);
2587 continue; /* Normal, fast path */
2588 #ifdef CHECKEXC
2589 }
2590 #endif
2591 }
2592 why = WHY_EXCEPTION;
2593 x = Py_None;
2594 err = 0;
2595 }
2596
2597 /* Double-check exception status */
2598
2599 if (why == WHY_EXCEPTION || why == WHY_RERAISE) {
2600 if (!PyErr_Occurred()) {
2601 PyErr_SetString(PyExc_SystemError,
2602 "error return without exception set");
2603 why = WHY_EXCEPTION;
2604 }
2605 }
2606 #ifdef CHECKEXC
2607 else {
2608 /* This check is expensive! */
2609 if (PyErr_Occurred()) {
2610 char buf[128];
2611 sprintf(buf, "Stack unwind with exception "
2612 "set and why=%d", why);
2613 Py_FatalError(buf);
2614 }
2615 }
2616 #endif
2617
2618 /* Log traceback info if this is a real exception */
2619
2620 if (why == WHY_EXCEPTION) {
2621 PyTraceBack_Here(f);
2622
2623 if (tstate->c_tracefunc != NULL)
2624 call_exc_trace(tstate->c_tracefunc,
2625 tstate->c_traceobj, f);
2626 }
2627
2628 /* For the rest, treat WHY_RERAISE as WHY_EXCEPTION */
2629
2630 if (why == WHY_RERAISE)
2631 why = WHY_EXCEPTION;
2632
2633 /* Unwind stacks if a (pseudo) exception occurred */
2634
2635 fast_block_end:
2636 while (why != WHY_NOT && f->f_iblock > 0) {
2637 /* Peek at the current block. */
2638 PyTryBlock *b = &f->f_blockstack[f->f_iblock - 1];
2639
2640 assert(why != WHY_YIELD);
2641 if (b->b_type == SETUP_LOOP && why == WHY_CONTINUE) {
2642 why = WHY_NOT;
2643 JUMPTO(PyLong_AS_LONG(retval));
2644 Py_DECREF(retval);
2645 break;
2646 }
2647 /* Now we have to pop the block. */
2648 f->f_iblock--;
2649
2650 if (b->b_type == EXCEPT_HANDLER) {
2651 UNWIND_EXCEPT_HANDLER(b);
2652 continue;
2653 }
2654 UNWIND_BLOCK(b);
2655 if (b->b_type == SETUP_LOOP && why == WHY_BREAK) {
2656 why = WHY_NOT;
2657 JUMPTO(b->b_handler);
2658 break;
2659 }
2660 if (why == WHY_EXCEPTION && (b->b_type == SETUP_EXCEPT
2661 || b->b_type == SETUP_FINALLY)) {
2662 PyObject *exc, *val, *tb;
2663 int handler = b->b_handler;
2664 /* Beware, this invalidates all b->b_* fields */
2665 PyFrame_BlockSetup(f, EXCEPT_HANDLER, -1, STACK_LEVEL());
2666 PUSH(tstate->exc_traceback);
2667 PUSH(tstate->exc_value);
2668 if (tstate->exc_type != NULL) {
2669 PUSH(tstate->exc_type);
2670 }
2671 else {
2672 Py_INCREF(Py_None);
2673 PUSH(Py_None);
2674 }
2675 PyErr_Fetch(&exc, &val, &tb);
2676 /* Make the raw exception data
2677 available to the handler,
2678 so a program can emulate the
2679 Python main loop. */
2680 PyErr_NormalizeException(
2681 &exc, &val, &tb);
2682 PyException_SetTraceback(val, tb);
2683 Py_INCREF(exc);
2684 tstate->exc_type = exc;
2685 Py_INCREF(val);
2686 tstate->exc_value = val;
2687 tstate->exc_traceback = tb;
2688 if (tb == NULL)
2689 tb = Py_None;
2690 Py_INCREF(tb);
2691 PUSH(tb);
2692 PUSH(val);
2693 PUSH(exc);
2694 why = WHY_NOT;
2695 JUMPTO(handler);
2696 break;
2697 }
2698 if (b->b_type == SETUP_FINALLY) {
2699 if (why & (WHY_RETURN | WHY_CONTINUE))
2700 PUSH(retval);
2701 PUSH(PyLong_FromLong((long)why));
2702 why = WHY_NOT;
2703 JUMPTO(b->b_handler);
2704 break;
2705 }
2706 } /* unwind stack */
2707
2708 /* End the loop if we still have an error (or return) */
2709
2710 if (why != WHY_NOT)
2711 break;
2712 READ_TIMESTAMP(loop1);
2713
2714 } /* main loop */
2715
2716 assert(why != WHY_YIELD);
2717 /* Pop remaining stack entries. */
2718 while (!EMPTY()) {
2719 v = POP();
2720 Py_XDECREF(v);
2721 }
2722
2723 if (why != WHY_RETURN)
2724 retval = NULL;
2725
2726 fast_yield:
2727 if (co->co_flags & CO_GENERATOR && (why == WHY_YIELD || why == WHY_RETURN)) {
2728 /* The purpose of this block is to put aside the generator's exception
2729 state and restore that of the calling frame. If the current
2730 exception state is from the caller, we clear the exception values
2731 on the generator frame, so they are not swapped back in later. The
2732 origin of the current exception state is determined by checking for
2733 except handler blocks, which we must be in iff a new exception
2734 state came into existence in this frame. (An uncaught exception
2735 would have why == WHY_EXCEPTION, and we wouldn't be here). */
2736 int i;
2737 for (i = 0; i < f->f_iblock; i++)
2738 if (f->f_blockstack[i].b_type == EXCEPT_HANDLER)
2739 break;
2740 if (i == f->f_iblock)
2741 /* We did not create this exception. */
2742 restore_and_clear_exc_state(tstate, f);
2743 else
2744 swap_exc_state(tstate, f);
2745 }
2746
2747 if (tstate->use_tracing) {
2748 if (tstate->c_tracefunc) {
2749 if (why == WHY_RETURN || why == WHY_YIELD) {
2750 if (call_trace(tstate->c_tracefunc,
2751 tstate->c_traceobj, f,
2752 PyTrace_RETURN, retval)) {
2753 Py_XDECREF(retval);
2754 retval = NULL;
2755 why = WHY_EXCEPTION;
2756 }
2757 }
2758 else if (why == WHY_EXCEPTION) {
2759 call_trace_protected(tstate->c_tracefunc,
2760 tstate->c_traceobj, f,
2761 PyTrace_RETURN, NULL);
2762 }
2763 }
2764 if (tstate->c_profilefunc) {
2765 if (why == WHY_EXCEPTION)
2766 call_trace_protected(tstate->c_profilefunc,
2767 tstate->c_profileobj, f,
2768 PyTrace_RETURN, NULL);
2769 else if (call_trace(tstate->c_profilefunc,
2770 tstate->c_profileobj, f,
2771 PyTrace_RETURN, retval)) {
2772 Py_XDECREF(retval);
2773 retval = NULL;
2774 /* why = WHY_EXCEPTION; */
2775 }
2776 }
2777 }
2778
2779 /* pop frame */
2780 exit_eval_frame:
2781 Py_LeaveRecursiveCall();
2782 tstate->frame = f->f_back;
2783
2784 return retval;
2785 }
2786
2787 static void
2788 format_missing(const char *kind, PyCodeObject *co, PyObject *names)
2789 {
2790 int err;
2791 Py_ssize_t len = PyList_GET_SIZE(names);
2792 PyObject *name_str, *comma, *tail, *tmp;
2793
2794 assert(PyList_CheckExact(names));
2795 assert(len >= 1);
2796 /* Deal with the joys of natural language. */
2797 switch (len) {
2798 case 1:
2799 name_str = PyList_GET_ITEM(names, 0);
2800 Py_INCREF(name_str);
2801 break;
2802 case 2:
2803 name_str = PyUnicode_FromFormat("%U and %U",
2804 PyList_GET_ITEM(names, len - 2),
2805 PyList_GET_ITEM(names, len - 1));
2806 break;
2807 default:
2808 tail = PyUnicode_FromFormat(", %U, and %U",
2809 PyList_GET_ITEM(names, len - 2),
2810 PyList_GET_ITEM(names, len - 1));
2811 /* Chop off the last two objects in the list. This shouldn't actually
2812 fail, but we can't be too careful. */
2813 err = PyList_SetSlice(names, len - 2, len, NULL);
2814 if (err == -1) {
2815 Py_DECREF(tail);
2816 return;
2817 }
2818 /* Stitch everything up into a nice comma-separated list. */
2819 comma = PyUnicode_FromString(", ");
2820 if (comma == NULL) {
2821 Py_DECREF(tail);
2822 return;
2823 }
2824 tmp = PyUnicode_Join(comma, names);
2825 Py_DECREF(comma);
2826 if (tmp == NULL) {
2827 Py_DECREF(tail);
2828 return;
2829 }
2830 name_str = PyUnicode_Concat(tmp, tail);
2831 Py_DECREF(tmp);
2832 Py_DECREF(tail);
2833 break;
2834 }
2835 if (name_str == NULL)
2836 return;
2837 PyErr_Format(PyExc_TypeError,
2838 "%U() missing %i required %s argument%s: %U",
2839 co->co_name,
2840 len,
2841 kind,
2842 len == 1 ? "" : "s",
2843 name_str);
2844 Py_DECREF(name_str);
2845 }
2846
2847 static void
2848 missing_arguments(PyCodeObject *co, int missing, int defcount,
2849 PyObject **fastlocals)
2850 {
2851 int i, j = 0;
2852 int start, end;
2853 int positional = defcount != -1;
2854 const char *kind = positional ? "positional" : "keyword-only";
2855 PyObject *missing_names;
2856
2857 /* Compute the names of the arguments that are missing. */
2858 missing_names = PyList_New(missing);
2859 if (missing_names == NULL)
2860 return;
2861 if (positional) {
2862 start = 0;
2863 end = co->co_argcount - defcount;
2864 }
2865 else {
2866 start = co->co_argcount;
2867 end = start + co->co_kwonlyargcount;
2868 }
2869 for (i = start; i < end; i++) {
2870 if (GETLOCAL(i) == NULL) {
2871 PyObject *raw = PyTuple_GET_ITEM(co->co_varnames, i);
2872 PyObject *name = PyObject_Repr(raw);
2873 if (name == NULL) {
2874 Py_DECREF(missing_names);
2875 return;
2876 }
2877 PyList_SET_ITEM(missing_names, j++, name);
2878 }
2879 }
2880 assert(j == missing);
2881 format_missing(kind, co, missing_names);
2882 Py_DECREF(missing_names);
2883 }
2884
2885 static void
2886 too_many_positional(PyCodeObject *co, int given, int defcount, PyObject **fastlocals)
2887 {
2888 int plural;
2889 int kwonly_given = 0;
2890 int i;
2891 PyObject *sig, *kwonly_sig;
2892
2893 assert((co->co_flags & CO_VARARGS) == 0);
2894 /* Count missing keyword-only args. */
2895 for (i = co->co_argcount; i < co->co_argcount + co->co_kwonlyargcount; i++)
2896 if (GETLOCAL(i) != NULL)
2897 kwonly_given++;
2898 if (defcount) {
2899 int atleast = co->co_argcount - defcount;
2900 plural = 1;
2901 sig = PyUnicode_FromFormat("from %d to %d", atleast, co->co_argcount);
2902 }
2903 else {
2904 plural = co->co_argcount != 1;
2905 sig = PyUnicode_FromFormat("%d", co->co_argcount);
2906 }
2907 if (sig == NULL)
2908 return;
2909 if (kwonly_given) {
2910 const char *format = " positional argument%s (and %d keyword-only argument%s)";
2911 kwonly_sig = PyUnicode_FromFormat(format, given != 1 ? "s" : "", kwonly_given,
2912 kwonly_given != 1 ? "s" : "");
2913 if (kwonly_sig == NULL) {
2914 Py_DECREF(sig);
2915 return;
2916 }
2917 }
2918 else {
2919 /* This will not fail. */
2920 kwonly_sig = PyUnicode_FromString("");
2921 assert(kwonly_sig != NULL);
2922 }
2923 PyErr_Format(PyExc_TypeError,
2924 "%U() takes %U positional argument%s but %d%U %s given",
2925 co->co_name,
2926 sig,
2927 plural ? "s" : "",
2928 given,
2929 kwonly_sig,
2930 given == 1 && !kwonly_given ? "was" : "were");
2931 Py_DECREF(sig);
2932 Py_DECREF(kwonly_sig);
2933 }
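/* For illustration only (hypothetical inputs, not from the original source):
   for a function defined as "def f(a, b, c=0, *, d=None)" and called as
   "f(1, 2, 3, 4, d=5)", the formatting above would produce a message along
   the lines of

       TypeError: f() takes from 2 to 3 positional arguments but 4
       positional arguments (and 1 keyword-only argument) were given

   while the simpler "def g(a)" called as "g(1, 2)" yields

       TypeError: g() takes 1 positional argument but 2 were given
*/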
2934
2935 /* This is going to seem *really weird*, but if you put some other code between
2936 PyEval_EvalFrame() and PyEval_EvalCodeEx() you will need to adjust
2937 the test in the if statements in Misc/gdbinit (pystack and pystackv). */
2938
2939 PyObject *
2940 PyEval_EvalCodeEx(PyObject *_co, PyObject *globals, PyObject *locals,
2941 PyObject **args, int argcount, PyObject **kws, int kwcount,
2942 PyObject **defs, int defcount, PyObject *kwdefs, PyObject *closure)
2943 {
2944 PyCodeObject* co = (PyCodeObject*)_co;
2945 register PyFrameObject *f;
2946 register PyObject *retval = NULL;
2947 register PyObject **fastlocals, **freevars;
2948 PyThreadState *tstate = PyThreadState_GET();
2949 PyObject *x, *u;
2950 int total_args = co->co_argcount + co->co_kwonlyargcount;
2951 int i;
2952 int n = argcount;
2953 PyObject *kwdict = NULL;
2954
2955 if (globals == NULL) {
2956 PyErr_SetString(PyExc_SystemError,
2957 "PyEval_EvalCodeEx: NULL globals");
2958 return NULL;
2959 }
2960
2961 assert(tstate != NULL);
2962 assert(globals != NULL);
2963 f = PyFrame_New(tstate, co, globals, locals);
2964 if (f == NULL)
2965 return NULL;
2966
2967 fastlocals = f->f_localsplus;
2968 freevars = f->f_localsplus + co->co_nlocals;
2969
2970 /* Parse arguments. */
2971 if (co->co_flags & CO_VARKEYWORDS) {
2972 kwdict = PyDict_New();
2973 if (kwdict == NULL)
2974 goto fail;
2975 i = total_args;
2976 if (co->co_flags & CO_VARARGS)
2977 i++;
2978 SETLOCAL(i, kwdict);
2979 }
2980 if (argcount > co->co_argcount)
2981 n = co->co_argcount;
2982 for (i = 0; i < n; i++) {
2983 x = args[i];
2984 Py_INCREF(x);
2985 SETLOCAL(i, x);
2986 }
2987 if (co->co_flags & CO_VARARGS) {
2988 u = PyTuple_New(argcount - n);
2989 if (u == NULL)
2990 goto fail;
2991 SETLOCAL(total_args, u);
2992 for (i = n; i < argcount; i++) {
2993 x = args[i];
2994 Py_INCREF(x);
2995 PyTuple_SET_ITEM(u, i-n, x);
2996 }
2997 }
2998 for (i = 0; i < kwcount; i++) {
2999 PyObject **co_varnames;
3000 PyObject *keyword = kws[2*i];
3001 PyObject *value = kws[2*i + 1];
3002 int j;
3003 if (keyword == NULL || !PyUnicode_Check(keyword)) {
3004 PyErr_Format(PyExc_TypeError,
3005 "%U() keywords must be strings",
3006 co->co_name);
3007 goto fail;
3008 }
3009 /* Speed hack: do raw pointer compares. As names are
3010 normally interned this should almost always hit. */
3011 co_varnames = ((PyTupleObject *)(co->co_varnames))->ob_item;
3012 for (j = 0; j < total_args; j++) {
3013 PyObject *nm = co_varnames[j];
3014 if (nm == keyword)
3015 goto kw_found;
3016 }
3017 /* Slow fallback, just in case */
3018 for (j = 0; j < total_args; j++) {
3019 PyObject *nm = co_varnames[j];
3020 int cmp = PyObject_RichCompareBool(
3021 keyword, nm, Py_EQ);
3022 if (cmp > 0)
3023 goto kw_found;
3024 else if (cmp < 0)
3025 goto fail;
3026 }
3027 if (j >= total_args && kwdict == NULL) {
3028 PyErr_Format(PyExc_TypeError,
3029 "%U() got an unexpected "
3030 "keyword argument '%S'",
3031 co->co_name,
3032 keyword);
3033 goto fail;
3034 }
3035 PyDict_SetItem(kwdict, keyword, value);
3036 continue;
3037 kw_found:
3038 if (GETLOCAL(j) != NULL) {
3039 PyErr_Format(PyExc_TypeError,
3040 "%U() got multiple "
3041 "values for argument '%S'",
3042 co->co_name,
3043 keyword);
3044 goto fail;
3045 }
3046 Py_INCREF(value);
3047 SETLOCAL(j, value);
3048 }
3049 if (argcount > co->co_argcount && !(co->co_flags & CO_VARARGS)) {
3050 too_many_positional(co, argcount, defcount, fastlocals);
3051 goto fail;
3052 }
3053 if (argcount < co->co_argcount) {
3054 int m = co->co_argcount - defcount;
3055 int missing = 0;
3056 for (i = argcount; i < m; i++)
3057 if (GETLOCAL(i) == NULL)
3058 missing++;
3059 if (missing) {
3060 missing_arguments(co, missing, defcount, fastlocals);
3061 goto fail;
3062 }
3063 if (n > m)
3064 i = n - m;
3065 else
3066 i = 0;
3067 for (; i < defcount; i++) {
3068 if (GETLOCAL(m+i) == NULL) {
3069 PyObject *def = defs[i];
3070 Py_INCREF(def);
3071 SETLOCAL(m+i, def);
3072 }
3073 }
3074 }
3075 if (co->co_kwonlyargcount > 0) {
3076 int missing = 0;
3077 for (i = co->co_argcount; i < total_args; i++) {
3078 PyObject *name;
3079 if (GETLOCAL(i) != NULL)
3080 continue;
3081 name = PyTuple_GET_ITEM(co->co_varnames, i);
3082 if (kwdefs != NULL) {
3083 PyObject *def = PyDict_GetItem(kwdefs, name);
3084 if (def) {
3085 Py_INCREF(def);
3086 SETLOCAL(i, def);
3087 continue;
3088 }
3089 }
3090 missing++;
3091 }
3092 if (missing) {
3093 missing_arguments(co, missing, -1, fastlocals);
3094 goto fail;
3095 }
3096 }
3097
3098 /* Allocate and initialize storage for cell vars, and copy free
3099 vars into frame. */
3100 for (i = 0; i < PyTuple_GET_SIZE(co->co_cellvars); ++i) {
3101 PyObject *c;
3102 int arg;
3103 /* Possibly account for the cell variable being an argument. */
3104 if (co->co_cell2arg != NULL &&
3105 (arg = co->co_cell2arg[i]) != CO_CELL_NOT_AN_ARG)
3106 c = PyCell_New(GETLOCAL(arg));
3107 else
3108 c = PyCell_New(NULL);
3109 if (c == NULL)
3110 goto fail;
3111 SETLOCAL(co->co_nlocals + i, c);
3112 }
3113 for (i = 0; i < PyTuple_GET_SIZE(co->co_freevars); ++i) {
3114 PyObject *o = PyTuple_GET_ITEM(closure, i);
3115 Py_INCREF(o);
3116 freevars[PyTuple_GET_SIZE(co->co_cellvars) + i] = o;
3117 }
3118
3119 if (co->co_flags & CO_GENERATOR) {
3120 /* Don't need to keep the reference to f_back, it will be set
3121 * when the generator is resumed. */
3122 Py_XDECREF(f->f_back);
3123 f->f_back = NULL;
3124
3125 PCALL(PCALL_GENERATOR);
3126
3127 /* Create a new generator that owns the ready to run frame
3128 * and return that as the value. */
3129 return PyGen_New(f);
3130 }
3131
3132 retval = PyEval_EvalFrameEx(f,0);
3133
3134 fail: /* Jump here from prelude on failure */
3135
3136 /* decref'ing the frame can cause __del__ methods to get invoked,
3137 which can call back into Python. While we're done with the
3138 current Python frame (f), the associated C stack is still in use,
3139 so recursion_depth must be boosted for the duration.
3140 */
3141 assert(tstate != NULL);
3142 ++tstate->recursion_depth;
3143 Py_DECREF(f);
3144 --tstate->recursion_depth;
3145 return retval;
3146 }
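/* For illustration only: a minimal way to drive PyEval_EvalCodeEx() directly,
   assuming an already initialized interpreter.  The name "eval_source_sketch"
   and the source string are invented; module-level code takes no arguments,
   so all the argument vectors are empty. */
static PyObject *
eval_source_sketch(const char *source)
{
    PyObject *code, *globals, *result;

    code = Py_CompileString(source, "<sketch>", Py_file_input);
    if (code == NULL)
        return NULL;
    globals = PyDict_New();
    if (globals == NULL) {
        Py_DECREF(code);
        return NULL;
    }
    /* Give the code a __builtins__, as the runtime normally would. */
    if (PyDict_SetItemString(globals, "__builtins__",
                             PyEval_GetBuiltins()) != 0) {
        Py_DECREF(globals);
        Py_DECREF(code);
        return NULL;
    }
    result = PyEval_EvalCodeEx(code, globals, globals,
                               NULL, 0,    /* positional args */
                               NULL, 0,    /* keyword args */
                               NULL, 0,    /* defaults */
                               NULL,       /* kw-only defaults */
                               NULL);      /* closure */
    Py_DECREF(globals);
    Py_DECREF(code);
    return result;                         /* usually Py_None on success */
}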
3147
3148
3149 static PyObject *
3150 special_lookup(PyObject *o, char *meth, PyObject **cache)
3151 {
3152 PyObject *res;
3153 res = _PyObject_LookupSpecial(o, meth, cache);
3154 if (res == NULL && !PyErr_Occurred()) {
3155 PyErr_SetObject(PyExc_AttributeError, *cache);
3156 return NULL;
3157 }
3158 return res;
3159 }
3160
3161
3162 /* These 3 functions deal with the exception state of generators. */
3163
3164 static void
3165 save_exc_state(PyThreadState *tstate, PyFrameObject *f)
3166 {
3167 PyObject *type, *value, *traceback;
3168 Py_XINCREF(tstate->exc_type);
3169 Py_XINCREF(tstate->exc_value);
3170 Py_XINCREF(tstate->exc_traceback);
3171 type = f->f_exc_type;
3172 value = f->f_exc_value;
3173 traceback = f->f_exc_traceback;
3174 f->f_exc_type = tstate->exc_type;
3175 f->f_exc_value = tstate->exc_value;
3176 f->f_exc_traceback = tstate->exc_traceback;
3177 Py_XDECREF(type);
3178 Py_XDECREF(value);
3179 Py_XDECREF(traceback);
3180 }
3181
3182 static void
3183 swap_exc_state(PyThreadState *tstate, PyFrameObject *f)
3184 {
3185 PyObject *tmp;
3186 tmp = tstate->exc_type;
3187 tstate->exc_type = f->f_exc_type;
3188 f->f_exc_type = tmp;
3189 tmp = tstate->exc_value;
3190 tstate->exc_value = f->f_exc_value;
3191 f->f_exc_value = tmp;
3192 tmp = tstate->exc_traceback;
3193 tstate->exc_traceback = f->f_exc_traceback;
3194 f->f_exc_traceback = tmp;
3195 }
3196
3197 static void
3198 restore_and_clear_exc_state(PyThreadState *tstate, PyFrameObject *f)
3199 {
3200 PyObject *type, *value, *tb;
3201 type = tstate->exc_type;
3202 value = tstate->exc_value;
3203 tb = tstate->exc_traceback;
3204 tstate->exc_type = f->f_exc_type;
3205 tstate->exc_value = f->f_exc_value;
3206 tstate->exc_traceback = f->f_exc_traceback;
3207 f->f_exc_type = NULL;
3208 f->f_exc_value = NULL;
3209 f->f_exc_traceback = NULL;
3210 Py_XDECREF(type);
3211 Py_XDECREF(value);
3212 Py_XDECREF(tb);
3213 }
3214
3215
3216 /* Logic for the raise statement (too complicated for inlining).
3217 This *consumes* a reference count to each of its arguments. */
3218 static enum why_code
3219 do_raise(PyObject *exc, PyObject *cause)
3220 {
3221 PyObject *type = NULL, *value = NULL;
3222
3223 if (exc == NULL) {
3224 /* Reraise */
3225 PyThreadState *tstate = PyThreadState_GET();
3226 PyObject *tb;
3227 type = tstate->exc_type;
3228 value = tstate->exc_value;
3229 tb = tstate->exc_traceback;
3230 if (type == Py_None) {
3231 PyErr_SetString(PyExc_RuntimeError,
3232 "No active exception to reraise");
3233 return WHY_EXCEPTION;
3234 }
3235 Py_XINCREF(type);
3236 Py_XINCREF(value);
3237 Py_XINCREF(tb);
3238 PyErr_Restore(type, value, tb);
3239 return WHY_RERAISE;
3240 }
3241
3242 /* We support the following forms of raise:
3243 raise
3244 raise <instance>
3245 raise <type> */
3246
3247 if (PyExceptionClass_Check(exc)) {
3248 type = exc;
3249 value = PyObject_CallObject(exc, NULL);
3250 if (value == NULL)
3251 goto raise_error;
3252 if (!PyExceptionInstance_Check(value)) {
3253 PyErr_Format(PyExc_TypeError,
3254 "calling %R should have returned an instance of "
3255 "BaseException, not %R",
3256 type, Py_TYPE(value));
3257 goto raise_error;
3258 }
3259 }
3260 else if (PyExceptionInstance_Check(exc)) {
3261 value = exc;
3262 type = PyExceptionInstance_Class(exc);
3263 Py_INCREF(type);
3264 }
3265 else {
3266 /* Not something you can raise. You get an exception
3267 anyway, just not what you specified :-) */
3268 Py_DECREF(exc);
3269 PyErr_SetString(PyExc_TypeError,
3270 "exceptions must derive from BaseException");
3271 goto raise_error;
3272 }
3273
3274 if (cause) {
3275 PyObject *fixed_cause;
3276 if (PyExceptionClass_Check(cause)) {
3277 fixed_cause = PyObject_CallObject(cause, NULL);
3278 if (fixed_cause == NULL)
3279 goto raise_error;
3280 Py_DECREF(cause);
3281 }
3282 else if (PyExceptionInstance_Check(cause)) {
3283 fixed_cause = cause;
3284 }
3285 else {
3286 PyErr_SetString(PyExc_TypeError,
3287 "exception causes must derive from "
3288 "BaseException");
3289 goto raise_error;
3290 }
3291 PyException_SetCause(value, fixed_cause);
3292 }
3293
3294 PyErr_SetObject(type, value);
3295 /* PyErr_SetObject incref's its arguments */
3296 Py_XDECREF(value);
3297 Py_XDECREF(type);
3298 return WHY_EXCEPTION;
3299
3300 raise_error:
3301 Py_XDECREF(value);
3302 Py_XDECREF(type);
3303 Py_XDECREF(cause);
3304 return WHY_EXCEPTION;
3305 }
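/* For illustration only: the moves do_raise() performs for
   "raise ValueError('...') from cause", written out with public C API calls.
   "raise_from_sketch" and "cause_exc" are invented names; cause_exc is
   assumed to be an exception instance owned by the caller. */
static void
raise_from_sketch(PyObject *cause_exc)
{
    PyObject *value = PyObject_CallFunction(PyExc_ValueError,
                                            "s", "something went wrong");
    if (value == NULL)
        return;
    Py_INCREF(cause_exc);                      /* SetCause steals a reference */
    PyException_SetCause(value, cause_exc);
    PyErr_SetObject(PyExc_ValueError, value);  /* increfs its arguments */
    Py_DECREF(value);
}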
3306
3307 /* Iterate v argcnt times and store the results on the stack (via decreasing
3308 sp). Return 1 for success, 0 if error.
3309
3310 If argcntafter == -1, do a simple unpack. If it is >= 0, do an unpack
3311 with a variable target.
3312 */
3313
3314 static int
3315 unpack_iterable(PyObject *v, int argcnt, int argcntafter, PyObject **sp)
3316 {
3317 int i = 0, j = 0;
3318 Py_ssize_t ll = 0;
3319 PyObject *it; /* iter(v) */
3320 PyObject *w;
3321 PyObject *l = NULL; /* variable list */
3322
3323 assert(v != NULL);
3324
3325 it = PyObject_GetIter(v);
3326 if (it == NULL)
3327 goto Error;
3328
3329 for (; i < argcnt; i++) {
3330 w = PyIter_Next(it);
3331 if (w == NULL) {
3332 /* Iterator done, via error or exhaustion. */
3333 if (!PyErr_Occurred()) {
3334 PyErr_Format(PyExc_ValueError,
3335 "need more than %d value%s to unpack",
3336 i, i == 1 ? "" : "s");
3337 }
3338 goto Error;
3339 }
3340 *--sp = w;
3341 }
3342
3343 if (argcntafter == -1) {
3344 /* We better have exhausted the iterator now. */
3345 w = PyIter_Next(it);
3346 if (w == NULL) {
3347 if (PyErr_Occurred())
3348 goto Error;
3349 Py_DECREF(it);
3350 return 1;
3351 }
3352 Py_DECREF(w);
3353 PyErr_Format(PyExc_ValueError, "too many values to unpack "
3354 "(expected %d)", argcnt);
3355 goto Error;
3356 }
3357
3358 l = PySequence_List(it);
3359 if (l == NULL)
3360 goto Error;
3361 *--sp = l;
3362 i++;
3363
3364 ll = PyList_GET_SIZE(l);
3365 if (ll < argcntafter) {
3366 PyErr_Format(PyExc_ValueError, "need more than %zd values to unpack",
3367 argcnt + ll);
3368 goto Error;
3369 }
3370
3371 /* Pop the "after-variable" args off the list. */
3372 for (j = argcntafter; j > 0; j--, i++) {
3373 *--sp = PyList_GET_ITEM(l, ll - j);
3374 }
3375 /* Resize the list. */
3376 Py_SIZE(l) = ll - argcntafter;
3377 Py_DECREF(it);
3378 return 1;
3379
3380 Error:
3381 for (; i > 0; i--, sp++)
3382 Py_DECREF(*sp);
3383 Py_XDECREF(it);
3384 return 0;
3385 }
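/* For illustration only: how a caller inside this file could use
   unpack_iterable() for the equivalent of "first, second, *rest = (1, 2, 3, 4)".
   "unpack_sketch" and "slots" are invented names; results are stored downward
   from the end of the array, mirroring the value stack. */
static int
unpack_sketch(void)
{
    PyObject *slots[3];
    PyObject *src = Py_BuildValue("(iiii)", 1, 2, 3, 4);
    if (src == NULL)
        return -1;
    /* 2 names before the star, 0 names after it. */
    if (!unpack_iterable(src, 2, 0, slots + 3)) {
        Py_DECREF(src);
        return -1;
    }
    /* slots[2] is 1, slots[1] is 2, slots[0] is the new list [3, 4];
       the caller now owns all three references. */
    Py_DECREF(slots[0]);
    Py_DECREF(slots[1]);
    Py_DECREF(slots[2]);
    Py_DECREF(src);
    return 0;
}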
3386
3387
3388 #ifdef LLTRACE
3389 static int
3390 prtrace(PyObject *v, char *str)
3391 {
3392 printf("%s ", str);
3393 if (PyObject_Print(v, stdout, 0) != 0)
3394 PyErr_Clear(); /* Don't know what else to do */
3395 printf("\n");
3396 return 1;
3397 }
3398 #endif
3399
3400 static void
3401 call_exc_trace(Py_tracefunc func, PyObject *self, PyFrameObject *f)
3402 {
3403 PyObject *type, *value, *traceback, *arg;
3404 int err;
3405 PyErr_Fetch(&type, &value, &traceback);
3406 if (value == NULL) {
3407 value = Py_None;
3408 Py_INCREF(value);
3409 }
3410 arg = PyTuple_Pack(3, type, value, traceback);
3411 if (arg == NULL) {
3412 PyErr_Restore(type, value, traceback);
3413 return;
3414 }
3415 err = call_trace(func, self, f, PyTrace_EXCEPTION, arg);
3416 Py_DECREF(arg);
3417 if (err == 0)
3418 PyErr_Restore(type, value, traceback);
3419 else {
3420 Py_XDECREF(type);
3421 Py_XDECREF(value);
3422 Py_XDECREF(traceback);
3423 }
3424 }
3425
3426 static int
3427 call_trace_protected(Py_tracefunc func, PyObject *obj, PyFrameObject *frame,
3428 int what, PyObject *arg)
3429 {
3430 PyObject *type, *value, *traceback;
3431 int err;
3432 PyErr_Fetch(&type, &value, &traceback);
3433 err = call_trace(func, obj, frame, what, arg);
3434 if (err == 0)
3435 {
3436 PyErr_Restore(type, value, traceback);
3437 return 0;
3438 }
3439 else {
3440 Py_XDECREF(type);
3441 Py_XDECREF(value);
3442 Py_XDECREF(traceback);
3443 return -1;
3444 }
3445 }
3446
3447 static int
3448 call_trace(Py_tracefunc func, PyObject *obj, PyFrameObject *frame,
3449 int what, PyObject *arg)
3450 {
3451 register PyThreadState *tstate = frame->f_tstate;
3452 int result;
3453 if (tstate->tracing)
3454 return 0;
3455 tstate->tracing++;
3456 tstate->use_tracing = 0;
3457 result = func(obj, frame, what, arg);
3458 tstate->use_tracing = ((tstate->c_tracefunc != NULL)
3459 || (tstate->c_profilefunc != NULL));
3460 tstate->tracing--;
3461 return result;
3462 }
3463
3464 PyObject *
3465 _PyEval_CallTracing(PyObject *func, PyObject *args)
3466 {
3467 PyFrameObject *frame = PyEval_GetFrame();
3468 PyThreadState *tstate = frame->f_tstate;
3469 int save_tracing = tstate->tracing;
3470 int save_use_tracing = tstate->use_tracing;
3471 PyObject *result;
3472
3473 tstate->tracing = 0;
3474 tstate->use_tracing = ((tstate->c_tracefunc != NULL)
3475 || (tstate->c_profilefunc != NULL));
3476 result = PyObject_Call(func, args, NULL);
3477 tstate->tracing = save_tracing;
3478 tstate->use_tracing = save_use_tracing;
3479 return result;
3480 }
3481
3482 /* See Objects/lnotab_notes.txt for a description of how tracing works. */
3483 static int
3484 maybe_call_line_trace(Py_tracefunc func, PyObject *obj,
3485 PyFrameObject *frame, int *instr_lb, int *instr_ub,
3486 int *instr_prev)
3487 {
3488 int result = 0;
3489 int line = frame->f_lineno;
3490
3491 /* If the last instruction executed isn't in the current
3492 instruction window, reset the window.
3493 */
3494 if (frame->f_lasti < *instr_lb || frame->f_lasti >= *instr_ub) {
3495 PyAddrPair bounds;
3496 line = _PyCode_CheckLineNumber(frame->f_code, frame->f_lasti,
3497 &bounds);
3498 *instr_lb = bounds.ap_lower;
3499 *instr_ub = bounds.ap_upper;
3500 }
3501 /* If the last instruction falls at the start of a line or if
3502 it represents a jump backwards, update the frame's line
3503 number and call the trace function. */
3504 if (frame->f_lasti == *instr_lb || frame->f_lasti < *instr_prev) {
3505 frame->f_lineno = line;
3506 result = call_trace(func, obj, frame, PyTrace_LINE, Py_None);
3507 }
3508 *instr_prev = frame->f_lasti;
3509 return result;
3510 }
3511
3512 void
3513 PyEval_SetProfile(Py_tracefunc func, PyObject *arg)
3514 {
3515 PyThreadState *tstate = PyThreadState_GET();
3516 PyObject *temp = tstate->c_profileobj;
3517 Py_XINCREF(arg);
3518 tstate->c_profilefunc = NULL;
3519 tstate->c_profileobj = NULL;
3520 /* Must make sure that tracing is not ignored if 'temp' is freed */
3521 tstate->use_tracing = tstate->c_tracefunc != NULL;
3522 Py_XDECREF(temp);
3523 tstate->c_profilefunc = func;
3524 tstate->c_profileobj = arg;
3525 /* Flag that tracing or profiling is turned on */
3526 tstate->use_tracing = (func != NULL) || (tstate->c_tracefunc != NULL);
3527 }
3528
3529 void
3530 PyEval_SetTrace(Py_tracefunc func, PyObject *arg)
3531 {
3532 PyThreadState *tstate = PyThreadState_GET();
3533 PyObject *temp = tstate->c_traceobj;
3534 _Py_TracingPossible += (func != NULL) - (tstate->c_tracefunc != NULL);
3535 Py_XINCREF(arg);
3536 tstate->c_tracefunc = NULL;
3537 tstate->c_traceobj = NULL;
3538 /* Must make sure that profiling is not ignored if 'temp' is freed */
3539 tstate->use_tracing = tstate->c_profilefunc != NULL;
3540 Py_XDECREF(temp);
3541 tstate->c_tracefunc = func;
3542 tstate->c_traceobj = arg;
3543 /* Flag that tracing or profiling is turned on */
3544 tstate->use_tracing = ((func != NULL)
3545 || (tstate->c_profilefunc != NULL));
3546 }
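/* For illustration only: a minimal C-level trace function installed with
   PyEval_SetTrace().  It merely counts line events; "lines_traced" and
   "count_lines_trace" are invented names. */
static Py_ssize_t lines_traced = 0;

static int
count_lines_trace(PyObject *obj, PyFrameObject *frame, int what, PyObject *arg)
{
    (void)obj; (void)frame; (void)arg;
    if (what == PyTrace_LINE)
        lines_traced++;
    return 0;       /* return -1 (with an exception set) to signal an error */
}

/* Typical use:              PyEval_SetTrace(count_lines_trace, NULL);
   and, to remove it again:  PyEval_SetTrace(NULL, NULL);                  */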
3547
3548 PyObject *
3549 PyEval_GetBuiltins(void)
3550 {
3551 PyFrameObject *current_frame = PyEval_GetFrame();
3552 if (current_frame == NULL)
3553 return PyThreadState_GET()->interp->builtins;
3554 else
3555 return current_frame->f_builtins;
3556 }
3557
3558 PyObject *
3559 PyEval_GetLocals(void)
3560 {
3561 PyFrameObject *current_frame = PyEval_GetFrame();
3562 if (current_frame == NULL)
3563 return NULL;
3564 PyFrame_FastToLocals(current_frame);
3565 return current_frame->f_locals;
3566 }
3567
3568 PyObject *
3569 PyEval_GetGlobals(void)
3570 {
3571 PyFrameObject *current_frame = PyEval_GetFrame();
3572 if (current_frame == NULL)
3573 return NULL;
3574 else
3575 return current_frame->f_globals;
3576 }
3577
3578 PyFrameObject *
3579 PyEval_GetFrame(void)
3580 {
3581 PyThreadState *tstate = PyThreadState_GET();
3582 return _PyThreadState_GetFrame(tstate);
3583 }
3584
3585 int
3586 PyEval_MergeCompilerFlags(PyCompilerFlags *cf)
3587 {
3588 PyFrameObject *current_frame = PyEval_GetFrame();
3589 int result = cf->cf_flags != 0;
3590
3591 if (current_frame != NULL) {
3592 const int codeflags = current_frame->f_code->co_flags;
3593 const int compilerflags = codeflags & PyCF_MASK;
3594 if (compilerflags) {
3595 result = 1;
3596 cf->cf_flags |= compilerflags;
3597 }
3598 #if 0 /* future keyword */
3599 if (codeflags & CO_GENERATOR_ALLOWED) {
3600 result = 1;
3601 cf->cf_flags |= CO_GENERATOR_ALLOWED;
3602 }
3603 #endif
3604 }
3605 return result;
3606 }
3607
3608
3609 /* External interface to call any callable object.
3610 The arg must be a tuple or NULL. The kw must be a dict or NULL. */
3611
3612 PyObject *
3613 PyEval_CallObjectWithKeywords(PyObject *func, PyObject *arg, PyObject *kw)
3614 {
3615 PyObject *result;
3616
3617 if (arg == NULL) {
3618 arg = PyTuple_New(0);
3619 if (arg == NULL)
3620 return NULL;
3621 }
3622 else if (!PyTuple_Check(arg)) {
3623 PyErr_SetString(PyExc_TypeError,
3624 "argument list must be a tuple");
3625 return NULL;
3626 }
3627 else
3628 Py_INCREF(arg);
3629
3630 if (kw != NULL && !PyDict_Check(kw)) {
3631 PyErr_SetString(PyExc_TypeError,
3632 "keyword list must be a dictionary");
3633 Py_DECREF(arg);
3634 return NULL;
3635 }
3636
3637 result = PyObject_Call(func, arg, kw);
3638 Py_DECREF(arg);
3639 return result;
3640 }
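/* For illustration only: calling an arbitrary callable with two positional
   arguments through the interface above.  "call_with_two_args_sketch" is an
   invented name. */
static PyObject *
call_with_two_args_sketch(PyObject *func, PyObject *a, PyObject *b)
{
    PyObject *result;
    PyObject *args = PyTuple_Pack(2, a, b);   /* new tuple, increfs a and b */
    if (args == NULL)
        return NULL;
    result = PyEval_CallObjectWithKeywords(func, args, NULL);
    Py_DECREF(args);
    return result;
}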
3641
3642 const char *
3643 PyEval_GetFuncName(PyObject *func)
3644 {
3645 if (PyMethod_Check(func))
3646 return PyEval_GetFuncName(PyMethod_GET_FUNCTION(func));
3647 else if (PyFunction_Check(func))
3648 return _PyUnicode_AsString(((PyFunctionObject*)func)->func_name);
3649 else if (PyCFunction_Check(func))
3650 return ((PyCFunctionObject*)func)->m_ml->ml_name;
3651 else
3652 return func->ob_type->tp_name;
3653 }
3654
3655 const char *
3656 PyEval_GetFuncDesc(PyObject *func)
3657 {
3658 if (PyMethod_Check(func))
3659 return "()";
3660 else if (PyFunction_Check(func))
3661 return "()";
3662 else if (PyCFunction_Check(func))
3663 return "()";
3664 else
3665 return " object";
3666 }
3667
3668 static void
3669 err_args(PyObject *func, int flags, int nargs)
3670 {
3671 if (flags & METH_NOARGS)
3672 PyErr_Format(PyExc_TypeError,
3673 "%.200s() takes no arguments (%d given)",
3674 ((PyCFunctionObject *)func)->m_ml->ml_name,
3675 nargs);
3676 else
3677 PyErr_Format(PyExc_TypeError,
3678 "%.200s() takes exactly one argument (%d given)",
3679 ((PyCFunctionObject *)func)->m_ml->ml_name,
3680 nargs);
3681 }
3682
3683 #define C_TRACE(x, call) \
3684 if (tstate->use_tracing && tstate->c_profilefunc) { \
3685 if (call_trace(tstate->c_profilefunc, \
3686 tstate->c_profileobj, \
3687 tstate->frame, PyTrace_C_CALL, \
3688 func)) { \
3689 x = NULL; \
3690 } \
3691 else { \
3692 x = call; \
3693 if (tstate->c_profilefunc != NULL) { \
3694 if (x == NULL) { \
3695 call_trace_protected(tstate->c_profilefunc, \
3696 tstate->c_profileobj, \
3697 tstate->frame, PyTrace_C_EXCEPTION, \
3698 func); \
3699 /* XXX should pass (type, value, tb) */ \
3700 } else { \
3701 if (call_trace(tstate->c_profilefunc, \
3702 tstate->c_profileobj, \
3703 tstate->frame, PyTrace_C_RETURN, \
3704 func)) { \
3705 Py_DECREF(x); \
3706 x = NULL; \
3707 } \
3708 } \
3709 } \
3710 } \
3711 } else { \
3712 x = call; \
3713 }
3714
3715 static PyObject *
3716 call_function(PyObject ***pp_stack, int oparg
3717 #ifdef WITH_TSC
3718 , uint64* pintr0, uint64* pintr1
3719 #endif
3720 )
3721 {
3722 int na = oparg & 0xff;
3723 int nk = (oparg>>8) & 0xff;
3724 int n = na + 2 * nk;
3725 PyObject **pfunc = (*pp_stack) - n - 1;
3726 PyObject *func = *pfunc;
3727 PyObject *x, *w;
3728
3729 /* Always dispatch PyCFunction first, because these are
3730 presumed to be the most frequent callable objects.
3731 */
3732 if (PyCFunction_Check(func) && nk == 0) {
3733 int flags = PyCFunction_GET_FLAGS(func);
3734 PyThreadState *tstate = PyThreadState_GET();
3735
3736 PCALL(PCALL_CFUNCTION);
3737 if (flags & (METH_NOARGS | METH_O)) {
3738 PyCFunction meth = PyCFunction_GET_FUNCTION(func);
3739 PyObject *self = PyCFunction_GET_SELF(func);
3740 if (flags & METH_NOARGS && na == 0) {
3741 C_TRACE(x, (*meth)(self,NULL));
3742 }
3743 else if (flags & METH_O && na == 1) {
3744 PyObject *arg = EXT_POP(*pp_stack);
3745 C_TRACE(x, (*meth)(self,arg));
3746 Py_DECREF(arg);
3747 }
3748 else {
3749 err_args(func, flags, na);
3750 x = NULL;
3751 }
3752 }
3753 else {
3754 PyObject *callargs;
3755 callargs = load_args(pp_stack, na);
3756 READ_TIMESTAMP(*pintr0);
3757 C_TRACE(x, PyCFunction_Call(func,callargs,NULL));
3758 READ_TIMESTAMP(*pintr1);
3759 Py_XDECREF(callargs);
3760 }
3761 } else {
3762 if (PyMethod_Check(func) && PyMethod_GET_SELF(func) != NULL) {
3763 /* optimize access to bound methods */
3764 PyObject *self = PyMethod_GET_SELF(func);
3765 PCALL(PCALL_METHOD);
3766 PCALL(PCALL_BOUND_METHOD);
3767 Py_INCREF(self);
3768 func = PyMethod_GET_FUNCTION(func);
3769 Py_INCREF(func);
3770 Py_DECREF(*pfunc);
3771 *pfunc = self;
3772 na++;
3773 n++;
3774 } else
3775 Py_INCREF(func);
3776 READ_TIMESTAMP(*pintr0);
3777 if (PyFunction_Check(func))
3778 x = fast_function(func, pp_stack, n, na, nk);
3779 else
3780 x = do_call(func, pp_stack, na, nk);
3781 READ_TIMESTAMP(*pintr1);
3782 Py_DECREF(func);
3783 }
3784
3785 /* Clear the stack of the function object. Also removes
3786 the arguments in case they weren't consumed already
3787 (fast_function() and err_args() leave them on the stack).
3788 */
3789 while ((*pp_stack) > pfunc) {
3790 w = EXT_POP(*pp_stack);
3791 Py_DECREF(w);
3792 PCALL(PCALL_POP);
3793 }
3794 return x;
3795 }
3796
3797 /* The fast_function() function optimizes calls for which no argument
3798 tuple is necessary; the objects are passed directly from the stack.
3799 For the simplest case -- a function that takes only positional
3800 arguments and is called with only positional arguments -- it
3801 inlines the most primitive frame setup code from
3802 PyEval_EvalCodeEx(), which vastly reduces the checks that must be
3803 done before evaluating the frame.
3804 */
3805
3806 static PyObject *
3807 fast_function(PyObject *func, PyObject ***pp_stack, int n, int na, int nk)
3808 {
3809 PyCodeObject *co = (PyCodeObject *)PyFunction_GET_CODE(func);
3810 PyObject *globals = PyFunction_GET_GLOBALS(func);
3811 PyObject *argdefs = PyFunction_GET_DEFAULTS(func);
3812 PyObject *kwdefs = PyFunction_GET_KW_DEFAULTS(func);
3813 PyObject **d = NULL;
3814 int nd = 0;
3815
3816 PCALL(PCALL_FUNCTION);
3817 PCALL(PCALL_FAST_FUNCTION);
3818 if (argdefs == NULL && co->co_argcount == n &&
3819 co->co_kwonlyargcount == 0 && nk==0 &&
3820 co->co_flags == (CO_OPTIMIZED | CO_NEWLOCALS | CO_NOFREE)) {
3821 PyFrameObject *f;
3822 PyObject *retval = NULL;
3823 PyThreadState *tstate = PyThreadState_GET();
3824 PyObject **fastlocals, **stack;
3825 int i;
3826
3827 PCALL(PCALL_FASTER_FUNCTION);
3828 assert(globals != NULL);
3829 /* XXX Perhaps we should create a specialized
3830 PyFrame_New() that doesn't take locals, but does
3831 take builtins without sanity checking them.
3832 */
3833 assert(tstate != NULL);
3834 f = PyFrame_New(tstate, co, globals, NULL);
3835 if (f == NULL)
3836 return NULL;
3837
3838 fastlocals = f->f_localsplus;
3839 stack = (*pp_stack) - n;
3840
3841 for (i = 0; i < n; i++) {
3842 Py_INCREF(*stack);
3843 fastlocals[i] = *stack++;
3844 }
3845 retval = PyEval_EvalFrameEx(f,0);
3846 ++tstate->recursion_depth;
3847 Py_DECREF(f);
3848 --tstate->recursion_depth;
3849 return retval;
3850 }
3851 if (argdefs != NULL) {
3852 d = &PyTuple_GET_ITEM(argdefs, 0);
3853 nd = Py_SIZE(argdefs);
3854 }
3855 return PyEval_EvalCodeEx((PyObject*)co, globals,
3856 (PyObject *)NULL, (*pp_stack)-n, na,
3857 (*pp_stack)-2*nk, nk, d, nd, kwdefs,
3858 PyFunction_GET_CLOSURE(func));
3859 }
3860
3861 static PyObject *
3862 update_keyword_args(PyObject *orig_kwdict, int nk, PyObject ***pp_stack,
3863 PyObject *func)
3864 {
3865 PyObject *kwdict = NULL;
3866 if (orig_kwdict == NULL)
3867 kwdict = PyDict_New();
3868 else {
3869 kwdict = PyDict_Copy(orig_kwdict);
3870 Py_DECREF(orig_kwdict);
3871 }
3872 if (kwdict == NULL)
3873 return NULL;
3874 while (--nk >= 0) {
3875 int err;
3876 PyObject *value = EXT_POP(*pp_stack);
3877 PyObject *key = EXT_POP(*pp_stack);
3878 if (PyDict_GetItem(kwdict, key) != NULL) {
3879 PyErr_Format(PyExc_TypeError,
3880 "%.200s%s got multiple values "
3881 "for keyword argument '%U'",
3882 PyEval_GetFuncName(func),
3883 PyEval_GetFuncDesc(func),
3884 key);
3885 Py_DECREF(key);
3886 Py_DECREF(value);
3887 Py_DECREF(kwdict);
3888 return NULL;
3889 }
3890 err = PyDict_SetItem(kwdict, key, value);
3891 Py_DECREF(key);
3892 Py_DECREF(value);
3893 if (err) {
3894 Py_DECREF(kwdict);
3895 return NULL;
3896 }
3897 }
3898 return kwdict;
3899 }
3900
3901 static PyObject *
3902 update_star_args(int nstack, int nstar, PyObject *stararg,
3903 PyObject ***pp_stack)
3904 {
3905 PyObject *callargs, *w;
3906
3907 callargs = PyTuple_New(nstack + nstar);
3908 if (callargs == NULL) {
3909 return NULL;
3910 }
3911 if (nstar) {
3912 int i;
3913 for (i = 0; i < nstar; i++) {
3914 PyObject *a = PyTuple_GET_ITEM(stararg, i);
3915 Py_INCREF(a);
3916 PyTuple_SET_ITEM(callargs, nstack + i, a);
3917 }
3918 }
3919 while (--nstack >= 0) {
3920 w = EXT_POP(*pp_stack);
3921 PyTuple_SET_ITEM(callargs, nstack, w);
3922 }
3923 return callargs;
3924 }
3925
3926 static PyObject *
3927 load_args(PyObject ***pp_stack, int na)
3928 {
3929 PyObject *args = PyTuple_New(na);
3930 PyObject *w;
3931
3932 if (args == NULL)
3933 return NULL;
3934 while (--na >= 0) {
3935 w = EXT_POP(*pp_stack);
3936 PyTuple_SET_ITEM(args, na, w);
3937 }
3938 return args;
3939 }
3940
3941 static PyObject *
3942 do_call(PyObject *func, PyObject ***pp_stack, int na, int nk)
3943 {
3944 PyObject *callargs = NULL;
3945 PyObject *kwdict = NULL;
3946 PyObject *result = NULL;
3947
3948 if (nk > 0) {
3949 kwdict = update_keyword_args(NULL, nk, pp_stack, func);
3950 if (kwdict == NULL)
3951 goto call_fail;
3952 }
3953 callargs = load_args(pp_stack, na);
3954 if (callargs == NULL)
3955 goto call_fail;
3956 #ifdef CALL_PROFILE
3957 /* At this point, we have to look at the type of func to
3958 update the call stats properly. Do it here so as to avoid
3959 exposing the call stats machinery outside ceval.c
3960 */
3961 if (PyFunction_Check(func))
3962 PCALL(PCALL_FUNCTION);
3963 else if (PyMethod_Check(func))
3964 PCALL(PCALL_METHOD);
3965 else if (PyType_Check(func))
3966 PCALL(PCALL_TYPE);
3967 else if (PyCFunction_Check(func))
3968 PCALL(PCALL_CFUNCTION);
3969 else
3970 PCALL(PCALL_OTHER);
3971 #endif
3972 if (PyCFunction_Check(func)) {
3973 PyThreadState *tstate = PyThreadState_GET();
3974 C_TRACE(result, PyCFunction_Call(func, callargs, kwdict));
3975 }
3976 else
3977 result = PyObject_Call(func, callargs, kwdict);
3978 call_fail:
3979 Py_XDECREF(callargs);
3980 Py_XDECREF(kwdict);
3981 return result;
3982 }
3983
3984 static PyObject *
3985 ext_do_call(PyObject *func, PyObject ***pp_stack, int flags, int na, int nk)
3986 {
3987 int nstar = 0;
3988 PyObject *callargs = NULL;
3989 PyObject *stararg = NULL;
3990 PyObject *kwdict = NULL;
3991 PyObject *result = NULL;
3992
3993 if (flags & CALL_FLAG_KW) {
3994 kwdict = EXT_POP(*pp_stack);
3995 if (!PyDict_Check(kwdict)) {
3996 PyObject *d;
3997 d = PyDict_New();
3998 if (d == NULL)
3999 goto ext_call_fail;
4000 if (PyDict_Update(d, kwdict) != 0) {
4001 Py_DECREF(d);
4002 /* PyDict_Update raises attribute
4003 * error (percolated from an attempt
4004 * to get 'keys' attribute) instead of
4005 * a type error if its second argument
4006 * is not a mapping.
4007 */
4008 if (PyErr_ExceptionMatches(PyExc_AttributeError)) {
4009 PyErr_Format(PyExc_TypeError,
4010 "%.200s%.200s argument after ** "
4011 "must be a mapping, not %.200s",
4012 PyEval_GetFuncName(func),
4013 PyEval_GetFuncDesc(func),
4014 kwdict->ob_type->tp_name);
4015 }
4016 goto ext_call_fail;
4017 }
4018 Py_DECREF(kwdict);
4019 kwdict = d;
4020 }
4021 }
4022 if (flags & CALL_FLAG_VAR) {
4023 stararg = EXT_POP(*pp_stack);
4024 if (!PyTuple_Check(stararg)) {
4025 PyObject *t = NULL;
4026 t = PySequence_Tuple(stararg);
4027 if (t == NULL) {
4028 if (PyErr_ExceptionMatches(PyExc_TypeError)) {
4029 PyErr_Format(PyExc_TypeError,
4030 "%.200s%.200s argument after * "
4031 "must be a sequence, not %.200s",
4032 PyEval_GetFuncName(func),
4033 PyEval_GetFuncDesc(func),
4034 stararg->ob_type->tp_name);
4035 }
4036 goto ext_call_fail;
4037 }
4038 Py_DECREF(stararg);
4039 stararg = t;
4040 }
4041 nstar = PyTuple_GET_SIZE(stararg);
4042 }
4043 if (nk > 0) {
4044 kwdict = update_keyword_args(kwdict, nk, pp_stack, func);
4045 if (kwdict == NULL)
4046 goto ext_call_fail;
4047 }
4048 callargs = update_star_args(na, nstar, stararg, pp_stack);
4049 if (callargs == NULL)
4050 goto ext_call_fail;
4051 #ifdef CALL_PROFILE
4052 /* At this point, we have to look at the type of func to
4053 update the call stats properly. Do it here so as to avoid
4054 exposing the call stats machinery outside ceval.c
4055 */
4056 if (PyFunction_Check(func))
4057 PCALL(PCALL_FUNCTION);
4058 else if (PyMethod_Check(func))
4059 PCALL(PCALL_METHOD);
4060 else if (PyType_Check(func))
4061 PCALL(PCALL_TYPE);
4062 else if (PyCFunction_Check(func))
4063 PCALL(PCALL_CFUNCTION);
4064 else
4065 PCALL(PCALL_OTHER);
4066 #endif
4067 if (PyCFunction_Check(func)) {
4068 PyThreadState *tstate = PyThreadState_GET();
4069 C_TRACE(result, PyCFunction_Call(func, callargs, kwdict));
4070 }
4071 else
4072 result = PyObject_Call(func, callargs, kwdict);
4073 ext_call_fail:
4074 Py_XDECREF(callargs);
4075 Py_XDECREF(kwdict);
4076 Py_XDECREF(stararg);
4077 return result;
4078 }
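/* For illustration only: the net effect of a "func(*args, **kwargs)" call is
   to hand one tuple and one dict to PyObject_Call(), much as ext_do_call()
   does after normalising its operands.  "starcall_sketch" and its parameter
   names are invented; args_seq may be any sequence and kwargs_map any
   mapping, matching the * and ** paths above. */
static PyObject *
starcall_sketch(PyObject *func, PyObject *args_seq, PyObject *kwargs_map)
{
    PyObject *args, *kwargs, *result;

    args = PySequence_Tuple(args_seq);
    if (args == NULL)
        return NULL;
    kwargs = PyDict_New();
    if (kwargs == NULL || PyDict_Update(kwargs, kwargs_map) != 0) {
        Py_XDECREF(kwargs);
        Py_DECREF(args);
        return NULL;
    }
    result = PyObject_Call(func, args, kwargs);
    Py_DECREF(args);
    Py_DECREF(kwargs);
    return result;
}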
4079
4080 /* Extract a slice index from a PyLong or an object with the
4081 nb_index slot defined, and store in *pi.
4082 Silently reduce values larger than PY_SSIZE_T_MAX to PY_SSIZE_T_MAX,
4083 and silently boost values less than -PY_SSIZE_T_MAX-1 to -PY_SSIZE_T_MAX-1.
4084 Return 0 on error, 1 on success.
4085 */
4086 /* Note: If v is NULL, return success without storing into *pi. This
4087 is because _PyEval_SliceIndex() is called by apply_slice(), which can be
4088 called by the SLICE opcode with v and/or w equal to NULL.
4089 */
4090 int
4091 _PyEval_SliceIndex(PyObject *v, Py_ssize_t *pi)
4092 {
4093 if (v != NULL) {
4094 Py_ssize_t x;
4095 if (PyIndex_Check(v)) {
4096 x = PyNumber_AsSsize_t(v, NULL);
4097 if (x == -1 && PyErr_Occurred())
4098 return 0;
4099 }
4100 else {
4101 PyErr_SetString(PyExc_TypeError,
4102 "slice indices must be integers or "
4103 "None or have an __index__ method");
4104 return 0;
4105 }
4106 *pi = x;
4107 }
4108 return 1;
4109 }
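/* For illustration only: the silent clamping described above in action.
   "slice_index_clamp_sketch" and "huge" are invented names. */
static void
slice_index_clamp_sketch(void)
{
    Py_ssize_t idx = 0;
    PyObject *huge = PyLong_FromString("123456789012345678901234567890",
                                       NULL, 10);
    if (huge == NULL)
        return;
    if (_PyEval_SliceIndex(huge, &idx)) {
        /* idx is now PY_SSIZE_T_MAX rather than an OverflowError, which is
           what makes a slice like seq[:10**30] behave like seq[:]. */
    }
    Py_DECREF(huge);
}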
4110
4111 #define CANNOT_CATCH_MSG "catching classes that do not inherit from "\
4112 "BaseException is not allowed"
4113
4114 static PyObject *
4115 cmp_outcome(int op, register PyObject *v, register PyObject *w)
4116 {
4117 int res = 0;
4118 switch (op) {
4119 case PyCmp_IS:
4120 res = (v == w);
4121 break;
4122 case PyCmp_IS_NOT:
4123 res = (v != w);
4124 break;
4125 case PyCmp_IN:
4126 res = PySequence_Contains(w, v);
4127 if (res < 0)
4128 return NULL;
4129 break;
4130 case PyCmp_NOT_IN:
4131 res = PySequence_Contains(w, v);
4132 if (res < 0)
4133 return NULL;
4134 res = !res;
4135 break;
4136 case PyCmp_EXC_MATCH:
4137 if (PyTuple_Check(w)) {
4138 Py_ssize_t i, length;
4139 length = PyTuple_Size(w);
4140 for (i = 0; i < length; i += 1) {
4141 PyObject *exc = PyTuple_GET_ITEM(w, i);
4142 if (!PyExceptionClass_Check(exc)) {
4143 PyErr_SetString(PyExc_TypeError,
4144 CANNOT_CATCH_MSG);
4145 return NULL;
4146 }
4147 }
4148 }
4149 else {
4150 if (!PyExceptionClass_Check(w)) {
4151 PyErr_SetString(PyExc_TypeError,
4152 CANNOT_CATCH_MSG);
4153 return NULL;
4154 }
4155 }
4156 res = PyErr_GivenExceptionMatches(v, w);
4157 break;
4158 default:
4159 return PyObject_RichCompare(v, w, op);
4160 }
4161 v = res ? Py_True : Py_False;
4162 Py_INCREF(v);
4163 return v;
4164 }
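/* For illustration only: the PyCmp_IN branch above boiled down to a
   standalone helper; ordered comparisons simply fall through to
   PyObject_RichCompare() in the default case.  "in_sketch" is an invented
   name. */
static PyObject *
in_sketch(PyObject *v, PyObject *w)
{
    int res = PySequence_Contains(w, v);   /* "v in w" */
    if (res < 0)
        return NULL;
    return PyBool_FromLong(res);
}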
4165
4166 static PyObject *
4167 import_from(PyObject *v, PyObject *name)
4168 {
4169 PyObject *x;
4170
4171 x = PyObject_GetAttr(v, name);
4172 if (x == NULL && PyErr_ExceptionMatches(PyExc_AttributeError)) {
4173 PyErr_Format(PyExc_ImportError, "cannot import name %S", name);
4174 }
4175 return x;
4176 }
4177
4178 static int
4179 import_all_from(PyObject *locals, PyObject *v)
4180 {
4181 _Py_IDENTIFIER(__all__);
4182 _Py_IDENTIFIER(__dict__);
4183 PyObject *all = _PyObject_GetAttrId(v, &PyId___all__);
4184 PyObject *dict, *name, *value;
4185 int skip_leading_underscores = 0;
4186 int pos, err;
4187
4188 if (all == NULL) {
4189 if (!PyErr_ExceptionMatches(PyExc_AttributeError))
4190 return -1; /* Unexpected error */
4191 PyErr_Clear();
4192 dict = _PyObject_GetAttrId(v, &PyId___dict__);
4193 if (dict == NULL) {
4194 if (!PyErr_ExceptionMatches(PyExc_AttributeError))
4195 return -1;
4196 PyErr_SetString(PyExc_ImportError,
4197 "from-import-* object has no __dict__ and no __all__");
4198 return -1;
4199 }
4200 all = PyMapping_Keys(dict);
4201 Py_DECREF(dict);
4202 if (all == NULL)
4203 return -1;
4204 skip_leading_underscores = 1;
4205 }
4206
4207 for (pos = 0, err = 0; ; pos++) {
4208 name = PySequence_GetItem(all, pos);
4209 if (name == NULL) {
4210 if (!PyErr_ExceptionMatches(PyExc_IndexError))
4211 err = -1;
4212 else
4213 PyErr_Clear();
4214 break;
4215 }
4216 if (skip_leading_underscores &&
4217 PyUnicode_Check(name) &&
4218 PyUnicode_READY(name) != -1 &&
4219 PyUnicode_READ_CHAR(name, 0) == '_')
4220 {
4221 Py_DECREF(name);
4222 continue;
4223 }
4224 value = PyObject_GetAttr(v, name);
4225 if (value == NULL)
4226 err = -1;
4227 else if (PyDict_CheckExact(locals))
4228 err = PyDict_SetItem(locals, name, value);
4229 else
4230 err = PyObject_SetItem(locals, name, value);
4231 Py_DECREF(name);
4232 Py_XDECREF(value);
4233 if (err != 0)
4234 break;
4235 }
4236 Py_DECREF(all);
4237 return err;
4238 }
4239
4240 static void
4241 format_exc_check_arg(PyObject *exc, const char *format_str, PyObject *obj)
4242 {
4243 const char *obj_str;
4244
4245 if (!obj)
4246 return;
4247
4248 obj_str = _PyUnicode_AsString(obj);
4249 if (!obj_str)
4250 return;
4251
4252 PyErr_Format(exc, format_str, obj_str);
4253 }
4254
4255 static void
4256 format_exc_unbound(PyCodeObject *co, int oparg)
4257 {
4258 PyObject *name;
4259 /* Don't stomp existing exception */
4260 if (PyErr_Occurred())
4261 return;
4262 if (oparg < PyTuple_GET_SIZE(co->co_cellvars)) {
4263 name = PyTuple_GET_ITEM(co->co_cellvars,
4264 oparg);
4265 format_exc_check_arg(
4266 PyExc_UnboundLocalError,
4267 UNBOUNDLOCAL_ERROR_MSG,
4268 name);
4269 } else {
4270 name = PyTuple_GET_ITEM(co->co_freevars, oparg -
4271 PyTuple_GET_SIZE(co->co_cellvars));
4272 format_exc_check_arg(PyExc_NameError,
4273 UNBOUNDFREE_ERROR_MSG, name);
4274 }
4275 }
4276
4277 static PyObject *
4278 unicode_concatenate(PyObject *v, PyObject *w,
4279 PyFrameObject *f, unsigned char *next_instr)
4280 {
4281 PyObject *res;
4282 if (Py_REFCNT(v) == 2) {
4283 /* In the common case, there are 2 references to the value
4284 * stored in 'variable' when the += is performed: one on the
4285 * value stack (in 'v') and one still stored in the
4286 * 'variable'. We try to delete the variable now to reduce
4287 * the refcnt to 1.
4288 */
4289 switch (*next_instr) {
4290 case STORE_FAST:
4291 {
4292 int oparg = PEEKARG();
4293 PyObject **fastlocals = f->f_localsplus;
4294 if (GETLOCAL(oparg) == v)
4295 SETLOCAL(oparg, NULL);
4296 break;
4297 }
4298 case STORE_DEREF:
4299 {
4300 PyObject **freevars = (f->f_localsplus +
4301 f->f_code->co_nlocals);
4302 PyObject *c = freevars[PEEKARG()];
4303 if (PyCell_GET(c) == v)
4304 PyCell_Set(c, NULL);
4305 break;
4306 }
4307 case STORE_NAME:
4308 {
4309 PyObject *names = f->f_code->co_names;
4310 PyObject *name = GETITEM(names, PEEKARG());
4311 PyObject *locals = f->f_locals;
4312 if (PyDict_CheckExact(locals) &&
4313 PyDict_GetItem(locals, name) == v) {
4314 if (PyDict_DelItem(locals, name) != 0) {
4315 PyErr_Clear();
4316 }
4317 }
4318 break;
4319 }
4320 }
4321 }
4322 res = v;
4323 PyUnicode_Append(&res, w);
4324 return res;
4325 }
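/* For illustration only: the public entry point behind the trick above is
   PyUnicode_Append(), which consumes the left reference and can resize the
   string in place when that was the last reference.  "append_sketch" is an
   invented name. */
static PyObject *
append_sketch(PyObject *left, PyObject *right)
{
    PyObject *res = left;
    Py_INCREF(res);                 /* keep the caller's reference intact */
    PyUnicode_Append(&res, right);  /* on failure, res is set to NULL */
    return res;                     /* new reference, or NULL with error set */
}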
4326
4327