/* Peephole optimizations for bytecode compiler. */

#include "Python.h"

#include "Python-ast.h"
#include "node.h"
#include "pyarena.h"
#include "ast.h"
#include "code.h"
#include "compile.h"
#include "symtable.h"
#include "opcode.h"

#define GETARG(arr, i) ((int)((arr[i+2]<<8) + arr[i+1]))
#define UNCONDITIONAL_JUMP(op)  (op==JUMP_ABSOLUTE || op==JUMP_FORWARD)
#define CONDITIONAL_JUMP(op) (op==POP_JUMP_IF_FALSE || op==POP_JUMP_IF_TRUE \
    || op==JUMP_IF_FALSE_OR_POP || op==JUMP_IF_TRUE_OR_POP)
#define ABSOLUTE_JUMP(op) (op==JUMP_ABSOLUTE || op==CONTINUE_LOOP \
    || op==POP_JUMP_IF_FALSE || op==POP_JUMP_IF_TRUE \
    || op==JUMP_IF_FALSE_OR_POP || op==JUMP_IF_TRUE_OR_POP)
#define JUMPS_ON_TRUE(op) (op==POP_JUMP_IF_TRUE || op==JUMP_IF_TRUE_OR_POP)
#define GETJUMPTGT(arr, i) (GETARG(arr,i) + (ABSOLUTE_JUMP(arr[i]) ? 0 : i+3))
#define SETARG(arr, i, val) do { arr[i+2] = (val)>>8;    \
                                 arr[i+1] = (val) & 255; } while (0)
#define CODESIZE(op)  (HAS_ARG(op) ? 3 : 1)
#define ISBASICBLOCK(blocks, start, bytes) \
    (blocks[start]==blocks[start+bytes-1])
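
/* An instruction with an argument occupies three bytes: the opcode
   followed by a 16-bit argument stored low byte first.  A sketch of how
   GETARG and SETARG pack and unpack that argument (values hypothetical):

       codestr[i]   = LOAD_CONST;       opcode
       codestr[i+1] = 0x34;             low byte of the argument
       codestr[i+2] = 0x12;             high byte of the argument
       GETARG(codestr, i)  ==  0x1234

   SETARG(codestr, i, 0x1234) performs the inverse split. */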

/* Replace LOAD_CONST c1  LOAD_CONST c2 ... LOAD_CONST cn  BUILD_TUPLE n
   with    LOAD_CONST (c1, c2, ... cn).
   The consts table must still be in list form so that the
   new constant (c1, c2, ... cn) can be appended.
   Called with codestr pointing to the first LOAD_CONST.
   Bails out with no change if one or more of the LOAD_CONSTs is missing.
   Also works for BUILD_LIST when followed by an "in" or "not in" test.
*/
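/* A sketch of the transformation for n == 3 (constant indices are
   hypothetical):
       LOAD_CONST c1   LOAD_CONST c2   LOAD_CONST c3   BUILD_TUPLE 3
   becomes nine bytes of NOP followed by one LOAD_CONST that loads the
   new tuple (c1, c2, c3) appended to consts at index len_consts:
       NOP NOP NOP NOP NOP NOP NOP NOP NOP   LOAD_CONST len_consts
*/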
static int
tuple_of_constants(unsigned char *codestr, Py_ssize_t n, PyObject *consts)
{
    PyObject *newconst, *constant;
    Py_ssize_t i, arg, len_consts;

    /* Pre-conditions */
    assert(PyList_CheckExact(consts));
    assert(codestr[n*3] == BUILD_TUPLE || codestr[n*3] == BUILD_LIST);
    assert(GETARG(codestr, (n*3)) == n);
    for (i=0 ; i<n ; i++)
        assert(codestr[i*3] == LOAD_CONST);

    /* Build up new tuple of constants */
    newconst = PyTuple_New(n);
    if (newconst == NULL)
        return 0;
    len_consts = PyList_GET_SIZE(consts);
    for (i=0 ; i<n ; i++) {
        arg = GETARG(codestr, (i*3));
        assert(arg < len_consts);
        constant = PyList_GET_ITEM(consts, arg);
        Py_INCREF(constant);
        PyTuple_SET_ITEM(newconst, i, constant);
    }

    /* Append folded constant onto consts */
    if (PyList_Append(consts, newconst)) {
        Py_DECREF(newconst);
        return 0;
    }
    Py_DECREF(newconst);

    /* Write NOPs over the old LOAD_CONSTs and
       add a new LOAD_CONST newconst on top of the BUILD_TUPLE n */
    memset(codestr, NOP, n*3);
    codestr[n*3] = LOAD_CONST;
    SETARG(codestr, (n*3), len_consts);
    return 1;
}

/* Replace LOAD_CONST c1  LOAD_CONST c2  BINOP
   with    LOAD_CONST binop(c1,c2)
   The consts table must still be in list form so that the
   new constant can be appended.
   Called with codestr pointing to the first LOAD_CONST.
   Abandons the transformation if the folding fails (e.g.  1+'a').
   If the new constant is a sequence, only folds when the size
   is below a threshold value.  That keeps pyc files from
   becoming large in the presence of code like:  (None,)*1000.
*/
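/* A sketch of the fold for the expression 2**10 (constant indices are
   hypothetical):
       LOAD_CONST 2   LOAD_CONST 10   BINARY_POWER
   becomes
       NOP NOP NOP NOP   LOAD_CONST len_consts
   where consts[len_consts] is the newly appended result 1024.
*/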
static int
fold_binops_on_constants(unsigned char *codestr, PyObject *consts)
{
    PyObject *newconst, *v, *w;
    Py_ssize_t len_consts, size;
    int opcode;

    /* Pre-conditions */
    assert(PyList_CheckExact(consts));
    assert(codestr[0] == LOAD_CONST);
    assert(codestr[3] == LOAD_CONST);

    /* Create new constant */
    v = PyList_GET_ITEM(consts, GETARG(codestr, 0));
    w = PyList_GET_ITEM(consts, GETARG(codestr, 3));
    opcode = codestr[6];
    switch (opcode) {
        case BINARY_POWER:
            newconst = PyNumber_Power(v, w, Py_None);
            break;
        case BINARY_MULTIPLY:
            newconst = PyNumber_Multiply(v, w);
            break;
        case BINARY_DIVIDE:
            /* Cannot fold this operation statically since
               the result can depend on the run-time presence
               of the -Qnew flag */
            return 0;
        case BINARY_TRUE_DIVIDE:
            newconst = PyNumber_TrueDivide(v, w);
            break;
        case BINARY_FLOOR_DIVIDE:
            newconst = PyNumber_FloorDivide(v, w);
            break;
        case BINARY_MODULO:
            newconst = PyNumber_Remainder(v, w);
            break;
        case BINARY_ADD:
            newconst = PyNumber_Add(v, w);
            break;
        case BINARY_SUBTRACT:
            newconst = PyNumber_Subtract(v, w);
            break;
        case BINARY_SUBSCR:
            /* #5057: if v is unicode, there might be differences between
               wide and narrow builds in cases like '\U00012345'[0] or
               '\U00012345abcdef'[3], so it's better to skip the optimization
               in order to produce compatible pycs.
            */
            if (PyUnicode_Check(v))
                return 0;
            newconst = PyObject_GetItem(v, w);
            break;
        case BINARY_LSHIFT:
            newconst = PyNumber_Lshift(v, w);
            break;
        case BINARY_RSHIFT:
            newconst = PyNumber_Rshift(v, w);
            break;
        case BINARY_AND:
            newconst = PyNumber_And(v, w);
            break;
        case BINARY_XOR:
            newconst = PyNumber_Xor(v, w);
            break;
        case BINARY_OR:
            newconst = PyNumber_Or(v, w);
            break;
        default:
            /* Called with an unknown opcode */
            PyErr_Format(PyExc_SystemError,
                         "unexpected binary operation %d on a constant",
                         opcode);
            return 0;
    }
    if (newconst == NULL) {
        PyErr_Clear();
        return 0;
    }
    size = PyObject_Size(newconst);
    if (size == -1)
        PyErr_Clear();
    else if (size > 20) {
        Py_DECREF(newconst);
        return 0;
    }

    /* Append folded constant into consts table */
    len_consts = PyList_GET_SIZE(consts);
    if (PyList_Append(consts, newconst)) {
        Py_DECREF(newconst);
        return 0;
    }
    Py_DECREF(newconst);

    /* Write NOP NOP NOP NOP LOAD_CONST newconst */
    memset(codestr, NOP, 4);
    codestr[4] = LOAD_CONST;
    SETARG(codestr, 4, len_consts);
    return 1;
}

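/* Replace LOAD_CONST c  UNARY_OP
   with    LOAD_CONST unary_op(c)
   As above, the consts table must still be in list form so that the
   new constant can be appended; the transformation is abandoned if the
   operation fails (e.g. ~'abc').  UNARY_NEGATIVE is folded only for
   true (non-zero) operands so that the sign of -0.0 is preserved.
*/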
static int
fold_unaryops_on_constants(unsigned char *codestr, PyObject *consts)
{
    PyObject *newconst=NULL, *v;
    Py_ssize_t len_consts;
    int opcode;

    /* Pre-conditions */
    assert(PyList_CheckExact(consts));
    assert(codestr[0] == LOAD_CONST);

    /* Create new constant */
    v = PyList_GET_ITEM(consts, GETARG(codestr, 0));
    opcode = codestr[3];
    switch (opcode) {
        case UNARY_NEGATIVE:
            /* Preserve the sign of -0.0 */
            if (PyObject_IsTrue(v) == 1)
                newconst = PyNumber_Negative(v);
            break;
        case UNARY_CONVERT:
            newconst = PyObject_Repr(v);
            break;
        case UNARY_INVERT:
            newconst = PyNumber_Invert(v);
            break;
        default:
            /* Called with an unknown opcode */
            PyErr_Format(PyExc_SystemError,
                         "unexpected unary operation %d on a constant",
                         opcode);
            return 0;
    }
    if (newconst == NULL) {
        PyErr_Clear();
        return 0;
    }

    /* Append folded constant into consts table */
    len_consts = PyList_GET_SIZE(consts);
    if (PyList_Append(consts, newconst)) {
        Py_DECREF(newconst);
        return 0;
    }
    Py_DECREF(newconst);

    /* Write NOP LOAD_CONST newconst */
    codestr[0] = NOP;
    codestr[1] = LOAD_CONST;
    SETARG(codestr, 1, len_consts);
    return 1;
}

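/* Number the basic blocks of the bytecode.  The returned array is
   parallel to the code: each byte offset holds the block number of the
   instruction it belongs to, with jump targets starting new blocks.
   This is what lets ISBASICBLOCK() check that a span of instructions
   receives no incoming branches by comparing the block numbers at its
   two endpoints. */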
static unsigned int *
markblocks(unsigned char *code, Py_ssize_t len)
{
    unsigned int *blocks = PyMem_New(unsigned int, len);
    int i, j, opcode, blockcnt = 0;

    if (blocks == NULL) {
        PyErr_NoMemory();
        return NULL;
    }
    memset(blocks, 0, len*sizeof(unsigned int));

    /* Mark labels in the first pass */
    for (i=0 ; i<len ; i+=CODESIZE(opcode)) {
        opcode = code[i];
        switch (opcode) {
            case FOR_ITER:
            case JUMP_FORWARD:
            case JUMP_IF_FALSE_OR_POP:
            case JUMP_IF_TRUE_OR_POP:
            case POP_JUMP_IF_FALSE:
            case POP_JUMP_IF_TRUE:
            case JUMP_ABSOLUTE:
            case CONTINUE_LOOP:
            case SETUP_LOOP:
            case SETUP_EXCEPT:
            case SETUP_FINALLY:
            case SETUP_WITH:
                j = GETJUMPTGT(code, i);
                blocks[j] = 1;
                break;
        }
    }
    /* Build block numbers in the second pass */
    for (i=0 ; i<len ; i++) {
        blockcnt += blocks[i];          /* increment blockcnt over labels */
        blocks[i] = blockcnt;
    }
    return blocks;
}

/* Perform basic peephole optimizations on the components of a code object.
   The consts object should still be in list form to allow new constants
   to be appended.

   To keep the optimizer simple, it bails out (does nothing) for code
   containing extended arguments or that has a length over 32,700.  That
   allows us to avoid overflow and sign issues.  Likewise, it bails when
   the lineno table has complex encoding for gaps >= 255.

   Optimizations are restricted to simple transformations occurring within a
   single basic block.  All transformations keep the code size the same or
   smaller.  For those that reduce size, the gaps are initially filled with
   NOPs.  Later those NOPs are removed and the jump addresses retargeted in
   a single pass.  Line numbering is adjusted accordingly. */
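
/* The work happens in three passes over a private copy of the bytecode:
   the peephole loop below first rewrites instruction patterns in place,
   padding freed bytes with NOPs; addrmap is then built and the line
   number table remapped; finally the NOPs are squeezed out and every
   jump is retargeted through addrmap. */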

PyObject *
PyCode_Optimize(PyObject *code, PyObject* consts, PyObject *names,
                PyObject *lineno_obj)
{
    Py_ssize_t i, j, codelen;
    int nops, h, adj;
    int tgt, tgttgt, opcode;
    unsigned char *codestr = NULL;
    unsigned char *lineno;
    int *addrmap = NULL;
    int new_line, cum_orig_line, last_line, tabsiz;
    int cumlc=0, lastlc=0;      /* Count runs of consecutive LOAD_CONSTs */
    unsigned int *blocks = NULL;
    char *name;

    /* Bail out if an exception is set */
    if (PyErr_Occurred())
        goto exitError;

    /* Bypass optimization when the lineno table is too complex */
    assert(PyString_Check(lineno_obj));
    lineno = (unsigned char*)PyString_AS_STRING(lineno_obj);
    tabsiz = PyString_GET_SIZE(lineno_obj);
    if (memchr(lineno, 255, tabsiz) != NULL)
        goto exitUnchanged;

    /* Avoid situations where jump retargeting could overflow */
    assert(PyString_Check(code));
    codelen = PyString_GET_SIZE(code);
    if (codelen > 32700)
        goto exitUnchanged;

    /* Make a modifiable copy of the code string */
    codestr = (unsigned char *)PyMem_Malloc(codelen);
    if (codestr == NULL)
        goto exitError;
    codestr = (unsigned char *)memcpy(codestr,
                                      PyString_AS_STRING(code), codelen);

    /* Verify that RETURN_VALUE terminates the codestring. This allows
       the various transformation patterns to look ahead several
       instructions without additional checks to make sure they are not
       looking beyond the end of the code string.
    */
    if (codestr[codelen-1] != RETURN_VALUE)
        goto exitUnchanged;

    /* Mapping to new jump targets after NOPs are removed */
    addrmap = PyMem_New(int, codelen);
    if (addrmap == NULL) {
        PyErr_NoMemory();
        goto exitError;
    }

    blocks = markblocks(codestr, codelen);
    if (blocks == NULL)
        goto exitError;
    assert(PyList_Check(consts));

    for (i=0 ; i<codelen ; i += CODESIZE(codestr[i])) {
      reoptimize_current:
        opcode = codestr[i];

        lastlc = cumlc;
        cumlc = 0;

        switch (opcode) {
            /* Replace UNARY_NOT POP_JUMP_IF_FALSE
               with    POP_JUMP_IF_TRUE */
            case UNARY_NOT:
                if (codestr[i+1] != POP_JUMP_IF_FALSE
                    || !ISBASICBLOCK(blocks,i,4))
                    continue;
                j = GETARG(codestr, i+1);
                codestr[i] = POP_JUMP_IF_TRUE;
                SETARG(codestr, i, j);
                codestr[i+3] = NOP;
                goto reoptimize_current;

                /* not a is b -->  a is not b
                   not a in b -->  a not in b
                   not a is not b -->  a is b
                   not a not in b -->  a in b
                */
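                /* Compare arguments 6 through 9 index cmp_op as 'in',
                   'not in', 'is', 'is not', so flipping the low bit with
                   j^1 selects the negated comparison. */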
            case COMPARE_OP:
                j = GETARG(codestr, i);
                if (j < 6  ||  j > 9  ||
                    codestr[i+3] != UNARY_NOT  ||
                    !ISBASICBLOCK(blocks,i,4))
                    continue;
                SETARG(codestr, i, (j^1));
                codestr[i+3] = NOP;
                break;

                /* Replace LOAD_GLOBAL/LOAD_NAME None
                   with LOAD_CONST None */
            case LOAD_NAME:
            case LOAD_GLOBAL:
                j = GETARG(codestr, i);
                name = PyString_AsString(PyTuple_GET_ITEM(names, j));
                if (name == NULL  ||  strcmp(name, "None") != 0)
                    continue;
                for (j=0 ; j < PyList_GET_SIZE(consts) ; j++) {
                    if (PyList_GET_ITEM(consts, j) == Py_None)
                        break;
                }
                if (j == PyList_GET_SIZE(consts)) {
                    if (PyList_Append(consts, Py_None) == -1)
                        goto exitError;
                }
                assert(PyList_GET_ITEM(consts, j) == Py_None);
                codestr[i] = LOAD_CONST;
                SETARG(codestr, i, j);
                cumlc = lastlc + 1;
                break;

                /* Skip over LOAD_CONST trueconst
                   POP_JUMP_IF_FALSE xx.  This improves
                   "while 1" performance. */
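                /* For example, each iteration of "while 1:" begins with
                   LOAD_CONST 1  POP_JUMP_IF_FALSE <exit>; both
                   instructions are dead and are overwritten with six
                   NOPs below. */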
            case LOAD_CONST:
                cumlc = lastlc + 1;
                j = GETARG(codestr, i);
                if (codestr[i+3] != POP_JUMP_IF_FALSE  ||
                    !ISBASICBLOCK(blocks,i,6)  ||
                    !PyObject_IsTrue(PyList_GET_ITEM(consts, j)))
                    continue;
                memset(codestr+i, NOP, 6);
                cumlc = 0;
                break;

                /* Try to fold tuples of constants (includes a case for lists
                   which are only used for "in" and "not in" tests).
                   Skip over BUILD_SEQN 1 UNPACK_SEQN 1.
                   Replace BUILD_SEQN 2 UNPACK_SEQN 2 with ROT2.
                   Replace BUILD_SEQN 3 UNPACK_SEQN 3 with ROT3 ROT2. */
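                /* For example, the swap "a, b = b, a" compiles (before
                   this pass) to BUILD_TUPLE 2  UNPACK_SEQUENCE 2, which
                   cancels out to a single ROT_TWO. */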
            case BUILD_TUPLE:
            case BUILD_LIST:
                j = GETARG(codestr, i);
                h = i - 3 * j;
                if (h >= 0 &&
                    j <= lastlc &&
                    ((opcode == BUILD_TUPLE &&
                      ISBASICBLOCK(blocks, h, 3*(j+1))) ||
                     (opcode == BUILD_LIST &&
                      codestr[i+3]==COMPARE_OP &&
                      ISBASICBLOCK(blocks, h, 3*(j+2)) &&
                      (GETARG(codestr,i+3)==6 ||
                       GETARG(codestr,i+3)==7))) &&
                    tuple_of_constants(&codestr[h], j, consts)) {
                    assert(codestr[i] == LOAD_CONST);
                    cumlc = 1;
                    break;
                }
                if (codestr[i+3] != UNPACK_SEQUENCE  ||
                    !ISBASICBLOCK(blocks,i,6) ||
                    j != GETARG(codestr, i+3))
                    continue;
                if (j == 1) {
                    memset(codestr+i, NOP, 6);
                } else if (j == 2) {
                    codestr[i] = ROT_TWO;
                    memset(codestr+i+1, NOP, 5);
                } else if (j == 3) {
                    codestr[i] = ROT_THREE;
                    codestr[i+1] = ROT_TWO;
                    memset(codestr+i+2, NOP, 4);
                }
                break;

                /* Fold binary ops on constants.
                   LOAD_CONST c1 LOAD_CONST c2 BINOP -->  LOAD_CONST binop(c1,c2) */
            case BINARY_POWER:
            case BINARY_MULTIPLY:
            case BINARY_TRUE_DIVIDE:
            case BINARY_FLOOR_DIVIDE:
            case BINARY_MODULO:
            case BINARY_ADD:
            case BINARY_SUBTRACT:
            case BINARY_SUBSCR:
            case BINARY_LSHIFT:
            case BINARY_RSHIFT:
            case BINARY_AND:
            case BINARY_XOR:
            case BINARY_OR:
                if (lastlc >= 2 &&
                    ISBASICBLOCK(blocks, i-6, 7) &&
                    fold_binops_on_constants(&codestr[i-6], consts)) {
                    i -= 2;
                    assert(codestr[i] == LOAD_CONST);
                    cumlc = 1;
                }
                break;

                /* Fold unary ops on constants.
                   LOAD_CONST c1  UNARY_OP -->  LOAD_CONST unary_op(c1) */
            case UNARY_NEGATIVE:
            case UNARY_CONVERT:
            case UNARY_INVERT:
                if (lastlc >= 1 &&
                    ISBASICBLOCK(blocks, i-3, 4) &&
                    fold_unaryops_on_constants(&codestr[i-3], consts)) {
                    i -= 2;
                    assert(codestr[i] == LOAD_CONST);
                    cumlc = 1;
                }
                break;

                /* Simplify conditional jump to conditional jump where the
                   result of the first test implies the success of a similar
                   test or the failure of the opposite test.
                   Arises in code like:
                   "if a and b:"
                   "if a or b:"
                   "a and b or c"
                   "(a and b) and c"
                   x:JUMP_IF_FALSE_OR_POP y   y:JUMP_IF_FALSE_OR_POP z
                      -->  x:JUMP_IF_FALSE_OR_POP z
                   x:JUMP_IF_FALSE_OR_POP y   y:JUMP_IF_TRUE_OR_POP z
                      -->  x:POP_JUMP_IF_FALSE y+3
                   where y+3 is the instruction following the second test.
                */
            case JUMP_IF_FALSE_OR_POP:
            case JUMP_IF_TRUE_OR_POP:
                tgt = GETJUMPTGT(codestr, i);
                j = codestr[tgt];
                if (CONDITIONAL_JUMP(j)) {
                    /* NOTE: all possible jumps here are absolute! */
                    if (JUMPS_ON_TRUE(j) == JUMPS_ON_TRUE(opcode)) {
                        /* The second jump will be
                           taken iff the first is. */
                        tgttgt = GETJUMPTGT(codestr, tgt);
                        /* The current opcode inherits
                           its target's stack behaviour */
                        codestr[i] = j;
                        SETARG(codestr, i, tgttgt);
                        goto reoptimize_current;
                    } else {
                        /* The second jump is not taken if the first is (so
                           jump past it), and all conditional jumps pop their
                           argument when they're not taken (so change the
                           first jump to pop its argument when it's taken). */
                        if (JUMPS_ON_TRUE(opcode))
                            codestr[i] = POP_JUMP_IF_TRUE;
                        else
                            codestr[i] = POP_JUMP_IF_FALSE;
                        SETARG(codestr, i, (tgt + 3));
                        goto reoptimize_current;
                    }
                }
                /* Intentional fallthrough */

                /* Replace jumps to unconditional jumps */
            case POP_JUMP_IF_FALSE:
            case POP_JUMP_IF_TRUE:
            case FOR_ITER:
            case JUMP_FORWARD:
            case JUMP_ABSOLUTE:
            case CONTINUE_LOOP:
            case SETUP_LOOP:
            case SETUP_EXCEPT:
            case SETUP_FINALLY:
            case SETUP_WITH:
                tgt = GETJUMPTGT(codestr, i);
                /* Replace JUMP_* to a RETURN into just a RETURN */
                if (UNCONDITIONAL_JUMP(opcode) &&
                    codestr[tgt] == RETURN_VALUE) {
                    codestr[i] = RETURN_VALUE;
                    memset(codestr+i+1, NOP, 2);
                    continue;
                }
                if (!UNCONDITIONAL_JUMP(codestr[tgt]))
                    continue;
                tgttgt = GETJUMPTGT(codestr, tgt);
                if (opcode == JUMP_FORWARD) /* JMP_ABS can go backwards */
                    opcode = JUMP_ABSOLUTE;
                if (!ABSOLUTE_JUMP(opcode))
                    tgttgt -= i + 3;        /* Calc relative jump addr */
                if (tgttgt < 0)             /* No backward relative jumps */
                    continue;
                codestr[i] = opcode;
                SETARG(codestr, i, tgttgt);
                break;
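
                /* For example, a JUMP_FORWARD whose target is itself an
                   unconditional JUMP_ABSOLUTE is rewritten as a single
                   JUMP_ABSOLUTE straight to the final destination. */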

            case EXTENDED_ARG:
                goto exitUnchanged;

                /* Replace RETURN LOAD_CONST None RETURN with just RETURN */
                /* Remove unreachable JUMPs after RETURN */
            case RETURN_VALUE:
                if (i+4 >= codelen)
                    continue;
                if (codestr[i+4] == RETURN_VALUE &&
                    ISBASICBLOCK(blocks,i,5))
                    memset(codestr+i+1, NOP, 4);
                else if (UNCONDITIONAL_JUMP(codestr[i+1]) &&
                         ISBASICBLOCK(blocks,i,4))
                    memset(codestr+i+1, NOP, 3);
                break;
        }
    }

    /* Fixup linenotab */
    for (i=0, nops=0 ; i<codelen ; i += CODESIZE(codestr[i])) {
        addrmap[i] = i - nops;
        if (codestr[i] == NOP)
            nops++;
    }
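    /* The lnotab is a sequence of byte pairs: (bytecode offset delta,
       line number delta).  Each cumulative original offset is pushed
       through addrmap so the table matches the code once the NOPs are
       removed; since the code only shrinks and the table was rejected
       earlier if it contained a 255 byte, every new delta still fits in
       a byte, as the assert below checks. */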
    cum_orig_line = 0;
    last_line = 0;
    for (i=0 ; i < tabsiz ; i+=2) {
        cum_orig_line += lineno[i];
        new_line = addrmap[cum_orig_line];
        assert(new_line - last_line < 255);
        lineno[i] = ((unsigned char)(new_line - last_line));
        last_line = new_line;
    }

    /* Remove NOPs and fixup jump targets */
    for (i=0, h=0 ; i<codelen ; ) {
        opcode = codestr[i];
        switch (opcode) {
            case NOP:
                i++;
                continue;

            case JUMP_ABSOLUTE:
            case CONTINUE_LOOP:
            case POP_JUMP_IF_FALSE:
            case POP_JUMP_IF_TRUE:
            case JUMP_IF_FALSE_OR_POP:
            case JUMP_IF_TRUE_OR_POP:
                j = addrmap[GETARG(codestr, i)];
                SETARG(codestr, i, j);
                break;

            case FOR_ITER:
            case JUMP_FORWARD:
            case SETUP_LOOP:
            case SETUP_EXCEPT:
            case SETUP_FINALLY:
            case SETUP_WITH:
                j = addrmap[GETARG(codestr, i) + i + 3] - addrmap[i] - 3;
                SETARG(codestr, i, j);
                break;
        }
        adj = CODESIZE(opcode);
        while (adj--)
            codestr[h++] = codestr[i++];
    }
    assert(h + nops == codelen);

    code = PyString_FromStringAndSize((char *)codestr, h);
    PyMem_Free(addrmap);
    PyMem_Free(codestr);
    PyMem_Free(blocks);
    return code;

 exitError:
    code = NULL;

 exitUnchanged:
    if (blocks != NULL)
        PyMem_Free(blocks);
    if (addrmap != NULL)
        PyMem_Free(addrmap);
    if (codestr != NULL)
        PyMem_Free(codestr);
    Py_XINCREF(code);
    return code;
}