lower_jumps.cpp revision e71b4ab8a64bf978b2036976a41e30996eebb0c8
/*
 * Copyright © 2010 Luca Barbieri
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 */

/**
 * \file lower_jumps.cpp
 *
 * This pass lowers jumps (break, continue, and return) to if/else structures.
 *
 * It can be asked to:
 * 1. Pull jumps out of ifs where possible.
 * 2. Remove all "continue"s, replacing them with an "execute flag".
 * 3. Replace all "break"s with a single conditional break at the end of the
 *    loop.
 * 4. Replace all "return"s with a single return at the end of the function,
 *    for the main function and/or other functions.
 *
 * Applying this pass gives several benefits:
 * 1. All functions can be inlined.
 * 2. nv40 and other pre-DX10 chips without "continue" can be supported.
 * 3. nv30 and other pre-DX10 chips with no control flow at all are better
 *    supported.
 *
 * Continues are lowered by adding a per-loop "execute flag", initialized to
 * true, that when cleared inhibits all execution until the end of the loop.
 *
 * Breaks are lowered to continues, plus setting a "break flag" that is
 * checked at the end of the loop and triggers the single remaining "break".
 *
 * Returns are lowered to breaks/continues, plus adding a "return flag" that
 * causes each enclosing loop to break in turn until all the loops are
 * exited; then the "execute flag" logic ignores everything until the end of
 * the function.
 *
 * Note that "continue" and "return" could also be implemented by adding
 * a dummy loop and using break.
 * However, this is bad for hardware with limited nesting depth and it
 * prevents further optimization, so it is not currently done.
 */
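
/*
 * Illustrative example (IR-level pseudocode, not the literal output of this
 * pass; the exact result depends on which options are enabled and on the
 * surrounding code).  With lower_continue and lower_break enabled, a loop
 * body such as
 *
 *    loop {
 *       if (a) continue;
 *       if (b) break;
 *       body();
 *    }
 *
 * is conceptually rewritten to
 *
 *    bool break_flag = false;
 *    loop {
 *       bool execute_flag = true;
 *       if (a) {
 *          execute_flag = false;
 *       } else {
 *          if (b) {
 *             break_flag = true;
 *             execute_flag = false;
 *          } else {
 *             body();
 *          }
 *       }
 *       if (break_flag) break;
 *    }
 *
 * When the code following a lowered jump cannot simply be moved into the
 * branch that never clears the execute flag, it is instead wrapped in an
 * "if (execute_flag)" guard.
 */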

#include "glsl_types.h"
#include <string.h>
#include "ir.h"

/**
 * Enum recording the result of analyzing how control flow might exit
 * an IR node.
 *
 * Each possible value of jump_strength indicates a strictly stronger
 * guarantee on control flow than the previous value.
 *
 * The ordering of strengths roughly reflects the way jumps are
 * lowered: jumps with higher strength tend to be lowered to jumps of
 * lower strength.  Accordingly, strength is used as a heuristic to
 * determine which lowering to perform first.
 *
 * This enum is also used by get_jump_strength() to categorize
 * instructions as either break, continue, return, or other.  When
 * used in this fashion, strength_always_clears_execute_flag is not
 * used.
 *
 * The control flow analysis made by this optimization pass makes two
 * simplifying assumptions:
 *
 * - It ignores discard instructions, since they are lowered by a
 *   separate pass (lower_discard.cpp).
 *
 * - It assumes it is always possible for control to flow from a loop
 *   to the instruction immediately following it.  Technically, this
 *   is not true (since all execution paths through the loop might
 *   jump back to the top, or return from the function).
 *
 * Both of these simplifying assumptions are safe, since they can never
 * cause reachable code to be incorrectly classified as unreachable;
 * they can only do the opposite.
 */
enum jump_strength
{
   /**
    * Analysis has produced no guarantee on how control flow might
    * exit this IR node.  It might fall out the bottom (with or
    * without clearing the execute flag, if present), or it might
    * continue to the top of the innermost enclosing loop, break out
    * of it, or return from the function.
    */
   strength_none,

   /**
    * The only way control can fall out the bottom of this node is
    * through a code path that clears the execute flag.  It might also
    * continue to the top of the innermost enclosing loop, break out
    * of it, or return from the function.
    */
   strength_always_clears_execute_flag,

   /**
    * Control cannot fall out the bottom of this node.  It might
    * continue to the top of the innermost enclosing loop, break out
    * of it, or return from the function.
    */
   strength_continue,

   /**
    * Control cannot fall out the bottom of this node, or continue to
    * the top of the innermost enclosing loop.  It can only break out of
    * it or return from the function.
    */
   strength_break,

   /**
    * Control cannot fall out the bottom of this node, continue to the
    * top of the innermost enclosing loop, or break out of it.  It can
    * only return from the function.
    */
   strength_return
};

struct block_record
{
   /* Minimum jump strength (of lowered IR, not pre-lowering IR).
    *
    * If the block ends with a jump, this must be the strength of that jump
    * (otherwise the jump would be dead and would have been deleted already).
    *
    * If the block doesn't end with a jump, it can still differ from
    * strength_none if all paths before it lead to some jump (e.g. an if
    * with a return in one branch and a break in the other, when neither
    * is being lowered).  Note that identical jumps are usually unified,
    * though.
    */
   jump_strength min_strength;

   /* can anything clear the execute flag? */
   bool may_clear_execute_flag;

   block_record()
   {
      this->min_strength = strength_none;
      this->may_clear_execute_flag = false;
   }
};

struct loop_record
{
   ir_function_signature* signature;
   ir_loop* loop;

   /* used to avoid lowering the break used to represent lowered breaks */
   unsigned nesting_depth;
   bool in_if_at_the_end_of_the_loop;

   bool may_set_return_flag;

   ir_variable* break_flag;
   ir_variable* execute_flag; /* cleared to emulate continue */

   loop_record(ir_function_signature* p_signature = 0, ir_loop* p_loop = 0)
   {
      this->signature = p_signature;
      this->loop = p_loop;
      this->nesting_depth = 0;
      this->in_if_at_the_end_of_the_loop = false;
      this->may_set_return_flag = false;
      this->break_flag = 0;
      this->execute_flag = 0;
   }

   ir_variable* get_execute_flag()
   {
      /* also supported for the "function loop" */
      if(!this->execute_flag) {
         exec_list& list = this->loop ? this->loop->body_instructions : signature->body;
         this->execute_flag = new(this->signature) ir_variable(glsl_type::bool_type, "execute_flag", ir_var_temporary);
         list.push_head(new(this->signature) ir_assignment(new(this->signature) ir_dereference_variable(execute_flag), new(this->signature) ir_constant(true), 0));
         list.push_head(this->execute_flag);
      }
      return this->execute_flag;
   }

   ir_variable* get_break_flag()
   {
      assert(this->loop);
      if(!this->break_flag) {
         this->break_flag = new(this->signature) ir_variable(glsl_type::bool_type, "break_flag", ir_var_temporary);
         this->loop->insert_before(this->break_flag);
         this->loop->insert_before(new(this->signature) ir_assignment(new(this->signature) ir_dereference_variable(break_flag), new(this->signature) ir_constant(false), 0));
      }
      return this->break_flag;
   }
};

struct function_record
{
   ir_function_signature* signature;
   ir_variable* return_flag; /* used to break out of all loops and then jump to the return instruction */
   ir_variable* return_value;
   bool lower_return;
   unsigned nesting_depth;

   function_record(ir_function_signature* p_signature = 0,
                   bool lower_return = false)
   {
      this->signature = p_signature;
      this->return_flag = 0;
      this->return_value = 0;
      this->nesting_depth = 0;
      this->lower_return = lower_return;
   }

   ir_variable* get_return_flag()
   {
      if(!this->return_flag) {
         this->return_flag = new(this->signature) ir_variable(glsl_type::bool_type, "return_flag", ir_var_temporary);
         this->signature->body.push_head(new(this->signature) ir_assignment(new(this->signature) ir_dereference_variable(return_flag), new(this->signature) ir_constant(false), 0));
         this->signature->body.push_head(this->return_flag);
      }
      return this->return_flag;
   }

   ir_variable* get_return_value()
   {
      if(!this->return_value) {
         assert(!this->signature->return_type->is_void());
         return_value = new(this->signature) ir_variable(this->signature->return_type, "return_value", ir_var_temporary);
         this->signature->body.push_head(this->return_value);
      }
      return this->return_value;
   }
};

struct ir_lower_jumps_visitor : public ir_control_flow_visitor {
   /* Postconditions: on exit of any visit() function:
    *
    * ANALYSIS: this->block.min_strength,
    * this->block.may_clear_execute_flag, and
    * this->loop.may_set_return_flag are updated to reflect the
    * characteristics of the visited statement.
    *
    * DEAD_CODE_ELIMINATION: If this->block.min_strength is not
    * strength_none, the visited node is at the end of its exec_list.
    * In other words, any unreachable statements that follow the
    * visited statement in its exec_list have been removed.
    *
    * CONTAINED_JUMPS_LOWERED: If the visited statement contains other
    * statements, then should_lower_jump() is false for all of the
    * return, break, or continue statements it contains.
    *
    * Note that visiting a jump does not lower it.  That is the
    * responsibility of the statement (or function signature) that
    * contains the jump.
    */
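
   /* A concrete reading of the ANALYSIS and DEAD_CODE_ELIMINATION
    * postconditions (illustrative pseudocode, assuming pull_out_jumps is
    * disabled and neither jump is being lowered): after visiting the "if" in
    *
    *    if (c) return x; else break;
    *    foo();
    *
    * this->block.min_strength is strength_break (the weaker of the two
    * branch strengths), this->block.may_clear_execute_flag is false, and
    * the unreachable call to foo() has been removed from the exec_list.
    */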

   bool progress;

   struct function_record function;
   struct loop_record loop;
   struct block_record block;

   bool pull_out_jumps;
   bool lower_continue;
   bool lower_break;
   bool lower_sub_return;
   bool lower_main_return;

   ir_lower_jumps_visitor()
   {
      this->progress = false;
   }

   void truncate_after_instruction(exec_node *ir)
   {
      if (!ir)
         return;

      while (!ir->get_next()->is_tail_sentinel()) {
         ((ir_instruction *)ir->get_next())->remove();
         this->progress = true;
      }
   }

   void move_outer_block_inside(ir_instruction *ir, exec_list *inner_block)
   {
      while (!ir->get_next()->is_tail_sentinel()) {
         ir_instruction *move_ir = (ir_instruction *)ir->get_next();

         move_ir->remove();
         inner_block->push_tail(move_ir);
      }
   }

   /**
    * Insert the instructions necessary to lower a return statement,
    * before the given return instruction.
    */
   void insert_lowered_return(ir_return *ir)
   {
      ir_variable* return_flag = this->function.get_return_flag();
      if(!this->function.signature->return_type->is_void()) {
         ir_variable* return_value = this->function.get_return_value();
         ir->insert_before(
            new(ir) ir_assignment(
               new (ir) ir_dereference_variable(return_value),
               ir->value));
      }
      ir->insert_before(
         new(ir) ir_assignment(
            new (ir) ir_dereference_variable(return_flag),
            new (ir) ir_constant(true)));
      this->loop.may_set_return_flag = true;
   }

   /**
    * If the given instruction is a return, lower it to instructions
    * that store the return value (if there is one), set the return
    * flag, and then break.
    *
    * It is safe to pass NULL to this function.
    */
   void lower_return_unconditionally(ir_instruction *ir)
   {
      if (get_jump_strength(ir) != strength_return) {
         return;
      }
      insert_lowered_return((ir_return*)ir);
      ir->replace_with(new(ir) ir_loop_jump(ir_loop_jump::jump_break));
   }

   virtual void visit(class ir_loop_jump * ir)
   {
      /* Eliminate all instructions after each one, since they are
       * unreachable.  This satisfies the DEAD_CODE_ELIMINATION
       * postcondition.
       */
      truncate_after_instruction(ir);

      /* Set this->block.min_strength based on this instruction.  This
       * satisfies the ANALYSIS postcondition.  It is not necessary to
       * update this->block.may_clear_execute_flag or
       * this->loop.may_set_return_flag, because an unlowered jump
       * instruction can't change any flags.
       */
      this->block.min_strength = ir->is_break() ? strength_break : strength_continue;

      /* The CONTAINED_JUMPS_LOWERED postcondition is already
       * satisfied, because jump statements can't contain other
       * statements.
       */
   }

   virtual void visit(class ir_return * ir)
   {
      /* Eliminate all instructions after each one, since they are
       * unreachable.  This satisfies the DEAD_CODE_ELIMINATION
       * postcondition.
       */
      truncate_after_instruction(ir);

      /* Set this->block.min_strength based on this instruction.  This
       * satisfies the ANALYSIS postcondition.  It is not necessary to
       * update this->block.may_clear_execute_flag or
       * this->loop.may_set_return_flag, because an unlowered return
       * instruction can't change any flags.
       */
      this->block.min_strength = strength_return;

      /* The CONTAINED_JUMPS_LOWERED postcondition is already
       * satisfied, because jump statements can't contain other
       * statements.
       */
   }

   virtual void visit(class ir_discard * ir)
   {
      /* Nothing needs to be done.  The ANALYSIS and
       * DEAD_CODE_ELIMINATION postconditions are already satisfied,
       * because discard statements are ignored by this optimization
       * pass.  The CONTAINED_JUMPS_LOWERED postcondition is already
       * satisfied, because discard statements can't contain other
       * statements.
       */
   }

   enum jump_strength get_jump_strength(ir_instruction* ir)
   {
      if(!ir)
         return strength_none;
      else if(ir->ir_type == ir_type_loop_jump) {
         if(((ir_loop_jump*)ir)->is_break())
            return strength_break;
         else
            return strength_continue;
      } else if(ir->ir_type == ir_type_return)
         return strength_return;
      else
         return strength_none;
   }

   bool should_lower_jump(ir_jump* ir)
   {
      unsigned strength = get_jump_strength(ir);
      bool lower;
      switch(strength)
      {
      case strength_none:
         lower = false; /* don't change this, code relies on it */
         break;
      case strength_continue:
         lower = lower_continue;
         break;
      case strength_break:
         assert(this->loop.loop);
         /* never lower "canonical break" */
         if(ir->get_next()->is_tail_sentinel() && (this->loop.nesting_depth == 0
               || (this->loop.nesting_depth == 1 && this->loop.in_if_at_the_end_of_the_loop)))
            lower = false;
         else
            lower = lower_break;
         break;
      case strength_return:
         /* never lower a return at the end of the function */
         if(this->function.nesting_depth == 0 && ir->get_next()->is_tail_sentinel())
            lower = false;
         else
            lower = this->function.lower_return;
         break;
      }
      return lower;
   }

   block_record visit_block(exec_list* list)
   {
      /* Note: since visiting a node may change that node's next
       * pointer, we can't use visit_exec_list(), because
       * visit_exec_list() caches the node's next pointer before
       * visiting it.  So we use foreach_list() instead.
       *
       * foreach_list() isn't safe if the node being visited gets
       * removed, but fortunately this visitor doesn't do that.
       */

      block_record saved_block = this->block;
      this->block = block_record();
      foreach_list(node, list) {
         ((ir_instruction *) node)->accept(this);
      }
      block_record ret = this->block;
      this->block = saved_block;
      return ret;
   }

   virtual void visit(ir_if *ir)
   {
      if(this->loop.nesting_depth == 0 && ir->get_next()->is_tail_sentinel())
         this->loop.in_if_at_the_end_of_the_loop = true;

      ++this->function.nesting_depth;
      ++this->loop.nesting_depth;

      block_record block_records[2];
      ir_jump* jumps[2];

      /* Recursively lower nested jumps.  This satisfies the
       * CONTAINED_JUMPS_LOWERED postcondition, except in the case of
       * unconditional jumps at the end of ir->then_instructions and
       * ir->else_instructions, which are handled below.
       */
      block_records[0] = visit_block(&ir->then_instructions);
      block_records[1] = visit_block(&ir->else_instructions);

retry: /* we get here if we put code after the if inside a branch */

      /* Determine which of ir->then_instructions and
       * ir->else_instructions end with an unconditional jump.
       */
      for(unsigned i = 0; i < 2; ++i) {
         exec_list& list = i ? ir->else_instructions : ir->then_instructions;
         jumps[i] = 0;
         if(!list.is_empty() && get_jump_strength((ir_instruction*)list.get_tail()))
            jumps[i] = (ir_jump*)list.get_tail();
      }

      /* Loop until we have satisfied the CONTAINED_JUMPS_LOWERED
       * postcondition by lowering jumps in both then_instructions and
       * else_instructions.
       */
      for(;;) {
         /* Determine the types of the jumps that terminate
          * ir->then_instructions and ir->else_instructions.
          */
         jump_strength jump_strengths[2];

         for(unsigned i = 0; i < 2; ++i) {
            if(jumps[i]) {
               jump_strengths[i] = block_records[i].min_strength;
               assert(jump_strengths[i] == get_jump_strength(jumps[i]));
            } else
               jump_strengths[i] = strength_none;
         }

         /* If both code paths end in a jump, and the jumps are the
          * same, and we are pulling out jumps, replace them with a
          * single jump that comes after the if instruction.  The new
          * jump will be visited next, and it will be lowered if
          * necessary by the loop or conditional that encloses it.
          */
         if(pull_out_jumps && jump_strengths[0] == jump_strengths[1]) {
            bool unify = true;
            if(jump_strengths[0] == strength_continue)
               ir->insert_after(new(ir) ir_loop_jump(ir_loop_jump::jump_continue));
            else if(jump_strengths[0] == strength_break)
               ir->insert_after(new(ir) ir_loop_jump(ir_loop_jump::jump_break));
            /* FINISHME: unify returns with identical expressions */
            else if(jump_strengths[0] == strength_return && this->function.signature->return_type->is_void())
               ir->insert_after(new(ir) ir_return(NULL));
            else
               unify = false;

            if(unify) {
               jumps[0]->remove();
               jumps[1]->remove();
               this->progress = true;

               /* Update jumps[] to reflect the fact that the jumps
                * are gone, and update block_records[] to reflect the
                * fact that control can now flow to the next
                * instruction.
                */
               jumps[0] = 0;
               jumps[1] = 0;
               block_records[0].min_strength = strength_none;
               block_records[1].min_strength = strength_none;

               /* The CONTAINED_JUMPS_LOWERED postcondition is now
                * satisfied, so we can break out of the loop.
                */
               break;
            }
         }

         /* lower a jump: if both need to be lowered, start with the strongest one, so that
          * we might later unify the lowered version with the other one
          */
         bool should_lower[2];
         for(unsigned i = 0; i < 2; ++i)
            should_lower[i] = should_lower_jump(jumps[i]);

         int lower;
         if(should_lower[1] && should_lower[0])
            lower = jump_strengths[1] > jump_strengths[0];
         else if(should_lower[0])
            lower = 0;
         else if(should_lower[1])
            lower = 1;
         else
            /* Neither code path ends in a jump that needs to be
             * lowered, so the CONTAINED_JUMPS_LOWERED postcondition
             * is satisfied and we can break out of the loop.
             */
            break;

         if(jump_strengths[lower] == strength_return) {
            /* To lower a return, we create a return flag (if the
             * function doesn't have one already) and add instructions
             * that: 1. store the return value (if this function has a
             * non-void return) and 2. set the return flag
             */
            insert_lowered_return((ir_return*)jumps[lower]);
            if(this->loop.loop) {
               /* If we are in a loop, replace the return instruction
                * with a break instruction, and then loop so that the
                * break instruction can be lowered if necessary.
                */
               ir_loop_jump* lowered = 0;
               lowered = new(ir) ir_loop_jump(ir_loop_jump::jump_break);
               /* Note: we must update block_records and jumps to
                * reflect the fact that the control path has been
                * altered from a return to a break.
                */
               block_records[lower].min_strength = strength_break;
               jumps[lower]->replace_with(lowered);
               jumps[lower] = lowered;
            } else {
               /* If we are not in a loop, we then proceed as we would
                * for a continue statement (set the execute flag to
                * false to prevent the rest of the function from
                * executing).
                */
               goto lower_continue;
            }
            this->progress = true;
         } else if(jump_strengths[lower] == strength_break) {
            /* To lower a break, we create a break flag (if the loop
             * doesn't have one already) and add an instruction that
             * sets it.
             *
             * Then we proceed as we would for a continue statement
             * (set the execute flag to false to prevent the rest of
             * the loop body from executing).
             *
             * The visit() function for the loop will ensure that the
             * break flag is checked after executing the loop body.
             */
            jumps[lower]->insert_before(new(ir) ir_assignment(new (ir) ir_dereference_variable(this->loop.get_break_flag()), new (ir) ir_constant(true), 0));
            goto lower_continue;
         } else if(jump_strengths[lower] == strength_continue) {
lower_continue:
            /* To lower a continue, we create an execute flag (if the
             * loop doesn't have one already) and replace the continue
             * with an instruction that clears it.
             *
             * Note that this code path gets exercised when lowering
             * return statements that are not inside a loop, so
             * this->loop must be initialized even outside of loops.
             */
            ir_variable* execute_flag = this->loop.get_execute_flag();
            jumps[lower]->replace_with(new(ir) ir_assignment(new (ir) ir_dereference_variable(execute_flag), new (ir) ir_constant(false), 0));
            /* Note: we must update block_records and jumps to reflect
             * the fact that the control path has been altered to an
             * instruction that clears the execute flag.
             */
            jumps[lower] = 0;
            block_records[lower].min_strength = strength_always_clears_execute_flag;
            block_records[lower].may_clear_execute_flag = true;
            this->progress = true;

            /* Let the loop run again, in case the other branch of the
             * if needs to be lowered too.
             */
         }
      }

      /* move a jump out if possible */
      if(pull_out_jumps) {
         /* If one of the branches ends in a jump, and control cannot
          * fall out the bottom of the other branch, then we can move
          * the jump after the if.
          *
          * Set move_out to the branch we are moving a jump out of.
          */
         int move_out = -1;
         if(jumps[0] && block_records[1].min_strength >= strength_continue)
            move_out = 0;
         else if(jumps[1] && block_records[0].min_strength >= strength_continue)
            move_out = 1;

         if(move_out >= 0)
         {
            jumps[move_out]->remove();
            ir->insert_after(jumps[move_out]);
            /* Note: we must update block_records and jumps to reflect
             * the fact that the jump has been moved out of the if.
             */
            jumps[move_out] = 0;
            block_records[move_out].min_strength = strength_none;
            this->progress = true;
         }
      }

      /* Now satisfy the ANALYSIS postcondition by setting
       * this->block.min_strength and
       * this->block.may_clear_execute_flag based on the
       * characteristics of the two branches.
       */
      if(block_records[0].min_strength < block_records[1].min_strength)
         this->block.min_strength = block_records[0].min_strength;
      else
         this->block.min_strength = block_records[1].min_strength;
      this->block.may_clear_execute_flag = this->block.may_clear_execute_flag || block_records[0].may_clear_execute_flag || block_records[1].may_clear_execute_flag;

      /* Now we need to clean up the instructions that follow the
       * if.
       *
       * If those instructions are unreachable, then satisfy the
       * DEAD_CODE_ELIMINATION postcondition by eliminating them.
       * Otherwise that postcondition is already satisfied.
       */
      if(this->block.min_strength)
         truncate_after_instruction(ir);
      else if(this->block.may_clear_execute_flag)
      {
         /* If the "if" instruction might clear the execute flag, then
          * we need to guard any instructions that follow so that they
          * are only executed if the execute flag is set.
          *
          * If one of the branches of the "if" always clears the
          * execute flag, and the other branch never clears it, then
          * this is easy: just move all the instructions following the
          * "if" into the branch that never clears it.
          */
         int move_into = -1;
         if(block_records[0].min_strength && !block_records[1].may_clear_execute_flag)
            move_into = 1;
         else if(block_records[1].min_strength && !block_records[0].may_clear_execute_flag)
            move_into = 0;

         if(move_into >= 0) {
            assert(!block_records[move_into].min_strength && !block_records[move_into].may_clear_execute_flag); /* otherwise, we just truncated */

            exec_list* list = move_into ? &ir->else_instructions : &ir->then_instructions;
            exec_node* next = ir->get_next();
            if(!next->is_tail_sentinel()) {
               move_outer_block_inside(ir, list);

               /* If any instructions moved, then we need to visit
                * them (since they are now inside the "if").  Since
                * block_records[move_into] is in its default state
                * (see assertion above), we can safely replace
                * block_records[move_into] with the result of this
                * analysis.
                */
               exec_list list;
               list.head = next;
               block_records[move_into] = visit_block(&list);

               /*
                * Then we need to re-start our jump lowering, since one
                * of the instructions we moved might be a jump that
                * needs to be lowered.
                */
               this->progress = true;
               goto retry;
            }
         } else {
            /* If we get here, then the simple case didn't apply; we
             * need to actually guard the instructions that follow.
             *
             * To avoid creating unnecessarily-deep nesting, first
             * look through the instructions that follow and unwrap
             * any instructions that are already wrapped in the
             * appropriate guard.
             */
            ir_instruction* ir_after;
            for(ir_after = (ir_instruction*)ir->get_next(); !ir_after->is_tail_sentinel();)
            {
               ir_if* ir_if = ir_after->as_if();
               if(ir_if && ir_if->else_instructions.is_empty()) {
                  ir_dereference_variable* ir_if_cond_deref = ir_if->condition->as_dereference_variable();
                  if(ir_if_cond_deref && ir_if_cond_deref->var == this->loop.execute_flag) {
                     ir_instruction* ir_next = (ir_instruction*)ir_after->get_next();
                     ir_after->insert_before(&ir_if->then_instructions);
                     ir_after->remove();
                     ir_after = ir_next;
                     continue;
                  }
               }
               ir_after = (ir_instruction*)ir_after->get_next();

               /* only set this if we find any unprotected instruction */
               this->progress = true;
            }

            /* Then, wrap all the instructions that follow in a single
             * guard.
             */
            if(!ir->get_next()->is_tail_sentinel()) {
               assert(this->loop.execute_flag);
               ir_if* if_execute = new(ir) ir_if(new(ir) ir_dereference_variable(this->loop.execute_flag));
               move_outer_block_inside(ir, &if_execute->then_instructions);
               ir->insert_after(if_execute);
            }
         }
      }
      --this->loop.nesting_depth;
      --this->function.nesting_depth;
   }

   virtual void visit(ir_loop *ir)
   {
      /* Visit the body of the loop, with a fresh data structure in
       * this->loop so that the analysis we do here won't bleed into
       * enclosing loops.
       *
       * We assume that all code after a loop is reachable from the
       * loop (see comments on enum jump_strength), so the
       * DEAD_CODE_ELIMINATION postcondition is automatically
       * satisfied, as is the block.min_strength portion of the
       * ANALYSIS postcondition.
       *
       * The block.may_clear_execute_flag portion of the ANALYSIS
       * postcondition is automatically satisfied because execute
       * flags do not propagate outside of loops.
       *
       * The loop.may_set_return_flag portion of the ANALYSIS
       * postcondition is handled below.
       */
      ++this->function.nesting_depth;
      loop_record saved_loop = this->loop;
      this->loop = loop_record(this->function.signature, ir);

      /* Recursively lower nested jumps.  This satisfies the
       * CONTAINED_JUMPS_LOWERED postcondition, except in the case of
       * an unconditional continue or return at the bottom of the
       * loop, which are handled below.
       */
      block_record body = visit_block(&ir->body_instructions);

      /* If the loop ends in an unconditional continue, eliminate it
       * because it is redundant.
       */
      ir_instruction *ir_last
         = (ir_instruction *) ir->body_instructions.get_tail();
      if (get_jump_strength(ir_last) == strength_continue) {
         ir_last->remove();
      }

      /* If the loop ends in an unconditional return, and we are
       * lowering returns, lower it.
       */
      if (this->function.lower_return)
         lower_return_unconditionally(ir_last);

      if(body.min_strength >= strength_break) {
         /* FINISHME: If the min_strength of the loop body is
          * strength_break or strength_return, that means that it
          * isn't a loop at all, since control flow always leaves the
          * body of the loop via break or return.  In principle the
          * loop could be eliminated in this case.  This optimization
          * is not implemented yet.
          */
      }

      if(this->loop.break_flag) {
         /* If a break flag was generated while visiting the body of
          * the loop, then at least one break was lowered, so we need
          * to generate an if statement at the end of the loop that
          * does a "break" if the break flag is set.  The break we
          * generate won't violate the CONTAINED_JUMPS_LOWERED
          * postcondition, because should_lower_jump() always returns
          * false for a break that happens at the end of a loop.
          */
         ir_if* break_if = new(ir) ir_if(new(ir) ir_dereference_variable(this->loop.break_flag));
         break_if->then_instructions.push_tail(new(ir) ir_loop_jump(ir_loop_jump::jump_break));
         ir->body_instructions.push_tail(break_if);
      }

      /* If the body of the loop may set the return flag, then at
       * least one return was lowered to a break, so we need to ensure
       * that the return flag is checked after the body of the loop is
       * executed.
       */
      if(this->loop.may_set_return_flag) {
         assert(this->function.return_flag);
         /* Generate the if statement to check the return flag */
         ir_if* return_if = new(ir) ir_if(new(ir) ir_dereference_variable(this->function.return_flag));
         /* Note: we also need to propagate the knowledge that the
          * return flag may get set to the outer context.  This
          * satisfies the loop.may_set_return_flag part of the
          * ANALYSIS postcondition.
          */
         saved_loop.may_set_return_flag = true;
         if(saved_loop.loop)
            /* If this loop is nested inside another one, then the if
             * statement that we generated should break out of that
             * loop if the return flag is set.  Caller will lower that
             * break statement if necessary.
             */
            return_if->then_instructions.push_tail(new(ir) ir_loop_jump(ir_loop_jump::jump_break));
         else
            /* Otherwise, all we need to do is ensure that the
             * instructions that follow are only executed if the
             * return flag is clear.  We can do that by moving those
             * instructions into the else clause of the generated if
             * statement.
             */
            move_outer_block_inside(ir, &return_if->else_instructions);
         ir->insert_after(return_if);
      }

      this->loop = saved_loop;
      --this->function.nesting_depth;
   }

   virtual void visit(ir_function_signature *ir)
   {
      /* these are not strictly necessary */
      assert(!this->function.signature);
      assert(!this->loop.loop);

      bool lower_return;
      if (strcmp(ir->function_name(), "main") == 0)
         lower_return = lower_main_return;
      else
         lower_return = lower_sub_return;

      function_record saved_function = this->function;
      loop_record saved_loop = this->loop;
      this->function = function_record(ir, lower_return);
      this->loop = loop_record(ir);

      assert(!this->loop.loop);

      /* Visit the body of the function to lower any jumps that occur
       * in it, except possibly an unconditional return statement at
       * the end of it.
       */
      visit_block(&ir->body);

      /* If the body ended in an unconditional return of non-void,
       * then we don't need to lower it because it's the one canonical
       * return.
       *
       * If the body ended in a return of void, eliminate it because
       * it is redundant.
       */
      if (ir->return_type->is_void() &&
          get_jump_strength((ir_instruction *) ir->body.get_tail())) {
         ir_jump *jump = (ir_jump *) ir->body.get_tail();
         assert (jump->ir_type == ir_type_return);
         jump->remove();
      }

      if(this->function.return_value)
         ir->body.push_tail(new(ir) ir_return(new (ir) ir_dereference_variable(this->function.return_value)));

      this->loop = saved_loop;
      this->function = saved_function;
   }

   virtual void visit(class ir_function * ir)
   {
      visit_block(&ir->signatures);
   }
};

bool
do_lower_jumps(exec_list *instructions, bool pull_out_jumps, bool lower_sub_return, bool lower_main_return, bool lower_continue, bool lower_break)
{
   ir_lower_jumps_visitor v;
   v.pull_out_jumps = pull_out_jumps;
   v.lower_continue = lower_continue;
   v.lower_break = lower_break;
   v.lower_sub_return = lower_sub_return;
   v.lower_main_return = lower_main_return;

   bool any_progress = false;
   do {
      v.progress = false;
      visit_exec_list(instructions, &v);
      any_progress = any_progress || v.progress;
   } while (v.progress);

   /* v.progress is necessarily false when the loop above exits, so report
    * whether any iteration made progress rather than returning it directly.
    */
   return any_progress;
}
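
/* Illustrative usage sketch (an assumption about how a caller might drive
 * this pass, not part of this file): "instructions" stands for the shader's
 * top-level exec_list, and the flag values below are examples only.  A
 * backend that cannot handle "continue" and wants returns in subroutines
 * lowered (which helps inlining) might call:
 *
 *    do_lower_jumps(instructions,
 *                   true,    // pull_out_jumps
 *                   true,    // lower_sub_return
 *                   false,   // lower_main_return
 *                   true,    // lower_continue
 *                   false);  // lower_break
 */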