Lines Matching defs:loop

52 // Simple loop header.  Fall-in path on left, loop-back path on right.
110 // last in the loop. The trip-counter has to stride by a constant;
111 // the exit value is also loop invariant.
114 // CountedLoopNode has the incoming loop control and the loop-back-control
117 // CountedLoopNode if there is control flow in the loop), the post-increment
122 // CountedLoopEndNode also takes in the loop-invariant limit value.
125 // loop-back control. From CountedLoopEndNodes I can reach CountedLoopNodes
130 // inputs the incoming loop-start control and the loop-back control, so they
132 // loop-invariant stride and the loop-invariant limit value. CountedLoopNodes
133 // produce a loop-body control and the trip counter value. Since
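
As a concrete reference point, here is an ordinary source-level counted loop with the roles named above marked in comments. This is an illustration only, not HotSpot code; body() is a hypothetical placeholder.

    static void body(int i) { (void)i; }        // hypothetical loop body
    void counted(int init, int limit) {
      for (int i = init; i < limit; i += 1) {   // i     : trip counter
                                                // +1    : loop-invariant stride
                                                // limit : loop-invariant exit value,
                                                //         tested by the loop-end test
        body(i);
      }
    }
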
151 // Log2 of original loop bodies in unrolled loop
165 // Will be reset (lowered) if the loop's trip count is known.
185 // A 'main' loop has a pre-loop and a post-loop. The 'main' loop
189 // A following 'post' loop will run any remaining iterations. Used
190 // during Range Check Elimination, the 'post' loop will do any final
192 // the 'post' loop will do any epilog iterations needed. Basically,
193 // a 'post' loop can not profitably be further unrolled or RCE'd.
195 // A preceding 'pre' loop will run at least 1 iteration (to do peeling),
197 // so the following main loop 'knows' that it is striding down cache
200 // A 'main' loop that is ONLY unrolled or peeled, never RCE'd or
201 // Aligned, may be missing its pre-loop.
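
The pre/main/post arrangement described above, sketched as plain loops. Illustration only, not HotSpot code; the split points pre_limit and main_limit are hypothetical names, assumed to be chosen so the 2x-unrolled main loop stays in bounds.

    static void body(int i) { (void)i; }     // hypothetical loop body
    void pre_main_post(int init, int pre_limit, int main_limit, int limit) {
      int i;
      for (i = init; i < pre_limit; i++)     // 'pre' loop: peeling / alignment,
        body(i);                             //   runs at least 1 iteration
      for (; i < main_limit; i += 2) {       // 'main' loop: unrolled (2x here),
        body(i);                             //   range checks eliminated
        body(i + 1);
      }
      for (; i < limit; i++)                 // 'post' loop: epilog iterations
        body(i);
    }
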
306 // Note, final_value should fit into integer since counted loop has
327 IdealLoopTree *_parent; // Parent in loop tree
328 IdealLoopTree *_next; // Next sibling in loop tree
329 IdealLoopTree *_child; // First child in loop tree
331 // The head-tail backedge defines the loop.
332 // If tail is NULL then this loop has multiple backedges as part of the
333 // same loop. During cleanup I'll peel off the multiple backedges; merge
334 // them at the loop bottom and flow 1 real backedge into the loop.
335 Node *_head; // Head of loop
336 Node *_tail; // Tail of loop
348 Node_List* _safepts; // List of safepoints in this loop
349 Node_List* _required_safept; // A inner loop cannot delete these safepts;
350 bool _allow_optimizations; // Allow loop optimizations
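
A standalone sketch of how the three tree links above hang together. The struct here is a minimal mirror for illustration, not the real IdealLoopTree: loops form a first-child / next-sibling tree, so a preorder walk follows _child and _next.

    #include <cstddef>
    struct LoopTreeSketch {                  // sketch-only mirror of the links above
      LoopTreeSketch* _parent;               // parent in loop tree
      LoopTreeSketch* _next;                 // next sibling in loop tree
      LoopTreeSketch* _child;                // first child in loop tree
    };
    static int count_loops(const LoopTreeSketch* lt) {
      int n = 0;
      for (; lt != NULL; lt = lt->_next)     // this loop and all of its siblings
        n += 1 + count_loops(lt->_child);    // plus everything nested inside
      return n;
    }
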
365 // Set loop nesting depth. Accumulate has_call bits.
368 // Split out multiple fall-in edges from the loop header. Move them to a
369 // private RegionNode before the loop. This becomes the loop landing pad.
372 // Split out the outermost loop from this shared header.
376 // Feed that region as the one backedge to this loop.
379 // Split shared headers and insert loop landing pads.
381 // Returns TRUE if loop tree is structurally changed.
384 // Perform optimization to use the loop predicates for null checks and range checks.
385 // Applies to any loop level (not just the innermost one).
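
An illustration of the effect being described, not the transformation itself (which works on the ideal graph): a loop-invariant check is hoisted into a predicate executed once before the loop. trap() is a hypothetical stand-in for the deoptimization path.

    #include <cstddef>
    static void trap() {}                              // hypothetical deopt path
    int sum_before(const int* p, int n) {
      int s = 0;
      for (int i = 0; i < n; i++) {
        if (p == NULL) { trap(); return 0; }           // invariant check in the body
        s += p[i];
      }
      return s;
    }
    int sum_after(const int* p, int n) {
      if (n > 0 && p == NULL) { trap(); return 0; }    // loop predicate, checked once
      int s = 0;
      for (int i = 0; i < n; i++)
        s += p[i];                                     // check-free body
      return s;
    }
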
390 // current round of loop opts should stop.
394 // if the current round of loop opts should stop.
398 // executed (call dominates loop tail). These loops do not need non-call
402 // All-paths backwards scan from the loop tail, terminating each path at the first safepoint
409 // Check for Node being a loop-breaking test
415 // Remove simplistic dead code from loop body
418 // Look for loop-exit tests with my 50/50 guesses from the Parsing stage.
422 // Return TRUE or FALSE if the loop should never be RCE'd or aligned.
426 // Return TRUE or FALSE if the loop should be unswitched -- clone
427 // loop with an invariant test
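
The shape of the unswitching transform, sketched as source. Illustration only; a_path and b_path are hypothetical bodies. The loop is cloned, and the invariant, non-exiting test selects between the two copies.

    static void a_path(int i) { (void)i; }             // hypothetical bodies
    static void b_path(int i) { (void)i; }
    void switched(int n, bool invariant_cond) {
      for (int i = 0; i < n; i++) {
        if (invariant_cond) a_path(i); else b_path(i); // test does not exit the loop
      }
    }
    void unswitched(int n, bool invariant_cond) {
      if (invariant_cond) { for (int i = 0; i < n; i++) a_path(i); }
      else                { for (int i = 0; i < n; i++) b_path(i); }
    }
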
433 // Convert a one-iteration loop into normal code.
436 // Return TRUE or FALSE if the loop should be peeled or not. Peel if we can
437 // make some loop-invariant test (usually a null-check) happen before the
438 // loop.
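
Sketched effect of peeling, illustration only: running the first iteration ahead of the loop lets the loop-invariant test execute before the loop, so the copies inside the body can be removed. trap() and use() are hypothetical placeholders.

    #include <cstddef>
    static void trap() {}                              // hypothetical deopt path
    static void use(int v) { (void)v; }                // hypothetical consumer
    void peeled(const int* p, int n) {
      if (n <= 0) return;
      if (p == NULL) { trap(); return; }               // test from the peeled iteration
      use(p[0]);                                       // peeled first iteration
      for (int i = 1; i < n; i++)
        use(p[i]);                                     // body with the test removed
    }
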
441 // Return TRUE or FALSE if the loop should be maximally unrolled. Stash any
442 // known trip count in the counted loop node.
445 // Return TRUE or FALSE if the loop should be unrolled or not. Unroll if
446 // the loop is a CountedLoop and the body is small enough.
449 // Return TRUE or FALSE if the loop should be range-check-eliminated.
454 // Return TRUE or FALSE if the loop should be cache-line aligned.
456 // one array base can be aligned in a loop (unless the VM guarantees
464 // Compute loop exact trip count if possible
467 // Compute loop trip count from profile data
481 // Put loop body on igvn work list
489 void dump_head( ) const; // Dump loop head only
490 void dump() const; // Dump this loop recursively
491 void verify_tree(IdealLoopTree *loop, const IdealLoopTree *parent) const;
498 // loop tree. Drives the loop-based transformations on the ideal graph.
505 // Head of loop tree
619 // Set control and update loop membership
668 // Check for loop being set
669 // "n" must be a control node. Returns true if "n" is known to be in a loop.
674 // Set loop
675 void set_loop( Node *n, IdealLoopTree *loop ) {
676 _nodes.map(n->_idx, (Node*)loop);
681 // replaces loop reference, since that is not needed for a dead node.
703 // Place 'n' in some loop nest, where 'n' is a CFG node
706 // Insert loop into the existing loop tree. 'innermost' is a leaf of the
707 // loop tree, not the root.
708 IdealLoopTree *sort( IdealLoopTree *loop, IdealLoopTree *innermost );
710 // Place Data nodes in some loop nest
749 // Is safept not required by an outer loop?
753 void replace_parallel_iv(IdealLoopTree *loop);
765 // build the loop tree and perform any requested optimizations
796 // Build and verify the loop tree without modifying the graph. This
804 // True if the method has at least 1 irreducible loop
810 bool is_counted_loop( Node *x, IdealLoopTree *loop );
812 Node* exact_limit( IdealLoopTree *loop );
816 // Dead nodes have no loop, so return the top level loop instead
822 // Is 'n' a (nested) member of 'loop'?
823 int is_member( const IdealLoopTree *loop, Node *n ) const {
824 return loop->is_member(get_loop(n)); }
826 // This is the basic building block of the loop optimizations. It clones an
827 // entire loop body. It makes an old_new loop body mapping; with this
828 // mapping you can find the new-loop equivalent to an old-loop node. All
829 // new-loop nodes are exactly equal to their old-loop counterparts, all
830 // edges are the same. All exits from the old-loop now have a RegionNode
831 // that merges the equivalent new-loop path. This is true even for the
832 // normal "loop-exit" condition. All uses of loop-invariant old-loop values
833 // now come from (one or more) Phis that merge their new-loop equivalents.
836 // the clone loop to dominate the original. Used in construction of
837 // pre-main-post loop sequence.
841 void clone_loop( IdealLoopTree *loop, Node_List &old_new, int dom_depth,
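
A usage sketch of the old_new mapping. HotSpot context is assumed for Node and Node_List, so this is not standalone code: after clone_loop() has run, the clone of any old-loop node is found by indexing the map with the old node's _idx.

    static Node* new_loop_copy_of(Node_List& old_new, Node* old_n) {
      return old_new[old_n->_idx];   // new-loop equivalent recorded by clone_loop()
    }
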
845 // making a pre-loop which must execute at least once, we can remove
846 // all loop-invariant dominated tests in the main body.
847 void peeled_dom_test_elim( IdealLoopTree *loop, Node_List &old_new );
849 // Generate code to do a loop peel for the given loop (and body).
851 void do_peeling( IdealLoopTree *loop, Node_List &old_new );
853 // Add pre and post loops around the given loop. These loops are used
855 void insert_pre_post_loops( IdealLoopTree *loop, Node_List &old_new, bool peel_only );
860 // Take steps to maximally unroll the loop. Peel any odd iterations, then
861 // unroll to do double iterations. The next round of major loop transforms
862 // will repeat till the doubled loop body does all remaining iterations in 1
864 void do_maximally_unroll( IdealLoopTree *loop, Node_List &old_new );
866 // Unroll the loop body one step - make each trip do 2 iterations.
867 void do_unroll( IdealLoopTree *loop, Node_List &old_new, bool adjust_min_trip );
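
One unroll step, sketched as source. Illustration only, with overflow at the limit ignored; the real transform rewrites the ideal graph and adjusts the trip limit (cf. adjust_min_trip). Each trip now does two iterations, with a leftover iteration handled after the loop.

    static void body(int i) { (void)i; }       // hypothetical loop body
    void unrolled_once(int init, int limit) {
      int i;
      for (i = init; i + 1 < limit; i += 2) {  // doubled body per trip
        body(i);
        body(i + 1);
      }
      for (; i < limit; i++)                   // leftover (odd) iteration
        body(i);
    }
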
885 void register_control(Node* n, IdealLoopTree *loop, Node* pred);
887 // Clone loop predicates to cloned loops (peeled, unswitched)
906 BoolNode* rc_predicate(IdealLoopTree *loop, Node* ctrl,
911 // Implementation of the loop predication to promote checks outside the loop
912 bool loop_predication_impl(IdealLoopTree *loop);
915 void collect_potentially_useful_predicates(IdealLoopTree *loop, Unique_Node_List &predicate_opaque1);
928 // Eliminate range-checks and other trip-counter vs loop-invariant tests.
929 void do_range_check( IdealLoopTree *loop, Node_List &old_new );
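
Sketched effect of range-check elimination, illustration only: the trip-counter vs. loop-invariant bounds test is folded into the main-loop limit, and the iterations that could fail it are left to the post-loop, which keeps the check. trap() and use() are hypothetical placeholders.

    static void trap() {}                          // hypothetical deopt path
    static void use(int v) { (void)v; }            // hypothetical consumer
    void rce(const int* a, int a_len, int n) {
      // original body: if (i >= a_len) trap();  use(a[i]);
      int main_limit = (n < a_len) ? n : a_len;    // check folded into the limit
      int i;
      for (i = 0; i < main_limit; i++)
        use(a[i]);                                 // main loop: no range check
      for (; i < n; i++) {                         // post loop: check kept
        if (i >= a_len) { trap(); return; }
        use(a[i]);
      }
    }
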
931 // Create a slow version of the loop by cloning the loop
933 ProjNode* create_slow_version_of_loop(IdealLoopTree *loop,
936 // Clone loop with an invariant test (that does not exit) and
939 void do_unswitching (IdealLoopTree *loop, Node_List &old_new);
942 IfNode* find_unswitching_candidate(const IdealLoopTree *loop) const;
945 // Constrain the main loop iterations so the affine function:
948 // the pre-loop or the post-loop until the condition holds true in the main
949 // loop. Scale_con, offset and limit are all loop invariant.
954 // Partially peel loop up through last_peel node.
955 bool partial_peel( IdealLoopTree *loop, Node_List &old_new );
958 void scheduled_nodelist( IdealLoopTree *loop, VectorSet& ctrl, Node_List &sched );
961 // Has use internal to the vector set (i.e. not in a phi at the loop head)
962 bool has_use_internal_to_set( Node* n, VectorSet& vset, IdealLoopTree *loop );
963 // clone "n" for uses that are outside of loop
964 int clone_for_use_outside_loop( IdealLoopTree *loop, Node* n, Node_List& worklist );
966 void clone_for_special_use_inside_loop( IdealLoopTree *loop, Node* n,
968 // Insert phi(lp_entry_val, back_edge_val) at use->in(idx) for loop lp if phi does not already exist
971 // Validate the loop partition sets: peel and not_peel
972 bool is_valid_loop_partition( IdealLoopTree *loop, VectorSet& peel, Node_List& peel_list, VectorSet& not_peel );
973 // Ensure that uses outside of loop are of the right form
974 bool is_valid_clone_loop_form( IdealLoopTree *loop, Node_List& peel_list,
976 bool is_valid_clone_loop_exit_use( IdealLoopTree *loop, Node* use, uint exit_idx);
982 // Return the (unique) control output node that's in the loop (if it exists).
983 Node* stay_in_loop( Node* n, IdealLoopTree *loop);
984 // Insert a signed compare loop exit cloned from an unsigned compare.
985 IfNode* insert_cmpi_loop_exit(IfNode* if_cmpu, IdealLoopTree *loop);
986 void remove_cmpi_loop_exit(IfNode* if_cmp, IdealLoopTree *loop);
988 void register_node(Node* n, IdealLoopTree *loop, Node* pred, int ddepth);
999 // "Nearly" because all Nodes have been cloned from the original in the loop,
1002 BoolNode *clone_iff( PhiNode *phi, IdealLoopTree *loop );
1003 CmpNode *clone_bool( PhiNode *phi, IdealLoopTree *loop );
1006 // Rework addressing expressions to get the most loop-invariant stuff
1015 // Mostly prevent loop-fallout uses of the pre-incremented trip counter
1018 void reorg_offsets( IdealLoopTree *loop );
1071 void dump( IdealLoopTree *loop, uint rpo_idx, Node_List &rpo_list ) const;
1076 // Dead nodes have no loop, so return the top level loop instead
1098 // Iterate over the loop tree using a preorder, left-to-right traversal.
1116 void next(); // Advance to next loop tree