GCC Middle and Back End API Reference
tree-ssa-loop-im.cc File Reference
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "cfganal.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "tree-cfg.h"
#include "tree-ssa-loop-manip.h"
#include "tree-ssa-loop.h"
#include "tree-into-ssa.h"
#include "cfgloop.h"
#include "tree-affine.h"
#include "tree-ssa-propagate.h"
#include "trans-mem.h"
#include "gimple-fold.h"
#include "tree-scalar-evolution.h"
#include "tree-ssa-loop-niter.h"
#include "alias.h"
#include "builtins.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "dbgcnt.h"
#include "insn-codes.h"
#include "optabs-tree.h"

Data Structures

struct  lim_aux_data
 
struct  mem_ref_loc
 
class  im_mem_ref
 
struct  mem_ref_hasher
 
struct  fmt_data
 
class  rewrite_mem_ref_loc
 
class  first_mem_ref_loc_1
 
class  sm_set_flag_if_changed
 
struct  sm_aux
 
struct  seq_entry
 
class  ref_always_accessed
 
class  ref_in_loop_hot_body
 

Macros

#define LIM_EXPENSIVE   ((unsigned) param_lim_expensive)
 
#define ALWAYS_EXECUTED_IN(BB)   ((class loop *) (BB)->aux)
 
#define SET_ALWAYS_EXECUTED_IN(BB, VAL)   ((BB)->aux = (void *) (VAL))
 
#define UNANALYZABLE_MEM_ID   0
 
#define MEM_ANALYZABLE(REF)   ((REF)->id != UNANALYZABLE_MEM_ID)
 

Enumerations

enum  dep_kind { lim_raw , sm_war , sm_waw }
 
enum  dep_state { dep_unknown , dep_independent , dep_dependent }
 
enum  move_pos { MOVE_IMPOSSIBLE , MOVE_PRESERVE_EXECUTION , MOVE_POSSIBLE }
 
enum  sm_kind { sm_ord , sm_unord , sm_other }
 

Functions

static void record_loop_dependence (class loop *loop, im_mem_ref *ref, dep_kind kind, dep_state state)
 
static dep_state query_loop_dependence (class loop *loop, im_mem_ref *ref, dep_kind kind)
 
static bool ref_indep_loop_p (class loop *, im_mem_ref *, dep_kind)
 
static bool ref_always_accessed_p (class loop *, im_mem_ref *, bool)
 
static bool refs_independent_p (im_mem_ref *, im_mem_ref *, bool=true)
 
static struct lim_aux_data * init_lim_data (gimple *stmt)
 
static struct lim_aux_data * get_lim_data (gimple *stmt)
 
static void free_lim_aux_data (struct lim_aux_data *data)
 
static void clear_lim_data (gimple *stmt)
 
static enum move_pos movement_possibility_1 (gimple *stmt)
 
static enum move_pos movement_possibility (gimple *stmt)
 
bool bb_colder_than_loop_preheader (basic_block bb, class loop *loop)
 
static class loop * get_coldest_out_loop (class loop *outermost_loop, class loop *loop, basic_block curr_bb)
 
static class loop * outermost_invariant_loop (tree def, class loop *loop)
 
static bool add_dependency (tree def, struct lim_aux_data *data, class loop *loop, bool add_cost)
 
static unsigned stmt_cost (gimple *stmt)
 
static class loop * outermost_indep_loop (class loop *outer, class loop *loop, im_mem_ref *ref)
 
static tree * simple_mem_ref_in_stmt (gimple *stmt, bool *is_store)
 
static bool extract_true_false_args_from_phi (basic_block dom, gphi *phi, tree *true_arg_p, tree *false_arg_p)
 
static bool determine_max_movement (gimple *stmt, bool must_preserve_exec)
 
static void set_level (gimple *stmt, class loop *orig_loop, class loop *level)
 
static void set_profitable_level (gimple *stmt)
 
static bool nonpure_call_p (gimple *stmt)
 
static gimple * rewrite_reciprocal (gimple_stmt_iterator *bsi)
 
static gimple * rewrite_bittest (gimple_stmt_iterator *bsi)
 
static void compute_invariantness (basic_block bb)
 
unsigned int move_computations_worker (basic_block bb)
 
static bool may_move_till (tree ref, tree *index, void *data)
 
static void force_move_till_op (tree op, class loop *orig_loop, class loop *loop)
 
static bool force_move_till (tree ref, tree *index, void *data)
 
static void memref_free (class im_mem_ref *mem)
 
static im_mem_ref * mem_ref_alloc (ao_ref *mem, unsigned hash, unsigned id)
 
static void record_mem_ref_loc (im_mem_ref *ref, gimple *stmt, tree *loc)
 
static bool set_ref_stored_in_loop (im_mem_ref *ref, class loop *loop)
 
static void mark_ref_stored (im_mem_ref *ref, class loop *loop)
 
static bool set_ref_loaded_in_loop (im_mem_ref *ref, class loop *loop)
 
static void mark_ref_loaded (im_mem_ref *ref, class loop *loop)
 
static void gather_mem_refs_stmt (class loop *loop, gimple *stmt)
 
static int sort_bbs_in_loop_postorder_cmp (const void *bb1_, const void *bb2_, void *bb_loop_postorder_)
 
static int sort_locs_in_loop_postorder_cmp (const void *loc1_, const void *loc2_, void *bb_loop_postorder_)
 
static void analyze_memory_references (bool store_motion)
 
static bool mem_refs_may_alias_p (im_mem_ref *mem1, im_mem_ref *mem2, hash_map< tree, name_expansion * > **ttae_cache, bool tbaa_p)
 
static int find_ref_loc_in_loop_cmp (const void *loop_, const void *loc_, void *bb_loop_postorder_)
 
template<typename FN >
static bool for_all_locs_in_loop (class loop *loop, im_mem_ref *ref, FN fn)
 
static void rewrite_mem_refs (class loop *loop, im_mem_ref *ref, tree tmp_var)
 
static mem_ref_loc * first_mem_ref_loc (class loop *loop, im_mem_ref *ref)
 
static basic_block execute_sm_if_changed (edge ex, tree mem, tree tmp_var, tree flag, edge preheader, hash_set< basic_block > *flag_bbs, edge &append_cond_position, edge &last_cond_fallthru)
 
static tree execute_sm_if_changed_flag_set (class loop *loop, im_mem_ref *ref, hash_set< basic_block > *bbs)
 
static void execute_sm (class loop *loop, im_mem_ref *ref, hash_map< im_mem_ref *, sm_aux * > &aux_map, bool maybe_mt, bool use_other_flag_var)
 
static void execute_sm_exit (class loop *loop, edge ex, vec< seq_entry > &seq, hash_map< im_mem_ref *, sm_aux * > &aux_map, sm_kind kind, edge &append_cond_position, edge &last_cond_fallthru)
 
static bool sm_seq_push_down (vec< seq_entry > &seq, unsigned ptr, unsigned *at)
 
static int sm_seq_valid_bb (class loop *loop, basic_block bb, tree vdef, vec< seq_entry > &seq, bitmap refs_not_in_seq, bitmap refs_not_supported, bool forked, bitmap fully_visited)
 
static void hoist_memory_references (class loop *loop, bitmap mem_refs, const vec< edge > &exits)
 
static bool can_sm_ref_p (class loop *loop, im_mem_ref *ref)
 
static void find_refs_for_sm (class loop *loop, bitmap sm_executed, bitmap refs_to_sm)
 
static bool loop_suitable_for_sm (class loop *loop, const vec< edge > &exits)
 
static void store_motion_loop (class loop *loop, bitmap sm_executed)
 
static void do_store_motion (void)
 
static void fill_always_executed_in_1 (class loop *loop, sbitmap contains_call)
 
static void fill_always_executed_in (void)
 
void fill_coldest_and_hotter_out_loop (class loop *coldest_loop, class loop *hotter_loop, class loop *loop)
 
static void tree_ssa_lim_initialize (bool store_motion)
 
static void tree_ssa_lim_finalize (void)
 
unsigned int loop_invariant_motion_in_fun (function *fun, bool store_motion)
 
gimple_opt_pass * make_pass_lim (gcc::context *ctxt)
 

Variables

static hash_map< gimple *, lim_aux_data * > * lim_aux_data_map
 
vec< class loop * > coldest_outermost_loop
 
vec< class loop * > hotter_than_inner_loop
 
struct {
   hash_table< mem_ref_hasher > *   refs
   vec< im_mem_ref * >   refs_list
   vec< bitmap_head >   refs_loaded_in_loop
   vec< bitmap_head >   refs_stored_in_loop
   vec< bitmap_head >   all_refs_stored_in_loop
   hash_map< tree, name_expansion * > *   ttae_cache
} memory_accesses
 
static bitmap_obstack lim_bitmap_obstack
 
static obstack mem_ref_obstack
 
static unsigned * bb_loop_postorder
 

Macro Definition Documentation

◆ ALWAYS_EXECUTED_IN

#define ALWAYS_EXECUTED_IN ( BB)    ((class loop *) (BB)->aux)
The outermost loop for which execution of the header guarantees that the
block will be executed.   

Referenced by compute_invariantness(), determine_max_movement(), fill_always_executed_in_1(), and move_computations_worker().
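
The macro simply reads the loop pointer that fill_always_executed_in_1 stashes in the block's aux field via SET_ALWAYS_EXECUTED_IN. A minimal sketch of how a client could use it; the helper name and the exact check are illustrative, not the pass's own code:

/* Hypothetical helper: hoisting a statement from BB to the preheader of
   LEVEL introduces no new executions only if LEVEL is (or is nested in)
   the outermost loop whose header guarantees execution of BB.  */
static bool
hoist_preserves_execution_p (basic_block bb, class loop *level)
{
  class loop *always_in = ALWAYS_EXECUTED_IN (bb);
  return always_in
	 && (always_in == level || flow_loop_nested_p (always_in, level));
}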

◆ LIM_EXPENSIVE

#define LIM_EXPENSIVE   ((unsigned) param_lim_expensive)
Minimum cost of an expensive expression.   

Referenced by compute_invariantness(), determine_max_movement(), and stmt_cost().

◆ MEM_ANALYZABLE

#define MEM_ANALYZABLE ( REF)    ((REF)->id != UNANALYZABLE_MEM_ID)
Whether the reference was analyzable.   

Referenced by can_sm_ref_p(), and determine_max_movement().

◆ SET_ALWAYS_EXECUTED_IN

#define SET_ALWAYS_EXECUTED_IN ( BB, VAL )   ((BB)->aux = (void *) (VAL))

◆ UNANALYZABLE_MEM_ID

#define UNANALYZABLE_MEM_ID   0
ID of the shared unanalyzable mem.   

Referenced by gather_mem_refs_stmt(), ref_indep_loop_p(), sm_seq_valid_bb(), and tree_ssa_lim_initialize().

Enumeration Type Documentation

◆ dep_kind

We use six bits per loop in the ref->dep_loop bitmap to record
the dep_kind x dep_state combinations.   
Enumerator
lim_raw 
sm_war 
sm_waw 

◆ dep_state

Enumerator
dep_unknown 
dep_independent 
dep_dependent 
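
As noted under dep_kind, six bits per loop in ref->dep_loop encode the dep_kind x dep_state combinations. One plausible packing, written as a hypothetical helper; the real layout is internal to record_loop_dependence and query_loop_dependence:

/* Hypothetical bit index for (LOOP_NUM, KIND, STATE): two bits per kind,
   the first meaning "independent", the second "dependent"; dep_unknown is
   represented by neither bit being set.  */
static unsigned
dep_loop_bit (unsigned loop_num, dep_kind kind, dep_state state)
{
  gcc_assert (state != dep_unknown);
  return loop_num * 6 + (unsigned) kind * 2 + (state == dep_dependent ? 1 : 0);
}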

◆ move_pos

The possibilities of statement movement.   
Enumerator
MOVE_IMPOSSIBLE 
MOVE_PRESERVE_EXECUTION 
MOVE_POSSIBLE 

◆ sm_kind

sm_ord is used for ordinary stores for which we can retain the order with
    respect to other stores;
sm_unord is used for conditionally executed stores which need to be
    able to execute in arbitrary order with respect to other stores;
sm_other is used for stores we do not try to apply store motion to.   
Enumerator
sm_ord 
sm_unord 
sm_other 

Function Documentation

◆ add_dependency()

static bool add_dependency ( tree def,
struct lim_aux_data * data,
class loop * loop,
bool add_cost )
static
DATA is a structure containing information associated with a statement
inside LOOP.  DEF is one of the operands of this statement.

Find the outermost loop enclosing LOOP in which the value of DEF is invariant
and record this in the DATA->max_loop field.  If DEF itself is defined inside
this loop as well (i.e. we need to hoist it out of the loop if we want
to hoist the statement represented by DATA), record the statement in which
DEF is defined in the DATA->depends list.  Additionally, if ADD_COST is true,
add the cost of the computation of DEF to DATA->cost.

If DEF is not invariant in LOOP, return false.  Otherwise return true.   

References add_cost(), flow_loop_nested_p(), get_lim_data(), ggc_alloc(), gimple_bb(), lim_aux_data::max_loop, outermost_invariant_loop(), and SSA_NAME_DEF_STMT.

Referenced by determine_max_movement().
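
A condensed sketch of the flow described above; the depends and cost fields and their container types follow the prose description rather than exact declarations, and some early-outs are simplified:

/* Sketch: record that hoisting the statement described by DATA requires
   hoisting DEF's definition as well, narrowing DATA->max_loop and
   optionally accumulating DEF's cost.  Not the exact implementation.  */
static bool
add_dependency_sketch (tree def, struct lim_aux_data *data,
		       class loop *loop, bool add_cost)
{
  gimple *def_stmt = SSA_NAME_DEF_STMT (def);
  basic_block def_bb = gimple_bb (def_stmt);
  if (!def_bb)
    return true;			/* DEF is defined outside any block.  */

  class loop *max_loop = outermost_invariant_loop (def, loop);
  if (!max_loop)
    return false;			/* DEF is not invariant in LOOP.  */

  if (flow_loop_nested_p (data->max_loop, max_loop))
    data->max_loop = max_loop;		/* Tighten the hoisting bound.  */

  struct lim_aux_data *def_data = get_lim_data (def_stmt);
  if (!def_data)
    return true;

  if (add_cost && def_bb->loop_father == loop)
    data->cost += def_data->cost;	/* DEF is computed inside LOOP.  */

  data->depends.safe_push (def_stmt);	/* Hoist DEF together with DATA.  */
  return true;
}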

◆ analyze_memory_references()

◆ bb_colder_than_loop_preheader()

bool bb_colder_than_loop_preheader ( basic_block bb,
class loop * loop )
Compare the profile counts of BB and LOOP's preheader.  The comparison is
three-state, as stated in profile-count.h; false is returned if the
inequality cannot be decided.   

References basic_block_def::count, gcc_assert, and loop_preheader_edge().

Referenced by fill_coldest_and_hotter_out_loop(), and get_coldest_out_loop().
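
A minimal sketch of the comparison, relying on profile_count's three-state operator< described in profile-count.h:

/* Sketch: BB is colder than LOOP's preheader when its count compares
   strictly smaller; an undecidable comparison yields false.  */
bool
bb_colder_than_loop_preheader_sketch (basic_block bb, class loop *loop)
{
  gcc_assert (bb && loop);
  return bb->count < loop_preheader_edge (loop)->src->count;
}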

◆ can_sm_ref_p()

◆ clear_lim_data()

static void clear_lim_data ( gimple * stmt)
static

◆ compute_invariantness()

◆ determine_max_movement()

static bool determine_max_movement ( gimple * stmt,
bool must_preserve_exec )
static
Determine the outermost loop to which it is possible to hoist the statement
STMT and store it in LIM_DATA (STMT)->max_loop.  To do this we determine
the outermost loop in which the value computed by STMT is invariant.
If MUST_PRESERVE_EXEC is true, additionally choose a loop such that
the fact of whether STMT is executed is preserved.  The function also fills
in other related information in LIM_DATA (STMT).

The function returns false if STMT cannot be hoisted outside of the loop it
is defined in, and true otherwise.   

References add_dependency(), ALWAYS_EXECUTED_IN, CDI_DOMINATORS, ECF_RETURNS_TWICE, extract_true_false_args_from_phi(), FOR_EACH_PHI_ARG, FOR_EACH_SSA_TREE_OPERAND, get_coldest_out_loop(), get_immediate_dominator(), get_lim_data(), ggc_alloc(), gimple_bb(), gimple_call_flags(), gimple_cond_code(), gimple_cond_lhs(), gimple_phi_num_args(), gimple_vuse(), gsi_end_p(), gsi_last_bb(), gsi_stmt(), LIM_EXPENSIVE, basic_block_def::loop_father, MEM_ANALYZABLE, memory_accesses, MIN, NULL, optab_vector, outermost_indep_loop(), lim_aux_data::ref, SSA_NAME_DEF_STMT, SSA_OP_USE, stmt_cost(), superloop_at_depth(), target_supports_op_p(), TREE_CODE, TREE_TYPE, UINT_MAX, USE_FROM_PTR, and VECTOR_TYPE_P.

Referenced by compute_invariantness().

◆ do_store_motion()

static void do_store_motion ( void )
static
Try to perform store motion for all memory references modified inside
loops.   

References BITMAP_ALLOC, BITMAP_FREE, current_loops, ggc_alloc(), loop::inner, lim_bitmap_obstack, loop::next, NULL, and store_motion_loop().

Referenced by loop_invariant_motion_in_fun().

◆ execute_sm()

static void execute_sm ( class loop * loop,
im_mem_ref * ref,
hash_map< im_mem_ref *, sm_aux * > & aux_map,
bool maybe_mt,
bool use_other_flag_var )
static

◆ execute_sm_exit()

◆ execute_sm_if_changed()

static basic_block execute_sm_if_changed ( edge ex,
tree mem,
tree tmp_var,
tree flag,
edge preheader,
hash_set< basic_block > * flag_bbs,
edge & append_cond_position,
edge & last_cond_fallthru )
static
Helper function for execute_sm.  Emit code to store TMP_VAR into
  MEM along edge EX.

  The store is only done if MEM has changed.  We do this so no
  changes to MEM occur on code paths that did not originally store
  into it.

  The common case for execute_sm will transform:

    for (...) {
      if (foo)
        stuff;
      else
        MEM = TMP_VAR;
    }

  into:

    lsm = MEM;
    for (...) {
      if (foo)
        stuff;
      else
        lsm = TMP_VAR;
    }
    MEM = lsm;

 This function will generate:

    lsm = MEM;

    lsm_flag = false;
    ...
    for (...) {
      if (foo)
        stuff;
      else {
        lsm = TMP_VAR;
        lsm_flag = true;
      }
    }
    if (lsm_flag)       <--
      MEM = lsm;        <-- (X)

 In case MEM and TMP_VAR are NULL the function will return the then
 block so the caller can insert (X) and other related stmts. 

References add_bb_to_loop(), add_phi_arg(), profile_probability::always(), profile_count::apply_probability(), profile_probability::apply_scale(), hash_set< KeyId, Lazy, Traits >::begin(), boolean_false_node, CDI_DOMINATORS, basic_block_def::count, create_empty_bb(), dominated_by_p(), hash_set< KeyId, Lazy, Traits >::end(), find_edge(), basic_block_def::flags, ggc_alloc(), gimple_build_assign(), gimple_build_cond(), gimple_phi_arg_def(), gimple_phi_arg_edge(), gimple_phi_num_args(), gimple_phi_result(), GSI_CONTINUE_LINKING, gsi_end_p(), gsi_insert_after(), gsi_next(), gsi_start_bb(), gsi_start_phis(), profile_probability::guessed_always(), i, basic_block_def::loop_father, make_edge(), make_single_succ_edge(), NULL_TREE, profile_count::probability_in(), recompute_dominator(), redirect_edge_succ(), set_immediate_dominator(), single_pred_p(), single_succ_edge(), split_block_after_labels(), split_edge(), profile_probability::uninitialized(), UNKNOWN_LOCATION, unshare_expr(), update_stmt(), virtual_operand_p(), and profile_count::zero().

Referenced by execute_sm_exit(), and hoist_memory_references().

◆ execute_sm_if_changed_flag_set()

static tree execute_sm_if_changed_flag_set ( class loop * loop,
im_mem_ref * ref,
hash_set< basic_block > * bbs )
static
Helper function for execute_sm.  On every location where REF is
set, set an appropriate flag indicating the store.   

References boolean_type_node, create_tmp_reg(), for_all_locs_in_loop(), get_lsm_tmp_name(), im_mem_ref::mem, and ao_ref::ref.

Referenced by execute_sm().

◆ extract_true_false_args_from_phi()

static bool extract_true_false_args_from_phi ( basic_block dom,
gphi * phi,
tree * true_arg_p,
tree * false_arg_p )
static
From a controlling predicate in DOM determine the arguments from
the PHI node PHI that are chosen if the predicate evaluates to
true and false and store them to *TRUE_ARG_P and *FALSE_ARG_P if
they are non-NULL.  Returns true if the arguments can be determined,
else return false.   

References extract_true_false_controlled_edges(), ggc_alloc(), gimple_bb(), and PHI_ARG_DEF.

Referenced by determine_max_movement(), and move_computations_worker().
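
Based on the References listed above, a hedged sketch of the extraction; the edge destination indices select the corresponding PHI arguments:

/* Sketch: let DOM's controlling condition pick the true/false edges into
   the PHI's block, then read the PHI arguments coming in on those edges.  */
static bool
extract_true_false_args_from_phi_sketch (basic_block dom, gphi *phi,
					 tree *true_arg_p, tree *false_arg_p)
{
  edge te, fe;
  if (!extract_true_false_controlled_edges (dom, gimple_bb (phi), &te, &fe))
    return false;

  if (true_arg_p)
    *true_arg_p = PHI_ARG_DEF (phi, te->dest_idx);
  if (false_arg_p)
    *false_arg_p = PHI_ARG_DEF (phi, fe->dest_idx);
  return true;
}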

◆ fill_always_executed_in()

static void fill_always_executed_in ( void )
static
Fills ALWAYS_EXECUTED_IN information for basic blocks, i.e.
for each such basic block bb it records the outermost loop for which execution
of its header implies execution of bb.   

References bitmap_clear(), bitmap_set_bit, cfun, current_loops, fill_always_executed_in_1(), FOR_EACH_BB_FN, ggc_alloc(), gsi_end_p(), gsi_next(), gsi_start_bb(), gsi_stmt(), basic_block_def::index, loop::inner, last_basic_block_for_fn, loop::next, and nonpure_call_p().

Referenced by loop_invariant_motion_in_fun().

◆ fill_always_executed_in_1()

static void fill_always_executed_in_1 ( class loop * loop,
sbitmap contains_call )
static
Fills ALWAYS_EXECUTED_IN information for basic blocks of LOOP, i.e.
for each such basic block bb it records the outermost loop for which execution
of its header implies execution of bb.  CONTAINS_CALL is the bitmap of
blocks that contain a nonpure call.   

References ALWAYS_EXECUTED_IN, bitmap_bit_p, CDI_DOMINATORS, dominated_by_p(), dump_enabled_p(), dump_printf(), fill_always_executed_in_1(), finite_loop_p(), first_dom_son(), basic_block_def::flags, flow_bb_inside_loop_p(), FOR_EACH_EDGE, get_immediate_dominator(), ggc_alloc(), loop::header, basic_block_def::index, loop::inner, last, loop::latch, basic_block_def::loop_father, MSG_NOTE, loop::next, next_dom_son(), NULL, loop::num, loop::num_nodes, SET_ALWAYS_EXECUTED_IN, basic_block_def::succs, and worklist.

Referenced by fill_always_executed_in(), and fill_always_executed_in_1().

◆ fill_coldest_and_hotter_out_loop()

void fill_coldest_and_hotter_out_loop ( class loop * coldest_loop,
class loop * hotter_loop,
class loop * loop )

◆ find_ref_loc_in_loop_cmp()

static int find_ref_loc_in_loop_cmp ( const void * loop_,
const void * loc_,
void * bb_loop_postorder_ )
static
Compare function for bsearch searching for reference locations
in a loop.   

References bb_loop_postorder, flow_loop_nested_p(), ggc_alloc(), gimple_bb(), basic_block_def::loop_father, loop::num, and mem_ref_loc::stmt.

Referenced by for_all_locs_in_loop().

◆ find_refs_for_sm()

static void find_refs_for_sm ( class loop * loop,
bitmap sm_executed,
bitmap refs_to_sm )
static
Marks in REFS_TO_SM the references in LOOP for which store motion should be
performed.  SM_EXECUTED is the set of references for which store motion was
performed in one of the outer loops.   

References bitmap_set_bit, can_sm_ref_p(), dbg_cnt(), EXECUTE_IF_AND_COMPL_IN_BITMAP, ggc_alloc(), i, memory_accesses, loop::num, and refs.

Referenced by store_motion_loop().

◆ first_mem_ref_loc()

static mem_ref_loc * first_mem_ref_loc ( class loop * loop,
im_mem_ref * ref )
static
Returns the first reference location to REF in LOOP.   

References for_all_locs_in_loop(), and NULL.

Referenced by execute_sm().

◆ for_all_locs_in_loop()

template<typename FN >
static bool for_all_locs_in_loop ( class loop * loop,
im_mem_ref * ref,
FN fn )
static
Iterates over all locations of REF in LOOP and its subloops calling
fn.operator() with the location as argument.  When that operator
returns true the iteration is stopped and true is returned.
Otherwise false is returned.   

References im_mem_ref::accesses_in_loop, bb_loop_postorder, find_ref_loc_in_loop_cmp(), flow_bb_inside_loop_p(), gimple_bb(), i, and mem_ref_loc::stmt.

Referenced by can_sm_ref_p(), execute_sm_if_changed_flag_set(), first_mem_ref_loc(), ref_always_accessed_p(), and rewrite_mem_refs().

◆ force_move_till()

static bool force_move_till ( tree ref,
tree * index,
void * data )
static

◆ force_move_till_op()

static void force_move_till_op ( tree op,
class loop * orig_loop,
class loop * loop )
static
If OP is an SSA name, force the statement that defines it to be
moved out of LOOP.  ORIG_LOOP is the loop in which the expression is used.   

References gcc_assert, gimple_nop_p(), is_gimple_min_invariant(), set_level(), SSA_NAME_DEF_STMT, and TREE_CODE.

Referenced by force_move_till().

◆ free_lim_aux_data()

static void free_lim_aux_data ( struct lim_aux_data * data)
static
Releases the memory occupied by DATA.   

References free().

Referenced by clear_lim_data().

◆ gather_mem_refs_stmt()

◆ get_coldest_out_loop()

static class loop * get_coldest_out_loop ( class loop * outermost_loop,
class loop * loop,
basic_block curr_bb )
static
Check for the coldest loop between OUTERMOST_LOOP and LOOP by comparing
profile counts.  It performs a three-step check:
1) Check whether CURR_BB is cold in its own loop_father; if it is cold, just
return NULL, which means it should not be moved out at all;
2) If CURR_BB is NOT cold, check whether the pre-computed COLDEST_LOOP is
outside of OUTERMOST_LOOP; if it is inside of OUTERMOST_LOOP, return
COLDEST_LOOP;
3) If COLDEST_LOOP is outside of OUTERMOST_LOOP, check whether there is a
hotter loop between OUTERMOST_LOOP and LOOP in the pre-computed
HOTTER_THAN_INNER_LOOP; if so, return its nested inner loop, otherwise return
OUTERMOST_LOOP.
Finally, if the coldest loop is inside of OUTERMOST_LOOP, just return it as
the hoist target.   

References bb_colder_than_loop_preheader(), coldest_outermost_loop, curr_bb, flow_loop_nested_p(), gcc_assert, ggc_alloc(), hotter_than_inner_loop, loop_depth(), NULL, and loop::num.

Referenced by determine_max_movement(), and ref_in_loop_hot_body::operator()().
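
A condensed sketch of those three steps, using the pre-computed coldest_outermost_loop and hotter_than_inner_loop vectors documented below; assertions and some corner cases are dropped, so this is not the exact implementation:

/* Sketch of the decision logic described above.  */
static class loop *
get_coldest_out_loop_sketch (class loop *outermost_loop, class loop *loop,
			     basic_block curr_bb)
{
  /* 1) CURR_BB is cold in its own loop: do not move it at all.  */
  if (curr_bb && bb_colder_than_loop_preheader (curr_bb, loop))
    return NULL;

  /* 2) The pre-computed coldest loop lies inside OUTERMOST_LOOP: use it.  */
  class loop *coldest_loop = coldest_outermost_loop[loop->num];
  if (loop_depth (coldest_loop) >= loop_depth (outermost_loop))
    return coldest_loop;

  /* 3) COLDEST_LOOP is outside OUTERMOST_LOOP.  Fall back to
     OUTERMOST_LOOP, unless a recorded hotter loop sits between
     OUTERMOST_LOOP and LOOP; then hoist only to the hotter loop's
     inner loop that encloses LOOP.  */
  class loop *hotter_loop = hotter_than_inner_loop[loop->num];
  if (!hotter_loop || loop_depth (hotter_loop) < loop_depth (outermost_loop))
    return outermost_loop;
  for (class loop *aloop = hotter_loop->inner; aloop; aloop = aloop->next)
    if (aloop == loop || flow_loop_nested_p (aloop, loop))
      return aloop;
  return outermost_loop;
}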

◆ get_lim_data()

◆ hoist_memory_references()

◆ init_lim_data()

static struct lim_aux_data * init_lim_data ( gimple * stmt)
static

◆ loop_invariant_motion_in_fun()

◆ loop_suitable_for_sm()

static bool loop_suitable_for_sm ( class loop * loop,
const vec< edge > & exits )
static
Checks whether LOOP (with exits stored in the EXITS array) is suitable
for a store motion optimization (i.e. whether we can insert statements
on its exits).   

References loop::exits, FOR_EACH_VEC_ELT, ggc_alloc(), and i.

Referenced by store_motion_loop().

◆ make_pass_lim()

gimple_opt_pass * make_pass_lim ( gcc::context * ctxt)

References ggc_alloc().

◆ mark_ref_loaded()

static void mark_ref_loaded ( im_mem_ref * ref,
class loop * loop )
static
Marks reference REF as loaded in LOOP.   

References current_loops, loop_outer(), and set_ref_loaded_in_loop().

Referenced by gather_mem_refs_stmt().

◆ mark_ref_stored()

static void mark_ref_stored ( im_mem_ref * ref,
class loop * loop )
static
Marks reference REF as stored in LOOP.   

References current_loops, loop_outer(), and set_ref_stored_in_loop().

Referenced by gather_mem_refs_stmt().

◆ may_move_till()

static bool may_move_till ( tree ref,
tree * index,
void * data )
static
Checks whether the statement defining variable *INDEX can be hoisted
out of the loop passed in DATA.  Callback for for_each_index.   

References ggc_alloc(), outermost_invariant_loop(), TREE_CODE, and TREE_OPERAND.

Referenced by can_sm_ref_p().

◆ mem_ref_alloc()

◆ mem_refs_may_alias_p()

static bool mem_refs_may_alias_p ( im_mem_ref * mem1,
im_mem_ref * mem2,
hash_map< tree, name_expansion * > ** ttae_cache,
bool tbaa_p )
static
Returns true if MEM1 and MEM2 may alias.  TTAE_CACHE is used as a cache in
tree_to_aff_combination_expand.   

References aff_comb_cannot_overlap_p(), aff_combination_add(), aff_combination_expand(), aff_combination_scale(), error_mark_node, gcc_checking_assert, get_inner_reference_aff(), ggc_alloc(), refs_may_alias_p_1(), and ttae_cache.

Referenced by refs_independent_p().

◆ memref_free()

static void memref_free ( class im_mem_ref * mem)
static
A function to free the mem_ref object MEM.   

References im_mem_ref::accesses_in_loop.

Referenced by tree_ssa_lim_finalize().

◆ move_computations_worker()

◆ movement_possibility()

◆ movement_possibility_1()

static enum move_pos movement_possibility_1 ( gimple * stmt)
static
If it is possible to hoist the statement STMT unconditionally,
returns MOVE_POSSIBLE.
If it is possible to hoist the statement STMT, but we must avoid making
it executed if it would not be executed in the original program (e.g.
because it may trap), return MOVE_PRESERVE_EXECUTION.
Otherwise return MOVE_IMPOSSIBLE.   

References cfun, DECL_P, dump_file, element_precision(), ggc_alloc(), gimple_assign_lhs(), gimple_assign_rhs1(), gimple_assign_rhs2(), gimple_assign_rhs_code(), gimple_assign_single_p(), gimple_call_lhs(), gimple_could_trap_p(), gimple_get_lhs(), gimple_has_side_effects(), gimple_has_volatile_ops(), gimple_in_transaction(), gimple_phi_num_args(), gimple_phi_result(), gimple_vdef(), is_gimple_assign(), is_gimple_call(), is_global_var(), wi::ltu_p(), MOVE_IMPOSSIBLE, MOVE_POSSIBLE, MOVE_PRESERVE_EXECUTION, NULL_TREE, print_generic_expr(), SSA_NAME_OCCURS_IN_ABNORMAL_PHI, stmt_could_throw_p(), stmt_ends_bb_p(), TDF_SLIM, wi::to_wide(), TREE_CODE, TREE_TYPE, and virtual_operand_p().

Referenced by movement_possibility().

◆ nonpure_call_p()

static bool nonpure_call_p ( gimple * stmt)
static
Returns true if STMT is a call that has side effects.   

References ggc_alloc(), and gimple_has_side_effects().

Referenced by compute_invariantness(), and fill_always_executed_in().

◆ outermost_indep_loop()

static class loop * outermost_indep_loop ( class loop * outer,
class loop * loop,
im_mem_ref * ref )
static
Finds the outermost loop between OUTER and LOOP in which the memory reference
REF is independent.  If REF is not independent in LOOP, NULL is returned
instead.   

References bitmap_bit_p, ggc_alloc(), lim_raw, loop_depth(), NULL, loop::num, ref_indep_loop_p(), im_mem_ref::stored, and superloop_at_depth().

Referenced by determine_max_movement().

◆ outermost_invariant_loop()

static class loop * outermost_invariant_loop ( tree def,
class loop * loop )
static
Suppose that operand DEF is used inside LOOP.  Returns the outermost
loop to which we could move the expression using DEF if it did not have
other operands, i.e. the outermost loop enclosing LOOP in which the value
of DEF is invariant.   

References find_common_loop(), gcc_assert, get_lim_data(), ggc_alloc(), gimple_bb(), is_gimple_min_invariant(), loop_depth(), loop_outer(), lim_aux_data::max_loop, NULL, SSA_NAME_DEF_STMT, superloop_at_depth(), and TREE_CODE.

Referenced by add_dependency(), compute_invariantness(), may_move_till(), and rewrite_bittest().

◆ query_loop_dependence()

static dep_state query_loop_dependence ( class loop * loop,
im_mem_ref * ref,
dep_kind kind )
static
Query the loop dependence cache of REF for LOOP, KIND.   

References bitmap_bit_p, dep_dependent, dep_independent, im_mem_ref::dep_loop, dep_unknown, ggc_alloc(), and loop::num.

Referenced by ref_indep_loop_p().

◆ record_loop_dependence()

static void record_loop_dependence ( class loop * loop,
im_mem_ref * ref,
dep_kind kind,
dep_state state )
static
Populate the loop dependence cache of REF for LOOP, KIND with STATE.   

References bitmap_set_bit, dep_dependent, im_mem_ref::dep_loop, dep_unknown, gcc_assert, ggc_alloc(), and loop::num.

Referenced by ref_indep_loop_p().

◆ record_mem_ref_loc()

static void record_mem_ref_loc ( im_mem_ref * ref,
gimple * stmt,
tree * loc )
static
Records memory reference location *LOC in LOOP to the memory reference
description REF.  The reference occurs in statement STMT.   

References im_mem_ref::accesses_in_loop, and ggc_alloc().

Referenced by gather_mem_refs_stmt().

◆ ref_always_accessed_p()

static bool ref_always_accessed_p ( class loop * loop,
im_mem_ref * ref,
bool stored_p )
static
Returns true if REF is always accessed in LOOP.  If STORED_P is true
make sure REF is always stored to in LOOP.   

References for_all_locs_in_loop(), and lim_aux_data::ref.

Referenced by can_sm_ref_p(), and execute_sm().

◆ ref_indep_loop_p()

static bool ref_indep_loop_p ( class loop * loop,
im_mem_ref * ref,
dep_kind kind )
static
Returns true if REF is independent of all other accesses in LOOP.
KIND specifies the kind of dependence to consider.
  lim_raw assumes REF is not stored in LOOP and disambiguates RAW
          dependences, so if true, REF can be hoisted out of LOOP;
  sm_war disambiguates a store REF against all other loads to see
         whether the store can be sunk across loads out of LOOP;
  sm_waw disambiguates a store REF against all other stores to see
         whether the store can be sunk across stores out of LOOP.   

References bitmap_bit_p, dep_dependent, dep_independent, dep_unknown, dump_file, dump_flags, error_mark_node, EXECUTE_IF_SET_IN_BITMAP, ggc_alloc(), i, im_mem_ref::id, loop::inner, lim_raw, im_mem_ref::mem, memory_accesses, loop::next, loop::num, query_loop_dependence(), record_loop_dependence(), lim_aux_data::ref, ref_indep_loop_p(), ref_maybe_used_by_stmt_p(), refs_independent_p(), sm_war, sm_waw, stmt_may_clobber_ref_p_1(), TDF_DETAILS, true, and UNANALYZABLE_MEM_ID.

Referenced by can_sm_ref_p(), hoist_memory_references(), outermost_indep_loop(), and ref_indep_loop_p().
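
A hedged sketch of the kind dispatch this implies: an sm_war query checks the store REF against the loads recorded for LOOP, while lim_raw and sm_waw check against the recorded stores. The dependence cache, the recursion into inner loops, and the case of an unanalyzable REF are omitted:

/* Sketch only; not the exact implementation.  */
static bool
ref_indep_loop_p_sketch (class loop *loop, im_mem_ref *ref, dep_kind kind)
{
  bitmap refs_to_check
    = (kind == sm_war
       ? &memory_accesses.refs_loaded_in_loop[loop->num]
       : &memory_accesses.refs_stored_in_loop[loop->num]);

  /* Any unanalyzable access in LOOP defeats the disambiguation.  */
  if (bitmap_bit_p (refs_to_check, UNANALYZABLE_MEM_ID))
    return false;

  unsigned i;
  bitmap_iterator bi;
  EXECUTE_IF_SET_IN_BITMAP (refs_to_check, 0, i, bi)
    if (!refs_independent_p (ref, memory_accesses.refs_list[i],
			     kind != sm_waw /* tbaa_p */))
      return false;
  return true;
}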

◆ refs_independent_p()

static bool refs_independent_p ( im_mem_ref * ref1,
im_mem_ref * ref2,
bool tbaa_p )
static
Returns true if REF1 and REF2 are independent.   

References dump_file, dump_flags, ggc_alloc(), mem_refs_may_alias_p(), memory_accesses, and TDF_DETAILS.

Referenced by ref_indep_loop_p(), and sm_seq_push_down().

◆ rewrite_bittest()

◆ rewrite_mem_refs()

static void rewrite_mem_refs ( class loop * loop,
im_mem_ref * ref,
tree tmp_var )
static
Rewrites all references to REF in LOOP by variable TMP_VAR.   

References for_all_locs_in_loop().

Referenced by execute_sm().

◆ rewrite_reciprocal()

◆ set_level()

static void set_level ( gimple * stmt,
class loop * orig_loop,
class loop * level )
static
Suppose that some statement in ORIG_LOOP is hoisted to the loop LEVEL,
and that one of the operands of this statement is computed by STMT.
Ensure that STMT (together with all the statements that define its
operands) is hoisted at least out of the loop LEVEL.   

References find_common_loop(), flow_loop_nested_p(), FOR_EACH_VEC_ELT, gcc_assert, get_lim_data(), ggc_alloc(), gimple_bb(), i, basic_block_def::loop_father, loop_outer(), NULL, and set_level().

Referenced by force_move_till_op(), set_level(), and set_profitable_level().
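
A hedged sketch of the recursion, driven by the depends list that add_dependency fills in (its container type is an assumption here); recording of the chosen target loop is omitted:

/* Sketch: once STMT is forced out to LEVEL, every statement it depends on
   must be hoisted at least as far.  */
static void
set_level_sketch (gimple *stmt, class loop *orig_loop, class loop *level)
{
  class loop *stmt_loop
    = find_common_loop (orig_loop, gimple_bb (stmt)->loop_father);
  struct lim_aux_data *lim_data = get_lim_data (stmt);

  /* STMT already lives in a loop enclosing LEVEL, i.e. outside LEVEL.  */
  if (flow_loop_nested_p (stmt_loop, level))
    return;

  gcc_assert (level == lim_data->max_loop
	      || flow_loop_nested_p (lim_data->max_loop, level));

  /* Pull every statement STMT depends on out to the same level.  */
  unsigned i;
  gimple *dep_stmt;
  FOR_EACH_VEC_ELT (lim_data->depends, i, dep_stmt)
    set_level_sketch (dep_stmt, orig_loop, level);
}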

◆ set_profitable_level()

static void set_profitable_level ( gimple * stmt)
static
Determines the outermost loop out of which we want to hoist the statement STMT.
For now we choose the outermost possible loop.  TODO -- use profiling
information to set it more sanely.   

References get_lim_data(), gimple_bb(), lim_aux_data::max_loop, and set_level().

Referenced by compute_invariantness().

◆ set_ref_loaded_in_loop()

static bool set_ref_loaded_in_loop ( im_mem_ref * ref,
class loop * loop )
static
Set the LOOP bit in REF's loaded bitmap, allocating it if
necessary.  Return whether the bit was changed.   

References BITMAP_ALLOC, bitmap_set_bit, lim_bitmap_obstack, im_mem_ref::loaded, and loop::num.

Referenced by mark_ref_loaded().
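
The lazy-allocation pattern this describes, as a short sketch; set_ref_stored_in_loop follows the same shape with the stored bitmap:

/* Sketch: allocate REF's loaded bitmap on first use, then record LOOP.  */
static bool
set_ref_loaded_in_loop_sketch (im_mem_ref *ref, class loop *loop)
{
  if (!ref->loaded)
    ref->loaded = BITMAP_ALLOC (&lim_bitmap_obstack);
  return bitmap_set_bit (ref->loaded, loop->num);
}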

◆ set_ref_stored_in_loop()

static bool set_ref_stored_in_loop ( im_mem_ref * ref,
class loop * loop )
static
Set the LOOP bit in REF's stored bitmap, allocating it if
necessary.  Return whether the bit was changed.   

References BITMAP_ALLOC, bitmap_set_bit, lim_bitmap_obstack, loop::num, and im_mem_ref::stored.

Referenced by mark_ref_stored().

◆ simple_mem_ref_in_stmt()

static tree * simple_mem_ref_in_stmt ( gimple * stmt,
bool * is_store )
static
If there is a simple load or store to a memory reference in STMT, returns
the location of the memory reference, and sets IS_STORE according to whether
it is a store or load.  Otherwise, returns NULL.   

References ggc_alloc(), gimple_assign_lhs_ptr(), gimple_assign_rhs1_ptr(), gimple_assign_single_p(), gimple_vdef(), gimple_vuse(), is_gimple_min_invariant(), NULL, and TREE_CODE.

Referenced by gather_mem_refs_stmt().
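
Based on the References above, a hedged sketch of the pattern match: a single assignment is either SSA_NAME = MEM (a load) or MEM = SSA_NAME/invariant (a store); anything else is rejected.

/* Sketch only; not the exact implementation.  */
static tree *
simple_mem_ref_in_stmt_sketch (gimple *stmt, bool *is_store)
{
  if (!gimple_assign_single_p (stmt))
    return NULL;

  tree *lhs = gimple_assign_lhs_ptr (stmt);
  tree *rhs = gimple_assign_rhs1_ptr (stmt);

  if (TREE_CODE (*lhs) == SSA_NAME && gimple_vuse (stmt))
    {
      *is_store = false;
      return rhs;			/* The memory reference is the RHS.  */
    }
  else if (gimple_vdef (stmt)
	   && (TREE_CODE (*rhs) == SSA_NAME
	       || is_gimple_min_invariant (*rhs)))
    {
      *is_store = true;
      return lhs;			/* The memory reference is the LHS.  */
    }
  return NULL;
}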

◆ sm_seq_push_down()

static bool sm_seq_push_down ( vec< seq_entry > & seq,
unsigned ptr,
unsigned * at )
static
Push the SM candidate at index PTR in the sequence SEQ down until
we hit the next SM candidate.  Return true if that went OK and
false if we could not disambiguate against another unrelated ref.
Update *AT to the index where the candidate now resides.   

References ggc_alloc(), memory_accesses, NULL_TREE, refs_independent_p(), sm_ord, and sm_other.

Referenced by hoist_memory_references(), and sm_seq_valid_bb().

◆ sm_seq_valid_bb()

◆ sort_bbs_in_loop_postorder_cmp()

static int sort_bbs_in_loop_postorder_cmp ( const void * bb1_,
const void * bb2_,
void * bb_loop_postorder_ )
static
qsort sort function to sort blocks after their loop fathers postorder.   

References bb_loop_postorder, and ggc_alloc().

Referenced by analyze_memory_references().

◆ sort_locs_in_loop_postorder_cmp()

static int sort_locs_in_loop_postorder_cmp ( const void * loc1_,
const void * loc2_,
void * bb_loop_postorder_ )
static
qsort sort function to sort ref locs after their loop fathers postorder.   

References bb_loop_postorder, ggc_alloc(), gimple_bb(), and basic_block_def::loop_father.

Referenced by analyze_memory_references().

◆ stmt_cost()

static unsigned stmt_cost ( gimple * stmt)
static
Returns an estimate for a cost of statement STMT.  The values here
are just ad-hoc constants, similar to costs for inlining.   

References CONSTRUCTOR_NELTS, fndecl_built_in_p(), ggc_alloc(), gimple_assign_rhs1(), gimple_assign_rhs_code(), gimple_call_fndecl(), gimple_references_memory_p(), is_gimple_call(), LIM_EXPENSIVE, tcc_comparison, and TREE_CODE_CLASS.

Referenced by determine_max_movement().

◆ store_motion_loop()

static void store_motion_loop ( class loop * loop,
bitmap sm_executed )
static
Try to perform store motion for all memory references modified inside
LOOP.  SM_EXECUTED is the bitmap of the memory references for which
store motion was executed in one of the outer loops.   

References BITMAP_ALLOC, bitmap_and_compl_into(), bitmap_empty_p(), BITMAP_FREE, bitmap_ior_into(), loop::exits, find_refs_for_sm(), get_loop_exit_edges(), ggc_alloc(), hoist_memory_references(), loop::inner, lim_bitmap_obstack, loop_suitable_for_sm(), NULL, and store_motion_loop().

Referenced by do_store_motion(), and store_motion_loop().

◆ tree_ssa_lim_finalize()

◆ tree_ssa_lim_initialize()

Variable Documentation

◆ all_refs_stored_in_loop

vec<bitmap_head> all_refs_stored_in_loop

◆ bb_loop_postorder

◆ coldest_outermost_loop

vec<class loop *> coldest_outermost_loop

◆ hotter_than_inner_loop

vec<class loop *> hotter_than_inner_loop

◆ lim_aux_data_map

hash_map<gimple *, lim_aux_data *>* lim_aux_data_map
static
Maps statements to their lim_aux_data.   

Referenced by clear_lim_data(), get_lim_data(), init_lim_data(), tree_ssa_lim_finalize(), and tree_ssa_lim_initialize().

◆ lim_bitmap_obstack

◆ mem_ref_obstack

obstack mem_ref_obstack
static

◆ [struct]

◆ refs

◆ refs_list

vec<im_mem_ref *> refs_list

◆ refs_loaded_in_loop

vec<bitmap_head> refs_loaded_in_loop

◆ refs_stored_in_loop

vec<bitmap_head> refs_stored_in_loop

◆ ttae_cache

hash_map<tree, name_expansion *>* ttae_cache

Referenced by mem_refs_may_alias_p().