GCC Middle and Back End API Reference
store-motion.cc File Reference
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "predict.h"
#include "df.h"
#include "toplev.h"
#include "cfgrtl.h"
#include "cfganal.h"
#include "lcm.h"
#include "cfgcleanup.h"
#include "expr.h"
#include "tree-pass.h"
#include "dbgcnt.h"
#include "rtl-iter.h"
#include "print-rtl.h"

Data Structures

struct  st_expr
 
struct  st_expr_hasher
 

Macros

#define LAST_AVAIL_CHECK_FAILURE(x)
 

Functions

static struct st_expr * st_expr_entry (rtx x)
 
static void free_st_expr_entry (struct st_expr *ptr)
 
static void free_store_motion_mems (void)
 
static int enumerate_store_motion_mems (void)
 
static struct st_expr * first_st_expr (void)
 
static struct st_expr * next_st_expr (struct st_expr *ptr)
 
static void print_store_motion_mems (FILE *file)
 
static bool store_ops_ok (const vec< rtx > &x, int *regs_set)
 
static void extract_mentioned_regs (rtx x, vec< rtx > *mentioned_regs)
 
static bool load_kills_store (const_rtx x, const_rtx store_pattern, int after)
 
static bool find_loads (const_rtx x, const_rtx store_pattern, int after)
 
static bool store_killed_in_pat (const_rtx x, const_rtx pat, int after)
 
static bool store_killed_in_insn (const_rtx x, const vec< rtx > &x_regs, const rtx_insn *insn, int after)
 
static bool store_killed_after (const_rtx x, const vec< rtx > &x_regs, const rtx_insn *insn, const_basic_block bb, int *regs_set_after, rtx *fail_insn)
 
static bool store_killed_before (const_rtx x, const vec< rtx > &x_regs, const rtx_insn *insn, const_basic_block bb, int *regs_set_before)
 
static void find_moveable_store (rtx_insn *insn, int *regs_set_before, int *regs_set_after)
 
static int compute_store_table (void)
 
static void insert_insn_start_basic_block (rtx_insn *insn, basic_block bb)
 
static int insert_store (struct st_expr *expr, edge e)
 
static void remove_reachable_equiv_notes (basic_block bb, struct st_expr *smexpr)
 
static void replace_store_insn (rtx reg, rtx_insn *del, basic_block bb, struct st_expr *smexpr)
 
static void delete_store (struct st_expr *expr, basic_block bb)
 
static void build_store_vectors (void)
 
static void free_store_memory (void)
 
static int one_store_motion_pass (void)
 
static unsigned int execute_rtl_store_motion (void)
 
rtl_opt_pass * make_pass_rtl_store_motion (gcc::context *ctxt)
 

Variables

static struct st_expr * store_motion_mems = NULL
 
static sbitmap * st_kill
 
static sbitmap * st_avloc
 
static sbitmap * st_antloc
 
static sbitmap * st_transp
 
static sbitmap * st_insert_map
 
static sbitmap * st_delete_map
 
static int num_stores
 
static struct edge_list * edge_list
 
static hash_table< st_expr_hasher > * store_motion_mems_table
 

Macro Definition Documentation

◆ LAST_AVAIL_CHECK_FAILURE

#define LAST_AVAIL_CHECK_FAILURE ( x)
Value:
((x)->reaching_reg)
The last insn in the basic block that compute_store_table is processing,
where store_killed_after is true for X.
Since we go through the basic block from BB_END to BB_HEAD, this is
also the available store at the end of the basic block.  Therefore
this is in effect a cache, to avoid calling store_killed_after for
equivalent aliasing store expressions.
This value is only meaningful during the computation of the store
table.  We hijack the REACHING_REG field of struct st_expr to save
a bit of memory.   

Referenced by compute_store_table(), and find_moveable_store().
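
As a minimal sketch of this caching idea (simplified types, not the GCC code; the expensive check is stood in for by a caller-supplied killed_after callback, and the real compute_store_table additionally verifies that the remembered insn lies in the range currently being queried):

/* Illustrative sketch only: reuse a spare pointer field of the
   per-expression entry to remember where the availability check failed,
   so the expensive walk is not repeated for later equivalent stores
   while the block is scanned from BB_END to BB_HEAD.  */
struct expr_entry
{
  void *reaching_reg;   /* doubles as the LAST_AVAIL_CHECK_FAILURE slot */
};

#define LAST_CHECK_FAILURE(e) ((e)->reaching_reg)

static bool
store_available_p (expr_entry *e, void *insn,
                   void *(*killed_after) (void *insn))
{
  if (LAST_CHECK_FAILURE (e) != nullptr)
    return false;                    /* a failure was already recorded */
  void *fail = killed_after (insn);  /* expensive alias/clobber walk */
  if (fail != nullptr)
    LAST_CHECK_FAILURE (e) = fail;   /* cache the failure point */
  return fail == nullptr;
}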

Function Documentation

◆ build_store_vectors()

◆ compute_store_table()

◆ delete_store()

static void delete_store ( struct st_expr * expr,
basic_block bb )
static
Delete a store, but copy the value that would have been stored into
the reaching_reg for later storing.   

References BLOCK_FOR_INSN(), gen_reg_rtx_and_attrs(), i, NULL_RTX, and replace_store_insn().

Referenced by one_store_motion_pass().
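
A hedged fragment showing the shape of the transformation, pieced together from the references listed above and the documented signature of replace_store_insn(); it is a sketch, not the verbatim function body:

/* Sketch only: make sure EXPR has a reaching_reg pseudo, then let
   replace_store_insn () turn each dead "MEM := src" in BB into the
   register copy "reaching_reg := src".  insert_store () later re-emits
   "MEM := reaching_reg" on the edges chosen by LCM.  */
if (expr->reaching_reg == NULL_RTX)
  expr->reaching_reg = gen_reg_rtx_and_attrs (expr->pattern);
/* ... for each available store insn of EXPR that lies in BB:
       replace_store_insn (expr->reaching_reg, insn, bb, expr);  ...  */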

◆ enumerate_store_motion_mems()

static int enumerate_store_motion_mems ( void )
static
Assign each element of the list of mems a monotonically increasing value.   

References st_expr::index, st_expr::next, NULL, and store_motion_mems.

Referenced by compute_store_table().
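
A self-contained sketch of the same enumeration over a singly linked list (the node type is a simplified stand-in for struct st_expr, whose next and index fields are the ones referenced above):

/* Illustrative sketch, simplified types: walk a singly linked list and
   give each node a monotonically increasing index, returning how many
   nodes were numbered.  */
struct node { node *next; int index; };

static int
enumerate_list (node *head)
{
  int n = 0;
  for (node *p = head; p != nullptr; p = p->next)
    p->index = n++;
  return n;
}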

◆ execute_rtl_store_motion()

static unsigned int execute_rtl_store_motion ( void )
static

◆ extract_mentioned_regs()

static void extract_mentioned_regs ( rtx x,
vec< rtx > * mentioned_regs )
static
Returns a list of registers mentioned in X.
FIXME: A regset would be prettier and less expensive.   

References FOR_EACH_SUBRTX_VAR, and REG_P.

Referenced by find_moveable_store().

◆ find_loads()

static bool find_loads ( const_rtx x,
const_rtx store_pattern,
int after )
static
Go through the entire rtx X, looking for any loads which might alias
STORE_PATTERN.  Return true if found.
AFTER is true if we are checking the case when STORE_PATTERN occurs
after the insn X.   

References find_loads(), GET_CODE, GET_RTX_FORMAT, GET_RTX_LENGTH, i, load_kills_store(), MEM_P, SET, SET_SRC, XEXP, XVECEXP, and XVECLEN.

Referenced by find_loads(), store_killed_in_insn(), and store_killed_in_pat().
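
A self-contained sketch of the recursion shape (rtx_node, is_mem and aliases_store are invented stand-ins for rtx, MEM_P and load_kills_store; the real function also looks through the SET_SRC of a SET):

#include <vector>

/* Simplified node standing in for an rtx.  */
struct rtx_node
{
  bool is_mem;                        /* stands in for MEM_P */
  bool aliases_store;                 /* stands in for load_kills_store () */
  std::vector<rtx_node *> operands;   /* sub-expressions */
};

/* Return true as soon as any load in X might alias the store, the way
   find_loads stops at the first aliasing MEM it encounters.  */
static bool
any_aliasing_load (const rtx_node *x)
{
  if (x == nullptr)
    return false;
  if (x->is_mem && x->aliases_store)
    return true;
  for (const rtx_node *op : x->operands)
    if (any_aliasing_load (op))
      return true;
  return false;
}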

◆ find_moveable_store()

static void find_moveable_store ( rtx_insn * insn,
int * regs_set_before,
int * regs_set_after )
static
Determine whether INSN is a MEM store pattern that we will consider moving.
REGS_SET_BEFORE is a bitmap of registers set before (and including) the
current insn, REGS_SET_AFTER is a bitmap of registers set after (and
including) the insn in this basic block.  We must be passing through BB from
head to end, as we are using this fact to speed things up.

The results are stored this way:

-- the first anticipatable expression is added into ANTIC_STORES
-- if the processed expression is not anticipatable, NULL_RTX is added
   there instead, so that we can use it as an indicator that no further
   expression of this type may be anticipatable
-- if the expression is available, it is added as the head of AVAIL_STORES;
   consequently, all of them but this head are dead and may be deleted.
-- if the expression is not available, the insn that causes it to not be
   available is stored in REACHING_REG (via LAST_AVAIL_CHECK_FAILURE).

Things are complicated a bit by the fact that there may already be stores
to the same MEM from other blocks; the caller must also take care of the
necessary cleanup of the temporary markers after the end of the basic block.

References st_expr::antic_stores, st_expr::avail_stores, BB_END, BLOCK_FOR_INSN(), can_assign_to_reg_without_clobbers_p(), cfun, extract_mentioned_regs(), find_reg_note(), GET_MODE, LAST_AVAIL_CHECK_FAILURE, may_trap_p(), MEM_P, MEM_VOLATILE_P, NULL, NULL_RTX, st_expr::pattern_regs, PREV_INSN(), SET_DEST, SET_SRC, side_effects_p(), single_set(), st_expr_entry(), store_killed_after(), and store_killed_before().

Referenced by compute_store_table().
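
An illustrative sketch of the list conventions described above (simplified container types, not the find_moveable_store control flow; insns are plain pointers, and the two lists, which in GCC hang off struct st_expr, are std::vector here):

#include <vector>

struct expr_lists
{
  std::vector<const void *> antic_stores;  /* insns, or a null marker */
  std::vector<const void *> avail_stores;  /* last element = live store */
};

static void
record_candidate (expr_lists *e, const void *insn,
                  bool anticipatable, bool available)
{
  /* Anticipatable: record the insn; otherwise record the null marker so
     that later stores of the same MEM are known not to be anticipatable.  */
  e->antic_stores.push_back (anticipatable ? insn : nullptr);

  /* Available: the newly recorded store supersedes the earlier ones,
     which become dead and may be deleted later.  */
  if (available)
    e->avail_stores.push_back (insn);
}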

◆ first_st_expr()

static struct st_expr * first_st_expr ( void )
inline static

◆ free_st_expr_entry()

static void free_st_expr_entry ( struct st_expr * ptr)
static
Free up an individual st_expr entry.   

References st_expr::antic_stores, st_expr::avail_stores, free(), and st_expr::pattern_regs.

Referenced by compute_store_table(), and free_store_motion_mems().

◆ free_store_memory()

static void free_store_memory ( void )
static

◆ free_store_motion_mems()

static void free_store_motion_mems ( void )
static
Free up all memory associated with the st_expr list.   

References free_st_expr_entry(), st_expr::next, NULL, store_motion_mems, and store_motion_mems_table.

Referenced by free_store_memory().

◆ insert_insn_start_basic_block()

static void insert_insn_start_basic_block ( rtx_insn * insn,
basic_block bb )
static
In all code following after this, REACHING_REG has its original
meaning again.  Avoid confusion, and undef the accessor macro for
the temporary marks usage in compute_store_table.   
Insert an instruction at the beginning of a basic block, and update
the BB_HEAD if needed.   

References BB_END, BB_HEAD, dump_file, emit_insn_after_noloc(), basic_block_def::index, LABEL_P, NEXT_INSN(), NOTE_INSN_BASIC_BLOCK_P, PREV_INSN(), and print_inline_rtx().

Referenced by insert_store().

◆ insert_store()

static int insert_store ( struct st_expr * expr,
edge e )
static
This routine will insert a store on an edge. EXPR is the st_expr entry for
the memory reference, and E is the edge to insert it on.  Returns nonzero
if an edge insertion was performed.   

References bitmap_bit_p, bitmap_clear_bit(), cfun, copy_rtx(), dump_file, EDGE_INDEX, EDGE_INDEX_NO_EDGE, EXIT_BLOCK_PTR_FOR_FN, FOR_EACH_EDGE, gcc_assert, gen_move_insn(), st_expr::index, insert_insn_on_edge(), insert_insn_start_basic_block(), NULL_RTX, print_inline_rtx(), and st_insert_map.

Referenced by one_store_motion_pass().

◆ load_kills_store()

static bool load_kills_store ( const_rtx x,
const_rtx store_pattern,
int after )
static
Check to see if the load X is aliased with STORE_PATTERN.
AFTER is true if we are checking the case when STORE_PATTERN occurs
after the X.   

References anti_dependence(), GET_MODE, and true_dependence().

Referenced by find_loads().

◆ make_pass_rtl_store_motion()

rtl_opt_pass * make_pass_rtl_store_motion ( gcc::context * ctxt)

◆ next_st_expr()

static struct st_expr * next_st_expr ( struct st_expr * ptr)
inline static
Return the next item in the list after the specified one.   

References st_expr::next.

Referenced by build_store_vectors(), compute_store_table(), one_store_motion_pass(), and print_store_motion_mems().
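
A usage sketch of the traversal idiom over the store_motion_mems list using first_st_expr() and next_st_expr(); the loop body is only a placeholder:

/* Sketch of the usual iteration idiom over the st_expr list.  */
for (struct st_expr *ptr = first_st_expr (); ptr != NULL;
     ptr = next_st_expr (ptr))
  {
    /* ... examine or update *ptr ...  */
  }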

◆ one_store_motion_pass()

◆ print_store_motion_mems()

static void print_store_motion_mems ( FILE * file)
static

◆ remove_reachable_equiv_notes()

static void remove_reachable_equiv_notes ( basic_block bb,
struct st_expr * smexpr )
static

◆ replace_store_insn()

◆ st_expr_entry()

static struct st_expr * st_expr_entry ( rtx x)
static
This will search the st_expr list for a matching expression. If it
doesn't find one, we create one and initialize it.   

References st_expr::antic_stores, st_expr::avail_stores, GET_MODE, st_expr::hash_index, hash_rtx(), st_expr::index, st_expr::next, NULL, NULL_RTX, st_expr::pattern, st_expr::pattern_regs, st_expr::reaching_reg, store_motion_mems, and store_motion_mems_table.

Referenced by find_moveable_store().
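
A self-contained sketch of the lookup-or-create pattern (std::unordered_map stands in for hash_table<st_expr_hasher>, an unsigned key stands in for the hash of the MEM pattern, and entry is a simplified stand-in for struct st_expr):

#include <unordered_map>

/* Simplified sketch: new entries are also threaded onto the head of a
   singly linked list so they can later be walked in list order.  */
struct entry
{
  unsigned key;
  entry *next;
};

static entry *list_head;
static std::unordered_map<unsigned, entry *> table;

static entry *
get_entry (unsigned key)
{
  auto it = table.find (key);
  if (it != table.end ())
    return it->second;          /* existing entry */

  entry *e = new entry;         /* create and initialize a new one */
  e->key = key;
  e->next = list_head;
  list_head = e;
  table.emplace (key, e);
  return e;
}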

◆ store_killed_after()

static bool store_killed_after ( const_rtx x,
const vec< rtx > & x_regs,
const rtx_insn * insn,
const_basic_block bb,
int * regs_set_after,
rtx * fail_insn )
static
Returns true if the expression X is loaded or clobbered on or after INSN
within basic block BB.  REGS_SET_AFTER is a bitmap of registers set in
or after the insn.  X_REGS is the list of registers mentioned in X.  If the
store is killed, return in FAIL_INSN the last insn in which it occurs.   

References BB_END, last, NULL_RTX, PREV_INSN(), store_killed_in_insn(), and store_ops_ok().

Referenced by build_store_vectors(), and find_moveable_store().
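
An illustrative sketch of the scan (simplified types, not the GCC code; insn_node, prev and kills_store stand in for rtx_insn, PREV_INSN and store_killed_in_insn): after a cheap register-based rejection test, the insns from the end of the block back down to INSN are scanned, so that FAIL_INSN ends up holding the last killing insn in the block.

struct insn_node
{
  insn_node *prev;              /* stands in for PREV_INSN */
  bool kills_store;             /* stands in for store_killed_in_insn */
};

static bool
killed_after (insn_node *insn, insn_node *bb_end,
              bool regs_clobbered_after, insn_node **fail_insn)
{
  if (regs_clobbered_after)
    {
      /* Some address register changes later: we do not know where.  */
      *fail_insn = nullptr;
      return true;
    }
  for (insn_node *act = bb_end; act != insn->prev; act = act->prev)
    if (act->kills_store)
      {
        *fail_insn = act;       /* last killing insn, as we scan backwards */
        return true;
      }
  return false;
}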

◆ store_killed_before()

static bool store_killed_before ( const_rtx x,
const vec< rtx > & x_regs,
const rtx_insn * insn,
const_basic_block bb,
int * regs_set_before )
static
Returns true if the expression X is loaded or clobbered on or before INSN
within basic block BB.  X_REGS is the list of registers mentioned in X.
REGS_SET_BEFORE is a bitmap of registers set before or in this insn.   

References BB_HEAD, PREV_INSN(), store_killed_in_insn(), and store_ops_ok().

Referenced by find_moveable_store().

◆ store_killed_in_insn()

static bool store_killed_in_insn ( const_rtx x,
const vec< rtx > & x_regs,
const rtx_insn * insn,
int after )
static
Check if INSN kills the store pattern X (is aliased with it).
AFTER is true if we are checking the case when store X occurs
after the insn.  Return true if it does.   

References CALL_P, exp_equiv_p(), find_loads(), find_reg_equal_equiv_note(), GET_CODE, i, may_be_sp_based_p(), NONDEBUG_INSN_P, PATTERN(), RTL_CONST_CALL_P, SET, store_killed_in_pat(), XEXP, XVECEXP, and XVECLEN.

Referenced by store_killed_after(), and store_killed_before().

◆ store_killed_in_pat()

static bool store_killed_in_pat ( const_rtx x,
const_rtx pat,
int after )
inline static
Go through pattern PAT looking for any loads which might kill the
store in X.  Return true if found.
AFTER is true if we are checking the case where the loads that kill X
occur after the insn for PAT.   

References exp_equiv_p(), find_loads(), GET_CODE, MEM_P, output_dependence(), SET, SET_DEST, and XEXP.

Referenced by store_killed_in_insn().

◆ store_ops_ok()

static bool store_ops_ok ( const vec< rtx > & x,
int * regs_set )
static
Return false if some of the registers in list X are killed
due to the set of registers in bitmap REGS_SET.   

References REGNO, and regs_set.

Referenced by store_killed_after(), and store_killed_before().
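
A self-contained sketch of the check (std::vector containers stand in for the register list and the REGS_SET bitmap; not the GCC code):

#include <vector>

/* Return false if any register number in X_REGNOS is marked in REGS_SET,
   mirroring store_ops_ok's test that none of the registers used by the
   store's address are clobbered.  */
static bool
regs_still_ok (const std::vector<unsigned> &x_regnos,
               const std::vector<bool> &regs_set)
{
  for (unsigned regno : x_regnos)
    if (regno < regs_set.size () && regs_set[regno])
      return false;             /* some register was set: not OK */
  return true;
}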

Variable Documentation

◆ edge_list

struct edge_list* edge_list
static
Contains the edge_list returned by pre_edge_lcm.   

Referenced by compute_pre_data(), one_pre_gcse_pass(), pre_edge_lcm(), pre_edge_lcm_avs(), and pre_edge_rev_lcm().

◆ num_stores

int num_stores
static
Global holding the number of store expressions we are dealing with.   

Referenced by build_store_vectors(), and one_store_motion_pass().

◆ st_antloc

◆ st_avloc

◆ st_delete_map

sbitmap* st_delete_map
static
Nonzero for expressions which should be deleted in a specific block.   

Referenced by free_store_memory(), and one_store_motion_pass().

◆ st_insert_map

sbitmap* st_insert_map
static
Nonzero for expressions which should be inserted on a specific edge.   

Referenced by free_store_memory(), insert_store(), and one_store_motion_pass().

◆ st_kill

sbitmap* st_kill
static
These bitmaps will hold the local dataflow properties per basic block.   

Referenced by build_store_vectors(), free_store_memory(), and one_store_motion_pass().

◆ st_transp

◆ store_motion_mems

struct st_expr* store_motion_mems = NULL
static
Head of the list of load/store memory refs.   

Referenced by compute_store_table(), enumerate_store_motion_mems(), first_st_expr(), free_store_motion_mems(), and st_expr_entry().

◆ store_motion_mems_table

hash_table<st_expr_hasher>* store_motion_mems_table
static
Hashtable for the load/store memory refs.   

Referenced by compute_store_table(), free_store_motion_mems(), one_store_motion_pass(), and st_expr_entry().