GCC Middle and Back End API Reference
tree-ssa-reassoc.cc File Reference
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "memmodel.h"
#include "tm_p.h"
#include "ssa.h"
#include "optabs-tree.h"
#include "gimple-pretty-print.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "cfganal.h"
#include "gimple-iterator.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimplify-me.h"
#include "tree-cfg.h"
#include "tree-ssa-loop.h"
#include "flags.h"
#include "tree-ssa.h"
#include "langhooks.h"
#include "cfgloop.h"
#include "builtins.h"
#include "gimplify.h"
#include "case-cfn-macros.h"
#include "tree-ssa-reassoc.h"
#include "tree-ssa-math-opts.h"
#include "gimple-range.h"
#include "internal-fn.h"
Include dependency graph for tree-ssa-reassoc.cc:

Data Structures

struct  oecount
 
struct  oecount_hasher
 
struct  v_info
 
struct  inter_bb_range_test_entry
 
struct  repeat_factor
 

Macros

#define PHI_LOOP_BIAS   (1 << 15)
 
#define INTEGER_CONST_TYPE   1 << 4
 
#define FLOAT_ONE_CONST_TYPE   1 << 3
 
#define FLOAT_CONST_TYPE   1 << 2
 
#define OTHER_CONST_TYPE   1 << 1
 
#define SPECIAL_BIASED_END_STMT   0 /* It is the end stmt of all ops. */
 
#define BIASED_END_STMT   1 /* It is the end stmt of normal or biased ops. */
 
#define NORMAL_END_STMT   2 /* It is the end stmt of normal ops. */
 

Typedefs

typedef std::pair< unsigned, unsigned > v_info_elem
 
typedef v_info * v_info_ptr
 

Functions

static int64_t get_rank (tree)
 
static bool reassoc_stmt_dominates_stmt_p (gimple *, gimple *)
 
static bool reassoc_remove_stmt (gimple_stmt_iterator *gsi)
 
static bool propagate_bias_p (gimple *stmt)
 
static int64_t phi_rank (gimple *stmt)
 
static int64_t propagate_rank (int64_t rank, tree op, bool *maybe_biased_p)
 
static int64_t find_operand_rank (tree e)
 
static void insert_operand_rank (tree e, int64_t rank)
 
static int constant_type (tree t)
 
static int sort_by_operand_rank (const void *pa, const void *pb)
 
static void add_to_ops_vec (vec< operand_entry * > *ops, tree op, gimple *stmt_to_insert=NULL)
 
static void add_repeat_to_ops_vec (vec< operand_entry * > *ops, tree op, HOST_WIDE_INT repeat)
 
static bool can_reassociate_op_p (tree op)
 
static bool can_reassociate_type_p (tree type)
 
static bool is_reassociable_op (gimple *stmt, enum tree_code code, class loop *loop)
 
static bool gimple_nop_conversion_p (gimple *stmt)
 
static tree get_unary_op (tree name, enum tree_code opcode)
 
static bool ops_equal_values_p (tree op1, tree op2)
 
static bool eliminate_duplicate_pair (enum tree_code opcode, vec< operand_entry * > *ops, bool *all_done, unsigned int i, operand_entry *curr, operand_entry *last)
 
static bool eliminate_plus_minus_pair (enum tree_code opcode, vec< operand_entry * > *ops, unsigned int currindex, operand_entry *curr)
 
static bool eliminate_not_pairs (enum tree_code opcode, vec< operand_entry * > *ops, unsigned int currindex, operand_entry *curr)
 
static void eliminate_using_constants (enum tree_code opcode, vec< operand_entry * > *ops)
 
static void linearize_expr_tree (vec< operand_entry * > *, gimple *, bool, bool)
 
static int oecount_cmp (const void *p1, const void *p2)
 
static bool stmt_is_power_of_op (gimple *stmt, tree op)
 
static HOST_WIDE_INT decrement_power (gimple *stmt)
 
static tree make_new_ssa_for_def (gimple *stmt, enum tree_code opcode, tree op)
 
static void make_new_ssa_for_all_defs (tree *def, enum tree_code opcode, tree op, vec< gimple * > &stmts_to_fix)
 
static void propagate_op_to_single_use (tree op, gimple *stmt, tree *def)
 
static void zero_one_operation (tree *def, enum tree_code opcode, tree op)
 
static void insert_stmt_after (gimple *stmt, gimple *insert_point)
 
static gimple * build_and_add_sum (tree type, tree op1, tree op2, enum tree_code opcode)
 
static bool undistribute_ops_list (enum tree_code opcode, vec< operand_entry * > *ops, class loop *loop)
 
static int sort_by_mach_mode (const void *p_i, const void *p_j)
 
static void cleanup_vinfo_map (hash_map< tree, v_info_ptr > &info_map)
 
static bool undistribute_bitref_for_vector (enum tree_code opcode, vec< operand_entry * > *ops, struct loop *loop)
 
static bool eliminate_redundant_comparison (enum tree_code opcode, vec< operand_entry * > *ops, unsigned int currindex, operand_entry *curr)
 
static bool transform_add_to_multiply (vec< operand_entry * > *ops)
 
static void optimize_ops_list (enum tree_code opcode, vec< operand_entry * > *ops)
 
void dump_range_entry (FILE *file, struct range_entry *r, bool skip_exp)
 
DEBUG_FUNCTION void debug_range_entry (struct range_entry *r)
 
void init_range_entry (struct range_entry *r, tree exp, gimple *stmt)
 
static int range_entry_cmp (const void *a, const void *b)
 
static tree force_into_ssa_name (gimple_stmt_iterator *gsi, tree expr, bool before)
 
static bool update_range_test (struct range_entry *range, struct range_entry *otherrange, struct range_entry **otherrangep, unsigned int count, enum tree_code opcode, vec< operand_entry * > *ops, tree exp, gimple_seq seq, bool in_p, tree low, tree high, bool strict_overflow_p)
 
static bool optimize_range_tests_xor (enum tree_code opcode, tree type, tree lowi, tree lowj, tree highi, tree highj, vec< operand_entry * > *ops, struct range_entry *rangei, struct range_entry *rangej)
 
static bool optimize_range_tests_diff (enum tree_code opcode, tree type, tree lowi, tree lowj, tree highi, tree highj, vec< operand_entry * > *ops, struct range_entry *rangei, struct range_entry *rangej)
 
static bool optimize_range_tests_1 (enum tree_code opcode, int first, int length, bool optimize_xor, vec< operand_entry * > *ops, struct range_entry *ranges)
 
static tree extract_bit_test_mask (tree exp, int prec, tree totallow, tree low, tree high, wide_int *mask, tree *totallowp)
 
static bool optimize_range_tests_to_bit_test (enum tree_code opcode, int first, int length, vec< operand_entry * > *ops, struct range_entry *ranges)
 
static bool optimize_range_tests_cmp_bitwise (enum tree_code opcode, int first, int length, vec< operand_entry * > *ops, struct range_entry *ranges)
 
static bool optimize_range_tests_var_bound (enum tree_code opcode, int first, int length, vec< operand_entry * > *ops, struct range_entry *ranges, basic_block first_bb)
 
static bool optimize_range_tests (enum tree_code opcode, vec< operand_entry * > *ops, basic_block first_bb)
 
static tree_code ovce_extract_ops (tree var, gassign **rets, bool *reti, tree *type, tree *lhs, tree *rhs, gassign **vcond)
 
static bool optimize_vec_cond_expr (tree_code opcode, vec< operand_entry * > *ops)
 
static bool final_range_test_p (gimple *stmt)
 
static bool suitable_cond_bb (basic_block bb, basic_block test_bb, basic_block *other_bb, bool *test_swapped_p, bool backward)
 
bool no_side_effect_bb (basic_block bb)
 
static bool get_ops (tree var, enum tree_code code, vec< operand_entry * > *ops, class loop *loop)
 
static tree update_ops (tree var, enum tree_code code, const vec< operand_entry * > &ops, unsigned int *pidx, class loop *loop)
 
static bool maybe_optimize_range_tests (gimple *stmt)
 
static void remove_visited_stmt_chain (tree var)
 
static void swap_ops_for_binary_stmt (const vec< operand_entry * > &ops, unsigned int opindex)
 
static gimple * find_insert_point (gimple *stmt, tree rhs1, tree rhs2, bool &insert_before)
 
static void insert_stmt_before_use (gimple *stmt, gimple *stmt_to_insert)
 
static tree rewrite_expr_tree (gimple *stmt, enum tree_code rhs_code, unsigned int opindex, const vec< operand_entry * > &ops, bool changed, bool next_changed)
 
static int get_required_cycles (int ops_num, int cpu_width)
 
static int get_mult_latency_consider_fma (int ops_num, int mult_num, int width)
 
static int get_reassociation_width (vec< operand_entry * > *ops, int mult_num, tree lhs, enum tree_code opc, machine_mode mode)
 
static void rewrite_expr_tree_parallel (gassign *stmt, int width, bool has_fma, const vec< operand_entry * > &ops)
 
static void linearize_expr (gimple *stmt)
 
static gimple * get_single_immediate_use (tree lhs)
 
static tree negate_value (tree tonegate, gimple_stmt_iterator *gsip)
 
static bool should_break_up_subtract (gimple *stmt)
 
static void break_up_subtract (gimple *stmt, gimple_stmt_iterator *gsip)
 
static bool acceptable_pow_call (gcall *stmt, tree *base, HOST_WIDE_INT *exponent)
 
static bool try_special_add_to_ops (vec< operand_entry * > *ops, enum tree_code code, tree op, gimple *def_stmt)
 
static void repropagate_negates (void)
 
static void break_up_subtract_bb (basic_block bb)
 
static int compare_repeat_factors (const void *x1, const void *x2)
 
static tree attempt_builtin_powi (gimple *stmt, vec< operand_entry * > *ops)
 
static void attempt_builtin_copysign (vec< operand_entry * > *ops)
 
static void transform_stmt_to_copy (gimple_stmt_iterator *gsi, gimple *stmt, tree new_rhs)
 
static void transform_stmt_to_multiply (gimple_stmt_iterator *gsi, gimple *stmt, tree rhs1, tree rhs2)
 
static int rank_ops_for_fma (vec< operand_entry * > *ops)
 
static bool reassociate_bb (basic_block bb)
 
static void branch_fixup (void)
 
void dump_ops_vector (FILE *file, vec< operand_entry * > ops)
 
void debug_ops_vector (vec< operand_entry * > ops)
 
static bool do_reassoc (void)
 
static void init_reassoc (void)
 
static void fini_reassoc (void)
 
static unsigned int execute_reassoc (bool insert_powi_p, bool bias_loop_carried_phi_ranks_p)
 
gimple_opt_pass * make_pass_reassoc (gcc::context *ctxt)
 

Variables

static bool reassoc_insert_powi_p
 
static bool reassoc_bias_loop_carried_phi_ranks_p
 
struct { 
 
   int   linearized 
 
   int   constants_eliminated 
 
   int   ops_eliminated 
 
   int   rewritten 
 
   int   pows_encountered 
 
   int   pows_created 
 
} reassociate_stats 
 
static object_allocator< operand_entry > operand_entry_pool ("operand entry pool")
 
static unsigned int next_operand_entry_id
 
static int64_t * bb_rank
 
static hash_map< tree, int64_t > * operand_rank
 
static auto_bitmap biased_names
 
static vec< tree > reassoc_branch_fixups
 
static vec< tree > plus_negates
 
static vec< oecount > cvec
 
static vec< repeat_factor > repeat_factor_vec
 

Macro Definition Documentation

◆ BIASED_END_STMT

#define BIASED_END_STMT   1 /* It is the end stmt of normal or biased ops. */

◆ FLOAT_CONST_TYPE

#define FLOAT_CONST_TYPE   1 << 2

Referenced by constant_type().

◆ FLOAT_ONE_CONST_TYPE

#define FLOAT_ONE_CONST_TYPE   1 << 3

Referenced by constant_type().

◆ INTEGER_CONST_TYPE

#define INTEGER_CONST_TYPE   1 << 4
We want integer ones to end up last no matter what, since they are
the ones we can do the most with.   

Referenced by constant_type().

◆ NORMAL_END_STMT

#define NORMAL_END_STMT   2 /* It is the end stmt of normal ops. */

◆ OTHER_CONST_TYPE

#define OTHER_CONST_TYPE   1 << 1

Referenced by constant_type().

◆ PHI_LOOP_BIAS

#define PHI_LOOP_BIAS   (1 << 15)
Bias amount for loop-carried phis.  We want this to be larger than
the depth of any reassociation tree we can see, but not larger than
the rank difference between two blocks.   

Referenced by phi_rank().

◆ SPECIAL_BIASED_END_STMT

#define SPECIAL_BIASED_END_STMT   0 /* It is the end stmt of all ops. */

Typedef Documentation

◆ v_info_elem

Pair to hold the information of one specific VECTOR_TYPE SSA_NAME:
first: element index for each relevant BIT_FIELD_REF.
second: the index of vec ops* for each relevant BIT_FIELD_REF.   

◆ v_info_ptr

Function Documentation

◆ acceptable_pow_call()

static bool acceptable_pow_call ( gcall * stmt,
tree * base,
HOST_WIDE_INT * exponent )
static
Determine whether STMT is a builtin call that raises an SSA name
to an integer power and has only one use.  If so, and this is early
reassociation and unsafe math optimizations are permitted, place
the SSA name in *BASE and the exponent in *EXPONENT, and return TRUE.
If any of these conditions does not hold, return FALSE.   

References ggc_alloc(), gimple_call_arg(), gimple_call_combined_fn(), HOST_BITS_PER_WIDE_INT, REAL_EXP, real_from_integer(), real_identical(), real_to_integer(), REAL_VALUE_TYPE, SIGNED, TREE_CODE, tree_fits_shwi_p(), TREE_REAL_CST, and tree_to_shwi().

Referenced by try_special_add_to_ops().
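 
As a rough illustration (a hand-written C fragment, not taken from the GCC sources or testsuite), a call this predicate would accept when unsafe math optimizations are enabled (e.g. under -ffast-math) looks like:
 
  /* The single-use __builtin_pow call with a literal exponent that is
     exactly representable as an integer is recognized, yielding
     *BASE = x and *EXPONENT = 4.  */
  double
  quartic (double x)
  {
    return __builtin_pow (x, 4.0);
  }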

◆ add_repeat_to_ops_vec()

static void add_repeat_to_ops_vec ( vec< operand_entry * > * ops,
tree op,
HOST_WIDE_INT repeat )
static
Add an operand entry to *OPS for the tree operand OP with repeat
count REPEAT.   

References get_rank(), ggc_alloc(), next_operand_entry_id, NULL, operand_entry_pool, and reassociate_stats.

Referenced by try_special_add_to_ops().

◆ add_to_ops_vec()

◆ attempt_builtin_copysign()

◆ attempt_builtin_powi()

static tree attempt_builtin_powi ( gimple * stmt,
vec< operand_entry * > * ops )
static

◆ branch_fixup()

static void branch_fixup ( void )
static
Add jumps around shifts for range tests turned into bit tests.
For each SSA_NAME VAR we have code like:
VAR = ...; // final stmt of range comparison
// bit test here...;
OTHERVAR = ...; // final stmt of the bit test sequence
RES = VAR | OTHERVAR;
Turn the above into:
VAR = ...;
if (VAR != 0)
  goto <l3>;
else
  goto <l2>;
<l2>:
// bit test here...;
OTHERVAR = ...;
<l3>:
# RES = PHI<1(l1), OTHERVAR(l2)>;   

References add_phi_arg(), build_one_cst(), build_zero_cst(), CDI_DOMINATORS, CDI_POST_DOMINATORS, basic_block_def::count, create_phi_node(), profile_probability::even(), find_edge(), FOR_EACH_VEC_ELT, g, gcc_assert, gcc_unreachable, ggc_alloc(), gimple_assign_lhs(), gimple_assign_rhs1(), gimple_assign_rhs2(), gimple_assign_rhs_code(), gimple_bb(), gimple_build_cond(), gimple_location(), gimple_set_location(), gsi_for_stmt(), gsi_insert_after(), GSI_NEW_STMT, gsi_remove(), i, is_gimple_assign(), make_edge(), NULL_TREE, reassoc_branch_fixups, set_immediate_dominator(), single_imm_use(), single_succ_edge(), split_block(), SSA_NAME_DEF_STMT, and TREE_TYPE.

Referenced by execute_reassoc().

◆ break_up_subtract()

◆ break_up_subtract_bb()

static void break_up_subtract_bb ( basic_block bb)
static
Break up subtract operations in block BB.

We do this top down because we don't know whether the subtract is
part of a possible chain of reassociation except at the top.

IE given
d = f + g
c = a + e
b = c - d
q = b - r
k = t - q

we want to break up k = t - q, but we won't until we've transformed q
= b - r, which won't be broken up until we transform b = c - d.

En passant, clear the GIMPLE visited flag on every statement
and set UIDs within each basic block.   

References break_up_subtract(), break_up_subtract_bb(), can_reassociate_op_p(), can_reassociate_type_p(), CDI_DOMINATORS, first_dom_son(), ggc_alloc(), gimple_assign_lhs(), gimple_assign_rhs1(), gimple_assign_rhs2(), gimple_assign_rhs_code(), gimple_set_uid(), gimple_set_visited(), gsi_end_p(), gsi_next(), gsi_start_bb(), gsi_stmt(), is_gimple_assign(), next_dom_son(), plus_negates, should_break_up_subtract(), and TREE_TYPE.

Referenced by break_up_subtract_bb(), and do_reassoc().

◆ build_and_add_sum()

◆ can_reassociate_op_p()

static bool can_reassociate_op_p ( tree op)
static

◆ can_reassociate_type_p()

static bool can_reassociate_type_p ( tree type)
static
Returns true if we can reassociate operations of TYPE.
That is for integral or non-saturating fixed-point types, and for
floating point types when associative-math is enabled.   

References ANY_INTEGRAL_TYPE_P, FLOAT_TYPE_P, ggc_alloc(), NON_SAT_FIXED_POINT_TYPE_P, and TYPE_OVERFLOW_WRAPS.

Referenced by break_up_subtract_bb(), and reassociate_bb().

◆ cleanup_vinfo_map()

static void cleanup_vinfo_map ( hash_map< tree, v_info_ptr > & info_map)
static
Cleanup hash map for VECTOR information.   

References ggc_alloc(), and NULL.

Referenced by undistribute_bitref_for_vector().

◆ compare_repeat_factors()

static int compare_repeat_factors ( const void * x1,
const void * x2 )
static
Used for sorting the repeat factor vector.  Sort primarily by
ascending occurrence count, secondarily by descending rank.   

References ggc_alloc().

Referenced by attempt_builtin_powi().

◆ constant_type()

static int constant_type ( tree t)
inlinestatic
Classify an invariant tree into integer, float, or other, so that
we can sort them to be near other constants of the same type.   

References FLOAT_CONST_TYPE, FLOAT_ONE_CONST_TYPE, INTEGER_CONST_TYPE, INTEGRAL_TYPE_P, OTHER_CONST_TYPE, real_minus_onep(), real_onep(), SCALAR_FLOAT_TYPE_P, and TREE_TYPE.

Referenced by sort_by_operand_rank().

◆ debug_ops_vector()

DEBUG_FUNCTION void debug_ops_vector ( vec< operand_entry * > ops)
Dump the operand entry vector OPS to STDERR.   

References dump_ops_vector(), and ggc_alloc().

◆ debug_range_entry()

DEBUG_FUNCTION void debug_range_entry ( struct range_entry * r)
Dump the range entry R to STDERR.   

References dump_range_entry(), fputc(), ggc_alloc(), and r.

◆ decrement_power()

static HOST_WIDE_INT decrement_power ( gimple * stmt)
static
Given STMT which is a __builtin_pow* call, decrement its exponent
in place and return the result.  Assumes that stmt_is_power_of_op
was previously called for STMT and returned TRUE.   

References build_int_cst(), build_real(), gcc_unreachable, ggc_alloc(), gimple_call_arg(), gimple_call_combined_fn(), gimple_call_set_arg(), real_from_integer(), real_to_integer(), REAL_VALUE_TYPE, SIGNED, TREE_INT_CST_LOW, TREE_REAL_CST, and TREE_TYPE.

Referenced by zero_one_operation().

◆ do_reassoc()

static bool do_reassoc ( void )
static
Bubble up return status from reassociate_bb.   

References break_up_subtract_bb(), cfun, ENTRY_BLOCK_PTR_FOR_FN, EXIT_BLOCK_PTR_FOR_FN, and reassociate_bb().

Referenced by execute_reassoc().

◆ dump_ops_vector()

void dump_ops_vector ( FILE * file,
vec< operand_entry * > ops )
Dump the operand entry vector OPS to FILE.   

References FOR_EACH_VEC_ELT, ggc_alloc(), i, and print_generic_expr().

Referenced by debug_ops_vector().

◆ dump_range_entry()

void dump_range_entry ( FILE * file,
struct range_entry * r,
bool skip_exp )
The following functions are subroutines to optimize_range_tests and allow
it to try to change a logical combination of comparisons into a range
test.

For example, both
     X == 2 || X == 5 || X == 3 || X == 4
and
     X >= 2 && X <= 5
are converted to
     (unsigned) (X - 2) <= 3

For more information see comments above fold_test_range in fold-const.cc,
this implementation is for GIMPLE.   
Dump the range entry R to FILE, skipping its expression if SKIP_EXP.   

References fputc(), ggc_alloc(), print_generic_expr(), and r.

Referenced by debug_range_entry(), and update_range_test().

◆ eliminate_duplicate_pair()

static bool eliminate_duplicate_pair ( enum tree_code opcode,
vec< operand_entry * > * ops,
bool * all_done,
unsigned int i,
operand_entry * curr,
operand_entry * last )
static
If CURR and LAST are a pair of ops that OPCODE allows us to
eliminate through equivalences, do so, remove them from OPS, and
return true.  Otherwise, return false.   

References add_to_ops_vec(), build_zero_cst(), dump_file, dump_flags, ggc_alloc(), i, last, print_generic_expr(), print_generic_stmt(), reassociate_stats, TDF_DETAILS, and TREE_TYPE.

Referenced by optimize_ops_list().
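 
For instance (an illustrative C snippet, not from the sources), with OPCODE = BIT_AND_EXPR the duplicated operand is simply dropped:
 
  /* a & b & a: the repeated 'a' is eliminated, leaving a & b.
     With BIT_XOR_EXPR the pair would instead cancel to 0.  */
  unsigned
  dup_and (unsigned a, unsigned b)
  {
    return a & b & a;
  }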

◆ eliminate_not_pairs()

static bool eliminate_not_pairs ( enum tree_code opcode,
vec< operand_entry * > * ops,
unsigned int currindex,
operand_entry * curr )
static
If OPCODE is BIT_IOR_EXPR or BIT_AND_EXPR, and CURR->OP is really a
bitwise not expression, look in OPS for a corresponding operand to
cancel it out.  If we find one, remove the other from OPS, replace
OPS[CURRINDEX] with 0 (for BIT_AND_EXPR) or all ones (for BIT_IOR_EXPR),
and return true.  Otherwise, return false.  

References build_all_ones_cst(), build_zero_cst(), dump_file, dump_flags, get_unary_op(), ggc_alloc(), i, NULL_TREE, print_generic_expr(), reassociate_stats, TDF_DETAILS, TREE_CODE, and TREE_TYPE.

Referenced by optimize_ops_list().
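 
A sketch of input that triggers this (hypothetical example, not from the testsuite):
 
  /* 'a' and '~a' cancel; for BIT_IOR_EXPR the whole expression folds to
     all ones (-1), for BIT_AND_EXPR it would fold to 0.  */
  int
  not_pair (int a, int b)
  {
    return a | b | ~a;
  }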

◆ eliminate_plus_minus_pair()

static bool eliminate_plus_minus_pair ( enum tree_code opcode,
vec< operand_entry * > * ops,
unsigned int currindex,
operand_entry * curr )
static
If OPCODE is PLUS_EXPR, CURR->OP is a negate expression or a bitwise not
expression, look in OPS for a corresponding positive operation to cancel
it out.  If we find one, remove the other from OPS, replace
OPS[CURRINDEX] with 0 or -1, respectively, and return true.  Otherwise,
return false.  

References add_to_ops_vec(), build_all_ones_cst(), build_zero_cst(), dump_file, dump_flags, get_unary_op(), ggc_alloc(), gimple_assign_rhs_code(), i, NULL_TREE, ops_equal_values_p(), plus_negates, print_generic_expr(), reassociate_stats, SSA_NAME_DEF_STMT, TDF_DETAILS, TREE_CODE, and TREE_TYPE.

Referenced by optimize_ops_list().
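 
For example (hand-written, illustrative only):
 
  /* The negate cancels against 'a' and is replaced by 0, so the sum
     reduces to b.  A '~a' operand would instead contribute -1, because
     a + ~a == -1.  */
  int
  plus_minus (int a, int b)
  {
    return a + b + (-a);
  }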

◆ eliminate_redundant_comparison()

static bool eliminate_redundant_comparison ( enum tree_code opcode,
vec< operand_entry * > * ops,
unsigned int currindex,
operand_entry * curr )
static

◆ eliminate_using_constants()

static void eliminate_using_constants ( enum tree_code opcode,
vec< operand_entry * > * ops )
static
Use constant value that may be present in OPS to try to eliminate
operands.  Note that this function is only really used when we've
eliminated ops for other reasons, or merged constants.  Across
single statements, fold already does all of this, plus more.  There
is little point in duplicating logic, so I've only included the
identities that I could ever construct testcases to trigger.   

References ANY_INTEGRAL_TYPE_P, dump_file, dump_flags, FLOAT_TYPE_P, fold_real_zero_addition_p(), ggc_alloc(), HONOR_NANS(), HONOR_SIGNED_ZEROS(), HONOR_SNANS(), integer_all_onesp(), integer_onep(), integer_zerop(), real_onep(), real_zerop(), reassociate_stats, TDF_DETAILS, and TREE_TYPE.

Referenced by optimize_ops_list().

◆ execute_reassoc()

static unsigned int execute_reassoc ( bool insert_powi_p,
bool bias_loop_carried_phi_ranks_p )
static
Gate and execute functions for Reassociation.  If INSERT_POWI_P, enable
insertion of __builtin_powi calls.

Returns TODO_cleanup_cfg if a CFG cleanup pass is desired due to
optimization of a gimple conditional.  Otherwise returns zero.   

References branch_fixup(), do_reassoc(), fini_reassoc(), ggc_alloc(), init_reassoc(), reassoc_bias_loop_carried_phi_ranks_p, reassoc_insert_powi_p, repropagate_negates(), and TODO_cleanup_cfg.

◆ extract_bit_test_mask()

static tree extract_bit_test_mask ( tree exp,
int prec,
tree totallow,
tree low,
tree high,
wide_int * mask,
tree * totallowp )
static
Helper function of optimize_range_tests_to_bit_test.  Handle a single
range, EXP, LOW, HIGH, compute bit mask of bits to test and return
EXP on success, NULL otherwise.   

References compare_tree_int(), exp(), ggc_alloc(), range_entry::high, int_const_binop(), integer_zerop(), range_entry::low, wi::lshift(), wi::ltu_p(), wi::neg(), NULL_TREE, wi::popcount(), wi::sext(), wi::shifted_mask(), STRIP_NOPS, wi::to_widest(), TREE_CODE, tree_int_cst_lt(), tree_int_cst_sgn(), TREE_OPERAND, TREE_OVERFLOW, tree_to_uhwi(), TREE_TYPE, TYPE_PRECISION, wide_int_to_tree(), and wi::zext().

Referenced by optimize_range_tests_to_bit_test().

◆ final_range_test_p()

static bool final_range_test_p ( gimple * stmt)
static
Return true if STMT is a cast like:
<bb N>:
...
_123 = (int) _234;

<bb M>:
# _345 = PHI <_123(N), 1(...), 1(...)>
where _234 has bool type, _123 has single use and
bb N has a single successor M.  This is commonly used in
the last block of a range test.

Also return true if STMT is tcc_comparison like:
<bb N>:
...
_234 = a_2(D) == 2;

<bb M>:
# _345 = PHI <_234(N), 1(...), 1(...)>
_346 = (int) _345;
where _234 has bool type, a single use, and
bb N has a single successor M.  This is commonly used in
the last block of a range test.   

References EDGE_COMPLEX, flow_bb_inside_loop_p(), ggc_alloc(), gimple_assign_cast_p(), gimple_assign_lhs(), gimple_assign_rhs1(), gimple_assign_rhs_code(), gimple_bb(), INTEGRAL_TYPE_P, is_gimple_assign(), loop_containing_stmt(), single_imm_use(), single_succ_edge(), single_succ_p(), SSA_NAME_DEF_STMT, tcc_comparison, TREE_CODE, TREE_CODE_CLASS, and TREE_TYPE.

Referenced by maybe_optimize_range_tests(), and suitable_cond_bb().

◆ find_insert_point()

static gimple * find_insert_point ( gimple * stmt,
tree rhs1,
tree rhs2,
bool & insert_before )
inlinestatic
If definition of RHS1 or RHS2 dominates STMT, return the later of those
two definitions, otherwise return STMT.  Sets INSERT_BEFORE to indicate
whether RHS1 op RHS2 can be inserted before or needs to be inserted
after the returned stmt.   

References ggc_alloc(), reassoc_stmt_dominates_stmt_p(), SSA_NAME_DEF_STMT, and TREE_CODE.

Referenced by insert_stmt_before_use(), and rewrite_expr_tree().

◆ find_operand_rank()

static int64_t find_operand_rank ( tree e)
inlinestatic
Look up the operand rank structure for expression E.   

References ggc_alloc(), and operand_rank.

Referenced by get_rank().

◆ fini_reassoc()

static void fini_reassoc ( void )
static

◆ force_into_ssa_name()

static tree force_into_ssa_name ( gimple_stmt_iterator * gsi,
tree expr,
bool before )
static
Helper function for update_range_test.  Force EXPR into an SSA_NAME,
insert needed statements BEFORE or after GSI.   

References force_gimple_operand_gsi(), g, ggc_alloc(), gimple_assign_lhs(), gimple_build_assign(), GSI_CONTINUE_LINKING, gsi_insert_after(), gsi_insert_before(), GSI_SAME_STMT, make_ssa_name(), NULL_TREE, TREE_CODE, and TREE_TYPE.

Referenced by update_range_test().

◆ get_mult_latency_consider_fma()

static int get_mult_latency_consider_fma ( int ops_num,
int mult_num,
int width )
inlinestatic
Given that the target fully pipelines FMA instructions, return the latency
of MULT_EXPRs that can't be hidden by the FMAs.  WIDTH is the number of
pipes.   

References CEIL, gcc_checking_assert, and ggc_alloc().

Referenced by get_reassociation_width().

◆ get_ops()

static bool get_ops ( tree var,
enum tree_code code,
vec< operand_entry * > * ops,
class loop * loop )
static
If VAR is set by CODE (BIT_{AND,IOR}_EXPR) which is reassociable,
return true and fill in *OPS recursively.   

References get_ops(), ggc_alloc(), gimple_assign_rhs1(), gimple_assign_rhs2(), gimple_set_visited(), has_single_use(), i, is_reassociable_op(), NULL, operand_entry_pool, SSA_NAME_DEF_STMT, and TREE_CODE.

Referenced by get_ops(), and maybe_optimize_range_tests().

◆ get_rank()

◆ get_reassociation_width()

static int get_reassociation_width ( vec< operand_entry * > * ops,
int mult_num,
tree lhs,
enum tree_code opc,
machine_mode mode )
static
Returns an optimal number of registers to use for computation of
given statements.

LHS is the result ssa name of OPS.  MULT_NUM is number of sub-expressions
that are MULT_EXPRs, when OPS are PLUS_EXPRs or MINUS_EXPRs.   

References FOR_EACH_IMM_USE_FAST, FOR_EACH_VEC_ELT, gcc_checking_assert, get_mult_latency_consider_fma(), get_required_cycles(), ggc_alloc(), gimple_assign_rhs1(), gimple_assign_rhs2(), gimple_assign_rhs_code(), gimple_bb(), gimple_phi_arg_edge(), gimple_phi_result(), i, is_gimple_assign(), phi_arg_index_from_use(), SSA_NAME_DEF_STMT, targetm, TREE_CODE, tree_to_poly_int64(), TREE_TYPE, TYPE_SIZE, and USE_STMT.

Referenced by reassociate_bb().

◆ get_required_cycles()

static int get_required_cycles ( int ops_num,
int cpu_width )
static
Find out how many cycles we need to compute the statement chain.
OPS_NUM holds the number of statements in a chain.  CPU_WIDTH is the
maximum number of independent statements we may execute per cycle.   

References exact_log2(), floor_log2(), and ggc_alloc().

Referenced by get_reassociation_width().

◆ get_single_immediate_use()

static gimple * get_single_immediate_use ( tree lhs)
static
If LHS has a single immediate use that is a GIMPLE_ASSIGN statement, return
it.  Otherwise, return NULL.   

References ggc_alloc(), is_gimple_assign(), NULL, single_imm_use(), and TREE_CODE.

Referenced by repropagate_negates(), and should_break_up_subtract().

◆ get_unary_op()

static tree get_unary_op ( tree name,
enum tree_code opcode )
static
Given NAME, if NAME is defined by a unary operation OPCODE, return the
operand of the negate operation.  Otherwise, return NULL.   

References gimple_assign_rhs1(), gimple_assign_rhs_code(), gimple_nop_conversion_p(), is_gimple_assign(), NULL_TREE, SSA_NAME_DEF_STMT, and TREE_CODE.

Referenced by eliminate_not_pairs(), and eliminate_plus_minus_pair().

◆ gimple_nop_conversion_p()

static bool gimple_nop_conversion_p ( gimple * stmt)
static

◆ init_range_entry()

void init_range_entry ( struct range_entry * r,
tree exp,
gimple * stmt )

◆ init_reassoc()

◆ insert_operand_rank()

static void insert_operand_rank ( tree e,
int64_t rank )
inlinestatic
Insert {E,RANK} into the operand rank hashtable.   

References gcc_assert, and operand_rank.

Referenced by get_rank(), and init_reassoc().

◆ insert_stmt_after()

◆ insert_stmt_before_use()

static void insert_stmt_before_use ( gimple * stmt,
gimple * stmt_to_insert )
static

◆ is_reassociable_op()

static bool is_reassociable_op ( gimple * stmt,
enum tree_code code,
class loop * loop )
static

◆ linearize_expr()

◆ linearize_expr_tree()

◆ make_new_ssa_for_all_defs()

static void make_new_ssa_for_all_defs ( tree * def,
enum tree_code opcode,
tree op,
vec< gimple * > & stmts_to_fix )
static
Replace all SSA names defined in STMTS_TO_FIX with new SSA names and
replace their uses.  Also do this for the stmt that defines DEF
if *DEF is not OP.   

References FOR_EACH_VEC_ELT, ggc_alloc(), i, make_new_ssa_for_def(), SSA_NAME_DEF_STMT, and TREE_CODE.

Referenced by zero_one_operation().

◆ make_new_ssa_for_def()

◆ make_pass_reassoc()

gimple_opt_pass * make_pass_reassoc ( gcc::context * ctxt)

References ggc_alloc().

◆ maybe_optimize_range_tests()

static bool maybe_optimize_range_tests ( gimple * stmt)
static
Inter-bb range test optimization.

Returns TRUE if a gimple conditional is optimized to a true/false,
otherwise return FALSE.

This indicates to the caller that it should run a CFG cleanup pass
once reassociation is completed.   

References boolean_false_node, cfun, EDGE_COUNT, EDGE_SUCC, final_range_test_p(), find_edge(), fold_convert, FOR_EACH_EDGE, FOR_EACH_IMM_USE_ON_STMT, FOR_EACH_IMM_USE_STMT, g, gcc_assert, gcc_checking_assert, gcc_unreachable, get_ops(), ggc_alloc(), gimple_assign_cast_p(), gimple_assign_lhs(), gimple_assign_rhs1(), gimple_assign_rhs_code(), gimple_bb(), gimple_build_assign(), gimple_cond_code(), gimple_cond_lhs(), gimple_cond_make_false(), gimple_cond_make_true(), gimple_cond_rhs(), gimple_cond_set_code(), gimple_cond_set_lhs(), gimple_cond_set_rhs(), gimple_phi_arg_def(), gimple_set_uid(), gimple_set_visited(), gimple_uid(), gsi_for_stmt(), gsi_insert_before(), gsi_last_bb(), GSI_SAME_STMT, has_single_use(), i, basic_block_def::index, integer_onep(), integer_zerop(), INTEGRAL_TYPE_P, is_gimple_assign(), is_gimple_debug(), is_gimple_min_invariant(), last_nondebug_stmt(), loop_containing_stmt(), make_ssa_name(), no_side_effect_bb(), NULL, NULL_TREE, operand_entry_pool, optimize_range_tests(), reset_flow_sensitive_info_in_bb(), SET_USE, single_imm_use(), single_pred(), single_pred_p(), single_succ(), stmt_could_throw_p(), basic_block_def::succs, suitable_cond_bb(), tcc_comparison, TREE_CODE, TREE_CODE_CLASS, TREE_TYPE, update_ops(), and update_stmt().

Referenced by reassociate_bb().
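 
A sketch of the multi-block condition this targets (hypothetical C source, not from the testsuite):
 
  /* Each comparison ends up in its own basic block.  The inter-bb pass
     collects the conditions and, when possible, rewrites them into a
     single range check such as (unsigned) (x - 3) <= 2, turning the
     now-redundant GIMPLE_CONDs into constant true/false, which is why
     the caller then requests a CFG cleanup.  */
  int
  in_range (int x)
  {
    if (x == 3)
      return 1;
    if (x == 4)
      return 1;
    if (x == 5)
      return 1;
    return 0;
  }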

◆ negate_value()

static tree negate_value ( tree tonegate,
gimple_stmt_iterator * gsip )
static
Recursively negate the value of TONEGATE, and return the SSA_NAME
representing the negated value.  Insertions of any necessary
instructions go before GSI.
This function is recursive in that, if you hand it "a_5" as the
value to negate, and a_5 is defined by "a_5 = b_3 + b_4", it will
transform b_3 + b_4 into a_5 = -b_3 + -b_4.   

References fold_build1, force_gimple_operand_gsi(), g, ggc_alloc(), gimple_assign_lhs(), gimple_assign_rhs1(), gimple_assign_rhs2(), gimple_assign_rhs_code(), gimple_build_assign(), gimple_set_uid(), gimple_set_visited(), gimple_uid(), gsi_end_p(), gsi_for_stmt(), gsi_insert_before(), gsi_prev(), GSI_SAME_STMT, gsi_stmt(), has_single_use(), is_gimple_assign(), make_ssa_name(), negate_value(), NULL, NULL_TREE, SSA_NAME_DEF_STMT, TREE_CODE, and TREE_TYPE.

Referenced by break_up_subtract(), and negate_value().

◆ no_side_effect_bb()

bool no_side_effect_bb ( basic_block bb)
Return true if BB doesn't have side-effects that would disallow
range test optimization, all SSA_NAMEs set in the bb are consumed
in the bb and there are no PHIs.   

References FOR_EACH_IMM_USE_FAST, ggc_alloc(), gimple_assign_lhs(), gimple_assign_rhs_could_trap_p(), gimple_bb(), gimple_has_side_effects(), gimple_seq_empty_p(), gsi_end_p(), gsi_next(), gsi_start_bb(), gsi_stmt(), is_gimple_assign(), is_gimple_debug(), last, last_nondebug_stmt(), phi_nodes(), TREE_CODE, and USE_STMT.

Referenced by find_conditions(), and maybe_optimize_range_tests().

◆ oecount_cmp()

static int oecount_cmp ( const void * p1,
const void * p2 )
static
Comparison function for qsort sorting oecount elements by count.   

References ggc_alloc().

Referenced by undistribute_ops_list().

◆ ops_equal_values_p()

static bool ops_equal_values_p ( tree op1,
tree op2 )
static
Return true if OP1 and OP2 have the same value if casted to either type.   

References ggc_alloc(), gimple_assign_rhs1(), gimple_nop_conversion_p(), SSA_NAME_DEF_STMT, and TREE_CODE.

Referenced by eliminate_plus_minus_pair().

◆ optimize_ops_list()

static void optimize_ops_list ( enum tree_code opcode,
vec< operand_entry * > * ops )
static
Perform various identities and other optimizations on the list of
operand entries, stored in OPS.  The tree code for the binary
operation between all the operands is OPCODE.   

References add_to_ops_vec(), dump_file, dump_flags, eliminate_duplicate_pair(), eliminate_not_pairs(), eliminate_plus_minus_pair(), eliminate_redundant_comparison(), eliminate_using_constants(), fold_binary, ggc_alloc(), i, is_gimple_min_invariant(), NULL, optimize_ops_list(), reassociate_stats, TDF_DETAILS, TREE_TYPE, and useless_type_conversion_p().

Referenced by optimize_ops_list(), and reassociate_bb().

◆ optimize_range_tests()

static bool optimize_range_tests ( enum tree_code opcode,
vec< operand_entry * > * ops,
basic_block first_bb )
static
Optimize range tests, similarly to how fold_range_test optimizes
them on trees.  The tree code for the binary
operation between all the operands is OPCODE.
If OPCODE is ERROR_MARK, optimize_range_tests is called from within
maybe_optimize_range_tests for inter-bb range optimization.
In that case if oe->op is NULL, oe->id is bb->index whose
GIMPLE_COND is && or ||ed into the test, and oe->rank says
the actual opcode.
FIRST_BB is the first basic block if OPCODE is ERROR_MARK.   

References BASIC_BLOCK_FOR_FN, cfun, error_mark_node, exp(), FOR_EACH_VEC_ELT, ggc_alloc(), range_entry::high, i, range_entry::in_p, init_range_entry(), last_nondebug_stmt(), range_entry::low, lshift_cheap_p(), merge_ranges(), NULL, NULL_TREE, optimize_function_for_speed_p(), optimize_range_tests_1(), optimize_range_tests_cmp_bitwise(), optimize_range_tests_to_bit_test(), optimize_range_tests_var_bound(), qsort, range_entry_cmp(), range_entry::strict_overflow_p, TREE_CODE, and update_range_test().

Referenced by maybe_optimize_range_tests(), and reassociate_bb().

◆ optimize_range_tests_1()

static bool optimize_range_tests_1 ( enum tree_code opcode,
int first,
int length,
bool optimize_xor,
vec< operand_entry * > * ops,
struct range_entry * ranges )
static
Performs the checks common to optimize_range_tests_xor and
optimize_range_tests_diff.
If OPTIMIZE_XOR is TRUE, it calls optimize_range_tests_xor.
Else it calls optimize_range_tests_diff.   

References boolean_type_node, changes, exp(), fold_binary, ggc_alloc(), i, range_entry::in_p, integer_onep(), INTEGRAL_TYPE_P, NULL_TREE, optimize_range_tests_diff(), optimize_range_tests_xor(), TREE_TYPE, type(), TYPE_MAX_VALUE, and TYPE_MIN_VALUE.

Referenced by optimize_range_tests().

◆ optimize_range_tests_cmp_bitwise()

static bool optimize_range_tests_cmp_bitwise ( enum tree_code opcode,
int first,
int length,
vec< operand_entry * > * ops,
struct range_entry * ranges )
static

◆ optimize_range_tests_diff()

static bool optimize_range_tests_diff ( enum tree_code opcode,
tree type,
tree lowi,
tree lowj,
tree highi,
tree highj,
vec< operand_entry * > * ops,
struct range_entry * rangei,
struct range_entry * rangej )
static
Optimize X == CST1 || X == CST2
if popcount (CST2 - CST1) == 1 into
((X - CST1) & ~(CST2 - CST1)) == 0.
Similarly for ranges.  E.g.
X == 43 || X == 76 || X == 44 || X == 78 || X == 77 || X == 46
|| X == 75 || X == 45
will be transformed by the previous optimization into
(X - 43U) <= 3U || (X - 75U) <= 3U
and this loop can transform that into
((X - 43U) & ~(75U - 43U)) <= 3U.   

References build_int_cst(), build_nonstandard_integer_type(), fold_binary, fold_build1, fold_build2, fold_convert, GET_MODE_PRECISION(), ggc_alloc(), integer_pow2p(), wi::max_value(), wi::min_value(), NULL, NULL_TREE, wi::to_wide(), TREE_CODE, tree_int_cst_equal(), TYPE_MAX_VALUE, TYPE_MIN_VALUE, TYPE_MODE, TYPE_PRECISION, TYPE_SIGN, unsigned_type_for(), and update_range_test().

Referenced by optimize_range_tests_1().
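 
Written out in plain C (an identity restating the documented transformation, not code from the pass):
 
  /* 75 - 43 == 32 has a single bit set, so
       x == 43 || x == 75
     is equivalent to
       ((x - 43U) & ~(75U - 43U)) == 0,  i.e.  ((x - 43U) & ~32U) == 0.  */
  int
  eq_43_or_75 (unsigned x)
  {
    return ((x - 43U) & ~32U) == 0;
  }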

◆ optimize_range_tests_to_bit_test()

static bool optimize_range_tests_to_bit_test ( enum tree_code opcode,
int first,
int length,
vec< operand_entry * > * ops,
struct range_entry * ranges )
static
Attempt to optimize small range tests using bit test.
E.g.
X != 43 && X != 76 && X != 44 && X != 78 && X != 49
&& X != 77 && X != 46 && X != 75 && X != 45 && X != 82
has already been optimized by earlier passes into:
((X - 43U) & ~32U) > 3U && X != 49 && X != 82
Since the whole 43 through 82 range spans fewer than 64 values,
for 64-bit word targets optimize that into:
(X - 43U) > 40U || ((1 << (X - 43U)) & 0x8F0000004FULL) == 0   

References BASIC_BLOCK_FOR_FN, boolean_type_node, build_int_cst(), build_range_check(), build_zero_cst(), candidates, cfun, wi::clz(), compare_tree_int(), end(), exp(), extract_bit_test_mask(), fold_build2_loc(), fold_convert_loc(), force_gimple_operand(), g, gcc_assert, GEN_INT, gen_raw_REG(), GET_MODE_BITSIZE(), get_range_query(), ggc_alloc(), gimple_assign_lhs(), gimple_bb(), gimple_build_assign(), gimple_location(), gimple_seq_add_seq_without_update(), gimple_seq_add_stmt_without_update(), gimple_seq_discard(), gimple_set_location(), gimple_set_visited(), wi::gt_p(), range_entry::high, i, immed_wide_int_const(), range_entry::in_p, integer_type_node, INTEGRAL_TYPE_P, is_gimple_val(), last_nondebug_stmt(), wi::leu_p(), wi::lrshift(), wi::lshift(), wi::lt_p(), wi::ltu_p(), make_ssa_name(), MIN, NULL, NULL_TREE, optimize_bb_for_speed_p(), r, reassoc_branch_fixups, set_src_cost(), SSA_NAME_DEF_STMT, range_entry::strict_overflow_p, STRIP_NOPS, wi::to_wide(), wi::to_widest(), TREE_CODE, TREE_OPERAND, tree_to_uhwi(), TREE_TYPE, type(), lang_hooks_for_types::type_for_mode, TYPE_MAX_VALUE, TYPE_MIN_VALUE, TYPE_SIGN, lang_hooks::types, unshare_expr(), unsigned_type_for(), update_range_test(), wide_int_to_tree(), and word_mode.

Referenced by optimize_range_tests().
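 
A brute-force check of the documented rewrite (hypothetical standalone C, not part of GCC) that compares the original conjunction with the bit-test form:
 
  #include <assert.h>
 
  static int
  original (unsigned x)
  {
    return x != 43 && x != 76 && x != 44 && x != 78 && x != 49
           && x != 77 && x != 46 && x != 75 && x != 45 && x != 82;
  }
 
  static int
  bit_test (unsigned x)
  {
    /* Values 43..82 fit in one 64-bit word; bit (x - 43) of the mask is
       set exactly for the excluded values.  The range check also guards
       the shift amount.  */
    return (x - 43U) > 40U
           || ((1ULL << (x - 43U)) & 0x8F0000004FULL) == 0;
  }
 
  int
  main (void)
  {
    for (unsigned x = 0; x < 200; x++)
      assert (original (x) == bit_test (x));
    return 0;
  }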

◆ optimize_range_tests_var_bound()

static bool optimize_range_tests_var_bound ( enum tree_code opcode,
int first,
int length,
vec< operand_entry * > * ops,
struct range_entry * ranges,
basic_block first_bb )
static

◆ optimize_range_tests_xor()

static bool optimize_range_tests_xor ( enum tree_code opcode,
tree type,
tree lowi,
tree lowj,
tree highi,
tree highj,
vec< operand_entry * > * ops,
struct range_entry * rangei,
struct range_entry * rangej )
static
Optimize X == CST1 || X == CST2
if popcount (CST1 ^ CST2) == 1 into
(X & ~(CST1 ^ CST2)) == (CST1 & ~(CST1 ^ CST2)).
Similarly for ranges.  E.g.
X != 2 && X != 3 && X != 10 && X != 11
will be transformed by the previous optimization into
!((X - 2U) <= 1U || (X - 10U) <= 1U)
and this loop can transform that into
!(((X & ~8) - 2U) <= 1U).   

References build_nonstandard_integer_type(), exp(), fold_binary, fold_build1, fold_build2, fold_convert, GET_MODE_PRECISION(), ggc_alloc(), integer_pow2p(), wi::max_value(), wi::min_value(), NULL, NULL_TREE, wi::to_wide(), TREE_CODE, tree_int_cst_equal(), TYPE_MAX_VALUE, TYPE_MIN_VALUE, TYPE_MODE, TYPE_PRECISION, TYPE_SIGN, TYPE_UNSIGNED, and update_range_test().

Referenced by optimize_range_tests_1().
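 
In C terms (an illustrative identity, not from the sources):
 
  /* 2 ^ 10 == 8 has a single bit set, so masking that bit away merges the
     two two-element ranges: x is one of {2, 3, 10, 11} exactly when
     ((x & ~8U) - 2U) <= 1U.  */
  int
  in_pairs (unsigned x)
  {
    return ((x & ~8U) - 2U) <= 1U;
  }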

◆ optimize_vec_cond_expr()

◆ ovce_extract_ops()

static tree_code ovce_extract_ops ( tree var,
gassign ** rets,
bool * reti,
tree * type,
tree * lhs,
tree * rhs,
gassign ** vcond )
static
A subroutine of optimize_vec_cond_expr to extract and canonicalize
the operands of the VEC_COND_EXPR.  Returns ERROR_MARK on failure,
otherwise the comparison code.  TYPE is a return value that is set
to type of comparison.   

References ggc_alloc(), gimple_assign_rhs1(), gimple_assign_rhs2(), gimple_assign_rhs3(), gimple_assign_rhs_code(), integer_all_onesp(), integer_zerop(), invert_tree_comparison(), NULL, SSA_NAME_DEF_STMT, tcc_comparison, TREE_CODE, TREE_CODE_CLASS, and TREE_TYPE.

Referenced by optimize_vec_cond_expr().

◆ phi_rank()

static int64_t phi_rank ( gimple * stmt)
static
Rank assigned to a phi statement.  If STMT is a loop-carried phi of
an innermost loop, and the phi has only a single use which is inside
the loop, then the rank is the block rank of the loop latch plus an
extra bias for the loop-carried dependence.  This causes expressions
calculated into an accumulator variable to be independent for each
iteration of the loop.  If STMT is some other phi, the rank is the
block rank of its containing block.   

References bb_rank, ggc_alloc(), gimple_bb(), gimple_phi_arg_def(), gimple_phi_num_args(), gimple_phi_result(), loop::header, i, basic_block_def::index, loop::inner, loop::latch, basic_block_def::loop_father, PHI_LOOP_BIAS, reassoc_bias_loop_carried_phi_ranks_p, single_imm_use(), SSA_NAME_DEF_STMT, SSA_NAME_IS_DEFAULT_DEF, TREE_CODE, and virtual_operand_p().

Referenced by get_rank().
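 
A hypothetical C source (not from the testsuite; the exact GIMPLE depends on flags and target) whose loop-carried accumulator is what the bias is meant to protect:
 
  double
  sum4 (const double *a, int n)
  {
    double acc = 0.0;
    /* Assumes n is a multiple of 4.  The PHI for acc gets PHI_LOOP_BIAS
       added to its rank, so when the sum below is reassociated the a[i]
       terms are combined independently of the value carried over from
       the previous iteration.  */
    for (int i = 0; i < n; i += 4)
      acc = acc + a[i] + a[i + 1] + a[i + 2] + a[i + 3];
    return acc;
  }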

◆ propagate_bias_p()

static bool propagate_bias_p ( gimple * stmt)
static
Return TRUE iff PHI_LOOP_BIAS should be propagated from one of the STMT's
operands to the STMT's left-hand side.  The goal is to preserve bias in code
like this:

  x_1 = phi(x_0, x_2)
  a = x_1 | 1
  b = a ^ 2
  .MEM = b
  c = b + d
  x_2 = c + e

That is, we need to preserve bias along single-use chains originating from
loop-carried phis.  Only GIMPLE_ASSIGNs to SSA_NAMEs are considered to be
uses, because only they participate in rank propagation.   

References FOR_EACH_IMM_USE_FAST, ggc_alloc(), gimple_assign_lhs(), gimple_assign_rhs_code(), gimple_bb(), is_gimple_assign(), NULL, tcc_reference, TREE_CODE, TREE_CODE_CLASS, and USE_STMT.

Referenced by get_rank().

◆ propagate_op_to_single_use()

static void propagate_op_to_single_use ( tree op,
gimple * stmt,
tree * def )
static
Find the single immediate use of STMT's LHS, and replace it
with OP.  Remove STMT.  If STMT's LHS is the same as *DEF,
replace *DEF with OP as well.   

References gcc_assert, ggc_alloc(), gimple_assign_lhs(), gimple_call_lhs(), gsi_for_stmt(), has_single_use(), is_gimple_call(), reassoc_remove_stmt(), release_defs(), SET_USE, single_imm_use(), TREE_CODE, unlink_stmt_vdef(), and update_stmt().

Referenced by zero_one_operation().

◆ propagate_rank()

static int64_t propagate_rank ( int64_t rank,
tree op,
bool * maybe_biased_p )
static
Return the maximum of RANK and the rank that should be propagated
from expression OP.  For most operands, this is just the rank of OP.
For loop-carried phis, the value is zero to avoid undoing the bias
in favor of the phi.   

References biased_names, bitmap_bit_p, get_rank(), ggc_alloc(), MAX, NULL, SSA_NAME_VERSION, and TREE_CODE.

Referenced by get_rank().

◆ range_entry_cmp()

static int range_entry_cmp ( const void * a,
const void * b )
static
Comparison function for qsort.  Sort entries
without SSA_NAME exp first, then with SSA_NAMEs sorted
by increasing SSA_NAME_VERSION, and for the same SSA_NAMEs
by increasing ->low and if ->low is the same, by increasing
->high.  ->low == NULL_TREE means minimum, ->high == NULL_TREE
maximum.   

References a, b, boolean_type_node, range_entry::exp, fold_binary, gcc_checking_assert, ggc_alloc(), range_entry::high, range_entry::idx, integer_onep(), range_entry::low, NULL_TREE, SSA_NAME_VERSION, and TREE_CODE.

Referenced by optimize_range_tests().

◆ rank_ops_for_fma()

static int rank_ops_for_fma ( vec< operand_entry * > * ops)
static
Rearrange ops so that more FMAs can be generated when the chain contains
more than two FMA candidates.  Put non-MULT ops and MULT ops alternately at
the end of the queue, which helps generate more FMAs and reduces the FMAs
lost when breaking the chain.
E.g.
a * b + c * d + e generates:

_4  = c_9(D) * d_10(D);
_12 = .FMA (a_7(D), b_8(D), _4);
_11 = e_6(D) + _12;

Rearranging the ops to e + a * b + c * d generates:

_4  = .FMA (c_7(D), d_8(D), _3);
_11 = .FMA (a_5(D), b_6(D), _4);

Return the number of MULT_EXPRs in the chain.   

References FOR_EACH_VEC_ELT, ggc_alloc(), gimple_assign_rhs1(), gimple_assign_rhs_code(), gimple_bb(), i, is_gimple_assign(), SSA_NAME_DEF_STMT, and TREE_CODE.

Referenced by reassociate_bb().

◆ reassoc_remove_stmt()

static bool reassoc_remove_stmt ( gimple_stmt_iterator * gsi)
static

◆ reassoc_stmt_dominates_stmt_p()

static bool reassoc_stmt_dominates_stmt_p ( gimple * s1,
gimple * s2 )
static
Returns true if statement S1 dominates statement S2.  Like
stmt_dominates_stmt_p, but uses stmt UIDs to optimize.   

References CDI_DOMINATORS, dominated_by_p(), gcc_assert, ggc_alloc(), gimple_bb(), gimple_uid(), gsi_end_p(), gsi_for_stmt(), gsi_next(), and gsi_stmt().

Referenced by build_and_add_sum(), find_insert_point(), optimize_range_tests_cmp_bitwise(), and sort_by_operand_rank().

◆ reassociate_bb()

static bool reassociate_bb ( basic_block bb)
static
Reassociate expressions in basic block BB and its post-dominator as
children.

Bubble up return status from maybe_optimize_range_tests.   

References associative_tree_code(), attempt_builtin_copysign(), attempt_builtin_powi(), bb_optimization_type(), can_reassociate_op_p(), can_reassociate_type_p(), CDI_POST_DOMINATORS, cfun, COMPLEX_FLOAT_TYPE_P, direct_internal_fn_supported_p(), dump_file, dump_flags, first_dom_son(), get_gimple_rhs_class(), get_reassociation_width(), ggc_alloc(), gimple_assign_lhs(), gimple_assign_rhs1(), gimple_assign_rhs2(), gimple_assign_rhs_code(), GIMPLE_BINARY_RHS, gimple_build_assign(), gimple_get_lhs(), gimple_location(), gimple_set_lhs(), gimple_set_location(), gimple_set_uid(), gimple_set_visited(), gimple_uid(), gimple_visited_p(), gsi_end_p(), gsi_insert_after(), gsi_last_bb(), GSI_NEW_STMT, gsi_prev(), gsi_stmt(), has_zero_uses(), HONOR_SIGNED_ZEROS(), HONOR_SNANS(), insert_stmt_before_use(), integer_minus_onep(), INTEGRAL_TYPE_P, is_gimple_assign(), last, last_nondebug_stmt(), linearize_expr_tree(), loop_containing_stmt(), make_ssa_name(), make_temp_ssa_name(), maybe_optimize_range_tests(), next_dom_son(), NULL, NULL_TREE, optimize_ops_list(), optimize_range_tests(), optimize_vec_cond_expr(), rank_ops_for_fma(), real_minus_onep(), reassoc_insert_powi_p, reassoc_remove_stmt(), reassociate_bb(), release_defs(), rewrite_expr_tree(), rewrite_expr_tree_parallel(), sort_by_operand_rank(), SSA_NAME_DEF_STMT, stmt_could_throw_p(), swap_ops_for_binary_stmt(), TDF_DETAILS, transform_add_to_multiply(), transform_stmt_to_copy(), transform_stmt_to_multiply(), TREE_CODE, TREE_TYPE, TYPE_MODE, undistribute_bitref_for_vector(), undistribute_ops_list(), update_stmt(), and VECTOR_TYPE_P.

Referenced by do_reassoc(), and reassociate_bb().

◆ remove_visited_stmt_chain()

static void remove_visited_stmt_chain ( tree var)
static

◆ repropagate_negates()

◆ rewrite_expr_tree()

static tree rewrite_expr_tree ( gimple * stmt,
enum tree_code rhs_code,
unsigned int opindex,
const vec< operand_entry * > & ops,
bool changed,
bool next_changed )
static
Recursively rewrite our linearized statements so that the operators
match those in OPS[OPINDEX], putting the computation in rank
order.  Return new lhs.
CHANGED is true if we shouldn't reuse the lhs SSA_NAME both in
the current stmt and during recursive invocations.
NEXT_CHANGED is true if we shouldn't reuse the lhs SSA_NAME in
recursive invocations.   

References changed, dump_file, dump_flags, find_insert_point(), gcc_assert, gcc_checking_assert, ggc_alloc(), gimple_assign_lhs(), gimple_assign_rhs1(), gimple_assign_rhs2(), gimple_assign_set_rhs1(), gimple_assign_set_rhs2(), gimple_build_assign(), gimple_set_uid(), gimple_set_visited(), gimple_uid(), gsi_for_stmt(), gsi_insert_before(), GSI_SAME_STMT, insert_stmt_after(), insert_stmt_before_use(), make_ssa_name(), print_gimple_stmt(), remove_visited_stmt_chain(), rewrite_expr_tree(), SSA_NAME_DEF_STMT, TDF_DETAILS, TREE_TYPE, and update_stmt().

Referenced by reassociate_bb(), and rewrite_expr_tree().

◆ rewrite_expr_tree_parallel()

static void rewrite_expr_tree_parallel ( gassign * stmt,
int width,
bool has_fma,
const vec< operand_entry * > & ops )
static
Rewrite statements in a dependency chain with regard to the chance to
 generate FMAs.
 For a chain with FMA: try to keep FMA opportunities as much as possible.
 For a chain without FMA: put the computation in rank order and try to allow
 operations to be executed in parallel.
 E.g.
 e + f + a * b + c * d;

 ssa1 = e + a * b;
 ssa2 = f + c * d;
 ssa3 = ssa1 + ssa2;

 This reassociation approach preserves the chance of fma generation as much
 as possible.

 Another thing is to avoid adding loop-carried ops to long chains, otherwise
 the whole chain will have dependencies across the loop iteration.  Just keep
 loop-carried ops in a separate chain.
 E.g.
 x_1 = phi (x_0, x_2)
 y_1 = phi (y_0, y_2)

 a + b + c + d + e + x1 + y1

 SSA1 = a + b;
 SSA2 = c + d;
 SSA3 = SSA1 + e;
 SSA4 = SSA3 + SSA2;
 SSA5 = x1 + y1;
 SSA6 = SSA4 + SSA5;

References BIASED_END_STMT, biased_names, bitmap_bit_p, build_and_add_sum(), dump_file, dump_flags, FOR_EACH_VEC_ELT, gcc_assert, ggc_alloc(), gimple_assign_lhs(), gimple_assign_rhs1(), gimple_assign_rhs_code(), gimple_assign_set_rhs1(), gimple_assign_set_rhs2(), gimple_set_visited(), i, insert_stmt_before_use(), NORMAL_END_STMT, NULL, print_gimple_stmt(), remove_visited_stmt_chain(), SPECIAL_BIASED_END_STMT, SSA_NAME_DEF_STMT, SSA_NAME_VERSION, swap_ops_for_binary_stmt(), TDF_DETAILS, TREE_CODE, TREE_TYPE, and update_stmt().

Referenced by reassociate_bb().

◆ should_break_up_subtract()

static bool should_break_up_subtract ( gimple * stmt)
static
Return true if we should break up the subtract in STMT into an add
with negate.  This is true when the subtract operands are really
adds, or the subtract itself is used in an add expression.  In
either case, breaking up the subtract into an add with negate
exposes the adds to reassociation.   

References get_single_immediate_use(), ggc_alloc(), gimple_assign_lhs(), gimple_assign_rhs1(), gimple_assign_rhs2(), gimple_assign_rhs_code(), is_gimple_assign(), is_reassociable_op(), loop_containing_stmt(), SSA_NAME_DEF_STMT, and TREE_CODE.

Referenced by break_up_subtract_bb().

◆ sort_by_mach_mode()

static int sort_by_mach_mode ( const void * p_i,
const void * p_j )
static
Comparison function for qsort on VECTOR SSA_NAME trees by machine mode.   

References ggc_alloc(), SSA_NAME_VERSION, TREE_TYPE, and TYPE_MODE.

Referenced by undistribute_bitref_for_vector().

◆ sort_by_operand_rank()

static int sort_by_operand_rank ( const void * pa,
const void * pb )
static
qsort comparison function to sort operand entries PA and PB by rank
so that the sorted array is ordered by rank in decreasing order.   

References bb_rank, constant_type(), ggc_alloc(), gimple_bb(), reassoc_stmt_dominates_stmt_p(), SSA_NAME_DEF_STMT, SSA_NAME_VERSION, and TREE_CODE.

Referenced by attempt_builtin_powi(), and reassociate_bb().

◆ stmt_is_power_of_op()

static bool stmt_is_power_of_op ( gimple * stmt,
tree op )
static
Return TRUE iff STMT represents a builtin call that raises OP
to some exponent.   

References ggc_alloc(), gimple_call_arg(), gimple_call_combined_fn(), is_gimple_call(), and operand_equal_p().

Referenced by zero_one_operation().

◆ suitable_cond_bb()

static bool suitable_cond_bb ( basic_block bb,
basic_block test_bb,
basic_block * other_bb,
bool * test_swapped_p,
bool backward )
static
Return true if BB is suitable basic block for inter-bb range test
optimization.  If BACKWARD is true, BB should be the only predecessor
of TEST_BB, and *OTHER_BB is either NULL and filled by the routine,
or compared with to find a common basic block to which all conditions
branch to if true resp. false.  If BACKWARD is false, TEST_BB should
be the only predecessor of BB.  *TEST_SWAPPED_P is set to true if
TEST_BB is a bb ending in condition where the edge to non-*OTHER_BB
block points to an empty block that falls through into *OTHER_BB and
the phi args match that path.   

References cfun, EDGE_COUNT, EDGE_SUCC, empty_block_p(), final_range_test_p(), find_edge(), FOR_EACH_EDGE, ggc_alloc(), gimple_assign_lhs(), gimple_phi_arg_def(), gimple_visited_p(), gsi_end_p(), gsi_next(), gsi_start_phis(), integer_onep(), integer_zerop(), last_nondebug_stmt(), NULL, operand_equal_p(), gphi_iterator::phi(), single_pred_p(), single_succ(), single_succ_edge(), single_succ_p(), stmt_could_throw_p(), and basic_block_def::succs.

Referenced by maybe_optimize_range_tests().

◆ swap_ops_for_binary_stmt()

static void swap_ops_for_binary_stmt ( const vec< operand_entry * > & ops,
unsigned int opindex )
static
This function checks three consecutive operands in the
passed operand vector OPS, starting at OPINDEX, and
swaps two operands if it is profitable for the binary operation
consuming the operands at OPINDEX + 1 and OPINDEX + 2.

We pair ops with the same rank if possible.   

References ggc_alloc().

Referenced by reassociate_bb(), and rewrite_expr_tree_parallel().

◆ transform_add_to_multiply()

static bool transform_add_to_multiply ( vec< operand_entry * > * ops)
static

◆ transform_stmt_to_copy()

static void transform_stmt_to_copy ( gimple_stmt_iterator * gsi,
gimple * stmt,
tree new_rhs )
static
Transform STMT at *GSI into a copy by replacing its rhs with NEW_RHS.   

References dump_file, dump_flags, ggc_alloc(), gimple_assign_rhs1(), gimple_assign_set_rhs_from_tree(), print_gimple_stmt(), remove_visited_stmt_chain(), TDF_DETAILS, and update_stmt().

Referenced by reassociate_bb().

◆ transform_stmt_to_multiply()

static void transform_stmt_to_multiply ( gimple_stmt_iterator * gsi,
gimple * stmt,
tree rhs1,
tree rhs2 )
static
Transform STMT at *GSI into a multiply of RHS1 and RHS2.   

References dump_file, dump_flags, ggc_alloc(), gimple_assign_set_rhs_with_ops(), gsi_stmt(), print_gimple_stmt(), remove_visited_stmt_chain(), TDF_DETAILS, and update_stmt().

Referenced by reassociate_bb().

◆ try_special_add_to_ops()

◆ undistribute_bitref_for_vector()

static bool undistribute_bitref_for_vector ( enum tree_code opcode,
vec< operand_entry * > * ops,
struct loop * loop )
static
Perform un-distribution of BIT_FIELD_REF on VECTOR_TYPE.
    V1[0] + V1[1] + ... + V1[k] + V2[0] + V2[1] + ... + V2[k] + ... Vn[k]
  is transformed to
    Vs = (V1 + V2 + ... + Vn)
    Vs[0] + Vs[1] + ... + Vs[k]

  The basic steps are listed below:

   1) Check the addition chain *OPS by looking for summands coming from
      VECTOR BIT_FIELD_REFs on VECTOR type.  Put the information into
      v_info_map for each satisfied summand, using VECTOR SSA_NAME as key.

   2) For each key (VECTOR SSA_NAME), validate that all its BIT_FIELD_REFs are
      contiguous and cover the whole VECTOR without any holes.
      Obtain one VECTOR list which contain candidates to be transformed.

   3) Sort the VECTOR list by machine mode of VECTOR type, for each group of
      candidates with same mode, build the addition statements for them and
      generate BIT_FIELD_REFs accordingly.

  TODO:
      The current implementation requires that whole VECTORs be fully
      covered, but it could be extended to support partial coverage:
      handling adjacent accesses that do not fill the whole vector would
      need some cost model to decide where to draw the boundary.

References bit_field_offset(), bit_field_size(), bitmap_bit_p, bitmap_clear_bit(), bitmap_empty_p(), bitmap_ones(), bitsize_int, build1(), build3(), build_all_ones_cst(), build_and_add_sum(), build_one_cst(), build_vector_type_for_mode(), build_zero_cst(), cand_vec, cleanup_vinfo_map(), dump_file, dump_flags, FOR_EACH_VEC_ELT, g, gcc_assert, GET_MODE_BITSIZE(), GET_MODE_SIZE(), get_rank(), ggc_alloc(), gimple_assign_lhs(), gimple_assign_rhs1(), gimple_assign_rhs_code(), gimple_assign_set_rhs1(), gimple_assign_set_rhs2(), gimple_build_assign(), gimple_get_lhs(), gimple_set_uid(), gimple_set_visited(), gimple_uid(), gsi_for_stmt(), gsi_insert_before(), GSI_NEW_STMT, holes, i, insert_stmt_after(), is_gimple_assign(), is_reassociable_op(), make_ssa_name(), mode_for_vector(), NULL, optab_for_tree_code(), optab_handler(), optab_vector, print_gimple_stmt(), sbitmap_alloc(), sbitmap_free(), SCALAR_TYPE_MODE, sort_by_mach_mode(), SSA_NAME_DEF_STMT, TDF_DETAILS, poly_int< N, C >::to_constant(), TREE_CODE, TREE_OPERAND, tree_to_uhwi(), TREE_TYPE, TYPE_MODE, TYPE_SIZE, TYPE_VECTOR_SUBPARTS(), types_compatible_p(), update_stmt(), useless_type_conversion_p(), v_info::vec, v_info::vec_type, VECTOR_MODE_P, and VECTOR_TYPE_P.

Referenced by reassociate_bb().

◆ undistribute_ops_list()

static bool undistribute_ops_list ( enum tree_code opcode,
vec< operand_entry * > * ops,
class loop * loop )
static
Perform un-distribution of divisions and multiplications.
 A * X + B * X is transformed into (A + B) * X and A / X + B / X
 to (A + B) / X for real X.

 The algorithm is organized as follows.

  - First we walk the addition chain *OPS looking for summands that
    are defined by a multiplication or a real division.  This results
    in the candidates bitmap with relevant indices into *OPS.

  - Second we build the chains of multiplications or divisions for
    these candidates, counting the number of occurrences of (operand, code)
pairs in all of the candidates' chains.

  - Third we sort the (operand, code) pairs by number of occurrences and
    process them starting with the pair with the most uses.

    * For each such pair we walk the candidates again to build a
      second candidate bitmap noting all multiplication/division chains
      that have at least one occurrence of (operand, code).

    * We build an alternate addition chain only covering these
      candidates with one (operand, code) operation removed from their
      multiplication/division chain.

    * The first candidate gets replaced by the alternate addition chain
      multiplied/divided by the operand.

    * All candidate chains get disabled for further processing and
      processing of (operand, code) pairs continues.

The alternate addition chains built are re-processed by the main
reassociation algorithm which allows optimizing a * x * y + b * y * x
to (a + b) * x * y in one invocation of the reassociation pass.   
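
A minimal user-level sketch of the transformation (the pass works on the
GIMPLE operand list rather than on C source, and for floating point it is
only done under the usual unsafe-math reassociation constraints):

    double
    f (double a, double b, double x)
    {
      /* Before:               a * x + b * x
         After undistribution: (a + b) * x
         Likewise a / x + b / x becomes (a + b) / x for real X.  */
      return a * x + b * x;
    }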

References associative_tree_code(), bitmap_clear(), bitmap_first_set_bit(), bitmap_set_bit, build_and_add_sum(), build_zero_cst(), candidates, changed, oecount::cnt, cvec, dump_file, dump_flags, EXECUTE_IF_SET_IN_BITMAP, FOR_EACH_VEC_ELT, free(), get_rank(), ggc_alloc(), gimple_assign_lhs(), gimple_assign_rhs_code(), gimple_get_lhs(), i, oecount::id, is_gimple_assign(), is_reassociable_op(), linearize_expr_tree(), oecount::oecode, oecount_cmp(), oecount::op, print_generic_expr(), SSA_NAME_DEF_STMT, TDF_DETAILS, TDF_NONE, TREE_CODE, TREE_TYPE, and zero_one_operation().

Referenced by reassociate_bb().

◆ update_ops()

static tree update_ops ( tree var,
enum tree_code code,
const vec< operand_entry * > & ops,
unsigned int * pidx,
class loop * loop )
static

◆ update_range_test()

static bool update_range_test ( struct range_entry * range,
struct range_entry * otherrange,
struct range_entry ** otherrangep,
unsigned int count,
enum tree_code opcode,
vec< operand_entry * > * ops,
tree exp,
gimple_seq seq,
bool in_p,
tree low,
tree high,
bool strict_overflow_p )
static
Helper routine of optimize_range_tests.
[EXP, IN_P, LOW, HIGH, STRICT_OVERFLOW_P] is a merged range for
RANGE and OTHERRANGE through OTHERRANGE + COUNT - 1 ranges;
OPCODE and OPS are arguments of optimize_range_tests.  If OTHERRANGE
is NULL, OTHERRANGEP must be non-NULL and points to an array of COUNT
pointers to other ranges.  Return true if the range merge has been
successful.
If OPCODE is ERROR_MARK, this is called from within
maybe_optimize_range_tests and is performing inter-bb range optimization.
In that case, whether an op is BIT_AND_EXPR or BIT_IOR_EXPR is found in
oe->rank.   
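
As a hedged source-level illustration of the kind of merge this helper
finalizes (the actual rewrite happens on GIMPLE via build_range_check):

    int
    in_range (int x)
    {
      /* Before:  x == 4 || x == 5   (two comparisons).
         After merging the ranges [4,4] and [5,5] into [4,5], the test
         becomes roughly  (unsigned) (x - 4) <= 1,  a single comparison.  */
      return x == 4 || x == 5;
    }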

References arith_code_with_undefined_signed_overflow(), BASIC_BLOCK_FOR_FN, boolean_false_node, boolean_true_node, boolean_type_node, build_int_cst(), build_range_check(), CDI_DOMINATORS, cfun, count, dominated_by_p(), dump_file, dump_flags, dump_range_entry(), error_mark_node, exp(), range_entry::exp, fold_convert_loc(), force_into_ssa_name(), gcc_checking_assert, ggc_alloc(), gimple_assign_lhs(), gimple_assign_rhs_code(), gimple_bb(), gimple_location(), gimple_set_uid(), gimple_uid(), gsi_after_labels(), GSI_CONTINUE_LINKING, gsi_end_p(), gsi_for_stmt(), gsi_insert_seq_after(), gsi_insert_seq_before(), gsi_last_bb(), gsi_next(), gsi_none(), gsi_prev(), GSI_SAME_STMT, gsi_start_bb(), gsi_stmt(), range_entry::high, i, range_entry::idx, range_entry::in_p, integer_onep(), INTEGRAL_TYPE_P, invert_truthvalue_loc(), is_gimple_assign(), issue_strict_overflow_warning, last_nondebug_stmt(), range_entry::low, NULL, NULL_TREE, POINTER_TYPE_P, print_generic_expr(), r, rewrite_to_defined_overflow(), single_pred(), SSA_NAME_DEF_STMT, SSA_NAME_IS_DEFAULT_DEF, range_entry::strict_overflow_p, TDF_DETAILS, TREE_CODE, TREE_OPERAND, TREE_TYPE, TYPE_OVERFLOW_UNDEFINED, TYPE_PRECISION, TYPE_UNSIGNED, unshare_expr(), WARN_STRICT_OVERFLOW_COMPARISON, and warning_at().

Referenced by optimize_range_tests(), optimize_range_tests_cmp_bitwise(), optimize_range_tests_diff(), optimize_range_tests_to_bit_test(), and optimize_range_tests_xor().

◆ zero_one_operation()

static void zero_one_operation ( tree * def,
enum tree_code opcode,
tree op )
static
Walks the linear chain with result *DEF, searching for an operation
with operand OP and code OPCODE, and removes it from the chain.  *DEF
is updated if there is only one operand but no operation left.   

References build_minus_one_cst(), decrement_power(), gcc_assert, ggc_alloc(), gimple_assign_rhs1(), gimple_assign_rhs2(), gimple_assign_rhs_code(), gimple_assign_set_rhs_code(), has_single_use(), integer_minus_onep(), is_gimple_assign(), make_new_ssa_for_all_defs(), propagate_op_to_single_use(), real_minus_onep(), SSA_NAME_DEF_STMT, stmt_is_power_of_op(), TREE_CODE, and TREE_TYPE.

Referenced by undistribute_ops_list().

Variable Documentation

◆ bb_rank

int64_t* bb_rank
static
Starting rank number for a given basic block, so that we can rank
operations using unmovable instructions in that BB based on the bb
depth.   

Referenced by fini_reassoc(), get_rank(), init_reassoc(), phi_rank(), and sort_by_operand_rank().

◆ biased_names

auto_bitmap biased_names
static
SSA_NAMEs that are forms of loop accumulators and whose ranks need to be
biased.   

Referenced by fini_reassoc(), get_rank(), propagate_rank(), and rewrite_expr_tree_parallel().

◆ constants_eliminated

int constants_eliminated

◆ cvec

vec<oecount> cvec
static
The heap for the oecount hashtable and the sorted list of operands.   

Referenced by oecount_hasher::equal(), oecount_hasher::hash(), and undistribute_ops_list().

◆ linearized

int linearized

◆ next_operand_entry_id

unsigned int next_operand_entry_id
static
This is used to assign a unique ID to each struct operand_entry
so that qsort results are identical on different hosts.   

Referenced by add_repeat_to_ops_vec(), add_to_ops_vec(), and init_reassoc().

◆ operand_entry_pool

object_allocator<operand_entry> operand_entry_pool ("operand entry pool")
static

◆ operand_rank

hash_map<tree, int64_t>* operand_rank
static
Operand->rank hashtable.   

Referenced by find_operand_rank(), fini_reassoc(), init_reassoc(), and insert_operand_rank().

◆ ops_eliminated

int ops_eliminated

◆ plus_negates

◆ pows_created

int pows_created

◆ pows_encountered

int pows_encountered

◆ reassoc_bias_loop_carried_phi_ranks_p

bool reassoc_bias_loop_carried_phi_ranks_p
static
Enable biasing ranks of loop accumulators.  We don't want this before
vectorization, since it interferes with reduction chains.   

Referenced by execute_reassoc(), and phi_rank().

◆ reassoc_branch_fixups

vec<tree> reassoc_branch_fixups
static
Vector of SSA_NAMEs for which, after reassociate_bb is done with
all basic blocks, the CFG should be adjusted: the basic block is
split right after that SSA_NAME's definition statement and before
its only use, which must be a bit ior.

Referenced by branch_fixup(), and optimize_range_tests_to_bit_test().

◆ reassoc_insert_powi_p

bool reassoc_insert_powi_p
static
Reassociation for trees.
   Copyright (C) 2005-2024 Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.   
This is a simple global reassociation pass.  It is, in part, based
on the LLVM pass of the same name (They do some things more/less
than we do, in different orders, etc).

It consists of five steps:

1. Breaking up subtract operations into addition + negate, where
doing so promotes the reassociation of adds.

2. Left linearization of the expression trees, so that (A+B)+(C+D)
becomes (((A+B)+C)+D), which is easier for us to rewrite later.
During linearization, we place the operands of the binary
expressions into a vector of operand_entry_*

3. Optimization of the operand lists, eliminating things like a +
-a, a & a, etc.

3a. Combine repeated factors with the same occurrence counts
into a __builtin_powi call that will later be optimized into
an optimal number of multiplies.

4. Rewrite the expression trees we linearized and optimized so
they are in proper rank order.

5. Repropagate negates, as nothing else will clean them up ATM.
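
A small, hedged end-to-end illustration of these steps on user-level C
(the intermediate forms are conceptual; the pass operates on GIMPLE):

    int
    f (int a, int b)
    {
      /* Original:  (a + b) + (5 - a)
         Step 1:    the subtract becomes an add of a negate:
                    (a + b) + (5 + -a)
         Step 2:    linearized operand list {a, b, 5, -a}
         Step 3:    a and -a cancel (the "a + -a" case), leaving {b, 5}
         Steps 4/5: rewritten as  b + 5  */
      return (a + b) + (5 - a);
    }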

A bit of theory on #4, since nobody seems to write anything down
about why it makes sense to do it the way they do it:

We could do this much nicer theoretically, but don't (for reasons
explained after how to do it theoretically nice :P).

In order to promote the most redundancy elimination, you want
binary expressions whose operands are the same rank (or
preferably, the same value) exposed to the redundancy eliminator,
for possible elimination.

So the way to do this if we really cared, is to build the new op
tree from the leaves to the roots, merging as you go, and putting the
new op on the end of the worklist, until you are left with one
thing on the worklist.

I.e., if you have to rewrite the following set of operands (listed with
rank in parentheses), with opcode PLUS_EXPR:

a (1),  b (1),  c (1),  d (2), e (2)


We start with our merge worklist empty, and the ops list with all of
those on it.

You want to first merge all leaves of the same rank, as much as
possible.

So first build a binary op of

mergetmp = a + b, and put "mergetmp" on the merge worklist.

Because there is no three operand form of PLUS_EXPR, c is not going to
be exposed to redundancy elimination as a rank 1 operand.

So you might as well throw it on the merge worklist (you could also
consider it to now be a rank two operand, and merge it with d and e,
but in this case, you then have evicted e from a binary op. So at
least in this situation, you can't win.)

Then build a binary op of d + e
mergetmp2 = d + e

and put mergetmp2 on the merge worklist.

so merge worklist = {mergetmp, c, mergetmp2}

Continue building binary ops of these operations until you have only
one operation left on the worklist.

So we have

build binary op
mergetmp3 = mergetmp + c

worklist = {mergetmp2, mergetmp3}

mergetmp4 = mergetmp2 + mergetmp3

worklist = {mergetmp4}

because we have one operation left, we can now just set the original
statement equal to the result of that operation.

This will at least expose a + b  and d + e to redundancy elimination
as binary operations.

For extra points, you can reuse the old statements to build the
mergetmps, since you shouldn't run out.
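
A minimal sketch of that leaf-merging worklist scheme, on toy (name, rank)
pairs rather than real GIMPLE operands; all names and helpers below are
hypothetical and only print the statements that would be built:

    #include <stdio.h>
    #include <string.h>

    struct op { char name[16]; int rank; };

    static int tmpno = 1;

    /* "Build" (i.e. print) a binary PLUS of A and B and return the
       temporary that would hold it.  */
    static struct op
    emit_plus (struct op a, struct op b)
    {
      struct op r;
      snprintf (r.name, sizeof r.name, "mergetmp%d", tmpno++);
      r.rank = (a.rank > b.rank ? a.rank : b.rank) + 1;
      printf ("%s = %s + %s\n", r.name, a.name, b.name);
      return r;
    }

    int
    main (void)
    {
      /* a (1), b (1), c (1), d (2), e (2), as in the walkthrough.  */
      struct op ops[] = { {"a", 1}, {"b", 1}, {"c", 1}, {"d", 2}, {"e", 2} };
      int nops = 5;
      struct op wl[16];
      int nwl = 0;

      /* First merge leaves of equal rank; leftovers such as c go onto
         the merge worklist unchanged.  */
      for (int i = 0; i < nops; )
        if (i + 1 < nops && ops[i].rank == ops[i + 1].rank)
          {
            wl[nwl++] = emit_plus (ops[i], ops[i + 1]);
            i += 2;
          }
        else
          wl[nwl++] = ops[i++];

      /* Then keep combining the first two worklist entries, pushing the
         result onto the end, until one value remains.  */
      while (nwl > 1)
        {
          struct op r = emit_plus (wl[0], wl[1]);
          memmove (wl, wl + 2, (nwl - 2) * sizeof wl[0]);
          nwl -= 2;
          wl[nwl++] = r;
        }
      printf ("final result: %s\n", wl[0].name);
      return 0;
    }

Running this prints mergetmp1 = a + b, mergetmp2 = d + e,
mergetmp3 = mergetmp1 + c and mergetmp4 = mergetmp2 + mergetmp3, matching
the walkthrough above.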

So why don't we do this?

Because it's expensive, and will rarely help.  Most trees we are
reassociating have 3 or fewer ops.  If they have 2 ops, they will
already be written into a nice single binary op.  If you have 3 ops, a
single simple check suffices to tell you whether the first two are of the
same rank.  If so, you know to order it

mergetmp = op1 + op2
newstmt = mergetmp + op3

instead of
mergetmp = op2 + op3
newstmt = mergetmp + op1

If all three are of the same rank, you can't expose them all in a
single binary operator anyway, so the above is *still* the best you
can do.

Thus, this is what we do.  When we have three ops left, we check to see
what order to put them in, and call it a day.  As a nod to vector sum
reduction, we check if any of the ops are really a phi node that is a
destructive update for the associating op, and keep the destructive
update together for vector sum reduction recognition.   
Enable insertion of __builtin_powi calls during execute_reassoc.  See
point 3a in the pass header comment.   
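
For instance (a hedged sketch; whether the call is actually emitted depends
on target costs and on floating-point reassociation being allowed):

    double
    f (double x, double y)
    {
      /* The repeated factors x (3 times) and y (3 times) share the same
         occurrence count, so the chain can become roughly
           __builtin_powi (x * y, 3)
         which is later expanded into an optimal multiply sequence.  */
      return x * x * x * y * y * y;
    }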

Referenced by execute_reassoc(), optimize_range_tests_cmp_bitwise(), reassociate_bb(), and try_special_add_to_ops().

◆ repeat_factor_vec

vec<repeat_factor> repeat_factor_vec
static

Referenced by attempt_builtin_powi().

◆ rewritten

int rewritten