GCC Middle and Back End API Reference
tree-vect-stmts.cc File Reference
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "ssa.h"
#include "optabs-tree.h"
#include "insn-config.h"
#include "recog.h"
#include "cgraph.h"
#include "dumpfile.h"
#include "alias.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "tree-cfg.h"
#include "tree-ssa-loop-manip.h"
#include "cfgloop.h"
#include "explow.h"
#include "tree-ssa-loop.h"
#include "tree-scalar-evolution.h"
#include "tree-vectorizer.h"
#include "builtins.h"
#include "internal-fn.h"
#include "tree-vector-builder.h"
#include "vec-perm-indices.h"
#include "gimple-range.h"
#include "tree-ssa-loop-niter.h"
#include "gimple-fold.h"
#include "regs.h"
#include "attribs.h"
#include "optabs-libfuncs.h"
#include "langhooks.h"
Include dependency graph for tree-vect-stmts.cc:

Data Structures

struct  simd_call_arg_info
 

Enumerations

enum  scan_store_kind { scan_store_kind_perm , scan_store_kind_lshift_zero , scan_store_kind_lshift_cond }
 

Functions

tree stmt_vectype (class _stmt_vec_info *stmt_info)
 
bool stmt_in_inner_loop_p (vec_info *vinfo, class _stmt_vec_info *stmt_info)
 
static unsigned record_stmt_cost (stmt_vector_for_cost *body_cost_vec, int count, enum vect_cost_for_stmt kind, stmt_vec_info stmt_info, slp_tree node, tree vectype, int misalign, enum vect_cost_model_location where)
 
unsigned record_stmt_cost (stmt_vector_for_cost *body_cost_vec, int count, enum vect_cost_for_stmt kind, stmt_vec_info stmt_info, tree vectype, int misalign, enum vect_cost_model_location where)
 
unsigned record_stmt_cost (stmt_vector_for_cost *body_cost_vec, int count, enum vect_cost_for_stmt kind, slp_tree node, tree vectype, int misalign, enum vect_cost_model_location where)
 
unsigned record_stmt_cost (stmt_vector_for_cost *body_cost_vec, int count, enum vect_cost_for_stmt kind, enum vect_cost_model_location where)
 
static tree create_vector_array (tree elem_type, unsigned HOST_WIDE_INT nelems)
 
static tree read_vector_array (vec_info *vinfo, stmt_vec_info stmt_info, gimple_stmt_iterator *gsi, tree scalar_dest, tree array, unsigned HOST_WIDE_INT n)
 
static void write_vector_array (vec_info *vinfo, stmt_vec_info stmt_info, gimple_stmt_iterator *gsi, tree vect, tree array, unsigned HOST_WIDE_INT n)
 
static tree create_array_ref (tree type, tree ptr, tree alias_ptr_type)
 
static void vect_clobber_variable (vec_info *vinfo, stmt_vec_info stmt_info, gimple_stmt_iterator *gsi, tree var)
 
static void vect_mark_relevant (vec< stmt_vec_info > *worklist, stmt_vec_info stmt_info, enum vect_relevant relevant, bool live_p)
 
bool is_simple_and_all_uses_invariant (stmt_vec_info stmt_info, loop_vec_info loop_vinfo)
 
static bool vect_stmt_relevant_p (stmt_vec_info stmt_info, loop_vec_info loop_vinfo, enum vect_relevant *relevant, bool *live_p)
 
static bool exist_non_indexing_operands_for_use_p (tree use, stmt_vec_info stmt_info)
 
static opt_result process_use (stmt_vec_info stmt_vinfo, tree use, loop_vec_info loop_vinfo, enum vect_relevant relevant, vec< stmt_vec_info > *worklist, bool force)
 
opt_result vect_mark_stmts_to_be_vectorized (loop_vec_info loop_vinfo, bool *fatal)
 
static void vect_model_simple_cost (vec_info *, stmt_vec_info stmt_info, int ncopies, enum vect_def_type *dt, int ndts, slp_tree node, stmt_vector_for_cost *cost_vec, vect_cost_for_stmt kind=vector_stmt)
 
static void vect_model_promotion_demotion_cost (stmt_vec_info stmt_info, enum vect_def_type *dt, unsigned int ncopies, int pwr, stmt_vector_for_cost *cost_vec, bool widen_arith)
 
static bool cfun_returns (tree decl)
 
void vect_get_store_cost (vec_info *, stmt_vec_info stmt_info, int ncopies, dr_alignment_support alignment_support_scheme, int misalignment, unsigned int *inside_cost, stmt_vector_for_cost *body_cost_vec)
 
void vect_get_load_cost (vec_info *, stmt_vec_info stmt_info, int ncopies, dr_alignment_support alignment_support_scheme, int misalignment, bool add_realign_cost, unsigned int *inside_cost, unsigned int *prologue_cost, stmt_vector_for_cost *prologue_cost_vec, stmt_vector_for_cost *body_cost_vec, bool record_prologue_costs)
 
static void vect_init_vector_1 (vec_info *vinfo, stmt_vec_info stmt_vinfo, gimple *new_stmt, gimple_stmt_iterator *gsi)
 
tree vect_init_vector (vec_info *vinfo, stmt_vec_info stmt_info, tree val, tree type, gimple_stmt_iterator *gsi)
 
void vect_get_vec_defs_for_operand (vec_info *vinfo, stmt_vec_info stmt_vinfo, unsigned ncopies, tree op, vec< tree > *vec_oprnds, tree vectype)
 
void vect_get_vec_defs (vec_info *vinfo, stmt_vec_info stmt_info, slp_tree slp_node, unsigned ncopies, tree op0, tree vectype0, vec< tree > *vec_oprnds0, tree op1, tree vectype1, vec< tree > *vec_oprnds1, tree op2, tree vectype2, vec< tree > *vec_oprnds2, tree op3, tree vectype3, vec< tree > *vec_oprnds3)
 
void vect_get_vec_defs (vec_info *vinfo, stmt_vec_info stmt_info, slp_tree slp_node, unsigned ncopies, tree op0, vec< tree > *vec_oprnds0, tree op1, vec< tree > *vec_oprnds1, tree op2, vec< tree > *vec_oprnds2, tree op3, vec< tree > *vec_oprnds3)
 
static void vect_finish_stmt_generation_1 (vec_info *, stmt_vec_info stmt_info, gimple *vec_stmt)
 
void vect_finish_replace_stmt (vec_info *vinfo, stmt_vec_info stmt_info, gimple *vec_stmt)
 
void vect_finish_stmt_generation (vec_info *vinfo, stmt_vec_info stmt_info, gimple *vec_stmt, gimple_stmt_iterator *gsi)
 
static internal_fn vectorizable_internal_function (combined_fn cfn, tree fndecl, tree vectype_out, tree vectype_in)
 
static tree permute_vec_elements (vec_info *, tree, tree, tree, stmt_vec_info, gimple_stmt_iterator *)
 
static void check_load_store_for_partial_vectors (loop_vec_info loop_vinfo, tree vectype, slp_tree slp_node, vec_load_store_type vls_type, int group_size, vect_memory_access_type memory_access_type, gather_scatter_info *gs_info, tree scalar_mask)
 
static tree prepare_vec_mask (loop_vec_info loop_vinfo, tree mask_type, tree loop_mask, tree vec_mask, gimple_stmt_iterator *gsi)
 
static bool vect_truncate_gather_scatter_offset (stmt_vec_info stmt_info, loop_vec_info loop_vinfo, bool masked_p, gather_scatter_info *gs_info)
 
static bool vect_use_strided_gather_scatters_p (stmt_vec_info stmt_info, loop_vec_info loop_vinfo, bool masked_p, gather_scatter_info *gs_info)
 
static int compare_step_with_zero (vec_info *vinfo, stmt_vec_info stmt_info)
 
tree perm_mask_for_reverse (tree vectype)
 
static vect_memory_access_type get_negative_load_store_type (vec_info *vinfo, stmt_vec_info stmt_info, tree vectype, vec_load_store_type vls_type, unsigned int ncopies, poly_int64 *poffset)
 
tree vect_get_store_rhs (stmt_vec_info stmt_info)
 
static tree vector_vector_composition_type (tree vtype, poly_uint64 nelts, tree *ptype)
 
static bool get_group_load_store_type (vec_info *vinfo, stmt_vec_info stmt_info, tree vectype, slp_tree slp_node, bool masked_p, vec_load_store_type vls_type, vect_memory_access_type *memory_access_type, poly_int64 *poffset, dr_alignment_support *alignment_support_scheme, int *misalignment, gather_scatter_info *gs_info, internal_fn *lanes_ifn)
 
static bool get_load_store_type (vec_info *vinfo, stmt_vec_info stmt_info, tree vectype, slp_tree slp_node, bool masked_p, vec_load_store_type vls_type, unsigned int ncopies, vect_memory_access_type *memory_access_type, poly_int64 *poffset, dr_alignment_support *alignment_support_scheme, int *misalignment, gather_scatter_info *gs_info, internal_fn *lanes_ifn)
 
static bool vect_check_scalar_mask (vec_info *vinfo, stmt_vec_info stmt_info, slp_tree slp_node, unsigned mask_index, tree *mask, slp_tree *mask_node, vect_def_type *mask_dt_out, tree *mask_vectype_out)
 
static bool vect_check_store_rhs (vec_info *vinfo, stmt_vec_info stmt_info, slp_tree slp_node, tree *rhs, slp_tree *rhs_node, vect_def_type *rhs_dt_out, tree *rhs_vectype_out, vec_load_store_type *vls_type_out)
 
static tree vect_build_all_ones_mask (vec_info *vinfo, stmt_vec_info stmt_info, tree masktype)
 
static tree vect_build_zero_merge_argument (vec_info *vinfo, stmt_vec_info stmt_info, tree vectype)
 
static gimple * vect_build_one_gather_load_call (vec_info *vinfo, stmt_vec_info stmt_info, gimple_stmt_iterator *gsi, gather_scatter_info *gs_info, tree ptr, tree offset, tree mask)
 
static gimple * vect_build_one_scatter_store_call (vec_info *vinfo, stmt_vec_info stmt_info, gimple_stmt_iterator *gsi, gather_scatter_info *gs_info, tree ptr, tree offset, tree oprnd, tree mask)
 
static void vect_get_gather_scatter_ops (loop_vec_info loop_vinfo, class loop *loop, stmt_vec_info stmt_info, slp_tree slp_node, gather_scatter_info *gs_info, tree *dataref_ptr, vec< tree > *vec_offset)
 
static void vect_get_strided_load_store_ops (stmt_vec_info stmt_info, loop_vec_info loop_vinfo, gimple_stmt_iterator *gsi, gather_scatter_info *gs_info, tree *dataref_bump, tree *vec_offset, vec_loop_lens *loop_lens)
 
static tree vect_get_loop_variant_data_ptr_increment (vec_info *vinfo, tree aggr_type, gimple_stmt_iterator *gsi, vec_loop_lens *loop_lens, dr_vec_info *dr_info, vect_memory_access_type memory_access_type)
 
static tree vect_get_data_ptr_increment (vec_info *vinfo, gimple_stmt_iterator *gsi, dr_vec_info *dr_info, tree aggr_type, vect_memory_access_type memory_access_type, vec_loop_lens *loop_lens=nullptr)
 
static bool vectorizable_bswap (vec_info *vinfo, stmt_vec_info stmt_info, gimple_stmt_iterator *gsi, gimple **vec_stmt, slp_tree slp_node, slp_tree *slp_op, tree vectype_in, stmt_vector_for_cost *cost_vec)
 
static bool simple_integer_narrowing (tree vectype_out, tree vectype_in, code_helper *convert_code)
 
static bool vectorizable_call (vec_info *vinfo, stmt_vec_info stmt_info, gimple_stmt_iterator *gsi, gimple **vec_stmt, slp_tree slp_node, stmt_vector_for_cost *cost_vec)
 
static void vect_simd_lane_linear (tree op, class loop *loop, struct simd_call_arg_info *arginfo)
 
static bool vectorizable_simd_clone_call (vec_info *vinfo, stmt_vec_info stmt_info, gimple_stmt_iterator *gsi, gimple **vec_stmt, slp_tree slp_node, stmt_vector_for_cost *)
 
static gimple * vect_gen_widened_results_half (vec_info *vinfo, code_helper ch, tree vec_oprnd0, tree vec_oprnd1, int op_type, tree vec_dest, gimple_stmt_iterator *gsi, stmt_vec_info stmt_info)
 
static void vect_create_vectorized_demotion_stmts (vec_info *vinfo, vec< tree > *vec_oprnds, int multi_step_cvt, stmt_vec_info stmt_info, vec< tree > &vec_dsts, gimple_stmt_iterator *gsi, slp_tree slp_node, code_helper code, bool narrow_src_p)
 
static void vect_create_vectorized_promotion_stmts (vec_info *vinfo, vec< tree > *vec_oprnds0, vec< tree > *vec_oprnds1, stmt_vec_info stmt_info, tree vec_dest, gimple_stmt_iterator *gsi, code_helper ch1, code_helper ch2, int op_type)
 
static void vect_create_half_widening_stmts (vec_info *vinfo, vec< tree > *vec_oprnds0, vec< tree > *vec_oprnds1, stmt_vec_info stmt_info, tree vec_dest, gimple_stmt_iterator *gsi, code_helper code1, int op_type)
 
static bool vectorizable_conversion (vec_info *vinfo, stmt_vec_info stmt_info, gimple_stmt_iterator *gsi, gimple **vec_stmt, slp_tree slp_node, stmt_vector_for_cost *cost_vec)
 
bool vect_nop_conversion_p (stmt_vec_info stmt_info)
 
static bool vectorizable_assignment (vec_info *vinfo, stmt_vec_info stmt_info, gimple_stmt_iterator *gsi, gimple **vec_stmt, slp_tree slp_node, stmt_vector_for_cost *cost_vec)
 
bool vect_supportable_shift (vec_info *vinfo, enum tree_code code, tree scalar_type)
 
static bool vectorizable_shift (vec_info *vinfo, stmt_vec_info stmt_info, gimple_stmt_iterator *gsi, gimple **vec_stmt, slp_tree slp_node, stmt_vector_for_cost *cost_vec)
 
static bool vectorizable_operation (vec_info *vinfo, stmt_vec_info stmt_info, gimple_stmt_iterator *gsi, gimple **vec_stmt, slp_tree slp_node, stmt_vector_for_cost *cost_vec)
 
static void ensure_base_align (dr_vec_info *dr_info)
 
static tree get_group_alias_ptr_type (stmt_vec_info first_stmt_info)
 
static bool scan_operand_equal_p (tree ref1, tree ref2)
 
static int scan_store_can_perm_p (tree vectype, tree init, vec< enum scan_store_kind > *use_whole_vector=NULL)
 
static bool check_scan_store (vec_info *vinfo, stmt_vec_info stmt_info, tree vectype, enum vect_def_type rhs_dt, bool slp, tree mask, vect_memory_access_type memory_access_type)
 
static bool vectorizable_scan_store (vec_info *vinfo, stmt_vec_info stmt_info, gimple_stmt_iterator *gsi, gimple **vec_stmt, int ncopies)
 
static bool vectorizable_store (vec_info *vinfo, stmt_vec_info stmt_info, gimple_stmt_iterator *gsi, gimple **vec_stmt, slp_tree slp_node, stmt_vector_for_cost *cost_vec)
 
tree vect_gen_perm_mask_any (tree vectype, const vec_perm_indices &sel)
 
tree vect_gen_perm_mask_checked (tree vectype, const vec_perm_indices &sel)
 
static bool hoist_defs_of_uses (stmt_vec_info stmt_info, class loop *loop, bool hoist_p)
 
static bool vectorizable_load (vec_info *vinfo, stmt_vec_info stmt_info, gimple_stmt_iterator *gsi, gimple **vec_stmt, slp_tree slp_node, stmt_vector_for_cost *cost_vec)
 
static bool vect_is_simple_cond (tree cond, vec_info *vinfo, stmt_vec_info stmt_info, slp_tree slp_node, tree *comp_vectype, enum vect_def_type *dts, tree vectype)
 
static bool vectorizable_condition (vec_info *vinfo, stmt_vec_info stmt_info, gimple_stmt_iterator *gsi, gimple **vec_stmt, slp_tree slp_node, stmt_vector_for_cost *cost_vec)
 
static bool vectorizable_comparison_1 (vec_info *vinfo, tree vectype, stmt_vec_info stmt_info, tree_code code, gimple_stmt_iterator *gsi, gimple **vec_stmt, slp_tree slp_node, stmt_vector_for_cost *cost_vec)
 
static bool vectorizable_comparison (vec_info *vinfo, stmt_vec_info stmt_info, gimple_stmt_iterator *gsi, gimple **vec_stmt, slp_tree slp_node, stmt_vector_for_cost *cost_vec)
 
static bool vectorizable_early_exit (vec_info *vinfo, stmt_vec_info stmt_info, gimple_stmt_iterator *gsi, gimple **vec_stmt, slp_tree slp_node, stmt_vector_for_cost *cost_vec)
 
static bool can_vectorize_live_stmts (vec_info *vinfo, stmt_vec_info stmt_info, slp_tree slp_node, slp_instance slp_node_instance, bool vec_stmt_p, stmt_vector_for_cost *cost_vec)
 
opt_result vect_analyze_stmt (vec_info *vinfo, stmt_vec_info stmt_info, bool *need_to_vectorize, slp_tree node, slp_instance node_instance, stmt_vector_for_cost *cost_vec)
 
bool vect_transform_stmt (vec_info *vinfo, stmt_vec_info stmt_info, gimple_stmt_iterator *gsi, slp_tree slp_node, slp_instance slp_node_instance)
 
void vect_remove_stores (vec_info *vinfo, stmt_vec_info first_stmt_info)
 
tree get_related_vectype_for_scalar_type (machine_mode prevailing_mode, tree scalar_type, poly_uint64 nunits)
 
tree get_vectype_for_scalar_type (vec_info *vinfo, tree scalar_type, unsigned int group_size)
 
tree get_vectype_for_scalar_type (vec_info *vinfo, tree scalar_type, slp_tree node)
 
tree get_mask_type_for_scalar_type (vec_info *vinfo, tree scalar_type, unsigned int group_size)
 
tree get_mask_type_for_scalar_type (vec_info *vinfo, tree scalar_type, slp_tree node)
 
tree get_same_sized_vectype (tree scalar_type, tree vector_type)
 
bool vect_chooses_same_modes_p (vec_info *vinfo, machine_mode vector_mode)
 
bool vect_is_simple_use (tree operand, vec_info *vinfo, enum vect_def_type *dt, stmt_vec_info *def_stmt_info_out, gimple **def_stmt_out)
 
bool vect_is_simple_use (tree operand, vec_info *vinfo, enum vect_def_type *dt, tree *vectype, stmt_vec_info *def_stmt_info_out, gimple **def_stmt_out)
 
bool vect_is_simple_use (vec_info *vinfo, stmt_vec_info stmt, slp_tree slp_node, unsigned operand, tree *op, slp_tree *slp_def, enum vect_def_type *dt, tree *vectype, stmt_vec_info *def_stmt_info_out)
 
bool vect_maybe_update_slp_op_vectype (slp_tree op, tree vectype)
 
bool supportable_widening_operation (vec_info *vinfo, code_helper code, stmt_vec_info stmt_info, tree vectype_out, tree vectype_in, code_helper *code1, code_helper *code2, int *multi_step_cvt, vec< tree > *interm_types)
 
bool supportable_narrowing_operation (code_helper code, tree vectype_out, tree vectype_in, code_helper *code1, int *multi_step_cvt, vec< tree > *interm_types)
 
tree vect_gen_while (gimple_seq *seq, tree mask_type, tree start_index, tree end_index, const char *name)
 
tree vect_gen_while_not (gimple_seq *seq, tree mask_type, tree start_index, tree end_index)
 
opt_result vect_get_vector_types_for_stmt (vec_info *vinfo, stmt_vec_info stmt_info, tree *stmt_vectype_out, tree *nunits_vectype_out, unsigned int group_size)
 
gimple_seq vect_gen_len (tree len, tree start_index, tree end_index, tree len_limit)
 

Enumeration Type Documentation

◆ scan_store_kind

Enumerator
scan_store_kind_perm 
scan_store_kind_lshift_zero 
scan_store_kind_lshift_cond 

Function Documentation

◆ can_vectorize_live_stmts()

static bool can_vectorize_live_stmts ( vec_info * vinfo,
stmt_vec_info stmt_info,
slp_tree slp_node,
slp_instance slp_node_instance,
bool vec_stmt_p,
stmt_vector_for_cost * cost_vec )
static
If SLP_NODE is nonnull, return true if vectorizable_live_operation
can handle all live statements in the node.  Otherwise return true
if STMT_INFO is not live or if vectorizable_live_operation can handle it.
VEC_STMT_P is as for vectorizable_live_operation.   

References FOR_EACH_VEC_ELT, ggc_alloc(), i, LOOP_VINFO_EARLY_BREAKS, SLP_TREE_SCALAR_STMTS, STMT_VINFO_DEF_TYPE, STMT_VINFO_LIVE_P, vect_induction_def, and vectorizable_live_operation().

Referenced by vect_analyze_stmt(), and vect_transform_stmt().

◆ cfun_returns()

◆ check_load_store_for_partial_vectors()

static void check_load_store_for_partial_vectors ( loop_vec_info loop_vinfo,
tree vectype,
slp_tree slp_node,
vec_load_store_type vls_type,
int group_size,
vect_memory_access_type memory_access_type,
gather_scatter_info * gs_info,
tree scalar_mask )
static
Check whether a load or store statement in the loop described by
LOOP_VINFO is possible in a loop using partial vectors.  This is
testing whether the vectorizer pass has the appropriate support,
as well as whether the target does.

VLS_TYPE says whether the statement is a load or store and VECTYPE
is the type of the vector being loaded or stored.  SLP_NODE is the SLP
node that contains the statement, or null if none.  MEMORY_ACCESS_TYPE
says how the load or store is going to be implemented and GROUP_SIZE
is the number of load or store statements in the containing group.
If the access is a gather load or scatter store, GS_INFO describes
its arguments.  If the load or store is conditional, SCALAR_MASK is the
condition under which it occurs.

Clear LOOP_VINFO_CAN_USE_PARTIAL_VECTORS_P if a loop using partial
vectors is not supported, otherwise record the required rgroup control
types.   

References can_vec_mask_load_store_p(), dump_enabled_p(), dump_printf_loc(), gcc_unreachable, get_len_load_store_mode(), GET_MODE_UNIT_SIZE, ggc_alloc(), internal_gather_scatter_fn_supported_p(), LOOP_VINFO_CAN_USE_PARTIAL_VECTORS_P, LOOP_VINFO_LENS, LOOP_VINFO_MASKS, LOOP_VINFO_VECT_FACTOR, MSG_MISSED_OPTIMIZATION, SLP_TREE_NUMBER_OF_VEC_STMTS, targetm, TYPE_MODE, TYPE_VECTOR_SUBPARTS(), vect_get_num_copies(), vect_load_lanes_supported(), vect_location, vect_record_loop_len(), vect_record_loop_mask(), vect_store_lanes_supported(), VECTOR_MODE_P, VLS_LOAD, VMAT_CONTIGUOUS, VMAT_CONTIGUOUS_PERMUTE, VMAT_GATHER_SCATTER, VMAT_INVARIANT, and VMAT_LOAD_STORE_LANES.

Referenced by vectorizable_load(), and vectorizable_store().
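
The outcome of this check can be pictured with a small standalone model.  The helper, its types and its boolean flags below are hypothetical stand-ins for the real target hooks (can_vec_mask_load_store_p, get_len_load_store_mode) and for the LOOP_VINFO_MASKS / LOOP_VINFO_LENS rgroup records; this is a sketch of the decision shape, not the implementation.

#include <algorithm>

// Hypothetical stand-in for the loop_vec_info fields this function updates.
struct partial_vector_state
{
  bool can_use_partial_vectors = true;  // LOOP_VINFO_CAN_USE_PARTIAL_VECTORS_P
  unsigned rgroup_lens = 0;             // models LOOP_VINFO_LENS
  unsigned rgroup_masks = 0;            // models LOOP_VINFO_MASKS
};

// Record a length-based control if the target has len loads/stores, else a
// mask-based control if it has masked accesses, else give up on partial
// vectors for this loop.
static void
note_load_store (partial_vector_state &s, bool target_has_len_access,
                 bool target_has_masked_access, unsigned nvectors)
{
  if (target_has_len_access)
    s.rgroup_lens = std::max (s.rgroup_lens, nvectors);
  else if (target_has_masked_access)
    s.rgroup_masks = std::max (s.rgroup_masks, nvectors);
  else
    s.can_use_partial_vectors = false;
}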

◆ check_scan_store()

static bool check_scan_store ( vec_info * vinfo,
stmt_vec_info stmt_info,
tree vectype,
enum vect_def_type rhs_dt,
bool slp,
tree mask,
vect_memory_access_type memory_access_type )
static

◆ compare_step_with_zero()

static int compare_step_with_zero ( vec_info * vinfo,
stmt_vec_info stmt_info )
static
STMT_INFO is a non-strided load or store, meaning that it accesses
elements with a known constant step.  Return -1 if that step
is negative, 0 if it is zero, and 1 if it is greater than zero.   

References size_zero_node, STMT_VINFO_DR_INFO, tree_int_cst_compare(), and vect_dr_behavior().

Referenced by get_group_load_store_type(), and get_load_store_type().

◆ create_array_ref()

static tree create_array_ref ( tree type,
tree ptr,
tree alias_ptr_type )
static
PTR is a pointer to an array of type TYPE.  Return a representation
of *PTR.  The memory reference replaces those in FIRST_DR
(and its group).   

References build2(), build_int_cst(), get_ptr_info(), ggc_alloc(), set_ptr_info_alignment(), and TYPE_ALIGN_UNIT.

Referenced by vectorizable_load(), and vectorizable_store().

◆ create_vector_array()

static tree create_vector_array ( tree elem_type,
unsigned HOST_WIDE_INT nelems )
static
Return a variable of type ELEM_TYPE[NELEMS].   

References build_array_type_nelts(), create_tmp_var, and ggc_alloc().

Referenced by vectorizable_load(), and vectorizable_store().

◆ ensure_base_align()

◆ exist_non_indexing_operands_for_use_p()

static bool exist_non_indexing_operands_for_use_p ( tree use,
stmt_vec_info stmt_info )
static
Function exist_non_indexing_operands_for_use_p

USE is one of the uses attached to STMT_INFO.  Check if USE is
used in STMT_INFO for anything other than indexing an array.   

References ggc_alloc(), gimple_assign_copy_p(), gimple_assign_lhs(), gimple_assign_rhs1(), gimple_call_arg(), gimple_call_internal_fn(), gimple_call_internal_p(), internal_fn_mask_index(), internal_fn_stored_value_index(), internal_gather_scatter_fn_p(), STMT_VINFO_DATA_REF, and TREE_CODE.

Referenced by process_use().

◆ get_group_alias_ptr_type()

static tree get_group_alias_ptr_type ( stmt_vec_info first_stmt_info)
static
Function get_group_alias_ptr_type.

Return the alias type for the group starting at FIRST_STMT_INFO.   

References DR_GROUP_NEXT_ELEMENT, DR_REF, dump_enabled_p(), dump_printf_loc(), get_alias_set(), ggc_alloc(), MSG_NOTE, ptr_type_node, reference_alias_ptr_type(), STMT_VINFO_DATA_REF, and vect_location.

Referenced by vectorizable_load(), and vectorizable_store().

◆ get_group_load_store_type()

static bool get_group_load_store_type ( vec_info * vinfo,
stmt_vec_info stmt_info,
tree vectype,
slp_tree slp_node,
bool masked_p,
vec_load_store_type vls_type,
vect_memory_access_type * memory_access_type,
poly_int64 * poffset,
dr_alignment_support * alignment_support_scheme,
int * misalignment,
gather_scatter_info * gs_info,
internal_fn * lanes_ifn )
static

◆ get_load_store_type()

static bool get_load_store_type ( vec_info * vinfo,
stmt_vec_info stmt_info,
tree vectype,
slp_tree slp_node,
bool masked_p,
vec_load_store_type vls_type,
unsigned int ncopies,
vect_memory_access_type * memory_access_type,
poly_int64 * poffset,
dr_alignment_support * alignment_support_scheme,
int * misalignment,
gather_scatter_info * gs_info,
internal_fn * lanes_ifn )
static
Analyze load or store statement STMT_INFO of type VLS_TYPE.  Return true
if there is a memory access type that the vectorized form can use,
storing it in *MEMORY_ACCESS_TYPE if so.  If we decide to use gathers
or scatters, fill in GS_INFO accordingly.  In addition
*ALIGNMENT_SUPPORT_SCHEME is filled out and false is returned if
the target does not support the alignment scheme.  *MISALIGNMENT
is set according to the alignment of the access (including
DR_MISALIGNMENT_UNKNOWN when it is unknown).

SLP says whether we're performing SLP rather than loop vectorization.
MASKED_P is true if the statement is conditional on a vectorized mask.
VECTYPE is the vector type that the vectorized statements will use.
NCOPIES is the number of vector statements that will be needed.   

References compare_step_with_zero(), DR_GROUP_FIRST_ELEMENT, DR_GROUP_NEXT_ELEMENT, DR_GROUP_SIZE, dr_misalignment(), DR_MISALIGNMENT_UNKNOWN, dr_unaligned_supported, dr_unaligned_unsupported, dump_enabled_p(), dump_printf_loc(), gcc_assert, gcc_unreachable, get_group_load_store_type(), get_negative_load_store_type(), ggc_alloc(), IFN_LAST, poly_int< N, C >::is_constant(), is_gimple_call(), MSG_MISSED_OPTIMIZATION, pow2p_hwi(), STMT_VINFO_DR_INFO, STMT_VINFO_GATHER_SCATTER_P, STMT_VINFO_GROUPED_ACCESS, STMT_VINFO_STRIDED_P, tree_nop_conversion_p(), TREE_TYPE, TYPE_VECTOR_SUBPARTS(), vect_check_gather_scatter(), vect_is_simple_use(), vect_location, vect_supportable_dr_alignment(), vect_use_strided_gather_scatters_p(), VLS_LOAD, VMAT_CONTIGUOUS, VMAT_ELEMENTWISE, VMAT_GATHER_SCATTER, VMAT_INVARIANT, and VMAT_STRIDED_SLP.

Referenced by vectorizable_load(), and vectorizable_store().

◆ get_mask_type_for_scalar_type() [1/2]

tree get_mask_type_for_scalar_type ( vec_info * vinfo,
tree scalar_type,
slp_tree node )
Function get_mask_type_for_scalar_type.

Returns the mask type corresponding to a result of comparison
of vectors of specified SCALAR_TYPE as supported by target.
NODE, if nonnull, is the SLP tree node that will use the returned
vector type.   

References get_vectype_for_scalar_type(), ggc_alloc(), NULL, and truth_type_for().

◆ get_mask_type_for_scalar_type() [2/2]

tree get_mask_type_for_scalar_type ( vec_info * vinfo,
tree scalar_type,
unsigned int group_size )
Function get_mask_type_for_scalar_type.

Returns the mask type corresponding to a result of comparison
of vectors of specified SCALAR_TYPE as supported by target.
If GROUP_SIZE is nonzero and we're performing BB vectorization,
make sure that the number of elements in the vector is no bigger
than GROUP_SIZE.   

References get_vectype_for_scalar_type(), ggc_alloc(), NULL, and truth_type_for().

Referenced by check_bool_pattern(), vect_check_scalar_mask(), vect_convert_mask_for_vectype(), vect_determine_mask_precision(), vect_get_vector_types_for_stmt(), and vect_recog_mask_conversion_pattern().

◆ get_negative_load_store_type()

static vect_memory_access_type get_negative_load_store_type ( vec_info * vinfo,
stmt_vec_info stmt_info,
tree vectype,
vec_load_store_type vls_type,
unsigned int ncopies,
poly_int64 * poffset )
static
A subroutine of get_load_store_type, with a subset of the same
arguments.  Handle the case where STMT_INFO is a load or store that
accesses consecutive elements with a negative step.  Sets *POFFSET
to the offset to be applied to the DR for the first access.   

References dr_aligned, dr_misalignment(), dr_unaligned_supported, dump_enabled_p(), dump_printf_loc(), ggc_alloc(), MSG_MISSED_OPTIMIZATION, MSG_NOTE, perm_mask_for_reverse(), STMT_VINFO_DR_INFO, TREE_INT_CST_LOW, TREE_TYPE, TYPE_SIZE_UNIT, TYPE_VECTOR_SUBPARTS(), vect_location, vect_supportable_dr_alignment(), VLS_STORE_INVARIANT, VMAT_CONTIGUOUS_DOWN, VMAT_CONTIGUOUS_REVERSE, and VMAT_ELEMENTWISE.

Referenced by get_group_load_store_type(), and get_load_store_type().

◆ get_related_vectype_for_scalar_type()

tree get_related_vectype_for_scalar_type ( machine_mode prevailing_mode,
tree scalar_type,
poly_uint64 nunits )
If NUNITS is nonzero, return a vector type that contains NUNITS
elements of type SCALAR_TYPE, or null if the target doesn't support
such a type.

If NUNITS is zero, return a vector type that contains elements of
type SCALAR_TYPE, choosing whichever vector size the target prefers.

If PREVAILING_MODE is VOIDmode, we have not yet chosen a vector mode
for this vectorization region and want to "autodetect" the best choice.
Otherwise, PREVAILING_MODE is a previously-chosen vector TYPE_MODE
and we want the new type to be interoperable with it.   PREVAILING_MODE
in this case can be a scalar integer mode or a vector mode; when it
is a vector mode, the function acts like a tree-level version of
related_vector_mode.   

References build_nonstandard_integer_type(), build_qualified_type(), build_vector_type_for_mode(), gcc_assert, GET_MODE_BITSIZE(), GET_MODE_SIZE(), ggc_alloc(), INTEGRAL_MODE_P, INTEGRAL_TYPE_P, is_float_mode(), is_int_mode(), KEEP_QUAL_ADDR_SPACE, known_eq, mode_for_vector(), NULL_TREE, POINTER_TYPE_P, related_vector_mode(), SCALAR_FLOAT_TYPE_P, SCALAR_INT_MODE_P, targetm, TREE_CODE, TYPE_ADDR_SPACE, TYPE_ALIGN_UNIT, lang_hooks_for_types::type_for_mode, TYPE_MODE, TYPE_PRECISION, TYPE_QUALS, TYPE_UNSIGNED, lang_hooks::types, and VECTOR_MODE_P.

Referenced by get_same_sized_vectype(), get_vec_alignment_for_array_type(), get_vectype_for_scalar_type(), vect_create_epilog_for_reduction(), vect_create_partial_epilog(), and vect_find_reusable_accumulator().

◆ get_same_sized_vectype()

tree get_same_sized_vectype ( tree scalar_type,
tree vector_type )
Function get_same_sized_vectype

Returns a vector type corresponding to SCALAR_TYPE with the same size as
VECTOR_TYPE, if supported by the target.   

References GET_MODE_SIZE(), get_related_vectype_for_scalar_type(), ggc_alloc(), NULL_TREE, truth_type_for(), TYPE_MODE, and VECT_SCALAR_BOOLEAN_TYPE_P.

Referenced by vect_create_epilog_for_reduction(), vect_recog_rotate_pattern(), vectorizable_bswap(), vectorizable_conversion(), vectorizable_induction(), and vectorizable_reduction().

◆ get_vectype_for_scalar_type() [1/2]

tree get_vectype_for_scalar_type ( vec_info * vinfo,
tree scalar_type,
slp_tree node )
Return the vector type corresponding to SCALAR_TYPE as supported
by the target.  NODE, if nonnull, is the SLP tree node that will
use the returned vector type.   

References get_vectype_for_scalar_type(), ggc_alloc(), and SLP_TREE_LANES.

◆ get_vectype_for_scalar_type() [2/2]

tree get_vectype_for_scalar_type ( vec_info * vinfo,
tree scalar_type,
unsigned int group_size )
Function get_vectype_for_scalar_type.

Returns the vector type corresponding to SCALAR_TYPE as supported
by the target.  If GROUP_SIZE is nonzero and we're performing BB
vectorization, make sure that the number of elements in the vector
is no bigger than GROUP_SIZE.   

References hash_set< KeyId, Lazy, Traits >::add(), floor_log2(), gcc_assert, get_related_vectype_for_scalar_type(), ggc_alloc(), maybe_ge, vec_info::slp_instances, TYPE_MODE, TYPE_VECTOR_SUBPARTS(), vec_info::used_vector_modes, and vec_info::vector_mode.

Referenced by adjust_bool_pattern(), adjust_bool_pattern_cast(), can_duplicate_and_interleave_p(), check_bool_pattern(), get_initial_def_for_reduction(), get_mask_type_for_scalar_type(), get_mask_type_for_scalar_type(), get_vectype_for_scalar_type(), vect_add_conversion_to_pattern(), vect_analyze_data_refs(), vect_build_slp_instance(), vect_build_slp_tree_2(), vect_convert_input(), vect_determine_mask_precision(), vect_determine_vectorization_factor(), vect_gather_scatter_fn_p(), vect_get_vec_defs_for_operand(), vect_get_vector_types_for_stmt(), vect_is_simple_cond(), vect_phi_first_order_recurrence_p(), vect_recog_abd_pattern(), vect_recog_average_pattern(), vect_recog_bit_insert_pattern(), vect_recog_bitfield_ref_pattern(), vect_recog_bool_pattern(), vect_recog_cast_forwprop_pattern(), vect_recog_cond_expr_convert_pattern(), vect_recog_ctz_ffs_pattern(), vect_recog_divmod_pattern(), vect_recog_gather_scatter_pattern(), vect_recog_gcond_pattern(), vect_recog_mask_conversion_pattern(), vect_recog_mixed_size_cond_pattern(), vect_recog_mulhs_pattern(), vect_recog_mult_pattern(), vect_recog_over_widening_pattern(), vect_recog_popcount_clz_ctz_ffs_pattern(), vect_recog_pow_pattern(), vect_recog_rotate_pattern(), vect_recog_sat_add_pattern(), vect_recog_vector_vector_shift_pattern(), vect_recog_widen_abd_pattern(), vect_recog_widen_op_pattern(), vect_slp_prefer_store_lanes_p(), vect_split_statement(), vect_supportable_direct_optab_p(), vect_supportable_shift(), vect_synth_mult_by_constant(), vectorizable_assignment(), vectorizable_call(), vectorizable_comparison_1(), vectorizable_conversion(), vectorizable_operation(), vectorizable_reduction(), vectorizable_shift(), and vectorizable_simd_clone_call().

◆ hoist_defs_of_uses()

static bool hoist_defs_of_uses ( stmt_vec_info stmt_info,
class loop * loop,
bool hoist_p )
static
Hoist the definitions of all SSA uses on STMT_INFO out of the loop LOOP,
inserting them on the loop's preheader edge.  Returns true if we
were successful in doing so (and thus STMT_INFO can be moved then),
otherwise returns false.  HOIST_P indicates whether we actually want to
hoist the definitions of all SSA uses; it is false when we are only costing.   

References flow_bb_inside_loop_p(), FOR_EACH_SSA_TREE_OPERAND, ggc_alloc(), gimple_bb(), gimple_nop_p(), gsi_for_stmt(), gsi_insert_on_edge_immediate(), gsi_remove(), i, i2, loop_preheader_edge(), SSA_NAME_DEF_STMT, and SSA_OP_USE.

Referenced by vectorizable_load().

◆ is_simple_and_all_uses_invariant()

bool is_simple_and_all_uses_invariant ( stmt_vec_info stmt_info,
loop_vec_info loop_vinfo )
Function is_simple_and_all_uses_invariant

Return true if STMT_INFO is simple and all uses of it are invariant.   

References dump_enabled_p(), dump_printf_loc(), FOR_EACH_SSA_TREE_OPERAND, ggc_alloc(), MSG_MISSED_OPTIMIZATION, SSA_OP_USE, vect_constant_def, vect_external_def, vect_is_simple_use(), vect_location, and vect_uninitialized_def.

Referenced by vect_stmt_relevant_p(), and vectorizable_live_operation().

◆ perm_mask_for_reverse()

tree perm_mask_for_reverse ( tree vectype)
If the target supports a permute mask that reverses the elements in
a vector of type VECTYPE, return that mask, otherwise return null.   

References can_vec_perm_const_p(), ggc_alloc(), i, NULL_TREE, TYPE_MODE, TYPE_VECTOR_SUBPARTS(), and vect_gen_perm_mask_checked().

Referenced by get_negative_load_store_type(), vectorizable_load(), and vectorizable_store().
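
For illustration, the selector this produces simply reverses lane order; a minimal scalar model (the helper below is hypothetical, not the GIMPLE-level code, which goes through vect_gen_perm_mask_checked):

#include <cstddef>
#include <vector>

// Scalar model of a reversing permute selector: output lane I takes input
// lane NUNITS - 1 - I.
static std::vector<int>
reverse_perm_indices (std::size_t nunits)
{
  std::vector<int> sel (nunits);
  for (std::size_t i = 0; i < nunits; ++i)
    sel[i] = static_cast<int> (nunits - 1 - i);
  return sel;   // e.g. nunits == 4 gives { 3, 2, 1, 0 }
}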

◆ permute_vec_elements()

static tree permute_vec_elements ( vec_info * vinfo,
tree x,
tree y,
tree mask_vec,
stmt_vec_info stmt_info,
gimple_stmt_iterator * gsi )
static
Given vector variables X and Y that were generated for the scalar
STMT_INFO, generate instructions to permute the vector elements of X and Y
using permutation mask MASK_VEC, insert them at *GSI, and return the
permuted vector variable.   

References ggc_alloc(), gimple_build_assign(), gimple_get_lhs(), make_ssa_name(), NULL, TREE_CODE, TREE_TYPE, vect_create_destination_var(), vect_finish_stmt_generation(), vect_get_new_vect_var(), vect_simple_var, and y.

Referenced by vectorizable_load().

◆ prepare_vec_mask()

static tree prepare_vec_mask ( loop_vec_info loop_vinfo,
tree mask_type,
tree loop_mask,
tree vec_mask,
gimple_stmt_iterator * gsi )
static
Return the mask input to a masked load or store.  VEC_MASK is the vectorized
form of the scalar mask condition and LOOP_MASK, if nonnull, is the mask
that needs to be applied to all loads and stores in a vectorized loop.
Return VEC_MASK if LOOP_MASK is null or if VEC_MASK is already masked,
otherwise return VEC_MASK & LOOP_MASK.

MASK_TYPE is the type of both masks.  If new statements are needed,
insert them before GSI.   

References hash_set< KeyId, Lazy, Traits >::contains(), gcc_assert, ggc_alloc(), gimple_build_assign(), gsi_insert_before(), GSI_SAME_STMT, make_temp_ssa_name(), NULL, TREE_TYPE, useless_type_conversion_p(), and _loop_vec_info::vec_cond_masked_set.

Referenced by vectorizable_call(), vectorizable_early_exit(), vectorizable_load(), vectorizable_operation(), vectorizable_simd_clone_call(), and vectorizable_store().
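
The returned value can be modelled element-wise as follows; this is a sketch over plain boolean vectors (the real code emits a BIT_AND_EXPR and consults vec_cond_masked_set to recognise masks that are already combined).

#include <cstddef>
#include <vector>

// Scalar model: with no loop mask the scalar condition mask is used
// unchanged; otherwise a lane is active only if both masks are true.
static std::vector<bool>
prepare_vec_mask_model (const std::vector<bool> *loop_mask,
                        const std::vector<bool> &vec_mask)
{
  if (!loop_mask)
    return vec_mask;
  std::vector<bool> result (vec_mask.size ());
  for (std::size_t i = 0; i < vec_mask.size (); ++i)
    result[i] = vec_mask[i] && (*loop_mask)[i];
  return result;
}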

◆ process_use()

◆ read_vector_array()

static tree read_vector_array ( vec_info * vinfo,
stmt_vec_info stmt_info,
gimple_stmt_iterator * gsi,
tree scalar_dest,
tree array,
unsigned HOST_WIDE_INT n )
static
ARRAY is an array of vectors created by create_vector_array.
Return an SSA_NAME for the vector in index N.  The reference
is part of the vectorization of STMT_INFO and the vector is associated
with scalar destination SCALAR_DEST.   

References build4(), build_int_cst(), gcc_assert, ggc_alloc(), gimple_assign_set_lhs(), gimple_build_assign(), make_ssa_name(), NULL_TREE, size_type_node, TREE_CODE, TREE_TYPE, vect_create_destination_var(), and vect_finish_stmt_generation().

Referenced by vectorizable_load().

◆ record_stmt_cost() [1/4]

◆ record_stmt_cost() [2/4]

unsigned record_stmt_cost ( stmt_vector_for_cost * body_cost_vec,
int count,
enum vect_cost_for_stmt kind,
slp_tree node,
tree vectype,
int misalign,
enum vect_cost_model_location where )

◆ record_stmt_cost() [3/4]

◆ record_stmt_cost() [4/4]

unsigned record_stmt_cost ( stmt_vector_for_cost * body_cost_vec,
int count,
enum vect_cost_for_stmt kind,
stmt_vec_info stmt_info,
tree vectype,
int misalign,
enum vect_cost_model_location where )

References count, ggc_alloc(), NULL, and record_stmt_cost().

Referenced by record_stmt_cost().

◆ scan_operand_equal_p()

static bool scan_operand_equal_p ( tree ref1,
tree ref2 )
static

◆ scan_store_can_perm_p()

static int scan_store_can_perm_p ( tree vectype,
tree init,
vec< enum scan_store_kind > * use_whole_vector = NULL )
static
Function scan_store_can_perm_p.

Verify if we can perform the needed permutations or whole vector shifts.
Return -1 on failure, otherwise the exact log2 of vectype's nunits.
USE_WHOLE_VECTOR is a vector of enum scan_store_kind specifying which
operation to do at each step.   

References can_vec_perm_const_p(), exact_log2(), expand_vec_cond_expr_p(), ggc_alloc(), HOST_WIDE_INT_1U, i, data_reference::indices, initializer_zerop(), optab_handler(), scan_store_kind_lshift_cond, scan_store_kind_lshift_zero, scan_store_kind_perm, TREE_CODE, truth_type_for(), TYPE_MODE, and TYPE_VECTOR_SUBPARTS().

Referenced by check_scan_store(), and vectorizable_scan_store().

◆ simple_integer_narrowing()

static bool simple_integer_narrowing ( tree vectype_out,
tree vectype_in,
code_helper * convert_code )
static
Return true if vector types VECTYPE_IN and VECTYPE_OUT have
integer elements and if we can narrow VECTYPE_IN to VECTYPE_OUT
in a single step.  On success, store the binary pack code in
*CONVERT_CODE.   

References ggc_alloc(), INTEGRAL_TYPE_P, supportable_narrowing_operation(), and TREE_TYPE.

Referenced by vectorizable_call().

◆ stmt_in_inner_loop_p()

bool stmt_in_inner_loop_p ( vec_info * vinfo,
class _stmt_vec_info * stmt_info )
Return TRUE iff the given statement is in an inner loop relative to
the loop being vectorized.   

References ggc_alloc(), gimple_bb(), loop::inner, basic_block_def::loop_father, LOOP_VINFO_LOOP, and STMT_VINFO_STMT.

Referenced by vector_costs::adjust_cost_for_freq().

◆ stmt_vectype()

tree stmt_vectype ( class _stmt_vec_info * stmt_info)
Statement Analysis and Transformation for Vectorization
   Copyright (C) 2003-2024 Free Software Foundation, Inc.
   Contributed by Dorit Naishlos <dorit@il.ibm.com>
   and Ira Rosen <irar@il.ibm.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.   
For lang_hooks.types.type_for_mode.   
Return the vectorized type for the given statement.   

References STMT_VINFO_VECTYPE.

Referenced by vect_determine_vf_for_stmt_1(), and vect_get_vec_defs_for_operand().

◆ supportable_narrowing_operation()

bool supportable_narrowing_operation ( code_helper code,
tree vectype_out,
tree vectype_in,
code_helper * code1,
int * multi_step_cvt,
vec< tree > * interm_types )
Function supportable_narrowing_operation

Check whether an operation represented by the code CODE is a
narrowing operation that is supported by the target platform in
vector form (i.e., when operating on arguments of type VECTYPE_IN
and producing a result of type VECTYPE_OUT).

Narrowing operations we currently support are NOP (CONVERT), FIX_TRUNC
and FLOAT.  This function checks if these operations are supported by
the target platform directly via vector tree-codes.

Output:
- CODE1 is the code of a vector operation to be used when
vectorizing the operation, if available.
- MULTI_STEP_CVT determines the number of required intermediate steps in
case of multi-step conversion (like int->short->char - in that case
MULTI_STEP_CVT will be 1).
- INTERM_TYPES contains the intermediate type required to perform the
narrowing operation (short in the above example).    

References CASE_CONVERT, gcc_unreachable, ggc_alloc(), i, insn_data, code_helper::is_tree_code(), known_eq, MAX_INTERM_CVT_STEPS, insn_operand_data::mode, insn_data_d::operand, optab_default, optab_for_tree_code(), optab_handler(), SCALAR_INT_MODE_P, lang_hooks_for_types::type_for_mode, TYPE_MODE, TYPE_UNSIGNED, TYPE_VECTOR_SUBPARTS(), lang_hooks::types, unknown_optab, vect_double_mask_nunits(), and VECTOR_BOOLEAN_TYPE_P.

Referenced by simple_integer_narrowing(), and vectorizable_conversion().
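
A sketch of how a caller consumes these outputs, using hypothetical scalar stand-ins for vector types and for the pack operation; it is only meant to show how MULTI_STEP_CVT and INTERM_TYPES relate to the number of steps, not how vectorizable_conversion actually emits statements.

#include <vector>

// Hypothetical placeholders: a "type" is just a tag and apply_pack stands in
// for emitting one vector pack statement from the current type to TO.
using vec_type_tag = int;
static vec_type_tag apply_pack (vec_type_tag /*from*/, vec_type_tag to)
{ return to; }

// With MULTI_STEP_CVT == interm_types.size () intermediate types, the
// narrowing takes MULTI_STEP_CVT + 1 pack steps; e.g. int -> short -> char
// has one intermediate type ("vector short") and MULTI_STEP_CVT == 1.
static vec_type_tag
narrow_chain (vec_type_tag vectype_in, vec_type_tag vectype_out,
              const std::vector<vec_type_tag> &interm_types)
{
  vec_type_tag cur = vectype_in;
  for (vec_type_tag t : interm_types)
    cur = apply_pack (cur, t);            // one step per intermediate type
  return apply_pack (cur, vectype_out);   // final step to VECTYPE_OUT
}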

◆ supportable_widening_operation()

bool supportable_widening_operation ( vec_info * vinfo,
code_helper code,
stmt_vec_info stmt_info,
tree vectype_out,
tree vectype_in,
code_helper * code1,
code_helper * code2,
int * multi_step_cvt,
vec< tree > * interm_types )
Function supportable_widening_operation

Check whether an operation represented by the code CODE is a
widening operation that is supported by the target platform in
vector form (i.e., when operating on arguments of type VECTYPE_IN
producing a result of type VECTYPE_OUT).

Widening operations we currently support are NOP (CONVERT), FLOAT,
FIX_TRUNC and WIDEN_MULT.  This function checks if these operations
are supported by the target platform either directly (via vector
tree-codes), or via target builtins.

Output:
- CODE1 and CODE2 are codes of vector operations to be used when
vectorizing the operation, if available.
- MULTI_STEP_CVT determines the number of required intermediate steps in
case of multi-step conversion (like char->short->int - in that case
MULTI_STEP_CVT will be 1).
- INTERM_TYPES contains the intermediate type required to perform the
widening operation (short in the above example).   

References as_combined_fn(), as_internal_fn(), build_vector_type_for_mode(), CASE_CONVERT, CONVERT_EXPR_CODE_P, direct_internal_fn_optab(), gcc_unreachable, GET_MODE_INNER, ggc_alloc(), gimple_assign_lhs(), i, insn_data, code_helper::is_tree_code(), known_eq, lookup_evenodd_internal_fn(), lookup_hilo_internal_fn(), LOOP_VINFO_LOOP, MAX_INTERM_CVT_STEPS, MAX_TREE_CODES, insn_operand_data::mode, nested_in_vect_loop_p(), NULL, insn_data_d::operand, optab_default, optab_for_tree_code(), optab_handler(), code_helper::safe_as_tree_code(), SCALAR_INT_MODE_P, STMT_VINFO_DEF_TYPE, STMT_VINFO_RELEVANT, supportable_widening_operation(), lang_hooks_for_types::type_for_mode, TYPE_MODE, TYPE_UNSIGNED, TYPE_VECTOR_SUBPARTS(), lang_hooks::types, unknown_optab, vect_halve_mask_nunits(), vect_reduction_def, vect_used_by_reduction, VECTOR_BOOLEAN_TYPE_P, VECTOR_MODE_P, and widening_fn_p().

Referenced by supportable_widening_operation(), vect_recog_abd_pattern(), vect_recog_widen_abd_pattern(), vect_recog_widen_op_pattern(), and vectorizable_conversion().

◆ vect_analyze_stmt()

opt_result vect_analyze_stmt ( vec_info * vinfo,
stmt_vec_info stmt_info,
bool * need_to_vectorize,
slp_tree node,
slp_instance node_instance,
stmt_vector_for_cost * cost_vec )

◆ vect_build_all_ones_mask()

static tree vect_build_all_ones_mask ( vec_info * vinfo,
stmt_vec_info stmt_info,
tree masktype )
static
Build an all-ones vector mask of type MASKTYPE while vectorizing STMT_INFO.
Note that we support masks with floating-point type, in which case the
floats are interpreted as a bitmask.   

References build_int_cst(), build_real(), build_vector_from_val(), gcc_unreachable, ggc_alloc(), NULL, r, real_from_target(), REAL_VALUE_TYPE, SCALAR_FLOAT_TYPE_P, TREE_CODE, TREE_TYPE, TYPE_MODE, vect_init_vector(), and VECTOR_BOOLEAN_TYPE_P.

Referenced by vect_build_one_gather_load_call(), and vectorizable_simd_clone_call().

◆ vect_build_one_gather_load_call()

static gimple * vect_build_one_gather_load_call ( vec_info * vinfo,
stmt_vec_info stmt_info,
gimple_stmt_iterator * gsi,
gather_scatter_info * gs_info,
tree ptr,
tree offset,
tree mask )
static
Build a gather load call while vectorizing STMT_INFO.  Insert new
instructions before GSI and add them to VEC_STMT.  GS_INFO describes
the gather load operation.  If the load is conditional, MASK is the
vectorized condition, otherwise MASK is null.  PTR is the base
pointer and OFFSET is the vectorized offset.   

References build1(), build_int_cst(), build_zero_cst(), gcc_assert, gcc_checking_assert, ggc_alloc(), gimple_build_assign(), gimple_build_call(), gimple_call_set_lhs(), known_eq, NULL_TREE, offset, STMT_VINFO_VECTYPE, TREE_CHAIN, TREE_CODE, TREE_TYPE, TREE_VALUE, TYPE_ARG_TYPES, lang_hooks_for_types::type_for_mode, TYPE_MODE, TYPE_PRECISION, TYPE_VECTOR_SUBPARTS(), lang_hooks::types, types_compatible_p(), useless_type_conversion_p(), vect_build_all_ones_mask(), vect_build_zero_merge_argument(), vect_finish_stmt_generation(), vect_get_new_ssa_name(), vect_scalar_var, vect_simple_var, and VECTOR_TYPE_P.

Referenced by vectorizable_load().

◆ vect_build_one_scatter_store_call()

static gimple * vect_build_one_scatter_store_call ( vec_info * vinfo,
stmt_vec_info stmt_info,
gimple_stmt_iterator * gsi,
gather_scatter_info * gs_info,
tree ptr,
tree offset,
tree oprnd,
tree mask )
static
Build a scatter store call while vectorizing STMT_INFO.  Insert new
instructions before GSI.  GS_INFO describes the scatter store operation.
PTR is the base pointer, OFFSET the vectorized offsets and OPRND the
vectorized data to store.
If the store is conditional, MASK is the vectorized condition, otherwise
MASK is null.   

References build1(), build_int_cst(), gcc_assert, gcc_checking_assert, ggc_alloc(), gimple_build_assign(), gimple_build_call(), known_eq, NULL, NULL_TREE, offset, TREE_CHAIN, TREE_CODE, TREE_TYPE, TREE_VALUE, TYPE_ARG_TYPES, lang_hooks_for_types::type_for_mode, TYPE_MODE, TYPE_PRECISION, TYPE_VECTOR_SUBPARTS(), lang_hooks::types, useless_type_conversion_p(), vect_finish_stmt_generation(), vect_get_new_ssa_name(), vect_init_vector(), vect_scalar_var, and vect_simple_var.

Referenced by vectorizable_store().

◆ vect_build_zero_merge_argument()

static tree vect_build_zero_merge_argument ( vec_info * vinfo,
stmt_vec_info stmt_info,
tree vectype )
static
Build an all-zero merge value of type VECTYPE while vectorizing
STMT_INFO as a gather load.   

References build_int_cst(), build_real(), build_vector_from_val(), gcc_unreachable, ggc_alloc(), NULL, r, real_from_target(), REAL_VALUE_TYPE, SCALAR_FLOAT_TYPE_P, TREE_CODE, TREE_TYPE, TYPE_MODE, and vect_init_vector().

Referenced by vect_build_one_gather_load_call().

◆ vect_check_scalar_mask()

static bool vect_check_scalar_mask ( vec_info * vinfo,
stmt_vec_info stmt_info,
slp_tree slp_node,
unsigned mask_index,
tree * mask,
slp_tree * mask_node,
vect_def_type * mask_dt_out,
tree * mask_vectype_out )
static
Return true if boolean argument at MASK_INDEX is suitable for vectorizing
conditional operation STMT_INFO.  When returning true, store the mask
in *MASK, the type of its definition in *MASK_DT_OUT, the type of the
vectorized mask in *MASK_VECTYPE_OUT and the SLP node corresponding
to the mask in *MASK_NODE if MASK_NODE is not NULL.   

References dump_enabled_p(), dump_printf_loc(), get_mask_type_for_scalar_type(), ggc_alloc(), MSG_MISSED_OPTIMIZATION, SLP_TREE_DEF_TYPE, STMT_VINFO_VECTYPE, TREE_TYPE, TYPE_VECTOR_SUBPARTS(), vect_internal_def, vect_is_simple_use(), vect_location, VECT_SCALAR_BOOLEAN_TYPE_P, and VECTOR_BOOLEAN_TYPE_P.

Referenced by vectorizable_call(), vectorizable_load(), and vectorizable_store().

◆ vect_check_store_rhs()

static bool vect_check_store_rhs ( vec_info * vinfo,
stmt_vec_info stmt_info,
slp_tree slp_node,
tree * rhs,
slp_tree * rhs_node,
vect_def_type * rhs_dt_out,
tree * rhs_vectype_out,
vec_load_store_type * vls_type_out )
static
Return true if the stored value is suitable for vectorizing store
statement STMT_INFO.  When returning true, store the scalar being stored
in *RHS and its SLP node in *RHS_NODE, the type of the definition in
*RHS_DT_OUT, the type of the vectorized store value in *RHS_VECTYPE_OUT,
and the type of the store in *VLS_TYPE_OUT.   

References CONSTANT_CLASS_P, dump_enabled_p(), dump_printf_loc(), ggc_alloc(), gimple_call_internal_fn(), gimple_call_internal_p(), internal_fn_stored_value_index(), internal_store_fn_p(), MSG_MISSED_OPTIMIZATION, native_encode_expr(), NULL, STMT_VINFO_GATHER_SCATTER_P, STMT_VINFO_VECTYPE, useless_type_conversion_p(), vect_constant_def, vect_external_def, vect_is_simple_use(), vect_location, vect_slp_child_index_for_operand(), VLS_STORE, and VLS_STORE_INVARIANT.

Referenced by vectorizable_store().

◆ vect_chooses_same_modes_p()

bool vect_chooses_same_modes_p ( vec_info * vinfo,
machine_mode vector_mode )
Return true if replacing LOOP_VINFO->vector_mode with VECTOR_MODE
would not change the chosen vector modes.   

References hash_set< KeyId, Lazy, Traits >::begin(), hash_set< KeyId, Lazy, Traits >::end(), GET_MODE_INNER, i, related_vector_mode(), vec_info::used_vector_modes, and VECTOR_MODE_P.

Referenced by vect_analyze_loop_1(), and vect_slp_region().

◆ vect_clobber_variable()

static void vect_clobber_variable ( vec_info * vinfo,
stmt_vec_info stmt_info,
gimple_stmt_iterator * gsi,
tree var )
static
Add a clobber of variable VAR to the vectorization of STMT_INFO.
Emit the clobber before *GSI.   

References build_clobber(), ggc_alloc(), gimple_build_assign(), TREE_TYPE, and vect_finish_stmt_generation().

Referenced by vectorizable_load(), vectorizable_simd_clone_call(), and vectorizable_store().

◆ vect_create_half_widening_stmts()

static void vect_create_half_widening_stmts ( vec_info * vinfo,
vec< tree > * vec_oprnds0,
vec< tree > * vec_oprnds1,
stmt_vec_info stmt_info,
tree vec_dest,
gimple_stmt_iterator * gsi,
code_helper code1,
int op_type )
static
Create vectorized promotion stmts for widening stmts using only half the
potential vector size for input.   

References binary_op, FOR_EACH_VEC_ELT, gcc_assert, ggc_alloc(), gimple_assign_set_lhs(), gimple_build_assign(), i, make_ssa_name(), TREE_TYPE, vect_finish_stmt_generation(), vect_gimple_build(), VECTOR_TYPE_P, and vNULL.

Referenced by vectorizable_conversion().

◆ vect_create_vectorized_demotion_stmts()

static void vect_create_vectorized_demotion_stmts ( vec_info * vinfo,
vec< tree > * vec_oprnds,
int multi_step_cvt,
stmt_vec_info stmt_info,
vec< tree > & vec_dsts,
gimple_stmt_iterator * gsi,
slp_tree slp_node,
code_helper code,
bool narrow_src_p )
static
Create vectorized demotion statements for vector operands from VEC_OPRNDS.
For multi-step conversions, store the resulting vectors and call the function
recursively.  When NARROW_SRC_P is true, there is still a conversion after
narrowing, so don't store the vectors in the SLP_NODE or in the vector info
of the scalar statement (or in the STMT_VINFO_RELATED_STMT chain).   

References ggc_alloc(), gimple_set_lhs(), i, make_ssa_name(), STMT_VINFO_VEC_STMTS, vect_create_vectorized_demotion_stmts(), vect_finish_stmt_generation(), and vect_gimple_build().

Referenced by vect_create_vectorized_demotion_stmts(), and vectorizable_conversion().

◆ vect_create_vectorized_promotion_stmts()

static void vect_create_vectorized_promotion_stmts ( vec_info * vinfo,
vec< tree > * vec_oprnds0,
vec< tree > * vec_oprnds1,
stmt_vec_info stmt_info,
tree vec_dest,
gimple_stmt_iterator * gsi,
code_helper ch1,
code_helper ch2,
int op_type )
static
Create vectorized promotion statements for vector operands from VEC_OPRNDS0
and VEC_OPRNDS1, for a binary operation associated with scalar statement
STMT_INFO.  For multi-step conversions store the resulting vectors and
call the function recursively.   

References binary_op, FOR_EACH_VEC_ELT, ggc_alloc(), gimple_assign_lhs(), gimple_call_lhs(), i, is_gimple_call(), NULL_TREE, vect_gen_widened_results_half(), and vNULL.

Referenced by vectorizable_conversion().

◆ vect_finish_replace_stmt()

void vect_finish_replace_stmt ( vec_info * vinfo,
stmt_vec_info stmt_info,
gimple * vec_stmt )
Replace the scalar statement STMT_INFO with a new vector statement VEC_STMT,
which sets the same scalar result as STMT_INFO did.  Create and return a
stmt_vec_info for VEC_STMT.   

References gcc_assert, ggc_alloc(), gimple_get_lhs(), gsi_for_stmt(), gsi_replace(), scalar_stmt, vect_finish_stmt_generation_1(), and vect_orig_stmt().

Referenced by vectorizable_condition(), and vectorize_fold_left_reduction().

◆ vect_finish_stmt_generation()

void vect_finish_stmt_generation ( vec_info * vinfo,
stmt_vec_info stmt_info,
gimple * vec_stmt,
gimple_stmt_iterator * gsi )
Add VEC_STMT to the vectorized implementation of STMT_INFO and insert it
before *GSI.  Create and return a stmt_vec_info for VEC_STMT.   

References copy_ssa_name(), ECF_CONST, ECF_NOVOPS, ECF_PURE, gcc_assert, ggc_alloc(), gimple_assign_lhs(), gimple_call_flags(), gimple_call_lhs(), gimple_has_mem_ops(), gimple_set_modified(), gimple_set_vdef(), gimple_set_vuse(), gimple_vdef(), gimple_vuse(), gimple_vuse_op(), gsi_end_p(), gsi_insert_before(), GSI_SAME_STMT, gsi_stmt(), is_gimple_assign(), is_gimple_call(), is_gimple_reg(), SET_USE, TREE_CODE, and vect_finish_stmt_generation_1().

Referenced by bump_vector_ptr(), permute_vec_elements(), read_vector_array(), vect_add_slp_permutation(), vect_build_one_gather_load_call(), vect_build_one_scatter_store_call(), vect_clobber_variable(), vect_create_half_widening_stmts(), vect_create_vectorized_demotion_stmts(), vect_emulate_mixed_dot_prod(), vect_gen_widened_results_half(), vect_init_vector_1(), vect_permute_load_chain(), vect_permute_store_chain(), vect_shift_permute_load_chain(), vect_transform_reduction(), vect_transform_slp_perm_load_1(), vectorizable_assignment(), vectorizable_bswap(), vectorizable_call(), vectorizable_comparison_1(), vectorizable_condition(), vectorizable_conversion(), vectorizable_early_exit(), vectorizable_load(), vectorizable_operation(), vectorizable_recurr(), vectorizable_scan_store(), vectorizable_shift(), vectorizable_simd_clone_call(), vectorizable_store(), vectorize_fold_left_reduction(), and write_vector_array().

◆ vect_finish_stmt_generation_1()

static void vect_finish_stmt_generation_1 ( vec_info * ,
stmt_vec_info stmt_info,
gimple * vec_stmt )
static
Helper function called by vect_finish_replace_stmt and
vect_finish_stmt_generation.  Set the location of the new
statement and create and return a stmt_vec_info for it.   

References add_stmt_to_eh_lp(), cfun, dump_enabled_p(), dump_printf_loc(), gcc_assert, ggc_alloc(), gimple_location(), gimple_set_location(), lookup_stmt_eh_lp(), MSG_NOTE, stmt_could_throw_p(), and vect_location.

Referenced by vect_finish_replace_stmt(), and vect_finish_stmt_generation().

◆ vect_gen_len()

gimple_seq vect_gen_len ( tree len,
tree start_index,
tree end_index,
tree len_limit )
Generate and return statement sequence that sets vector length LEN that is:

min_of_start_and_end = min (START_INDEX, END_INDEX);
left_len = END_INDEX - min_of_start_and_end;
rhs = min (left_len, LEN_LIMIT);
LEN = rhs;

Note: the cost of the code generated by this function is modeled
by vect_estimate_min_profitable_iters, so changes here may need
corresponding changes there.   

References gcc_assert, ggc_alloc(), gimple_build(), gimple_build_assign(), gimple_seq_add_stmt(), NULL, and TREE_TYPE.

Referenced by vect_set_loop_controls_directly().
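
In scalar terms the generated sequence computes min (END_INDEX - min (START_INDEX, END_INDEX), LEN_LIMIT); a minimal model on plain integers (illustrative only, the real function builds a gimple_seq):

#include <algorithm>

// Scalar model of the generated statements: the result is the number of
// remaining elements clamped to LEN_LIMIT, and 0 once START_INDEX has
// reached or passed END_INDEX.
static unsigned long
vect_len_model (unsigned long start_index, unsigned long end_index,
                unsigned long len_limit)
{
  unsigned long min_of_start_and_end = std::min (start_index, end_index);
  unsigned long left_len = end_index - min_of_start_and_end;
  return std::min (left_len, len_limit);
}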

◆ vect_gen_perm_mask_any()

tree vect_gen_perm_mask_any ( tree vectype,
const vec_perm_indices & sel )
Given a vector type VECTYPE, turns permutation SEL into the equivalent
VECTOR_CST mask.  No checks are made that the target platform supports the
mask, so callers may wish to test can_vec_perm_const_p separately, or use
vect_gen_perm_mask_checked.   

References build_vector_type(), gcc_assert, ggc_alloc(), known_eq, ssizetype, TYPE_VECTOR_SUBPARTS(), and vec_perm_indices_to_tree().

Referenced by vect_create_epilog_for_reduction(), vect_create_nonlinear_iv_init(), vect_gen_perm_mask_checked(), and vectorizable_scan_store().
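
The effect of applying such a mask can be modelled on plain arrays for the single-input case (a sketch; SEL holds, for every output lane, the index of the input lane to take, and this function only encodes SEL as a VECTOR_CST without checking target support):

#include <cstddef>
#include <vector>

// Scalar model of a constant-selector permute: output lane I takes input
// lane SEL[I].
static std::vector<int>
apply_perm_model (const std::vector<int> &input,
                  const std::vector<std::size_t> &sel)
{
  std::vector<int> out (sel.size ());
  for (std::size_t i = 0; i < sel.size (); ++i)
    out[i] = input[sel[i]];
  return out;
}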

◆ vect_gen_perm_mask_checked()

◆ vect_gen_while()

tree vect_gen_while ( gimple_seq * seq,
tree mask_type,
tree start_index,
tree end_index,
const char * name )
Generate and return a vector mask of MASK_TYPE such that
mask[I] is true iff J + START_INDEX < END_INDEX for all J <= I.
Add the statements to SEQ.   

References build_zero_cst(), direct_internal_fn_supported_p(), gcc_checking_assert, ggc_alloc(), gimple_build_call_internal(), gimple_call_set_lhs(), gimple_seq_add_stmt(), make_ssa_name(), make_temp_ssa_name(), NULL, OPTIMIZE_FOR_SPEED, and TREE_TYPE.

Referenced by vect_gen_while_not(), and vect_set_loop_controls_directly().
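
Because the condition is monotonic in I, element I of the mask is simply START_INDEX + I < END_INDEX; a scalar model of this WHILE-style mask (illustrative only):

#include <cstddef>
#include <vector>

// Scalar model: lane I is active while START_INDEX + I is still below
// END_INDEX, giving an all-true prefix followed by an all-false tail.
static std::vector<bool>
while_ult_model (unsigned long start_index, unsigned long end_index,
                 std::size_t nlanes)
{
  std::vector<bool> mask (nlanes);
  for (std::size_t i = 0; i < nlanes; ++i)
    mask[i] = (start_index + i < end_index);
  return mask;
}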

◆ vect_gen_while_not()

tree vect_gen_while_not ( gimple_seq * seq,
tree mask_type,
tree start_index,
tree end_index )
Generate a vector mask of type MASK_TYPE for which index I is false iff
J + START_INDEX < END_INDEX for all J <= I.  Add the statements to SEQ.   

References ggc_alloc(), gimple_build(), and vect_gen_while().

Referenced by vect_set_loop_controls_directly().

◆ vect_gen_widened_results_half()

static gimple * vect_gen_widened_results_half ( vec_info * vinfo,
code_helper ch,
tree vec_oprnd0,
tree vec_oprnd1,
int op_type,
tree vec_dest,
gimple_stmt_iterator * gsi,
stmt_vec_info stmt_info )
static
Function vect_gen_widened_results_half

Create a vector stmt whose code, number of arguments, and result
variable are CH, OP_TYPE and VEC_DEST, and whose arguments are
VEC_OPRND0 and VEC_OPRND1.  The new vector stmt is to be inserted at GSI.
In the case that CH names a function call rather than a tree code, the
corresponding call statement is created instead of an assignment.
STMT_INFO is the original scalar stmt that we are vectorizing.   

References binary_op, ggc_alloc(), gimple_set_lhs(), make_ssa_name(), NULL, vect_finish_stmt_generation(), and vect_gimple_build().

Referenced by vect_create_vectorized_promotion_stmts().

◆ vect_get_data_ptr_increment()

static tree vect_get_data_ptr_increment ( vec_info * vinfo,
gimple_stmt_iterator * gsi,
dr_vec_info * dr_info,
tree aggr_type,
vect_memory_access_type memory_access_type,
vec_loop_lens * loop_lens = nullptr )
static
Return the amount that should be added to a vector pointer to move
to the next or previous copy of AGGR_TYPE.  DR_INFO is the data reference
being vectorized and MEMORY_ACCESS_TYPE describes the type of
vectorization.   
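
A simplified sketch of the selection logic, under the assumption that the
common cases look as follows (the real code additionally defers to
vect_get_loop_variant_data_ptr_increment when the loop uses .SELECT_VL):

  /* Hedged sketch, not the actual implementation.  */
  if (memory_access_type == VMAT_INVARIANT)
    return size_zero_node;                       /* pointer never moves */
  tree iv_step = TYPE_SIZE_UNIT (aggr_type);     /* one copy of AGGR_TYPE */
  tree step = vect_dr_behavior (vinfo, dr_info)->step;
  if (tree_int_cst_sgn (step) == -1)             /* negative DR step */
    iv_step = fold_build1 (NEGATE_EXPR, TREE_TYPE (iv_step), iv_step);
  return iv_step;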

References fold_build1, ggc_alloc(), LOOP_VINFO_USING_SELECT_VL_P, size_zero_node, innermost_loop_behavior::step, tree_int_cst_sgn(), TREE_TYPE, TYPE_SIZE_UNIT, vect_dr_behavior(), vect_get_loop_variant_data_ptr_increment(), and VMAT_INVARIANT.

Referenced by vectorizable_load(), vectorizable_scan_store(), and vectorizable_store().

◆ vect_get_gather_scatter_ops()

static void vect_get_gather_scatter_ops ( loop_vec_info loop_vinfo,
class loop * loop,
stmt_vec_info stmt_info,
slp_tree slp_node,
gather_scatter_info * gs_info,
tree * dataref_ptr,
vec< tree > * vec_offset )
static
Prepare the base and offset in GS_INFO for vectorization.
Set *DATAREF_PTR to the loop-invariant base address and *VEC_OFFSET
to the vectorized offset argument for the first copy of STMT_INFO.
STMT_INFO is the statement described by GS_INFO and LOOP is the
containing loop.   

References force_gimple_operand(), gcc_assert, ggc_alloc(), gsi_insert_seq_on_edge_immediate(), loop_preheader_edge(), NULL, NULL_TREE, SLP_TREE_CHILDREN, vect_get_num_copies(), vect_get_slp_defs(), and vect_get_vec_defs_for_operand().

Referenced by vectorizable_load(), and vectorizable_store().

◆ vect_get_load_cost()

void vect_get_load_cost ( vec_info * ,
stmt_vec_info stmt_info,
int ncopies,
dr_alignment_support alignment_support_scheme,
int misalignment,
bool add_realign_cost,
unsigned int * inside_cost,
unsigned int * prologue_cost,
stmt_vector_for_cost * prologue_cost_vec,
stmt_vector_for_cost * body_cost_vec,
bool record_prologue_costs )

◆ vect_get_loop_variant_data_ptr_increment()

static tree vect_get_loop_variant_data_ptr_increment ( vec_info * vinfo,
tree aggr_type,
gimple_stmt_iterator * gsi,
vec_loop_lens * loop_lens,
dr_vec_info * dr_info,
vect_memory_access_type memory_access_type )
static
Prepare the pointer IVs which need to be updated by a variable amount.
That variable amount is the outcome of .SELECT_VL.  In this case, each
iteration may process a flexible number of elements, as long as that
number is <= vf elements.

Return the data-reference increment according to .SELECT_VL.
If new statements are needed, insert them before GSI.   

References fold_build2, gcc_assert, ggc_alloc(), gimple_build_assign(), gsi_insert_before(), GSI_SAME_STMT, make_temp_ssa_name(), NULL, innermost_loop_behavior::step, wi::to_widest(), TREE_TYPE, vect_dr_behavior(), vect_get_loop_len(), VMAT_GATHER_SCATTER, and wide_int_to_tree().

Referenced by vect_get_data_ptr_increment().

◆ vect_get_store_cost()

void vect_get_store_cost ( vec_info * ,
stmt_vec_info stmt_info,
int ncopies,
dr_alignment_support alignment_support_scheme,
int misalignment,
unsigned int * inside_cost,
stmt_vector_for_cost * body_cost_vec )

◆ vect_get_store_rhs()

tree vect_get_store_rhs ( stmt_vec_info stmt_info)

◆ vect_get_strided_load_store_ops()

static void vect_get_strided_load_store_ops ( stmt_vec_info stmt_info,
loop_vec_info loop_vinfo,
gimple_stmt_iterator * gsi,
gather_scatter_info * gs_info,
tree * dataref_bump,
tree * vec_offset,
vec_loop_lens * loop_lens )
static
Prepare to implement a grouped or strided load or store using
the gather load or scatter store operation described by GS_INFO.
STMT_INFO is the load or store statement.

Set *DATAREF_BUMP to the amount that should be added to the base
address after each copy of the vectorized statement.  Set *VEC_OFFSET
to an invariant offset vector in which element I has the value
I * DR_STEP / SCALE.   
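
As a scalar illustration of the *VEC_OFFSET contents only (not GCC API;
nunits, dr_step and scale are illustrative parameters):

  #include <vector>

  // Hedged sketch: element I of the invariant offset vector.
  std::vector<long long>
  sketch_strided_offsets (unsigned nunits, long long dr_step, long long scale)
  {
    std::vector<long long> vec_offset (nunits);
    for (unsigned i = 0; i < nunits; ++i)
      vec_offset[i] = (long long) i * dr_step / scale;  // I * DR_STEP / SCALE
    return vec_offset;
  }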

References build_zero_cst(), cse_and_gimplify_to_preheader(), DR_STEP, fold_build2, fold_convert, force_gimple_operand_gsi(), ggc_alloc(), GSI_SAME_STMT, LOOP_VINFO_USING_SELECT_VL_P, NULL_TREE, offset, size_binop, size_int, sizetype, ssize_int, STMT_VINFO_DATA_REF, STMT_VINFO_VECTYPE, TREE_TYPE, TYPE_VECTOR_SUBPARTS(), unshare_expr(), and vect_get_loop_len().

Referenced by vectorizable_load(), and vectorizable_store().

◆ vect_get_vec_defs() [1/2]

void vect_get_vec_defs ( vec_info * vinfo,
stmt_vec_info stmt_info,
slp_tree slp_node,
unsigned ncopies,
tree op0,
tree vectype0,
vec< tree > * vec_oprnds0,
tree op1,
tree vectype1,
vec< tree > * vec_oprnds1,
tree op2,
tree vectype2,
vec< tree > * vec_oprnds2,
tree op3,
tree vectype3,
vec< tree > * vec_oprnds3 )

◆ vect_get_vec_defs() [2/2]

void vect_get_vec_defs ( vec_info * vinfo,
stmt_vec_info stmt_info,
slp_tree slp_node,
unsigned ncopies,
tree op0,
vec< tree > * vec_oprnds0,
tree op1,
vec< tree > * vec_oprnds1,
tree op2,
vec< tree > * vec_oprnds2,
tree op3,
vec< tree > * vec_oprnds3 )

◆ vect_get_vec_defs_for_operand()

void vect_get_vec_defs_for_operand ( vec_info * vinfo,
stmt_vec_info stmt_vinfo,
unsigned ncopies,
tree op,
vec< tree > * vec_oprnds,
tree vectype )
Function vect_get_vec_defs_for_operand.

OP is an operand in STMT_VINFO.  This function returns a vector of
NCOPIES defs that will be used in the vectorized stmts for STMT_VINFO.

In the case that OP is an SSA_NAME which is defined in the loop, then
STMT_VINFO_VEC_STMTS of the defining stmt holds the relevant defs.

In case OP is an invariant or constant, a new stmt that creates a vector def
needs to be introduced.  VECTYPE may be used to specify a required type for
vector invariant.   
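
A hypothetical caller-side sketch (the variable names are illustrative,
and NULL_TREE is passed when no specific vector type is required):

  auto_vec<tree> vec_oprnds;
  vect_get_vec_defs_for_operand (vinfo, stmt_info, ncopies, op,
                                 &vec_oprnds, NULL_TREE);
  for (unsigned i = 0; i < vec_oprnds.length (); i++)
    {
      /* vec_oprnds[i] is the vector def to use in copy I of the
         vectorized statement.  */
    }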

References dump_enabled_p(), dump_printf_loc(), gcc_assert, get_vectype_for_scalar_type(), ggc_alloc(), gimple_get_lhs(), i, MSG_NOTE, NULL, stmt_vectype(), STMT_VINFO_VEC_STMTS, STMT_VINFO_VECTYPE, TREE_TYPE, truth_type_for(), vect_constant_def, vect_external_def, vect_init_vector(), vect_is_simple_use(), vect_location, VECT_SCALAR_BOOLEAN_TYPE_P, vect_stmt_to_vectorize(), and VECTOR_BOOLEAN_TYPE_P.

Referenced by vect_get_gather_scatter_ops(), vect_get_vec_defs(), vect_transform_cycle_phi(), vect_transform_reduction(), vectorizable_call(), vectorizable_induction(), vectorizable_load(), vectorizable_simd_clone_call(), vectorizable_store(), and vectorize_fold_left_reduction().

◆ vect_get_vector_types_for_stmt()

opt_result vect_get_vector_types_for_stmt ( vec_info * vinfo,
stmt_vec_info stmt_info,
tree * stmt_vectype_out,
tree * nunits_vectype_out,
unsigned int group_size )
Try to compute the vector types required to vectorize STMT_INFO,
returning true on success and false if vectorization isn't possible.
If GROUP_SIZE is nonzero and we're performing BB vectorization,
make sure that the number of elements in the vectors is no bigger
than GROUP_SIZE.

On success:

- Set *STMT_VECTYPE_OUT to:
  - NULL_TREE if the statement doesn't need to be vectorized;
  - the equivalent of STMT_VINFO_VECTYPE otherwise.

- Set *NUNITS_VECTYPE_OUT to the vector type that contains the maximum
  number of units needed to vectorize STMT_INFO, or NULL_TREE if the
  statement does not help to determine the overall number of units.   
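
A hypothetical caller-side sketch (variable names are illustrative; the
opt_result converts to bool and carries the failure reason on failure):

  tree stmt_vectype, nunits_vectype;
  opt_result res
    = vect_get_vector_types_for_stmt (vinfo, stmt_info, &stmt_vectype,
                                      &nunits_vectype, group_size);
  if (!res)
    return res;        /* propagate the reason vectorization failed */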

References build_nonstandard_integer_type(), DR_REF, dump_dec(), dump_enabled_p(), dump_printf(), dump_printf_loc(), opt_result::failure_at(), gcc_assert, get_mask_type_for_scalar_type(), get_vectype_for_scalar_type(), ggc_alloc(), gimple_call_arg(), gimple_call_internal_p(), gimple_get_lhs(), MSG_NOTE, NULL_TREE, vec_info::slp_instances, STMT_VINFO_DATA_REF, STMT_VINFO_VECTYPE, opt_result::success(), TREE_TYPE, TYPE_MODE, TYPE_VECTOR_SUBPARTS(), vect_get_smallest_scalar_type(), vect_location, vect_use_mask_type_p(), VECTOR_BOOLEAN_TYPE_P, and VECTOR_MODE_P.

Referenced by vect_build_slp_tree_1(), and vect_determine_vf_for_stmt_1().

◆ vect_init_vector()

tree vect_init_vector ( vec_info * vinfo,
stmt_vec_info stmt_info,
tree val,
tree type,
gimple_stmt_iterator * gsi )
Function vect_init_vector.

Insert a new stmt (INIT_STMT) that initializes a new variable of type
TYPE with the value VAL.  If TYPE is a vector type and VAL does not have
vector type, a vector with all elements equal to VAL is created first.
Place the initialization at GSI if it is not NULL.  Otherwise, place the
initialization at the loop preheader.
Return the DEF of INIT_STMT.
It will be used in the vectorization of STMT_INFO.   
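
A hypothetical caller-side sketch (VAL and VECTYPE are illustrative):

  /* Splat VAL into a vector of VECTYPE; with GSI == NULL the init stmt
     is placed on the loop preheader.  */
  tree vec_def = vect_init_vector (vinfo, stmt_info, val, vectype, NULL);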

References build_all_ones_cst(), build_vector_from_val(), build_zero_cst(), CONSTANT_CLASS_P, gcc_assert, ggc_alloc(), gimple_build(), gimple_build_assign(), gimple_convert(), gsi_end_p(), gsi_remove(), gsi_start(), gsi_stmt(), integer_zerop(), INTEGRAL_TYPE_P, make_ssa_name(), NULL, TREE_TYPE, types_compatible_p(), useless_type_conversion_p(), vect_get_new_ssa_name(), vect_init_vector_1(), vect_simple_var, VECTOR_BOOLEAN_TYPE_P, and VECTOR_TYPE_P.

Referenced by vect_build_all_ones_mask(), vect_build_one_scatter_store_call(), vect_build_zero_merge_argument(), vect_create_nonlinear_iv_vec_step(), vect_get_vec_defs_for_operand(), vectorizable_induction(), vectorizable_load(), vectorizable_recurr(), and vectorizable_shift().

◆ vect_init_vector_1()

static void vect_init_vector_1 ( vec_info * vinfo,
stmt_vec_info stmt_vinfo,
gimple * new_stmt,
gimple_stmt_iterator * gsi )
static
Insert the new stmt NEW_STMT at *GSI or at the appropriate place in
the loop preheader for the vectorized stmt STMT_VINFO.   

References dump_enabled_p(), dump_printf_loc(), ggc_alloc(), vec_info::insert_on_entry(), MSG_NOTE, vect_finish_stmt_generation(), and vect_location.

Referenced by vect_init_vector(), and vectorizable_call().

◆ vect_is_simple_cond()

static bool vect_is_simple_cond ( tree cond,
vec_info * vinfo,
stmt_vec_info stmt_info,
slp_tree slp_node,
tree * comp_vectype,
enum vect_def_type * dts,
tree vectype )
static
Function vect_is_simple_cond.

Input:
LOOP - the loop that is being vectorized.
COND - Condition that is checked for simple use.

Output:
*COMP_VECTYPE - the vector type for the comparison.
*DTS - The def types for the arguments of the comparison

Returns whether a COND can be vectorized.  Checks whether
condition operands are supportable using vect_is_simple_use.   

References build_nonstandard_integer_type(), COMPARISON_CLASS_P, get_vectype_for_scalar_type(), ggc_alloc(), INTEGRAL_TYPE_P, NULL_TREE, TREE_CODE, tree_int_cst_lt(), TREE_OPERAND, TREE_TYPE, truth_type_for(), TYPE_SIZE, TYPE_UNSIGNED, TYPE_VECTOR_SUBPARTS(), vect_constant_def, vect_is_simple_use(), VECT_SCALAR_BOOLEAN_TYPE_P, VECTOR_BOOLEAN_TYPE_P, and vector_element_bits().

Referenced by vectorizable_condition().

◆ vect_is_simple_use() [1/3]

bool vect_is_simple_use ( tree operand,
vec_info * vinfo,
enum vect_def_type * dt,
stmt_vec_info * def_stmt_info_out,
gimple ** def_stmt_out )
Function vect_is_simple_use.

Input:
VINFO - the vect info of the loop or basic block that is being vectorized.
OPERAND - operand in the loop or bb.
Output:
DEF_STMT_INFO_OUT (optional) - information about the defining stmt in
  case OPERAND is an SSA_NAME that is defined in the vectorizable region
DEF_STMT_OUT (optional) - the defining stmt in case OPERAND is an SSA_NAME;
  the definition could be anywhere in the function
DT - the type of definition

Returns whether a stmt with OPERAND can be vectorized.
For loops, supportable operands are constants, loop invariants, and operands
that are defined by the current iteration of the loop.  Unsupportable
operands are those that are defined by a previous iteration of the loop (as
is the case in reduction/induction computations).
For basic blocks, supportable operands are constants and bb invariants.
For now, operands defined outside the basic block are not supported.   
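
A hypothetical caller-side sketch (variable names are illustrative):

  enum vect_def_type dt;
  stmt_vec_info def_stmt_info;
  if (!vect_is_simple_use (op, vinfo, &dt, &def_stmt_info, NULL))
    return false;      /* OP cannot be handled by the vectorizer */
  if (dt == vect_constant_def || dt == vect_external_def)
    {
      /* OP is invariant in the region being vectorized.  */
    }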

References CONSTANT_CLASS_P, dump_enabled_p(), dump_generic_expr(), dump_gimple_expr(), dump_printf(), dump_printf_loc(), ggc_alloc(), is_gimple_min_invariant(), vec_info::lookup_def(), MSG_MISSED_OPTIMIZATION, MSG_NOTE, NULL, SSA_NAME_DEF_STMT, SSA_NAME_IS_DEFAULT_DEF, STMT_VINFO_DEF_TYPE, TDF_SLIM, TREE_CODE, vect_condition_def, vect_constant_def, vect_double_reduction_def, vect_external_def, vect_first_order_recurrence, vect_induction_def, vect_internal_def, vect_location, vect_nested_cycle, vect_reduction_def, vect_stmt_to_vectorize(), vect_uninitialized_def, and vect_unknown_def_type.

Referenced by get_group_load_store_type(), get_load_store_type(), is_simple_and_all_uses_invariant(), process_use(), type_conversion_p(), vect_check_scalar_mask(), vect_check_store_rhs(), vect_get_and_check_slp_defs(), vect_get_vec_defs_for_operand(), vect_is_simple_cond(), vect_is_simple_use(), vect_is_simple_use(), vect_look_through_possible_promotion(), vect_recog_mask_conversion_pattern(), vect_recog_rotate_pattern(), vect_slp_linearize_chain(), vect_widened_op_tree(), vectorizable_assignment(), vectorizable_call(), vectorizable_comparison_1(), vectorizable_condition(), vectorizable_conversion(), vectorizable_early_exit(), vectorizable_operation(), vectorizable_reduction(), vectorizable_shift(), vectorizable_simd_clone_call(), and vectorizable_store().

◆ vect_is_simple_use() [2/3]

bool vect_is_simple_use ( tree operand,
vec_info * vinfo,
enum vect_def_type * dt,
tree * vectype,
stmt_vec_info * def_stmt_info_out,
gimple ** def_stmt_out )
Function vect_is_simple_use.

Same as vect_is_simple_use but also determines the vector operand
type of OPERAND and stores it to *VECTYPE.  If the definition of
OPERAND is vect_uninitialized_def, vect_constant_def or
vect_external_def *VECTYPE will be set to NULL_TREE and the caller
is responsible to compute the best suited vector type for the
scalar operand.   

References dump_enabled_p(), dump_printf_loc(), gcc_assert, gcc_unreachable, ggc_alloc(), MSG_NOTE, NULL_TREE, STMT_VINFO_VECTYPE, vect_constant_def, vect_double_reduction_def, vect_external_def, vect_first_order_recurrence, vect_induction_def, vect_internal_def, vect_is_simple_use(), vect_location, vect_nested_cycle, vect_reduction_def, and vect_uninitialized_def.

◆ vect_is_simple_use() [3/3]

bool vect_is_simple_use ( vec_info * vinfo,
stmt_vec_info stmt,
slp_tree slp_node,
unsigned operand,
tree * op,
slp_tree * slp_def,
enum vect_def_type * dt,
tree * vectype,
stmt_vec_info * def_stmt_info_out )
Function vect_is_simple_use.

Same as vect_is_simple_use but determines the operand by operand
position OPERAND from either STMT or SLP_NODE, filling in *OP
and *SLP_DEF (when SLP_NODE is not NULL).   

References COMPARISON_CLASS_P, gcc_unreachable, ggc_alloc(), gimple_assign_rhs1(), gimple_assign_rhs_code(), gimple_call_arg(), gimple_get_lhs(), gimple_op(), NULL, SLP_TREE_CHILDREN, SLP_TREE_DEF_TYPE, SLP_TREE_REPRESENTATIVE, SLP_TREE_SCALAR_OPS, SLP_TREE_VECTYPE, TREE_OPERAND, vect_internal_def, and vect_is_simple_use().

◆ vect_mark_relevant()

static void vect_mark_relevant ( vec< stmt_vec_info > * worklist,
stmt_vec_info stmt_info,
enum vect_relevant relevant,
bool live_p )
static
Utility functions used by vect_mark_stmts_to_be_vectorized.   
Function vect_mark_relevant.

Mark STMT_INFO as "relevant for vectorization" and add it to WORKLIST.   

References dump_enabled_p(), dump_printf_loc(), gcc_assert, ggc_alloc(), MSG_NOTE, STMT_VINFO_IN_PATTERN_P, STMT_VINFO_LIVE_P, STMT_VINFO_RELATED_STMT, STMT_VINFO_RELEVANT, vect_location, vect_unused_in_scope, vect_used_only_live, and worklist.

Referenced by process_use(), and vect_mark_stmts_to_be_vectorized().

◆ vect_mark_stmts_to_be_vectorized()

opt_result vect_mark_stmts_to_be_vectorized ( loop_vec_info loop_vinfo,
bool * fatal )

◆ vect_maybe_update_slp_op_vectype()

◆ vect_model_promotion_demotion_cost()

static void vect_model_promotion_demotion_cost ( stmt_vec_info stmt_info,
enum vect_def_type * dt,
unsigned int ncopies,
int pwr,
stmt_vector_for_cost * cost_vec,
bool widen_arith )
static
Model cost for type demotion and promotion operations.  PWR is
normally zero for single-step promotions and demotions.  It will be
one if two-step promotion/demotion is required, and so on.  NCOPIES
is the number of vector results (and thus number of instructions)
for the narrowest end of the operation chain.  Each additional
step doubles the number of instructions required.  If WIDEN_ARITH
is true the stmt is doing widening arithmetic.   
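
For example, assuming NCOPIES = 2 and PWR = 1 (a two-step promotion),
roughly 2 + 4 = 6 vector statements are costed in the loop body, plus a
prologue cost for any constant or external operands.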

References dump_enabled_p(), dump_printf_loc(), ggc_alloc(), i, MSG_NOTE, record_stmt_cost(), vec_promote_demote, vect_body, vect_constant_def, vect_external_def, vect_location, vect_prologue, and vector_stmt.

Referenced by vectorizable_conversion().

◆ vect_model_simple_cost()

static void vect_model_simple_cost ( vec_info * ,
stmt_vec_info stmt_info,
int ncopies,
enum vect_def_type * dt,
int ndts,
slp_tree node,
stmt_vector_for_cost * cost_vec,
vect_cost_for_stmt kind = vector_stmt )
static
Function vect_model_simple_cost.

Models cost for simple operations, i.e. those that only emit ncopies of a
single op.  Right now, this does not account for multiple insns that could
be generated for the single vector op.  We will handle that shortly.   

References dump_enabled_p(), dump_printf_loc(), gcc_assert, ggc_alloc(), i, MSG_NOTE, NULL, record_stmt_cost(), scalar_to_vec, SLP_TREE_NUMBER_OF_VEC_STMTS, vect_body, vect_constant_def, vect_external_def, vect_location, and vect_prologue.

Referenced by vectorizable_assignment(), vectorizable_call(), vectorizable_comparison_1(), vectorizable_condition(), vectorizable_conversion(), vectorizable_operation(), and vectorizable_shift().

◆ vect_nop_conversion_p()

bool vect_nop_conversion_p ( stmt_vec_info stmt_info)
Return true if we can assume from the scalar form of STMT_INFO that
neither the scalar nor the vector forms will generate code.  STMT_INFO
is known not to involve a data reference.   
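
A typical example is a cast between signed and unsigned variants of the
same precision, such as int to unsigned int, which tree_nop_conversion_p
accepts and which produces no code in either the scalar or vector form.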

References CONVERT_EXPR_CODE_P, ggc_alloc(), gimple_assign_lhs(), gimple_assign_rhs1(), gimple_assign_rhs_code(), tree_nop_conversion_p(), and TREE_TYPE.

Referenced by vect_bb_slp_scalar_cost(), vect_compute_single_scalar_iteration_cost(), and vectorizable_assignment().

◆ vect_remove_stores()

void vect_remove_stores ( vec_info * vinfo,
stmt_vec_info first_stmt_info )
Remove a group of stores (for SLP or interleaving), free their
stmt_vec_info.   

References DR_GROUP_NEXT_ELEMENT, ggc_alloc(), vec_info::remove_stmt(), and vect_orig_stmt().

Referenced by vect_transform_loop().

◆ vect_simd_lane_linear()

static void vect_simd_lane_linear ( tree op,
class loop * loop,
struct simd_call_arg_info * arginfo )
static

◆ vect_stmt_relevant_p()

static bool vect_stmt_relevant_p ( stmt_vec_info stmt_info,
loop_vec_info loop_vinfo,
enum vect_relevant * relevant,
bool * live_p )
static
Function vect_stmt_relevant_p.

Return true if STMT_INFO, in the loop that is represented by LOOP_VINFO,
is "relevant for vectorization".

A stmt is considered "relevant for vectorization" if:
- it has uses outside the loop.
- it has vdefs (it alters memory).
- it is a control stmt in the loop (other than the exit condition).
- it is an induction and we have multiple exits.

CHECKME: what other side effects would the vectorizer allow?   

References DEF_FROM_PTR, dump_enabled_p(), dump_printf_loc(), flow_bb_inside_loop_p(), FOR_EACH_IMM_USE_FAST, FOR_EACH_PHI_OR_STMT_DEF, gcc_assert, ggc_alloc(), gimple_bb(), gimple_clobber_p(), gimple_vdef(), loop::inner, is_ctrl_stmt(), is_gimple_debug(), is_simple_and_all_uses_invariant(), LOOP_VINFO_EARLY_BREAKS, LOOP_VINFO_LOOP, LOOP_VINFO_LOOP_IV_COND, MSG_NOTE, SSA_OP_DEF, STMT_VINFO_DEF_TYPE, STMT_VINFO_STMT, USE_STMT, vect_induction_def, vect_location, vect_unused_in_scope, vect_used_in_scope, and vect_used_only_live.

Referenced by vect_mark_stmts_to_be_vectorized().

◆ vect_supportable_shift()

bool vect_supportable_shift ( vec_info * vinfo,
enum tree_code code,
tree scalar_type )
Return TRUE if CODE (a shift operation) is supported for SCALAR_TYPE
either as shift by a scalar or by a vector.   

References get_vectype_for_scalar_type(), ggc_alloc(), optab_for_tree_code(), optab_handler(), optab_scalar, optab_vector, and TYPE_MODE.

Referenced by vect_synth_mult_by_constant().

◆ vect_transform_stmt()

bool vect_transform_stmt ( vec_info * vinfo,
stmt_vec_info stmt_info,
gimple_stmt_iterator * gsi,
slp_tree slp_node,
slp_instance slp_node_instance )

◆ vect_truncate_gather_scatter_offset()

static bool vect_truncate_gather_scatter_offset ( stmt_vec_info stmt_info,
loop_vec_info loop_vinfo,
bool masked_p,
gather_scatter_info * gs_info )
static
Determine whether we can use a gather load or scatter store to vectorize
strided load or store STMT_INFO by truncating the current offset to a
smaller width.  We need to be able to construct an offset vector:

  { 0, X, X*2, X*3, ... }

without loss of precision, where X is STMT_INFO's DR_STEP.

Return true if this is possible, describing the gather load or scatter
store in GS_INFO.  MASKED_P is true if the load or store is conditional.   
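
For instance, assuming X (the DR_STEP) is 4 and the loop is known to run
at most 1024 iterations, the largest offset needed is 4 * 1023 = 4092,
which fits losslessly in a 16-bit offset type; the offset vector can then
be built in that narrower type.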

References build_nonstandard_integer_type(), ceil_log2(), count, dr_info::dr, DR_IS_READ, DR_REF, DR_STEP, dump_enabled_p(), dump_printf_loc(), element_mode(), fold_convert, GET_MODE_BITSIZE(), ggc_alloc(), i, IFN_LAST, LOOP_VINFO_LOOP, max_loop_iterations(), wi::min_precision(), MSG_NOTE, wi::mul(), wi::multiple_of_p(), NULL_TREE, wi::OVF_NONE, SCALAR_TYPE_MODE, SIGNED, STMT_VINFO_DR_INFO, STMT_VINFO_VECTYPE, wi::to_widest(), TREE_CODE, TREE_TYPE, UNSIGNED, vect_constant_def, vect_gather_scatter_fn_p(), vect_get_scalar_dr_size(), vect_location, and vect_max_vf().

Referenced by vect_use_strided_gather_scatters_p().

◆ vect_use_strided_gather_scatters_p()

static bool vect_use_strided_gather_scatters_p ( stmt_vec_info stmt_info,
loop_vec_info loop_vinfo,
bool masked_p,
gather_scatter_info * gs_info )
static
Return true if we can use gather/scatter internal functions to
vectorize STMT_INFO, which is a grouped or strided load or store.
MASKED_P is true if load or store is conditional.  When returning
true, fill in GS_INFO with the information required to perform the
operation.   

References dump_enabled_p(), dump_printf_loc(), fold_convert, gcc_assert, ggc_alloc(), IFN_LAST, MSG_NOTE, TREE_TYPE, TYPE_PRECISION, vect_check_gather_scatter(), vect_location, and vect_truncate_gather_scatter_offset().

Referenced by get_group_load_store_type(), and get_load_store_type().

◆ vector_vector_composition_type()

static tree vector_vector_composition_type ( tree vtype,
poly_uint64 nelts,
tree * ptype )
static
Function VECTOR_VECTOR_COMPOSITION_TYPE

This function returns a vector type which can be composed from NELTS pieces,
whose type is recorded in PTYPE.  VTYPE should be a vector type and has the
same vector size as the returned vector.  It first checks whether the target
supports a vector mode of the piece size for the construction; if not, it
then checks whether a scalar mode of the piece size can be used.  It returns
NULL_TREE if no suitable composition can be found.

For example, for (vtype=V16QI, nelts=4), we can probably get:
  - V16QI with PTYPE V4QI.
  - V4SI with PTYPE SI.
  - NULL_TREE.   

References build_nonstandard_integer_type(), build_vector_type(), convert_optab_handler(), gcc_assert, GET_MODE_BITSIZE(), ggc_alloc(), int_mode_for_size(), known_eq, known_gt, NULL_TREE, related_vector_mode(), SCALAR_TYPE_MODE, TREE_TYPE, TYPE_MODE, TYPE_VECTOR_SUBPARTS(), VECTOR_MODE_P, and VECTOR_TYPE_P.

Referenced by get_group_load_store_type(), and vectorizable_load().

◆ vectorizable_assignment()

static bool vectorizable_assignment ( vec_info * vinfo,
stmt_vec_info stmt_info,
gimple_stmt_iterator * gsi,
gimple ** vec_stmt,
slp_tree slp_node,
stmt_vector_for_cost * cost_vec )
static

◆ vectorizable_bswap()

◆ vectorizable_call()

static bool vectorizable_call ( vec_info * vinfo,
stmt_vec_info stmt_info,
gimple_stmt_iterator * gsi,
gimple ** vec_stmt,
slp_tree slp_node,
stmt_vector_for_cost * cost_vec )
static
Function vectorizable_call.

Check if STMT_INFO performs a function call that can be vectorized.
If VEC_STMT is also passed, vectorize STMT_INFO: create a vectorized
stmt to replace it, put it in VEC_STMT, and insert it at GSI.
Return true if STMT_INFO is vectorizable in this way.   

References as_internal_fn(), build_index_vector(), build_int_cst(), build_zero_cst(), BUILT_IN_MD, call_vec_info_type, CFN_LAST, cfun, direct_internal_fn_supported_p(), dump_enabled_p(), dump_printf_loc(), DUMP_VECT_SCOPE, fndecl_built_in_p(), FOR_EACH_VEC_ELT, gcc_assert, gcc_checking_assert, gcc_unreachable, get_conditional_internal_fn(), get_len_internal_fn(), get_vectype_for_scalar_type(), ggc_alloc(), gimple_build_assign(), gimple_build_call_internal_vec(), gimple_build_call_vec(), gimple_call_arg(), gimple_call_builtin_p(), gimple_call_combined_fn(), gimple_call_fndecl(), gimple_call_internal_fn(), gimple_call_internal_p(), gimple_call_lhs(), gimple_call_num_args(), gimple_call_set_lhs(), gimple_call_set_nothrow(), gimple_get_lhs(), gimple_vuse(), i, IFN_LAST, internal_fn_len_index(), internal_fn_mask_index(), internal_fn_p(), internal_load_fn_p(), internal_store_fn_p(), intQI_type_node, known_eq, LOOP_VINFO_CAN_USE_PARTIAL_VECTORS_P, LOOP_VINFO_FULLY_MASKED_P, LOOP_VINFO_FULLY_WITH_LENGTH_P, LOOP_VINFO_LENS, LOOP_VINFO_LOOP, LOOP_VINFO_MASKS, LOOP_VINFO_PARTIAL_LOAD_STORE_BIAS, make_ssa_name(), MAX_TREE_CODES, MSG_MISSED_OPTIMIZATION, MSG_NOTE, NULL, NULL_TREE, OPTIMIZE_FOR_SPEED, prepare_vec_mask(), record_stmt_cost(), vec_info::replace_stmt(), simple_integer_narrowing(), SLP_TREE_NUMBER_OF_VEC_STMTS, SSA_NAME_VAR, data_reference::stmt, stmt_can_throw_internal(), STMT_VINFO_DEF_TYPE, STMT_VINFO_REDUC_IDX, STMT_VINFO_RELEVANT_P, STMT_VINFO_TYPE, STMT_VINFO_VEC_STMTS, STMT_VINFO_VECTYPE, targetm, TREE_CODE, TREE_TYPE, TYPE_VECTOR_SUBPARTS(), types_compatible_p(), unsigned_type_node, vec_promote_demote, vect_body, vect_check_scalar_mask(), vect_create_destination_var(), vect_emulated_vector_p(), vect_finish_stmt_generation(), vect_get_loop_len(), vect_get_loop_mask(), vect_get_new_ssa_name(), vect_get_num_copies(), vect_get_slp_defs(), vect_get_vec_defs_for_operand(), vect_gimple_build(), vect_init_vector_1(), vect_internal_def, vect_is_simple_use(), vect_location, vect_maybe_update_slp_op_vectype(), vect_model_simple_cost(), vect_orig_stmt(), vect_record_loop_len(), vect_record_loop_mask(), vect_simple_var, vect_unknown_def_type, VECTOR_BOOLEAN_TYPE_P, vectorizable_bswap(), vectorizable_internal_function(), and vNULL.

Referenced by vect_analyze_stmt(), and vect_transform_stmt().

◆ vectorizable_comparison()

static bool vectorizable_comparison ( vec_info * vinfo,
stmt_vec_info stmt_info,
gimple_stmt_iterator * gsi,
gimple ** vec_stmt,
slp_tree slp_node,
stmt_vector_for_cost * cost_vec )
static
vectorizable_comparison.

Check if STMT_INFO is a comparison expression that can be vectorized.
If VEC_STMT is also passed, vectorize STMT_INFO: create a vectorized
comparison, put it in VEC_STMT, and insert it at GSI.

Return true if STMT_INFO is vectorizable in this way.   

References comparison_vec_info_type, ggc_alloc(), gimple_assign_rhs_code(), STMT_VINFO_DEF_TYPE, STMT_VINFO_RELEVANT_P, STMT_VINFO_TYPE, STMT_VINFO_VECTYPE, vect_internal_def, and vectorizable_comparison_1().

Referenced by vect_analyze_stmt(), and vect_transform_stmt().

◆ vectorizable_comparison_1()

◆ vectorizable_condition()

static bool vectorizable_condition ( vec_info * vinfo,
stmt_vec_info stmt_info,
gimple_stmt_iterator * gsi,
gimple ** vec_stmt,
slp_tree slp_node,
stmt_vector_for_cost * cost_vec )
static
vectorizable_condition.

Check if STMT_INFO is a conditional modify expression that can be vectorized.
If VEC_STMT is also passed, vectorize STMT_INFO: create a vectorized
stmt using VEC_COND_EXPR  to replace it, put it in VEC_STMT, and insert it
at GSI.

When STMT_INFO is vectorized as a nested cycle, for_reduction is true.

Return true if STMT_INFO is vectorizable in this way.   
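
As an illustrative (not source-derived) example of a statement this
routine handles, the conditional assignment in

  for (int i = 0; i < n; i++)
    a[i] = b[i] < c[i] ? d[i] : e[i];

is vectorized by comparing b and c elementwise and selecting between the
d and e vectors with a VEC_COND_EXPR.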

References build_int_cst(), scalar_cond_masked_key::code, COMPARISON_CLASS_P, condition_vec_info_type, hash_set< KeyId, Lazy, Traits >::contains(), direct_internal_fn_supported_p(), dump_enabled_p(), dump_printf_loc(), expand_vec_cmp_expr_p(), expand_vec_cond_expr_p(), EXTRACT_LAST_REDUCTION, FOR_EACH_VEC_ELT, g, gcc_assert, ggc_alloc(), gimple_assign_lhs(), gimple_assign_rhs1(), gimple_assign_rhs_code(), gimple_build(), gimple_build_assign(), gimple_build_call_internal(), gimple_call_set_lhs(), gimple_get_lhs(), gsi_for_stmt(), gsi_insert_before(), gsi_remove(), GSI_SAME_STMT, gsi_stmt(), HONOR_NANS(), i, info_for_reduction(), intQI_type_node, invert_tree_comparison(), scalar_cond_masked_key::inverted_p, is_gimple_val(), LOOP_VINFO_CAN_USE_PARTIAL_VECTORS_P, LOOP_VINFO_FULLY_MASKED_P, LOOP_VINFO_FULLY_WITH_LENGTH_P, LOOP_VINFO_LENS, LOOP_VINFO_MASKS, LOOP_VINFO_PARTIAL_LOAD_STORE_BIAS, make_ssa_name(), MSG_MISSED_OPTIMIZATION, NULL, NULL_TREE, scalar_cond_masked_key::op0, optab_default, optab_for_tree_code(), optab_handler(), OPTIMIZE_FOR_SPEED, _loop_vec_info::scalar_cond_masked_set, size_int, SLP_TREE_CHILDREN, SLP_TREE_NUMBER_OF_VEC_STMTS, SSA_NAME_DEF_STMT, STMT_VINFO_DEF_TYPE, STMT_VINFO_REDUC_DEF, STMT_VINFO_REDUC_IDX, STMT_VINFO_REDUC_TYPE, STMT_VINFO_RELEVANT_P, STMT_VINFO_TYPE, STMT_VINFO_VEC_STMTS, STMT_VINFO_VECTYPE, tcc_comparison, TREE_CODE, TREE_CODE_CLASS, TREE_CODE_REDUCTION, TREE_OPERAND, TREE_TYPE, truth_type_for(), TYPE_MODE, TYPE_VECTOR_SUBPARTS(), useless_type_conversion_p(), vec_to_scalar, vect_create_destination_var(), vect_finish_replace_stmt(), vect_finish_stmt_generation(), vect_get_loop_len(), vect_get_loop_mask(), vect_get_num_copies(), vect_get_vec_defs(), vect_internal_def, vect_is_simple_cond(), vect_is_simple_use(), vect_location, vect_maybe_update_slp_op_vectype(), vect_model_simple_cost(), vect_nested_cycle, vect_orig_stmt(), vect_record_loop_len(), vect_record_loop_mask(), vect_unknown_def_type, VECTOR_BOOLEAN_TYPE_P, vector_stmt, and vNULL.

Referenced by vect_analyze_stmt(), and vect_transform_stmt().

◆ vectorizable_conversion()

static bool vectorizable_conversion ( vec_info * vinfo,
stmt_vec_info stmt_info,
gimple_stmt_iterator * gsi,
gimple ** vec_stmt,
slp_tree slp_node,
stmt_vector_for_cost * cost_vec )
static
Check if STMT_INFO performs a conversion operation that can be vectorized.
If VEC_STMT is also passed, vectorize STMT_INFO: create a vectorized
stmt to replace it, put it in VEC_STMT, and insert it at GSI.
Return true if STMT_INFO is vectorizable in this way.   

References binary_op, build_nonstandard_integer_type(), CONSTANT_CLASS_P, CONVERT_EXPR_CODE_P, dump_enabled_p(), dump_printf_loc(), DUMP_VECT_SCOPE, fold_convert, FOR_EACH_2XWIDER_MODE, FOR_EACH_VEC_ELT, gcc_assert, gcc_checking_assert, gcc_unreachable, GET_MODE_BITSIZE(), GET_MODE_SIZE(), get_same_sized_vectype(), get_vectype_for_scalar_type(), ggc_alloc(), gimple_assign_rhs2(), gimple_assign_rhs_code(), gimple_assign_set_lhs(), gimple_call_arg(), gimple_call_internal_fn(), gimple_call_internal_p(), gimple_call_num_args(), gimple_get_lhs(), gimple_set_lhs(), i, int_mode_for_size(), INTEGRAL_TYPE_P, is_gimple_assign(), is_gimple_call(), code_helper::is_tree_code(), known_eq, make_ssa_name(), wi::min_precision(), MSG_MISSED_OPTIMIZATION, MSG_NOTE, NULL, NULL_TREE, SCALAR_FLOAT_TYPE_P, SCALAR_TYPE_MODE, SIGNED, SLP_TREE_NUMBER_OF_VEC_STMTS, SSA_NAME_DEF_STMT, STMT_VINFO_DEF_TYPE, STMT_VINFO_RELEVANT_P, STMT_VINFO_TYPE, STMT_VINFO_VEC_STMTS, STMT_VINFO_VECTYPE, supportable_convert_operation(), supportable_half_widening_operation(), supportable_narrowing_operation(), supportable_widening_operation(), TREE_CODE, TREE_CODE_LENGTH, TREE_TYPE, type_conversion_vec_info_type, type_demotion_vec_info_type, type_has_mode_precision_p(), TYPE_PRECISION, type_promotion_vec_info_type, TYPE_VECTOR_SUBPARTS(), unary_op, vect_create_destination_var(), vect_create_half_widening_stmts(), vect_create_vectorized_demotion_stmts(), vect_create_vectorized_promotion_stmts(), vect_finish_stmt_generation(), vect_get_num_copies(), vect_get_range_info(), vect_get_vec_defs(), vect_gimple_build(), vect_internal_def, vect_is_simple_use(), vect_location, vect_maybe_update_slp_op_vectype(), vect_model_promotion_demotion_cost(), vect_model_simple_cost(), vect_pow2(), vect_unknown_def_type, VECTOR_BOOLEAN_TYPE_P, vNULL, and widening_fn_p().

Referenced by vect_analyze_stmt(), and vect_transform_stmt().

◆ vectorizable_early_exit()

◆ vectorizable_internal_function()

static internal_fn vectorizable_internal_function ( combined_fn cfn,
tree fndecl,
tree vectype_out,
tree vectype_in )
static
We want to vectorize a call to combined function CFN with function
decl FNDECL, using VECTYPE_OUT as the type of the output and VECTYPE_IN
as the types of all inputs.  Check whether this is possible using
an internal function, returning its code if so or IFN_LAST if not.   

References as_internal_fn(), associated_internal_fn(), direct_internal_fn(), direct_internal_fn_p(), direct_internal_fn_supported_p(), ggc_alloc(), IFN_LAST, internal_fn_p(), OPTIMIZE_FOR_SPEED, direct_internal_fn_info::type0, direct_internal_fn_info::type1, TYPE_SIZE, and direct_internal_fn_info::vectorizable.

Referenced by vectorizable_call().

◆ vectorizable_load()

static bool vectorizable_load ( vec_info * vinfo,
stmt_vec_info stmt_info,
gimple_stmt_iterator * gsi,
gimple ** vec_stmt,
slp_tree slp_node,
stmt_vector_for_cost * cost_vec )
static
vectorizable_load.

Check if STMT_INFO reads a non scalar data-ref (array/pointer/structure)
that can be vectorized.
If VEC_STMT is also passed, vectorize STMT_INFO: create a vectorized
stmt to replace it, put it in VEC_STMT, and insert it at GSI.
Return true if STMT_INFO is vectorizable in this way.   

References add_phi_arg(), alias_sets_conflict_p(), vec_info::any_known_not_updated_vssa, bitsize_int, build1(), build2(), build_aligned_type(), build_array_type_nelts(), build_constructor(), build_int_cst(), build_int_cstu(), build_minus_one_cst(), build_vector_type_for_mode(), build_zero_cst(), bump_vector_ptr(), can_vec_mask_load_store_p(), CEIL, ceil_log2(), check_load_store_for_partial_vectors(), CONSTRUCTOR_APPEND_ELT, convert_to_ptrofftype, copy_ssa_name(), count, create_array_ref(), create_iv(), create_vector_array(), cse_and_gimplify_to_preheader(), dr_info::dr, dr_aligned, dr_alignment(), DR_BASE_ADDRESS, dr_explicit_realign, dr_explicit_realign_optimized, DR_GROUP_FIRST_ELEMENT, DR_GROUP_GAP, DR_GROUP_NEXT_ELEMENT, DR_GROUP_SIZE, DR_INIT, DR_MISALIGNMENT_UNKNOWN, DR_REF, DR_STEP, DR_STEP_ALIGNMENT, DR_TARGET_ALIGNMENT, dr_unaligned_supported, dump_enabled_p(), dump_printf_loc(), ensure_base_align(), fold_build2, fold_build_pointer_plus, fold_convert, FOR_EACH_VEC_ELT, gcc_assert, gcc_unreachable, get_alias_set(), get_dr_vinfo_offset(), get_group_alias_ptr_type(), get_len_load_store_mode(), get_load_store_type(), GET_MODE_SIZE(), GET_MODE_UNIT_SIZE, get_object_alignment(), get_ptr_info(), get_virtual_phi(), ggc_alloc(), gimple_assign_lhs(), gimple_assign_rhs1(), gimple_assign_rhs_code(), gimple_assign_set_lhs(), gimple_bb(), gimple_build(), gimple_build_assign(), gimple_build_call_internal(), gimple_call_internal_fn(), gimple_call_internal_p(), gimple_call_lhs(), gimple_call_set_lhs(), gimple_call_set_nothrow(), gimple_convert(), gimple_move_vops(), gimple_seq_add_stmt(), gimple_set_lhs(), gimple_set_vuse(), gimple_vuse(), gsi_insert_on_edge_immediate(), gsi_insert_seq_before(), gsi_next(), GSI_SAME_STMT, gsi_stmt(), loop::header, hoist_defs_of_uses(), i, IFN_LAST, int_const_binop(), integer_zerop(), internal_fn_mask_index(), internal_load_fn_p(), intQI_type_node, known_eq, known_ge, known_gt, known_lt, least_bit_hwi(), load_vec_info_type, basic_block_def::loop_father, loop_latch_edge(), loop_preheader_edge(), LOOP_VINFO_CAN_USE_PARTIAL_VECTORS_P, LOOP_VINFO_FULLY_MASKED_P, LOOP_VINFO_FULLY_WITH_LENGTH_P, LOOP_VINFO_LENS, LOOP_VINFO_LOOP, LOOP_VINFO_MASKS, LOOP_VINFO_NO_DATA_DEPENDENCIES, LOOP_VINFO_PARTIAL_LOAD_STORE_BIAS, LOOP_VINFO_PEELING_FOR_GAPS, LOOP_VINFO_USING_PARTIAL_VECTORS_P, LOOP_VINFO_USING_SELECT_VL_P, LOOP_VINFO_VECT_FACTOR, make_ssa_name(), maybe_gt, MSG_MISSED_OPTIMIZATION, MSG_NOTE, nested_in_vect_loop_p(), NULL, NULL_TREE, loop::num, offset, optab_handler(), perm_mask_for_reverse(), permute_vec_elements(), PHI_ARG_DEF_FROM_EDGE, prepare_vec_mask(), ptr_type_node, read_vector_array(), record_stmt_cost(), reference_alias_ptr_type(), SCALAR_INT_MODE_P, scalar_load, scalar_to_vec, set_ptr_info_alignment(), size_binop, size_int, size_one_node, sizetype, SLP_TREE_CHILDREN, SLP_TREE_LANES, SLP_TREE_LOAD_PERMUTATION, SLP_TREE_NUMBER_OF_VEC_STMTS, SLP_TREE_SCALAR_STMTS, SLP_TREE_VECTYPE, SSA_NAME_DEF_STMT, standard_iv_increment_position(), STMT_VINFO_DATA_REF, STMT_VINFO_DEF_TYPE, STMT_VINFO_DR_INFO, STMT_VINFO_GATHER_SCATTER_P, STMT_VINFO_GROUPED_ACCESS, STMT_VINFO_MEMORY_ACCESS_TYPE, STMT_VINFO_MIN_NEG_DIST, STMT_VINFO_RELEVANT_P, STMT_VINFO_SIMD_LANE_ACCESS_P, STMT_VINFO_SLP_VECT_ONLY, STMT_VINFO_TYPE, STMT_VINFO_VEC_STMTS, STMT_VINFO_VECTYPE, tcc_declaration, poly_int< N, C >::to_constant(), wi::to_wide(), TREE_CODE, TREE_CODE_CLASS, tree_int_cst_sgn(), TREE_OPERAND, tree_to_poly_uint64(), tree_to_uhwi(), TREE_TYPE, truth_type_for(), TYPE_ALIGN, TYPE_MODE, 
TYPE_SIZE, TYPE_SIZE_UNIT, TYPE_VECTOR_SUBPARTS(), UNKNOWN_LOCATION, unshare_expr(), unsigned_intQI_type_node, VAR_P, vec_alloc(), vec_construct, vec_perm, vec_to_scalar, vect_body, vect_build_one_gather_load_call(), vect_check_scalar_mask(), vect_clobber_variable(), vect_copy_ref_info(), vect_create_data_ref_ptr(), vect_create_destination_var(), vect_dr_behavior(), vect_find_first_scalar_stmt_in_slp(), vect_finish_stmt_generation(), vect_gen_perm_mask_checked(), vect_get_data_ptr_increment(), vect_get_gather_scatter_ops(), vect_get_load_cost(), vect_get_loop_len(), vect_get_loop_mask(), vect_get_new_ssa_name(), vect_get_num_copies(), vect_get_place_in_interleaving_chain(), vect_get_scalar_dr_size(), vect_get_slp_defs(), vect_get_strided_load_store_ops(), vect_get_vec_defs_for_operand(), vect_init_vector(), vect_internal_def, vect_known_alignment_in_bytes(), vect_location, vect_maybe_update_slp_op_vectype(), vect_nunits_for_cost(), vect_prologue, vect_record_grouped_load_vectors(), vect_setup_realignment(), vect_simple_var, vect_slp_child_index_for_operand(), vect_transform_grouped_load(), vect_transform_slp_perm_load(), vect_unknown_def_type, VECTOR_MODE_P, VECTOR_TYPE_P, vector_vector_composition_type(), VLS_LOAD, VMAT_CONTIGUOUS, VMAT_CONTIGUOUS_PERMUTE, VMAT_CONTIGUOUS_REVERSE, VMAT_ELEMENTWISE, VMAT_GATHER_SCATTER, VMAT_INVARIANT, VMAT_LOAD_STORE_LANES, VMAT_STRIDED_SLP, vNULL, and wide_int_to_tree().

Referenced by vect_analyze_stmt(), and vect_transform_stmt().

◆ vectorizable_operation()

static bool vectorizable_operation ( vec_info * vinfo,
stmt_vec_info stmt_info,
gimple_stmt_iterator * gsi,
gimple ** vec_stmt,
slp_tree slp_node,
stmt_vector_for_cost * cost_vec )
static
Function vectorizable_operation.

Check if STMT_INFO performs a binary, unary or ternary operation that can
be vectorized.
If VEC_STMT is also passed, vectorize STMT_INFO: create a vectorized
stmt to replace it, put it in VEC_STMT, and insert it at GSI.
Return true if STMT_INFO is vectorizable in this way.   

References hash_set< KeyId, Lazy, Traits >::add(), binary_op, build1(), build_int_cst(), build_minus_one_cst(), build_nonstandard_integer_type(), build_replicated_int_cst(), can_mult_highpart_p(), hash_set< KeyId, Lazy, Traits >::contains(), direct_internal_fn_supported_p(), dump_enabled_p(), dump_printf(), dump_printf_loc(), DUMP_VECT_SCOPE, FOR_EACH_VEC_ELT, gcc_assert, get_conditional_internal_fn(), get_conditional_len_internal_fn(), GET_MODE_BITSIZE(), GET_MODE_MASK, GET_MODE_SIZE(), get_vectype_for_scalar_type(), ggc_alloc(), gimple_assign_lhs(), gimple_assign_rhs_code(), gimple_assign_set_lhs(), gimple_build_assign(), gimple_build_call_internal_vec(), gimple_call_set_lhs(), gimple_call_set_nothrow(), gimple_could_trap_p(), i, IFN_LAST, INTEGRAL_TYPE_P, intQI_type_node, LOOP_VINFO_CAN_USE_PARTIAL_VECTORS_P, LOOP_VINFO_FULLY_MASKED_P, LOOP_VINFO_FULLY_WITH_LENGTH_P, LOOP_VINFO_LENS, LOOP_VINFO_MASKS, LOOP_VINFO_PARTIAL_LOAD_STORE_BIAS, make_ssa_name(), MSG_MISSED_OPTIMIZATION, MSG_NOTE, NULL, NULL_TREE, op_vec_info_type, optab_default, optab_for_tree_code(), optab_handler(), optab_libfunc(), OPTIMIZE_FOR_SPEED, prepare_vec_mask(), record_stmt_cost(), _loop_vec_info::scalar_cond_masked_set, scalar_stmt, SLP_TREE_NUMBER_OF_VEC_STMTS, STMT_VINFO_DATA_REF, STMT_VINFO_DEF_TYPE, STMT_VINFO_REDUC_IDX, STMT_VINFO_RELEVANT_P, STMT_VINFO_TYPE, STMT_VINFO_VEC_STMTS, STMT_VINFO_VECTYPE, suppress_warning(), targetm, tcc_comparison, ternary_op, TREE_CODE_CLASS, TREE_CODE_LENGTH, tree_nop_conversion_p(), TREE_TYPE, truth_type_for(), type_has_mode_precision_p(), TYPE_MODE, TYPE_UNSIGNED, TYPE_VECTOR_SUBPARTS(), unary_op, _loop_vec_info::vec_cond_masked_set, vect_body, vect_can_vectorize_without_simd_p(), vect_constant_def, vect_create_destination_var(), vect_emulated_vector_p(), vect_external_def, vect_finish_stmt_generation(), vect_get_loop_len(), vect_get_loop_mask(), vect_get_num_copies(), vect_get_vec_defs(), vect_internal_def, vect_is_simple_use(), vect_location, vect_maybe_update_slp_op_vectype(), vect_model_simple_cost(), vect_prologue, vect_record_loop_len(), vect_record_loop_mask(), VECT_SCALAR_BOOLEAN_TYPE_P, vect_unknown_def_type, VECTOR_BOOLEAN_TYPE_P, vector_element_bits(), vNULL, and word_mode.

Referenced by vect_analyze_stmt(), and vect_transform_stmt().

◆ vectorizable_scan_store()

static bool vectorizable_scan_store ( vec_info * vinfo,
stmt_vec_info stmt_info,
gimple_stmt_iterator * gsi,
gimple ** vec_stmt,
int ncopies )
static

◆ vectorizable_shift()

static bool vectorizable_shift ( vec_info * vinfo,
stmt_vec_info stmt_info,
gimple_stmt_iterator * gsi,
gimple ** vec_stmt,
slp_tree slp_node,
stmt_vector_for_cost * cost_vec )
static
Function vectorizable_shift.

Check if STMT_INFO performs a shift operation that can be vectorized.
If VEC_STMT is also passed, vectorize the STMT_INFO: create a vectorized
stmt to replace it, put it in VEC_STMT, and insert it at GSI.
Return true if STMT_INFO is vectorizable in this way.   

References bitsize_zero_node, build3(), dump_enabled_p(), dump_printf_loc(), DUMP_VECT_SCOPE, fold_convert, FOR_EACH_VEC_ELT, gcc_assert, get_vectype_for_scalar_type(), ggc_alloc(), gimple_assign_lhs(), gimple_assign_rhs2(), gimple_assign_rhs_code(), gimple_assign_set_lhs(), gimple_build_assign(), i, insn_data, is_pattern_stmt_p(), make_ssa_name(), insn_operand_data::mode, MSG_MISSED_OPTIMIZATION, MSG_NOTE, NULL, NULL_TREE, insn_data_d::operand, operand_equal_p(), optab_for_tree_code(), optab_handler(), optab_scalar, optab_vector, shift_vec_info_type, SLP_TREE_DEF_TYPE, SLP_TREE_NUMBER_OF_VEC_STMTS, SLP_TREE_SCALAR_OPS, SLP_TREE_SCALAR_STMTS, STMT_VINFO_DEF_TYPE, STMT_VINFO_RELEVANT_P, STMT_VINFO_TYPE, STMT_VINFO_VEC_STMTS, STMT_VINFO_VECTYPE, TREE_CODE, tree_nop_conversion_p(), TREE_TYPE, type_has_mode_precision_p(), TYPE_MODE, TYPE_SIZE, TYPE_VECTOR_SUBPARTS(), vect_constant_def, vect_create_destination_var(), vect_emulated_vector_p(), vect_external_def, vect_finish_stmt_generation(), vect_get_num_copies(), vect_get_vec_defs(), vect_induction_def, vect_init_vector(), vect_internal_def, vect_is_simple_use(), vect_location, vect_maybe_update_slp_op_vectype(), vect_model_simple_cost(), vect_nested_cycle, vect_unknown_def_type, VECTOR_MODE_P, and vNULL.

Referenced by vect_analyze_stmt(), and vect_transform_stmt().

◆ vectorizable_simd_clone_call()

static bool vectorizable_simd_clone_call ( vec_info * vinfo,
stmt_vec_info stmt_info,
gimple_stmt_iterator * gsi,
gimple ** vec_stmt,
slp_tree slp_node,
stmt_vector_for_cost *  )
static
Function vectorizable_simd_clone_call.

Check if STMT_INFO performs a function call that can be vectorized
by calling a simd clone of the function.
If VEC_STMT is also passed, vectorize STMT_INFO: create a vectorized
stmt to replace it, put it in VEC_STMT, and insert it at GSI.
Return true if STMT_INFO is vectorizable in this way.   

References hash_set< KeyId, Lazy, Traits >::add(), add_phi_arg(), vec_info::any_known_not_updated_vssa, arg_type, iv::base, bitsize_int, boolean_false_node, boolean_true_node, build1(), build2(), build3(), build4(), build_constructor(), build_fold_addr_expr, build_int_cst(), build_nonstandard_integer_type(), build_vector_from_val(), build_zero_cst(), call_simd_clone_vec_info_type, cfun, CONSTRUCTOR_APPEND_ELT, copy_ssa_name(), create_phi_node(), create_tmp_var, dump_enabled_p(), dump_printf_loc(), DUMP_VECT_SCOPE, exact_log2(), expand_vec_cond_expr_p(), floor_log2(), fold_build1, fold_build2, fold_convert, force_gimple_operand(), gcc_assert, gcc_checking_assert, gcc_unreachable, cgraph_node::get(), GET_MODE_BITSIZE(), GET_MODE_SIZE(), get_pointer_alignment(), get_vectype_for_scalar_type(), ggc_alloc(), gimple_assign_lhs(), gimple_build_assign(), gimple_build_call_vec(), gimple_build_nop(), gimple_call_arg(), gimple_call_fndecl(), gimple_call_internal_p(), gimple_call_lhs(), gimple_call_num_args(), gimple_call_set_lhs(), gimple_get_lhs(), gimple_vdef(), gimple_vuse(), gsi_after_labels(), gsi_insert_after(), gsi_insert_before(), gsi_insert_seq_on_edge_immediate(), GSI_NEW_STMT, GSI_SAME_STMT, loop::header, i, integer_one_node, integer_zero_node, known_eq, known_lt, loop_containing_stmt(), loop_latch_edge(), loop_preheader_edge(), LOOP_VINFO_CAN_USE_PARTIAL_VECTORS_P, LOOP_VINFO_FULLY_MASKED_P, LOOP_VINFO_LOOP, LOOP_VINFO_MASKS, LOOP_VINFO_NITERS, LOOP_VINFO_NITERS_UNCHANGED, LOOP_VINFO_VECT_FACTOR, make_ssa_name(), MSG_MISSED_OPTIMIZATION, MSG_NOTE, wi::mul(), nested_in_vect_loop_p(), NULL, NULL_TREE, num_calls(), POINTER_TYPE_P, prepare_vec_mask(), vec_info::replace_stmt(), loop::safelen, SCALAR_INT_MODE_P, si, SIMD_CLONE_ARG_TYPE_LINEAR_CONSTANT_STEP, SIMD_CLONE_ARG_TYPE_LINEAR_REF_CONSTANT_STEP, SIMD_CLONE_ARG_TYPE_LINEAR_REF_VARIABLE_STEP, SIMD_CLONE_ARG_TYPE_LINEAR_UVAL_CONSTANT_STEP, SIMD_CLONE_ARG_TYPE_LINEAR_UVAL_VARIABLE_STEP, SIMD_CLONE_ARG_TYPE_LINEAR_VAL_CONSTANT_STEP, SIMD_CLONE_ARG_TYPE_LINEAR_VAL_VARIABLE_STEP, SIMD_CLONE_ARG_TYPE_LINEAR_VARIABLE_STEP, SIMD_CLONE_ARG_TYPE_MASK, SIMD_CLONE_ARG_TYPE_UNIFORM, SIMD_CLONE_ARG_TYPE_VECTOR, cgraph_node::simd_clones, simple_iv(), size_int, size_type_node, sizetype, SLP_TREE_LANES, SLP_TREE_SIMD_CLONE_INFO, SLP_TREE_VEC_DEFS, iv::step, stmt_can_throw_internal(), STMT_VINFO_DEF_TYPE, STMT_VINFO_RELEVANT_P, STMT_VINFO_SIMD_CLONE_INFO, STMT_VINFO_TYPE, STMT_VINFO_VEC_STMTS, STMT_VINFO_VECTYPE, targetm, TREE_CODE, tree_fits_shwi_p(), TREE_OPERAND, tree_to_shwi(), tree_to_uhwi(), TREE_TYPE, type(), lang_hooks_for_types::type_for_mode, TYPE_MODE, TYPE_SIZE, TYPE_VECTOR_SUBPARTS(), lang_hooks::types, types_compatible_p(), UNKNOWN_LOCATION, unlink_stmt_vdef(), unshare_expr(), useless_type_conversion_p(), vec_alloc(), _loop_vec_info::vec_cond_masked_set, vect_build_all_ones_mask(), vect_clobber_variable(), vect_constant_def, vect_create_destination_var(), vect_external_def, vect_finish_stmt_generation(), vect_get_loop_mask(), vect_get_slp_defs(), vect_get_vec_defs_for_operand(), vect_internal_def, vect_is_simple_use(), vect_location, vect_maybe_update_slp_op_vectype(), vect_orig_stmt(), vect_record_loop_mask(), vect_simd_lane_linear(), vect_slp_child_index_for_operand(), vect_uninitialized_def, VECTOR_BOOLEAN_TYPE_P, vector_unroll_factor, vNULL, and wide_int_to_tree().

Referenced by vect_analyze_stmt(), and vect_transform_stmt().

◆ vectorizable_store()

static bool vectorizable_store ( vec_info * vinfo,
stmt_vec_info stmt_info,
gimple_stmt_iterator * gsi,
gimple ** vec_stmt,
slp_tree slp_node,
stmt_vector_for_cost * cost_vec )
static
Function vectorizable_store.

Check if STMT_INFO defines a non scalar data-ref (array/pointer/structure)
that can be vectorized.
If VEC_STMT is also passed, vectorize STMT_INFO: create a vectorized
stmt to replace it, put it in VEC_STMT, and insert it at GSI.
Return true if STMT_INFO is vectorizable in this way.   

References aggregate_value_p(), alias_sets_conflict_p(), bitsize_int, bitsizetype, build1(), build2(), build_aligned_type(), build_array_type_nelts(), build_int_cst(), build_minus_one_cst(), build_nonstandard_integer_type(), build_vector_type(), build_vector_type_for_mode(), bump_vector_ptr(), can_vec_mask_load_store_p(), ceil_log2(), cfun, cfun_returns(), check_load_store_for_partial_vectors(), check_scan_store(), convert_optab_handler(), convert_to_ptrofftype, copy_ssa_name(), count, create_array_ref(), create_iv(), create_vector_array(), cse_and_gimplify_to_preheader(), DECL_P, dr_aligned, dr_alignment(), DR_BASE_ADDRESS, DR_GROUP_FIRST_ELEMENT, DR_GROUP_NEXT_ELEMENT, DR_GROUP_SIZE, DR_INIT, dr_misalignment(), DR_MISALIGNMENT_UNKNOWN, DR_REF, DR_STEP, DR_TARGET_ALIGNMENT, dr_unaligned_supported, dump_enabled_p(), dump_printf_loc(), ensure_base_align(), fold_build2, fold_build3, fold_build_pointer_plus, fold_convert, force_gimple_operand_gsi(), g, gcc_assert, gcc_unreachable, get_alias_set(), get_base_address(), get_dr_vinfo_offset(), get_group_alias_ptr_type(), get_len_load_store_mode(), get_load_store_type(), GET_MODE, GET_MODE_BITSIZE(), GET_MODE_UNIT_SIZE, get_object_alignment(), get_ptr_info(), ggc_alloc(), gimple_assign_lhs(), gimple_build(), gimple_build_assign(), gimple_build_call_internal(), gimple_call_internal_fn(), gimple_call_internal_p(), gimple_call_set_lhs(), gimple_call_set_nothrow(), gimple_convert(), gimple_set_lhs(), gsi_insert_seq_before(), GSI_SAME_STMT, gsi_stmt(), hard_function_value(), hard_regno_nregs(), i, IFN_LAST, int_const_binop(), int_mode_for_size(), integer_zerop(), internal_fn_mask_index(), internal_store_fn_p(), intQI_type_node, is_pattern_stmt_p(), known_eq, known_ne, least_bit_hwi(), LOOP_VINFO_CAN_USE_PARTIAL_VECTORS_P, LOOP_VINFO_FULLY_MASKED_P, LOOP_VINFO_FULLY_WITH_LENGTH_P, LOOP_VINFO_HAS_MASK_STORE, LOOP_VINFO_LENS, LOOP_VINFO_LOOP, LOOP_VINFO_MASKS, LOOP_VINFO_PARTIAL_LOAD_STORE_BIAS, LOOP_VINFO_USING_SELECT_VL_P, LOOP_VINFO_VECT_FACTOR, make_ssa_name(), MSG_MISSED_OPTIMIZATION, MSG_NOTE, nested_in_vect_loop_p(), NULL, NULL_TREE, offset, optab_handler(), perm_mask_for_reverse(), prepare_vec_mask(), ptr_type_node, PURE_SLP_STMT, record_stmt_cost(), reference_alias_ptr_type(), REG_P, REGNO, related_vector_mode(), SCALAR_INT_MODE_P, scalar_load, scalar_store, scalar_to_vec, SCALAR_TYPE_MODE, set_ptr_info_alignment(), size_binop, size_int, sizetype, SLP_TREE_NUMBER_OF_VEC_STMTS, SLP_TREE_SCALAR_STMTS, SSA_NAME_DEF_STMT, standard_iv_increment_position(), STMT_VINFO_DATA_REF, STMT_VINFO_DEF_TYPE, STMT_VINFO_DR_INFO, STMT_VINFO_GATHER_SCATTER_P, STMT_VINFO_GROUPED_ACCESS, STMT_VINFO_MEMORY_ACCESS_TYPE, STMT_VINFO_RELEVANT_P, STMT_VINFO_SIMD_LANE_ACCESS_P, STMT_VINFO_TYPE, STMT_VINFO_VEC_STMTS, STMT_VINFO_VECTYPE, store_vec_info_type, poly_int< N, C >::to_constant(), TREE_CODE, TREE_OPERAND, tree_to_uhwi(), TREE_TYPE, truth_type_for(), TYPE_ALIGN, TYPE_MODE, TYPE_SIZE, TYPE_SIZE_UNIT, TYPE_VECTOR_SUBPARTS(), unshare_expr(), unsigned_intQI_type_node, useless_type_conversion_p(), VAR_P, vec_alloc(), vec_perm, vec_to_scalar, vect_body, vect_build_one_scatter_store_call(), vect_check_scalar_mask(), vect_check_store_rhs(), vect_clobber_variable(), vect_constant_def, vect_copy_ref_info(), vect_create_data_ref_ptr(), vect_create_destination_var(), vect_dr_behavior(), vect_epilogue, vect_external_def, vect_finish_stmt_generation(), vect_gen_perm_mask_checked(), vect_get_data_ptr_increment(), vect_get_gather_scatter_ops(), vect_get_loop_len(), 
vect_get_loop_mask(), vect_get_new_ssa_name(), vect_get_num_copies(), vect_get_slp_defs(), vect_get_store_cost(), vect_get_store_rhs(), vect_get_strided_load_store_ops(), vect_get_vec_defs(), vect_get_vec_defs_for_operand(), vect_internal_def, vect_is_simple_use(), vect_location, vect_maybe_update_slp_op_vectype(), vect_nunits_for_cost(), vect_permute_store_chain(), vect_prologue, vect_simple_var, vect_slp_child_index_for_operand(), vect_supportable_dr_alignment(), vect_unknown_def_type, VECTOR_BOOLEAN_TYPE_P, VECTOR_MODE_P, vector_store, VECTOR_TYPE_P, vectorizable_scan_store(), VLS_STORE_INVARIANT, VMAT_CONTIGUOUS, VMAT_CONTIGUOUS_DOWN, VMAT_CONTIGUOUS_PERMUTE, VMAT_CONTIGUOUS_REVERSE, VMAT_ELEMENTWISE, VMAT_GATHER_SCATTER, VMAT_LOAD_STORE_LANES, VMAT_STRIDED_SLP, and write_vector_array().

Referenced by vect_analyze_stmt(), and vect_transform_stmt().

◆ write_vector_array()

static void write_vector_array ( vec_info * vinfo,
stmt_vec_info stmt_info,
gimple_stmt_iterator * gsi,
tree vect,
tree array,
unsigned HOST_WIDE_INT n )
static
ARRAY is an array of vectors created by create_vector_array.
Emit code to store SSA_NAME VECT in index N of the array.
The store is part of the vectorization of STMT_INFO.   

References build4(), build_int_cst(), ggc_alloc(), gimple_build_assign(), NULL_TREE, size_type_node, TREE_TYPE, and vect_finish_stmt_generation().

Referenced by vectorizable_store().