GCC Middle and Back End API Reference
expr.cc File Reference
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "memmodel.h"
#include "tm_p.h"
#include "ssa.h"
#include "optabs.h"
#include "expmed.h"
#include "regs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "alias.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "attribs.h"
#include "varasm.h"
#include "except.h"
#include "insn-attr.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "stmt.h"
#include "expr.h"
#include "optabs-tree.h"
#include "libfuncs.h"
#include "reload.h"
#include "langhooks.h"
#include "common/common-target.h"
#include "tree-dfa.h"
#include "tree-ssa-live.h"
#include "tree-outof-ssa.h"
#include "tree-ssa-address.h"
#include "builtins.h"
#include "ccmp.h"
#include "gimple-iterator.h"
#include "gimple-fold.h"
#include "rtx-vector-builder.h"
#include "tree-pretty-print.h"
#include "flags.h"
#include "internal-fn.h"

Data Structures

class  pieces_addr
 
class  op_by_pieces_d
 
class  move_by_pieces_d
 
class  store_by_pieces_d
 
class  compare_by_pieces_d
 

Macros

#define PUSHG_P(to)
 
#define REDUCE_BIT_FIELD(expr)
 
#define EXTEND_BITINT(expr)
 

Functions

static bool block_move_libcall_safe_for_call_parm (void)
 
static bool emit_block_move_via_pattern (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT, unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT, bool)
 
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned, int)
 
static void emit_block_move_via_sized_loop (rtx, rtx, rtx, unsigned, unsigned)
 
static void emit_block_move_via_oriented_loop (rtx, rtx, rtx, unsigned, unsigned)
 
static rtx emit_block_cmp_via_loop (rtx, rtx, rtx, tree, rtx, bool, unsigned, unsigned)
 
static rtx_insn * compress_float_constant (rtx, rtx)
 
static rtx get_subtarget (rtx)
 
static rtx store_field (rtx, poly_int64, poly_int64, poly_uint64, poly_uint64, machine_mode, tree, alias_set_type, bool, bool)
 
static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree)
 
static bool is_aligning_offset (const_tree, const_tree)
 
static rtx reduce_to_bit_field_precision (rtx, rtx, tree)
 
static rtx do_store_flag (const_sepops, rtx, machine_mode)
 
static void do_tablejump (rtx, machine_mode, rtx, rtx, rtx, profile_probability)
 
static rtx const_vector_from_tree (tree)
 
static tree tree_expr_size (const_tree)
 
static void convert_mode_scalar (rtx, rtx, int)
 
void init_expr_target (void)
 
void init_expr (void)
 
void convert_move (rtx to, rtx from, int unsignedp)
 
rtx convert_to_mode (machine_mode mode, rtx x, int unsignedp)
 
rtx convert_modes (machine_mode mode, machine_mode oldmode, rtx x, int unsignedp)
 
rtx convert_float_to_wider_int (machine_mode mode, machine_mode fmode, rtx x)
 
rtx convert_wider_int_to_float (machine_mode mode, machine_mode imode, rtx x)
 
static unsigned int alignment_for_piecewise_move (unsigned int max_pieces, unsigned int align)
 
static bool can_use_qi_vectors (by_pieces_operation op)
 
static bool by_pieces_mode_supported_p (fixed_size_mode mode, by_pieces_operation op)
 
static fixed_size_mode widest_fixed_size_mode_for_size (unsigned int size, by_pieces_operation op)
 
static bool can_do_by_pieces (unsigned HOST_WIDE_INT len, unsigned int align, enum by_pieces_operation op)
 
bool can_move_by_pieces (unsigned HOST_WIDE_INT len, unsigned int align)
 
unsigned HOST_WIDE_INT by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align, unsigned int max_size, by_pieces_operation op)
 
rtx move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len, unsigned int align, memop_ret retmode)
 
bool can_store_by_pieces (unsigned HOST_WIDE_INT len, by_pieces_constfn constfun, void *constfundata, unsigned int align, bool memsetp)
 
rtx store_by_pieces (rtx to, unsigned HOST_WIDE_INT len, by_pieces_constfn constfun, void *constfundata, unsigned int align, bool memsetp, memop_ret retmode)
 
void clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
 
static rtx compare_by_pieces (rtx arg0, rtx arg1, unsigned HOST_WIDE_INT len, rtx target, unsigned int align, by_pieces_constfn a1_cfn, void *a1_cfn_data)
 
rtx emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method, unsigned int expected_align, HOST_WIDE_INT expected_size, unsigned HOST_WIDE_INT min_size, unsigned HOST_WIDE_INT max_size, unsigned HOST_WIDE_INT probable_max_size, bool bail_out_libcall, bool *is_move_done, bool might_overlap, unsigned ctz_size)
 
rtx emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method, unsigned int ctz_size)
 
static bool emit_block_move_via_pattern (rtx x, rtx y, rtx size, unsigned int align, unsigned int expected_align, HOST_WIDE_INT expected_size, unsigned HOST_WIDE_INT min_size, unsigned HOST_WIDE_INT max_size, unsigned HOST_WIDE_INT probable_max_size, bool might_overlap)
 
static void emit_block_move_via_sized_loop (rtx x, rtx y, rtx size, unsigned int align, unsigned int ctz_size)
 
static void emit_block_move_via_oriented_loop (rtx x, rtx y, rtx size, unsigned int align, unsigned int ctz_size)
 
static void emit_block_move_via_loop (rtx x, rtx y, rtx size, unsigned int align, int incr)
 
rtx emit_block_op_via_libcall (enum built_in_function fncode, rtx dst, rtx src, rtx size, bool tailcall)
 
rtx expand_cmpstrn_or_cmpmem (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx, tree arg3_type, rtx arg3_rtx, HOST_WIDE_INT align)
 
static rtx emit_block_cmp_via_cmpmem (rtx x, rtx y, rtx len, tree len_type, rtx target, unsigned align)
 
rtx emit_block_cmp_hints (rtx x, rtx y, rtx len, tree len_type, rtx target, bool equality_only, by_pieces_constfn y_cfn, void *y_cfndata, unsigned ctz_len)
 
void move_block_to_reg (int regno, rtx x, int nregs, machine_mode mode)
 
void move_block_from_reg (int regno, rtx x, int nregs)
 
rtx gen_group_rtx (rtx orig)
 
static void emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, poly_int64 ssize)
 
void emit_group_load (rtx dst, rtx src, tree type, poly_int64 ssize)
 
rtx emit_group_load_into_temps (rtx parallel, rtx src, tree type, poly_int64 ssize)
 
void emit_group_move (rtx dst, rtx src)
 
rtx emit_group_move_into_temps (rtx src)
 
void emit_group_store (rtx orig_dst, rtx src, tree type, poly_int64 ssize)
 
rtx maybe_emit_group_store (rtx x, tree type)
 
static void copy_blkmode_from_reg (rtx target, rtx srcreg, tree type)
 
rtx copy_blkmode_to_reg (machine_mode mode_in, tree src)
 
void use_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
 
void clobber_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
 
void use_regs (rtx *call_fusage, int regno, int nregs)
 
void use_group_regs (rtx *call_fusage, rtx regs)
 
static gimple * get_def_for_expr (tree name, enum tree_code code)
 
static gimple * get_def_for_expr_class (tree name, enum tree_code_class tclass)
 
rtx clear_storage_hints (rtx object, rtx size, enum block_op_methods method, unsigned int expected_align, HOST_WIDE_INT expected_size, unsigned HOST_WIDE_INT min_size, unsigned HOST_WIDE_INT max_size, unsigned HOST_WIDE_INT probable_max_size, unsigned ctz_size)
 
rtx clear_storage (rtx object, rtx size, enum block_op_methods method)
 
rtx set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
 
bool set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align, unsigned int expected_align, HOST_WIDE_INT expected_size, unsigned HOST_WIDE_INT min_size, unsigned HOST_WIDE_INT max_size, unsigned HOST_WIDE_INT probable_max_size)
 
void write_complex_part (rtx cplx, rtx val, bool imag_p, bool undefined_p)
 
rtx read_complex_part (rtx cplx, bool imag_p)
 
static rtx emit_move_change_mode (machine_mode new_mode, machine_mode old_mode, rtx x, bool force)
 
static rtx_insn * emit_move_via_integer (machine_mode mode, rtx x, rtx y, bool force)
 
rtx emit_move_resolve_push (machine_mode mode, rtx x)
 
rtx_insn * emit_move_complex_push (machine_mode mode, rtx x, rtx y)
 
rtx_insn * emit_move_complex_parts (rtx x, rtx y)
 
static rtx_insn * emit_move_complex (machine_mode mode, rtx x, rtx y)
 
static rtx_insn * emit_move_ccmode (machine_mode mode, rtx x, rtx y)
 
static bool undefined_operand_subword_p (const_rtx op, int i)
 
static rtx_insn * emit_move_multi_word (machine_mode mode, rtx x, rtx y)
 
rtx_insn * emit_move_insn_1 (rtx x, rtx y)
 
rtx_insn * emit_move_insn (rtx x, rtx y)
 
rtx_insn * gen_move_insn (rtx x, rtx y)
 
rtx push_block (rtx size, poly_int64 extra, int below)
 
static rtx mem_autoinc_base (rtx mem)
 
poly_int64 find_args_size_adjust (rtx_insn *insn)
 
poly_int64 fixup_args_size_notes (rtx_insn *prev, rtx_insn *last, poly_int64 end_args_size)
 
static int memory_load_overlap (rtx x, rtx y, HOST_WIDE_INT size)
 
bool emit_push_insn (rtx x, machine_mode mode, tree type, rtx size, unsigned int align, int partial, rtx reg, poly_int64 extra, rtx args_addr, rtx args_so_far, int reg_parm_stack_space, rtx alignment_pad, bool sibcall_p)
 
static bool optimize_bitfield_assignment_op (poly_uint64 pbitsize, poly_uint64 pbitpos, poly_uint64 pbitregion_start, poly_uint64 pbitregion_end, machine_mode mode1, rtx str_rtx, tree to, tree src, bool reverse)
 
void get_bit_range (poly_uint64 *bitstart, poly_uint64 *bitend, tree exp, poly_int64 *bitpos, tree *offset)
 
bool non_mem_decl_p (tree base)
 
bool mem_ref_refers_to_non_mem_p (tree ref)
 
void expand_assignment (tree to, tree from, bool nontemporal)
 
bool emit_storent_insn (rtx to, rtx from)
 
static rtx string_cst_read_str (void *data, void *, HOST_WIDE_INT offset, fixed_size_mode mode)
 
rtx store_expr (tree exp, rtx target, int call_param_p, bool nontemporal, bool reverse)
 
static bool flexible_array_member_p (const_tree f, const_tree type)
 
static HOST_WIDE_INT count_type_elements (const_tree type, bool for_ctor_p)
 
static bool categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts, HOST_WIDE_INT *p_unique_nz_elts, HOST_WIDE_INT *p_init_elts, int *p_complete)
 
bool categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts, HOST_WIDE_INT *p_unique_nz_elts, HOST_WIDE_INT *p_init_elts, int *p_complete)
 
bool immediate_const_ctor_p (const_tree ctor, unsigned int words)
 
bool complete_ctor_at_level_p (const_tree type, HOST_WIDE_INT num_elts, const_tree last_type)
 
static bool mostly_zeros_p (const_tree exp)
 
static bool all_zeros_p (const_tree exp)
 
static void store_constructor_field (rtx target, poly_uint64 bitsize, poly_int64 bitpos, poly_uint64 bitregion_start, poly_uint64 bitregion_end, machine_mode mode, tree exp, int cleared, alias_set_type alias_set, bool reverse)
 
static int fields_length (const_tree type)
 
void store_constructor (tree exp, rtx target, int cleared, poly_int64 size, bool reverse)
 
tree get_inner_reference (tree exp, poly_int64 *pbitsize, poly_int64 *pbitpos, tree *poffset, machine_mode *pmode, int *punsignedp, int *preversep, int *pvolatilep)
 
static unsigned HOST_WIDE_INT target_align (const_tree target)
 
rtx force_operand (rtx value, rtx target)
 
bool safe_from_p (const_rtx x, tree exp, int top_p)
 
unsigned HOST_WIDE_INT highest_pow2_factor (const_tree exp)
 
static enum rtx_code convert_tree_comp_to_rtx (enum tree_code tcode, int unsignedp)
 
void expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1, enum expand_modifier modifier)
 
static rtx expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
 
static rtx expand_expr_addr_expr_1 (tree exp, rtx target, scalar_int_mode tmode, enum expand_modifier modifier, addr_space_t as)
 
static rtx expand_expr_addr_expr (tree exp, rtx target, machine_mode tmode, enum expand_modifier modifier)
 
static rtx expand_constructor (tree exp, rtx target, enum expand_modifier modifier, bool avoid_temp_mem)
 
rtx expand_expr_real (tree exp, rtx target, machine_mode tmode, enum expand_modifier modifier, rtx *alt_rtl, bool inner_reference_p)
 
static rtx expand_cond_expr_using_cmove (tree treeop0, tree treeop1, tree treeop2)
 
static rtx expand_misaligned_mem_ref (rtx temp, machine_mode mode, int unsignedp, unsigned int align, rtx target, rtx *alt_rtl)
 
static rtx expand_expr_divmod (tree_code code, machine_mode mode, tree treeop0, tree treeop1, rtx op0, rtx op1, rtx target, int unsignedp)
 
rtx expand_expr_real_2 (const_sepops ops, rtx target, machine_mode tmode, enum expand_modifier modifier)
 
static bool stmt_is_replaceable_p (gimple *stmt)
 
rtx expand_expr_real_gassign (gassign *g, rtx target, machine_mode tmode, enum expand_modifier modifier, rtx *alt_rtl, bool inner_reference_p)
 
rtx expand_expr_real_1 (tree exp, rtx target, machine_mode tmode, enum expand_modifier modifier, rtx *alt_rtl, bool inner_reference_p)
 
static tree constant_byte_string (tree arg, tree *ptr_offset, tree *mem_size, tree *decl, bool valrep=false)
 
tree string_constant (tree arg, tree *ptr_offset, tree *mem_size, tree *decl)
 
tree byte_representation (tree arg, tree *ptr_offset, tree *mem_size, tree *decl)
 
enum tree_code maybe_optimize_pow2p_mod_cmp (enum tree_code code, tree *arg0, tree *arg1)
 
enum tree_code maybe_optimize_mod_cmp (enum tree_code code, tree *arg0, tree *arg1)
 
void maybe_optimize_sub_cmp_0 (enum tree_code code, tree *arg0, tree *arg1)
 
static rtx expand_single_bit_test (location_t loc, enum tree_code code, tree inner, int bitnum, tree result_type, rtx target, machine_mode mode)
 
bool try_casesi (tree index_type, tree index_expr, tree minval, tree range, rtx table_label, rtx default_label, rtx fallback_label, profile_probability default_probability)
 
bool try_tablejump (tree index_type, tree index_expr, tree minval, tree range, rtx table_label, rtx default_label, profile_probability default_probability)
 
static rtx const_vector_mask_from_tree (tree exp)
 
tree build_personality_function (const char *lang)
 
rtx get_personality_function (tree decl)
 
rtx expr_size (tree exp)
 
HOST_WIDE_INT int_expr_size (const_tree exp)
 
unsigned HOST_WIDE_INT gf2n_poly_long_div_quotient (unsigned HOST_WIDE_INT polynomial, unsigned short n)
 
static unsigned HOST_WIDE_INT calculate_crc (unsigned HOST_WIDE_INT crc, unsigned HOST_WIDE_INT polynomial, unsigned short crc_bits)
 
rtx assemble_crc_table (tree id, unsigned HOST_WIDE_INT polynom, unsigned short crc_bits)
 
rtx generate_crc_table (unsigned HOST_WIDE_INT polynom, unsigned short crc_bits)
 
void calculate_table_based_CRC (rtx *crc, const rtx &input_data, const rtx &polynomial, machine_mode data_mode)
 
void expand_crc_table_based (rtx op0, rtx op1, rtx op2, rtx op3, machine_mode data_mode)
 
void gen_common_operation_to_reflect (rtx *op, unsigned HOST_WIDE_INT and1_value, unsigned HOST_WIDE_INT and2_value, unsigned shift_val)
 
void reflect_64_bit_value (rtx *op)
 
void reflect_32_bit_value (rtx *op)
 
void reflect_16_bit_value (rtx *op)
 
void reflect_8_bit_value (rtx *op)
 
void generate_reflecting_code_standard (rtx *op)
 
void expand_reversed_crc_table_based (rtx op0, rtx op1, rtx op2, rtx op3, machine_mode data_mode, void(*gen_reflecting_code)(rtx *op))
 

Variables

int cse_not_expected
 

Macro Definition Documentation

◆ EXTEND_BITINT

#define EXTEND_BITINT ( expr)
Value:
((TREE_CODE (type) == BITINT_TYPE \
&& reduce_bit_field \
&& mode != BLKmode \
&& modifier != EXPAND_MEMORY \
&& modifier != EXPAND_WRITE \
&& modifier != EXPAND_INITIALIZER \
&& modifier != EXPAND_CONST_ADDRESS) \
? reduce_to_bit_field_precision ((expr), NULL_RTX, type) : (expr))

Referenced by expand_expr_real_1().

◆ PUSHG_P

#define PUSHG_P ( to)
Value:
false

◆ REDUCE_BIT_FIELD

#define REDUCE_BIT_FIELD ( expr)
Value:
(reduce_bit_field \
? reduce_to_bit_field_precision ((expr), \
target, \
type) \
: (expr))

Referenced by expand_expr_real_2().

Function Documentation

◆ alignment_for_piecewise_move()

static unsigned int alignment_for_piecewise_move ( unsigned int max_pieces,
unsigned int align )
static
Return the largest alignment we can use for doing a move (or store)
of MAX_PIECES.  ALIGN is the largest alignment we could use.   

References FOR_EACH_MODE_IN_CLASS, GET_MODE_ALIGNMENT, GET_MODE_SIZE(), int_mode_for_size(), MAX, NARROWEST_INT_MODE, opt_mode< T >::require(), and targetm.

Referenced by by_pieces_ninsns(), can_store_by_pieces(), and op_by_pieces_d::op_by_pieces_d().

◆ all_zeros_p()

static bool all_zeros_p ( const_tree exp)
static
Return true if EXP contains all zeros.   

References categorize_ctor_elements(), exp(), initializer_zerop(), and TREE_CODE.

Referenced by expand_constructor().

◆ assemble_crc_table()

rtx assemble_crc_table ( tree id,
unsigned HOST_WIDE_INT polynom,
unsigned short crc_bits )
Assemble CRC table with 256 elements for the given POLYNOM and CRC_BITS with
given ID.
ID is the identifier of the table, the name of the table is unique,
contains CRC size and the polynomial.
POLYNOM is the polynomial used to calculate the CRC table's elements.
CRC_BITS is the size of CRC, may be 8, 16, ... .  

References build_array_type(), build_constructor_from_vec(), build_decl(), build_index_type(), build_int_cstu(), calculate_crc(), DECL_ARTIFICIAL, DECL_ASSEMBLER_NAME, DECL_INITIAL, varpool_node::finalize_decl(), i, IDENTIFIER_POINTER, make_decl_one_only(), make_unsigned_type(), mark_decl_referenced(), SET_DECL_ASSEMBLER_NAME, size_int, TREE_ASM_WRITTEN, TREE_PUBLIC, TREE_READONLY, TREE_STATIC, UNKNOWN_LOCATION, vec_alloc(), and vec_safe_push().

Referenced by generate_crc_table().

◆ block_move_libcall_safe_for_call_parm()

static bool block_move_libcall_safe_for_call_parm ( void )
static
A subroutine of emit_block_move.  Returns true if calling the
block move libcall will not clobber any parameters which may have
already been placed on the stack.   

References builtin_decl_implicit(), NULL_RTX, NULL_TREE, OUTGOING_REG_PARM_STACK_SPACE, REG_P, targetm, TREE_CHAIN, TREE_TYPE, TREE_VALUE, TYPE_ARG_TYPES, TYPE_MODE, and void_list_node.

Referenced by emit_block_move_hints().

◆ build_personality_function()

◆ by_pieces_mode_supported_p()

static bool by_pieces_mode_supported_p ( fixed_size_mode mode,
by_pieces_operation op )
static
Return true if optabs exists for the mode and certain by pieces
operations.   

References can_compare_p(), ccp_jump, CLEAR_BY_PIECES, COMPARE_BY_PIECES, optab_handler(), SET_BY_PIECES, and VECTOR_MODE_P.

Referenced by op_by_pieces_d::smallest_fixed_size_mode_for_size(), and widest_fixed_size_mode_for_size().

◆ by_pieces_ninsns()

unsigned HOST_WIDE_INT by_pieces_ninsns ( unsigned HOST_WIDE_INT l,
unsigned int align,
unsigned int max_size,
by_pieces_operation op )
Return number of insns required to perform operation OP by pieces
for L bytes.  ALIGN (in bits) is maximum alignment we can assume.   

References alignment_for_piecewise_move(), COMPARE_BY_PIECES, gcc_assert, GET_MODE_ALIGNMENT, GET_MODE_SIZE(), MOVE_MAX_PIECES, optab_handler(), ROUND_UP, targetm, and widest_fixed_size_mode_for_size().

Referenced by default_use_by_pieces_infrastructure_p(), and op_by_pieces_d::op_by_pieces_d().

◆ byte_representation()

tree byte_representation ( tree arg,
tree * ptr_offset,
tree * mem_size,
tree * decl )
Similar to string_constant, return a STRING_CST corresponding
to the value representation of the first argument if it's
a constant.   

References constant_byte_string().

Referenced by getbyterep(), and gimple_fold_builtin_memchr().

◆ calculate_crc()

static unsigned HOST_WIDE_INT calculate_crc ( unsigned HOST_WIDE_INT crc,
unsigned HOST_WIDE_INT polynomial,
unsigned short crc_bits )
static
Calculate CRC for the initial CRC and given POLYNOMIAL.
CRC_BITS is CRC size.   

References CHAR_BIT, HOST_WIDE_INT_1U, and i.

Referenced by assemble_crc_table().
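
calculate_crc produces the raw value behind one entry of the 256-element table built by assemble_crc_table.  The standalone sketch below shows the usual bit-at-a-time form of that computation for one byte of input; it is an illustration of the math only, not GCC's code, and the names crc_table_entry and main are hypothetical.

  #include <cstdint>
  #include <cstdio>

  /* One table entry: run eight shift/xor steps over INIT, for a CRC of
     CRC_BITS bits and polynomial POLY given without its leading 1.  */
  static uint64_t
  crc_table_entry (uint64_t init, uint64_t poly, unsigned crc_bits)
  {
    uint64_t top = uint64_t (1) << (crc_bits - 1);
    uint64_t mask = (crc_bits == 64
                     ? ~uint64_t (0) : (uint64_t (1) << crc_bits) - 1);
    uint64_t crc = init;
    for (int bit = 0; bit < 8; bit++)
      crc = (crc & top) ? ((crc << 1) ^ poly) & mask : (crc << 1) & mask;
    return crc;
  }

  int
  main ()
  {
    /* For an 8-bit CRC the table index is the initial value itself;
       print entry 0x31 of a CRC-8 table for polynomial 0x07.  */
    printf ("%02llx\n",
            (unsigned long long) crc_table_entry (0x31, 0x07, 8));
  }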

◆ calculate_table_based_CRC()

void calculate_table_based_CRC ( rtx * crc,
const rtx & input_data,
const rtx & polynomial,
machine_mode data_mode )
Generate table-based CRC code for the given CRC, INPUT_DATA and the
POLYNOMIAL (without leading 1).

First, using POLYNOMIAL's value generates CRC table of 256 elements,
then generates the assembly for the following code,
where crc_bit_size and data_bit_size may be 8, 16, 32, 64, depending on CRC:

  for (int i = 0; i < data_bit_size / 8; i++)
    crc = (crc << 8) ^ crc_table[(crc >> (crc_bit_size - 8))
                                 ^ ((data >> (data_bit_size - (i + 1) * 8))
                                    & 0xFF)];

So to take values from the table, we need 8-bit data.
If input data size is not 8, then first we extract upper 8 bits,
then the other 8 bits, and so on.   

References convert_move(), exact_log2(), expand_and(), expand_binop(), expand_shift(), force_reg(), gen_int_mode(), gen_reg_rtx(), gen_rtx_MEM(), generate_crc_table(), GET_MODE, GET_MODE_BITSIZE(), GET_MODE_SIZE(), i, expand_operand::mode, NULL_RTX, OPTAB_DIRECT, OPTAB_WIDEN, poly_int< N, C >::to_constant(), UINTVAL, and validize_mem().

Referenced by expand_crc_table_based(), and expand_reversed_crc_table_based().

◆ can_do_by_pieces()

static bool can_do_by_pieces ( unsigned HOST_WIDE_INT len,
unsigned int align,
enum by_pieces_operation op )
static
Determine whether an operation OP on LEN bytes with alignment ALIGN can
and should be performed piecewise.   

References optimize_insn_for_speed_p(), and targetm.

Referenced by can_move_by_pieces(), emit_block_cmp_hints(), and emit_block_cmp_via_loop().

◆ can_move_by_pieces()

bool can_move_by_pieces ( unsigned HOST_WIDE_INT len,
unsigned int align )
Determine whether the LEN bytes can be moved by using several move
instructions.  Return nonzero if a call to move_by_pieces should
succeed.   

References can_do_by_pieces(), and MOVE_BY_PIECES.

Referenced by emit_block_move_hints(), emit_block_move_via_loop(), emit_block_move_via_sized_loop(), emit_push_insn(), expand_constructor(), gimple_stringops_transform(), and gimplify_init_constructor().
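
A minimal sketch of the usual gate-then-expand pattern around this predicate, assuming it is compiled inside GCC's middle end; the helper maybe_copy_by_pieces and its operands are hypothetical, and the fallback uses the emit_block_move wrapper documented below with a conservative CTZ_SIZE of 0.

  /* Copy LEN bytes from the BLKmode MEM SRC to DST piecewise only when
     the target says that is profitable; ALIGN is in bits.  */
  static void
  maybe_copy_by_pieces (rtx dst, rtx src, unsigned HOST_WIDE_INT len,
                        unsigned int align)
  {
    if (can_move_by_pieces (len, align))
      /* RETURN_BEGIN: the updated destination address is not needed.  */
      move_by_pieces (dst, src, len, align, RETURN_BEGIN);
    else
      /* Fall back to the general block-move expander.  */
      emit_block_move (dst, src, GEN_INT (len), BLOCK_OP_NORMAL, 0);
  }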

◆ can_store_by_pieces()

bool can_store_by_pieces ( unsigned HOST_WIDE_INT len,
by_pieces_constfn constfun,
void * constfundata,
unsigned int align,
bool memsetp )
Determine whether the LEN bytes generated by CONSTFUN can be
stored to memory using several move instructions.  CONSTFUNDATA is
a pointer which will be passed as argument in every CONSTFUN call.
ALIGN is maximum alignment we can assume.  MEMSETP is true if this is
a memset operation and false if it's a copy of a constant string.
Return true if a call to store_by_pieces should succeed.   

References alignment_for_piecewise_move(), gcc_assert, GET_MODE_ALIGNMENT, GET_MODE_SIZE(), HAVE_POST_DECREMENT, HAVE_PRE_DECREMENT, offset, optab_handler(), optimize_insn_for_speed_p(), SET_BY_PIECES, STORE_BY_PIECES, STORE_MAX_PIECES, targetm, VECTOR_MODE_P, and widest_fixed_size_mode_for_size().

Referenced by asan_emit_stack_protection(), can_store_by_multiple_pieces(), expand_builtin_memory_copy_args(), expand_builtin_memset_args(), expand_builtin_strncpy(), gimple_stringops_transform(), simplify_builtin_call(), store_expr(), and try_store_by_multiple_pieces().
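
A hedged sketch of how a caller pairs this predicate with store_by_pieces; the callback zero_read_str and the helper maybe_zero_by_pieces are hypothetical, with the callback's parameter order copied from string_cst_read_str above.  clear_by_pieces (documented below) follows essentially this pattern using builtin_memset_read_str.

  /* Constfn: return the piece at OFFSET in MODE.  Every piece is zero
     here, as a memset (..., 0, ...) expansion would want.  */
  static rtx
  zero_read_str (void *, void *, HOST_WIDE_INT, fixed_size_mode mode)
  {
    return CONST0_RTX (mode);
  }

  /* Gate first, then emit; store_by_pieces expects its caller to have
     checked that a piecewise store is possible and profitable.  */
  static void
  maybe_zero_by_pieces (rtx dst, unsigned HOST_WIDE_INT len,
                        unsigned int align)
  {
    if (can_store_by_pieces (len, zero_read_str, NULL, align,
                             /*memsetp=*/true))
      store_by_pieces (dst, len, zero_read_str, NULL, align,
                       /*memsetp=*/true, RETURN_BEGIN);
  }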

◆ can_use_qi_vectors()

static bool can_use_qi_vectors ( by_pieces_operation op)
static
Return true if we know how to implement OP using vectors of bytes.   

References CLEAR_BY_PIECES, COMPARE_BY_PIECES, and SET_BY_PIECES.

Referenced by op_by_pieces_d::smallest_fixed_size_mode_for_size(), and widest_fixed_size_mode_for_size().

◆ categorize_ctor_elements()

bool categorize_ctor_elements ( const_tree ctor,
HOST_WIDE_INT * p_nz_elts,
HOST_WIDE_INT * p_unique_nz_elts,
HOST_WIDE_INT * p_init_elts,
int * p_complete )
Examine CTOR to discover:
* how many scalar fields are set to nonzero values,
  and place it in *P_NZ_ELTS;
* the same, but counting RANGE_EXPRs as multiplier of 1 instead of
  high - low + 1 (this can be useful for callers to determine ctors
  that could be cheaply initialized with - perhaps nested - loops
  compared to copied from huge read-only data),
  and place it in *P_UNIQUE_NZ_ELTS;
* how many scalar fields in total are in CTOR,
  and place it in *P_INIT_ELTS.
* whether the constructor is complete -- in the sense that every
  meaningful byte is explicitly given a value --
  and place it in *P_COMPLETE:
  -  0 if any field is missing
  -  1 if all fields are initialized, and there's no padding
  - -1 if all fields are initialized, but there's padding

Return whether or not CTOR is a valid static constant initializer, the same
as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0".   

References categorize_ctor_elements_1().

Referenced by all_zeros_p(), gimplify_init_constructor(), and mostly_zeros_p().

◆ categorize_ctor_elements_1()

◆ clear_by_pieces()

void clear_by_pieces ( rtx to,
unsigned HOST_WIDE_INT len,
unsigned int align )
Generate several move instructions to clear LEN bytes of block TO.  (A MEM
rtx with BLKmode).  ALIGN is maximum alignment we can assume.   

References builtin_memset_read_str(), and CLEAR_BY_PIECES.

Referenced by clear_storage_hints().

◆ clear_storage()

rtx clear_storage ( rtx object,
rtx size,
enum block_op_methods method )
Write zeros through the storage of OBJECT.
If OBJECT has BLKmode, SIZE is its length in bytes.   

References clear_storage_hints(), GET_CODE, GET_MODE, GET_MODE_MASK, and UINTVAL.

Referenced by asan_clear_shadow(), expand_constructor(), store_constructor(), and store_expr().
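
A minimal illustrative fragment, assumed to run inside an expansion routine in GCC's middle end; the pseudo ADDR, the 64-byte size and the claimed alignment are hypothetical.

  rtx addr = gen_reg_rtx (Pmode);
  rtx mem = gen_rtx_MEM (BLKmode, addr);
  /* Claim 8-byte alignment for the destination.  */
  set_mem_align (mem, 8 * BITS_PER_UNIT);
  /* Zero all 64 bytes, letting clear_storage choose between
     clear-by-pieces, a setmem pattern, or a memset libcall.  */
  clear_storage (mem, GEN_INT (64), BLOCK_OP_NORMAL);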

◆ clear_storage_hints()

rtx clear_storage_hints ( rtx object,
rtx size,
enum block_op_methods method,
unsigned int expected_align,
HOST_WIDE_INT expected_size,
unsigned HOST_WIDE_INT min_size,
unsigned HOST_WIDE_INT max_size,
unsigned HOST_WIDE_INT probable_max_size,
unsigned ctz_size )

◆ clobber_reg_mode()

void clobber_reg_mode ( rtx * call_fusage,
rtx reg,
machine_mode mode )
Add a CLOBBER expression for REG to the (possibly empty) list pointed
to by CALL_FUSAGE.  REG must denote a hard register.   

References gcc_assert, gen_rtx_EXPR_LIST(), expand_operand::mode, REG_P, and REGNO.

Referenced by clobber_reg().

◆ compare_by_pieces()

static rtx compare_by_pieces ( rtx arg0,
rtx arg1,
unsigned HOST_WIDE_INT len,
rtx target,
unsigned int align,
by_pieces_constfn a1_cfn,
void * a1_cfn_data )
static
Generate several move instructions to compare LEN bytes from blocks
ARG0 and ARG1.  (These are MEM rtx's with BLKmode).

If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
used to push FROM to the stack.

ALIGN is maximum stack alignment we can assume.

Optionally, the caller can pass a constfn and associated data in A1_CFN
and A1_CFN_DATA, describing that the second operand being compared is a
known constant and how to obtain its data.   

References const0_rtx, const1_rtx, emit_barrier(), emit_jump(), emit_label(), emit_move_insn(), fail_label, gen_label_rtx(), gen_reg_rtx(), integer_type_node, NULL_RTX, REG_P, REGNO, and TYPE_MODE.

Referenced by emit_block_cmp_hints(), and emit_block_cmp_via_loop().

◆ complete_ctor_at_level_p()

bool complete_ctor_at_level_p ( const_tree type,
HOST_WIDE_INT num_elts,
const_tree last_type )
TYPE is initialized by a constructor with NUM_ELTS elements, the last
of which had type LAST_TYPE.  Each element was itself a complete
initializer, in the sense that every meaningful byte was explicitly
given a value.  Return true if the same is true for the constructor
as a whole.   

References count_type_elements(), DECL_CHAIN, gcc_assert, simple_cst_equal(), TREE_CODE, TYPE_FIELDS, TYPE_SIZE, and ZERO_INIT_PADDING_BITS_UNIONS.

Referenced by categorize_ctor_elements_1().

◆ compress_float_constant()

static rtx_insn * compress_float_constant ( rtx x,
rtx y )
static

◆ const_vector_from_tree()

◆ const_vector_mask_from_tree()

◆ constant_byte_string()

static tree constant_byte_string ( tree arg,
tree * ptr_offset,
tree * mem_size,
tree * decl,
bool valrep = false )
static

◆ convert_float_to_wider_int()

rtx convert_float_to_wider_int ( machine_mode mode,
machine_mode fmode,
rtx x )
Variant of convert_modes for ABI parameter passing/return.
Return an rtx for a value that would result from converting X from
a floating point mode FMODE to wider integer mode MODE.   

References convert_modes(), force_reg(), gcc_assert, gen_lowpart, int_mode_for_mode(), opt_mode< T >::require(), SCALAR_FLOAT_MODE_P, and SCALAR_INT_MODE_P.

Referenced by expand_value_return(), and precompute_register_parameters().

◆ convert_mode_scalar()

◆ convert_modes()

rtx convert_modes ( machine_mode mode,
machine_mode oldmode,
rtx x,
int unsignedp )
Return an rtx for a value that would result
from converting X from mode OLDMODE to mode MODE.
Both modes may be floating, or both integer.
UNSIGNEDP is nonzero if X is an unsigned value.

This can be done by referring to a part of X in place
or by copying to a new temporary with conversion.

You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.   

References CONST_POLY_INT_P, CONST_SCALAR_INT_P, convert_move(), direct_load, force_subreg(), wide_int_storage::from(), gcc_assert, gen_lowpart, gen_reg_rtx(), GET_CODE, GET_MODE, GET_MODE_BITSIZE(), GET_MODE_PRECISION(), HARD_REGISTER_P, immed_wide_int_const(), is_a(), is_int_mode(), known_eq, MEM_P, MEM_VOLATILE_P, REG_P, REGNO, SIGNED, SUBREG_CHECK_PROMOTED_SIGN, subreg_promoted_mode(), SUBREG_PROMOTED_SET, SUBREG_PROMOTED_VAR_P, SUBREG_REG, targetm, TRULY_NOOP_TRUNCATION_MODES_P, UNSIGNED, and VECTOR_MODE_P.

Referenced by avoid_expensive_constant(), can_widen_mult_without_libcall(), convert_float_to_wider_int(), convert_memory_address_addr_space_1(), convert_to_mode(), emit_block_cmp_via_loop(), emit_block_move_via_loop(), emit_block_move_via_oriented_loop(), emit_library_call_value_1(), emit_store_flag_1(), emit_store_flag_int(), expand_arith_overflow_result_store(), expand_asan_emit_allocas_unpoison(), expand_binop_directly(), expand_builtin_extend_pointer(), expand_builtin_issignaling(), expand_cond_expr_using_cmove(), expand_divmod(), expand_doubleword_mod(), expand_expr_addr_expr_1(), expand_expr_force_mode(), expand_expr_real_1(), expand_expr_real_2(), expand_gimple_stmt_1(), expand_ifn_atomic_compare_exchange(), expand_ifn_atomic_compare_exchange_into_call(), expand_mul_overflow(), expand_speculation_safe_value(), expand_twoval_binop(), expand_twoval_unop(), expand_value_return(), expand_widening_mult(), expmed_mult_highpart_optab(), extract_high_half(), extract_low_bits(), inline_string_cmp(), insert_value_copy_on_edge(), maybe_legitimize_operand(), optimize_bitfield_assignment_op(), precompute_arguments(), precompute_register_parameters(), prepare_operand(), push_block(), store_expr(), store_field(), store_one_arg(), try_tablejump(), and widen_operand().
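
A minimal illustrative fragment (names hypothetical, GCC-internal context assumed): widen an SImode value to DImode with zero-extension.

  rtx val = gen_reg_rtx (SImode);
  /* UNSIGNEDP nonzero requests zero-extension.  VOIDmode would also be
     acceptable as OLDMODE, since VAL carries a nonvoid mode itself.  */
  rtx wide = convert_modes (DImode, SImode, val, /*unsignedp=*/1);
  /* convert_to_mode (DImode, val, 1) is the shorthand that reads the
     old mode off VAL.  */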

◆ convert_move()

void convert_move ( rtx to,
rtx from,
int unsignedp )
Copy data from FROM to TO, where the machine modes are not the same.
Both modes may be integer, or both may be floating, or both may be
fixed-point.
UNSIGNEDP should be nonzero if FROM is an unsigned type.
This causes zero-extension instead of sign-extension.   

References CONSTANT_P, convert_mode_scalar(), convert_move(), convert_optab_handler(), emit_move_insn(), emit_unop_insn(), force_subreg(), gcc_assert, gen_lowpart, GET_CODE, GET_MODE, GET_MODE_BITSIZE(), GET_MODE_PRECISION(), GET_MODE_UNIT_PRECISION, is_a(), known_eq, simplify_gen_subreg(), SUBREG_CHECK_PROMOTED_SIGN, subreg_promoted_mode(), SUBREG_PROMOTED_SET, SUBREG_PROMOTED_VAR_P, SUBREG_REG, VECTOR_MODE_P, and XEXP.

Referenced by assign_call_lhs(), calculate_table_based_CRC(), convert_mode_scalar(), convert_modes(), convert_move(), do_tablejump(), doloop_modify(), emit_block_cmp_via_loop(), emit_conditional_add(), emit_conditional_move_1(), emit_conditional_neg_or_complement(), emit_cstore(), emit_store_flag_1(), emit_store_flag_int(), expand_assignment(), expand_binop(), expand_builtin_memcmp(), expand_builtin_strcmp(), expand_builtin_strlen(), expand_builtin_strncmp(), expand_crc_table_based(), expand_doubleword_clz_ctz_ffs(), expand_expr_real_1(), expand_expr_real_2(), expand_ffs(), expand_fix(), expand_float(), expand_function_end(), expand_function_start(), expand_gimple_stmt_1(), expand_reversed_crc_table_based(), expand_sfix_optab(), expand_single_bit_test(), expand_twoval_binop(), expand_twoval_unop(), expand_ubsan_result_store(), expand_unop(), force_operand(), std_expand_builtin_va_start(), store_bit_field_using_insv(), store_constructor(), and store_expr().
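
Unlike convert_modes and convert_to_mode, convert_move writes into an existing destination rather than returning a new rtx.  A minimal illustrative fragment (names hypothetical):

  rtx src = gen_reg_rtx (SImode);
  rtx dst = gen_reg_rtx (DImode);
  /* Zero-extend the SImode value in SRC into the DImode pseudo DST.  */
  convert_move (dst, src, /*unsignedp=*/1);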

◆ convert_to_mode()

rtx convert_to_mode ( machine_mode mode,
rtx x,
int unsignedp )
Return an rtx for a value that would result
from converting X to mode MODE.
Both X and MODE may be floating, or both integer.
UNSIGNEDP is nonzero if X is an unsigned value.
This can be done by referring to a part of X in place
or by copying to a new temporary with conversion.   

References convert_modes().

Referenced by anti_adjust_stack_and_probe(), anti_adjust_stack_and_probe_stack_clash(), assign_call_lhs(), assign_parm_setup_block(), assign_parm_setup_reg(), assign_parm_setup_stack(), builtin_memset_gen_str(), convert_extracted_bit_field(), convert_mode_scalar(), copy_blkmode_from_reg(), do_tablejump(), emit_block_op_via_libcall(), emit_partition_copy(), expand_assignment(), expand_binop(), expand_builtin_bswap(), expand_builtin_extract_return_addr(), expand_builtin_int_roundingfn_2(), expand_builtin_interclass_mathfn(), expand_builtin_memcmp(), expand_builtin_memset_args(), expand_builtin_powi(), expand_builtin_stack_address(), expand_builtin_strcmp(), expand_builtin_strlen(), expand_builtin_strncmp(), expand_builtin_unop(), expand_expr_real_1(), expand_expr_real_2(), expand_fix(), expand_float(), expand_parity(), expand_POPCOUNT(), expand_sfix_optab(), expand_widening_mult(), expmed_mult_highpart(), extract_fixed_bit_field_1(), get_dynamic_stack_size(), maybe_legitimize_operand(), prepare_cmp_insn(), prepare_float_lib_cmp(), prepare_libcall_arg(), probe_stack_range(), set_storage_via_libcall(), sjlj_emit_dispatch_table(), store_expr(), store_fixed_bit_field_1(), try_casesi(), and try_store_by_multiple_pieces().

◆ convert_tree_comp_to_rtx()

static enum rtx_code convert_tree_comp_to_rtx ( enum tree_code tcode,
int unsignedp )
static
Convert the tree comparison code TCODE to the rtl one where the
signedness is UNSIGNEDP.   

References gcc_unreachable.

Referenced by expand_cond_expr_using_cmove().

◆ convert_wider_int_to_float()

rtx convert_wider_int_to_float ( machine_mode mode,
machine_mode imode,
rtx x )
Variant of convert_modes for ABI parameter passing/return.
Return an rtx for a value that would result from converting X from
an integer mode IMODE to a narrower floating point mode MODE.   

References force_reg(), gcc_assert, gen_lowpart, gen_lowpart_SUBREG(), int_mode_for_mode(), opt_mode< T >::require(), SCALAR_FLOAT_MODE_P, and SCALAR_INT_MODE_P.

Referenced by assign_parm_setup_stack(), expand_call(), and expand_expr_real_1().

◆ copy_blkmode_from_reg()

◆ copy_blkmode_to_reg()

rtx copy_blkmode_to_reg ( machine_mode mode_in,
tree src )

◆ count_type_elements()

static HOST_WIDE_INT count_type_elements ( const_tree type,
bool for_ctor_p )
static
If FOR_CTOR_P, return the number of top-level elements that a constructor
must have in order for it to completely initialize a value of type TYPE.
Return -1 if the number isn't known.

If !FOR_CTOR_P, return an estimate of the number of scalars in TYPE.   

References array_type_nelts_minus_one(), count_type_elements(), DECL_CHAIN, flexible_array_member_p(), gcc_assert, gcc_unreachable, simple_cst_equal(), TREE_CODE, tree_fits_uhwi_p(), tree_to_uhwi(), TREE_TYPE, TYPE_FIELDS, TYPE_SIZE, and TYPE_VECTOR_SUBPARTS().

Referenced by categorize_ctor_elements_1(), complete_ctor_at_level_p(), and count_type_elements().

◆ do_store_flag()

static rtx do_store_flag ( const_sepops ops,
rtx target,
machine_mode mode )
static
Generate code to calculate OPS, an exploded expression
using a store-flag instruction and return an rtx for the result.
OPS reflects a comparison.

If TARGET is nonzero, store the result there if convenient.

Return zero if there is no suitable set-flag instruction
available on this machine.

Once expand_expr has been called on the arguments of the comparison,
we are committed to doing the store flag, since it is not safe to
re-evaluate the expression.  We emit the store-flag insn by calling
emit_store_flag, but only expand the arguments if we have a reason
to believe that emit_store_flag will be successful.  If we think that
it will, but it isn't, we have to simulate the store-flag with a
set/jump/set sequence.   

References build2(), separate_ops::code, const0_rtx, do_store_flag(), emit_store_flag_force(), error_mark_node, wi::exact_log2(), expand_binop(), EXPAND_NORMAL, expand_operands(), expand_single_bit_test(), expand_vec_cmp_expr(), expand_vec_cmp_expr_p(), FUNC_OR_METHOD_TYPE_P, gcc_assert, gcc_unreachable, GEN_INT, gen_reg_rtx(), get_def_for_expr(), GET_MODE, GET_MODE_PRECISION(), get_subtarget(), gimple_assign_rhs1(), gimple_assign_rhs2(), HOST_WIDE_INT_1U, integer_all_onesp(), integer_onep(), integer_pow2p(), integer_zero_node, integer_zerop(), separate_ops::location, maybe_optimize_mod_cmp(), maybe_optimize_sub_cmp_0(), NULL_RTX, separate_ops::op0, separate_ops::op1, OPTAB_WIDEN, POINTER_TYPE_P, wi::popcount(), SCALAR_INT_MODE_P, STRIP_NOPS, swap_condition(), targetm, wi::to_wide(), TREE_CODE, tree_nonzero_bits(), TREE_TYPE, separate_ops::type, type(), lang_hooks_for_types::type_for_mode, TYPE_MODE, TYPE_PRECISION, TYPE_UNSIGNED, TYPE_VECTOR_SUBPARTS(), lang_hooks::types, VECTOR_BOOLEAN_TYPE_P, and VECTOR_TYPE_P.

Referenced by do_store_flag(), and expand_expr_real_2().

◆ do_tablejump()

static void do_tablejump ( rtx index,
machine_mode mode,
rtx range,
rtx table_label,
rtx default_label,
profile_probability default_probability )
static
Subroutine of try_tablejump: attempt to generate a tablejump instruction.

INDEX is the value being switched on, with the lowest value
in the table already subtracted.
MODE is its expected mode (needed if INDEX is constant).
RANGE is the length of the jump table.
TABLE_LABEL is a CODE_LABEL rtx for the table itself.

DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
index value is out of range.
DEFAULT_PROBABILITY is the probability of jumping to
the default label.   

References as_a(), CASE_VECTOR_PC_RELATIVE, cfun, convert_move(), convert_to_mode(), copy_to_mode_reg(), emit_barrier(), emit_cmp_and_jump_insns(), emit_jump_insn(), gen_const_mem(), gen_int_mode(), gen_reg_rtx(), GET_CODE, GET_MODE_PRECISION(), GET_MODE_SIZE(), HOST_BITS_PER_WIDE_INT, HOST_WIDE_INT_1U, INTVAL, memory_address, expand_operand::mode, NULL_RTX, REG_P, simplify_gen_binary(), SUBREG_PROMOTED_SIGNED_P, SUBREG_PROMOTED_VAR_P, targetm, and UINTVAL.

Referenced by try_tablejump().

◆ emit_block_cmp_hints()

rtx emit_block_cmp_hints ( rtx x,
rtx y,
rtx len,
tree len_type,
rtx target,
bool equality_only,
by_pieces_constfn y_cfn,
void * y_cfndata,
unsigned ctz_len )
Emit code to compare a block Y to a block X.  This may be done with
string-compare instructions, with multiple scalar instructions,
or with a library call.

Both X and Y must be MEM rtx's.  LEN is an rtx that says how long
they are.  LEN_TYPE is the type of the expression that was used to
calculate it, and CTZ_LEN is the known trailing-zeros count of LEN,
so LEN must be a multiple of 1<<CTZ_LEN even if it's not constant.

If EQUALITY_ONLY is true, it means we don't have to return the tri-state
value of a normal memcmp call, instead we can just compare for equality.
If FORCE_LIBCALL is true, we should emit a call to memcmp rather than
returning NULL_RTX.

Optionally, the caller can pass a constfn and associated data in Y_CFN
and Y_CFN_DATA, describing that the second operand being compared is a
known constant and how to obtain its data.
Return the result of the comparison, or NULL_RTX if we failed to
perform the operation.   

References adjust_address, can_do_by_pieces(), COMPARE_BY_PIECES, compare_by_pieces(), const0_rtx, CONST_INT_P, emit_block_cmp_via_cmpmem(), emit_block_cmp_via_loop(), gcc_assert, ILSOP_MEMCMP, INTVAL, MEM_ALIGN, MEM_P, MIN, expand_operand::target, and y.

Referenced by expand_builtin_memcmp().
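
A hedged sketch of a memcmp-style use, assuming GCC-internal context; the wrapper name expand_memcmp_like is hypothetical, and size_type_node is used here merely as a plausible LEN_TYPE.

  static rtx
  expand_memcmp_like (rtx x, rtx y, rtx len_rtx)
  {
    /* NULL y_cfn: the second operand is not a known constant.  ctz_len
       0: nothing is known about the low bits of the length.  A NULL
       result means no inline expansion was possible and the caller
       should fall back to a real memcmp call.  */
    return emit_block_cmp_hints (x, y, len_rtx, size_type_node,
                                 NULL_RTX, /*equality_only=*/false,
                                 NULL, NULL, /*ctz_len=*/0);
  }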

◆ emit_block_cmp_via_cmpmem()

static rtx emit_block_cmp_via_cmpmem ( rtx x,
rtx y,
rtx len,
tree len_type,
rtx target,
unsigned align )
static
Expand a block compare between X and Y with length LEN using the
cmpmem optab, placing the result in TARGET.  LEN_TYPE is the type
of the expression that was used to calculate the length.  ALIGN
gives the known minimum common alignment.   

References direct_optab_handler(), expand_cmpstrn_or_cmpmem(), NULL_RTX, expand_operand::target, and y.

Referenced by emit_block_cmp_hints().

◆ emit_block_cmp_via_loop()

rtx emit_block_cmp_via_loop ( rtx x,
rtx y,
rtx len,
tree len_type,
rtx target,
bool equality_only,
unsigned align,
unsigned ctz_len )
static

◆ emit_block_move()

◆ emit_block_move_hints()

rtx emit_block_move_hints ( rtx x,
rtx y,
rtx size,
enum block_op_methods method,
unsigned int expected_align,
HOST_WIDE_INT expected_size,
unsigned HOST_WIDE_INT min_size,
unsigned HOST_WIDE_INT max_size,
unsigned HOST_WIDE_INT probable_max_size,
bool bail_out_libcall,
bool * is_move_done,
bool might_overlap,
unsigned ctz_size )
Emit code to move a block Y to a block X.  This may be done with
string-move instructions, with multiple scalar move instructions,
or with a library call.

Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
SIZE is an rtx that says how long they are.
ALIGN is the maximum alignment we can assume they have.
METHOD describes what kind of copy this is, and what mechanisms may be used.
MIN_SIZE is the minimal size of block to move
MAX_SIZE is the maximal size of block to move; if it cannot be represented
in unsigned HOST_WIDE_INT, then it is the mask of all ones.
CTZ_SIZE is the trailing-zeros count of SIZE; even a nonconstant SIZE is
known to be a multiple of 1<<CTZ_SIZE.

Return the address of the new block, if memcpy is called and returns it,
0 otherwise.   

References ADDR_SPACE_GENERIC_P, adjust_address, block_move_libcall_safe_for_call_parm(), BLOCK_OP_CALL_PARM, BLOCK_OP_NO_LIBCALL, BLOCK_OP_NO_LIBCALL_RET, BLOCK_OP_NORMAL, BLOCK_OP_TAILCALL, can_move_by_pieces(), CONST_INT_P, emit_block_copy_via_libcall(), emit_block_move_via_oriented_loop(), emit_block_move_via_pattern(), emit_block_move_via_sized_loop(), gcc_assert, gcc_unreachable, ILSOP_MEMCPY, ILSOP_MEMMOVE, INTVAL, MEM_ADDR_SPACE, MEM_ALIGN, MEM_P, MEM_VOLATILE_P, MIN, move_by_pieces(), NO_DEFER_POP, OK_DEFER_POP, pc_rtx, poly_int_rtx_p(), RETURN_BEGIN, rtx_equal_p(), set_mem_size(), shallow_copy_rtx(), and y.

Referenced by emit_block_move(), emit_block_move_via_loop(), and expand_builtin_memory_copy_args().
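
Many callers go through the emit_block_move wrapper rather than passing the hints explicitly.  A minimal illustrative fragment (DST_MEM, SRC_MEM and NBYTES are hypothetical; 0 for CTZ_SIZE claims nothing about the trailing zero bits of the size):

  /* Copy NBYTES bytes between two non-overlapping BLKmode MEMs.
     BLOCK_OP_CALL_PARM or BLOCK_OP_NO_LIBCALL would replace
     BLOCK_OP_NORMAL in more constrained contexts.  */
  emit_block_move (dst_mem, src_mem, GEN_INT (nbytes),
                   BLOCK_OP_NORMAL, /*ctz_size=*/0);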

◆ emit_block_move_via_loop() [1/2]

static void emit_block_move_via_loop ( rtx x,
rtx y,
rtx size,
unsigned int align,
int incr )
static
A subroutine of emit_block_move.  Copy the data via an explicit
loop.  This is used only when libcalls are forbidden, or when
inlining is required.  INCR is the block size to be copied in each
loop iteration.  If it is negative, the absolute value is used, and
the block is copied backwards.  INCR must be a power of two, an
exact divisor for SIZE and ALIGN, and imply a mode that can be
safely copied per iteration assuming no overlap.   

References apply_scale(), BLOCK_OP_NO_LIBCALL, can_move_by_pieces(), change_address(), const0_rtx, convert_modes(), do_pending_stack_adjust(), emit_block_move_hints(), emit_cmp_and_jump_insns(), emit_jump(), emit_label(), emit_move_insn(), opt_mode< T >::exists(), expand_simple_binop(), force_operand(), gcc_checking_assert, GEN_INT, gen_label_rtx(), gen_reg_rtx(), get_address_mode(), GET_MODE, GET_MODE_BITSIZE(), profile_probability::guessed_always(), int_mode_for_size(), NULL_RTX, OPTAB_LIB_WIDEN, opt_mode< T >::require(), simplify_gen_binary(), word_mode, XEXP, and y.

◆ emit_block_move_via_loop() [2/2]

static void emit_block_move_via_loop ( rtx ,
rtx ,
rtx ,
unsigned ,
int  )
static

◆ emit_block_move_via_oriented_loop() [1/2]

static void emit_block_move_via_oriented_loop ( rtx x,
rtx y,
rtx size,
unsigned int align,
unsigned int ctz_size )
static
Like emit_block_move_via_sized_loop, but besides choosing INCR so
as to ensure safe moves even in case of overlap, output dynamic
tests to choose between two loops, one moving downwards, another
moving upwards.   

References apply_scale(), CONST_INT_P, convert_modes(), wi::ctz(), do_pending_stack_adjust(), emit_block_move_via_loop(), emit_cmp_and_jump_insns(), emit_jump(), emit_label(), force_operand(), gcc_checking_assert, gen_label_rtx(), GET_MODE, GET_MODE_BITSIZE(), profile_probability::guessed_always(), HOST_WIDE_INT_1U, int_mode_for_size(), MAX, expand_operand::mode, NULL_RTX, opt_mode< T >::require(), simplify_gen_binary(), smallest_int_mode_for_size(), UINTVAL, XEXP, and y.

◆ emit_block_move_via_oriented_loop() [2/2]

static void emit_block_move_via_oriented_loop ( rtx ,
rtx ,
rtx ,
unsigned ,
unsigned  )
static

Referenced by emit_block_move_hints().

◆ emit_block_move_via_pattern() [1/2]

static bool emit_block_move_via_pattern ( rtx x,
rtx y,
rtx size,
unsigned int align,
unsigned int expected_align,
HOST_WIDE_INT expected_size,
unsigned HOST_WIDE_INT min_size,
unsigned HOST_WIDE_INT max_size,
unsigned HOST_WIDE_INT probable_max_size,
bool might_overlap )
static
A subroutine of emit_block_move.  Expand a cpymem or movmem pattern;
return true if successful.

X is the destination of the copy or move.
Y is the source of the copy or move.
SIZE is the size of the block to be moved.

MIGHT_OVERLAP indicates this originated with expansion of a
builtin_memmove() and the source and destination blocks may
overlap.

References CONST_INT_P, create_convert_operand_to(), create_fixed_operand(), create_integer_operand(), direct_optab_handler(), FOR_EACH_MODE_IN_CLASS, gcc_assert, GET_MODE_BITSIZE(), GET_MODE_MASK, insn_data, INTVAL, maybe_expand_insn(), expand_operand::mode, NULL, opt_mode< T >::require(), and y.

◆ emit_block_move_via_pattern() [2/2]

static bool emit_block_move_via_pattern ( rtx ,
rtx ,
rtx ,
unsigned ,
unsigned ,
HOST_WIDE_INT ,
unsigned HOST_WIDE_INT,
unsigned HOST_WIDE_INT,
unsigned HOST_WIDE_INT,
bool  )
static

Referenced by emit_block_move_hints().

◆ emit_block_move_via_sized_loop() [1/2]

static void emit_block_move_via_sized_loop ( rtx x,
rtx y,
rtx size,
unsigned int align,
unsigned int ctz_size )
static
Like emit_block_move_via_loop, but choose a suitable INCR based on
ALIGN and CTZ_SIZE.   

References can_move_by_pieces(), CONST_INT_P, wi::ctz(), emit_block_move_via_loop(), gcc_checking_assert, HOST_WIDE_INT_1U, MAX, UINTVAL, and y.

◆ emit_block_move_via_sized_loop() [2/2]

static void emit_block_move_via_sized_loop ( rtx ,
rtx ,
rtx ,
unsigned ,
unsigned  )
static

Referenced by emit_block_move_hints().

◆ emit_block_op_via_libcall()

rtx emit_block_op_via_libcall ( enum built_in_function fncode,
rtx dst,
rtx src,
rtx size,
bool tailcall )

◆ emit_group_load()

void emit_group_load ( rtx dst,
rtx src,
tree type,
poly_int64 ssize )
Emit code to move a block SRC of type TYPE to a block DST,
where DST is non-consecutive registers represented by a PARALLEL.
SSIZE represents the total size of block ORIG_SRC in bytes, or -1
if not known.   

References emit_group_load_1(), emit_move_insn(), i, NULL, XEXP, XVECEXP, and XVECLEN.

Referenced by emit_group_store(), emit_library_call_value_1(), emit_push_insn(), expand_assignment(), expand_function_end(), expand_value_return(), and store_expr().

◆ emit_group_load_1()

static void emit_group_load_1 ( rtx * tmps,
rtx dst,
rtx orig_src,
tree type,
poly_int64 ssize )
static

◆ emit_group_load_into_temps()

rtx emit_group_load_into_temps ( rtx parallel,
rtx src,
tree type,
poly_int64 ssize )
Similar, but load SRC into new pseudos in a format that looks like
PARALLEL.  This can later be fed to emit_group_move to get things
in the right place.   

References alloc_EXPR_LIST(), emit_group_load_1(), force_reg(), GET_MODE, i, REG_NOTE_KIND, rtvec_alloc(), RTVEC_ELT, XEXP, XVECEXP, and XVECLEN.

Referenced by precompute_register_parameters(), and store_one_arg().

◆ emit_group_move()

void emit_group_move ( rtx dst,
rtx src )
Emit code to move a block SRC to block DST, where SRC and DST are
non-consecutive groups of registers, each represented by a PARALLEL.   

References emit_move_insn(), gcc_assert, GET_CODE, i, XEXP, XVECEXP, and XVECLEN.

Referenced by expand_assignment(), expand_call(), expand_function_end(), load_register_parameters(), and store_expr().

◆ emit_group_move_into_temps()

rtx emit_group_move_into_temps ( rtx src)
Move a group of registers represented by a PARALLEL into pseudos.   

References alloc_EXPR_LIST(), copy_to_reg(), GET_MODE, i, REG_NOTE_KIND, rtvec_alloc(), RTVEC_ELT, XEXP, XVECEXP, and XVECLEN.

Referenced by assign_parm_setup_block(), and expand_call().

◆ emit_group_store()

◆ emit_move_ccmode()

static rtx_insn * emit_move_ccmode ( machine_mode mode,
rtx x,
rtx y )
static
A subroutine of emit_move_insn_1.  Generate a move from Y into X.
MODE is known to be MODE_CC.  Returns the last instruction emitted.   

References emit_insn(), emit_move_change_mode(), emit_move_via_integer(), gcc_assert, expand_operand::mode, NULL, optab_handler(), and y.

Referenced by emit_move_insn_1().

◆ emit_move_change_mode()

static rtx emit_move_change_mode ( machine_mode new_mode,
machine_mode old_mode,
rtx x,
bool force )
static
A subroutine of emit_move_insn_1.  Yet another lowpart generator.
NEW_MODE and OLD_MODE are the same size.  Return NULL if X cannot be
represented in NEW_MODE.  If FORCE is true, this will never happen, as
we'll force-create a SUBREG if needed.   

References adjust_address, adjust_address_nv, copy_replacements(), gen_rtx_MEM(), GET_MODE, MEM_COPY_ATTRIBUTES, MEM_P, new_mode(), push_operand(), reload_in_progress, simplify_gen_subreg(), simplify_subreg(), and XEXP.

Referenced by emit_move_ccmode(), and emit_move_via_integer().

◆ emit_move_complex()

static rtx_insn * emit_move_complex ( machine_mode mode,
rtx x,
rtx y )
static

◆ emit_move_complex_parts()

rtx_insn * emit_move_complex_parts ( rtx x,
rtx y )
A subroutine of emit_move_complex.  Perform the move from Y to X
via two moves of the parts.  Returns the last instruction emitted.   

References emit_clobber(), get_last_insn(), read_complex_part(), reg_overlap_mentioned_p(), REG_P, reload_completed, reload_in_progress, write_complex_part(), and y.

Referenced by emit_move_complex().

◆ emit_move_complex_push()

rtx_insn * emit_move_complex_push ( machine_mode mode,
rtx x,
rtx y )
A subroutine of emit_move_complex.  Generate a move from Y into X.
X is known to satisfy push_operand, and MODE is known to be complex.
Returns the last instruction emitted.   

References emit_move_insn(), emit_move_resolve_push(), gcc_unreachable, gen_rtx_MEM(), GET_CODE, GET_MODE_INNER, GET_MODE_SIZE(), expand_operand::mode, read_complex_part(), XEXP, and y.

Referenced by emit_move_complex().

◆ emit_move_insn()

rtx_insn * emit_move_insn ( rtx x,
rtx y )
Generate code to copy Y into X.
Both Y and X must have the same mode, except that
Y can be a constant with VOIDmode.
This mode cannot be BLKmode; use emit_block_move for that.

Return the last instruction emitted.   

References adjust_address, compress_float_constant(), CONSTANT_P, copy_rtx(), emit_move_insn_1(), force_const_mem(), gcc_assert, GET_MODE, GET_MODE_ALIGNMENT, GET_MODE_SIZE(), known_eq, MEM_ADDR_SPACE, MEM_ALIGN, MEM_P, memory_address_addr_space_p(), expand_operand::mode, NULL_RTX, optab_handler(), push_operand(), REG_P, rtx_equal_p(), SCALAR_FLOAT_MODE_P, SET_DEST, SET_SRC, set_unique_reg_note(), simplify_subreg(), single_set(), SUBREG_P, SUBREG_REG, targetm, use_anchored_address(), validize_mem(), XEXP, and y.

Referenced by adjust_stack_1(), allocate_dynamic_stack_space(), asan_clear_shadow(), asan_emit_stack_protection(), assign_call_lhs(), assign_parm_setup_block(), assign_parm_setup_reg(), assign_parm_setup_stack(), assign_parms_unsplit_complex(), attempt_change(), avoid_likely_spilled_reg(), builtin_memset_gen_str(), builtin_memset_read_str(), clear_storage_hints(), combine_reaching_defs(), combine_var_copies_in_loop_exit(), compare_by_pieces(), compress_float_constant(), convert_mode_scalar(), convert_move(), copy_blkmode_from_reg(), copy_blkmode_to_reg(), copy_to_mode_reg(), copy_to_reg(), copy_to_suggested_reg(), curr_insn_transform(), default_speculation_safe_value(), default_zero_call_used_regs(), do_jump_by_parts_zero_rtx(), emit_block_cmp_via_loop(), emit_block_move_via_loop(), emit_conditional_move(), emit_conditional_move_1(), emit_group_load(), emit_group_load_1(), emit_group_move(), emit_group_store(), emit_initial_value_sets(), emit_libcall_block_1(), emit_library_call_value_1(), emit_move_complex_push(), emit_move_list(), emit_move_multi_word(), emit_move_resolve_push(), emit_partition_copy(), emit_push_insn(), emit_stack_probe(), emit_store_flag_force(), emit_store_flag_int(), expand_abs(), expand_absneg_bit(), expand_and(), expand_asm_stmt(), expand_assignment(), expand_atomic_compare_and_swap(), expand_atomic_fetch_op(), expand_atomic_load(), expand_atomic_store(), expand_binop(), expand_BITINTTOFLOAT(), expand_builtin_apply(), expand_builtin_apply_args_1(), expand_builtin_atomic_clear(), expand_builtin_atomic_compare_exchange(), expand_builtin_eh_copy_values(), expand_builtin_eh_return(), expand_BUILTIN_EXPECT(), expand_builtin_goacc_parlevel_id_size(), expand_builtin_init_descriptor(), expand_builtin_issignaling(), expand_builtin_longjmp(), expand_builtin_nonlocal_goto(), expand_builtin_return(), expand_builtin_setjmp_receiver(), expand_builtin_setjmp_setup(), expand_builtin_sincos(), expand_builtin_stpcpy_1(), expand_builtin_strlen(), expand_builtin_strub_enter(), expand_builtin_strub_leave(), expand_builtin_strub_update(), expand_call(), expand_compare_and_swap_loop(), expand_copysign_absneg(), expand_copysign_bit(), expand_dec(), expand_DIVMOD(), expand_divmod(), expand_doubleword_bswap(), expand_doubleword_mult(), expand_dw2_landing_pad_for_region(), expand_eh_return(), expand_expr_real_1(), expand_expr_real_2(), expand_fix(), expand_fixed_convert(), expand_float(), expand_function_end(), expand_function_start(), expand_gimple_stmt_1(), expand_GOACC_DIM_POS(), expand_GOACC_DIM_SIZE(), expand_HWASAN_CHOOSE_TAG(), expand_HWASAN_SET_TAG(), expand_ifn_atomic_bit_test_and(), expand_ifn_atomic_compare_exchange_into_call(), expand_ifn_atomic_op_fetch_cmp_0(), expand_inc(), expand_movstr(), expand_mul_overflow(), expand_mult_const(), expand_one_ssa_partition(), expand_POPCOUNT(), expand_rotate_as_vec_perm(), expand_SET_EDOM(), expand_smod_pow2(), expand_strided_load_optab_fn(), expand_subword_shift(), expand_superword_shift(), expand_ubsan_result_store(), expand_unop(), expand_value_return(), expand_vec_set_optab_fn(), expand_vector_ubsan_overflow(), extract_bit_field_1(), extract_integral_bit_field(), find_shift_sequence(), fix_crossing_unconditional_branches(), asan_redzone_buffer::flush_redzone_payload(), force_expand_binop(), force_not_mem(), force_operand(), force_reg(), get_arg_pointer_save_area(), get_dynamic_stack_base(), inherit_in_ebb(), init_one_dwarf_reg_size(), init_return_column_size(), init_set_costs(), initialize_uninitialized_regs(), inline_string_cmp(), 
insert_base_initialization(), insert_value_copy_on_edge(), insert_var_expansion_initialization(), instantiate_virtual_regs_in_insn(), load_register_parameters(), lra_emit_add(), lra_emit_move(), make_safe_from(), match_asm_constraints_1(), maybe_emit_unop_insn(), move_block_from_reg(), move_block_to_reg(), noce_emit_cmove(), noce_emit_move_insn(), optimize_bitfield_assignment_op(), prepare_call_address(), prepare_copy_insn(), probe_stack_range(), resolve_shift_zext(), resolve_simple_move(), sjlj_emit_dispatch_table(), sjlj_emit_function_enter(), sjlj_mark_call_sites(), split_iv(), stack_protect_prologue(), store_bit_field(), store_bit_field_1(), store_bit_field_using_insv(), store_constructor(), store_expr(), store_fixed_bit_field_1(), store_integral_bit_field(), store_one_arg(), store_unaligned_arguments_into_pseudos(), try_store_by_multiple_pieces(), unroll_loop_runtime_iterations(), widen_bswap(), widen_operand(), and write_complex_part().
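
A minimal illustrative fragment (GCC-internal context assumed): load the constant 42 into a fresh SImode pseudo.

  rtx reg = gen_reg_rtx (SImode);
  /* The CONST_INT source has VOIDmode, which emit_move_insn accepts as
     long as the destination supplies the mode.  */
  rtx_insn *last = emit_move_insn (reg, GEN_INT (42));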

◆ emit_move_insn_1()

◆ emit_move_multi_word()

static rtx_insn * emit_move_multi_word ( machine_mode mode,
rtx x,
rtx y )
static
A subroutine of emit_move_insn_1.  Generate a move from Y into X.
MODE is any multi-word or full-word mode that lacks a move_insn
pattern.  Note that you will get better code if you define such
patterns, even if they must turn into multiple assembler instructions.   

References CEIL, CONSTANT_P, emit_clobber(), emit_insn(), emit_move_insn(), emit_move_resolve_push(), end_sequence(), find_replacement(), force_const_mem(), gcc_assert, GET_CODE, get_insns(), GET_MODE_SIZE(), i, MEM_P, expand_operand::mode, mode_size, operand_subword(), operand_subword_force(), push_operand(), reload_completed, reload_in_progress, replace_equiv_address_nv(), start_sequence(), poly_int< N, C >::to_constant(), undefined_operand_subword_p(), use_anchored_address(), XEXP, and y.

Referenced by emit_move_insn_1().

◆ emit_move_resolve_push()

rtx emit_move_resolve_push ( machine_mode mode,
rtx x )
A subroutine of emit_move_insn_1.  X is a push_operand in MODE.
Return an equivalent MEM that does not use an auto-increment.   

References emit_move_insn(), expand_simple_binop(), gcc_assert, gcc_unreachable, gen_int_mode(), GET_CODE, GET_MODE_SIZE(), known_eq, expand_operand::mode, OPTAB_LIB_WIDEN, plus_constant(), replace_equiv_address(), rtx_to_poly_int64(), stack_pointer_rtx, and XEXP.

Referenced by emit_move_complex_push(), and emit_move_multi_word().

◆ emit_move_via_integer()

static rtx_insn * emit_move_via_integer ( machine_mode mode,
rtx x,
rtx y,
bool force )
static
A subroutine of emit_move_insn_1.  Generate a move from Y into X using
an integer mode of the same size as MODE.  Returns the instruction
emitted, or NULL if such a move could not be generated.   

References emit_insn(), emit_move_change_mode(), int_mode_for_mode(), expand_operand::mode, NULL, NULL_RTX, optab_handler(), and y.

Referenced by emit_move_ccmode(), emit_move_complex(), and emit_move_insn_1().

◆ emit_push_insn()

bool emit_push_insn ( rtx x,
machine_mode mode,
tree type,
rtx size,
unsigned int align,
int partial,
rtx reg,
poly_int64 extra,
rtx args_addr,
rtx args_so_far,
int reg_parm_stack_space,
rtx alignment_pad,
bool sibcall_p )
Generate code to push X onto the stack, assuming it has mode MODE and
type TYPE.
MODE is redundant except when X is a CONST_INT (since they don't
carry mode info).
SIZE is an rtx for the size of data to be copied (in bytes),
needed only if X is BLKmode.
Return true if successful.  May return false if asked to push a
partial argument during a sibcall optimization (as specified by
SIBCALL_P) and the incoming and outgoing pointers cannot be shown
to not overlap.

ALIGN (in bits) is maximum alignment we can assume.

If PARTIAL and REG are both nonzero, then copy that many of the first
bytes of X into registers starting with REG, and push the rest of X.
The amount of space pushed is decreased by PARTIAL bytes.
REG must be a hard register in this case.
If REG is zero but PARTIAL is not, take all other actions for an
argument partially in registers, but do not actually load any
registers.

EXTRA is the amount in bytes of extra space to leave next to this arg.
This is ignored if an argument block has already been allocated.

On a machine that lacks real push insns, ARGS_ADDR is the address of
the bottom of the argument block for this call.  We use indexing off there
to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
argument block has not been preallocated.

ARGS_SO_FAR is the size of args previously pushed for this call.

REG_PARM_STACK_SPACE is nonzero if functions require stack space
for arguments passed in registers.  If nonzero, it will be the number
of bytes required.   

References ACCUMULATE_OUTGOING_ARGS, adjust_address, anti_adjust_stack(), assign_temp(), BLOCK_OP_CALL_PARM, can_move_by_pieces(), CONST_INT_P, CONSTANT_P, copy_to_reg(), DECL_INITIAL, emit_block_move(), emit_group_load(), emit_move_insn(), emit_push_insn(), expand_binop(), force_const_mem(), gcc_assert, GEN_INT, gen_int_mode(), gen_reg_rtx(), gen_rtx_MEM(), gen_rtx_REG(), GET_CODE, GET_MODE, GET_MODE_ALIGNMENT, GET_MODE_CLASS, GET_MODE_SIZE(), i, immediate_const_ctor_p(), int_expr_size(), INTVAL, known_eq, MEM_ALIGN, MEM_P, memory_address, memory_load_overlap(), expand_operand::mode, move_block_to_reg(), move_by_pieces(), NULL, NULL_RTX, NULL_TREE, offset, operand_subword_force(), OPTAB_LIB_WIDEN, PAD_DOWNWARD, PAD_NONE, PAD_UPWARD, plus_constant(), poly_int_rtx_p(), push_block(), reg_mentioned_p(), REG_P, REGNO, RETURN_BEGIN, set_mem_align(), simplify_gen_binary(), STACK_GROWS_DOWNWARD, STACK_PUSH_CODE, store_constructor(), SYMBOL_REF_DECL, SYMBOL_REF_P, expand_operand::target, targetm, poly_int< N, C >::to_constant(), TREE_READONLY, TREE_SIDE_EFFECTS, lang_hooks_for_types::type_for_mode, lang_hooks::types, validize_mem(), VAR_P, virtual_outgoing_args_rtx, virtual_stack_dynamic_rtx, word_mode, and XEXP.

Referenced by emit_library_call_value_1(), emit_push_insn(), and store_one_arg().

◆ emit_storent_insn()

bool emit_storent_insn ( rtx to,
rtx from )
Emits nontemporal store insn that moves FROM to TO.  Returns true if this
succeeded, false otherwise.   

References create_fixed_operand(), create_input_operand(), GET_MODE, maybe_expand_insn(), expand_operand::mode, and optab_handler().

Referenced by store_expr().

◆ expand_assignment()

void expand_assignment ( tree to,
tree from,
bool nontemporal )
Expand an assignment that stores the value of FROM into TO.  If NONTEMPORAL
is true, try generating a nontemporal store.   

References ADDR_SPACE_GENERIC_P, adjust_address, aggregate_value_p(), as_a(), assign_stack_temp(), bits_to_bytes_round_down, BLOCK_OP_NORMAL, cfun, change_address(), COMPLETE_TYPE_P, COMPLEX_MODE_P, const0_rtx, convert_memory_address_addr_space(), convert_move(), convert_to_mode(), copy_blkmode_from_reg(), copy_blkmode_to_reg(), create_fixed_operand(), create_input_operand(), DECL_BIT_FIELD_TYPE, DECL_HARD_REGISTER, DECL_P, DECL_RTL, emit_block_move(), emit_block_move_via_libcall(), emit_group_load(), emit_group_move(), emit_group_store(), emit_move_insn(), expand_builtin_trap(), expand_expr(), expand_insn(), EXPAND_NORMAL, expand_normal(), EXPAND_SUM, EXPAND_WRITE, expr_size(), flip_storage_order(), force_not_mem(), force_operand(), force_subreg(), gcc_assert, gcc_checking_assert, gen_rtx_MEM(), get_address_mode(), get_alias_set(), get_bit_range(), GET_CODE, get_inner_reference(), GET_MODE, GET_MODE_ALIGNMENT, GET_MODE_BITSIZE(), GET_MODE_INNER, GET_MODE_PRECISION(), GET_MODE_SIZE(), GET_MODE_UNIT_BITSIZE, get_object_alignment(), handled_component_p(), highest_pow2_factor_for_target(), int_size_in_bytes(), known_eq, known_ge, known_le, lowpart_subreg(), maybe_emit_group_store(), maybe_gt, MEM_ALIGN, MEM_P, mem_ref_refers_to_non_mem_p(), MEM_VOLATILE_P, expand_operand::mode, NULL, NULL_RTX, NULL_TREE, num_trailing_bits, offset, offset_address(), operand_equal_p(), optab_handler(), optimize_bitfield_assignment_op(), POINTER_TYPE_P, pop_temp_slots(), preserve_temp_slots(), push_temp_slots(), read_complex_part(), REF_REVERSE_STORAGE_ORDER, refs_may_alias_p(), REG_P, set_mem_attributes_minus_bitpos(), shallow_copy_rtx(), simplify_gen_unary(), size_int, store_bit_field(), store_expr(), store_field(), SUBREG_P, subreg_promoted_mode(), SUBREG_PROMOTED_SIGN, SUBREG_PROMOTED_VAR_P, SUBREG_REG, subreg_unpromoted_mode(), targetm, TREE_CODE, TREE_OPERAND, TREE_TYPE, TYPE_ADDR_SPACE, TYPE_MODE, TYPE_SIZE, expand_operand::value, VAR_P, write_complex_part(), and XEXP.

Referenced by assign_parm_setup_reg(), expand_ACCESS_WITH_SIZE(), expand_asm_stmt(), expand_bitquery(), expand_call_stmt(), expand_DEFERRED_INIT(), expand_expr_real_1(), expand_gimple_stmt_1(), expand_LAUNDER(), store_constructor(), and ubsan_encode_value().

◆ expand_cmpstrn_or_cmpmem()

rtx expand_cmpstrn_or_cmpmem ( insn_code icode,
rtx target,
rtx arg1_rtx,
rtx arg2_rtx,
tree arg3_type,
rtx arg3_rtx,
HOST_WIDE_INT align )
Try to expand cmpstrn or cmpmem operation ICODE with the given operands.
ARG3_TYPE is the type of ARG3_RTX.  Return the result rtx on success,
otherwise return null.   

References create_convert_operand_from(), create_fixed_operand(), create_integer_operand(), create_output_operand(), HARD_REGISTER_P, insn_data, maybe_expand_insn(), insn_operand_data::mode, NULL_RTX, insn_data_d::operand, REG_P, expand_operand::target, TYPE_MODE, TYPE_UNSIGNED, and expand_operand::value.

Referenced by emit_block_cmp_via_cmpmem(), expand_builtin_strcmp(), and expand_builtin_strncmp().

◆ expand_cond_expr_using_cmove()

static rtx expand_cond_expr_using_cmove ( tree treeop0,
tree treeop1,
tree treeop2 )
static

◆ expand_constructor()

static rtx expand_constructor ( tree exp,
rtx target,
enum expand_modifier modifier,
bool avoid_temp_mem )
static
Generate code for computing CONSTRUCTOR EXP.
An rtx for the computed value is returned.  If AVOID_TEMP_MEM
is TRUE, instead of creating a temporary variable in memory,
NULL is returned and the caller needs to handle it differently.   

References all_zeros_p(), assign_temp(), BLOCK_OP_NORMAL, can_move_by_pieces(), clear_storage(), exp(), EXPAND_CONST_ADDRESS, expand_expr_constant(), EXPAND_INITIALIZER, EXPAND_STACK_PARM, EXPAND_SUM, expr_size(), GET_CODE, int_expr_size(), MEM_VOLATILE_P, expand_operand::mode, mostly_zeros_p(), NULL_RTX, safe_from_p(), store_constructor(), expand_operand::target, TREE_ADDRESSABLE, TREE_CONSTANT, tree_fits_uhwi_p(), TREE_STATIC, tree_to_uhwi(), TREE_TYPE, TYPE_ALIGN, TYPE_MODE, TYPE_SIZE_UNIT, and validize_mem().

Referenced by expand_expr_real_1().

◆ expand_crc_table_based()

void expand_crc_table_based ( rtx op0,
rtx op1,
rtx op2,
rtx op3,
machine_mode data_mode )
Generate table-based CRC code for the given CRC, INPUT_DATA and the
POLYNOMIAL (without leading 1).

CRC is OP1, data is OP2 and the polynomial is OP3.
This must generate a CRC table and assembly for the following code,
where crc_bit_size and data_bit_size may be 8, 16, 32, 64:
uint_crc_bit_size_t
crc_crc_bit_size (uint_crc_bit_size_t crc_init,
             uint_data_bit_size_t data, size_t size)
{
  uint_crc_bit_size_t crc = crc_init;
  for (int i = 0; i < data_bit_size / 8; i++)
    crc = (crc << 8) ^ crc_table[(crc >> (crc_bit_size - 8))
                            ^ ((data >> (data_bit_size - (i + 1) * 8))
                            & 0xFF)];
  return crc;
}   

References calculate_table_based_CRC(), CONST_INT_P, convert_move(), gcc_assert, gen_reg_rtx(), and GET_MODE.

Referenced by expand_builtin_crc_table_based(), and expand_crc_optab_fn().

◆ expand_expr_addr_expr()

static rtx expand_expr_addr_expr ( tree exp,
rtx target,
machine_mode tmode,
enum expand_modifier modifier )
static
A subroutine of expand_expr.  Evaluate EXP, which is an ADDR_EXPR.
The TARGET, TMODE and MODIFIER arguments are as for expand_expr.   

References ADDR_SPACE_GENERIC, convert_memory_address_addr_space(), exp(), expand_expr_addr_expr_1(), GET_MODE, POINTER_TYPE_P, ptr_mode, expand_operand::target, targetm, TREE_OPERAND, TREE_TYPE, TYPE_ADDR_SPACE, and TYPE_MODE.

Referenced by expand_expr_real_1().

◆ expand_expr_addr_expr_1()

◆ expand_expr_constant()

static rtx expand_expr_constant ( tree exp,
int defer,
enum expand_modifier modifier )
static
Return a MEM that contains constant EXP.  DEFER is as for
output_constant_def and MODIFIER is as for expand_expr.   

References exp(), EXPAND_INITIALIZER, output_constant_def(), and use_anchored_address().

Referenced by expand_constructor(), expand_expr_addr_expr_1(), and expand_expr_real_1().

◆ expand_expr_divmod()

static rtx expand_expr_divmod ( tree_code code,
machine_mode mode,
tree treeop0,
tree treeop1,
rtx op0,
rtx op1,
rtx target,
int unsignedp )
static
Helper function of expand_expr_2, expand a division or modulo.
op0 and op1 should be already expanded treeop0 and treeop1, using
expand_operands.   

References do_pending_stack_adjust(), dump_file, dump_flags, emit_insn(), end_sequence(), expand_divmod(), get_insns(), get_range_pos_neg(), expand_operand::mode, optimize_insn_for_speed_p(), SCALAR_INT_MODE_P, seq_cost(), start_sequence(), expand_operand::target, and TDF_DETAILS.

Referenced by expand_expr_real_2().

◆ expand_expr_real()

rtx expand_expr_real ( tree exp,
rtx target,
machine_mode tmode,
enum expand_modifier modifier,
rtx * alt_rtl,
bool inner_reference_p )
expand_expr: generate code for computing expression EXP.
An rtx for the computed value is returned.  The value is never null.
In the case of a void EXP, const0_rtx is returned.

The value may be stored in TARGET if TARGET is nonzero.
TARGET is just a suggestion; callers must assume that
the rtx returned may not be the same as TARGET.

If TARGET is CONST0_RTX, it means that the value will be ignored.

If TMODE is not VOIDmode, it suggests generating the
result in mode TMODE.  But this is done only when convenient.
Otherwise, TMODE is ignored and the value is generated in its natural mode.
TMODE is just a suggestion; callers must assume that
the rtx returned may not have mode TMODE.

Note that TARGET may have neither TMODE nor MODE.  In that case, it
probably will not be used.

If MODIFIER is EXPAND_SUM then when EXP is an addition
we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
or a nest of (PLUS ...) and (MINUS ...) where the terms are
products as above, or REG or MEM, or constant.
Ordinarily in such cases we would output mul or add instructions
and then return a pseudo reg containing the sum.

EXPAND_INITIALIZER is much like EXPAND_SUM except that
it also marks a label as absolutely required (it can't be dead).
It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
This is used for outputting expressions used in initializers.

EXPAND_CONST_ADDRESS says that it is okay to return a MEM
with a constant address even if that address is not normally legitimate.
EXPAND_INITIALIZER and EXPAND_SUM also have this effect.

EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
a call parameter.  Such targets require special care as we haven't yet
marked TARGET so that it's safe from being trashed by libcalls.  We
don't want to use TARGET for anything but the final result;
intermediate values must go elsewhere.  Additionally, calls to
emit_block_move will be flagged with BLOCK_OP_CALL_PARM.

If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
DECL_RTL of the VAR_DECL.  *ALT_RTL is also set if EXP is a
COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
recursively.
If the result can be stored at TARGET, and ALT_RTL is non-NULL,
then *ALT_RTL is set to TARGET (before legitimization).

If INNER_REFERENCE_P is true, we are expanding an inner reference.
In this case, we don't adjust a returned MEM rtx that wouldn't be
sufficiently aligned for its mode; instead, it's up to the caller
to deal with it afterwards.  This is used to make sure that unaligned
base objects for which out-of-bounds accesses are supported, for
example record types with trailing arrays, aren't realigned behind
the back of the caller.
The normal operating mode is to pass FALSE for this parameter.   

References CONST0_RTX, const0_rtx, exp(), expand_expr_real_1(), expand_operand::target, TREE_CODE, and TREE_TYPE.

Referenced by expand_expr(), expand_expr_real_1(), expand_expr_real_gassign(), expand_normal(), and store_expr().

◆ expand_expr_real_1()

rtx expand_expr_real_1 ( tree exp,
rtx target,
machine_mode tmode,
enum expand_modifier modifier,
rtx * alt_rtl,
bool inner_reference_p )

References add_to_hard_reg_set(), addr_for_mem_ref(), adjust_address, adjust_address_nv, AGGREGATE_TYPE_P, array_ref_low_bound(), as_a(), assign_stack_temp(), assign_stack_temp_for_type(), assign_temp(), bits_to_bytes_round_down, bits_to_bytes_round_up, bitsize_int, BLOCK_OP_CALL_PARM, BLOCK_OP_NORMAL, build1(), build2(), build3(), build_constructor(), build_decl(), build_int_cst(), build_zero_cst(), BUILT_IN_FRONTEND, CALL_EXPR_VA_ARG_PACK, CALL_FROM_THUNK_P, change_address(), separate_ops::code, compare_tree_int(), COMPLETE_OR_UNBOUND_ARRAY_TYPE_P, COMPLETE_TYPE_P, COMPLEX_MODE_P, const0_rtx, const_double_from_real_value(), CONST_FIXED_FROM_FIXED_VALUE, const_vector_from_tree(), CONSTANT_P, CONSTRUCTOR_APPEND_ELT, CONSTRUCTOR_ELTS, CONVERT_EXPR_CODE_P, convert_modes(), convert_move(), convert_to_mode(), convert_wider_int_to_float(), copy_rtx(), copy_to_reg(), crtl, ctor_for_folding(), curr_insn_location(), current_function_decl, currently_expanding_to_rtl, DECL_ALIGN, DECL_ARTIFICIAL, DECL_ATTRIBUTES, DECL_BIT_FIELD_TYPE, DECL_BUILT_IN_CLASS, DECL_EXTERNAL, decl_function_context(), DECL_IGNORED_P, DECL_INITIAL, DECL_MODE, lang_hooks::decl_printable_name, DECL_RTL, DECL_RTL_SET_P, DECL_SIZE, direct_load, do_pending_stack_adjust(), opt_mode< T >::else_blk(), emit_block_move(), emit_insn(), emit_label(), emit_move_insn(), error(), error_mark_node, opt_mode< T >::exists(), exp(), expand_assignment(), expand_builtin(), expand_call(), EXPAND_CONST_ADDRESS, expand_constructor(), expand_expr(), expand_expr_addr_expr(), expand_expr_addr_expr_1(), expand_expr_constant(), expand_expr_real(), expand_expr_real_1(), expand_expr_real_2(), expand_expr_real_gassign(), EXPAND_INITIALIZER, expand_internal_call(), EXPAND_MEMORY, expand_misaligned_mem_ref(), EXPAND_NORMAL, expand_normal(), expand_shift(), EXPAND_STACK_PARM, EXPAND_SUM, EXPAND_WRITE, EXPR_LOCATION, EXTEND_BITINT, extract_bit_field(), flip_storage_order(), fndecl_built_in_p(), fold(), fold_convert_loc(), fold_read_from_constant_string(), fold_unary_loc(), FOR_EACH_CONSTRUCTOR_ELT, FOR_EACH_CONSTRUCTOR_VALUE, force_const_mem(), force_operand(), force_reg(), poly_int< N, C >::force_shwi(), g, gcc_assert, gcc_checking_assert, gcc_unreachable, gen_int_mode(), gen_label_rtx(), gen_lowpart, gen_lowpart_common(), gen_lowpart_SUBREG(), gen_raw_REG(), gen_reg_rtx(), gen_rtx_MEM(), get_address_mode(), get_callee_fndecl(), GET_CODE, get_def_for_expr(), get_gimple_for_ssa_name(), get_inner_reference(), GET_MODE, GET_MODE_ALIGNMENT, GET_MODE_BITSIZE(), GET_MODE_CLASS, GET_MODE_INNER, GET_MODE_PRECISION(), GET_MODE_SIZE(), get_object_alignment(), get_rtx_for_ssa_name(), get_subtarget(), gimple_assign_rhs1(), gimple_assign_rhs2(), gimple_call_fntype(), gimple_call_internal_p(), gimple_call_set_lhs(), handled_component_p(), HARD_REGISTER_P, highest_pow2_factor(), i, identifier_to_locale(), immed_wide_int_const(), immediate_const_ctor_p(), int_expr_size(), int_mode_for_size(), int_size_in_bytes(), integer_onep(), integer_zerop(), INTEGRAL_TYPE_P, is_aligning_offset(), is_gimple_assign(), is_int_mode(), jumpif(), jumpifnot(), known_eq, known_ge, known_le, known_lt, LABEL_REF_NONLOCAL_P, label_rtx(), LAST_VIRTUAL_REGISTER, layout_decl(), bitint_info::limb_mode, limb_prec, separate_ops::location, lookup_attribute(), make_tree(), mark_reg_pointer(), MAX_FIXED_MODE_SIZE, maybe_gt, MEM_ADDR_SPACE, MEM_ALIGN, MEM_P, mem_ref_offset(), mem_ref_refers_to_non_mem_p(), MEM_VOLATILE_P, memory_address_addr_space(), memory_address_addr_space_p(), expand_operand::mode, 
mode_size, NULL, NULL_RTX, NULL_TREE, num_trailing_bits, OBJ_TYPE_REF_EXPR, offset, offset_address(), separate_ops::op0, separate_ops::op1, separate_ops::op2, optab_handler(), poly_int_cst_value(), poly_int_tree_p(), promote_decl_mode(), promote_function_mode(), promote_ssa_mode(), PUT_MODE(), read_complex_part(), REF_REVERSE_STORAGE_ORDER, REG_P, REGNO, replace_equiv_address(), safe_is_a(), SAVE_EXPR_RESOLVED_P, SCALAR_FLOAT_MODE_P, SCALAR_INT_MODE_P, SCALAR_INT_TYPE_MODE, SCOPE_FILE_SCOPE_P, SET_DECL_RTL, set_mem_addr_space(), set_mem_align(), set_mem_attributes(), set_mem_expr(), set_mem_size(), simplify_gen_binary(), size_diffop_loc(), size_int, sizetype, ssa_name, SSA_NAME_DEF_STMT, SSA_NAME_IS_DEFAULT_DEF, SSA_NAME_VAR, stmt_is_replaceable_p(), store_constructor(), store_expr(), SUBREG_PROMOTED_SET, SUBREG_PROMOTED_VAR_P, targetm, tcc_binary, tcc_comparison, tcc_unary, wi::to_poly_offset(), wi::to_wide(), TREE_ADDRESSABLE, TREE_CODE, TREE_CODE_CLASS, TREE_CODE_LENGTH, tree_fits_uhwi_p(), TREE_FIXED_CST, TREE_IMAGPART, tree_int_cst_equal(), TREE_INT_CST_LOW, TREE_OPERAND, tree_output_constant_def(), TREE_READONLY, TREE_REAL_CST, TREE_REALPART, TREE_SIDE_EFFECTS, TREE_STATIC, TREE_STRING_LENGTH, TREE_STRING_POINTER, TREE_THIS_VOLATILE, TREE_TYPE, TREE_USED, TREE_VALUE, separate_ops::type, type(), TYPE_ADDR_SPACE, TYPE_ALIGN, lang_hooks_for_types::type_for_mode, type_has_mode_precision_p(), TYPE_MODE, TYPE_PRECISION, TYPE_REVERSE_STORAGE_ORDER, TYPE_SIZE, TYPE_UNSIGNED, lang_hooks::types, profile_probability::uninitialized(), use_anchored_address(), validize_mem(), VAR_P, vec_alloc(), VECTOR_CST_ELT, VECTOR_CST_NELTS, VECTOR_MODE_P, VECTOR_TYPE_P, VL_EXP_CLASS_P, warning_at(), and XEXP.

Referenced by expand_expr_real(), and expand_expr_real_1().

◆ expand_expr_real_2()

rtx expand_expr_real_2 ( const_sepops ops,
rtx target,
machine_mode tmode,
enum expand_modifier modifier )

References add_cost(), adjust_address, adjust_address_nv, ALL_FIXED_POINT_MODE_P, arg_pointer_rtx, assign_temp(), BITS_PER_WORD, build_int_cst(), can_conditionally_move_p(), CASE_CONVERT, choose_mult_variant(), separate_ops::code, const0_rtx, const1_rtx, CONST_INT_P, CONSTANT_P, constm1_rtx, CONVERT_EXPR_CODE_P, convert_modes(), convert_move(), convert_to_mode(), copy_rtx(), copy_to_mode_reg(), algorithm::cost, create_convert_operand_from(), create_input_operand(), create_output_operand(), DECL_RTL, do_compare_rtx_and_jump(), do_pending_stack_adjust(), do_store_flag(), emit_barrier(), emit_conditional_move(), emit_insn(), emit_jump_insn(), emit_label(), emit_move_insn(), end_sequence(), error_mark_node, expand_abs(), expand_and(), expand_binop(), expand_cond_expr_using_cmove(), expand_expr(), expand_expr_divmod(), expand_fix(), expand_fixed_convert(), expand_float(), EXPAND_INITIALIZER, expand_insn(), expand_mult(), expand_mult_highpart(), expand_mult_highpart_adjust(), EXPAND_NORMAL, expand_normal(), expand_operands(), EXPAND_STACK_PARM, EXPAND_SUM, expand_ternary_op(), expand_unop(), expand_variable_shift(), expand_vec_perm_const(), expand_vec_perm_var(), expand_vec_series_expr(), expand_vector_broadcast(), expand_widen_pattern_expr(), expand_widening_mult(), find_widening_optab_handler, fold_build1, fold_convert_loc(), force_lowpart_subreg(), force_operand(), force_reg(), frame_pointer_rtx, gcc_assert, gcc_unreachable, gen_highpart(), GEN_INT, gen_int_mode(), gen_label_rtx(), gen_reg_rtx(), GET_CODE, get_def_for_expr(), get_gimple_rhs_class(), get_insns(), GET_MODE, GET_MODE_2XWIDER_MODE(), GET_MODE_BITSIZE(), GET_MODE_CLASS, GET_MODE_INNER, GET_MODE_PRECISION(), GET_MODE_SIZE(), get_subtarget(), gimple_assign_rhs1(), gimple_assign_rhs2(), gimple_assign_rhs_code(), GIMPLE_BINARY_RHS, GIMPLE_TERNARY_RHS, GIMPLE_UNARY_RHS, have_insn_for(), HWI_COMPUTABLE_MODE_P(), immed_wide_int_const(), int_fits_type_p(), INTEGRAL_TYPE_P, INTVAL, is_gimple_assign(), jumpifnot(), jumpifnot_1(), separate_ops::location, lowpart_subreg(), wi::mask(), MEM_P, MEM_VOLATILE_P, expand_operand::mode, mul_cost(), neg_cost(), negate_rtx(), NO_DEFER_POP, NULL, NULL_RTX, OK_DEFER_POP, separate_ops::op0, separate_ops::op1, separate_ops::op2, operand_equal_p(), algorithm::ops, optab_default, optab_for_tree_code(), optab_handler(), OPTAB_LIB_WIDEN, OPTAB_WIDEN, optimize_insn_for_speed_p(), plus_constant(), POINTER_TYPE_P, ptr_mode, really_constant_p(), REDUCE_BIT_FIELD, reg_overlap_mentioned_p(), REG_P, REGNO, safe_from_p(), SCALAR_INT_MODE_P, SCALAR_INT_TYPE_MODE, seq_cost(), set_mem_attributes(), wi::shwi(), simplify_gen_binary(), simplify_gen_subreg(), sizetype, SSA_NAME_DEF_STMT, ssizetype, stack_pointer_rtx, start_sequence(), store_bit_field(), store_expr(), store_field(), subreg_highpart_offset(), expand_operand::target, targetm, TREE_ADDRESSABLE, TREE_CODE, TREE_CONSTANT, tree_fits_shwi_p(), TREE_INT_CST_LOW, TREE_OPERAND, tree_to_poly_uint64(), tree_to_shwi(), tree_to_uhwi(), tree_to_vec_perm_builder(), TREE_TYPE, separate_ops::type, type(), TYPE_ADDR_SPACE, type_has_mode_precision_p(), TYPE_MODE, TYPE_PRECISION, TYPE_REVERSE_STORAGE_ORDER, TYPE_SATURATING, TYPE_SIZE, TYPE_UNSIGNED, TYPE_VECTOR_SUBPARTS(), profile_probability::uninitialized(), expand_operand::value, VAR_P, VECTOR_BOOLEAN_TYPE_P, VECTOR_MODE_P, VECTOR_TYPE_P, void_type_node, VOID_TYPE_P, word_mode, write_complex_part(), and XEXP.

Referenced by expand_arith_overflow(), expand_DIVMOD(), expand_expr_real_1(), expand_expr_real_gassign(), expand_mul_overflow(), expand_vector_ubsan_overflow(), maybe_optimize_mod_cmp(), and maybe_optimize_pow2p_mod_cmp().

◆ expand_expr_real_gassign()

◆ expand_misaligned_mem_ref()

static rtx expand_misaligned_mem_ref ( rtx temp,
machine_mode mode,
int unsignedp,
unsigned int align,
rtx target,
rtx * alt_rtl )
static
A helper function for expand_expr_real_2 to be used with a
misaligned mem_ref TEMP.  Assume an unsigned type if UNSIGNEDP
is nonzero, with alignment ALIGN in bits.
Store the value at TARGET if possible (if TARGET is nonzero).
Regardless of TARGET, we return the rtx for where the value is placed.
If the result can be stored at TARGET, and ALT_RTL is non-NULL,
then *ALT_RTL is set to TARGET (before legitimization).   

References create_fixed_operand(), create_output_operand(), expand_insn(), extract_bit_field(), GET_MODE_BITSIZE(), expand_operand::mode, NULL_RTX, optab_handler(), expand_operand::target, targetm, and expand_operand::value.

Referenced by expand_expr_real_1().

◆ expand_operands()

void expand_operands ( tree exp0,
tree exp1,
rtx target,
rtx * op0,
rtx * op1,
enum expand_modifier modifier )
Subroutine of expand_expr.  Expand the two operands of a binary
expression EXP0 and EXP1 placing the results in OP0 and OP1.
The value may be stored in TARGET if TARGET is nonzero.  The
MODIFIER argument is as documented by expand_expr.   

References copy_rtx(), expand_expr(), NULL_RTX, operand_equal_p(), safe_from_p(), and expand_operand::target.

Referenced by do_store_flag(), expand_cond_expr_using_cmove(), and expand_expr_real_2().

◆ expand_reversed_crc_table_based()

void expand_reversed_crc_table_based ( rtx op0,
rtx op1,
rtx op2,
rtx op3,
machine_mode data_mode,
void(* gen_reflecting_code )(rtx *op) )
Generate table-based reversed CRC code for the given CRC, INPUT_DATA and
the POLYNOMIAL (without leading 1).

CRC is OP1, data is OP2 and the polynomial is OP3.
This must generate a CRC table and assembly for the following code,
where crc_bit_size and data_bit_size may be 8, 16, 32, 64:
uint_crc_bit_size_t
crc_crc_bit_size (uint_crc_bit_size_t crc_init,
                   uint_data_bit_size_t data, size_t size)
{
  reflect (crc_init)
  uint_crc_bit_size_t crc = crc_init;
  reflect (data);
  for (int i = 0; i < data_bit_size / 8; i++)
    crc = (crc << 8) ^ crc_table[(crc >> (crc_bit_size - 8))
                  ^ ((data >> (data_bit_size - (i + 1) * 8)) & 0xFF)];
  reflect (crc);
  return crc;
}   

References calculate_table_based_CRC(), CONST_INT_P, convert_move(), gcc_assert, gen_reg_rtx(), and GET_MODE.

Referenced by expand_builtin_crc_table_based(), and expand_crc_optab_fn().

◆ expand_single_bit_test()

static rtx expand_single_bit_test ( location_t loc,
enum tree_code code,
tree inner,
int bitnum,
tree result_type,
rtx target,
machine_mode mode )
static

◆ expr_size()

◆ fields_length()

static int fields_length ( const_tree type)
static
Returns the number of FIELD_DECLs in TYPE.   

References count, DECL_CHAIN, TREE_CODE, and TYPE_FIELDS.

Referenced by store_constructor().

◆ find_args_size_adjust()

poly_int64 find_args_size_adjust ( rtx_insn * insn)
A utility routine used here, in reload, and in try_split.  The insns
after PREV up to and including LAST are known to adjust the stack,
with a final value of END_ARGS_SIZE.  Iterate backward from LAST
placing notes as appropriate.  PREV may be NULL, indicating the
entire insn sequence prior to LAST should be scanned.

The set of allowed stack pointer modifications is small:
  (1) One or more auto-inc style memory references (aka pushes),
  (2) One or more addition/subtraction with the SP as destination,
  (3) A single move insn with the SP as destination,
  (4) A call_pop insn,
  (5) Noreturn call insns if !ACCUMULATE_OUTGOING_ARGS.

Insns in the sequence that do not modify the SP are ignored,
except for noreturn calls.

The return value is the amount of adjustment that can be trivially
verified, via immediate operand or auto-inc.  If the adjustment
cannot be trivially extracted, the return value is HOST_WIDE_INT_MIN.   

References CALL_P, gcc_assert, gcc_checking_assert, gcc_unreachable, GET_CODE, GET_MODE, GET_MODE_SIZE(), HOST_WIDE_INT_MIN, i, mem_autoinc_base(), NULL, offset, PATTERN(), REG_P, REGNO, rtx_equal_p(), rtx_to_poly_int64(), SCALAR_INT_MODE_P, SET, SET_DEST, SET_SRC, single_set(), stack_pointer_rtx, strip_offset(), XEXP, XVECEXP, and XVECLEN.

Referenced by fixup_args_size_notes(), and old_insns_match_p().

◆ fixup_args_size_notes()

◆ flexible_array_member_p()

static bool flexible_array_member_p ( const_tree f,
const_tree type )
static
Return true if field F of structure TYPE is a flexible array.   
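
For reference, a minimal C illustration (not taken from GCC) of the source-level
construct this predicate detects:

  /* The trailing incomplete array is a flexible array member: it must be
     the last field of the structure and contributes no bytes to
     sizeof (struct packet).  */
  struct packet
  {
    int len;
    unsigned char data[];   /* flexible array member */
  };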

References DECL_CHAIN, int_size_in_bytes(), integer_zerop(), NULL, TREE_CODE, TREE_TYPE, TYPE_DOMAIN, TYPE_MAX_VALUE, and TYPE_MIN_VALUE.

Referenced by count_type_elements().

◆ force_operand()

rtx force_operand ( rtx value,
rtx target )
Given an rtx VALUE that may contain additions and multiplications, return
an equivalent value that just refers to a register, memory, or constant.
This is done by generating instructions to perform the arithmetic and
returning a pseudo-register containing the value.

The returned value may be a REG, SUBREG, MEM or constant.   

References ARITHMETIC_P, CONST_INT_P, CONSTANT_P, convert_move(), emit_move_insn(), expand_divmod(), expand_fix(), expand_float(), expand_mult(), expand_simple_binop(), expand_simple_unop(), FLOAT_MODE_P, force_operand(), force_reg(), gen_reg_rtx(), GET_CODE, GET_MODE, get_subtarget(), INTEGRAL_MODE_P, MEM_P, negate_rtx(), NULL_RTX, OPTAB_LIB_WIDEN, paradoxical_subreg_p(), pic_offset_table_rtx, REG_P, simplify_gen_subreg(), SUBREG_BYTE, SUBREG_REG, expand_operand::target, UNARY_P, expand_operand::value, VIRTUAL_REGISTER_P, and XEXP.

Referenced by add_test(), allocate_dynamic_stack_space(), anti_adjust_stack_and_probe(), combine_var_copies_in_loop_exit(), compare_and_jump_seq(), compute_stack_clash_protection_loop_data(), copy_to_mode_reg(), copy_to_reg(), doloop_modify(), emit_block_cmp_via_loop(), emit_block_move_via_loop(), emit_block_move_via_oriented_loop(), emit_library_call_value_1(), expand_assignment(), expand_builtin_adjust_descriptor(), expand_builtin_apply_args_1(), expand_builtin_memory_copy_args(), expand_builtin_memset_args(), expand_builtin_setjmp_setup(), expand_builtin_stpcpy_1(), expand_builtin_strncpy(), expand_builtin_strub_leave(), expand_call(), expand_divmod(), expand_expr_addr_expr_1(), expand_expr_real_1(), expand_expr_real_2(), expand_gimple_stmt_1(), expand_movstr(), expand_mult_const(), expand_mult_highpart_adjust(), expmed_mult_highpart(), force_operand(), force_reg(), get_dynamic_stack_size(), insert_base_initialization(), instantiate_virtual_regs_in_insn(), memory_address_addr_space(), probe_stack_range(), round_push(), rtl_lv_add_condition_to_bb(), split_iv(), store_expr(), try_store_by_multiple_pieces(), and unroll_loop_runtime_iterations().

◆ gen_common_operation_to_reflect()

void gen_common_operation_to_reflect ( rtx * op,
unsigned HOST_WIDE_INT and1_value,
unsigned HOST_WIDE_INT and2_value,
unsigned shift_val )
Generate the common operation for reflecting values:
*OP = (*OP & AND1_VALUE) << SHIFT_VAL | (*OP & AND2_VALUE) >> SHIFT_VAL;   
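
As a standalone C sketch (illustrative only, not GCC code; the helper names
reflect_step and reflect8 are made up), applying this masked shift-and-OR step
three times with halving shifts bit-reverses an 8-bit value; the
reflect_*_bit_value routines documented below are built from repeated
applications of this operation:

  #include <stdint.h>

  /* One application of the documented operation.  */
  static uint8_t
  reflect_step (uint8_t v, uint8_t and1, uint8_t and2, unsigned shift)
  {
    return (uint8_t) (((v & and1) << shift) | ((v & and2) >> shift));
  }

  /* Reflect (bit-reverse) an 8-bit value: swap nibbles, then bit pairs,
     then adjacent bits.  reflect8 (0x01) == 0x80, reflect8 (0xE0) == 0x07.  */
  static uint8_t
  reflect8 (uint8_t v)
  {
    v = reflect_step (v, 0x0F, 0xF0, 4);
    v = reflect_step (v, 0x33, 0xCC, 2);
    v = reflect_step (v, 0x55, 0xAA, 1);
    return v;
  }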

References expand_and(), expand_binop(), expand_shift(), gen_int_mode(), GET_MODE, NULL_RTX, and OPTAB_LIB_WIDEN.

Referenced by reflect_16_bit_value(), reflect_32_bit_value(), reflect_64_bit_value(), and reflect_8_bit_value().

◆ gen_group_rtx()

rtx gen_group_rtx ( rtx orig)
Generate a PARALLEL rtx for a new non-consecutive group of registers from
ORIG, where ORIG is a non-consecutive group of registers represented by
a PARALLEL.  The clone is identical to the original except in that the
original set of registers is replaced by a new set of pseudo registers.
The new set has the same modes as the original set.   

References gcc_assert, gen_reg_rtx(), gen_rtvec_v(), gen_rtx_EXPR_LIST(), GET_CODE, GET_MODE, i, expand_operand::mode, offset, XEXP, XVECEXP, and XVECLEN.

Referenced by expand_function_start().

◆ gen_move_insn()

◆ generate_crc_table()

rtx generate_crc_table ( unsigned HOST_WIDE_INT polynom,
unsigned short crc_bits )
Generate CRC lookup table by calculating CRC for all possible
8-bit data values.  The table is stored with a specific name in the read-only
static data section.
POLYNOM is the polynomial used to calculate the CRC table's elements.
CRC_BITS is the size of the CRC; it may be 8, 16, ... .   
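
A self-contained C sketch of equivalent table contents (GCC emits the table as
static read-only data rather than computing it at run time; make_crc_table is
an illustrative name and the width is capped at 32 bits here):

  #include <stdint.h>

  /* Fill TABLE[0..255] with the MSB-first CRC of every byte value for a
     CRC of width CRC_BITS (8 <= CRC_BITS <= 32) and polynomial POLYNOM
     given without its leading 1, matching the (crc << 8) ^ table[...]
     update loop shown for expand_crc_table_based above.  */
  static void
  make_crc_table (uint32_t table[256], uint32_t polynom, unsigned crc_bits)
  {
    uint32_t mask = (crc_bits == 32 ? 0xFFFFFFFFu
                     : ((uint32_t) 1 << crc_bits) - 1);
    for (unsigned b = 0; b < 256; b++)
      {
        uint32_t reg = (uint32_t) b << (crc_bits - 8);
        for (int bit = 0; bit < 8; bit++)
          {
            int msb_set = (reg >> (crc_bits - 1)) & 1;
            reg = (reg << 1) & mask;
            if (msb_set)
              reg ^= polynom;
          }
        table[b] = reg;
      }
  }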

References assemble_crc_table(), gcc_assert, get_identifier(), HOST_WIDE_INT_PRINT_HEX, IDENTIFIER_POINTER, and maybe_get_identifier().

Referenced by calculate_table_based_CRC().

◆ generate_reflecting_code_standard()

void generate_reflecting_code_standard ( rtx * op)
Generate an instruction sequence which reflects (bit-reverses) the value of OP
using shift, AND and OR operations.  OP's mode may be narrower than word_mode.   

References gcc_assert, GET_MODE, GET_MODE_BITSIZE(), reflect_16_bit_value(), reflect_32_bit_value(), reflect_64_bit_value(), and reflect_8_bit_value().

Referenced by expand_builtin_crc_table_based(), and expand_crc_optab_fn().

◆ get_bit_range()

void get_bit_range ( poly_uint64 * bitstart,
poly_uint64 * bitend,
tree exp,
poly_int64 * bitpos,
tree * offset )
In the C++ memory model, consecutive bit fields in a structure are
considered one memory location.

Given a COMPONENT_REF EXP at position (BITPOS, OFFSET), this function
returns the bit range of consecutive bits in which this COMPONENT_REF
belongs.  The values are returned in *BITSTART and *BITEND.  *BITPOS
and *OFFSET may be adjusted in the process.

If the access does not need to be restricted, 0 is returned in both
*BITSTART and *BITEND.   
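
A small source-level illustration (plain C, not GCC code) of the grouping this
describes:

  /* Under the C++ memory model, a and b below form a single memory
     location, so a store to x.b may not touch c; get_bit_range reports
     the bit extent of the a/b group for a COMPONENT_REF such as x.b.
     A non-bit-field member (here c) or a zero-width bit-field starts a
     new memory location.  */
  struct s
  {
    unsigned int a : 3;
    unsigned int b : 5;
    unsigned char c;
  };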

References DECL_BIT_FIELD_REPRESENTATIVE, DECL_FIELD_BIT_OFFSET, DECL_FIELD_OFFSET, DECL_SIZE, exp(), gcc_assert, get_inner_reference(), handled_component_p(), maybe_gt, NULL_TREE, offset, poly_int_tree_p(), size_binop, size_int, TREE_CODE, TREE_OPERAND, tree_to_poly_uint64(), and tree_to_uhwi().

Referenced by expand_assignment(), and optimize_bit_field_compare().

◆ get_def_for_expr()

static gimple * get_def_for_expr ( tree name,
enum tree_code code )
static
Return the defining gimple statement for SSA_NAME NAME if it is an
assignment and the code of the expression on the RHS is CODE.  Return
NULL otherwise.   

References get_gimple_for_ssa_name(), gimple_assign_rhs_code(), is_gimple_assign(), NULL, and TREE_CODE.

Referenced by do_store_flag(), expand_expr_real_1(), expand_expr_real_2(), expand_single_bit_test(), maybe_optimize_mod_cmp(), maybe_optimize_pow2p_mod_cmp(), maybe_optimize_sub_cmp_0(), and store_field().

◆ get_def_for_expr_class()

static gimple * get_def_for_expr_class ( tree name,
enum tree_code_class tclass )
static
Return the defining gimple statement for SSA_NAME NAME if it is an
assignment and the class of the expression on the RHS is CLASS.  Return
NULL otherwise.   

References get_gimple_for_ssa_name(), gimple_assign_rhs_code(), is_gimple_assign(), NULL, TREE_CODE, and TREE_CODE_CLASS.

Referenced by expand_cond_expr_using_cmove().

◆ get_inner_reference()

tree get_inner_reference ( tree exp,
poly_int64 * pbitsize,
poly_int64 * pbitpos,
tree * poffset,
machine_mode * pmode,
int * punsignedp,
int * preversep,
int * pvolatilep )
Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
codes and find the ultimate containing object, which we return.

We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
bit position, *PUNSIGNEDP to the signedness and *PREVERSEP to the
storage order of the field.
If the position of the field is variable, we store a tree
giving the variable offset (in units) in *POFFSET.
This offset is in addition to the bit position.
If the position is not variable, we store 0 in *POFFSET.

If any of the extraction expressions is volatile,
we store 1 in *PVOLATILEP.  Otherwise we don't change that.

If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
Otherwise, it is a mode that can be used to access the field.

If the field describes a variable-sized object, *PMODE is set to
BLKmode and *PBITSIZE is set to -1.  An access cannot be made in
this case, but the address of the object can be found.   
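
As a hedged source-level example (not GCC code; the bit positions assume a
typical layout where int is 32 bits):

  struct t
  {
    int pad;
    int bf : 9;
  };

  /* For the reference tp->bf, get_inner_reference returns *tp as the
     containing object with *PBITSIZE = 9 and *PBITPOS = 32 (the bit-field
     starts right after pad), stores 0 in *POFFSET since the position is
     constant, and sets *PMODE to VOIDmode since bf is a bit-field.  For an
     array reference such as a[i] with variable i, *POFFSET instead
     receives the tree for the variable byte offset.  */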

References array_ref_element_size(), array_ref_low_bound(), bits_to_bytes_round_down, build_int_cst(), component_ref_field_offset(), DECL_BIT_FIELD, DECL_BIT_FIELD_TYPE, DECL_FIELD_BIT_OFFSET, DECL_MODE, DECL_SIZE, DECL_UNSIGNED, exp(), fold_build2, fold_convert, poly_int< N, C >::force_shwi(), GET_MODE_BITSIZE(), integer_zerop(), INTEGRAL_TYPE_P, mem_ref_offset(), expand_operand::mode, NULL_TREE, num_trailing_bits, offset, poly_int_tree_p(), reverse_storage_order_for_component_p(), wi::sext(), size_binop, size_zero_node, sizetype, wi::to_poly_offset(), poly_int< N, C >::to_shwi(), TREE_CODE, tree_int_cst_equal(), TREE_OPERAND, TREE_THIS_VOLATILE, TREE_TYPE, TYPE_MODE, TYPE_MODE_RAW, TYPE_PRECISION, TYPE_SIZE, TYPE_UNSIGNED, VECTOR_MODE_P, and VECTOR_TYPE_P.

Referenced by decode_field_reference(), delegitimize_mem_from_attrs(), dr_analyze_innermost(), expand_assignment(), expand_debug_expr(), expand_expr_addr_expr_1(), expand_expr_real_1(), extract_base_bit_offset(), fold_comparison(), fold_unary_loc(), fortran_common(), get_base_for_alignment_1(), get_bit_range(), get_inner_reference_aff(), get_object_alignment_2(), instrument_bool_enum_load(), instrument_derefs(), instrument_expr(), instrument_object_size(), interpret_rhs_expr(), loc_list_for_address_of_addr_expr_of_indirect_ref(), loc_list_from_tree_1(), make_bit_field_ref(), maybe_instrument_pointer_overflow(), maybe_optimize_ubsan_ptr_ifn(), optimize_bit_field_compare(), fold_using_range::range_of_address(), scan_operand_equal_p(), slsr_process_ref(), split_address_to_core_and_offset(), split_constant_offset_1(), tree_to_aff_combination(), and vect_check_gather_scatter().

◆ get_personality_function()

◆ get_subtarget()

static rtx get_subtarget ( rtx x)
static
Return X if X can be used as a subtarget in a sequence of arithmetic
operations.   

References REG_P, and REGNO.

Referenced by do_store_flag(), expand_expr_real_1(), expand_expr_real_2(), and force_operand().

◆ gf2n_poly_long_div_quotient()

unsigned HOST_WIDE_INT gf2n_poly_long_div_quotient ( unsigned HOST_WIDE_INT polynomial,
unsigned short n )
Return the quotient of polynomial long division of x^2N by POLYNOMIAL
in GF (2^N).
Author: Richard Sandiford <richard.sandiford@arm.com>   
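
A minimal standalone C sketch of the same long division (not the GCC
implementation; it assumes N < 32 and represents the divisor with its leading
x^N term explicit, which may differ from the polynomial convention used
elsewhere in this file):

  #include <stdint.h>

  /* Quotient of x^(2n) divided by a degree-n polynomial over GF(2),
     e.g. dividing x^6 by x^3 + x + 1 (0b1011) yields quotient 0b1011.  */
  static uint64_t
  gf2_x2n_div_quotient (uint64_t divisor, unsigned n)
  {
    uint64_t rem = (uint64_t) 1 << (2 * n);
    uint64_t quotient = 0;
    for (int i = (int) (2 * n); i >= (int) n; i--)
      if ((rem >> i) & 1)
        {
          quotient |= (uint64_t) 1 << (i - n);
          rem ^= divisor << (i - n);
        }
    return quotient;
  }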

References gcc_assert, HOST_WIDE_INT_1U, and i.

◆ highest_pow2_factor()

unsigned HOST_WIDE_INT highest_pow2_factor ( const_tree exp)
Return the highest power of two that EXP is known to be a multiple of.
This is used in updating alignment of MEMs in array references.   
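
For a compile-time constant the answer is just the value of the lowest set
bit; a trivial C illustration (not the GCC routine, which works on trees via
tree_ctz):

  #include <stdint.h>

  /* Highest power of two dividing a nonzero constant N:
     24 -> 8, 40 -> 8, 7 -> 1, 64 -> 64.  */
  static uint64_t
  highest_pow2_factor_of_const (uint64_t n)
  {
    return n & (~n + 1);
  }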

References exp(), HOST_BITS_PER_WIDE_INT, HOST_WIDE_INT_1U, and tree_ctz().

Referenced by dr_analyze_innermost(), expand_expr_real_1(), get_base_for_alignment_1(), highest_pow2_factor_for_target(), store_constructor(), store_expr(), and vect_find_stmt_data_reference().

◆ highest_pow2_factor_for_target()

static unsigned HOST_WIDE_INT highest_pow2_factor_for_target ( const_tree target,
const_tree exp )
static
Similar, except that the alignment requirements of TARGET are
taken into account.  Assume it is at least as aligned as its
type, unless it is a COMPONENT_REF in which case the layout of
the structure gives the alignment.   

References exp(), highest_pow2_factor(), MAX, expand_operand::target, and target_align().

Referenced by expand_assignment().

◆ immediate_const_ctor_p()

bool immediate_const_ctor_p ( const_tree ctor,
unsigned int words )
Return true if constructor CTOR is simple enough to be materialized
in an integer mode register.  Limit the size to WORDS words, which
is 1 by default.   

References CONSTRUCTOR_NELTS, initializer_constant_valid_for_bitfield_p(), int_expr_size(), TREE_ADDRESSABLE, TREE_CODE, TREE_CONSTANT, and TREE_TYPE.

Referenced by emit_push_insn(), expand_expr_real_1(), and load_register_parameters().

◆ init_expr()

◆ init_expr_target()

void init_expr_target ( void )
This is run to set up which modes can be used
directly in memory and to initialize the block move optab.  It is run
at the beginning of compilation and when the target is reinitialized.   

References as_a(), can_extend_p(), direct_load, direct_store, float_extend_from_mem, FOR_EACH_MODE_IN_CLASS, FOR_EACH_MODE_UNTIL, frame_pointer_rtx, gen_raw_REG(), gen_rtx_MEM(), gen_rtx_REG(), insn_operand_matches(), LAST_VIRTUAL_REGISTER, NULL_RTX, PATTERN(), PUT_MODE(), recog(), opt_mode< T >::require(), rtx_alloc(), SET_DEST, set_mode_and_regno(), SET_SRC, stack_pointer_rtx, targetm, and word_mode.

Referenced by backend_init_target().

◆ int_expr_size()

HOST_WIDE_INT int_expr_size ( const_tree exp)
Return a wide integer for the size in bytes of the value of EXP, or -1
if the size can vary or is larger than an integer.   

References exp(), gcc_assert, TREE_CODE, tree_expr_size(), tree_fits_shwi_p(), TREE_OPERAND, and tree_to_shwi().

Referenced by emit_push_insn(), expand_constructor(), expand_expr_real_1(), immediate_const_ctor_p(), load_register_parameters(), and store_expr().

◆ is_aligning_offset()

static bool is_aligning_offset ( const_tree offset,
const_tree exp )
static
Subroutine of above: returns true if OFFSET corresponds to an offset that
when applied to the address of EXP produces an address known to be
aligned to more than BIGGEST_ALIGNMENT.   

References compare_tree_int(), CONVERT_EXPR_P, exp(), offset, pow2p_hwi(), TREE_CODE, tree_fits_uhwi_p(), TREE_OPERAND, and tree_to_uhwi().

Referenced by expand_expr_real_1().

◆ maybe_emit_group_store()

rtx maybe_emit_group_store ( rtx x,
tree type )
Return a form of X that does not use a PARALLEL.  TYPE is the type
of the value stored in X.   

References emit_group_store(), gcc_checking_assert, gen_reg_rtx(), GET_CODE, GET_MODE, int_size_in_bytes(), expand_operand::mode, and TYPE_MODE.

Referenced by expand_assignment(), expand_builtin_int_roundingfn(), and expand_builtin_int_roundingfn_2().

◆ maybe_optimize_mod_cmp()

enum tree_code maybe_optimize_mod_cmp ( enum tree_code code,
tree * arg0,
tree * arg1 )
Attempt to optimize unsigned (X % C1) == C2 (or (X % C1) != C2).
If C1 is odd, transform it to:
(X - C2) * C3 <= C4 (or >), where
C3 is modular multiplicative inverse of C1 and 1<<prec and
C4 is ((1<<prec) - 1) / C1 or ((1<<prec) - 1) / C1 - 1 (the latter
if C2 > ((1<<prec) - 1) % C1).
If C1 is even and C2 is 0, then with S = ctz (C1), use
((X * C3) r>> S) <= C4, where C3 is modular multiplicative
inverse of C1>>S and 1<<prec and C4 is (((1<<prec) - 1) / (C1>>S)) >> S.

For signed (X % C1) == 0, if C1 is odd, transform it to (all operations
in it unsigned):
(X * C3) + C4 <= 2 * C4, where
C3 is modular multiplicative inverse of (unsigned) C1 and 1<<prec and
C4 is ((1<<(prec - 1) - 1) / C1).
If C1 is even, then with S = ctz (C1), use
((X * C3) + C4) r>> S <= (C4 >> (S - 1))
where C3 is modular multiplicative inverse of (unsigned)(C1>>S) and 1<<prec
and C4 is ((1<<(prec - 1) - 1) / (C1>>S)) & (-1<<S).

See the Hacker's Delight book, section 10-17.   
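
A self-contained C check of the unsigned odd-C1 case with illustrative
constants (C1 = 7, C2 = 3, prec = 32; nothing here is GCC code):

  #include <assert.h>
  #include <stdint.h>

  int
  main (void)
  {
    const uint32_t c1 = 7, c2 = 3;            /* test x % 7 == 3, c1 odd */

    /* C3: modular multiplicative inverse of C1 modulo 2^32, by Newton
       iteration; every step doubles the number of correct low bits.  */
    uint32_t c3 = c1;
    for (int i = 0; i < 5; i++)
      c3 = (uint32_t) (c3 * (2u - c1 * c3));
    assert ((uint32_t) (c1 * c3) == 1);

    /* C4: ((1 << prec) - 1) / C1, minus one if C2 > ((1 << prec) - 1) % C1.  */
    uint32_t c4 = UINT32_MAX / c1 - (c2 > UINT32_MAX % c1 ? 1 : 0);

    /* (X - C2) * C3 <= C4 must agree with X % C1 == C2 (sampled).  */
    for (uint64_t x = 0; x <= UINT32_MAX; x += 9973)
      {
        uint32_t ux = (uint32_t) x;
        assert (((uint32_t) ((ux - c2) * c3) <= c4) == (ux % c1 == c2));
      }
    return 0;
  }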

References a, b, wi::bit_and(), build_int_cst(), separate_ops::code, compare_tree_int(), wi::ctz(), wi::divmod_trunc(), do_pending_stack_adjust(), emit_insn(), end_sequence(), expand_expr_real_2(), EXPAND_NORMAL, expand_normal(), fold_build2_loc(), fold_convert, fold_convert_loc(), FOR_EACH_IMM_USE_FAST, wide_int_storage::from(), gcc_checking_assert, get_def_for_expr(), get_insns(), GET_MODE_BITSIZE(), get_range_pos_neg(), gimple_assign_rhs1(), gimple_assign_rhs2(), gimple_assign_rhs_code(), gimple_bb(), gimple_location(), wi::gtu_p(), integer_pow2p(), integer_zerop(), is_a(), is_gimple_assign(), separate_ops::location, wi::lrshift(), wi::lshift(), make_tree(), wi::mask(), maybe_optimize_pow2p_mod_cmp(), wi::mod_inv(), NULL, NULL_RTX, NULL_TREE, separate_ops::op0, separate_ops::op1, separate_ops::op2, operand_equal_p(), optimize_insn_for_speed_p(), rtx_cost(), seq_cost(), wi::shifted_mask(), SIGNED, start_sequence(), wi::to_wide(), TREE_CODE, tree_int_cst_le(), tree_int_cst_sgn(), TREE_TYPE, separate_ops::type, TYPE_MODE, TYPE_PRECISION, TYPE_UNSIGNED, wi::udiv_trunc(), wi::umod_trunc(), UNSIGNED, unsigned_type_for(), USE_STMT, and wide_int_to_tree().

Referenced by do_store_flag(), and expand_gimple_cond().

◆ maybe_optimize_pow2p_mod_cmp()

◆ maybe_optimize_sub_cmp_0()

◆ mem_autoinc_base()

static rtx mem_autoinc_base ( rtx mem)
static
A utility routine that returns the base of an auto-inc memory, or NULL.   

References GET_CODE, GET_RTX_CLASS, MEM_P, NULL, RTX_AUTOINC, and XEXP.

Referenced by find_args_size_adjust().

◆ mem_ref_refers_to_non_mem_p()

bool mem_ref_refers_to_non_mem_p ( tree ref)
Returns true if REF refers to an object that does not
reside in memory and has non-BLKmode.   

References non_mem_decl_p(), TREE_CODE, and TREE_OPERAND.

Referenced by expand_assignment(), expand_DEFERRED_INIT(), and expand_expr_real_1().

◆ memory_load_overlap()

static int memory_load_overlap ( rtx x,
rtx y,
HOST_WIDE_INT size )
static
If reading SIZE bytes from X will end up reading from Y, return the
number of bytes that overlap.  Return -1 if there is no overlap, or -2
if the overlap cannot be determined (for example when X and Y have
different base registers).   

References CONST_INT_P, IN_RANGE, INTVAL, plus_constant(), simplify_gen_binary(), and y.

Referenced by emit_push_insn().

◆ mostly_zeros_p()

static bool mostly_zeros_p ( const_tree exp)
static
Return true if EXP consists mostly (at least 3/4) of zeros.   

References categorize_ctor_elements(), exp(), initializer_zerop(), and TREE_CODE.

Referenced by expand_constructor(), and store_constructor().

◆ move_block_from_reg()

void move_block_from_reg ( int regno,
rtx x,
int nregs )
Copy all or part of a BLKmode value X out of registers starting at REGNO.
The number of registers to be filled is NREGS.   

References delete_insns_since(), emit_insn(), emit_move_insn(), gcc_assert, GEN_INT, gen_rtx_REG(), get_last_insn(), i, last, operand_subword(), targetm, and word_mode.

Referenced by assign_parm_adjust_entry_rtl(), and assign_parm_setup_block().

◆ move_block_to_reg()

void move_block_to_reg ( int regno,
rtx x,
int nregs,
machine_mode mode )
Copy all or part of a value X into registers starting at REGNO.
The number of registers to be filled is NREGS.   

References CONSTANT_P, delete_insns_since(), emit_insn(), emit_move_insn(), force_const_mem(), GEN_INT, gen_rtx_REG(), get_last_insn(), i, last, expand_operand::mode, operand_subword_force(), targetm, validize_mem(), and word_mode.

Referenced by emit_push_insn(), and load_register_parameters().

◆ move_by_pieces()

rtx move_by_pieces ( rtx to,
rtx from,
unsigned HOST_WIDE_INT len,
unsigned int align,
memop_ret retmode )
Generate several move instructions to copy LEN bytes from block FROM to
block TO.  (These are MEM rtx's with BLKmode).

If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
used to push FROM to the stack.

ALIGN is maximum stack alignment we can assume.

Return value is based on RETMODE argument.   

References gcc_unreachable, NULL, and RETURN_BEGIN.

Referenced by emit_block_move_hints(), and emit_push_insn().

◆ non_mem_decl_p()

bool non_mem_decl_p ( tree base)
Returns true if BASE is a DECL that does not reside in memory and
has non-BLKmode.  DECL_RTL must not be a MEM; if
DECL_RTL was not set yet, return false.   

References DECL_MODE, DECL_P, DECL_RTL, DECL_RTL_SET_P, MEM_P, and TREE_ADDRESSABLE.

Referenced by expand_DEFERRED_INIT(), and mem_ref_refers_to_non_mem_p().

◆ optimize_bitfield_assignment_op()

◆ push_block()

rtx push_block ( rtx size,
poly_int64 extra,
int below )
Pushing data onto the stack.   
Push a block of length SIZE (perhaps variable)
and return an rtx to address the beginning of the block.
The value may be virtual_outgoing_args_rtx.

EXTRA is the number of bytes of padding to push in addition to SIZE.
BELOW nonzero means this padding comes at low addresses;
otherwise, the padding comes at high addresses.   

References anti_adjust_stack(), CONSTANT_P, convert_modes(), copy_to_mode_reg(), expand_binop(), gen_int_mode(), known_eq, memory_address, NARROWEST_INT_MODE, negate_rtx(), OPTAB_LIB_WIDEN, plus_constant(), poly_int_rtx_p(), ptr_mode, REG_P, STACK_GROWS_DOWNWARD, and virtual_outgoing_args_rtx.

Referenced by emit_library_call_value_1(), emit_push_insn(), and expand_call().

◆ read_complex_part()

rtx read_complex_part ( rtx cplx,
bool imag_p )

◆ reduce_to_bit_field_precision()

static rtx reduce_to_bit_field_precision ( rtx exp,
rtx target,
tree type )
static
Subroutine of above: reduce EXP to the precision of TYPE (in the
signedness of TYPE), possibly returning the result in TARGET.
TYPE is known to be a partial integer type.   
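
The effect on a scalar value can be modelled by a short C sketch (illustrative
only and assuming two's complement; GCC performs the equivalent on rtl using
masks and shifts):

  #include <stdint.h>

  /* Reduce V to PREC bits (PREC < 64): mask for an unsigned type, mask
     plus sign extension for a signed one.  E.g. for PREC = 12, the value
     0x1FFF reduces to 0xFFF unsigned but to -1 signed.  */
  static int64_t
  reduce_to_precision (int64_t v, unsigned prec, int unsignedp)
  {
    uint64_t mask = ((uint64_t) 1 << prec) - 1;
    uint64_t r = (uint64_t) v & mask;
    if (!unsignedp && (r & ((uint64_t) 1 << (prec - 1))))
      r |= ~mask;                       /* sign-extend the top bit */
    return (int64_t) r;
  }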

References count, exp(), expand_and(), expand_expr(), EXPAND_NORMAL, expand_shift(), gcc_assert, GET_MODE, GET_MODE_PRECISION(), immed_wide_int_const(), wi::mask(), poly_int_rtx_p(), SCALAR_INT_TYPE_MODE, wi::to_poly_wide(), TYPE_PRECISION, TYPE_UNSIGNED, and wide_int_to_tree().

◆ reflect_16_bit_value()

void reflect_16_bit_value ( rtx * op)
Reflect 16-bit value for the 16-bit target.   

References gen_common_operation_to_reflect().

Referenced by generate_reflecting_code_standard().

◆ reflect_32_bit_value()

void reflect_32_bit_value ( rtx * op)
Reflect 32-bit value for the 32-bit target.   

References gen_common_operation_to_reflect().

Referenced by generate_reflecting_code_standard().

◆ reflect_64_bit_value()

void reflect_64_bit_value ( rtx * op)
Reflect 64-bit value for the 64-bit target.   

References gen_common_operation_to_reflect().

Referenced by generate_reflecting_code_standard().

◆ reflect_8_bit_value()

void reflect_8_bit_value ( rtx * op)
Reflect 8-bit value for the 8-bit target.   

References gen_common_operation_to_reflect().

Referenced by generate_reflecting_code_standard().

◆ safe_from_p()

bool safe_from_p ( const_rtx x,
tree exp,
int top_p )
Subroutine of expand_expr: return true iff there is no way that
EXP can reference X, which is being modified.  TOP_P is nonzero if this
call is going to be used to determine whether we need a temporary
for EXP, as opposed to a recursive call to this function.

It is always safe for this routine to return false since it merely
searches for optimization opportunities.   

References alias_sets_conflict_p(), COMPLETE_TYPE_P, CONSTRUCTOR_ELTS, DECL_EXPR_DECL, DECL_INITIAL, DECL_P, DECL_RTL, DECL_RTL_IF_SET, DECL_RTL_SET_P, exp(), FOR_EACH_VEC_SAFE_ELT, gcc_unreachable, get_alias_set(), GET_CODE, GET_MODE, i, constructor_elt::index, MEM_ALIAS_SET, MEM_P, NULL_TREE, REG_P, REGNO, rtx_equal_p(), safe_from_p(), staticp(), SUBREG_REG, tcc_binary, tcc_comparison, tcc_constant, tcc_declaration, tcc_exceptional, tcc_expression, tcc_reference, tcc_statement, tcc_type, tcc_unary, tcc_vl_exp, TREE_CHAIN, TREE_CODE, TREE_CODE_CLASS, TREE_OPERAND, TREE_OPERAND_LENGTH, TREE_STATIC, TREE_TYPE, TREE_VALUE, true_dependence(), TYPE_ARRAY_MAX_SIZE, TYPE_SIZE, constructor_elt::value, virtual_outgoing_args_rtx, and XEXP.

Referenced by expand_builtin_fabs(), expand_constructor(), expand_expr_real_2(), expand_operands(), and safe_from_p().

◆ set_storage_via_libcall()

rtx set_storage_via_libcall ( rtx object,
rtx size,
rtx val,
bool tailcall )
A subroutine of clear_storage.  Expand a call to memset.
Return the return value of memset, 0 otherwise.   

References build_call_expr(), builtin_decl_implicit(), CALL_EXPR_TAILCALL, CONST_INT_P, convert_to_mode(), copy_addr_to_reg(), copy_to_mode_reg(), expand_call(), integer_type_node, make_tree(), NULL_RTX, ptr_type_node, sizetype, TYPE_MODE, and XEXP.

Referenced by clear_storage_hints().

◆ set_storage_via_setmem()

bool set_storage_via_setmem ( rtx object,
rtx size,
rtx val,
unsigned int align,
unsigned int expected_align,
HOST_WIDE_INT expected_size,
unsigned HOST_WIDE_INT min_size,
unsigned HOST_WIDE_INT max_size,
unsigned HOST_WIDE_INT probable_max_size )

◆ stmt_is_replaceable_p()

static bool stmt_is_replaceable_p ( gimple * stmt)
static
Return TRUE if expression STMT is suitable for replacement.
Never consider memory loads as replaceable, because those don't ever lead
into constant expressions.   

References gimple_assign_rhs1(), gimple_assign_single_p(), is_gimple_val(), and ssa_is_replaceable_p().

Referenced by expand_expr_real_1().

◆ store_by_pieces()

rtx store_by_pieces ( rtx to,
unsigned HOST_WIDE_INT len,
by_pieces_constfn constfun,
void * constfundata,
unsigned int align,
bool memsetp,
memop_ret retmode )
Generate several move instructions to store LEN bytes generated by
CONSTFUN to block TO.  (A MEM rtx with BLKmode).  CONSTFUNDATA is a
pointer which will be passed as argument in every CONSTFUN call.
ALIGN is maximum alignment we can assume.  MEMSETP is true if this is
a memset operation and false if it's a copy of a constant string.
Return value is based on RETMODE argument.   

References gcc_assert, optimize_insn_for_speed_p(), RETURN_BEGIN, RETURN_END_MINUS_ONE, SET_BY_PIECES, STORE_BY_PIECES, and targetm.

Referenced by asan_emit_stack_protection(), expand_builtin_memory_copy_args(), expand_builtin_memset_args(), expand_builtin_strncpy(), store_expr(), and try_store_by_multiple_pieces().

◆ store_constructor()

void store_constructor ( tree exp,
rtx target,
int cleared,
poly_int64 size,
bool reverse )
Store the value of constructor EXP into the rtx TARGET.
TARGET is either a REG or a MEM; we know it cannot conflict, since
safe_from_p has been called.
CLEARED is true if TARGET is known to have been zero'd.
SIZE is the number of bytes of TARGET we are allowed to modify: this
may not be the same as the size of EXP if we are assigning to a field
which has been packed to exclude padding bits.
If REVERSE is true, the store is to be done in reverse order.   

References adjust_address, bit_position(), BITS_PER_WORD, BLOCK_OP_NORMAL, build2(), build_decl(), build_int_cst(), clear_storage(), CONST0_RTX, CONSTRUCTOR_ELT, CONSTRUCTOR_ELTS, CONSTRUCTOR_NELTS, convert_move(), convert_optab_handler(), copy_rtx(), count, create_input_operand(), create_output_operand(), DECL_BIT_FIELD, DECL_FIELD_OFFSET, DECL_MODE, DECL_NONADDRESSABLE_P, DECL_SIZE, do_pending_stack_adjust(), emit_clobber(), emit_insn(), emit_jump(), emit_label(), emit_move_insn(), exp(), expand_assignment(), expand_binop(), expand_insn(), expand_normal(), EXPR_LOCATION, expr_size(), fields_length(), fold_build2, fold_convert, FOR_EACH_CONSTRUCTOR_ELT, FOR_EACH_CONSTRUCTOR_VALUE, force_reg(), gcc_assert, gcc_unreachable, GEN_INT, gen_int_mode(), gen_label_rtx(), gen_reg_rtx(), get_alias_set(), GET_MODE, GET_MODE_BITSIZE(), GET_MODE_CLASS, GET_MODE_INNER, GET_MODE_NUNITS(), GET_MODE_PRECISION(), GET_MODE_SIZE(), highest_pow2_factor(), HOST_WIDE_INT_1U, i, constructor_elt::index, initializer_zerop(), int_bit_position(), int_size_in_bytes(), integer_one_node, integer_type_node, jumpif(), known_eq, known_gt, known_le, wi::mask(), maybe_gt, MEM_ALIAS_SET, MEM_KEEP_ALIAS_SET_P, MEM_P, expand_operand::mode, mostly_zeros_p(), NULL, NULL_TREE, offset, offset_address(), optab_handler(), OPTAB_WIDEN, poly_int_tree_p(), promote_decl_mode(), REG_P, REGMODE_NATURAL_SIZE, rtvec_alloc(), RTVEC_ELT, rtx_equal_p(), SCALAR_INT_MODE_P, SET_DECL_RTL, size_binop, ssize_int, ssizetype, store_constructor(), store_constructor_field(), store_expr(), expand_operand::target, poly_int< N, C >::to_constant(), TREE_CODE, tree_fits_shwi_p(), tree_fits_uhwi_p(), TREE_OPERAND, TREE_SIDE_EFFECTS, TREE_STATIC, tree_to_poly_uint64(), tree_to_shwi(), tree_to_uhwi(), TREE_TYPE, TYPE_DOMAIN, lang_hooks_for_types::type_for_mode, TYPE_MAX_VALUE, TYPE_MIN_VALUE, TYPE_MODE, TYPE_NONALIASED_COMPONENT, TYPE_PRECISION, TYPE_REVERSE_STORAGE_ORDER, TYPE_SIZE, TYPE_SIZE_UNIT, TYPE_UNSIGNED, TYPE_VECTOR_SUBPARTS(), lang_hooks::types, uniform_vector_p(), profile_probability::uninitialized(), constructor_elt::value, expand_operand::value, vec_safe_iterate(), VECTOR_BOOLEAN_TYPE_P, vector_element_bits(), VECTOR_MODE_P, VECTOR_TYPE_P, wide_int_to_tree(), word_mode, and WORD_REGISTER_OPERATIONS.

Referenced by emit_push_insn(), expand_constructor(), expand_expr_real_1(), load_register_parameters(), store_constructor(), store_constructor_field(), and store_field().

◆ store_constructor_field()

static void store_constructor_field ( rtx target,
poly_uint64 bitsize,
poly_int64 bitpos,
poly_uint64 bitregion_start,
poly_uint64 bitregion_end,
machine_mode mode,
tree exp,
int cleared,
alias_set_type alias_set,
bool reverse )
static
Helper function for store_constructor.
TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
CLEARED is as for store_constructor.
ALIAS_SET is the alias set to use for any stores.
If REVERSE is true, the store is to be done in reverse order.

This provides a recursive shortcut back to store_constructor when it isn't
necessary to go through store_field.  This is so that we can pass through
the cleared field to let store_constructor know that we may not have to
clear a substructure if the outer structure has already been cleared.   
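
Schematically the shortcut looks roughly like this (a simplified sketch;
the real function also handles register targets, the target mode's
alignment and alias-set bookkeeping):

poly_int64 bytepos;
poly_uint64 bytesize;
if (TREE_CODE (exp) == CONSTRUCTOR
    && multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
    && multiple_p (bitsize, BITS_PER_UNIT, &bytesize)
    && MEM_P (target))
  {
    /* Byte-aligned sub-object: recurse directly, passing CLEARED through.  */
    rtx to = adjust_address (copy_rtx (target), BLKmode, bytepos);
    store_constructor (exp, to, cleared, bytesize, reverse);
  }
else
  store_field (target, bitsize, bitpos, bitregion_start, bitregion_end,
               mode, exp, alias_set, /*nontemporal=*/false, reverse);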

References adjust_address, copy_rtx(), exp(), GET_MODE, GET_MODE_ALIGNMENT, known_eq, MEM_ALIAS_SET, MEM_KEEP_ALIAS_SET_P, MEM_P, expand_operand::mode, set_mem_alias_set(), store_constructor(), store_field(), expand_operand::target, and TREE_CODE.

Referenced by store_constructor().

◆ store_expr()

rtx store_expr ( tree exp,
rtx target,
int call_param_p,
bool nontemporal,
bool reverse )
Generate code for computing expression EXP,
and storing the value into TARGET.

If the mode is BLKmode then we may return TARGET itself.
It turns out that in BLKmode it doesn't cause a problem,
because C has no operators that could combine two different
assignments into the same BLKmode object with different values
with no sequence point.  Will other languages need this to
be more thorough?

If CALL_PARAM_P is nonzero, this is a store into a call param on the
stack, and block moves may need to be treated specially.

If NONTEMPORAL is true, try using a nontemporal store instruction.

If REVERSE is true, the store is to be done in reverse order.   
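
A hedged usage sketch (a minimal expansion of an assignment; the real
caller expand_assignment additionally handles bit-fields, storage order
and nontemporal stores):

/* Expand "lhs = rhs" when the left-hand side maps directly to an rtx.  */
rtx to_rtx = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
store_expr (rhs, to_rtx, /*call_param_p=*/0,
            /*nontemporal=*/false, /*reverse=*/false);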

References adjust_address, adjust_address_1(), BLOCK_OP_CALL_PARM, BLOCK_OP_NORMAL, build_array_type(), build_index_type(), can_store_by_pieces(), change_address(), clear_storage(), const0_rtx, CONST_INT_P, CONSTANT_P, convert_modes(), convert_move(), convert_to_mode(), copy_blkmode_from_reg(), copy_node(), curr_insn_location(), do_pending_stack_adjust(), emit_barrier(), emit_block_move(), emit_cmp_and_jump_insns(), emit_group_load(), emit_group_move(), emit_group_store(), emit_jump_insn(), emit_label(), emit_move_insn(), emit_storent_insn(), exp(), expand_binop(), expand_expr(), expand_expr_real(), EXPAND_NORMAL, EXPAND_STACK_PARM, expr_size(), flip_storage_order(), fold_convert_loc(), force_operand(), gcc_assert, gcc_unreachable, GEN_INT, gen_label_rtx(), get_address_mode(), GET_CODE, GET_MODE, GET_MODE_BITSIZE(), GET_MODE_CLASS, GET_MODE_PRECISION(), highest_pow2_factor(), int_expr_size(), int_size_in_bytes(), integer_zerop(), INTEGRAL_TYPE_P, INTVAL, jumpifnot(), known_eq, make_tree(), MEM_ADDR_SPACE, MEM_ALIGN, MEM_P, mems_same_for_tbaa_p(), NO_DEFER_POP, NULL, NULL_RTX, offset_address(), OK_DEFER_POP, OPTAB_LIB_WIDEN, plus_constant(), poly_int_rtx_p(), REG_P, RETURN_BEGIN, RETURN_END, rtx_equal_p(), rtx_to_poly_int64(), SCALAR_INT_MODE_P, side_effects_p(), signed_or_unsigned_type_for(), simplify_gen_subreg(), size_binop_loc(), size_int, sizetype, store_bit_field(), store_by_pieces(), store_expr(), STORE_MAX_PIECES, string_cst_read_str(), SUBREG_CHECK_PROMOTED_SIGN, subreg_promoted_mode(), SUBREG_PROMOTED_SIGN, SUBREG_PROMOTED_VAR_P, SUBREG_REG, subreg_unpromoted_mode(), expand_operand::target, targetm, TREE_CODE, TREE_OPERAND, TREE_STRING_LENGTH, TREE_STRING_POINTER, TREE_TYPE, lang_hooks_for_types::type_for_mode, TYPE_MODE, TYPE_PRECISION, TYPE_UNSIGNED, lang_hooks::types, UINTVAL, profile_probability::uninitialized(), and VOID_TYPE_P.

Referenced by expand_assignment(), expand_expr_real_1(), expand_expr_real_2(), initialize_argument_information(), insert_value_copy_on_edge(), store_constructor(), store_expr(), and store_field().

◆ store_field()

static rtx store_field ( rtx target,
poly_int64 bitsize,
poly_int64 bitpos,
poly_uint64 bitregion_start,
poly_uint64 bitregion_end,
machine_mode mode,
tree exp,
alias_set_type alias_set,
bool nontemporal,
bool reverse )
static
Store the value of EXP (an expression tree)
into a subfield of TARGET which has mode MODE and occupies
BITSIZE bits, starting BITPOS bits from the start of TARGET.
If MODE is VOIDmode, it means that we are storing into a bit-field.

BITREGION_START is bitpos of the first bitfield in this region.
BITREGION_END is the bitpos of the ending bitfield in this region.
These two fields are 0 if the C++ memory model does not apply,
or if we are not interested in keeping track of bitfield regions.

Always return const0_rtx unless we have something particular to
return.

ALIAS_SET is the alias set for the destination.  This value will
(in general) be different from that for TARGET, since TARGET is a
reference to the containing structure.

If NONTEMPORAL is true, try generating a nontemporal store.

If REVERSE is true, the store is to be done in reverse order.   
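
For example, a hedged sketch of a bit-field store as a caller such as
store_constructor_field might issue it (TARGET and EXP are assumed; the
bit positions are purely illustrative):

/* Store EXP into a 3-bit field starting 13 bits into TARGET, inside the
   byte-sized bit region covering bits 8..15.  VOIDmode signals a
   bit-field insertion.  */
store_field (target, /*bitsize=*/3, /*bitpos=*/13,
             /*bitregion_start=*/8, /*bitregion_end=*/15,
             VOIDmode, exp, get_alias_set (TREE_TYPE (exp)),
             /*nontemporal=*/false, /*reverse=*/false);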

References adjust_address, AGGREGATE_TYPE_P, BITS_PER_WORD, bits_to_bytes_round_up, BLOCK_OP_NORMAL, CALL_EXPR_RETURN_SLOT_OPT, const0_rtx, convert_modes(), copy_blkmode_from_reg(), copy_rtx(), DECL_MODE, DECL_P, DECL_SIZE, direct_store, emit_block_move(), emit_group_store(), exp(), expand_expr(), EXPAND_NORMAL, expand_normal(), expand_shift(), extract_bit_field(), flip_storage_order(), gcc_assert, gcc_checking_assert, gen_int_mode(), gen_reg_rtx(), GET_CODE, get_def_for_expr(), GET_MODE, GET_MODE_ALIGNMENT, GET_MODE_BITSIZE(), GET_MODE_CLASS, gimple_assign_rhs1(), int_size_in_bytes(), INTEGRAL_TYPE_P, poly_int< N, C >::is_constant(), is_int_mode(), known_eq, known_ge, known_le, maybe_gt, MEM_ALIAS_SET, MEM_ALIGN, MEM_KEEP_ALIAS_SET_P, MEM_P, expand_operand::mode, NULL, NULL_RTX, poly_int_tree_p(), REG_P, opt_mode< T >::require(), set_mem_alias_set(), smallest_int_mode_for_size(), store_bit_field(), store_constructor(), store_expr(), expand_operand::target, targetm, poly_int< N, C >::to_constant(), wi::to_poly_offset(), TREE_ADDRESSABLE, TREE_CODE, TREE_OPERAND, TREE_TYPE, TYPE_MODE, TYPE_PRECISION, TYPE_REVERSE_STORAGE_ORDER, and TYPE_SIZE.

Referenced by expand_assignment(), expand_expr_real_2(), and store_constructor_field().

◆ string_constant()

tree string_constant ( tree arg,
tree * ptr_offset,
tree * mem_size,
tree * decl )
Return the STRING_CST if ARG corresponds to a string constant, or zero
if it doesn't.  If we return nonzero, set *PTR_OFFSET to the (possibly
non-constant) offset in bytes within the string that ARG is accessing.
If MEM_SIZE is non-null, the storage size of the memory is stored in it.
If DECL is non-null, the constant declaration is stored in it when available.
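
A hedged sketch of a typical use, in the style of the builtin folders
(ARG is assumed to be the pointer argument being analyzed):

tree offset = NULL_TREE, mem_size = NULL_TREE, decl = NULL_TREE;
tree str = string_constant (arg, &offset, &mem_size, &decl);
if (str
    && tree_fits_uhwi_p (offset)
    && tree_to_uhwi (offset)
       < (unsigned HOST_WIDE_INT) TREE_STRING_LENGTH (str))
  {
    const char *p = TREE_STRING_POINTER (str) + tree_to_uhwi (offset);
    /* ... fold or expand using the bytes at P, bounded by MEM_SIZE ...  */
  }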

References constant_byte_string().

Referenced by c_strlen(), constant_byte_string(), fold_read_from_constant_string(), getbyterep(), gimple_fold_builtin_memory_op(), and simplify_builtin_call().

◆ string_cst_read_str()

static rtx string_cst_read_str ( void * data,
void * ,
HOST_WIDE_INT offset,
fixed_size_mode mode )
static
Helper function for store_expr when storing a STRING_CST.   

References c_readstr(), const0_rtx, gcc_assert, GET_MODE_SIZE(), expand_operand::mode, offset, TREE_STRING_LENGTH, and TREE_STRING_POINTER.

Referenced by store_expr().

◆ target_align()

static unsigned HOST_WIDE_INT target_align ( const_tree target)
static

◆ tree_expr_size()

static tree tree_expr_size ( const_tree exp)
static
Returns a tree for the size of EXP in bytes.   

References DECL_P, DECL_SIZE_UNIT, exp(), size_in_bytes(), and TREE_TYPE.

Referenced by expr_size(), and int_expr_size().

◆ try_casesi()

bool try_casesi ( tree index_type,
tree index_expr,
tree minval,
tree range,
rtx table_label,
rtx default_label,
rtx fallback_label,
profile_probability default_probability )
Attempt to generate a casesi instruction.  Returns true if successful,
false otherwise (i.e. if there is no casesi instruction).

DEFAULT_PROBABILITY is the probability of jumping to the default
label.   
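
A hedged sketch of the calling pattern, after the style of
emit_case_dispatch_table (the surrounding variables are assumed to
describe the switch being expanded, with DEFAULT_LABEL non-null so that
no separate fallback label is needed):

if (!try_casesi (index_type, index_expr, minval, range,
                 table_label, default_label, /*fallback_label=*/NULL_RTX,
                 default_prob))
  {
    /* No usable casesi pattern; fall back to a plain tablejump.  */
    if (!try_tablejump (index_type, index_expr, minval, range,
                        table_label, default_label, default_prob))
      gcc_unreachable ();
  }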

References build2(), convert_to_mode(), create_convert_operand_from_type(), create_fixed_operand(), create_input_operand(), do_pending_stack_adjust(), emit_cmp_and_jump_insns(), expand_jump_insn(), expand_normal(), fold_convert, GET_MODE_BITSIZE(), integer_zero_node, NULL_RTX, SCALAR_INT_TYPE_MODE, targetm, TREE_TYPE, lang_hooks_for_types::type_for_mode, and lang_hooks::types.

Referenced by emit_case_dispatch_table().

◆ try_tablejump()

bool try_tablejump ( tree index_type,
tree index_expr,
tree minval,
tree range,
rtx table_label,
rtx default_label,
profile_probability default_probability )

◆ undefined_operand_subword_p()

static bool undefined_operand_subword_p ( const_rtx op,
int i )
static
Return true if word I of OP lies entirely in the
undefined bits of a paradoxical subreg.   
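
For example (a hedged illustration, assuming a little-endian target with
64-bit words):

/* OP is a paradoxical subreg; only its low word is defined by R:

     (subreg:TI (reg:DI r) 0)

   undefined_operand_subword_p (op, 0) -> false  (word 0 comes from R)
   undefined_operand_subword_p (op, 1) -> true   (word 1 is undefined)

   emit_move_multi_word uses this to skip emitting a move for word 1.  */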

References GET_CODE, GET_MODE, GET_MODE_SIZE(), i, known_ge, known_le, offset, subreg_memory_offset(), and SUBREG_REG.

Referenced by emit_move_multi_word().

◆ use_group_regs()

void use_group_regs ( rtx * call_fusage,
rtx regs )
Add USE expressions to *CALL_FUSAGE for each REG contained in the
PARALLEL REGS.  This is for calls that pass values in multiple
non-contiguous locations.  The Irix 6 ABI has examples of this.   

References i, REG_P, use_reg(), XEXP, XVECEXP, and XVECLEN.

Referenced by emit_library_call_value_1(), and load_register_parameters().

◆ use_reg_mode()

void use_reg_mode ( rtx * call_fusage,
rtx reg,
machine_mode mode )
Add a USE expression for REG to the (possibly empty) list pointed
to by CALL_FUSAGE.  REG must denote a hard register.   

References gcc_assert, gen_rtx_EXPR_LIST(), HARD_REGISTER_P, expand_operand::mode, and REG_P.

Referenced by load_register_parameters(), and use_reg().

◆ use_regs()

void use_regs ( rtx * call_fusage,
int regno,
int nregs )
Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
starting at REGNO.  All of these registers must be hard registers.   
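
A hedged sketch of the typical pattern in a call expander such as
load_register_parameters, covering use_group_regs, use_regs and use_reg
together (ARG_REG and NREGS are assumed to describe how one argument was
loaded):

rtx call_fusage = NULL_RTX;
if (GET_CODE (arg_reg) == PARALLEL)
  use_group_regs (&call_fusage, arg_reg);  /* value in several pieces */
else if (nregs > 1)
  use_regs (&call_fusage, REGNO (arg_reg), nregs);
else
  use_reg (&call_fusage, arg_reg);
/* ... emit the call, then attach CALL_FUSAGE to it ...  */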

References gcc_assert, i, regno_reg_rtx, and use_reg().

Referenced by emit_library_call_value_1(), and load_register_parameters().

◆ widest_fixed_size_mode_for_size()

static fixed_size_mode widest_fixed_size_mode_for_size ( unsigned int size,
by_pieces_operation op )
static

◆ write_complex_part()

void write_complex_part ( rtx cplx,
rtx val,
bool imag_p,
bool undefined_p )

Variable Documentation

◆ cse_not_expected

int cse_not_expected
If this is nonzero, we do not bother generating VOLATILE
around volatile memory references, and we are willing to
output indirect addresses.  If cse is to follow, we reject
indirect addresses so a useful potential cse is generated;
if it is used only once, instruction combination will produce
the same indirect address eventually.   
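
A hedged sketch of the kind of check its users make (compare the
constant-address handling in memory_address_addr_space; X and AS stand
for the address being legitimized and its address space):

machine_mode address_mode = targetm.addr_space.address_mode (as);
if (!cse_not_expected && CONSTANT_P (x) && CONSTANT_ADDRESS_P (x))
  /* While CSE is still expected, push the constant address through a
     register so a later CSE pass can commonize repeated uses of it.  */
  x = force_reg (address_mode, x);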

Referenced by assemble_asm(), memory_address_addr_space(), noce_try_cmove_arith(), prepare_function_start(), rest_of_handle_cse(), rest_of_handle_cse2(), rest_of_handle_cse_after_global_opts(), and use_anchored_address().