Line data Source code
1 : /* Analyze RTL for GNU compiler.
2 : Copyright (C) 1987-2026 Free Software Foundation, Inc.
3 :
4 : This file is part of GCC.
5 :
6 : GCC is free software; you can redistribute it and/or modify it under
7 : the terms of the GNU General Public License as published by the Free
8 : Software Foundation; either version 3, or (at your option) any later
9 : version.
10 :
11 : GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 : WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 : FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 : for more details.
15 :
16 : You should have received a copy of the GNU General Public License
17 : along with GCC; see the file COPYING3. If not see
18 : <http://www.gnu.org/licenses/>. */
19 :
20 :
21 : #include "config.h"
22 : #include "system.h"
23 : #include "coretypes.h"
24 : #include "backend.h"
25 : #include "target.h"
26 : #include "rtl.h"
27 : #include "rtlanal.h"
28 : #include "tree.h"
29 : #include "predict.h"
30 : #include "df.h"
31 : #include "memmodel.h"
32 : #include "tm_p.h"
33 : #include "insn-config.h"
34 : #include "regs.h"
35 : #include "emit-rtl.h" /* FIXME: Can go away once crtl is moved to rtl.h. */
36 : #include "recog.h"
37 : #include "addresses.h"
38 : #include "rtl-iter.h"
39 : #include "hard-reg-set.h"
40 : #include "function-abi.h"
41 :
42 : /* Forward declarations */
43 : static void set_of_1 (rtx, const_rtx, void *);
44 : static bool covers_regno_p (const_rtx, unsigned int);
45 : static bool covers_regno_no_parallel_p (const_rtx, unsigned int);
46 : static bool computed_jump_p_1 (const_rtx);
47 : static void parms_set (rtx, const_rtx, void *);
48 :
49 : static unsigned HOST_WIDE_INT cached_nonzero_bits (const_rtx, scalar_int_mode,
50 : const_rtx, machine_mode,
51 : unsigned HOST_WIDE_INT);
52 : static unsigned HOST_WIDE_INT nonzero_bits1 (const_rtx, scalar_int_mode,
53 : const_rtx, machine_mode,
54 : unsigned HOST_WIDE_INT);
55 : static unsigned int cached_num_sign_bit_copies (const_rtx, scalar_int_mode,
56 : const_rtx, machine_mode,
57 : unsigned int);
58 : static unsigned int num_sign_bit_copies1 (const_rtx, scalar_int_mode,
59 : const_rtx, machine_mode,
60 : unsigned int);
61 :
62 : rtx_subrtx_bound_info rtx_all_subrtx_bounds[NUM_RTX_CODE];
63 : rtx_subrtx_bound_info rtx_nonconst_subrtx_bounds[NUM_RTX_CODE];
64 :
65 : /* Truncation narrows the mode from SOURCE mode to DESTINATION mode.
66 : If TARGET_MODE_REP_EXTENDED (DESTINATION, DESTINATION_REP) is
67 : SIGN_EXTEND then while narrowing we also have to enforce the
68 : representation and sign-extend the value to mode DESTINATION_REP.
69 :
70 : If the value is already sign-extended to DESTINATION_REP mode we
71 : can just switch to DESTINATION mode on it. For each pair of
72 : integral modes SOURCE and DESTINATION, when truncating from SOURCE
73 : to DESTINATION, NUM_SIGN_BIT_COPIES_IN_REP[SOURCE][DESTINATION]
74 : contains the number of high-order bits in SOURCE that have to be
75 : copies of the sign-bit so that we can do this mode-switch to
76 : DESTINATION. */
77 :
78 : static unsigned int
79 : num_sign_bit_copies_in_rep[MAX_MODE_INT + 1][MAX_MODE_INT + 1];
80 :
81 : /* Store X into index I of ARRAY. ARRAY is known to have at least I
82 : elements. Return the new base of ARRAY. */
83 :
84 : template <typename T>
85 : typename T::value_type *
86 9122088 : generic_subrtx_iterator <T>::add_single_to_queue (array_type &array,
87 : value_type *base,
88 : size_t i, value_type x)
89 : {
90 9122088 : if (base == array.stack)
91 : {
92 4664077 : if (i < LOCAL_ELEMS)
93 : {
94 4363599 : base[i] = x;
95 4363599 : return base;
96 : }
97 300478 : gcc_checking_assert (i == LOCAL_ELEMS);
98 : /* A previous iteration might also have moved from the stack to the
99 : heap, in which case the heap array will already be big enough. */
100 300478 : if (vec_safe_length (array.heap) <= i)
101 300478 : vec_safe_grow (array.heap, i + 1, true);
102 300478 : base = array.heap->address ();
103 300478 : memcpy (base, array.stack, sizeof (array.stack));
104 300478 : base[LOCAL_ELEMS] = x;
105 300478 : return base;
106 : }
107 4458011 : unsigned int length = array.heap->length ();
108 4458011 : if (length > i)
109 : {
110 1157468 : gcc_checking_assert (base == array.heap->address ());
111 1157468 : base[i] = x;
112 1157468 : return base;
113 : }
114 : else
115 : {
116 3300543 : gcc_checking_assert (i == length);
117 3300543 : vec_safe_push (array.heap, x);
118 3300543 : return array.heap->address ();
119 : }
120 : }
121 :
122 : /* Add the subrtxes of X to worklist ARRAY, starting at END. Return the
123 : number of elements added to the worklist. */
124 :
125 : template <typename T>
126 : size_t
127 325623231 : generic_subrtx_iterator <T>::add_subrtxes_to_queue (array_type &array,
128 : value_type *base,
129 : size_t end, rtx_type x)
130 : {
131 325623231 : enum rtx_code code = GET_CODE (x);
132 325623231 : const char *format = GET_RTX_FORMAT (code);
133 325623231 : size_t orig_end = end;
134 325623231 : if (UNLIKELY (INSN_P (x)))
135 : {
136 : /* Put the pattern at the top of the queue, since that's what
137 : we're likely to want most. It also allows for the SEQUENCE
138 : code below. */
139 136584 : for (int i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; --i)
140 120642 : if (format[i] == 'e')
141 : {
142 32600 : value_type subx = T::get_value (x->u.fld[i].rt_rtx);
143 32600 : if (LIKELY (end < LOCAL_ELEMS))
144 32600 : base[end++] = subx;
145 : else
146 0 : base = add_single_to_queue (array, base, end++, subx);
147 : }
148 : }
149 : else
150 710720279 : for (int i = 0; format[i]; ++i)
151 385112990 : if (format[i] == 'e')
152 : {
153 518997 : value_type subx = T::get_value (x->u.fld[i].rt_rtx);
154 518997 : if (LIKELY (end < LOCAL_ELEMS))
155 0 : base[end++] = subx;
156 : else
157 518997 : base = add_single_to_queue (array, base, end++, subx);
158 : }
159 384593993 : else if (format[i] == 'E')
160 : {
161 331872443 : unsigned int length = GET_NUM_ELEM (x->u.fld[i].rt_rtvec);
162 331872443 : rtx *vec = x->u.fld[i].rt_rtvec->elem;
163 331872443 : if (LIKELY (end + length <= LOCAL_ELEMS))
164 1013748609 : for (unsigned int j = 0; j < length; j++)
165 682258635 : base[end++] = T::get_value (vec[j]);
166 : else
167 8985560 : for (unsigned int j = 0; j < length; j++)
168 8603091 : base = add_single_to_queue (array, base, end++,
169 8603091 : T::get_value (vec[j]));
170 331872443 : if (code == SEQUENCE && end == length)
171 : /* If the subrtxes of the sequence fill the entire array then
172 : we know that no other parts of a containing insn are queued.
173 : The caller is therefore iterating over the sequence as a
174 : PATTERN (...), so we also want the patterns of the
175 : subinstructions. */
176 0 : for (unsigned int j = 0; j < length; j++)
177 : {
178 0 : typename T::rtx_type x = T::get_rtx (base[j]);
179 0 : if (INSN_P (x))
180 0 : base[j] = T::get_value (PATTERN (x));
181 : }
182 : }
183 325623231 : return end - orig_end;
184 : }
185 :
186 : template <typename T>
187 : void
188 300478 : generic_subrtx_iterator <T>::free_array (array_type &array)
189 : {
190 300478 : vec_free (array.heap);
191 300478 : }
192 :
193 : template <typename T>
194 : const size_t generic_subrtx_iterator <T>::LOCAL_ELEMS;
195 :
196 : template class generic_subrtx_iterator <const_rtx_accessor>;
197 : template class generic_subrtx_iterator <rtx_var_accessor>;
198 : template class generic_subrtx_iterator <rtx_ptr_accessor>;
199 :
200 : /* Return true if the value of X is unstable
201 : (would be different at a different point in the program).
202 : The frame pointer, arg pointer, etc. are considered stable
203 : (within one function) and so is anything marked `unchanging'. */
204 :
205 : bool
206 0 : rtx_unstable_p (const_rtx x)
207 : {
208 0 : const RTX_CODE code = GET_CODE (x);
209 0 : int i;
210 0 : const char *fmt;
211 :
212 0 : switch (code)
213 : {
214 0 : case MEM:
215 0 : return !MEM_READONLY_P (x) || rtx_unstable_p (XEXP (x, 0));
216 :
217 : case CONST:
218 : CASE_CONST_ANY:
219 : case SYMBOL_REF:
220 : case LABEL_REF:
221 : return false;
222 :
223 0 : case REG:
224 : /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
225 0 : if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
226 : /* The arg pointer varies if it is not a fixed register. */
227 0 : || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
228 : return false;
229 : /* ??? When call-clobbered, the value is stable modulo the restore
230 : that must happen after a call. This currently screws up local-alloc
231 : into believing that the restore is not needed. */
232 0 : if (!PIC_OFFSET_TABLE_REG_CALL_CLOBBERED && x == pic_offset_table_rtx)
233 : return false;
234 : return true;
235 :
236 0 : case ASM_OPERANDS:
237 0 : if (MEM_VOLATILE_P (x))
238 : return true;
239 :
240 : /* Fall through. */
241 :
242 0 : default:
243 0 : break;
244 : }
245 :
246 0 : fmt = GET_RTX_FORMAT (code);
247 0 : for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
248 0 : if (fmt[i] == 'e')
249 : {
250 0 : if (rtx_unstable_p (XEXP (x, i)))
251 : return true;
252 : }
253 0 : else if (fmt[i] == 'E')
254 : {
255 : int j;
256 0 : for (j = 0; j < XVECLEN (x, i); j++)
257 0 : if (rtx_unstable_p (XVECEXP (x, i, j)))
258 : return true;
259 : }
260 :
261 : return false;
262 : }
263 :
264 : /* Return true if X has a value that can vary even between two
265 : executions of the program. false means X can be compared reliably
266 : against certain constants or near-constants.
267 : FOR_ALIAS is nonzero if we are called from alias analysis; if it is
268 : zero, we are slightly more conservative.
269 : The frame pointer and the arg pointer are considered constant. */
270 :
271 : bool
272 495838796 : rtx_varies_p (const_rtx x, bool for_alias)
273 : {
274 495838796 : RTX_CODE code;
275 495838796 : int i;
276 495838796 : const char *fmt;
277 :
278 495838796 : if (!x)
279 : return false;
280 :
281 495838796 : code = GET_CODE (x);
282 495838796 : switch (code)
283 : {
284 92407641 : case MEM:
285 92407641 : return !MEM_READONLY_P (x) || rtx_varies_p (XEXP (x, 0), for_alias);
286 :
287 : case CONST:
288 : CASE_CONST_ANY:
289 : case SYMBOL_REF:
290 : case LABEL_REF:
291 : return false;
292 :
293 159825442 : case REG:
294 : /* Note that we have to test for the actual rtx used for the frame
295 : and arg pointers and not just the register number in case we have
296 : eliminated the frame and/or arg pointer and are using it
297 : for pseudos. */
298 159825442 : if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
299 : /* The arg pointer varies if it is not a fixed register. */
300 141826282 : || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
301 : return false;
302 141595187 : if (x == pic_offset_table_rtx
303 : /* ??? When call-clobbered, the value is stable modulo the restore
304 : that must happen after a call. This currently screws up
305 : local-alloc into believing that the restore is not needed, so we
306 : must return 0 only if we are called from alias analysis. */
307 : && (!PIC_OFFSET_TABLE_REG_CALL_CLOBBERED || for_alias))
308 : return false;
309 : return true;
310 :
311 0 : case LO_SUM:
312 : /* The operand 0 of a LO_SUM is considered constant
313 : (in fact it is related specifically to operand 1)
314 : during alias analysis. */
315 0 : return (! for_alias && rtx_varies_p (XEXP (x, 0), for_alias))
316 0 : || rtx_varies_p (XEXP (x, 1), for_alias);
317 :
318 91949 : case ASM_OPERANDS:
319 91949 : if (MEM_VOLATILE_P (x))
320 : return true;
321 :
322 : /* Fall through. */
323 :
324 137616228 : default:
325 137616228 : break;
326 : }
327 :
328 137616228 : fmt = GET_RTX_FORMAT (code);
329 240968198 : for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
330 218445109 : if (fmt[i] == 'e')
331 : {
332 200192824 : if (rtx_varies_p (XEXP (x, i), for_alias))
333 : return true;
334 : }
335 18252285 : else if (fmt[i] == 'E')
336 : {
337 : int j;
338 19350223 : for (j = 0; j < XVECLEN (x, i); j++)
339 15230062 : if (rtx_varies_p (XVECEXP (x, i, j), for_alias))
340 : return true;
341 : }
342 :
343 : return false;
344 : }
345 :
346 : /* Compute an approximation for the offset between the register
347 : FROM and TO for the current function, as it was at the start
348 : of the routine. */
349 :
350 : static poly_int64
351 239069186 : get_initial_register_offset (int from, int to)
352 : {
353 239069186 : static const struct elim_table_t
354 : {
355 : const int from;
356 : const int to;
357 : } table[] = ELIMINABLE_REGS;
358 239069186 : poly_int64 offset1, offset2;
359 239069186 : unsigned int i, j;
360 :
361 239069186 : if (to == from)
362 0 : return 0;
363 :
364 : /* It is not safe to call INITIAL_ELIMINATION_OFFSET before the epilogue
365 : is completed, but we need to give at least an estimate for the stack
366 : pointer based on the frame size. */
367 239069186 : if (!epilogue_completed)
368 : {
369 131466472 : offset1 = crtl->outgoing_args_size + get_frame_size ();
370 : #if !STACK_GROWS_DOWNWARD
371 : offset1 = - offset1;
372 : #endif
373 131466472 : if (to == STACK_POINTER_REGNUM)
374 130914322 : return offset1;
375 552150 : else if (from == STACK_POINTER_REGNUM)
376 276075 : return - offset1;
377 : else
378 276075 : return 0;
379 : }
380 :
381 108746414 : for (i = 0; i < ARRAY_SIZE (table); i++)
382 108746414 : if (table[i].from == from)
383 : {
384 107602714 : if (table[i].to == to)
385 : {
386 106459014 : INITIAL_ELIMINATION_OFFSET (table[i].from, table[i].to,
387 : offset1);
388 106459014 : return offset1;
389 : }
390 5718500 : for (j = 0; j < ARRAY_SIZE (table); j++)
391 : {
392 4574800 : if (table[j].to == to
393 2287400 : && table[j].from == table[i].to)
394 : {
395 0 : INITIAL_ELIMINATION_OFFSET (table[i].from, table[i].to,
396 : offset1);
397 0 : INITIAL_ELIMINATION_OFFSET (table[j].from, table[j].to,
398 : offset2);
399 0 : return offset1 + offset2;
400 : }
401 4574800 : if (table[j].from == to
402 0 : && table[j].to == table[i].to)
403 : {
404 0 : INITIAL_ELIMINATION_OFFSET (table[i].from, table[i].to,
405 : offset1);
406 0 : INITIAL_ELIMINATION_OFFSET (table[j].from, table[j].to,
407 : offset2);
408 0 : return offset1 - offset2;
409 : }
410 : }
411 : }
412 1143700 : else if (table[i].to == from)
413 : {
414 1143700 : if (table[i].from == to)
415 : {
416 0 : INITIAL_ELIMINATION_OFFSET (table[i].from, table[i].to,
417 : offset1);
418 0 : return - offset1;
419 : }
420 2287400 : for (j = 0; j < ARRAY_SIZE (table); j++)
421 : {
422 2287400 : if (table[j].to == to
423 1143700 : && table[j].from == table[i].from)
424 : {
425 1143700 : INITIAL_ELIMINATION_OFFSET (table[i].from, table[i].to,
426 : offset1);
427 1143700 : INITIAL_ELIMINATION_OFFSET (table[j].from, table[j].to,
428 : offset2);
429 1143700 : return - offset1 + offset2;
430 : }
431 1143700 : if (table[j].from == to
432 0 : && table[j].to == table[i].from)
433 : {
434 0 : INITIAL_ELIMINATION_OFFSET (table[i].from, table[i].to,
435 : offset1);
436 0 : INITIAL_ELIMINATION_OFFSET (table[j].from, table[j].to,
437 : offset2);
438 0 : return - offset1 - offset2;
439 : }
440 : }
441 : }
442 :
443 : /* If the requested register combination was not found,
444 : try a different more simple combination. */
445 0 : if (from == ARG_POINTER_REGNUM)
446 : return get_initial_register_offset (HARD_FRAME_POINTER_REGNUM, to);
447 0 : else if (to == ARG_POINTER_REGNUM)
448 : return get_initial_register_offset (from, HARD_FRAME_POINTER_REGNUM);
449 0 : else if (from == HARD_FRAME_POINTER_REGNUM)
450 : return get_initial_register_offset (FRAME_POINTER_REGNUM, to);
451 0 : else if (to == HARD_FRAME_POINTER_REGNUM)
452 : return get_initial_register_offset (from, FRAME_POINTER_REGNUM);
453 : else
454 0 : return 0;
455 : }
456 :
457 : /* Return true if the use of X+OFFSET as an address in a MEM with SIZE
458 : bytes can cause a trap. MODE is the mode of the MEM (not that of X) and
459 : UNALIGNED_MEMS controls whether true is returned for unaligned memory
460 : references on strict alignment machines. */
461 :
462 : static bool
463 812016716 : rtx_addr_can_trap_p_1 (const_rtx x, poly_int64 offset, poly_int64 size,
464 : machine_mode mode, bool unaligned_mems)
465 : {
466 812016716 : enum rtx_code code = GET_CODE (x);
467 812016716 : gcc_checking_assert (mode == BLKmode
468 : || mode == VOIDmode
469 : || known_size_p (size));
470 812016716 : poly_int64 const_x1;
471 :
472 : /* The offset must be a multiple of the mode size if we are considering
473 : unaligned memory references on strict alignment machines. */
474 812016716 : if (STRICT_ALIGNMENT
475 : && unaligned_mems
476 : && mode != BLKmode
477 : && mode != VOIDmode)
478 : {
479 : poly_int64 actual_offset = offset;
480 :
481 : #ifdef SPARC_STACK_BOUNDARY_HACK
482 : /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
483 : the real alignment of %sp. However, when it does this, the
484 : alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY. */
485 : if (SPARC_STACK_BOUNDARY_HACK
486 : && (x == stack_pointer_rtx || x == hard_frame_pointer_rtx))
487 : actual_offset -= STACK_POINTER_OFFSET;
488 : #endif
489 :
490 : if (!multiple_p (actual_offset, GET_MODE_SIZE (mode)))
491 : return true;
492 : }
493 :
494 812016716 : switch (code)
495 : {
496 6035302 : case SYMBOL_REF:
497 6035302 : if (SYMBOL_REF_WEAK (x))
498 : return true;
499 5704592 : if (!CONSTANT_POOL_ADDRESS_P (x) && !SYMBOL_REF_FUNCTION_P (x))
500 : {
501 698963 : tree decl;
502 698963 : poly_int64 decl_size;
503 :
504 698963 : if (maybe_lt (offset, 0))
505 : return true;
506 697983 : if (!known_size_p (size))
507 621 : return maybe_ne (offset, 0);
508 :
509 : /* If the size of the access or of the symbol is unknown,
510 : assume the worst. */
511 697362 : decl = SYMBOL_REF_DECL (x);
512 :
513 : /* Else check that the access is in bounds. TODO: restructure
514 : expr_size/tree_expr_size/int_expr_size and just use the latter. */
515 697362 : if (!decl)
516 239259 : decl_size = -1;
517 458103 : else if (DECL_P (decl) && DECL_SIZE_UNIT (decl))
518 : {
519 450149 : if (!poly_int_tree_p (DECL_SIZE_UNIT (decl), &decl_size))
520 0 : decl_size = -1;
521 : }
522 7954 : else if (TREE_CODE (decl) == STRING_CST)
523 0 : decl_size = TREE_STRING_LENGTH (decl);
524 7954 : else if (TYPE_SIZE_UNIT (TREE_TYPE (decl)))
525 0 : decl_size = int_size_in_bytes (TREE_TYPE (decl));
526 : else
527 7954 : decl_size = -1;
528 :
529 697362 : return (!known_size_p (decl_size) || known_eq (decl_size, 0)
530 697362 : ? maybe_ne (offset, 0)
531 697362 : : !known_subrange_p (offset, size, 0, decl_size));
532 : }
533 :
534 : return false;
535 :
536 : case LABEL_REF:
537 : return false;
538 :
539 405669043 : case REG:
540 : /* Stack references are assumed not to trap, but we need to deal with
541 : nonsensical offsets. */
542 405669043 : if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
543 397703921 : || x == stack_pointer_rtx
544 : /* The arg pointer varies if it is not a fixed register. */
545 161471786 : || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
546 : {
547 : #ifdef RED_ZONE_SIZE
548 244284215 : poly_int64 red_zone_size = RED_ZONE_SIZE;
549 : #else
550 : poly_int64 red_zone_size = 0;
551 : #endif
552 244284215 : poly_int64 stack_boundary = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
553 244284215 : poly_int64 low_bound, high_bound;
554 :
555 244284215 : if (!known_size_p (size))
556 : return true;
557 :
558 244280677 : if (x == frame_pointer_rtx)
559 : {
560 6544313 : if (FRAME_GROWS_DOWNWARD)
561 : {
562 6544313 : high_bound = targetm.starting_frame_offset ();
563 6544313 : low_bound = high_bound - get_frame_size ();
564 : }
565 : else
566 : {
567 : low_bound = targetm.starting_frame_offset ();
568 : high_bound = low_bound + get_frame_size ();
569 : }
570 : }
571 237736364 : else if (x == hard_frame_pointer_rtx)
572 : {
573 1419775 : poly_int64 sp_offset
574 1419775 : = get_initial_register_offset (STACK_POINTER_REGNUM,
575 : HARD_FRAME_POINTER_REGNUM);
576 1419775 : poly_int64 ap_offset
577 1419775 : = get_initial_register_offset (ARG_POINTER_REGNUM,
578 : HARD_FRAME_POINTER_REGNUM);
579 :
580 : #if STACK_GROWS_DOWNWARD
581 1419775 : low_bound = sp_offset - red_zone_size - stack_boundary;
582 1419775 : high_bound = ap_offset
583 1419775 : + FIRST_PARM_OFFSET (current_function_decl)
584 : #if !ARGS_GROW_DOWNWARD
585 1419775 : + crtl->args.size
586 : #endif
587 1419775 : + stack_boundary;
588 : #else
589 : high_bound = sp_offset + red_zone_size + stack_boundary;
590 : low_bound = ap_offset
591 : + FIRST_PARM_OFFSET (current_function_decl)
592 : #if ARGS_GROW_DOWNWARD
593 : - crtl->args.size
594 : #endif
595 : - stack_boundary;
596 : #endif
597 : }
598 236316589 : else if (x == stack_pointer_rtx)
599 : {
600 236229636 : poly_int64 ap_offset
601 236229636 : = get_initial_register_offset (ARG_POINTER_REGNUM,
602 : STACK_POINTER_REGNUM);
603 :
604 : #if STACK_GROWS_DOWNWARD
605 236229636 : low_bound = - red_zone_size - stack_boundary;
606 236229636 : high_bound = ap_offset
607 236229636 : + FIRST_PARM_OFFSET (current_function_decl)
608 : #if !ARGS_GROW_DOWNWARD
609 236229636 : + crtl->args.size
610 : #endif
611 236229636 : + stack_boundary;
612 : #else
613 : high_bound = red_zone_size + stack_boundary;
614 : low_bound = ap_offset
615 : + FIRST_PARM_OFFSET (current_function_decl)
616 : #if ARGS_GROW_DOWNWARD
617 : - crtl->args.size
618 : #endif
619 : - stack_boundary;
620 : #endif
621 : }
622 : else
623 : {
624 : /* We assume that accesses are safe to at least the
625 : next stack boundary.
626 : Examples are varargs and __builtin_return_address. */
627 : #if ARGS_GROW_DOWNWARD
628 : high_bound = FIRST_PARM_OFFSET (current_function_decl)
629 : + stack_boundary;
630 : low_bound = FIRST_PARM_OFFSET (current_function_decl)
631 : - crtl->args.size - stack_boundary;
632 : #else
633 86953 : low_bound = FIRST_PARM_OFFSET (current_function_decl)
634 86953 : - stack_boundary;
635 86953 : high_bound = FIRST_PARM_OFFSET (current_function_decl)
636 86953 : + crtl->args.size + stack_boundary;
637 : #endif
638 : }
639 :
640 244280677 : if (known_ge (offset, low_bound)
641 244280677 : && known_le (offset, high_bound - size))
642 : return false;
643 : return true;
644 : }
645 : /* All of the virtual frame registers are stack references. */
646 161384828 : if (VIRTUAL_REGISTER_P (x))
647 : return false;
648 : return true;
649 :
650 295325 : case CONST:
651 295325 : return rtx_addr_can_trap_p_1 (XEXP (x, 0), offset, size,
652 295325 : mode, unaligned_mems);
653 :
654 176930130 : case PLUS:
655 : /* An address is assumed not to trap if:
656 : - it is the pic register plus a const unspec without offset. */
657 176930130 : if (XEXP (x, 0) == pic_offset_table_rtx
658 38931 : && GET_CODE (XEXP (x, 1)) == CONST
659 38897 : && GET_CODE (XEXP (XEXP (x, 1), 0)) == UNSPEC
660 176965734 : && known_eq (offset, 0))
661 : return false;
662 :
663 : /* - or it is an address that can't trap plus a constant integer. */
664 176894526 : if (poly_int_rtx_p (XEXP (x, 1), &const_x1)
665 148521351 : && !rtx_addr_can_trap_p_1 (XEXP (x, 0), offset + const_x1,
666 : size, mode, unaligned_mems))
667 : return false;
668 :
669 : return true;
670 :
671 412788 : case LO_SUM:
672 412788 : case PRE_MODIFY:
673 412788 : return rtx_addr_can_trap_p_1 (XEXP (x, 1), offset, size,
674 412788 : mode, unaligned_mems);
675 :
676 196386862 : case PRE_DEC:
677 196386862 : case PRE_INC:
678 196386862 : case POST_DEC:
679 196386862 : case POST_INC:
680 196386862 : case POST_MODIFY:
681 196386862 : return rtx_addr_can_trap_p_1 (XEXP (x, 0), offset, size,
682 196386862 : mode, unaligned_mems);
683 :
684 : default:
685 : break;
686 : }
687 :
688 : /* If it isn't one of the case above, it can cause a trap. */
689 : return true;
690 : }
691 :
692 : /* Return true if the use of X as an address in a MEM can cause a trap. */
693 :
694 : bool
695 13940434 : rtx_addr_can_trap_p (const_rtx x)
696 : {
697 13940434 : return rtx_addr_can_trap_p_1 (x, 0, -1, BLKmode, false);
698 : }
699 :
700 : /* Return true if X contains a MEM subrtx. */
701 :
702 : bool
703 22131537 : contains_mem_rtx_p (rtx x)
704 : {
705 22131537 : subrtx_iterator::array_type array;
706 68801462 : FOR_EACH_SUBRTX (iter, array, x, ALL)
707 53920119 : if (MEM_P (*iter))
708 7250194 : return true;
709 :
710 14881343 : return false;
711 22131537 : }
712 :
713 : /* Return true if X is an address that is known to not be zero. */
714 :
715 : bool
716 56873045 : nonzero_address_p (const_rtx x)
717 : {
718 56875616 : const enum rtx_code code = GET_CODE (x);
719 :
720 56875616 : switch (code)
721 : {
722 3788 : case SYMBOL_REF:
723 3788 : return flag_delete_null_pointer_checks && !SYMBOL_REF_WEAK (x);
724 :
725 : case LABEL_REF:
726 : return true;
727 :
728 26586880 : case REG:
729 : /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
730 26586880 : if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
731 26586826 : || x == stack_pointer_rtx
732 26586826 : || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
733 : return true;
734 : /* All of the virtual frame registers are stack references. */
735 26586826 : if (VIRTUAL_REGISTER_P (x))
736 : return true;
737 : return false;
738 :
739 2571 : case CONST:
740 2571 : return nonzero_address_p (XEXP (x, 0));
741 :
742 12637441 : case PLUS:
743 : /* Handle PIC references. */
744 12637441 : if (XEXP (x, 0) == pic_offset_table_rtx
745 0 : && CONSTANT_P (XEXP (x, 1)))
746 : return true;
747 : return false;
748 :
749 0 : case PRE_MODIFY:
750 : /* Similar to the above; allow positive offsets. Further, since
751 : auto-inc is only allowed in memories, the register must be a
752 : pointer. */
753 0 : if (CONST_INT_P (XEXP (x, 1))
754 0 : && INTVAL (XEXP (x, 1)) > 0)
755 : return true;
756 0 : return nonzero_address_p (XEXP (x, 0));
757 :
758 : case PRE_INC:
759 : /* Similarly. Further, the offset is always positive. */
760 : return true;
761 :
762 0 : case PRE_DEC:
763 0 : case POST_DEC:
764 0 : case POST_INC:
765 0 : case POST_MODIFY:
766 0 : return nonzero_address_p (XEXP (x, 0));
767 :
768 0 : case LO_SUM:
769 0 : return nonzero_address_p (XEXP (x, 1));
770 :
771 : default:
772 : break;
773 : }
774 :
775 : /* If it isn't one of the case above, might be zero. */
776 : return false;
777 : }
778 :
779 : /* Return true if X refers to a memory location whose address
780 : cannot be compared reliably with constant addresses,
781 : or if X refers to a BLKmode memory object.
782 : FOR_ALIAS is nonzero if we are called from alias analysis; if it is
783 : zero, we are slightly more conservative. */
784 :
785 : bool
786 0 : rtx_addr_varies_p (const_rtx x, bool for_alias)
787 : {
788 0 : enum rtx_code code;
789 0 : int i;
790 0 : const char *fmt;
791 :
792 0 : if (x == 0)
793 : return false;
794 :
795 0 : code = GET_CODE (x);
796 0 : if (code == MEM)
797 0 : return GET_MODE (x) == BLKmode || rtx_varies_p (XEXP (x, 0), for_alias);
798 :
799 0 : fmt = GET_RTX_FORMAT (code);
800 0 : for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
801 0 : if (fmt[i] == 'e')
802 : {
803 0 : if (rtx_addr_varies_p (XEXP (x, i), for_alias))
804 : return true;
805 : }
806 0 : else if (fmt[i] == 'E')
807 : {
808 : int j;
809 0 : for (j = 0; j < XVECLEN (x, i); j++)
810 0 : if (rtx_addr_varies_p (XVECEXP (x, i, j), for_alias))
811 : return true;
812 : }
813 : return false;
814 : }
815 :
816 : /* Get the declaration of the function called by INSN. */
817 :
818 : tree
819 308591905 : get_call_fndecl (const rtx_insn *insn)
820 : {
821 308591905 : rtx note, datum;
822 :
823 308591905 : note = find_reg_note (insn, REG_CALL_DECL, NULL_RTX);
824 308591905 : if (note == NULL_RTX)
825 : return NULL_TREE;
826 :
827 305321556 : datum = XEXP (note, 0);
828 305321556 : if (datum != NULL_RTX)
829 294031037 : return SYMBOL_REF_DECL (datum);
830 :
831 : return NULL_TREE;
832 : }
833 :
834 : /* Return the value of the integer term in X, if one is apparent;
835 : otherwise return 0.
836 : Only obvious integer terms are detected.
837 : This is used in cse.cc with the `related_value' field. */
838 :
839 : HOST_WIDE_INT
840 460864 : get_integer_term (const_rtx x)
841 : {
842 460864 : if (GET_CODE (x) == CONST)
843 258925 : x = XEXP (x, 0);
844 :
845 460864 : if (GET_CODE (x) == MINUS
846 0 : && CONST_INT_P (XEXP (x, 1)))
847 0 : return - INTVAL (XEXP (x, 1));
848 460864 : if (GET_CODE (x) == PLUS
849 258925 : && CONST_INT_P (XEXP (x, 1)))
850 258925 : return INTVAL (XEXP (x, 1));
851 : return 0;
852 : }
853 :
854 : /* If X is a constant, return the value sans apparent integer term;
855 : otherwise return 0.
856 : Only obvious integer terms are detected. */
857 :
858 : rtx
859 1370425 : get_related_value (const_rtx x)
860 : {
861 1370425 : if (GET_CODE (x) != CONST)
862 : return 0;
863 1370425 : x = XEXP (x, 0);
864 1370425 : if (GET_CODE (x) == PLUS
865 1342972 : && CONST_INT_P (XEXP (x, 1)))
866 1342972 : return XEXP (x, 0);
867 27453 : else if (GET_CODE (x) == MINUS
868 0 : && CONST_INT_P (XEXP (x, 1)))
869 0 : return XEXP (x, 0);
870 : return 0;
871 : }
872 :
873 : /* Return true if SYMBOL is a SYMBOL_REF and OFFSET + SYMBOL points
874 : to somewhere in the same object or object_block as SYMBOL. */
875 :
876 : bool
877 0 : offset_within_block_p (const_rtx symbol, HOST_WIDE_INT offset)
878 : {
879 0 : tree decl;
880 :
881 0 : if (GET_CODE (symbol) != SYMBOL_REF)
882 : return false;
883 :
884 0 : if (offset == 0)
885 : return true;
886 :
887 0 : if (offset > 0)
888 : {
889 0 : if (CONSTANT_POOL_ADDRESS_P (symbol)
890 0 : && offset < (int) GET_MODE_SIZE (get_pool_mode (symbol)))
891 0 : return true;
892 :
893 0 : decl = SYMBOL_REF_DECL (symbol);
894 0 : if (decl && offset < int_size_in_bytes (TREE_TYPE (decl)))
895 : return true;
896 : }
897 :
898 0 : if (SYMBOL_REF_HAS_BLOCK_INFO_P (symbol)
899 0 : && SYMBOL_REF_BLOCK (symbol)
900 0 : && SYMBOL_REF_BLOCK_OFFSET (symbol) >= 0
901 0 : && ((unsigned HOST_WIDE_INT) offset + SYMBOL_REF_BLOCK_OFFSET (symbol)
902 0 : < (unsigned HOST_WIDE_INT) SYMBOL_REF_BLOCK (symbol)->size))
903 : return true;
904 :
905 : return false;
906 : }
907 :
908 : /* Split X into a base and a constant offset, storing them in *BASE_OUT
909 : and *OFFSET_OUT respectively. */
910 :
911 : void
912 0 : split_const (rtx x, rtx *base_out, rtx *offset_out)
913 : {
914 0 : if (GET_CODE (x) == CONST)
915 : {
916 0 : x = XEXP (x, 0);
917 0 : if (GET_CODE (x) == PLUS && CONST_INT_P (XEXP (x, 1)))
918 : {
919 0 : *base_out = XEXP (x, 0);
920 0 : *offset_out = XEXP (x, 1);
921 0 : return;
922 : }
923 : }
924 0 : *base_out = x;
925 0 : *offset_out = const0_rtx;
926 : }
927 :
928 : /* Express integer value X as some value Y plus a polynomial offset,
929 : where Y is either const0_rtx, X or something within X (as opposed
930 : to a new rtx). Return the Y and store the offset in *OFFSET_OUT. */
931 :
932 : rtx
933 415275241 : strip_offset (rtx x, poly_int64 *offset_out)
934 : {
935 415275241 : rtx base = const0_rtx;
936 415275241 : rtx test = x;
937 415275241 : if (GET_CODE (test) == CONST)
938 8886066 : test = XEXP (test, 0);
939 415275241 : if (GET_CODE (test) == PLUS)
940 : {
941 312946067 : base = XEXP (test, 0);
942 312946067 : test = XEXP (test, 1);
943 : }
944 415275241 : if (poly_int_rtx_p (test, offset_out))
945 293000494 : return base;
946 122274747 : *offset_out = 0;
947 122274747 : return x;
948 : }
949 :
950 : /* Return the argument size in REG_ARGS_SIZE note X. */
951 :
952 : poly_int64
953 5294154 : get_args_size (const_rtx x)
954 : {
955 5294154 : gcc_checking_assert (REG_NOTE_KIND (x) == REG_ARGS_SIZE);
956 5294154 : return rtx_to_poly_int64 (XEXP (x, 0));
957 : }
958 :
959 : /* Return the number of places FIND appears within X. If COUNT_DEST is
960 : zero, we do not count occurrences inside the destination of a SET. */
961 :
962 : int
963 8725377 : count_occurrences (const_rtx x, const_rtx find, int count_dest)
964 : {
965 8725377 : int i, j;
966 8725377 : enum rtx_code code;
967 8725377 : const char *format_ptr;
968 8725377 : int count;
969 :
970 8725377 : if (x == find)
971 : return 1;
972 :
973 6353896 : code = GET_CODE (x);
974 :
975 6353896 : switch (code)
976 : {
977 : case REG:
978 : CASE_CONST_ANY:
979 : case SYMBOL_REF:
980 : case CODE_LABEL:
981 : case PC:
982 : return 0;
983 :
984 0 : case EXPR_LIST:
985 0 : count = count_occurrences (XEXP (x, 0), find, count_dest);
986 0 : if (XEXP (x, 1))
987 0 : count += count_occurrences (XEXP (x, 1), find, count_dest);
988 : return count;
989 :
990 73873 : case MEM:
991 73873 : if (MEM_P (find) && rtx_equal_p (x, find))
992 : return 1;
993 : break;
994 :
995 0 : case SET:
996 0 : if (SET_DEST (x) == find && ! count_dest)
997 0 : return count_occurrences (SET_SRC (x), find, count_dest);
998 : break;
999 :
1000 : default:
1001 : break;
1002 : }
1003 :
1004 3221444 : format_ptr = GET_RTX_FORMAT (code);
1005 3221444 : count = 0;
1006 :
1007 9537125 : for (i = 0; i < GET_RTX_LENGTH (code); i++)
1008 : {
1009 6315681 : switch (*format_ptr++)
1010 : {
1011 6154739 : case 'e':
1012 6154739 : count += count_occurrences (XEXP (x, i), find, count_dest);
1013 6154739 : break;
1014 :
1015 : case 'E':
1016 135942 : for (j = 0; j < XVECLEN (x, i); j++)
1017 112604 : count += count_occurrences (XVECEXP (x, i, j), find, count_dest);
1018 : break;
1019 : }
1020 : }
1021 : return count;
1022 : }
1023 :
1024 :
1025 : /* Return TRUE if OP is a register or subreg of a register that
1026 : holds an unsigned quantity. Otherwise, return FALSE. */
1027 :
1028 : bool
1029 0 : unsigned_reg_p (rtx op)
1030 : {
1031 0 : if (REG_P (op)
1032 0 : && REG_EXPR (op)
1033 0 : && TYPE_UNSIGNED (TREE_TYPE (REG_EXPR (op))))
1034 : return true;
1035 :
1036 0 : if (GET_CODE (op) == SUBREG
1037 0 : && SUBREG_PROMOTED_SIGN (op))
1038 0 : return true;
1039 :
1040 : return false;
1041 : }
1042 :
1043 :
1044 : /* Return true if register REG appears somewhere within IN.
1045 : Also works if REG is not a register; in this case it checks
1046 : for a subexpression of IN that is Lisp "equal" to REG. */
1047 :
1048 : bool
1049 438170562 : reg_mentioned_p (const_rtx reg, const_rtx in)
1050 : {
1051 438170562 : const char *fmt;
1052 438170562 : int i;
1053 438170562 : enum rtx_code code;
1054 :
1055 438170562 : if (in == 0)
1056 : return false;
1057 :
1058 432915061 : if (reg == in)
1059 : return true;
1060 :
1061 420659678 : if (GET_CODE (in) == LABEL_REF)
1062 6332356 : return reg == label_ref_label (in);
1063 :
1064 414327322 : code = GET_CODE (in);
1065 :
1066 414327322 : switch (code)
1067 : {
1068 : /* Compare registers by number. */
1069 152893498 : case REG:
1070 152893498 : return REG_P (reg) && REGNO (in) == REGNO (reg);
1071 :
1072 : /* These codes have no constituent expressions
1073 : and are unique. */
1074 : case SCRATCH:
1075 : case PC:
1076 : return false;
1077 :
1078 : CASE_CONST_ANY:
1079 : /* These are kept unique for a given value. */
1080 : return false;
1081 :
1082 157392997 : default:
1083 157392997 : break;
1084 : }
1085 :
1086 157392997 : if (GET_CODE (reg) == code && rtx_equal_p (reg, in))
1087 : return true;
1088 :
1089 157226418 : fmt = GET_RTX_FORMAT (code);
1090 :
1091 419012183 : for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1092 : {
1093 300341305 : if (fmt[i] == 'E')
1094 : {
1095 3147546 : int j;
1096 11063330 : for (j = XVECLEN (in, i) - 1; j >= 0; j--)
1097 8145474 : if (reg_mentioned_p (reg, XVECEXP (in, i, j)))
1098 : return true;
1099 : }
1100 297193759 : else if (fmt[i] == 'e'
1101 297193759 : && reg_mentioned_p (reg, XEXP (in, i)))
1102 : return true;
1103 : }
1104 : return false;
1105 : }
1106 :
1107 : /* Return true if in between BEG and END, exclusive of BEG and END, there is
1108 : no CODE_LABEL insn. */
1109 :
1110 : bool
1111 0 : no_labels_between_p (const rtx_insn *beg, const rtx_insn *end)
1112 : {
1113 0 : rtx_insn *p;
1114 0 : if (beg == end)
1115 : return false;
1116 0 : for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
1117 0 : if (LABEL_P (p))
1118 : return false;
1119 : return true;
1120 : }
1121 :
1122 : /* Return true if register REG is used in an insn between
1123 : FROM_INSN and TO_INSN (exclusive of those two). */
1124 :
1125 : bool
1126 23931621 : reg_used_between_p (const_rtx reg, const rtx_insn *from_insn,
1127 : const rtx_insn *to_insn)
1128 : {
1129 23931621 : rtx_insn *insn;
1130 :
1131 23931621 : if (from_insn == to_insn)
1132 : return false;
1133 :
1134 172680713 : for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
1135 125205323 : if (NONDEBUG_INSN_P (insn)
1136 125205323 : && (reg_overlap_mentioned_p (reg, PATTERN (insn))
1137 69076394 : || (CALL_P (insn) && find_reg_fusage (insn, USE, reg))))
1138 387852 : return true;
1139 : return false;
1140 : }
1141 :
1142 : /* Return true if the old value of X, a register, is referenced in BODY. If X
1143 : is entirely replaced by a new value and the only use is as a SET_DEST,
1144 : we do not consider it a reference. */
1145 :
1146 : bool
1147 131682814 : reg_referenced_p (const_rtx x, const_rtx body)
1148 : {
1149 131682814 : int i;
1150 :
1151 131682814 : switch (GET_CODE (body))
1152 : {
1153 97877413 : case SET:
1154 97877413 : if (reg_overlap_mentioned_p (x, SET_SRC (body)))
1155 : return true;
1156 :
1157 : /* If the destination is anything other than PC, a REG or a SUBREG
1158 : of a REG that occupies all of the REG, the insn references X if
1159 : it is mentioned in the destination. */
1160 62373014 : if (GET_CODE (SET_DEST (body)) != PC
1161 62373014 : && !REG_P (SET_DEST (body))
1162 2745923 : && ! (GET_CODE (SET_DEST (body)) == SUBREG
1163 512958 : && REG_P (SUBREG_REG (SET_DEST (body)))
1164 512958 : && !read_modify_subreg_p (SET_DEST (body)))
1165 64273210 : && reg_overlap_mentioned_p (x, SET_DEST (body)))
1166 : return true;
1167 : return false;
1168 :
1169 6957 : case ASM_OPERANDS:
1170 16214 : for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
1171 14869 : if (reg_overlap_mentioned_p (x, ASM_OPERANDS_INPUT (body, i)))
1172 : return true;
1173 : return false;
1174 :
1175 386857 : case CALL:
1176 386857 : case USE:
1177 386857 : case IF_THEN_ELSE:
1178 386857 : return reg_overlap_mentioned_p (x, body);
1179 :
1180 0 : case TRAP_IF:
1181 0 : return reg_overlap_mentioned_p (x, TRAP_CONDITION (body));
1182 :
1183 2086 : case PREFETCH:
1184 2086 : return reg_overlap_mentioned_p (x, XEXP (body, 0));
1185 :
1186 28487 : case UNSPEC:
1187 28487 : case UNSPEC_VOLATILE:
1188 56960 : for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1189 31084 : if (reg_overlap_mentioned_p (x, XVECEXP (body, 0, i)))
1190 : return true;
1191 : return false;
1192 :
1193 17112993 : case PARALLEL:
1194 49086693 : for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1195 35207222 : if (reg_referenced_p (x, XVECEXP (body, 0, i)))
1196 : return true;
1197 : return false;
1198 :
1199 16261824 : case CLOBBER:
1200 16261824 : if (MEM_P (XEXP (body, 0)))
1201 11909 : if (reg_overlap_mentioned_p (x, XEXP (XEXP (body, 0), 0)))
1202 : return true;
1203 : return false;
1204 :
1205 0 : case COND_EXEC:
1206 0 : if (reg_overlap_mentioned_p (x, COND_EXEC_TEST (body)))
1207 : return true;
1208 0 : return reg_referenced_p (x, COND_EXEC_CODE (body));
1209 :
1210 : default:
1211 : return false;
1212 : }
1213 : }
1214 :
1215 : /* Return true if register REG is set or clobbered in an insn between
1216 : FROM_INSN and TO_INSN (exclusive of those two). */
1217 :
1218 : bool
1219 65361873 : reg_set_between_p (const_rtx reg, const rtx_insn *from_insn,
1220 : const rtx_insn *to_insn)
1221 : {
1222 65361873 : const rtx_insn *insn;
1223 :
1224 65361873 : if (from_insn == to_insn)
1225 : return false;
1226 :
1227 320527173 : for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
1228 193312828 : if (INSN_P (insn) && reg_set_p (reg, insn))
1229 : return true;
1230 : return false;
1231 : }
1232 :
1233 : /* Return true if REG is set or clobbered inside INSN. */
1234 :
1235 : bool
1236 1162730084 : reg_set_p (const_rtx reg, const_rtx insn)
1237 : {
1238 : /* After delay slot handling, call and branch insns might be in a
1239 : sequence. Check all the elements there. */
1240 1162730084 : if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
1241 : {
1242 0 : for (int i = 0; i < XVECLEN (PATTERN (insn), 0); ++i)
1243 0 : if (reg_set_p (reg, XVECEXP (PATTERN (insn), 0, i)))
1244 : return true;
1245 :
1246 : return false;
1247 : }
1248 :
1249 : /* We can be passed an insn or part of one. If we are passed an insn,
1250 : check if a side-effect of the insn clobbers REG. */
1251 1162730084 : if (INSN_P (insn)
1252 1162730084 : && (FIND_REG_INC_NOTE (insn, reg)
1253 : || (CALL_P (insn)
1254 66496811 : && ((REG_P (reg)
1255 66496811 : && REGNO (reg) < FIRST_PSEUDO_REGISTER
1256 62217541 : && (insn_callee_abi (as_a<const rtx_insn *> (insn))
1257 62217541 : .clobbers_reg_p (GET_MODE (reg), REGNO (reg))))
1258 65484504 : || MEM_P (reg)
1259 65484504 : || find_reg_fusage (insn, CLOBBER, reg)))))
1260 1012307 : return true;
1261 :
1262 : /* There are no REG_INC notes for SP autoinc. */
1263 1161717777 : if (reg == stack_pointer_rtx && INSN_P (insn))
1264 : {
1265 5870461 : subrtx_var_iterator::array_type array;
1266 46928578 : FOR_EACH_SUBRTX_VAR (iter, array, PATTERN (insn), NONCONST)
1267 : {
1268 41749738 : rtx mem = *iter;
1269 41749738 : if (mem
1270 41749738 : && MEM_P (mem)
1271 4619679 : && GET_RTX_CLASS (GET_CODE (XEXP (mem, 0))) == RTX_AUTOINC)
1272 : {
1273 691621 : if (XEXP (XEXP (mem, 0), 0) == stack_pointer_rtx)
1274 691621 : return true;
1275 0 : iter.skip_subrtxes ();
1276 : }
1277 : }
1278 5870461 : }
1279 :
1280 1161026156 : return set_of (reg, insn) != NULL_RTX;
1281 : }
1282 :
1283 : /* Similar to reg_set_between_p, but check all registers in X. Return false
1284 : only if none of them are modified between START and END. Return true if
1285 : X contains a MEM; this routine does use memory aliasing. */
1286 :
1287 : bool
1288 160773962 : modified_between_p (const_rtx x, const rtx_insn *start, const rtx_insn *end)
1289 : {
1290 160773962 : const enum rtx_code code = GET_CODE (x);
1291 160773962 : const char *fmt;
1292 160773962 : int i, j;
1293 160773962 : rtx_insn *insn;
1294 :
1295 160773962 : if (start == end)
1296 : return false;
1297 :
1298 160773962 : switch (code)
1299 : {
1300 : CASE_CONST_ANY:
1301 : case CONST:
1302 : case SYMBOL_REF:
1303 : case LABEL_REF:
1304 : return false;
1305 :
1306 : case PC:
1307 : return true;
1308 :
1309 10766550 : case MEM:
1310 10766550 : if (modified_between_p (XEXP (x, 0), start, end))
1311 : return true;
1312 10757530 : if (MEM_READONLY_P (x))
1313 : return false;
1314 57617358 : for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))
1315 37103355 : if (memory_modified_in_insn_p (x, insn))
1316 : return true;
1317 : return false;
1318 :
1319 62143982 : case REG:
1320 62143982 : return reg_set_between_p (x, start, end);
1321 :
1322 50548978 : default:
1323 50548978 : break;
1324 : }
1325 :
1326 50548978 : fmt = GET_RTX_FORMAT (code);
1327 148434317 : for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1328 : {
1329 99227268 : if (fmt[i] == 'e' && modified_between_p (XEXP (x, i), start, end))
1330 : return true;
1331 :
1332 97886035 : else if (fmt[i] == 'E')
1333 3838235 : for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1334 2777693 : if (modified_between_p (XVECEXP (x, i, j), start, end))
1335 : return true;
1336 : }
1337 :
1338 : return false;
1339 : }
1340 :
1341 : /* Similar to reg_set_p, but check all registers in X. Return false only if
1342 : none of them are modified in INSN. Return true if X contains a MEM; this
1343 : routine does use memory aliasing. */
1344 :
1345 : bool
1346 1068856450 : modified_in_p (const_rtx x, const_rtx insn)
1347 : {
1348 1068856450 : const enum rtx_code code = GET_CODE (x);
1349 1068856450 : const char *fmt;
1350 1068856450 : int i, j;
1351 :
1352 1068856450 : switch (code)
1353 : {
1354 : CASE_CONST_ANY:
1355 : case CONST:
1356 : case SYMBOL_REF:
1357 : case LABEL_REF:
1358 : return false;
1359 :
1360 : case PC:
1361 : return true;
1362 :
1363 8018351 : case MEM:
1364 8018351 : if (modified_in_p (XEXP (x, 0), insn))
1365 : return true;
1366 7975014 : if (MEM_READONLY_P (x))
1367 : return false;
1368 7761393 : if (memory_modified_in_insn_p (x, insn))
1369 : return true;
1370 : return false;
1371 :
1372 933380109 : case REG:
1373 933380109 : return reg_set_p (x, insn);
1374 :
1375 71698264 : default:
1376 71698264 : break;
1377 : }
1378 :
1379 71698264 : fmt = GET_RTX_FORMAT (code);
1380 206623954 : for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1381 : {
1382 140869429 : if (fmt[i] == 'e' && modified_in_p (XEXP (x, i), insn))
1383 : return true;
1384 :
1385 134934226 : else if (fmt[i] == 'E')
1386 1310774 : for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1387 675698 : if (modified_in_p (XVECEXP (x, i, j), insn))
1388 : return true;
1389 : }
1390 :
1391 : return false;
1392 : }
1393 :
1394 : /* Return true if X is a SUBREG and if storing a value to X would
1395 : preserve some of its SUBREG_REG. For example, on a normal 32-bit
1396 : target, using a SUBREG to store to one half of a DImode REG would
1397 : preserve the other half. */
1398 :
1399 : bool
1400 142514572 : read_modify_subreg_p (const_rtx x)
1401 : {
1402 142514572 : if (GET_CODE (x) != SUBREG)
1403 : return false;
1404 48190636 : poly_uint64 isize = GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)));
1405 48190636 : poly_uint64 osize = GET_MODE_SIZE (GET_MODE (x));
1406 24095318 : poly_uint64 regsize = REGMODE_NATURAL_SIZE (GET_MODE (SUBREG_REG (x)));
1407 : /* The inner and outer modes of a subreg must be ordered, so that we
1408 : can tell whether they're paradoxical or partial. */
1409 24095318 : gcc_checking_assert (ordered_p (isize, osize));
1410 24095318 : return (maybe_gt (isize, osize) && maybe_gt (isize, regsize));
1411 : }
1412 :
1413 : /* Helper function for set_of. */
1414 : struct set_of_data
1415 : {
1416 : const_rtx found;
1417 : const_rtx pat;
1418 : };
1419 :
1420 : static void
1421 1230484870 : set_of_1 (rtx x, const_rtx pat, void *data1)
1422 : {
1423 1230484870 : struct set_of_data *const data = (struct set_of_data *) (data1);
1424 1230484870 : if (rtx_equal_p (x, data->pat)
1425 1230484870 : || (!MEM_P (x) && reg_overlap_mentioned_p (data->pat, x)))
1426 70869310 : data->found = pat;
1427 1230484870 : }
1428 :
1429 : /* Give an INSN, return a SET or CLOBBER expression that does modify PAT
1430 : (either directly or via STRICT_LOW_PART and similar modifiers). */
1431 : const_rtx
1432 1211750437 : set_of (const_rtx pat, const_rtx insn)
1433 : {
1434 1211750437 : struct set_of_data data;
1435 1211750437 : data.found = NULL_RTX;
1436 1211750437 : data.pat = pat;
1437 1211750437 : note_pattern_stores (INSN_P (insn) ? PATTERN (insn) : insn, set_of_1, &data);
1438 1211750437 : return data.found;
1439 : }
1440 :
1441 : /* Check whether instruction pattern PAT contains a SET with the following
1442 : properties:
1443 :
1444 : - the SET is executed unconditionally; and
1445 : - either:
1446 : - the destination of the SET is a REG that contains REGNO; or
1447 : - both:
1448 : - the destination of the SET is a SUBREG of such a REG; and
1449 : - writing to the subreg clobbers all of the SUBREG_REG
1450 : (in other words, read_modify_subreg_p is false).
1451 :
1452 : If PAT does have a SET like that, return the set, otherwise return null.
1453 :
1454 : This is intended to be an alternative to single_set for passes that
1455 : can handle patterns with multiple_sets. */
1456 : rtx
1457 132726393 : simple_regno_set (rtx pat, unsigned int regno)
1458 : {
1459 132726393 : if (GET_CODE (pat) == PARALLEL)
1460 : {
1461 22295047 : int last = XVECLEN (pat, 0) - 1;
1462 22295161 : for (int i = 0; i < last; ++i)
1463 22295047 : if (rtx set = simple_regno_set (XVECEXP (pat, 0, i), regno))
1464 : return set;
1465 :
1466 114 : pat = XVECEXP (pat, 0, last);
1467 : }
1468 :
1469 110431460 : if (GET_CODE (pat) == SET
1470 110431460 : && covers_regno_no_parallel_p (SET_DEST (pat), regno))
1471 : return pat;
1472 :
1473 : return nullptr;
1474 : }
1475 :
1476 : /* Add all hard register in X to *PSET. */
1477 : void
1478 3890324 : find_all_hard_regs (const_rtx x, HARD_REG_SET *pset)
1479 : {
1480 3890324 : subrtx_iterator::array_type array;
1481 10555371 : FOR_EACH_SUBRTX (iter, array, x, NONCONST)
1482 : {
1483 6665047 : const_rtx x = *iter;
1484 6665047 : if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
1485 3298359 : add_to_hard_reg_set (pset, GET_MODE (x), REGNO (x));
1486 : }
1487 3890324 : }
1488 :
1489 : /* This function, called through note_stores, collects sets and
1490 : clobbers of hard registers in a HARD_REG_SET, which is pointed to
1491 : by DATA. */
1492 : void
1493 22421848 : record_hard_reg_sets (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
1494 : {
1495 22421848 : HARD_REG_SET *pset = (HARD_REG_SET *)data;
1496 22421848 : if (REG_P (x) && HARD_REGISTER_P (x))
1497 14852870 : add_to_hard_reg_set (pset, GET_MODE (x), REGNO (x));
1498 22421848 : }
1499 :
1500 : /* Examine INSN, and compute the set of hard registers written by it.
1501 : Store it in *PSET. Should only be called after reload.
1502 :
1503 : IMPLICIT is true if we should include registers that are fully-clobbered
1504 : by calls. This should be used with caution, since it doesn't include
1505 : partially-clobbered registers. */
1506 : void
1507 17280313 : find_all_hard_reg_sets (const rtx_insn *insn, HARD_REG_SET *pset, bool implicit)
1508 : {
1509 17280313 : rtx link;
1510 :
1511 17280313 : CLEAR_HARD_REG_SET (*pset);
1512 17280313 : note_stores (insn, record_hard_reg_sets, pset);
1513 17280313 : if (CALL_P (insn) && implicit)
1514 0 : *pset |= insn_callee_abi (insn).full_reg_clobbers ();
1515 33797988 : for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1516 16517675 : if (REG_NOTE_KIND (link) == REG_INC)
1517 0 : record_hard_reg_sets (XEXP (link, 0), NULL, pset);
1518 17280313 : }
1519 :
1520 : /* Like record_hard_reg_sets, but called through note_uses. */
1521 : void
1522 3890324 : record_hard_reg_uses (rtx *px, void *data)
1523 : {
1524 3890324 : find_all_hard_regs (*px, (HARD_REG_SET *) data);
1525 3890324 : }
1526 :
1527 : /* Given an INSN, return a SET expression if this insn has only a single SET.
1528 : It may also have CLOBBERs, USEs, or SET whose output
1529 : will not be used, which we ignore. */
1530 :
1531 : rtx
1532 1911444327 : single_set_2 (const rtx_insn *insn, const_rtx pat)
1533 : {
1534 1911444327 : rtx set = NULL;
1535 1911444327 : int set_verified = 1;
1536 1911444327 : int i;
1537 :
1538 1911444327 : if (GET_CODE (pat) == PARALLEL)
1539 : {
1540 2145549232 : for (i = 0; i < XVECLEN (pat, 0); i++)
1541 : {
1542 1459541492 : rtx sub = XVECEXP (pat, 0, i);
1543 1459541492 : switch (GET_CODE (sub))
1544 : {
1545 : case USE:
1546 : case CLOBBER:
1547 1423586478 : break;
1548 :
1549 : default:
1550 35955014 : return NULL_RTX;
1551 :
1552 735202409 : case SET:
1553 : /* We can consider insns having multiple sets, where all
1554 : but one are dead as single set insns. In common case
1555 : only single set is present in the pattern so we want
1556 : to avoid checking for REG_UNUSED notes unless necessary.
1557 :
1558 : When we reach set first time, we just expect this is
1559 : the single set we are looking for and only when more
1560 : sets are found in the insn, we check them. */
1561 788012674 : auto unused = [] (const rtx_insn *insn, rtx dest) {
1562 52810265 : if (!df)
1563 : return false;
1564 51991405 : if (df_note)
1565 30619945 : return !!find_reg_note (insn, REG_UNUSED, dest);
1566 21371460 : return (REG_P (dest)
1567 19515505 : && !HARD_REGISTER_P (dest)
1568 5651786 : && REGNO (dest) < df->regs_inited
1569 27022588 : && DF_REG_USE_COUNT (REGNO (dest)) == 0);
1570 : };
1571 735202409 : if (!set_verified)
1572 : {
1573 28025670 : if (unused (insn, SET_DEST (set)) && !side_effects_p (set))
1574 : set = NULL;
1575 : else
1576 : set_verified = 1;
1577 : }
1578 735202409 : if (!set)
1579 : set = sub, set_verified = 0;
1580 24784595 : else if (!unused (insn, SET_DEST (sub)) || side_effects_p (sub))
1581 20182951 : return NULL_RTX;
1582 : break;
1583 : }
1584 : }
1585 : }
1586 : return set;
1587 : }
1588 :
1589 : /* Given an INSN, return true if it has more than one SET, else return
1590 : false. */
1591 :
1592 : bool
1593 284975989 : multiple_sets (const_rtx insn)
1594 : {
1595 284975989 : bool found;
1596 284975989 : int i;
1597 :
1598 : /* INSN must be an insn. */
1599 284975989 : if (! INSN_P (insn))
1600 : return false;
1601 :
1602 : /* Only a PARALLEL can have multiple SETs. */
1603 284975989 : if (GET_CODE (PATTERN (insn)) == PARALLEL)
1604 : {
1605 258147414 : for (i = 0, found = false; i < XVECLEN (PATTERN (insn), 0); i++)
1606 173924542 : if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET)
1607 : {
1608 : /* If we have already found a SET, then return now. */
1609 87819717 : if (found)
1610 : return true;
1611 : else
1612 : found = true;
1613 : }
1614 : }
1615 :
1616 : /* Either zero or one SET. */
1617 : return false;
1618 : }
1619 :
1620 : /* Return true if the destination of SET equals the source
1621 : and there are no side effects. */
1622 :
1623 : bool
1624 1676950198 : set_noop_p (const_rtx set)
1625 : {
1626 1676950198 : rtx src = SET_SRC (set);
1627 1676950198 : rtx dst = SET_DEST (set);
1628 :
1629 1676950198 : if (dst == pc_rtx && src == pc_rtx)
1630 : return true;
1631 :
1632 1676942441 : if (MEM_P (dst) && MEM_P (src))
1633 9000855 : return (rtx_equal_p (dst, src)
1634 4353 : && !side_effects_p (dst)
1635 9005069 : && !side_effects_p (src));
1636 :
1637 1667941586 : if (GET_CODE (dst) == ZERO_EXTRACT)
1638 108193 : return (rtx_equal_p (XEXP (dst, 0), src)
1639 6407 : && !BITS_BIG_ENDIAN && XEXP (dst, 2) == const0_rtx
1640 0 : && !side_effects_p (src)
1641 108193 : && !side_effects_p (XEXP (dst, 0)));
1642 :
1643 1667833393 : if (GET_CODE (dst) == STRICT_LOW_PART)
1644 264853 : dst = XEXP (dst, 0);
1645 :
1646 1667833393 : if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
1647 : {
1648 830636 : if (maybe_ne (SUBREG_BYTE (src), SUBREG_BYTE (dst)))
1649 : return false;
1650 785493 : src = SUBREG_REG (src);
1651 785493 : dst = SUBREG_REG (dst);
1652 785493 : if (GET_MODE (src) != GET_MODE (dst))
1653 : /* It is hard to tell whether subregs refer to the same bits, so act
1654 : conservatively and return false. */
1655 : return false;
1656 : }
1657 :
1658 : /* It is a NOOP if destination overlaps with selected src vector
1659 : elements. */
1660 1667718134 : if (GET_CODE (src) == VEC_SELECT
1661 7755319 : && REG_P (XEXP (src, 0)) && REG_P (dst)
1662 2632635 : && HARD_REGISTER_P (XEXP (src, 0))
1663 1668280956 : && HARD_REGISTER_P (dst))
1664 : {
1665 562586 : int i;
1666 562586 : rtx par = XEXP (src, 1);
1667 562586 : rtx src0 = XEXP (src, 0);
1668 562586 : poly_int64 c0;
1669 562586 : if (!poly_int_rtx_p (XVECEXP (par, 0, 0), &c0))
1670 : return false;
1671 1125172 : poly_int64 offset = GET_MODE_UNIT_SIZE (GET_MODE (src0)) * c0;
1672 :
1673 947411 : for (i = 1; i < XVECLEN (par, 0); i++)
1674 : {
1675 621038 : poly_int64 c0i;
1676 621038 : if (!poly_int_rtx_p (XVECEXP (par, 0, i), &c0i)
1677 621038 : || maybe_ne (c0i, c0 + i))
1678 1676324660 : return false;
1679 : }
1680 326373 : return
1681 326373 : REG_CAN_CHANGE_MODE_P (REGNO (dst), GET_MODE (src0), GET_MODE (dst))
1682 326373 : && validate_subreg (GET_MODE (dst), GET_MODE (src0), src0, offset)
1683 759942 : && simplify_subreg_regno (REGNO (src0), GET_MODE (src0),
1684 121412 : offset, GET_MODE (dst)) == (int) REGNO (dst);
1685 : }
1686 :
1687 421816430 : return (REG_P (src) && REG_P (dst)
1688 1899769567 : && REGNO (src) == REGNO (dst));
1689 : }
1690 :
1691 : /* Return true if an insn consists only of SETs, each of which only sets a
1692 : value to itself. */
1693 :
1694 : bool
1695 1145153204 : noop_move_p (const rtx_insn *insn)
1696 : {
1697 1145153204 : rtx pat = PATTERN (insn);
1698 :
1699 1145153204 : if (INSN_CODE (insn) == NOOP_MOVE_INSN_CODE)
1700 : return true;
1701 :
1702 : /* Check the code to be executed for COND_EXEC. */
1703 1145146449 : if (GET_CODE (pat) == COND_EXEC)
1704 0 : pat = COND_EXEC_CODE (pat);
1705 :
1706 1145146449 : if (GET_CODE (pat) == SET && set_noop_p (pat))
1707 : return true;
1708 :
1709 1145116446 : if (GET_CODE (pat) == PARALLEL)
1710 : {
1711 : int i;
1712 : /* If nothing but SETs of registers to themselves,
1713 : this insn can also be deleted. */
1714 159207525 : for (i = 0; i < XVECLEN (pat, 0); i++)
1715 : {
1716 159207419 : rtx tem = XVECEXP (pat, 0, i);
1717 :
1718 159207419 : if (GET_CODE (tem) == USE || GET_CODE (tem) == CLOBBER)
1719 21413 : continue;
1720 :
1721 159186006 : if (GET_CODE (tem) != SET || ! set_noop_p (tem))
1722 159185898 : return false;
1723 : }
1724 :
1725 : return true;
1726 : }
1727 : return false;
1728 : }
1729 :
1730 :
1731 : /* Return true if register in range [REGNO, ENDREGNO)
1732 : appears either explicitly or implicitly in X
1733 : other than being stored into.
1734 :
1735 : References contained within the substructure at LOC do not count.
1736 : LOC may be zero, meaning don't ignore anything. */
1737 :
1738 : bool
1739 2211688468 : refers_to_regno_p (unsigned int regno, unsigned int endregno, const_rtx x,
1740 : rtx *loc)
1741 : {
1742 2939459316 : int i;
1743 2939459316 : unsigned int x_regno;
1744 2939459316 : RTX_CODE code;
1745 2939459316 : const char *fmt;
1746 :
1747 2939459316 : repeat:
1748 : /* The contents of a REG_NONNEG note is always zero, so we must come here
1749 : upon repeat in case the last REG_NOTE is a REG_NONNEG note. */
1750 2939459316 : if (x == 0)
1751 : return false;
1752 :
1753 2939459316 : code = GET_CODE (x);
1754 :
1755 2939459316 : switch (code)
1756 : {
1757 1601401338 : case REG:
1758 1601401338 : x_regno = REGNO (x);
1759 :
1760 : /* If we modifying the stack, frame, or argument pointer, it will
1761 : clobber a virtual register. In fact, we could be more precise,
1762 : but it isn't worth it. */
1763 1601401338 : if ((x_regno == STACK_POINTER_REGNUM
1764 1601401338 : || (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1765 1601401338 : && x_regno == ARG_POINTER_REGNUM)
1766 : || x_regno == FRAME_POINTER_REGNUM)
1767 125468766 : && VIRTUAL_REGISTER_NUM_P (regno))
1768 : return true;
1769 :
1770 1601401336 : return endregno > x_regno && regno < END_REGNO (x);
1771 :
1772 33248137 : case SUBREG:
1773 : /* If this is a SUBREG of a hard reg, we can see exactly which
1774 : registers are being modified. Otherwise, handle normally. */
1775 33248137 : if (REG_P (SUBREG_REG (x))
1776 33248137 : && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
1777 : {
1778 1856 : unsigned int inner_regno = subreg_regno (x);
1779 1856 : unsigned int inner_endregno
1780 : = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
1781 1856 : ? subreg_nregs (x) : 1);
1782 :
1783 1856 : return endregno > inner_regno && regno < inner_endregno;
1784 : }
1785 : break;
1786 :
1787 98155352 : case CLOBBER:
1788 98155352 : case SET:
1789 98155352 : if (&SET_DEST (x) != loc
1790 : /* Note setting a SUBREG counts as referring to the REG it is in for
1791 : a pseudo but not for hard registers since we can
1792 : treat each word individually. */
1793 98155352 : && ((GET_CODE (SET_DEST (x)) == SUBREG
1794 667259 : && loc != &SUBREG_REG (SET_DEST (x))
1795 667259 : && REG_P (SUBREG_REG (SET_DEST (x)))
1796 667259 : && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
1797 667259 : && refers_to_regno_p (regno, endregno,
1798 : SUBREG_REG (SET_DEST (x)), loc))
1799 98133910 : || (!REG_P (SET_DEST (x))
1800 10293066 : && refers_to_regno_p (regno, endregno, SET_DEST (x), loc))))
1801 134409 : return true;
1802 :
1803 98020943 : if (code == CLOBBER || loc == &SET_SRC (x))
1804 : return false;
1805 81741759 : x = SET_SRC (x);
1806 81741759 : goto repeat;
1807 :
1808 : default:
1809 : break;
1810 : }
1811 :
1812 : /* X does not match, so try its subexpressions. */
1813 :
1814 1239900770 : fmt = GET_RTX_FORMAT (code);
1815 2418418019 : for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1816 : {
1817 1831745514 : if (fmt[i] == 'e' && loc != &XEXP (x, i))
1818 : {
1819 1128101121 : if (i == 0)
1820 : {
1821 646029089 : x = XEXP (x, 0);
1822 646029089 : goto repeat;
1823 : }
1824 : else
1825 482072032 : if (refers_to_regno_p (regno, endregno, XEXP (x, i), loc))
1826 : return true;
1827 : }
1828 703644393 : else if (fmt[i] == 'E')
1829 : {
1830 44798714 : int j;
1831 164627460 : for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1832 121507045 : if (loc != &XVECEXP (x, i, j)
1833 121507045 : && refers_to_regno_p (regno, endregno, XVECEXP (x, i, j), loc))
1834 : return true;
1835 : }
1836 : }
1837 : return false;
1838 : }
1839 :
1840 : /* Rreturn true if modifying X will affect IN. If X is a register or a SUBREG,
1841 : we check if any register number in X conflicts with the relevant register
1842 : numbers. If X is a constant, return false. If X is a MEM, return true iff
1843 : IN contains a MEM (we don't bother checking for memory addresses that can't
1844 : conflict because we expect this to be a rare case. */
1845 :
1846 : bool
1847 1490100986 : reg_overlap_mentioned_p (const_rtx x, const_rtx in)
1848 : {
1849 1490100986 : unsigned int regno, endregno;
1850 :
1851 : /* If either argument is a constant, then modifying X cannot
1852 : affect IN. Here we look at IN, we can profitably combine
1853 : CONSTANT_P (x) with the switch statement below. */
1854 1490100986 : if (CONSTANT_P (in))
1855 : return false;
1856 :
1857 1461263928 : recurse:
1858 1461267842 : switch (GET_CODE (x))
1859 : {
1860 3914 : case CLOBBER:
1861 3914 : case STRICT_LOW_PART:
1862 3914 : case ZERO_EXTRACT:
1863 3914 : case SIGN_EXTRACT:
1864 : /* Overly conservative. */
1865 3914 : x = XEXP (x, 0);
1866 3914 : goto recurse;
1867 :
1868 1326358 : case SUBREG:
1869 1326358 : regno = REGNO (SUBREG_REG (x));
1870 1326358 : if (regno < FIRST_PSEUDO_REGISTER)
1871 0 : regno = subreg_regno (x);
1872 1326358 : endregno = regno + (regno < FIRST_PSEUDO_REGISTER
1873 0 : ? subreg_nregs (x) : 1);
1874 1326358 : goto do_reg;
1875 :
1876 1456108232 : case REG:
1877 1456108232 : regno = REGNO (x);
1878 1456108232 : endregno = END_REGNO (x);
1879 1457434590 : do_reg:
1880 1457434590 : return refers_to_regno_p (regno, endregno, in, (rtx*) 0);
1881 :
1882 1814640 : case MEM:
1883 1814640 : {
1884 1814640 : const char *fmt;
1885 1814640 : int i;
1886 :
1887 1814640 : if (MEM_P (in))
1888 : return true;
1889 :
1890 1676284 : fmt = GET_RTX_FORMAT (GET_CODE (in));
1891 3570226 : for (i = GET_RTX_LENGTH (GET_CODE (in)) - 1; i >= 0; i--)
1892 1941833 : if (fmt[i] == 'e')
1893 : {
1894 523326 : if (reg_overlap_mentioned_p (x, XEXP (in, i)))
1895 : return true;
1896 : }
1897 1418507 : else if (fmt[i] == 'E')
1898 : {
1899 8268 : int j;
1900 37057 : for (j = XVECLEN (in, i) - 1; j >= 0; --j)
1901 29824 : if (reg_overlap_mentioned_p (x, XVECEXP (in, i, j)))
1902 : return true;
1903 : }
1904 :
1905 : return false;
1906 : }
1907 :
1908 1897552 : case SCRATCH:
1909 1897552 : case PC:
1910 1897552 : return reg_mentioned_p (x, in);
1911 :
1912 676 : case PARALLEL:
1913 676 : {
1914 676 : int i;
1915 :
1916 : /* If any register in here refers to it we return true. */
1917 1218 : for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1918 1084 : if (XEXP (XVECEXP (x, 0, i), 0) != 0
1919 1084 : && reg_overlap_mentioned_p (XEXP (XVECEXP (x, 0, i), 0), in))
1920 : return true;
1921 : return false;
1922 : }
1923 :
1924 116470 : default:
1925 116470 : gcc_assert (CONSTANT_P (x));
1926 : return false;
1927 : }
1928 : }
1929 :
1930 : /* Call FUN on each register or MEM that is stored into or clobbered by X.
1931 : (X would be the pattern of an insn). DATA is an arbitrary pointer,
1932 : ignored by note_stores, but passed to FUN.
1933 :
1934 : FUN receives three arguments:
1935 : 1. the REG, MEM or PC being stored in or clobbered,
1936 : 2. the SET or CLOBBER rtx that does the store,
1937 : 3. the pointer DATA provided to note_stores.
1938 :
1939 : If the item being stored in or clobbered is a SUBREG of a hard register,
1940 : the SUBREG will be passed. */
1941 :
1942 : void
1943 7085256605 : note_pattern_stores (const_rtx x,
1944 : void (*fun) (rtx, const_rtx, void *), void *data)
1945 : {
1946 7085256605 : int i;
1947 :
1948 7085256605 : if (GET_CODE (x) == COND_EXEC)
1949 0 : x = COND_EXEC_CODE (x);
1950 :
1951 7085256605 : if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
1952 : {
1953 5218803400 : rtx dest = SET_DEST (x);
1954 :
1955 5218803400 : while ((GET_CODE (dest) == SUBREG
1956 20093648 : && (!REG_P (SUBREG_REG (dest))
1957 20093648 : || REGNO (SUBREG_REG (dest)) >= FIRST_PSEUDO_REGISTER))
1958 5219756872 : || GET_CODE (dest) == ZERO_EXTRACT
1959 10459308195 : || GET_CODE (dest) == STRICT_LOW_PART)
1960 21016862 : dest = XEXP (dest, 0);
1961 :
1962 : /* If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions,
1963 : each of whose first operand is a register. */
1964 5218803400 : if (GET_CODE (dest) == PARALLEL)
1965 : {
1966 1469477 : for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
1967 932275 : if (XEXP (XVECEXP (dest, 0, i), 0) != 0)
1968 932275 : (*fun) (XEXP (XVECEXP (dest, 0, i), 0), x, data);
1969 : }
1970 : else
1971 5218266198 : (*fun) (dest, x, data);
1972 : }
1973 :
1974 1866453205 : else if (GET_CODE (x) == PARALLEL)
1975 2134224047 : for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1976 1437400264 : note_pattern_stores (XVECEXP (x, 0, i), fun, data);
1977 7085256605 : }
1978 :
1979 : /* Same, but for an instruction. If the instruction is a call, include
1980 : any CLOBBERs in its CALL_INSN_FUNCTION_USAGE. */
1981 :
1982 : void
1983 3434892627 : note_stores (const rtx_insn *insn,
1984 : void (*fun) (rtx, const_rtx, void *), void *data)
1985 : {
1986 3434892627 : if (CALL_P (insn))
1987 164477298 : for (rtx link = CALL_INSN_FUNCTION_USAGE (insn);
1988 501404197 : link; link = XEXP (link, 1))
1989 336926899 : if (GET_CODE (XEXP (link, 0)) == CLOBBER)
1990 12056865 : note_pattern_stores (XEXP (link, 0), fun, data);
1991 3434892627 : note_pattern_stores (PATTERN (insn), fun, data);
1992 3434892627 : }
1993 :
1994 : /* Like notes_stores, but call FUN for each expression that is being
1995 : referenced in PBODY, a pointer to the PATTERN of an insn. We only call
1996 : FUN for each expression, not any interior subexpressions. FUN receives a
1997 : pointer to the expression and the DATA passed to this function.
1998 :
1999 : Note that this is not quite the same test as that done in reg_referenced_p
2000 : since that considers something as being referenced if it is being
2001 : partially set, while we do not. */
2002 :
2003 : void
2004 1111315566 : note_uses (rtx *pbody, void (*fun) (rtx *, void *), void *data)
2005 : {
2006 1111315566 : rtx body = *pbody;
2007 1111315566 : int i;
2008 :
2009 1111315566 : switch (GET_CODE (body))
2010 : {
2011 0 : case COND_EXEC:
2012 0 : (*fun) (&COND_EXEC_TEST (body), data);
2013 0 : note_uses (&COND_EXEC_CODE (body), fun, data);
2014 0 : return;
2015 :
2016 85969595 : case PARALLEL:
2017 265671213 : for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
2018 179701618 : note_uses (&XVECEXP (body, 0, i), fun, data);
2019 : return;
2020 :
2021 0 : case SEQUENCE:
2022 0 : for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
2023 0 : note_uses (&PATTERN (XVECEXP (body, 0, i)), fun, data);
2024 : return;
2025 :
2026 4846084 : case USE:
2027 4846084 : (*fun) (&XEXP (body, 0), data);
2028 4846084 : return;
2029 :
2030 267080 : case ASM_OPERANDS:
2031 374028 : for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
2032 106948 : (*fun) (&ASM_OPERANDS_INPUT (body, i), data);
2033 : return;
2034 :
2035 138118 : case TRAP_IF:
2036 138118 : (*fun) (&TRAP_CONDITION (body), data);
2037 138118 : return;
2038 :
2039 10914 : case PREFETCH:
2040 10914 : (*fun) (&XEXP (body, 0), data);
2041 10914 : return;
2042 :
2043 3237900 : case UNSPEC:
2044 3237900 : case UNSPEC_VOLATILE:
2045 6496533 : for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
2046 3258633 : (*fun) (&XVECEXP (body, 0, i), data);
2047 : return;
2048 :
2049 83952145 : case CLOBBER:
2050 83952145 : if (MEM_P (XEXP (body, 0)))
2051 3463678 : (*fun) (&XEXP (XEXP (body, 0), 0), data);
2052 : return;
2053 :
2054 549893505 : case SET:
2055 549893505 : {
2056 549893505 : rtx dest = SET_DEST (body);
2057 :
2058 : /* For sets we replace everything in source plus registers in memory
2059 : expression in store and operands of a ZERO_EXTRACT. */
2060 549893505 : (*fun) (&SET_SRC (body), data);
2061 :
2062 549893505 : if (GET_CODE (dest) == ZERO_EXTRACT)
2063 : {
2064 32932 : (*fun) (&XEXP (dest, 1), data);
2065 32932 : (*fun) (&XEXP (dest, 2), data);
2066 : }
2067 :
2068 552039395 : while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART)
2069 2145890 : dest = XEXP (dest, 0);
2070 :
2071 549893505 : if (MEM_P (dest))
2072 94745502 : (*fun) (&XEXP (dest, 0), data);
2073 : }
2074 : return;
2075 :
2076 383000225 : default:
2077 : /* All the other possibilities never store. */
2078 383000225 : (*fun) (pbody, data);
2079 383000225 : return;
2080 : }
2081 : }
2082 :
2083 : /* Try to add a description of REG X to this object, stopping once
2084 : the REF_END limit has been reached. FLAGS is a bitmask of
2085 : rtx_obj_reference flags that describe the context. */
2086 :
2087 : void
2088 835384519 : rtx_properties::try_to_add_reg (const_rtx x, unsigned int flags)
2089 : {
2090 835384519 : if (REG_NREGS (x) != 1)
2091 2632337 : flags |= rtx_obj_flags::IS_MULTIREG;
2092 835384519 : machine_mode mode = GET_MODE (x);
2093 835384519 : unsigned int start_regno = REGNO (x);
2094 835384519 : unsigned int end_regno = END_REGNO (x);
2095 1673401375 : for (unsigned int regno = start_regno; regno < end_regno; ++regno)
2096 838016856 : if (ref_iter != ref_end)
2097 837862244 : *ref_iter++ = rtx_obj_reference (regno, flags, mode,
2098 837862244 : regno - start_regno);
2099 835384519 : }
2100 :
2101 : /* Add a description of destination X to this object. FLAGS is a bitmask
2102 : of rtx_obj_reference flags that describe the context.
2103 :
2104 : This routine accepts all rtxes that can legitimately appear in a
2105 : SET_DEST. */
2106 :
2107 : void
2108 429163673 : rtx_properties::try_to_add_dest (const_rtx x, unsigned int flags)
2109 : {
2110 : /* If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions,
2111 : each of whose first operand is a register. */
2112 429163673 : if (UNLIKELY (GET_CODE (x) == PARALLEL))
2113 : {
2114 132644 : for (int i = XVECLEN (x, 0) - 1; i >= 0; --i)
2115 83699 : if (rtx dest = XEXP (XVECEXP (x, 0, i), 0))
2116 83699 : try_to_add_dest (dest, flags);
2117 : return;
2118 : }
2119 :
2120 429114728 : unsigned int base_flags = flags & rtx_obj_flags::STICKY_FLAGS;
2121 429114728 : flags |= rtx_obj_flags::IS_WRITE;
2122 430743546 : for (;;)
2123 430743546 : if (GET_CODE (x) == ZERO_EXTRACT)
2124 : {
2125 20444 : try_to_add_src (XEXP (x, 1), base_flags);
2126 20444 : try_to_add_src (XEXP (x, 2), base_flags);
2127 20444 : flags |= rtx_obj_flags::IS_READ;
2128 20444 : x = XEXP (x, 0);
2129 : }
2130 430723102 : else if (GET_CODE (x) == STRICT_LOW_PART)
2131 : {
2132 56162 : flags |= rtx_obj_flags::IS_READ;
2133 56162 : x = XEXP (x, 0);
2134 : }
2135 430666940 : else if (GET_CODE (x) == SUBREG)
2136 : {
2137 1552212 : flags |= rtx_obj_flags::IN_SUBREG;
2138 1552212 : if (read_modify_subreg_p (x))
2139 899105 : flags |= rtx_obj_flags::IS_READ;
2140 1552212 : x = SUBREG_REG (x);
2141 : }
2142 : else
2143 : break;
2144 :
2145 429114728 : if (MEM_P (x))
2146 : {
2147 59724452 : if (ref_iter != ref_end)
2148 59713796 : *ref_iter++ = rtx_obj_reference (MEM_REGNO, flags, GET_MODE (x));
2149 :
2150 59724452 : unsigned int addr_flags = base_flags | rtx_obj_flags::IN_MEM_STORE;
2151 59724452 : if (flags & rtx_obj_flags::IS_READ)
2152 3870 : addr_flags |= rtx_obj_flags::IN_MEM_LOAD;
2153 59724452 : try_to_add_src (XEXP (x, 0), addr_flags);
2154 59724452 : return;
2155 : }
2156 :
2157 369390276 : if (LIKELY (REG_P (x)))
2158 : {
2159 326482392 : if (REGNO (x) == STACK_POINTER_REGNUM)
2160 : {
2161 : /* Stack accesses are dependent on previous allocations and
2162 : anti-dependent on later deallocations, so both types of
2163 : stack operation are akin to a memory write. */
2164 23527919 : if (ref_iter != ref_end)
2165 23527919 : *ref_iter++ = rtx_obj_reference (MEM_REGNO, flags, BLKmode);
2166 :
2167 : /* We want to keep sp alive everywhere - by making all
2168 : writes to sp also use sp. */
2169 23527919 : flags |= rtx_obj_flags::IS_READ;
2170 : }
2171 326482392 : try_to_add_reg (x, flags);
2172 326482392 : return;
2173 : }
2174 : }
2175 :
2176 : /* Try to add a description of source X to this object, stopping once
2177 : the REF_END limit has been reached. FLAGS is a bitmask of
2178 : rtx_obj_reference flags that describe the context.
2179 :
2180 : This routine accepts all rtxes that can legitimately appear in a SET_SRC. */
2181 :
2182 : void
2183 929644172 : rtx_properties::try_to_add_src (const_rtx x, unsigned int flags)
2184 : {
2185 929644172 : unsigned int base_flags = flags & rtx_obj_flags::STICKY_FLAGS;
2186 929644172 : subrtx_iterator::array_type array;
2187 2944324172 : FOR_EACH_SUBRTX (iter, array, x, NONCONST)
2188 : {
2189 2014680000 : const_rtx x = *iter;
2190 2014680000 : rtx_code code = GET_CODE (x);
2191 2014680000 : if (code == REG)
2192 508902127 : try_to_add_reg (x, flags | rtx_obj_flags::IS_READ);
2193 : else if (code == MEM)
2194 : {
2195 108770020 : if (MEM_VOLATILE_P (x))
2196 3180969 : has_volatile_refs = true;
2197 :
2198 108770020 : if (!MEM_READONLY_P (x) && ref_iter != ref_end)
2199 : {
2200 103905879 : auto mem_flags = flags | rtx_obj_flags::IS_READ;
2201 103905879 : *ref_iter++ = rtx_obj_reference (MEM_REGNO, mem_flags,
2202 103905879 : GET_MODE (x));
2203 : }
2204 :
2205 108770020 : try_to_add_src (XEXP (x, 0),
2206 : base_flags | rtx_obj_flags::IN_MEM_LOAD);
2207 108770020 : iter.skip_subrtxes ();
2208 : }
2209 : else if (code == SUBREG)
2210 : {
2211 8150310 : try_to_add_src (SUBREG_REG (x), flags | rtx_obj_flags::IN_SUBREG);
2212 8150310 : iter.skip_subrtxes ();
2213 : }
2214 : else if (code == UNSPEC_VOLATILE)
2215 2645863 : has_volatile_refs = true;
2216 : else if (code == ASM_INPUT || code == ASM_OPERANDS)
2217 : {
2218 1060689 : has_asm = true;
2219 1060689 : if (MEM_VOLATILE_P (x))
2220 339850 : has_volatile_refs = true;
2221 : }
2222 : else if (code == PRE_INC
2223 : || code == PRE_DEC
2224 : || code == POST_INC
2225 : || code == POST_DEC
2226 : || code == PRE_MODIFY
2227 : || code == POST_MODIFY)
2228 : {
2229 12233711 : has_pre_post_modify = true;
2230 :
2231 12233711 : unsigned int addr_flags = (flags
2232 : | rtx_obj_flags::IS_PRE_POST_MODIFY
2233 : | rtx_obj_flags::IS_READ);
2234 12233711 : try_to_add_dest (XEXP (x, 0), addr_flags);
2235 12233711 : if (code == PRE_MODIFY || code == POST_MODIFY)
2236 355753 : iter.substitute (XEXP (XEXP (x, 1), 1));
2237 : else
2238 11877958 : iter.skip_subrtxes ();
2239 : }
2240 : else if (code == CALL)
2241 26818202 : has_call = true;
2242 : }
2243 929644172 : }
2244 :
2245 : /* Try to add a description of instruction pattern PAT to this object,
2246 : stopping once the REF_END limit has been reached. */
2247 :
2248 : void
2249 730239950 : rtx_properties::try_to_add_pattern (const_rtx pat)
2250 : {
2251 782674469 : switch (GET_CODE (pat))
2252 : {
2253 0 : case COND_EXEC:
2254 0 : try_to_add_src (COND_EXEC_TEST (pat));
2255 0 : try_to_add_pattern (COND_EXEC_CODE (pat));
2256 0 : break;
2257 :
2258 52434519 : case PARALLEL:
2259 52434519 : {
2260 52434519 : int last = XVECLEN (pat, 0) - 1;
2261 108650160 : for (int i = 0; i < last; ++i)
2262 56215641 : try_to_add_pattern (XVECEXP (pat, 0, i));
2263 52434519 : try_to_add_pattern (XVECEXP (pat, 0, last));
2264 52434519 : break;
2265 : }
2266 :
2267 228714 : case ASM_OPERANDS:
2268 299074 : for (int i = 0, len = ASM_OPERANDS_INPUT_LENGTH (pat); i < len; ++i)
2269 70360 : try_to_add_src (ASM_OPERANDS_INPUT (pat, i));
2270 : break;
2271 :
2272 52427694 : case CLOBBER:
2273 52427694 : try_to_add_dest (XEXP (pat, 0), rtx_obj_flags::IS_CLOBBER);
2274 52427694 : break;
2275 :
2276 361670373 : case SET:
2277 361670373 : try_to_add_dest (SET_DEST (pat));
2278 361670373 : try_to_add_src (SET_SRC (pat));
2279 361670373 : break;
2280 :
2281 315913169 : default:
2282 : /* All the other possibilities never store and can use a normal
2283 : rtx walk. This includes:
2284 :
2285 : - USE
2286 : - TRAP_IF
2287 : - PREFETCH
2288 : - UNSPEC
2289 : - UNSPEC_VOLATILE. */
2290 315913169 : try_to_add_src (pat);
2291 315913169 : break;
2292 : }
2293 730239950 : }
2294 :
2295 : /* Try to add a description of INSN to this object, stopping once
2296 : the REF_END limit has been reached. INCLUDE_NOTES is true if the
2297 : description should include REG_EQUAL and REG_EQUIV notes; all such
2298 : references will then be marked with rtx_obj_flags::IN_NOTE.
2299 :
2300 : For calls, this description includes all accesses in
2301 : CALL_INSN_FUNCTION_USAGE. It also include all implicit accesses
2302 : to global registers by the target function. However, it does not
2303 : include clobbers performed by the target function; callers that want
2304 : this information should instead use the function_abi interface. */
2305 :
2306 : void
2307 656774095 : rtx_properties::try_to_add_insn (const rtx_insn *insn, bool include_notes)
2308 : {
2309 656774095 : if (CALL_P (insn))
2310 : {
2311 : /* Non-const functions can read from global registers. Impure
2312 : functions can also set them.
2313 :
2314 : Adding the global registers first removes a situation in which
2315 : a fixed-form clobber of register R could come before a real set
2316 : of register R. */
2317 26801386 : if (!hard_reg_set_empty_p (global_reg_set)
2318 26801386 : && !RTL_CONST_CALL_P (insn))
2319 : {
2320 514 : unsigned int flags = rtx_obj_flags::IS_READ;
2321 514 : if (!RTL_PURE_CALL_P (insn))
2322 470 : flags |= rtx_obj_flags::IS_WRITE;
2323 47802 : for (unsigned int regno = 0; regno < FIRST_PSEUDO_REGISTER; ++regno)
2324 : /* As a special case, the stack pointer is invariant across calls
2325 : even if it has been marked global; see the corresponding
2326 : handling in df_get_call_refs. */
2327 47288 : if (regno != STACK_POINTER_REGNUM
2328 46774 : && global_regs[regno]
2329 436 : && ref_iter != ref_end)
2330 436 : *ref_iter++ = rtx_obj_reference (regno, flags,
2331 436 : reg_raw_mode[regno], 0);
2332 : }
2333 : /* Untyped calls implicitly set all function value registers.
2334 : Again, we add them first in case the main pattern contains
2335 : a fixed-form clobber. */
2336 26801386 : if (find_reg_note (insn, REG_UNTYPED_CALL, NULL_RTX))
2337 209994 : for (unsigned int regno = 0; regno < FIRST_PSEUDO_REGISTER; ++regno)
2338 207736 : if (targetm.calls.function_value_regno_p (regno)
2339 207736 : && ref_iter != ref_end)
2340 18064 : *ref_iter++ = rtx_obj_reference (regno, rtx_obj_flags::IS_WRITE,
2341 18064 : reg_raw_mode[regno], 0);
2342 26801386 : if (ref_iter != ref_end && !RTL_CONST_CALL_P (insn))
2343 : {
2344 25908197 : auto mem_flags = rtx_obj_flags::IS_READ;
2345 25908197 : if (!RTL_PURE_CALL_P (insn))
2346 24232097 : mem_flags |= rtx_obj_flags::IS_WRITE;
2347 25908197 : *ref_iter++ = rtx_obj_reference (MEM_REGNO, mem_flags, BLKmode);
2348 : }
2349 26801386 : try_to_add_pattern (PATTERN (insn));
2350 82857820 : for (rtx link = CALL_INSN_FUNCTION_USAGE (insn); link;
2351 56056434 : link = XEXP (link, 1))
2352 : {
2353 56056434 : rtx x = XEXP (link, 0);
2354 56056434 : if (GET_CODE (x) == CLOBBER)
2355 2748196 : try_to_add_dest (XEXP (x, 0), rtx_obj_flags::IS_CLOBBER);
2356 53308238 : else if (GET_CODE (x) == USE)
2357 52905370 : try_to_add_src (XEXP (x, 0));
2358 : }
2359 : }
2360 : else
2361 629972709 : try_to_add_pattern (PATTERN (insn));
2362 :
2363 656774095 : if (include_notes)
2364 995094886 : for (rtx note = REG_NOTES (insn); note; note = XEXP (note, 1))
2365 338321201 : if (REG_NOTE_KIND (note) == REG_EQUAL
2366 338321201 : || REG_NOTE_KIND (note) == REG_EQUIV)
2367 22399230 : try_to_add_note (XEXP (note, 0));
2368 656774095 : }
2369 :
2370 : /* Grow the storage by a bit while keeping the contents of the first
2371 : START elements. */
2372 :
2373 : void
2374 28516 : vec_rtx_properties_base::grow (ptrdiff_t start)
2375 : {
2376 : /* The same heuristic that vec uses. */
2377 28516 : ptrdiff_t new_elems = (ref_end - ref_begin) * 3 / 2;
2378 28516 : if (ref_begin == m_storage)
2379 : {
2380 24416 : ref_begin = XNEWVEC (rtx_obj_reference, new_elems);
2381 24416 : if (start)
2382 0 : memcpy (ref_begin, m_storage, start * sizeof (rtx_obj_reference));
2383 : }
2384 : else
2385 4100 : ref_begin = reinterpret_cast<rtx_obj_reference *>
2386 4100 : (xrealloc (ref_begin, new_elems * sizeof (rtx_obj_reference)));
2387 28516 : ref_iter = ref_begin + start;
2388 28516 : ref_end = ref_begin + new_elems;
2389 28516 : }
2390 :
2391 : /* Return true if X's old contents don't survive after INSN.
2392 : This will be true if X is a register and X dies in INSN or because
2393 : INSN entirely sets X.
2394 :
2395 : "Entirely set" means set directly and not through a SUBREG, or
2396 : ZERO_EXTRACT, so no trace of the old contents remains.
2397 : Likewise, REG_INC does not count.
2398 :
2399 : REG may be a hard or pseudo reg. Renumbering is not taken into account,
2400 : but for this use that makes no difference, since regs don't overlap
2401 : during their lifetimes. Therefore, this function may be used
2402 : at any time after deaths have been computed.
2403 :
2404 : If REG is a hard reg that occupies multiple machine registers, this
2405 : function will only return true if each of those registers will be replaced
2406 : by INSN. */
2407 :
2408 : bool
2409 116617612 : dead_or_set_p (const rtx_insn *insn, const_rtx x)
2410 : {
2411 116617612 : unsigned int regno, end_regno;
2412 116617612 : unsigned int i;
2413 :
2414 116617612 : gcc_assert (REG_P (x));
2415 :
2416 116617612 : regno = REGNO (x);
2417 116617612 : end_regno = END_REGNO (x);
2418 206496811 : for (i = regno; i < end_regno; i++)
2419 116618654 : if (! dead_or_set_regno_p (insn, i))
2420 : return false;
2421 :
2422 : return true;
2423 : }
2424 :
2425 : /* Return TRUE iff DEST is a register or subreg of a register, is a
2426 : complete rather than read-modify-write destination, and contains
2427 : register TEST_REGNO. */
2428 :
2429 : static bool
2430 207338384 : covers_regno_no_parallel_p (const_rtx dest, unsigned int test_regno)
2431 : {
2432 207338384 : unsigned int regno, endregno;
2433 :
2434 207338384 : if (GET_CODE (dest) == SUBREG && !read_modify_subreg_p (dest))
2435 500210 : dest = SUBREG_REG (dest);
2436 :
2437 207338384 : if (!REG_P (dest))
2438 : return false;
2439 :
2440 198552074 : regno = REGNO (dest);
2441 198552074 : endregno = END_REGNO (dest);
2442 198552074 : return (test_regno >= regno && test_regno < endregno);
2443 : }
2444 :
2445 : /* Like covers_regno_no_parallel_p, but also handles PARALLELs where
2446 : any member matches the covers_regno_no_parallel_p criteria. */
2447 :
2448 : static bool
2449 96906824 : covers_regno_p (const_rtx dest, unsigned int test_regno)
2450 : {
2451 96906824 : if (GET_CODE (dest) == PARALLEL)
2452 : {
2453 : /* Some targets place small structures in registers for return
2454 : values of functions, and those registers are wrapped in
2455 : PARALLELs that we may see as the destination of a SET. */
2456 304 : int i;
2457 :
2458 822 : for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
2459 : {
2460 518 : rtx inner = XEXP (XVECEXP (dest, 0, i), 0);
2461 518 : if (inner != NULL_RTX
2462 518 : && covers_regno_no_parallel_p (inner, test_regno))
2463 : return true;
2464 : }
2465 :
2466 : return false;
2467 : }
2468 : else
2469 96906520 : return covers_regno_no_parallel_p (dest, test_regno);
2470 : }
2471 :
2472 : /* Utility function for dead_or_set_p to check an individual register. */
2473 :
2474 : bool
2475 118814034 : dead_or_set_regno_p (const rtx_insn *insn, unsigned int test_regno)
2476 : {
2477 118814034 : const_rtx pattern;
2478 :
2479 : /* See if there is a death note for something that includes TEST_REGNO. */
2480 118814034 : if (find_regno_note (insn, REG_DEAD, test_regno))
2481 : return true;
2482 :
2483 76807839 : if (CALL_P (insn)
2484 76807839 : && find_regno_fusage (insn, CLOBBER, test_regno))
2485 : return true;
2486 :
2487 76793751 : pattern = PATTERN (insn);
2488 :
2489 : /* If a COND_EXEC is not executed, the value survives. */
2490 76793751 : if (GET_CODE (pattern) == COND_EXEC)
2491 : return false;
2492 :
2493 76793751 : if (GET_CODE (pattern) == SET || GET_CODE (pattern) == CLOBBER)
2494 56312633 : return covers_regno_p (SET_DEST (pattern), test_regno);
2495 20481118 : else if (GET_CODE (pattern) == PARALLEL)
2496 : {
2497 20265949 : int i;
2498 :
2499 46986216 : for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
2500 : {
2501 40835619 : rtx body = XVECEXP (pattern, 0, i);
2502 :
2503 40835619 : if (GET_CODE (body) == COND_EXEC)
2504 0 : body = COND_EXEC_CODE (body);
2505 :
2506 20518968 : if ((GET_CODE (body) == SET || GET_CODE (body) == CLOBBER)
2507 61113159 : && covers_regno_p (SET_DEST (body), test_regno))
2508 : return true;
2509 : }
2510 : }
2511 :
2512 : return false;
2513 : }
2514 :
2515 : /* Return the reg-note of kind KIND in insn INSN, if there is one.
2516 : If DATUM is nonzero, look for one whose datum is DATUM. */
2517 :
2518 : rtx
2519 8194979895 : find_reg_note (const_rtx insn, enum reg_note kind, const_rtx datum)
2520 : {
2521 8194979895 : rtx link;
2522 :
2523 8194979895 : gcc_checking_assert (insn);
2524 :
2525 : /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */
2526 8194979895 : if (! INSN_P (insn))
2527 : return 0;
2528 8079440878 : if (datum == 0)
2529 : {
2530 16093274458 : for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
2531 9591900208 : if (REG_NOTE_KIND (link) == kind)
2532 : return link;
2533 : return 0;
2534 : }
2535 :
2536 689052648 : for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
2537 394091909 : if (REG_NOTE_KIND (link) == kind && datum == XEXP (link, 0))
2538 : return link;
2539 : return 0;
2540 : }
2541 :
2542 : /* Return the reg-note of kind KIND in insn INSN which applies to register
2543 : number REGNO, if any. Return 0 if there is no such reg-note. Note that
2544 : the REGNO of this NOTE need not be REGNO if REGNO is a hard register;
2545 : it might be the case that the note overlaps REGNO. */
2546 :
2547 : rtx
2548 392206597 : find_regno_note (const_rtx insn, enum reg_note kind, unsigned int regno)
2549 : {
2550 392206597 : rtx link;
2551 :
2552 : /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */
2553 392206597 : if (! INSN_P (insn))
2554 : return 0;
2555 :
2556 581537787 : for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
2557 366095298 : if (REG_NOTE_KIND (link) == kind
2558 : /* Verify that it is a register, so that scratch and MEM won't cause a
2559 : problem here. */
2560 257416461 : && REG_P (XEXP (link, 0))
2561 257416461 : && REGNO (XEXP (link, 0)) <= regno
2562 572837658 : && END_REGNO (XEXP (link, 0)) > regno)
2563 : return link;
2564 : return 0;
2565 : }
2566 :
2567 : /* Return a REG_EQUIV or REG_EQUAL note if insn has only a single set and
2568 : has such a note. */
2569 :
2570 : rtx
2571 1761788172 : find_reg_equal_equiv_note (const_rtx insn)
2572 : {
2573 1761788172 : rtx link;
2574 :
2575 1761788172 : if (!INSN_P (insn))
2576 : return 0;
2577 :
2578 3042282222 : for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
2579 1379287063 : if (REG_NOTE_KIND (link) == REG_EQUAL
2580 1379287063 : || REG_NOTE_KIND (link) == REG_EQUIV)
2581 : {
2582 : /* FIXME: We should never have REG_EQUAL/REG_EQUIV notes on
2583 : insns that have multiple sets. Checking single_set to
2584 : make sure of this is not the proper check, as explained
2585 : in the comment in set_unique_reg_note.
2586 :
2587 : This should be changed into an assert. */
2588 92484059 : if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
2589 : return 0;
2590 92484059 : return link;
2591 : }
2592 : return NULL;
2593 : }
2594 :
2595 : /* Check whether INSN is a single_set whose source is known to be
2596 : equivalent to a constant. Return that constant if so, otherwise
2597 : return null. */
2598 :
2599 : rtx
2600 2218453 : find_constant_src (const rtx_insn *insn)
2601 : {
2602 2218453 : rtx note, set, x;
2603 :
2604 2218453 : set = single_set (insn);
2605 2218453 : if (set)
2606 : {
2607 2218453 : x = avoid_constant_pool_reference (SET_SRC (set));
2608 2218453 : if (CONSTANT_P (x))
2609 : return x;
2610 : }
2611 :
2612 1547546 : note = find_reg_equal_equiv_note (insn);
2613 1547546 : if (note && CONSTANT_P (XEXP (note, 0)))
2614 610 : return XEXP (note, 0);
2615 :
2616 : return NULL_RTX;
2617 : }
2618 :
2619 : /* Return true if DATUM, or any overlap of DATUM, of kind CODE is found
2620 : in the CALL_INSN_FUNCTION_USAGE information of INSN. */
2621 :
2622 : bool
2623 88186953 : find_reg_fusage (const_rtx insn, enum rtx_code code, const_rtx datum)
2624 : {
2625 : /* If it's not a CALL_INSN, it can't possibly have a
2626 : CALL_INSN_FUNCTION_USAGE field, so don't bother checking. */
2627 88186953 : if (!CALL_P (insn))
2628 : return false;
2629 :
2630 88186953 : gcc_assert (datum);
2631 :
2632 88186953 : if (!REG_P (datum))
2633 : {
2634 33604 : rtx link;
2635 :
2636 33604 : for (link = CALL_INSN_FUNCTION_USAGE (insn);
2637 69500 : link;
2638 35896 : link = XEXP (link, 1))
2639 35896 : if (GET_CODE (XEXP (link, 0)) == code
2640 35896 : && rtx_equal_p (datum, XEXP (XEXP (link, 0), 0)))
2641 : return true;
2642 : }
2643 : else
2644 : {
2645 88153349 : unsigned int regno = REGNO (datum);
2646 :
2647 : /* CALL_INSN_FUNCTION_USAGE information cannot contain references
2648 : to pseudo registers, so don't bother checking. */
2649 :
2650 88153349 : if (regno < FIRST_PSEUDO_REGISTER)
2651 : {
2652 81184897 : unsigned int end_regno = END_REGNO (datum);
2653 81184897 : unsigned int i;
2654 :
2655 142423302 : for (i = regno; i < end_regno; i++)
2656 81184897 : if (find_regno_fusage (insn, code, i))
2657 : return true;
2658 : }
2659 : }
2660 :
2661 : return false;
2662 : }
2663 :
2664 : /* Return true if REGNO, or any overlap of REGNO, of kind CODE is found
2665 : in the CALL_INSN_FUNCTION_USAGE information of INSN. */
2666 :
2667 : bool
2668 81413808 : find_regno_fusage (const_rtx insn, enum rtx_code code, unsigned int regno)
2669 : {
2670 81413808 : rtx link;
2671 :
2672 : /* CALL_INSN_FUNCTION_USAGE information cannot contain references
2673 : to pseudo registers, so don't bother checking. */
2674 :
2675 81413808 : if (regno >= FIRST_PSEUDO_REGISTER
2676 81355933 : || !CALL_P (insn) )
2677 : return false;
2678 :
2679 241808730 : for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
2680 : {
2681 180414931 : rtx op, reg;
2682 :
2683 180414931 : if (GET_CODE (op = XEXP (link, 0)) == code
2684 51262931 : && REG_P (reg = XEXP (op, 0))
2685 51254315 : && REGNO (reg) <= regno
2686 214448338 : && END_REGNO (reg) > regno)
2687 : return true;
2688 : }
2689 :
2690 : return false;
2691 : }
2692 :
2693 :
2694 : /* Return true if KIND is an integer REG_NOTE. */
2695 :
2696 : static bool
2697 0 : int_reg_note_p (enum reg_note kind)
2698 : {
2699 0 : return kind == REG_BR_PROB;
2700 : }
2701 :
2702 : /* Allocate a register note with kind KIND and datum DATUM. LIST is
2703 : stored as the pointer to the next register note. */
2704 :
2705 : rtx
2706 757823816 : alloc_reg_note (enum reg_note kind, rtx datum, rtx list)
2707 : {
2708 757823816 : rtx note;
2709 :
2710 757823816 : gcc_checking_assert (!int_reg_note_p (kind));
2711 757823816 : switch (kind)
2712 : {
2713 27995 : case REG_LABEL_TARGET:
2714 27995 : case REG_LABEL_OPERAND:
2715 27995 : case REG_TM:
2716 : /* These types of register notes use an INSN_LIST rather than an
2717 : EXPR_LIST, so that copying is done right and dumps look
2718 : better. */
2719 27995 : note = alloc_INSN_LIST (datum, list);
2720 27995 : PUT_REG_NOTE_KIND (note, kind);
2721 27995 : break;
2722 :
2723 757795821 : default:
2724 757795821 : note = alloc_EXPR_LIST (kind, datum, list);
2725 757795821 : break;
2726 : }
2727 :
2728 757823816 : return note;
2729 : }
2730 :
2731 : /* Add register note with kind KIND and datum DATUM to INSN. */
2732 :
2733 : void
2734 750998094 : add_reg_note (rtx insn, enum reg_note kind, rtx datum)
2735 : {
2736 750998094 : REG_NOTES (insn) = alloc_reg_note (kind, datum, REG_NOTES (insn));
2737 750998094 : }
2738 :
2739 : /* Add an integer register note with kind KIND and datum DATUM to INSN. */
2740 :
2741 : void
2742 5279631 : add_int_reg_note (rtx_insn *insn, enum reg_note kind, int datum)
2743 : {
2744 5279631 : gcc_checking_assert (int_reg_note_p (kind));
2745 5279631 : REG_NOTES (insn) = gen_rtx_INT_LIST ((machine_mode) kind,
2746 : datum, REG_NOTES (insn));
2747 5279631 : }
2748 :
2749 : /* Add a REG_ARGS_SIZE note to INSN with value VALUE. */
2750 :
2751 : void
2752 5477135 : add_args_size_note (rtx_insn *insn, poly_int64 value)
2753 : {
2754 5477135 : gcc_checking_assert (!find_reg_note (insn, REG_ARGS_SIZE, NULL_RTX));
2755 8968512 : add_reg_note (insn, REG_ARGS_SIZE, gen_int_mode (value, Pmode));
2756 5477135 : }
2757 :
2758 : /* Add a register note like NOTE to INSN. */
2759 :
2760 : void
2761 0 : add_shallow_copy_of_reg_note (rtx_insn *insn, rtx note)
2762 : {
2763 0 : if (GET_CODE (note) == INT_LIST)
2764 0 : add_int_reg_note (insn, REG_NOTE_KIND (note), XINT (note, 0));
2765 : else
2766 0 : add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
2767 0 : }
2768 :
2769 : /* Duplicate NOTE and return the copy. */
2770 : rtx
2771 2293573 : duplicate_reg_note (rtx note)
2772 : {
2773 2293573 : reg_note kind = REG_NOTE_KIND (note);
2774 :
2775 2293573 : if (GET_CODE (note) == INT_LIST)
2776 300357 : return gen_rtx_INT_LIST ((machine_mode) kind, XINT (note, 0), NULL_RTX);
2777 1993216 : else if (GET_CODE (note) == EXPR_LIST)
2778 1993216 : return alloc_reg_note (kind, copy_insn_1 (XEXP (note, 0)), NULL_RTX);
2779 : else
2780 0 : return alloc_reg_note (kind, XEXP (note, 0), NULL_RTX);
2781 : }
2782 :
2783 : /* Remove register note NOTE from the REG_NOTES of INSN. */
2784 :
2785 : void
2786 8271103 : remove_note (rtx_insn *insn, const_rtx note)
2787 : {
2788 8271103 : rtx link;
2789 :
2790 8271103 : if (note == NULL_RTX)
2791 : return;
2792 :
2793 7451878 : if (REG_NOTES (insn) == note)
2794 6960431 : REG_NOTES (insn) = XEXP (note, 1);
2795 : else
2796 982747 : for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
2797 982747 : if (XEXP (link, 1) == note)
2798 : {
2799 491447 : XEXP (link, 1) = XEXP (note, 1);
2800 491447 : break;
2801 : }
2802 :
2803 7451878 : switch (REG_NOTE_KIND (note))
2804 : {
2805 2640820 : case REG_EQUAL:
2806 2640820 : case REG_EQUIV:
2807 2640820 : df_notes_rescan (insn);
2808 2640820 : break;
2809 : default:
2810 : break;
2811 : }
2812 : }
2813 :
2814 : /* Remove REG_EQUAL and/or REG_EQUIV notes if INSN has such notes.
2815 : If NO_RESCAN is false and any notes were removed, call
2816 : df_notes_rescan. Return true if any note has been removed. */
2817 :
2818 : bool
2819 33409 : remove_reg_equal_equiv_notes (rtx_insn *insn, bool no_rescan)
2820 : {
2821 33409 : rtx *loc;
2822 33409 : bool ret = false;
2823 :
2824 33409 : loc = ®_NOTES (insn);
2825 40711 : while (*loc)
2826 : {
2827 7302 : enum reg_note kind = REG_NOTE_KIND (*loc);
2828 7302 : if (kind == REG_EQUAL || kind == REG_EQUIV)
2829 : {
2830 382 : *loc = XEXP (*loc, 1);
2831 382 : ret = true;
2832 : }
2833 : else
2834 6920 : loc = &XEXP (*loc, 1);
2835 : }
2836 33409 : if (ret && !no_rescan)
2837 382 : df_notes_rescan (insn);
2838 33409 : return ret;
2839 : }
2840 :
2841 : /* Remove all REG_EQUAL and REG_EQUIV notes referring to REGNO. */
2842 :
2843 : void
2844 3916024 : remove_reg_equal_equiv_notes_for_regno (unsigned int regno)
2845 : {
2846 3916024 : df_ref eq_use;
2847 :
2848 3916024 : if (!df)
2849 : return;
2850 :
2851 : /* This loop is a little tricky. We cannot just go down the chain because
2852 : it is being modified by some actions in the loop. So we just iterate
2853 : over the head. We plan to drain the list anyway. */
2854 4052208 : while ((eq_use = DF_REG_EQ_USE_CHAIN (regno)) != NULL)
2855 : {
2856 136184 : rtx_insn *insn = DF_REF_INSN (eq_use);
2857 136184 : rtx note = find_reg_equal_equiv_note (insn);
2858 :
2859 : /* This assert is generally triggered when someone deletes a REG_EQUAL
2860 : or REG_EQUIV note by hacking the list manually rather than calling
2861 : remove_note. */
2862 136184 : gcc_assert (note);
2863 :
2864 136184 : remove_note (insn, note);
2865 : }
2866 : }
2867 :
2868 : /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
2869 : return 1 if it is found. A simple equality test is used to determine if
2870 : NODE matches. */
2871 :
2872 : bool
2873 26 : in_insn_list_p (const rtx_insn_list *listp, const rtx_insn *node)
2874 : {
2875 26 : const_rtx x;
2876 :
2877 26 : for (x = listp; x; x = XEXP (x, 1))
2878 0 : if (node == XEXP (x, 0))
2879 : return true;
2880 :
2881 : return false;
2882 : }
2883 :
2884 : /* Search LISTP (an INSN_LIST) for an entry whose first operand is NODE and
2885 : remove that entry from the list if it is found.
2886 :
2887 : A simple equality test is used to determine if NODE matches. */
2888 :
2889 : void
2890 7665080 : remove_node_from_insn_list (const rtx_insn *node, rtx_insn_list **listp)
2891 : {
2892 7665080 : rtx_insn_list *temp = *listp;
2893 7665080 : rtx_insn_list *prev = NULL;
2894 :
2895 7681541 : while (temp)
2896 : {
2897 16487 : if (node == temp->insn ())
2898 : {
2899 : /* Splice the node out of the list. */
2900 26 : if (prev)
2901 0 : XEXP (prev, 1) = temp->next ();
2902 : else
2903 26 : *listp = temp->next ();
2904 :
2905 26 : gcc_checking_assert (!in_insn_list_p (temp->next (), node));
2906 : return;
2907 : }
2908 :
2909 16461 : prev = temp;
2910 16461 : temp = temp->next ();
2911 : }
2912 : }
2913 :
2914 : /* Return true if X contains any volatile instructions. These are instructions
2915 : which may cause unpredictable machine state instructions, and thus no
2916 : instructions or register uses should be moved or combined across them.
2917 : This includes only volatile asms and UNSPEC_VOLATILE instructions. */
2918 :
2919 : bool
2920 694225855 : volatile_insn_p (const_rtx x)
2921 : {
2922 694225855 : const RTX_CODE code = GET_CODE (x);
2923 694225855 : switch (code)
2924 : {
2925 : case LABEL_REF:
2926 : case SYMBOL_REF:
2927 : case CONST:
2928 : CASE_CONST_ANY:
2929 : case PC:
2930 : case REG:
2931 : case SCRATCH:
2932 : case CLOBBER:
2933 : case ADDR_VEC:
2934 : case ADDR_DIFF_VEC:
2935 : case CALL:
2936 : case MEM:
2937 : return false;
2938 :
2939 : case UNSPEC_VOLATILE:
2940 : return true;
2941 :
2942 135524 : case ASM_INPUT:
2943 135524 : case ASM_OPERANDS:
2944 135524 : if (MEM_VOLATILE_P (x))
2945 : return true;
2946 :
2947 324012085 : default:
2948 324012085 : break;
2949 : }
2950 :
2951 : /* Recursively scan the operands of this expression. */
2952 :
2953 324012085 : {
2954 324012085 : const char *const fmt = GET_RTX_FORMAT (code);
2955 324012085 : int i;
2956 :
2957 887883003 : for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2958 : {
2959 564168778 : if (fmt[i] == 'e')
2960 : {
2961 439665335 : if (volatile_insn_p (XEXP (x, i)))
2962 : return true;
2963 : }
2964 124503443 : else if (fmt[i] == 'E')
2965 : {
2966 : int j;
2967 70069342 : for (j = 0; j < XVECLEN (x, i); j++)
2968 48222210 : if (volatile_insn_p (XVECEXP (x, i, j)))
2969 : return true;
2970 : }
2971 : }
2972 : }
2973 : return false;
2974 : }
2975 :
2976 : /* Return true if X contains any volatile memory references
2977 : UNSPEC_VOLATILE operations or volatile ASM_OPERANDS expressions. */
2978 :
2979 : bool
2980 5181259251 : volatile_refs_p (const_rtx x)
2981 : {
2982 5181259251 : const RTX_CODE code = GET_CODE (x);
2983 5181259251 : switch (code)
2984 : {
2985 : case LABEL_REF:
2986 : case SYMBOL_REF:
2987 : case CONST:
2988 : CASE_CONST_ANY:
2989 : case PC:
2990 : case REG:
2991 : case SCRATCH:
2992 : case CLOBBER:
2993 : case ADDR_VEC:
2994 : case ADDR_DIFF_VEC:
2995 : return false;
2996 :
2997 : case UNSPEC_VOLATILE:
2998 : return true;
2999 :
3000 393708116 : case MEM:
3001 393708116 : case ASM_INPUT:
3002 393708116 : case ASM_OPERANDS:
3003 393708116 : if (MEM_VOLATILE_P (x))
3004 : return true;
3005 :
3006 2206270722 : default:
3007 2206270722 : break;
3008 : }
3009 :
3010 : /* Recursively scan the operands of this expression. */
3011 :
3012 2206270722 : {
3013 2206270722 : const char *const fmt = GET_RTX_FORMAT (code);
3014 2206270722 : int i;
3015 :
3016 6486548244 : for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3017 : {
3018 4296782965 : if (fmt[i] == 'e')
3019 : {
3020 3822530469 : if (volatile_refs_p (XEXP (x, i)))
3021 : return true;
3022 : }
3023 474252496 : else if (fmt[i] == 'E')
3024 : {
3025 : int j;
3026 160256685 : for (j = 0; j < XVECLEN (x, i); j++)
3027 111082486 : if (volatile_refs_p (XVECEXP (x, i, j)))
3028 : return true;
3029 : }
3030 : }
3031 : }
3032 : return false;
3033 : }
3034 :
3035 : /* Similar to above, except that it also rejects register pre- and post-
3036 : incrementing. */
3037 :
3038 : bool
3039 5334683671 : side_effects_p (const_rtx x)
3040 : {
3041 5334683671 : const RTX_CODE code = GET_CODE (x);
3042 5334683671 : switch (code)
3043 : {
3044 : case LABEL_REF:
3045 : case SYMBOL_REF:
3046 : case CONST:
3047 : CASE_CONST_ANY:
3048 : case PC:
3049 : case REG:
3050 : case SCRATCH:
3051 : case ADDR_VEC:
3052 : case ADDR_DIFF_VEC:
3053 : case VAR_LOCATION:
3054 : return false;
3055 :
3056 62276182 : case CLOBBER:
3057 : /* Reject CLOBBER with a non-VOID mode. These are made by combine.cc
3058 : when some combination can't be done. If we see one, don't think
3059 : that we can simplify the expression. */
3060 62276182 : return (GET_MODE (x) != VOIDmode);
3061 :
3062 : case PRE_INC:
3063 : case PRE_DEC:
3064 : case POST_INC:
3065 : case POST_DEC:
3066 : case PRE_MODIFY:
3067 : case POST_MODIFY:
3068 : case CALL:
3069 : case UNSPEC_VOLATILE:
3070 : return true;
3071 :
3072 327160721 : case MEM:
3073 327160721 : case ASM_INPUT:
3074 327160721 : case ASM_OPERANDS:
3075 327160721 : if (MEM_VOLATILE_P (x))
3076 : return true;
3077 :
3078 1890743925 : default:
3079 1890743925 : break;
3080 : }
3081 :
3082 : /* Recursively scan the operands of this expression. */
3083 :
3084 1890743925 : {
3085 1890743925 : const char *fmt = GET_RTX_FORMAT (code);
3086 1890743925 : int i;
3087 :
3088 5624494985 : for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3089 : {
3090 3783519745 : if (fmt[i] == 'e')
3091 : {
3092 3347879051 : if (side_effects_p (XEXP (x, i)))
3093 : return true;
3094 : }
3095 435640694 : else if (fmt[i] == 'E')
3096 : {
3097 : int j;
3098 234550192 : for (j = 0; j < XVECLEN (x, i); j++)
3099 158301788 : if (side_effects_p (XVECEXP (x, i, j)))
3100 : return true;
3101 : }
3102 : }
3103 : }
3104 : return false;
3105 : }
3106 :
3107 : /* Return true if evaluating rtx X might cause a trap.
3108 : FLAGS controls how to consider MEMs. A true means the context
3109 : of the access may have changed from the original, such that the
3110 : address may have become invalid. */
3111 :
3112 : bool
3113 9034786318 : may_trap_p_1 (const_rtx x, unsigned flags)
3114 : {
3115 9034786318 : int i;
3116 9034786318 : enum rtx_code code;
3117 9034786318 : const char *fmt;
3118 :
3119 : /* We make no distinction currently, but this function is part of
3120 : the internal target-hooks ABI so we keep the parameter as
3121 : "unsigned flags". */
3122 9034786318 : bool code_changed = flags != 0;
3123 :
3124 9034786318 : if (x == 0)
3125 : return false;
3126 9034784432 : code = GET_CODE (x);
3127 9034784432 : switch (code)
3128 : {
3129 : /* Handle these cases quickly. */
3130 : CASE_CONST_ANY:
3131 : case SYMBOL_REF:
3132 : case LABEL_REF:
3133 : case CONST:
3134 : case PC:
3135 : case REG:
3136 : case SCRATCH:
3137 : return false;
3138 :
3139 5823472 : case UNSPEC:
3140 5823472 : return targetm.unspec_may_trap_p (x, flags);
3141 :
3142 : case UNSPEC_VOLATILE:
3143 : case ASM_INPUT:
3144 : case TRAP_IF:
3145 : return true;
3146 :
3147 185339 : case ASM_OPERANDS:
3148 185339 : return MEM_VOLATILE_P (x);
3149 :
3150 : /* Memory ref can trap unless it's a static var or a stack slot. */
3151 1074443964 : case MEM:
3152 : /* Recognize specific pattern of stack checking probes. */
3153 1074443964 : if (flag_stack_check
3154 7726 : && MEM_VOLATILE_P (x)
3155 1074444764 : && XEXP (x, 0) == stack_pointer_rtx)
3156 : return true;
3157 1074443165 : if (/* MEM_NOTRAP_P only relates to the actual position of the memory
3158 : reference; moving it out of context such as when moving code
3159 : when optimizing, might cause its address to become invalid. */
3160 : code_changed
3161 1074443165 : || !MEM_NOTRAP_P (x))
3162 : {
3163 527783026 : poly_int64 size = MEM_SIZE_KNOWN_P (x) ? MEM_SIZE (x) : -1;
3164 452459956 : return rtx_addr_can_trap_p_1 (XEXP (x, 0), 0, size,
3165 452459956 : GET_MODE (x), code_changed);
3166 : }
3167 :
3168 : return false;
3169 :
3170 : /* Division by a non-constant might trap. */
3171 1003529 : case DIV:
3172 1003529 : case MOD:
3173 1003529 : case UDIV:
3174 1003529 : case UMOD:
3175 1003529 : if (HONOR_SNANS (x))
3176 : return true;
3177 1002862 : if (FLOAT_MODE_P (GET_MODE (x)))
3178 420578 : return flag_trapping_math;
3179 582284 : if (!CONSTANT_P (XEXP (x, 1)) || (XEXP (x, 1) == const0_rtx))
3180 : return true;
3181 81397 : if (GET_CODE (XEXP (x, 1)) == CONST_VECTOR)
3182 : {
3183 : /* For CONST_VECTOR, return 1 if any element is or might be zero. */
3184 0 : unsigned int n_elts;
3185 0 : rtx op = XEXP (x, 1);
3186 0 : if (!GET_MODE_NUNITS (GET_MODE (op)).is_constant (&n_elts))
3187 : {
3188 : if (!CONST_VECTOR_DUPLICATE_P (op))
3189 295242713 : return true;
3190 : for (unsigned i = 0; i < (unsigned int) XVECLEN (op, 0); i++)
3191 : if (CONST_VECTOR_ENCODED_ELT (op, i) == const0_rtx)
3192 : return true;
3193 : }
3194 : else
3195 0 : for (unsigned i = 0; i < n_elts; i++)
3196 0 : if (CONST_VECTOR_ELT (op, i) == const0_rtx)
3197 : return true;
3198 : }
3199 : break;
3200 :
3201 : case EXPR_LIST:
3202 : /* An EXPR_LIST is used to represent a function call. This
3203 : certainly may trap. */
3204 : return true;
3205 :
3206 185180137 : case GE:
3207 185180137 : case GT:
3208 185180137 : case LE:
3209 185180137 : case LT:
3210 185180137 : case LTGT:
3211 185180137 : case COMPARE:
3212 : /* Treat min/max similar as comparisons. */
3213 185180137 : case SMIN:
3214 185180137 : case SMAX:
3215 : /* Some floating point comparisons may trap. */
3216 185180137 : if (!flag_trapping_math)
3217 : break;
3218 : /* ??? There is no machine independent way to check for tests that trap
3219 : when COMPARE is used, though many targets do make this distinction.
3220 : For instance, sparc uses CCFPE for compares which generate exceptions
3221 : and CCFP for compares which do not generate exceptions. */
3222 183565510 : if (HONOR_NANS (x))
3223 : return true;
3224 : /* But often the compare has some CC mode, so check operand
3225 : modes as well. */
3226 183539980 : if (HONOR_NANS (XEXP (x, 0))
3227 183539980 : || HONOR_NANS (XEXP (x, 1)))
3228 2188154 : return true;
3229 : break;
3230 :
3231 28115156 : case EQ:
3232 28115156 : case NE:
3233 28115156 : if (HONOR_SNANS (x))
3234 : return true;
3235 : /* Often comparison is CC mode, so check operand modes. */
3236 28115139 : if (HONOR_SNANS (XEXP (x, 0))
3237 28115139 : || HONOR_SNANS (XEXP (x, 1)))
3238 0 : return true;
3239 : break;
3240 :
3241 354035 : case FIX:
3242 354035 : case UNSIGNED_FIX:
3243 : /* Conversion of floating point might trap. */
3244 354035 : if (flag_trapping_math && HONOR_NANS (XEXP (x, 0)))
3245 : return true;
3246 : break;
3247 :
3248 : case PARALLEL:
3249 : case NEG:
3250 : case ABS:
3251 : case SUBREG:
3252 : case VEC_MERGE:
3253 : case VEC_SELECT:
3254 : case VEC_CONCAT:
3255 : case VEC_DUPLICATE:
3256 : /* These operations don't trap even with floating point. */
3257 : break;
3258 :
3259 3075845411 : default:
3260 : /* Any floating arithmetic may trap. */
3261 3075845411 : if (FLOAT_MODE_P (GET_MODE (x)) && flag_trapping_math)
3262 : return true;
3263 : }
3264 :
3265 3701947622 : fmt = GET_RTX_FORMAT (code);
3266 9967183835 : for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3267 : {
3268 6516627804 : if (fmt[i] == 'e')
3269 : {
3270 6066015441 : if (may_trap_p_1 (XEXP (x, i), flags))
3271 : return true;
3272 : }
3273 450612363 : else if (fmt[i] == 'E')
3274 : {
3275 : int j;
3276 969245738 : for (j = 0; j < XVECLEN (x, i); j++)
3277 667962950 : if (may_trap_p_1 (XVECEXP (x, i, j), flags))
3278 : return true;
3279 : }
3280 : }
3281 : return false;
3282 : }
3283 :
3284 : /* Return true if evaluating rtx X might cause a trap. */
3285 :
3286 : bool
3287 2282900194 : may_trap_p (const_rtx x)
3288 : {
3289 2282900194 : return may_trap_p_1 (x, 0);
3290 : }
3291 :
3292 : /* Same as above, but additionally return true if evaluating rtx X might
3293 : cause a fault. We define a fault for the purpose of this function as a
3294 : erroneous execution condition that cannot be encountered during the normal
3295 : execution of a valid program; the typical example is an unaligned memory
3296 : access on a strict alignment machine. The compiler guarantees that it
3297 : doesn't generate code that will fault from a valid program, but this
3298 : guarantee doesn't mean anything for individual instructions. Consider
3299 : the following example:
3300 :
3301 : struct S { int d; union { char *cp; int *ip; }; };
3302 :
3303 : int foo(struct S *s)
3304 : {
3305 : if (s->d == 1)
3306 : return *s->ip;
3307 : else
3308 : return *s->cp;
3309 : }
3310 :
3311 : on a strict alignment machine. In a valid program, foo will never be
3312 : invoked on a structure for which d is equal to 1 and the underlying
3313 : unique field of the union not aligned on a 4-byte boundary, but the
3314 : expression *s->ip might cause a fault if considered individually.
3315 :
3316 : At the RTL level, potentially problematic expressions will almost always
3317 : verify may_trap_p; for example, the above dereference can be emitted as
3318 : (mem:SI (reg:P)) and this expression is may_trap_p for a generic register.
3319 : However, suppose that foo is inlined in a caller that causes s->cp to
3320 : point to a local character variable and guarantees that s->d is not set
3321 : to 1; foo may have been effectively translated into pseudo-RTL as:
3322 :
3323 : if ((reg:SI) == 1)
3324 : (set (reg:SI) (mem:SI (%fp - 7)))
3325 : else
3326 : (set (reg:QI) (mem:QI (%fp - 7)))
3327 :
3328 : Now (mem:SI (%fp - 7)) is considered as not may_trap_p since it is a
3329 : memory reference to a stack slot, but it will certainly cause a fault
3330 : on a strict alignment machine. */
3331 :
3332 : bool
3333 10556593 : may_trap_or_fault_p (const_rtx x)
3334 : {
3335 10556593 : return may_trap_p_1 (x, 1);
3336 : }
3337 :
3338 : /* Replace any occurrence of FROM in X with TO. The function does
3339 : not enter into CONST_DOUBLE for the replace.
3340 :
3341 : Note that copying is not done so X must not be shared unless all copies
3342 : are to be modified.
3343 :
3344 : ALL_REGS is true if we want to replace all REGs equal to FROM, not just
3345 : those pointer-equal ones. */
3346 :
3347 : rtx
3348 8759862 : replace_rtx (rtx x, rtx from, rtx to, bool all_regs)
3349 : {
3350 8759862 : int i, j;
3351 8759862 : const char *fmt;
3352 :
3353 8759862 : if (x == from)
3354 : return to;
3355 :
3356 : /* Allow this function to make replacements in EXPR_LISTs. */
3357 6360922 : if (x == 0)
3358 : return 0;
3359 :
3360 6360922 : if (all_regs
3361 0 : && REG_P (x)
3362 0 : && REG_P (from)
3363 6360922 : && REGNO (x) == REGNO (from))
3364 : {
3365 0 : gcc_assert (GET_MODE (x) == GET_MODE (from));
3366 : return to;
3367 : }
3368 6360922 : else if (GET_CODE (x) == SUBREG)
3369 : {
3370 48031 : rtx new_rtx = replace_rtx (SUBREG_REG (x), from, to, all_regs);
3371 :
3372 48031 : if (CONST_SCALAR_INT_P (new_rtx))
3373 : {
3374 2 : x = simplify_subreg (GET_MODE (x), new_rtx,
3375 1 : GET_MODE (SUBREG_REG (x)),
3376 1 : SUBREG_BYTE (x));
3377 1 : gcc_assert (x);
3378 : }
3379 : else
3380 48030 : SUBREG_REG (x) = new_rtx;
3381 :
3382 48031 : return x;
3383 : }
3384 6312891 : else if (GET_CODE (x) == ZERO_EXTEND)
3385 : {
3386 191856 : rtx new_rtx = replace_rtx (XEXP (x, 0), from, to, all_regs);
3387 :
3388 191856 : if (CONST_SCALAR_INT_P (new_rtx))
3389 : {
3390 2 : x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
3391 1 : new_rtx, GET_MODE (XEXP (x, 0)));
3392 1 : gcc_assert (x);
3393 : }
3394 : else
3395 191855 : XEXP (x, 0) = new_rtx;
3396 :
3397 191856 : return x;
3398 : }
3399 :
3400 6121035 : fmt = GET_RTX_FORMAT (GET_CODE (x));
3401 15184818 : for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
3402 : {
3403 9063783 : if (fmt[i] == 'e')
3404 5921175 : XEXP (x, i) = replace_rtx (XEXP (x, i), from, to, all_regs);
3405 3142608 : else if (fmt[i] == 'E')
3406 135966 : for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3407 112622 : XVECEXP (x, i, j) = replace_rtx (XVECEXP (x, i, j),
3408 : from, to, all_regs);
3409 : }
3410 :
3411 : return x;
3412 : }
3413 :
3414 : /* Replace occurrences of the OLD_LABEL in *LOC with NEW_LABEL. Also track
3415 : the change in LABEL_NUSES if UPDATE_LABEL_NUSES. */
3416 :
3417 : void
3418 11210 : replace_label (rtx *loc, rtx old_label, rtx new_label, bool update_label_nuses)
3419 : {
3420 : /* Handle jump tables specially, since ADDR_{DIFF_,}VECs can be long. */
3421 11210 : rtx x = *loc;
3422 11210 : if (JUMP_TABLE_DATA_P (x))
3423 : {
3424 12 : x = PATTERN (x);
3425 12 : rtvec vec = XVEC (x, GET_CODE (x) == ADDR_DIFF_VEC);
3426 12 : int len = GET_NUM_ELEM (vec);
3427 138 : for (int i = 0; i < len; ++i)
3428 : {
3429 126 : rtx ref = RTVEC_ELT (vec, i);
3430 126 : if (XEXP (ref, 0) == old_label)
3431 : {
3432 0 : XEXP (ref, 0) = new_label;
3433 0 : if (update_label_nuses)
3434 : {
3435 0 : ++LABEL_NUSES (new_label);
3436 0 : --LABEL_NUSES (old_label);
3437 : }
3438 : }
3439 : }
3440 12 : return;
3441 : }
3442 :
3443 : /* If this is a JUMP_INSN, then we also need to fix the JUMP_LABEL
3444 : field. This is not handled by the iterator because it doesn't
3445 : handle unprinted ('0') fields. */
3446 11198 : if (JUMP_P (x) && JUMP_LABEL (x) == old_label)
3447 1972 : JUMP_LABEL (x) = new_label;
3448 :
3449 11198 : subrtx_ptr_iterator::array_type array;
3450 110034 : FOR_EACH_SUBRTX_PTR (iter, array, loc, ALL)
3451 : {
3452 98836 : rtx *loc = *iter;
3453 98836 : if (rtx x = *loc)
3454 : {
3455 88259 : if (GET_CODE (x) == SYMBOL_REF
3456 88259 : && CONSTANT_POOL_ADDRESS_P (x))
3457 : {
3458 339 : rtx c = get_pool_constant (x);
3459 339 : if (rtx_referenced_p (old_label, c))
3460 : {
3461 : /* Create a copy of constant C; replace the label inside
3462 : but do not update LABEL_NUSES because uses in constant pool
3463 : are not counted. */
3464 0 : rtx new_c = copy_rtx (c);
3465 0 : replace_label (&new_c, old_label, new_label, false);
3466 :
3467 : /* Add the new constant NEW_C to constant pool and replace
3468 : the old reference to constant by new reference. */
3469 0 : rtx new_mem = force_const_mem (get_pool_mode (x), new_c);
3470 0 : *loc = replace_rtx (x, x, XEXP (new_mem, 0));
3471 : }
3472 : }
3473 :
3474 88259 : if ((GET_CODE (x) == LABEL_REF
3475 84015 : || GET_CODE (x) == INSN_LIST)
3476 6212 : && XEXP (x, 0) == old_label)
3477 : {
3478 5892 : XEXP (x, 0) = new_label;
3479 5892 : if (update_label_nuses)
3480 : {
3481 0 : ++LABEL_NUSES (new_label);
3482 0 : --LABEL_NUSES (old_label);
3483 : }
3484 : }
3485 : }
3486 : }
3487 11198 : }
3488 :
3489 : void
3490 11210 : replace_label_in_insn (rtx_insn *insn, rtx_insn *old_label,
3491 : rtx_insn *new_label, bool update_label_nuses)
3492 : {
3493 11210 : rtx insn_as_rtx = insn;
3494 11210 : replace_label (&insn_as_rtx, old_label, new_label, update_label_nuses);
3495 11210 : gcc_checking_assert (insn_as_rtx == insn);
3496 11210 : }
3497 :
3498 : /* Return true if X is referenced in BODY. */
3499 :
3500 : bool
3501 351429 : rtx_referenced_p (const_rtx x, const_rtx body)
3502 : {
3503 351429 : subrtx_iterator::array_type array;
3504 1721575 : FOR_EACH_SUBRTX (iter, array, body, ALL)
3505 1392953 : if (const_rtx y = *iter)
3506 : {
3507 : /* Check if a label_ref Y refers to label X. */
3508 1386895 : if (GET_CODE (y) == LABEL_REF
3509 11899 : && LABEL_P (x)
3510 1398791 : && label_ref_label (y) == x)
3511 22807 : return true;
3512 :
3513 1386895 : if (rtx_equal_p (x, y))
3514 : return true;
3515 :
3516 : /* If Y is a reference to pool constant traverse the constant. */
3517 1364088 : if (GET_CODE (y) == SYMBOL_REF
3518 1364088 : && CONSTANT_POOL_ADDRESS_P (y))
3519 6999 : iter.substitute (get_pool_constant (y));
3520 : }
3521 328622 : return false;
3522 351429 : }
3523 :
3524 : /* If INSN is a tablejump return true and store the label (before jump table) to
3525 : *LABELP and the jump table to *TABLEP. LABELP and TABLEP may be NULL. */
3526 :
3527 : bool
3528 107309431 : tablejump_p (const rtx_insn *insn, rtx_insn **labelp,
3529 : rtx_jump_table_data **tablep)
3530 : {
3531 107309431 : if (!JUMP_P (insn))
3532 : return false;
3533 :
3534 80889866 : rtx target = JUMP_LABEL (insn);
3535 80889866 : if (target == NULL_RTX || ANY_RETURN_P (target))
3536 : return false;
3537 :
3538 77309087 : rtx_insn *label = as_a<rtx_insn *> (target);
3539 77309087 : rtx_insn *table = next_insn (label);
3540 77309087 : if (table == NULL_RTX || !JUMP_TABLE_DATA_P (table))
3541 : return false;
3542 :
3543 126661 : if (labelp)
3544 84210 : *labelp = label;
3545 126661 : if (tablep)
3546 122584 : *tablep = as_a <rtx_jump_table_data *> (table);
3547 : return true;
3548 : }
3549 :
3550 : /* For INSN known to satisfy tablejump_p, determine if it actually is a
3551 : CASESI. Return the insn pattern if so, NULL_RTX otherwise. */
3552 :
3553 : rtx
3554 23891 : tablejump_casesi_pattern (const rtx_insn *insn)
3555 : {
3556 23891 : rtx tmp;
3557 :
3558 23891 : if ((tmp = single_set (insn)) != NULL
3559 23891 : && SET_DEST (tmp) == pc_rtx
3560 23891 : && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
3561 23891 : && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
3562 0 : return tmp;
3563 :
3564 : return NULL_RTX;
3565 : }
3566 :
3567 : /* A subroutine of computed_jump_p, return true if X contains a REG or MEM or
3568 : constant that is not in the constant pool and not in the condition
3569 : of an IF_THEN_ELSE. */
3570 :
3571 : static bool
3572 1911 : computed_jump_p_1 (const_rtx x)
3573 : {
3574 1911 : const enum rtx_code code = GET_CODE (x);
3575 1911 : int i, j;
3576 1911 : const char *fmt;
3577 :
3578 1911 : switch (code)
3579 : {
3580 : case LABEL_REF:
3581 : case PC:
3582 : return false;
3583 :
3584 : case CONST:
3585 : CASE_CONST_ANY:
3586 : case SYMBOL_REF:
3587 : case REG:
3588 : return true;
3589 :
3590 323 : case MEM:
3591 323 : return ! (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
3592 14 : && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)));
3593 :
3594 0 : case IF_THEN_ELSE:
3595 0 : return (computed_jump_p_1 (XEXP (x, 1))
3596 0 : || computed_jump_p_1 (XEXP (x, 2)));
3597 :
3598 0 : default:
3599 0 : break;
3600 : }
3601 :
3602 0 : fmt = GET_RTX_FORMAT (code);
3603 0 : for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3604 : {
3605 0 : if (fmt[i] == 'e'
3606 0 : && computed_jump_p_1 (XEXP (x, i)))
3607 : return true;
3608 :
3609 0 : else if (fmt[i] == 'E')
3610 0 : for (j = 0; j < XVECLEN (x, i); j++)
3611 0 : if (computed_jump_p_1 (XVECEXP (x, i, j)))
3612 : return true;
3613 : }
3614 :
3615 : return false;
3616 : }
3617 :
3618 : /* Return true if INSN is an indirect jump (aka computed jump).
3619 :
3620 : Tablejumps and casesi insns are not considered indirect jumps;
3621 : we can recognize them by a (use (label_ref)). */
3622 :
3623 : bool
3624 46217040 : computed_jump_p (const rtx_insn *insn)
3625 : {
3626 46217040 : int i;
3627 46217040 : if (JUMP_P (insn))
3628 : {
3629 42198407 : rtx pat = PATTERN (insn);
3630 :
3631 : /* If we have a JUMP_LABEL set, we're not a computed jump. */
3632 42198407 : if (JUMP_LABEL (insn) != NULL)
3633 : return false;
3634 :
3635 2431 : if (GET_CODE (pat) == PARALLEL)
3636 : {
3637 489 : int len = XVECLEN (pat, 0);
3638 489 : bool has_use_labelref = false;
3639 :
3640 1467 : for (i = len - 1; i >= 0; i--)
3641 978 : if (GET_CODE (XVECEXP (pat, 0, i)) == USE
3642 0 : && (GET_CODE (XEXP (XVECEXP (pat, 0, i), 0))
3643 : == LABEL_REF))
3644 : {
3645 : has_use_labelref = true;
3646 : break;
3647 : }
3648 :
3649 489 : if (! has_use_labelref)
3650 1467 : for (i = len - 1; i >= 0; i--)
3651 978 : if (GET_CODE (XVECEXP (pat, 0, i)) == SET
3652 0 : && SET_DEST (XVECEXP (pat, 0, i)) == pc_rtx
3653 978 : && computed_jump_p_1 (SET_SRC (XVECEXP (pat, 0, i))))
3654 : return true;
3655 : }
3656 1942 : else if (GET_CODE (pat) == SET
3657 1911 : && SET_DEST (pat) == pc_rtx
3658 3853 : && computed_jump_p_1 (SET_SRC (pat)))
3659 : return true;
3660 : }
3661 : return false;
3662 : }
3663 :
3664 :
3665 :
3666 : /* MEM has a PRE/POST-INC/DEC/MODIFY address X. Extract the operands of
3667 : the equivalent add insn and pass the result to FN, using DATA as the
3668 : final argument. */
3669 :
3670 : static int
3671 19698916 : for_each_inc_dec_find_inc_dec (rtx mem, for_each_inc_dec_fn fn, void *data)
3672 : {
3673 19698916 : rtx x = XEXP (mem, 0);
3674 19698916 : switch (GET_CODE (x))
3675 : {
3676 2319833 : case PRE_INC:
3677 2319833 : case POST_INC:
3678 2319833 : {
3679 4639666 : poly_int64 size = GET_MODE_SIZE (GET_MODE (mem));
3680 2319833 : rtx r1 = XEXP (x, 0);
3681 2319833 : rtx c = gen_int_mode (size, GET_MODE (r1));
3682 2319833 : return fn (mem, x, r1, r1, c, data);
3683 : }
3684 :
3685 16985851 : case PRE_DEC:
3686 16985851 : case POST_DEC:
3687 16985851 : {
3688 33971702 : poly_int64 size = GET_MODE_SIZE (GET_MODE (mem));
3689 16985851 : rtx r1 = XEXP (x, 0);
3690 16985851 : rtx c = gen_int_mode (-size, GET_MODE (r1));
3691 16985851 : return fn (mem, x, r1, r1, c, data);
3692 : }
3693 :
3694 393232 : case PRE_MODIFY:
3695 393232 : case POST_MODIFY:
3696 393232 : {
3697 393232 : rtx r1 = XEXP (x, 0);
3698 393232 : rtx add = XEXP (x, 1);
3699 393232 : return fn (mem, x, r1, add, NULL, data);
3700 : }
3701 :
3702 0 : default:
3703 0 : gcc_unreachable ();
3704 : }
3705 : }
3706 :
3707 : /* Traverse *LOC looking for MEMs that have autoinc addresses.
3708 : For each such autoinc operation found, call FN, passing it
3709 : the innermost enclosing MEM, the operation itself, the RTX modified
3710 : by the operation, two RTXs (the second may be NULL) that, once
3711 : added, represent the value to be held by the modified RTX
3712 : afterwards, and DATA. FN is to return 0 to continue the
3713 : traversal or any other value to have it returned to the caller of
3714 : for_each_inc_dec. */
3715 :
3716 : int
3717 1024261865 : for_each_inc_dec (rtx x,
3718 : for_each_inc_dec_fn fn,
3719 : void *data)
3720 : {
3721 1024261865 : subrtx_var_iterator::array_type array;
3722 5432512856 : FOR_EACH_SUBRTX_VAR (iter, array, x, NONCONST)
3723 : {
3724 4408250991 : rtx mem = *iter;
3725 4408250991 : if (mem
3726 4408250991 : && MEM_P (mem)
3727 254787204 : && GET_RTX_CLASS (GET_CODE (XEXP (mem, 0))) == RTX_AUTOINC)
3728 : {
3729 19698916 : int res = for_each_inc_dec_find_inc_dec (mem, fn, data);
3730 19698916 : if (res != 0)
3731 0 : return res;
3732 19698916 : iter.skip_subrtxes ();
3733 : }
3734 : }
3735 1024261865 : return 0;
3736 1024261865 : }
3737 :
3738 :
3739 : /* Searches X for any reference to REGNO, returning the rtx of the
3740 : reference found if any. Otherwise, returns NULL_RTX. */
3741 :
3742 : rtx
3743 0 : regno_use_in (unsigned int regno, rtx x)
3744 : {
3745 0 : const char *fmt;
3746 0 : int i, j;
3747 0 : rtx tem;
3748 :
3749 0 : if (REG_P (x) && REGNO (x) == regno)
3750 : return x;
3751 :
3752 0 : fmt = GET_RTX_FORMAT (GET_CODE (x));
3753 0 : for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
3754 : {
3755 0 : if (fmt[i] == 'e')
3756 : {
3757 0 : if ((tem = regno_use_in (regno, XEXP (x, i))))
3758 : return tem;
3759 : }
3760 0 : else if (fmt[i] == 'E')
3761 0 : for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3762 0 : if ((tem = regno_use_in (regno , XVECEXP (x, i, j))))
3763 : return tem;
3764 : }
3765 :
3766 : return NULL_RTX;
3767 : }
3768 :
3769 : /* Return a value indicating whether OP, an operand of a commutative
3770 : operation, is preferred as the first or second operand. The more
3771 : positive the value, the stronger the preference for being the first
3772 : operand. */
3773 :
3774 : int
3775 2212835232 : commutative_operand_precedence (rtx op)
3776 : {
3777 2212835232 : enum rtx_code code = GET_CODE (op);
3778 :
3779 : /* Constants always become the second operand. Prefer "nice" constants. */
3780 2212835232 : if (code == CONST_INT)
3781 : return -10;
3782 : if (code == CONST_WIDE_INT)
3783 : return -9;
3784 : if (code == CONST_POLY_INT)
3785 : return -8;
3786 : if (code == CONST_DOUBLE)
3787 : return -8;
3788 : if (code == CONST_FIXED)
3789 : return -8;
3790 1316671085 : op = avoid_constant_pool_reference (op);
3791 1316671085 : code = GET_CODE (op);
3792 :
3793 1316671085 : switch (GET_RTX_CLASS (code))
3794 : {
3795 27759314 : case RTX_CONST_OBJ:
3796 27759314 : if (code == CONST_INT)
3797 : return -7;
3798 : if (code == CONST_WIDE_INT)
3799 : return -6;
3800 : if (code == CONST_POLY_INT)
3801 : return -5;
3802 : if (code == CONST_DOUBLE)
3803 : return -5;
3804 : if (code == CONST_FIXED)
3805 : return -5;
3806 : return -4;
3807 :
3808 41586688 : case RTX_EXTRA:
3809 : /* SUBREGs of objects should come second. */
3810 41586688 : if (code == SUBREG && OBJECT_P (SUBREG_REG (op)))
3811 : return -3;
3812 : return 0;
3813 :
3814 977169544 : case RTX_OBJ:
3815 : /* Complex expressions should be the first, so decrease priority
3816 : of objects. Prefer pointer objects over non pointer objects. */
3817 881458405 : if ((REG_P (op) && REG_POINTER (op))
3818 1466578867 : || (MEM_P (op) && MEM_POINTER (op)))
3819 410955979 : return -1;
3820 : return -2;
3821 :
3822 : case RTX_COMM_ARITH:
3823 : /* Prefer operands that are themselves commutative to be first.
3824 : This helps to make things linear. In particular,
3825 : (and (and (reg) (reg)) (not (reg))) is canonical. */
3826 : return 4;
3827 :
3828 80983758 : case RTX_BIN_ARITH:
3829 : /* If only one operand is a binary expression, it will be the first
3830 : operand. In particular, (plus (minus (reg) (reg)) (neg (reg)))
3831 : is canonical, although it will usually be further simplified. */
3832 80983758 : return 2;
3833 :
3834 26094232 : case RTX_UNARY:
3835 : /* Then prefer NEG and NOT. */
3836 26094232 : if (code == NEG || code == NOT)
3837 : return 1;
3838 : /* FALLTHRU */
3839 :
3840 : default:
3841 : return 0;
3842 : }
3843 : }
3844 :
3845 : /* Return true iff it is necessary to swap operands of commutative operation
3846 : in order to canonicalize expression. */
3847 :
3848 : bool
3849 980575382 : swap_commutative_operands_p (rtx x, rtx y)
3850 : {
3851 980575382 : return (commutative_operand_precedence (x)
3852 980575382 : < commutative_operand_precedence (y));
3853 : }
3854 :
3855 : /* Return true if X is an autoincrement side effect and the register is
3856 : not the stack pointer. */
3857 : bool
3858 0 : auto_inc_p (const_rtx x)
3859 : {
3860 0 : switch (GET_CODE (x))
3861 : {
3862 0 : case PRE_INC:
3863 0 : case POST_INC:
3864 0 : case PRE_DEC:
3865 0 : case POST_DEC:
3866 0 : case PRE_MODIFY:
3867 0 : case POST_MODIFY:
3868 : /* There are no REG_INC notes for SP. */
3869 0 : if (XEXP (x, 0) != stack_pointer_rtx)
3870 0 : return true;
3871 : default:
3872 : break;
3873 : }
3874 : return false;
3875 : }
3876 :
3877 : /* Return true if IN contains a piece of rtl that has the address LOC. */
3878 : bool
3879 1071986 : loc_mentioned_in_p (rtx *loc, const_rtx in)
3880 : {
3881 1071986 : enum rtx_code code;
3882 1071986 : const char *fmt;
3883 1071986 : int i, j;
3884 :
3885 1071986 : if (!in)
3886 : return false;
3887 :
3888 1071986 : code = GET_CODE (in);
3889 1071986 : fmt = GET_RTX_FORMAT (code);
3890 2141141 : for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3891 : {
3892 1701681 : if (fmt[i] == 'e')
3893 : {
3894 1052232 : if (loc == &XEXP (in, i) || loc_mentioned_in_p (loc, XEXP (in, i)))
3895 631810 : return true;
3896 : }
3897 649449 : else if (fmt[i] == 'E')
3898 26503 : for (j = XVECLEN (in, i) - 1; j >= 0; j--)
3899 18281 : if (loc == &XVECEXP (in, i, j)
3900 18281 : || loc_mentioned_in_p (loc, XVECEXP (in, i, j)))
3901 716 : return true;
3902 : }
3903 : return false;
3904 : }
3905 :
3906 : /* Reinterpret a subreg as a bit extraction from an integer and return
3907 : the position of the least significant bit of the extracted value.
3908 : In other words, if the extraction were performed as a shift right
3909 : and mask, return the number of bits to shift right.
3910 :
3911 : The outer value of the subreg has OUTER_BYTES bytes and starts at
3912 : byte offset SUBREG_BYTE within an inner value of INNER_BYTES bytes. */
3913 :
3914 : poly_uint64
3915 47676321 : subreg_size_lsb (poly_uint64 outer_bytes,
3916 : poly_uint64 inner_bytes,
3917 : poly_uint64 subreg_byte)
3918 : {
3919 47676321 : poly_uint64 subreg_end, trailing_bytes, byte_pos;
3920 :
3921 : /* A paradoxical subreg begins at bit position 0. */
3922 47676321 : gcc_checking_assert (ordered_p (outer_bytes, inner_bytes));
3923 47676321 : if (maybe_gt (outer_bytes, inner_bytes))
3924 : {
3925 43113 : gcc_checking_assert (known_eq (subreg_byte, 0U));
3926 43113 : return 0;
3927 : }
3928 :
3929 47633208 : subreg_end = subreg_byte + outer_bytes;
3930 47633208 : trailing_bytes = inner_bytes - subreg_end;
3931 47633208 : if (WORDS_BIG_ENDIAN && BYTES_BIG_ENDIAN)
3932 : byte_pos = trailing_bytes;
3933 47633208 : else if (!WORDS_BIG_ENDIAN && !BYTES_BIG_ENDIAN)
3934 47633208 : byte_pos = subreg_byte;
3935 : else
3936 : {
3937 : /* When bytes and words have opposite endianness, we must be able
3938 : to split offsets into words and bytes at compile time. */
3939 : poly_uint64 leading_word_part
3940 : = force_align_down (subreg_byte, UNITS_PER_WORD);
3941 : poly_uint64 trailing_word_part
3942 : = force_align_down (trailing_bytes, UNITS_PER_WORD);
3943 : /* If the subreg crosses a word boundary ensure that
3944 : it also begins and ends on a word boundary. */
3945 : gcc_assert (known_le (subreg_end - leading_word_part,
3946 : (unsigned int) UNITS_PER_WORD)
3947 : || (known_eq (leading_word_part, subreg_byte)
3948 : && known_eq (trailing_word_part, trailing_bytes)));
3949 : if (WORDS_BIG_ENDIAN)
3950 : byte_pos = trailing_word_part + (subreg_byte - leading_word_part);
3951 : else
3952 : byte_pos = leading_word_part + (trailing_bytes - trailing_word_part);
3953 : }
3954 :
3955 47633208 : return byte_pos * BITS_PER_UNIT;
3956 : }
3957 :
3958 : /* Given a subreg X, return the bit offset where the subreg begins
3959 : (counting from the least significant bit of the reg). */
3960 :
3961 : poly_uint64
3962 2943104 : subreg_lsb (const_rtx x)
3963 : {
3964 5886208 : return subreg_lsb_1 (GET_MODE (x), GET_MODE (SUBREG_REG (x)),
3965 2943104 : SUBREG_BYTE (x));
3966 : }
3967 :
3968 : /* Return the subreg byte offset for a subreg whose outer value has
3969 : OUTER_BYTES bytes, whose inner value has INNER_BYTES bytes, and where
3970 : there are LSB_SHIFT *bits* between the lsb of the outer value and the
3971 : lsb of the inner value. This is the inverse of the calculation
3972 : performed by subreg_lsb_1 (which converts byte offsets to bit shifts). */
3973 :
3974 : poly_uint64
3975 39700722 : subreg_size_offset_from_lsb (poly_uint64 outer_bytes, poly_uint64 inner_bytes,
3976 : poly_uint64 lsb_shift)
3977 : {
3978 : /* A paradoxical subreg begins at bit position 0. */
3979 39700722 : gcc_checking_assert (ordered_p (outer_bytes, inner_bytes));
3980 39700722 : if (maybe_gt (outer_bytes, inner_bytes))
3981 : {
3982 0 : gcc_checking_assert (known_eq (lsb_shift, 0U));
3983 0 : return 0;
3984 : }
3985 :
3986 39700722 : poly_uint64 lower_bytes = exact_div (lsb_shift, BITS_PER_UNIT);
3987 39700722 : poly_uint64 upper_bytes = inner_bytes - (lower_bytes + outer_bytes);
3988 39700722 : if (WORDS_BIG_ENDIAN && BYTES_BIG_ENDIAN)
3989 : return upper_bytes;
3990 39700722 : else if (!WORDS_BIG_ENDIAN && !BYTES_BIG_ENDIAN)
3991 39700722 : return lower_bytes;
3992 : else
3993 : {
3994 : /* When bytes and words have opposite endianness, we must be able
3995 : to split offsets into words and bytes at compile time. */
3996 : poly_uint64 lower_word_part = force_align_down (lower_bytes,
3997 : UNITS_PER_WORD);
3998 : poly_uint64 upper_word_part = force_align_down (upper_bytes,
3999 : UNITS_PER_WORD);
4000 : if (WORDS_BIG_ENDIAN)
4001 : return upper_word_part + (lower_bytes - lower_word_part);
4002 : else
4003 : return lower_word_part + (upper_bytes - upper_word_part);
4004 : }
4005 : }
4006 :
4007 : /* Fill in information about a subreg of a hard register.
4008 : xregno - A regno of an inner hard subreg_reg (or what will become one).
4009 : xmode - The mode of xregno.
4010 : offset - The byte offset.
4011 : ymode - The mode of a top level SUBREG (or what may become one).
4012 : info - Pointer to structure to fill in.
4013 :
4014 : Rather than considering one particular inner register (and thus one
4015 : particular "outer" register) in isolation, this function really uses
4016 : XREGNO as a model for a sequence of isomorphic hard registers. Thus the
4017 : function does not check whether adding INFO->offset to XREGNO gives
4018 : a valid hard register; even if INFO->offset + XREGNO is out of range,
4019 : there might be another register of the same type that is in range.
4020 : Likewise it doesn't check whether targetm.hard_regno_mode_ok accepts
4021 : the new register, since that can depend on things like whether the final
4022 : register number is even or odd. Callers that want to check whether
4023 : this particular subreg can be replaced by a simple (reg ...) should
4024 : use simplify_subreg_regno. */
4025 :
4026 : void
4027 33703202 : subreg_get_info (unsigned int xregno, machine_mode xmode,
4028 : poly_uint64 offset, machine_mode ymode,
4029 : struct subreg_info *info)
4030 : {
4031 33703202 : unsigned int nregs_xmode, nregs_ymode;
4032 :
4033 33703202 : gcc_assert (xregno < FIRST_PSEUDO_REGISTER);
4034 :
4035 67406404 : poly_uint64 xsize = GET_MODE_SIZE (xmode);
4036 67406404 : poly_uint64 ysize = GET_MODE_SIZE (ymode);
4037 :
4038 33703202 : bool rknown = false;
4039 :
4040 : /* If the register representation of a non-scalar mode has holes in it,
4041 : we expect the scalar units to be concatenated together, with the holes
4042 : distributed evenly among the scalar units. Each scalar unit must occupy
4043 : at least one register. */
4044 33703202 : if (HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode))
4045 : {
4046 : /* As a consequence, we must be dealing with a constant number of
4047 : scalars, and thus a constant offset and number of units. */
4048 0 : HOST_WIDE_INT coffset = offset.to_constant ();
4049 0 : HOST_WIDE_INT cysize = ysize.to_constant ();
4050 0 : nregs_xmode = HARD_REGNO_NREGS_WITH_PADDING (xregno, xmode);
4051 0 : unsigned int nunits = GET_MODE_NUNITS (xmode).to_constant ();
4052 0 : scalar_mode xmode_unit = GET_MODE_INNER (xmode);
4053 0 : gcc_assert (HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode_unit));
4054 0 : gcc_assert (nregs_xmode
4055 : == (nunits
4056 : * HARD_REGNO_NREGS_WITH_PADDING (xregno, xmode_unit)));
4057 0 : gcc_assert (hard_regno_nregs (xregno, xmode)
4058 : == hard_regno_nregs (xregno, xmode_unit) * nunits);
4059 :
4060 : /* You can only ask for a SUBREG of a value with holes in the middle
4061 : if you don't cross the holes. (Such a SUBREG should be done by
4062 : picking a different register class, or doing it in memory if
4063 : necessary.) An example of a value with holes is XCmode on 32-bit
4064 : x86 with -m128bit-long-double; it's represented in 6 32-bit registers,
4065 : 3 for each part, but in memory it's two 128-bit parts.
4066 : Padding is assumed to be at the end (not necessarily the 'high part')
4067 : of each unit. */
4068 0 : if ((coffset / GET_MODE_SIZE (xmode_unit) + 1 < nunits)
4069 0 : && (coffset / GET_MODE_SIZE (xmode_unit)
4070 0 : != ((coffset + cysize - 1) / GET_MODE_SIZE (xmode_unit))))
4071 : {
4072 0 : info->representable_p = false;
4073 0 : rknown = true;
4074 : }
4075 : }
4076 : else
4077 33703202 : nregs_xmode = hard_regno_nregs (xregno, xmode);
4078 :
4079 33703202 : nregs_ymode = hard_regno_nregs (xregno, ymode);
4080 :
4081 : /* Subreg sizes must be ordered, so that we can tell whether they are
4082 : partial, paradoxical or complete. */
4083 33703202 : gcc_checking_assert (ordered_p (xsize, ysize));
4084 :
4085 : /* Paradoxical subregs are otherwise valid. */
4086 33703202 : if (!rknown && known_eq (offset, 0U) && maybe_gt (ysize, xsize))
4087 : {
4088 13591687 : info->representable_p = true;
4089 : /* If this is a big endian paradoxical subreg, which uses more
4090 : actual hard registers than the original register, we must
4091 : return a negative offset so that we find the proper highpart
4092 : of the register.
4093 :
4094 : We assume that the ordering of registers within a multi-register
4095 : value has a consistent endianness: if bytes and register words
4096 : have different endianness, the hard registers that make up a
4097 : multi-register value must be at least word-sized. */
4098 13591687 : if (REG_WORDS_BIG_ENDIAN)
4099 : info->offset = (int) nregs_xmode - (int) nregs_ymode;
4100 : else
4101 13591687 : info->offset = 0;
4102 13591687 : info->nregs = nregs_ymode;
4103 13591687 : return;
4104 : }
4105 :
4106 : /* If registers store different numbers of bits in the different
4107 : modes, we cannot generally form this subreg. */
4108 20111515 : poly_uint64 regsize_xmode, regsize_ymode;
4109 17266637 : if (!HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode)
4110 0 : && !HARD_REGNO_NREGS_HAS_PADDING (xregno, ymode)
4111 20111515 : && multiple_p (xsize, nregs_xmode, ®size_xmode)
4112 20111515 : && multiple_p (ysize, nregs_ymode, ®size_ymode))
4113 : {
4114 20111515 : if (!rknown
4115 20111515 : && ((nregs_ymode > 1 && maybe_gt (regsize_xmode, regsize_ymode))
4116 20111503 : || (nregs_xmode > 1 && maybe_gt (regsize_ymode, regsize_xmode))))
4117 : {
4118 119 : info->representable_p = false;
4119 119 : if (!can_div_away_from_zero_p (ysize, regsize_xmode, &info->nregs)
4120 119 : || !can_div_trunc_p (offset, regsize_xmode, &info->offset))
4121 : /* Checked by validate_subreg. We must know at compile time
4122 : which inner registers are being accessed. */
4123 : gcc_unreachable ();
4124 33195198 : return;
4125 : }
4126 : /* It's not valid to extract a subreg of mode YMODE at OFFSET that
4127 : would go outside of XMODE. */
4128 20111396 : if (!rknown && maybe_gt (ysize + offset, xsize))
4129 : {
4130 0 : info->representable_p = false;
4131 0 : info->nregs = nregs_ymode;
4132 0 : if (!can_div_trunc_p (offset, regsize_xmode, &info->offset))
4133 : /* Checked by validate_subreg. We must know at compile time
4134 : which inner registers are being accessed. */
4135 : gcc_unreachable ();
4136 0 : return;
4137 : }
4138 : /* Quick exit for the simple and common case of extracting whole
4139 : subregisters from a multiregister value. */
4140 : /* ??? It would be better to integrate this into the code below,
4141 : if we can generalize the concept enough and figure out how
4142 : odd-sized modes can coexist with the other weird cases we support. */
4143 20111396 : HOST_WIDE_INT count;
4144 20111396 : if (!rknown
4145 : && WORDS_BIG_ENDIAN == REG_WORDS_BIG_ENDIAN
4146 20111396 : && known_eq (regsize_xmode, regsize_ymode)
4147 20111396 : && constant_multiple_p (offset, regsize_ymode, &count))
4148 : {
4149 12800300 : info->representable_p = true;
4150 12800300 : info->nregs = nregs_ymode;
4151 12800300 : info->offset = count;
4152 12800300 : gcc_assert (info->offset + info->nregs <= (int) nregs_xmode);
4153 : return;
4154 : }
4155 : }
4156 :
4157 : /* Lowpart subregs are otherwise valid. */
4158 7311096 : if (!rknown && known_eq (offset, subreg_lowpart_offset (ymode, xmode)))
4159 : {
4160 6803092 : info->representable_p = true;
4161 6803092 : rknown = true;
4162 :
4163 6803092 : if (known_eq (offset, 0U) || nregs_xmode == nregs_ymode)
4164 : {
4165 6803092 : info->offset = 0;
4166 6803092 : info->nregs = nregs_ymode;
4167 6803092 : return;
4168 : }
4169 : }
4170 :
4171 : /* Set NUM_BLOCKS to the number of independently-representable YMODE
4172 : values there are in (reg:XMODE XREGNO). We can view the register
4173 : as consisting of this number of independent "blocks", where each
4174 : block occupies NREGS_YMODE registers and contains exactly one
4175 : representable YMODE value. */
4176 508004 : gcc_assert ((nregs_xmode % nregs_ymode) == 0);
4177 508004 : unsigned int num_blocks = nregs_xmode / nregs_ymode;
4178 :
4179 : /* Calculate the number of bytes in each block. This must always
4180 : be exact, otherwise we don't know how to verify the constraint.
4181 : These conditions may be relaxed but subreg_regno_offset would
4182 : need to be redesigned. */
4183 508004 : poly_uint64 bytes_per_block = exact_div (xsize, num_blocks);
4184 :
4185 : /* Get the number of the first block that contains the subreg and the byte
4186 : offset of the subreg from the start of that block. */
4187 508004 : unsigned int block_number;
4188 508004 : poly_uint64 subblock_offset;
4189 508004 : if (!can_div_trunc_p (offset, bytes_per_block, &block_number,
4190 : &subblock_offset))
4191 : /* Checked by validate_subreg. We must know at compile time which
4192 : inner registers are being accessed. */
4193 : gcc_unreachable ();
4194 :
4195 508004 : if (!rknown)
4196 : {
4197 : /* Only the lowpart of each block is representable. */
4198 508004 : info->representable_p
4199 508004 : = known_eq (subblock_offset,
4200 : subreg_size_lowpart_offset (ysize, bytes_per_block));
4201 508004 : rknown = true;
4202 : }
4203 :
4204 : /* We assume that the ordering of registers within a multi-register
4205 : value has a consistent endianness: if bytes and register words
4206 : have different endianness, the hard registers that make up a
4207 : multi-register value must be at least word-sized. */
4208 508004 : if (WORDS_BIG_ENDIAN != REG_WORDS_BIG_ENDIAN)
4209 : /* The block number we calculated above followed memory endianness.
4210 : Convert it to register endianness by counting back from the end.
4211 : (Note that, because of the assumption above, each block must be
4212 : at least word-sized.) */
4213 : info->offset = (num_blocks - block_number - 1) * nregs_ymode;
4214 : else
4215 508004 : info->offset = block_number * nregs_ymode;
4216 508004 : info->nregs = nregs_ymode;
4217 : }
4218 :
4219 : /* This function returns the regno offset of a subreg expression.
4220 : xregno - A regno of an inner hard subreg_reg (or what will become one).
4221 : xmode - The mode of xregno.
4222 : offset - The byte offset.
4223 : ymode - The mode of a top level SUBREG (or what may become one).
4224 : RETURN - The regno offset which would be used. */
4225 : unsigned int
4226 5393432 : subreg_regno_offset (unsigned int xregno, machine_mode xmode,
4227 : poly_uint64 offset, machine_mode ymode)
4228 : {
4229 5393432 : struct subreg_info info;
4230 5393432 : subreg_get_info (xregno, xmode, offset, ymode, &info);
4231 5393432 : return info.offset;
4232 : }
4233 :
4234 : /* This function returns true when the offset is representable via
4235 : subreg_offset in the given regno.
4236 : xregno - A regno of an inner hard subreg_reg (or what will become one).
4237 : xmode - The mode of xregno.
4238 : offset - The byte offset.
4239 : ymode - The mode of a top level SUBREG (or what may become one).
4240 : RETURN - Whether the offset is representable. */
4241 : bool
4242 0 : subreg_offset_representable_p (unsigned int xregno, machine_mode xmode,
4243 : poly_uint64 offset, machine_mode ymode)
4244 : {
4245 0 : struct subreg_info info;
4246 0 : subreg_get_info (xregno, xmode, offset, ymode, &info);
4247 0 : return info.representable_p;
4248 : }
4249 :
4250 : /* Return the number of a YMODE register to which
4251 :
4252 : (subreg:YMODE (reg:XMODE XREGNO) OFFSET)
4253 :
4254 : can be simplified. Return -1 if the subreg can't be simplified.
4255 :
4256 : XREGNO is a hard register number. ALLOW_STACK_REGS is true if
4257 : we should allow subregs of stack_pointer_rtx, frame_pointer_rtx.
4258 : and arg_pointer_rtx (which are normally expected to be the unique
4259 : way of referring to their respective registers). */
4260 :
4261 :
4262 : int
4263 29076267 : simplify_subreg_regno (unsigned int xregno, machine_mode xmode,
4264 : poly_uint64 offset, machine_mode ymode,
4265 : bool allow_stack_regs)
4266 : {
4267 29076267 : struct subreg_info info;
4268 29076267 : unsigned int yregno;
4269 :
4270 : /* Give the backend a chance to disallow the mode change. */
4271 29076267 : if (GET_MODE_CLASS (xmode) != MODE_COMPLEX_INT
4272 29076267 : && GET_MODE_CLASS (xmode) != MODE_COMPLEX_FLOAT
4273 29076267 : && !REG_CAN_CHANGE_MODE_P (xregno, xmode, ymode))
4274 : return -1;
4275 :
4276 28433625 : if (!allow_stack_regs)
4277 : {
4278 : /* We shouldn't simplify stack-related registers. */
4279 28088787 : if ((!reload_completed || frame_pointer_needed)
4280 24633262 : && xregno == FRAME_POINTER_REGNUM)
4281 : return -1;
4282 :
4283 27983124 : if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4284 : && xregno == ARG_POINTER_REGNUM)
4285 : return -1;
4286 :
4287 27881319 : if (xregno == STACK_POINTER_REGNUM
4288 : /* We should convert hard stack register in LRA if it is
4289 : possible. */
4290 102734 : && ! lra_in_progress)
4291 : return -1;
4292 : }
4293 :
4294 : /* Try to get the register offset. */
4295 28125047 : subreg_get_info (xregno, xmode, offset, ymode, &info);
4296 28125047 : if (!info.representable_p)
4297 : return -1;
4298 :
4299 : /* Make sure that the offsetted register value is in range. */
4300 27674781 : yregno = xregno + info.offset;
4301 27674781 : if (!HARD_REGISTER_NUM_P (yregno))
4302 : return -1;
4303 :
4304 : /* See whether (reg:YMODE YREGNO) is valid.
4305 :
4306 : ??? We allow invalid registers if (reg:XMODE XREGNO) is also invalid.
4307 : This is a kludge to work around how complex FP arguments are passed
4308 : on IA-64 and should be fixed. See PR target/49226. */
4309 27662784 : if (!targetm.hard_regno_mode_ok (yregno, ymode)
4310 27662784 : && targetm.hard_regno_mode_ok (xregno, xmode))
4311 : return -1;
4312 :
4313 27442148 : return (int) yregno;
4314 : }
4315 :
4316 : /* A wrapper around simplify_subreg_regno that uses subreg_lowpart_offset
4317 : (xmode, ymode) as the offset. */
4318 :
4319 : int
4320 0 : lowpart_subreg_regno (unsigned int regno, machine_mode xmode,
4321 : machine_mode ymode)
4322 : {
4323 0 : poly_uint64 offset = subreg_lowpart_offset (xmode, ymode);
4324 0 : return simplify_subreg_regno (regno, xmode, offset, ymode);
4325 : }
4326 :
4327 : /* Return the final regno that a subreg expression refers to. */
4328 : unsigned int
4329 11236 : subreg_regno (const_rtx x)
4330 : {
4331 11236 : unsigned int ret;
4332 11236 : rtx subreg = SUBREG_REG (x);
4333 11236 : int regno = REGNO (subreg);
4334 :
4335 22472 : ret = regno + subreg_regno_offset (regno,
4336 11236 : GET_MODE (subreg),
4337 11236 : SUBREG_BYTE (x),
4338 11236 : GET_MODE (x));
4339 11236 : return ret;
4340 :
4341 : }
4342 :
4343 : /* Return the number of registers that a subreg expression refers
4344 : to. */
4345 : unsigned int
4346 178105 : subreg_nregs (const_rtx x)
4347 : {
4348 178105 : return subreg_nregs_with_regno (REGNO (SUBREG_REG (x)), x);
4349 : }
4350 :
4351 : /* Return the number of registers that a subreg REG with REGNO
4352 : expression refers to. This is a copy of the rtlanal.cc:subreg_nregs
4353 : changed so that the regno can be passed in. */
4354 :
4355 : unsigned int
4356 178105 : subreg_nregs_with_regno (unsigned int regno, const_rtx x)
4357 : {
4358 178105 : struct subreg_info info;
4359 178105 : rtx subreg = SUBREG_REG (x);
4360 :
4361 178105 : subreg_get_info (regno, GET_MODE (subreg), SUBREG_BYTE (x), GET_MODE (x),
4362 : &info);
4363 178105 : return info.nregs;
4364 : }
4365 :
4366 : struct parms_set_data
4367 : {
4368 : int nregs;
4369 : HARD_REG_SET regs;
4370 : };
4371 :
4372 : /* Helper function for noticing stores to parameter registers. */
4373 : static void
4374 65914 : parms_set (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
4375 : {
4376 65914 : struct parms_set_data *const d = (struct parms_set_data *) data;
4377 65912 : if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
4378 131826 : && TEST_HARD_REG_BIT (d->regs, REGNO (x)))
4379 : {
4380 65584 : CLEAR_HARD_REG_BIT (d->regs, REGNO (x));
4381 65584 : d->nregs--;
4382 : }
4383 65914 : }
4384 :
4385 : /* Look backward for first parameter to be loaded.
4386 : Note that loads of all parameters will not necessarily be
4387 : found if CSE has eliminated some of them (e.g., an argument
4388 : to the outer function is passed down as a parameter).
4389 : Do not skip BOUNDARY. */
4390 : rtx_insn *
4391 40646 : find_first_parameter_load (rtx_insn *call_insn, rtx_insn *boundary)
4392 : {
4393 40646 : struct parms_set_data parm;
4394 40646 : rtx p;
4395 40646 : rtx_insn *before, *first_set;
4396 :
4397 : /* Since different machines initialize their parameter registers
4398 : in different orders, assume nothing. Collect the set of all
4399 : parameter registers. */
4400 40646 : CLEAR_HARD_REG_SET (parm.regs);
4401 40646 : parm.nregs = 0;
4402 121570 : for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
4403 80924 : if (GET_CODE (XEXP (p, 0)) == USE
4404 80762 : && REG_P (XEXP (XEXP (p, 0), 0))
4405 148928 : && !STATIC_CHAIN_REG_P (XEXP (XEXP (p, 0), 0)))
4406 : {
4407 67729 : gcc_assert (REGNO (XEXP (XEXP (p, 0), 0)) < FIRST_PSEUDO_REGISTER);
4408 :
4409 : /* We only care about registers which can hold function
4410 : arguments. */
4411 67729 : if (!FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0))))
4412 1788 : continue;
4413 :
4414 65941 : SET_HARD_REG_BIT (parm.regs, REGNO (XEXP (XEXP (p, 0), 0)));
4415 65941 : parm.nregs++;
4416 : }
4417 : before = call_insn;
4418 : first_set = call_insn;
4419 :
4420 : /* Search backward for the first set of a register in this set. */
4421 106230 : while (parm.nregs && before != boundary)
4422 : {
4423 65914 : before = PREV_INSN (before);
4424 :
4425 : /* It is possible that some loads got CSEed from one call to
4426 : another. Stop in that case. */
4427 65914 : if (CALL_P (before))
4428 : break;
4429 :
4430 : /* Our caller needs either ensure that we will find all sets
4431 : (in case code has not been optimized yet), or take care
4432 : for possible labels in a way by setting boundary to preceding
4433 : CODE_LABEL. */
4434 65914 : if (LABEL_P (before))
4435 : {
4436 0 : gcc_assert (before == boundary);
4437 : break;
4438 : }
4439 :
4440 65914 : if (INSN_P (before))
4441 : {
4442 65914 : int nregs_old = parm.nregs;
4443 65914 : note_stores (before, parms_set, &parm);
4444 : /* If we found something that did not set a parameter reg,
4445 : we're done. Do not keep going, as that might result
4446 : in hoisting an insn before the setting of a pseudo
4447 : that is used by the hoisted insn. */
4448 65914 : if (nregs_old != parm.nregs)
4449 : first_set = before;
4450 : else
4451 : break;
4452 : }
4453 : }
4454 40646 : return first_set;
4455 : }
4456 :
4457 : /* Return true if we should avoid inserting code between INSN and preceding
4458 : call instruction. */
4459 :
4460 : bool
4461 11259068 : keep_with_call_p (const rtx_insn *insn)
4462 : {
4463 11259068 : rtx set;
4464 :
4465 11259068 : if (INSN_P (insn) && (set = single_set (insn)) != NULL)
4466 : {
4467 7571278 : if (REG_P (SET_DEST (set))
4468 2042295 : && REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
4469 2042295 : && fixed_regs[REGNO (SET_DEST (set))]
4470 7737948 : && general_operand (SET_SRC (set), VOIDmode))
4471 : return true;
4472 7570927 : if (REG_P (SET_SRC (set))
4473 790172 : && targetm.calls.function_value_regno_p (REGNO (SET_SRC (set)))
4474 500657 : && REG_P (SET_DEST (set))
4475 7731772 : && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
4476 : return true;
4477 : /* There may be a stack pop just after the call and before the store
4478 : of the return register. Search for the actual store when deciding
4479 : if we can break or not. */
4480 7570927 : if (SET_DEST (set) == stack_pointer_rtx)
4481 : {
4482 : /* This CONST_CAST is okay because next_nonnote_insn just
4483 : returns its argument and we assign it to a const_rtx
4484 : variable. */
4485 165178 : const rtx_insn *i2
4486 165178 : = next_nonnote_insn (const_cast<rtx_insn *> (insn));
4487 165178 : if (i2 && keep_with_call_p (i2))
4488 : return true;
4489 : }
4490 : }
4491 : return false;
4492 : }
4493 :
4494 : /* Return true if LABEL is a target of JUMP_INSN. This applies only
4495 : to non-complex jumps. That is, direct unconditional, conditional,
4496 : and tablejumps, but not computed jumps or returns. It also does
4497 : not apply to the fallthru case of a conditional jump. */
4498 :
4499 : bool
4500 24199970 : label_is_jump_target_p (const_rtx label, const rtx_insn *jump_insn)
4501 : {
4502 24199970 : rtx tmp = JUMP_LABEL (jump_insn);
4503 24199970 : rtx_jump_table_data *table;
4504 :
4505 24199970 : if (label == tmp)
4506 : return true;
4507 :
4508 4035801 : if (tablejump_p (jump_insn, NULL, &table))
4509 : {
4510 0 : rtvec vec = table->get_labels ();
4511 0 : int i, veclen = GET_NUM_ELEM (vec);
4512 :
4513 0 : for (i = 0; i < veclen; ++i)
4514 0 : if (XEXP (RTVEC_ELT (vec, i), 0) == label)
4515 : return true;
4516 : }
4517 :
4518 4035801 : if (find_reg_note (jump_insn, REG_LABEL_TARGET, label))
4519 : return true;
4520 :
4521 : return false;
4522 : }
4523 :
4524 :
4525 : /* Return an estimate of the cost of computing rtx X.
4526 : One use is in cse, to decide which expression to keep in the hash table.
4527 : Another is in rtl generation, to pick the cheapest way to multiply.
4528 : Other uses like the latter are expected in the future.
4529 :
4530 : X appears as operand OPNO in an expression with code OUTER_CODE.
4531 : SPEED specifies whether costs optimized for speed or size should
4532 : be returned. */
4533 :
4534 : int
4535 12469610290 : rtx_cost (rtx x, machine_mode mode, enum rtx_code outer_code,
4536 : int opno, bool speed)
4537 : {
4538 12469610290 : int i, j;
4539 12469610290 : enum rtx_code code;
4540 12469610290 : const char *fmt;
4541 12469610290 : int total;
4542 12469610290 : int factor;
4543 12469610290 : unsigned mode_size;
4544 :
4545 12469610290 : if (x == 0)
4546 : return 0;
4547 :
4548 12469610290 : if (GET_CODE (x) == SET)
4549 : /* A SET doesn't have a mode, so let's look at the SET_DEST to get
4550 : the mode for the factor. */
4551 47325526 : mode = GET_MODE (SET_DEST (x));
4552 12422284764 : else if (GET_MODE (x) != VOIDmode)
4553 9574192358 : mode = GET_MODE (x);
4554 :
4555 24939220580 : mode_size = estimated_poly_value (GET_MODE_SIZE (mode));
4556 :
4557 : /* A size N times larger than UNITS_PER_WORD likely needs N times as
4558 : many insns, taking N times as long. */
4559 12976082625 : factor = mode_size > UNITS_PER_WORD ? mode_size / UNITS_PER_WORD : 1;
4560 :
4561 : /* Compute the default costs of certain things.
4562 : Note that targetm.rtx_costs can override the defaults. */
4563 :
4564 12469610290 : code = GET_CODE (x);
4565 12469610290 : switch (code)
4566 : {
4567 1739939297 : case MULT:
4568 1739939297 : case FMA:
4569 1739939297 : case SS_MULT:
4570 1739939297 : case US_MULT:
4571 1739939297 : case SMUL_HIGHPART:
4572 1739939297 : case UMUL_HIGHPART:
4573 : /* Multiplication has time-complexity O(N*N), where N is the
4574 : number of units (translated from digits) when using
4575 : schoolbook long multiplication. */
4576 1739939297 : total = factor * factor * COSTS_N_INSNS (5);
4577 1739939297 : break;
4578 71742640 : case DIV:
4579 71742640 : case UDIV:
4580 71742640 : case MOD:
4581 71742640 : case UMOD:
4582 71742640 : case SS_DIV:
4583 71742640 : case US_DIV:
4584 : /* Similarly, complexity for schoolbook long division. */
4585 71742640 : total = factor * factor * COSTS_N_INSNS (7);
4586 71742640 : break;
4587 0 : case USE:
4588 : /* Used in combine.cc as a marker. */
4589 0 : total = 0;
4590 0 : break;
4591 10657928353 : default:
4592 10657928353 : total = factor * COSTS_N_INSNS (1);
4593 : }
4594 :
4595 12469610290 : switch (code)
4596 : {
4597 : case REG:
4598 : return 0;
4599 :
4600 11596300 : case SUBREG:
4601 11596300 : total = 0;
4602 : /* If we can't tie these modes, make this expensive. The larger
4603 : the mode, the more expensive it is. */
4604 11596300 : if (!targetm.modes_tieable_p (mode, GET_MODE (SUBREG_REG (x))))
4605 4289450 : return COSTS_N_INSNS (2 + factor);
4606 : break;
4607 :
4608 17095095 : case TRUNCATE:
4609 17095095 : if (targetm.modes_tieable_p (mode, GET_MODE (XEXP (x, 0))))
4610 : {
4611 3811011 : total = 0;
4612 3811011 : break;
4613 : }
4614 : /* FALLTHRU */
4615 7628701434 : default:
4616 7628701434 : if (targetm.rtx_costs (x, mode, outer_code, opno, &total, speed))
4617 3498558296 : return total;
4618 : break;
4619 : }
4620 :
4621 : /* Sum the costs of the sub-rtx's, plus cost of this operation,
4622 : which is already in total. */
4623 :
4624 4141260999 : fmt = GET_RTX_FORMAT (code);
4625 12333363816 : for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4626 8192102817 : if (fmt[i] == 'e')
4627 8123186687 : total += rtx_cost (XEXP (x, i), mode, code, i, speed);
4628 68916130 : else if (fmt[i] == 'E')
4629 11748017 : for (j = 0; j < XVECLEN (x, i); j++)
4630 6693332 : total += rtx_cost (XVECEXP (x, i, j), mode, code, i, speed);
4631 :
4632 4141260999 : return total;
4633 : }
4634 :
4635 : /* Fill in the structure C with information about both speed and size rtx
4636 : costs for X, which is operand OPNO in an expression with code OUTER. */
4637 :
4638 : void
4639 2204393 : get_full_rtx_cost (rtx x, machine_mode mode, enum rtx_code outer, int opno,
4640 : struct full_rtx_costs *c)
4641 : {
4642 2204393 : c->speed = rtx_cost (x, mode, outer, opno, true);
4643 2204393 : c->size = rtx_cost (x, mode, outer, opno, false);
4644 2204393 : }
4645 :
4646 :
4647 : /* Return cost of address expression X.
4648 : Expect that X is properly formed address reference.
4649 :
4650 : SPEED parameter specify whether costs optimized for speed or size should
4651 : be returned. */
4652 :
4653 : int
4654 10867516 : address_cost (rtx x, machine_mode mode, addr_space_t as, bool speed)
4655 : {
4656 : /* We may be asked for cost of various unusual addresses, such as operands
4657 : of push instruction. It is not worthwhile to complicate writing
4658 : of the target hook by such cases. */
4659 :
4660 10867516 : if (!memory_address_addr_space_p (mode, x, as))
4661 : return 1000;
4662 :
4663 10783477 : return targetm.address_cost (x, mode, as, speed);
4664 : }
4665 :
4666 : /* If the target doesn't override, compute the cost as with arithmetic. */
4667 :
4668 : int
4669 0 : default_address_cost (rtx x, machine_mode, addr_space_t, bool speed)
4670 : {
4671 0 : return rtx_cost (x, Pmode, MEM, 0, speed);
4672 : }
4673 :
4674 :
4675 : unsigned HOST_WIDE_INT
4676 692633191 : nonzero_bits (const_rtx x, machine_mode mode)
4677 : {
4678 692633191 : if (mode == VOIDmode)
4679 0 : mode = GET_MODE (x);
4680 692633191 : scalar_int_mode int_mode;
4681 692633191 : if (!is_a <scalar_int_mode> (mode, &int_mode))
4682 20366382 : return GET_MODE_MASK (mode);
4683 672266809 : return cached_nonzero_bits (x, int_mode, NULL_RTX, VOIDmode, 0);
4684 : }
4685 :
4686 : unsigned int
4687 245542944 : num_sign_bit_copies (const_rtx x, machine_mode mode)
4688 : {
4689 245542944 : if (mode == VOIDmode)
4690 1 : mode = GET_MODE (x);
4691 245542944 : scalar_int_mode int_mode;
4692 245542944 : if (!is_a <scalar_int_mode> (mode, &int_mode))
4693 : return 1;
4694 225495523 : return cached_num_sign_bit_copies (x, int_mode, NULL_RTX, VOIDmode, 0);
4695 : }
4696 :
4697 : /* Return true if nonzero_bits1 might recurse into both operands
4698 : of X. */
4699 :
4700 : static inline bool
4701 1420919877 : nonzero_bits_binary_arith_p (const_rtx x)
4702 : {
4703 1420919877 : if (!ARITHMETIC_P (x))
4704 : return false;
4705 247662816 : switch (GET_CODE (x))
4706 : {
4707 : case AND:
4708 : case XOR:
4709 : case IOR:
4710 : case UMIN:
4711 : case UMAX:
4712 : case SMIN:
4713 : case SMAX:
4714 : case PLUS:
4715 : case MINUS:
4716 : case MULT:
4717 : case DIV:
4718 : case UDIV:
4719 : case MOD:
4720 : case UMOD:
4721 : return true;
4722 : default:
4723 : return false;
4724 : }
4725 : }
4726 :
4727 : /* The function cached_nonzero_bits is a wrapper around nonzero_bits1.
4728 : It avoids exponential behavior in nonzero_bits1 when X has
4729 : identical subexpressions on the first or the second level. */
4730 :
4731 : static unsigned HOST_WIDE_INT
4732 1143782893 : cached_nonzero_bits (const_rtx x, scalar_int_mode mode, const_rtx known_x,
4733 : machine_mode known_mode,
4734 : unsigned HOST_WIDE_INT known_ret)
4735 : {
4736 1143782893 : if (x == known_x && mode == known_mode)
4737 : return known_ret;
4738 :
4739 : /* Try to find identical subexpressions. If found call
4740 : nonzero_bits1 on X with the subexpressions as KNOWN_X and the
4741 : precomputed value for the subexpression as KNOWN_RET. */
4742 :
4743 1141529814 : if (nonzero_bits_binary_arith_p (x))
4744 : {
4745 140302738 : rtx x0 = XEXP (x, 0);
4746 140302738 : rtx x1 = XEXP (x, 1);
4747 :
4748 : /* Check the first level. */
4749 140302738 : if (x0 == x1)
4750 59558 : return nonzero_bits1 (x, mode, x0, mode,
4751 : cached_nonzero_bits (x0, mode, known_x,
4752 59558 : known_mode, known_ret));
4753 :
4754 : /* Check the second level. */
4755 140243180 : if (nonzero_bits_binary_arith_p (x0)
4756 140243180 : && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
4757 1096297 : return nonzero_bits1 (x, mode, x1, mode,
4758 : cached_nonzero_bits (x1, mode, known_x,
4759 1096297 : known_mode, known_ret));
4760 :
4761 139146883 : if (nonzero_bits_binary_arith_p (x1)
4762 139146883 : && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
4763 6214 : return nonzero_bits1 (x, mode, x0, mode,
4764 : cached_nonzero_bits (x0, mode, known_x,
4765 6214 : known_mode, known_ret));
4766 : }
4767 :
4768 1140367745 : return nonzero_bits1 (x, mode, known_x, known_mode, known_ret);
4769 : }
4770 :
4771 : /* We let num_sign_bit_copies recur into nonzero_bits as that is useful.
4772 : We don't let nonzero_bits recur into num_sign_bit_copies, because that
4773 : is less useful. We can't allow both, because that results in exponential
4774 : run time recursion. There is a nullstone testcase that triggered
4775 : this. This macro avoids accidental uses of num_sign_bit_copies. */
4776 : #define cached_num_sign_bit_copies sorry_i_am_preventing_exponential_behavior
4777 :
4778 : /* Given an expression, X, compute which bits in X can be nonzero.
4779 : We don't care about bits outside of those defined in MODE.
4780 :
4781 : For most X this is simply GET_MODE_MASK (GET_MODE (X)), but if X is
4782 : an arithmetic operation, we can do better. */
4783 :
4784 : static unsigned HOST_WIDE_INT
4785 1141529814 : nonzero_bits1 (const_rtx x, scalar_int_mode mode, const_rtx known_x,
4786 : machine_mode known_mode,
4787 : unsigned HOST_WIDE_INT known_ret)
4788 : {
4789 1141529814 : unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
4790 1141529814 : unsigned HOST_WIDE_INT inner_nz;
4791 1141529814 : enum rtx_code code = GET_CODE (x);
4792 1141529814 : machine_mode inner_mode;
4793 1141529814 : unsigned int inner_width;
4794 1141529814 : scalar_int_mode xmode;
4795 :
4796 1141529814 : unsigned int mode_width = GET_MODE_PRECISION (mode);
4797 :
4798 : /* For unary ops like ffs or popcount we want to determine the number of
4799 : nonzero bits from the operand. This only matters with very large
4800 : vector modes. A
4801 : (popcount:DI (V128BImode)
4802 : should not get a nonzero-bit mask of (1 << 7) - 1 as that could
4803 : lead to incorrect optimizations based on it, see PR123501. */
4804 1141529814 : unsigned int op_mode_width = mode_width;
4805 1141529814 : machine_mode op_mode = mode;
4806 1141529814 : if (UNARY_P (x))
4807 : {
4808 15937128 : const_rtx op = XEXP (x, 0);
4809 15937128 : if (GET_MODE_PRECISION (GET_MODE (op)).is_constant ())
4810 : {
4811 15937128 : op_mode = GET_MODE (op);
4812 15937128 : op_mode_width = GET_MODE_PRECISION (op_mode).to_constant ();
4813 : }
4814 : }
4815 :
4816 1141529814 : if (CONST_INT_P (x))
4817 : {
4818 117781473 : if (SHORT_IMMEDIATES_SIGN_EXTEND
4819 : && INTVAL (x) > 0
4820 : && mode_width < BITS_PER_WORD
4821 : && (UINTVAL (x) & (HOST_WIDE_INT_1U << (mode_width - 1))) != 0)
4822 : return UINTVAL (x) | (HOST_WIDE_INT_M1U << mode_width);
4823 :
4824 117781473 : return UINTVAL (x);
4825 : }
4826 :
4827 1023748341 : if (!is_a <scalar_int_mode> (GET_MODE (x), &xmode))
4828 : return nonzero;
4829 1023419232 : unsigned int xmode_width = GET_MODE_PRECISION (xmode);
4830 :
4831 : /* If X is wider than MODE, use its mode instead. */
4832 1023419232 : if (xmode_width > mode_width)
4833 : {
4834 18314495 : mode = xmode;
4835 18314495 : nonzero = GET_MODE_MASK (mode);
4836 18314495 : mode_width = xmode_width;
4837 : }
4838 :
4839 1023419232 : if (mode_width > HOST_BITS_PER_WIDE_INT)
4840 : /* Our only callers in this case look for single bit values. So
4841 : just return the mode mask. Those tests will then be false. */
4842 : return nonzero;
4843 :
4844 : /* If MODE is wider than X, but both are a single word for both the host
4845 : and target machines, we can compute this from which bits of the object
4846 : might be nonzero in its own mode, taking into account the fact that, on
4847 : CISC machines, accessing an object in a wider mode generally causes the
4848 : high-order bits to become undefined, so they are not known to be zero.
4849 : We extend this reasoning to RISC machines for operations that might not
4850 : operate on the full registers. */
4851 1022064107 : if (mode_width > xmode_width
4852 111536643 : && xmode_width <= BITS_PER_WORD
4853 : && xmode_width <= HOST_BITS_PER_WIDE_INT
4854 : && !(WORD_REGISTER_OPERATIONS && word_register_operation_p (x)))
4855 : {
4856 95214926 : nonzero &= cached_nonzero_bits (x, xmode,
4857 : known_x, known_mode, known_ret);
4858 95214926 : nonzero |= GET_MODE_MASK (mode) & ~GET_MODE_MASK (xmode);
4859 95214926 : return nonzero;
4860 : }
4861 :
4862 : /* Please keep nonzero_bits_binary_arith_p above in sync with
4863 : the code in the switch below. */
4864 926849181 : switch (code)
4865 : {
4866 511417159 : case REG:
4867 : #if defined(POINTERS_EXTEND_UNSIGNED)
4868 : /* If pointers extend unsigned and this is a pointer in Pmode, say that
4869 : all the bits above ptr_mode are known to be zero. */
4870 : /* As we do not know which address space the pointer is referring to,
4871 : we can do this only if the target does not support different pointer
4872 : or address modes depending on the address space. */
4873 511417159 : if (target_default_pointer_address_modes_p ()
4874 : && POINTERS_EXTEND_UNSIGNED
4875 573910485 : && xmode == Pmode
4876 324826835 : && REG_POINTER (x)
4877 598075527 : && !targetm.have_ptr_extend ())
4878 86658368 : nonzero &= GET_MODE_MASK (ptr_mode);
4879 : #endif
4880 :
4881 : /* Include declared information about alignment of pointers. */
4882 : /* ??? We don't properly preserve REG_POINTER changes across
4883 : pointer-to-integer casts, so we can't trust it except for
4884 : things that we know must be pointers. See execute/960116-1.c. */
4885 511417159 : if ((x == stack_pointer_rtx
4886 510409785 : || x == frame_pointer_rtx
4887 496206040 : || x == arg_pointer_rtx)
4888 526170325 : && REGNO_POINTER_ALIGN (REGNO (x)))
4889 : {
4890 15760540 : unsigned HOST_WIDE_INT alignment
4891 15760540 : = REGNO_POINTER_ALIGN (REGNO (x)) / BITS_PER_UNIT;
4892 :
4893 : #ifdef PUSH_ROUNDING
4894 : /* If PUSH_ROUNDING is defined, it is possible for the
4895 : stack to be momentarily aligned only to that amount,
4896 : so we pick the least alignment. */
4897 15760540 : if (x == stack_pointer_rtx && targetm.calls.push_argument (0))
4898 : {
4899 782907 : poly_uint64 rounded_1 = PUSH_ROUNDING (poly_int64 (1));
4900 782907 : alignment = MIN (known_alignment (rounded_1), alignment);
4901 : }
4902 : #endif
4903 :
4904 15760540 : nonzero &= ~(alignment - 1);
4905 : }
4906 :
4907 511417159 : {
4908 511417159 : unsigned HOST_WIDE_INT nonzero_for_hook = nonzero;
4909 511417159 : rtx new_rtx = rtl_hooks.reg_nonzero_bits (x, xmode, mode,
4910 : &nonzero_for_hook);
4911 :
4912 511417159 : if (new_rtx)
4913 6 : nonzero_for_hook &= cached_nonzero_bits (new_rtx, mode, known_x,
4914 : known_mode, known_ret);
4915 :
4916 511417159 : return nonzero_for_hook;
4917 : }
4918 :
4919 : case MEM:
4920 : /* In many, if not most, RISC machines, reading a byte from memory
4921 : zeros the rest of the register. Noticing that fact saves a lot
4922 : of extra zero-extends. */
4923 : if (load_extend_op (xmode) == ZERO_EXTEND)
4924 : nonzero &= GET_MODE_MASK (xmode);
4925 : break;
4926 :
4927 9554177 : case EQ: case NE:
4928 9554177 : case UNEQ: case LTGT:
4929 9554177 : case GT: case GTU: case UNGT:
4930 9554177 : case LT: case LTU: case UNLT:
4931 9554177 : case GE: case GEU: case UNGE:
4932 9554177 : case LE: case LEU: case UNLE:
4933 9554177 : case UNORDERED: case ORDERED:
4934 : /* If this produces an integer result, we know which bits are set.
4935 : Code here used to clear bits outside the mode of X, but that is
4936 : now done above. */
4937 : /* Mind that MODE is the mode the caller wants to look at this
4938 : operation in, and not the actual operation mode. We can wind
4939 : up with (subreg:DI (gt:V4HI x y)), and we don't have anything
4940 : that describes the results of a vector compare. */
4941 9554177 : if (GET_MODE_CLASS (xmode) == MODE_INT
4942 9554177 : && mode_width <= HOST_BITS_PER_WIDE_INT)
4943 1141529814 : nonzero = STORE_FLAG_VALUE;
4944 : break;
4945 :
4946 999011 : case NEG:
4947 : #if 0
4948 : /* Disabled to avoid exponential mutual recursion between nonzero_bits
4949 : and num_sign_bit_copies. */
4950 : if (num_sign_bit_copies (XEXP (x, 0), xmode) == xmode_width)
4951 : nonzero = 1;
4952 : #endif
4953 :
4954 999011 : if (xmode_width < mode_width)
4955 0 : nonzero |= (GET_MODE_MASK (mode) & ~GET_MODE_MASK (xmode));
4956 : break;
4957 :
4958 : case ABS:
4959 : #if 0
4960 : /* Disabled to avoid exponential mutual recursion between nonzero_bits
4961 : and num_sign_bit_copies. */
4962 : if (num_sign_bit_copies (XEXP (x, 0), xmode) == xmode_width)
4963 : nonzero = 1;
4964 : #endif
4965 : break;
4966 :
4967 9723 : case TRUNCATE:
4968 9723 : nonzero &= (cached_nonzero_bits (XEXP (x, 0), mode,
4969 : known_x, known_mode, known_ret)
4970 9723 : & GET_MODE_MASK (mode));
4971 9723 : break;
4972 :
4973 6642937 : case ZERO_EXTEND:
4974 6642937 : nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
4975 : known_x, known_mode, known_ret);
4976 6642937 : if (GET_MODE (XEXP (x, 0)) != VOIDmode)
4977 6642937 : nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
4978 : break;
4979 :
4980 1864208 : case SIGN_EXTEND:
4981 : /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
4982 : Otherwise, show all the bits in the outer mode but not the inner
4983 : may be nonzero. */
4984 1864208 : inner_nz = cached_nonzero_bits (XEXP (x, 0), mode,
4985 : known_x, known_mode, known_ret);
4986 1864208 : if (GET_MODE (XEXP (x, 0)) != VOIDmode)
4987 : {
4988 1864208 : inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
4989 1864208 : if (val_signbit_known_set_p (GET_MODE (XEXP (x, 0)), inner_nz))
4990 1826208 : inner_nz |= (GET_MODE_MASK (mode)
4991 1826208 : & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
4992 : }
4993 :
4994 1864208 : nonzero &= inner_nz;
4995 1864208 : break;
4996 :
4997 16250685 : case AND:
4998 16250685 : nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
4999 : known_x, known_mode, known_ret)
5000 16250685 : & cached_nonzero_bits (XEXP (x, 1), mode,
5001 : known_x, known_mode, known_ret);
5002 16250685 : break;
5003 :
5004 10407370 : case XOR: case IOR:
5005 10407370 : case UMIN: case UMAX: case SMIN: case SMAX:
5006 10407370 : {
5007 10407370 : unsigned HOST_WIDE_INT nonzero0
5008 10407370 : = cached_nonzero_bits (XEXP (x, 0), mode,
5009 : known_x, known_mode, known_ret);
5010 :
5011 : /* Don't call nonzero_bits for the second time if it cannot change
5012 : anything. */
5013 10407370 : if ((nonzero & nonzero0) != nonzero)
5014 9852774 : nonzero &= nonzero0
5015 4926387 : | cached_nonzero_bits (XEXP (x, 1), mode,
5016 : known_x, known_mode, known_ret);
5017 : }
5018 : break;
5019 :
5020 94033480 : case PLUS: case MINUS:
5021 94033480 : case MULT:
5022 94033480 : case DIV: case UDIV:
5023 94033480 : case MOD: case UMOD:
5024 : /* We can apply the rules of arithmetic to compute the number of
5025 : high- and low-order zero bits of these operations. We start by
5026 : computing the width (position of the highest-order nonzero bit)
5027 : and the number of low-order zero bits for each value. */
5028 94033480 : {
5029 94033480 : unsigned HOST_WIDE_INT nz0
5030 94033480 : = cached_nonzero_bits (XEXP (x, 0), mode,
5031 : known_x, known_mode, known_ret);
5032 94033480 : unsigned HOST_WIDE_INT nz1
5033 94033480 : = cached_nonzero_bits (XEXP (x, 1), mode,
5034 : known_x, known_mode, known_ret);
5035 94033480 : int sign_index = xmode_width - 1;
5036 94033480 : int width0 = floor_log2 (nz0) + 1;
5037 94033480 : int width1 = floor_log2 (nz1) + 1;
5038 94033480 : int low0 = ctz_or_zero (nz0);
5039 94033480 : int low1 = ctz_or_zero (nz1);
5040 94033480 : unsigned HOST_WIDE_INT op0_maybe_minusp
5041 94033480 : = nz0 & (HOST_WIDE_INT_1U << sign_index);
5042 94033480 : unsigned HOST_WIDE_INT op1_maybe_minusp
5043 : = nz1 & (HOST_WIDE_INT_1U << sign_index);
5044 94033480 : unsigned int result_width = mode_width;
5045 94033480 : int result_low = 0;
5046 :
5047 94033480 : switch (code)
5048 : {
5049 69643982 : case PLUS:
5050 69643982 : result_width = MAX (width0, width1) + 1;
5051 69643982 : result_low = MIN (low0, low1);
5052 : break;
5053 15259934 : case MINUS:
5054 15259934 : result_low = MIN (low0, low1);
5055 : break;
5056 7360894 : case MULT:
5057 7360894 : result_width = width0 + width1;
5058 7360894 : result_low = low0 + low1;
5059 7360894 : break;
5060 676448 : case DIV:
5061 676448 : if (width1 == 0)
5062 : break;
5063 666738 : if (!op0_maybe_minusp && !op1_maybe_minusp)
5064 23051 : result_width = width0;
5065 : break;
5066 284853 : case UDIV:
5067 284853 : if (width1 == 0)
5068 : break;
5069 283985 : result_width = width0;
5070 283985 : break;
5071 409731 : case MOD:
5072 409731 : if (width1 == 0)
5073 : break;
5074 402399 : if (!op0_maybe_minusp && !op1_maybe_minusp)
5075 21301 : result_width = MIN (width0, width1);
5076 402399 : result_low = MIN (low0, low1);
5077 : break;
5078 397638 : case UMOD:
5079 397638 : if (width1 == 0)
5080 : break;
5081 397534 : result_width = MIN (width0, width1);
5082 397534 : result_low = MIN (low0, low1);
5083 : break;
5084 0 : default:
5085 0 : gcc_unreachable ();
5086 : }
5087 :
5088 : /* Note that mode_width <= HOST_BITS_PER_WIDE_INT, see above. */
5089 94033480 : if (result_width < mode_width)
5090 4107001 : nonzero &= (HOST_WIDE_INT_1U << result_width) - 1;
5091 :
5092 94033480 : if (result_low > 0)
5093 : {
5094 6846985 : if (result_low < HOST_BITS_PER_WIDE_INT)
5095 6846973 : nonzero &= ~((HOST_WIDE_INT_1U << result_low) - 1);
5096 : else
5097 : nonzero = 0;
5098 : }
5099 : }
5100 : break;
5101 :
5102 1173560 : case ZERO_EXTRACT:
5103 1173560 : if (CONST_INT_P (XEXP (x, 1))
5104 1173183 : && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
5105 1173000 : nonzero &= (HOST_WIDE_INT_1U << INTVAL (XEXP (x, 1))) - 1;
5106 : break;
5107 :
5108 75137083 : case SUBREG:
5109 : /* If this is a SUBREG formed for a promoted variable that has
5110 : been zero-extended, we know that at least the high-order bits
5111 : are zero, though others might be too. */
5112 75137083 : if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x))
5113 37258 : nonzero = GET_MODE_MASK (xmode)
5114 37258 : & cached_nonzero_bits (SUBREG_REG (x), xmode,
5115 : known_x, known_mode, known_ret);
5116 :
5117 : /* If the inner mode is a single word for both the host and target
5118 : machines, we can compute this from which bits of the inner
5119 : object might be nonzero. */
5120 75137083 : inner_mode = GET_MODE (SUBREG_REG (x));
5121 75137083 : if (GET_MODE_PRECISION (inner_mode).is_constant (&inner_width)
5122 80280661 : && inner_width <= BITS_PER_WORD
5123 : && inner_width <= HOST_BITS_PER_WIDE_INT)
5124 : {
5125 71030679 : nonzero &= cached_nonzero_bits (SUBREG_REG (x), mode,
5126 : known_x, known_mode, known_ret);
5127 :
5128 : /* On a typical CISC machine, accessing an object in a wider mode
5129 : causes the high-order bits to become undefined. So they are
5130 : not known to be zero.
5131 :
5132 : On a typical RISC machine, we only have to worry about the way
5133 : loads are extended. Otherwise, if we get a reload for the inner
5134 : part, it may be loaded from the stack, and then we may lose all
5135 : the zero bits that existed before the store to the stack. */
5136 71030679 : rtx_code extend_op;
5137 71030679 : if ((!WORD_REGISTER_OPERATIONS
5138 : || ((extend_op = load_extend_op (inner_mode)) == SIGN_EXTEND
5139 : ? val_signbit_known_set_p (inner_mode, nonzero)
5140 : : extend_op != ZERO_EXTEND)
5141 : || !MEM_P (SUBREG_REG (x)))
5142 : && xmode_width > inner_width)
5143 54325873 : nonzero
5144 54325873 : |= (GET_MODE_MASK (GET_MODE (x)) & ~GET_MODE_MASK (inner_mode));
5145 : }
5146 : break;
5147 :
5148 55750103 : case ASHIFT:
5149 55750103 : case ASHIFTRT:
5150 55750103 : case LSHIFTRT:
5151 55750103 : case ROTATE:
5152 55750103 : case ROTATERT:
5153 : /* The nonzero bits are in two classes: any bits within MODE
5154 : that aren't in xmode are always significant. The rest of the
5155 : nonzero bits are those that are significant in the operand of
5156 : the shift when shifted the appropriate number of bits. This
5157 : shows that high-order bits are cleared by the right shift and
5158 : low-order bits by left shifts. */
5159 55750103 : if (CONST_INT_P (XEXP (x, 1))
5160 54269921 : && INTVAL (XEXP (x, 1)) >= 0
5161 54269775 : && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
5162 54269687 : && INTVAL (XEXP (x, 1)) < xmode_width)
5163 : {
5164 54269616 : int count = INTVAL (XEXP (x, 1));
5165 54269616 : unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (xmode);
5166 54269616 : unsigned HOST_WIDE_INT op_nonzero
5167 54269616 : = cached_nonzero_bits (XEXP (x, 0), mode,
5168 : known_x, known_mode, known_ret);
5169 54269616 : unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask;
5170 54269616 : unsigned HOST_WIDE_INT outer = 0;
5171 :
5172 54269616 : if (mode_width > xmode_width)
5173 0 : outer = (op_nonzero & nonzero & ~mode_mask);
5174 :
5175 54269616 : switch (code)
5176 : {
5177 31482313 : case ASHIFT:
5178 31482313 : inner <<= count;
5179 31482313 : break;
5180 :
5181 14518221 : case LSHIFTRT:
5182 14518221 : inner >>= count;
5183 14518221 : break;
5184 :
5185 8155078 : case ASHIFTRT:
5186 8155078 : inner >>= count;
5187 :
5188 : /* If the sign bit may have been nonzero before the shift, we
5189 : need to mark all the places it could have been copied to
5190 : by the shift as possibly nonzero. */
5191 8155078 : if (inner & (HOST_WIDE_INT_1U << (xmode_width - 1 - count)))
5192 8141073 : inner |= (((HOST_WIDE_INT_1U << count) - 1)
5193 8141073 : << (xmode_width - count));
5194 : break;
5195 :
5196 72608 : case ROTATE:
5197 72608 : inner = (inner << (count % xmode_width)
5198 72608 : | (inner >> (xmode_width - (count % xmode_width))))
5199 : & mode_mask;
5200 72608 : break;
5201 :
5202 41396 : case ROTATERT:
5203 41396 : inner = (inner >> (count % xmode_width)
5204 41396 : | (inner << (xmode_width - (count % xmode_width))))
5205 : & mode_mask;
5206 41396 : break;
5207 :
5208 : default:
5209 : gcc_unreachable ();
5210 : }
5211 :
5212 54269616 : nonzero &= (outer | inner);
5213 : }
5214 : break;
5215 :
5216 5084 : case FFS:
5217 5084 : case POPCOUNT:
5218 : /* This is at most the number of bits in the mode. */
5219 5084 : nonzero = (HOST_WIDE_INT_UC (2) << (floor_log2 (op_mode_width))) - 1;
5220 5084 : break;
5221 :
5222 743148 : case CLZ:
5223 : /* If CLZ has a known value at zero, then the nonzero bits are
5224 : that value, plus the number of bits in the mode minus one.
5225 : If we have a different operand mode, don't try to get nonzero
5226 : bits as currently nonzero is not a poly_int. */
5227 743148 : if (op_mode == mode
5228 1486284 : && CLZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
5229 1147 : nonzero
5230 2294 : |= (HOST_WIDE_INT_1U << (floor_log2 (mode_width))) - 1;
5231 : else
5232 : nonzero = -1;
5233 : break;
5234 :
5235 38164 : case CTZ:
5236 : /* If CTZ has a known value at zero, then the nonzero bits are
5237 : that value, plus the number of bits in the mode minus one.
5238 : See above for op_mode != mode. */
5239 38164 : if (op_mode == mode
5240 76328 : && CLZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
5241 1363 : nonzero
5242 2726 : |= (HOST_WIDE_INT_1U << (floor_log2 (mode_width))) - 1;
5243 : else
5244 : nonzero = -1;
5245 : break;
5246 :
5247 8 : case CLRSB:
5248 : /* This is at most the number of bits in the mode minus 1. */
5249 8 : nonzero = (HOST_WIDE_INT_1U << (floor_log2 (op_mode_width))) - 1;
5250 8 : break;
5251 :
5252 : case PARITY:
5253 1141529814 : nonzero = 1;
5254 : break;
5255 :
5256 3811264 : case IF_THEN_ELSE:
5257 3811264 : {
5258 3811264 : unsigned HOST_WIDE_INT nonzero_true
5259 3811264 : = cached_nonzero_bits (XEXP (x, 1), mode,
5260 : known_x, known_mode, known_ret);
5261 :
5262 : /* Don't call nonzero_bits for the second time if it cannot change
5263 : anything. */
5264 3811264 : if ((nonzero & nonzero_true) != nonzero)
5265 3142622 : nonzero &= nonzero_true
5266 1571311 : | cached_nonzero_bits (XEXP (x, 2), mode,
5267 : known_x, known_mode, known_ret);
5268 : }
5269 : break;
5270 :
5271 : default:
5272 : break;
5273 : }
5274 :
5275 : return nonzero;
5276 : }
5277 :
5278 : /* See the macro definition above. */
5279 : #undef cached_num_sign_bit_copies
5280 :
5281 :
5282 : /* Return true if num_sign_bit_copies1 might recurse into both operands
5283 : of X. */
5284 :
5285 : static inline bool
5286 442265761 : num_sign_bit_copies_binary_arith_p (const_rtx x)
5287 : {
5288 442265761 : if (!ARITHMETIC_P (x))
5289 : return false;
5290 80203262 : switch (GET_CODE (x))
5291 : {
5292 : case IOR:
5293 : case AND:
5294 : case XOR:
5295 : case SMIN:
5296 : case SMAX:
5297 : case UMIN:
5298 : case UMAX:
5299 : case PLUS:
5300 : case MINUS:
5301 : case MULT:
5302 : return true;
5303 : default:
5304 : return false;
5305 : }
5306 : }
5307 :
5308 : /* The function cached_num_sign_bit_copies is a wrapper around
5309 : num_sign_bit_copies1. It avoids exponential behavior in
5310 : num_sign_bit_copies1 when X has identical subexpressions on the
5311 : first or the second level. */
5312 :
5313 : static unsigned int
5314 346118776 : cached_num_sign_bit_copies (const_rtx x, scalar_int_mode mode,
5315 : const_rtx known_x, machine_mode known_mode,
5316 : unsigned int known_ret)
5317 : {
5318 346118776 : if (x == known_x && mode == known_mode)
5319 : return known_ret;
5320 :
5321 : /* Try to find identical subexpressions. If found call
5322 : num_sign_bit_copies1 on X with the subexpressions as KNOWN_X and
5323 : the precomputed value for the subexpression as KNOWN_RET. */
5324 :
5325 344384547 : if (num_sign_bit_copies_binary_arith_p (x))
5326 : {
5327 49325530 : rtx x0 = XEXP (x, 0);
5328 49325530 : rtx x1 = XEXP (x, 1);
5329 :
5330 : /* Check the first level. */
5331 49325530 : if (x0 == x1)
5332 17157 : return
5333 17157 : num_sign_bit_copies1 (x, mode, x0, mode,
5334 : cached_num_sign_bit_copies (x0, mode, known_x,
5335 : known_mode,
5336 17157 : known_ret));
5337 :
5338 : /* Check the second level. */
5339 49308373 : if (num_sign_bit_copies_binary_arith_p (x0)
5340 49308373 : && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
5341 735532 : return
5342 735532 : num_sign_bit_copies1 (x, mode, x1, mode,
5343 : cached_num_sign_bit_copies (x1, mode, known_x,
5344 : known_mode,
5345 735532 : known_ret));
5346 :
5347 48572841 : if (num_sign_bit_copies_binary_arith_p (x1)
5348 48572841 : && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
5349 402 : return
5350 402 : num_sign_bit_copies1 (x, mode, x0, mode,
5351 : cached_num_sign_bit_copies (x0, mode, known_x,
5352 : known_mode,
5353 402 : known_ret));
5354 : }
5355 :
5356 343631456 : return num_sign_bit_copies1 (x, mode, known_x, known_mode, known_ret);
5357 : }
5358 :
5359 : /* Return the number of bits at the high-order end of X that are known to
5360 : be equal to the sign bit. X will be used in mode MODE. The returned
5361 : value will always be between 1 and the number of bits in MODE. */
5362 :
5363 : static unsigned int
5364 344384547 : num_sign_bit_copies1 (const_rtx x, scalar_int_mode mode, const_rtx known_x,
5365 : machine_mode known_mode,
5366 : unsigned int known_ret)
5367 : {
5368 344384547 : enum rtx_code code = GET_CODE (x);
5369 344384547 : unsigned int bitwidth = GET_MODE_PRECISION (mode);
5370 344384547 : int num0, num1, result;
5371 344384547 : unsigned HOST_WIDE_INT nonzero;
5372 :
5373 344384547 : if (CONST_INT_P (x))
5374 : {
5375 : /* If the constant is negative, take its 1's complement and remask.
5376 : Then see how many zero bits we have. */
5377 44247660 : nonzero = UINTVAL (x) & GET_MODE_MASK (mode);
5378 44247660 : if (bitwidth <= HOST_BITS_PER_WIDE_INT
5379 43944259 : && (nonzero & (HOST_WIDE_INT_1U << (bitwidth - 1))) != 0)
5380 19440083 : nonzero = (~nonzero) & GET_MODE_MASK (mode);
5381 :
5382 44247660 : return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
5383 : }
5384 :
5385 300136887 : scalar_int_mode xmode, inner_mode;
5386 505276801 : if (!is_a <scalar_int_mode> (GET_MODE (x), &xmode))
5387 : return 1;
5388 :
5389 299808323 : unsigned int xmode_width = GET_MODE_PRECISION (xmode);
5390 :
5391 : /* For a smaller mode, just ignore the high bits. */
5392 299808323 : if (bitwidth < xmode_width)
5393 : {
5394 37232 : num0 = cached_num_sign_bit_copies (x, xmode,
5395 : known_x, known_mode, known_ret);
5396 37232 : return MAX (1, num0 - (int) (xmode_width - bitwidth));
5397 : }
5398 :
5399 299771091 : if (bitwidth > xmode_width)
5400 : {
5401 : /* If this machine does not do all register operations on the entire
5402 : register and MODE is wider than the mode of X, we can say nothing
5403 : at all about the high-order bits. We extend this reasoning to RISC
5404 : machines for operations that might not operate on full registers. */
5405 : if (!(WORD_REGISTER_OPERATIONS && word_register_operation_p (x)))
5406 : return 1;
5407 :
5408 : /* Likewise on machines that do, if the mode of the object is smaller
5409 : than a word and loads of that size don't sign extend, we can say
5410 : nothing about the high order bits. */
5411 : if (xmode_width < BITS_PER_WORD
5412 : && load_extend_op (xmode) != SIGN_EXTEND)
5413 : return 1;
5414 : }
5415 :
5416 : /* Please keep num_sign_bit_copies_binary_arith_p above in sync with
5417 : the code in the switch below. */
5418 299771079 : switch (code)
5419 : {
5420 156943918 : case REG:
5421 :
5422 : #if defined(POINTERS_EXTEND_UNSIGNED)
5423 : /* If pointers extend signed and this is a pointer in Pmode, say that
5424 : all the bits above ptr_mode are known to be sign bit copies. */
5425 : /* As we do not know which address space the pointer is referring to,
5426 : we can do this only if the target does not support different pointer
5427 : or address modes depending on the address space. */
5428 156943918 : if (target_default_pointer_address_modes_p ()
5429 : && ! POINTERS_EXTEND_UNSIGNED && xmode == Pmode
5430 : && mode == Pmode && REG_POINTER (x)
5431 : && !targetm.have_ptr_extend ())
5432 : return GET_MODE_PRECISION (Pmode) - GET_MODE_PRECISION (ptr_mode) + 1;
5433 : #endif
5434 :
5435 156943918 : {
5436 156943918 : unsigned int copies_for_hook = 1, copies = 1;
5437 156943918 : rtx new_rtx = rtl_hooks.reg_num_sign_bit_copies (x, xmode, mode,
5438 : &copies_for_hook);
5439 :
5440 156943918 : if (new_rtx)
5441 5 : copies = cached_num_sign_bit_copies (new_rtx, mode, known_x,
5442 : known_mode, known_ret);
5443 :
5444 156943918 : if (copies > 1 || copies_for_hook > 1)
5445 22396613 : return MAX (copies, copies_for_hook);
5446 :
5447 : /* Else, use nonzero_bits to guess num_sign_bit_copies (see below). */
5448 : }
5449 134547305 : break;
5450 :
5451 : case MEM:
5452 : /* Some RISC machines sign-extend all loads of smaller than a word. */
5453 : if (load_extend_op (xmode) == SIGN_EXTEND)
5454 : return MAX (1, ((int) bitwidth - (int) xmode_width + 1));
5455 : break;
5456 :
5457 19816190 : case SUBREG:
5458 : /* If this is a SUBREG for a promoted object that is sign-extended
5459 : and we are looking at it in a wider mode, we know that at least the
5460 : high-order bits are known to be sign bit copies. */
5461 :
5462 19816190 : if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_SIGNED_P (x))
5463 : {
5464 0 : num0 = cached_num_sign_bit_copies (SUBREG_REG (x), mode,
5465 : known_x, known_mode, known_ret);
5466 0 : return MAX ((int) bitwidth - (int) xmode_width + 1, num0);
5467 : }
5468 :
5469 19816190 : if (is_a <scalar_int_mode> (GET_MODE (SUBREG_REG (x)), &inner_mode))
5470 : {
5471 : /* For a smaller object, just ignore the high bits. */
5472 19611521 : if (bitwidth <= GET_MODE_PRECISION (inner_mode))
5473 : {
5474 6148008 : num0 = cached_num_sign_bit_copies (SUBREG_REG (x), inner_mode,
5475 : known_x, known_mode,
5476 : known_ret);
5477 6148008 : return MAX (1, num0 - (int) (GET_MODE_PRECISION (inner_mode)
5478 : - bitwidth));
5479 : }
5480 :
5481 : /* For paradoxical SUBREGs on machines where all register operations
5482 : affect the entire register, just look inside. Note that we are
5483 : passing MODE to the recursive call, so the number of sign bit
5484 : copies will remain relative to that mode, not the inner mode.
5485 :
5486 : This works only if loads sign extend. Otherwise, if we get a
5487 : reload for the inner part, it may be loaded from the stack, and
5488 : then we lose all sign bit copies that existed before the store
5489 : to the stack. */
5490 : if (WORD_REGISTER_OPERATIONS
5491 : && load_extend_op (inner_mode) == SIGN_EXTEND
5492 : && paradoxical_subreg_p (x)
5493 : && MEM_P (SUBREG_REG (x)))
5494 : return cached_num_sign_bit_copies (SUBREG_REG (x), mode,
5495 : known_x, known_mode, known_ret);
5496 : }
5497 : break;
5498 :
5499 2751 : case SIGN_EXTRACT:
5500 2751 : if (CONST_INT_P (XEXP (x, 1)))
5501 2751 : return MAX (1, (int) bitwidth - INTVAL (XEXP (x, 1)));
5502 : break;
5503 :
5504 1876365 : case SIGN_EXTEND:
5505 1876365 : if (is_a <scalar_int_mode> (GET_MODE (XEXP (x, 0)), &inner_mode))
5506 1876365 : return (bitwidth - GET_MODE_PRECISION (inner_mode)
5507 1876365 : + cached_num_sign_bit_copies (XEXP (x, 0), inner_mode,
5508 1876365 : known_x, known_mode, known_ret));
5509 : break;
5510 :
5511 86 : case TRUNCATE:
5512 : /* For a smaller object, just ignore the high bits. */
5513 86 : inner_mode = as_a <scalar_int_mode> (GET_MODE (XEXP (x, 0)));
5514 86 : num0 = cached_num_sign_bit_copies (XEXP (x, 0), inner_mode,
5515 : known_x, known_mode, known_ret);
5516 86 : return MAX (1, (num0 - (int) (GET_MODE_PRECISION (inner_mode)
5517 : - bitwidth)));
5518 :
5519 1054513 : case NOT:
5520 1054513 : return cached_num_sign_bit_copies (XEXP (x, 0), mode,
5521 1054513 : known_x, known_mode, known_ret);
5522 :
5523 21186 : case ROTATE: case ROTATERT:
5524 : /* If we are rotating left by a number of bits less than the number
5525 : of sign bit copies, we can just subtract that amount from the
5526 : number. */
5527 21186 : if (CONST_INT_P (XEXP (x, 1))
5528 11800 : && INTVAL (XEXP (x, 1)) >= 0
5529 11797 : && INTVAL (XEXP (x, 1)) < (int) bitwidth)
5530 : {
5531 11797 : num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
5532 : known_x, known_mode, known_ret);
5533 11797 : return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
5534 : : (int) bitwidth - INTVAL (XEXP (x, 1))));
5535 : }
5536 : break;
5537 :
5538 757993 : case NEG:
5539 : /* In general, this subtracts one sign bit copy. But if the value
5540 : is known to be positive, the number of sign bit copies is the
5541 : same as that of the input. Finally, if the input has just one bit
5542 : that might be nonzero, all the bits are copies of the sign bit. */
5543 757993 : num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
5544 : known_x, known_mode, known_ret);
5545 757993 : if (bitwidth > HOST_BITS_PER_WIDE_INT)
5546 28354 : return num0 > 1 ? num0 - 1 : 1;
5547 :
5548 729639 : nonzero = nonzero_bits (XEXP (x, 0), mode);
5549 729639 : if (nonzero == 1)
5550 : return bitwidth;
5551 :
5552 337865 : if (num0 > 1
5553 87542 : && ((HOST_WIDE_INT_1U << (bitwidth - 1)) & nonzero))
5554 46016 : num0--;
5555 :
5556 337865 : return num0;
5557 :
5558 5731991 : case IOR: case AND: case XOR:
5559 5731991 : case SMIN: case SMAX: case UMIN: case UMAX:
5560 : /* Logical operations will preserve the number of sign-bit copies.
5561 : MIN and MAX operations always return one of the operands. */
5562 5731991 : num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
5563 : known_x, known_mode, known_ret);
5564 5731991 : num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
5565 : known_x, known_mode, known_ret);
5566 :
5567 : /* If num1 is clearing some of the top bits then regardless of
5568 : the other term, we are guaranteed to have at least that many
5569 : high-order zero bits. */
5570 5731991 : if (code == AND
5571 5731991 : && num1 > 1
5572 2288042 : && bitwidth <= HOST_BITS_PER_WIDE_INT
5573 2277768 : && CONST_INT_P (XEXP (x, 1))
5574 2079136 : && (UINTVAL (XEXP (x, 1))
5575 2079136 : & (HOST_WIDE_INT_1U << (bitwidth - 1))) == 0)
5576 : return num1;
5577 :
5578 : /* Similarly for IOR when setting high-order bits. */
5579 4215875 : if (code == IOR
5580 4215875 : && num1 > 1
5581 469167 : && bitwidth <= HOST_BITS_PER_WIDE_INT
5582 467620 : && CONST_INT_P (XEXP (x, 1))
5583 139477 : && (UINTVAL (XEXP (x, 1))
5584 139477 : & (HOST_WIDE_INT_1U << (bitwidth - 1))) != 0)
5585 : return num1;
5586 :
5587 4211750 : return MIN (num0, num1);
5588 :
5589 42310425 : case PLUS: case MINUS:
5590 : /* For addition and subtraction, we can have a 1-bit carry. However,
5591 : if we are subtracting 1 from a positive number, there will not
5592 : be such a carry. Furthermore, if the positive number is known to
5593 : be 0 or 1, we know the result is either -1 or 0. */
5594 :
5595 42310425 : if (code == PLUS && XEXP (x, 1) == constm1_rtx
5596 1325084 : && bitwidth <= HOST_BITS_PER_WIDE_INT)
5597 : {
5598 1320244 : nonzero = nonzero_bits (XEXP (x, 0), mode);
5599 1320244 : if (((HOST_WIDE_INT_1U << (bitwidth - 1)) & nonzero) == 0)
5600 109684 : return (nonzero == 1 || nonzero == 0 ? bitwidth
5601 103996 : : bitwidth - floor_log2 (nonzero) - 1);
5602 : }
5603 :
5604 42200741 : num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
5605 : known_x, known_mode, known_ret);
5606 42200741 : num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
5607 : known_x, known_mode, known_ret);
5608 42200741 : result = MAX (1, MIN (num0, num1) - 1);
5609 :
5610 42200741 : return result;
5611 :
5612 1282754 : case MULT:
5613 : /* The number of bits of the product is the sum of the number of
5614 : bits of both terms. However, unless one of the terms if known
5615 : to be positive, we must allow for an additional bit since negating
5616 : a negative number can remove one sign bit copy. */
5617 :
5618 1282754 : num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
5619 : known_x, known_mode, known_ret);
5620 1282754 : num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
5621 : known_x, known_mode, known_ret);
5622 :
5623 1282754 : result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
5624 1282754 : if (result > 0
5625 1282754 : && (bitwidth > HOST_BITS_PER_WIDE_INT
5626 331543 : || (((nonzero_bits (XEXP (x, 0), mode)
5627 331543 : & (HOST_WIDE_INT_1U << (bitwidth - 1))) != 0)
5628 181351 : && ((nonzero_bits (XEXP (x, 1), mode)
5629 : & (HOST_WIDE_INT_1U << (bitwidth - 1)))
5630 181351 : != 0))))
5631 30857 : result--;
5632 :
5633 1282754 : return MAX (1, result);
5634 :
5635 122108 : case UDIV:
5636 : /* The result must be <= the first operand. If the first operand
5637 : has the high bit set, we know nothing about the number of sign
5638 : bit copies. */
5639 122108 : if (bitwidth > HOST_BITS_PER_WIDE_INT)
5640 : return 1;
5641 122108 : else if ((nonzero_bits (XEXP (x, 0), mode)
5642 122108 : & (HOST_WIDE_INT_1U << (bitwidth - 1))) != 0)
5643 : return 1;
5644 : else
5645 22860 : return cached_num_sign_bit_copies (XEXP (x, 0), mode,
5646 22860 : known_x, known_mode, known_ret);
5647 :
5648 119176 : case UMOD:
5649 : /* The result must be <= the second operand. If the second operand
5650 : has (or just might have) the high bit set, we know nothing about
5651 : the number of sign bit copies. */
5652 119176 : if (bitwidth > HOST_BITS_PER_WIDE_INT)
5653 : return 1;
5654 119176 : else if ((nonzero_bits (XEXP (x, 1), mode)
5655 119176 : & (HOST_WIDE_INT_1U << (bitwidth - 1))) != 0)
5656 : return 1;
5657 : else
5658 31203 : return cached_num_sign_bit_copies (XEXP (x, 1), mode,
5659 31203 : known_x, known_mode, known_ret);
5660 :
5661 210919 : case DIV:
5662 : /* Similar to unsigned division, except that we have to worry about
5663 : the case where the divisor is negative, in which case we have
5664 : to add 1. */
5665 210919 : result = cached_num_sign_bit_copies (XEXP (x, 0), mode,
5666 : known_x, known_mode, known_ret);
5667 210919 : if (result > 1
5668 210919 : && (bitwidth > HOST_BITS_PER_WIDE_INT
5669 17993 : || (nonzero_bits (XEXP (x, 1), mode)
5670 17993 : & (HOST_WIDE_INT_1U << (bitwidth - 1))) != 0))
5671 15445 : result--;
5672 :
5673 210919 : return result;
5674 :
5675 132914 : case MOD:
5676 132914 : result = cached_num_sign_bit_copies (XEXP (x, 1), mode,
5677 : known_x, known_mode, known_ret);
5678 132914 : if (result > 1
5679 132914 : && (bitwidth > HOST_BITS_PER_WIDE_INT
5680 23334 : || (nonzero_bits (XEXP (x, 1), mode)
5681 23334 : & (HOST_WIDE_INT_1U << (bitwidth - 1))) != 0))
5682 10512 : result--;
5683 :
5684 132914 : return result;
5685 :
5686 933289 : case ASHIFTRT:
5687 : /* Shifts by a constant add to the number of bits equal to the
5688 : sign bit. */
5689 933289 : num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
5690 : known_x, known_mode, known_ret);
5691 933289 : if (CONST_INT_P (XEXP (x, 1))
5692 896796 : && INTVAL (XEXP (x, 1)) > 0
5693 896796 : && INTVAL (XEXP (x, 1)) < xmode_width)
5694 896796 : num0 = MIN ((int) bitwidth, num0 + INTVAL (XEXP (x, 1)));
5695 :
5696 933289 : return num0;
5697 :
5698 8890053 : case ASHIFT:
5699 : /* Left shifts destroy copies. */
5700 8890053 : if (!CONST_INT_P (XEXP (x, 1))
5701 8696134 : || INTVAL (XEXP (x, 1)) < 0
5702 8695990 : || INTVAL (XEXP (x, 1)) >= (int) bitwidth
5703 8695948 : || INTVAL (XEXP (x, 1)) >= xmode_width)
5704 : return 1;
5705 :
5706 8695948 : num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
5707 : known_x, known_mode, known_ret);
5708 8695948 : return MAX (1, num0 - INTVAL (XEXP (x, 1)));
5709 :
5710 763029 : case IF_THEN_ELSE:
5711 763029 : num0 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
5712 : known_x, known_mode, known_ret);
5713 763029 : num1 = cached_num_sign_bit_copies (XEXP (x, 2), mode,
5714 : known_x, known_mode, known_ret);
5715 763029 : return MIN (num0, num1);
5716 :
5717 2337074 : case EQ: case NE: case GE: case GT: case LE: case LT:
5718 2337074 : case UNEQ: case LTGT: case UNGE: case UNGT: case UNLE: case UNLT:
5719 2337074 : case GEU: case GTU: case LEU: case LTU:
5720 2337074 : case UNORDERED: case ORDERED:
5721 : /* If the constant is negative, take its 1's complement and remask.
5722 : Then see how many zero bits we have. */
5723 2337074 : nonzero = STORE_FLAG_VALUE;
5724 2337074 : if (bitwidth <= HOST_BITS_PER_WIDE_INT
5725 2337074 : && (nonzero & (HOST_WIDE_INT_1U << (bitwidth - 1))) != 0)
5726 0 : nonzero = (~nonzero) & GET_MODE_MASK (mode);
5727 :
5728 2337074 : return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
5729 :
5730 : default:
5731 : break;
5732 : }
5733 :
5734 : /* If we haven't been able to figure it out by one of the above rules,
5735 : see if some of the high-order bits are known to be zero. If so,
5736 : count those bits and return one less than that amount. If we can't
5737 : safely compute the mask for this mode, always return BITWIDTH. */
5738 :
5739 204689221 : bitwidth = GET_MODE_PRECISION (mode);
5740 204689221 : if (bitwidth > HOST_BITS_PER_WIDE_INT)
5741 : return 1;
5742 :
5743 198872627 : nonzero = nonzero_bits (x, mode);
5744 198872627 : return nonzero & (HOST_WIDE_INT_1U << (bitwidth - 1))
5745 203974426 : ? 1 : bitwidth - floor_log2 (nonzero) - 1;
5746 : }
5747 :
5748 : /* Calculate the rtx_cost of a single instruction pattern. A return value of
5749 : zero indicates an instruction pattern without a known cost. */
5750 :
5751 : int
5752 151032384 : pattern_cost (rtx pat, bool speed)
5753 : {
5754 151032384 : int i, cost;
5755 151032384 : rtx set;
5756 :
5757 : /* Extract the single set rtx from the instruction pattern. We
5758 : can't use single_set since we only have the pattern. We also
5759 : consider PARALLELs of a normal set and a single comparison. In
5760 : that case we use the cost of the non-comparison SET operation,
5761 : which is most-likely to be the real cost of this operation. */
5762 151032384 : if (GET_CODE (pat) == SET)
5763 : set = pat;
5764 67441048 : else if (GET_CODE (pat) == PARALLEL)
5765 : {
5766 : set = NULL_RTX;
5767 : rtx comparison = NULL_RTX;
5768 :
5769 46136684 : for (i = 0; i < XVECLEN (pat, 0); i++)
5770 : {
5771 31037563 : rtx x = XVECEXP (pat, 0, i);
5772 31037563 : if (GET_CODE (x) == SET)
5773 : {
5774 15694439 : if (GET_CODE (SET_SRC (x)) == COMPARE
5775 15419597 : || GET_MODE_CLASS (GET_MODE (SET_DEST (x))) == MODE_CC)
5776 : {
5777 341144 : if (comparison)
5778 : return 0;
5779 : comparison = x;
5780 : }
5781 : else
5782 : {
5783 15353295 : if (set)
5784 : return 0;
5785 : set = x;
5786 : }
5787 : }
5788 : }
5789 :
5790 15099121 : if (!set && comparison)
5791 : set = comparison;
5792 :
5793 14952685 : if (!set)
5794 : return 0;
5795 : }
5796 : else
5797 : return 0;
5798 :
5799 98617823 : cost = set_src_cost (SET_SRC (set), GET_MODE (SET_DEST (set)), speed);
5800 98617823 : return MAX (COSTS_N_INSNS (1), cost);
5801 : }
5802 :
5803 : /* Calculate the cost of a single instruction. A return value of zero
5804 : indicates an instruction pattern without a known cost. */
5805 :
5806 : int
5807 148912208 : insn_cost (rtx_insn *insn, bool speed)
5808 : {
5809 148912208 : if (targetm.insn_cost)
5810 148912208 : return targetm.insn_cost (insn, speed);
5811 :
5812 0 : return pattern_cost (PATTERN (insn), speed);
5813 : }
5814 :
5815 : /* Returns estimate on cost of computing SEQ. */
5816 :
5817 : unsigned
5818 2062670 : seq_cost (const rtx_insn *seq, bool speed)
5819 : {
5820 2062670 : unsigned cost = 0;
5821 2062670 : rtx set;
5822 :
5823 5406303 : for (; seq; seq = NEXT_INSN (seq))
5824 : {
5825 3343633 : set = single_set (seq);
5826 3343633 : if (set)
5827 3335269 : cost += set_rtx_cost (set, speed);
5828 8364 : else if (NONDEBUG_INSN_P (seq))
5829 : {
5830 8031 : int this_cost = insn_cost (const_cast<struct rtx_insn *> (seq),
5831 : speed);
5832 8031 : if (this_cost > 0)
5833 698 : cost += this_cost;
5834 : else
5835 7333 : cost++;
5836 : }
5837 : }
5838 :
5839 2062670 : return cost;
5840 : }
5841 :
5842 : /* Given an insn INSN and condition COND, return the condition in a
5843 : canonical form to simplify testing by callers. Specifically:
5844 :
5845 : (1) The code will always be a comparison operation (EQ, NE, GT, etc.).
5846 : (2) Both operands will be machine operands.
5847 : (3) If an operand is a constant, it will be the second operand.
5848 : (4) (LE x const) will be replaced with (LT x <const+1>) and similarly
5849 : for GE, GEU, and LEU.
5850 :
5851 : If the condition cannot be understood, or is an inequality floating-point
5852 : comparison which needs to be reversed, 0 will be returned.
5853 :
5854 : If REVERSE is nonzero, then reverse the condition prior to canonizing it.
5855 :
5856 : If EARLIEST is nonzero, it is a pointer to a place where the earliest
5857 : insn used in locating the condition was found. If a replacement test
5858 : of the condition is desired, it should be placed in front of that
5859 : insn and we will be sure that the inputs are still valid.
5860 :
5861 : If WANT_REG is nonzero, we wish the condition to be relative to that
5862 : register, if possible. Therefore, do not canonicalize the condition
5863 : further. If ALLOW_CC_MODE is nonzero, allow the condition returned
5864 : to be a compare to a CC mode register.
5865 :
5866 : If VALID_AT_INSN_P, the condition must be valid at both *EARLIEST
5867 : and at INSN. */
5868 :
5869 : rtx
5870 38135189 : canonicalize_condition (rtx_insn *insn, rtx cond, int reverse,
5871 : rtx_insn **earliest,
5872 : rtx want_reg, int allow_cc_mode, int valid_at_insn_p)
5873 : {
5874 38135189 : enum rtx_code code;
5875 38135189 : rtx_insn *prev = insn;
5876 38135189 : const_rtx set;
5877 38135189 : rtx tem;
5878 38135189 : rtx op0, op1;
5879 38135189 : int reverse_code = 0;
5880 38135189 : machine_mode mode;
5881 38135189 : basic_block bb = BLOCK_FOR_INSN (insn);
5882 :
5883 38135189 : code = GET_CODE (cond);
5884 38135189 : mode = GET_MODE (cond);
5885 38135189 : op0 = XEXP (cond, 0);
5886 38135189 : op1 = XEXP (cond, 1);
5887 :
5888 38135189 : if (reverse)
5889 1904718 : code = reversed_comparison_code (cond, insn);
5890 38135189 : if (code == UNKNOWN)
5891 : return 0;
5892 :
5893 38135189 : if (earliest)
5894 18304493 : *earliest = insn;
5895 :
5896 : /* If we are comparing a register with zero, see if the register is set
5897 : in the previous insn to a COMPARE or a comparison operation. Perform
5898 : the same tests as a function of STORE_FLAG_VALUE as find_comparison_args
5899 : in cse.cc */
5900 :
5901 80932161 : while ((GET_RTX_CLASS (code) == RTX_COMPARE
5902 : || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
5903 80932161 : && op1 == CONST0_RTX (GET_MODE (op0))
5904 141864700 : && op0 != want_reg)
5905 : {
5906 : /* Set nonzero when we find something of interest. */
5907 60932539 : rtx x = 0;
5908 :
5909 : /* If this is a COMPARE, pick up the two things being compared. */
5910 60932539 : if (GET_CODE (op0) == COMPARE)
5911 : {
5912 0 : op1 = XEXP (op0, 1);
5913 0 : op0 = XEXP (op0, 0);
5914 0 : continue;
5915 : }
5916 60932539 : else if (!REG_P (op0))
5917 : break;
5918 :
5919 : /* Go back to the previous insn. Stop if it is not an INSN. We also
5920 : stop if it isn't a single set or if it has a REG_INC note because
5921 : we don't want to bother dealing with it. */
5922 :
5923 55882356 : prev = prev_nonnote_nondebug_insn (prev);
5924 :
5925 55882356 : if (prev == 0
5926 55779309 : || !NONJUMP_INSN_P (prev)
5927 : || FIND_REG_INC_NOTE (prev, NULL_RTX)
5928 : /* In cfglayout mode, there do not have to be labels at the
5929 : beginning of a block, or jumps at the end, so the previous
5930 : conditions would not stop us when we reach bb boundary. */
5931 106515250 : || BLOCK_FOR_INSN (prev) != bb)
5932 : break;
5933 :
5934 50531626 : set = set_of (op0, prev);
5935 :
5936 50531626 : if (set
5937 50531626 : && (GET_CODE (set) != SET
5938 44006075 : || !rtx_equal_p (SET_DEST (set), op0)))
5939 : break;
5940 :
5941 : /* If this is setting OP0, get what it sets it to if it looks
5942 : relevant. */
5943 50431060 : if (set)
5944 : {
5945 43905509 : machine_mode inner_mode = GET_MODE (SET_DEST (set));
5946 : #ifdef FLOAT_STORE_FLAG_VALUE
5947 : REAL_VALUE_TYPE fsfv;
5948 : #endif
5949 :
5950 : /* ??? We may not combine comparisons done in a CCmode with
5951 : comparisons not done in a CCmode. This is to aid targets
5952 : like Alpha that have an IEEE compliant EQ instruction, and
5953 : a non-IEEE compliant BEQ instruction. The use of CCmode is
5954 : actually artificial, simply to prevent the combination, but
5955 : should not affect other platforms.
5956 :
5957 : However, we must allow VOIDmode comparisons to match either
5958 : CCmode or non-CCmode comparison, because some ports have
5959 : modeless comparisons inside branch patterns.
5960 :
5961 : ??? This mode check should perhaps look more like the mode check
5962 : in simplify_comparison in combine. */
5963 43905509 : if (((GET_MODE_CLASS (mode) == MODE_CC)
5964 43905509 : != (GET_MODE_CLASS (inner_mode) == MODE_CC))
5965 37475473 : && mode != VOIDmode
5966 0 : && inner_mode != VOIDmode)
5967 : break;
5968 43905509 : if (GET_CODE (SET_SRC (set)) == COMPARE
5969 43905509 : || (((code == NE
5970 4722242 : || (code == LT
5971 155628 : && val_signbit_known_set_p (inner_mode,
5972 : STORE_FLAG_VALUE))
5973 : #ifdef FLOAT_STORE_FLAG_VALUE
5974 : || (code == LT
5975 : && SCALAR_FLOAT_MODE_P (inner_mode)
5976 : && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode),
5977 : REAL_VALUE_NEGATIVE (fsfv)))
5978 : #endif
5979 : ))
5980 2819302 : && COMPARISON_P (SET_SRC (set))))
5981 36673104 : x = SET_SRC (set);
5982 7232405 : else if (((code == EQ
5983 3990314 : || (code == GE
5984 130023 : && val_signbit_known_set_p (inner_mode,
5985 : STORE_FLAG_VALUE))
5986 : #ifdef FLOAT_STORE_FLAG_VALUE
5987 : || (code == GE
5988 : && SCALAR_FLOAT_MODE_P (inner_mode)
5989 : && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode),
5990 : REAL_VALUE_NEGATIVE (fsfv)))
5991 : #endif
5992 : ))
5993 7232405 : && COMPARISON_P (SET_SRC (set)))
5994 : {
5995 : reverse_code = 1;
5996 : x = SET_SRC (set);
5997 : }
5998 6904590 : else if ((code == EQ || code == NE)
5999 5424439 : && GET_CODE (SET_SRC (set)) == XOR)
6000 : /* Handle sequences like:
6001 :
6002 : (set op0 (xor X Y))
6003 : ...(eq|ne op0 (const_int 0))...
6004 :
6005 : in which case:
6006 :
6007 : (eq op0 (const_int 0)) reduces to (eq X Y)
6008 : (ne op0 (const_int 0)) reduces to (ne X Y)
6009 :
6010 : This is the form used by MIPS16, for example. */
6011 : x = SET_SRC (set);
6012 : else
6013 : break;
6014 : }
6015 :
6016 6525551 : else if (reg_set_p (op0, prev))
6017 : /* If this sets OP0, but not directly, we have to give up. */
6018 : break;
6019 :
6020 43534597 : if (x)
6021 : {
6022 : /* If the caller is expecting the condition to be valid at INSN,
6023 : make sure X doesn't change before INSN. */
6024 37009046 : if (valid_at_insn_p)
6025 23517180 : if (modified_in_p (x, prev) || modified_between_p (x, prev, insn))
6026 : break;
6027 36271421 : if (COMPARISON_P (x))
6028 130631 : code = GET_CODE (x);
6029 36271421 : if (reverse_code)
6030 : {
6031 84846 : code = reversed_comparison_code (x, prev);
6032 84846 : if (code == UNKNOWN)
6033 : return 0;
6034 : reverse_code = 0;
6035 : }
6036 :
6037 36271421 : op0 = XEXP (x, 0), op1 = XEXP (x, 1);
6038 36271421 : if (earliest)
6039 17659706 : *earliest = prev;
6040 : }
6041 : }
6042 :
6043 : /* If constant is first, put it last. */
6044 38135189 : if (CONSTANT_P (op0))
6045 21213 : code = swap_condition (code), tem = op0, op0 = op1, op1 = tem;
6046 :
6047 : /* If OP0 is the result of a comparison, we weren't able to find what
6048 : was really being compared, so fail. */
6049 38135189 : if (!allow_cc_mode
6050 20599577 : && GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC)
6051 : return 0;
6052 :
6053 : /* Canonicalize any ordered comparison with integers involving equality
6054 : if we can do computations in the relevant mode and we do not
6055 : overflow. */
6056 :
6057 36867985 : scalar_int_mode op0_mode;
6058 36867985 : if (CONST_INT_P (op1)
6059 24809501 : && is_a <scalar_int_mode> (GET_MODE (op0), &op0_mode)
6060 61039186 : && GET_MODE_PRECISION (op0_mode) <= HOST_BITS_PER_WIDE_INT)
6061 : {
6062 24141764 : HOST_WIDE_INT const_val = INTVAL (op1);
6063 24141764 : unsigned HOST_WIDE_INT uconst_val = const_val;
6064 24141764 : unsigned HOST_WIDE_INT max_val
6065 24141764 : = (unsigned HOST_WIDE_INT) GET_MODE_MASK (op0_mode);
6066 :
6067 24141764 : switch (code)
6068 : {
6069 910234 : case LE:
6070 910234 : if ((unsigned HOST_WIDE_INT) const_val != max_val >> 1)
6071 910234 : code = LT, op1 = gen_int_mode (const_val + 1, op0_mode);
6072 : break;
6073 :
6074 : /* When cross-compiling, const_val might be sign-extended from
6075 : BITS_PER_WORD to HOST_BITS_PER_WIDE_INT */
6076 301395 : case GE:
6077 301395 : if ((const_val & max_val)
6078 301395 : != (HOST_WIDE_INT_1U << (GET_MODE_PRECISION (op0_mode) - 1)))
6079 301395 : code = GT, op1 = gen_int_mode (const_val - 1, op0_mode);
6080 : break;
6081 :
6082 676367 : case LEU:
6083 676367 : if (uconst_val < max_val)
6084 666919 : code = LTU, op1 = gen_int_mode (uconst_val + 1, op0_mode);
6085 : break;
6086 :
6087 121492 : case GEU:
6088 121492 : if (uconst_val != 0)
6089 121487 : code = GTU, op1 = gen_int_mode (uconst_val - 1, op0_mode);
6090 : break;
6091 :
6092 : default:
6093 : break;
6094 : }
6095 : }
6096 :
6097 : /* We promised to return a comparison. */
6098 36867985 : rtx ret = gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
6099 36867985 : if (COMPARISON_P (ret))
6100 : return ret;
6101 : return 0;
6102 : }
6103 :
6104 : /* Given a jump insn JUMP, return the condition that will cause it to branch
6105 : to its JUMP_LABEL. If the condition cannot be understood, or is an
6106 : inequality floating-point comparison which needs to be reversed, 0 will
6107 : be returned.
6108 :
6109 : If EARLIEST is nonzero, it is a pointer to a place where the earliest
6110 : insn used in locating the condition was found. If a replacement test
6111 : of the condition is desired, it should be placed in front of that
6112 : insn and we will be sure that the inputs are still valid. If EARLIEST
6113 : is null, the returned condition will be valid at INSN.
6114 :
6115 : If ALLOW_CC_MODE is nonzero, allow the condition returned to be a
6116 : compare CC mode register.
6117 :
6118 : VALID_AT_INSN_P is the same as for canonicalize_condition. */
6119 :
6120 : rtx
6121 37386221 : get_condition (rtx_insn *jump, rtx_insn **earliest, int allow_cc_mode,
6122 : int valid_at_insn_p)
6123 : {
6124 37386221 : rtx cond;
6125 37386221 : int reverse;
6126 37386221 : rtx set;
6127 :
6128 : /* If this is not a standard conditional jump, we can't parse it. */
6129 37386221 : if (!JUMP_P (jump)
6130 37386221 : || ! any_condjump_p (jump))
6131 3693788 : return 0;
6132 33692433 : set = pc_set (jump);
6133 :
6134 33692433 : cond = XEXP (SET_SRC (set), 0);
6135 :
6136 : /* If this branches to JUMP_LABEL when the condition is false, reverse
6137 : the condition. */
6138 33692433 : reverse
6139 67384866 : = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
6140 33692433 : && label_ref_label (XEXP (SET_SRC (set), 2)) == JUMP_LABEL (jump);
6141 :
6142 33692433 : return canonicalize_condition (jump, cond, reverse, earliest, NULL_RTX,
6143 33692433 : allow_cc_mode, valid_at_insn_p);
6144 : }
6145 :
6146 : /* Initialize the table NUM_SIGN_BIT_COPIES_IN_REP based on
6147 : TARGET_MODE_REP_EXTENDED.
6148 :
6149 : Note that we assume that the property of
6150 : TARGET_MODE_REP_EXTENDED(B, C) is sticky to the integral modes
6151 : narrower than mode B. I.e., if A is a mode narrower than B then in
6152 : order to be able to operate on it in mode B, mode A needs to
6153 : satisfy the requirements set by the representation of mode B. */
6154 :
6155 : static void
6156 278641 : init_num_sign_bit_copies_in_rep (void)
6157 : {
6158 278641 : opt_scalar_int_mode in_mode_iter;
6159 278641 : scalar_int_mode mode;
6160 :
6161 2229128 : FOR_EACH_MODE_IN_CLASS (in_mode_iter, MODE_INT)
6162 7801948 : FOR_EACH_MODE_UNTIL (mode, in_mode_iter.require ())
6163 : {
6164 5851461 : scalar_int_mode in_mode = in_mode_iter.require ();
6165 5851461 : scalar_int_mode i;
6166 :
6167 : /* Currently, it is assumed that TARGET_MODE_REP_EXTENDED
6168 : extends to the next widest mode. */
6169 5851461 : gcc_assert (targetm.mode_rep_extended (mode, in_mode) == UNKNOWN
6170 : || GET_MODE_WIDER_MODE (mode).require () == in_mode);
6171 :
6172 : /* We are in in_mode. Count how many bits outside of mode
6173 : have to be copies of the sign-bit. */
6174 21455357 : FOR_EACH_MODE (i, mode, in_mode)
6175 : {
6176 : /* This must always exist (for the last iteration it will be
6177 : IN_MODE). */
6178 15603896 : scalar_int_mode wider = GET_MODE_WIDER_MODE (i).require ();
6179 :
6180 15603896 : if (targetm.mode_rep_extended (i, wider) == SIGN_EXTEND
6181 : /* We can only check sign-bit copies starting from the
6182 : top-bit. In order to be able to check the bits we
6183 : have already seen we pretend that subsequent bits
6184 : have to be sign-bit copies too. */
6185 15603896 : || num_sign_bit_copies_in_rep [in_mode][mode])
6186 0 : num_sign_bit_copies_in_rep [in_mode][mode]
6187 0 : += GET_MODE_PRECISION (wider) - GET_MODE_PRECISION (i);
6188 : }
6189 : }
6190 278641 : }
6191 :
6192 : /* Suppose that truncation from the machine mode of X to MODE is not a
6193 : no-op. See if there is anything special about X so that we can
6194 : assume it already contains a truncated value of MODE. */
6195 :
6196 : bool
6197 0 : truncated_to_mode (machine_mode mode, const_rtx x)
6198 : {
6199 : /* This register has already been used in MODE without explicit
6200 : truncation. */
6201 0 : if (REG_P (x) && rtl_hooks.reg_truncated_to_mode (mode, x))
6202 : return true;
6203 :
6204 : /* This explicit TRUNCATE may be needed on targets that require
6205 : MODE to be suitably extended when stored in X. Targets such as
6206 : mips64 use (sign_extend:DI (truncate:SI (reg:DI x))) to perform
6207 : an explicit extension, avoiding use of (subreg:SI (reg:DI x))
6208 : which is assumed to already be extended. */
6209 0 : scalar_int_mode imode, omode;
6210 0 : if (is_a <scalar_int_mode> (mode, &imode)
6211 0 : && is_a <scalar_int_mode> (GET_MODE (x), &omode)
6212 0 : && targetm.mode_rep_extended (imode, omode) != UNKNOWN)
6213 : return false;
6214 :
6215 : /* See if we already satisfy the requirements of MODE. If yes we
6216 : can just switch to MODE. */
6217 0 : if (num_sign_bit_copies_in_rep[GET_MODE (x)][mode]
6218 0 : && (num_sign_bit_copies (x, GET_MODE (x))
6219 0 : >= num_sign_bit_copies_in_rep[GET_MODE (x)][mode] + 1))
6220 : return true;
6221 :
6222 : return false;
6223 : }
6224 :
6225 : /* Return true if RTX code CODE has a single sequence of zero or more
6226 : "e" operands and no rtvec operands. Initialize its rtx_all_subrtx_bounds
6227 : entry in that case. */
6228 :
6229 : static bool
6230 42910714 : setup_reg_subrtx_bounds (unsigned int code)
6231 : {
6232 42910714 : const char *format = GET_RTX_FORMAT ((enum rtx_code) code);
6233 42910714 : unsigned int i = 0;
6234 60465097 : for (; format[i] != 'e'; ++i)
6235 : {
6236 27028177 : if (!format[i])
6237 : /* No subrtxes. Leave start and count as 0. */
6238 : return true;
6239 19783511 : if (format[i] == 'E' || format[i] == 'V')
6240 : return false;
6241 : }
6242 :
6243 : /* Record the sequence of 'e's. */
6244 33436920 : rtx_all_subrtx_bounds[code].start = i;
6245 54334995 : do
6246 54334995 : ++i;
6247 54334995 : while (format[i] == 'e');
6248 33436920 : rtx_all_subrtx_bounds[code].count = i - rtx_all_subrtx_bounds[code].start;
6249 : /* rtl-iter.h relies on this. */
6250 33436920 : gcc_checking_assert (rtx_all_subrtx_bounds[code].count <= 3);
6251 :
6252 37337894 : for (; format[i]; ++i)
6253 5294179 : if (format[i] == 'E' || format[i] == 'V' || format[i] == 'e')
6254 : return false;
6255 :
6256 : return true;
6257 : }
6258 :
6259 : /* Initialize rtx_all_subrtx_bounds. */
6260 : void
6261 278641 : init_rtlanal (void)
6262 : {
6263 278641 : int i;
6264 43189355 : for (i = 0; i < NUM_RTX_CODE; i++)
6265 : {
6266 42910714 : if (!setup_reg_subrtx_bounds (i))
6267 3622333 : rtx_all_subrtx_bounds[i].count = UCHAR_MAX;
6268 42910714 : if (GET_RTX_CLASS (i) != RTX_CONST_OBJ)
6269 40124304 : rtx_nonconst_subrtx_bounds[i] = rtx_all_subrtx_bounds[i];
6270 : }
6271 :
6272 278641 : init_num_sign_bit_copies_in_rep ();
6273 278641 : }
6274 :
6275 : /* Check whether this is a constant pool constant. */
6276 : bool
6277 11659 : constant_pool_constant_p (rtx x)
6278 : {
6279 11659 : x = avoid_constant_pool_reference (x);
6280 11659 : return CONST_DOUBLE_P (x);
6281 : }
6282 :
6283 : /* If M is a bitmask that selects a field of low-order bits within an item but
6284 : not the entire word, return the length of the field. Return -1 otherwise.
6285 : M is used in machine mode MODE. */
6286 :
6287 : int
6288 8080 : low_bitmask_len (machine_mode mode, unsigned HOST_WIDE_INT m)
6289 : {
6290 8080 : if (mode != VOIDmode)
6291 : {
6292 8080 : if (!HWI_COMPUTABLE_MODE_P (mode))
6293 : return -1;
6294 8080 : m &= GET_MODE_MASK (mode);
6295 : }
6296 :
6297 8080 : return exact_log2 (m + 1);
6298 : }
6299 :
6300 : /* Return the mode of MEM's address. */
6301 :
6302 : scalar_int_mode
6303 179852298 : get_address_mode (rtx mem)
6304 : {
6305 179852298 : machine_mode mode;
6306 :
6307 179852298 : gcc_assert (MEM_P (mem));
6308 179852298 : mode = GET_MODE (XEXP (mem, 0));
6309 179852298 : if (mode != VOIDmode)
6310 179345139 : return as_a <scalar_int_mode> (mode);
6311 530802 : return targetm.addr_space.address_mode (MEM_ADDR_SPACE (mem));
6312 : }
6313 :
6314 : /* Split up a CONST_DOUBLE or integer constant rtx
6315 : into two rtx's for single words,
6316 : storing in *FIRST the word that comes first in memory in the target
6317 : and in *SECOND the other.
6318 :
6319 : TODO: This function needs to be rewritten to work on any size
6320 : integer. */
6321 :
6322 : void
6323 0 : split_double (rtx value, rtx *first, rtx *second)
6324 : {
6325 0 : if (CONST_INT_P (value))
6326 : {
6327 0 : if (HOST_BITS_PER_WIDE_INT >= (2 * BITS_PER_WORD))
6328 : {
6329 : /* In this case the CONST_INT holds both target words.
6330 : Extract the bits from it into two word-sized pieces.
6331 : Sign extend each half to HOST_WIDE_INT. */
6332 0 : unsigned HOST_WIDE_INT low, high;
6333 0 : unsigned HOST_WIDE_INT mask, sign_bit, sign_extend;
6334 0 : unsigned bits_per_word = BITS_PER_WORD;
6335 :
6336 : /* Set sign_bit to the most significant bit of a word. */
6337 0 : sign_bit = 1;
6338 0 : sign_bit <<= bits_per_word - 1;
6339 :
6340 : /* Set mask so that all bits of the word are set. We could
6341 : have used 1 << BITS_PER_WORD instead of basing the
6342 : calculation on sign_bit. However, on machines where
6343 : HOST_BITS_PER_WIDE_INT == BITS_PER_WORD, it could cause a
6344 : compiler warning, even though the code would never be
6345 : executed. */
6346 0 : mask = sign_bit << 1;
6347 0 : mask--;
6348 :
6349 : /* Set sign_extend as any remaining bits. */
6350 0 : sign_extend = ~mask;
6351 :
6352 : /* Pick the lower word and sign-extend it. */
6353 0 : low = INTVAL (value);
6354 0 : low &= mask;
6355 0 : if (low & sign_bit)
6356 0 : low |= sign_extend;
6357 :
6358 : /* Pick the higher word, shifted to the least significant
6359 : bits, and sign-extend it. */
6360 0 : high = INTVAL (value);
6361 0 : high >>= bits_per_word - 1;
6362 0 : high >>= 1;
6363 0 : high &= mask;
6364 0 : if (high & sign_bit)
6365 0 : high |= sign_extend;
6366 :
6367 : /* Store the words in the target machine order. */
6368 0 : if (WORDS_BIG_ENDIAN)
6369 : {
6370 : *first = GEN_INT (high);
6371 : *second = GEN_INT (low);
6372 : }
6373 : else
6374 : {
6375 0 : *first = GEN_INT (low);
6376 0 : *second = GEN_INT (high);
6377 : }
6378 : }
6379 : else
6380 : {
6381 : /* The rule for using CONST_INT for a wider mode
6382 : is that we regard the value as signed.
6383 : So sign-extend it. */
6384 0 : rtx high = (INTVAL (value) < 0 ? constm1_rtx : const0_rtx);
6385 0 : if (WORDS_BIG_ENDIAN)
6386 : {
6387 : *first = high;
6388 : *second = value;
6389 : }
6390 : else
6391 : {
6392 0 : *first = value;
6393 0 : *second = high;
6394 : }
6395 : }
6396 : }
6397 0 : else if (GET_CODE (value) == CONST_WIDE_INT)
6398 : {
6399 : /* All of this is scary code and needs to be converted to
6400 : properly work with any size integer. */
6401 0 : gcc_assert (CONST_WIDE_INT_NUNITS (value) == 2);
6402 0 : if (WORDS_BIG_ENDIAN)
6403 : {
6404 : *first = GEN_INT (CONST_WIDE_INT_ELT (value, 1));
6405 : *second = GEN_INT (CONST_WIDE_INT_ELT (value, 0));
6406 : }
6407 : else
6408 : {
6409 0 : *first = GEN_INT (CONST_WIDE_INT_ELT (value, 0));
6410 0 : *second = GEN_INT (CONST_WIDE_INT_ELT (value, 1));
6411 : }
6412 : }
6413 0 : else if (!CONST_DOUBLE_P (value))
6414 : {
6415 0 : if (WORDS_BIG_ENDIAN)
6416 : {
6417 : *first = const0_rtx;
6418 : *second = value;
6419 : }
6420 : else
6421 : {
6422 0 : *first = value;
6423 0 : *second = const0_rtx;
6424 : }
6425 : }
6426 0 : else if (GET_MODE (value) == VOIDmode
6427 : /* This is the old way we did CONST_DOUBLE integers. */
6428 0 : || GET_MODE_CLASS (GET_MODE (value)) == MODE_INT)
6429 : {
6430 : /* In an integer, the words are defined as most and least significant.
6431 : So order them by the target's convention. */
6432 0 : if (WORDS_BIG_ENDIAN)
6433 : {
6434 : *first = GEN_INT (CONST_DOUBLE_HIGH (value));
6435 : *second = GEN_INT (CONST_DOUBLE_LOW (value));
6436 : }
6437 : else
6438 : {
6439 0 : *first = GEN_INT (CONST_DOUBLE_LOW (value));
6440 0 : *second = GEN_INT (CONST_DOUBLE_HIGH (value));
6441 : }
6442 : }
6443 : else
6444 : {
6445 0 : long l[2];
6446 :
6447 : /* Note, this converts the REAL_VALUE_TYPE to the target's
6448 : format, splits up the floating point double and outputs
6449 : exactly 32 bits of it into each of l[0] and l[1] --
6450 : not necessarily BITS_PER_WORD bits. */
6451 0 : REAL_VALUE_TO_TARGET_DOUBLE (*CONST_DOUBLE_REAL_VALUE (value), l);
6452 :
6453 : /* If 32 bits is an entire word for the target, but not for the host,
6454 : then sign-extend on the host so that the number will look the same
6455 : way on the host that it would on the target. See for instance
6456 : simplify_unary_operation. The #if is needed to avoid compiler
6457 : warnings. */
6458 :
6459 : #if HOST_BITS_PER_LONG > 32
6460 0 : if (BITS_PER_WORD < HOST_BITS_PER_LONG && BITS_PER_WORD == 32)
6461 : {
6462 0 : if (l[0] & ((long) 1 << 31))
6463 0 : l[0] |= ((unsigned long) (-1) << 32);
6464 0 : if (l[1] & ((long) 1 << 31))
6465 0 : l[1] |= ((unsigned long) (-1) << 32);
6466 : }
6467 : #endif
6468 :
6469 0 : *first = GEN_INT (l[0]);
6470 0 : *second = GEN_INT (l[1]);
6471 : }
6472 0 : }
6473 :
6474 : /* Return true if X is a sign_extract or zero_extract from the least
6475 : significant bit. */
6476 :
6477 : static bool
6478 210886015 : lsb_bitfield_op_p (rtx x)
6479 : {
6480 0 : if (GET_RTX_CLASS (GET_CODE (x)) == RTX_BITFIELD_OPS)
6481 : {
6482 0 : machine_mode mode = GET_MODE (XEXP (x, 0));
6483 0 : HOST_WIDE_INT len = INTVAL (XEXP (x, 1));
6484 0 : HOST_WIDE_INT pos = INTVAL (XEXP (x, 2));
6485 0 : poly_int64 remaining_bits = GET_MODE_PRECISION (mode) - len;
6486 :
6487 0 : return known_eq (pos, BITS_BIG_ENDIAN ? remaining_bits : 0);
6488 : }
6489 : return false;
6490 : }
6491 :
6492 : /* Strip outer address "mutations" from LOC and return a pointer to the
6493 : inner value. If OUTER_CODE is nonnull, store the code of the innermost
6494 : stripped expression there.
6495 :
6496 : "Mutations" either convert between modes or apply some kind of
6497 : extension, truncation or alignment. */
6498 :
6499 : rtx *
6500 210883904 : strip_address_mutations (rtx *loc, enum rtx_code *outer_code)
6501 : {
6502 210957336 : for (;;)
6503 : {
6504 210957336 : enum rtx_code code = GET_CODE (*loc);
6505 210957336 : if (GET_RTX_CLASS (code) == RTX_UNARY)
6506 : /* Things like SIGN_EXTEND, ZERO_EXTEND and TRUNCATE can be
6507 : used to convert between pointer sizes. */
6508 71321 : loc = &XEXP (*loc, 0);
6509 210886015 : else if (lsb_bitfield_op_p (*loc))
6510 : /* A [SIGN|ZERO]_EXTRACT from the least significant bit effectively
6511 : acts as a combined truncation and extension. */
6512 0 : loc = &XEXP (*loc, 0);
6513 210886015 : else if (code == AND && CONST_INT_P (XEXP (*loc, 1)))
6514 : /* (and ... (const_int -X)) is used to align to X bytes. */
6515 2099 : loc = &XEXP (*loc, 0);
6516 210883916 : else if (code == SUBREG
6517 30153 : && (!OBJECT_P (SUBREG_REG (*loc))
6518 30151 : || CONSTANT_P (SUBREG_REG (*loc)))
6519 210883928 : && subreg_lowpart_p (*loc))
6520 : /* (subreg (operator ...) ...) inside AND is used for mode
6521 : conversion too. It is also used for load-address operations
6522 : in which an extension can be done for free, such as:
6523 :
6524 : (zero_extend:DI
6525 : (subreg:SI (plus:DI (reg:DI R) (symbol_ref:DI "foo") 0)))
6526 :
6527 : The latter usage also covers subregs of plain "displacements",
6528 : such as:
6529 :
6530 : (zero_extend:DI (subreg:SI (symbol_ref:DI "foo") 0))
6531 :
6532 : The inner address should then be the symbol_ref, not the subreg,
6533 : similarly to the plus case above.
6534 :
6535 : In contrast, the subreg in:
6536 :
6537 : (zero_extend:DI (subreg:SI (reg:DI R) 0))
6538 :
6539 : should be treated as the base, since it should be replaced by
6540 : an SImode hard register during register allocation. */
6541 12 : loc = &SUBREG_REG (*loc);
6542 : else
6543 210883904 : return loc;
6544 73432 : if (outer_code)
6545 73432 : *outer_code = code;
6546 : }
6547 : }
6548 :
6549 : /* Return true if CODE applies some kind of scale. The scaled value is
6550 : is the first operand and the scale is the second. */
6551 :
6552 : static bool
6553 66090766 : binary_scale_code_p (enum rtx_code code)
6554 : {
6555 66090766 : return (code == MULT
6556 66090766 : || code == ASHIFT
6557 : /* Needed by ARM targets. */
6558 : || code == ASHIFTRT
6559 : || code == LSHIFTRT
6560 63902793 : || code == ROTATE
6561 63902793 : || code == ROTATERT);
6562 : }
6563 :
6564 : /* Return true if X appears to be a valid base or index term. */
6565 : static bool
6566 132181532 : valid_base_or_index_term_p (rtx x)
6567 : {
6568 132181532 : if (GET_CODE (x) == SCRATCH)
6569 : return true;
6570 : /* Handle what appear to be eliminated forms of a register. If we reach
6571 : here, the elimination occurs outside of the outermost PLUS tree,
6572 : and so the elimination offset cannot be treated as a displacement
6573 : of the main address. Instead, we need to treat the whole PLUS as
6574 : the base or index term. The address can only be made legitimate by
6575 : reloading the PLUS. */
6576 132181532 : if (GET_CODE (x) == PLUS && CONST_SCALAR_INT_P (XEXP (x, 1)))
6577 0 : x = XEXP (x, 0);
6578 132181532 : if (GET_CODE (x) == SUBREG)
6579 51685 : x = SUBREG_REG (x);
6580 132181532 : return REG_P (x) || MEM_P (x);
6581 : }
6582 :
6583 : /* If *INNER can be interpreted as a base, return a pointer to the inner term
6584 : (see address_info). Return null otherwise. */
6585 :
6586 : static rtx *
6587 66090766 : get_base_term (rtx *inner)
6588 : {
6589 66090766 : if (GET_CODE (*inner) == LO_SUM)
6590 0 : inner = strip_address_mutations (&XEXP (*inner, 0));
6591 66090766 : if (valid_base_or_index_term_p (*inner))
6592 63902793 : return inner;
6593 : return 0;
6594 : }
6595 :
6596 : /* If *INNER can be interpreted as an index, return a pointer to the inner term
6597 : (see address_info). Return null otherwise. */
6598 :
6599 : static rtx *
6600 66090766 : get_index_term (rtx *inner)
6601 : {
6602 : /* At present, only constant scales are allowed. */
6603 66090766 : if (binary_scale_code_p (GET_CODE (*inner)) && CONSTANT_P (XEXP (*inner, 1)))
6604 2187973 : inner = strip_address_mutations (&XEXP (*inner, 0));
6605 66090766 : if (valid_base_or_index_term_p (*inner))
6606 66090766 : return inner;
6607 : return 0;
6608 : }
6609 :
6610 : /* Set the segment part of address INFO to LOC, given that INNER is the
6611 : unmutated value. */
6612 :
6613 : static void
6614 17 : set_address_segment (struct address_info *info, rtx *loc, rtx *inner)
6615 : {
6616 17 : gcc_assert (!info->segment);
6617 17 : info->segment = loc;
6618 17 : info->segment_term = inner;
6619 17 : }
6620 :
6621 : /* Set the base part of address INFO to LOC, given that INNER is the
6622 : unmutated value. */
6623 :
6624 : static void
6625 64446866 : set_address_base (struct address_info *info, rtx *loc, rtx *inner)
6626 : {
6627 64446866 : gcc_assert (!info->base);
6628 64446866 : info->base = loc;
6629 64446866 : info->base_term = inner;
6630 64446866 : }
6631 :
6632 : /* Set the index part of address INFO to LOC, given that INNER is the
6633 : unmutated value. */
6634 :
6635 : static void
6636 3726378 : set_address_index (struct address_info *info, rtx *loc, rtx *inner)
6637 : {
6638 3726378 : gcc_assert (!info->index);
6639 3726378 : info->index = loc;
6640 3726378 : info->index_term = inner;
6641 3726378 : }
6642 :
6643 : /* Set the displacement part of address INFO to LOC, given that INNER
6644 : is the constant term. */
6645 :
6646 : static void
6647 65626602 : set_address_disp (struct address_info *info, rtx *loc, rtx *inner)
6648 : {
6649 65626602 : gcc_assert (!info->disp);
6650 65626602 : info->disp = loc;
6651 65626602 : info->disp_term = inner;
6652 65626602 : }
6653 :
6654 : /* INFO->INNER describes a {PRE,POST}_{INC,DEC} address. Set up the
6655 : rest of INFO accordingly. */
6656 :
6657 : static void
6658 1979133 : decompose_incdec_address (struct address_info *info)
6659 : {
6660 1979133 : info->autoinc_p = true;
6661 :
6662 1979133 : rtx *base = &XEXP (*info->inner, 0);
6663 1979133 : set_address_base (info, base, base);
6664 1979133 : gcc_checking_assert (info->base == info->base_term);
6665 :
6666 : /* These addresses are only valid when the size of the addressed
6667 : value is known. */
6668 1979133 : gcc_checking_assert (info->mode != VOIDmode);
6669 1979133 : }
6670 :
6671 : /* INFO->INNER describes a {PRE,POST}_MODIFY address. Set up the rest
6672 : of INFO accordingly. */
6673 :
6674 : static void
6675 103345 : decompose_automod_address (struct address_info *info)
6676 : {
6677 103345 : info->autoinc_p = true;
6678 :
6679 103345 : rtx *base = &XEXP (*info->inner, 0);
6680 103345 : set_address_base (info, base, base);
6681 103345 : gcc_checking_assert (info->base == info->base_term);
6682 :
6683 103345 : rtx plus = XEXP (*info->inner, 1);
6684 103345 : gcc_assert (GET_CODE (plus) == PLUS);
6685 :
6686 103345 : info->base_term2 = &XEXP (plus, 0);
6687 103345 : gcc_checking_assert (rtx_equal_p (*info->base_term, *info->base_term2));
6688 :
6689 103345 : rtx *step = &XEXP (plus, 1);
6690 103345 : rtx *inner_step = strip_address_mutations (step);
6691 103345 : if (CONSTANT_P (*inner_step))
6692 103345 : set_address_disp (info, step, inner_step);
6693 : else
6694 0 : set_address_index (info, step, inner_step);
6695 103345 : }
6696 :
6697 : /* Treat *LOC as a tree of PLUS operands and store pointers to the summed
6698 : values in [PTR, END). Return a pointer to the end of the used array. */
6699 :
6700 : static rtx **
6701 131614040 : extract_plus_operands (rtx *loc, rtx **ptr, rtx **end)
6702 : {
6703 188332012 : rtx x = *loc;
6704 188332012 : if (GET_CODE (x) == PLUS)
6705 : {
6706 56717972 : ptr = extract_plus_operands (&XEXP (x, 0), ptr, end);
6707 56717972 : ptr = extract_plus_operands (&XEXP (x, 1), ptr, end);
6708 : }
6709 : else
6710 : {
6711 131614040 : gcc_assert (ptr != end);
6712 131614040 : *ptr++ = loc;
6713 : }
6714 131614040 : return ptr;
6715 : }
6716 :
6717 : /* Evaluate the likelihood of X being a base or index value, returning
6718 : positive if it is likely to be a base, negative if it is likely to be
6719 : an index, and 0 if we can't tell. Make the magnitude of the return
6720 : value reflect the amount of confidence we have in the answer.
6721 :
6722 : MODE, AS, OUTER_CODE and INDEX_CODE are as for ok_for_base_p_1. */
6723 :
6724 : static int
6725 3076810 : baseness (rtx x, machine_mode mode, addr_space_t as,
6726 : enum rtx_code outer_code, enum rtx_code index_code)
6727 : {
6728 : /* Believe *_POINTER unless the address shape requires otherwise. */
6729 3076810 : if (REG_P (x) && REG_POINTER (x))
6730 : return 2;
6731 1756892 : if (MEM_P (x) && MEM_POINTER (x))
6732 : return 2;
6733 :
6734 1756892 : if (REG_P (x) && HARD_REGISTER_P (x))
6735 : {
6736 : /* X is a hard register. If it only fits one of the base
6737 : or index classes, choose that interpretation. */
6738 12 : int regno = REGNO (x);
6739 12 : bool base_p = ok_for_base_p_1 (regno, mode, as, outer_code, index_code);
6740 12 : bool index_p = REGNO_OK_FOR_INDEX_P (regno);
6741 12 : if (base_p != index_p)
6742 0 : return base_p ? 1 : -1;
6743 : }
6744 : return 0;
6745 : }
6746 :
6747 : /* INFO->INNER describes a normal, non-automodified address.
6748 : Fill in the rest of INFO accordingly. */
6749 :
6750 : static void
6751 74896068 : decompose_normal_address (struct address_info *info)
6752 : {
6753 : /* Treat the address as the sum of up to four values. */
6754 74896068 : rtx *ops[4];
6755 74896068 : size_t n_ops = extract_plus_operands (info->inner, ops,
6756 74896068 : ops + ARRAY_SIZE (ops)) - ops;
6757 :
6758 : /* If there is more than one component, any base component is in a PLUS. */
6759 74896068 : if (n_ops > 1)
6760 55228282 : info->base_outer_code = PLUS;
6761 :
6762 : /* Try to classify each sum operand now. Leave those that could be
6763 : either a base or an index in OPS. */
6764 : rtx *inner_ops[4];
6765 : size_t out = 0;
6766 206510108 : for (size_t in = 0; in < n_ops; ++in)
6767 : {
6768 131614040 : rtx *loc = ops[in];
6769 131614040 : rtx *inner = strip_address_mutations (loc);
6770 131614040 : if (CONSTANT_P (*inner))
6771 65523257 : set_address_disp (info, loc, inner);
6772 66090783 : else if (GET_CODE (*inner) == UNSPEC)
6773 17 : set_address_segment (info, loc, inner);
6774 : else
6775 : {
6776 : /* The only other possibilities are a base or an index. */
6777 66090766 : rtx *base_term = get_base_term (inner);
6778 66090766 : rtx *index_term = get_index_term (inner);
6779 66090766 : gcc_assert (base_term || index_term);
6780 66090766 : if (!base_term)
6781 2187973 : set_address_index (info, loc, index_term);
6782 63902793 : else if (!index_term)
6783 0 : set_address_base (info, loc, base_term);
6784 : else
6785 : {
6786 63902793 : gcc_assert (base_term == index_term);
6787 63902793 : ops[out] = loc;
6788 63902793 : inner_ops[out] = base_term;
6789 63902793 : ++out;
6790 : }
6791 : }
6792 : }
6793 :
6794 : /* Classify the remaining OPS members as bases and indexes. */
6795 74896068 : if (out == 1)
6796 : {
6797 : /* If we haven't seen a base or an index yet, assume that this is
6798 : the base. If we were confident that another term was the base
6799 : or index, treat the remaining operand as the other kind. */
6800 60825983 : if (!info->base)
6801 60825983 : set_address_base (info, ops[0], inner_ops[0]);
6802 : else
6803 0 : set_address_index (info, ops[0], inner_ops[0]);
6804 : }
6805 14070085 : else if (out == 2)
6806 : {
6807 1538405 : auto address_mode = targetm.addr_space.address_mode (info->as);
6808 1538405 : rtx inner_op0 = *inner_ops[0];
6809 1538405 : rtx inner_op1 = *inner_ops[1];
6810 1538405 : int base;
6811 : /* If one inner operand has the expected mode for a base and the other
6812 : doesn't, assume that the other one is the index. This is useful
6813 : for addresses such as:
6814 :
6815 : (plus (zero_extend X) Y)
6816 :
6817 : zero_extend is not in itself enough to assume an index, since bases
6818 : can be zero-extended on POINTERS_EXTEND_UNSIGNED targets. But if
6819 : Y has address mode and X doesn't, there should be little doubt that
6820 : Y is the base. */
6821 1538405 : if (GET_MODE (inner_op0) == address_mode
6822 1538405 : && GET_MODE (inner_op1) != address_mode)
6823 : base = 0;
6824 1538405 : else if (GET_MODE (inner_op1) == address_mode
6825 1538405 : && GET_MODE (inner_op0) != address_mode)
6826 : base = 1;
6827 : /* In the event of a tie, assume the base comes first. */
6828 1538405 : else if (baseness (inner_op0, info->mode, info->as, PLUS,
6829 1538405 : GET_CODE (*ops[1]))
6830 1538405 : >= baseness (inner_op1, info->mode, info->as, PLUS,
6831 1538405 : GET_CODE (*ops[0])))
6832 : base = 0;
6833 : else
6834 7734 : base = 1;
6835 1538405 : set_address_base (info, ops[base], inner_ops[base]);
6836 1538405 : set_address_index (info, ops[1 - base], inner_ops[1 - base]);
6837 : }
6838 : else
6839 12531680 : gcc_assert (out == 0);
6840 74896068 : }
6841 :
6842 : /* Describe address *LOC in *INFO. MODE is the mode of the addressed value,
6843 : or VOIDmode if not known. AS is the address space associated with LOC.
6844 : OUTER_CODE is MEM if *LOC is a MEM address and ADDRESS otherwise. */
6845 :
6846 : void
6847 76978546 : decompose_address (struct address_info *info, rtx *loc, machine_mode mode,
6848 : addr_space_t as, enum rtx_code outer_code)
6849 : {
6850 76978546 : memset (info, 0, sizeof (*info));
6851 76978546 : info->mode = mode;
6852 76978546 : info->as = as;
6853 76978546 : info->addr_outer_code = outer_code;
6854 76978546 : info->outer = loc;
6855 76978546 : info->inner = strip_address_mutations (loc, &outer_code);
6856 76978546 : info->base_outer_code = outer_code;
6857 76978546 : switch (GET_CODE (*info->inner))
6858 : {
6859 1979133 : case PRE_DEC:
6860 1979133 : case PRE_INC:
6861 1979133 : case POST_DEC:
6862 1979133 : case POST_INC:
6863 1979133 : decompose_incdec_address (info);
6864 1979133 : break;
6865 :
6866 103345 : case PRE_MODIFY:
6867 103345 : case POST_MODIFY:
6868 103345 : decompose_automod_address (info);
6869 103345 : break;
6870 :
6871 74896068 : default:
6872 74896068 : decompose_normal_address (info);
6873 74896068 : break;
6874 : }
6875 76978546 : }
6876 :
6877 : /* Describe address operand LOC in INFO. */
6878 :
6879 : void
6880 3436221 : decompose_lea_address (struct address_info *info, rtx *loc)
6881 : {
6882 3436221 : decompose_address (info, loc, VOIDmode, ADDR_SPACE_GENERIC, ADDRESS);
6883 3436221 : }
6884 :
6885 : /* Describe the address of MEM X in INFO. */
6886 :
6887 : void
6888 73529212 : decompose_mem_address (struct address_info *info, rtx x)
6889 : {
6890 73529212 : gcc_assert (MEM_P (x));
6891 73529212 : decompose_address (info, &XEXP (x, 0), GET_MODE (x),
6892 73529212 : MEM_ADDR_SPACE (x), MEM);
6893 73529212 : }
6894 :
6895 : /* Update INFO after a change to the address it describes. */
6896 :
6897 : void
6898 13113 : update_address (struct address_info *info)
6899 : {
6900 13113 : decompose_address (info, info->outer, info->mode, info->as,
6901 : info->addr_outer_code);
6902 13113 : }
6903 :
6904 : /* Return the scale applied to *INFO->INDEX_TERM, or 0 if the index is
6905 : more complicated than that. */
6906 :
6907 : HOST_WIDE_INT
6908 0 : get_index_scale (const struct address_info *info)
6909 : {
6910 0 : rtx index = *info->index;
6911 0 : if (GET_CODE (index) == MULT
6912 0 : && CONST_INT_P (XEXP (index, 1))
6913 0 : && info->index_term == &XEXP (index, 0))
6914 0 : return INTVAL (XEXP (index, 1));
6915 :
6916 0 : if (GET_CODE (index) == ASHIFT
6917 0 : && CONST_INT_P (XEXP (index, 1))
6918 0 : && info->index_term == &XEXP (index, 0))
6919 0 : return HOST_WIDE_INT_1 << INTVAL (XEXP (index, 1));
6920 :
6921 0 : if (info->index == info->index_term)
6922 0 : return 1;
6923 :
6924 : return 0;
6925 : }
6926 :
6927 : /* Return the "index code" of INFO, in the form required by
6928 : ok_for_base_p_1. */
6929 :
6930 : enum rtx_code
6931 33193900 : get_index_code (const struct address_info *info)
6932 : {
6933 33193900 : if (info->index)
6934 1518452 : return GET_CODE (*info->index);
6935 :
6936 31675448 : if (info->disp)
6937 25840036 : return GET_CODE (*info->disp);
6938 :
6939 : return SCRATCH;
6940 : }
6941 :
6942 : /* Return true if RTL X contains a SYMBOL_REF. */
6943 :
6944 : bool
6945 749908 : contains_symbol_ref_p (const_rtx x)
6946 : {
6947 749908 : subrtx_iterator::array_type array;
6948 3095402 : FOR_EACH_SUBRTX (iter, array, x, ALL)
6949 2422872 : if (SYMBOL_REF_P (*iter))
6950 77378 : return true;
6951 :
6952 672530 : return false;
6953 749908 : }
6954 :
6955 : /* Return true if RTL X contains a SYMBOL_REF or LABEL_REF. */
6956 :
6957 : bool
6958 362243 : contains_symbolic_reference_p (const_rtx x)
6959 : {
6960 362243 : subrtx_iterator::array_type array;
6961 836848 : FOR_EACH_SUBRTX (iter, array, x, ALL)
6962 479158 : if (SYMBOL_REF_P (*iter) || GET_CODE (*iter) == LABEL_REF)
6963 4553 : return true;
6964 :
6965 357690 : return false;
6966 362243 : }
6967 :
6968 : /* Return true if RTL X contains a constant pool address. */
6969 :
6970 : bool
6971 0 : contains_constant_pool_address_p (const_rtx x)
6972 : {
6973 0 : subrtx_iterator::array_type array;
6974 0 : FOR_EACH_SUBRTX (iter, array, x, ALL)
6975 0 : if (SYMBOL_REF_P (*iter) && CONSTANT_POOL_ADDRESS_P (*iter))
6976 0 : return true;
6977 :
6978 0 : return false;
6979 0 : }
6980 :
6981 :
6982 : /* Return true if X contains a thread-local symbol. */
6983 :
6984 : bool
6985 0 : tls_referenced_p (const_rtx x)
6986 : {
6987 0 : if (!targetm.have_tls)
6988 : return false;
6989 :
6990 0 : subrtx_iterator::array_type array;
6991 0 : FOR_EACH_SUBRTX (iter, array, x, ALL)
6992 0 : if (GET_CODE (*iter) == SYMBOL_REF && SYMBOL_REF_TLS_MODEL (*iter) != 0)
6993 0 : return true;
6994 0 : return false;
6995 0 : }
6996 :
6997 : /* Process recursively X of INSN and add REG_INC notes if necessary. */
6998 : void
6999 0 : add_auto_inc_notes (rtx_insn *insn, rtx x)
7000 : {
7001 0 : enum rtx_code code = GET_CODE (x);
7002 0 : const char *fmt;
7003 0 : int i, j;
7004 :
7005 0 : if (code == MEM && auto_inc_p (XEXP (x, 0)))
7006 : {
7007 0 : add_reg_note (insn, REG_INC, XEXP (XEXP (x, 0), 0));
7008 0 : return;
7009 : }
7010 :
7011 : /* Scan all X sub-expressions. */
7012 0 : fmt = GET_RTX_FORMAT (code);
7013 0 : for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7014 : {
7015 0 : if (fmt[i] == 'e')
7016 0 : add_auto_inc_notes (insn, XEXP (x, i));
7017 0 : else if (fmt[i] == 'E')
7018 0 : for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7019 0 : add_auto_inc_notes (insn, XVECEXP (x, i, j));
7020 : }
7021 : }
7022 :
7023 : /* Return true if INSN is the second element of a pair of macro-fused
7024 : single_sets, both of which having the same register output as another. */
7025 : bool
7026 67477693 : single_output_fused_pair_p (rtx_insn *insn)
7027 : {
7028 67477693 : rtx set, prev_set;
7029 67477693 : rtx_insn *prev;
7030 :
7031 67477693 : return INSN_P (insn)
7032 67477693 : && SCHED_GROUP_P (insn)
7033 4125978 : && (prev = prev_nonnote_nondebug_insn (insn))
7034 4125978 : && (set = single_set (insn)) != NULL_RTX
7035 4125978 : && (prev_set = single_set (prev))
7036 : != NULL_RTX
7037 4125834 : && REG_P (SET_DEST (set))
7038 0 : && REG_P (SET_DEST (prev_set))
7039 67477693 : && (!reload_completed
7040 0 : || REGNO (SET_DEST (set)) == REGNO (SET_DEST (prev_set)));
7041 : }
7042 :
7043 : /* Return true if X is register asm. */
7044 :
7045 : bool
7046 18330738 : register_asm_p (const_rtx x)
7047 : {
7048 18330738 : return (REG_P (x)
7049 18330738 : && REG_EXPR (x) != NULL_TREE
7050 8968543 : && HAS_DECL_ASSEMBLER_NAME_P (REG_EXPR (x))
7051 2807540 : && DECL_ASSEMBLER_NAME_SET_P (REG_EXPR (x))
7052 18377607 : && DECL_REGISTER (REG_EXPR (x)));
7053 : }
7054 :
7055 : /* Return true if, for all OP of mode OP_MODE:
7056 :
7057 : (vec_select:RESULT_MODE OP SEL)
7058 :
7059 : is equivalent to the highpart RESULT_MODE of OP. */
7060 :
7061 : bool
7062 0 : vec_series_highpart_p (machine_mode result_mode, machine_mode op_mode, rtx sel)
7063 : {
7064 0 : int nunits;
7065 0 : if (GET_MODE_NUNITS (op_mode).is_constant (&nunits)
7066 0 : && targetm.can_change_mode_class (op_mode, result_mode, ALL_REGS))
7067 : {
7068 0 : int offset = BYTES_BIG_ENDIAN ? 0 : nunits - XVECLEN (sel, 0);
7069 0 : return rtvec_series_p (XVEC (sel, 0), offset);
7070 : }
7071 : return false;
7072 : }
7073 :
7074 : /* Return true if, for all OP of mode OP_MODE:
7075 :
7076 : (vec_select:RESULT_MODE OP SEL)
7077 :
7078 : is equivalent to the lowpart RESULT_MODE of OP. */
7079 :
7080 : bool
7081 5152544 : vec_series_lowpart_p (machine_mode result_mode, machine_mode op_mode, rtx sel)
7082 : {
7083 5152544 : int nunits;
7084 5152544 : if (GET_MODE_NUNITS (op_mode).is_constant (&nunits)
7085 5152544 : && targetm.can_change_mode_class (op_mode, result_mode, ALL_REGS))
7086 : {
7087 652676 : int offset = BYTES_BIG_ENDIAN ? nunits - XVECLEN (sel, 0) : 0;
7088 652676 : return rtvec_series_p (XVEC (sel, 0), offset);
7089 : }
7090 : return false;
7091 : }
7092 :
7093 : /* Return true if X contains a paradoxical subreg. */
7094 :
7095 : bool
7096 1184654 : contains_paradoxical_subreg_p (rtx x)
7097 : {
7098 1184654 : subrtx_var_iterator::array_type array;
7099 5021943 : FOR_EACH_SUBRTX_VAR (iter, array, x, NONCONST)
7100 : {
7101 3887782 : x = *iter;
7102 3887782 : if (SUBREG_P (x) && paradoxical_subreg_p (x))
7103 50493 : return true;
7104 : }
7105 1134161 : return false;
7106 1184654 : }
|